Remove history use_include_order configuration option (#86365)
parent 332d3e0f19
commit 0b0e977ce9
homeassistant/components/history/__init__.py

@@ -34,13 +34,9 @@ from homeassistant.helpers.typing import ConfigType
 import homeassistant.util.dt as dt_util
 
 from . import websocket_api
-from .const import (
-    DOMAIN,
-    HISTORY_ENTITIES_FILTER,
-    HISTORY_FILTERS,
-    HISTORY_USE_INCLUDE_ORDER,
-)
+from .const import DOMAIN
 from .helpers import entities_may_have_state_changes_after
+from .models import HistoryConfig
 
 _LOGGER = logging.getLogger(__name__)
@@ -48,8 +44,11 @@ CONF_ORDER = "use_include_order"
 
 CONFIG_SCHEMA = vol.Schema(
     {
-        DOMAIN: INCLUDE_EXCLUDE_BASE_FILTER_SCHEMA.extend(
-            {vol.Optional(CONF_ORDER, default=False): cv.boolean}
-        )
+        DOMAIN: vol.All(
+            cv.deprecated(CONF_ORDER),
+            INCLUDE_EXCLUDE_BASE_FILTER_SCHEMA.extend(
+                {vol.Optional(CONF_ORDER, default=False): cv.boolean}
+            ),
+        )
     },
     extra=vol.ALLOW_EXTRA,
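The schema change above keeps use_include_order parseable while flagging it: wrapping the filter schema in vol.All with cv.deprecated(CONF_ORDER) means existing configurations still validate but log a deprecation warning. Below is a minimal standalone sketch of that warn-but-accept pattern, using plain voluptuous and a simplified stand-in for Home Assistant's cv.deprecated helper (the stand-in and its warning text are illustrative only, not the real helper).

# Sketch of the deprecation-wrapper pattern, with a simplified stand-in
# for homeassistant.helpers.config_validation.deprecated.
import logging

import voluptuous as vol

logging.basicConfig(level=logging.WARNING)
_LOGGER = logging.getLogger(__name__)

CONF_ORDER = "use_include_order"


def deprecated(key: str):
    """Return a validator that warns when `key` is present but still accepts it."""

    def validator(config: dict) -> dict:
        if key in config:
            _LOGGER.warning("The '%s' option is deprecated, please remove it", key)
        return config

    return validator


CONFIG_SCHEMA = vol.Schema(
    {
        "history": vol.All(
            deprecated(CONF_ORDER),
            vol.Schema({vol.Optional(CONF_ORDER, default=False): bool}),
        )
    },
    extra=vol.ALLOW_EXTRA,
)

# The old key still validates, but now emits a warning first.
print(CONFIG_SCHEMA({"history": {CONF_ORDER: True}}))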
@@ -67,18 +66,14 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
     possible_merged_entities_filter = convert_include_exclude_filter(merged_filter)
 
+    sqlalchemy_filter = None
+    entity_filter = None
     if not possible_merged_entities_filter.empty_filter:
-        hass.data[
-            HISTORY_FILTERS
-        ] = filters = sqlalchemy_filter_from_include_exclude_conf(conf)
-        hass.data[HISTORY_ENTITIES_FILTER] = possible_merged_entities_filter
-    else:
-        hass.data[HISTORY_FILTERS] = filters = None
-        hass.data[HISTORY_ENTITIES_FILTER] = None
+        sqlalchemy_filter = sqlalchemy_filter_from_include_exclude_conf(conf)
+        entity_filter = possible_merged_entities_filter
 
-    hass.data[HISTORY_USE_INCLUDE_ORDER] = use_include_order = conf.get(CONF_ORDER)
-
-    hass.http.register_view(HistoryPeriodView(filters, use_include_order))
+    hass.data[DOMAIN] = HistoryConfig(sqlalchemy_filter, entity_filter)
+    hass.http.register_view(HistoryPeriodView(sqlalchemy_filter))
     frontend.async_register_built_in_panel(hass, "history", "history", "hass:chart-box")
     websocket_api.async_setup(hass)
     return True
@@ -91,10 +86,9 @@ class HistoryPeriodView(HomeAssistantView):
     name = "api:history:view-period"
     extra_urls = ["/api/history/period/{datetime}"]
 
-    def __init__(self, filters: Filters | None, use_include_order: bool) -> None:
+    def __init__(self, filters: Filters | None) -> None:
         """Initialize the history period view."""
         self.filters = filters
-        self.use_include_order = use_include_order
 
     async def get(
         self, request: web.Request, datetime: str | None = None
@@ -194,15 +188,4 @@ class HistoryPeriodView(HomeAssistantView):
             "Extracted %d states in %fs", sum(map(len, states.values())), elapsed
         )
 
-        # Optionally reorder the result to respect the ordering given
-        # by any entities explicitly included in the configuration.
-        if not self.filters or not self.use_include_order:
-            return self.json(list(states.values()))
-
-        sorted_result = [
-            states.pop(order_entity)
-            for order_entity in self.filters.included_entities
-            if order_entity in states
-        ]
-        sorted_result.extend(list(states.values()))
-        return self.json(sorted_result)
+        return self.json(list(states.values()))
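For reference, the block removed above is what use_include_order actually did on the HTTP API: entities explicitly listed in the include filter were moved to the front of the response, in configuration order, with everything else appended afterwards. A self-contained sketch of that reordering, with made-up entity ids and state lists standing in for the real data:

# Each entity id maps to its list of states; included entities are pulled to
# the front in the order they were listed in the configuration.
states = {
    "sensor.one": ["state_a"],
    "sensor.two": ["state_b"],
    "sensor.three": ["state_c"],
    "sensor.four": ["state_d"],
}
included_entities = ["sensor.two", "sensor.four", "sensor.one"]

sorted_result = [
    states.pop(order_entity)
    for order_entity in included_entities
    if order_entity in states
]
sorted_result.extend(states.values())  # remaining entities keep their order

print(sorted_result)  # [['state_b'], ['state_d'], ['state_a'], ['state_c']]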
homeassistant/components/history/const.py

@@ -1,9 +1,7 @@
 """History integration constants."""
 
 DOMAIN = "history"
-HISTORY_FILTERS = "history_filters"
-HISTORY_ENTITIES_FILTER = "history_entities_filter"
-HISTORY_USE_INCLUDE_ORDER = "history_use_include_order"
 
 EVENT_COALESCE_TIME = 0.35
 
 MAX_PENDING_HISTORY_STATES = 2048
homeassistant/components/history/models.py (new file)

@@ -0,0 +1,15 @@
+"""Models for the history integration."""
+from __future__ import annotations
+
+from dataclasses import dataclass
+
+from homeassistant.components.recorder.filters import Filters
+from homeassistant.helpers.entityfilter import EntityFilter
+
+
+@dataclass
+class HistoryConfig:
+    """Configuration for the history integration."""
+
+    sqlalchemy_filter: Filters | None = None
+    entity_filter: EntityFilter | None = None
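HistoryConfig collapses what were three untyped hass.data keys (HISTORY_FILTERS, HISTORY_ENTITIES_FILTER, HISTORY_USE_INCLUDE_ORDER) into a single typed object stored under DOMAIN. A minimal sketch of the store-and-retrieve pattern, with a plain dict standing in for hass.data and object standing in for the real filter types:

from __future__ import annotations

from dataclasses import dataclass

DOMAIN = "history"


@dataclass
class HistoryConfig:
    """Configuration for the history integration."""

    sqlalchemy_filter: object | None = None
    entity_filter: object | None = None


hass_data: dict[str, object] = {}  # stand-in for hass.data

# async_setup stores one typed object...
hass_data[DOMAIN] = HistoryConfig(sqlalchemy_filter=None, entity_filter=None)

# ...and each consumer does one typed lookup instead of three key reads.
history_config = hass_data[DOMAIN]
assert isinstance(history_config, HistoryConfig)
print(history_config.sqlalchemy_filter, history_config.entity_filter)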
homeassistant/components/history/websocket_api.py

@@ -38,14 +38,9 @@ from homeassistant.helpers.event import (
 from homeassistant.helpers.json import JSON_DUMP
 import homeassistant.util.dt as dt_util
 
-from .const import (
-    EVENT_COALESCE_TIME,
-    HISTORY_ENTITIES_FILTER,
-    HISTORY_FILTERS,
-    HISTORY_USE_INCLUDE_ORDER,
-    MAX_PENDING_HISTORY_STATES,
-)
+from .const import DOMAIN, EVENT_COALESCE_TIME, MAX_PENDING_HISTORY_STATES
 from .helpers import entities_may_have_state_changes_after
+from .models import HistoryConfig
 
 _LOGGER = logging.getLogger(__name__)
@@ -75,38 +70,27 @@ def _ws_get_significant_states(
     end_time: dt | None,
     entity_ids: list[str] | None,
     filters: Filters | None,
-    use_include_order: bool | None,
     include_start_time_state: bool,
     significant_changes_only: bool,
     minimal_response: bool,
     no_attributes: bool,
 ) -> str:
     """Fetch history significant_states and convert them to json in the executor."""
-    states = history.get_significant_states(
-        hass,
-        start_time,
-        end_time,
-        entity_ids,
-        filters,
-        include_start_time_state,
-        significant_changes_only,
-        minimal_response,
-        no_attributes,
-        True,
-    )
-
-    if not use_include_order or not filters:
-        return JSON_DUMP(messages.result_message(msg_id, states))
-
     return JSON_DUMP(
         messages.result_message(
             msg_id,
-            {
-                order_entity: states.pop(order_entity)
-                for order_entity in filters.included_entities
-                if order_entity in states
-            }
-            | states,
+            history.get_significant_states(
+                hass,
+                start_time,
+                end_time,
+                entity_ids,
+                filters,
+                include_start_time_state,
+                significant_changes_only,
+                minimal_response,
+                no_attributes,
+                True,
+            ),
         )
     )
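The websocket path implemented the same ordering feature with a dict trick rather than a list: popping the included entities into a new dict and merging the leftovers with the | union operator (Python 3.9+) yields a dict whose insertion order puts the included entities first, which is the order JSON_DUMP serializes. A standalone sketch of the now-removed trick, with made-up data:

# Pop included entities into a fresh dict, then union in whatever remains;
# dicts preserve insertion order, so included entities serialize first.
states = {"sensor.one": 1, "sensor.two": 2, "sensor.three": 3}
included_entities = ["sensor.two", "sensor.one"]

reordered = {
    order_entity: states.pop(order_entity)
    for order_entity in included_entities
    if order_entity in states
} | states  # the comprehension runs first, so `states` here holds the rest

print(list(reordered))  # ['sensor.two', 'sensor.one', 'sensor.three']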
@@ -166,6 +150,7 @@ async def ws_get_history_during_period(
 
     significant_changes_only = msg["significant_changes_only"]
     minimal_response = msg["minimal_response"]
+    history_config: HistoryConfig = hass.data[DOMAIN]
 
     connection.send_message(
         await get_instance(hass).async_add_executor_job(
@@ -175,8 +160,7 @@ async def ws_get_history_during_period(
             start_time,
             end_time,
             entity_ids,
-            hass.data[HISTORY_FILTERS],
-            hass.data[HISTORY_USE_INCLUDE_ORDER],
+            history_config.sqlalchemy_filter,
             include_start_time_state,
             significant_changes_only,
             minimal_response,
@@ -413,9 +397,11 @@ async def ws_stream(
     utc_now = dt_util.utcnow()
     filters: Filters | None = None
     entities_filter: EntityFilter | None = None
 
     if not entity_ids:
-        filters = hass.data[HISTORY_FILTERS]
-        entities_filter = hass.data[HISTORY_ENTITIES_FILTER]
+        history_config: HistoryConfig = hass.data[DOMAIN]
+        filters = history_config.sqlalchemy_filter
+        entities_filter = history_config.entity_filter
 
     if start_time := dt_util.parse_datetime(start_time_str):
         start_time = dt_util.as_utc(start_time)
tests/components/history/test_init.py

@@ -1295,59 +1295,3 @@ async def test_history_during_period_bad_end_time(recorder_mock, hass, hass_ws_client):
     response = await client.receive_json()
     assert not response["success"]
     assert response["error"]["code"] == "invalid_end_time"
-
-
-async def test_history_during_period_with_use_include_order(
-    recorder_mock, hass, hass_ws_client
-):
-    """Test history_during_period."""
-    now = dt_util.utcnow()
-    sort_order = ["sensor.two", "sensor.four", "sensor.one"]
-    await async_setup_component(
-        hass,
-        "history",
-        {
-            history.DOMAIN: {
-                history.CONF_ORDER: True,
-                CONF_INCLUDE: {
-                    CONF_ENTITIES: sort_order,
-                    CONF_DOMAINS: ["sensor"],
-                },
-            }
-        },
-    )
-    await async_setup_component(hass, "sensor", {})
-    await async_recorder_block_till_done(hass)
-    hass.states.async_set("sensor.one", "on", attributes={"any": "attr"})
-    await async_recorder_block_till_done(hass)
-    hass.states.async_set("sensor.two", "off", attributes={"any": "attr"})
-    await async_recorder_block_till_done(hass)
-    hass.states.async_set("sensor.three", "off", attributes={"any": "changed"})
-    await async_recorder_block_till_done(hass)
-    hass.states.async_set("sensor.four", "off", attributes={"any": "again"})
-    await async_recorder_block_till_done(hass)
-    hass.states.async_set("switch.excluded", "off", attributes={"any": "again"})
-    await async_wait_recording_done(hass)
-
-    await async_wait_recording_done(hass)
-
-    client = await hass_ws_client()
-    await client.send_json(
-        {
-            "id": 1,
-            "type": "history/history_during_period",
-            "start_time": now.isoformat(),
-            "include_start_time_state": True,
-            "significant_changes_only": False,
-            "no_attributes": True,
-            "minimal_response": True,
-        }
-    )
-    response = await client.receive_json()
-    assert response["success"]
-    assert response["id"] == 1
-
-    assert list(response["result"]) == [
-        *sort_order,
-        "sensor.three",
-    ]
|
@ -446,62 +446,6 @@ async def test_history_during_period_bad_end_time(recorder_mock, hass, hass_ws_c
|
|||
assert response["error"]["code"] == "invalid_end_time"
|
||||
|
||||
|
||||
async def test_history_during_period_with_use_include_order(
|
||||
recorder_mock, hass, hass_ws_client
|
||||
):
|
||||
"""Test history_during_period."""
|
||||
now = dt_util.utcnow()
|
||||
sort_order = ["sensor.two", "sensor.four", "sensor.one"]
|
||||
await async_setup_component(
|
||||
hass,
|
||||
"history",
|
||||
{
|
||||
history.DOMAIN: {
|
||||
history.CONF_ORDER: True,
|
||||
CONF_INCLUDE: {
|
||||
CONF_ENTITIES: sort_order,
|
||||
CONF_DOMAINS: ["sensor"],
|
||||
},
|
||||
}
|
||||
},
|
||||
)
|
||||
await async_setup_component(hass, "sensor", {})
|
||||
await async_recorder_block_till_done(hass)
|
||||
hass.states.async_set("sensor.one", "on", attributes={"any": "attr"})
|
||||
await async_recorder_block_till_done(hass)
|
||||
hass.states.async_set("sensor.two", "off", attributes={"any": "attr"})
|
||||
await async_recorder_block_till_done(hass)
|
||||
hass.states.async_set("sensor.three", "off", attributes={"any": "changed"})
|
||||
await async_recorder_block_till_done(hass)
|
||||
hass.states.async_set("sensor.four", "off", attributes={"any": "again"})
|
||||
await async_recorder_block_till_done(hass)
|
||||
hass.states.async_set("switch.excluded", "off", attributes={"any": "again"})
|
||||
await async_wait_recording_done(hass)
|
||||
|
||||
await async_wait_recording_done(hass)
|
||||
|
||||
client = await hass_ws_client()
|
||||
await client.send_json(
|
||||
{
|
||||
"id": 1,
|
||||
"type": "history/history_during_period",
|
||||
"start_time": now.isoformat(),
|
||||
"include_start_time_state": True,
|
||||
"significant_changes_only": False,
|
||||
"no_attributes": True,
|
||||
"minimal_response": True,
|
||||
}
|
||||
)
|
||||
response = await client.receive_json()
|
||||
assert response["success"]
|
||||
assert response["id"] == 1
|
||||
|
||||
assert list(response["result"]) == [
|
||||
*sort_order,
|
||||
"sensor.three",
|
||||
]
|
||||
|
||||
|
||||
async def test_history_stream_historical_only(recorder_mock, hass, hass_ws_client):
|
||||
"""Test history stream."""
|
||||
now = dt_util.utcnow()
|
||||
|
|