Reduce logbook websocket payload size and parse json attributes via the DBM (#71895)
parent 37f81b261d
commit 089eb9960a
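In outline, with a toy schema (table and column names invented here, not the recorder's real models): the attributes stay serialized in the database, but a cast plus SQLAlchemy's JSON operators let the SELECT return just the one key the logbook needs, so Python never parses or forwards the rest of the blob. A runnable sketch of the idea:

    from sqlalchemy import (
        JSON, Column, Integer, MetaData, Table, Text, create_engine, select, type_coerce
    )

    metadata = MetaData()
    states = Table(
        "states_demo",
        metadata,
        Column("state_id", Integer, primary_key=True),
        Column("entity_id", Text),
        Column("shared_attrs", Text),  # attributes stored as a serialized JSON string
    )

    engine = create_engine("sqlite://")
    metadata.create_all(engine)

    with engine.begin() as conn:
        conn.execute(
            states.insert(),
            [{"entity_id": "light.porch", "shared_attrs": '{"friendly_name": "Porch", "icon": "mdi:lamp"}'}],
        )
        # The JSON path extraction runs in SQL (json_extract on SQLite), so only
        # the icon string crosses the wire, never the whole attributes blob.
        attrs_json = type_coerce(states.c.shared_attrs.cast(Text), JSON(none_as_null=True))
        stmt = select(states.c.entity_id, attrs_json["icon"].as_string().label("icon"))
        print(conn.execute(stmt).all())  # [('light.porch', 'mdi:lamp')]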
@@ -40,7 +40,6 @@ from homeassistant.const import (
     ATTR_SERVICE,
     EVENT_CALL_SERVICE,
     EVENT_LOGBOOK_ENTRY,
-    EVENT_STATE_CHANGED,
 )
 from homeassistant.core import (
     Context,

@@ -65,14 +64,12 @@ from homeassistant.helpers.typing import ConfigType
 from homeassistant.loader import bind_hass
 import homeassistant.util.dt as dt_util

-from .queries import statement_for_request
+from .queries import PSUEDO_EVENT_STATE_CHANGED, statement_for_request

 _LOGGER = logging.getLogger(__name__)

-FRIENDLY_NAME_JSON_EXTRACT = re.compile('"friendly_name": ?"([^"]+)"')
 ENTITY_ID_JSON_EXTRACT = re.compile('"entity_id": ?"([^"]+)"')
 DOMAIN_JSON_EXTRACT = re.compile('"domain": ?"([^"]+)"')
-ICON_JSON_EXTRACT = re.compile('"icon": ?"([^"]+)"')
 ATTR_MESSAGE = "message"

 DOMAIN = "logbook"
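The surviving extract patterns pull a single key out of an already-serialized JSON string without a full json.loads(); the friendly_name and icon variants go away because the query now surfaces those values as columns. A minimal sketch of the kept entity_id pattern, on an invented event_data payload:

    import re

    ENTITY_ID_JSON_EXTRACT = re.compile('"entity_id": ?"([^"]+)"')

    # An invented service-call event_data payload, serialized the way the
    # recorder stores it.
    event_data = '{"domain": "light", "service": "turn_on", "entity_id": "light.porch"}'
    if match := ENTITY_ID_JSON_EXTRACT.search(event_data):
        print(match.group(1))  # light.porch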
@@ -235,6 +232,7 @@ def _ws_formatted_get_events(
                 entities_filter,
                 context_id,
                 True,
+                False,
             ),
         )
     )

@@ -368,6 +366,7 @@ class LogbookView(HomeAssistantView):
                     self.entities_filter,
                     context_id,
                     False,
+                    True,
                 )
             )

@@ -385,6 +384,7 @@ def _humanify(
     ],
     entity_name_cache: EntityNameCache,
     format_time: Callable[[Row], Any],
+    include_entity_name: bool = True,
 ) -> Generator[dict[str, Any], None, None]:
     """Generate a converted list of events into entries."""
     # Continuous sensors, will be excluded from the logbook

@@ -419,13 +419,13 @@ def _humanify(
             continue
         event_type = row.event_type
         if event_type == EVENT_CALL_SERVICE or (
-            event_type != EVENT_STATE_CHANGED
+            event_type is not PSUEDO_EVENT_STATE_CHANGED
             and entities_filter is not None
             and not _keep_row(row, event_type)
         ):
             continue

-        if event_type == EVENT_STATE_CHANGED:
+        if event_type is PSUEDO_EVENT_STATE_CHANGED:
             entity_id = row.entity_id
             assert entity_id is not None
             # Skip continuous sensors

@@ -439,14 +439,15 @@ def _humanify(
             data = {
                 LOGBOOK_ENTRY_WHEN: format_time(row),
-                LOGBOOK_ENTRY_NAME: entity_name_cache.get(entity_id, row),
                 LOGBOOK_ENTRY_STATE: row.state,
                 LOGBOOK_ENTRY_ENTITY_ID: entity_id,
             }
-            if icon := _row_attributes_extract(row, ICON_JSON_EXTRACT):
+            if include_entity_name:
+                data[LOGBOOK_ENTRY_NAME] = entity_name_cache.get(entity_id, row)
+            if icon := row.icon or row.old_format_icon:
                 data[LOGBOOK_ENTRY_ICON] = icon

-            context_augmenter.augment(data, row, context_id)
+            context_augmenter.augment(data, row, context_id, include_entity_name)
             yield data

         elif event_type in external_events:

@@ -454,7 +455,7 @@ def _humanify(
             data = describe_event(event_cache.get(row))
             data[LOGBOOK_ENTRY_WHEN] = format_time(row)
             data[LOGBOOK_ENTRY_DOMAIN] = domain
-            context_augmenter.augment(data, row, context_id)
+            context_augmenter.augment(data, row, context_id, include_entity_name)
             yield data

         elif event_type == EVENT_LOGBOOK_ENTRY:

@@ -474,7 +475,7 @@ def _humanify(
                 LOGBOOK_ENTRY_DOMAIN: entry_domain,
                 LOGBOOK_ENTRY_ENTITY_ID: entry_entity_id,
             }
-            context_augmenter.augment(data, row, context_id)
+            context_augmenter.augment(data, row, context_id, include_entity_name)
             yield data

@@ -487,6 +488,7 @@ def _get_events(
     entities_filter: EntityFilter | Callable[[str], bool] | None = None,
     context_id: str | None = None,
     timestamp: bool = False,
+    include_entity_name: bool = True,
 ) -> list[dict[str, Any]]:
     """Get events for a period of time."""
     assert not (

@@ -540,6 +542,7 @@ def _get_events(
                 external_events,
                 entity_name_cache,
                 format_time,
+                include_entity_name,
             )
         )

@@ -562,7 +565,9 @@ class ContextAugmenter:
         self.external_events = external_events
         self.event_cache = event_cache

-    def augment(self, data: dict[str, Any], row: Row, context_id: str) -> None:
+    def augment(
+        self, data: dict[str, Any], row: Row, context_id: str, include_entity_name: bool
+    ) -> None:
         """Augment data from the row and cache."""
         if context_user_id := row.context_user_id:
             data[CONTEXT_USER_ID] = context_user_id

@@ -589,9 +594,10 @@ class ContextAugmenter:
         # State change
         if context_entity_id := context_row.entity_id:
             data[CONTEXT_ENTITY_ID] = context_entity_id
-            data[CONTEXT_ENTITY_ID_NAME] = self.entity_name_cache.get(
-                context_entity_id, context_row
-            )
+            if include_entity_name:
+                data[CONTEXT_ENTITY_ID_NAME] = self.entity_name_cache.get(
+                    context_entity_id, context_row
+                )
             data[CONTEXT_EVENT_TYPE] = event_type
             return

@@ -619,9 +625,10 @@ class ContextAugmenter:
         if not (attr_entity_id := described.get(ATTR_ENTITY_ID)):
             return
         data[CONTEXT_ENTITY_ID] = attr_entity_id
-        data[CONTEXT_ENTITY_ID_NAME] = self.entity_name_cache.get(
-            attr_entity_id, context_row
-        )
+        if include_entity_name:
+            data[CONTEXT_ENTITY_ID_NAME] = self.entity_name_cache.get(
+                attr_entity_id, context_row
+            )


 def _is_sensor_continuous(ent_reg: er.EntityRegistry, entity_id: str) -> bool:

@@ -735,8 +742,6 @@ class EntityNameCache:
             friendly_name := current_state.attributes.get(ATTR_FRIENDLY_NAME)
         ):
             self._names[entity_id] = friendly_name
-        elif extracted_name := _row_attributes_extract(row, FRIENDLY_NAME_JSON_EXTRACT):
-            self._names[entity_id] = extracted_name
         else:
             return split_entity_id(entity_id)[1].replace("_", " ")
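The new include_entity_name flag threads from the websocket handler (False) and the HTTP LogbookView (True) down through _humanify and ContextAugmenter.augment, so websocket rows skip both the name field and the name-cache lookups. A rough illustration of what dropping the field buys per row, with invented entry values:

    import json

    entry_with_name = {
        "when": 1652313600.123,  # invented values for illustration
        "name": "Porch Light",
        "state": "on",
        "entity_id": "light.porch",
    }
    entry_without_name = {k: v for k, v in entry_with_name.items() if k != "name"}

    saved = len(json.dumps(entry_with_name)) - len(json.dumps(entry_without_name))
    print(saved, "bytes saved per row before compression")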
@@ -5,7 +5,7 @@ from collections.abc import Iterable
 from datetime import datetime as dt

 import sqlalchemy
-from sqlalchemy import lambda_stmt, select, union_all
+from sqlalchemy import JSON, lambda_stmt, select, type_coerce, union_all
 from sqlalchemy.orm import Query, aliased
 from sqlalchemy.sql.elements import ClauseList
 from sqlalchemy.sql.expression import literal

@@ -16,6 +16,7 @@ from homeassistant.components.proximity import DOMAIN as PROXIMITY_DOMAIN
 from homeassistant.components.recorder.filters import Filters
 from homeassistant.components.recorder.models import (
     ENTITY_ID_LAST_UPDATED_INDEX,
+    JSON_VARIENT_CAST,
     LAST_UPDATED_INDEX,
     EventData,
     Events,

@@ -23,7 +24,6 @@ from homeassistant.components.recorder.models import (
     States,
 )
 from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
-from homeassistant.const import EVENT_STATE_CHANGED

 ENTITY_ID_JSON_TEMPLATE = '%"entity_id":"{}"%'

@@ -36,6 +36,22 @@ UNIT_OF_MEASUREMENT_JSON_LIKE = f"%{UNIT_OF_MEASUREMENT_JSON}%"
 OLD_STATE = aliased(States, name="old_state")


+SHARED_ATTRS_JSON = type_coerce(
+    StateAttributes.shared_attrs.cast(JSON_VARIENT_CAST), JSON(none_as_null=True)
+)
+OLD_FORMAT_ATTRS_JSON = type_coerce(
+    States.attributes.cast(JSON_VARIENT_CAST), JSON(none_as_null=True)
+)
+
+
+PSUEDO_EVENT_STATE_CHANGED = None
+# Since we don't store event_types and None
+# and we don't store state_changed in events
+# we use a NULL for state_changed events
+# when we synthesize them from the states table
+# since it avoids another column being sent
+# in the payload
+
 EVENT_COLUMNS = (
     Events.event_id.label("event_id"),
     Events.event_type.label("event_type"),

@@ -50,18 +66,20 @@ STATE_COLUMNS = (
     States.state_id.label("state_id"),
     States.state.label("state"),
     States.entity_id.label("entity_id"),
-    States.attributes.label("attributes"),
-    StateAttributes.shared_attrs.label("shared_attrs"),
+    SHARED_ATTRS_JSON["icon"].as_string().label("icon"),
+    OLD_FORMAT_ATTRS_JSON["icon"].as_string().label("old_format_icon"),
 )


 EMPTY_STATE_COLUMNS = (
     literal(value=None, type_=sqlalchemy.String).label("state_id"),
     literal(value=None, type_=sqlalchemy.String).label("state"),
     literal(value=None, type_=sqlalchemy.String).label("entity_id"),
-    literal(value=None, type_=sqlalchemy.Text).label("attributes"),
-    literal(value=None, type_=sqlalchemy.Text).label("shared_attrs"),
+    literal(value=None, type_=sqlalchemy.String).label("icon"),
+    literal(value=None, type_=sqlalchemy.String).label("old_format_icon"),
 )


 EVENT_ROWS_NO_STATES = (
     *EVENT_COLUMNS,
     EventData.shared_data.label("shared_data"),

@@ -326,7 +344,13 @@ def _select_states() -> Select:
     """Generate a states select that formats the states table as event rows."""
     return select(
         literal(value=None, type_=sqlalchemy.Text).label("event_id"),
-        literal(value=EVENT_STATE_CHANGED, type_=sqlalchemy.String).label("event_type"),
+        # We use PSUEDO_EVENT_STATE_CHANGED aka None for
+        # state_changed events since it takes up less
+        # space in the response and every row has to be
+        # marked with the event_type
+        literal(value=PSUEDO_EVENT_STATE_CHANGED, type_=sqlalchemy.String).label(
+            "event_type"
+        ),
         literal(value=None, type_=sqlalchemy.Text).label("event_data"),
         States.last_updated.label("time_fired"),
         States.context_id.label("context_id"),
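The NULL-sentinel trick above can be sketched end to end against a toy schema (table and column names invented): state rows are shaped into event rows inside the database, and their event_type is a typed None literal rather than a repeated "state_changed" string, so the marker column adds nothing to each serialized row. A runnable sketch:

    import sqlalchemy
    from sqlalchemy import (
        Column, Integer, MetaData, String, Table, create_engine, select, union_all
    )
    from sqlalchemy.sql.expression import literal

    metadata = MetaData()
    events = Table(
        "events_demo", metadata, Column("event_id", Integer), Column("event_type", String)
    )
    states = Table(
        "states_demo", metadata, Column("state_id", Integer), Column("entity_id", String)
    )

    PSUEDO_EVENT_STATE_CHANGED = None  # NULL stands in for "state_changed"

    engine = create_engine("sqlite://")
    metadata.create_all(engine)

    with engine.begin() as conn:
        conn.execute(events.insert(), [{"event_id": 1, "event_type": "call_service"}])
        conn.execute(states.insert(), [{"state_id": 7, "entity_id": "light.porch"}])

        stmt = union_all(
            select(
                events.c.event_id.label("row_id"),
                events.c.event_type.label("event_type"),
            ),
            select(
                states.c.state_id.label("row_id"),
                # The states branch marks every row with a NULL event_type, so
                # no event-type string is stored, sent, or compared for them.
                literal(value=PSUEDO_EVENT_STATE_CHANGED, type_=sqlalchemy.String).label(
                    "event_type"
                ),
            ),
        )
        for row in conn.execute(stmt):
            print(row.row_id, "state_changed" if row.event_type is None else row.event_type)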
@@ -102,6 +102,9 @@ class FAST_PYSQLITE_DATETIME(sqlite.DATETIME):  # type: ignore[misc]
         return lambda value: None if value is None else ciso8601.parse_datetime(value)


+JSON_VARIENT_CAST = Text().with_variant(
+    postgresql.JSON(none_as_null=True), "postgresql"
+)
 DATETIME_TYPE = (
     DateTime(timezone=True)
     .with_variant(mysql.DATETIME(timezone=True, fsp=6), "mysql")
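What with_variant buys here, as a compile-only sketch (demo table invented): the same cast expression renders as TEXT on SQLite but as a native JSON cast on PostgreSQL, where the database can parse the column itself:

    from sqlalchemy import Column, MetaData, Table, Text, cast
    from sqlalchemy.dialects import postgresql, sqlite

    JSON_VARIENT_CAST = Text().with_variant(
        postgresql.JSON(none_as_null=True), "postgresql"
    )

    demo = Table("demo", MetaData(), Column("shared_attrs", Text))
    expr = cast(demo.c.shared_attrs, JSON_VARIENT_CAST)

    print(expr.compile(dialect=sqlite.dialect()))      # CAST(demo.shared_attrs AS TEXT)
    print(expr.compile(dialect=postgresql.dialect()))  # CAST(demo.shared_attrs AS JSON)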
@@ -30,7 +30,6 @@ from homeassistant.const import (
     EVENT_HOMEASSISTANT_START,
     EVENT_HOMEASSISTANT_STARTED,
     EVENT_HOMEASSISTANT_STOP,
-    EVENT_STATE_CHANGED,
     STATE_OFF,
     STATE_ON,
 )

@@ -327,7 +326,7 @@ def create_state_changed_event_from_old_new(
         ],
     )

-    row.event_type = EVENT_STATE_CHANGED
+    row.event_type = logbook.PSUEDO_EVENT_STATE_CHANGED
     row.event_data = "{}"
     row.shared_data = "{}"
     row.attributes = attributes_json

@@ -338,6 +337,9 @@ def create_state_changed_event_from_old_new(
     row.domain = entity_id and ha.split_entity_id(entity_id)[0]
     row.context_only = False
     row.context_id = None
+    row.friendly_name = None
+    row.icon = None
+    row.old_format_icon = None
     row.context_user_id = None
     row.context_parent_id = None
     row.old_state_id = old_state and 1

@@ -719,7 +721,7 @@ async def test_logbook_entity_no_longer_in_state_machine(
     )
     assert response.status == HTTPStatus.OK
     json_dict = await response.json()
-    assert json_dict[0]["name"] == "Alarm Control Panel"
+    assert json_dict[0]["name"] == "area 001"


 async def test_filter_continuous_sensor_values(
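The expected name changes because the friendly-name regex fallback is gone: for an entity no longer in the state machine, the logbook now derives a name from the entity_id itself via split_entity_id(entity_id)[1].replace("_", " "). A dependency-free sketch (the test's exact entity_id is assumed from the expected output; any object id of "area_001" yields the same fallback name):

    def split_entity_id(entity_id: str) -> list[str]:
        # Same behavior as homeassistant.core.split_entity_id for this purpose.
        return entity_id.split(".", 1)

    entity_id = "alarm_control_panel.area_001"  # assumed test entity
    print(split_entity_id(entity_id)[1].replace("_", " "))  # area 001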