Complete refactoring of logbook humanify (#71830)

J. Nick Koston 2022-05-13 17:56:16 -04:00 committed by GitHub
parent e06ea5e03e
commit 663f6f8340
3 changed files with 85 additions and 85 deletions

View File

@@ -377,21 +377,54 @@ class LogbookView(HomeAssistantView):
def _humanify(
hass: HomeAssistant,
rows: Generator[Row, None, None],
entities_filter: EntityFilter | Callable[[str], bool] | None,
ent_reg: er.EntityRegistry,
external_events: dict[
str, tuple[str, Callable[[LazyEventPartialState], dict[str, Any]]]
],
entity_name_cache: EntityNameCache,
event_cache: EventCache,
context_augmenter: ContextAugmenter,
format_time: Callable[[Row], Any],
) -> Generator[dict[str, Any], None, None]:
"""Generate a converted list of events into entries."""
external_events = hass.data.get(DOMAIN, {})
# Continuous sensors will be excluded from the logbook
continuous_sensors: dict[str, bool] = {}
event_data_cache: dict[str, dict[str, Any]] = {}
context_lookup: dict[str | None, Row | None] = {None: None}
event_cache = EventCache(event_data_cache)
context_augmenter = ContextAugmenter(
context_lookup, entity_name_cache, external_events, event_cache
)
# Process events
def _keep_row(row: Row, event_type: str) -> bool:
"""Check if the entity_filter rejects a row."""
assert entities_filter is not None
if entity_id := _row_event_data_extract(row, ENTITY_ID_JSON_EXTRACT):
return entities_filter(entity_id)
if event_type in external_events:
# If the entity_id isn't described, use the domain that describes
# the event for filtering.
domain: str | None = external_events[event_type][0]
else:
domain = _row_event_data_extract(row, DOMAIN_JSON_EXTRACT)
return domain is not None and entities_filter(f"{domain}._")
# Process rows
for row in rows:
context_id = row.context_id
context_lookup.setdefault(context_id, row)
if row.context_only:
continue
event_type = row.event_type
if event_type == EVENT_CALL_SERVICE or (
event_type != EVENT_STATE_CHANGED
and entities_filter is not None
and not _keep_row(row, event_type)
):
continue
if event_type == EVENT_STATE_CHANGED:
entity_id = row.entity_id
assert entity_id is not None
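The _keep_row helper above probes the filter with a synthetic entity_id of the form "{domain}._" when a row carries no entity_id of its own, so domain-level includes and excludes still apply. A minimal sketch of that probe, using generate_filter as it appears later in this diff (the domain lists are illustrative):

from homeassistant.helpers.entityfilter import generate_filter

# Filter that includes only the light domain (illustrative configuration).
entities_filter = generate_filter(["light"], [], [], [])

assert entities_filter("light.kitchen")  # real entity in an included domain
assert entities_filter("light._")        # the synthetic domain probe agrees
assert not entities_filter("switch._")   # other domains are filtered out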
@@ -399,7 +432,7 @@ def _humanify(
if (
is_continuous := continuous_sensors.get(entity_id)
) is None and split_entity_id(entity_id)[0] == SENSOR_DOMAIN:
is_continuous = _is_sensor_continuous(hass, entity_id)
is_continuous = _is_sensor_continuous(ent_reg, entity_id)
continuous_sensors[entity_id] = is_continuous
if is_continuous:
continue
@@ -413,7 +446,7 @@ def _humanify(
if icon := _row_attributes_extract(row, ICON_JSON_EXTRACT):
data[LOGBOOK_ENTRY_ICON] = icon
context_augmenter.augment(data, row)
context_augmenter.augment(data, row, context_id)
yield data
elif event_type in external_events:
@@ -421,27 +454,27 @@ def _humanify(
data = describe_event(event_cache.get(row))
data[LOGBOOK_ENTRY_WHEN] = format_time(row)
data[LOGBOOK_ENTRY_DOMAIN] = domain
context_augmenter.augment(data, row)
context_augmenter.augment(data, row, context_id)
yield data
elif event_type == EVENT_LOGBOOK_ENTRY:
event = event_cache.get(row)
if not (event_data := event.data):
continue
domain = event_data.get(ATTR_DOMAIN)
entity_id = event_data.get(ATTR_ENTITY_ID)
if domain is None and entity_id is not None:
entry_domain = event_data.get(ATTR_DOMAIN)
entry_entity_id = event_data.get(ATTR_ENTITY_ID)
if entry_domain is None and entry_entity_id is not None:
with suppress(IndexError):
domain = split_entity_id(str(entity_id))[0]
entry_domain = split_entity_id(str(entry_entity_id))[0]
data = {
LOGBOOK_ENTRY_WHEN: format_time(row),
LOGBOOK_ENTRY_NAME: event_data.get(ATTR_NAME),
LOGBOOK_ENTRY_MESSAGE: event_data.get(ATTR_MESSAGE),
LOGBOOK_ENTRY_DOMAIN: domain,
LOGBOOK_ENTRY_ENTITY_ID: entity_id,
LOGBOOK_ENTRY_DOMAIN: entry_domain,
LOGBOOK_ENTRY_ENTITY_ID: entry_entity_id,
}
context_augmenter.augment(data, row)
context_augmenter.augment(data, row, context_id)
yield data
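Note the context_lookup.setdefault(context_id, row) call at the top of the row loop: the first row seen for a given context becomes the row that later entries in the same context are attributed to by ContextAugmenter. A self-contained sketch of that first-row-wins pattern (the tuples stand in for Row objects):

context_lookup: dict[str | None, str | None] = {None: None}
events = [("ctx-1", "automation triggered"), ("ctx-1", "light turned on")]
for context_id, description in events:
    # setdefault stores only the first value seen for each key.
    context_lookup.setdefault(context_id, description)
assert context_lookup["ctx-1"] == "automation triggered"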
@@ -460,67 +493,34 @@ def _get_events(
entity_ids and context_id
), "can't pass in both entity_ids and context_id"
entity_name_cache = EntityNameCache(hass)
event_data_cache: dict[str, dict[str, Any]] = {}
context_lookup: dict[str | None, Row | None] = {None: None}
event_cache = EventCache(event_data_cache)
external_events: dict[
str, tuple[str, Callable[[LazyEventPartialState], dict[str, Any]]]
] = hass.data.get(DOMAIN, {})
context_augmenter = ContextAugmenter(
context_lookup, entity_name_cache, external_events, event_cache
)
event_types = (*ALL_EVENT_TYPES_EXCEPT_STATE_CHANGED, *external_events)
format_time = _row_time_fired_timestamp if timestamp else _row_time_fired_isoformat
entity_name_cache = EntityNameCache(hass)
ent_reg = er.async_get(hass)
if entity_ids is not None:
entities_filter = generate_filter([], entity_ids, [], [])
def yield_rows(query: Query) -> Generator[Row, None, None]:
"""Yield Events that are not filtered away."""
def _keep_row(row: Row, event_type: str) -> bool:
"""Check if the entity_filter rejects a row."""
assert entities_filter is not None
if entity_id := _row_event_data_extract(row, ENTITY_ID_JSON_EXTRACT):
return entities_filter(entity_id)
if event_type in external_events:
# If the entity_id isn't described, use the domain that describes
# the event for filtering.
domain: str | None = external_events[event_type][0]
else:
domain = _row_event_data_extract(row, DOMAIN_JSON_EXTRACT)
return domain is not None and entities_filter(f"{domain}._")
"""Yield rows from the database."""
# end_day - start_day intentionally checks .days and not .total_seconds()
# because we don't want to switch over to buffered mode when the range
# runs over one day by a few hours, since the UI makes that easy to do.
if entity_ids or context_id or (end_day - start_day).days <= 1:
rows = query.all()
else:
# Only buffer rows to reduce memory pressure
# if we expect the result set is going to be very large.
# What is considered very large is going to differ
# based on the hardware Home Assistant is running on.
#
# sqlalchemy suggests that this is at least 10k, but even for
# an RPi3 that number seems higher in testing, so we don't
# switch over until more than one day of data is requested.
#
rows = query.yield_per(1024)
for row in rows:
context_lookup.setdefault(row.context_id, row)
if not row.context_only:
event_type = row.event_type
if event_type != EVENT_CALL_SERVICE and (
entities_filter is None
or event_type == EVENT_STATE_CHANGED
or _keep_row(row, event_type)
):
yield row
if entity_ids is not None:
entities_filter = generate_filter([], entity_ids, [], [])
return query.all() # type: ignore[no-any-return]
# Only buffer rows to reduce memory pressure
# if we expect the result set is going to be very large.
# What is considered very large is going to differ
# based on the hardware Home Assistant is running on.
#
# sqlalchemy suggests that this is at least 10k, but even for
# an RPi3 that number seems higher in testing, so we don't
# switch over until more than one day of data is requested.
#
return query.yield_per(1024) # type: ignore[no-any-return]
stmt = statement_for_request(
start_day, end_day, event_types, entity_ids, filters, context_id
@@ -534,11 +534,11 @@ def _get_events(
with session_scope(hass=hass) as session:
return list(
_humanify(
hass,
yield_rows(session.execute(stmt)),
entities_filter,
ent_reg,
external_events,
entity_name_cache,
event_cache,
context_augmenter,
format_time,
)
)
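The .all() versus yield_per(1024) split inside yield_rows trades one eager fetch against batched streaming. A minimal sketch of the same trade-off, assuming a SQLAlchemy 1.4-style Query (the threshold and batch size mirror the code above):

from sqlalchemy.orm import Query

def fetch_rows(query: Query, expect_large_result: bool):
    if not expect_large_result:
        # Short windows: load the whole result set in one round trip.
        return query.all()
    # Long windows: materialize at most ~1024 ORM rows at a time,
    # capping peak memory on constrained hardware such as an RPi3.
    return query.yield_per(1024)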
@@ -562,12 +562,12 @@ class ContextAugmenter:
self.external_events = external_events
self.event_cache = event_cache
def augment(self, data: dict[str, Any], row: Row) -> None:
def augment(self, data: dict[str, Any], row: Row, context_id: str) -> None:
"""Augment data from the row and cache."""
if context_user_id := row.context_user_id:
data[CONTEXT_USER_ID] = context_user_id
if not (context_row := self.context_lookup.get(row.context_id)):
if not (context_row := self.context_lookup.get(context_id)):
return
if _rows_match(row, context_row):
@@ -624,17 +624,13 @@ class ContextAugmenter:
)
def _is_sensor_continuous(
hass: HomeAssistant,
entity_id: str,
) -> bool:
def _is_sensor_continuous(ent_reg: er.EntityRegistry, entity_id: str) -> bool:
"""Determine if a sensor is continuous by checking its state class.
Sensors with a unit_of_measurement are also considered continuous, but they are
already filtered out by the SQL query generated by _get_events.
"""
registry = er.async_get(hass)
if not (entry := registry.async_get(entity_id)):
if not (entry := ent_reg.async_get(entity_id)):
# Entity not registered, so can't have a state class
return False
return (
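The _is_sensor_continuous change is the same hoisting pattern used throughout this refactor: er.async_get(hass) is resolved once per request in _get_events and threaded through, rather than re-resolved for every sensor row. A minimal before/after sketch (the wrapper names are illustrative):

from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er

def lookup_before(hass: HomeAssistant, entity_id: str):
    registry = er.async_get(hass)  # repeated for every sensor row
    return registry.async_get(entity_id)

def lookup_after(ent_reg: er.EntityRegistry, entity_id: str):
    return ent_reg.async_get(entity_id)  # registry resolved once by the caller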

View File

@@ -7,6 +7,7 @@ from typing import Any
from homeassistant.components import logbook
from homeassistant.components.recorder.models import process_timestamp_to_utc_isoformat
from homeassistant.core import Context
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.json import JSONEncoder
import homeassistant.util.dt as dt_util
@@ -30,6 +31,11 @@ class MockRow:
self.context_id = context.id if context else None
self.state = None
self.entity_id = None
self.state_id = None
self.event_id = None
self.shared_attrs = None
self.attributes = None
self.context_only = False
@property
def time_fired_minute(self):
@@ -44,20 +50,16 @@ class MockRow:
def mock_humanify(hass_, rows):
"""Wrap humanify with mocked logbook objects."""
event_data_cache = {}
context_lookup = {}
entity_name_cache = logbook.EntityNameCache(hass_)
event_cache = logbook.EventCache(event_data_cache)
context_augmenter = logbook.ContextAugmenter(
context_lookup, entity_name_cache, {}, event_cache
)
ent_reg = er.async_get(hass_)
external_events = hass_.data.get(logbook.DOMAIN, {})
return list(
logbook._humanify(
hass_,
rows,
None,
ent_reg,
external_events,
entity_name_cache,
event_cache,
context_augmenter,
logbook._row_time_fired_isoformat,
),
)
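A sketch of how a test might drive this helper, assuming a hass fixture and the MockRow class above (the logbook entry payload is illustrative):

rows = [
    MockRow(
        logbook.EVENT_LOGBOOK_ENTRY,
        {"name": "Kitchen", "message": "switched on", "entity_id": "light.kitchen"},
    )
]
entries = mock_humanify(hass, rows)
assert entries[0]["message"] == "switched on"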

View File

@@ -323,6 +323,7 @@ def create_state_changed_event_from_old_new(
"old_state_id",
"shared_attrs",
"shared_data",
"context_only",
],
)
@@ -335,6 +336,7 @@ def create_state_changed_event_from_old_new(
row.state = new_state and new_state.get("state")
row.entity_id = entity_id
row.domain = entity_id and ha.split_entity_id(entity_id)[0]
row.context_only = False
row.context_id = None
row.context_user_id = None
row.context_parent_id = None