"""Event parser and human readable log generator."""
from __future__ import annotations

from collections.abc import Callable
from dataclasses import dataclass
from typing import Any, cast

from sqlalchemy.engine.row import Row

from homeassistant.components.recorder.filters import Filters
from homeassistant.components.recorder.models import (
    bytes_to_ulid_or_none,
    bytes_to_uuid_hex_or_none,
    ulid_to_bytes_or_none,
    uuid_hex_to_bytes_or_none,
)
from homeassistant.const import ATTR_ICON, EVENT_STATE_CHANGED
from homeassistant.core import Context, Event, State, callback
import homeassistant.util.dt as dt_util
from homeassistant.util.json import json_loads
from homeassistant.util.ulid import ulid_to_bytes


@dataclass(slots=True)
class LogbookConfig:
    """Configuration for the logbook integration."""

    external_events: dict[
        str, tuple[str, Callable[[LazyEventPartialState], dict[str, Any]]]
    ]
    sqlalchemy_filter: Filters | None = None
    entity_filter: Callable[[str], bool] | None = None
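
# Illustrative sketch only (not part of this module's API): an integration's
# logbook platform fills ``external_events`` with entries keyed by event type,
# each mapping to a ``(domain, describe_callback)`` pair, along these lines:
#
#     def _describe_doorbell_event(event: LazyEventPartialState) -> dict[str, Any]:
#         # Hypothetical describer returning the name/message the logbook renders.
#         return {"name": "Front Door", "message": "rang the doorbell"}
#
#     external_events["doorbell_ring"] = ("doorbell", _describe_doorbell_event)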


class LazyEventPartialState:
    """A lazy version of core Event with limited State joined in."""

    __slots__ = [
        "row",
        "_event_data",
        "_event_data_cache",
        "event_type",
        "entity_id",
        "state",
        "context_id_bin",
        "context_user_id_bin",
        "context_parent_id_bin",
        "data",
    ]

    def __init__(
        self,
        row: Row | EventAsRow,
        event_data_cache: dict[str, dict[str, Any]],
    ) -> None:
        """Init the lazy event."""
        self.row = row
        self._event_data: dict[str, Any] | None = None
        self._event_data_cache = event_data_cache
        self.event_type: str | None = self.row.event_type
        self.entity_id: str | None = self.row.entity_id
        self.state = self.row.state
        self.context_id_bin: bytes | None = self.row.context_id_bin
        self.context_user_id_bin: bytes | None = self.row.context_user_id_bin
        self.context_parent_id_bin: bytes | None = self.row.context_parent_id_bin
        # We need to explicitly check whether the row is an EventAsRow, because
        # the unhappy path of fetching row.data on a database Row is very expensive.
        if type(row) is EventAsRow:  # pylint: disable=unidiomatic-typecheck
            # If it is an EventAsRow we can skip the whole JSON decode step,
            # since we already have the data as a dict.
            self.data = row.data
            return
        source = cast(str, self.row.event_data)
        if not source:
            self.data = {}
        elif event_data := self._event_data_cache.get(source):
            self.data = event_data
        else:
            self.data = self._event_data_cache[source] = cast(
                dict[str, Any], json_loads(source)
            )
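
    # Note: ``event_data_cache`` is shared by the caller across many rows, so
    # identical serialized ``event_data`` payloads are decoded only once. A
    # rough usage sketch (illustrative; the real processor code lives elsewhere
    # in this integration):
    #
    #     event_data_cache: dict[str, dict[str, Any]] = {}
    #     events = [LazyEventPartialState(row, event_data_cache) for row in rows]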

    @property
    def context_id(self) -> str | None:
        """Return the context id."""
        return bytes_to_ulid_or_none(self.context_id_bin)

    @property
    def context_user_id(self) -> str | None:
        """Return the context user id."""
        return bytes_to_uuid_hex_or_none(self.context_user_id_bin)

    @property
    def context_parent_id(self) -> str | None:
        """Return the context parent id."""
        return bytes_to_ulid_or_none(self.context_parent_id_bin)


@dataclass(slots=True, frozen=True)
class EventAsRow:
    """Convert an event to a row."""

    data: dict[str, Any]
    context: Context
    context_id_bin: bytes
    time_fired_ts: float
    row_id: int
    event_data: str | None = None
    entity_id: str | None = None
    icon: str | None = None
    context_user_id_bin: bytes | None = None
    context_parent_id_bin: bytes | None = None
    event_type: str | None = None
    state: str | None = None
    context_only: None = None
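
# EventAsRow mirrors the attribute names of the recorder's database Row
# (event_type, entity_id, state, context_*_bin, ...), so LazyEventPartialState
# can read live in-memory events and recorder rows through a single code path.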


@callback
def async_event_to_row(event: Event) -> EventAsRow:
    """Convert an event to a row."""
    if event.event_type != EVENT_STATE_CHANGED:
        context = event.context
        return EventAsRow(
            data=event.data,
            context=event.context,
            event_type=event.event_type,
            context_id_bin=ulid_to_bytes(context.id),
            context_user_id_bin=uuid_hex_to_bytes_or_none(context.user_id),
            context_parent_id_bin=ulid_to_bytes_or_none(context.parent_id),
            time_fired_ts=dt_util.utc_to_timestamp(event.time_fired),
            row_id=hash(event),
        )
    # States are prefiltered, so we never get states that are missing
    # new_state or old_state, since the logbook does not show these.
    new_state: State = event.data["new_state"]
    context = new_state.context
    return EventAsRow(
        data=event.data,
        context=event.context,
        entity_id=new_state.entity_id,
        state=new_state.state,
        context_id_bin=ulid_to_bytes(context.id),
        context_user_id_bin=uuid_hex_to_bytes_or_none(context.user_id),
        context_parent_id_bin=ulid_to_bytes_or_none(context.parent_id),
        time_fired_ts=dt_util.utc_to_timestamp(new_state.last_updated),
        row_id=hash(event),
        icon=new_state.attributes.get(ATTR_ICON),
    )
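
# Minimal usage sketch (illustrative only; assumes a live state_changed event
# ``event`` and an empty cache; the real flow is driven by the logbook's event
# processor):
#
#     cache: dict[str, dict[str, Any]] = {}
#     row = async_event_to_row(event)
#     lazy = LazyEventPartialState(row, cache)
#     assert lazy.data is event.data  # EventAsRow keeps the original dict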