"""Models for SQLAlchemy."""
from __future__ import annotations

from datetime import datetime, timedelta
import json
import logging
from typing import Any, TypedDict, cast, overload

import ciso8601
from fnvhash import fnv1a_32
from sqlalchemy import (
    BigInteger,
    Boolean,
    Column,
    DateTime,
    Float,
    ForeignKey,
    Identity,
    Index,
    Integer,
    SmallInteger,
    String,
    Text,
    distinct,
)
from sqlalchemy.dialects import mysql, oracle, postgresql, sqlite
from sqlalchemy.engine.row import Row
from sqlalchemy.ext.declarative import declared_attr
from sqlalchemy.orm import declarative_base, relationship
from sqlalchemy.orm.session import Session

from homeassistant.components.websocket_api.const import (
    COMPRESSED_STATE_ATTRIBUTES,
    COMPRESSED_STATE_LAST_CHANGED,
    COMPRESSED_STATE_LAST_UPDATED,
    COMPRESSED_STATE_STATE,
)
from homeassistant.const import (
    MAX_LENGTH_EVENT_CONTEXT_ID,
    MAX_LENGTH_EVENT_EVENT_TYPE,
    MAX_LENGTH_EVENT_ORIGIN,
    MAX_LENGTH_STATE_ENTITY_ID,
    MAX_LENGTH_STATE_STATE,
)
from homeassistant.core import Context, Event, EventOrigin, State, split_entity_id
import homeassistant.util.dt as dt_util

from .const import ALL_DOMAIN_EXCLUDE_ATTRS, JSON_DUMP

# SQLAlchemy Schema
# pylint: disable=invalid-name
Base = declarative_base()

SCHEMA_VERSION = 28

_LOGGER = logging.getLogger(__name__)

DB_TIMEZONE = "+00:00"
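
# Illustrative only: DB_TIMEZONE is the suffix appended to naive database
# timestamps when they are rendered as UTC ISO strings (see
# process_timestamp_to_utc_isoformat below), e.g.
#   >>> f"{datetime(2020, 5, 19).isoformat()}{DB_TIMEZONE}"
#   '2020-05-19T00:00:00+00:00'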

TABLE_EVENTS = "events"
TABLE_EVENT_DATA = "event_data"
TABLE_STATES = "states"
TABLE_STATE_ATTRIBUTES = "state_attributes"
TABLE_RECORDER_RUNS = "recorder_runs"
TABLE_SCHEMA_CHANGES = "schema_changes"
TABLE_STATISTICS = "statistics"
TABLE_STATISTICS_META = "statistics_meta"
TABLE_STATISTICS_RUNS = "statistics_runs"
TABLE_STATISTICS_SHORT_TERM = "statistics_short_term"

ALL_TABLES = [
    TABLE_STATES,
    TABLE_STATE_ATTRIBUTES,
    TABLE_EVENTS,
    TABLE_EVENT_DATA,
    TABLE_RECORDER_RUNS,
    TABLE_SCHEMA_CHANGES,
    TABLE_STATISTICS,
    TABLE_STATISTICS_META,
    TABLE_STATISTICS_RUNS,
    TABLE_STATISTICS_SHORT_TERM,
]

TABLES_TO_CHECK = [
    TABLE_STATES,
    TABLE_EVENTS,
    TABLE_RECORDER_RUNS,
    TABLE_SCHEMA_CHANGES,
]

LAST_UPDATED_INDEX = "ix_states_last_updated"
ENTITY_ID_LAST_UPDATED_INDEX = "ix_states_entity_id_last_updated"

EMPTY_JSON_OBJECT = "{}"


class FAST_PYSQLITE_DATETIME(sqlite.DATETIME):  # type: ignore[misc]
    """Use ciso8601 to parse datetimes instead of sqlalchemy built-in regex."""

    def result_processor(self, dialect, coltype):  # type: ignore[no-untyped-def]
        """Offload the datetime parsing to ciso8601."""
        return lambda value: None if value is None else ciso8601.parse_datetime(value)
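
# Illustrative only: ciso8601 parses ISO 8601 strings in C, which is much
# faster than SQLAlchemy's regex-based fallback, e.g.
#   >>> ciso8601.parse_datetime("2022-05-13T00:11:43.123456+00:00")
#   datetime.datetime(2022, 5, 13, 0, 11, 43, 123456, tzinfo=datetime.timezone.utc)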

JSON_VARIENT_CAST = Text().with_variant(
    postgresql.JSON(none_as_null=True), "postgresql"
)
JSONB_VARIENT_CAST = Text().with_variant(
    postgresql.JSONB(none_as_null=True), "postgresql"
)
DATETIME_TYPE = (
    DateTime(timezone=True)
    .with_variant(mysql.DATETIME(timezone=True, fsp=6), "mysql")
    .with_variant(FAST_PYSQLITE_DATETIME(), "sqlite")
)
DOUBLE_TYPE = (
    Float()
    .with_variant(mysql.DOUBLE(asdecimal=False), "mysql")
    .with_variant(oracle.DOUBLE_PRECISION(), "oracle")
    .with_variant(postgresql.DOUBLE_PRECISION(), "postgresql")
)
EVENT_ORIGIN_ORDER = [EventOrigin.local, EventOrigin.remote]
EVENT_ORIGIN_TO_IDX = {origin: idx for idx, origin in enumerate(EVENT_ORIGIN_ORDER)}
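
# Illustrative only: the comprehension above yields
#   EVENT_ORIGIN_TO_IDX == {EventOrigin.local: 0, EventOrigin.remote: 1}
# so origin_idx stores 0 for locally fired events and 1 for remote ones.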


class UnsupportedDialect(Exception):
    """The dialect or its version is not supported."""


class Events(Base):  # type: ignore[misc,valid-type]
    """Event history data."""

    __table_args__ = (
        # Used for fetching events at a specific time
        # see logbook
        Index("ix_events_event_type_time_fired", "event_type", "time_fired"),
        {"mysql_default_charset": "utf8mb4", "mysql_collate": "utf8mb4_unicode_ci"},
    )
    __tablename__ = TABLE_EVENTS
    event_id = Column(Integer, Identity(), primary_key=True)
    event_type = Column(String(MAX_LENGTH_EVENT_EVENT_TYPE))
    event_data = Column(Text().with_variant(mysql.LONGTEXT, "mysql"))
    origin = Column(String(MAX_LENGTH_EVENT_ORIGIN))  # no longer used for new rows
    origin_idx = Column(SmallInteger)
    time_fired = Column(DATETIME_TYPE, index=True)
    context_id = Column(String(MAX_LENGTH_EVENT_CONTEXT_ID), index=True)
    context_user_id = Column(String(MAX_LENGTH_EVENT_CONTEXT_ID))
    context_parent_id = Column(String(MAX_LENGTH_EVENT_CONTEXT_ID))
    data_id = Column(Integer, ForeignKey("event_data.data_id"), index=True)
    event_data_rel = relationship("EventData")

    def __repr__(self) -> str:
        """Return string representation of instance for debugging."""
        return (
            f"<recorder.Events("
            f"id={self.event_id}, type='{self.event_type}', "
            f"origin_idx='{self.origin_idx}', time_fired='{self.time_fired}'"
            f", data_id={self.data_id})>"
        )

    @staticmethod
    def from_event(event: Event) -> Events:
        """Create an event database object from a native event."""
        return Events(
            event_type=event.event_type,
            event_data=None,
            origin_idx=EVENT_ORIGIN_TO_IDX.get(event.origin),
            time_fired=event.time_fired,
            context_id=event.context.id,
            context_user_id=event.context.user_id,
            context_parent_id=event.context.parent_id,
        )

    def to_native(self, validate_entity_id: bool = True) -> Event | None:
        """Convert to a native HA Event."""
        context = Context(
            id=self.context_id,
            user_id=self.context_user_id,
            parent_id=self.context_parent_id,
        )
        try:
            return Event(
                self.event_type,
                json.loads(self.event_data) if self.event_data else {},
                EventOrigin(self.origin)
                if self.origin
                else EVENT_ORIGIN_ORDER[self.origin_idx],
                process_timestamp(self.time_fired),
                context=context,
            )
        except ValueError:
            # When json.loads fails
            _LOGGER.exception("Error converting to event: %s", self)
            return None
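
# Illustrative round trip (a sketch, not the recorder's actual write path;
# `event` is a native homeassistant.core.Event):
#   db_event = Events.from_event(event)                     # event_data stays NULL
#   db_event.event_data_rel = EventData.from_event(event)   # payload stored once
#   native = db_event.to_native()                           # rebuilt Event + Context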


class EventData(Base):  # type: ignore[misc,valid-type]
    """Event data history."""

    __table_args__ = (
        {"mysql_default_charset": "utf8mb4", "mysql_collate": "utf8mb4_unicode_ci"},
    )
    __tablename__ = TABLE_EVENT_DATA
    data_id = Column(Integer, Identity(), primary_key=True)
    hash = Column(BigInteger, index=True)
    # Note that this is not named attributes to avoid confusion with the states table
    shared_data = Column(Text().with_variant(mysql.LONGTEXT, "mysql"))

    def __repr__(self) -> str:
        """Return string representation of instance for debugging."""
        return (
            f"<recorder.EventData("
            f"id={self.data_id}, hash='{self.hash}', data='{self.shared_data}'"
            f")>"
        )

    @staticmethod
    def from_event(event: Event) -> EventData:
        """Create object from an event."""
        shared_data = JSON_DUMP(event.data)
        return EventData(
            shared_data=shared_data, hash=EventData.hash_shared_data(shared_data)
        )

    @staticmethod
    def shared_data_from_event(event: Event) -> str:
        """Create shared_data from an event."""
        return JSON_DUMP(event.data)

    @staticmethod
    def hash_shared_data(shared_data: str) -> int:
        """Return the hash of json encoded shared data."""
        return cast(int, fnv1a_32(shared_data.encode("utf-8")))

    def to_native(self) -> dict[str, Any]:
        """Convert to an event data dictionary."""
        try:
            return cast(dict[str, Any], json.loads(self.shared_data))
        except ValueError:
            _LOGGER.exception("Error converting row to event data: %s", self)
            return {}
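
# Illustrative dedup lookup (a sketch, not the recorder's actual query): shared
# payloads are found via the indexed 32-bit FNV-1a hash first, then compared on
# shared_data to rule out hash collisions:
#   shared = EventData.shared_data_from_event(event)
#   rows = session.query(EventData).filter(
#       EventData.hash == EventData.hash_shared_data(shared),
#       EventData.shared_data == shared,
#   )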


class States(Base):  # type: ignore[misc,valid-type]
    """State change history."""

    __table_args__ = (
        # Used for fetching the state of entities at a specific time
        # (get_states in history.py)
        Index(ENTITY_ID_LAST_UPDATED_INDEX, "entity_id", "last_updated"),
        {"mysql_default_charset": "utf8mb4", "mysql_collate": "utf8mb4_unicode_ci"},
    )
    __tablename__ = TABLE_STATES
    state_id = Column(Integer, Identity(), primary_key=True)
    entity_id = Column(String(MAX_LENGTH_STATE_ENTITY_ID))
    state = Column(String(MAX_LENGTH_STATE_STATE))
    attributes = Column(
        Text().with_variant(mysql.LONGTEXT, "mysql")
    )  # no longer used for new rows
    event_id = Column(  # no longer used for new rows
        Integer, ForeignKey("events.event_id", ondelete="CASCADE"), index=True
    )
    last_changed = Column(DATETIME_TYPE)
    last_updated = Column(DATETIME_TYPE, default=dt_util.utcnow, index=True)
    old_state_id = Column(Integer, ForeignKey("states.state_id"), index=True)
    attributes_id = Column(
        Integer, ForeignKey("state_attributes.attributes_id"), index=True
    )
    context_id = Column(String(MAX_LENGTH_EVENT_CONTEXT_ID), index=True)
    context_user_id = Column(String(MAX_LENGTH_EVENT_CONTEXT_ID))
    context_parent_id = Column(String(MAX_LENGTH_EVENT_CONTEXT_ID))
    origin_idx = Column(SmallInteger)  # 0 is local, 1 is remote
    old_state = relationship("States", remote_side=[state_id])
    state_attributes = relationship("StateAttributes")

    def __repr__(self) -> str:
        """Return string representation of instance for debugging."""
        return (
            f"<recorder.States("
            f"id={self.state_id}, entity_id='{self.entity_id}', "
            f"state='{self.state}', event_id='{self.event_id}', "
            f"last_updated='{self.last_updated.isoformat(sep=' ', timespec='seconds')}', "
            f"old_state_id={self.old_state_id}, attributes_id={self.attributes_id}"
            f")>"
        )

    @staticmethod
    def from_event(event: Event) -> States:
        """Create object from a state_changed event."""
        entity_id = event.data["entity_id"]
        state: State | None = event.data.get("new_state")
        dbstate = States(
            entity_id=entity_id,
            attributes=None,
            context_id=event.context.id,
            context_user_id=event.context.user_id,
            context_parent_id=event.context.parent_id,
            origin_idx=EVENT_ORIGIN_TO_IDX.get(event.origin),
        )

        # None state means the state was removed from the state machine
        if state is None:
            dbstate.state = ""
            dbstate.last_updated = event.time_fired
            dbstate.last_changed = None
            return dbstate

        dbstate.state = state.state
        dbstate.last_updated = state.last_updated
        if state.last_updated == state.last_changed:
            dbstate.last_changed = None
        else:
            dbstate.last_changed = state.last_changed

        return dbstate

    def to_native(self, validate_entity_id: bool = True) -> State | None:
        """Convert to an HA state object."""
        context = Context(
            id=self.context_id,
            user_id=self.context_user_id,
            parent_id=self.context_parent_id,
        )
        try:
            attrs = json.loads(self.attributes) if self.attributes else {}
        except ValueError:
            # When json.loads fails
            _LOGGER.exception("Error converting row to state: %s", self)
            return None
        if self.last_changed is None or self.last_changed == self.last_updated:
            last_changed = last_updated = process_timestamp(self.last_updated)
        else:
            last_updated = process_timestamp(self.last_updated)
            last_changed = process_timestamp(self.last_changed)
        return State(
            self.entity_id,
            self.state,
            # Join the state_attributes table on attributes_id to get the attributes
            # for newer states
            attrs,
            last_changed,
            last_updated,
            context=context,
            validate_entity_id=validate_entity_id,
        )
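
# Note (descriptive, matching the code above): last_changed is stored as NULL
# whenever it equals last_updated (the common case), which keeps state rows
# smaller; to_native() and LazyState reverse that optimization when reading.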


class StateAttributes(Base):  # type: ignore[misc,valid-type]
    """State attribute change history."""

    __table_args__ = (
        {"mysql_default_charset": "utf8mb4", "mysql_collate": "utf8mb4_unicode_ci"},
    )
    __tablename__ = TABLE_STATE_ATTRIBUTES
    attributes_id = Column(Integer, Identity(), primary_key=True)
    hash = Column(BigInteger, index=True)
    # Note that this is not named attributes to avoid confusion with the states table
    shared_attrs = Column(Text().with_variant(mysql.LONGTEXT, "mysql"))

    def __repr__(self) -> str:
        """Return string representation of instance for debugging."""
        return (
            f"<recorder.StateAttributes("
            f"id={self.attributes_id}, hash='{self.hash}', attributes='{self.shared_attrs}'"
            f")>"
        )

    @staticmethod
    def from_event(event: Event) -> StateAttributes:
        """Create object from a state_changed event."""
        state: State | None = event.data.get("new_state")
        # None state means the state was removed from the state machine
        dbstate = StateAttributes(
            shared_attrs="{}" if state is None else JSON_DUMP(state.attributes)
        )
        dbstate.hash = StateAttributes.hash_shared_attrs(dbstate.shared_attrs)
        return dbstate

    @staticmethod
    def shared_attrs_from_event(
        event: Event, exclude_attrs_by_domain: dict[str, set[str]]
    ) -> str:
        """Create shared_attrs from a state_changed event."""
        state: State | None = event.data.get("new_state")
        # None state means the state was removed from the state machine
        if state is None:
            return "{}"
        domain = split_entity_id(state.entity_id)[0]
        exclude_attrs = (
            exclude_attrs_by_domain.get(domain, set()) | ALL_DOMAIN_EXCLUDE_ATTRS
        )
        return JSON_DUMP(
            {k: v for k, v in state.attributes.items() if k not in exclude_attrs}
        )

    @staticmethod
    def hash_shared_attrs(shared_attrs: str) -> int:
        """Return the hash of json encoded shared attributes."""
        return cast(int, fnv1a_32(shared_attrs.encode("utf-8")))

    def to_native(self) -> dict[str, Any]:
        """Convert to a state attributes dictionary."""
        try:
            return cast(dict[str, Any], json.loads(self.shared_attrs))
        except ValueError:
            # When json.loads fails
            _LOGGER.exception("Error converting row to state attributes: %s", self)
            return {}
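
# Illustrative use of the per-domain exclude map (hypothetical values; the real
# map comes from the recorder's exclude configuration):
#   exclude = {"sensor": {"icon", "friendly_name"}}
#   shared = StateAttributes.shared_attrs_from_event(state_changed_event, exclude)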


class StatisticResult(TypedDict):
    """Statistic result data class.

    Allows multiple datapoints for the same statistic_id.
    """

    meta: StatisticMetaData
    stat: StatisticData


class StatisticDataBase(TypedDict):
    """Mandatory fields for statistic data class."""

    start: datetime


class StatisticData(StatisticDataBase, total=False):
    """Statistic data class."""

    mean: float
    min: float
    max: float
    last_reset: datetime | None
    state: float
    sum: float
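
# Illustrative StatisticData value (only "start" is mandatory; the remaining
# keys are optional because of total=False):
#   stat: StatisticData = {
#       "start": dt_util.utcnow().replace(minute=0, second=0, microsecond=0),
#       "mean": 21.5,
#       "min": 20.0,
#       "max": 23.0,
#   }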


class StatisticsBase:
    """Statistics base class."""

    id = Column(Integer, Identity(), primary_key=True)
    created = Column(DATETIME_TYPE, default=dt_util.utcnow)

    @declared_attr  # type: ignore[misc]
    def metadata_id(self) -> Column:
        """Define the metadata_id column for subclasses."""
        return Column(
            Integer,
            ForeignKey(f"{TABLE_STATISTICS_META}.id", ondelete="CASCADE"),
            index=True,
        )

    start = Column(DATETIME_TYPE, index=True)
    mean = Column(DOUBLE_TYPE)
    min = Column(DOUBLE_TYPE)
    max = Column(DOUBLE_TYPE)
    last_reset = Column(DATETIME_TYPE)
    state = Column(DOUBLE_TYPE)
    sum = Column(DOUBLE_TYPE)

    @classmethod
    def from_stats(cls, metadata_id: int, stats: StatisticData) -> StatisticsBase:
        """Create object from a statistics data point."""
        return cls(  # type: ignore[call-arg,misc]
            metadata_id=metadata_id,
            **stats,
        )


class Statistics(Base, StatisticsBase):  # type: ignore[misc,valid-type]
    """Long term statistics."""

    duration = timedelta(hours=1)

    __table_args__ = (
        # Used for fetching statistics for a certain entity at a specific time
        Index("ix_statistics_statistic_id_start", "metadata_id", "start", unique=True),
    )
    __tablename__ = TABLE_STATISTICS


class StatisticsShortTerm(Base, StatisticsBase):  # type: ignore[misc,valid-type]
    """Short term statistics."""

    duration = timedelta(minutes=5)

    __table_args__ = (
        # Used for fetching statistics for a certain entity at a specific time
        Index(
            "ix_statistics_short_term_statistic_id_start",
            "metadata_id",
            "start",
            unique=True,
        ),
    )
    __tablename__ = TABLE_STATISTICS_SHORT_TERM
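
# Illustrative (hypothetical metadata_id): both tables share the StatisticsBase
# columns, so the same StatisticData can populate either resolution:
#   hourly_row = Statistics.from_stats(1, stat)            # 1-hour bucket
#   short_row = StatisticsShortTerm.from_stats(1, stat)    # 5-minute bucket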


class StatisticMetaData(TypedDict):
    """Statistic meta data class."""

    has_mean: bool
    has_sum: bool
    name: str | None
    source: str
    statistic_id: str
    unit_of_measurement: str | None
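
# Illustrative StatisticMetaData (hypothetical values):
#   meta: StatisticMetaData = {
#       "has_mean": True,
#       "has_sum": False,
#       "name": "Outdoor temperature",
#       "source": "recorder",
#       "statistic_id": "sensor.outdoor_temperature",
#       "unit_of_measurement": "°C",
#   }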


class StatisticsMeta(Base):  # type: ignore[misc,valid-type]
    """Statistics meta data."""

    __table_args__ = (
        {"mysql_default_charset": "utf8mb4", "mysql_collate": "utf8mb4_unicode_ci"},
    )
    __tablename__ = TABLE_STATISTICS_META
    id = Column(Integer, Identity(), primary_key=True)
    statistic_id = Column(String(255), index=True)
    source = Column(String(32))
    unit_of_measurement = Column(String(255))
    has_mean = Column(Boolean)
    has_sum = Column(Boolean)
    name = Column(String(255))

    @staticmethod
    def from_meta(meta: StatisticMetaData) -> StatisticsMeta:
        """Create object from meta data."""
        return StatisticsMeta(**meta)


class RecorderRuns(Base):  # type: ignore[misc,valid-type]
    """Representation of a recorder run."""

    __table_args__ = (Index("ix_recorder_runs_start_end", "start", "end"),)
    __tablename__ = TABLE_RECORDER_RUNS
    run_id = Column(Integer, Identity(), primary_key=True)
    start = Column(DateTime(timezone=True), default=dt_util.utcnow)
    end = Column(DateTime(timezone=True))
    closed_incorrect = Column(Boolean, default=False)
    created = Column(DateTime(timezone=True), default=dt_util.utcnow)

    def __repr__(self) -> str:
        """Return string representation of instance for debugging."""
        end = (
            f"'{self.end.isoformat(sep=' ', timespec='seconds')}'" if self.end else None
        )
        return (
            f"<recorder.RecorderRuns("
            f"id={self.run_id}, start='{self.start.isoformat(sep=' ', timespec='seconds')}', "
            f"end={end}, closed_incorrect={self.closed_incorrect}, "
            f"created='{self.created.isoformat(sep=' ', timespec='seconds')}'"
            f")>"
        )

    def entity_ids(self, point_in_time: datetime | None = None) -> list[str]:
        """Return the entity ids that existed in this run.

        Specify point_in_time if you want to know which existed at that point
        in time inside the run.
        """
        session = Session.object_session(self)

        assert session is not None, "RecorderRuns need to be persisted"

        query = session.query(distinct(States.entity_id)).filter(
            States.last_updated >= self.start
        )

        if point_in_time is not None:
            query = query.filter(States.last_updated < point_in_time)
        elif self.end is not None:
            query = query.filter(States.last_updated < self.end)

        return [row[0] for row in query]

    def to_native(self, validate_entity_id: bool = True) -> RecorderRuns:
        """Return self, native format is this model."""
        return self


class SchemaChanges(Base):  # type: ignore[misc,valid-type]
    """Representation of schema version changes."""

    __tablename__ = TABLE_SCHEMA_CHANGES
    change_id = Column(Integer, Identity(), primary_key=True)
    schema_version = Column(Integer)
    changed = Column(DateTime(timezone=True), default=dt_util.utcnow)

    def __repr__(self) -> str:
        """Return string representation of instance for debugging."""
        return (
            f"<recorder.SchemaChanges("
            f"id={self.change_id}, schema_version={self.schema_version}, "
            f"changed='{self.changed.isoformat(sep=' ', timespec='seconds')}'"
            f")>"
        )


class StatisticsRuns(Base):  # type: ignore[misc,valid-type]
    """Representation of a statistics run."""

    __tablename__ = TABLE_STATISTICS_RUNS
    run_id = Column(Integer, Identity(), primary_key=True)
    start = Column(DateTime(timezone=True), index=True)

    def __repr__(self) -> str:
        """Return string representation of instance for debugging."""
        return (
            f"<recorder.StatisticsRuns("
            f"id={self.run_id}, start='{self.start.isoformat(sep=' ', timespec='seconds')}'"
            f")>"
        )


@overload
def process_timestamp(ts: None) -> None:
    ...


@overload
def process_timestamp(ts: datetime) -> datetime:
    ...


def process_timestamp(ts: datetime | None) -> datetime | None:
    """Process a timestamp into a datetime object."""
    if ts is None:
        return None
    if ts.tzinfo is None:
        return ts.replace(tzinfo=dt_util.UTC)

    return dt_util.as_utc(ts)
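
# Illustrative behavior (dt_util.UTC is the stdlib UTC tzinfo):
#   >>> process_timestamp(None) is None
#   True
#   >>> process_timestamp(datetime(2016, 7, 9, 11, 0))  # naive is assumed UTC
#   datetime.datetime(2016, 7, 9, 11, 0, tzinfo=datetime.timezone.utc)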


@overload
def process_timestamp_to_utc_isoformat(ts: None) -> None:
    ...


@overload
def process_timestamp_to_utc_isoformat(ts: datetime) -> str:
    ...


def process_timestamp_to_utc_isoformat(ts: datetime | None) -> str | None:
    """Process a timestamp into UTC isotime."""
    if ts is None:
        return None
    if ts.tzinfo == dt_util.UTC:
        return ts.isoformat()
    if ts.tzinfo is None:
        return f"{ts.isoformat()}{DB_TIMEZONE}"
    return ts.astimezone(dt_util.UTC).isoformat()
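
# Illustrative behavior (a naive timestamp gets the DB_TIMEZONE suffix; an
# aware non-UTC one is converted to UTC first):
#   >>> process_timestamp_to_utc_isoformat(datetime(2020, 6, 22, 17, 6, 2))
#   '2020-06-22T17:06:02+00:00'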


def process_datetime_to_timestamp(ts: datetime) -> float:
    """Process a database datetime to epoch.

    Mirrors the behavior of process_timestamp_to_utc_isoformat
    except it returns the epoch time.
    """
    if ts.tzinfo is None or ts.tzinfo == dt_util.UTC:
        return dt_util.utc_to_timestamp(ts)
    return ts.timestamp()
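
# Illustrative behavior (naive and UTC-aware values take the fast path):
#   >>> process_datetime_to_timestamp(datetime(1970, 1, 1, 0, 0, 1))
#   1.0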


class LazyState(State):
    """A lazy version of core State."""

    __slots__ = [
        "_row",
        "_attributes",
        "_last_changed",
        "_last_updated",
        "_context",
        "attr_cache",
    ]

    def __init__(  # pylint: disable=super-init-not-called
        self,
        row: Row,
        attr_cache: dict[str, dict[str, Any]],
        start_time: datetime | None = None,
    ) -> None:
        """Init the lazy state."""
        self._row = row
        self.entity_id: str = self._row.entity_id
        self.state = self._row.state or ""
        self._attributes: dict[str, Any] | None = None
        self._last_changed: datetime | None = start_time
        self._last_updated: datetime | None = start_time
        self._context: Context | None = None
        self.attr_cache = attr_cache

    @property  # type: ignore[override]
    def attributes(self) -> dict[str, Any]:  # type: ignore[override]
        """State attributes."""
        if self._attributes is None:
            self._attributes = decode_attributes_from_row(self._row, self.attr_cache)
        return self._attributes

    @attributes.setter
    def attributes(self, value: dict[str, Any]) -> None:
        """Set attributes."""
        self._attributes = value

    @property  # type: ignore[override]
    def context(self) -> Context:  # type: ignore[override]
        """State context."""
        if self._context is None:
            self._context = Context(id=None)  # type: ignore[arg-type]
        return self._context

    @context.setter
    def context(self, value: Context) -> None:
        """Set context."""
        self._context = value

    @property  # type: ignore[override]
    def last_changed(self) -> datetime:  # type: ignore[override]
        """Last changed datetime."""
        if self._last_changed is None:
            if (last_changed := self._row.last_changed) is not None:
                self._last_changed = process_timestamp(last_changed)
            else:
                self._last_changed = self.last_updated
        return self._last_changed

    @last_changed.setter
    def last_changed(self, value: datetime) -> None:
        """Set last changed datetime."""
        self._last_changed = value

    @property  # type: ignore[override]
    def last_updated(self) -> datetime:  # type: ignore[override]
        """Last updated datetime."""
        if self._last_updated is None:
            self._last_updated = process_timestamp(self._row.last_updated)
        return self._last_updated

    @last_updated.setter
    def last_updated(self, value: datetime) -> None:
        """Set last updated datetime."""
        self._last_updated = value

    def as_dict(self) -> dict[str, Any]:  # type: ignore[override]
        """Return a dict representation of the LazyState.

        Async friendly.

        To be used for JSON serialization.
        """
        if self._last_changed is None and self._last_updated is None:
            last_updated_isoformat = process_timestamp_to_utc_isoformat(
                self._row.last_updated
            )
            if (
                self._row.last_changed is None
                or self._row.last_changed == self._row.last_updated
            ):
                last_changed_isoformat = last_updated_isoformat
            else:
                last_changed_isoformat = process_timestamp_to_utc_isoformat(
                    self._row.last_changed
                )
        else:
            last_updated_isoformat = self.last_updated.isoformat()
            if self.last_changed == self.last_updated:
                last_changed_isoformat = last_updated_isoformat
            else:
                last_changed_isoformat = self.last_changed.isoformat()
        return {
            "entity_id": self.entity_id,
            "state": self.state,
            "attributes": self._attributes or self.attributes,
            "last_changed": last_changed_isoformat,
            "last_updated": last_updated_isoformat,
        }

    def __eq__(self, other: Any) -> bool:
        """Return the comparison."""
        return (
            other.__class__ in [self.__class__, State]
            and self.entity_id == other.entity_id
            and self.state == other.state
            and self.attributes == other.attributes
        )
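
# Illustrative (a sketch; `row` is a Row from a states query joined against
# state_attributes): attributes, timestamps and context are only decoded when
# first accessed:
#   state = LazyState(row, attr_cache={})
#   state.as_dict()["last_updated"]  # decodes timestamps on demand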


def decode_attributes_from_row(
    row: Row, attr_cache: dict[str, dict[str, Any]]
) -> dict[str, Any]:
    """Decode attributes from a database row."""
    source: str = row.shared_attrs or row.attributes
    if (attributes := attr_cache.get(source)) is not None:
        return attributes
    if not source or source == EMPTY_JSON_OBJECT:
        return {}
    try:
        attr_cache[source] = attributes = json.loads(source)
    except ValueError:
        _LOGGER.exception("Error converting row to state attributes: %s", source)
        attr_cache[source] = attributes = {}
    return attributes
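
# Illustrative: because identical attribute payloads share one state_attributes
# row, a single cache dict passed across many rows decodes each payload once:
#   cache: dict[str, dict[str, Any]] = {}
#   attrs = [decode_attributes_from_row(row, cache) for row in rows]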


def row_to_compressed_state(
    row: Row,
    attr_cache: dict[str, dict[str, Any]],
    start_time: datetime | None = None,
) -> dict[str, Any]:
    """Convert a database row to a compressed state."""
    comp_state = {
        COMPRESSED_STATE_STATE: row.state,
        COMPRESSED_STATE_ATTRIBUTES: decode_attributes_from_row(row, attr_cache),
    }
    if start_time:
        comp_state[COMPRESSED_STATE_LAST_UPDATED] = start_time.timestamp()
    else:
        row_last_updated: datetime = row.last_updated
        comp_state[COMPRESSED_STATE_LAST_UPDATED] = process_datetime_to_timestamp(
            row_last_updated
        )
        if (
            row_last_changed := row.last_changed
        ) and row_last_updated != row_last_changed:
            comp_state[COMPRESSED_STATE_LAST_CHANGED] = process_datetime_to_timestamp(
                row_last_changed
            )
    return comp_state