"""Models for Recorder."""
from __future__ import annotations

import asyncio
from dataclasses import dataclass, field
from datetime import datetime
import logging
from typing import Any, TypedDict, overload

from sqlalchemy.engine.row import Row

from homeassistant.components.websocket_api.const import (
    COMPRESSED_STATE_ATTRIBUTES,
    COMPRESSED_STATE_LAST_CHANGED,
    COMPRESSED_STATE_LAST_UPDATED,
    COMPRESSED_STATE_STATE,
)
from homeassistant.core import Context, State
from homeassistant.helpers.json import json_loads
import homeassistant.util.dt as dt_util

# pylint: disable=invalid-name

_LOGGER = logging.getLogger(__name__)

DB_TIMEZONE = "+00:00"

EMPTY_JSON_OBJECT = "{}"


class UnsupportedDialect(Exception):
    """The dialect or its version is not supported."""


@dataclass
class RecorderData:
    """Recorder data stored in hass.data."""

    recorder_platforms: dict[str, Any] = field(default_factory=dict)
    db_connected: asyncio.Future = field(default_factory=asyncio.Future)


class StatisticResult(TypedDict):
    """Statistic result data class.

    Allows multiple datapoints for the same statistic_id.
    """

    meta: StatisticMetaData
    stat: StatisticData


class StatisticDataBase(TypedDict):
    """Mandatory fields for statistic data class."""

    start: datetime


class StatisticData(StatisticDataBase, total=False):
    """Statistic data class."""

    mean: float
    min: float
    max: float
    last_reset: datetime | None
    state: float
    sum: float


class StatisticMetaData(TypedDict):
    """Statistic meta data class."""

    has_mean: bool
    has_sum: bool
    name: str | None
    source: str
    statistic_id: str
    unit_of_measurement: str | None
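
# Illustrative example (not part of the original module; the entity id and
# values are made up): a StatisticResult pairs the metadata describing a
# statistic with one datapoint, and several results may share a statistic_id.
#
#   example: StatisticResult = {
#       "meta": {
#           "has_mean": True,
#           "has_sum": False,
#           "name": None,
#           "source": "recorder",
#           "statistic_id": "sensor.outdoor_temperature",
#           "unit_of_measurement": "°C",
#       },
#       "stat": {"start": dt_util.utcnow(), "mean": 21.5, "min": 18.2, "max": 24.1},
#   }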


@overload
def process_timestamp(ts: None) -> None:
    ...


@overload
def process_timestamp(ts: datetime) -> datetime:
    ...


def process_timestamp(ts: datetime | None) -> datetime | None:
    """Process a timestamp into datetime object."""
    if ts is None:
        return None
    if ts.tzinfo is None:
        return ts.replace(tzinfo=dt_util.UTC)

    return dt_util.as_utc(ts)
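
# Behavior sketch (illustrative, not in the original source): the recorder
# stores naive UTC datetimes, so naive values are tagged as UTC without
# conversion, while aware values are normalized to UTC.
#
#   process_timestamp(None)                      # -> None
#   process_timestamp(datetime(2022, 1, 1, 12))  # -> 2022-01-01 12:00:00+00:00
#   process_timestamp(dt_util.now())             # -> the same instant, in UTC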


@overload
def process_timestamp_to_utc_isoformat(ts: None) -> None:
    ...


@overload
def process_timestamp_to_utc_isoformat(ts: datetime) -> str:
    ...


def process_timestamp_to_utc_isoformat(ts: datetime | None) -> str | None:
    """Process a timestamp into UTC isotime."""
    if ts is None:
        return None
    if ts.tzinfo == dt_util.UTC:
        return ts.isoformat()
    if ts.tzinfo is None:
        return f"{ts.isoformat()}{DB_TIMEZONE}"
    return ts.astimezone(dt_util.UTC).isoformat()
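
# Behavior sketch (illustrative, not in the original source): the naive branch
# skips a timezone conversion and simply appends the fixed DB_TIMEZONE suffix.
#
#   process_timestamp_to_utc_isoformat(datetime(2022, 1, 1, 12))
#   # -> "2022-01-01T12:00:00+00:00"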


def process_datetime_to_timestamp(ts: datetime) -> float:
    """Process a database datetime to epoch.

    Mirrors the behavior of process_timestamp_to_utc_isoformat
    except it returns the epoch time.
    """
    if ts.tzinfo is None or ts.tzinfo == dt_util.UTC:
        return dt_util.utc_to_timestamp(ts)
    return ts.timestamp()
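
# Behavior sketch (illustrative, not in the original source): naive datetimes
# are treated as UTC, matching process_timestamp_to_utc_isoformat above.
#
#   process_datetime_to_timestamp(datetime(2022, 1, 1, tzinfo=dt_util.UTC))
#   # -> 1640995200.0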


class LazyState(State):
    """A lazy version of core State."""

    __slots__ = [
        "_row",
        "_attributes",
        "_last_changed",
        "_last_updated",
        "_context",
        "attr_cache",
    ]

    def __init__(  # pylint: disable=super-init-not-called
        self,
        row: Row,
        attr_cache: dict[str, dict[str, Any]],
        start_time: datetime | None = None,
    ) -> None:
        """Init the lazy state."""
        self._row = row
        self.entity_id: str = self._row.entity_id
        self.state = self._row.state or ""
        self._attributes: dict[str, Any] | None = None
        self._last_changed: datetime | None = start_time
        self._last_updated: datetime | None = start_time
        self._context: Context | None = None
        self.attr_cache = attr_cache

    @property  # type: ignore[override]
    def attributes(self) -> dict[str, Any]:  # type: ignore[override]
        """State attributes."""
        if self._attributes is None:
            self._attributes = decode_attributes_from_row(self._row, self.attr_cache)
        return self._attributes

    @attributes.setter
    def attributes(self, value: dict[str, Any]) -> None:
        """Set attributes."""
        self._attributes = value

    @property  # type: ignore[override]
    def context(self) -> Context:  # type: ignore[override]
        """State context."""
        if self._context is None:
            self._context = Context(id=None)
        return self._context

    @context.setter
    def context(self, value: Context) -> None:
        """Set context."""
        self._context = value

    @property  # type: ignore[override]
    def last_changed(self) -> datetime:  # type: ignore[override]
        """Last changed datetime."""
        if self._last_changed is None:
            if (last_changed := self._row.last_changed) is not None:
                self._last_changed = process_timestamp(last_changed)
            else:
                self._last_changed = self.last_updated
        return self._last_changed

    @last_changed.setter
    def last_changed(self, value: datetime) -> None:
        """Set last changed datetime."""
        self._last_changed = value

    @property  # type: ignore[override]
    def last_updated(self) -> datetime:  # type: ignore[override]
        """Last updated datetime."""
        if self._last_updated is None:
            self._last_updated = process_timestamp(self._row.last_updated)
        return self._last_updated

    @last_updated.setter
    def last_updated(self, value: datetime) -> None:
        """Set last updated datetime."""
        self._last_updated = value

    def as_dict(self) -> dict[str, Any]:  # type: ignore[override]
        """Return a dict representation of the LazyState.

        Async friendly.

        To be used for JSON serialization.
        """
        if self._last_changed is None and self._last_updated is None:
            last_updated_isoformat = process_timestamp_to_utc_isoformat(
                self._row.last_updated
            )
            if (
                self._row.last_changed is None
                or self._row.last_changed == self._row.last_updated
            ):
                last_changed_isoformat = last_updated_isoformat
            else:
                last_changed_isoformat = process_timestamp_to_utc_isoformat(
                    self._row.last_changed
                )
        else:
            last_updated_isoformat = self.last_updated.isoformat()
            if self.last_changed == self.last_updated:
                last_changed_isoformat = last_updated_isoformat
            else:
                last_changed_isoformat = self.last_changed.isoformat()
        return {
            "entity_id": self.entity_id,
            "state": self.state,
            "attributes": self._attributes or self.attributes,
            "last_changed": last_changed_isoformat,
            "last_updated": last_updated_isoformat,
        }

    def __eq__(self, other: Any) -> bool:
        """Return the comparison."""
        return (
            other.__class__ in [self.__class__, State]
            and self.entity_id == other.entity_id
            and self.state == other.state
            and self.attributes == other.attributes
        )
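
# Usage sketch (illustrative, not in the original source; `rows` is a
# hypothetical iterable of database rows): history queries can build states
# without eagerly decoding attribute JSON, sharing decoded blobs via attr_cache.
#
#   attr_cache: dict[str, dict[str, Any]] = {}
#   states = [LazyState(row, attr_cache) for row in rows]
#   states[0].attributes  # JSON is decoded only on first access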


def decode_attributes_from_row(
    row: Row, attr_cache: dict[str, dict[str, Any]]
) -> dict[str, Any]:
    """Decode attributes from a database row."""
    source: str = row.shared_attrs or row.attributes
    if (attributes := attr_cache.get(source)) is not None:
        return attributes
    if not source or source == EMPTY_JSON_OBJECT:
        return {}
    try:
        attr_cache[source] = attributes = json_loads(source)
    except ValueError:
        _LOGGER.exception("Error converting row to state attributes: %s", source)
        attr_cache[source] = attributes = {}
    return attributes
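
# Behavior sketch (illustrative, not in the original source; row_a and row_b
# are hypothetical rows): identical attribute JSON is decoded once and shared.
#
#   cache: dict[str, dict[str, Any]] = {}
#   attrs_a = decode_attributes_from_row(row_a, cache)  # decodes the JSON blob
#   attrs_b = decode_attributes_from_row(row_b, cache)  # cache hit (same dict
#   # object) when both rows carry the same attributes string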


def row_to_compressed_state(
    row: Row,
    attr_cache: dict[str, dict[str, Any]],
    start_time: datetime | None = None,
) -> dict[str, Any]:
    """Convert a database row to a compressed state."""
    comp_state = {
        COMPRESSED_STATE_STATE: row.state,
        COMPRESSED_STATE_ATTRIBUTES: decode_attributes_from_row(row, attr_cache),
    }
    if start_time:
        comp_state[COMPRESSED_STATE_LAST_UPDATED] = start_time.timestamp()
    else:
        row_last_updated: datetime = row.last_updated
        comp_state[COMPRESSED_STATE_LAST_UPDATED] = process_datetime_to_timestamp(
            row_last_updated
        )
        if (
            row_last_changed := row.last_changed
        ) and row_last_updated != row_last_changed:
            comp_state[COMPRESSED_STATE_LAST_CHANGED] = process_datetime_to_timestamp(
                row_last_changed
            )
    return comp_state
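
# Output sketch (illustrative, not in the original source; values are made up):
# compressed states shrink websocket payloads by using the short COMPRESSED_*
# keys and epoch floats, and by omitting last_changed when it equals
# last_updated.
#
#   {COMPRESSED_STATE_STATE: "on",
#    COMPRESSED_STATE_ATTRIBUTES: {"friendly_name": "Status"},
#    COMPRESSED_STATE_LAST_UPDATED: 1652800000.0}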