Reduce size of get_statistics_during_period WS API response (#82131)

pull/82207/head
Erik Montnemery 2022-11-16 17:36:30 +01:00 committed by GitHub
parent bb64b39d0e
commit 607a0e7697
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
12 changed files with 588 additions and 591 deletions

View File

@ -267,7 +267,7 @@ async def _insert_sum_statistics(
statistic_id = metadata["statistic_id"]
last_stats = await get_instance(hass).async_add_executor_job(
get_last_statistics, hass, 1, statistic_id, False
get_last_statistics, hass, 1, statistic_id, False, {"sum"}
)
if statistic_id in last_stats:
sum_ = last_stats[statistic_id][0]["sum"] or 0

View File

@ -272,8 +272,8 @@ async def ws_get_fossil_energy_consumption(
end_time,
statistic_ids,
"hour",
True,
{"energy": UnitOfEnergy.KILO_WATT_HOUR},
{"mean", "sum"},
)
def _combine_sum_statistics(

View File

@ -53,13 +53,7 @@ from .db_schema import (
StatisticsRuns,
StatisticsShortTerm,
)
from .models import (
StatisticData,
StatisticMetaData,
StatisticResult,
process_timestamp,
process_timestamp_to_utc_isoformat,
)
from .models import StatisticData, StatisticMetaData, StatisticResult, process_timestamp
from .util import (
execute,
execute_stmt_lambda_element,
@ -947,6 +941,7 @@ def _reduce_statistics(
same_period: Callable[[datetime, datetime], bool],
period_start_end: Callable[[datetime], tuple[datetime, datetime]],
period: timedelta,
types: set[Literal["last_reset", "max", "mean", "min", "state", "sum"]],
) -> dict[str, list[dict[str, Any]]]:
"""Reduce hourly statistics to daily or monthly statistics."""
result: dict[str, list[dict[str, Any]]] = defaultdict(list)
@ -963,19 +958,24 @@ def _reduce_statistics(
if not same_period(prev_stat["start"], statistic["start"]):
start, end = period_start_end(prev_stat["start"])
# The previous statistic was the last entry of the period
result[statistic_id].append(
{
"statistic_id": statistic_id,
"start": start.isoformat(),
"end": end.isoformat(),
"mean": mean(mean_values) if mean_values else None,
"min": min(min_values) if min_values else None,
"max": max(max_values) if max_values else None,
"last_reset": prev_stat.get("last_reset"),
"state": prev_stat.get("state"),
"sum": prev_stat["sum"],
}
)
row: dict[str, Any] = {
"start": start,
"end": end,
}
if "mean" in types:
row["mean"] = mean(mean_values) if mean_values else None
if "min" in types:
row["min"] = min(min_values) if min_values else None
if "max" in types:
row["max"] = max(max_values) if max_values else None
if "last_reset" in types:
row["last_reset"] = prev_stat.get("last_reset")
if "state" in types:
row["state"] = prev_stat.get("state")
if "sum" in types:
row["sum"] = prev_stat["sum"]
result[statistic_id].append(row)
max_values = []
mean_values = []
min_values = []
@ -1007,11 +1007,12 @@ def day_start_end(time: datetime) -> tuple[datetime, datetime]:
def _reduce_statistics_per_day(
    stats: dict[str, list[dict[str, Any]]],
    types: set[Literal["last_reset", "max", "mean", "min", "state", "sum"]],
) -> dict[str, list[dict[str, Any]]]:
    """Reduce hourly statistics to daily statistics.

    `stats` maps statistic_id -> hourly rows; only the keys named in `types`
    are carried into the reduced rows. Delegates to _reduce_statistics with
    day boundaries and a one-day period.
    """
    # Fix: stripped-diff residue left a duplicate signature line (missing
    # trailing comma) and a second, unreachable return without `types`.
    return _reduce_statistics(stats, same_day, day_start_end, timedelta(days=1), types)
def same_week(time1: datetime, time2: datetime) -> bool:
@ -1037,10 +1038,13 @@ def week_start_end(time: datetime) -> tuple[datetime, datetime]:
def _reduce_statistics_per_week(
    stats: dict[str, list[dict[str, Any]]],
    types: set[Literal["last_reset", "max", "mean", "min", "state", "sum"]],
) -> dict[str, list[dict[str, Any]]]:
    """Reduce hourly statistics to weekly statistics.

    `stats` maps statistic_id -> hourly rows; only the keys named in `types`
    are carried into the reduced rows. Delegates to _reduce_statistics with
    week boundaries and a seven-day period.
    """
    # Fix: stripped-diff residue left an unreachable duplicate return that
    # omitted the `types` argument.
    return _reduce_statistics(
        stats, same_week, week_start_end, timedelta(days=7), types
    )
def same_month(time1: datetime, time2: datetime) -> bool:
@ -1063,53 +1067,47 @@ def month_start_end(time: datetime) -> tuple[datetime, datetime]:
def _reduce_statistics_per_month(
    stats: dict[str, list[dict[str, Any]]],
    types: set[Literal["last_reset", "max", "mean", "min", "state", "sum"]],
) -> dict[str, list[dict[str, Any]]]:
    """Reduce hourly statistics to monthly statistics.

    `stats` maps statistic_id -> hourly rows; only the keys named in `types`
    are carried into the reduced rows. A 31-day timedelta is passed as the
    nominal period; month_start_end computes the real month boundaries.
    """
    # Fix: stripped-diff residue left an unreachable duplicate return that
    # omitted the `types` argument.
    return _reduce_statistics(
        stats, same_month, month_start_end, timedelta(days=31), types
    )
def _statistics_during_period_stmt(
    start_time: datetime,
    end_time: datetime | None,
    metadata_ids: list[int] | None,
    table: type[Statistics | StatisticsShortTerm],
    types: set[Literal["last_reset", "max", "mean", "min", "state", "sum"]],
) -> StatementLambdaElement:
    """Prepare a database query for statistics during a given period.

    This prepares a lambda_stmt query, so we don't insert the parameters yet.

    Only the columns named in `types` are selected (metadata_id and start are
    always included) to keep the result set small; `table` selects between the
    long-term and short-term statistics tables.
    """
    # Fix: stripped-diff residue interleaved the old Statistics-only body with
    # the new table-parameterized one; this is the resolved generalized form.
    columns = [table.metadata_id, table.start]
    if "last_reset" in types:
        columns.append(table.last_reset)
    if "max" in types:
        columns.append(table.max)
    if "mean" in types:
        columns.append(table.mean)
    if "min" in types:
        columns.append(table.min)
    if "state" in types:
        columns.append(table.state)
    if "sum" in types:
        columns.append(table.sum)
    stmt = lambda_stmt(lambda: select(columns).filter(table.start >= start_time))
    if end_time is not None:
        stmt += lambda q: q.filter(table.start < end_time)
    if metadata_ids:
        stmt += lambda q: q.filter(table.metadata_id.in_(metadata_ids))
    stmt += lambda q: q.order_by(table.metadata_id, table.start)
    return stmt


def _statistics_during_period_stmt_short_term(
    start_time: datetime,
    end_time: datetime | None,
    metadata_ids: list[int] | None,
) -> StatementLambdaElement:
    """Prepare a database query for short term statistics during a given period.

    This prepares a lambda_stmt query, so we don't insert the parameters yet.

    Kept for backward compatibility with callers that have not migrated to the
    table-parameterized _statistics_during_period_stmt; selects all columns.
    """
    stmt = lambda_stmt(
        lambda: select(*QUERY_STATISTICS_SHORT_TERM).filter(
            StatisticsShortTerm.start >= start_time
        )
    )
    if end_time is not None:
        stmt += lambda q: q.filter(StatisticsShortTerm.start < end_time)
    if metadata_ids:
        stmt += lambda q: q.filter(StatisticsShortTerm.metadata_id.in_(metadata_ids))
    stmt += lambda q: q.order_by(
        StatisticsShortTerm.metadata_id, StatisticsShortTerm.start
    )
    return stmt
@ -1119,7 +1117,7 @@ def _get_max_mean_min_statistic_in_sub_period(
start_time: datetime | None,
end_time: datetime | None,
table: type[Statistics | StatisticsShortTerm],
types: set[str],
types: set[Literal["max", "mean", "min", "change"]],
metadata_id: int,
) -> None:
"""Return max, mean and min during the period."""
@ -1160,7 +1158,7 @@ def _get_max_mean_min_statistic(
tail_end_time: datetime | None,
tail_only: bool,
metadata_id: int,
types: set[str],
types: set[Literal["max", "mean", "min", "change"]],
) -> dict[str, float | None]:
"""Return max, mean and min during the period.
@ -1380,7 +1378,7 @@ def statistic_during_period(
start_time: datetime | None,
end_time: datetime | None,
statistic_id: str,
types: set[str] | None,
types: set[Literal["max", "mean", "min", "change"]] | None,
units: dict[str, str] | None,
) -> dict[str, Any]:
"""Return a statistic data point for the UTC period start_time - end_time."""
@ -1534,11 +1532,11 @@ def statistic_during_period(
def statistics_during_period(
hass: HomeAssistant,
start_time: datetime,
end_time: datetime | None = None,
statistic_ids: list[str] | None = None,
period: Literal["5minute", "day", "hour", "week", "month"] = "hour",
start_time_as_datetime: bool = False,
units: dict[str, str] | None = None,
end_time: datetime | None,
statistic_ids: list[str] | None,
period: Literal["5minute", "day", "hour", "week", "month"],
units: dict[str, str] | None,
types: set[Literal["last_reset", "max", "mean", "min", "state", "sum"]],
) -> dict[str, list[dict[str, Any]]]:
"""Return statistic data points during UTC period start_time - end_time.
@ -1556,14 +1554,12 @@ def statistics_during_period(
if statistic_ids is not None:
metadata_ids = [metadata_id for metadata_id, _ in metadata.values()]
if period == "5minute":
table = StatisticsShortTerm
stmt = _statistics_during_period_stmt_short_term(
start_time, end_time, metadata_ids
)
else:
table = Statistics
stmt = _statistics_during_period_stmt(start_time, end_time, metadata_ids)
table: type[Statistics | StatisticsShortTerm] = (
Statistics if period != "5minute" else StatisticsShortTerm
)
stmt = _statistics_during_period_stmt(
start_time, end_time, metadata_ids, table, types
)
stats = execute_stmt_lambda_element(session, stmt)
if not stats:
@ -1579,8 +1575,8 @@ def statistics_during_period(
True,
table,
start_time,
start_time_as_datetime,
units,
types,
)
result = _sorted_statistics_to_dict(
@ -1592,17 +1588,17 @@ def statistics_during_period(
True,
table,
start_time,
True,
units,
types,
)
if period == "day":
return _reduce_statistics_per_day(result)
return _reduce_statistics_per_day(result, types)
if period == "week":
return _reduce_statistics_per_week(result)
return _reduce_statistics_per_week(result, types)
return _reduce_statistics_per_month(result)
return _reduce_statistics_per_month(result, types)
def _get_last_statistics_stmt(
@ -1637,6 +1633,7 @@ def _get_last_statistics(
statistic_id: str,
convert_units: bool,
table: type[Statistics | StatisticsShortTerm],
types: set[Literal["last_reset", "max", "mean", "min", "state", "sum"]],
) -> dict[str, list[dict]]:
"""Return the last number_of_stats statistics for a given statistic_id."""
statistic_ids = [statistic_id]
@ -1665,26 +1662,34 @@ def _get_last_statistics(
convert_units,
table,
None,
False,
None,
types,
)
def get_last_statistics(
    hass: HomeAssistant,
    number_of_stats: int,
    statistic_id: str,
    convert_units: bool,
    types: set[Literal["last_reset", "max", "mean", "min", "state", "sum"]],
) -> dict[str, list[dict]]:
    """Return the last number_of_stats statistics for a statistic_id.

    Thin wrapper binding _get_last_statistics to the long-term Statistics
    table; `types` limits which statistic columns are fetched.
    """
    # Fix: stripped-diff residue left the old one-line signature and the old
    # call (without Statistics/types) duplicated alongside the new ones.
    return _get_last_statistics(
        hass, number_of_stats, statistic_id, convert_units, Statistics, types
    )
def get_last_short_term_statistics(
    hass: HomeAssistant,
    number_of_stats: int,
    statistic_id: str,
    convert_units: bool,
    types: set[Literal["last_reset", "max", "mean", "min", "state", "sum"]],
) -> dict[str, list[dict]]:
    """Return the last number_of_stats short term statistics for a statistic_id.

    Thin wrapper binding _get_last_statistics to the short-term table;
    `types` limits which statistic columns are fetched.
    """
    # Fix: stripped-diff residue left the old one-line signature and the old
    # call (without StatisticsShortTerm/types) duplicated alongside the new ones.
    return _get_last_statistics(
        hass, number_of_stats, statistic_id, convert_units, StatisticsShortTerm, types
    )
@ -1720,6 +1725,7 @@ def _latest_short_term_statistics_stmt(
def get_latest_short_term_statistics(
hass: HomeAssistant,
statistic_ids: list[str],
types: set[Literal["last_reset", "max", "mean", "min", "state", "sum"]],
metadata: dict[str, tuple[int, StatisticMetaData]] | None = None,
) -> dict[str, list[dict]]:
"""Return the latest short term statistics for a list of statistic_ids."""
@ -1749,8 +1755,8 @@ def get_latest_short_term_statistics(
False,
StatisticsShortTerm,
None,
False,
None,
types,
)
@ -1759,31 +1765,38 @@ def _statistics_at_time(
metadata_ids: set[int],
table: type[Statistics | StatisticsShortTerm],
start_time: datetime,
types: set[Literal["last_reset", "max", "mean", "min", "state", "sum"]],
) -> list | None:
"""Return last known statistics, earlier than start_time, for the metadata_ids."""
# Fetch metadata for the given (or all) statistic_ids
if table == StatisticsShortTerm:
base_query = QUERY_STATISTICS_SHORT_TERM
else:
base_query = QUERY_STATISTICS
columns = [table.metadata_id, table.start]
if "last_reset" in types:
columns.append(table.last_reset)
if "max" in types:
columns.append(table.max)
if "mean" in types:
columns.append(table.mean)
if "min" in types:
columns.append(table.min)
if "state" in types:
columns.append(table.state)
if "sum" in types:
columns.append(table.sum)
query = session.query(*base_query)
stmt = lambda_stmt(lambda: select(columns))
most_recent_statistic_ids = (
session.query(
func.max(table.id).label("max_id"),
)
lambda_stmt(lambda: select(func.max(table.id).label("max_id")))
.filter(table.start < start_time)
.filter(table.metadata_id.in_(metadata_ids))
.group_by(table.metadata_id)
.subquery()
)
most_recent_statistic_ids = most_recent_statistic_ids.group_by(table.metadata_id)
most_recent_statistic_ids = most_recent_statistic_ids.subquery()
query = query.join(
stmt += lambda q: q.join(
most_recent_statistic_ids,
table.id == most_recent_statistic_ids.c.max_id,
)
return execute(query)
return execute_stmt_lambda_element(session, stmt)
def _sorted_statistics_to_dict(
@ -1795,8 +1808,8 @@ def _sorted_statistics_to_dict(
convert_units: bool,
table: type[Statistics | StatisticsShortTerm],
start_time: datetime | None,
start_time_as_datetime: bool,
units: dict[str, str] | None,
types: set[Literal["last_reset", "max", "mean", "min", "state", "sum"]],
) -> dict[str, list[dict]]:
"""Convert SQL results into JSON friendly data structure."""
result: dict = defaultdict(list)
@ -1822,7 +1835,9 @@ def _sorted_statistics_to_dict(
# Fetch last known statistics for the needed metadata IDs
if need_stat_at_start_time:
assert start_time # Can not be None if need_stat_at_start_time is not empty
tmp = _statistics_at_time(session, need_stat_at_start_time, table, start_time)
tmp = _statistics_at_time(
session, need_stat_at_start_time, table, start_time, types
)
if tmp:
for stat in tmp:
stats_at_start_time[stat.metadata_id] = (stat,)
@ -1841,21 +1856,24 @@ def _sorted_statistics_to_dict(
for db_state in chain(stats_at_start_time.get(meta_id, ()), group):
start = process_timestamp(db_state.start)
end = start + table.duration
ent_results.append(
{
"statistic_id": statistic_id,
"start": start if start_time_as_datetime else start.isoformat(),
"end": end.isoformat(),
"mean": convert(db_state.mean),
"min": convert(db_state.min),
"max": convert(db_state.max),
"last_reset": process_timestamp_to_utc_isoformat(
db_state.last_reset
),
"state": convert(db_state.state),
"sum": convert(db_state.sum),
}
)
row = {
"start": start,
"end": end,
}
if "mean" in types:
row["mean"] = convert(db_state.mean)
if "min" in types:
row["min"] = convert(db_state.min)
if "max" in types:
row["max"] = convert(db_state.max)
if "last_reset" in types:
row["last_reset"] = process_timestamp(db_state.last_reset)
if "state" in types:
row["state"] = convert(db_state.state)
if "sum" in types:
row["sum"] = convert(db_state.sum)
ent_results.append(row)
# Filter out the empty lists if some states had 0 results.
return {metadata[key]["statistic_id"]: val for key, val in result.items() if val}

View File

@ -65,7 +65,7 @@ def _ws_get_statistic_during_period(
start_time: dt | None,
end_time: dt | None,
statistic_id: str,
types: set[str] | None,
types: set[Literal["max", "mean", "min", "change"]] | None,
units: dict[str, str],
) -> str:
"""Fetch statistics and convert them to json in the executor."""
@ -101,7 +101,9 @@ def _ws_get_statistic_during_period(
}
),
vol.Optional("statistic_id"): str,
vol.Optional("types"): vol.All([str], vol.Coerce(set)),
vol.Optional("types"): vol.All(
[vol.Any("max", "mean", "min", "change")], vol.Coerce(set)
),
vol.Optional("units"): vol.Schema(
{
vol.Optional("distance"): vol.In(DistanceConverter.VALID_UNITS),
@ -210,16 +212,27 @@ def _ws_get_statistics_during_period(
statistic_ids: list[str] | None,
period: Literal["5minute", "day", "hour", "week", "month"],
units: dict[str, str],
types: set[Literal["last_reset", "max", "mean", "min", "state", "sum"]],
) -> str:
"""Fetch statistics and convert them to json in the executor."""
return JSON_DUMP(
messages.result_message(
msg_id,
statistics_during_period(
hass, start_time, end_time, statistic_ids, period, units=units
),
)
result = statistics_during_period(
hass,
start_time,
end_time,
statistic_ids,
period,
units,
types,
)
for statistic_id in result:
for item in result[statistic_id]:
if (start := item.get("start")) is not None:
item["start"] = int(start.timestamp() * 1000)
if (end := item.get("end")) is not None:
item["end"] = int(end.timestamp() * 1000)
if (last_reset := item.get("last_reset")) is not None:
item["last_reset"] = int(last_reset.timestamp() * 1000)
return JSON_DUMP(messages.result_message(msg_id, result))
async def ws_handle_get_statistics_during_period(
@ -244,6 +257,8 @@ async def ws_handle_get_statistics_during_period(
else:
end_time = None
if (types := msg.get("types")) is None:
types = {"last_reset", "max", "mean", "min", "state", "sum"}
connection.send_message(
await get_instance(hass).async_add_executor_job(
_ws_get_statistics_during_period,
@ -254,6 +269,7 @@ async def ws_handle_get_statistics_during_period(
msg.get("statistic_ids"),
msg.get("period"),
msg.get("units"),
types,
)
)
@ -277,6 +293,10 @@ async def ws_handle_get_statistics_during_period(
vol.Optional("volume"): vol.In(VolumeConverter.VALID_UNITS),
}
),
vol.Optional("types"): vol.All(
[vol.Any("last_reset", "max", "mean", "min", "state", "sum")],
vol.Coerce(set),
),
}
)
@websocket_api.async_response

View File

@ -450,7 +450,7 @@ def _compile_statistics( # noqa: C901
to_query.append(entity_id)
last_stats = statistics.get_latest_short_term_statistics(
hass, to_query, metadata=old_metadatas
hass, to_query, {"last_reset", "state", "sum"}, metadata=old_metadatas
)
for ( # pylint: disable=too-many-nested-blocks
entity_id,
@ -508,6 +508,8 @@ def _compile_statistics( # noqa: C901
if entity_id in last_stats:
# We have compiled history for this sensor before, use that as a starting point
last_reset = old_last_reset = last_stats[entity_id][0]["last_reset"]
if old_last_reset is not None:
last_reset = old_last_reset = old_last_reset.isoformat()
new_state = old_state = last_stats[entity_id][0]["state"]
_sum = last_stats[entity_id][0]["sum"] or 0.0

View File

@ -591,7 +591,7 @@ class TibberDataCoordinator(DataUpdateCoordinator):
)
last_stats = await get_instance(self.hass).async_add_executor_job(
get_last_statistics, self.hass, 1, statistic_id, True
get_last_statistics, self.hass, 1, statistic_id, True, {}
)
if not last_stats:
@ -613,7 +613,7 @@ class TibberDataCoordinator(DataUpdateCoordinator):
else home.hourly_consumption_data
)
from_time = dt_util.parse_datetime(hourly_data[0]["from"])
from_time = hourly_data[0]["from"]
if from_time is None:
continue
start = from_time - timedelta(hours=1)
@ -624,7 +624,8 @@ class TibberDataCoordinator(DataUpdateCoordinator):
None,
[statistic_id],
"hour",
True,
None,
{"sum"},
)
_sum = stat[statistic_id][0]["sum"]
last_stats_time = stat[statistic_id][0]["start"]

View File

@ -114,7 +114,7 @@ async def test_demo_statistics_growth(recorder_mock, mock_history, hass):
await async_wait_recording_done(hass)
statistics = await get_instance(hass).async_add_executor_job(
get_last_statistics, hass, 1, statistic_id, False
get_last_statistics, hass, 1, statistic_id, False, {"sum"}
)
assert statistics[statistic_id][0]["sum"] > 2**20
assert statistics[statistic_id][0]["sum"] <= (2**20 + 24)

View File

@ -5,7 +5,7 @@ import asyncio
from dataclasses import dataclass
from datetime import datetime
import time
from typing import Any, cast
from typing import Any, Literal, cast
from sqlalchemy import create_engine
from sqlalchemy.orm.session import Session
@ -137,3 +137,21 @@ def run_information_with_session(
session.expunge(res)
return cast(RecorderRuns, res)
return res
def statistics_during_period(
    hass: HomeAssistant,
    start_time: datetime,
    end_time: datetime | None = None,
    statistic_ids: list[str] | None = None,
    period: Literal["5minute", "day", "hour", "week", "month"] = "hour",
    units: dict[str, str] | None = None,
    types: set[Literal["last_reset", "max", "mean", "min", "state", "sum"]]
    | None = None,
) -> dict[str, list[dict[str, Any]]]:
    """Test helper wrapping statistics.statistics_during_period with defaults.

    When `types` is omitted, every statistic column is requested so existing
    tests see the full rows.
    """
    requested_types = (
        {"last_reset", "max", "mean", "min", "state", "sum"}
        if types is None
        else types
    )
    return statistics.statistics_during_period(
        hass,
        start_time,
        end_time,
        statistic_ids,
        period,
        units,
        requested_types,
    )

View File

@ -14,7 +14,7 @@ from homeassistant.components import recorder
from homeassistant.components.recorder import history, statistics
from homeassistant.components.recorder.const import SQLITE_URL_PREFIX
from homeassistant.components.recorder.db_schema import StatisticsShortTerm
from homeassistant.components.recorder.models import process_timestamp_to_utc_isoformat
from homeassistant.components.recorder.models import process_timestamp
from homeassistant.components.recorder.statistics import (
async_add_external_statistics,
async_import_statistics,
@ -25,7 +25,6 @@ from homeassistant.components.recorder.statistics import (
get_latest_short_term_statistics,
get_metadata,
list_statistic_ids,
statistics_during_period,
)
from homeassistant.components.recorder.util import session_scope
from homeassistant.const import TEMP_CELSIUS
@ -35,7 +34,12 @@ from homeassistant.helpers import recorder as recorder_helper
from homeassistant.setup import setup_component
import homeassistant.util.dt as dt_util
from .common import async_wait_recording_done, do_adhoc_statistics, wait_recording_done
from .common import (
async_wait_recording_done,
do_adhoc_statistics,
statistics_during_period,
wait_recording_done,
)
from tests.common import get_test_home_assistant, mock_registry
@ -52,22 +56,29 @@ def test_compile_hourly_statistics(hass_recorder):
assert dict(states) == dict(hist)
# Should not fail if there is nothing there yet
stats = get_latest_short_term_statistics(hass, ["sensor.test1"])
stats = get_latest_short_term_statistics(
hass, ["sensor.test1"], {"last_reset", "max", "mean", "min", "state", "sum"}
)
assert stats == {}
for kwargs in ({}, {"statistic_ids": ["sensor.test1"]}):
stats = statistics_during_period(hass, zero, period="5minute", **kwargs)
assert stats == {}
stats = get_last_short_term_statistics(hass, 0, "sensor.test1", True)
stats = get_last_short_term_statistics(
hass,
0,
"sensor.test1",
True,
{"last_reset", "max", "mean", "min", "state", "sum"},
)
assert stats == {}
do_adhoc_statistics(hass, start=zero)
do_adhoc_statistics(hass, start=four)
wait_recording_done(hass)
expected_1 = {
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(zero),
"end": process_timestamp_to_utc_isoformat(zero + timedelta(minutes=5)),
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"mean": approx(14.915254237288135),
"min": approx(10.0),
"max": approx(20.0),
@ -76,9 +87,8 @@ def test_compile_hourly_statistics(hass_recorder):
"sum": None,
}
expected_2 = {
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(four),
"end": process_timestamp_to_utc_isoformat(four + timedelta(minutes=5)),
"start": process_timestamp(four),
"end": process_timestamp(four + timedelta(minutes=5)),
"mean": approx(20.0),
"min": approx(20.0),
"max": approx(20.0),
@ -86,14 +96,8 @@ def test_compile_hourly_statistics(hass_recorder):
"state": None,
"sum": None,
}
expected_stats1 = [
{**expected_1, "statistic_id": "sensor.test1"},
{**expected_2, "statistic_id": "sensor.test1"},
]
expected_stats2 = [
{**expected_1, "statistic_id": "sensor.test2"},
{**expected_2, "statistic_id": "sensor.test2"},
]
expected_stats1 = [expected_1, expected_2]
expected_stats2 = [expected_1, expected_2]
# Test statistics_during_period
stats = statistics_during_period(hass, zero, period="5minute")
@ -119,32 +123,71 @@ def test_compile_hourly_statistics(hass_recorder):
assert stats == {}
# Test get_last_short_term_statistics and get_latest_short_term_statistics
stats = get_last_short_term_statistics(hass, 0, "sensor.test1", True)
stats = get_last_short_term_statistics(
hass,
0,
"sensor.test1",
True,
{"last_reset", "max", "mean", "min", "state", "sum"},
)
assert stats == {}
stats = get_last_short_term_statistics(hass, 1, "sensor.test1", True)
assert stats == {"sensor.test1": [{**expected_2, "statistic_id": "sensor.test1"}]}
stats = get_last_short_term_statistics(
hass,
1,
"sensor.test1",
True,
{"last_reset", "max", "mean", "min", "state", "sum"},
)
assert stats == {"sensor.test1": [expected_2]}
stats = get_latest_short_term_statistics(hass, ["sensor.test1"])
assert stats == {"sensor.test1": [{**expected_2, "statistic_id": "sensor.test1"}]}
stats = get_latest_short_term_statistics(
hass, ["sensor.test1"], {"last_reset", "max", "mean", "min", "state", "sum"}
)
assert stats == {"sensor.test1": [expected_2]}
metadata = get_metadata(hass, statistic_ids=['sensor.test1"'])
stats = get_latest_short_term_statistics(hass, ["sensor.test1"], metadata=metadata)
assert stats == {"sensor.test1": [{**expected_2, "statistic_id": "sensor.test1"}]}
stats = get_latest_short_term_statistics(
hass,
["sensor.test1"],
{"last_reset", "max", "mean", "min", "state", "sum"},
metadata=metadata,
)
assert stats == {"sensor.test1": [expected_2]}
stats = get_last_short_term_statistics(hass, 2, "sensor.test1", True)
stats = get_last_short_term_statistics(
hass,
2,
"sensor.test1",
True,
{"last_reset", "max", "mean", "min", "state", "sum"},
)
assert stats == {"sensor.test1": expected_stats1[::-1]}
stats = get_last_short_term_statistics(hass, 3, "sensor.test1", True)
stats = get_last_short_term_statistics(
hass,
3,
"sensor.test1",
True,
{"last_reset", "max", "mean", "min", "state", "sum"},
)
assert stats == {"sensor.test1": expected_stats1[::-1]}
stats = get_last_short_term_statistics(hass, 1, "sensor.test3", True)
stats = get_last_short_term_statistics(
hass,
1,
"sensor.test3",
True,
{"last_reset", "max", "mean", "min", "state", "sum"},
)
assert stats == {}
instance.get_session().query(StatisticsShortTerm).delete()
# Should not fail there is nothing in the table
stats = get_latest_short_term_statistics(hass, ["sensor.test1"])
stats = get_latest_short_term_statistics(
hass, ["sensor.test1"], {"last_reset", "max", "mean", "min", "state", "sum"}
)
assert stats == {}
@ -218,9 +261,8 @@ def test_compile_periodic_statistics_exception(
do_adhoc_statistics(hass, start=now + timedelta(minutes=5))
wait_recording_done(hass)
expected_1 = {
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(now),
"end": process_timestamp_to_utc_isoformat(now + timedelta(minutes=5)),
"start": process_timestamp(now),
"end": process_timestamp(now + timedelta(minutes=5)),
"mean": None,
"min": None,
"max": None,
@ -229,9 +271,8 @@ def test_compile_periodic_statistics_exception(
"sum": None,
}
expected_2 = {
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(now + timedelta(minutes=5)),
"end": process_timestamp_to_utc_isoformat(now + timedelta(minutes=10)),
"start": process_timestamp(now + timedelta(minutes=5)),
"end": process_timestamp(now + timedelta(minutes=10)),
"mean": None,
"min": None,
"max": None,
@ -239,17 +280,9 @@ def test_compile_periodic_statistics_exception(
"state": None,
"sum": None,
}
expected_stats1 = [
{**expected_1, "statistic_id": "sensor.test1"},
{**expected_2, "statistic_id": "sensor.test1"},
]
expected_stats2 = [
{**expected_2, "statistic_id": "sensor.test2"},
]
expected_stats3 = [
{**expected_1, "statistic_id": "sensor.test3"},
{**expected_2, "statistic_id": "sensor.test3"},
]
expected_stats1 = [expected_1, expected_2]
expected_stats2 = [expected_2]
expected_stats3 = [expected_1, expected_2]
stats = statistics_during_period(hass, now, period="5minute")
assert stats == {
@ -286,15 +319,20 @@ def test_rename_entity(hass_recorder):
for kwargs in ({}, {"statistic_ids": ["sensor.test1"]}):
stats = statistics_during_period(hass, zero, period="5minute", **kwargs)
assert stats == {}
stats = get_last_short_term_statistics(hass, 0, "sensor.test1", True)
stats = get_last_short_term_statistics(
hass,
0,
"sensor.test1",
True,
{"last_reset", "max", "mean", "min", "state", "sum"},
)
assert stats == {}
do_adhoc_statistics(hass, start=zero)
wait_recording_done(hass)
expected_1 = {
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(zero),
"end": process_timestamp_to_utc_isoformat(zero + timedelta(minutes=5)),
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"mean": approx(14.915254237288135),
"min": approx(10.0),
"max": approx(20.0),
@ -302,15 +340,9 @@ def test_rename_entity(hass_recorder):
"state": None,
"sum": None,
}
expected_stats1 = [
{**expected_1, "statistic_id": "sensor.test1"},
]
expected_stats2 = [
{**expected_1, "statistic_id": "sensor.test2"},
]
expected_stats99 = [
{**expected_1, "statistic_id": "sensor.test99"},
]
expected_stats1 = [expected_1]
expected_stats2 = [expected_1]
expected_stats99 = [expected_1]
stats = statistics_during_period(hass, zero, period="5minute")
assert stats == {"sensor.test1": expected_stats1, "sensor.test2": expected_stats2}
@ -353,15 +385,20 @@ def test_rename_entity_collision(hass_recorder, caplog):
for kwargs in ({}, {"statistic_ids": ["sensor.test1"]}):
stats = statistics_during_period(hass, zero, period="5minute", **kwargs)
assert stats == {}
stats = get_last_short_term_statistics(hass, 0, "sensor.test1", True)
stats = get_last_short_term_statistics(
hass,
0,
"sensor.test1",
True,
{"last_reset", "max", "mean", "min", "state", "sum"},
)
assert stats == {}
do_adhoc_statistics(hass, start=zero)
wait_recording_done(hass)
expected_1 = {
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(zero),
"end": process_timestamp_to_utc_isoformat(zero + timedelta(minutes=5)),
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"mean": approx(14.915254237288135),
"min": approx(10.0),
"max": approx(20.0),
@ -369,12 +406,8 @@ def test_rename_entity_collision(hass_recorder, caplog):
"state": None,
"sum": None,
}
expected_stats1 = [
{**expected_1, "statistic_id": "sensor.test1"},
]
expected_stats2 = [
{**expected_1, "statistic_id": "sensor.test2"},
]
expected_stats1 = [expected_1]
expected_stats2 = [expected_1]
stats = statistics_during_period(hass, zero, period="5minute")
assert stats == {"sensor.test1": expected_stats1, "sensor.test2": expected_stats2}
@ -465,7 +498,7 @@ async def test_import_statistics(
zero = dt_util.utcnow()
last_reset = dt_util.parse_datetime(last_reset_str) if last_reset_str else None
last_reset_utc_str = dt_util.as_utc(last_reset).isoformat() if last_reset else None
last_reset_utc = dt_util.as_utc(last_reset) if last_reset else None
period1 = zero.replace(minute=0, second=0, microsecond=0) + timedelta(hours=1)
period2 = zero.replace(minute=0, second=0, microsecond=0) + timedelta(hours=2)
@ -497,24 +530,22 @@ async def test_import_statistics(
assert stats == {
statistic_id: [
{
"statistic_id": statistic_id,
"start": period1.isoformat(),
"end": (period1 + timedelta(hours=1)).isoformat(),
"start": process_timestamp(period1),
"end": process_timestamp(period1 + timedelta(hours=1)),
"max": None,
"mean": None,
"min": None,
"last_reset": last_reset_utc_str,
"last_reset": last_reset_utc,
"state": approx(0.0),
"sum": approx(2.0),
},
{
"statistic_id": statistic_id,
"start": period2.isoformat(),
"end": (period2 + timedelta(hours=1)).isoformat(),
"start": process_timestamp(period2),
"end": process_timestamp(period2 + timedelta(hours=1)),
"max": None,
"mean": None,
"min": None,
"last_reset": last_reset_utc_str,
"last_reset": last_reset_utc,
"state": approx(1.0),
"sum": approx(3.0),
},
@ -546,17 +577,22 @@ async def test_import_statistics(
},
)
}
last_stats = get_last_statistics(hass, 1, statistic_id, True)
last_stats = get_last_statistics(
hass,
1,
statistic_id,
True,
{"last_reset", "max", "mean", "min", "state", "sum"},
)
assert last_stats == {
statistic_id: [
{
"statistic_id": statistic_id,
"start": period2.isoformat(),
"end": (period2 + timedelta(hours=1)).isoformat(),
"start": process_timestamp(period2),
"end": process_timestamp(period2 + timedelta(hours=1)),
"max": None,
"mean": None,
"min": None,
"last_reset": last_reset_utc_str,
"last_reset": last_reset_utc,
"state": approx(1.0),
"sum": approx(3.0),
},
@ -576,9 +612,8 @@ async def test_import_statistics(
assert stats == {
statistic_id: [
{
"statistic_id": statistic_id,
"start": period1.isoformat(),
"end": (period1 + timedelta(hours=1)).isoformat(),
"start": process_timestamp(period1),
"end": process_timestamp(period1 + timedelta(hours=1)),
"max": None,
"mean": None,
"min": None,
@ -587,13 +622,12 @@ async def test_import_statistics(
"sum": approx(6.0),
},
{
"statistic_id": statistic_id,
"start": period2.isoformat(),
"end": (period2 + timedelta(hours=1)).isoformat(),
"start": process_timestamp(period2),
"end": process_timestamp(period2 + timedelta(hours=1)),
"max": None,
"mean": None,
"min": None,
"last_reset": last_reset_utc_str,
"last_reset": last_reset_utc,
"state": approx(1.0),
"sum": approx(3.0),
},
@ -643,24 +677,22 @@ async def test_import_statistics(
assert stats == {
statistic_id: [
{
"statistic_id": statistic_id,
"start": period1.isoformat(),
"end": (period1 + timedelta(hours=1)).isoformat(),
"start": process_timestamp(period1),
"end": process_timestamp(period1 + timedelta(hours=1)),
"max": approx(1.0),
"mean": approx(2.0),
"min": approx(3.0),
"last_reset": last_reset_utc_str,
"last_reset": last_reset_utc,
"state": approx(4.0),
"sum": approx(5.0),
},
{
"statistic_id": statistic_id,
"start": period2.isoformat(),
"end": (period2 + timedelta(hours=1)).isoformat(),
"start": process_timestamp(period2),
"end": process_timestamp(period2 + timedelta(hours=1)),
"max": None,
"mean": None,
"min": None,
"last_reset": last_reset_utc_str,
"last_reset": last_reset_utc,
"state": approx(1.0),
"sum": approx(3.0),
},
@ -686,24 +718,22 @@ async def test_import_statistics(
assert stats == {
statistic_id: [
{
"statistic_id": statistic_id,
"start": period1.isoformat(),
"end": (period1 + timedelta(hours=1)).isoformat(),
"start": process_timestamp(period1),
"end": process_timestamp(period1 + timedelta(hours=1)),
"max": approx(1.0),
"mean": approx(2.0),
"min": approx(3.0),
"last_reset": last_reset_utc_str,
"last_reset": last_reset_utc,
"state": approx(4.0),
"sum": approx(5.0),
},
{
"statistic_id": statistic_id,
"start": period2.isoformat(),
"end": (period2 + timedelta(hours=1)).isoformat(),
"start": process_timestamp(period2),
"end": process_timestamp(period2 + timedelta(hours=1)),
"max": None,
"mean": None,
"min": None,
"last_reset": last_reset_utc_str,
"last_reset": last_reset_utc,
"state": approx(1.0),
"sum": approx(1000 * 1000 + 3.0),
},
@ -947,9 +977,8 @@ def test_weekly_statistics(hass_recorder, caplog, timezone):
assert stats == {
"test:total_energy_import": [
{
"statistic_id": "test:total_energy_import",
"start": week1_start.isoformat(),
"end": week1_end.isoformat(),
"start": week1_start,
"end": week1_end,
"max": None,
"mean": None,
"min": None,
@ -958,9 +987,8 @@ def test_weekly_statistics(hass_recorder, caplog, timezone):
"sum": 3.0,
},
{
"statistic_id": "test:total_energy_import",
"start": week2_start.isoformat(),
"end": week2_end.isoformat(),
"start": week2_start,
"end": week2_end,
"max": None,
"mean": None,
"min": None,
@ -980,9 +1008,8 @@ def test_weekly_statistics(hass_recorder, caplog, timezone):
assert stats == {
"test:total_energy_import": [
{
"statistic_id": "test:total_energy_import",
"start": week1_start.isoformat(),
"end": week1_end.isoformat(),
"start": week1_start,
"end": week1_end,
"max": None,
"mean": None,
"min": None,
@ -991,9 +1018,8 @@ def test_weekly_statistics(hass_recorder, caplog, timezone):
"sum": 3.0,
},
{
"statistic_id": "test:total_energy_import",
"start": week2_start.isoformat(),
"end": week2_end.isoformat(),
"start": week2_start,
"end": week2_end,
"max": None,
"mean": None,
"min": None,
@ -1085,9 +1111,8 @@ def test_monthly_statistics(hass_recorder, caplog, timezone):
assert stats == {
"test:total_energy_import": [
{
"statistic_id": "test:total_energy_import",
"start": sep_start.isoformat(),
"end": sep_end.isoformat(),
"start": sep_start,
"end": sep_end,
"max": None,
"mean": None,
"min": None,
@ -1096,9 +1121,8 @@ def test_monthly_statistics(hass_recorder, caplog, timezone):
"sum": approx(3.0),
},
{
"statistic_id": "test:total_energy_import",
"start": oct_start.isoformat(),
"end": oct_end.isoformat(),
"start": oct_start,
"end": oct_end,
"max": None,
"mean": None,
"min": None,
@ -1122,9 +1146,8 @@ def test_monthly_statistics(hass_recorder, caplog, timezone):
assert stats == {
"test:total_energy_import": [
{
"statistic_id": "test:total_energy_import",
"start": sep_start.isoformat(),
"end": sep_end.isoformat(),
"start": sep_start,
"end": sep_end,
"max": None,
"mean": None,
"min": None,
@ -1133,9 +1156,8 @@ def test_monthly_statistics(hass_recorder, caplog, timezone):
"sum": approx(3.0),
},
{
"statistic_id": "test:total_energy_import",
"start": oct_start.isoformat(),
"end": oct_end.isoformat(),
"start": oct_start,
"end": oct_end,
"max": None,
"mean": None,
"min": None,

View File

@ -17,7 +17,6 @@ from homeassistant.components.recorder.statistics import (
get_last_statistics,
get_metadata,
list_statistic_ids,
statistics_during_period,
)
from homeassistant.helpers import recorder as recorder_helper
from homeassistant.setup import async_setup_component
@ -29,6 +28,7 @@ from .common import (
async_wait_recording_done,
create_engine_test,
do_adhoc_statistics,
statistics_during_period,
)
from tests.common import async_fire_time_changed
@ -167,9 +167,8 @@ async def test_statistics_during_period(recorder_mock, hass, hass_ws_client):
assert response["result"] == {
"sensor.test": [
{
"statistic_id": "sensor.test",
"start": now.isoformat(),
"end": (now + timedelta(minutes=5)).isoformat(),
"start": int(now.timestamp() * 1000),
"end": int((now + timedelta(minutes=5)).timestamp() * 1000),
"mean": approx(10),
"min": approx(10),
"max": approx(10),
@ -180,6 +179,28 @@ async def test_statistics_during_period(recorder_mock, hass, hass_ws_client):
]
}
await client.send_json(
{
"id": 3,
"type": "recorder/statistics_during_period",
"start_time": now.isoformat(),
"statistic_ids": ["sensor.test"],
"period": "5minute",
"types": ["mean"],
}
)
response = await client.receive_json()
assert response["success"]
assert response["result"] == {
"sensor.test": [
{
"start": int(now.timestamp() * 1000),
"end": int((now + timedelta(minutes=5)).timestamp() * 1000),
"mean": approx(10),
}
]
}
@freeze_time(datetime.datetime(2022, 10, 21, 7, 25, tzinfo=datetime.timezone.utc))
@pytest.mark.parametrize("offset", (0, 1, 2))
@ -895,9 +916,8 @@ async def test_statistics_during_period_unit_conversion(
assert response["result"] == {
"sensor.test": [
{
"statistic_id": "sensor.test",
"start": now.isoformat(),
"end": (now + timedelta(minutes=5)).isoformat(),
"start": int(now.timestamp() * 1000),
"end": int((now + timedelta(minutes=5)).timestamp() * 1000),
"mean": approx(value),
"min": approx(value),
"max": approx(value),
@ -924,9 +944,8 @@ async def test_statistics_during_period_unit_conversion(
assert response["result"] == {
"sensor.test": [
{
"statistic_id": "sensor.test",
"start": now.isoformat(),
"end": (now + timedelta(minutes=5)).isoformat(),
"start": int(now.timestamp() * 1000),
"end": int((now + timedelta(minutes=5)).timestamp() * 1000),
"mean": approx(converted_value),
"min": approx(converted_value),
"max": approx(converted_value),
@ -989,9 +1008,8 @@ async def test_sum_statistics_during_period_unit_conversion(
assert response["result"] == {
"sensor.test": [
{
"statistic_id": "sensor.test",
"start": now.isoformat(),
"end": (now + timedelta(minutes=5)).isoformat(),
"start": int(now.timestamp() * 1000),
"end": int((now + timedelta(minutes=5)).timestamp() * 1000),
"mean": None,
"min": None,
"max": None,
@ -1018,9 +1036,8 @@ async def test_sum_statistics_during_period_unit_conversion(
assert response["result"] == {
"sensor.test": [
{
"statistic_id": "sensor.test",
"start": now.isoformat(),
"end": (now + timedelta(minutes=5)).isoformat(),
"start": int(now.timestamp() * 1000),
"end": int((now + timedelta(minutes=5)).timestamp() * 1000),
"mean": None,
"min": None,
"max": None,
@ -1150,9 +1167,8 @@ async def test_statistics_during_period_in_the_past(
assert response["result"] == {
"sensor.test": [
{
"statistic_id": "sensor.test",
"start": stats_start.isoformat(),
"end": (stats_start + timedelta(minutes=5)).isoformat(),
"start": int(stats_start.timestamp() * 1000),
"end": int((stats_start + timedelta(minutes=5)).timestamp() * 1000),
"mean": approx(10),
"min": approx(10),
"max": approx(10),
@ -1178,9 +1194,8 @@ async def test_statistics_during_period_in_the_past(
assert response["result"] == {
"sensor.test": [
{
"statistic_id": "sensor.test",
"start": start_of_day.isoformat(),
"end": (start_of_day + timedelta(days=1)).isoformat(),
"start": int(start_of_day.timestamp() * 1000),
"end": int((start_of_day + timedelta(days=1)).timestamp() * 1000),
"mean": approx(10),
"min": approx(10),
"max": approx(10),
@ -1468,9 +1483,8 @@ async def test_clear_statistics(recorder_mock, hass, hass_ws_client):
expected_response = {
"sensor.test1": [
{
"statistic_id": "sensor.test1",
"start": now.isoformat(),
"end": (now + timedelta(minutes=5)).isoformat(),
"start": int(now.timestamp() * 1000),
"end": int((now + timedelta(minutes=5)).timestamp() * 1000),
"mean": approx(value),
"min": approx(value),
"max": approx(value),
@ -1481,9 +1495,8 @@ async def test_clear_statistics(recorder_mock, hass, hass_ws_client):
],
"sensor.test2": [
{
"statistic_id": "sensor.test2",
"start": now.isoformat(),
"end": (now + timedelta(minutes=5)).isoformat(),
"start": int(now.timestamp() * 1000),
"end": int((now + timedelta(minutes=5)).timestamp() * 1000),
"mean": approx(value * 2),
"min": approx(value * 2),
"max": approx(value * 2),
@ -1494,9 +1507,8 @@ async def test_clear_statistics(recorder_mock, hass, hass_ws_client):
],
"sensor.test3": [
{
"statistic_id": "sensor.test3",
"start": now.isoformat(),
"end": (now + timedelta(minutes=5)).isoformat(),
"start": int(now.timestamp() * 1000),
"end": int((now + timedelta(minutes=5)).timestamp() * 1000),
"mean": approx(value * 3),
"min": approx(value * 3),
"max": approx(value * 3),
@ -1638,14 +1650,13 @@ async def test_update_statistics_metadata(
assert response["result"] == {
"sensor.test": [
{
"end": (now + timedelta(minutes=5)).isoformat(),
"end": int((now + timedelta(minutes=5)).timestamp() * 1000),
"last_reset": None,
"max": 10.0,
"mean": 10.0,
"min": 10.0,
"start": now.isoformat(),
"start": int(now.timestamp() * 1000),
"state": None,
"statistic_id": "sensor.test",
"sum": None,
}
],
@ -1700,14 +1711,13 @@ async def test_change_statistics_unit(recorder_mock, hass, hass_ws_client):
assert response["result"] == {
"sensor.test": [
{
"end": (now + timedelta(minutes=5)).isoformat(),
"end": int((now + timedelta(minutes=5)).timestamp() * 1000),
"last_reset": None,
"max": 10.0,
"mean": 10.0,
"min": 10.0,
"start": now.isoformat(),
"start": int(now.timestamp() * 1000),
"state": None,
"statistic_id": "sensor.test",
"sum": None,
}
],
@ -1756,14 +1766,13 @@ async def test_change_statistics_unit(recorder_mock, hass, hass_ws_client):
assert response["result"] == {
"sensor.test": [
{
"end": (now + timedelta(minutes=5)).isoformat(),
"end": int((now + timedelta(minutes=5)).timestamp() * 1000),
"last_reset": None,
"max": 10000.0,
"mean": 10000.0,
"min": 10000.0,
"start": now.isoformat(),
"start": int(now.timestamp() * 1000),
"state": None,
"statistic_id": "sensor.test",
"sum": None,
}
],
@ -1796,14 +1805,13 @@ async def test_change_statistics_unit_errors(
expected_statistics = {
"sensor.test": [
{
"end": (now + timedelta(minutes=5)).isoformat(),
"end": int((now + timedelta(minutes=5)).timestamp() * 1000),
"last_reset": None,
"max": 10.0,
"mean": 10.0,
"min": 10.0,
"start": now.isoformat(),
"start": int(now.timestamp() * 1000),
"state": None,
"statistic_id": "sensor.test",
"sum": None,
}
],
@ -2292,9 +2300,8 @@ async def test_import_statistics(
assert stats == {
statistic_id: [
{
"statistic_id": statistic_id,
"start": period1.isoformat(),
"end": (period1 + timedelta(hours=1)).isoformat(),
"start": period1,
"end": (period1 + timedelta(hours=1)),
"max": None,
"mean": None,
"min": None,
@ -2303,9 +2310,8 @@ async def test_import_statistics(
"sum": approx(2.0),
},
{
"statistic_id": statistic_id,
"start": period2.isoformat(),
"end": (period2 + timedelta(hours=1)).isoformat(),
"start": period2,
"end": period2 + timedelta(hours=1),
"max": None,
"mean": None,
"min": None,
@ -2341,13 +2347,18 @@ async def test_import_statistics(
},
)
}
last_stats = get_last_statistics(hass, 1, statistic_id, True)
last_stats = get_last_statistics(
hass,
1,
statistic_id,
True,
{"last_reset", "max", "mean", "min", "state", "sum"},
)
assert last_stats == {
statistic_id: [
{
"statistic_id": statistic_id,
"start": period2.isoformat(),
"end": (period2 + timedelta(hours=1)).isoformat(),
"start": period2,
"end": period2 + timedelta(hours=1),
"max": None,
"mean": None,
"min": None,
@ -2383,9 +2394,8 @@ async def test_import_statistics(
assert stats == {
statistic_id: [
{
"statistic_id": statistic_id,
"start": period1.isoformat(),
"end": (period1 + timedelta(hours=1)).isoformat(),
"start": period1,
"end": period1 + timedelta(hours=1),
"max": None,
"mean": None,
"min": None,
@ -2394,9 +2404,8 @@ async def test_import_statistics(
"sum": approx(6.0),
},
{
"statistic_id": statistic_id,
"start": period2.isoformat(),
"end": (period2 + timedelta(hours=1)).isoformat(),
"start": period2,
"end": period2 + timedelta(hours=1),
"max": None,
"mean": None,
"min": None,
@ -2435,9 +2444,8 @@ async def test_import_statistics(
assert stats == {
statistic_id: [
{
"statistic_id": statistic_id,
"start": period1.isoformat(),
"end": (period1 + timedelta(hours=1)).isoformat(),
"start": period1,
"end": period1 + timedelta(hours=1),
"max": approx(1.0),
"mean": approx(2.0),
"min": approx(3.0),
@ -2446,9 +2454,8 @@ async def test_import_statistics(
"sum": approx(5.0),
},
{
"statistic_id": statistic_id,
"start": period2.isoformat(),
"end": (period2 + timedelta(hours=1)).isoformat(),
"start": period2,
"end": period2 + timedelta(hours=1),
"max": None,
"mean": None,
"min": None,
@ -2519,9 +2526,8 @@ async def test_adjust_sum_statistics_energy(
assert stats == {
statistic_id: [
{
"statistic_id": statistic_id,
"start": period1.isoformat(),
"end": (period1 + timedelta(hours=1)).isoformat(),
"start": period1,
"end": period1 + timedelta(hours=1),
"max": None,
"mean": None,
"min": None,
@ -2530,9 +2536,8 @@ async def test_adjust_sum_statistics_energy(
"sum": approx(2.0),
},
{
"statistic_id": statistic_id,
"start": period2.isoformat(),
"end": (period2 + timedelta(hours=1)).isoformat(),
"start": period2,
"end": period2 + timedelta(hours=1),
"max": None,
"mean": None,
"min": None,
@ -2588,9 +2593,8 @@ async def test_adjust_sum_statistics_energy(
assert stats == {
statistic_id: [
{
"statistic_id": statistic_id,
"start": period1.isoformat(),
"end": (period1 + timedelta(hours=1)).isoformat(),
"start": period1,
"end": period1 + timedelta(hours=1),
"max": approx(None),
"mean": approx(None),
"min": approx(None),
@ -2599,9 +2603,8 @@ async def test_adjust_sum_statistics_energy(
"sum": approx(2.0),
},
{
"statistic_id": statistic_id,
"start": period2.isoformat(),
"end": (period2 + timedelta(hours=1)).isoformat(),
"start": period2,
"end": period2 + timedelta(hours=1),
"max": None,
"mean": None,
"min": None,
@ -2631,9 +2634,8 @@ async def test_adjust_sum_statistics_energy(
assert stats == {
statistic_id: [
{
"statistic_id": statistic_id,
"start": period1.isoformat(),
"end": (period1 + timedelta(hours=1)).isoformat(),
"start": period1,
"end": period1 + timedelta(hours=1),
"max": approx(None),
"mean": approx(None),
"min": approx(None),
@ -2642,9 +2644,8 @@ async def test_adjust_sum_statistics_energy(
"sum": approx(2.0),
},
{
"statistic_id": statistic_id,
"start": period2.isoformat(),
"end": (period2 + timedelta(hours=1)).isoformat(),
"start": period2,
"end": period2 + timedelta(hours=1),
"max": None,
"mean": None,
"min": None,
@ -2715,9 +2716,8 @@ async def test_adjust_sum_statistics_gas(
assert stats == {
statistic_id: [
{
"statistic_id": statistic_id,
"start": period1.isoformat(),
"end": (period1 + timedelta(hours=1)).isoformat(),
"start": period1,
"end": period1 + timedelta(hours=1),
"max": None,
"mean": None,
"min": None,
@ -2726,9 +2726,8 @@ async def test_adjust_sum_statistics_gas(
"sum": approx(2.0),
},
{
"statistic_id": statistic_id,
"start": period2.isoformat(),
"end": (period2 + timedelta(hours=1)).isoformat(),
"start": period2,
"end": period2 + timedelta(hours=1),
"max": None,
"mean": None,
"min": None,
@ -2784,9 +2783,8 @@ async def test_adjust_sum_statistics_gas(
assert stats == {
statistic_id: [
{
"statistic_id": statistic_id,
"start": period1.isoformat(),
"end": (period1 + timedelta(hours=1)).isoformat(),
"start": period1,
"end": period1 + timedelta(hours=1),
"max": approx(None),
"mean": approx(None),
"min": approx(None),
@ -2795,9 +2793,8 @@ async def test_adjust_sum_statistics_gas(
"sum": approx(2.0),
},
{
"statistic_id": statistic_id,
"start": period2.isoformat(),
"end": (period2 + timedelta(hours=1)).isoformat(),
"start": period2,
"end": period2 + timedelta(hours=1),
"max": None,
"mean": None,
"min": None,
@ -2827,9 +2824,8 @@ async def test_adjust_sum_statistics_gas(
assert stats == {
statistic_id: [
{
"statistic_id": statistic_id,
"start": period1.isoformat(),
"end": (period1 + timedelta(hours=1)).isoformat(),
"start": period1,
"end": period1 + timedelta(hours=1),
"max": approx(None),
"mean": approx(None),
"min": approx(None),
@ -2838,9 +2834,8 @@ async def test_adjust_sum_statistics_gas(
"sum": approx(2.0),
},
{
"statistic_id": statistic_id,
"start": period2.isoformat(),
"end": (period2 + timedelta(hours=1)).isoformat(),
"start": period2,
"end": period2 + timedelta(hours=1),
"max": None,
"mean": None,
"min": None,
@ -2926,9 +2921,8 @@ async def test_adjust_sum_statistics_errors(
assert stats == {
statistic_id: [
{
"statistic_id": statistic_id,
"start": period1.isoformat(),
"end": (period1 + timedelta(hours=1)).isoformat(),
"start": period1,
"end": period1 + timedelta(hours=1),
"max": None,
"mean": None,
"min": None,
@ -2937,9 +2931,8 @@ async def test_adjust_sum_statistics_errors(
"sum": approx(2.0 * factor),
},
{
"statistic_id": statistic_id,
"start": period2.isoformat(),
"end": (period2 + timedelta(hours=1)).isoformat(),
"start": period2,
"end": period2 + timedelta(hours=1),
"max": None,
"mean": None,
"min": None,

View File

@ -14,13 +14,12 @@ from homeassistant.components.recorder.db_schema import StatisticsMeta
from homeassistant.components.recorder.models import (
StatisticData,
StatisticMetaData,
process_timestamp_to_utc_isoformat,
process_timestamp,
)
from homeassistant.components.recorder.statistics import (
async_import_statistics,
get_metadata,
list_statistic_ids,
statistics_during_period,
)
from homeassistant.components.recorder.util import get_instance, session_scope
from homeassistant.const import STATE_UNAVAILABLE
@ -32,6 +31,7 @@ from tests.components.recorder.common import (
async_recorder_block_till_done,
async_wait_recording_done,
do_adhoc_statistics,
statistics_during_period,
wait_recording_done,
)
@ -153,9 +153,8 @@ def test_compile_hourly_statistics(
assert stats == {
"sensor.test1": [
{
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(zero),
"end": process_timestamp_to_utc_isoformat(zero + timedelta(minutes=5)),
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"mean": approx(mean),
"min": approx(min),
"max": approx(max),
@ -227,9 +226,8 @@ def test_compile_hourly_statistics_purged_state_changes(
assert stats == {
"sensor.test1": [
{
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(zero),
"end": process_timestamp_to_utc_isoformat(zero + timedelta(minutes=5)),
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"mean": approx(mean),
"min": approx(min),
"max": approx(max),
@ -332,9 +330,8 @@ def test_compile_hourly_statistics_wrong_unit(hass_recorder, caplog, attributes)
assert stats == {
"sensor.test1": [
{
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(zero),
"end": process_timestamp_to_utc_isoformat(zero + timedelta(minutes=5)),
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"mean": approx(13.050847),
"min": approx(-10.0),
"max": approx(30.0),
@ -345,9 +342,8 @@ def test_compile_hourly_statistics_wrong_unit(hass_recorder, caplog, attributes)
],
"sensor.test2": [
{
"statistic_id": "sensor.test2",
"start": process_timestamp_to_utc_isoformat(zero),
"end": process_timestamp_to_utc_isoformat(zero + timedelta(minutes=5)),
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"mean": 13.05084745762712,
"min": -10.0,
"max": 30.0,
@ -358,9 +354,8 @@ def test_compile_hourly_statistics_wrong_unit(hass_recorder, caplog, attributes)
],
"sensor.test3": [
{
"statistic_id": "sensor.test3",
"start": process_timestamp_to_utc_isoformat(zero),
"end": process_timestamp_to_utc_isoformat(zero + timedelta(minutes=5)),
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"mean": 13.05084745762712,
"min": -10.0,
"max": 30.0,
@ -371,9 +366,8 @@ def test_compile_hourly_statistics_wrong_unit(hass_recorder, caplog, attributes)
],
"sensor.test6": [
{
"statistic_id": "sensor.test6",
"start": process_timestamp_to_utc_isoformat(zero),
"end": process_timestamp_to_utc_isoformat(zero + timedelta(minutes=5)),
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"mean": approx(13.050847),
"min": approx(-10.0),
"max": approx(30.0),
@ -384,9 +378,8 @@ def test_compile_hourly_statistics_wrong_unit(hass_recorder, caplog, attributes)
],
"sensor.test7": [
{
"statistic_id": "sensor.test7",
"start": process_timestamp_to_utc_isoformat(zero),
"end": process_timestamp_to_utc_isoformat(zero + timedelta(minutes=5)),
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"mean": approx(13.050847),
"min": approx(-10.0),
"max": approx(30.0),
@ -492,35 +485,32 @@ async def test_compile_hourly_sum_statistics_amount(
expected_stats = {
"sensor.test1": [
{
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(period0),
"end": process_timestamp_to_utc_isoformat(period0_end),
"start": process_timestamp(period0),
"end": process_timestamp(period0_end),
"max": None,
"mean": None,
"min": None,
"last_reset": process_timestamp_to_utc_isoformat(period0),
"last_reset": process_timestamp(period0),
"state": approx(factor * seq[2]),
"sum": approx(factor * 10.0),
},
{
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(period1),
"end": process_timestamp_to_utc_isoformat(period1_end),
"start": process_timestamp(period1),
"end": process_timestamp(period1_end),
"max": None,
"mean": None,
"min": None,
"last_reset": process_timestamp_to_utc_isoformat(four),
"last_reset": process_timestamp(four),
"state": approx(factor * seq[5]),
"sum": approx(factor * 40.0),
},
{
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(period2),
"end": process_timestamp_to_utc_isoformat(period2_end),
"start": process_timestamp(period2),
"end": process_timestamp(period2_end),
"max": None,
"mean": None,
"min": None,
"last_reset": process_timestamp_to_utc_isoformat(four),
"last_reset": process_timestamp(four),
"state": approx(factor * seq[8]),
"sum": approx(factor * 70.0),
},
@ -684,26 +674,22 @@ def test_compile_hourly_sum_statistics_amount_reset_every_state_change(
assert stats == {
"sensor.test1": [
{
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(zero),
"end": process_timestamp_to_utc_isoformat(zero + timedelta(minutes=5)),
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"max": None,
"mean": None,
"min": None,
"last_reset": process_timestamp_to_utc_isoformat(dt_util.as_local(one)),
"last_reset": process_timestamp(dt_util.as_local(one)),
"state": approx(factor * seq[7]),
"sum": approx(factor * (sum(seq) - seq[0])),
},
{
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(
zero + timedelta(minutes=5)
),
"end": process_timestamp_to_utc_isoformat(zero + timedelta(minutes=10)),
"start": process_timestamp(zero + timedelta(minutes=5)),
"end": process_timestamp(zero + timedelta(minutes=10)),
"max": None,
"mean": None,
"min": None,
"last_reset": process_timestamp_to_utc_isoformat(dt_util.as_local(two)),
"last_reset": process_timestamp(dt_util.as_local(two)),
"state": approx(factor * seq[7]),
"sum": approx(factor * (2 * sum(seq) - seq[0])),
},
@ -784,13 +770,12 @@ def test_compile_hourly_sum_statistics_amount_invalid_last_reset(
assert stats == {
"sensor.test1": [
{
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(zero),
"end": process_timestamp_to_utc_isoformat(zero + timedelta(minutes=5)),
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"max": None,
"mean": None,
"min": None,
"last_reset": process_timestamp_to_utc_isoformat(dt_util.as_local(one)),
"last_reset": process_timestamp(dt_util.as_local(one)),
"state": approx(factor * seq[7]),
"sum": approx(factor * (sum(seq) - seq[0] - seq[3])),
},
@ -868,13 +853,12 @@ def test_compile_hourly_sum_statistics_nan_inf_state(
assert stats == {
"sensor.test1": [
{
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(zero),
"end": process_timestamp_to_utc_isoformat(zero + timedelta(minutes=5)),
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"max": None,
"mean": None,
"min": None,
"last_reset": process_timestamp_to_utc_isoformat(one),
"last_reset": process_timestamp(one),
"state": approx(factor * seq[7]),
"sum": approx(factor * (seq[2] + seq[3] + seq[4] + seq[6] + seq[7])),
},
@ -992,9 +976,8 @@ def test_compile_hourly_sum_statistics_negative_state(
stats = statistics_during_period(hass, zero, period="5minute")
assert stats[entity_id] == [
{
"statistic_id": entity_id,
"start": process_timestamp_to_utc_isoformat(zero),
"end": process_timestamp_to_utc_isoformat(zero + timedelta(minutes=5)),
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"max": None,
"mean": None,
"min": None,
@ -1081,9 +1064,8 @@ def test_compile_hourly_sum_statistics_total_no_reset(
assert stats == {
"sensor.test1": [
{
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(period0),
"end": process_timestamp_to_utc_isoformat(period0_end),
"start": process_timestamp(period0),
"end": process_timestamp(period0_end),
"max": None,
"mean": None,
"min": None,
@ -1092,9 +1074,8 @@ def test_compile_hourly_sum_statistics_total_no_reset(
"sum": approx(factor * 10.0),
},
{
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(period1),
"end": process_timestamp_to_utc_isoformat(period1_end),
"start": process_timestamp(period1),
"end": process_timestamp(period1_end),
"max": None,
"mean": None,
"min": None,
@ -1103,9 +1084,8 @@ def test_compile_hourly_sum_statistics_total_no_reset(
"sum": approx(factor * 30.0),
},
{
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(period2),
"end": process_timestamp_to_utc_isoformat(period2_end),
"start": process_timestamp(period2),
"end": process_timestamp(period2_end),
"max": None,
"mean": None,
"min": None,
@ -1183,9 +1163,8 @@ def test_compile_hourly_sum_statistics_total_increasing(
assert stats == {
"sensor.test1": [
{
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(period0),
"end": process_timestamp_to_utc_isoformat(period0_end),
"start": process_timestamp(period0),
"end": process_timestamp(period0_end),
"max": None,
"mean": None,
"min": None,
@ -1194,9 +1173,8 @@ def test_compile_hourly_sum_statistics_total_increasing(
"sum": approx(factor * 10.0),
},
{
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(period1),
"end": process_timestamp_to_utc_isoformat(period1_end),
"start": process_timestamp(period1),
"end": process_timestamp(period1_end),
"max": None,
"mean": None,
"min": None,
@ -1205,9 +1183,8 @@ def test_compile_hourly_sum_statistics_total_increasing(
"sum": approx(factor * 50.0),
},
{
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(period2),
"end": process_timestamp_to_utc_isoformat(period2_end),
"start": process_timestamp(period2),
"end": process_timestamp(period2_end),
"max": None,
"mean": None,
"min": None,
@ -1297,9 +1274,8 @@ def test_compile_hourly_sum_statistics_total_increasing_small_dip(
"sensor.test1": [
{
"last_reset": None,
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(period0),
"end": process_timestamp_to_utc_isoformat(period0_end),
"start": process_timestamp(period0),
"end": process_timestamp(period0_end),
"max": None,
"mean": None,
"min": None,
@ -1308,9 +1284,8 @@ def test_compile_hourly_sum_statistics_total_increasing_small_dip(
},
{
"last_reset": None,
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(period1),
"end": process_timestamp_to_utc_isoformat(period1_end),
"start": process_timestamp(period1),
"end": process_timestamp(period1_end),
"max": None,
"mean": None,
"min": None,
@ -1319,9 +1294,8 @@ def test_compile_hourly_sum_statistics_total_increasing_small_dip(
},
{
"last_reset": None,
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(period2),
"end": process_timestamp_to_utc_isoformat(period2_end),
"start": process_timestamp(period2),
"end": process_timestamp(period2_end),
"max": None,
"mean": None,
"min": None,
@ -1390,35 +1364,32 @@ def test_compile_hourly_energy_statistics_unsupported(hass_recorder, caplog):
assert stats == {
"sensor.test1": [
{
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(period0),
"end": process_timestamp_to_utc_isoformat(period0_end),
"start": process_timestamp(period0),
"end": process_timestamp(period0_end),
"max": None,
"mean": None,
"min": None,
"last_reset": process_timestamp_to_utc_isoformat(period0),
"last_reset": process_timestamp(period0),
"state": approx(20.0),
"sum": approx(10.0),
},
{
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(period1),
"end": process_timestamp_to_utc_isoformat(period1_end),
"start": process_timestamp(period1),
"end": process_timestamp(period1_end),
"max": None,
"mean": None,
"min": None,
"last_reset": process_timestamp_to_utc_isoformat(four),
"last_reset": process_timestamp(four),
"state": approx(40.0),
"sum": approx(40.0),
},
{
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(period2),
"end": process_timestamp_to_utc_isoformat(period2_end),
"start": process_timestamp(period2),
"end": process_timestamp(period2_end),
"max": None,
"mean": None,
"min": None,
"last_reset": process_timestamp_to_utc_isoformat(four),
"last_reset": process_timestamp(four),
"state": approx(70.0),
"sum": approx(70.0),
},
@ -1500,105 +1471,96 @@ def test_compile_hourly_energy_statistics_multiple(hass_recorder, caplog):
assert stats == {
"sensor.test1": [
{
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(period0),
"end": process_timestamp_to_utc_isoformat(period0_end),
"start": process_timestamp(period0),
"end": process_timestamp(period0_end),
"max": None,
"mean": None,
"min": None,
"last_reset": process_timestamp_to_utc_isoformat(period0),
"last_reset": process_timestamp(period0),
"state": approx(20.0),
"sum": approx(10.0),
},
{
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(period1),
"end": process_timestamp_to_utc_isoformat(period1_end),
"start": process_timestamp(period1),
"end": process_timestamp(period1_end),
"max": None,
"mean": None,
"min": None,
"last_reset": process_timestamp_to_utc_isoformat(four),
"last_reset": process_timestamp(four),
"state": approx(40.0),
"sum": approx(40.0),
},
{
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(period2),
"end": process_timestamp_to_utc_isoformat(period2_end),
"start": process_timestamp(period2),
"end": process_timestamp(period2_end),
"max": None,
"mean": None,
"min": None,
"last_reset": process_timestamp_to_utc_isoformat(four),
"last_reset": process_timestamp(four),
"state": approx(70.0),
"sum": approx(70.0),
},
],
"sensor.test2": [
{
"statistic_id": "sensor.test2",
"start": process_timestamp_to_utc_isoformat(period0),
"end": process_timestamp_to_utc_isoformat(period0_end),
"start": process_timestamp(period0),
"end": process_timestamp(period0_end),
"max": None,
"mean": None,
"min": None,
"last_reset": process_timestamp_to_utc_isoformat(period0),
"last_reset": process_timestamp(period0),
"state": approx(130.0),
"sum": approx(20.0),
},
{
"statistic_id": "sensor.test2",
"start": process_timestamp_to_utc_isoformat(period1),
"end": process_timestamp_to_utc_isoformat(period1_end),
"start": process_timestamp(period1),
"end": process_timestamp(period1_end),
"max": None,
"mean": None,
"min": None,
"last_reset": process_timestamp_to_utc_isoformat(four),
"last_reset": process_timestamp(four),
"state": approx(45.0),
"sum": approx(-65.0),
},
{
"statistic_id": "sensor.test2",
"start": process_timestamp_to_utc_isoformat(period2),
"end": process_timestamp_to_utc_isoformat(period2_end),
"start": process_timestamp(period2),
"end": process_timestamp(period2_end),
"max": None,
"mean": None,
"min": None,
"last_reset": process_timestamp_to_utc_isoformat(four),
"last_reset": process_timestamp(four),
"state": approx(75.0),
"sum": approx(-35.0),
},
],
"sensor.test3": [
{
"statistic_id": "sensor.test3",
"start": process_timestamp_to_utc_isoformat(period0),
"end": process_timestamp_to_utc_isoformat(period0_end),
"start": process_timestamp(period0),
"end": process_timestamp(period0_end),
"max": None,
"mean": None,
"min": None,
"last_reset": process_timestamp_to_utc_isoformat(period0),
"last_reset": process_timestamp(period0),
"state": approx(5.0),
"sum": approx(5.0),
},
{
"statistic_id": "sensor.test3",
"start": process_timestamp_to_utc_isoformat(period1),
"end": process_timestamp_to_utc_isoformat(period1_end),
"start": process_timestamp(period1),
"end": process_timestamp(period1_end),
"max": None,
"mean": None,
"min": None,
"last_reset": process_timestamp_to_utc_isoformat(four),
"last_reset": process_timestamp(four),
"state": approx(50.0),
"sum": approx(60.0),
},
{
"statistic_id": "sensor.test3",
"start": process_timestamp_to_utc_isoformat(period2),
"end": process_timestamp_to_utc_isoformat(period2_end),
"start": process_timestamp(period2),
"end": process_timestamp(period2_end),
"max": None,
"mean": None,
"min": None,
"last_reset": process_timestamp_to_utc_isoformat(four),
"last_reset": process_timestamp(four),
"state": approx(90.0),
"sum": approx(100.0),
},
@ -1654,9 +1616,8 @@ def test_compile_hourly_statistics_unchanged(
assert stats == {
"sensor.test1": [
{
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(four),
"end": process_timestamp_to_utc_isoformat(four + timedelta(minutes=5)),
"start": process_timestamp(four),
"end": process_timestamp(four + timedelta(minutes=5)),
"mean": approx(value),
"min": approx(value),
"max": approx(value),
@ -1687,9 +1648,8 @@ def test_compile_hourly_statistics_partially_unavailable(hass_recorder, caplog):
assert stats == {
"sensor.test1": [
{
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(zero),
"end": process_timestamp_to_utc_isoformat(zero + timedelta(minutes=5)),
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"mean": approx(21.1864406779661),
"min": approx(10.0),
"max": approx(25.0),
@ -1757,9 +1717,8 @@ def test_compile_hourly_statistics_unavailable(
assert stats == {
"sensor.test2": [
{
"statistic_id": "sensor.test2",
"start": process_timestamp_to_utc_isoformat(four),
"end": process_timestamp_to_utc_isoformat(four + timedelta(minutes=5)),
"start": process_timestamp(four),
"end": process_timestamp(four + timedelta(minutes=5)),
"mean": approx(value),
"min": approx(value),
"max": approx(value),
@ -1974,9 +1933,8 @@ def test_compile_hourly_statistics_changing_units_1(
assert stats == {
"sensor.test1": [
{
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(zero),
"end": process_timestamp_to_utc_isoformat(zero + timedelta(minutes=5)),
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"mean": approx(mean),
"min": approx(min),
"max": approx(max),
@ -2009,9 +1967,8 @@ def test_compile_hourly_statistics_changing_units_1(
assert stats == {
"sensor.test1": [
{
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(zero),
"end": process_timestamp_to_utc_isoformat(zero + timedelta(minutes=5)),
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"mean": approx(mean),
"min": approx(min),
"max": approx(max),
@ -2149,9 +2106,8 @@ def test_compile_hourly_statistics_changing_units_3(
assert stats == {
"sensor.test1": [
{
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(zero),
"end": process_timestamp_to_utc_isoformat(zero + timedelta(minutes=5)),
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"mean": approx(mean),
"min": approx(min),
"max": approx(max),
@ -2184,9 +2140,8 @@ def test_compile_hourly_statistics_changing_units_3(
assert stats == {
"sensor.test1": [
{
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(zero),
"end": process_timestamp_to_utc_isoformat(zero + timedelta(minutes=5)),
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"mean": approx(mean),
"min": approx(min),
"max": approx(max),
@ -2263,9 +2218,8 @@ def test_compile_hourly_statistics_equivalent_units_1(
assert stats == {
"sensor.test1": [
{
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(zero),
"end": process_timestamp_to_utc_isoformat(zero + timedelta(minutes=5)),
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"mean": approx(mean),
"min": approx(min),
"max": approx(max),
@ -2294,9 +2248,8 @@ def test_compile_hourly_statistics_equivalent_units_1(
assert stats == {
"sensor.test1": [
{
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(zero),
"end": process_timestamp_to_utc_isoformat(zero + timedelta(minutes=5)),
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"mean": approx(mean),
"min": approx(min),
"max": approx(max),
@ -2305,11 +2258,8 @@ def test_compile_hourly_statistics_equivalent_units_1(
"sum": None,
},
{
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(
zero + timedelta(minutes=10)
),
"end": process_timestamp_to_utc_isoformat(zero + timedelta(minutes=15)),
"start": process_timestamp(zero + timedelta(minutes=10)),
"end": process_timestamp(zero + timedelta(minutes=15)),
"mean": approx(mean2),
"min": approx(min),
"max": approx(max),
@ -2381,13 +2331,8 @@ def test_compile_hourly_statistics_equivalent_units_2(
assert stats == {
"sensor.test1": [
{
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(
zero + timedelta(seconds=30 * 5)
),
"end": process_timestamp_to_utc_isoformat(
zero + timedelta(seconds=30 * 15)
),
"start": process_timestamp(zero + timedelta(seconds=30 * 5)),
"end": process_timestamp(zero + timedelta(seconds=30 * 15)),
"mean": approx(mean),
"min": approx(min),
"max": approx(max),
@ -2451,9 +2396,8 @@ def test_compile_hourly_statistics_changing_device_class_1(
assert stats == {
"sensor.test1": [
{
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(zero),
"end": process_timestamp_to_utc_isoformat(zero + timedelta(minutes=5)),
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"mean": approx(mean1),
"min": approx(min),
"max": approx(max),
@ -2496,9 +2440,8 @@ def test_compile_hourly_statistics_changing_device_class_1(
assert stats == {
"sensor.test1": [
{
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(zero),
"end": process_timestamp_to_utc_isoformat(zero + timedelta(minutes=5)),
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"mean": approx(mean1),
"min": approx(min),
"max": approx(max),
@ -2507,11 +2450,8 @@ def test_compile_hourly_statistics_changing_device_class_1(
"sum": None,
},
{
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(
zero + timedelta(minutes=10)
),
"end": process_timestamp_to_utc_isoformat(zero + timedelta(minutes=15)),
"start": process_timestamp(zero + timedelta(minutes=10)),
"end": process_timestamp(zero + timedelta(minutes=15)),
"mean": approx(mean2),
"min": approx(min),
"max": approx(max),
@ -2554,9 +2494,8 @@ def test_compile_hourly_statistics_changing_device_class_1(
assert stats == {
"sensor.test1": [
{
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(zero),
"end": process_timestamp_to_utc_isoformat(zero + timedelta(minutes=5)),
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"mean": approx(mean1),
"min": approx(min),
"max": approx(max),
@ -2565,11 +2504,8 @@ def test_compile_hourly_statistics_changing_device_class_1(
"sum": None,
},
{
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(
zero + timedelta(minutes=10)
),
"end": process_timestamp_to_utc_isoformat(zero + timedelta(minutes=15)),
"start": process_timestamp(zero + timedelta(minutes=10)),
"end": process_timestamp(zero + timedelta(minutes=15)),
"mean": approx(mean2),
"min": approx(min),
"max": approx(max),
@ -2578,11 +2514,8 @@ def test_compile_hourly_statistics_changing_device_class_1(
"sum": None,
},
{
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(
zero + timedelta(minutes=20)
),
"end": process_timestamp_to_utc_isoformat(zero + timedelta(minutes=25)),
"start": process_timestamp(zero + timedelta(minutes=20)),
"end": process_timestamp(zero + timedelta(minutes=25)),
"mean": approx(mean2),
"min": approx(min),
"max": approx(max),
@ -2647,9 +2580,8 @@ def test_compile_hourly_statistics_changing_device_class_2(
assert stats == {
"sensor.test1": [
{
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(zero),
"end": process_timestamp_to_utc_isoformat(zero + timedelta(minutes=5)),
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"mean": approx(mean),
"min": approx(min),
"max": approx(max),
@ -2692,9 +2624,8 @@ def test_compile_hourly_statistics_changing_device_class_2(
assert stats == {
"sensor.test1": [
{
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(zero),
"end": process_timestamp_to_utc_isoformat(zero + timedelta(minutes=5)),
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"mean": approx(mean),
"min": approx(min),
"max": approx(max),
@ -2703,11 +2634,8 @@ def test_compile_hourly_statistics_changing_device_class_2(
"sum": None,
},
{
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(
zero + timedelta(minutes=10)
),
"end": process_timestamp_to_utc_isoformat(zero + timedelta(minutes=15)),
"start": process_timestamp(zero + timedelta(minutes=10)),
"end": process_timestamp(zero + timedelta(minutes=15)),
"mean": approx(mean2),
"min": approx(min),
"max": approx(max),
@ -2823,9 +2751,8 @@ def test_compile_hourly_statistics_changing_statistics(
assert stats == {
"sensor.test1": [
{
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(period0),
"end": process_timestamp_to_utc_isoformat(period0_end),
"start": process_timestamp(period0),
"end": process_timestamp(period0_end),
"mean": approx(mean),
"min": approx(min),
"max": approx(max),
@ -2834,9 +2761,8 @@ def test_compile_hourly_statistics_changing_statistics(
"sum": None,
},
{
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(period1),
"end": process_timestamp_to_utc_isoformat(period1_end),
"start": process_timestamp(period1),
"end": process_timestamp(period1_end),
"mean": None,
"min": None,
"max": None,
@ -3071,9 +2997,8 @@ def test_compile_statistics_hourly_daily_monthly_summary(hass_recorder, caplog):
)
expected_stats[entity_id].append(
{
"statistic_id": entity_id,
"start": process_timestamp_to_utc_isoformat(start),
"end": process_timestamp_to_utc_isoformat(end),
"start": process_timestamp(start),
"end": process_timestamp(end),
"mean": approx(expected_average),
"min": approx(expected_minimum),
"max": approx(expected_maximum),
@ -3129,9 +3054,8 @@ def test_compile_statistics_hourly_daily_monthly_summary(hass_recorder, caplog):
)
expected_stats[entity_id].append(
{
"statistic_id": entity_id,
"start": process_timestamp_to_utc_isoformat(start),
"end": process_timestamp_to_utc_isoformat(end),
"start": process_timestamp(start),
"end": process_timestamp(end),
"mean": approx(expected_average),
"min": approx(expected_minimum),
"max": approx(expected_maximum),
@ -3187,9 +3111,8 @@ def test_compile_statistics_hourly_daily_monthly_summary(hass_recorder, caplog):
)
expected_stats[entity_id].append(
{
"statistic_id": entity_id,
"start": process_timestamp_to_utc_isoformat(start),
"end": process_timestamp_to_utc_isoformat(end),
"start": process_timestamp(start),
"end": process_timestamp(end),
"mean": approx(expected_average),
"min": approx(expected_minimum),
"max": approx(expected_maximum),
@ -3245,9 +3168,8 @@ def test_compile_statistics_hourly_daily_monthly_summary(hass_recorder, caplog):
)
expected_stats[entity_id].append(
{
"statistic_id": entity_id,
"start": process_timestamp_to_utc_isoformat(start),
"end": process_timestamp_to_utc_isoformat(end),
"start": process_timestamp(start),
"end": process_timestamp(end),
"mean": approx(expected_average),
"min": approx(expected_minimum),
"max": approx(expected_maximum),

View File

@ -35,7 +35,8 @@ async def test_async_setup_entry(recorder_mock, hass):
None,
[statistic_id],
"hour",
True,
None,
{"start", "state", "mean", "min", "max", "last_reset", "sum"},
)
assert len(stats) == 1