From 8c5efafdd8d69ac8b9cd5217c188ee0a99b99dfe Mon Sep 17 00:00:00 2001 From: Erik Montnemery Date: Thu, 16 Sep 2021 10:57:15 +0200 Subject: [PATCH] Add 5-minute statistics for sensors (#56006) * Add 5-minute statistics for sensors * Address pylint issues * Black * Apply suggestion from code review * Apply suggestions from code review * Improve tests --- homeassistant/components/history/__init__.py | 2 + homeassistant/components/recorder/__init__.py | 17 +- .../components/recorder/migration.py | 14 +- homeassistant/components/recorder/models.py | 61 +- .../components/recorder/statistics.py | 197 +++++- homeassistant/components/recorder/util.py | 8 +- tests/components/history/test_init.py | 13 +- tests/components/recorder/conftest.py | 6 +- tests/components/recorder/test_init.py | 30 +- tests/components/recorder/test_statistics.py | 64 +- tests/components/sensor/test_recorder.py | 662 +++++++++++++----- tests/conftest.py | 4 +- 12 files changed, 781 insertions(+), 297 deletions(-) diff --git a/homeassistant/components/history/__init__.py b/homeassistant/components/history/__init__.py index e05b6466a24..4f50a5e66be 100644 --- a/homeassistant/components/history/__init__.py +++ b/homeassistant/components/history/__init__.py @@ -124,6 +124,7 @@ class LazyState(history_models.LazyState): vol.Required("start_time"): str, vol.Optional("end_time"): str, vol.Optional("statistic_ids"): [str], + vol.Required("period"): vol.Any("hour", "5minute"), } ) @websocket_api.async_response @@ -157,6 +158,7 @@ async def ws_get_statistics_during_period( start_time, end_time, msg.get("statistic_ids"), + msg.get("period"), ) connection.send_result(msg["id"], statistics) diff --git a/homeassistant/components/recorder/__init__.py b/homeassistant/components/recorder/__init__.py index d045726bc22..bbf67b23c52 100644 --- a/homeassistant/components/recorder/__init__.py +++ b/homeassistant/components/recorder/__init__.py @@ -565,7 +565,7 @@ class Recorder(threading.Thread): self.queue.put(PerodicCleanupTask()) @callback - def async_hourly_statistics(self, now): + def async_periodic_statistics(self, now): """Trigger the hourly statistics run.""" start = statistics.get_start_time() self.queue.put(StatisticsTask(start)) @@ -582,9 +582,9 @@ class Recorder(threading.Thread): self.hass, self.async_nightly_tasks, hour=4, minute=12, second=0 ) - # Compile hourly statistics every hour at *:12 + # Compile short term statistics every 5 minutes async_track_time_change( - self.hass, self.async_hourly_statistics, minute=12, second=0 + self.hass, self.async_periodic_statistics, minute=range(0, 60, 5), second=10 ) def run(self): @@ -995,20 +995,21 @@ class Recorder(threading.Thread): def _schedule_compile_missing_statistics(self, session: Session) -> None: """Add tasks for missing statistics runs.""" now = dt_util.utcnow() - last_hour = now.replace(minute=0, second=0, microsecond=0) + last_period_minutes = now.minute - now.minute % 5 + last_period = now.replace(minute=last_period_minutes, second=0, microsecond=0) start = now - timedelta(days=self.keep_days) start = start.replace(minute=0, second=0, microsecond=0) # Find the newest statistics run, if any if last_run := session.query(func.max(StatisticsRuns.start)).scalar(): - start = max(start, process_timestamp(last_run) + timedelta(hours=1)) + start = max(start, process_timestamp(last_run) + timedelta(minutes=5)) # Add tasks - while start < last_hour: - end = start + timedelta(hours=1) + while start < last_period: + end = start + timedelta(minutes=5) _LOGGER.debug("Compiling missing 
statistics for %s-%s", start, end) self.queue.put(StatisticsTask(start)) - start = start + timedelta(hours=1) + start = end def _end_session(self): """End the recorder session.""" diff --git a/homeassistant/components/recorder/migration.py b/homeassistant/components/recorder/migration.py index 74d95bb6c9c..c7ccefeca02 100644 --- a/homeassistant/components/recorder/migration.py +++ b/homeassistant/components/recorder/migration.py @@ -1,6 +1,5 @@ """Schema migration helpers.""" import contextlib -from datetime import timedelta import logging import sqlalchemy @@ -13,8 +12,6 @@ from sqlalchemy.exc import ( ) from sqlalchemy.schema import AddConstraint, DropConstraint -import homeassistant.util.dt as dt_util - from .models import ( SCHEMA_VERSION, TABLE_STATES, @@ -24,6 +21,7 @@ from .models import ( StatisticsMeta, StatisticsRuns, ) +from .statistics import get_start_time from .util import session_scope _LOGGER = logging.getLogger(__name__) @@ -483,10 +481,7 @@ def _apply_update(engine, session, new_version, old_version): # noqa: C901 elif new_version == 19: # This adds the statistic runs table, insert a fake run to prevent duplicating # statistics. - now = dt_util.utcnow() - start = now.replace(minute=0, second=0, microsecond=0) - start = start - timedelta(hours=1) - session.add(StatisticsRuns(start=start)) + session.add(StatisticsRuns(start=get_start_time())) elif new_version == 20: # This changed the precision of statistics from float to double if engine.dialect.name in ["mysql", "oracle", "postgresql"]: @@ -537,10 +532,7 @@ def _inspect_schema_version(engine, session): for index in indexes: if index["column_names"] == ["time_fired"]: # Schema addition from version 1 detected. New DB. - now = dt_util.utcnow() - start = now.replace(minute=0, second=0, microsecond=0) - start = start - timedelta(hours=1) - session.add(StatisticsRuns(start=start)) + session.add(StatisticsRuns(start=get_start_time())) session.add(SchemaChanges(schema_version=SCHEMA_VERSION)) return SCHEMA_VERSION diff --git a/homeassistant/components/recorder/models.py b/homeassistant/components/recorder/models.py index 5132dfc72bb..1c5c9fa90f0 100644 --- a/homeassistant/components/recorder/models.py +++ b/homeassistant/components/recorder/models.py @@ -1,7 +1,7 @@ """Models for SQLAlchemy.""" from __future__ import annotations -from datetime import datetime +from datetime import datetime, timedelta import json import logging from typing import TypedDict, overload @@ -20,6 +20,7 @@ from sqlalchemy import ( distinct, ) from sqlalchemy.dialects import mysql, oracle, postgresql +from sqlalchemy.ext.declarative import declared_attr from sqlalchemy.orm import declarative_base, relationship from sqlalchemy.orm.session import Session @@ -52,6 +53,7 @@ TABLE_SCHEMA_CHANGES = "schema_changes" TABLE_STATISTICS = "statistics" TABLE_STATISTICS_META = "statistics_meta" TABLE_STATISTICS_RUNS = "statistics_runs" +TABLE_STATISTICS_SHORT_TERM = "statistics_short_term" ALL_TABLES = [ TABLE_STATES, @@ -61,6 +63,7 @@ ALL_TABLES = [ TABLE_STATISTICS, TABLE_STATISTICS_META, TABLE_STATISTICS_RUNS, + TABLE_STATISTICS_SHORT_TERM, ] DATETIME_TYPE = DateTime(timezone=True).with_variant( @@ -232,21 +235,21 @@ class StatisticData(TypedDict, total=False): sum_increase: float -class Statistics(Base): # type: ignore - """Statistics.""" +class StatisticsBase: + """Statistics base class.""" - __table_args__ = ( - # Used for fetching statistics for a certain entity at a specific time - Index("ix_statistics_statistic_id_start", "metadata_id", "start"), - ) - 
__tablename__ = TABLE_STATISTICS id = Column(Integer, primary_key=True) created = Column(DATETIME_TYPE, default=dt_util.utcnow) - metadata_id = Column( - Integer, - ForeignKey(f"{TABLE_STATISTICS_META}.id", ondelete="CASCADE"), - index=True, - ) + + @declared_attr + def metadata_id(self): + """Define the metadata_id column for sub classes.""" + return Column( + Integer, + ForeignKey(f"{TABLE_STATISTICS_META}.id", ondelete="CASCADE"), + index=True, + ) + start = Column(DATETIME_TYPE, index=True) mean = Column(DOUBLE_TYPE) min = Column(DOUBLE_TYPE) @@ -256,16 +259,40 @@ class Statistics(Base): # type: ignore sum = Column(DOUBLE_TYPE) sum_increase = Column(DOUBLE_TYPE) - @staticmethod - def from_stats(metadata_id: str, start: datetime, stats: StatisticData): + @classmethod + def from_stats(cls, metadata_id: str, start: datetime, stats: StatisticData): """Create object from a statistics.""" - return Statistics( + return cls( # type: ignore metadata_id=metadata_id, start=start, **stats, ) +class Statistics(Base, StatisticsBase): # type: ignore + """Long term statistics.""" + + duration = timedelta(hours=1) + + __table_args__ = ( + # Used for fetching statistics for a certain entity at a specific time + Index("ix_statistics_statistic_id_start", "metadata_id", "start"), + ) + __tablename__ = TABLE_STATISTICS + + +class StatisticsShortTerm(Base, StatisticsBase): # type: ignore + """Short term statistics.""" + + duration = timedelta(minutes=5) + + __table_args__ = ( + # Used for fetching statistics for a certain entity at a specific time + Index("ix_statistics_short_term_statistic_id_start", "metadata_id", "start"), + ) + __tablename__ = TABLE_STATISTICS_SHORT_TERM + + class StatisticMetaData(TypedDict, total=False): """Statistic meta data class.""" @@ -401,7 +428,7 @@ def process_timestamp(ts: datetime) -> datetime: ... 
-def process_timestamp(ts): +def process_timestamp(ts: datetime | None) -> datetime | None: """Process a timestamp into datetime object.""" if ts is None: return None diff --git a/homeassistant/components/recorder/statistics.py b/homeassistant/components/recorder/statistics.py index 6ed612c60ad..afdaacca380 100644 --- a/homeassistant/components/recorder/statistics.py +++ b/homeassistant/components/recorder/statistics.py @@ -2,13 +2,14 @@ from __future__ import annotations from collections import defaultdict +from collections.abc import Iterable import dataclasses from datetime import datetime, timedelta from itertools import groupby import logging from typing import TYPE_CHECKING, Any, Callable -from sqlalchemy import bindparam +from sqlalchemy import bindparam, func from sqlalchemy.exc import SQLAlchemyError from sqlalchemy.ext import baked from sqlalchemy.orm.scoping import scoped_session @@ -33,6 +34,7 @@ from .models import ( Statistics, StatisticsMeta, StatisticsRuns, + StatisticsShortTerm, process_timestamp, process_timestamp_to_utc_isoformat, ) @@ -53,6 +55,40 @@ QUERY_STATISTICS = [ Statistics.sum_increase, ] +QUERY_STATISTICS_SHORT_TERM = [ + StatisticsShortTerm.metadata_id, + StatisticsShortTerm.start, + StatisticsShortTerm.mean, + StatisticsShortTerm.min, + StatisticsShortTerm.max, + StatisticsShortTerm.last_reset, + StatisticsShortTerm.state, + StatisticsShortTerm.sum, + StatisticsShortTerm.sum_increase, +] + +QUERY_STATISTICS_SUMMARY_MEAN = [ + StatisticsShortTerm.metadata_id, + func.avg(StatisticsShortTerm.mean), + func.min(StatisticsShortTerm.min), + func.max(StatisticsShortTerm.max), +] + +QUERY_STATISTICS_SUMMARY_SUM = [ + StatisticsShortTerm.metadata_id, + StatisticsShortTerm.start, + StatisticsShortTerm.last_reset, + StatisticsShortTerm.state, + StatisticsShortTerm.sum, + StatisticsShortTerm.sum_increase, + func.row_number() + .over( + partition_by=StatisticsShortTerm.metadata_id, + order_by=StatisticsShortTerm.start.desc(), + ) + .label("rownum"), +] + QUERY_STATISTIC_META = [ StatisticsMeta.id, StatisticsMeta.statistic_id, @@ -67,7 +103,9 @@ QUERY_STATISTIC_META_ID = [ ] STATISTICS_BAKERY = "recorder_statistics_bakery" -STATISTICS_META_BAKERY = "recorder_statistics_bakery" +STATISTICS_META_BAKERY = "recorder_statistics_meta_bakery" +STATISTICS_SHORT_TERM_BAKERY = "recorder_statistics_short_term_bakery" + # Convert pressure and temperature statistics from the native unit used for statistics # to the units configured by the user @@ -108,6 +146,7 @@ def async_setup(hass: HomeAssistant) -> None: """Set up the history hooks.""" hass.data[STATISTICS_BAKERY] = baked.bakery() hass.data[STATISTICS_META_BAKERY] = baked.bakery() + hass.data[STATISTICS_SHORT_TERM_BAKERY] = baked.bakery() def entity_id_changed(event: Event) -> None: """Handle entity_id changed.""" @@ -137,9 +176,11 @@ def async_setup(hass: HomeAssistant) -> None: def get_start_time() -> datetime: """Return start time.""" - last_hour = dt_util.utcnow() - timedelta(hours=1) - start = last_hour.replace(minute=0, second=0, microsecond=0) - return start + now = dt_util.utcnow() + current_period_minutes = now.minute - now.minute % 5 + current_period = now.replace(minute=current_period_minutes, second=0, microsecond=0) + last_period = current_period - timedelta(minutes=5) + return last_period def _get_metadata_ids( @@ -204,11 +245,76 @@ def _update_or_add_metadata( return metadata_id +def compile_hourly_statistics( + instance: Recorder, session: scoped_session, start: datetime +) -> None: + """Compile hourly 
statistics.""" + start_time = start.replace(minute=0) + end_time = start_time + timedelta(hours=1) + # Get last hour's average, min, max + summary = {} + baked_query = instance.hass.data[STATISTICS_SHORT_TERM_BAKERY]( + lambda session: session.query(*QUERY_STATISTICS_SUMMARY_MEAN) + ) + + baked_query += lambda q: q.filter( + StatisticsShortTerm.start >= bindparam("start_time") + ) + baked_query += lambda q: q.filter(StatisticsShortTerm.start < bindparam("end_time")) + baked_query += lambda q: q.group_by(StatisticsShortTerm.metadata_id) + baked_query += lambda q: q.order_by(StatisticsShortTerm.metadata_id) + + stats = execute( + baked_query(session).params(start_time=start_time, end_time=end_time) + ) + + if stats: + for stat in stats: + metadata_id, _mean, _min, _max = stat + summary[metadata_id] = { + "metadata_id": metadata_id, + "mean": _mean, + "min": _min, + "max": _max, + } + + # Get last hour's sum + subquery = ( + session.query(*QUERY_STATISTICS_SUMMARY_SUM) + .filter(StatisticsShortTerm.start >= bindparam("start_time")) + .filter(StatisticsShortTerm.start < bindparam("end_time")) + .subquery() + ) + query = ( + session.query(subquery) + .filter(subquery.c.rownum == 1) + .order_by(subquery.c.metadata_id) + ) + stats = execute(query.params(start_time=start_time, end_time=end_time)) + + if stats: + for stat in stats: + metadata_id, start, last_reset, state, _sum, sum_increase, _ = stat + summary[metadata_id] = { + **summary.get(metadata_id, {}), + **{ + "metadata_id": metadata_id, + "last_reset": process_timestamp(last_reset), + "state": state, + "sum": _sum, + "sum_increase": sum_increase, + }, + } + + for stat in summary.values(): + session.add(Statistics.from_stats(stat.pop("metadata_id"), start_time, stat)) + + @retryable_database_job("statistics") def compile_statistics(instance: Recorder, start: datetime) -> bool: """Compile statistics.""" start = dt_util.as_utc(start) - end = start + timedelta(hours=1) + end = start + timedelta(minutes=5) with session_scope(session=instance.get_session()) as session: # type: ignore if session.query(StatisticsRuns).filter_by(start=start).first(): @@ -232,13 +338,20 @@ def compile_statistics(instance: Recorder, start: datetime) -> bool: instance.hass, session, entity_id, stat["meta"] ) try: - session.add(Statistics.from_stats(metadata_id, start, stat["stat"])) + session.add( + StatisticsShortTerm.from_stats(metadata_id, start, stat["stat"]) + ) except SQLAlchemyError: _LOGGER.exception( "Unexpected exception when inserting statistics %s:%s ", metadata_id, stat, ) + + if start.minute == 55: + # A full hour is ready, summarize it + compile_hourly_statistics(instance, session, start) + session.add(StatisticsRuns(start=start)) return True @@ -354,11 +467,36 @@ def list_statistic_ids( ] +def _statistics_during_period_query( + hass: HomeAssistant, + end_time: datetime | None, + statistic_ids: list[str] | None, + bakery: Any, + base_query: Iterable, + table: type[Statistics | StatisticsShortTerm], +) -> Callable: + baked_query = hass.data[bakery](lambda session: session.query(*base_query)) + + baked_query += lambda q: q.filter(table.start >= bindparam("start_time")) + + if end_time is not None: + baked_query += lambda q: q.filter(table.start < bindparam("end_time")) + + if statistic_ids is not None: + baked_query += lambda q: q.filter( + table.metadata_id.in_(bindparam("metadata_ids")) + ) + + baked_query += lambda q: q.order_by(table.metadata_id, table.start) + return baked_query # type: ignore[no-any-return] + + def statistics_during_period( hass: 
HomeAssistant, start_time: datetime, end_time: datetime | None = None, statistic_ids: list[str] | None = None, + period: str = "hour", ) -> dict[str, list[dict[str, str]]]: """Return states changes during UTC period start_time - end_time.""" metadata = None @@ -367,23 +505,22 @@ def statistics_during_period( if not metadata: return {} - baked_query = hass.data[STATISTICS_BAKERY]( - lambda session: session.query(*QUERY_STATISTICS) - ) - - baked_query += lambda q: q.filter(Statistics.start >= bindparam("start_time")) - - if end_time is not None: - baked_query += lambda q: q.filter(Statistics.start < bindparam("end_time")) - metadata_ids = None if statistic_ids is not None: - baked_query += lambda q: q.filter( - Statistics.metadata_id.in_(bindparam("metadata_ids")) - ) metadata_ids = list(metadata.keys()) - baked_query += lambda q: q.order_by(Statistics.metadata_id, Statistics.start) + if period == "hour": + bakery = STATISTICS_BAKERY + base_query = QUERY_STATISTICS + table = Statistics + else: + bakery = STATISTICS_SHORT_TERM_BAKERY + base_query = QUERY_STATISTICS_SHORT_TERM + table = StatisticsShortTerm + + baked_query = _statistics_during_period_query( + hass, end_time, statistic_ids, bakery, base_query, table + ) stats = execute( baked_query(session).params( @@ -392,7 +529,9 @@ def statistics_during_period( ) if not stats: return {} - return _sorted_statistics_to_dict(hass, stats, statistic_ids, metadata, True) + return _sorted_statistics_to_dict( + hass, stats, statistic_ids, metadata, True, table.duration + ) def get_last_statistics( @@ -405,15 +544,15 @@ def get_last_statistics( if not metadata: return {} - baked_query = hass.data[STATISTICS_BAKERY]( - lambda session: session.query(*QUERY_STATISTICS) + baked_query = hass.data[STATISTICS_SHORT_TERM_BAKERY]( + lambda session: session.query(*QUERY_STATISTICS_SHORT_TERM) ) baked_query += lambda q: q.filter_by(metadata_id=bindparam("metadata_id")) metadata_id = next(iter(metadata.keys())) baked_query += lambda q: q.order_by( - Statistics.metadata_id, Statistics.start.desc() + StatisticsShortTerm.metadata_id, StatisticsShortTerm.start.desc() ) baked_query += lambda q: q.limit(bindparam("number_of_stats")) @@ -427,7 +566,12 @@ def get_last_statistics( return {} return _sorted_statistics_to_dict( - hass, stats, statistic_ids, metadata, convert_units + hass, + stats, + statistic_ids, + metadata, + convert_units, + StatisticsShortTerm.duration, ) @@ -437,6 +581,7 @@ def _sorted_statistics_to_dict( statistic_ids: list[str] | None, metadata: dict[str, StatisticMetaData], convert_units: bool, + duration: timedelta, ) -> dict[str, list[dict]]: """Convert SQL results into JSON friendly data structure.""" result: dict = defaultdict(list) @@ -463,7 +608,7 @@ def _sorted_statistics_to_dict( ent_results = result[meta_id] for db_state in group: start = process_timestamp(db_state.start) - end = start + timedelta(hours=1) + end = start + duration ent_results.append( { "statistic_id": statistic_id, diff --git a/homeassistant/components/recorder/util.py b/homeassistant/components/recorder/util.py index f492b754125..a3ca0514b4c 100644 --- a/homeassistant/components/recorder/util.py +++ b/homeassistant/components/recorder/util.py @@ -25,6 +25,7 @@ from .models import ( TABLE_STATISTICS, TABLE_STATISTICS_META, TABLE_STATISTICS_RUNS, + TABLE_STATISTICS_SHORT_TERM, RecorderRuns, process_timestamp, ) @@ -185,7 +186,12 @@ def basic_sanity_check(cursor): for table in ALL_TABLES: # The statistics tables may not be present in old databases - if table in 
[TABLE_STATISTICS, TABLE_STATISTICS_META, TABLE_STATISTICS_RUNS]: + if table in [ + TABLE_STATISTICS, + TABLE_STATISTICS_META, + TABLE_STATISTICS_RUNS, + TABLE_STATISTICS_SHORT_TERM, + ]: continue if table in (TABLE_RECORDER_RUNS, TABLE_SCHEMA_CHANGES): cursor.execute(f"SELECT * FROM {table};") # nosec # not injection diff --git a/tests/components/history/test_init.py b/tests/components/history/test_init.py index b237659d528..661d703725d 100644 --- a/tests/components/history/test_init.py +++ b/tests/components/history/test_init.py @@ -875,7 +875,7 @@ async def test_statistics_during_period( await hass.async_add_executor_job(trigger_db_commit, hass) await hass.async_block_till_done() - hass.data[recorder.DATA_INSTANCE].do_adhoc_statistics(period="hourly", start=now) + hass.data[recorder.DATA_INSTANCE].do_adhoc_statistics(start=now) await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done) client = await hass_ws_client() @@ -886,19 +886,20 @@ async def test_statistics_during_period( "start_time": now.isoformat(), "end_time": now.isoformat(), "statistic_ids": ["sensor.test"], + "period": "hour", } ) response = await client.receive_json() assert response["success"] assert response["result"] == {} - client = await hass_ws_client() await client.send_json( { - "id": 1, + "id": 2, "type": "history/statistics_during_period", "start_time": now.isoformat(), "statistic_ids": ["sensor.test"], + "period": "5minute", } ) response = await client.receive_json() @@ -908,7 +909,7 @@ async def test_statistics_during_period( { "statistic_id": "sensor.test", "start": now.isoformat(), - "end": (now + timedelta(hours=1)).isoformat(), + "end": (now + timedelta(minutes=5)).isoformat(), "mean": approx(value), "min": approx(value), "max": approx(value), @@ -938,6 +939,7 @@ async def test_statistics_during_period_bad_start_time(hass, hass_ws_client): "id": 1, "type": "history/statistics_during_period", "start_time": "cats", + "period": "5minute", } ) response = await client.receive_json() @@ -964,6 +966,7 @@ async def test_statistics_during_period_bad_end_time(hass, hass_ws_client): "type": "history/statistics_during_period", "start_time": now.isoformat(), "end_time": "dogs", + "period": "5minute", } ) response = await client.receive_json() @@ -1011,7 +1014,7 @@ async def test_list_statistic_ids(hass, hass_ws_client, units, attributes, unit) {"statistic_id": "sensor.test", "unit_of_measurement": unit} ] - hass.data[recorder.DATA_INSTANCE].do_adhoc_statistics(period="hourly", start=now) + hass.data[recorder.DATA_INSTANCE].do_adhoc_statistics(start=now) await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done) # Remove the state, statistics will now be fetched from the database hass.states.async_remove("sensor.test") diff --git a/tests/components/recorder/conftest.py b/tests/components/recorder/conftest.py index 2a29513a88e..e7786307b69 100644 --- a/tests/components/recorder/conftest.py +++ b/tests/components/recorder/conftest.py @@ -30,9 +30,11 @@ async def async_setup_recorder_instance( hass: HomeAssistant, config: ConfigType | None = None ) -> Recorder: """Setup and return recorder instance.""" # noqa: D401 - stats = recorder.Recorder.async_hourly_statistics if enable_statistics else None + stats = ( + recorder.Recorder.async_periodic_statistics if enable_statistics else None + ) with patch( - "homeassistant.components.recorder.Recorder.async_hourly_statistics", + "homeassistant.components.recorder.Recorder.async_periodic_statistics", side_effect=stats, 
autospec=True, ): diff --git a/tests/components/recorder/test_init.py b/tests/components/recorder/test_init.py index fa0e8b7349b..e41a0da34ba 100644 --- a/tests/components/recorder/test_init.py +++ b/tests/components/recorder/test_init.py @@ -700,41 +700,41 @@ def test_auto_statistics(hass_recorder): tz = dt_util.get_time_zone("Europe/Copenhagen") dt_util.set_default_time_zone(tz) - # Statistics is scheduled to happen at *:12am every hour. Exercise this behavior by + # Statistics is scheduled to happen every 5 minutes. Exercise this behavior by # firing time changed events and advancing the clock around this time. Pick an # arbitrary year in the future to avoid boundary conditions relative to the current # date. # - # The clock is started at 4:15am then advanced forward below + # The clock is started at 4:16am then advanced forward below now = dt_util.utcnow() - test_time = datetime(now.year + 2, 1, 1, 4, 15, 0, tzinfo=tz) + test_time = datetime(now.year + 2, 1, 1, 4, 16, 0, tzinfo=tz) run_tasks_at_time(hass, test_time) with patch( "homeassistant.components.recorder.statistics.compile_statistics", return_value=True, ) as compile_statistics: - # Advance one hour, and the statistics task should run - test_time = test_time + timedelta(hours=1) + # Advance 5 minutes, and the statistics task should run + test_time = test_time + timedelta(minutes=5) run_tasks_at_time(hass, test_time) assert len(compile_statistics.mock_calls) == 1 compile_statistics.reset_mock() - # Advance one hour, and the statistics task should run again - test_time = test_time + timedelta(hours=1) + # Advance 5 minutes, and the statistics task should run again + test_time = test_time + timedelta(minutes=5) run_tasks_at_time(hass, test_time) assert len(compile_statistics.mock_calls) == 1 compile_statistics.reset_mock() - # Advance less than one full hour. The task should not run. - test_time = test_time + timedelta(minutes=50) + # Advance less than 5 minutes. The task should not run. 
+ test_time = test_time + timedelta(minutes=3) run_tasks_at_time(hass, test_time) assert len(compile_statistics.mock_calls) == 0 - # Advance to the next hour, and the statistics task should run again - test_time = test_time + timedelta(hours=1) + # Advance 5 minutes, and the statistics task should run again + test_time = test_time + timedelta(minutes=5) run_tasks_at_time(hass, test_time) assert len(compile_statistics.mock_calls) == 1 @@ -754,8 +754,8 @@ def test_statistics_runs_initiated(hass_recorder): assert len(statistics_runs) == 1 last_run = process_timestamp(statistics_runs[0].start) assert process_timestamp(last_run) == now.replace( - minute=0, second=0, microsecond=0 - ) - timedelta(hours=1) + minute=now.minute - now.minute % 5, second=0, microsecond=0 + ) - timedelta(minutes=5) def test_compile_missing_statistics(tmpdir): @@ -776,7 +776,7 @@ def test_compile_missing_statistics(tmpdir): statistics_runs = list(session.query(StatisticsRuns)) assert len(statistics_runs) == 1 last_run = process_timestamp(statistics_runs[0].start) - assert last_run == now - timedelta(hours=1) + assert last_run == now - timedelta(minutes=5) wait_recording_done(hass) wait_recording_done(hass) @@ -795,7 +795,7 @@ def test_compile_missing_statistics(tmpdir): with session_scope(hass=hass) as session: statistics_runs = list(session.query(StatisticsRuns)) - assert len(statistics_runs) == 2 + assert len(statistics_runs) == 13 # 12 5-minute runs last_run = process_timestamp(statistics_runs[1].start) assert last_run == now diff --git a/tests/components/recorder/test_statistics.py b/tests/components/recorder/test_statistics.py index 1e723e7e2ca..d82f74c155a 100644 --- a/tests/components/recorder/test_statistics.py +++ b/tests/components/recorder/test_statistics.py @@ -9,7 +9,7 @@ from pytest import approx from homeassistant.components.recorder import history from homeassistant.components.recorder.const import DATA_INSTANCE from homeassistant.components.recorder.models import ( - Statistics, + StatisticsShortTerm, process_timestamp_to_utc_isoformat, ) from homeassistant.components.recorder.statistics import ( @@ -34,18 +34,18 @@ def test_compile_hourly_statistics(hass_recorder): assert dict(states) == dict(hist) for kwargs in ({}, {"statistic_ids": ["sensor.test1"]}): - stats = statistics_during_period(hass, zero, **kwargs) + stats = statistics_during_period(hass, zero, period="5minute", **kwargs) assert stats == {} stats = get_last_statistics(hass, 0, "sensor.test1", True) assert stats == {} - recorder.do_adhoc_statistics(period="hourly", start=zero) - recorder.do_adhoc_statistics(period="hourly", start=four) + recorder.do_adhoc_statistics(start=zero) + recorder.do_adhoc_statistics(start=four) wait_recording_done(hass) expected_1 = { "statistic_id": "sensor.test1", "start": process_timestamp_to_utc_isoformat(zero), - "end": process_timestamp_to_utc_isoformat(zero + timedelta(hours=1)), + "end": process_timestamp_to_utc_isoformat(zero + timedelta(minutes=5)), "mean": approx(14.915254237288135), "min": approx(10.0), "max": approx(20.0), @@ -58,7 +58,7 @@ def test_compile_hourly_statistics(hass_recorder): expected_2 = { "statistic_id": "sensor.test1", "start": process_timestamp_to_utc_isoformat(four), - "end": process_timestamp_to_utc_isoformat(four + timedelta(hours=1)), + "end": process_timestamp_to_utc_isoformat(four + timedelta(minutes=5)), "mean": approx(20.0), "min": approx(20.0), "max": approx(20.0), @@ -78,13 +78,17 @@ def test_compile_hourly_statistics(hass_recorder): ] # Test statistics_during_period - 
stats = statistics_during_period(hass, zero) + stats = statistics_during_period(hass, zero, period="5minute") assert stats == {"sensor.test1": expected_stats1, "sensor.test2": expected_stats2} - stats = statistics_during_period(hass, zero, statistic_ids=["sensor.test2"]) + stats = statistics_during_period( + hass, zero, statistic_ids=["sensor.test2"], period="5minute" + ) assert stats == {"sensor.test2": expected_stats2} - stats = statistics_during_period(hass, zero, statistic_ids=["sensor.test3"]) + stats = statistics_during_period( + hass, zero, statistic_ids=["sensor.test3"], period="5minute" + ) assert stats == {} # Test get_last_statistics @@ -130,7 +134,7 @@ def mock_sensor_statistics(): def mock_from_stats(): """Mock out Statistics.from_stats.""" counter = 0 - real_from_stats = Statistics.from_stats + real_from_stats = StatisticsShortTerm.from_stats def from_stats(metadata_id, start, stats): nonlocal counter @@ -140,17 +144,17 @@ def mock_from_stats(): return real_from_stats(metadata_id, start, stats) with patch( - "homeassistant.components.recorder.statistics.Statistics.from_stats", + "homeassistant.components.recorder.statistics.StatisticsShortTerm.from_stats", side_effect=from_stats, autospec=True, ): yield -def test_compile_hourly_statistics_exception( +def test_compile_periodic_statistics_exception( hass_recorder, mock_sensor_statistics, mock_from_stats ): - """Test exception handling when compiling hourly statistics.""" + """Test exception handling when compiling periodic statistics.""" def mock_from_stats(): raise ValueError @@ -160,13 +164,13 @@ def test_compile_hourly_statistics_exception( setup_component(hass, "sensor", {}) now = dt_util.utcnow() - recorder.do_adhoc_statistics(period="hourly", start=now) - recorder.do_adhoc_statistics(period="hourly", start=now + timedelta(hours=1)) + recorder.do_adhoc_statistics(start=now) + recorder.do_adhoc_statistics(start=now + timedelta(minutes=5)) wait_recording_done(hass) expected_1 = { "statistic_id": "sensor.test1", "start": process_timestamp_to_utc_isoformat(now), - "end": process_timestamp_to_utc_isoformat(now + timedelta(hours=1)), + "end": process_timestamp_to_utc_isoformat(now + timedelta(minutes=5)), "mean": None, "min": None, "max": None, @@ -178,8 +182,8 @@ def test_compile_hourly_statistics_exception( } expected_2 = { "statistic_id": "sensor.test1", - "start": process_timestamp_to_utc_isoformat(now + timedelta(hours=1)), - "end": process_timestamp_to_utc_isoformat(now + timedelta(hours=2)), + "start": process_timestamp_to_utc_isoformat(now + timedelta(minutes=5)), + "end": process_timestamp_to_utc_isoformat(now + timedelta(minutes=10)), "mean": None, "min": None, "max": None, @@ -201,7 +205,7 @@ def test_compile_hourly_statistics_exception( {**expected_2, "statistic_id": "sensor.test3"}, ] - stats = statistics_during_period(hass, now) + stats = statistics_during_period(hass, now, period="5minute") assert stats == { "sensor.test1": expected_stats1, "sensor.test2": expected_stats2, @@ -229,17 +233,17 @@ def test_rename_entity(hass_recorder): assert dict(states) == dict(hist) for kwargs in ({}, {"statistic_ids": ["sensor.test1"]}): - stats = statistics_during_period(hass, zero, **kwargs) + stats = statistics_during_period(hass, zero, period="5minute", **kwargs) assert stats == {} stats = get_last_statistics(hass, 0, "sensor.test1", True) assert stats == {} - recorder.do_adhoc_statistics(period="hourly", start=zero) + recorder.do_adhoc_statistics(start=zero) wait_recording_done(hass) expected_1 = { "statistic_id": 
"sensor.test1", "start": process_timestamp_to_utc_isoformat(zero), - "end": process_timestamp_to_utc_isoformat(zero + timedelta(hours=1)), + "end": process_timestamp_to_utc_isoformat(zero + timedelta(minutes=5)), "mean": approx(14.915254237288135), "min": approx(10.0), "max": approx(20.0), @@ -259,13 +263,13 @@ def test_rename_entity(hass_recorder): {**expected_1, "statistic_id": "sensor.test99"}, ] - stats = statistics_during_period(hass, zero) + stats = statistics_during_period(hass, zero, period="5minute") assert stats == {"sensor.test1": expected_stats1, "sensor.test2": expected_stats2} entity_reg.async_update_entity(reg_entry.entity_id, new_entity_id="sensor.test99") hass.block_till_done() - stats = statistics_during_period(hass, zero) + stats = statistics_during_period(hass, zero, period="5minute") assert stats == {"sensor.test99": expected_stats99, "sensor.test2": expected_stats2} @@ -285,7 +289,7 @@ def test_statistics_duplicated(hass_recorder, caplog): with patch( "homeassistant.components.sensor.recorder.compile_statistics" ) as compile_statistics: - recorder.do_adhoc_statistics(period="hourly", start=zero) + recorder.do_adhoc_statistics(start=zero) wait_recording_done(hass) assert compile_statistics.called compile_statistics.reset_mock() @@ -293,7 +297,7 @@ def test_statistics_duplicated(hass_recorder, caplog): assert "Statistics already compiled" not in caplog.text caplog.clear() - recorder.do_adhoc_statistics(period="hourly", start=zero) + recorder.do_adhoc_statistics(start=zero) wait_recording_done(hass) assert not compile_statistics.called compile_statistics.reset_mock() @@ -332,10 +336,10 @@ def record_states(hass): return hass.states.get(entity_id) zero = dt_util.utcnow() - one = zero + timedelta(minutes=1) - two = one + timedelta(minutes=15) - three = two + timedelta(minutes=30) - four = three + timedelta(minutes=15) + one = zero + timedelta(seconds=1 * 5) + two = one + timedelta(seconds=15 * 5) + three = two + timedelta(seconds=30 * 5) + four = three + timedelta(seconds=15 * 5) states = {mp: [], sns1: [], sns2: [], sns3: [], sns4: []} with patch("homeassistant.components.recorder.dt_util.utcnow", return_value=one): diff --git a/tests/components/sensor/test_recorder.py b/tests/components/sensor/test_recorder.py index 74adf717e5b..c1278202443 100644 --- a/tests/components/sensor/test_recorder.py +++ b/tests/components/sensor/test_recorder.py @@ -2,6 +2,7 @@ # pylint: disable=protected-access,invalid-name from datetime import timedelta import math +from statistics import mean from unittest.mock import patch import pytest @@ -83,19 +84,19 @@ def test_compile_hourly_statistics( hist = history.get_significant_states(hass, zero, four) assert dict(states) == dict(hist) - recorder.do_adhoc_statistics(period="hourly", start=zero) + recorder.do_adhoc_statistics(start=zero) wait_recording_done(hass) statistic_ids = list_statistic_ids(hass) assert statistic_ids == [ {"statistic_id": "sensor.test1", "unit_of_measurement": native_unit} ] - stats = statistics_during_period(hass, zero) + stats = statistics_during_period(hass, zero, period="5minute") assert stats == { "sensor.test1": [ { "statistic_id": "sensor.test1", "start": process_timestamp_to_utc_isoformat(zero), - "end": process_timestamp_to_utc_isoformat(zero + timedelta(hours=1)), + "end": process_timestamp_to_utc_isoformat(zero + timedelta(minutes=5)), "mean": approx(mean), "min": approx(min), "max": approx(max), @@ -146,7 +147,7 @@ def test_compile_hourly_statistics_unsupported(hass_recorder, caplog, attributes hist = 
history.get_significant_states(hass, zero, four) assert dict(states) == dict(hist) - recorder.do_adhoc_statistics(period="hourly", start=zero) + recorder.do_adhoc_statistics(start=zero) wait_recording_done(hass) statistic_ids = list_statistic_ids(hass) assert statistic_ids == [ @@ -154,13 +155,13 @@ def test_compile_hourly_statistics_unsupported(hass_recorder, caplog, attributes {"statistic_id": "sensor.test6", "unit_of_measurement": "°C"}, {"statistic_id": "sensor.test7", "unit_of_measurement": "°C"}, ] - stats = statistics_during_period(hass, zero) + stats = statistics_during_period(hass, zero, period="5minute") assert stats == { "sensor.test1": [ { "statistic_id": "sensor.test1", "start": process_timestamp_to_utc_isoformat(zero), - "end": process_timestamp_to_utc_isoformat(zero + timedelta(hours=1)), + "end": process_timestamp_to_utc_isoformat(zero + timedelta(minutes=5)), "mean": approx(13.050847), "min": approx(-10.0), "max": approx(30.0), @@ -175,7 +176,7 @@ def test_compile_hourly_statistics_unsupported(hass_recorder, caplog, attributes { "statistic_id": "sensor.test6", "start": process_timestamp_to_utc_isoformat(zero), - "end": process_timestamp_to_utc_isoformat(zero + timedelta(hours=1)), + "end": process_timestamp_to_utc_isoformat(zero + timedelta(minutes=5)), "mean": approx(13.050847), "min": approx(-10.0), "max": approx(30.0), @@ -190,7 +191,7 @@ def test_compile_hourly_statistics_unsupported(hass_recorder, caplog, attributes { "statistic_id": "sensor.test7", "start": process_timestamp_to_utc_isoformat(zero), - "end": process_timestamp_to_utc_isoformat(zero + timedelta(hours=1)), + "end": process_timestamp_to_utc_isoformat(zero + timedelta(minutes=5)), "mean": approx(13.050847), "min": approx(-10.0), "max": approx(30.0), @@ -227,7 +228,10 @@ def test_compile_hourly_sum_statistics_amount( hass_recorder, caplog, units, state_class, device_class, unit, display_unit, factor ): """Test compiling hourly statistics.""" - zero = dt_util.utcnow() + period0 = dt_util.utcnow() + period0_end = period1 = period0 + timedelta(minutes=5) + period1_end = period2 = period0 + timedelta(minutes=10) + period2_end = period0 + timedelta(minutes=15) hass = hass_recorder() hass.config.units = units recorder = hass.data[DATA_INSTANCE] @@ -241,34 +245,34 @@ def test_compile_hourly_sum_statistics_amount( seq = [10, 15, 20, 10, 30, 40, 50, 60, 70] four, eight, states = record_meter_states( - hass, zero, "sensor.test1", attributes, seq + hass, period0, "sensor.test1", attributes, seq ) hist = history.get_significant_states( - hass, zero - timedelta.resolution, eight + timedelta.resolution + hass, period0 - timedelta.resolution, eight + timedelta.resolution ) assert dict(states)["sensor.test1"] == dict(hist)["sensor.test1"] - recorder.do_adhoc_statistics(period="hourly", start=zero) + recorder.do_adhoc_statistics(start=period0) wait_recording_done(hass) - recorder.do_adhoc_statistics(period="hourly", start=zero + timedelta(hours=1)) + recorder.do_adhoc_statistics(start=period1) wait_recording_done(hass) - recorder.do_adhoc_statistics(period="hourly", start=zero + timedelta(hours=2)) + recorder.do_adhoc_statistics(start=period2) wait_recording_done(hass) statistic_ids = list_statistic_ids(hass) assert statistic_ids == [ {"statistic_id": "sensor.test1", "unit_of_measurement": display_unit} ] - stats = statistics_during_period(hass, zero) + stats = statistics_during_period(hass, period0, period="5minute") assert stats == { "sensor.test1": [ { "statistic_id": "sensor.test1", - "start": 
process_timestamp_to_utc_isoformat(zero), - "end": process_timestamp_to_utc_isoformat(zero + timedelta(hours=1)), + "start": process_timestamp_to_utc_isoformat(period0), + "end": process_timestamp_to_utc_isoformat(period0_end), "max": None, "mean": None, "min": None, - "last_reset": process_timestamp_to_utc_isoformat(zero), + "last_reset": process_timestamp_to_utc_isoformat(period0), "state": approx(factor * seq[2]), "sum": approx(factor * 10.0), "sum_decrease": approx(factor * 0.0), @@ -276,8 +280,8 @@ def test_compile_hourly_sum_statistics_amount( }, { "statistic_id": "sensor.test1", - "start": process_timestamp_to_utc_isoformat(zero + timedelta(hours=1)), - "end": process_timestamp_to_utc_isoformat(zero + timedelta(hours=2)), + "start": process_timestamp_to_utc_isoformat(period1), + "end": process_timestamp_to_utc_isoformat(period1_end), "max": None, "mean": None, "min": None, @@ -289,8 +293,8 @@ def test_compile_hourly_sum_statistics_amount( }, { "statistic_id": "sensor.test1", - "start": process_timestamp_to_utc_isoformat(zero + timedelta(hours=2)), - "end": process_timestamp_to_utc_isoformat(zero + timedelta(hours=3)), + "start": process_timestamp_to_utc_isoformat(period2), + "end": process_timestamp_to_utc_isoformat(period2_end), "max": None, "mean": None, "min": None, @@ -341,7 +345,7 @@ def test_compile_hourly_sum_statistics_amount_reset_every_state_change( states = {"sensor.test1": []} one = zero for i in range(len(seq)): - one = one + timedelta(minutes=1) + one = one + timedelta(seconds=5) _states = record_meter_state( hass, one, "sensor.test1", attributes, seq[i : i + 1] ) @@ -355,19 +359,19 @@ def test_compile_hourly_sum_statistics_amount_reset_every_state_change( ) assert dict(states)["sensor.test1"] == dict(hist)["sensor.test1"] - recorder.do_adhoc_statistics(period="hourly", start=zero) + recorder.do_adhoc_statistics(start=zero) wait_recording_done(hass) statistic_ids = list_statistic_ids(hass) assert statistic_ids == [ {"statistic_id": "sensor.test1", "unit_of_measurement": native_unit} ] - stats = statistics_during_period(hass, zero) + stats = statistics_during_period(hass, zero, period="5minute") assert stats == { "sensor.test1": [ { "statistic_id": "sensor.test1", "start": process_timestamp_to_utc_isoformat(zero), - "end": process_timestamp_to_utc_isoformat(zero + timedelta(hours=1)), + "end": process_timestamp_to_utc_isoformat(zero + timedelta(minutes=5)), "max": None, "mean": None, "min": None, @@ -408,7 +412,7 @@ def test_compile_hourly_sum_statistics_nan_inf_state( states = {"sensor.test1": []} one = zero for i in range(len(seq)): - one = one + timedelta(minutes=1) + one = one + timedelta(seconds=5) _states = record_meter_state( hass, one, "sensor.test1", attributes, seq[i : i + 1] ) @@ -422,19 +426,19 @@ def test_compile_hourly_sum_statistics_nan_inf_state( ) assert dict(states)["sensor.test1"] == dict(hist)["sensor.test1"] - recorder.do_adhoc_statistics(period="hourly", start=zero) + recorder.do_adhoc_statistics(start=zero) wait_recording_done(hass) statistic_ids = list_statistic_ids(hass) assert statistic_ids == [ {"statistic_id": "sensor.test1", "unit_of_measurement": native_unit} ] - stats = statistics_during_period(hass, zero) + stats = statistics_during_period(hass, zero, period="5minute") assert stats == { "sensor.test1": [ { "statistic_id": "sensor.test1", "start": process_timestamp_to_utc_isoformat(zero), - "end": process_timestamp_to_utc_isoformat(zero + timedelta(hours=1)), + "end": process_timestamp_to_utc_isoformat(zero + timedelta(minutes=5)), "max": 
None, "mean": None, "min": None, @@ -466,7 +470,10 @@ def test_compile_hourly_sum_statistics_total_no_reset( hass_recorder, caplog, device_class, unit, native_unit, factor ): """Test compiling hourly statistics.""" - zero = dt_util.utcnow() + period0 = dt_util.utcnow() + period0_end = period1 = period0 + timedelta(minutes=5) + period1_end = period2 = period0 + timedelta(minutes=10) + period2_end = period0 + timedelta(minutes=15) hass = hass_recorder() recorder = hass.data[DATA_INSTANCE] setup_component(hass, "sensor", {}) @@ -478,30 +485,30 @@ def test_compile_hourly_sum_statistics_total_no_reset( seq = [10, 15, 20, 10, 30, 40, 50, 60, 70] four, eight, states = record_meter_states( - hass, zero, "sensor.test1", attributes, seq + hass, period0, "sensor.test1", attributes, seq ) hist = history.get_significant_states( - hass, zero - timedelta.resolution, eight + timedelta.resolution + hass, period0 - timedelta.resolution, eight + timedelta.resolution ) assert dict(states)["sensor.test1"] == dict(hist)["sensor.test1"] - recorder.do_adhoc_statistics(period="hourly", start=zero) + recorder.do_adhoc_statistics(start=period0) wait_recording_done(hass) - recorder.do_adhoc_statistics(period="hourly", start=zero + timedelta(hours=1)) + recorder.do_adhoc_statistics(start=period1) wait_recording_done(hass) - recorder.do_adhoc_statistics(period="hourly", start=zero + timedelta(hours=2)) + recorder.do_adhoc_statistics(start=period2) wait_recording_done(hass) statistic_ids = list_statistic_ids(hass) assert statistic_ids == [ {"statistic_id": "sensor.test1", "unit_of_measurement": native_unit} ] - stats = statistics_during_period(hass, zero) + stats = statistics_during_period(hass, period0, period="5minute") assert stats == { "sensor.test1": [ { "statistic_id": "sensor.test1", - "start": process_timestamp_to_utc_isoformat(zero), - "end": process_timestamp_to_utc_isoformat(zero + timedelta(hours=1)), + "start": process_timestamp_to_utc_isoformat(period0), + "end": process_timestamp_to_utc_isoformat(period0_end), "max": None, "mean": None, "min": None, @@ -513,8 +520,8 @@ def test_compile_hourly_sum_statistics_total_no_reset( }, { "statistic_id": "sensor.test1", - "start": process_timestamp_to_utc_isoformat(zero + timedelta(hours=1)), - "end": process_timestamp_to_utc_isoformat(zero + timedelta(hours=2)), + "start": process_timestamp_to_utc_isoformat(period1), + "end": process_timestamp_to_utc_isoformat(period1_end), "max": None, "mean": None, "min": None, @@ -526,8 +533,8 @@ def test_compile_hourly_sum_statistics_total_no_reset( }, { "statistic_id": "sensor.test1", - "start": process_timestamp_to_utc_isoformat(zero + timedelta(hours=2)), - "end": process_timestamp_to_utc_isoformat(zero + timedelta(hours=3)), + "start": process_timestamp_to_utc_isoformat(period2), + "end": process_timestamp_to_utc_isoformat(period2_end), "max": None, "mean": None, "min": None, @@ -555,7 +562,10 @@ def test_compile_hourly_sum_statistics_total_increasing( hass_recorder, caplog, device_class, unit, native_unit, factor ): """Test compiling hourly statistics.""" - zero = dt_util.utcnow() + period0 = dt_util.utcnow() + period0_end = period1 = period0 + timedelta(minutes=5) + period1_end = period2 = period0 + timedelta(minutes=10) + period2_end = period0 + timedelta(minutes=15) hass = hass_recorder() recorder = hass.data[DATA_INSTANCE] setup_component(hass, "sensor", {}) @@ -567,30 +577,30 @@ def test_compile_hourly_sum_statistics_total_increasing( seq = [10, 15, 20, 10, 30, 40, 50, 60, 70] four, eight, states = 
record_meter_states( - hass, zero, "sensor.test1", attributes, seq + hass, period0, "sensor.test1", attributes, seq ) hist = history.get_significant_states( - hass, zero - timedelta.resolution, eight + timedelta.resolution + hass, period0 - timedelta.resolution, eight + timedelta.resolution ) assert dict(states)["sensor.test1"] == dict(hist)["sensor.test1"] - recorder.do_adhoc_statistics(period="hourly", start=zero) + recorder.do_adhoc_statistics(start=period0) wait_recording_done(hass) - recorder.do_adhoc_statistics(period="hourly", start=zero + timedelta(hours=1)) + recorder.do_adhoc_statistics(start=period1) wait_recording_done(hass) - recorder.do_adhoc_statistics(period="hourly", start=zero + timedelta(hours=2)) + recorder.do_adhoc_statistics(start=period2) wait_recording_done(hass) statistic_ids = list_statistic_ids(hass) assert statistic_ids == [ {"statistic_id": "sensor.test1", "unit_of_measurement": native_unit} ] - stats = statistics_during_period(hass, zero) + stats = statistics_during_period(hass, period0, period="5minute") assert stats == { "sensor.test1": [ { "statistic_id": "sensor.test1", - "start": process_timestamp_to_utc_isoformat(zero), - "end": process_timestamp_to_utc_isoformat(zero + timedelta(hours=1)), + "start": process_timestamp_to_utc_isoformat(period0), + "end": process_timestamp_to_utc_isoformat(period0_end), "max": None, "mean": None, "min": None, @@ -602,8 +612,8 @@ def test_compile_hourly_sum_statistics_total_increasing( }, { "statistic_id": "sensor.test1", - "start": process_timestamp_to_utc_isoformat(zero + timedelta(hours=1)), - "end": process_timestamp_to_utc_isoformat(zero + timedelta(hours=2)), + "start": process_timestamp_to_utc_isoformat(period1), + "end": process_timestamp_to_utc_isoformat(period1_end), "max": None, "mean": None, "min": None, @@ -615,8 +625,8 @@ def test_compile_hourly_sum_statistics_total_increasing( }, { "statistic_id": "sensor.test1", - "start": process_timestamp_to_utc_isoformat(zero + timedelta(hours=2)), - "end": process_timestamp_to_utc_isoformat(zero + timedelta(hours=3)), + "start": process_timestamp_to_utc_isoformat(period2), + "end": process_timestamp_to_utc_isoformat(period2_end), "max": None, "mean": None, "min": None, @@ -642,7 +652,10 @@ def test_compile_hourly_sum_statistics_total_increasing_small_dip( hass_recorder, caplog, device_class, unit, native_unit, factor ): """Test small dips in sensor readings do not trigger a reset.""" - zero = dt_util.utcnow() + period0 = dt_util.utcnow() + period0_end = period1 = period0 + timedelta(minutes=5) + period1_end = period2 = period0 + timedelta(minutes=10) + period2_end = period0 + timedelta(minutes=15) hass = hass_recorder() recorder = hass.data[DATA_INSTANCE] setup_component(hass, "sensor", {}) @@ -654,16 +667,16 @@ def test_compile_hourly_sum_statistics_total_increasing_small_dip( seq = [10, 15, 20, 19, 30, 40, 39, 60, 70] four, eight, states = record_meter_states( - hass, zero, "sensor.test1", attributes, seq + hass, period0, "sensor.test1", attributes, seq ) hist = history.get_significant_states( - hass, zero - timedelta.resolution, eight + timedelta.resolution + hass, period0 - timedelta.resolution, eight + timedelta.resolution ) assert dict(states)["sensor.test1"] == dict(hist)["sensor.test1"] - recorder.do_adhoc_statistics(period="hourly", start=zero) + recorder.do_adhoc_statistics(start=period0) wait_recording_done(hass) - recorder.do_adhoc_statistics(period="hourly", start=zero + timedelta(hours=1)) + recorder.do_adhoc_statistics(start=period1) 
wait_recording_done(hass) assert ( "Entity sensor.test1 has state class total_increasing, but its state is not " @@ -671,7 +684,7 @@ def test_compile_hourly_sum_statistics_total_increasing_small_dip( "home-assistant/core/issues?q=is%3Aopen+is%3Aissue+label%3A%22integration%3A" "+recorder%22" ) not in caplog.text - recorder.do_adhoc_statistics(period="hourly", start=zero + timedelta(hours=2)) + recorder.do_adhoc_statistics(start=period2) wait_recording_done(hass) assert ( "Entity sensor.test1 has state class total_increasing, but its state is not " @@ -683,14 +696,14 @@ def test_compile_hourly_sum_statistics_total_increasing_small_dip( assert statistic_ids == [ {"statistic_id": "sensor.test1", "unit_of_measurement": native_unit} ] - stats = statistics_during_period(hass, zero) + stats = statistics_during_period(hass, period0, period="5minute") assert stats == { "sensor.test1": [ { "last_reset": None, "statistic_id": "sensor.test1", - "start": process_timestamp_to_utc_isoformat(zero), - "end": process_timestamp_to_utc_isoformat(zero + timedelta(hours=1)), + "start": process_timestamp_to_utc_isoformat(period0), + "end": process_timestamp_to_utc_isoformat(period0_end), "max": None, "mean": None, "min": None, @@ -702,8 +715,8 @@ def test_compile_hourly_sum_statistics_total_increasing_small_dip( { "last_reset": None, "statistic_id": "sensor.test1", - "start": process_timestamp_to_utc_isoformat(zero + timedelta(hours=1)), - "end": process_timestamp_to_utc_isoformat(zero + timedelta(hours=2)), + "start": process_timestamp_to_utc_isoformat(period1), + "end": process_timestamp_to_utc_isoformat(period1_end), "max": None, "mean": None, "min": None, @@ -715,8 +728,8 @@ def test_compile_hourly_sum_statistics_total_increasing_small_dip( { "last_reset": None, "statistic_id": "sensor.test1", - "start": process_timestamp_to_utc_isoformat(zero + timedelta(hours=2)), - "end": process_timestamp_to_utc_isoformat(zero + timedelta(hours=3)), + "start": process_timestamp_to_utc_isoformat(period2), + "end": process_timestamp_to_utc_isoformat(period2_end), "max": None, "mean": None, "min": None, @@ -732,7 +745,10 @@ def test_compile_hourly_sum_statistics_total_increasing_small_dip( def test_compile_hourly_energy_statistics_unsupported(hass_recorder, caplog): """Test compiling hourly statistics.""" - zero = dt_util.utcnow() + period0 = dt_util.utcnow() + period0_end = period1 = period0 + timedelta(minutes=5) + period1_end = period2 = period0 + timedelta(minutes=10) + period2_end = period0 + timedelta(minutes=15) hass = hass_recorder() recorder = hass.data[DATA_INSTANCE] setup_component(hass, "sensor", {}) @@ -755,41 +771,41 @@ def test_compile_hourly_energy_statistics_unsupported(hass_recorder, caplog): seq4 = [0, 0, 5, 10, 30, 50, 60, 80, 90] four, eight, states = record_meter_states( - hass, zero, "sensor.test1", sns1_attr, seq1 + hass, period0, "sensor.test1", sns1_attr, seq1 ) - _, _, _states = record_meter_states(hass, zero, "sensor.test2", sns2_attr, seq2) + _, _, _states = record_meter_states(hass, period0, "sensor.test2", sns2_attr, seq2) states = {**states, **_states} - _, _, _states = record_meter_states(hass, zero, "sensor.test3", sns3_attr, seq3) + _, _, _states = record_meter_states(hass, period0, "sensor.test3", sns3_attr, seq3) states = {**states, **_states} - _, _, _states = record_meter_states(hass, zero, "sensor.test4", sns4_attr, seq4) + _, _, _states = record_meter_states(hass, period0, "sensor.test4", sns4_attr, seq4) states = {**states, **_states} hist = history.get_significant_states( - hass, 
zero - timedelta.resolution, eight + timedelta.resolution + hass, period0 - timedelta.resolution, eight + timedelta.resolution ) assert dict(states)["sensor.test1"] == dict(hist)["sensor.test1"] - recorder.do_adhoc_statistics(period="hourly", start=zero) + recorder.do_adhoc_statistics(start=period0) wait_recording_done(hass) - recorder.do_adhoc_statistics(period="hourly", start=zero + timedelta(hours=1)) + recorder.do_adhoc_statistics(start=period1) wait_recording_done(hass) - recorder.do_adhoc_statistics(period="hourly", start=zero + timedelta(hours=2)) + recorder.do_adhoc_statistics(start=period2) wait_recording_done(hass) statistic_ids = list_statistic_ids(hass) assert statistic_ids == [ {"statistic_id": "sensor.test1", "unit_of_measurement": "kWh"} ] - stats = statistics_during_period(hass, zero) + stats = statistics_during_period(hass, period0, period="5minute") assert stats == { "sensor.test1": [ { "statistic_id": "sensor.test1", - "start": process_timestamp_to_utc_isoformat(zero), - "end": process_timestamp_to_utc_isoformat(zero + timedelta(hours=1)), + "start": process_timestamp_to_utc_isoformat(period0), + "end": process_timestamp_to_utc_isoformat(period0_end), "max": None, "mean": None, "min": None, - "last_reset": process_timestamp_to_utc_isoformat(zero), + "last_reset": process_timestamp_to_utc_isoformat(period0), "state": approx(20.0), "sum": approx(10.0), "sum_decrease": approx(0.0), @@ -797,8 +813,8 @@ def test_compile_hourly_energy_statistics_unsupported(hass_recorder, caplog): }, { "statistic_id": "sensor.test1", - "start": process_timestamp_to_utc_isoformat(zero + timedelta(hours=1)), - "end": process_timestamp_to_utc_isoformat(zero + timedelta(hours=2)), + "start": process_timestamp_to_utc_isoformat(period1), + "end": process_timestamp_to_utc_isoformat(period1_end), "max": None, "mean": None, "min": None, @@ -810,8 +826,8 @@ def test_compile_hourly_energy_statistics_unsupported(hass_recorder, caplog): }, { "statistic_id": "sensor.test1", - "start": process_timestamp_to_utc_isoformat(zero + timedelta(hours=2)), - "end": process_timestamp_to_utc_isoformat(zero + timedelta(hours=3)), + "start": process_timestamp_to_utc_isoformat(period2), + "end": process_timestamp_to_utc_isoformat(period2_end), "max": None, "mean": None, "min": None, @@ -828,7 +844,10 @@ def test_compile_hourly_energy_statistics_unsupported(hass_recorder, caplog): def test_compile_hourly_energy_statistics_multiple(hass_recorder, caplog): """Test compiling multiple hourly statistics.""" - zero = dt_util.utcnow() + period0 = dt_util.utcnow() + period0_end = period1 = period0 + timedelta(minutes=5) + period1_end = period2 = period0 + timedelta(minutes=10) + period2_end = period0 + timedelta(minutes=15) hass = hass_recorder() recorder = hass.data[DATA_INSTANCE] setup_component(hass, "sensor", {}) @@ -846,24 +865,24 @@ def test_compile_hourly_energy_statistics_multiple(hass_recorder, caplog): seq4 = [0, 0, 5, 10, 30, 50, 60, 80, 90] four, eight, states = record_meter_states( - hass, zero, "sensor.test1", sns1_attr, seq1 + hass, period0, "sensor.test1", sns1_attr, seq1 ) - _, _, _states = record_meter_states(hass, zero, "sensor.test2", sns2_attr, seq2) + _, _, _states = record_meter_states(hass, period0, "sensor.test2", sns2_attr, seq2) states = {**states, **_states} - _, _, _states = record_meter_states(hass, zero, "sensor.test3", sns3_attr, seq3) + _, _, _states = record_meter_states(hass, period0, "sensor.test3", sns3_attr, seq3) states = {**states, **_states} - _, _, _states = record_meter_states(hass, zero, 
"sensor.test4", sns4_attr, seq4) + _, _, _states = record_meter_states(hass, period0, "sensor.test4", sns4_attr, seq4) states = {**states, **_states} hist = history.get_significant_states( - hass, zero - timedelta.resolution, eight + timedelta.resolution + hass, period0 - timedelta.resolution, eight + timedelta.resolution ) assert dict(states)["sensor.test1"] == dict(hist)["sensor.test1"] - recorder.do_adhoc_statistics(period="hourly", start=zero) + recorder.do_adhoc_statistics(start=period0) wait_recording_done(hass) - recorder.do_adhoc_statistics(period="hourly", start=zero + timedelta(hours=1)) + recorder.do_adhoc_statistics(start=period1) wait_recording_done(hass) - recorder.do_adhoc_statistics(period="hourly", start=zero + timedelta(hours=2)) + recorder.do_adhoc_statistics(start=period2) wait_recording_done(hass) statistic_ids = list_statistic_ids(hass) assert statistic_ids == [ @@ -871,17 +890,17 @@ def test_compile_hourly_energy_statistics_multiple(hass_recorder, caplog): {"statistic_id": "sensor.test2", "unit_of_measurement": "kWh"}, {"statistic_id": "sensor.test3", "unit_of_measurement": "kWh"}, ] - stats = statistics_during_period(hass, zero) + stats = statistics_during_period(hass, period0, period="5minute") assert stats == { "sensor.test1": [ { "statistic_id": "sensor.test1", - "start": process_timestamp_to_utc_isoformat(zero), - "end": process_timestamp_to_utc_isoformat(zero + timedelta(hours=1)), + "start": process_timestamp_to_utc_isoformat(period0), + "end": process_timestamp_to_utc_isoformat(period0_end), "max": None, "mean": None, "min": None, - "last_reset": process_timestamp_to_utc_isoformat(zero), + "last_reset": process_timestamp_to_utc_isoformat(period0), "state": approx(20.0), "sum": approx(10.0), "sum_decrease": approx(0.0), @@ -889,8 +908,8 @@ def test_compile_hourly_energy_statistics_multiple(hass_recorder, caplog): }, { "statistic_id": "sensor.test1", - "start": process_timestamp_to_utc_isoformat(zero + timedelta(hours=1)), - "end": process_timestamp_to_utc_isoformat(zero + timedelta(hours=2)), + "start": process_timestamp_to_utc_isoformat(period1), + "end": process_timestamp_to_utc_isoformat(period1_end), "max": None, "mean": None, "min": None, @@ -902,8 +921,8 @@ def test_compile_hourly_energy_statistics_multiple(hass_recorder, caplog): }, { "statistic_id": "sensor.test1", - "start": process_timestamp_to_utc_isoformat(zero + timedelta(hours=2)), - "end": process_timestamp_to_utc_isoformat(zero + timedelta(hours=3)), + "start": process_timestamp_to_utc_isoformat(period2), + "end": process_timestamp_to_utc_isoformat(period2_end), "max": None, "mean": None, "min": None, @@ -917,12 +936,12 @@ def test_compile_hourly_energy_statistics_multiple(hass_recorder, caplog): "sensor.test2": [ { "statistic_id": "sensor.test2", - "start": process_timestamp_to_utc_isoformat(zero), - "end": process_timestamp_to_utc_isoformat(zero + timedelta(hours=1)), + "start": process_timestamp_to_utc_isoformat(period0), + "end": process_timestamp_to_utc_isoformat(period0_end), "max": None, "mean": None, "min": None, - "last_reset": process_timestamp_to_utc_isoformat(zero), + "last_reset": process_timestamp_to_utc_isoformat(period0), "state": approx(130.0), "sum": approx(20.0), "sum_decrease": approx(0.0), @@ -930,8 +949,8 @@ def test_compile_hourly_energy_statistics_multiple(hass_recorder, caplog): }, { "statistic_id": "sensor.test2", - "start": process_timestamp_to_utc_isoformat(zero + timedelta(hours=1)), - "end": process_timestamp_to_utc_isoformat(zero + timedelta(hours=2)), + "start": 
process_timestamp_to_utc_isoformat(period1), + "end": process_timestamp_to_utc_isoformat(period1_end), "max": None, "mean": None, "min": None, @@ -943,8 +962,8 @@ def test_compile_hourly_energy_statistics_multiple(hass_recorder, caplog): }, { "statistic_id": "sensor.test2", - "start": process_timestamp_to_utc_isoformat(zero + timedelta(hours=2)), - "end": process_timestamp_to_utc_isoformat(zero + timedelta(hours=3)), + "start": process_timestamp_to_utc_isoformat(period2), + "end": process_timestamp_to_utc_isoformat(period2_end), "max": None, "mean": None, "min": None, @@ -958,12 +977,12 @@ def test_compile_hourly_energy_statistics_multiple(hass_recorder, caplog): "sensor.test3": [ { "statistic_id": "sensor.test3", - "start": process_timestamp_to_utc_isoformat(zero), - "end": process_timestamp_to_utc_isoformat(zero + timedelta(hours=1)), + "start": process_timestamp_to_utc_isoformat(period0), + "end": process_timestamp_to_utc_isoformat(period0_end), "max": None, "mean": None, "min": None, - "last_reset": process_timestamp_to_utc_isoformat(zero), + "last_reset": process_timestamp_to_utc_isoformat(period0), "state": approx(5.0 / 1000), "sum": approx(5.0 / 1000), "sum_decrease": approx(0.0 / 1000), @@ -971,8 +990,8 @@ def test_compile_hourly_energy_statistics_multiple(hass_recorder, caplog): }, { "statistic_id": "sensor.test3", - "start": process_timestamp_to_utc_isoformat(zero + timedelta(hours=1)), - "end": process_timestamp_to_utc_isoformat(zero + timedelta(hours=2)), + "start": process_timestamp_to_utc_isoformat(period1), + "end": process_timestamp_to_utc_isoformat(period1_end), "max": None, "mean": None, "min": None, @@ -984,8 +1003,8 @@ def test_compile_hourly_energy_statistics_multiple(hass_recorder, caplog): }, { "statistic_id": "sensor.test3", - "start": process_timestamp_to_utc_isoformat(zero + timedelta(hours=2)), - "end": process_timestamp_to_utc_isoformat(zero + timedelta(hours=3)), + "start": process_timestamp_to_utc_isoformat(period2), + "end": process_timestamp_to_utc_isoformat(period2_end), "max": None, "mean": None, "min": None, @@ -1033,15 +1052,15 @@ def test_compile_hourly_statistics_unchanged( hist = history.get_significant_states(hass, zero, four) assert dict(states) == dict(hist) - recorder.do_adhoc_statistics(period="hourly", start=four) + recorder.do_adhoc_statistics(start=four) wait_recording_done(hass) - stats = statistics_during_period(hass, four) + stats = statistics_during_period(hass, four, period="5minute") assert stats == { "sensor.test1": [ { "statistic_id": "sensor.test1", "start": process_timestamp_to_utc_isoformat(four), - "end": process_timestamp_to_utc_isoformat(four + timedelta(hours=1)), + "end": process_timestamp_to_utc_isoformat(four + timedelta(minutes=5)), "mean": approx(value), "min": approx(value), "max": approx(value), @@ -1068,15 +1087,15 @@ def test_compile_hourly_statistics_partially_unavailable(hass_recorder, caplog): hist = history.get_significant_states(hass, zero, four) assert dict(states) == dict(hist) - recorder.do_adhoc_statistics(period="hourly", start=zero) + recorder.do_adhoc_statistics(start=zero) wait_recording_done(hass) - stats = statistics_during_period(hass, zero) + stats = statistics_during_period(hass, zero, period="5minute") assert stats == { "sensor.test1": [ { "statistic_id": "sensor.test1", "start": process_timestamp_to_utc_isoformat(zero), - "end": process_timestamp_to_utc_isoformat(zero + timedelta(hours=1)), + "end": process_timestamp_to_utc_isoformat(zero + timedelta(minutes=5)), "mean": approx(21.1864406779661), 
"min": approx(10.0), "max": approx(25.0), @@ -1128,15 +1147,15 @@ def test_compile_hourly_statistics_unavailable( hist = history.get_significant_states(hass, zero, four) assert dict(states) == dict(hist) - recorder.do_adhoc_statistics(period="hourly", start=four) + recorder.do_adhoc_statistics(start=four) wait_recording_done(hass) - stats = statistics_during_period(hass, four) + stats = statistics_during_period(hass, four, period="5minute") assert stats == { "sensor.test2": [ { "statistic_id": "sensor.test2", "start": process_timestamp_to_utc_isoformat(four), - "end": process_timestamp_to_utc_isoformat(four + timedelta(hours=1)), + "end": process_timestamp_to_utc_isoformat(four + timedelta(minutes=5)), "mean": approx(value), "min": approx(value), "max": approx(value), @@ -1161,7 +1180,7 @@ def test_compile_hourly_statistics_fails(hass_recorder, caplog): "homeassistant.components.sensor.recorder.compile_statistics", side_effect=Exception, ): - recorder.do_adhoc_statistics(period="hourly", start=zero) + recorder.do_adhoc_statistics(start=zero) wait_recording_done(hass) assert "Error while processing event StatisticsTask" in caplog.text @@ -1266,30 +1285,30 @@ def test_compile_hourly_statistics_changing_units_1( four, states = record_states(hass, zero, "sensor.test1", attributes) attributes["unit_of_measurement"] = "cats" four, _states = record_states( - hass, zero + timedelta(hours=1), "sensor.test1", attributes + hass, zero + timedelta(minutes=5), "sensor.test1", attributes ) states["sensor.test1"] += _states["sensor.test1"] four, _states = record_states( - hass, zero + timedelta(hours=2), "sensor.test1", attributes + hass, zero + timedelta(minutes=10), "sensor.test1", attributes ) states["sensor.test1"] += _states["sensor.test1"] hist = history.get_significant_states(hass, zero, four) assert dict(states) == dict(hist) - recorder.do_adhoc_statistics(period="hourly", start=zero) + recorder.do_adhoc_statistics(start=zero) wait_recording_done(hass) assert "does not match the unit of already compiled" not in caplog.text statistic_ids = list_statistic_ids(hass) assert statistic_ids == [ {"statistic_id": "sensor.test1", "unit_of_measurement": native_unit} ] - stats = statistics_during_period(hass, zero) + stats = statistics_during_period(hass, zero, period="5minute") assert stats == { "sensor.test1": [ { "statistic_id": "sensor.test1", "start": process_timestamp_to_utc_isoformat(zero), - "end": process_timestamp_to_utc_isoformat(zero + timedelta(hours=1)), + "end": process_timestamp_to_utc_isoformat(zero + timedelta(minutes=5)), "mean": approx(mean), "min": approx(min), "max": approx(max), @@ -1302,7 +1321,7 @@ def test_compile_hourly_statistics_changing_units_1( ] } - recorder.do_adhoc_statistics(period="hourly", start=zero + timedelta(hours=2)) + recorder.do_adhoc_statistics(start=zero + timedelta(minutes=10)) wait_recording_done(hass) assert ( "The unit of sensor.test1 (cats) does not match the unit of already compiled " @@ -1312,13 +1331,13 @@ def test_compile_hourly_statistics_changing_units_1( assert statistic_ids == [ {"statistic_id": "sensor.test1", "unit_of_measurement": native_unit} ] - stats = statistics_during_period(hass, zero) + stats = statistics_during_period(hass, zero, period="5minute") assert stats == { "sensor.test1": [ { "statistic_id": "sensor.test1", "start": process_timestamp_to_utc_isoformat(zero), - "end": process_timestamp_to_utc_isoformat(zero + timedelta(hours=1)), + "end": process_timestamp_to_utc_isoformat(zero + timedelta(minutes=5)), "mean": approx(mean), "min": 
approx(min), "max": approx(max), @@ -1358,13 +1377,13 @@ def test_compile_hourly_statistics_changing_units_2( four, states = record_states(hass, zero, "sensor.test1", attributes) attributes["unit_of_measurement"] = "cats" four, _states = record_states( - hass, zero + timedelta(hours=1), "sensor.test1", attributes + hass, zero + timedelta(minutes=5), "sensor.test1", attributes ) states["sensor.test1"] += _states["sensor.test1"] hist = history.get_significant_states(hass, zero, four) assert dict(states) == dict(hist) - recorder.do_adhoc_statistics(period="hourly", start=zero + timedelta(minutes=30)) + recorder.do_adhoc_statistics(start=zero + timedelta(seconds=30 * 5)) wait_recording_done(hass) assert "The unit of sensor.test1 is changing" in caplog.text assert "and matches the unit of already compiled statistics" not in caplog.text @@ -1372,7 +1391,7 @@ def test_compile_hourly_statistics_changing_units_2( assert statistic_ids == [ {"statistic_id": "sensor.test1", "unit_of_measurement": "cats"} ] - stats = statistics_during_period(hass, zero) + stats = statistics_during_period(hass, zero, period="5minute") assert stats == {} assert "Error while processing event StatisticsTask" not in caplog.text @@ -1402,31 +1421,31 @@ def test_compile_hourly_statistics_changing_units_3( } four, states = record_states(hass, zero, "sensor.test1", attributes) four, _states = record_states( - hass, zero + timedelta(hours=1), "sensor.test1", attributes + hass, zero + timedelta(minutes=5), "sensor.test1", attributes ) states["sensor.test1"] += _states["sensor.test1"] attributes["unit_of_measurement"] = "cats" four, _states = record_states( - hass, zero + timedelta(hours=2), "sensor.test1", attributes + hass, zero + timedelta(minutes=10), "sensor.test1", attributes ) states["sensor.test1"] += _states["sensor.test1"] hist = history.get_significant_states(hass, zero, four) assert dict(states) == dict(hist) - recorder.do_adhoc_statistics(period="hourly", start=zero) + recorder.do_adhoc_statistics(start=zero) wait_recording_done(hass) assert "does not match the unit of already compiled" not in caplog.text statistic_ids = list_statistic_ids(hass) assert statistic_ids == [ {"statistic_id": "sensor.test1", "unit_of_measurement": native_unit} ] - stats = statistics_during_period(hass, zero) + stats = statistics_during_period(hass, zero, period="5minute") assert stats == { "sensor.test1": [ { "statistic_id": "sensor.test1", "start": process_timestamp_to_utc_isoformat(zero), - "end": process_timestamp_to_utc_isoformat(zero + timedelta(hours=1)), + "end": process_timestamp_to_utc_isoformat(zero + timedelta(minutes=5)), "mean": approx(mean), "min": approx(min), "max": approx(max), @@ -1439,7 +1458,7 @@ def test_compile_hourly_statistics_changing_units_3( ] } - recorder.do_adhoc_statistics(period="hourly", start=zero + timedelta(hours=2)) + recorder.do_adhoc_statistics(start=zero + timedelta(minutes=10)) wait_recording_done(hass) assert "The unit of sensor.test1 is changing" in caplog.text assert f"matches the unit of already compiled statistics ({unit})" in caplog.text @@ -1447,13 +1466,13 @@ def test_compile_hourly_statistics_changing_units_3( assert statistic_ids == [ {"statistic_id": "sensor.test1", "unit_of_measurement": native_unit} ] - stats = statistics_during_period(hass, zero) + stats = statistics_during_period(hass, zero, period="5minute") assert stats == { "sensor.test1": [ { "statistic_id": "sensor.test1", "start": process_timestamp_to_utc_isoformat(zero), - "end": process_timestamp_to_utc_isoformat(zero + 
timedelta(hours=1)), + "end": process_timestamp_to_utc_isoformat(zero + timedelta(minutes=5)), "mean": approx(mean), "min": approx(min), "max": approx(max), @@ -1478,7 +1497,9 @@ def test_compile_hourly_statistics_changing_statistics( hass_recorder, caplog, device_class, unit, native_unit, mean, min, max ): """Test compiling hourly statistics where units change during an hour.""" - zero = dt_util.utcnow() + period0 = dt_util.utcnow() + period0_end = period1 = period0 + timedelta(minutes=5) + period1_end = period0 + timedelta(minutes=10) hass = hass_recorder() recorder = hass.data[DATA_INSTANCE] setup_component(hass, "sensor", {}) @@ -1492,8 +1513,8 @@ def test_compile_hourly_statistics_changing_statistics( "state_class": "total_increasing", "unit_of_measurement": unit, } - four, states = record_states(hass, zero, "sensor.test1", attributes_1) - recorder.do_adhoc_statistics(period="hourly", start=zero) + four, states = record_states(hass, period0, "sensor.test1", attributes_1) + recorder.do_adhoc_statistics(start=period0) wait_recording_done(hass) statistic_ids = list_statistic_ids(hass) assert statistic_ids == [ @@ -1508,14 +1529,12 @@ def test_compile_hourly_statistics_changing_statistics( } # Add more states, with changed state class - four, _states = record_states( - hass, zero + timedelta(hours=1), "sensor.test1", attributes_2 - ) + four, _states = record_states(hass, period1, "sensor.test1", attributes_2) states["sensor.test1"] += _states["sensor.test1"] - hist = history.get_significant_states(hass, zero, four) + hist = history.get_significant_states(hass, period0, four) assert dict(states) == dict(hist) - recorder.do_adhoc_statistics(period="hourly", start=zero + timedelta(hours=1)) + recorder.do_adhoc_statistics(start=period1) wait_recording_done(hass) statistic_ids = list_statistic_ids(hass) assert statistic_ids == [ @@ -1528,13 +1547,13 @@ def test_compile_hourly_statistics_changing_statistics( "statistic_id": "sensor.test1", "unit_of_measurement": None, } - stats = statistics_during_period(hass, zero) + stats = statistics_during_period(hass, period0, period="5minute") assert stats == { "sensor.test1": [ { "statistic_id": "sensor.test1", - "start": process_timestamp_to_utc_isoformat(zero), - "end": process_timestamp_to_utc_isoformat(zero + timedelta(hours=1)), + "start": process_timestamp_to_utc_isoformat(period0), + "end": process_timestamp_to_utc_isoformat(period0_end), "mean": approx(mean), "min": approx(min), "max": approx(max), @@ -1546,8 +1565,8 @@ def test_compile_hourly_statistics_changing_statistics( }, { "statistic_id": "sensor.test1", - "start": process_timestamp_to_utc_isoformat(zero + timedelta(hours=1)), - "end": process_timestamp_to_utc_isoformat(zero + timedelta(hours=2)), + "start": process_timestamp_to_utc_isoformat(period1), + "end": process_timestamp_to_utc_isoformat(period1_end), "mean": None, "min": None, "max": None, @@ -1563,12 +1582,288 @@ def test_compile_hourly_statistics_changing_statistics( assert "Error while processing event StatisticsTask" not in caplog.text -def record_states(hass, zero, entity_id, attributes): +def test_compile_statistics_hourly_summary(hass_recorder, caplog): + """Test compiling hourly statistics.""" + zero = dt_util.utcnow() + zero = zero.replace(minute=0, second=0, microsecond=0) + # Travel to the future, recorder gets confused otherwise because states are added + # before the start of the recorder_run + zero += timedelta(hours=1) + hass = hass_recorder() + recorder = hass.data[DATA_INSTANCE] + setup_component(hass, "sensor", 
{}) + attributes = { + "device_class": None, + "state_class": "measurement", + "unit_of_measurement": "%", + } + + sum_attributes = { + "device_class": None, + "state_class": "total", + "unit_of_measurement": "EUR", + } + + def _weighted_average(seq, i, last_state): + total = 0 + duration = 0 + durations = [50, 200, 45] + if i > 0: + total += last_state * 5 + duration += 5 + for j, dur in enumerate(durations): + total += seq[j] * dur + duration += dur + return total / duration + + def _min(seq, last_state): + if last_state is None: + return min(seq) + return min([*seq, last_state]) + + def _max(seq, last_state): + if last_state is None: + return max(seq) + return max([*seq, last_state]) + + def _sum(seq, last_state, last_sum): + if last_state is None: + return seq[-1] - seq[0] + return last_sum[-1] + seq[-1] - last_state + + # Generate states for two hours + states = { + "sensor.test1": [], + "sensor.test2": [], + "sensor.test3": [], + "sensor.test4": [], + } + expected_minima = {"sensor.test1": [], "sensor.test2": [], "sensor.test3": []} + expected_maxima = {"sensor.test1": [], "sensor.test2": [], "sensor.test3": []} + expected_averages = {"sensor.test1": [], "sensor.test2": [], "sensor.test3": []} + expected_states = {"sensor.test4": []} + expected_sums = {"sensor.test4": []} + expected_decreases = {"sensor.test4": []} + expected_increases = {"sensor.test4": []} + last_states = { + "sensor.test1": None, + "sensor.test2": None, + "sensor.test3": None, + "sensor.test4": None, + } + start = zero + for i in range(24): + seq = [-10, 15, 30] + # test1 has same value in every period + four, _states = record_states(hass, start, "sensor.test1", attributes, seq) + states["sensor.test1"] += _states["sensor.test1"] + last_state = last_states["sensor.test1"] + expected_minima["sensor.test1"].append(_min(seq, last_state)) + expected_maxima["sensor.test1"].append(_max(seq, last_state)) + expected_averages["sensor.test1"].append(_weighted_average(seq, i, last_state)) + last_states["sensor.test1"] = seq[-1] + # test2 values change: min/max at the last state + seq = [-10 * (i + 1), 15 * (i + 1), 30 * (i + 1)] + four, _states = record_states(hass, start, "sensor.test2", attributes, seq) + states["sensor.test2"] += _states["sensor.test2"] + last_state = last_states["sensor.test2"] + expected_minima["sensor.test2"].append(_min(seq, last_state)) + expected_maxima["sensor.test2"].append(_max(seq, last_state)) + expected_averages["sensor.test2"].append(_weighted_average(seq, i, last_state)) + last_states["sensor.test2"] = seq[-1] + # test3 values change: min/max at the first state + seq = [-10 * (23 - i + 1), 15 * (23 - i + 1), 30 * (23 - i + 1)] + four, _states = record_states(hass, start, "sensor.test3", attributes, seq) + states["sensor.test3"] += _states["sensor.test3"] + last_state = last_states["sensor.test3"] + expected_minima["sensor.test3"].append(_min(seq, last_state)) + expected_maxima["sensor.test3"].append(_max(seq, last_state)) + expected_averages["sensor.test3"].append(_weighted_average(seq, i, last_state)) + last_states["sensor.test3"] = seq[-1] + # test4 values grow + seq = [i, i + 0.5, i + 0.75] + start_meter = start + for j in range(len(seq)): + _states = record_meter_state( + hass, start_meter, "sensor.test4", sum_attributes, seq[j : j + 1] + ) + start_meter = start + timedelta(minutes=1) + states["sensor.test4"] += _states["sensor.test4"] + last_state = last_states["sensor.test4"] + expected_states["sensor.test4"].append(seq[-1]) + expected_sums["sensor.test4"].append( + _sum(seq, 
last_state, expected_sums["sensor.test4"]) + ) + expected_decreases["sensor.test4"].append(0) + expected_increases["sensor.test4"].append( + _sum(seq, last_state, expected_increases["sensor.test4"]) + ) + last_states["sensor.test4"] = seq[-1] + + start += timedelta(minutes=5) + hist = history.get_significant_states( + hass, zero - timedelta.resolution, four, significant_changes_only=False + ) + assert dict(states) == dict(hist) + wait_recording_done(hass) + + # Generate 5-minute statistics for two hours + start = zero + for i in range(24): + recorder.do_adhoc_statistics(start=start) + wait_recording_done(hass) + start += timedelta(minutes=5) + + statistic_ids = list_statistic_ids(hass) + assert statistic_ids == [ + {"statistic_id": "sensor.test1", "unit_of_measurement": "%"}, + {"statistic_id": "sensor.test2", "unit_of_measurement": "%"}, + {"statistic_id": "sensor.test3", "unit_of_measurement": "%"}, + {"statistic_id": "sensor.test4", "unit_of_measurement": "EUR"}, + ] + + stats = statistics_during_period(hass, zero, period="5minute") + expected_stats = { + "sensor.test1": [], + "sensor.test2": [], + "sensor.test3": [], + "sensor.test4": [], + } + start = zero + end = zero + timedelta(minutes=5) + for i in range(24): + for entity_id in [ + "sensor.test1", + "sensor.test2", + "sensor.test3", + "sensor.test4", + ]: + expected_average = ( + expected_averages[entity_id][i] + if entity_id in expected_averages + else None + ) + expected_minimum = ( + expected_minima[entity_id][i] if entity_id in expected_minima else None + ) + expected_maximum = ( + expected_maxima[entity_id][i] if entity_id in expected_maxima else None + ) + expected_state = ( + expected_states[entity_id][i] if entity_id in expected_states else None + ) + expected_sum = ( + expected_sums[entity_id][i] if entity_id in expected_sums else None + ) + expected_decrease = ( + expected_decreases[entity_id][i] + if entity_id in expected_decreases + else None + ) + expected_increase = ( + expected_increases[entity_id][i] + if entity_id in expected_increases + else None + ) + expected_stats[entity_id].append( + { + "statistic_id": entity_id, + "start": process_timestamp_to_utc_isoformat(start), + "end": process_timestamp_to_utc_isoformat(end), + "mean": approx(expected_average), + "min": approx(expected_minimum), + "max": approx(expected_maximum), + "last_reset": None, + "state": expected_state, + "sum": expected_sum, + "sum_decrease": expected_decrease, + "sum_increase": expected_increase, + } + ) + start += timedelta(minutes=5) + end += timedelta(minutes=5) + assert stats == expected_stats + + stats = statistics_during_period(hass, zero, period="hour") + expected_stats = { + "sensor.test1": [], + "sensor.test2": [], + "sensor.test3": [], + "sensor.test4": [], + } + start = zero + end = zero + timedelta(hours=1) + for i in range(2): + for entity_id in [ + "sensor.test1", + "sensor.test2", + "sensor.test3", + "sensor.test4", + ]: + expected_average = ( + mean(expected_averages[entity_id][i * 12 : (i + 1) * 12]) + if entity_id in expected_averages + else None + ) + expected_minimum = ( + min(expected_minima[entity_id][i * 12 : (i + 1) * 12]) + if entity_id in expected_minima + else None + ) + expected_maximum = ( + max(expected_maxima[entity_id][i * 12 : (i + 1) * 12]) + if entity_id in expected_maxima + else None + ) + expected_state = ( + expected_states[entity_id][(i + 1) * 12 - 1] + if entity_id in expected_states + else None + ) + expected_sum = ( + expected_sums[entity_id][(i + 1) * 12 - 1] + if entity_id in expected_sums + else 
None
+            )
+            expected_decrease = (
+                expected_decreases[entity_id][(i + 1) * 12 - 1]
+                if entity_id in expected_decreases
+                else None
+            )
+            expected_increase = (
+                expected_increases[entity_id][(i + 1) * 12 - 1]
+                if entity_id in expected_increases
+                else None
+            )
+            expected_stats[entity_id].append(
+                {
+                    "statistic_id": entity_id,
+                    "start": process_timestamp_to_utc_isoformat(start),
+                    "end": process_timestamp_to_utc_isoformat(end),
+                    "mean": approx(expected_average),
+                    "min": approx(expected_minimum),
+                    "max": approx(expected_maximum),
+                    "last_reset": None,
+                    "state": expected_state,
+                    "sum": expected_sum,
+                    "sum_decrease": expected_decrease,
+                    "sum_increase": expected_increase,
+                }
+            )
+        start += timedelta(hours=1)
+        end += timedelta(hours=1)
+    assert stats == expected_stats
+    assert "Error while processing event StatisticsTask" not in caplog.text
+
+
+def record_states(hass, zero, entity_id, attributes, seq=None):
     """Record some test states.
 
     We inject a bunch of state updates for measurement sensors.
     """
     attributes = dict(attributes)
+    if seq is None:
+        seq = [-10, 15, 30]
 
     def set_state(entity_id, state, **kwargs):
         """Set the state."""
@@ -1576,20 +1871,26 @@ def record_states(hass, zero, entity_id, attributes):
         wait_recording_done(hass)
         return hass.states.get(entity_id)
 
-    one = zero + timedelta(minutes=1)
-    two = one + timedelta(minutes=10)
-    three = two + timedelta(minutes=40)
-    four = three + timedelta(minutes=10)
+    one = zero + timedelta(seconds=1 * 5)
+    two = one + timedelta(seconds=10 * 5)
+    three = two + timedelta(seconds=40 * 5)
+    four = three + timedelta(seconds=10 * 5)
 
     states = {entity_id: []}
     with patch("homeassistant.components.recorder.dt_util.utcnow", return_value=one):
-        states[entity_id].append(set_state(entity_id, "-10", attributes=attributes))
+        states[entity_id].append(
+            set_state(entity_id, str(seq[0]), attributes=attributes)
+        )
 
     with patch("homeassistant.components.recorder.dt_util.utcnow", return_value=two):
-        states[entity_id].append(set_state(entity_id, "15", attributes=attributes))
+        states[entity_id].append(
+            set_state(entity_id, str(seq[1]), attributes=attributes)
+        )
 
     with patch("homeassistant.components.recorder.dt_util.utcnow", return_value=three):
-        states[entity_id].append(set_state(entity_id, "30", attributes=attributes))
+        states[entity_id].append(
+            set_state(entity_id, str(seq[2]), attributes=attributes)
+        )
 
     return four, states
 
@@ -1606,14 +1907,14 @@ def record_meter_states(hass, zero, entity_id, _attributes, seq):
         wait_recording_done(hass)
         return hass.states.get(entity_id)
 
-    one = zero + timedelta(minutes=15)
-    two = one + timedelta(minutes=30)
-    three = two + timedelta(minutes=15)
-    four = three + timedelta(minutes=15)
-    five = four + timedelta(minutes=30)
-    six = five + timedelta(minutes=15)
-    seven = six + timedelta(minutes=15)
-    eight = seven + timedelta(minutes=30)
+    one = zero + timedelta(seconds=15 * 5)  # 00:01:15
+    two = one + timedelta(seconds=30 * 5)  # 00:03:45
+    three = two + timedelta(seconds=15 * 5)  # 00:05:00
+    four = three + timedelta(seconds=15 * 5)  # 00:06:15
+    five = four + timedelta(seconds=30 * 5)  # 00:08:45
+    six = five + timedelta(seconds=15 * 5)  # 00:10:00
+    seven = six + timedelta(seconds=15 * 5)  # 00:11:45
+    eight = seven + timedelta(seconds=30 * 5)  # 00:13:45
 
     attributes = dict(_attributes)
     if "last_reset" in _attributes:
@@ -1667,7 +1968,8 @@ def record_meter_state(hass, zero, entity_id, _attributes, seq):
         return hass.states.get(entity_id)
 
     attributes = dict(_attributes)
-    attributes["last_reset"] = zero.isoformat()
+    if "last_reset" in _attributes:
+        attributes["last_reset"] = zero.isoformat()
 
     states = {entity_id: []}
     with patch("homeassistant.components.recorder.dt_util.utcnow", return_value=zero):
@@ -1688,10 +1990,10 @@ def record_states_partially_unavailable(hass, zero, entity_id, attributes):
         wait_recording_done(hass)
         return hass.states.get(entity_id)
 
-    one = zero + timedelta(minutes=1)
-    two = one + timedelta(minutes=15)
-    three = two + timedelta(minutes=30)
-    four = three + timedelta(minutes=15)
+    one = zero + timedelta(seconds=1 * 5)
+    two = one + timedelta(seconds=15 * 5)
+    three = two + timedelta(seconds=30 * 5)
+    four = three + timedelta(seconds=15 * 5)
 
     states = {entity_id: []}
     with patch("homeassistant.components.recorder.dt_util.utcnow", return_value=one):
diff --git a/tests/conftest.py b/tests/conftest.py
index 0b3db0bf832..9ee6bbc680b 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -636,9 +636,9 @@ def enable_statistics():
 def hass_recorder(enable_statistics, hass_storage):
     """Home Assistant fixture with in-memory recorder."""
     hass = get_test_home_assistant()
-    stats = recorder.Recorder.async_hourly_statistics if enable_statistics else None
+    stats = recorder.Recorder.async_periodic_statistics if enable_statistics else None
     with patch(
-        "homeassistant.components.recorder.Recorder.async_hourly_statistics",
+        "homeassistant.components.recorder.Recorder.async_periodic_statistics",
         side_effect=stats,
         autospec=True,
     ):