Compile statistics for energy sensors (#50829)

* Compile statistics for energy sensors

* Update tests

* Rename abs_value to state

* Tweak

* Recreate statistics table

* Pylint

* Try to fix test

* Fix statistics for multiple energy sensors

* Fix energy statistics when last_reset is not set
pull/49640/head
Erik Montnemery 2021-05-20 13:05:15 +02:00 committed by GitHub
parent aaae4cfc8f
commit e16a8063a5
12 changed files with 437 additions and 112 deletions

View File

@@ -63,7 +63,6 @@ from .util import (
_LOGGER = logging.getLogger(__name__)
SERVICE_PURGE = "purge"
SERVICE_STATISTICS = "statistics"
SERVICE_ENABLE = "enable"
SERVICE_DISABLE = "disable"

View File

@@ -11,7 +11,7 @@ from sqlalchemy.exc import (
)
from sqlalchemy.schema import AddConstraint, DropConstraint
from .models import SCHEMA_VERSION, TABLE_STATES, Base, SchemaChanges
from .models import SCHEMA_VERSION, TABLE_STATES, Base, SchemaChanges, Statistics
from .util import session_scope
_LOGGER = logging.getLogger(__name__)
@@ -415,6 +415,11 @@ def _apply_update(engine, session, new_version, old_version):
)
elif new_version == 14:
_modify_columns(connection, engine, "events", ["event_type VARCHAR(64)"])
elif new_version == 15:
if sqlalchemy.inspect(engine).has_table(Statistics.__tablename__):
# Recreate the statistics table
Statistics.__table__.drop(engine)
Statistics.__table__.create(engine)
else:
raise ValueError(f"No schema migration defined for version {new_version}")
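
Dropping and recreating the statistics table is acceptable here because statistics are derived data: they can always be recompiled from the recorded states. A minimal sketch of the same guard-then-recreate pattern, assuming any SQLAlchemy declarative model (the helper name _recreate_table is hypothetical, not part of this commit):

import sqlalchemy

def _recreate_table(engine, model):
    """Drop the model's table if it exists, then create it fresh.

    Any existing rows are lost, which is fine for derived data.
    """
    if sqlalchemy.inspect(engine).has_table(model.__tablename__):
        model.__table__.drop(engine)
    model.__table__.create(engine)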

View File

@@ -28,7 +28,7 @@ import homeassistant.util.dt as dt_util
# pylint: disable=invalid-name
Base = declarative_base()
SCHEMA_VERSION = 14
SCHEMA_VERSION = 15
_LOGGER = logging.getLogger(__name__)
@@ -38,7 +38,6 @@ TABLE_EVENTS = "events"
TABLE_STATES = "states"
TABLE_RECORDER_RUNS = "recorder_runs"
TABLE_SCHEMA_CHANGES = "schema_changes"
TABLE_STATISTICS = "statistics"
ALL_TABLES = [
@@ -223,6 +222,9 @@ class Statistics(Base): # type: ignore
mean = Column(Float())
min = Column(Float())
max = Column(Float())
last_reset = Column(DATETIME_TYPE)
state = Column(Float())
sum = Column(Float())
__table_args__ = (
# Used for fetching statistics for a certain entity at a specific time
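
The three new columns complement the existing measurement statistics: last_reset records when a metering sensor last started counting from its base value, state is the last raw sensor state seen in the period, and sum is the growth accumulated across resets. A sketch of the value columns, assuming the declarative Base and DATETIME_TYPE defined in this module:

from sqlalchemy import Column, Float

class Statistics(Base):  # sketch: value columns only
    ...
    mean = Column(Float())              # measurement sensors (e.g. temperature)
    min = Column(Float())
    max = Column(Float())
    last_reset = Column(DATETIME_TYPE)  # metering sensors (e.g. energy)
    state = Column(Float())             # last state seen in the period
    sum = Column(Float())               # growth accumulated across resets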

View File

@@ -25,6 +25,9 @@ QUERY_STATISTICS = [
Statistics.mean,
Statistics.min,
Statistics.max,
Statistics.last_reset,
Statistics.state,
Statistics.sum,
]
STATISTICS_BAKERY = "recorder_statistics_bakery"
@@ -97,16 +100,38 @@ def statistics_during_period(hass, start_time, end_time=None, statistic_id=None)
statistic_ids = [statistic_id] if statistic_id is not None else None
        return _sorted_statistics_to_dict(
            hass, session, stats, start_time, statistic_ids
        )
        return _sorted_statistics_to_dict(stats, statistic_ids)
def get_last_statistics(hass, number_of_stats, statistic_id=None):
"""Return the last number_of_stats statistics."""
with session_scope(hass=hass) as session:
baked_query = hass.data[STATISTICS_BAKERY](
lambda session: session.query(*QUERY_STATISTICS)
)
if statistic_id is not None:
baked_query += lambda q: q.filter_by(statistic_id=bindparam("statistic_id"))
baked_query += lambda q: q.order_by(
Statistics.statistic_id, Statistics.start.desc()
)
baked_query += lambda q: q.limit(bindparam("number_of_stats"))
stats = execute(
baked_query(session).params(
number_of_stats=number_of_stats, statistic_id=statistic_id
)
)
statistic_ids = [statistic_id] if statistic_id is not None else None
return _sorted_statistics_to_dict(stats, statistic_ids)
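
The sensor statistics compiler below uses this helper to seed its running totals. A usage sketch (the entity id is illustrative):

from homeassistant.components.recorder.statistics import get_last_statistics

# Returns a dict keyed by statistic_id; each value is a list of rows,
# newest first given the descending order on Statistics.start.
last_stats = get_last_statistics(hass, 1, "sensor.energy_meter")
if "sensor.energy_meter" in last_stats:
    row = last_stats["sensor.energy_meter"][0]
    prev_state, prev_sum = row["state"], row["sum"]
    prev_reset = row["last_reset"]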
def _sorted_statistics_to_dict(
hass,
session,
stats,
start_time,
statistic_ids,
):
"""Convert SQL results into JSON friendly data structure."""
@@ -130,6 +155,9 @@ def _sorted_statistics_to_dict(
"mean": db_state.mean,
"min": db_state.min,
"max": db_state.max,
"last_reset": _process_timestamp_to_utc_isoformat(db_state.last_reset),
"state": db_state.state,
"sum": db_state.sum,
}
for db_state in group
)
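
For reference, the JSON-friendly structure these helpers build is shaped like the test expectations later in this diff (values here are illustrative):

{
    "sensor.test1": [
        {
            "statistic_id": "sensor.test1",
            "start": "2021-05-20T10:00:00+00:00",  # UTC isoformat
            "mean": None, "min": None, "max": None,  # measurement stats
            "last_reset": "2021-05-20T10:00:00+00:00",
            "state": 20.0,  # last state seen in the period
            "sum": 10.0,    # accumulated growth
        }
    ]
}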

View File

@@ -2,16 +2,18 @@
from __future__ import annotations
import datetime
import statistics
import itertools
from statistics import fmean
from homeassistant.components.recorder import history
from homeassistant.components.recorder import history, statistics
from homeassistant.components.sensor import ATTR_STATE_CLASS, STATE_CLASS_MEASUREMENT
from homeassistant.const import ATTR_DEVICE_CLASS
from homeassistant.core import HomeAssistant
import homeassistant.util.dt as dt_util
from . import DOMAIN
DEVICE_CLASS_STATISTICS = {"temperature": {"mean", "min", "max"}}
DEVICE_CLASS_STATISTICS = {"temperature": {"mean", "min", "max"}, "energy": {"sum"}}
def _get_entities(hass: HomeAssistant) -> list[tuple[str, str]]:
@@ -50,7 +52,7 @@ def compile_statistics(
# Get history between start and end
history_list = history.get_significant_states( # type: ignore
hass, start, end, [i[0] for i in entities]
hass, start - datetime.timedelta.resolution, end, [i[0] for i in entities]
)
for entity_id, device_class in entities:
@@ -60,7 +62,9 @@
continue
entity_history = history_list[entity_id]
fstates = [float(el.state) for el in entity_history if _is_number(el.state)]
fstates = [
(float(el.state), el) for el in entity_history if _is_number(el.state)
]
if not fstates:
continue
@@ -69,13 +73,49 @@
# Make calculations
if "max" in wanted_statistics:
result[entity_id]["max"] = max(fstates)
result[entity_id]["max"] = max(*itertools.islice(zip(*fstates), 1))
if "min" in wanted_statistics:
result[entity_id]["min"] = min(fstates)
result[entity_id]["min"] = min(*itertools.islice(zip(*fstates), 1))
# Note: The average calculation will be incorrect for unevenly spaced readings,
# this needs to be improved by weighting with time between measurements
if "mean" in wanted_statistics:
result[entity_id]["mean"] = statistics.fmean(fstates)
result[entity_id]["mean"] = fmean(*itertools.islice(zip(*fstates), 1))
if "sum" in wanted_statistics:
last_reset = old_last_reset = None
new_state = old_state = None
_sum = 0
last_stats = statistics.get_last_statistics(hass, 1, entity_id) # type: ignore
if entity_id in last_stats:
# We have compiled history for this sensor before, use that as a starting point
last_reset = old_last_reset = last_stats[entity_id][0]["last_reset"]
new_state = old_state = last_stats[entity_id][0]["state"]
_sum = last_stats[entity_id][0]["sum"]
for fstate, state in fstates:
if "last_reset" not in state.attributes:
continue
if (last_reset := state.attributes["last_reset"]) != old_last_reset:
# The sensor has been reset, update the sum
if old_state is not None:
_sum += new_state - old_state
# ..and update the starting point
new_state = fstate
old_last_reset = last_reset
old_state = new_state
else:
new_state = fstate
if last_reset is None or new_state is None or old_state is None:
# No valid updates
result.pop(entity_id)
continue
# Update the sum with the last state
_sum += new_state - old_state
result[entity_id]["last_reset"] = dt_util.parse_datetime(last_reset)
result[entity_id]["sum"] = _sum
result[entity_id]["state"] = new_state
return result
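
To make the sum bookkeeping concrete, here is a standalone sketch of the algorithm above, with a worked example that reproduces sensor.test1's first two hours from the tests later in this diff (compile_sum and the prev tuple are illustrative; the real code reads its starting point from get_last_statistics and the reset marker from state attributes):

def compile_sum(readings, prev=None):
    """Sketch: readings is a list of (value, last_reset) tuples for one
    period; prev is an optional (state, sum, last_reset) carried over
    from the previous period. Returns (state, sum, last_reset) or None.
    """
    last_reset = old_last_reset = prev[2] if prev else None
    new_state = old_state = prev[0] if prev else None
    _sum = prev[1] if prev else 0
    for value, reset in readings:
        last_reset = reset
        if reset != old_last_reset:
            # The meter was reset: bank the growth seen so far and
            # restart the bookkeeping from the new reading.
            if old_state is not None:
                _sum += new_state - old_state
            new_state = value
            old_last_reset = reset
            old_state = new_state
        else:
            new_state = value
    if last_reset is None or new_state is None or old_state is None:
        return None  # no valid readings in this period
    _sum += new_state - old_state  # growth since the last reset
    return new_state, _sum, last_reset

# Hour 0 of sensor.test1: readings 10, 15, 20, all tagged last_reset "zero".
assert compile_sum([(10, "zero"), (15, "zero"), (20, "zero")]) == (20, 10, "zero")
# Hour 1: 10 (still "zero"), then a reset to 30 at "four", then 40.
assert compile_sum(
    [(10, "zero"), (30, "four"), (40, "four")], prev=(20, 10, "zero")
) == (40, 10, "four")

A reading that drops without a new last_reset is treated as the meter running backwards, which is why sensor.test2's expected sums in the tests below go negative: its reading falls from 130 to 0 before the new reset is seen.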

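On the in-code note about unevenly spaced readings: the committed code uses a plain fmean, so readings that arrive in bursts skew the hourly mean. A time-weighted variant along the lines the comment suggests could look like this sketch (the weighting scheme is an assumption, not part of this commit; assumes at least one sample with ascending timestamps):

from __future__ import annotations

from datetime import datetime, timedelta

def time_weighted_mean(samples: list[tuple[float, datetime]], end: datetime) -> float:
    """Weight each value by how long it remained the current state."""
    total = 0.0
    duration = timedelta(0)
    # Pair each sample with the next sample's timestamp; the last sample
    # is weighted until the end of the period.
    for (value, start), (_, next_start) in zip(samples, samples[1:] + [(0.0, end)]):
        dt = next_start - start
        total += value * dt.total_seconds()
        duration += dt
    return total / duration.total_seconds()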
View File

@@ -2,28 +2,8 @@
import pytest
from homeassistant.components import history
from homeassistant.components.recorder.const import DATA_INSTANCE
from homeassistant.setup import setup_component
from tests.common import get_test_home_assistant, init_recorder_component
@pytest.fixture
def hass_recorder():
"""Home Assistant fixture with in-memory recorder."""
hass = get_test_home_assistant()
def setup_recorder(config=None):
"""Set up with params."""
init_recorder_component(hass, config)
hass.start()
hass.block_till_done()
hass.data[DATA_INSTANCE].block_till_done()
return hass
yield setup_recorder
hass.stop()
@pytest.fixture
def hass_history(hass_recorder):

View File

@@ -3,9 +3,11 @@ from __future__ import annotations
from collections.abc import AsyncGenerator
from typing import Awaitable, Callable, cast
from unittest.mock import patch
import pytest
from homeassistant.components import recorder
from homeassistant.components.recorder import Recorder
from homeassistant.components.recorder.const import DATA_INSTANCE
from homeassistant.core import HomeAssistant
@@ -13,47 +15,32 @@ from homeassistant.helpers.typing import ConfigType
from .common import async_recorder_block_till_done
from tests.common import (
async_init_recorder_component,
get_test_home_assistant,
init_recorder_component,
)
from tests.common import async_init_recorder_component
SetupRecorderInstanceT = Callable[..., Awaitable[Recorder]]
@pytest.fixture
def hass_recorder():
"""Home Assistant fixture with in-memory recorder."""
hass = get_test_home_assistant()
def setup_recorder(config=None):
"""Set up with params."""
init_recorder_component(hass, config)
hass.start()
hass.block_till_done()
hass.data[DATA_INSTANCE].block_till_done()
return hass
yield setup_recorder
hass.stop()
@pytest.fixture
async def async_setup_recorder_instance() -> AsyncGenerator[
SetupRecorderInstanceT, None
]:
async def async_setup_recorder_instance(
enable_statistics,
) -> AsyncGenerator[SetupRecorderInstanceT, None]:
"""Yield callable to setup recorder instance."""
async def async_setup_recorder(
hass: HomeAssistant, config: ConfigType | None = None
) -> Recorder:
"""Setup and return recorder instance.""" # noqa: D401
await async_init_recorder_component(hass, config)
await hass.async_block_till_done()
instance = cast(Recorder, hass.data[DATA_INSTANCE])
await async_recorder_block_till_done(hass, instance)
assert isinstance(instance, Recorder)
return instance
stats = recorder.Recorder.async_hourly_statistics if enable_statistics else None
with patch(
"homeassistant.components.recorder.Recorder.async_hourly_statistics",
side_effect=stats,
autospec=True,
):
await async_init_recorder_component(hass, config)
await hass.async_block_till_done()
instance = cast(Recorder, hass.data[DATA_INSTANCE])
await async_recorder_block_till_done(hass, instance)
assert isinstance(instance, Recorder)
return instance
yield async_setup_recorder

View File

@@ -4,6 +4,7 @@ from datetime import datetime, timedelta
import sqlite3
from unittest.mock import patch
import pytest
from sqlalchemy.exc import DatabaseError, OperationalError, SQLAlchemyError
from homeassistant.components import recorder
@@ -682,6 +683,7 @@ def test_auto_purge_disabled(hass_recorder):
dt_util.set_default_time_zone(original_tz)
@pytest.mark.parametrize("enable_statistics", [True])
def test_auto_statistics(hass_recorder):
"""Test periodic statistics scheduling."""
hass = hass_recorder()

View File

@@ -33,6 +33,9 @@ def test_compile_hourly_statistics(hass_recorder):
"mean": 15.0,
"min": 10.0,
"max": 20.0,
"last_reset": None,
"state": None,
"sum": None,
}
]
}

View File

@@ -15,28 +15,7 @@ from homeassistant.util import dt as dt_util
from .common import corrupt_db_file
from tests.common import (
async_init_recorder_component,
get_test_home_assistant,
init_recorder_component,
)
@pytest.fixture
def hass_recorder():
"""Home Assistant fixture with in-memory recorder."""
hass = get_test_home_assistant()
def setup_recorder(config=None):
"""Set up with params."""
init_recorder_component(hass, config)
hass.start()
hass.block_till_done()
hass.data[DATA_INSTANCE].block_till_done()
return hass
yield setup_recorder
hass.stop()
from tests.common import async_init_recorder_component
def test_session_scope_not_setup(hass_recorder):

View File

@@ -3,8 +3,6 @@
from datetime import timedelta
from unittest.mock import patch, sentinel
import pytest
from homeassistant.components.recorder import history
from homeassistant.components.recorder.const import DATA_INSTANCE
from homeassistant.components.recorder.models import process_timestamp_to_utc_isoformat
@@ -13,27 +11,9 @@ from homeassistant.const import STATE_UNAVAILABLE
from homeassistant.setup import setup_component
import homeassistant.util.dt as dt_util
from tests.common import get_test_home_assistant, init_recorder_component
from tests.components.recorder.common import wait_recording_done
@pytest.fixture
def hass_recorder():
"""Home Assistant fixture with in-memory recorder."""
hass = get_test_home_assistant()
def setup_recorder(config=None):
"""Set up with params."""
init_recorder_component(hass, config)
hass.start()
hass.block_till_done()
hass.data[DATA_INSTANCE].block_till_done()
return hass
yield setup_recorder
hass.stop()
def test_compile_hourly_statistics(hass_recorder):
"""Test compiling hourly statistics."""
hass = hass_recorder()
@@ -54,11 +34,198 @@ def test_compile_hourly_statistics(hass_recorder):
"mean": 15.0,
"min": 10.0,
"max": 20.0,
"last_reset": None,
"state": None,
"sum": None,
}
]
}
def test_compile_hourly_energy_statistics(hass_recorder):
"""Test compiling hourly statistics."""
hass = hass_recorder()
recorder = hass.data[DATA_INSTANCE]
setup_component(hass, "sensor", {})
sns1_attr = {"device_class": "energy", "state_class": "measurement"}
sns2_attr = {"device_class": "energy"}
sns3_attr = {}
zero, four, eight, states = record_energy_states(
hass, sns1_attr, sns2_attr, sns3_attr
)
hist = history.get_significant_states(
hass, zero - timedelta.resolution, eight + timedelta.resolution
)
assert dict(states)["sensor.test1"] == dict(hist)["sensor.test1"]
recorder.do_adhoc_statistics(period="hourly", start=zero)
wait_recording_done(hass)
recorder.do_adhoc_statistics(period="hourly", start=zero + timedelta(hours=1))
wait_recording_done(hass)
recorder.do_adhoc_statistics(period="hourly", start=zero + timedelta(hours=2))
wait_recording_done(hass)
stats = statistics_during_period(hass, zero)
assert stats == {
"sensor.test1": [
{
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(zero),
"max": None,
"mean": None,
"min": None,
"last_reset": process_timestamp_to_utc_isoformat(zero),
"state": 20.0,
"sum": 10.0,
},
{
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(zero + timedelta(hours=1)),
"max": None,
"mean": None,
"min": None,
"last_reset": process_timestamp_to_utc_isoformat(four),
"state": 40.0,
"sum": 10.0,
},
{
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(zero + timedelta(hours=2)),
"max": None,
"mean": None,
"min": None,
"last_reset": process_timestamp_to_utc_isoformat(four),
"state": 70.0,
"sum": 40.0,
},
]
}
def test_compile_hourly_energy_statistics2(hass_recorder):
"""Test compiling hourly statistics."""
hass = hass_recorder()
recorder = hass.data[DATA_INSTANCE]
setup_component(hass, "sensor", {})
sns1_attr = {"device_class": "energy", "state_class": "measurement"}
sns2_attr = {"device_class": "energy", "state_class": "measurement"}
sns3_attr = {"device_class": "energy", "state_class": "measurement"}
zero, four, eight, states = record_energy_states(
hass, sns1_attr, sns2_attr, sns3_attr
)
hist = history.get_significant_states(
hass, zero - timedelta.resolution, eight + timedelta.resolution
)
assert dict(states)["sensor.test1"] == dict(hist)["sensor.test1"]
recorder.do_adhoc_statistics(period="hourly", start=zero)
wait_recording_done(hass)
recorder.do_adhoc_statistics(period="hourly", start=zero + timedelta(hours=1))
wait_recording_done(hass)
recorder.do_adhoc_statistics(period="hourly", start=zero + timedelta(hours=2))
wait_recording_done(hass)
stats = statistics_during_period(hass, zero)
assert stats == {
"sensor.test1": [
{
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(zero),
"max": None,
"mean": None,
"min": None,
"last_reset": process_timestamp_to_utc_isoformat(zero),
"state": 20.0,
"sum": 10.0,
},
{
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(zero + timedelta(hours=1)),
"max": None,
"mean": None,
"min": None,
"last_reset": process_timestamp_to_utc_isoformat(four),
"state": 40.0,
"sum": 10.0,
},
{
"statistic_id": "sensor.test1",
"start": process_timestamp_to_utc_isoformat(zero + timedelta(hours=2)),
"max": None,
"mean": None,
"min": None,
"last_reset": process_timestamp_to_utc_isoformat(four),
"state": 70.0,
"sum": 40.0,
},
],
"sensor.test2": [
{
"statistic_id": "sensor.test2",
"start": process_timestamp_to_utc_isoformat(zero),
"max": None,
"mean": None,
"min": None,
"last_reset": process_timestamp_to_utc_isoformat(zero),
"state": 130.0,
"sum": 20.0,
},
{
"statistic_id": "sensor.test2",
"start": process_timestamp_to_utc_isoformat(zero + timedelta(hours=1)),
"max": None,
"mean": None,
"min": None,
"last_reset": process_timestamp_to_utc_isoformat(four),
"state": 45.0,
"sum": -95.0,
},
{
"statistic_id": "sensor.test2",
"start": process_timestamp_to_utc_isoformat(zero + timedelta(hours=2)),
"max": None,
"mean": None,
"min": None,
"last_reset": process_timestamp_to_utc_isoformat(four),
"state": 75.0,
"sum": -65.0,
},
],
"sensor.test3": [
{
"statistic_id": "sensor.test3",
"start": process_timestamp_to_utc_isoformat(zero),
"max": None,
"mean": None,
"min": None,
"last_reset": process_timestamp_to_utc_isoformat(zero),
"state": 5.0,
"sum": 5.0,
},
{
"statistic_id": "sensor.test3",
"start": process_timestamp_to_utc_isoformat(zero + timedelta(hours=1)),
"max": None,
"mean": None,
"min": None,
"last_reset": process_timestamp_to_utc_isoformat(four),
"state": 50.0,
"sum": 30.0,
},
{
"statistic_id": "sensor.test3",
"start": process_timestamp_to_utc_isoformat(zero + timedelta(hours=2)),
"max": None,
"mean": None,
"min": None,
"last_reset": process_timestamp_to_utc_isoformat(four),
"state": 90.0,
"sum": 70.0,
},
],
}
def test_compile_hourly_statistics_unchanged(hass_recorder):
"""Test compiling hourly statistics, with no changes during the hour."""
hass = hass_recorder()
@@ -79,6 +246,9 @@ def test_compile_hourly_statistics_unchanged(hass_recorder):
"mean": 20.0,
"min": 20.0,
"max": 20.0,
"last_reset": None,
"state": None,
"sum": None,
}
]
}
@@ -104,6 +274,9 @@ def test_compile_hourly_statistics_partially_unavailable(hass_recorder):
"mean": 17.5,
"min": 10.0,
"max": 25.0,
"last_reset": None,
"state": None,
"sum": None,
}
]
}
@@ -127,7 +300,7 @@ def test_compile_hourly_statistics_unavailable(hass_recorder):
def record_states(hass):
"""Record some test states.
We inject a bunch of state updates temperature sensors.
We inject a bunch of state updates for temperature sensors.
"""
mp = "media_player.test"
sns1 = "sensor.test1"
@@ -174,6 +347,98 @@ def record_states(hass):
return zero, four, states
def record_energy_states(hass, _sns1_attr, _sns2_attr, _sns3_attr):
"""Record some test states.
We inject a bunch of state updates for energy sensors.
"""
sns1 = "sensor.test1"
sns2 = "sensor.test2"
sns3 = "sensor.test3"
sns4 = "sensor.test4"
def set_state(entity_id, state, **kwargs):
"""Set the state."""
hass.states.set(entity_id, state, **kwargs)
wait_recording_done(hass)
return hass.states.get(entity_id)
zero = dt_util.utcnow()
one = zero + timedelta(minutes=15)
two = one + timedelta(minutes=30)
three = two + timedelta(minutes=15)
four = three + timedelta(minutes=15)
five = four + timedelta(minutes=30)
six = five + timedelta(minutes=15)
seven = six + timedelta(minutes=15)
eight = seven + timedelta(minutes=30)
sns1_attr = {**_sns1_attr, "last_reset": zero.isoformat()}
sns2_attr = {**_sns2_attr, "last_reset": zero.isoformat()}
sns3_attr = {**_sns3_attr, "last_reset": zero.isoformat()}
sns4_attr = {**_sns3_attr}
states = {sns1: [], sns2: [], sns3: [], sns4: []}
with patch("homeassistant.components.recorder.dt_util.utcnow", return_value=zero):
states[sns1].append(set_state(sns1, "10", attributes=sns1_attr)) # Sum 0
states[sns2].append(set_state(sns2, "110", attributes=sns2_attr)) # Sum 0
states[sns3].append(set_state(sns3, "0", attributes=sns3_attr)) # Sum 0
states[sns4].append(set_state(sns4, "0", attributes=sns4_attr)) # -
with patch("homeassistant.components.recorder.dt_util.utcnow", return_value=one):
states[sns1].append(set_state(sns1, "15", attributes=sns1_attr)) # Sum 5
states[sns2].append(set_state(sns2, "120", attributes=sns2_attr)) # Sum 10
states[sns3].append(set_state(sns3, "0", attributes=sns3_attr)) # Sum 0
states[sns4].append(set_state(sns4, "0", attributes=sns4_attr)) # -
with patch("homeassistant.components.recorder.dt_util.utcnow", return_value=two):
states[sns1].append(set_state(sns1, "20", attributes=sns1_attr)) # Sum 10
states[sns2].append(set_state(sns2, "130", attributes=sns2_attr)) # Sum 20
states[sns3].append(set_state(sns3, "5", attributes=sns3_attr)) # Sum 5
states[sns4].append(set_state(sns4, "5", attributes=sns4_attr)) # -
with patch("homeassistant.components.recorder.dt_util.utcnow", return_value=three):
states[sns1].append(set_state(sns1, "10", attributes=sns1_attr)) # Sum 0
states[sns2].append(set_state(sns2, "0", attributes=sns2_attr)) # Sum -110
states[sns3].append(set_state(sns3, "10", attributes=sns3_attr)) # Sum 10
states[sns4].append(set_state(sns4, "10", attributes=sns4_attr)) # -
sns1_attr = {**_sns1_attr, "last_reset": four.isoformat()}
sns2_attr = {**_sns2_attr, "last_reset": four.isoformat()}
sns3_attr = {**_sns3_attr, "last_reset": four.isoformat()}
with patch("homeassistant.components.recorder.dt_util.utcnow", return_value=four):
states[sns1].append(set_state(sns1, "30", attributes=sns1_attr)) # Sum 0
states[sns2].append(set_state(sns2, "30", attributes=sns2_attr)) # Sum -110
states[sns3].append(set_state(sns3, "30", attributes=sns3_attr)) # Sum 10
states[sns4].append(set_state(sns4, "30", attributes=sns4_attr)) # -
with patch("homeassistant.components.recorder.dt_util.utcnow", return_value=five):
states[sns1].append(set_state(sns1, "40", attributes=sns1_attr)) # Sum 10
states[sns2].append(set_state(sns2, "45", attributes=sns2_attr)) # Sum -95
states[sns3].append(set_state(sns3, "50", attributes=sns3_attr)) # Sum 30
states[sns4].append(set_state(sns4, "50", attributes=sns4_attr)) # -
with patch("homeassistant.components.recorder.dt_util.utcnow", return_value=six):
states[sns1].append(set_state(sns1, "50", attributes=sns1_attr)) # Sum 20
states[sns2].append(set_state(sns2, "55", attributes=sns2_attr)) # Sum -85
states[sns3].append(set_state(sns3, "60", attributes=sns3_attr)) # Sum 40
states[sns4].append(set_state(sns4, "60", attributes=sns4_attr)) # -
with patch("homeassistant.components.recorder.dt_util.utcnow", return_value=seven):
states[sns1].append(set_state(sns1, "60", attributes=sns1_attr)) # Sum 30
states[sns2].append(set_state(sns2, "65", attributes=sns2_attr)) # Sum -75
states[sns3].append(set_state(sns3, "80", attributes=sns3_attr)) # Sum 60
states[sns4].append(set_state(sns4, "80", attributes=sns4_attr)) # -
with patch("homeassistant.components.recorder.dt_util.utcnow", return_value=eight):
states[sns1].append(set_state(sns1, "70", attributes=sns1_attr)) # Sum 40
states[sns2].append(set_state(sns2, "75", attributes=sns2_attr)) # Sum -65
states[sns3].append(set_state(sns3, "90", attributes=sns3_attr)) # Sum 70
return zero, four, eight, states
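
Note that sensor.test4 never carries a last_reset attribute: depending on the attribute set passed in, it either lacks a device_class entirely or has one but no reset marker. Either way it never appears in the compiled statistics, exercising the no-valid-updates branch in compile_statistics above.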
def record_states_partially_unavailable(hass):
"""Record some test states.

View File

@@ -16,7 +16,7 @@ from homeassistant import core as ha, loader, runner, util
from homeassistant.auth.const import GROUP_ID_ADMIN, GROUP_ID_READ_ONLY
from homeassistant.auth.models import Credentials
from homeassistant.auth.providers import homeassistant, legacy_api_password
from homeassistant.components import mqtt
from homeassistant.components import mqtt, recorder
from homeassistant.components.websocket_api.auth import (
TYPE_AUTH,
TYPE_AUTH_OK,
@@ -39,6 +39,8 @@ from tests.common import ( # noqa: E402, isort:skip
MockUser,
async_fire_mqtt_message,
async_test_home_assistant,
get_test_home_assistant,
init_recorder_component,
mock_storage as mock_storage,
)
from tests.test_util.aiohttp import mock_aiohttp_client # noqa: E402, isort:skip
@@ -595,3 +597,36 @@ def legacy_patchable_time():
def enable_custom_integrations(hass):
"""Enable custom integrations defined in the test dir."""
hass.data.pop(loader.DATA_CUSTOM_COMPONENTS)
@pytest.fixture
def enable_statistics():
"""Fixture to control enabling of recorder's statistics compilation.
To enable statistics, tests can be marked with:
@pytest.mark.parametrize("enable_statistics", [True])
"""
return False
@pytest.fixture
def hass_recorder(enable_statistics):
"""Home Assistant fixture with in-memory recorder."""
hass = get_test_home_assistant()
stats = recorder.Recorder.async_hourly_statistics if enable_statistics else None
with patch(
"homeassistant.components.recorder.Recorder.async_hourly_statistics",
side_effect=stats,
autospec=True,
):
def setup_recorder(config=None):
"""Set up with params."""
init_recorder_component(hass, config)
hass.start()
hass.block_till_done()
hass.data[recorder.DATA_INSTANCE].block_till_done()
return hass
yield setup_recorder
hass.stop()