Avoid returning statistics columns that the metadata knows are impossible (#92095)
parent d138bbe26e
commit bb4c03ce3c

@@ -1518,6 +1518,28 @@ def _generate_select_columns_for_types_stmt(
     return lambda_stmt(lambda: columns, track_on=track_on)
 
 
+def _extract_metadata_and_discard_impossible_columns(
+    metadata: dict[str, tuple[int, StatisticMetaData]],
+    types: set[Literal["last_reset", "max", "mean", "min", "state", "sum"]],
+) -> list[int]:
+    """Extract metadata ids from metadata and discard impossible columns."""
+    metadata_ids = []
+    has_mean = False
+    has_sum = False
+    for metadata_id, stats_metadata in metadata.values():
+        metadata_ids.append(metadata_id)
+        has_mean |= stats_metadata["has_mean"]
+        has_sum |= stats_metadata["has_sum"]
+    if not has_mean:
+        types.discard("mean")
+        types.discard("min")
+        types.discard("max")
+    if not has_sum:
+        types.discard("sum")
+        types.discard("state")
+    return metadata_ids
+
+
 def _statistics_during_period_with_session(
     hass: HomeAssistant,
     session: Session,
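Note: the helper added above does two jobs in one pass over the metadata: it collects the metadata ids, and it mutates the caller's `types` set in place so that columns the metadata rules out never reach the query. A minimal sketch of the intended behavior, using a hypothetical mean-only sensor and metadata reduced to the two flags the helper reads:

# Hypothetical illustration, not part of the commit.
types = {"last_reset", "max", "mean", "min", "state", "sum"}
metadata = {"sensor.outdoor_temp": (11, {"has_mean": True, "has_sum": False})}
metadata_ids = _extract_metadata_and_discard_impossible_columns(metadata, types)
assert metadata_ids == [11]
# A mean-only statistic can never have "sum" or "state" values:
assert types == {"last_reset", "max", "mean", "min"}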
@@ -1547,7 +1569,7 @@ def _statistics_during_period_with_session(
 
     metadata_ids = None
     if statistic_ids is not None:
-        metadata_ids = [metadata_id for metadata_id, _ in metadata.values()]
+        metadata_ids = _extract_metadata_and_discard_impossible_columns(metadata, types)
 
     table: type[Statistics | StatisticsShortTerm] = (
         Statistics if period != "5minute" else StatisticsShortTerm
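Note: the new call sits under `if statistic_ids is not None:`, so the pruning only happens when specific statistics are requested, and it runs before `_generate_select_columns_for_types_stmt` builds the SELECT, so impossible columns never reach the database. For a hypothetical sum-only meter the surviving types would be:

# Hypothetical illustration, not part of the commit.
types = {"last_reset", "max", "mean", "min", "state", "sum"}
metadata = {"sensor.energy_meter": (3, {"has_mean": False, "has_sum": True})}
_extract_metadata_and_discard_impossible_columns(metadata, types)
assert types == {"last_reset", "state", "sum"}  # mean/min/max dropped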
@@ -1661,7 +1683,8 @@ def _get_last_statistics(
     )
     if not metadata:
         return {}
-    metadata_id = metadata[statistic_id][0]
+    metadata_ids = _extract_metadata_and_discard_impossible_columns(metadata, types)
+    metadata_id = metadata_ids[0]
     if table == Statistics:
         stmt = _get_last_statistics_stmt(metadata_id, number_of_stats)
     else:
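Note: `_get_last_statistics` fetches metadata for a single statistic_id, so the helper returns a one-element list and `metadata_ids[0]` is the same id the old `metadata[statistic_id][0]` lookup produced; the difference is that `types` is pruned first. A hypothetical check of that equivalence:

# Hypothetical illustration, not part of the commit.
metadata = {"sensor.energy": (7, {"has_mean": False, "has_sum": True})}
types = {"mean", "state", "sum"}
metadata_ids = _extract_metadata_and_discard_impossible_columns(metadata, types)
assert metadata_ids[0] == metadata["sensor.energy"][0] == 7
assert types == {"state", "sum"}  # "mean" discarded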
@@ -1753,11 +1776,7 @@ def get_latest_short_term_statistics(
     )
     if not metadata:
         return {}
-    metadata_ids = [
-        metadata[statistic_id][0]
-        for statistic_id in statistic_ids
-        if statistic_id in metadata
-    ]
+    metadata_ids = _extract_metadata_and_discard_impossible_columns(metadata, types)
     stmt = _latest_short_term_statistics_stmt(metadata_ids)
     stats = cast(
         Sequence[Row], execute_stmt_lambda_element(session, stmt, orm_rows=False)
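Note: the replaced comprehension filtered `metadata` against the requested `statistic_ids`, but the metadata mapping was already fetched for exactly those ids, so iterating `metadata.values()` inside the helper yields the same id list while also pruning `types`. Roughly, assuming one of two requested ids has no recorded metadata:

# Hypothetical illustration, not part of the commit.
# "sensor.gone" was requested but never recorded, so it is simply
# absent from the returned metadata mapping.
metadata = {"sensor.power": (5, {"has_mean": True, "has_sum": False})}
types = {"mean", "sum"}
assert _extract_metadata_and_discard_impossible_columns(metadata, types) == [5]
assert types == {"mean"}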
@@ -557,8 +557,11 @@ def _compile_statistics( # noqa: C901
             last_stat = last_stats[entity_id][0]
             last_reset = _timestamp_to_isoformat_or_none(last_stat["last_reset"])
             old_last_reset = last_reset
-            new_state = old_state = last_stat["state"]
-            _sum = last_stat["sum"] or 0.0
+            # If there are no previous values and has_sum
+            # was previously false there will be no last_stat
+            # for state or sum
+            new_state = old_state = last_stat.get("state")
+            _sum = last_stat.get("sum") or 0.0
 
         for fstate, state in valid_float_states:
             reset = False
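Note: the comment added in the hunk above states the reason for switching to `.get()`: now that impossible columns are discarded, the last_stat row for a sensor whose has_sum was previously false can lack the "state" and "sum" keys entirely, and plain indexing would raise KeyError during statistics compilation. A minimal illustration with a hypothetical row:

# Hypothetical illustration, not part of the commit.
last_stat = {"start": 1681000000.0, "last_reset": None}  # no "state"/"sum" keys
new_state = old_state = last_stat.get("state")  # None instead of KeyError
_sum = last_stat.get("sum") or 0.0              # falls back to 0.0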
@@ -92,6 +92,12 @@ def test_compile_hourly_statistics(hass_recorder: Callable[..., HomeAssistant])
     do_adhoc_statistics(hass, start=zero)
     do_adhoc_statistics(hass, start=four)
     wait_recording_done(hass)
+
+    metadata = get_metadata(hass, statistic_ids={"sensor.test1", "sensor.test2"})
+    assert metadata["sensor.test1"][1]["has_mean"] is True
+    assert metadata["sensor.test1"][1]["has_sum"] is False
+    assert metadata["sensor.test2"][1]["has_mean"] is True
+    assert metadata["sensor.test2"][1]["has_sum"] is False
     expected_1 = {
         "start": process_timestamp(zero).timestamp(),
         "end": process_timestamp(zero + timedelta(minutes=5)).timestamp(),
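Note: the new assertions pin down the shape the pruning relies on: `get_metadata` maps each statistic_id to a `(metadata_id, StatisticMetaData)` tuple, so the `[1]` index selects the metadata dict with its capability flags. Sketched with hypothetical values:

# Hypothetical illustration, not part of the commit.
metadata = {"sensor.test1": (1, {"has_mean": True, "has_sum": False})}
metadata_id, meta = metadata["sensor.test1"]
assert metadata_id == 1
assert meta["has_mean"] is True and meta["has_sum"] is False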
@@ -99,8 +105,6 @@ def test_compile_hourly_statistics(hass_recorder: Callable[..., HomeAssistant])
         "min": pytest.approx(10.0),
         "max": pytest.approx(20.0),
         "last_reset": None,
-        "state": None,
-        "sum": None,
     }
     expected_2 = {
         "start": process_timestamp(four).timestamp(),
@@ -109,32 +113,44 @@ def test_compile_hourly_statistics(hass_recorder: Callable[..., HomeAssistant])
         "min": pytest.approx(20.0),
         "max": pytest.approx(20.0),
         "last_reset": None,
-        "state": None,
-        "sum": None,
     }
     expected_stats1 = [expected_1, expected_2]
     expected_stats2 = [expected_1, expected_2]
 
     # Test statistics_during_period
-    stats = statistics_during_period(hass, zero, period="5minute")
+    stats = statistics_during_period(
+        hass, zero, period="5minute", statistic_ids={"sensor.test1", "sensor.test2"}
+    )
     assert stats == {"sensor.test1": expected_stats1, "sensor.test2": expected_stats2}
 
     # Test statistics_during_period with a far future start and end date
     future = dt_util.as_utc(dt_util.parse_datetime("2221-11-01 00:00:00"))
-    stats = statistics_during_period(hass, future, end_time=future, period="5minute")
+    stats = statistics_during_period(
+        hass,
+        future,
+        end_time=future,
+        period="5minute",
+        statistic_ids={"sensor.test1", "sensor.test2"},
+    )
     assert stats == {}
 
     # Test statistics_during_period with a far future end date
-    stats = statistics_during_period(hass, zero, end_time=future, period="5minute")
+    stats = statistics_during_period(
+        hass,
+        zero,
+        end_time=future,
+        period="5minute",
+        statistic_ids={"sensor.test1", "sensor.test2"},
+    )
     assert stats == {"sensor.test1": expected_stats1, "sensor.test2": expected_stats2}
 
     stats = statistics_during_period(
-        hass, zero, statistic_ids=["sensor.test2"], period="5minute"
+        hass, zero, statistic_ids={"sensor.test2"}, period="5minute"
     )
     assert stats == {"sensor.test2": expected_stats2}
 
     stats = statistics_during_period(
-        hass, zero, statistic_ids=["sensor.test3"], period="5minute"
+        hass, zero, statistic_ids={"sensor.test3"}, period="5minute"
     )
     assert stats == {}
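Note: two test-side consequences of the change appear in the hunk above and repeat through the remaining hunks. First, the expected result dicts lose the keys the metadata rules out; here "state" and "sum" disappear because these sensors record only mean/min/max. Second, the calls now pass `statistic_ids` as a set, exercising the branch on which the pruning runs. Schematically, an expected row shrinks from

{"start": ..., "end": ..., "mean": ..., "min": ..., "max": ..., "last_reset": None, "state": None, "sum": None}

to

{"start": ..., "end": ..., "mean": ..., "min": ..., "max": ..., "last_reset": None}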
@@ -567,15 +583,14 @@ async def test_import_statistics(
 
     import_fn(hass, external_metadata, (external_statistics1, external_statistics2))
     await async_wait_recording_done(hass)
-    stats = statistics_during_period(hass, zero, period="hour")
+    stats = statistics_during_period(
+        hass, zero, period="hour", statistic_ids={statistic_id}
+    )
     assert stats == {
         statistic_id: [
             {
                 "start": process_timestamp(period1).timestamp(),
                 "end": process_timestamp(period1 + timedelta(hours=1)).timestamp(),
-                "max": None,
-                "mean": None,
-                "min": None,
                 "last_reset": datetime_to_timestamp_or_none(last_reset_utc),
                 "state": pytest.approx(0.0),
                 "sum": pytest.approx(2.0),
@@ -583,9 +598,6 @@ async def test_import_statistics(
             {
                 "start": process_timestamp(period2).timestamp(),
                 "end": process_timestamp(period2 + timedelta(hours=1)).timestamp(),
-                "max": None,
-                "mean": None,
-                "min": None,
                 "last_reset": datetime_to_timestamp_or_none(last_reset_utc),
                 "state": pytest.approx(1.0),
                 "sum": pytest.approx(3.0),
@@ -631,9 +643,6 @@ async def test_import_statistics(
             {
                 "start": process_timestamp(period2).timestamp(),
                 "end": process_timestamp(period2 + timedelta(hours=1)).timestamp(),
-                "max": None,
-                "mean": None,
-                "min": None,
                 "last_reset": datetime_to_timestamp_or_none(last_reset_utc),
                 "state": pytest.approx(1.0),
                 "sum": pytest.approx(3.0),
@@ -650,15 +659,14 @@ async def test_import_statistics(
     }
     import_fn(hass, external_metadata, (external_statistics,))
     await async_wait_recording_done(hass)
-    stats = statistics_during_period(hass, zero, period="hour")
+    stats = statistics_during_period(
+        hass, zero, period="hour", statistic_ids={statistic_id}
+    )
     assert stats == {
         statistic_id: [
             {
                 "start": process_timestamp(period1).timestamp(),
                 "end": process_timestamp(period1 + timedelta(hours=1)).timestamp(),
-                "max": None,
-                "mean": None,
-                "min": None,
                 "last_reset": None,
                 "state": pytest.approx(5.0),
                 "sum": pytest.approx(6.0),
@@ -666,9 +674,6 @@ async def test_import_statistics(
             {
                 "start": process_timestamp(period2).timestamp(),
                 "end": process_timestamp(period2 + timedelta(hours=1)).timestamp(),
-                "max": None,
-                "mean": None,
-                "min": None,
                 "last_reset": datetime_to_timestamp_or_none(last_reset_utc),
                 "state": pytest.approx(1.0),
                 "sum": pytest.approx(3.0),
@@ -716,15 +721,14 @@ async def test_import_statistics(
         },
     )
     }
-    stats = statistics_during_period(hass, zero, period="hour")
+    stats = statistics_during_period(
+        hass, zero, period="hour", statistic_ids={statistic_id}
+    )
     assert stats == {
         statistic_id: [
             {
                 "start": process_timestamp(period1).timestamp(),
                 "end": process_timestamp(period1 + timedelta(hours=1)).timestamp(),
-                "max": pytest.approx(1.0),
-                "mean": pytest.approx(2.0),
-                "min": pytest.approx(3.0),
                 "last_reset": datetime_to_timestamp_or_none(last_reset_utc),
                 "state": pytest.approx(4.0),
                 "sum": pytest.approx(5.0),
@@ -732,9 +736,6 @@ async def test_import_statistics(
             {
                 "start": process_timestamp(period2).timestamp(),
                 "end": process_timestamp(period2 + timedelta(hours=1)).timestamp(),
-                "max": None,
-                "mean": None,
-                "min": None,
                 "last_reset": datetime_to_timestamp_or_none(last_reset_utc),
                 "state": pytest.approx(1.0),
                 "sum": pytest.approx(3.0),
@@ -757,15 +758,14 @@ async def test_import_statistics(
     assert response["success"]
 
     await async_wait_recording_done(hass)
-    stats = statistics_during_period(hass, zero, period="hour")
+    stats = statistics_during_period(
+        hass, zero, period="hour", statistic_ids={statistic_id}
+    )
     assert stats == {
         statistic_id: [
             {
                 "start": process_timestamp(period1).timestamp(),
                 "end": process_timestamp(period1 + timedelta(hours=1)).timestamp(),
-                "max": pytest.approx(1.0),
-                "mean": pytest.approx(2.0),
-                "min": pytest.approx(3.0),
                 "last_reset": datetime_to_timestamp_or_none(last_reset_utc),
                 "state": pytest.approx(4.0),
                 "sum": pytest.approx(5.0),
@@ -773,9 +773,6 @@ async def test_import_statistics(
             {
                 "start": process_timestamp(period2).timestamp(),
                 "end": process_timestamp(period2 + timedelta(hours=1)).timestamp(),
-                "max": None,
-                "mean": None,
-                "min": None,
                 "last_reset": datetime_to_timestamp_or_none(last_reset_utc),
                 "state": pytest.approx(1.0),
                 "sum": pytest.approx(1000 * 1000 + 3.0),
@@ -1020,7 +1017,9 @@ def test_weekly_statistics(
 
     async_add_external_statistics(hass, external_metadata, external_statistics)
     wait_recording_done(hass)
-    stats = statistics_during_period(hass, zero, period="week")
+    stats = statistics_during_period(
+        hass, zero, period="week", statistic_ids={"test:total_energy_import"}
+    )
     week1_start = dt_util.as_utc(dt_util.parse_datetime("2022-10-03 00:00:00"))
     week1_end = dt_util.as_utc(dt_util.parse_datetime("2022-10-10 00:00:00"))
     week2_start = dt_util.as_utc(dt_util.parse_datetime("2022-10-10 00:00:00"))
@@ -1030,9 +1029,6 @@ def test_weekly_statistics(
             {
                 "start": week1_start.timestamp(),
                 "end": week1_end.timestamp(),
-                "max": None,
-                "mean": None,
-                "min": None,
                 "last_reset": None,
                 "state": 1.0,
                 "sum": 3.0,
@@ -1040,9 +1036,6 @@ def test_weekly_statistics(
             {
                 "start": week2_start.timestamp(),
                 "end": week2_end.timestamp(),
-                "max": None,
-                "mean": None,
-                "min": None,
                 "last_reset": None,
                 "state": 3.0,
                 "sum": 5.0,
@@ -1061,9 +1054,6 @@ def test_weekly_statistics(
             {
                 "start": week1_start.timestamp(),
                 "end": week1_end.timestamp(),
-                "max": None,
-                "mean": None,
-                "min": None,
                 "last_reset": None,
                 "state": 1.0,
                 "sum": 3.0,
@@ -1071,9 +1061,6 @@ def test_weekly_statistics(
             {
                 "start": week2_start.timestamp(),
                 "end": week2_end.timestamp(),
-                "max": None,
-                "mean": None,
-                "min": None,
                 "last_reset": None,
                 "state": 3.0,
                 "sum": 5.0,
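Note: for the imported external statistics in the weekly and monthly tests the metadata is the mirror image of the sensor tests above: has_mean is false and has_sum is true, so it is the "max"/"mean"/"min" keys that drop out of the expected rows. Hypothetical metadata matching these expectations:

# Hypothetical illustration, not part of the commit.
metadata = {"test:total_energy_import": (9, {"has_mean": False, "has_sum": True})}
types = {"max", "mean", "min", "state", "sum"}
_extract_metadata_and_discard_impossible_columns(metadata, types)
assert types == {"state", "sum"}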
@@ -1158,7 +1145,9 @@ def test_monthly_statistics(
 
     async_add_external_statistics(hass, external_metadata, external_statistics)
     wait_recording_done(hass)
-    stats = statistics_during_period(hass, zero, period="month")
+    stats = statistics_during_period(
+        hass, zero, period="month", statistic_ids={"test:total_energy_import"}
+    )
     sep_start = dt_util.as_utc(dt_util.parse_datetime("2021-09-01 00:00:00"))
     sep_end = dt_util.as_utc(dt_util.parse_datetime("2021-10-01 00:00:00"))
     oct_start = dt_util.as_utc(dt_util.parse_datetime("2021-10-01 00:00:00"))
@@ -1168,9 +1157,6 @@ def test_monthly_statistics(
             {
                 "start": sep_start.timestamp(),
                 "end": sep_end.timestamp(),
-                "max": None,
-                "mean": None,
-                "min": None,
                 "last_reset": None,
                 "state": pytest.approx(1.0),
                 "sum": pytest.approx(3.0),
@@ -1178,9 +1164,6 @@ def test_monthly_statistics(
             {
                 "start": oct_start.timestamp(),
                 "end": oct_end.timestamp(),
-                "max": None,
-                "mean": None,
-                "min": None,
                 "last_reset": None,
                 "state": pytest.approx(3.0),
                 "sum": pytest.approx(5.0),
@@ -1203,9 +1186,6 @@ def test_monthly_statistics(
             {
                 "start": sep_start.timestamp(),
                 "end": sep_end.timestamp(),
-                "max": None,
-                "mean": None,
-                "min": None,
                 "last_reset": None,
                 "state": pytest.approx(1.0),
                 "sum": pytest.approx(3.0),
@@ -1213,9 +1193,6 @@ def test_monthly_statistics(
             {
                 "start": oct_start.timestamp(),
                 "end": oct_end.timestamp(),
-                "max": None,
-                "mean": None,
-                "min": None,
                 "last_reset": None,
                 "state": pytest.approx(3.0),
                 "sum": pytest.approx(5.0),
@@ -188,8 +188,6 @@ async def test_statistics_during_period(
                 "min": pytest.approx(10),
                 "max": pytest.approx(10),
                 "last_reset": None,
-                "state": None,
-                "sum": None,
             }
         ]
     }
@@ -955,8 +953,6 @@ async def test_statistics_during_period_unit_conversion(
                 "min": pytest.approx(value),
                 "max": pytest.approx(value),
                 "last_reset": None,
-                "state": None,
-                "sum": None,
             }
         ]
     }
@@ -983,8 +979,6 @@ async def test_statistics_during_period_unit_conversion(
                 "min": pytest.approx(converted_value),
                 "max": pytest.approx(converted_value),
                 "last_reset": None,
-                "state": None,
-                "sum": None,
             }
         ]
     }
@@ -1043,9 +1037,6 @@ async def test_sum_statistics_during_period_unit_conversion(
             {
                 "start": int(now.timestamp() * 1000),
                 "end": int((now + timedelta(minutes=5)).timestamp() * 1000),
-                "mean": None,
-                "min": None,
-                "max": None,
                 "last_reset": None,
                 "state": pytest.approx(value),
                 "sum": pytest.approx(value),
@@ -1071,9 +1062,6 @@ async def test_sum_statistics_during_period_unit_conversion(
             {
                 "start": int(now.timestamp() * 1000),
                 "end": int((now + timedelta(minutes=5)).timestamp() * 1000),
-                "mean": None,
-                "min": None,
-                "max": None,
                 "last_reset": None,
                 "state": pytest.approx(converted_value),
                 "sum": pytest.approx(converted_value),
@@ -1211,8 +1199,6 @@ async def test_statistics_during_period_in_the_past(
                 "min": pytest.approx(10),
                 "max": pytest.approx(10),
                 "last_reset": None,
-                "state": None,
-                "sum": None,
             }
         ]
     }
@@ -1238,8 +1224,6 @@ async def test_statistics_during_period_in_the_past(
                 "min": pytest.approx(10),
                 "max": pytest.approx(10),
                 "last_reset": None,
-                "state": None,
-                "sum": None,
             }
         ]
     }
@@ -1696,8 +1680,6 @@ async def test_clear_statistics(
                 "min": pytest.approx(value),
                 "max": pytest.approx(value),
                 "last_reset": None,
-                "state": None,
-                "sum": None,
             }
         ],
         "sensor.test2": [
@@ -1708,8 +1690,6 @@ async def test_clear_statistics(
                 "min": pytest.approx(value * 2),
                 "max": pytest.approx(value * 2),
                 "last_reset": None,
-                "state": None,
-                "sum": None,
             }
         ],
         "sensor.test3": [
@@ -1720,8 +1700,6 @@ async def test_clear_statistics(
                 "min": pytest.approx(value * 3),
                 "max": pytest.approx(value * 3),
                 "last_reset": None,
-                "state": None,
-                "sum": None,
             }
         ],
     }
@@ -1873,8 +1851,6 @@ async def test_update_statistics_metadata(
                 "mean": 10.0,
                 "min": 10.0,
                 "start": int(now.timestamp() * 1000),
-                "state": None,
-                "sum": None,
             }
         ],
     }
@@ -1937,8 +1913,6 @@ async def test_change_statistics_unit(
                 "mean": 10.0,
                 "min": 10.0,
                 "start": int(now.timestamp() * 1000),
-                "state": None,
-                "sum": None,
             }
         ],
     }
@@ -1993,8 +1967,6 @@ async def test_change_statistics_unit(
                 "mean": 10000.0,
                 "min": 10000.0,
                 "start": int(now.timestamp() * 1000),
-                "state": None,
-                "sum": None,
             }
         ],
     }
@@ -2036,8 +2008,6 @@ async def test_change_statistics_unit_errors(
                 "mean": 10.0,
                 "min": 10.0,
                 "start": int(now.timestamp() * 1000),
-                "state": None,
-                "sum": None,
             }
         ],
     }
@@ -2558,15 +2528,14 @@ async def test_import_statistics(
     assert response["result"] is None
 
     await async_wait_recording_done(hass)
-    stats = statistics_during_period(hass, zero, period="hour")
+    stats = statistics_during_period(
+        hass, zero, period="hour", statistic_ids={statistic_id}
+    )
    assert stats == {
         statistic_id: [
             {
                 "start": period1.timestamp(),
                 "end": (period1 + timedelta(hours=1)).timestamp(),
-                "max": None,
-                "mean": None,
-                "min": None,
                 "last_reset": None,
                 "state": pytest.approx(0.0),
                 "sum": pytest.approx(2.0),
@@ -2574,9 +2543,6 @@ async def test_import_statistics(
             {
                 "start": period2.timestamp(),
                 "end": (period2 + timedelta(hours=1)).timestamp(),
-                "max": None,
-                "mean": None,
-                "min": None,
                 "last_reset": None,
                 "state": pytest.approx(1.0),
                 "sum": pytest.approx(3.0),
@@ -2622,9 +2588,6 @@ async def test_import_statistics(
             {
                 "start": period2.timestamp(),
                 "end": (period2 + timedelta(hours=1)).timestamp(),
-                "max": None,
-                "mean": None,
-                "min": None,
                 "last_reset": None,
                 "state": pytest.approx(1.0),
                 "sum": pytest.approx(3.0),
@@ -2653,15 +2616,14 @@ async def test_import_statistics(
     assert response["result"] is None
 
     await async_wait_recording_done(hass)
-    stats = statistics_during_period(hass, zero, period="hour")
+    stats = statistics_during_period(
+        hass, zero, period="hour", statistic_ids={statistic_id}
+    )
     assert stats == {
         statistic_id: [
             {
                 "start": period1.timestamp(),
                 "end": (period1 + timedelta(hours=1)).timestamp(),
-                "max": None,
-                "mean": None,
-                "min": None,
                 "last_reset": None,
                 "state": pytest.approx(5.0),
                 "sum": pytest.approx(6.0),
@@ -2669,9 +2631,6 @@ async def test_import_statistics(
             {
                 "start": period2.timestamp(),
                 "end": (period2 + timedelta(hours=1)).timestamp(),
-                "max": None,
-                "mean": None,
-                "min": None,
                 "last_reset": None,
                 "state": pytest.approx(1.0),
                 "sum": pytest.approx(3.0),
@@ -2703,15 +2662,14 @@ async def test_import_statistics(
     assert response["result"] is None
 
     await async_wait_recording_done(hass)
-    stats = statistics_during_period(hass, zero, period="hour")
+    stats = statistics_during_period(
+        hass, zero, period="hour", statistic_ids={statistic_id}
+    )
     assert stats == {
         statistic_id: [
             {
                 "start": period1.timestamp(),
                 "end": (period1 + timedelta(hours=1)).timestamp(),
-                "max": pytest.approx(1.0),
-                "mean": pytest.approx(2.0),
-                "min": pytest.approx(3.0),
                 "last_reset": None,
                 "state": pytest.approx(4.0),
                 "sum": pytest.approx(5.0),
@@ -2719,9 +2677,6 @@ async def test_import_statistics(
             {
                 "start": period2.timestamp(),
                 "end": (period2 + timedelta(hours=1)).timestamp(),
-                "max": None,
-                "mean": None,
-                "min": None,
                 "last_reset": None,
                 "state": pytest.approx(1.0),
                 "sum": pytest.approx(3.0),
@@ -47,9 +47,6 @@ async def test_async_setup_entry(recorder_mock: Recorder, hass: HomeAssistant) -
     for k, stat in enumerate(stats[statistic_id]):
         assert stat["start"] == dt_util.parse_datetime(data[k]["from"]).timestamp()
         assert stat["state"] == data[k][key]
-        assert stat["mean"] is None
-        assert stat["min"] is None
-        assert stat["max"] is None
         assert stat["last_reset"] is None
 
         _sum += data[k][key]