Correct sum statistics when only last_reset has changed (#55498)

Co-authored-by: Paulus Schoutsen <balloob@gmail.com>
pull/55509/head
Erik Montnemery 2021-09-01 06:30:52 +02:00 committed by GitHub
parent 9e41a37284
commit 93c086d830
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
2 changed files with 137 additions and 14 deletions

View File

@ -282,6 +282,21 @@ def reset_detected(
return state < 0.9 * previous_state
def _wanted_statistics(
    entities: list[tuple[str, str, str | None]]
) -> dict[str, set[str]]:
    """Map each entity_id to the set of statistics to compile for it.

    The per-device-class override table for the entity's state class is
    consulted first; entities whose device class has no override fall back
    to the defaults for that state class.
    """
    return {
        entity_id: DEVICE_CLASS_STATISTICS[state_class].get(
            device_class, DEFAULT_STATISTICS[state_class]
        )
        for entity_id, state_class, device_class in entities
    }
def compile_statistics( # noqa: C901
hass: HomeAssistant, start: datetime.datetime, end: datetime.datetime
) -> dict:
@ -293,17 +308,32 @@ def compile_statistics( # noqa: C901
entities = _get_entities(hass)
wanted_statistics = _wanted_statistics(entities)
# Get history between start and end
history_list = history.get_significant_states( # type: ignore
hass, start - datetime.timedelta.resolution, end, [i[0] for i in entities]
)
entities_full_history = [i[0] for i in entities if "sum" in wanted_statistics[i[0]]]
history_list = {}
if entities_full_history:
history_list = history.get_significant_states( # type: ignore
hass,
start - datetime.timedelta.resolution,
end,
entity_ids=entities_full_history,
significant_changes_only=False,
)
entities_significant_history = [
i[0] for i in entities if "sum" not in wanted_statistics[i[0]]
]
if entities_significant_history:
_history_list = history.get_significant_states( # type: ignore
hass,
start - datetime.timedelta.resolution,
end,
entity_ids=entities_significant_history,
)
history_list = {**history_list, **_history_list}
for entity_id, state_class, device_class in entities:
if device_class in DEVICE_CLASS_STATISTICS[state_class]:
wanted_statistics = DEVICE_CLASS_STATISTICS[state_class][device_class]
else:
wanted_statistics = DEFAULT_STATISTICS[state_class]
if entity_id not in history_list:
continue
@ -336,21 +366,21 @@ def compile_statistics( # noqa: C901
# Set meta data
result[entity_id]["meta"] = {
"unit_of_measurement": unit,
"has_mean": "mean" in wanted_statistics,
"has_sum": "sum" in wanted_statistics,
"has_mean": "mean" in wanted_statistics[entity_id],
"has_sum": "sum" in wanted_statistics[entity_id],
}
# Make calculations
stat: dict = {}
if "max" in wanted_statistics:
if "max" in wanted_statistics[entity_id]:
stat["max"] = max(*itertools.islice(zip(*fstates), 1))
if "min" in wanted_statistics:
if "min" in wanted_statistics[entity_id]:
stat["min"] = min(*itertools.islice(zip(*fstates), 1))
if "mean" in wanted_statistics:
if "mean" in wanted_statistics[entity_id]:
stat["mean"] = _time_weighted_average(fstates, start, end)
if "sum" in wanted_statistics:
if "sum" in wanted_statistics[entity_id]:
last_reset = old_last_reset = None
new_state = old_state = None
_sum = 0

View File

@ -278,6 +278,77 @@ def test_compile_hourly_sum_statistics_amount(
assert "Detected new cycle for sensor.test1, value dropped" not in caplog.text
@pytest.mark.parametrize("state_class", ["measurement"])
@pytest.mark.parametrize(
    "device_class,unit,native_unit,factor",
    [
        ("energy", "kWh", "kWh", 1),
        ("energy", "Wh", "kWh", 1 / 1000),
        ("monetary", "EUR", "EUR", 1),
        ("monetary", "SEK", "SEK", 1),
        # NOTE(review): the gas rows were garbled in transit — the unit strings
        # were lost/mojibake'd. Restored as m³/ft³; 0.0283168466 is the exact
        # ft³ → m³ conversion factor, which fixes native_unit as "m³".
        ("gas", "m³", "m³", 1),
        ("gas", "ft³", "m³", 0.0283168466),
    ],
)
def test_compile_hourly_sum_statistics_amount_reset_every_state_change(
    hass_recorder, caplog, state_class, device_class, unit, native_unit, factor
):
    """Test compiling hourly sum statistics when last_reset changes on every update.

    Each recorded state carries a fresh ``last_reset``, so every state is a new
    metering cycle: the compiled sum must accumulate every value after the first
    (sum(seq) - seq[0]) and the reported state must be the last raw value.
    """
    zero = dt_util.utcnow()
    hass = hass_recorder()
    recorder = hass.data[DATA_INSTANCE]
    setup_component(hass, "sensor", {})
    attributes = {
        "device_class": device_class,
        "state_class": state_class,
        "unit_of_measurement": unit,
        "last_reset": None,
    }
    seq = [10, 15, 15, 15, 20, 20, 20, 10]
    # Make sure the sequence has consecutive equal states — these would be
    # dropped by significant_changes_only, which the sum compiler must not use.
    assert seq[1] == seq[2] == seq[3]

    states = {"sensor.test1": []}
    one = zero
    for i in range(len(seq)):
        one = one + timedelta(minutes=1)
        # Each call stamps a new last_reset (== the recorded timestamp),
        # simulating a meter that resets on every state change.
        _states = record_meter_state(
            hass, one, "sensor.test1", attributes, seq[i : i + 1]
        )
        states["sensor.test1"].extend(_states["sensor.test1"])

    hist = history.get_significant_states(
        hass,
        zero - timedelta.resolution,
        one + timedelta.resolution,
        significant_changes_only=False,
    )
    assert dict(states)["sensor.test1"] == dict(hist)["sensor.test1"]

    recorder.do_adhoc_statistics(period="hourly", start=zero)
    wait_recording_done(hass)
    statistic_ids = list_statistic_ids(hass)
    assert statistic_ids == [
        {"statistic_id": "sensor.test1", "unit_of_measurement": native_unit}
    ]
    stats = statistics_during_period(hass, zero)
    assert stats == {
        "sensor.test1": [
            {
                "statistic_id": "sensor.test1",
                "start": process_timestamp_to_utc_isoformat(zero),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": process_timestamp_to_utc_isoformat(one),
                "state": approx(factor * seq[7]),
                # Every state after the first starts a new cycle, so the
                # whole tail of the sequence is accumulated into the sum.
                "sum": approx(factor * (sum(seq) - seq[0])),
            },
        ]
    }
    assert "Error while processing event StatisticsTask" not in caplog.text
@pytest.mark.parametrize(
"device_class,unit,native_unit,factor",
[
@ -1309,6 +1380,28 @@ def record_meter_states(hass, zero, entity_id, _attributes, seq):
return four, eight, states
def record_meter_state(hass, zero, entity_id, _attributes, seq):
    """Record test state.

    We inject a state update for meter sensor.
    """

    def _push(value):
        """Write one state for entity_id and wait until the recorder commits it."""
        hass.states.set(entity_id, value, attributes=attrs)
        wait_recording_done(hass)
        return hass.states.get(entity_id)

    # Copy the attribute template and stamp last_reset with the injection time.
    attrs = dict(_attributes)
    attrs["last_reset"] = zero.isoformat()

    # Freeze the recorder's clock at ``zero`` so the state is recorded there.
    with patch("homeassistant.components.recorder.dt_util.utcnow", return_value=zero):
        recorded = [_push(seq[0])]
    return {entity_id: recorded}
def record_states_partially_unavailable(hass, zero, entity_id, attributes):
"""Record some test states.