Fix caching of latest short term stats after insertion of external stats (#101490)
parent 948bbdd2bf
commit 26c7ba38d0
@@ -1924,7 +1924,13 @@ def get_latest_short_term_statistics(
             for metadata_id in missing_metadata_ids
             if (
                 latest_id := cache_latest_short_term_statistic_id_for_metadata_id(
-                    run_cache, session, metadata_id
+                    # orm_rows=False is used here because we are in
+                    # a read-only session, and there will never be
+                    # any pending inserts in the session.
+                    run_cache,
+                    session,
+                    metadata_id,
+                    orm_rows=False,
                 )
             )
             is not None
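
The orm_rows=False comment above relies on standard SQLAlchemy behaviour rather than anything recorder-specific. Below is a minimal, generic sketch of that behaviour (the Stat model, table and in-memory engine are made up, and it is an assumption that orm_rows ultimately chooses between connection-level and ORM-level execution; the diff only shows the flag being threaded through): a Core connection execute never autoflushes, so it cannot see rows still pending in the Session, while Session.execute autoflushes them first.

# Generic SQLAlchemy illustration, not recorder code: the Stat model is made up.
from sqlalchemy import create_engine, select
from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column


class Base(DeclarativeBase):
    pass


class Stat(Base):
    __tablename__ = "stat"
    id: Mapped[int] = mapped_column(primary_key=True)
    metadata_id: Mapped[int]


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(Stat(metadata_id=1))  # pending: added but not yet flushed

    stmt = select(Stat.id).where(Stat.metadata_id == 1)

    # Connection-level execution does not autoflush, so the pending row is
    # invisible -- harmless in a read-only session that never has pending inserts.
    assert session.connection().execute(stmt).first() is None

    # ORM-level execution autoflushes the pending INSERT first, so the row
    # added in this same session is returned.
    assert session.execute(stmt).first() is not None
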
@@ -2310,8 +2316,14 @@ def _import_statistics_with_session(
         # We just inserted new short term statistics, so we need to update the
         # ShortTermStatisticsRunCache with the latest id for the metadata_id
         run_cache = get_short_term_statistics_run_cache(instance.hass)
+        #
+        # Because we are in the same session and we want to read rows
+        # that have not been flushed yet, we need to pass orm_rows=True
+        # to cache_latest_short_term_statistic_id_for_metadata_id
+        # to ensure that it gets the rows that were just inserted
+        #
         cache_latest_short_term_statistic_id_for_metadata_id(
-            run_cache, session, metadata_id
+            run_cache, session, metadata_id, orm_rows=True
         )

     return True
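
The ordering this hunk depends on: the new short term rows are added to the session first, and the cache refresh then runs in that same, not-yet-committed session, so the lookup must be able to see unflushed rows. A rough stand-in sketch of that refresh step, using a plain dict in place of ShortTermStatisticsRunCache and a hypothetical refresh_latest_id helper (the real cache API and query are not shown in this diff):

# Stand-in sketch only: the dict plays the role of ShortTermStatisticsRunCache and
# refresh_latest_id is hypothetical; the selected column is an assumption.
from sqlalchemy import select
from sqlalchemy.orm import Session

from homeassistant.components.recorder.db_schema import StatisticsShortTerm

latest_id_by_metadata_id: dict[int, int] = {}


def refresh_latest_id(session: Session, metadata_id: int, orm_rows: bool) -> int | None:
    """Remember the newest StatisticsShortTerm.id for metadata_id."""
    stmt = (
        select(StatisticsShortTerm.id)
        .where(StatisticsShortTerm.metadata_id == metadata_id)
        .order_by(StatisticsShortTerm.start_ts.desc())
        .limit(1)
    )
    # ORM-level execution (orm_rows=True) autoflushes pending inserts from this
    # same session; connection-level execution (orm_rows=False) does not.
    executor = session.execute if orm_rows else session.connection().execute
    if (row := executor(stmt).first()) is None:
        return None
    latest_id_by_metadata_id[metadata_id] = row.id
    return row.id
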
@@ -2326,7 +2338,10 @@ def get_short_term_statistics_run_cache(


 def cache_latest_short_term_statistic_id_for_metadata_id(
-    run_cache: ShortTermStatisticsRunCache, session: Session, metadata_id: int
+    run_cache: ShortTermStatisticsRunCache,
+    session: Session,
+    metadata_id: int,
+    orm_rows: bool,
 ) -> int | None:
     """Cache the latest short term statistic for a given metadata_id.

@@ -2339,7 +2354,11 @@ def cache_latest_short_term_statistic_id_for_metadata_id(
         execute_stmt_lambda_element(
             session,
             _find_latest_short_term_statistic_for_metadata_id_stmt(metadata_id),
-            orm_rows=False,
+            orm_rows=orm_rows
+            # _import_statistics_with_session needs to be able
+            # to read back the rows it just inserted without
+            # a flush so we have to pass orm_rows so we get
+            # back the latest data.
         ),
     ):
         id_: int = latest[0].id
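
The statement executed here comes from _find_latest_short_term_statistic_for_metadata_id_stmt, which is not part of this diff. For readers unfamiliar with the recorder's lambda-statement pattern, a guessed sketch of its likely shape follows (the selected column is an assumption; only the helper it mimics is named in the code above). The lambda_stmt wrapper turns closure variables such as metadata_id into bound parameters, so the compiled statement can be cached and reused across metadata ids.

# Guessed shape, not taken from this diff: only the query intent (newest short
# term row for one metadata_id) is implied by the surrounding code.
from sqlalchemy import lambda_stmt, select
from sqlalchemy.sql.lambdas import StatementLambdaElement

from homeassistant.components.recorder.db_schema import StatisticsShortTerm


def find_latest_short_term_stmt(metadata_id: int) -> StatementLambdaElement:
    return lambda_stmt(
        lambda: select(StatisticsShortTerm.id)
        .where(StatisticsShortTerm.metadata_id == metadata_id)
        .order_by(StatisticsShortTerm.start_ts.desc())
        .limit(1)
    )
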
@@ -14,6 +14,7 @@ from homeassistant.components.recorder.db_schema import Statistics, StatisticsSh
 from homeassistant.components.recorder.statistics import (
     async_add_external_statistics,
     get_last_statistics,
     get_latest_short_term_statistics,
     get_metadata,
+    get_short_term_statistics_run_cache,
     list_statistic_ids,
@@ -635,6 +636,22 @@ async def test_statistic_during_period(
         "change": (imported_stats_5min[-1]["sum"] - imported_stats_5min[0]["sum"])
         * 1000,
     }
+    stats = get_latest_short_term_statistics(
+        hass, {"sensor.test"}, {"last_reset", "max", "mean", "min", "state", "sum"}
+    )
+    start = imported_stats_5min[-1]["start"].timestamp()
+    end = start + (5 * 60)
+    assert stats == {
+        "sensor.test": [
+            {
+                "end": end,
+                "last_reset": None,
+                "start": start,
+                "state": None,
+                "sum": 38.0,
+            }
+        ]
+    }


 @pytest.mark.freeze_time(datetime.datetime(2022, 10, 21, 7, 25, tzinfo=datetime.UTC))