"""Statistics helper."""
from __future__ import annotations

from collections import defaultdict
from datetime import datetime, timedelta
from itertools import groupby
import logging
from typing import TYPE_CHECKING, Any, Callable

from sqlalchemy import bindparam
from sqlalchemy.ext import baked
from sqlalchemy.orm.scoping import scoped_session

from homeassistant.const import PRESSURE_PA, TEMP_CELSIUS
from homeassistant.core import Event, HomeAssistant, callback
from homeassistant.helpers import entity_registry
import homeassistant.util.dt as dt_util
import homeassistant.util.pressure as pressure_util
import homeassistant.util.temperature as temperature_util
from homeassistant.util.unit_system import UnitSystem

from .const import DOMAIN
from .models import (
    StatisticMetaData,
    Statistics,
    StatisticsMeta,
    process_timestamp_to_utc_isoformat,
)
from .util import execute, retryable_database_job, session_scope

if TYPE_CHECKING:
    from . import Recorder

QUERY_STATISTICS = [
    Statistics.metadata_id,
    Statistics.start,
    Statistics.mean,
    Statistics.min,
    Statistics.max,
    Statistics.last_reset,
    Statistics.state,
    Statistics.sum,
]

QUERY_STATISTIC_META = [
    StatisticsMeta.id,
    StatisticsMeta.statistic_id,
    StatisticsMeta.unit_of_measurement,
]

STATISTICS_BAKERY = "recorder_statistics_bakery"
STATISTICS_META_BAKERY = "recorder_statistics_meta_bakery"

# Convert pressure and temperature statistics from the native unit used for
# statistics to the units configured by the user
UNIT_CONVERSIONS = {
    PRESSURE_PA: lambda x, units: pressure_util.convert(
        x, PRESSURE_PA, units.pressure_unit
    )
    if x is not None
    else None,
    TEMP_CELSIUS: lambda x, units: temperature_util.convert(
        x, TEMP_CELSIUS, units.temperature_unit
    )
    if x is not None
    else None,
}
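# Illustrative use of UNIT_CONVERSIONS (example values, not part of the module):
# each converter takes the raw statistic value and the configured unit system,
# and passes None through unchanged, e.g.
#
#   convert = UNIT_CONVERSIONS[TEMP_CELSIUS]
#   convert(21.5, hass.config.units)  # 21.5 (metric) or ~70.7 °F (imperial)
#   convert(None, hass.config.units)  # None
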
_LOGGER = logging.getLogger(__name__)

def async_setup(hass: HomeAssistant) -> None:
    """Set up the statistics hooks."""
    hass.data[STATISTICS_BAKERY] = baked.bakery()
    hass.data[STATISTICS_META_BAKERY] = baked.bakery()

    def entity_id_changed(event: Event) -> None:
        """Handle entity_id changed."""
        old_entity_id = event.data["old_entity_id"]
        entity_id = event.data["entity_id"]
        with session_scope(hass=hass) as session:
            # Combine the conditions with & so both end up in the SQL WHERE
            # clause; Python's "and" would silently drop the source check.
            session.query(StatisticsMeta).filter(
                (StatisticsMeta.statistic_id == old_entity_id)
                & (StatisticsMeta.source == DOMAIN)
            ).update({StatisticsMeta.statistic_id: entity_id})

    @callback
    def entity_registry_changed_filter(event: Event) -> bool:
        """Handle entity_id changed filter."""
        if event.data["action"] != "update" or "old_entity_id" not in event.data:
            return False

        return True

    if hass.is_running:
        hass.bus.async_listen(
            entity_registry.EVENT_ENTITY_REGISTRY_UPDATED,
            entity_id_changed,
            event_filter=entity_registry_changed_filter,
        )
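# Illustrative registry event handled by async_setup above (assumed payload
# shape, matching the keys checked in entity_registry_changed_filter):
#
#   event.data == {
#       "action": "update",
#       "entity_id": "sensor.outdoor_temp",
#       "old_entity_id": "sensor.outside_temp",
#   }
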
def get_start_time() -> datetime:
    """Return start time."""
    last_hour = dt_util.utcnow() - timedelta(hours=1)
    start = last_hour.replace(minute=0, second=0, microsecond=0)
    return start
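# Worked example: if utcnow() is 2021-07-14 15:37:12 UTC, get_start_time()
# returns 2021-07-14 14:00:00 UTC - the start of the last completed hour.
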
def _get_metadata_ids(
    hass: HomeAssistant, session: scoped_session, statistic_ids: list[str]
) -> list[str]:
    """Resolve metadata_id for a list of statistic_ids."""
    baked_query = hass.data[STATISTICS_META_BAKERY](
        lambda session: session.query(*QUERY_STATISTIC_META)
    )
    baked_query += lambda q: q.filter(
        StatisticsMeta.statistic_id.in_(bindparam("statistic_ids"))
    )
    result = execute(baked_query(session).params(statistic_ids=statistic_ids))

    return [id for id, _, _ in result] if result else []

def _get_or_add_metadata_id(
    hass: HomeAssistant,
    session: scoped_session,
    statistic_id: str,
    metadata: StatisticMetaData,
) -> str:
    """Get metadata_id for a statistic_id, add if it doesn't exist."""
    metadata_id = _get_metadata_ids(hass, session, [statistic_id])
    if not metadata_id:
        unit = metadata["unit_of_measurement"]
        has_mean = metadata["has_mean"]
        has_sum = metadata["has_sum"]
        session.add(
            StatisticsMeta.from_meta(DOMAIN, statistic_id, unit, has_mean, has_sum)
        )
        metadata_id = _get_metadata_ids(hass, session, [statistic_id])
    return metadata_id[0]

@retryable_database_job("statistics")
|
2021-07-13 19:21:45 +00:00
|
|
|
def compile_statistics(instance: Recorder, start: datetime) -> bool:
|
2021-05-16 17:23:37 +00:00
|
|
|
"""Compile statistics."""
|
|
|
|
start = dt_util.as_utc(start)
|
|
|
|
end = start + timedelta(hours=1)
|
2021-07-02 11:17:00 +00:00
|
|
|
_LOGGER.debug("Compiling statistics for %s-%s", start, end)
|
2021-05-16 17:23:37 +00:00
|
|
|
platform_stats = []
|
|
|
|
for domain, platform in instance.hass.data[DOMAIN].items():
|
|
|
|
if not hasattr(platform, "compile_statistics"):
|
|
|
|
continue
|
|
|
|
platform_stats.append(platform.compile_statistics(instance.hass, start, end))
|
|
|
|
_LOGGER.debug(
|
|
|
|
"Statistics for %s during %s-%s: %s", domain, start, end, platform_stats[-1]
|
|
|
|
)
|
|
|
|
|
|
|
|
with session_scope(session=instance.get_session()) as session: # type: ignore
|
|
|
|
for stats in platform_stats:
|
|
|
|
for entity_id, stat in stats.items():
|
2021-07-02 11:17:00 +00:00
|
|
|
metadata_id = _get_or_add_metadata_id(
|
|
|
|
instance.hass, session, entity_id, stat["meta"]
|
2021-06-30 11:32:17 +00:00
|
|
|
)
|
2021-07-02 11:17:00 +00:00
|
|
|
session.add(Statistics.from_stats(metadata_id, start, stat["stat"]))
|
2021-05-16 17:23:37 +00:00
|
|
|
|
|
|
|
return True
|
|
|
|
|
|
|
|
|
2021-07-13 19:21:45 +00:00
|
|
|
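# Illustrative shape of a single platform's compile_statistics() result as
# consumed above (assumed example values, based on the "meta"/"stat" keys):
#
#   {
#       "sensor.energy": {
#           "meta": {"unit_of_measurement": "kWh", "has_mean": False, "has_sum": True},
#           "stat": {"last_reset": None, "state": 42.0, "sum": 42.0},
#       },
#   }
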
def _get_metadata(
    hass: HomeAssistant,
    session: scoped_session,
    statistic_ids: list[str] | None,
    statistic_type: str | None,
) -> dict[str, dict[str, str]]:
    """Fetch metadata."""

    def _meta(metas: list, wanted_metadata_id: str) -> dict[str, str] | None:
        meta = None
        for metadata_id, statistic_id, unit in metas:
            if metadata_id == wanted_metadata_id:
                meta = {"unit_of_measurement": unit, "statistic_id": statistic_id}
        return meta

    baked_query = hass.data[STATISTICS_META_BAKERY](
        lambda session: session.query(*QUERY_STATISTIC_META)
    )
    if statistic_ids is not None:
        baked_query += lambda q: q.filter(
            StatisticsMeta.statistic_id.in_(bindparam("statistic_ids"))
        )
    if statistic_type == "mean":
        baked_query += lambda q: q.filter(StatisticsMeta.has_mean.isnot(False))
    elif statistic_type == "sum":
        baked_query += lambda q: q.filter(StatisticsMeta.has_sum.isnot(False))
    elif statistic_type is not None:
        return {}
    result = execute(baked_query(session).params(statistic_ids=statistic_ids))
    if not result:
        return {}

    metadata_ids = [metadata[0] for metadata in result]
    metadata = {}
    for _id in metadata_ids:
        meta = _meta(result, _id)
        if meta:
            metadata[_id] = meta
    return metadata
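# Illustrative return value of _get_metadata (assumed ids and values), keyed
# by metadata_id:
#
#   {1: {"statistic_id": "sensor.outdoor_temp", "unit_of_measurement": "°C"}}
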
def _configured_unit(unit: str, units: UnitSystem) -> str:
    """Return the pressure and temperature units configured by the user."""
    if unit == PRESSURE_PA:
        return units.pressure_unit
    if unit == TEMP_CELSIUS:
        return units.temperature_unit
    return unit

def list_statistic_ids(
    hass: HomeAssistant, statistic_type: str | None = None
) -> list[dict[str, str] | None]:
    """Return statistic_ids and their metadata."""
    units = hass.config.units
    statistic_ids = {}
    with session_scope(hass=hass) as session:
        metadata = _get_metadata(hass, session, None, statistic_type)

        for meta in metadata.values():
            unit = _configured_unit(meta["unit_of_measurement"], units)
            meta["unit_of_measurement"] = unit

        statistic_ids = {
            meta["statistic_id"]: meta["unit_of_measurement"]
            for meta in metadata.values()
        }

    for platform in hass.data[DOMAIN].values():
        if not hasattr(platform, "list_statistic_ids"):
            continue
        platform_statistic_ids = platform.list_statistic_ids(hass, statistic_type)

        for statistic_id, unit in platform_statistic_ids.items():
            unit = _configured_unit(unit, units)
            platform_statistic_ids[statistic_id] = unit

        statistic_ids = {**statistic_ids, **platform_statistic_ids}

    return [
        {"statistic_id": _id, "unit_of_measurement": unit}
        for _id, unit in statistic_ids.items()
    ]
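# Illustrative return value of list_statistic_ids (assumed values), with units
# already mapped through _configured_unit:
#
#   [{"statistic_id": "sensor.outdoor_temp", "unit_of_measurement": "°F"}]
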
def statistics_during_period(
    hass: HomeAssistant,
    start_time: datetime,
    end_time: datetime | None = None,
    statistic_ids: list[str] | None = None,
) -> dict[str, list[dict[str, str]]]:
    """Return statistics during UTC period start_time - end_time."""
    metadata = None
    with session_scope(hass=hass) as session:
        metadata = _get_metadata(hass, session, statistic_ids, None)
        if not metadata:
            return {}

        baked_query = hass.data[STATISTICS_BAKERY](
            lambda session: session.query(*QUERY_STATISTICS)
        )

        baked_query += lambda q: q.filter(Statistics.start >= bindparam("start_time"))

        if end_time is not None:
            baked_query += lambda q: q.filter(Statistics.start < bindparam("end_time"))

        metadata_ids = None
        if statistic_ids is not None:
            baked_query += lambda q: q.filter(
                Statistics.metadata_id.in_(bindparam("metadata_ids"))
            )
            metadata_ids = list(metadata.keys())

        baked_query += lambda q: q.order_by(Statistics.metadata_id, Statistics.start)

        stats = execute(
            baked_query(session).params(
                start_time=start_time, end_time=end_time, metadata_ids=metadata_ids
            )
        )
        if not stats:
            return {}
        return _sorted_statistics_to_dict(hass, stats, statistic_ids, metadata)
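# Illustrative call (assumed entity id); returns a dict keyed by statistic_id,
# as built by _sorted_statistics_to_dict:
#
#   stats = statistics_during_period(
#       hass,
#       start_time=dt_util.utcnow() - timedelta(days=1),
#       statistic_ids=["sensor.outdoor_temp"],
#   )
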
def get_last_statistics(
    hass: HomeAssistant, number_of_stats: int, statistic_id: str
) -> dict[str, list[dict]]:
    """Return the last number_of_stats statistics for a statistic_id."""
    statistic_ids = [statistic_id]
    with session_scope(hass=hass) as session:
        metadata = _get_metadata(hass, session, statistic_ids, None)
        if not metadata:
            return {}

        baked_query = hass.data[STATISTICS_BAKERY](
            lambda session: session.query(*QUERY_STATISTICS)
        )

        baked_query += lambda q: q.filter_by(metadata_id=bindparam("metadata_id"))
        metadata_id = next(iter(metadata.keys()))

        baked_query += lambda q: q.order_by(
            Statistics.metadata_id, Statistics.start.desc()
        )

        baked_query += lambda q: q.limit(bindparam("number_of_stats"))

        stats = execute(
            baked_query(session).params(
                number_of_stats=number_of_stats, metadata_id=metadata_id
            )
        )
        if not stats:
            return {}

        return _sorted_statistics_to_dict(hass, stats, statistic_ids, metadata)

def _sorted_statistics_to_dict(
    hass: HomeAssistant,
    stats: list,
    statistic_ids: list[str] | None,
    metadata: dict[str, dict[str, str]],
) -> dict[str, list[dict]]:
    """Convert SQL results into JSON friendly data structure."""
    result: dict = defaultdict(list)
    units = hass.config.units

    # Set all statistic IDs to empty lists in result set to maintain the order
    if statistic_ids is not None:
        for stat_id in statistic_ids:
            result[stat_id] = []

    # Called in a tight loop so cache the function here
    _process_timestamp_to_utc_isoformat = process_timestamp_to_utc_isoformat

    # Append all statistic entries, and do unit conversion
    for meta_id, group in groupby(stats, lambda stat: stat.metadata_id):  # type: ignore
        unit = metadata[meta_id]["unit_of_measurement"]
        statistic_id = metadata[meta_id]["statistic_id"]
        convert: Callable[[Any, Any], float | None] = UNIT_CONVERSIONS.get(
            unit, lambda x, units: x  # type: ignore
        )
        ent_results = result[meta_id]
        ent_results.extend(
            {
                "statistic_id": statistic_id,
                "start": _process_timestamp_to_utc_isoformat(db_state.start),
                "mean": convert(db_state.mean, units),
                "min": convert(db_state.min, units),
                "max": convert(db_state.max, units),
                "last_reset": _process_timestamp_to_utc_isoformat(db_state.last_reset),
                "state": convert(db_state.state, units),
                "sum": convert(db_state.sum, units),
            }
            for db_state in group
        )

    # Filter out the empty lists if some states had 0 results.
    return {metadata[key]["statistic_id"]: val for key, val in result.items() if val}
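# Illustrative output of _sorted_statistics_to_dict (assumed values):
#
#   {
#       "sensor.outdoor_temp": [
#           {
#               "statistic_id": "sensor.outdoor_temp",
#               "start": "2021-07-14T14:00:00+00:00",
#               "mean": 21.5, "min": 20.0, "max": 23.1,
#               "last_reset": None, "state": None, "sum": None,
#           },
#       ],
#   }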