2020-01-31 22:47:40 +00:00
|
|
|
"""Helpers to help coordinate updates."""
|
2021-03-17 17:34:19 +00:00
|
|
|
from __future__ import annotations
|
|
|
|
|
2020-01-31 22:47:40 +00:00
|
|
|
import asyncio
|
|
|
|
from datetime import datetime, timedelta
|
|
|
|
import logging
|
|
|
|
from time import monotonic
|
2021-03-17 17:34:19 +00:00
|
|
|
from typing import Any, Awaitable, Callable, Generic, TypeVar
|
2020-07-30 15:04:00 +00:00
|
|
|
import urllib.error
|
2020-01-31 22:47:40 +00:00
|
|
|
|
2020-03-04 16:05:46 +00:00
|
|
|
import aiohttp
|
2020-07-30 15:04:00 +00:00
|
|
|
import requests
|
2020-03-04 16:05:46 +00:00
|
|
|
|
2021-04-10 05:41:29 +00:00
|
|
|
from homeassistant import config_entries
|
2021-01-21 23:23:50 +00:00
|
|
|
from homeassistant.const import EVENT_HOMEASSISTANT_STOP
|
|
|
|
from homeassistant.core import CALLBACK_TYPE, Event, HassJob, HomeAssistant, callback
|
2021-04-10 05:41:29 +00:00
|
|
|
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
|
2020-08-30 10:02:37 +00:00
|
|
|
from homeassistant.helpers import entity, event
|
2020-01-31 22:47:40 +00:00
|
|
|
from homeassistant.util.dt import utcnow
|
|
|
|
|
|
|
|
from .debounce import Debouncer
|
|
|
|
|
2020-02-01 16:14:28 +00:00
|
|
|
# Debouncer cooldown (seconds) used by DataUpdateCoordinator for batched
# async_request_refresh() calls when no custom debouncer is supplied.
REQUEST_REFRESH_DEFAULT_COOLDOWN = 10
# Run the first debounced refresh immediately by default.
REQUEST_REFRESH_DEFAULT_IMMEDIATE = True

# Type of the data managed by a DataUpdateCoordinator instance.
T = TypeVar("T")

# mypy: disallow-any-generics
|
|
|
|
|
2020-01-31 22:47:40 +00:00
|
|
|
|
|
|
|
class UpdateFailed(Exception):
    """Error raised by an update method when fetching the latest data fails."""
|
2020-07-30 15:04:00 +00:00
|
|
|
class DataUpdateCoordinator(Generic[T]):
    """Class to manage fetching data from single endpoint."""

    def __init__(
        self,
        hass: HomeAssistant,
        logger: logging.Logger,
        *,
        name: str,
        update_interval: timedelta | None = None,
        update_method: Callable[[], Awaitable[T]] | None = None,
        request_refresh_debouncer: Debouncer | None = None,
    ) -> None:
        """Initialize global data updater.

        hass: Home Assistant instance.
        logger: logger used for all success/failure reporting.
        name: human-readable name used in log messages.
        update_interval: period between scheduled refreshes; None disables
            interval scheduling entirely.
        update_method: awaitable called by the default _async_update_data.
        request_refresh_debouncer: optional custom debouncer for
            async_request_refresh; a default one is created if omitted.
        """
        self.hass = hass
        self.logger = logger
        self.name = name
        self.update_method = update_method
        self.update_interval = update_interval

        # Latest fetched data; None until the first successful refresh.
        self.data: T | None = None

        # Callbacks invoked after every refresh attempt.
        self._listeners: list[CALLBACK_TYPE] = []
        self._job = HassJob(self._handle_refresh_interval)
        # Cancel handle for the currently scheduled interval refresh.
        self._unsub_refresh: CALLBACK_TYPE | None = None
        self._request_refresh_task: asyncio.TimerHandle | None = None
        self.last_update_success = True
        # Exception captured from the most recent failed refresh.
        self.last_exception: Exception | None = None

        if request_refresh_debouncer is None:
            request_refresh_debouncer = Debouncer(
                hass,
                logger,
                cooldown=REQUEST_REFRESH_DEFAULT_COOLDOWN,
                immediate=REQUEST_REFRESH_DEFAULT_IMMEDIATE,
                function=self.async_refresh,
            )
        else:
            # Point a caller-supplied debouncer at our refresh method.
            request_refresh_debouncer.function = self.async_refresh

        self._debounced_refresh = request_refresh_debouncer

        # Stop interval refreshes when Home Assistant shuts down.
        self.hass.bus.async_listen_once(
            EVENT_HOMEASSISTANT_STOP, self._async_stop_refresh
        )

    @callback
    def async_add_listener(self, update_callback: CALLBACK_TYPE) -> Callable[[], None]:
        """Listen for data updates.

        Returns a callable that removes the listener again.
        """
        schedule_refresh = not self._listeners

        self._listeners.append(update_callback)

        # This is the first listener, set up interval.
        if schedule_refresh:
            self._schedule_refresh()

        @callback
        def remove_listener() -> None:
            """Remove update listener."""
            self.async_remove_listener(update_callback)

        return remove_listener

    @callback
    def async_remove_listener(self, update_callback: CALLBACK_TYPE) -> None:
        """Remove data update."""
        self._listeners.remove(update_callback)

        # Last listener gone: cancel any pending scheduled refresh.
        if not self._listeners and self._unsub_refresh:
            self._unsub_refresh()
            self._unsub_refresh = None

    @callback
    def _schedule_refresh(self) -> None:
        """Schedule a refresh."""
        # No interval configured means scheduling is disabled.
        if self.update_interval is None:
            return

        # Replace any previously scheduled refresh.
        if self._unsub_refresh:
            self._unsub_refresh()
            self._unsub_refresh = None

        # We _floor_ utcnow to create a schedule on a rounded second,
        # minimizing the time between the point and the real activation.
        # That way we obtain a constant update frequency,
        # as long as the update process takes less than a second
        self._unsub_refresh = event.async_track_point_in_utc_time(
            self.hass,
            self._job,
            utcnow().replace(microsecond=0) + self.update_interval,
        )

    async def _handle_refresh_interval(self, _now: datetime) -> None:
        """Handle a refresh interval occurrence."""
        self._unsub_refresh = None
        await self.async_refresh()

    async def async_request_refresh(self) -> None:
        """Request a refresh.

        Refresh will wait a bit to see if it can batch them.
        """
        await self._debounced_refresh.async_call()

    async def _async_update_data(self) -> T | None:
        """Fetch the latest data from the source.

        Raises NotImplementedError when no update_method was provided and
        this method was not overridden by a subclass.
        """
        if self.update_method is None:
            raise NotImplementedError("Update method not implemented")
        return await self.update_method()

    async def async_config_entry_first_refresh(self) -> None:
        """Refresh data for the first time when a config entry is setup.

        Will automatically raise ConfigEntryNotReady if the refresh
        fails. Additionally logging is handled by config entry setup
        to ensure that multiple retries do not cause log spam.
        """
        await self._async_refresh(log_failures=False, raise_on_auth_failed=True)
        if self.last_update_success:
            return
        # Chain the captured failure so config entry setup can log the cause.
        ex = ConfigEntryNotReady()
        ex.__cause__ = self.last_exception
        raise ex

    async def async_refresh(self) -> None:
        """Refresh data and log errors."""
        await self._async_refresh(log_failures=True)

    async def _async_refresh(
        self, log_failures: bool = True, raise_on_auth_failed: bool = False
    ) -> None:
        """Refresh data.

        log_failures: log errors only on the first failure after a success,
            to avoid spamming the log on repeated failures.
        raise_on_auth_failed: re-raise ConfigEntryAuthFailed instead of
            starting a reauth flow.
        """
        # Cancel any scheduled refresh; it is re-scheduled in `finally`.
        if self._unsub_refresh:
            self._unsub_refresh()
            self._unsub_refresh = None

        # A refresh is happening now, so drop any pending debounced request.
        self._debounced_refresh.async_cancel()
        start = monotonic()
        auth_failed = False

        try:
            self.data = await self._async_update_data()

        except (asyncio.TimeoutError, requests.exceptions.Timeout) as err:
            self.last_exception = err
            if self.last_update_success:
                if log_failures:
                    self.logger.error("Timeout fetching %s data", self.name)
                self.last_update_success = False

        except (aiohttp.ClientError, requests.exceptions.RequestException) as err:
            self.last_exception = err
            if self.last_update_success:
                if log_failures:
                    self.logger.error("Error requesting %s data: %s", self.name, err)
                self.last_update_success = False

        except urllib.error.URLError as err:
            self.last_exception = err
            if self.last_update_success:
                if log_failures:
                    # urllib reports timeouts via the reason attribute.
                    if err.reason == "timed out":
                        self.logger.error("Timeout fetching %s data", self.name)
                    else:
                        self.logger.error(
                            "Error requesting %s data: %s", self.name, err
                        )
                self.last_update_success = False

        except UpdateFailed as err:
            self.last_exception = err
            if self.last_update_success:
                if log_failures:
                    self.logger.error("Error fetching %s data: %s", self.name, err)
                self.last_update_success = False

        except ConfigEntryAuthFailed as err:
            auth_failed = True
            self.last_exception = err
            if self.last_update_success:
                if log_failures:
                    self.logger.error(
                        "Authentication failed while fetching %s data: %s",
                        self.name,
                        err,
                    )
                self.last_update_success = False
            if raise_on_auth_failed:
                raise

            # Kick off a reauth flow on the owning config entry, if any.
            config_entry = config_entries.current_entry.get()
            if config_entry:
                config_entry.async_start_reauth(self.hass)
        except NotImplementedError as err:
            self.last_exception = err
            # Programming error: always propagate, never swallow.
            raise err

        except Exception as err:  # pylint: disable=broad-except
            self.last_exception = err
            self.last_update_success = False
            if log_failures:
                self.logger.exception(
                    "Unexpected error fetching %s data: %s", self.name, err
                )

        else:
            if not self.last_update_success:
                self.last_update_success = True
                self.logger.info("Fetching %s data recovered", self.name)

        finally:
            self.logger.debug(
                "Finished fetching %s data in %.3f seconds",
                self.name,
                monotonic() - start,
            )
            # Re-schedule even on failure, but not after an auth failure and
            # only while someone is still listening.
            if not auth_failed and self._listeners:
                self._schedule_refresh()

        # Not reached when an exception propagated above, so listeners are
        # not notified on auth failure / NotImplementedError.
        for update_callback in self._listeners:
            update_callback()

    @callback
    def async_set_updated_data(self, data: T) -> None:
        """Manually update data, notify listeners and reset refresh interval."""
        if self._unsub_refresh:
            self._unsub_refresh()
            self._unsub_refresh = None

        # A fresh value just arrived; drop any pending debounced refresh.
        self._debounced_refresh.async_cancel()

        self.data = data
        self.last_update_success = True
        self.logger.debug(
            "Manually updated %s data",
            self.name,
        )

        if self._listeners:
            self._schedule_refresh()

        for update_callback in self._listeners:
            update_callback()

    @callback
    def _async_stop_refresh(self, _: Event) -> None:
        """Stop refreshing when Home Assistant is stopping."""
        # Clearing the interval prevents _schedule_refresh from re-arming.
        self.update_interval = None
        if self._unsub_refresh:
            self._unsub_refresh()
            self._unsub_refresh = None
2020-08-30 10:02:37 +00:00
|
|
|
|
|
|
|
class CoordinatorEntity(entity.Entity):
    """Base entity class kept up to date by a DataUpdateCoordinator."""

    def __init__(self, coordinator: DataUpdateCoordinator[Any]) -> None:
        """Create the entity with a DataUpdateCoordinator."""
        self.coordinator = coordinator

    @property
    def should_poll(self) -> bool:
        """Disable polling; the coordinator pushes updates to the entity."""
        return False

    @property
    def available(self) -> bool:
        """Report availability based on the coordinator's last refresh."""
        return self.coordinator.last_update_success

    async def async_added_to_hass(self) -> None:
        """Subscribe to coordinator updates when added to hass."""
        await super().async_added_to_hass()
        # The listener is unsubscribed automatically on entity removal.
        self.async_on_remove(
            self.coordinator.async_add_listener(self._handle_coordinator_update)
        )

    @callback
    def _handle_coordinator_update(self) -> None:
        """Write the entity state when the coordinator has new data."""
        self.async_write_ha_state()

    async def async_update(self) -> None:
        """Update the entity.

        Only used by the generic entity update service.
        """
        # Ignore manual update requests if the entity is disabled
        if self.enabled:
            await self.coordinator.async_request_refresh()