2019-06-12 16:29:28 +00:00
|
|
|
"""Manage config entries in Home Assistant."""
|
2021-02-12 09:58:20 +00:00
|
|
|
from __future__ import annotations
|
|
|
|
|
2019-03-01 04:27:20 +00:00
|
|
|
import asyncio
|
2021-04-26 17:46:55 +00:00
|
|
|
from collections.abc import Iterable, Mapping
|
2021-04-09 17:14:33 +00:00
|
|
|
from contextvars import ContextVar
|
2021-11-25 01:30:02 +00:00
|
|
|
import dataclasses
|
2021-05-20 17:19:20 +00:00
|
|
|
from enum import Enum
|
2019-02-15 17:30:47 +00:00
|
|
|
import functools
|
2019-12-09 15:42:10 +00:00
|
|
|
import logging
|
2021-01-26 21:19:10 +00:00
|
|
|
from types import MappingProxyType, MethodType
|
2021-11-15 17:05:45 +00:00
|
|
|
from typing import TYPE_CHECKING, Any, Callable, Optional, cast
|
2019-02-22 16:59:43 +00:00
|
|
|
import weakref
|
2018-02-16 22:07:38 +00:00
|
|
|
|
2019-04-11 08:26:36 +00:00
|
|
|
from homeassistant import data_entry_flow, loader
|
2021-12-15 19:53:21 +00:00
|
|
|
from homeassistant.backports.enum import StrEnum
|
2021-04-14 02:16:26 +00:00
|
|
|
from homeassistant.const import EVENT_HOMEASSISTANT_STARTED, EVENT_HOMEASSISTANT_STOP
|
2021-04-04 00:00:22 +00:00
|
|
|
from homeassistant.core import CALLBACK_TYPE, CoreState, HomeAssistant, callback
|
2021-04-10 05:41:29 +00:00
|
|
|
from homeassistant.exceptions import (
|
|
|
|
ConfigEntryAuthFailed,
|
|
|
|
ConfigEntryNotReady,
|
|
|
|
HomeAssistantError,
|
|
|
|
)
|
2021-03-03 18:12:37 +00:00
|
|
|
from homeassistant.helpers import device_registry, entity_registry
|
2019-10-28 20:36:26 +00:00
|
|
|
from homeassistant.helpers.event import Event
|
2021-12-15 19:53:21 +00:00
|
|
|
from homeassistant.helpers.frame import report
|
2021-08-16 21:12:06 +00:00
|
|
|
from homeassistant.helpers.typing import (
|
|
|
|
UNDEFINED,
|
|
|
|
ConfigType,
|
|
|
|
DiscoveryInfoType,
|
|
|
|
UndefinedType,
|
|
|
|
)
|
2019-12-09 15:42:10 +00:00
|
|
|
from homeassistant.setup import async_process_deps_reqs, async_setup_component
|
|
|
|
from homeassistant.util.decorator import Registry
|
2020-08-24 15:21:30 +00:00
|
|
|
import homeassistant.util.uuid as uuid_util
|
2019-07-25 06:08:20 +00:00
|
|
|
|
2021-11-15 17:05:45 +00:00
|
|
|
if TYPE_CHECKING:
|
2021-11-16 11:19:50 +00:00
|
|
|
from homeassistant.components.dhcp import DhcpServiceInfo
|
2021-12-03 13:05:56 +00:00
|
|
|
from homeassistant.components.hassio import HassioServiceInfo
|
2021-12-03 18:34:48 +00:00
|
|
|
from homeassistant.components.mqtt import MqttServiceInfo
|
2021-11-29 16:10:07 +00:00
|
|
|
from homeassistant.components.ssdp import SsdpServiceInfo
|
2021-11-16 18:03:50 +00:00
|
|
|
from homeassistant.components.usb import UsbServiceInfo
|
2021-11-15 17:05:45 +00:00
|
|
|
from homeassistant.components.zeroconf import ZeroconfServiceInfo
|
|
|
|
|
2018-02-16 22:07:38 +00:00
|
|
|
_LOGGER = logging.getLogger(__name__)


# Sources a config flow (and therefore a config entry) can originate from.
SOURCE_DISCOVERY = "discovery"
SOURCE_HASSIO = "hassio"
SOURCE_HOMEKIT = "homekit"
SOURCE_IMPORT = "import"
SOURCE_INTEGRATION_DISCOVERY = "integration_discovery"
SOURCE_MQTT = "mqtt"
SOURCE_SSDP = "ssdp"
SOURCE_USB = "usb"
SOURCE_USER = "user"
SOURCE_ZEROCONF = "zeroconf"
SOURCE_DHCP = "dhcp"

# If a user wants to hide a discovery from the UI they can "Ignore" it. The config_entries/ignore_flow
# websocket command creates a config entry with this source and while it exists normal discoveries
# with the same unique id are ignored.
SOURCE_IGNORE = "ignore"

# This is used when a user uses the "Stop Ignoring" button in the UI (the
# config_entries/ignore_flow websocket command). It's triggered after the "ignore" config entry has
# been removed and unloaded.
SOURCE_UNIGNORE = "unignore"

# This is used to signal that re-authentication is required by the user.
SOURCE_REAUTH = "reauth"

# Registry of config flow handlers, keyed by integration domain
# (populated via decorator registration outside this chunk).
HANDLERS = Registry()

# Key/version used by the storage helper to persist config entries.
STORAGE_KEY = "core.config_entries"
STORAGE_VERSION = 1

# Deprecated since 0.73
PATH_CONFIG = ".config_entries.json"

# Debounce delay (seconds) for delayed saves — consumer is outside this chunk.
SAVE_DELAY = 1
|
|
|
|
|
2021-05-20 17:19:20 +00:00
|
|
|
|
|
|
|
class ConfigEntryState(Enum):
    """Config entry state.

    Each member is declared as a ``(value, recoverable)`` pair; the custom
    ``__new__`` below stores the flag so ``.value`` remains the plain string.
    """

    LOADED = "loaded", True
    """The config entry has been set up successfully"""
    SETUP_ERROR = "setup_error", True
    """There was an error while trying to set up this config entry"""
    MIGRATION_ERROR = "migration_error", False
    """There was an error while trying to migrate the config entry to a new version"""
    SETUP_RETRY = "setup_retry", True
    """The config entry was not ready to be set up yet, but might be later"""
    NOT_LOADED = "not_loaded", True
    """The config entry has not been loaded"""
    FAILED_UNLOAD = "failed_unload", False
    """An error occurred when trying to unload the entry"""

    # Set per-member in __new__; True means the entry can still leave this
    # state (e.g. be unloaded or retried).
    _recoverable: bool

    def __new__(cls: type[object], value: str, recoverable: bool) -> ConfigEntryState:
        """Create new ConfigEntryState."""
        # Bypass Enum's default tuple handling: keep only the string as the
        # canonical value and attach the recoverable flag as an attribute.
        obj = object.__new__(cls)
        obj._value_ = value
        obj._recoverable = recoverable
        return cast("ConfigEntryState", obj)

    @property
    def recoverable(self) -> bool:
        """Get if the state is recoverable."""
        return self._recoverable
|
|
|
|
|
2019-03-01 04:27:20 +00:00
|
|
|
|
2020-06-15 11:38:38 +00:00
|
|
|
DEFAULT_DISCOVERY_UNIQUE_ID = "default_discovery_unique_id"
DISCOVERY_NOTIFICATION_ID = "config_entry_discovery"

# Sources that are treated as "discovery" (discovery notifications, flow
# aborts, etc.). Each source is listed exactly once: the previous tuple
# repeated SOURCE_HOMEKIT and SOURCE_DHCP, which was redundant for `in`
# membership tests and made any iteration handle those sources twice.
DISCOVERY_SOURCES = (
    SOURCE_SSDP,
    SOURCE_USB,
    SOURCE_DHCP,
    SOURCE_HOMEKIT,
    SOURCE_ZEROCONF,
    SOURCE_DISCOVERY,
    SOURCE_IMPORT,
    SOURCE_UNIGNORE,
)

RECONFIGURE_NOTIFICATION_ID = "config_entry_reconfigure"

EVENT_FLOW_DISCOVERED = "config_entry_discovered"
|
2018-06-18 03:03:29 +00:00
|
|
|
|
2021-12-15 19:53:21 +00:00
|
|
|
|
|
|
|
class ConfigEntryDisabler(StrEnum):
    """What disabled a config entry."""

    # Currently the only disabling actor is the user (via the UI/API).
    USER = "user"
|
|
|
|
|
|
|
|
|
|
|
|
# DISABLED_* is deprecated, to be removed in 2022.3
DISABLED_USER = ConfigEntryDisabler.USER.value

# Delay (seconds) before reloading an entry after an update — the consumer
# of this constant is outside this chunk.
RELOAD_AFTER_UPDATE_DELAY = 30

# Deprecated: Connection classes
# These aren't used anymore since 2021.6.0
# Mainly here not to break custom integrations.
CONN_CLASS_CLOUD_PUSH = "cloud_push"
CONN_CLASS_CLOUD_POLL = "cloud_poll"
CONN_CLASS_LOCAL_PUSH = "local_push"
CONN_CLASS_LOCAL_POLL = "local_poll"
CONN_CLASS_ASSUMED = "assumed"
CONN_CLASS_UNKNOWN = "unknown"
|
2018-09-17 08:12:46 +00:00
|
|
|
|
2018-02-16 22:07:38 +00:00
|
|
|
|
2019-03-01 04:27:20 +00:00
|
|
|
class ConfigError(HomeAssistantError):
    """Error while configuring an account.

    Base class for the config-entry exception hierarchy below.
    """
|
|
|
|
|
|
|
|
|
|
|
|
class UnknownEntry(ConfigError):
    """Unknown entry specified."""
|
|
|
|
|
|
|
|
|
|
|
|
class OperationNotAllowed(ConfigError):
    """Raised when a config entry operation is not allowed."""
|
|
|
|
|
|
|
|
|
2020-07-22 15:06:37 +00:00
|
|
|
# Signature of listeners registered via ConfigEntry.add_update_listener:
# called with (hass, entry) when the entry is updated.
UpdateListenerType = Callable[[HomeAssistant, "ConfigEntry"], Any]
|
|
|
|
|
|
|
|
|
2018-02-16 22:07:38 +00:00
|
|
|
class ConfigEntry:
    """Hold a configuration entry."""

    # __slots__ keeps per-entry memory small; an install may hold many entries.
    # NOTE(review): "_setup_lock" is reserved here but no assignment to it is
    # visible in this chunk — confirm it is still used elsewhere in the file.
    __slots__ = (
        "entry_id",
        "version",
        "domain",
        "title",
        "data",
        "options",
        "unique_id",
        "supports_unload",
        "pref_disable_new_entities",
        "pref_disable_polling",
        "source",
        "state",
        "disabled_by",
        "_setup_lock",
        "update_listeners",
        "reason",
        "_async_cancel_retry_setup",
        "_on_unload",
    )
|
|
|
|
|
|
|
|
    def __init__(
        self,
        version: int,
        domain: str,
        title: str,
        data: Mapping[str, Any],
        source: str,
        pref_disable_new_entities: bool | None = None,
        pref_disable_polling: bool | None = None,
        options: Mapping[str, Any] | None = None,
        unique_id: str | None = None,
        entry_id: str | None = None,
        state: ConfigEntryState = ConfigEntryState.NOT_LOADED,
        disabled_by: ConfigEntryDisabler | None = None,
    ) -> None:
        """Initialize a config entry.

        ``data`` and ``options`` are wrapped in read-only mapping proxies;
        mutate an entry only through the manager, never in place. A ``str``
        ``disabled_by`` is accepted for backward compatibility but is
        deprecated (converted with a warning below).
        """
        # Unique id of the config entry
        self.entry_id = entry_id or uuid_util.random_uuid_hex()

        # Version of the configuration.
        self.version = version

        # Domain the configuration belongs to
        self.domain = domain

        # Title of the configuration
        self.title = title

        # Config data
        self.data = MappingProxyType(data)

        # Entry options
        self.options = MappingProxyType(options or {})

        # Entry system options
        if pref_disable_new_entities is None:
            pref_disable_new_entities = False

        self.pref_disable_new_entities = pref_disable_new_entities

        if pref_disable_polling is None:
            pref_disable_polling = False

        self.pref_disable_polling = pref_disable_polling

        # Source of the configuration (user, discovery, cloud)
        self.source = source

        # State of the entry (LOADED, NOT_LOADED)
        self.state = state

        # Unique ID of this entry.
        self.unique_id = unique_id

        # Config entry is disabled
        # Legacy callers may still pass a plain string: warn and coerce it to
        # the ConfigEntryDisabler enum (removal scheduled for 2022.3).
        if isinstance(disabled_by, str) and not isinstance(
            disabled_by, ConfigEntryDisabler
        ):
            report(  # type: ignore[unreachable]
                "uses str for config entry disabled_by. This is deprecated and will "
                "stop working in Home Assistant 2022.3, it should be updated to use "
                "ConfigEntryDisabler instead",
                error_if_core=False,
            )
            disabled_by = ConfigEntryDisabler(disabled_by)
        self.disabled_by = disabled_by

        # Supports unload
        self.supports_unload = False

        # Listeners to call on update
        self.update_listeners: list[
            weakref.ReferenceType[UpdateListenerType] | weakref.WeakMethod
        ] = []

        # Reason why config entry is in a failed state
        self.reason: str | None = None

        # Function to cancel a scheduled retry
        self._async_cancel_retry_setup: Callable[[], Any] | None = None

        # Hold list for functions to call on unload.
        self._on_unload: list[CALLBACK_TYPE] | None = None
|
|
|
|
|
2018-07-23 08:24:39 +00:00
|
|
|
    async def async_setup(
        self,
        hass: HomeAssistant,
        *,
        integration: loader.Integration | None = None,
        tries: int = 0,
    ) -> None:
        """Set up an entry.

        Drives the entry state machine: on success the state becomes LOADED;
        import/migration failures set SETUP_ERROR/MIGRATION_ERROR; a
        ConfigEntryNotReady schedules a retry with exponential backoff and
        sets SETUP_RETRY. ``tries`` counts previous attempts and is only
        passed by the internal retry callback.
        """
        current_entry.set(self)
        # Ignored or disabled entries are never set up.
        if self.source == SOURCE_IGNORE or self.disabled_by:
            return

        if integration is None:
            integration = await loader.async_get_integration(hass, self.domain)

        self.supports_unload = await support_entry_unload(hass, self.domain)

        try:
            component = integration.get_component()
        except ImportError as err:
            _LOGGER.error(
                "Error importing integration %s to set up %s configuration entry: %s",
                integration.domain,
                self.domain,
                err,
            )
            # Only record the failure on the entry when this is the entry's
            # own domain (not a platform forward).
            if self.domain == integration.domain:
                self.state = ConfigEntryState.SETUP_ERROR
                self.reason = "Import error"
            return

        if self.domain == integration.domain:
            try:
                integration.get_platform("config_flow")
            except ImportError as err:
                _LOGGER.error(
                    "Error importing platform config_flow from integration %s to set up %s configuration entry: %s",
                    integration.domain,
                    self.domain,
                    err,
                )
                self.state = ConfigEntryState.SETUP_ERROR
                self.reason = "Import error"
                return

            # Perform migration
            if not await self.async_migrate(hass):
                self.state = ConfigEntryState.MIGRATION_ERROR
                self.reason = None
                return

        error_reason = None

        try:
            result = await component.async_setup_entry(hass, self)  # type: ignore

            if not isinstance(result, bool):
                _LOGGER.error(
                    "%s.async_setup_entry did not return boolean", integration.domain
                )
                result = False
        except ConfigEntryAuthFailed as ex:
            # Auth failure: log, run unload callbacks and kick off a reauth
            # flow; the entry ends up in SETUP_ERROR via result = False.
            message = str(ex)
            auth_base_message = "could not authenticate"
            error_reason = message or auth_base_message
            auth_message = (
                f"{auth_base_message}: {message}" if message else auth_base_message
            )
            _LOGGER.warning(
                "Config entry '%s' for %s integration %s",
                self.title,
                self.domain,
                auth_message,
            )
            self._async_process_on_unload()
            self.async_start_reauth(hass)
            result = False
        except ConfigEntryNotReady as ex:
            self.state = ConfigEntryState.SETUP_RETRY
            self.reason = str(ex) or None
            # Exponential backoff capped at 2**4: 5, 10, 20, 40, 80 seconds.
            wait_time = 2 ** min(tries, 4) * 5
            tries += 1
            message = str(ex)
            ready_message = f"ready yet: {message}" if message else "ready yet"
            # Warn only on the first retry to avoid log spam.
            if tries == 1:
                _LOGGER.warning(
                    "Config entry '%s' for %s integration not %s; Retrying in background",
                    self.title,
                    self.domain,
                    ready_message,
                )
            else:
                _LOGGER.debug(
                    "Config entry '%s' for %s integration not %s; Retrying in %d seconds",
                    self.title,
                    self.domain,
                    ready_message,
                    wait_time,
                )

            async def setup_again(*_: Any) -> None:
                """Run setup again."""
                self._async_cancel_retry_setup = None
                await self.async_setup(hass, integration=integration, tries=tries)

            # Before HA has fully started, defer the retry until the
            # STARTED event instead of a wall-clock timer.
            if hass.state == CoreState.running:
                self._async_cancel_retry_setup = hass.helpers.event.async_call_later(
                    wait_time, setup_again
                )
            else:
                self._async_cancel_retry_setup = hass.bus.async_listen_once(
                    EVENT_HOMEASSISTANT_STARTED, setup_again
                )

            self._async_process_on_unload()
            return
        except Exception:  # pylint: disable=broad-except
            _LOGGER.exception(
                "Error setting up entry %s for %s", self.title, integration.domain
            )
            result = False

        # Only store setup result as state if it was not forwarded.
        if self.domain != integration.domain:
            return

        if result:
            self.state = ConfigEntryState.LOADED
            self.reason = None
        else:
            self.state = ConfigEntryState.SETUP_ERROR
            self.reason = error_reason
|
2018-02-16 22:07:38 +00:00
|
|
|
|
2021-04-14 02:16:26 +00:00
|
|
|
    async def async_shutdown(self) -> None:
        """Call when Home Assistant is stopping.

        Only cancels a pending retry-setup; it does not unload the entry.
        """
        self.async_cancel_retry_setup()
|
|
|
|
|
|
|
|
@callback
|
|
|
|
def async_cancel_retry_setup(self) -> None:
|
|
|
|
"""Cancel retry setup."""
|
|
|
|
if self._async_cancel_retry_setup is not None:
|
|
|
|
self._async_cancel_retry_setup()
|
|
|
|
self._async_cancel_retry_setup = None
|
|
|
|
|
2019-07-20 21:35:59 +00:00
|
|
|
    async def async_unload(
        self, hass: HomeAssistant, *, integration: loader.Integration | None = None
    ) -> bool:
        """Unload an entry.

        Returns if unload is possible and was successful.

        Ignored entries, already-unloaded entries and entries whose
        integration can no longer be found all report success. A
        non-recoverable state, a component without async_unload_entry, or an
        exception during unload report failure (state FAILED_UNLOAD where the
        entry's own domain is concerned).
        """
        if self.source == SOURCE_IGNORE:
            self.state = ConfigEntryState.NOT_LOADED
            self.reason = None
            return True

        if self.state == ConfigEntryState.NOT_LOADED:
            return True

        if integration is None:
            try:
                integration = await loader.async_get_integration(hass, self.domain)
            except loader.IntegrationNotFound:
                # The integration was likely a custom_component
                # that was uninstalled, or an integration
                # that has been renamed without removing the config
                # entry.
                self.state = ConfigEntryState.NOT_LOADED
                self.reason = None
                return True

        component = integration.get_component()

        if integration.domain == self.domain:
            if not self.state.recoverable:
                return False

            # Entry never finished loading: just cancel any pending retry
            # and mark it unloaded — there is nothing to tear down.
            if self.state is not ConfigEntryState.LOADED:
                self.async_cancel_retry_setup()

                self.state = ConfigEntryState.NOT_LOADED
                self.reason = None
                return True

        supports_unload = hasattr(component, "async_unload_entry")

        if not supports_unload:
            if integration.domain == self.domain:
                self.state = ConfigEntryState.FAILED_UNLOAD
                self.reason = "Unload not supported"
            return False

        try:
            result = await component.async_unload_entry(hass, self)  # type: ignore

            assert isinstance(result, bool)

            # Only adjust state if we unloaded the component
            if result and integration.domain == self.domain:
                self.state = ConfigEntryState.NOT_LOADED
                self.reason = None

            self._async_process_on_unload()

            return result
        except Exception:  # pylint: disable=broad-except
            _LOGGER.exception(
                "Error unloading entry %s for %s", self.title, integration.domain
            )
            if integration.domain == self.domain:
                self.state = ConfigEntryState.FAILED_UNLOAD
                self.reason = "Unknown error"
            return False
|
|
|
|
|
2019-03-02 05:13:55 +00:00
|
|
|
    async def async_remove(self, hass: HomeAssistant) -> None:
        """Invoke remove callback on component.

        Best-effort: missing integration, missing async_remove_entry hook,
        and exceptions inside the hook are all tolerated (the entry is being
        removed regardless).
        """
        if self.source == SOURCE_IGNORE:
            return

        try:
            integration = await loader.async_get_integration(hass, self.domain)
        except loader.IntegrationNotFound:
            # The integration was likely a custom_component
            # that was uninstalled, or an integration
            # that has been renamed without removing the config
            # entry.
            return

        component = integration.get_component()
        if not hasattr(component, "async_remove_entry"):
            return
        try:
            await component.async_remove_entry(hass, self)  # type: ignore
        except Exception:  # pylint: disable=broad-except
            _LOGGER.exception(
                "Error calling entry remove callback %s for %s",
                self.title,
                integration.domain,
            )
|
2019-03-02 05:13:55 +00:00
|
|
|
|
2019-02-15 17:30:47 +00:00
|
|
|
    async def async_migrate(self, hass: HomeAssistant) -> bool:
        """Migrate an entry.

        Returns True if config entry is up-to-date or has been migrated.

        Compares this entry's stored version with the flow handler's VERSION
        and, when they differ, delegates to the component's
        async_migrate_entry. A successful migration schedules a save of all
        entries.
        """
        if (handler := HANDLERS.get(self.domain)) is None:
            _LOGGER.error(
                "Flow handler not found for entry %s for %s", self.title, self.domain
            )
            return False
        # Handler may be a partial
        while isinstance(handler, functools.partial):
            handler = handler.func

        if self.version == handler.VERSION:
            return True

        integration = await loader.async_get_integration(hass, self.domain)
        component = integration.get_component()
        supports_migrate = hasattr(component, "async_migrate_entry")
        if not supports_migrate:
            _LOGGER.error(
                "Migration handler not found for entry %s for %s",
                self.title,
                self.domain,
            )
            return False

        try:
            result = await component.async_migrate_entry(hass, self)  # type: ignore
            if not isinstance(result, bool):
                _LOGGER.error(
                    "%s.async_migrate_entry did not return boolean", self.domain
                )
                return False
            if result:
                # pylint: disable=protected-access
                hass.config_entries._async_schedule_save()
            return result
        except Exception:  # pylint: disable=broad-except
            _LOGGER.exception(
                "Error migrating entry %s for %s", self.title, self.domain
            )
            return False
|
|
|
|
|
2020-07-22 15:06:37 +00:00
|
|
|
def add_update_listener(self, listener: UpdateListenerType) -> CALLBACK_TYPE:
|
2019-02-22 16:59:43 +00:00
|
|
|
"""Listen for when entry is updated.
|
|
|
|
|
|
|
|
Returns function to unlisten.
|
|
|
|
"""
|
2021-01-26 21:19:10 +00:00
|
|
|
weak_listener: Any
|
|
|
|
# weakref.ref is not applicable to a bound method, e.g. method of a class instance, as reference will die immediately
|
|
|
|
if hasattr(listener, "__self__"):
|
|
|
|
weak_listener = weakref.WeakMethod(cast(MethodType, listener))
|
|
|
|
else:
|
|
|
|
weak_listener = weakref.ref(listener)
|
2019-02-22 16:59:43 +00:00
|
|
|
self.update_listeners.append(weak_listener)
|
|
|
|
|
|
|
|
return lambda: self.update_listeners.remove(weak_listener)
|
|
|
|
|
2021-03-17 16:34:55 +00:00
|
|
|
def as_dict(self) -> dict[str, Any]:
|
2018-02-16 22:07:38 +00:00
|
|
|
"""Return dictionary version of this entry."""
|
|
|
|
return {
|
2019-07-31 19:25:30 +00:00
|
|
|
"entry_id": self.entry_id,
|
|
|
|
"version": self.version,
|
|
|
|
"domain": self.domain,
|
|
|
|
"title": self.title,
|
2020-03-09 21:07:50 +00:00
|
|
|
"data": dict(self.data),
|
|
|
|
"options": dict(self.options),
|
2021-06-01 20:34:31 +00:00
|
|
|
"pref_disable_new_entities": self.pref_disable_new_entities,
|
|
|
|
"pref_disable_polling": self.pref_disable_polling,
|
2019-07-31 19:25:30 +00:00
|
|
|
"source": self.source,
|
2019-12-16 18:45:09 +00:00
|
|
|
"unique_id": self.unique_id,
|
2021-02-21 03:21:39 +00:00
|
|
|
"disabled_by": self.disabled_by,
|
2018-02-16 22:07:38 +00:00
|
|
|
}
|
|
|
|
|
2021-04-09 17:14:33 +00:00
|
|
|
@callback
|
|
|
|
def async_on_unload(self, func: CALLBACK_TYPE) -> None:
|
|
|
|
"""Add a function to call when config entry is unloaded."""
|
|
|
|
if self._on_unload is None:
|
|
|
|
self._on_unload = []
|
|
|
|
self._on_unload.append(func)
|
|
|
|
|
|
|
|
@callback
|
|
|
|
def _async_process_on_unload(self) -> None:
|
|
|
|
"""Process the on_unload callbacks."""
|
|
|
|
if self._on_unload is not None:
|
|
|
|
while self._on_unload:
|
|
|
|
self._on_unload.pop()()
|
|
|
|
|
2021-04-10 05:41:29 +00:00
|
|
|
    @callback
    def async_start_reauth(self, hass: HomeAssistant) -> None:
        """Start a reauth flow.

        Deduplicates: if a reauth flow with an identical context is already
        in progress for this entry, no new flow is created.
        """
        flow_context = {
            "source": SOURCE_REAUTH,
            "entry_id": self.entry_id,
            "unique_id": self.unique_id,
        }

        for flow in hass.config_entries.flow.async_progress_by_handler(self.domain):
            if flow["context"] == flow_context:
                return

        # Fire-and-forget: the flow runs as its own task.
        hass.async_create_task(
            hass.config_entries.flow.async_init(
                self.domain,
                context=flow_context,
                data=self.data,
            )
        )
|
|
|
|
|
2021-04-09 17:14:33 +00:00
|
|
|
|
|
|
|
# Context variable holding the config entry currently being set up; set at
# the top of ConfigEntry.async_setup so code running during setup can find
# its entry. None outside of a setup context.
current_entry: ContextVar[ConfigEntry | None] = ContextVar(
    "current_entry", default=None
)
|
|
|
|
|
2018-02-16 22:07:38 +00:00
|
|
|
|
2020-01-03 10:52:01 +00:00
|
|
|
class ConfigEntriesFlowManager(data_entry_flow.FlowManager):
    """Manage all the config entry flows that are in progress."""

    def __init__(
        self,
        hass: HomeAssistant,
        config_entries: ConfigEntries,
        hass_config: ConfigType,
    ) -> None:
        """Initialize the config entry flow manager.

        ``hass_config`` is the raw YAML configuration, kept so dependencies
        and requirements can be resolved when a flow is created.
        """
        super().__init__(hass)
        self.config_entries = config_entries
        self._hass_config = hass_config
|
|
|
|
|
2021-10-22 17:19:49 +00:00
|
|
|
@callback
|
|
|
|
def _async_has_other_discovery_flows(self, flow_id: str) -> bool:
|
|
|
|
"""Check if there are any other discovery flows in progress."""
|
|
|
|
return any(
|
|
|
|
flow.context["source"] in DISCOVERY_SOURCES and flow.flow_id != flow_id
|
|
|
|
for flow in self._progress.values()
|
|
|
|
)
|
|
|
|
|
2020-01-03 10:52:01 +00:00
|
|
|
    async def async_finish_flow(
        self, flow: data_entry_flow.FlowHandler, result: data_entry_flow.FlowResult
    ) -> data_entry_flow.FlowResult:
        """Finish a config flow and add an entry.

        Dismisses the discovery notification when appropriate, aborts other
        in-progress flows with the same unique ID, replaces an existing entry
        with the same unique ID (unload first, remove after the new entry is
        added) and stores the created ConfigEntry under ``result["result"]``.
        """
        flow = cast(ConfigFlow, flow)

        # Remove notification if no other discovery config entries in progress
        if not self._async_has_other_discovery_flows(flow.flow_id):
            self.hass.components.persistent_notification.async_dismiss(
                DISCOVERY_NOTIFICATION_ID
            )

        # Only RESULT_TYPE_CREATE_ENTRY results produce an entry.
        if result["type"] != data_entry_flow.RESULT_TYPE_CREATE_ENTRY:
            return result

        # Check if config entry exists with unique ID. Unload it.
        existing_entry = None

        # Abort all flows in progress with same unique ID
        # or the default discovery ID
        for progress_flow in self.async_progress_by_handler(flow.handler):
            progress_unique_id = progress_flow["context"].get("unique_id")
            if progress_flow["flow_id"] != flow.flow_id and (
                (flow.unique_id and progress_unique_id == flow.unique_id)
                or progress_unique_id == DEFAULT_DISCOVERY_UNIQUE_ID
            ):
                self.async_abort(progress_flow["flow_id"])

        if flow.unique_id is not None:
            # Reset unique ID when the default discovery ID has been used
            if flow.unique_id == DEFAULT_DISCOVERY_UNIQUE_ID:
                await flow.async_set_unique_id(None)

            # Find existing entry.
            for check_entry in self.config_entries.async_entries(result["handler"]):
                if check_entry.unique_id == flow.unique_id:
                    existing_entry = check_entry
                    break

        # Unload the entry before setting up the new one.
        # We will remove it only after the other one is set up,
        # so that device customizations are not getting lost.
        if existing_entry is not None and existing_entry.state.recoverable:
            await self.config_entries.async_unload(existing_entry.entry_id)

        entry = ConfigEntry(
            version=result["version"],
            domain=result["handler"],
            title=result["title"],
            data=result["data"],
            options=result["options"],
            source=flow.context["source"],
            unique_id=flow.unique_id,
        )

        await self.config_entries.async_add(entry)

        if existing_entry is not None:
            await self.config_entries.async_remove(existing_entry.entry_id)

        result["result"] = entry
        return result
|
|
|
|
|
|
|
|
async def async_create_flow(
|
2021-03-17 16:34:55 +00:00
|
|
|
self, handler_key: Any, *, context: dict | None = None, data: Any = None
|
2021-02-12 09:58:20 +00:00
|
|
|
) -> ConfigFlow:
|
2020-01-03 10:52:01 +00:00
|
|
|
"""Create a flow for specified handler.
|
|
|
|
|
|
|
|
Handler key is the domain of the component that we want to set up.
|
|
|
|
"""
|
|
|
|
try:
|
|
|
|
integration = await loader.async_get_integration(self.hass, handler_key)
|
2020-08-28 11:50:32 +00:00
|
|
|
except loader.IntegrationNotFound as err:
|
2020-01-03 10:52:01 +00:00
|
|
|
_LOGGER.error("Cannot find integration %s", handler_key)
|
2020-08-28 11:50:32 +00:00
|
|
|
raise data_entry_flow.UnknownHandler from err
|
2020-01-03 10:52:01 +00:00
|
|
|
|
|
|
|
# Make sure requirements and dependencies of component are resolved
|
|
|
|
await async_process_deps_reqs(self.hass, self._hass_config, integration)
|
|
|
|
|
|
|
|
try:
|
|
|
|
integration.get_platform("config_flow")
|
|
|
|
except ImportError as err:
|
|
|
|
_LOGGER.error(
|
2020-02-13 16:27:00 +00:00
|
|
|
"Error occurred loading configuration flow for integration %s: %s",
|
2020-01-03 10:52:01 +00:00
|
|
|
handler_key,
|
|
|
|
err,
|
|
|
|
)
|
|
|
|
raise data_entry_flow.UnknownHandler
|
|
|
|
|
2021-09-18 23:31:35 +00:00
|
|
|
if (handler := HANDLERS.get(handler_key)) is None:
|
2020-01-03 10:52:01 +00:00
|
|
|
raise data_entry_flow.UnknownHandler
|
|
|
|
|
|
|
|
if not context or "source" not in context:
|
|
|
|
raise KeyError("Context not set or doesn't have a source set")
|
|
|
|
|
2020-01-03 16:28:05 +00:00
|
|
|
flow = cast(ConfigFlow, handler())
|
|
|
|
flow.init_step = context["source"]
|
|
|
|
return flow
|
|
|
|
|
|
|
|
async def async_post_init(
|
2021-04-29 11:40:51 +00:00
|
|
|
self, flow: data_entry_flow.FlowHandler, result: data_entry_flow.FlowResult
|
2020-01-03 16:28:05 +00:00
|
|
|
) -> None:
|
|
|
|
"""After a flow is initialised trigger new flow notifications."""
|
|
|
|
source = flow.context["source"]
|
2020-01-03 10:52:01 +00:00
|
|
|
|
|
|
|
# Create notification.
|
|
|
|
if source in DISCOVERY_SOURCES:
|
|
|
|
self.hass.bus.async_fire(EVENT_FLOW_DISCOVERED)
|
|
|
|
self.hass.components.persistent_notification.async_create(
|
|
|
|
title="New devices discovered",
|
|
|
|
message=(
|
|
|
|
"We have discovered new devices on your network. "
|
2020-10-21 17:09:45 +00:00
|
|
|
"[Check it out](/config/integrations)."
|
2020-01-03 10:52:01 +00:00
|
|
|
),
|
|
|
|
notification_id=DISCOVERY_NOTIFICATION_ID,
|
|
|
|
)
|
2020-10-15 20:46:27 +00:00
|
|
|
elif source == SOURCE_REAUTH:
|
|
|
|
self.hass.components.persistent_notification.async_create(
|
|
|
|
title="Integration requires reconfiguration",
|
|
|
|
message=(
|
|
|
|
"At least one of your integrations requires reconfiguration to "
|
2020-10-21 17:09:45 +00:00
|
|
|
"continue functioning. [Check it out](/config/integrations)."
|
2020-10-15 20:46:27 +00:00
|
|
|
),
|
|
|
|
notification_id=RECONFIGURE_NOTIFICATION_ID,
|
|
|
|
)
|
2020-01-03 10:52:01 +00:00
|
|
|
|
|
|
|
|
2018-02-16 22:07:38 +00:00
|
|
|
class ConfigEntries:
    """Manage the configuration entries.

    An instance of this object is available via `hass.config_entries`.
    """

    def __init__(self, hass: HomeAssistant, hass_config: ConfigType) -> None:
        """Initialize the entry manager."""
        self.hass = hass
        # Flow manager for creating new entries and one for per-entry options.
        self.flow = ConfigEntriesFlowManager(hass, self, hass_config)
        self.options = OptionsFlowManager(hass)
        self._hass_config = hass_config
        # All entries keyed by entry_id, plus an index of entry_ids per domain
        # so async_entries(domain) does not scan every entry.
        self._entries: dict[str, ConfigEntry] = {}
        self._domain_index: dict[str, list[str]] = {}
        self._store = hass.helpers.storage.Store(STORAGE_VERSION, STORAGE_KEY)
        EntityRegistryDisabledHandler(hass).async_setup()
|
2018-02-16 22:07:38 +00:00
|
|
|
|
|
|
|
@callback
|
2021-03-29 11:06:44 +00:00
|
|
|
def async_domains(
|
|
|
|
self, include_ignore: bool = False, include_disabled: bool = False
|
|
|
|
) -> list[str]:
|
2018-02-16 22:07:38 +00:00
|
|
|
"""Return domains for which we have entries."""
|
2021-03-29 11:06:44 +00:00
|
|
|
return list(
|
|
|
|
{
|
|
|
|
entry.domain: None
|
|
|
|
for entry in self._entries.values()
|
|
|
|
if (include_ignore or entry.source != SOURCE_IGNORE)
|
|
|
|
and (include_disabled or not entry.disabled_by)
|
|
|
|
}
|
|
|
|
)
|
2018-02-16 22:07:38 +00:00
|
|
|
|
2019-02-22 16:59:43 +00:00
|
|
|
@callback
|
2021-03-17 16:34:55 +00:00
|
|
|
def async_get_entry(self, entry_id: str) -> ConfigEntry | None:
|
2019-02-22 16:59:43 +00:00
|
|
|
"""Return entry with matching entry_id."""
|
2021-03-22 04:44:29 +00:00
|
|
|
return self._entries.get(entry_id)
|
2019-02-22 16:59:43 +00:00
|
|
|
|
2018-02-16 22:07:38 +00:00
|
|
|
@callback
|
2021-03-17 16:34:55 +00:00
|
|
|
def async_entries(self, domain: str | None = None) -> list[ConfigEntry]:
|
2018-02-16 22:07:38 +00:00
|
|
|
"""Return all entries or entries for a specific domain."""
|
|
|
|
if domain is None:
|
2021-03-22 04:44:29 +00:00
|
|
|
return list(self._entries.values())
|
2021-09-17 20:39:00 +00:00
|
|
|
return [
|
|
|
|
self._entries[entry_id] for entry_id in self._domain_index.get(domain, [])
|
|
|
|
]
|
2018-02-16 22:07:38 +00:00
|
|
|
|
2020-01-03 10:52:01 +00:00
|
|
|
    async def async_add(self, entry: ConfigEntry) -> None:
        """Add and setup an entry.

        Raises HomeAssistantError if an entry with the same id exists.
        """
        if entry.entry_id in self._entries:
            raise HomeAssistantError(
                f"An entry with the id {entry.entry_id} already exists."
            )
        # Register the entry and keep the per-domain index in sync.
        self._entries[entry.entry_id] = entry
        self._domain_index.setdefault(entry.domain, []).append(entry.entry_id)
        await self.async_setup(entry.entry_id)
        self._async_schedule_save()
|
|
|
|
|
2021-03-17 16:34:55 +00:00
|
|
|
    async def async_remove(self, entry_id: str) -> dict[str, Any]:
        """Remove an entry.

        Unloads the entry (when possible), removes it from storage and from
        the device/entity registries, aborts any active reauth flow for it,
        and for ignored entries starts an 'unignore' rediscovery flow.

        Returns a dict whose "require_restart" key is True when the entry
        could not be unloaded cleanly. Raises UnknownEntry for an unknown id.
        """
        if (entry := self.async_get_entry(entry_id)) is None:
            raise UnknownEntry

        if not entry.state.recoverable:
            # Entry cannot be unloaded; only a failed unload forces a restart.
            unload_success = entry.state is not ConfigEntryState.FAILED_UNLOAD
        else:
            unload_success = await self.async_unload(entry_id)

        await entry.async_remove(self.hass)

        # Drop from the store and keep the per-domain index consistent.
        del self._entries[entry.entry_id]
        self._domain_index[entry.domain].remove(entry.entry_id)
        if not self._domain_index[entry.domain]:
            del self._domain_index[entry.domain]
        self._async_schedule_save()

        dev_reg, ent_reg = await asyncio.gather(
            self.hass.helpers.device_registry.async_get_registry(),
            self.hass.helpers.entity_registry.async_get_registry(),
        )

        # Remove all devices and entities that belonged to this entry.
        dev_reg.async_clear_config_entry(entry_id)
        ent_reg.async_clear_config_entry(entry_id)

        # If the configuration entry is removed during reauth, it should
        # abort any reauth flow that is active for the removed entry.
        for progress_flow in self.hass.config_entries.flow.async_progress_by_handler(
            entry.domain
        ):
            context = progress_flow.get("context")
            if (
                context
                and context["source"] == SOURCE_REAUTH
                and "entry_id" in context
                and context["entry_id"] == entry_id
                and "flow_id" in progress_flow
            ):
                self.hass.config_entries.flow.async_abort(progress_flow["flow_id"])

        # After we have fully removed an "ignore" config entry we can try and rediscover it so that a
        # user is able to immediately start configuring it. We do this by starting a new flow with
        # the 'unignore' step. If the integration doesn't implement async_step_unignore then
        # this will be a no-op.
        if entry.source == SOURCE_IGNORE:
            self.hass.async_create_task(
                self.hass.config_entries.flow.async_init(
                    entry.domain,
                    context={"source": SOURCE_UNIGNORE},
                    data={"unique_id": entry.unique_id},
                )
            )

        return {"require_restart": not unload_success}
|
2018-02-16 22:07:38 +00:00
|
|
|
|
2021-04-14 02:16:26 +00:00
|
|
|
async def _async_shutdown(self, event: Event) -> None:
|
|
|
|
"""Call when Home Assistant is stopping."""
|
|
|
|
await asyncio.gather(
|
2021-07-19 08:46:09 +00:00
|
|
|
*(entry.async_shutdown() for entry in self._entries.values())
|
2021-04-14 02:16:26 +00:00
|
|
|
)
|
2021-04-15 17:13:42 +00:00
|
|
|
await self.flow.async_shutdown()
|
2021-04-14 02:16:26 +00:00
|
|
|
|
2019-03-01 04:27:20 +00:00
|
|
|
    async def async_initialize(self) -> None:
        """Initialize config entry config.

        Loads stored entries (migrating legacy storage formats along the
        way), rebuilds the per-domain index and registers the shutdown hook.
        """
        # Migrating for config entries stored before 0.73
        config = await self.hass.helpers.storage.async_migrator(
            self.hass.config.path(PATH_CONFIG),
            self._store,
            old_conf_migrate_func=_old_conf_migrator,
        )

        self.hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, self._async_shutdown)

        if config is None:
            # Nothing stored yet: start with empty state.
            self._entries = {}
            self._domain_index = {}
            return

        entries = {}
        domain_index: dict[str, list[str]] = {}

        for entry in config["entries"]:
            pref_disable_new_entities = entry.get("pref_disable_new_entities")

            # Between 0.98 and 2021.6 we stored 'disable_new_entities' in a system options dictionary
            if pref_disable_new_entities is None and "system_options" in entry:
                pref_disable_new_entities = entry.get("system_options", {}).get(
                    "disable_new_entities"
                )

            domain = entry["domain"]
            entry_id = entry["entry_id"]

            entries[entry_id] = ConfigEntry(
                version=entry["version"],
                domain=domain,
                entry_id=entry_id,
                data=entry["data"],
                source=entry["source"],
                title=entry["title"],
                # New in 0.89
                options=entry.get("options"),
                # New in 0.104
                unique_id=entry.get("unique_id"),
                # New in 2021.3
                disabled_by=ConfigEntryDisabler(entry["disabled_by"])
                if entry.get("disabled_by")
                else None,
                # New in 2021.6
                pref_disable_new_entities=pref_disable_new_entities,
                pref_disable_polling=entry.get("pref_disable_polling"),
            )
            domain_index.setdefault(domain, []).append(entry_id)

        self._domain_index = domain_index
        self._entries = entries
|
2018-02-16 22:07:38 +00:00
|
|
|
|
2019-03-01 04:27:20 +00:00
|
|
|
    async def async_setup(self, entry_id: str) -> bool:
        """Set up a config entry.

        Return True if entry has been successfully loaded.

        Raises UnknownEntry for an unknown id and OperationNotAllowed when
        the entry is not in the NOT_LOADED state.
        """
        if (entry := self.async_get_entry(entry_id)) is None:
            raise UnknownEntry

        if entry.state is not ConfigEntryState.NOT_LOADED:
            raise OperationNotAllowed

        # Setup Component if not set up yet
        if entry.domain in self.hass.config.components:
            await entry.async_setup(self.hass)
        else:
            # Setting up the component will set up all its config entries
            result = await async_setup_component(
                self.hass, entry.domain, self._hass_config
            )

            if not result:
                return result

        # Success is defined by the entry's state after setup, since setup
        # may have resulted in a retry/error state.
        return entry.state is ConfigEntryState.LOADED  # type: ignore[comparison-overlap] # mypy bug?
|
2019-03-01 04:27:20 +00:00
|
|
|
|
|
|
|
async def async_unload(self, entry_id: str) -> bool:
|
|
|
|
"""Unload a config entry."""
|
2021-09-18 23:31:35 +00:00
|
|
|
if (entry := self.async_get_entry(entry_id)) is None:
|
2019-03-01 04:27:20 +00:00
|
|
|
raise UnknownEntry
|
|
|
|
|
2021-05-20 17:19:20 +00:00
|
|
|
if not entry.state.recoverable:
|
2019-03-01 04:27:20 +00:00
|
|
|
raise OperationNotAllowed
|
|
|
|
|
|
|
|
return await entry.async_unload(self.hass)
|
|
|
|
|
|
|
|
async def async_reload(self, entry_id: str) -> bool:
|
|
|
|
"""Reload an entry.
|
|
|
|
|
|
|
|
If an entry was not loaded, will just load.
|
|
|
|
"""
|
2021-09-18 23:31:35 +00:00
|
|
|
if (entry := self.async_get_entry(entry_id)) is None:
|
2021-02-21 03:21:39 +00:00
|
|
|
raise UnknownEntry
|
|
|
|
|
2019-03-01 04:27:20 +00:00
|
|
|
unload_result = await self.async_unload(entry_id)
|
|
|
|
|
2021-02-21 03:21:39 +00:00
|
|
|
if not unload_result or entry.disabled_by:
|
2019-03-01 04:27:20 +00:00
|
|
|
return unload_result
|
|
|
|
|
|
|
|
return await self.async_setup(entry_id)
|
|
|
|
|
2021-02-21 03:21:39 +00:00
|
|
|
    async def async_set_disabled_by(
        self, entry_id: str, disabled_by: ConfigEntryDisabler | None
    ) -> bool:
        """Disable an entry.

        If disabled_by is changed, the config entry will be reloaded.

        Raises UnknownEntry for an unknown entry_id.
        """
        if (entry := self.async_get_entry(entry_id)) is None:
            raise UnknownEntry

        # Deprecation shim: accept a plain string until Home Assistant 2022.3.
        if isinstance(disabled_by, str) and not isinstance(
            disabled_by, ConfigEntryDisabler
        ):
            report(  # type: ignore[unreachable]
                "uses str for config entry disabled_by. This is deprecated and will "
                "stop working in Home Assistant 2022.3, it should be updated to use "
                "ConfigEntryDisabler instead",
                error_if_core=False,
            )
            disabled_by = ConfigEntryDisabler(disabled_by)

        if entry.disabled_by is disabled_by:
            # Already in the requested state; nothing to do.
            return True

        entry.disabled_by = disabled_by
        self._async_schedule_save()

        dev_reg = device_registry.async_get(self.hass)
        ent_reg = entity_registry.async_get(self.hass)

        if not entry.disabled_by:
            # The config entry will no longer be disabled, enable devices and entities
            device_registry.async_config_entry_disabled_by_changed(dev_reg, entry)
            entity_registry.async_config_entry_disabled_by_changed(ent_reg, entry)

        # Load or unload the config entry
        reload_result = await self.async_reload(entry_id)

        if entry.disabled_by:
            # The config entry has been disabled, disable devices and entities
            device_registry.async_config_entry_disabled_by_changed(dev_reg, entry)
            entity_registry.async_config_entry_disabled_by_changed(ent_reg, entry)

        return reload_result
|
2021-02-21 03:21:39 +00:00
|
|
|
|
2018-09-25 10:21:11 +00:00
|
|
|
@callback
|
2019-08-19 23:45:17 +00:00
|
|
|
def async_update_entry(
|
2019-10-28 20:36:26 +00:00
|
|
|
self,
|
|
|
|
entry: ConfigEntry,
|
|
|
|
*,
|
2021-06-01 20:34:31 +00:00
|
|
|
unique_id: str | None | UndefinedType = UNDEFINED,
|
|
|
|
title: str | UndefinedType = UNDEFINED,
|
2021-03-17 16:34:55 +00:00
|
|
|
data: dict | UndefinedType = UNDEFINED,
|
2021-04-15 17:17:07 +00:00
|
|
|
options: Mapping[str, Any] | UndefinedType = UNDEFINED,
|
2021-06-01 20:34:31 +00:00
|
|
|
pref_disable_new_entities: bool | UndefinedType = UNDEFINED,
|
|
|
|
pref_disable_polling: bool | UndefinedType = UNDEFINED,
|
2020-08-08 18:23:56 +00:00
|
|
|
) -> bool:
|
|
|
|
"""Update a config entry.
|
|
|
|
|
|
|
|
If the entry was changed, the update_listeners are
|
|
|
|
fired and this function returns True
|
|
|
|
|
|
|
|
If the entry was not changed, the update_listeners are
|
|
|
|
not fired and this function returns False
|
|
|
|
"""
|
|
|
|
changed = False
|
|
|
|
|
2021-06-01 20:34:31 +00:00
|
|
|
for attr, value in (
|
|
|
|
("unique_id", unique_id),
|
|
|
|
("title", title),
|
|
|
|
("pref_disable_new_entities", pref_disable_new_entities),
|
|
|
|
("pref_disable_polling", pref_disable_polling),
|
|
|
|
):
|
|
|
|
if value == UNDEFINED or getattr(entry, attr) == value:
|
|
|
|
continue
|
2019-12-16 11:27:43 +00:00
|
|
|
|
2021-06-01 20:34:31 +00:00
|
|
|
setattr(entry, attr, value)
|
2020-08-08 18:23:56 +00:00
|
|
|
changed = True
|
2020-03-09 21:07:50 +00:00
|
|
|
|
2020-12-19 11:46:27 +00:00
|
|
|
if data is not UNDEFINED and entry.data != data: # type: ignore
|
2020-08-08 18:23:56 +00:00
|
|
|
changed = True
|
2020-03-09 21:07:50 +00:00
|
|
|
entry.data = MappingProxyType(data)
|
2019-02-22 16:59:43 +00:00
|
|
|
|
2021-04-15 17:17:07 +00:00
|
|
|
if options is not UNDEFINED and entry.options != options:
|
2020-08-08 18:23:56 +00:00
|
|
|
changed = True
|
2020-03-09 21:07:50 +00:00
|
|
|
entry.options = MappingProxyType(options)
|
2019-02-22 16:59:43 +00:00
|
|
|
|
2020-08-08 18:23:56 +00:00
|
|
|
if not changed:
|
|
|
|
return False
|
|
|
|
|
2019-08-19 23:45:17 +00:00
|
|
|
for listener_ref in entry.update_listeners:
|
2021-09-18 23:31:35 +00:00
|
|
|
if (listener := listener_ref()) is not None:
|
2020-07-22 15:06:37 +00:00
|
|
|
self.hass.async_create_task(listener(self.hass, entry))
|
2019-02-22 16:59:43 +00:00
|
|
|
|
2018-09-25 10:21:11 +00:00
|
|
|
self._async_schedule_save()
|
2018-02-16 22:07:38 +00:00
|
|
|
|
2020-08-08 18:23:56 +00:00
|
|
|
return True
|
|
|
|
|
2021-04-26 17:46:55 +00:00
|
|
|
@callback
|
|
|
|
def async_setup_platforms(
|
|
|
|
self, entry: ConfigEntry, platforms: Iterable[str]
|
|
|
|
) -> None:
|
|
|
|
"""Forward the setup of an entry to platforms."""
|
|
|
|
for platform in platforms:
|
|
|
|
self.hass.async_create_task(self.async_forward_entry_setup(entry, platform))
|
|
|
|
|
2019-10-28 20:36:26 +00:00
|
|
|
async def async_forward_entry_setup(self, entry: ConfigEntry, domain: str) -> bool:
|
2018-04-09 14:09:08 +00:00
|
|
|
"""Forward the setup of an entry to a different component.
|
|
|
|
|
|
|
|
By default an entry is setup with the component it belongs to. If that
|
|
|
|
component also has related platforms, the component will have to
|
|
|
|
forward the entry to be setup by that component.
|
|
|
|
|
|
|
|
You don't want to await this coroutine if it is called as part of the
|
|
|
|
setup of a component, because it can cause a deadlock.
|
|
|
|
"""
|
|
|
|
# Setup Component if not set up yet
|
2019-04-15 02:07:05 +00:00
|
|
|
if domain not in self.hass.config.components:
|
2019-07-31 19:25:30 +00:00
|
|
|
result = await async_setup_component(self.hass, domain, self._hass_config)
|
2018-04-09 14:09:08 +00:00
|
|
|
|
|
|
|
if not result:
|
|
|
|
return False
|
|
|
|
|
2019-04-15 02:07:05 +00:00
|
|
|
integration = await loader.async_get_integration(self.hass, domain)
|
|
|
|
|
|
|
|
await entry.async_setup(self.hass, integration=integration)
|
2019-10-28 20:36:26 +00:00
|
|
|
return True
|
2018-04-09 14:09:08 +00:00
|
|
|
|
2021-04-26 17:46:55 +00:00
|
|
|
async def async_unload_platforms(
|
|
|
|
self, entry: ConfigEntry, platforms: Iterable[str]
|
|
|
|
) -> bool:
|
|
|
|
"""Forward the unloading of an entry to platforms."""
|
|
|
|
return all(
|
|
|
|
await asyncio.gather(
|
2021-07-19 08:46:09 +00:00
|
|
|
*(
|
2021-04-26 17:46:55 +00:00
|
|
|
self.async_forward_entry_unload(entry, platform)
|
|
|
|
for platform in platforms
|
2021-07-19 08:46:09 +00:00
|
|
|
)
|
2021-04-26 17:46:55 +00:00
|
|
|
)
|
|
|
|
)
|
|
|
|
|
2019-10-28 20:36:26 +00:00
|
|
|
async def async_forward_entry_unload(self, entry: ConfigEntry, domain: str) -> bool:
|
2018-04-12 12:28:54 +00:00
|
|
|
"""Forward the unloading of an entry to a different component."""
|
|
|
|
# It was never loaded.
|
2019-04-15 02:07:05 +00:00
|
|
|
if domain not in self.hass.config.components:
|
2018-04-12 12:28:54 +00:00
|
|
|
return True
|
|
|
|
|
2019-04-15 02:07:05 +00:00
|
|
|
integration = await loader.async_get_integration(self.hass, domain)
|
|
|
|
|
|
|
|
return await entry.async_unload(self.hass, integration=integration)
|
2018-04-12 12:28:54 +00:00
|
|
|
|
2020-02-14 18:00:22 +00:00
|
|
|
    @callback
    def _async_schedule_save(self) -> None:
        """Save the config entries to a file."""
        # Delayed save: successive calls within SAVE_DELAY seconds are
        # coalesced into a single write by the store.
        self._store.async_delay_save(self._data_to_save, SAVE_DELAY)
|
|
|
|
|
|
|
|
@callback
|
2021-03-17 16:34:55 +00:00
|
|
|
def _data_to_save(self) -> dict[str, list[dict[str, Any]]]:
|
2018-08-17 18:18:21 +00:00
|
|
|
"""Return data to save."""
|
2021-03-22 04:44:29 +00:00
|
|
|
return {"entries": [entry.as_dict() for entry in self._entries.values()]}
|
2018-06-25 16:53:49 +00:00
|
|
|
|
2018-02-16 22:07:38 +00:00
|
|
|
|
2021-03-17 16:34:55 +00:00
|
|
|
async def _old_conf_migrator(old_config: dict[str, Any]) -> dict[str, Any]:
|
2018-06-25 16:53:49 +00:00
|
|
|
"""Migrate the pre-0.73 config format to the latest version."""
|
2019-07-31 19:25:30 +00:00
|
|
|
return {"entries": old_config}
|
2018-09-14 09:57:31 +00:00
|
|
|
|
|
|
|
|
|
|
|
class ConfigFlow(data_entry_flow.FlowHandler):
    """Base class for config flows with some helpers."""

    def __init_subclass__(cls, domain: str | None = None, **kwargs: Any) -> None:
        """Initialize a subclass, register if possible."""
        super().__init_subclass__(**kwargs)  # type: ignore
        # Subclasses declared as ``class MyFlow(ConfigFlow, domain=DOMAIN)``
        # are automatically registered in the HANDLERS registry.
        if domain is not None:
            HANDLERS.register(domain)(cls)
|
|
|
|
|
2019-12-16 18:45:09 +00:00
|
|
|
@property
|
2021-03-17 16:34:55 +00:00
|
|
|
def unique_id(self) -> str | None:
|
2019-12-16 18:45:09 +00:00
|
|
|
"""Return unique ID if available."""
|
|
|
|
if not self.context:
|
|
|
|
return None
|
|
|
|
|
|
|
|
return cast(Optional[str], self.context.get("unique_id"))
|
|
|
|
|
2019-08-15 21:11:55 +00:00
|
|
|
    @staticmethod
    @callback
    def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow:
        """Get the options flow for this handler.

        Integrations that support options override this in their subclass;
        the base implementation always raises UnknownHandler.
        """
        raise data_entry_flow.UnknownHandler
|
|
|
|
|
2021-12-07 20:50:34 +00:00
|
|
|
    @classmethod
    @callback
    def async_supports_options_flow(cls, config_entry: ConfigEntry) -> bool:
        """Return options flow support for this handler."""
        # Supported exactly when the subclass overrode async_get_options_flow.
        return cls.async_get_options_flow is not ConfigFlow.async_get_options_flow
|
|
|
|
|
2021-05-11 20:00:12 +00:00
|
|
|
@callback
|
|
|
|
def _async_abort_entries_match(
|
|
|
|
self, match_dict: dict[str, Any] | None = None
|
|
|
|
) -> None:
|
|
|
|
"""Abort if current entries match all data."""
|
|
|
|
if match_dict is None:
|
|
|
|
match_dict = {} # Match any entry
|
|
|
|
for entry in self._async_current_entries(include_ignore=False):
|
|
|
|
if all(item in entry.data.items() for item in match_dict.items()):
|
|
|
|
raise data_entry_flow.AbortFlow("already_configured")
|
|
|
|
|
2019-12-16 18:45:09 +00:00
|
|
|
    @callback
    def _abort_if_unique_id_configured(
        self,
        updates: dict[Any, Any] | None = None,
        reload_on_update: bool = True,
    ) -> None:
        """Abort if the unique ID is already configured.

        When ``updates`` is given, it is merged into the matching entry's
        data first; if that changed the entry and ``reload_on_update`` is
        set, the entry is scheduled for a reload.
        """
        if self.unique_id is None:
            return

        for entry in self._async_current_entries(include_ignore=True):
            if entry.unique_id == self.unique_id:
                if updates is not None:
                    changed = self.hass.config_entries.async_update_entry(
                        entry, data={**entry.data, **updates}
                    )
                    # Only reload entries that are (or are retrying to be) set up.
                    if (
                        changed
                        and reload_on_update
                        and entry.state
                        in (ConfigEntryState.LOADED, ConfigEntryState.SETUP_RETRY)
                    ):
                        self.hass.async_create_task(
                            self.hass.config_entries.async_reload(entry.entry_id)
                        )
                # Allow ignored entries to be configured on manual user step
                if entry.source == SOURCE_IGNORE and self.source == SOURCE_USER:
                    continue
                raise data_entry_flow.AbortFlow("already_configured")
|
2019-12-16 18:45:09 +00:00
|
|
|
|
2019-12-16 11:27:43 +00:00
|
|
|
    async def async_set_unique_id(
        self, unique_id: str | None = None, *, raise_on_progress: bool = True
    ) -> ConfigEntry | None:
        """Set a unique ID for the config flow.

        Returns optionally existing config entry with same ID.

        Raises AbortFlow("already_in_progress") when raise_on_progress is
        True and another flow with the same unique id is already running.
        """
        if unique_id is None:
            self.context["unique_id"] = None
            return None

        if raise_on_progress:
            for progress in self._async_in_progress(include_uninitialized=True):
                if progress["context"].get("unique_id") == unique_id:
                    raise data_entry_flow.AbortFlow("already_in_progress")

        self.context["unique_id"] = unique_id

        # Abort discoveries done using the default discovery unique id
        if unique_id != DEFAULT_DISCOVERY_UNIQUE_ID:
            for progress in self._async_in_progress(include_uninitialized=True):
                if progress["context"].get("unique_id") == DEFAULT_DISCOVERY_UNIQUE_ID:
                    self.hass.config_entries.flow.async_abort(progress["flow_id"])

        # Return the existing entry (if any) so callers can decide to abort.
        for entry in self._async_current_entries(include_ignore=True):
            if entry.unique_id == unique_id:
                return entry

        return None
|
|
|
|
|
2021-03-08 18:54:51 +00:00
|
|
|
    @callback
    def _set_confirm_only(
        self,
    ) -> None:
        """Mark the config flow as only needing user confirmation to finish flow."""
        # NOTE(review): consumers of the flow context read this flag —
        # presumably the frontend's flow UI; verify against callers.
        self.context["confirm_only"] = True
|
|
|
|
|
2018-09-14 09:57:31 +00:00
|
|
|
@callback
|
2021-03-22 04:57:49 +00:00
|
|
|
def _async_current_entries(
|
|
|
|
self, include_ignore: bool | None = None
|
|
|
|
) -> list[ConfigEntry]:
|
2021-02-04 10:08:10 +00:00
|
|
|
"""Return current entries.
|
|
|
|
|
|
|
|
If the flow is user initiated, filter out ignored entries unless include_ignore is True.
|
|
|
|
"""
|
|
|
|
config_entries = self.hass.config_entries.async_entries(self.handler)
|
|
|
|
|
2021-03-22 04:57:49 +00:00
|
|
|
if (
|
|
|
|
include_ignore is True
|
|
|
|
or include_ignore is None
|
|
|
|
and self.source != SOURCE_USER
|
|
|
|
):
|
2021-02-04 10:08:10 +00:00
|
|
|
return config_entries
|
|
|
|
|
|
|
|
return [entry for entry in config_entries if entry.source != SOURCE_IGNORE]
|
2018-09-14 09:57:31 +00:00
|
|
|
|
2019-12-16 18:45:09 +00:00
|
|
|
@callback
|
2021-03-17 16:34:55 +00:00
|
|
|
def _async_current_ids(self, include_ignore: bool = True) -> set[str | None]:
|
2019-12-16 18:45:09 +00:00
|
|
|
"""Return current unique IDs."""
|
2020-04-04 18:05:15 +00:00
|
|
|
return {
|
2019-12-16 18:45:09 +00:00
|
|
|
entry.unique_id
|
|
|
|
for entry in self.hass.config_entries.async_entries(self.handler)
|
2019-12-18 06:41:01 +00:00
|
|
|
if include_ignore or entry.source != SOURCE_IGNORE
|
2020-04-04 18:05:15 +00:00
|
|
|
}
|
2019-12-16 18:45:09 +00:00
|
|
|
|
2018-09-14 09:57:31 +00:00
|
|
|
@callback
|
2021-04-15 17:17:07 +00:00
|
|
|
def _async_in_progress(
|
|
|
|
self, include_uninitialized: bool = False
|
2021-04-29 11:40:51 +00:00
|
|
|
) -> list[data_entry_flow.FlowResult]:
|
2018-09-14 09:57:31 +00:00
|
|
|
"""Return other in progress flows for current domain."""
|
2019-07-31 19:25:30 +00:00
|
|
|
return [
|
|
|
|
flw
|
2021-10-22 17:19:49 +00:00
|
|
|
for flw in self.hass.config_entries.flow.async_progress_by_handler(
|
|
|
|
self.handler, include_uninitialized=include_uninitialized
|
2021-03-25 17:35:01 +00:00
|
|
|
)
|
2021-10-22 17:19:49 +00:00
|
|
|
if flw["flow_id"] != self.flow_id
|
2019-07-31 19:25:30 +00:00
|
|
|
]
|
2019-02-22 16:59:43 +00:00
|
|
|
|
2021-04-15 17:17:07 +00:00
|
|
|
async def async_step_ignore(
|
|
|
|
self, user_input: dict[str, Any]
|
2021-04-29 11:40:51 +00:00
|
|
|
) -> data_entry_flow.FlowResult:
|
2019-12-18 06:41:01 +00:00
|
|
|
"""Ignore this config flow."""
|
|
|
|
await self.async_set_unique_id(user_input["unique_id"], raise_on_progress=False)
|
2021-01-12 08:26:20 +00:00
|
|
|
return self.async_create_entry(title=user_input["title"], data={})
|
2019-12-18 06:41:01 +00:00
|
|
|
|
2021-04-15 17:17:07 +00:00
|
|
|
async def async_step_unignore(
|
|
|
|
self, user_input: dict[str, Any]
|
2021-04-29 11:40:51 +00:00
|
|
|
) -> data_entry_flow.FlowResult:
|
2019-12-21 10:22:07 +00:00
|
|
|
"""Rediscover a config entry by it's unique_id."""
|
|
|
|
return self.async_abort(reason="not_implemented")
|
|
|
|
|
2020-06-15 11:38:38 +00:00
|
|
|
async def async_step_user(
|
2021-03-17 16:34:55 +00:00
|
|
|
self, user_input: dict[str, Any] | None = None
|
2021-04-29 11:40:51 +00:00
|
|
|
) -> data_entry_flow.FlowResult:
|
2020-06-15 11:38:38 +00:00
|
|
|
"""Handle a flow initiated by the user."""
|
|
|
|
return self.async_abort(reason="not_implemented")
|
|
|
|
|
|
|
|
    async def _async_handle_discovery_without_unique_id(self) -> None:
        """Mark this flow discovered, without a unique identifier.

        If a flow initiated by discovery, doesn't have a unique ID, this can
        be used alternatively. It will ensure only 1 flow is started and only
        when the handler has no existing config entries.

        It ensures that the discovery can be ignored by the user.
        """
        if self.unique_id is not None:
            return

        # Abort if the handler has config entries already
        if self._async_current_entries():
            raise data_entry_flow.AbortFlow("already_configured")

        # Use a special unique id to differentiate
        await self.async_set_unique_id(DEFAULT_DISCOVERY_UNIQUE_ID)
        self._abort_if_unique_id_configured()

        # Abort if any other flow for this handler is already in progress
        if self._async_in_progress(include_uninitialized=True):
            raise data_entry_flow.AbortFlow("already_in_progress")
|
|
|
|
|
|
|
|
async def async_step_discovery(
    self, discovery_info: DiscoveryInfoType
) -> data_entry_flow.FlowResult:
    """Handle a flow initialized by discovery.

    Marks the flow with the shared default unique id (so duplicate
    discovery flows abort and the user can ignore it), then continues
    with the regular user step.
    """
    await self._async_handle_discovery_without_unique_id()
    return await self.async_step_user()
|
|
|
|
|
2020-10-15 20:46:27 +00:00
|
|
|
@callback
def async_abort(
    self, *, reason: str, description_placeholders: dict | None = None
) -> data_entry_flow.FlowResult:
    """Abort the config flow.

    When the last reauth flow for this handler is aborted, the pending
    reauth notification is dismissed as well.
    """
    if self.source == SOURCE_REAUTH:
        # Collect every *other* reauth flow still running for this handler.
        remaining_reauth_flows = [
            progress
            for progress in self.hass.config_entries.flow.async_progress_by_handler(
                self.handler
            )
            if progress["flow_id"] != self.flow_id
            and progress["context"]["source"] == SOURCE_REAUTH
        ]
        if not remaining_reauth_flows:
            # This was the last reauth flow: remove the notification.
            self.hass.components.persistent_notification.async_dismiss(
                RECONFIGURE_NOTIFICATION_ID
            )

    return super().async_abort(
        reason=reason, description_placeholders=description_placeholders
    )
|
|
|
|
|
2021-04-17 10:42:31 +00:00
|
|
|
async def async_step_hassio(
    self, discovery_info: HassioServiceInfo
) -> data_entry_flow.FlowResult:
    """Handle a flow initialized by HASS IO discovery."""
    # Supervisor discovery carries its payload in the ``config`` attribute.
    config = discovery_info.config
    return await self.async_step_discovery(config)
|
2021-04-17 10:42:31 +00:00
|
|
|
|
|
|
|
async def async_step_homekit(
    self, discovery_info: ZeroconfServiceInfo
) -> data_entry_flow.FlowResult:
    """Handle a flow initialized by Homekit discovery."""
    # Flatten the dataclass into the plain dict the discovery step expects.
    info = dataclasses.asdict(discovery_info)
    return await self.async_step_discovery(info)
|
2021-04-17 10:42:31 +00:00
|
|
|
|
|
|
|
async def async_step_mqtt(
    self, discovery_info: MqttServiceInfo
) -> data_entry_flow.FlowResult:
    """Handle a flow initialized by MQTT discovery."""
    # Flatten the dataclass into the plain dict the discovery step expects.
    info = dataclasses.asdict(discovery_info)
    return await self.async_step_discovery(info)
|
2021-04-17 10:42:31 +00:00
|
|
|
|
|
|
|
async def async_step_ssdp(
    self, discovery_info: SsdpServiceInfo
) -> data_entry_flow.FlowResult:
    """Handle a flow initialized by SSDP discovery."""
    # Flatten the dataclass into the plain dict the discovery step expects.
    info = dataclasses.asdict(discovery_info)
    return await self.async_step_discovery(info)
|
2021-04-17 10:42:31 +00:00
|
|
|
|
|
|
|
async def async_step_zeroconf(
    self, discovery_info: ZeroconfServiceInfo
) -> data_entry_flow.FlowResult:
    """Handle a flow initialized by Zeroconf discovery."""
    # Flatten the dataclass into the plain dict the discovery step expects.
    info = dataclasses.asdict(discovery_info)
    return await self.async_step_discovery(info)
|
2021-04-17 10:42:31 +00:00
|
|
|
|
|
|
|
async def async_step_dhcp(
    self, discovery_info: DhcpServiceInfo
) -> data_entry_flow.FlowResult:
    """Handle a flow initialized by DHCP discovery."""
    # Flatten the dataclass into the plain dict the discovery step expects.
    info = dataclasses.asdict(discovery_info)
    return await self.async_step_discovery(info)
|
2020-06-15 11:38:38 +00:00
|
|
|
|
2021-08-20 19:04:18 +00:00
|
|
|
async def async_step_usb(
    self, discovery_info: UsbServiceInfo
) -> data_entry_flow.FlowResult:
    """Handle a flow initialized by USB discovery."""
    # Flatten the dataclass into the plain dict the discovery step expects.
    info = dataclasses.asdict(discovery_info)
    return await self.async_step_discovery(info)
|
2021-08-20 19:04:18 +00:00
|
|
|
|
2021-05-06 05:14:01 +00:00
|
|
|
@callback
def async_create_entry(  # pylint: disable=arguments-differ
    self,
    *,
    title: str,
    data: Mapping[str, Any],
    description: str | None = None,
    description_placeholders: dict | None = None,
    options: Mapping[str, Any] | None = None,
) -> data_entry_flow.FlowResult:
    """Finish config flow and create a config entry.

    Extends the base flow result with an ``options`` key so the entry
    can be created with initial options.
    """
    flow_result = super().async_create_entry(
        title=title,
        data=data,
        description=description,
        description_placeholders=description_placeholders,
    )

    # Config entries additionally carry options; default to an empty dict.
    flow_result["options"] = options or {}

    return flow_result
|
|
|
|
|
2019-02-22 16:59:43 +00:00
|
|
|
|
2020-01-03 10:52:01 +00:00
|
|
|
class OptionsFlowManager(data_entry_flow.FlowManager):
    """Flow to set options for a configuration entry."""

    async def async_create_flow(
        self,
        handler_key: Any,
        *,
        context: dict[str, Any] | None = None,
        data: dict[str, Any] | None = None,
    ) -> OptionsFlow:
        """Create an options flow for a config entry.

        The handler key doubles as the config entry id (entry_id and
        flow.handler are the same thing, mapping entry to flow).

        Raises UnknownEntry / data_entry_flow.UnknownHandler when no
        matching entry or integration handler exists.
        """
        config_entry = self.hass.config_entries.async_get_entry(handler_key)
        if config_entry is None:
            raise UnknownEntry(handler_key)

        if config_entry.domain not in HANDLERS:
            raise data_entry_flow.UnknownHandler

        flow_class = HANDLERS[config_entry.domain]
        return cast(OptionsFlow, flow_class.async_get_options_flow(config_entry))

    async def async_finish_flow(
        self, flow: data_entry_flow.FlowHandler, result: data_entry_flow.FlowResult
    ) -> data_entry_flow.FlowResult:
        """Finish an options flow and update options for configuration entry.

        The flow handler doubles as the config entry id, mapping flow to
        entry. Raises UnknownEntry when the entry no longer exists.
        """
        flow = cast(OptionsFlow, flow)

        # Only a created entry carries new options to persist.
        if result["type"] != data_entry_flow.RESULT_TYPE_CREATE_ENTRY:
            return result

        config_entry = self.hass.config_entries.async_get_entry(flow.handler)
        if config_entry is None:
            raise UnknownEntry(flow.handler)

        if result["data"] is not None:
            self.hass.config_entries.async_update_entry(
                config_entry, options=result["data"]
            )

        result["result"] = True
        return result
|
2019-08-15 21:11:55 +00:00
|
|
|
|
|
|
|
|
|
|
|
class OptionsFlow(data_entry_flow.FlowHandler):
    """Base class for config option flows."""

    # Set by the flow manager; holds the config entry id this flow belongs to.
    handler: str
|
2019-08-18 04:34:11 +00:00
|
|
|
|
|
|
|
|
2019-08-23 00:32:43 +00:00
|
|
|
class EntityRegistryDisabledHandler:
    """Handler to handle when entities related to config entries updating disabled_by.

    Listens for entity registry "disabled_by" changes and schedules a
    debounced reload of the affected config entries.
    """

    def __init__(self, hass: HomeAssistant) -> None:
        """Initialize the handler."""
        self.hass = hass
        # Lazily resolved entity registry.
        self.registry: entity_registry.EntityRegistry | None = None
        # Config entry ids queued for reload.
        self.changed: set[str] = set()
        # Cancel callback for the pending delayed reload, if any.
        self._remove_call_later: Callable[[], None] | None = None

    @callback
    def async_setup(self) -> None:
        """Set up the disable handler."""
        self.hass.bus.async_listen(
            entity_registry.EVENT_ENTITY_REGISTRY_UPDATED,
            self._handle_entry_updated,
            event_filter=_handle_entry_updated_filter,
        )

    async def _handle_entry_updated(self, event: Event) -> None:
        """Handle entity registry entry update."""
        if self.registry is None:
            self.registry = await entity_registry.async_get_registry(self.hass)

        entity_entry = self.registry.async_get(event.data["entity_id"])

        if (
            # Stop if no entry found
            entity_entry is None
            # Stop if entry not connected to config entry
            or entity_entry.config_entry_id is None
            # Stop if the entry got disabled. In that case the entity handles it
            # themselves.
            or entity_entry.disabled_by
        ):
            return

        config_entry = self.hass.config_entries.async_get_entry(
            entity_entry.config_entry_id
        )
        assert config_entry is not None

        # Only queue entries that can actually be unloaded/reloaded.
        if config_entry.entry_id not in self.changed and config_entry.supports_unload:
            self.changed.add(config_entry.entry_id)

        if not self.changed:
            return

        # We are going to delay reloading on *every* entity registry change so that
        # if a user is happily clicking along, it will only reload at the end.

        if self._remove_call_later:
            self._remove_call_later()

        self._remove_call_later = self.hass.helpers.event.async_call_later(
            RELOAD_AFTER_UPDATE_DELAY, self._handle_reload
        )

    async def _handle_reload(self, _now: Any) -> None:
        """Handle a reload."""
        self._remove_call_later = None
        to_reload = self.changed
        self.changed = set()

        _LOGGER.info(
            "Reloading configuration entries because disabled_by changed in entity registry: %s",
            # Bug fix: join the captured set. Previously this joined
            # self.changed, which was reset just above, so the log message
            # always listed no entries.
            ", ".join(to_reload),
        )

        await asyncio.gather(
            *(self.hass.config_entries.async_reload(entry_id) for entry_id in to_reload)
        )
|
|
|
|
|
|
|
|
|
2021-02-14 19:42:55 +00:00
|
|
|
@callback
def _handle_entry_updated_filter(event: Event) -> bool:
    """Handle entity registry entry update filter.

    Only handle changes to "disabled_by".
    If "disabled_by" was DISABLED_CONFIG_ENTRY, reload is not needed.
    """
    if event.data["action"] != "update":
        return False

    changes = event.data["changes"]
    if "disabled_by" not in changes:
        return False

    # A config-entry-driven disable is handled by the entry itself.
    return changes["disabled_by"] != entity_registry.DISABLED_CONFIG_ENTRY
|
|
|
|
|
|
|
|
|
2019-08-23 00:32:43 +00:00
|
|
|
async def support_entry_unload(hass: HomeAssistant, domain: str) -> bool:
    """Test if a domain supports entry unloading."""
    # An integration supports unloading when its component module defines
    # an ``async_unload_entry`` coroutine.
    integration = await loader.async_get_integration(hass, domain)
    return hasattr(integration.get_component(), "async_unload_entry")
|