2019-06-12 16:29:28 +00:00
|
|
|
"""Manage config entries in Home Assistant."""
|
2024-03-08 15:36:11 +00:00
|
|
|
|
2021-02-12 09:58:20 +00:00
|
|
|
from __future__ import annotations
|
|
|
|
|
2019-03-01 04:27:20 +00:00
|
|
|
import asyncio
|
2024-08-16 21:48:03 +00:00
|
|
|
from collections import UserDict, defaultdict
|
2024-07-01 09:51:51 +00:00
|
|
|
from collections.abc import (
|
|
|
|
Callable,
|
|
|
|
Coroutine,
|
|
|
|
Generator,
|
|
|
|
Hashable,
|
|
|
|
Iterable,
|
|
|
|
Mapping,
|
|
|
|
ValuesView,
|
|
|
|
)
|
2021-04-09 17:14:33 +00:00
|
|
|
from contextvars import ContextVar
|
2022-11-24 11:18:09 +00:00
|
|
|
from copy import deepcopy
|
2024-07-29 20:08:46 +00:00
|
|
|
from datetime import datetime
|
2023-07-23 21:19:24 +00:00
|
|
|
from enum import Enum, StrEnum
|
2019-02-15 17:30:47 +00:00
|
|
|
import functools
|
2024-10-03 20:28:00 +00:00
|
|
|
from functools import cache
|
2019-12-09 15:42:10 +00:00
|
|
|
import logging
|
2022-12-28 01:59:42 +00:00
|
|
|
from random import randint
|
2023-07-03 18:56:21 +00:00
|
|
|
from types import MappingProxyType
|
2024-04-30 09:29:43 +00:00
|
|
|
from typing import TYPE_CHECKING, Any, Generic, Self, cast
|
2023-02-07 04:29:47 +00:00
|
|
|
|
2024-02-21 02:57:36 +00:00
|
|
|
from async_interrupt import interrupt
|
2024-10-03 20:28:00 +00:00
|
|
|
from propcache import cached_property
|
2024-07-01 09:51:51 +00:00
|
|
|
from typing_extensions import TypeVar
|
2024-02-21 02:57:36 +00:00
|
|
|
|
2021-12-23 19:14:47 +00:00
|
|
|
from . import data_entry_flow, loader
|
2022-01-11 16:24:59 +00:00
|
|
|
from .components import persistent_notification
|
2022-01-09 05:08:04 +00:00
|
|
|
from .const import EVENT_HOMEASSISTANT_STARTED, EVENT_HOMEASSISTANT_STOP, Platform
|
2024-01-31 14:05:52 +00:00
|
|
|
from .core import (
|
|
|
|
CALLBACK_TYPE,
|
2024-07-29 11:35:36 +00:00
|
|
|
DOMAIN as HOMEASSISTANT_DOMAIN,
|
2024-01-31 14:05:52 +00:00
|
|
|
CoreState,
|
|
|
|
Event,
|
|
|
|
HassJob,
|
2024-02-29 14:47:36 +00:00
|
|
|
HassJobType,
|
2024-01-31 14:05:52 +00:00
|
|
|
HomeAssistant,
|
|
|
|
callback,
|
|
|
|
)
|
2024-10-08 10:18:45 +00:00
|
|
|
from .data_entry_flow import FLOW_NOT_COMPLETE_STEPS, FlowContext, FlowResult
|
2022-11-25 10:33:03 +00:00
|
|
|
from .exceptions import (
|
|
|
|
ConfigEntryAuthFailed,
|
|
|
|
ConfigEntryError,
|
|
|
|
ConfigEntryNotReady,
|
|
|
|
HomeAssistantError,
|
|
|
|
)
|
2024-01-31 14:05:52 +00:00
|
|
|
from .helpers import device_registry, entity_registry, issue_registry as ir, storage
|
2023-06-16 02:15:07 +00:00
|
|
|
from .helpers.debounce import Debouncer
|
2024-09-23 14:49:21 +00:00
|
|
|
from .helpers.discovery_flow import DiscoveryKey
|
2024-05-05 20:29:43 +00:00
|
|
|
from .helpers.dispatcher import SignalType, async_dispatcher_send_internal
|
2022-12-28 01:59:42 +00:00
|
|
|
from .helpers.event import (
|
|
|
|
RANDOM_MICROSECOND_MAX,
|
|
|
|
RANDOM_MICROSECOND_MIN,
|
|
|
|
async_call_later,
|
|
|
|
)
|
2021-12-23 19:14:47 +00:00
|
|
|
from .helpers.frame import report
|
2024-10-03 17:51:09 +00:00
|
|
|
from .helpers.json import json_bytes, json_bytes_sorted, json_fragment
|
2021-12-23 19:14:47 +00:00
|
|
|
from .helpers.typing import UNDEFINED, ConfigType, DiscoveryInfoType, UndefinedType
|
2024-02-08 14:39:01 +00:00
|
|
|
from .loader import async_suggest_report_issue
|
2024-03-19 01:45:34 +00:00
|
|
|
from .setup import (
|
|
|
|
DATA_SETUP_DONE,
|
|
|
|
SetupPhases,
|
|
|
|
async_pause_setup,
|
|
|
|
async_process_deps_reqs,
|
|
|
|
async_setup_component,
|
|
|
|
async_start_setup,
|
|
|
|
)
|
2024-06-03 08:34:09 +00:00
|
|
|
from .util import ulid as ulid_util
|
2024-02-27 04:55:41 +00:00
|
|
|
from .util.async_ import create_eager_task
|
2021-12-23 19:14:47 +00:00
|
|
|
from .util.decorator import Registry
|
2024-07-29 20:08:46 +00:00
|
|
|
from .util.dt import utc_from_timestamp, utcnow
|
2024-05-13 08:16:18 +00:00
|
|
|
from .util.enum import try_parse_enum
|
2019-07-25 06:08:20 +00:00
|
|
|
|
2021-11-15 17:05:45 +00:00
|
|
|
if TYPE_CHECKING:
|
2022-07-30 00:53:33 +00:00
|
|
|
from .components.bluetooth import BluetoothServiceInfoBleak
|
2021-12-23 19:14:47 +00:00
|
|
|
from .components.dhcp import DhcpServiceInfo
|
|
|
|
from .components.hassio import HassioServiceInfo
|
|
|
|
from .components.ssdp import SsdpServiceInfo
|
|
|
|
from .components.usb import UsbServiceInfo
|
|
|
|
from .components.zeroconf import ZeroconfServiceInfo
|
2022-07-14 16:09:09 +00:00
|
|
|
from .helpers.service_info.mqtt import MqttServiceInfo
|
2021-11-15 17:05:45 +00:00
|
|
|
|
2024-01-26 06:20:19 +00:00
|
|
|
|
2018-02-16 22:07:38 +00:00
|
|
|
_LOGGER = logging.getLogger(__name__)
|
2018-08-09 11:24:14 +00:00
|
|
|
|
2022-07-08 23:55:31 +00:00
|
|
|
SOURCE_BLUETOOTH = "bluetooth"
|
2022-02-10 10:12:38 +00:00
|
|
|
SOURCE_DHCP = "dhcp"
|
2019-07-31 19:25:30 +00:00
|
|
|
SOURCE_DISCOVERY = "discovery"
|
2024-02-26 17:04:33 +00:00
|
|
|
SOURCE_HARDWARE = "hardware"
|
2020-06-15 11:38:38 +00:00
|
|
|
SOURCE_HASSIO = "hassio"
|
|
|
|
SOURCE_HOMEKIT = "homekit"
|
2019-07-31 19:25:30 +00:00
|
|
|
SOURCE_IMPORT = "import"
|
2020-05-13 13:11:00 +00:00
|
|
|
SOURCE_INTEGRATION_DISCOVERY = "integration_discovery"
|
2020-10-07 16:30:51 +00:00
|
|
|
SOURCE_MQTT = "mqtt"
|
2019-10-29 06:32:57 +00:00
|
|
|
SOURCE_SSDP = "ssdp"
|
2024-03-05 15:01:31 +00:00
|
|
|
SOURCE_SYSTEM = "system"
|
2021-08-20 19:04:18 +00:00
|
|
|
SOURCE_USB = "usb"
|
2019-10-29 06:32:57 +00:00
|
|
|
SOURCE_USER = "user"
|
|
|
|
SOURCE_ZEROCONF = "zeroconf"
|
2019-12-21 10:22:07 +00:00
|
|
|
|
2023-01-15 22:00:51 +00:00
|
|
|
# If a user wants to hide a discovery from the UI they can "Ignore" it. The
|
|
|
|
# config_entries/ignore_flow websocket command creates a config entry with this
|
|
|
|
# source and while it exists normal discoveries with the same unique id are ignored.
|
2019-12-18 06:41:01 +00:00
|
|
|
SOURCE_IGNORE = "ignore"
|
2018-08-09 11:24:14 +00:00
|
|
|
|
2020-09-21 02:15:48 +00:00
|
|
|
# This is used to signal that re-authentication is required by the user.
|
|
|
|
SOURCE_REAUTH = "reauth"
|
|
|
|
|
2024-03-01 11:29:35 +00:00
|
|
|
# This is used to initiate a reconfigure flow by the user.
|
|
|
|
SOURCE_RECONFIGURE = "reconfigure"
|
|
|
|
|
2022-02-23 19:58:42 +00:00
|
|
|
HANDLERS: Registry[str, type[ConfigFlow]] = Registry()
|
2018-02-16 22:07:38 +00:00
|
|
|
|
2019-07-31 19:25:30 +00:00
|
|
|
STORAGE_KEY = "core.config_entries"
|
2018-06-25 16:53:49 +00:00
|
|
|
STORAGE_VERSION = 1
|
2024-09-23 14:49:21 +00:00
|
|
|
STORAGE_VERSION_MINOR = 4
|
2018-06-25 16:53:49 +00:00
|
|
|
|
2018-02-16 22:07:38 +00:00
|
|
|
SAVE_DELAY = 1
|
|
|
|
|
2023-06-16 02:15:07 +00:00
|
|
|
DISCOVERY_COOLDOWN = 1
|
|
|
|
|
2024-04-30 09:29:43 +00:00
|
|
|
_DataT = TypeVar("_DataT", default=Any)
|
2021-12-27 16:55:17 +00:00
|
|
|
|
2021-05-20 17:19:20 +00:00
|
|
|
|
|
|
|
class ConfigEntryState(Enum):
    """Config entry state."""

    # Each member's value is a (value, recoverable) tuple; Enum calls
    # __new__ below with the unpacked tuple, so `recoverable` becomes
    # per-member metadata controlling whether unload/reload is allowed.
    LOADED = "loaded", True
    """The config entry has been set up successfully"""
    SETUP_ERROR = "setup_error", True
    """There was an error while trying to set up this config entry"""
    MIGRATION_ERROR = "migration_error", False
    """There was an error while trying to migrate the config entry to a new version"""
    SETUP_RETRY = "setup_retry", True
    """The config entry was not ready to be set up yet, but might be later"""
    NOT_LOADED = "not_loaded", True
    """The config entry has not been loaded"""
    FAILED_UNLOAD = "failed_unload", False
    """An error occurred when trying to unload the entry"""
    SETUP_IN_PROGRESS = "setup_in_progress", False
    """The config entry is setting up."""

    # Set per-member in __new__; not a member itself.
    _recoverable: bool

    def __new__(cls, value: str, recoverable: bool) -> Self:
        """Create new ConfigEntryState."""
        obj = object.__new__(cls)
        # Only the string becomes the Enum value; recoverable is stashed
        # on the member object for the `recoverable` property below.
        obj._value_ = value
        obj._recoverable = recoverable  # noqa: SLF001
        return obj

    @property
    def recoverable(self) -> bool:
        """Get if the state is recoverable.

        If the entry state is recoverable, unloads
        and reloads are allowed.
        """
        return self._recoverable
|
|
|
|
|
2019-03-01 04:27:20 +00:00
|
|
|
|
2020-06-15 11:38:38 +00:00
|
|
|
DEFAULT_DISCOVERY_UNIQUE_ID = "default_discovery_unique_id"
|
2019-07-31 19:25:30 +00:00
|
|
|
DISCOVERY_NOTIFICATION_ID = "config_entry_discovery"
|
2022-05-31 03:24:34 +00:00
|
|
|
DISCOVERY_SOURCES = {
|
2022-07-08 23:55:31 +00:00
|
|
|
SOURCE_BLUETOOTH,
|
2021-08-20 15:02:03 +00:00
|
|
|
SOURCE_DHCP,
|
2020-01-03 16:28:05 +00:00
|
|
|
SOURCE_DISCOVERY,
|
2024-02-26 17:04:33 +00:00
|
|
|
SOURCE_HARDWARE,
|
2024-10-16 11:34:28 +00:00
|
|
|
SOURCE_HASSIO,
|
2022-02-10 10:12:38 +00:00
|
|
|
SOURCE_HOMEKIT,
|
2020-01-03 16:28:05 +00:00
|
|
|
SOURCE_IMPORT,
|
2022-02-05 16:36:04 +00:00
|
|
|
SOURCE_INTEGRATION_DISCOVERY,
|
2022-02-10 10:12:38 +00:00
|
|
|
SOURCE_MQTT,
|
|
|
|
SOURCE_SSDP,
|
2024-10-16 09:27:48 +00:00
|
|
|
SOURCE_SYSTEM,
|
2022-02-10 10:12:38 +00:00
|
|
|
SOURCE_USB,
|
|
|
|
SOURCE_ZEROCONF,
|
2022-05-31 03:24:34 +00:00
|
|
|
}
|
2018-04-22 19:00:24 +00:00
|
|
|
|
2020-10-15 20:46:27 +00:00
|
|
|
RECONFIGURE_NOTIFICATION_ID = "config_entry_reconfigure"
|
|
|
|
|
2019-07-31 19:25:30 +00:00
|
|
|
EVENT_FLOW_DISCOVERED = "config_entry_discovered"
|
2018-06-18 03:03:29 +00:00
|
|
|
|
2024-03-27 13:25:02 +00:00
|
|
|
SIGNAL_CONFIG_ENTRY_CHANGED = SignalType["ConfigEntryChange", "ConfigEntry"](
|
|
|
|
"config_entry_changed"
|
|
|
|
)
|
2022-09-05 03:57:43 +00:00
|
|
|
|
2024-09-23 19:48:11 +00:00
|
|
|
|
|
|
|
@cache
def signal_discovered_config_entry_removed(
    discovery_domain: str,
) -> SignalType[ConfigEntry]:
    """Format signal."""
    # Memoized via @cache so repeated lookups for the same domain return
    # the identical SignalType instance.
    signal_name = f"{discovery_domain}_discovered_config_entry_removed"
    return SignalType(signal_name)
|
|
|
|
|
|
|
|
|
2023-09-02 21:46:53 +00:00
|
|
|
NO_RESET_TRIES_STATES = {
|
|
|
|
ConfigEntryState.SETUP_RETRY,
|
|
|
|
ConfigEntryState.SETUP_IN_PROGRESS,
|
|
|
|
}
|
|
|
|
|
2022-09-05 03:57:43 +00:00
|
|
|
|
|
|
|
class ConfigEntryChange(StrEnum):
    """What was changed in a config entry."""

    # Sent as the change type alongside SIGNAL_CONFIG_ENTRY_CHANGED.
    ADDED = "added"
    REMOVED = "removed"
    UPDATED = "updated"
|
|
|
|
|
2021-12-15 19:53:21 +00:00
|
|
|
|
|
|
|
class ConfigEntryDisabler(StrEnum):
    """What disabled a config entry."""

    # The entry was disabled by the user.
    USER = "user"
|
|
|
|
|
|
|
|
|
|
|
|
# DISABLED_* is deprecated, to be removed in 2022.3
|
|
|
|
DISABLED_USER = ConfigEntryDisabler.USER.value
|
2021-05-04 18:08:51 +00:00
|
|
|
|
|
|
|
RELOAD_AFTER_UPDATE_DELAY = 30
|
|
|
|
|
|
|
|
# Deprecated: Connection classes
|
|
|
|
# These aren't used anymore since 2021.6.0
|
|
|
|
# Mainly here not to break custom integrations.
|
2019-07-31 19:25:30 +00:00
|
|
|
CONN_CLASS_CLOUD_PUSH = "cloud_push"
|
|
|
|
CONN_CLASS_CLOUD_POLL = "cloud_poll"
|
|
|
|
CONN_CLASS_LOCAL_PUSH = "local_push"
|
|
|
|
CONN_CLASS_LOCAL_POLL = "local_poll"
|
|
|
|
CONN_CLASS_ASSUMED = "assumed"
|
|
|
|
CONN_CLASS_UNKNOWN = "unknown"
|
2018-09-17 08:12:46 +00:00
|
|
|
|
2018-02-16 22:07:38 +00:00
|
|
|
|
2019-03-01 04:27:20 +00:00
|
|
|
# Base class for the config-entry error hierarchy in this module.
class ConfigError(HomeAssistantError):
    """Error while configuring an account."""
|
|
|
|
|
|
|
|
|
|
|
|
# Raised when an entry_id does not resolve to a known config entry.
class UnknownEntry(ConfigError):
    """Unknown entry specified."""
|
|
|
|
|
|
|
|
|
|
|
|
# Raised when an operation conflicts with the entry's current state
# (e.g. setting up an entry that is already loaded).
class OperationNotAllowed(ConfigError):
    """Raised when a config entry operation is not allowed."""
|
|
|
|
|
|
|
|
|
2024-05-17 12:42:21 +00:00
|
|
|
type UpdateListenerType = Callable[
|
|
|
|
[HomeAssistant, ConfigEntry], Coroutine[Any, Any, None]
|
|
|
|
]
|
2020-07-22 15:06:37 +00:00
|
|
|
|
2024-10-03 17:51:09 +00:00
|
|
|
STATE_KEYS = {
|
2024-03-28 09:52:21 +00:00
|
|
|
"state",
|
|
|
|
"reason",
|
|
|
|
"error_reason_translation_key",
|
|
|
|
"error_reason_translation_placeholders",
|
|
|
|
}
|
2024-10-03 17:51:09 +00:00
|
|
|
FROZEN_CONFIG_ENTRY_ATTRS = {"entry_id", "domain", *STATE_KEYS}
|
2024-02-16 16:15:05 +00:00
|
|
|
UPDATE_ENTRY_CONFIG_ENTRY_ATTRS = {
|
|
|
|
"unique_id",
|
|
|
|
"title",
|
|
|
|
"data",
|
|
|
|
"options",
|
|
|
|
"pref_disable_new_entities",
|
|
|
|
"pref_disable_polling",
|
|
|
|
"minor_version",
|
|
|
|
"version",
|
|
|
|
}
|
|
|
|
|
2020-07-22 15:06:37 +00:00
|
|
|
|
2024-10-08 10:18:45 +00:00
|
|
|
class ConfigFlowContext(FlowContext, total=False):
    """Typed context dict for config flow."""

    # All keys are optional (total=False); flows set only what applies.
    alternative_domain: str
    configuration_url: str
    confirm_only: bool
    discovery_key: DiscoveryKey
    entry_id: str
    title_placeholders: Mapping[str, str]
    unique_id: str | None
|
|
|
|
|
|
|
|
|
|
|
|
class ConfigFlowResult(FlowResult[ConfigFlowContext, str], total=False):
    """Typed result dict for config flow."""

    # Config-flow specific keys on top of the generic FlowResult;
    # all optional (total=False).
    minor_version: int
    options: Mapping[str, Any]
    version: int
|
2024-02-29 15:52:39 +00:00
|
|
|
|
|
|
|
|
2024-09-10 18:39:51 +00:00
|
|
|
def _validate_item(*, disabled_by: ConfigEntryDisabler | Any | None = None) -> None:
|
|
|
|
"""Validate config entry item."""
|
|
|
|
|
|
|
|
# Deprecated in 2022.1, stopped working in 2024.10
|
|
|
|
if disabled_by is not None and not isinstance(disabled_by, ConfigEntryDisabler):
|
|
|
|
raise TypeError(
|
|
|
|
f"disabled_by must be a ConfigEntryDisabler value, got {disabled_by}"
|
|
|
|
)
|
|
|
|
|
|
|
|
|
2024-04-30 09:29:43 +00:00
|
|
|
class ConfigEntry(Generic[_DataT]):
    """Hold a configuration entry."""

    # Identity and stored payload.
    entry_id: str
    domain: str
    title: str
    data: MappingProxyType[str, Any]
    # Arbitrary per-integration runtime object (typed via _DataT).
    runtime_data: _DataT
    options: MappingProxyType[str, Any]
    unique_id: str | None
    # Current state and, when setup failed, the reason plus optional
    # translation info for the frontend.
    state: ConfigEntryState
    reason: str | None
    error_reason_translation_key: str | None
    error_reason_translation_placeholders: dict[str, Any] | None
    # User preferences.
    pref_disable_new_entities: bool
    pref_disable_polling: bool
    version: int
    source: str
    minor_version: int
    disabled_by: ConfigEntryDisabler | None
    # Capability flags resolved lazily; None means "not yet determined".
    supports_unload: bool | None
    supports_remove_device: bool | None
    _supports_options: bool | None
    _supports_reconfigure: bool | None
    update_listeners: list[UpdateListenerType]
    # Callable that cancels a scheduled setup retry, if one is pending.
    _async_cancel_retry_setup: Callable[[], Any] | None
    _on_unload: list[Callable[[], Coroutine[Any, Any, None] | None]] | None
    # Serializes setup/reload; reauth flows take their own lock.
    setup_lock: asyncio.Lock
    _reauth_lock: asyncio.Lock
    _tasks: set[asyncio.Future[Any]]
    _background_tasks: set[asyncio.Future[Any]]
    _integration_for_domain: loader.Integration | None
    # Consecutive failed setup attempts; feeds retry back-off.
    _tries: int
    created_at: datetime
    modified_at: datetime
    discovery_keys: MappingProxyType[str, tuple[DiscoveryKey, ...]]
|
2024-02-16 16:15:05 +00:00
|
|
|
|
2019-07-31 19:25:30 +00:00
|
|
|
    def __init__(
        self,
        *,
        created_at: datetime | None = None,
        data: Mapping[str, Any],
        disabled_by: ConfigEntryDisabler | None = None,
        discovery_keys: MappingProxyType[str, tuple[DiscoveryKey, ...]],
        domain: str,
        entry_id: str | None = None,
        minor_version: int,
        modified_at: datetime | None = None,
        options: Mapping[str, Any] | None,
        pref_disable_new_entities: bool | None = None,
        pref_disable_polling: bool | None = None,
        source: str,
        state: ConfigEntryState = ConfigEntryState.NOT_LOADED,
        title: str,
        unique_id: str | None,
        version: int,
    ) -> None:
        """Initialize a config entry.

        Attributes are written with object.__setattr__ because this class
        guards normal attribute writes in __setattr__ (frozen attrs and
        attrs that must go through async_update_entry).
        """
        _setter = object.__setattr__
        # Unique id of the config entry
        _setter(self, "entry_id", entry_id or ulid_util.ulid_now())

        # Version of the configuration.
        _setter(self, "version", version)
        _setter(self, "minor_version", minor_version)

        # Domain the configuration belongs to
        _setter(self, "domain", domain)

        # Title of the configuration
        _setter(self, "title", title)

        # Config data (wrapped read-only)
        _setter(self, "data", MappingProxyType(data))

        # Entry options (wrapped read-only; None becomes an empty mapping)
        _setter(self, "options", MappingProxyType(options or {}))

        # Entry system options
        if pref_disable_new_entities is None:
            pref_disable_new_entities = False

        _setter(self, "pref_disable_new_entities", pref_disable_new_entities)

        if pref_disable_polling is None:
            pref_disable_polling = False

        _setter(self, "pref_disable_polling", pref_disable_polling)

        # Source of the configuration (user, discovery, cloud)
        _setter(self, "source", source)

        # State of the entry (LOADED, NOT_LOADED)
        _setter(self, "state", state)

        # Unique ID of this entry.
        _setter(self, "unique_id", unique_id)

        # Config entry is disabled (raises TypeError for non-enum values)
        _validate_item(disabled_by=disabled_by)
        _setter(self, "disabled_by", disabled_by)

        # Supports unload (None = not yet determined)
        _setter(self, "supports_unload", None)

        # Supports remove device (None = not yet determined)
        _setter(self, "supports_remove_device", None)

        # Supports options (None = not yet determined)
        _setter(self, "_supports_options", None)

        # Supports reconfigure (None = not yet determined)
        _setter(self, "_supports_reconfigure", None)

        # Listeners to call on update
        _setter(self, "update_listeners", [])

        # Reason why config entry is in a failed state
        _setter(self, "reason", None)
        _setter(self, "error_reason_translation_key", None)
        _setter(self, "error_reason_translation_placeholders", None)

        # Function to cancel a scheduled retry
        _setter(self, "_async_cancel_retry_setup", None)

        # Hold list for actions to call on unload.
        _setter(self, "_on_unload", None)

        # Reload lock to prevent conflicting reloads
        _setter(self, "setup_lock", asyncio.Lock())
        # Reauth lock to prevent concurrent reauth flows
        _setter(self, "_reauth_lock", asyncio.Lock())

        _setter(self, "_tasks", set())
        _setter(self, "_background_tasks", set())

        _setter(self, "_integration_for_domain", None)
        _setter(self, "_tries", 0)
        # Timestamps default to "now" for newly created entries.
        _setter(self, "created_at", created_at or utcnow())
        _setter(self, "modified_at", modified_at or utcnow())
        _setter(self, "discovery_keys", discovery_keys)
|
2023-09-02 21:46:53 +00:00
|
|
|
|
2024-01-13 20:34:15 +00:00
|
|
|
def __repr__(self) -> str:
|
|
|
|
"""Representation of ConfigEntry."""
|
|
|
|
return (
|
|
|
|
f"<ConfigEntry entry_id={self.entry_id} version={self.version} domain={self.domain} "
|
|
|
|
f"title={self.title} state={self.state} unique_id={self.unique_id}>"
|
|
|
|
)
|
|
|
|
|
2024-02-16 16:15:05 +00:00
|
|
|
def __setattr__(self, key: str, value: Any) -> None:
|
|
|
|
"""Set an attribute."""
|
|
|
|
if key in UPDATE_ENTRY_CONFIG_ENTRY_ATTRS:
|
2024-09-03 13:56:00 +00:00
|
|
|
raise AttributeError(
|
|
|
|
f"{key} cannot be changed directly, use async_update_entry instead"
|
2024-02-16 16:15:05 +00:00
|
|
|
)
|
2024-09-03 13:56:00 +00:00
|
|
|
if key in FROZEN_CONFIG_ENTRY_ATTRS:
|
2024-02-16 16:15:05 +00:00
|
|
|
raise AttributeError(f"{key} cannot be changed")
|
|
|
|
|
|
|
|
super().__setattr__(key, value)
|
2024-10-03 17:51:09 +00:00
|
|
|
self.clear_state_cache()
|
|
|
|
self.clear_storage_cache()
|
2024-02-16 16:15:05 +00:00
|
|
|
|
2024-01-26 06:20:19 +00:00
|
|
|
@property
|
|
|
|
def supports_options(self) -> bool:
|
|
|
|
"""Return if entry supports config options."""
|
|
|
|
if self._supports_options is None and (handler := HANDLERS.get(self.domain)):
|
|
|
|
# work out if handler has support for options flow
|
2024-02-18 01:52:39 +00:00
|
|
|
object.__setattr__(
|
|
|
|
self, "_supports_options", handler.async_supports_options_flow(self)
|
|
|
|
)
|
2024-01-26 06:20:19 +00:00
|
|
|
return self._supports_options or False
|
|
|
|
|
2024-03-01 11:29:35 +00:00
|
|
|
@property
|
|
|
|
def supports_reconfigure(self) -> bool:
|
2024-05-13 07:26:18 +00:00
|
|
|
"""Return if entry supports reconfigure step."""
|
2024-03-01 11:29:35 +00:00
|
|
|
if self._supports_reconfigure is None and (
|
|
|
|
handler := HANDLERS.get(self.domain)
|
|
|
|
):
|
|
|
|
# work out if handler has support for reconfigure step
|
|
|
|
object.__setattr__(
|
|
|
|
self,
|
|
|
|
"_supports_reconfigure",
|
|
|
|
hasattr(handler, "async_step_reconfigure"),
|
|
|
|
)
|
|
|
|
return self._supports_reconfigure or False
|
|
|
|
|
2024-10-03 17:51:09 +00:00
|
|
|
    def clear_state_cache(self) -> None:
        """Clear cached properties that are included in as_json_fragment."""
        # cached_property stores its value in the instance dict, so popping
        # the key forces recomputation on next access.
        self.__dict__.pop("as_json_fragment", None)
|
2024-02-18 01:52:39 +00:00
|
|
|
|
|
|
|
    @cached_property
    def as_json_fragment(self) -> json_fragment:
        """Return JSON fragment of a config entry that is used for the API."""
        # Invalidated via clear_state_cache(), which __setattr__ calls on
        # every successful attribute write. Datetimes are serialized as
        # POSIX timestamps; tri-state capability flags collapse to bools.
        json_repr = {
            "created_at": self.created_at.timestamp(),
            "entry_id": self.entry_id,
            "domain": self.domain,
            "modified_at": self.modified_at.timestamp(),
            "title": self.title,
            "source": self.source,
            "state": self.state.value,
            "supports_options": self.supports_options,
            "supports_remove_device": self.supports_remove_device or False,
            "supports_unload": self.supports_unload or False,
            "supports_reconfigure": self.supports_reconfigure,
            "pref_disable_new_entities": self.pref_disable_new_entities,
            "pref_disable_polling": self.pref_disable_polling,
            "disabled_by": self.disabled_by,
            "reason": self.reason,
            "error_reason_translation_key": self.error_reason_translation_key,
            "error_reason_translation_placeholders": self.error_reason_translation_placeholders,
        }
        return json_fragment(json_bytes(json_repr))
|
|
|
|
|
2024-10-03 17:51:09 +00:00
|
|
|
    def clear_storage_cache(self) -> None:
        """Clear cached properties that are included in as_storage_fragment."""
        # cached_property stores its value in the instance dict, so popping
        # the key forces recomputation on next access.
        self.__dict__.pop("as_storage_fragment", None)
|
|
|
|
|
|
|
|
@cached_property
|
|
|
|
def as_storage_fragment(self) -> json_fragment:
|
|
|
|
"""Return a storage fragment for this entry."""
|
|
|
|
return json_fragment(json_bytes_sorted(self.as_dict()))
|
|
|
|
|
2018-07-23 08:24:39 +00:00
|
|
|
    async def async_setup(
        self,
        hass: HomeAssistant,
        *,
        integration: loader.Integration | None = None,
    ) -> None:
        """Set up an entry.

        Ignored and disabled entries are skipped entirely. While setup runs,
        this entry is exposed through the current_entry ContextVar and the
        variable is always reset afterwards.
        """
        if self.source == SOURCE_IGNORE or self.disabled_by:
            return

        current_entry.set(self)
        try:
            await self.__async_setup_with_context(hass, integration)
        finally:
            current_entry.set(None)
|
|
|
|
    async def __async_setup_with_context(
        self,
        hass: HomeAssistant,
        integration: loader.Integration | None,
    ) -> None:
        """Set up an entry, with current_entry set.

        Resolves the integration, imports its component and config flow,
        runs migration, then calls the integration's async_setup_entry.
        The resulting state (LOADED / SETUP_ERROR / SETUP_RETRY / ...) is
        only recorded when this entry's own domain is being set up, not
        when setup was forwarded from another integration.
        """
        # Resolve and cache the integration for this entry's domain.
        if integration is None and not (integration := self._integration_for_domain):
            integration = await loader.async_get_integration(hass, self.domain)
            self._integration_for_domain = integration

        # Only store setup result as state if it was not forwarded.
        if domain_is_integration := self.domain == integration.domain:
            if self.state in (
                ConfigEntryState.LOADED,
                ConfigEntryState.SETUP_IN_PROGRESS,
            ):
                raise OperationNotAllowed(
                    f"The config entry {self.title} ({self.domain}) with entry_id"
                    f" {self.entry_id} cannot be set up because it is already loaded "
                    f"in the {self.state} state"
                )
            # Callers must already hold setup_lock (see async_setup_locked).
            if not self.setup_lock.locked():
                raise OperationNotAllowed(
                    f"The config entry {self.title} ({self.domain}) with entry_id"
                    f" {self.entry_id} cannot be set up because it does not hold "
                    "the setup lock"
                )
            self._async_set_state(hass, ConfigEntryState.SETUP_IN_PROGRESS, None)

            # Determine capability flags once and cache them on the entry.
            if self.supports_unload is None:
                self.supports_unload = await support_entry_unload(hass, self.domain)
            if self.supports_remove_device is None:
                self.supports_remove_device = await support_remove_from_device(
                    hass, self.domain
                )
        try:
            component = await integration.async_get_component()
        except ImportError as err:
            _LOGGER.error(
                "Error importing integration %s to set up %s configuration entry: %s",
                integration.domain,
                self.domain,
                err,
            )
            if domain_is_integration:
                self._async_set_state(
                    hass, ConfigEntryState.SETUP_ERROR, "Import error"
                )
            return

        if domain_is_integration:
            # The config flow platform must import cleanly before setup.
            try:
                await integration.async_get_platform("config_flow")
            except ImportError as err:
                _LOGGER.error(
                    (
                        "Error importing platform config_flow from integration %s to"
                        " set up %s configuration entry: %s"
                    ),
                    integration.domain,
                    self.domain,
                    err,
                )
                self._async_set_state(
                    hass, ConfigEntryState.SETUP_ERROR, "Import error"
                )
                return

            # Perform migration
            if not await self.async_migrate(hass):
                self._async_set_state(hass, ConfigEntryState.MIGRATION_ERROR, None)
                return

            setup_phase = SetupPhases.CONFIG_ENTRY_SETUP
        else:
            setup_phase = SetupPhases.CONFIG_ENTRY_PLATFORM_SETUP

        error_reason = None
        error_reason_translation_key = None
        error_reason_translation_placeholders = None

        try:
            with async_start_setup(
                hass, integration=self.domain, group=self.entry_id, phase=setup_phase
            ):
                result = await component.async_setup_entry(hass, self)

                if not isinstance(result, bool):
                    _LOGGER.error(  # type: ignore[unreachable]
                        "%s.async_setup_entry did not return boolean", integration.domain
                    )
                    result = False
        except ConfigEntryError as exc:
            # Fatal, non-recoverable setup error raised by the integration.
            error_reason = str(exc) or "Unknown fatal config entry error"
            error_reason_translation_key = exc.translation_key
            error_reason_translation_placeholders = exc.translation_placeholders
            _LOGGER.exception(
                "Error setting up entry %s for %s: %s",
                self.title,
                self.domain,
                error_reason,
            )
            await self._async_process_on_unload(hass)
            result = False
        except ConfigEntryAuthFailed as exc:
            # Authentication failed: log, clean up, and start a reauth flow.
            message = str(exc)
            auth_base_message = "could not authenticate"
            error_reason = message or auth_base_message
            error_reason_translation_key = exc.translation_key
            error_reason_translation_placeholders = exc.translation_placeholders
            auth_message = (
                f"{auth_base_message}: {message}" if message else auth_base_message
            )
            _LOGGER.warning(
                "Config entry '%s' for %s integration %s",
                self.title,
                self.domain,
                auth_message,
            )
            await self._async_process_on_unload(hass)
            self.async_start_reauth(hass)
            result = False
        except ConfigEntryNotReady as exc:
            # Transient failure: record SETUP_RETRY and schedule another
            # attempt with exponential back-off plus random jitter.
            message = str(exc)
            error_reason_translation_key = exc.translation_key
            error_reason_translation_placeholders = exc.translation_placeholders
            self._async_set_state(
                hass,
                ConfigEntryState.SETUP_RETRY,
                message or None,
                error_reason_translation_key,
                error_reason_translation_placeholders,
            )
            wait_time = 2 ** min(self._tries, 4) * 5 + (
                randint(RANDOM_MICROSECOND_MIN, RANDOM_MICROSECOND_MAX) / 1000000
            )
            self._tries += 1
            ready_message = f"ready yet: {message}" if message else "ready yet"
            _LOGGER.debug(
                (
                    "Config entry '%s' for %s integration not %s; Retrying in %d"
                    " seconds"
                ),
                self.title,
                self.domain,
                ready_message,
                wait_time,
            )

            # Before startup has finished, wait for the STARTED event
            # instead of the timer so retries do not slow down boot.
            if hass.state is CoreState.running:
                self._async_cancel_retry_setup = async_call_later(
                    hass,
                    wait_time,
                    HassJob(
                        functools.partial(self._async_setup_again, hass),
                        job_type=HassJobType.Callback,
                        cancel_on_shutdown=True,
                    ),
                )
            else:
                self._async_cancel_retry_setup = hass.bus.async_listen(
                    EVENT_HOMEASSISTANT_STARTED,
                    functools.partial(self._async_setup_again, hass),
                )

            await self._async_process_on_unload(hass)
            return
        # pylint: disable-next=broad-except
        except (asyncio.CancelledError, SystemExit, Exception):
            _LOGGER.exception(
                "Error setting up entry %s for %s", self.title, integration.domain
            )
            result = False

        #
        # After successfully calling async_setup_entry, it is important that this function
        # does not yield to the event loop by using `await` or `async with` or
        # similar until after the state has been set by calling self._async_set_state.
        #
        # Otherwise we risk that any `call_soon`s
        # created by an integration will be executed before the state is set.
        #

        # Only store setup result as state if it was not forwarded.
        if not domain_is_integration:
            return

        self.async_cancel_retry_setup()

        if result:
            self._async_set_state(hass, ConfigEntryState.LOADED, None)
        else:
            self._async_set_state(
                hass,
                ConfigEntryState.SETUP_ERROR,
                error_reason,
                error_reason_translation_key,
                error_reason_translation_placeholders,
            )
|
2018-02-16 22:07:38 +00:00
|
|
|
|
2024-02-29 14:47:36 +00:00
|
|
|
@callback
|
|
|
|
def _async_setup_again(self, hass: HomeAssistant, *_: Any) -> None:
|
|
|
|
"""Schedule setup again.
|
|
|
|
|
|
|
|
This method is a callback to ensure that _async_cancel_retry_setup
|
|
|
|
is unset as soon as its callback is called.
|
|
|
|
"""
|
|
|
|
self._async_cancel_retry_setup = None
|
2023-09-02 21:46:53 +00:00
|
|
|
# Check again when we fire in case shutdown
|
|
|
|
# has started so we do not block shutdown
|
|
|
|
if not hass.is_stopping:
|
2024-04-28 13:13:51 +00:00
|
|
|
hass.async_create_background_task(
|
2024-04-30 23:47:12 +00:00
|
|
|
self.async_setup_locked(hass),
|
2024-03-13 00:19:22 +00:00
|
|
|
f"config entry retry {self.domain} {self.title}",
|
|
|
|
eager_start=True,
|
|
|
|
)
|
2023-09-02 21:46:53 +00:00
|
|
|
|
2024-04-30 23:47:12 +00:00
|
|
|
async def async_setup_locked(
|
|
|
|
self, hass: HomeAssistant, integration: loader.Integration | None = None
|
|
|
|
) -> None:
|
|
|
|
"""Set up while holding the setup lock."""
|
|
|
|
async with self.setup_lock:
|
2024-05-20 07:47:47 +00:00
|
|
|
if self.state is ConfigEntryState.LOADED:
|
|
|
|
# If something loaded the config entry while
|
|
|
|
# we were waiting for the lock, we should not
|
|
|
|
# set it up again.
|
|
|
|
_LOGGER.debug(
|
|
|
|
"Not setting up %s (%s %s) again, already loaded",
|
|
|
|
self.title,
|
|
|
|
self.domain,
|
|
|
|
self.entry_id,
|
|
|
|
)
|
|
|
|
return
|
2024-04-30 23:47:12 +00:00
|
|
|
await self.async_setup(hass, integration=integration)
|
2024-04-13 20:26:41 +00:00
|
|
|
|
2024-02-21 02:38:24 +00:00
|
|
|
    @callback
    def async_shutdown(self) -> None:
        """Call when Home Assistant is stopping."""
        # Drop any pending retry listener so setup is not re-attempted
        # while Home Assistant is shutting down.
        self.async_cancel_retry_setup()
|
|
|
|
|
|
|
|
@callback
|
|
|
|
def async_cancel_retry_setup(self) -> None:
|
|
|
|
"""Cancel retry setup."""
|
|
|
|
if self._async_cancel_retry_setup is not None:
|
|
|
|
self._async_cancel_retry_setup()
|
|
|
|
self._async_cancel_retry_setup = None
|
|
|
|
|
2019-07-20 21:35:59 +00:00
|
|
|
    async def async_unload(
        self, hass: HomeAssistant, *, integration: loader.Integration | None = None
    ) -> bool:
        """Unload an entry.

        Returns if unload is possible and was successful.
        """
        # Ignored entries never had anything set up; just mark them unloaded.
        if self.source == SOURCE_IGNORE:
            self._async_set_state(hass, ConfigEntryState.NOT_LOADED, None)
            return True

        if self.state == ConfigEntryState.NOT_LOADED:
            return True

        if not integration and (integration := self._integration_for_domain) is None:
            try:
                integration = await loader.async_get_integration(hass, self.domain)
            except loader.IntegrationNotFound:
                # The integration was likely a custom_component
                # that was uninstalled, or an integration
                # that has been renamed without removing the config
                # entry.
                self._async_set_state(hass, ConfigEntryState.NOT_LOADED, None)
                return True

        component = await integration.async_get_component()

        # domain_is_integration is False when unloading a forwarded platform
        # (e.g. a light platform of this entry) rather than the entry itself.
        if domain_is_integration := self.domain == integration.domain:
            # The caller must hold the setup lock to unload the entry itself.
            if not self.setup_lock.locked():
                raise OperationNotAllowed(
                    f"The config entry {self.title} ({self.domain}) with entry_id"
                    f" {self.entry_id} cannot be unloaded because it does not hold "
                    "the setup lock"
                )

            if not self.state.recoverable:
                return False

            # Entry never finished loading: cancel any pending retry and
            # transition straight to NOT_LOADED.
            if self.state is not ConfigEntryState.LOADED:
                self.async_cancel_retry_setup()
                self._async_set_state(hass, ConfigEntryState.NOT_LOADED, None)
                return True

        supports_unload = hasattr(component, "async_unload_entry")

        if not supports_unload:
            if domain_is_integration:
                self._async_set_state(
                    hass, ConfigEntryState.FAILED_UNLOAD, "Unload not supported"
                )
            return False

        try:
            result = await component.async_unload_entry(hass, self)

            assert isinstance(result, bool)

            # Only adjust state if we unloaded the component
            if domain_is_integration and result:
                # Run on_unload callbacks and wait for tracked tasks,
                # then drop runtime_data before marking NOT_LOADED.
                await self._async_process_on_unload(hass)
                if hasattr(self, "runtime_data"):
                    object.__delattr__(self, "runtime_data")

                self._async_set_state(hass, ConfigEntryState.NOT_LOADED, None)

        except Exception as exc:
            _LOGGER.exception(
                "Error unloading entry %s for %s", self.title, integration.domain
            )
            if domain_is_integration:
                self._async_set_state(
                    hass, ConfigEntryState.FAILED_UNLOAD, str(exc) or "Unknown error"
                )
            return False
        return result
|
2018-02-16 22:07:38 +00:00
|
|
|
|
2019-03-02 05:13:55 +00:00
|
|
|
    async def async_remove(self, hass: HomeAssistant) -> None:
        """Invoke remove callback on component."""
        # Temporarily bump modified_at (the dataclass is frozen, hence
        # object.__setattr__) and invalidate the cached representations.
        old_modified_at = self.modified_at
        object.__setattr__(self, "modified_at", utcnow())
        self.clear_state_cache()
        self.clear_storage_cache()

        # Ignored entries have no component to notify.
        if self.source == SOURCE_IGNORE:
            return

        # Removal requires the caller to hold the setup lock.
        if not self.setup_lock.locked():
            raise OperationNotAllowed(
                f"The config entry {self.title} ({self.domain}) with entry_id"
                f" {self.entry_id} cannot be removed because it does not hold "
                "the setup lock"
            )

        if not (integration := self._integration_for_domain):
            try:
                integration = await loader.async_get_integration(hass, self.domain)
            except loader.IntegrationNotFound:
                # The integration was likely a custom_component
                # that was uninstalled, or an integration
                # that has been renamed without removing the config
                # entry.
                return

        component = await integration.async_get_component()
        # The remove hook is optional for integrations.
        if not hasattr(component, "async_remove_entry"):
            return
        try:
            await component.async_remove_entry(hass, self)
        except Exception:
            _LOGGER.exception(
                "Error calling entry remove callback %s for %s",
                self.title,
                integration.domain,
            )
        # Restore modified_at
        object.__setattr__(self, "modified_at", old_modified_at)
|
2019-03-02 05:13:55 +00:00
|
|
|
|
2022-09-05 03:57:43 +00:00
|
|
|
    @callback
    def _async_set_state(
        self,
        hass: HomeAssistant,
        state: ConfigEntryState,
        reason: str | None,
        error_reason_translation_key: str | None = None,
        error_reason_translation_placeholders: dict[str, str] | None = None,
    ) -> None:
        """Set the state of the config entry.

        Also resets the retry counter for non-retry states, invalidates the
        cached state representation and notifies listeners via dispatcher.
        """
        if state not in NO_RESET_TRIES_STATES:
            self._tries = 0
        # The dataclass is frozen; bypass __setattr__ for all fields.
        _setter = object.__setattr__
        _setter(self, "state", state)
        _setter(self, "reason", reason)
        _setter(self, "error_reason_translation_key", error_reason_translation_key)
        _setter(
            self,
            "error_reason_translation_placeholders",
            error_reason_translation_placeholders,
        )
        self.clear_state_cache()
        # Storage cache is not cleared here because the state is not stored
        # in storage and we do not want to clear the cache on every state change
        # since state changes are frequent.
        async_dispatcher_send_internal(
            hass, SIGNAL_CONFIG_ENTRY_CHANGED, ConfigEntryChange.UPDATED, self
        )
|
|
|
|
|
2019-02-15 17:30:47 +00:00
|
|
|
    async def async_migrate(self, hass: HomeAssistant) -> bool:
        """Migrate an entry.

        Returns True if config entry is up-to-date or has been migrated.
        """
        if (handler := HANDLERS.get(self.domain)) is None:
            _LOGGER.error(
                "Flow handler not found for entry %s for %s", self.title, self.domain
            )
            return False
        # Handler may be a partial
        # Keep for backwards compatibility
        # https://github.com/home-assistant/core/pull/67087#discussion_r812559950
        while isinstance(handler, functools.partial):
            handler = handler.func  # type: ignore[unreachable]

        # Nothing to do when both major and minor versions already match.
        same_major_version = self.version == handler.VERSION
        if same_major_version and self.minor_version == handler.MINOR_VERSION:
            return True

        if not (integration := self._integration_for_domain):
            integration = await loader.async_get_integration(hass, self.domain)
        component = await integration.async_get_component()
        supports_migrate = hasattr(component, "async_migrate_entry")
        if not supports_migrate:
            # A minor-version bump alone is backwards compatible and does
            # not require a migration handler.
            if same_major_version:
                return True
            _LOGGER.error(
                "Migration handler not found for entry %s for %s",
                self.title,
                self.domain,
            )
            return False

        try:
            result = await component.async_migrate_entry(hass, self)
            if not isinstance(result, bool):
                _LOGGER.error(  # type: ignore[unreachable]
                    "%s.async_migrate_entry did not return boolean", self.domain
                )
                return False
            if result:
                # Persist the migrated entry data.
                hass.config_entries._async_schedule_save()  # noqa: SLF001
        except Exception:
            _LOGGER.exception(
                "Error migrating entry %s for %s", self.title, self.domain
            )
            return False
        return result
|
2019-02-15 17:30:47 +00:00
|
|
|
|
2020-07-22 15:06:37 +00:00
|
|
|
def add_update_listener(self, listener: UpdateListenerType) -> CALLBACK_TYPE:
|
2019-02-22 16:59:43 +00:00
|
|
|
"""Listen for when entry is updated.
|
|
|
|
|
|
|
|
Returns function to unlisten.
|
|
|
|
"""
|
2023-07-03 18:56:21 +00:00
|
|
|
self.update_listeners.append(listener)
|
|
|
|
return lambda: self.update_listeners.remove(listener)
|
2019-02-22 16:59:43 +00:00
|
|
|
|
2021-03-17 16:34:55 +00:00
|
|
|
    def as_dict(self) -> dict[str, Any]:
        """Return dictionary version of this entry.

        Datetimes are serialized as ISO strings and mappings are copied to
        plain dicts so the result is safe to serialize.
        """
        return {
            "created_at": self.created_at.isoformat(),
            "data": dict(self.data),
            "discovery_keys": dict(self.discovery_keys),
            "disabled_by": self.disabled_by,
            "domain": self.domain,
            "entry_id": self.entry_id,
            "minor_version": self.minor_version,
            "modified_at": self.modified_at.isoformat(),
            "options": dict(self.options),
            "pref_disable_new_entities": self.pref_disable_new_entities,
            "pref_disable_polling": self.pref_disable_polling,
            "source": self.source,
            "title": self.title,
            "unique_id": self.unique_id,
            "version": self.version,
        }
|
|
|
|
|
2021-04-09 17:14:33 +00:00
|
|
|
@callback
|
2023-04-17 12:41:25 +00:00
|
|
|
def async_on_unload(
|
|
|
|
self, func: Callable[[], Coroutine[Any, Any, None] | None]
|
|
|
|
) -> None:
|
2021-04-09 17:14:33 +00:00
|
|
|
"""Add a function to call when config entry is unloaded."""
|
|
|
|
if self._on_unload is None:
|
|
|
|
self._on_unload = []
|
|
|
|
self._on_unload.append(func)
|
|
|
|
|
2023-04-17 12:41:25 +00:00
|
|
|
    async def _async_process_on_unload(self, hass: HomeAssistant) -> None:
        """Process the on_unload callbacks and wait for pending tasks."""
        if self._on_unload is not None:
            # Pop-and-call so callbacks registered while unloading are
            # also processed; coroutine results become tracked tasks.
            while self._on_unload:
                if job := self._on_unload.pop()():
                    self.async_create_task(hass, job, eager_start=True)

        if not self._tasks and not self._background_tasks:
            return

        # Background tasks are cancelled; normal tasks are allowed to finish.
        cancel_message = f"Config entry {self.title} with {self.domain} unloading"
        for task in self._background_tasks:
            task.cancel(cancel_message)

        _, pending = await asyncio.wait(
            [*self._tasks, *self._background_tasks], timeout=10
        )

        # Anything still pending after the timeout is only logged, not awaited.
        for task in pending:
            _LOGGER.warning(
                "Unloading %s (%s) config entry. Task %s did not complete in time",
                self.title,
                self.domain,
                task,
            )
|
2022-06-29 07:38:35 +00:00
|
|
|
|
2021-04-10 05:41:29 +00:00
|
|
|
    @callback
    def async_start_reauth(
        self,
        hass: HomeAssistant,
        context: ConfigFlowContext | None = None,
        data: dict[str, Any] | None = None,
    ) -> None:
        """Start a reauth flow.

        context and data are forwarded to the reauth flow initialization.
        """
        # We will check this again in the task when we hold the lock,
        # but we also check it now to try to avoid creating the task.
        if any(self.async_get_active_flows(hass, {SOURCE_RECONFIGURE, SOURCE_REAUTH})):
            # Reauth or Reconfigure flow already in progress for this entry
            return
        hass.async_create_task(
            self._async_init_reauth(hass, context, data),
            f"config entry reauth {self.title} {self.domain} {self.entry_id}",
            eager_start=True,
        )
|
|
|
|
|
|
|
|
    async def _async_init_reauth(
        self,
        hass: HomeAssistant,
        context: ConfigFlowContext | None = None,
        data: dict[str, Any] | None = None,
    ) -> None:
        """Start a reauth flow."""
        async with self._reauth_lock:
            # Re-check under the lock to avoid racing a concurrent start.
            if any(
                self.async_get_active_flows(hass, {SOURCE_RECONFIGURE, SOURCE_REAUTH})
            ):
                # Reauth or Reconfigure flow already in progress for this entry
                return
            # Caller-supplied context/data override the defaults via dict merge.
            result = await hass.config_entries.flow.async_init(
                self.domain,
                context=ConfigFlowContext(
                    source=SOURCE_REAUTH,
                    entry_id=self.entry_id,
                    title_placeholders={"name": self.title},
                    unique_id=self.unique_id,
                )
                | (context or {}),
                data=self.data | (data or {}),
            )
        if result["type"] not in FLOW_NOT_COMPLETE_STEPS:
            return

        # Create an issue, there's no need to hold the lock when doing that
        issue_id = f"config_entry_reauth_{self.domain}_{self.entry_id}"
        ir.async_create_issue(
            hass,
            HOMEASSISTANT_DOMAIN,
            issue_id,
            data={"flow_id": result["flow_id"]},
            is_fixable=False,
            issue_domain=self.domain,
            severity=ir.IssueSeverity.ERROR,
            translation_key="config_entry_reauth",
            translation_placeholders={"name": self.title},
        )
|
2021-04-10 05:41:29 +00:00
|
|
|
|
2022-11-09 22:36:50 +00:00
|
|
|
@callback
|
|
|
|
def async_get_active_flows(
|
|
|
|
self, hass: HomeAssistant, sources: set[str]
|
2024-06-06 15:02:13 +00:00
|
|
|
) -> Generator[ConfigFlowResult]:
|
2022-11-09 22:36:50 +00:00
|
|
|
"""Get any active flows of certain sources for this entry."""
|
|
|
|
return (
|
|
|
|
flow
|
2023-06-11 08:41:38 +00:00
|
|
|
for flow in hass.config_entries.flow.async_progress_by_handler(
|
2023-10-31 17:38:05 +00:00
|
|
|
self.domain,
|
|
|
|
match_context={"entry_id": self.entry_id},
|
|
|
|
include_uninitialized=True,
|
2023-06-11 08:41:38 +00:00
|
|
|
)
|
2022-11-09 22:36:50 +00:00
|
|
|
if flow["context"].get("source") in sources
|
|
|
|
)
|
|
|
|
|
2022-06-29 07:38:35 +00:00
|
|
|
@callback
|
2024-05-18 09:43:32 +00:00
|
|
|
def async_create_task[_R](
|
2023-03-05 11:46:02 +00:00
|
|
|
self,
|
|
|
|
hass: HomeAssistant,
|
|
|
|
target: Coroutine[Any, Any, _R],
|
|
|
|
name: str | None = None,
|
2024-04-06 21:22:02 +00:00
|
|
|
eager_start: bool = True,
|
2022-06-29 07:38:35 +00:00
|
|
|
) -> asyncio.Task[_R]:
|
2023-03-19 20:53:21 +00:00
|
|
|
"""Create a task from within the event loop.
|
2022-06-29 07:38:35 +00:00
|
|
|
|
|
|
|
This method must be run in the event loop.
|
|
|
|
|
|
|
|
target: target to call.
|
|
|
|
"""
|
2024-04-28 22:29:00 +00:00
|
|
|
task = hass.async_create_task_internal(
|
2024-02-26 16:36:46 +00:00
|
|
|
target, f"{name} {self.title} {self.domain} {self.entry_id}", eager_start
|
2023-03-05 11:46:02 +00:00
|
|
|
)
|
2024-04-03 09:49:32 +00:00
|
|
|
if eager_start and task.done():
|
2024-02-28 05:09:34 +00:00
|
|
|
return task
|
2023-02-17 18:50:05 +00:00
|
|
|
self._tasks.add(task)
|
|
|
|
task.add_done_callback(self._tasks.remove)
|
|
|
|
|
|
|
|
return task
|
|
|
|
|
|
|
|
@callback
|
2024-05-18 09:43:32 +00:00
|
|
|
def async_create_background_task[_R](
|
2024-02-26 16:36:46 +00:00
|
|
|
self,
|
|
|
|
hass: HomeAssistant,
|
|
|
|
target: Coroutine[Any, Any, _R],
|
|
|
|
name: str,
|
2024-04-06 20:59:24 +00:00
|
|
|
eager_start: bool = True,
|
2023-02-17 18:50:05 +00:00
|
|
|
) -> asyncio.Task[_R]:
|
|
|
|
"""Create a background task tied to the config entry lifecycle.
|
2022-06-29 07:38:35 +00:00
|
|
|
|
2023-02-17 18:50:05 +00:00
|
|
|
Background tasks are automatically canceled when config entry is unloaded.
|
2022-06-29 07:38:35 +00:00
|
|
|
|
2024-03-08 04:32:26 +00:00
|
|
|
A background task is different from a normal task:
|
|
|
|
|
|
|
|
- Will not block startup
|
|
|
|
- Will be automatically cancelled on shutdown
|
|
|
|
- Calls to async_block_till_done will not wait for completion
|
|
|
|
|
|
|
|
This method must be run in the event loop.
|
2023-02-17 18:50:05 +00:00
|
|
|
"""
|
2024-02-26 16:36:46 +00:00
|
|
|
task = hass.async_create_background_task(target, name, eager_start)
|
2024-02-28 05:09:34 +00:00
|
|
|
if task.done():
|
|
|
|
return task
|
2023-02-17 18:50:05 +00:00
|
|
|
self._background_tasks.add(task)
|
|
|
|
task.add_done_callback(self._background_tasks.remove)
|
2022-06-29 07:38:35 +00:00
|
|
|
return task
|
|
|
|
|
2021-04-09 17:14:33 +00:00
|
|
|
|
|
|
|
# Context variable holding the ConfigEntry associated with the current
# execution context; None when no entry is active.
current_entry: ContextVar[ConfigEntry | None] = ContextVar(
    "current_entry", default=None
)
|
|
|
|
|
2018-02-16 22:07:38 +00:00
|
|
|
|
2024-02-21 02:57:36 +00:00
|
|
|
# Raised via async_interrupt.interrupt() to abort config flow
# initialization when Home Assistant is shutting down.
class FlowCancelledError(Exception):
    """Error to indicate that a flow has been cancelled."""
|
|
|
|
|
|
|
|
|
2024-06-13 01:06:11 +00:00
|
|
|
def _report_non_awaited_platform_forwards(entry: ConfigEntry, what: str) -> None:
    """Report non awaited platform forwards."""
    # A non-awaited forward can release the setup lock before setup finishes,
    # allowing the entry to be unloaded while platforms are still loading.
    message = (
        f"calls {what} for integration {entry.domain} with "
        f"title: {entry.title} and entry_id: {entry.entry_id}, "
        f"during setup without awaiting {what}, which can cause "
        "the setup lock to be released before the setup is done. "
        "This will stop working in Home Assistant 2025.1"
    )
    report(
        message,
        error_if_integration=False,
        error_if_core=False,
    )
|
|
|
|
|
|
|
|
|
2024-10-08 10:18:45 +00:00
|
|
|
class ConfigEntriesFlowManager(
|
|
|
|
data_entry_flow.FlowManager[ConfigFlowContext, ConfigFlowResult]
|
|
|
|
):
|
2020-01-03 10:52:01 +00:00
|
|
|
"""Manage all the config entry flows that are in progress."""
|
|
|
|
|
2024-02-29 15:52:39 +00:00
|
|
|
_flow_result = ConfigFlowResult
|
|
|
|
|
2020-01-03 10:52:01 +00:00
|
|
|
    def __init__(
        self,
        hass: HomeAssistant,
        config_entries: ConfigEntries,
        hass_config: ConfigType,
    ) -> None:
        """Initialize the config entry flow manager."""
        super().__init__(hass)
        self.config_entries = config_entries
        self._hass_config = hass_config
        # Per-handler futures resolved when an import flow finishes init.
        self._pending_import_flows: defaultdict[
            str, dict[str, asyncio.Future[None]]
        ] = defaultdict(dict)
        # Per-handler futures used to cancel in-flight flow init at shutdown.
        self._initialize_futures: defaultdict[str, set[asyncio.Future[None]]] = (
            defaultdict(set)
        )
        # Debounce discovery notifications; runs in the background so it
        # does not block startup.
        self._discovery_debouncer = Debouncer[None](
            hass,
            _LOGGER,
            cooldown=DISCOVERY_COOLDOWN,
            immediate=True,
            function=self._async_discovery,
            background=True,
        )
|
2023-01-17 14:26:17 +00:00
|
|
|
|
2023-01-18 09:44:18 +00:00
|
|
|
async def async_wait_import_flow_initialized(self, handler: str) -> None:
|
|
|
|
"""Wait till all import flows in progress are initialized."""
|
|
|
|
if not (current := self._pending_import_flows.get(handler)):
|
2023-01-17 14:26:17 +00:00
|
|
|
return
|
|
|
|
|
|
|
|
await asyncio.wait(current.values())
|
2020-01-03 10:52:01 +00:00
|
|
|
|
2021-10-22 17:19:49 +00:00
|
|
|
@callback
|
|
|
|
def _async_has_other_discovery_flows(self, flow_id: str) -> bool:
|
|
|
|
"""Check if there are any other discovery flows in progress."""
|
2024-08-03 22:33:46 +00:00
|
|
|
for flow in self._progress.values():
|
|
|
|
if flow.flow_id != flow_id and flow.context["source"] in DISCOVERY_SOURCES:
|
|
|
|
return True
|
|
|
|
return False
|
2021-10-22 17:19:49 +00:00
|
|
|
|
2023-01-17 14:26:17 +00:00
|
|
|
    async def async_init(
        self,
        handler: str,
        *,
        context: ConfigFlowContext | None = None,
        data: Any = None,
    ) -> ConfigFlowResult:
        """Start a configuration flow.

        Raises KeyError if the context is missing or has no source.
        """
        if not context or "source" not in context:
            raise KeyError("Context not set or doesn't have a source set")

        flow_id = ulid_util.ulid_now()

        # Avoid starting a config flow on an integration that only supports
        # a single config entry, but which already has an entry
        if (
            context.get("source")
            not in {SOURCE_IGNORE, SOURCE_REAUTH, SOURCE_RECONFIGURE}
            and self.config_entries.async_has_entries(handler, include_ignore=False)
            and await _support_single_config_entry_only(self.hass, handler)
        ):
            return ConfigFlowResult(
                type=data_entry_flow.FlowResultType.ABORT,
                flow_id=flow_id,
                handler=handler,
                reason="single_instance_allowed",
                translation_domain=HOMEASSISTANT_DOMAIN,
            )

        loop = self.hass.loop

        # Import flows expose a future so callers can wait for initialization.
        if context["source"] == SOURCE_IMPORT:
            self._pending_import_flows[handler][flow_id] = loop.create_future()

        # Track a cancel future so async_shutdown can interrupt the init.
        cancel_init_future = loop.create_future()
        handler_init_futures = self._initialize_futures[handler]
        handler_init_futures.add(cancel_init_future)
        try:
            async with interrupt(
                cancel_init_future,
                FlowCancelledError,
                "Config entry initialize canceled: Home Assistant is shutting down",
            ):
                flow, result = await self._async_init(flow_id, handler, context, data)
        except FlowCancelledError as ex:
            raise asyncio.CancelledError from ex
        finally:
            # Clean up bookkeeping; empty per-handler containers are removed
            # so the defaultdicts do not grow unboundedly.
            handler_init_futures.remove(cancel_init_future)
            if not handler_init_futures:
                del self._initialize_futures[handler]
            if handler in self._pending_import_flows:
                self._pending_import_flows[handler].pop(flow_id, None)
                if not self._pending_import_flows[handler]:
                    del self._pending_import_flows[handler]

        if result["type"] != data_entry_flow.FlowResultType.ABORT:
            await self.async_post_init(flow, result)

        return result
|
|
|
|
|
|
|
|
    async def _async_init(
        self,
        flow_id: str,
        handler: str,
        context: ConfigFlowContext,
        data: Any,
    ) -> tuple[ConfigFlow, ConfigFlowResult]:
        """Run the init in a task to allow it to be canceled at shutdown.

        Creates the flow handler instance for `handler`, attaches the
        identifying attributes, registers it in the progress index and runs
        its first step.  Returns both the flow object and the first result.
        """
        flow = await self.async_create_flow(handler, context=context, data=data)
        if not flow:
            raise data_entry_flow.UnknownFlow("Flow was not created")
        # Wire up the flow before running any step so steps can use these.
        flow.hass = self.hass
        flow.handler = handler
        flow.flow_id = flow_id
        flow.context = context
        flow.init_data = data
        self._async_add_flow_progress(flow)
        try:
            result = await self._async_handle_step(flow, flow.init_step, data)
        finally:
            # Always resolve a pending import future, even if the step raised,
            # so callers awaiting the import are not blocked forever.
            self._set_pending_import_done(flow)
        return flow, result
|
|
|
|
|
2024-08-16 21:48:03 +00:00
|
|
|
def _set_pending_import_done(self, flow: ConfigFlow) -> None:
|
|
|
|
"""Set pending import flow as done."""
|
|
|
|
if (
|
|
|
|
(handler_import_flows := self._pending_import_flows.get(flow.handler))
|
|
|
|
and (init_done := handler_import_flows.get(flow.flow_id))
|
|
|
|
and not init_done.done()
|
|
|
|
):
|
|
|
|
init_done.set_result(None)
|
|
|
|
|
2024-02-24 07:37:33 +00:00
|
|
|
@callback
|
|
|
|
def async_shutdown(self) -> None:
|
2023-01-17 14:26:17 +00:00
|
|
|
"""Cancel any initializing flows."""
|
2024-02-21 02:57:36 +00:00
|
|
|
for future_list in self._initialize_futures.values():
|
|
|
|
for future in future_list:
|
|
|
|
future.set_result(None)
|
2024-02-24 07:37:33 +00:00
|
|
|
self._discovery_debouncer.async_shutdown()
|
2023-01-17 14:26:17 +00:00
|
|
|
|
2020-01-03 10:52:01 +00:00
|
|
|
    async def async_finish_flow(
        self,
        flow: data_entry_flow.FlowHandler[ConfigFlowContext, ConfigFlowResult],
        result: ConfigFlowResult,
    ) -> ConfigFlowResult:
        """Finish a config flow and add an entry.

        This method is called when a flow step returns FlowResultType.ABORT or
        FlowResultType.CREATE_ENTRY.
        """
        flow = cast(ConfigFlow, flow)

        # Mark the step as done.
        # We do this to avoid a circular dependency where async_finish_flow sets up a
        # new entry, which needs the integration to be set up, which is waiting for
        # init to be done.
        self._set_pending_import_done(flow)

        # Remove notification if no other discovery config entries in progress
        if not self._async_has_other_discovery_flows(flow.flow_id):
            persistent_notification.async_dismiss(self.hass, DISCOVERY_NOTIFICATION_ID)

        # Clean up issue if this is a reauth flow
        if flow.context["source"] == SOURCE_REAUTH:
            if (entry_id := flow.context.get("entry_id")) is not None and (
                entry := self.config_entries.async_get_entry(entry_id)
            ) is not None:
                issue_id = f"config_entry_reauth_{entry.domain}_{entry.entry_id}"
                ir.async_delete_issue(self.hass, HOMEASSISTANT_DOMAIN, issue_id)

        if result["type"] != data_entry_flow.FlowResultType.CREATE_ENTRY:
            # If there's an ignored config entry with a matching unique ID,
            # update the discovery key.
            if (
                (discovery_key := flow.context.get("discovery_key"))
                and (unique_id := flow.unique_id) is not None
                and (
                    entry := self.config_entries.async_entry_for_domain_unique_id(
                        result["handler"], unique_id
                    )
                )
                and discovery_key
                not in (
                    known_discovery_keys := entry.discovery_keys.get(
                        discovery_key.domain, ()
                    )
                )
            ):
                # Keep only the 10 most recent keys per discovery domain.
                new_discovery_keys = MappingProxyType(
                    entry.discovery_keys
                    | {
                        discovery_key.domain: tuple(
                            [*known_discovery_keys, discovery_key][-10:]
                        )
                    }
                )
                _LOGGER.debug(
                    "Updating discovery keys for %s entry %s %s -> %s",
                    entry.domain,
                    unique_id,
                    entry.discovery_keys,
                    new_discovery_keys,
                )
                self.config_entries.async_update_entry(
                    entry, discovery_keys=new_discovery_keys
                )
            return result

        # Avoid adding a config entry for a integration
        # that only supports a single config entry, but already has an entry
        if (
            self.config_entries.async_has_entries(flow.handler, include_ignore=False)
            and await _support_single_config_entry_only(self.hass, flow.handler)
            and flow.context["source"] != SOURCE_IGNORE
        ):
            return ConfigFlowResult(
                type=data_entry_flow.FlowResultType.ABORT,
                flow_id=flow.flow_id,
                handler=flow.handler,
                reason="single_instance_allowed",
                translation_domain=HOMEASSISTANT_DOMAIN,
            )

        # Check if config entry exists with unique ID. Unload it.
        existing_entry = None

        # Abort all flows in progress with same unique ID
        # or the default discovery ID
        for progress_flow in self.async_progress_by_handler(flow.handler):
            progress_unique_id = progress_flow["context"].get("unique_id")
            progress_flow_id = progress_flow["flow_id"]

            if progress_flow_id != flow.flow_id and (
                (flow.unique_id and progress_unique_id == flow.unique_id)
                or progress_unique_id == DEFAULT_DISCOVERY_UNIQUE_ID
            ):
                self.async_abort(progress_flow_id)

            # Abort any flows in progress for the same handler
            # when integration allows only one config entry
            if (
                progress_flow_id != flow.flow_id
                and await _support_single_config_entry_only(self.hass, flow.handler)
            ):
                self.async_abort(progress_flow_id)

        if flow.unique_id is not None:
            # Reset unique ID when the default discovery ID has been used
            if flow.unique_id == DEFAULT_DISCOVERY_UNIQUE_ID:
                await flow.async_set_unique_id(None)

            # Find existing entry.
            existing_entry = self.config_entries.async_entry_for_domain_unique_id(
                result["handler"], flow.unique_id
            )

        # Unload the entry before setting up the new one.
        # We will remove it only after the other one is set up,
        # so that device customizations are not getting lost.
        if existing_entry is not None and existing_entry.state.recoverable:
            await self.config_entries.async_unload(existing_entry.entry_id)

        # Seed the new entry's discovery keys from the flow context, if any.
        discovery_key = flow.context.get("discovery_key")
        discovery_keys = (
            MappingProxyType({discovery_key.domain: (discovery_key,)})
            if discovery_key
            else MappingProxyType({})
        )
        entry = ConfigEntry(
            data=result["data"],
            discovery_keys=discovery_keys,
            domain=result["handler"],
            minor_version=result["minor_version"],
            options=result["options"],
            source=flow.context["source"],
            title=result["title"],
            unique_id=flow.unique_id,
            version=result["version"],
        )

        await self.config_entries.async_add(entry)

        # Remove the replaced entry only after the new one was added,
        # so device/entity customizations carry over.
        if existing_entry is not None:
            await self.config_entries.async_remove(existing_entry.entry_id)

        result["result"] = entry
        return result
|
|
|
|
|
|
|
|
async def async_create_flow(
|
2024-10-08 10:18:45 +00:00
|
|
|
self,
|
|
|
|
handler_key: str,
|
|
|
|
*,
|
|
|
|
context: ConfigFlowContext | None = None,
|
|
|
|
data: Any = None,
|
2021-02-12 09:58:20 +00:00
|
|
|
) -> ConfigFlow:
|
2020-01-03 10:52:01 +00:00
|
|
|
"""Create a flow for specified handler.
|
|
|
|
|
|
|
|
Handler key is the domain of the component that we want to set up.
|
|
|
|
"""
|
2023-08-07 06:25:03 +00:00
|
|
|
handler = await _async_get_flow_handler(
|
|
|
|
self.hass, handler_key, self._hass_config
|
|
|
|
)
|
2020-01-03 10:52:01 +00:00
|
|
|
if not context or "source" not in context:
|
|
|
|
raise KeyError("Context not set or doesn't have a source set")
|
|
|
|
|
2022-02-23 19:58:42 +00:00
|
|
|
flow = handler()
|
2020-01-03 16:28:05 +00:00
|
|
|
flow.init_step = context["source"]
|
|
|
|
return flow
|
|
|
|
|
|
|
|
async def async_post_init(
|
2024-03-05 21:52:11 +00:00
|
|
|
self,
|
2024-10-08 10:18:45 +00:00
|
|
|
flow: data_entry_flow.FlowHandler[ConfigFlowContext, ConfigFlowResult],
|
2024-03-05 21:52:11 +00:00
|
|
|
result: ConfigFlowResult,
|
2020-01-03 16:28:05 +00:00
|
|
|
) -> None:
|
|
|
|
"""After a flow is initialised trigger new flow notifications."""
|
|
|
|
source = flow.context["source"]
|
2020-01-03 10:52:01 +00:00
|
|
|
|
|
|
|
# Create notification.
|
|
|
|
if source in DISCOVERY_SOURCES:
|
2023-06-16 02:15:07 +00:00
|
|
|
await self._discovery_debouncer.async_call()
|
2020-10-15 20:46:27 +00:00
|
|
|
elif source == SOURCE_REAUTH:
|
2022-01-11 16:24:59 +00:00
|
|
|
persistent_notification.async_create(
|
|
|
|
self.hass,
|
2020-10-15 20:46:27 +00:00
|
|
|
title="Integration requires reconfiguration",
|
|
|
|
message=(
|
|
|
|
"At least one of your integrations requires reconfiguration to "
|
2020-10-21 17:09:45 +00:00
|
|
|
"continue functioning. [Check it out](/config/integrations)."
|
2020-10-15 20:46:27 +00:00
|
|
|
),
|
|
|
|
notification_id=RECONFIGURE_NOTIFICATION_ID,
|
|
|
|
)
|
2020-01-03 10:52:01 +00:00
|
|
|
|
2023-06-16 02:15:07 +00:00
|
|
|
@callback
|
|
|
|
def _async_discovery(self) -> None:
|
|
|
|
"""Handle discovery."""
|
2024-04-24 09:26:48 +00:00
|
|
|
# async_fire_internal is used here because this is only
|
|
|
|
# called from the Debouncer so we know the usage is safe
|
|
|
|
self.hass.bus.async_fire_internal(EVENT_FLOW_DISCOVERED)
|
2023-06-16 02:15:07 +00:00
|
|
|
persistent_notification.async_create(
|
|
|
|
self.hass,
|
|
|
|
title="New devices discovered",
|
|
|
|
message=(
|
|
|
|
"We have discovered new devices on your network. "
|
|
|
|
"[Check it out](/config/integrations)."
|
|
|
|
),
|
|
|
|
notification_id=DISCOVERY_NOTIFICATION_ID,
|
|
|
|
)
|
|
|
|
|
2024-09-27 08:51:36 +00:00
|
|
|
@callback
|
|
|
|
def async_has_matching_discovery_flow(
|
2024-10-08 10:18:45 +00:00
|
|
|
self, handler: str, match_context: ConfigFlowContext, data: Any
|
2024-09-27 08:51:36 +00:00
|
|
|
) -> bool:
|
|
|
|
"""Check if an existing matching discovery flow is in progress.
|
|
|
|
|
|
|
|
A flow with the same handler, context, and data.
|
|
|
|
|
|
|
|
If match_context is passed, only return flows with a context that is a
|
|
|
|
superset of match_context.
|
|
|
|
"""
|
|
|
|
if not (flows := self._handler_progress_index.get(handler)):
|
|
|
|
return False
|
|
|
|
match_items = match_context.items()
|
|
|
|
for progress in flows:
|
|
|
|
if match_items <= progress.context.items() and progress.init_data == data:
|
|
|
|
return True
|
|
|
|
return False
|
|
|
|
|
|
|
|
@callback
|
|
|
|
def async_has_matching_flow(self, flow: ConfigFlow) -> bool:
|
|
|
|
"""Check if an existing matching flow is in progress."""
|
|
|
|
if not (flows := self._handler_progress_index.get(flow.handler)):
|
|
|
|
return False
|
2024-09-27 11:41:55 +00:00
|
|
|
for other_flow in set(flows):
|
2024-09-27 08:51:36 +00:00
|
|
|
if other_flow is not flow and flow.is_matching(other_flow): # type: ignore[arg-type]
|
|
|
|
return True
|
|
|
|
return False
|
|
|
|
|
2020-01-03 10:52:01 +00:00
|
|
|
|
2024-01-13 20:34:15 +00:00
|
|
|
class ConfigEntryItems(UserDict[str, ConfigEntry]):
    """Container for config items, maps config_entry_id -> entry.

    Maintains two additional indexes:
    - domain -> list[ConfigEntry]
    - domain -> unique_id -> ConfigEntry
    """

    def __init__(self, hass: HomeAssistant) -> None:
        """Initialize the container."""
        super().__init__()
        self._hass = hass
        # Secondary indexes kept in lockstep with self.data by
        # _index_entry/_unindex_entry.
        self._domain_index: dict[str, list[ConfigEntry]] = {}
        self._domain_unique_id_index: dict[str, dict[str, list[ConfigEntry]]] = {}

    def values(self) -> ValuesView[ConfigEntry]:
        """Return the underlying values to avoid __iter__ overhead."""
        return self.data.values()

    def __setitem__(self, entry_id: str, entry: ConfigEntry) -> None:
        """Add an item, replacing (and unindexing) any existing one."""
        data = self.data
        if entry_id in data:
            # This is likely a bug in a test that is adding the same entry twice.
            # In the future, once we have fixed the tests, this will raise HomeAssistantError.
            _LOGGER.error("An entry with the id %s already exists", entry_id)
            self._unindex_entry(entry_id)
        data[entry_id] = entry
        self._index_entry(entry)

    def _index_entry(self, entry: ConfigEntry) -> None:
        """Index an entry into the domain and domain/unique_id indexes."""
        self._domain_index.setdefault(entry.domain, []).append(entry)
        if entry.unique_id is not None:
            unique_id_hash = entry.unique_id
            if not isinstance(entry.unique_id, str):
                # Guard against integrations using unhashable unique_id
                # In HA Core 2024.9, we should remove the guard and instead fail
                if not isinstance(entry.unique_id, Hashable):  # type: ignore[unreachable]
                    # Fall back to the string form so indexing cannot raise.
                    unique_id_hash = str(entry.unique_id)
                # Checks for other non-string was added in HA Core 2024.10
                # In HA Core 2025.10, we should remove the error and instead fail
                report_issue = async_suggest_report_issue(
                    self._hass, integration_domain=entry.domain
                )
                _LOGGER.error(
                    (
                        "Config entry '%s' from integration %s has an invalid unique_id"
                        " '%s', please %s"
                    ),
                    entry.title,
                    entry.domain,
                    entry.unique_id,
                    report_issue,
                )

            self._domain_unique_id_index.setdefault(entry.domain, {}).setdefault(
                unique_id_hash, []
            ).append(entry)

    def _unindex_entry(self, entry_id: str) -> None:
        """Unindex an entry, pruning empty index buckets as it goes."""
        entry = self.data[entry_id]
        domain = entry.domain
        self._domain_index[domain].remove(entry)
        if not self._domain_index[domain]:
            del self._domain_index[domain]
        if (unique_id := entry.unique_id) is not None:
            # Check type first to avoid expensive isinstance call
            if type(unique_id) is not str and not isinstance(unique_id, Hashable):  # noqa: E721
                # Mirror the stringification done in _index_entry.
                unique_id = str(entry.unique_id)  # type: ignore[unreachable]
            self._domain_unique_id_index[domain][unique_id].remove(entry)
            if not self._domain_unique_id_index[domain][unique_id]:
                del self._domain_unique_id_index[domain][unique_id]
            if not self._domain_unique_id_index[domain]:
                del self._domain_unique_id_index[domain]

    def __delitem__(self, entry_id: str) -> None:
        """Remove an item."""
        self._unindex_entry(entry_id)
        super().__delitem__(entry_id)

    def update_unique_id(self, entry: ConfigEntry, new_unique_id: str | None) -> None:
        """Update unique id for an entry.

        This method mutates the entry with the new unique id and updates the indexes.
        """
        entry_id = entry.entry_id
        self._unindex_entry(entry_id)
        # object.__setattr__ bypasses the entry's attribute protection so the
        # otherwise read-only unique_id can be rewritten in place.
        object.__setattr__(entry, "unique_id", new_unique_id)
        self._index_entry(entry)
        # Invalidate cached representations that embed the unique id.
        entry.clear_state_cache()
        entry.clear_storage_cache()

    def get_entries_for_domain(self, domain: str) -> list[ConfigEntry]:
        """Get entries for a domain."""
        return self._domain_index.get(domain, [])

    def get_entry_by_domain_and_unique_id(
        self, domain: str, unique_id: str
    ) -> ConfigEntry | None:
        """Get entry by domain and unique id."""
        # Check type first to avoid expensive isinstance call
        if type(unique_id) is not str and not isinstance(unique_id, Hashable):  # noqa: E721
            unique_id = str(unique_id)  # type: ignore[unreachable]
        entries = self._domain_unique_id_index.get(domain, {}).get(unique_id)
        if not entries:
            return None
        return entries[0]
|
2024-01-13 20:34:15 +00:00
|
|
|
|
|
|
|
|
2024-05-13 08:16:18 +00:00
|
|
|
class ConfigEntryStore(storage.Store[dict[str, list[dict[str, Any]]]]):
    """Class to help storing config entry data."""

    def __init__(self, hass: HomeAssistant) -> None:
        """Initialize storage class."""
        super().__init__(
            hass,
            STORAGE_VERSION,
            STORAGE_KEY,
            minor_version=STORAGE_VERSION_MINOR,
        )

    async def _async_migrate_func(
        self,
        old_major_version: int,
        old_minor_version: int,
        old_data: dict[str, Any],
    ) -> dict[str, Any]:
        """Migrate to the new version.

        Minor-version migrations are cumulative: each `< N` block runs for any
        store older than that minor version, mutating the entries in place.
        """
        data = old_data
        if old_major_version == 1:
            if old_minor_version < 2:
                # Version 1.2 implements migration and freezes the available keys
                for entry in data["entries"]:
                    # Populate keys which were introduced before version 1.2

                    pref_disable_new_entities = entry.get("pref_disable_new_entities")
                    if pref_disable_new_entities is None and "system_options" in entry:
                        # Migrate from the pre-1.2 nested system_options dict.
                        pref_disable_new_entities = entry.get("system_options", {}).get(
                            "disable_new_entities"
                        )

                    # setdefault with the key's own value ensures every key
                    # exists (possibly as None) without overwriting real data.
                    entry.setdefault("disabled_by", entry.get("disabled_by"))
                    entry.setdefault("minor_version", entry.get("minor_version", 1))
                    entry.setdefault("options", entry.get("options", {}))
                    entry.setdefault(
                        "pref_disable_new_entities", pref_disable_new_entities
                    )
                    entry.setdefault(
                        "pref_disable_polling", entry.get("pref_disable_polling")
                    )
                    entry.setdefault("unique_id", entry.get("unique_id"))

            if old_minor_version < 3:
                # Version 1.3 adds the created_at and modified_at fields
                created_at = utc_from_timestamp(0).isoformat()
                for entry in data["entries"]:
                    entry["created_at"] = entry["modified_at"] = created_at

            if old_minor_version < 4:
                # Version 1.4 adds discovery_keys
                for entry in data["entries"]:
                    entry["discovery_keys"] = {}

        if old_major_version > 1:
            raise NotImplementedError
        return data
|
|
|
|
|
|
|
|
|
2018-02-16 22:07:38 +00:00
|
|
|
class ConfigEntries:
|
|
|
|
"""Manage the configuration entries.
|
|
|
|
|
|
|
|
An instance of this object is available via `hass.config_entries`.
|
|
|
|
"""
|
|
|
|
|
2021-08-16 21:12:06 +00:00
|
|
|
    def __init__(self, hass: HomeAssistant, hass_config: ConfigType) -> None:
        """Initialize the entry manager."""
        self.hass = hass
        # Flow managers for config flows and options flows.
        self.flow = ConfigEntriesFlowManager(hass, self, hass_config)
        self.options = OptionsFlowManager(hass)
        self._hass_config = hass_config
        # Indexed container of all known config entries.
        self._entries = ConfigEntryItems(hass)
        self._store = ConfigEntryStore(hass)
        # Side effect: installs the entity-registry disabled listener.
        EntityRegistryDisabledHandler(hass).async_setup()
|
2018-02-16 22:07:38 +00:00
|
|
|
|
|
|
|
@callback
|
2021-03-29 11:06:44 +00:00
|
|
|
def async_domains(
|
|
|
|
self, include_ignore: bool = False, include_disabled: bool = False
|
|
|
|
) -> list[str]:
|
2018-02-16 22:07:38 +00:00
|
|
|
"""Return domains for which we have entries."""
|
2021-03-29 11:06:44 +00:00
|
|
|
return list(
|
|
|
|
{
|
|
|
|
entry.domain: None
|
|
|
|
for entry in self._entries.values()
|
|
|
|
if (include_ignore or entry.source != SOURCE_IGNORE)
|
|
|
|
and (include_disabled or not entry.disabled_by)
|
|
|
|
}
|
|
|
|
)
|
2018-02-16 22:07:38 +00:00
|
|
|
|
2019-02-22 16:59:43 +00:00
|
|
|
@callback
|
2021-03-17 16:34:55 +00:00
|
|
|
def async_get_entry(self, entry_id: str) -> ConfigEntry | None:
|
2019-02-22 16:59:43 +00:00
|
|
|
"""Return entry with matching entry_id."""
|
2024-01-13 20:34:15 +00:00
|
|
|
return self._entries.data.get(entry_id)
|
2019-02-22 16:59:43 +00:00
|
|
|
|
2024-03-05 01:59:12 +00:00
|
|
|
@callback
|
|
|
|
def async_entry_ids(self) -> list[str]:
|
|
|
|
"""Return entry ids."""
|
|
|
|
return list(self._entries.data)
|
|
|
|
|
2024-05-11 07:47:17 +00:00
|
|
|
@callback
|
|
|
|
def async_has_entries(
|
|
|
|
self, domain: str, include_ignore: bool = True, include_disabled: bool = True
|
|
|
|
) -> bool:
|
|
|
|
"""Return if there are entries for a domain."""
|
|
|
|
entries = self._entries.get_entries_for_domain(domain)
|
|
|
|
if include_ignore and include_disabled:
|
|
|
|
return bool(entries)
|
2024-08-03 22:33:46 +00:00
|
|
|
for entry in entries:
|
|
|
|
if (include_ignore or entry.source != SOURCE_IGNORE) and (
|
|
|
|
include_disabled or not entry.disabled_by
|
|
|
|
):
|
|
|
|
return True
|
|
|
|
return False
|
2024-05-11 07:47:17 +00:00
|
|
|
|
2018-02-16 22:07:38 +00:00
|
|
|
@callback
|
2024-02-17 09:34:03 +00:00
|
|
|
def async_entries(
|
|
|
|
self,
|
|
|
|
domain: str | None = None,
|
|
|
|
include_ignore: bool = True,
|
|
|
|
include_disabled: bool = True,
|
|
|
|
) -> list[ConfigEntry]:
|
2018-02-16 22:07:38 +00:00
|
|
|
"""Return all entries or entries for a specific domain."""
|
|
|
|
if domain is None:
|
2024-02-17 09:34:03 +00:00
|
|
|
entries: Iterable[ConfigEntry] = self._entries.values()
|
|
|
|
else:
|
|
|
|
entries = self._entries.get_entries_for_domain(domain)
|
|
|
|
|
|
|
|
if include_ignore and include_disabled:
|
|
|
|
return list(entries)
|
|
|
|
|
|
|
|
return [
|
|
|
|
entry
|
|
|
|
for entry in entries
|
|
|
|
if (include_ignore or entry.source != SOURCE_IGNORE)
|
|
|
|
and (include_disabled or not entry.disabled_by)
|
|
|
|
]
|
2024-01-13 20:34:15 +00:00
|
|
|
|
2024-08-27 12:20:57 +00:00
|
|
|
@callback
|
|
|
|
def async_loaded_entries(self, domain: str) -> list[ConfigEntry]:
|
|
|
|
"""Return loaded entries for a specific domain.
|
|
|
|
|
|
|
|
This will exclude ignored or disabled config entruis.
|
|
|
|
"""
|
|
|
|
entries = self._entries.get_entries_for_domain(domain)
|
|
|
|
|
|
|
|
return [entry for entry in entries if entry.state == ConfigEntryState.LOADED]
|
|
|
|
|
2024-01-13 20:34:15 +00:00
|
|
|
@callback
|
|
|
|
def async_entry_for_domain_unique_id(
|
|
|
|
self, domain: str, unique_id: str
|
|
|
|
) -> ConfigEntry | None:
|
|
|
|
"""Return entry for a domain with a matching unique id."""
|
|
|
|
return self._entries.get_entry_by_domain_and_unique_id(domain, unique_id)
|
2018-02-16 22:07:38 +00:00
|
|
|
|
2020-01-03 10:52:01 +00:00
|
|
|
    async def async_add(self, entry: ConfigEntry) -> None:
        """Add and setup an entry.

        Raises HomeAssistantError if an entry with the same id already exists.
        """
        if entry.entry_id in self._entries.data:
            raise HomeAssistantError(
                f"An entry with the id {entry.entry_id} already exists."
            )

        # Register and announce the entry before setting it up, then persist.
        self._entries[entry.entry_id] = entry
        self._async_dispatch(ConfigEntryChange.ADDED, entry)
        await self.async_setup(entry.entry_id)
        self._async_schedule_save()
|
|
|
|
|
2021-03-17 16:34:55 +00:00
|
|
|
    async def async_remove(self, entry_id: str) -> dict[str, Any]:
        """Remove an entry.

        Unloads the entry (when possible), removes it from the container,
        clears registry references and aborts any related reauth flows.
        Returns {"require_restart": bool} indicating whether unload failed.
        """
        if (entry := self.async_get_entry(entry_id)) is None:
            raise UnknownEntry

        # Hold the setup lock so the entry cannot be set up concurrently
        # while it is being unloaded and removed.
        async with entry.setup_lock:
            if not entry.state.recoverable:
                unload_success = entry.state is not ConfigEntryState.FAILED_UNLOAD
            else:
                # _lock=False: we already hold the setup lock.
                unload_success = await self.async_unload(entry_id, _lock=False)

            await entry.async_remove(self.hass)

            del self._entries[entry.entry_id]
            self._async_schedule_save()

        # Drop all device and entity registry references to this entry.
        dev_reg = device_registry.async_get(self.hass)
        ent_reg = entity_registry.async_get(self.hass)

        dev_reg.async_clear_config_entry(entry_id)
        ent_reg.async_clear_config_entry(entry_id)

        # If the configuration entry is removed during reauth, it should
        # abort any reauth flow that is active for the removed entry and
        # linked issues.
        for progress_flow in self.hass.config_entries.flow.async_progress_by_handler(
            entry.domain, match_context={"entry_id": entry_id, "source": SOURCE_REAUTH}
        ):
            if "flow_id" in progress_flow:
                self.hass.config_entries.flow.async_abort(progress_flow["flow_id"])
            issue_id = f"config_entry_reauth_{entry.domain}_{entry.entry_id}"
            ir.async_delete_issue(self.hass, HOMEASSISTANT_DOMAIN, issue_id)

        self._async_dispatch(ConfigEntryChange.REMOVED, entry)
        # Notify each discovery integration that produced this entry.
        for discovery_domain in entry.discovery_keys:
            async_dispatcher_send_internal(
                self.hass,
                signal_discovered_config_entry_removed(discovery_domain),
                entry,
            )
        return {"require_restart": not unload_success}
|
2018-02-16 22:07:38 +00:00
|
|
|
|
2024-02-24 07:37:33 +00:00
|
|
|
@callback
|
|
|
|
def _async_shutdown(self, event: Event) -> None:
|
2021-04-14 02:16:26 +00:00
|
|
|
"""Call when Home Assistant is stopping."""
|
2024-02-21 02:38:24 +00:00
|
|
|
for entry in self._entries.values():
|
|
|
|
entry.async_shutdown()
|
2024-02-24 07:37:33 +00:00
|
|
|
self.flow.async_shutdown()
|
2021-04-14 02:16:26 +00:00
|
|
|
|
2019-03-01 04:27:20 +00:00
|
|
|
    async def async_initialize(self) -> None:
        """Initialize config entry config.

        Loads the persisted entries from storage and rebuilds the in-memory
        ConfigEntryItems container.
        """
        config = await self._store.async_load()

        self.hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, self._async_shutdown)

        if config is None:
            # Fresh install: start with an empty container.
            self._entries = ConfigEntryItems(self.hass)
            return

        entries: ConfigEntryItems = ConfigEntryItems(self.hass)
        for entry in config["entries"]:
            entry_id = entry["entry_id"]

            # Reconstruct each ConfigEntry from its stored JSON form.
            config_entry = ConfigEntry(
                created_at=datetime.fromisoformat(entry["created_at"]),
                data=entry["data"],
                disabled_by=try_parse_enum(ConfigEntryDisabler, entry["disabled_by"]),
                discovery_keys=MappingProxyType(
                    {
                        domain: tuple(DiscoveryKey.from_json_dict(key) for key in keys)
                        for domain, keys in entry["discovery_keys"].items()
                    }
                ),
                domain=entry["domain"],
                entry_id=entry_id,
                minor_version=entry["minor_version"],
                modified_at=datetime.fromisoformat(entry["modified_at"]),
                options=entry["options"],
                pref_disable_new_entities=entry["pref_disable_new_entities"],
                pref_disable_polling=entry["pref_disable_polling"],
                source=entry["source"],
                title=entry["title"],
                unique_id=entry["unique_id"],
                version=entry["version"],
            )
            entries[entry_id] = config_entry

        self._entries = entries
|
2018-02-16 22:07:38 +00:00
|
|
|
|
2024-05-11 23:20:08 +00:00
|
|
|
    async def async_setup(self, entry_id: str, _lock: bool = True) -> bool:
        """Set up a config entry.

        Return True if entry has been successfully loaded.

        _lock=False is used by callers that already hold the entry's
        setup_lock.
        """
        if (entry := self.async_get_entry(entry_id)) is None:
            raise UnknownEntry

        if entry.state is not ConfigEntryState.NOT_LOADED:
            raise OperationNotAllowed(
                f"The config entry '{entry.title}' ({entry.domain}) with entry_id"
                f" '{entry.entry_id}' cannot be set up because it is in state "
                f"{entry.state}, but needs to be in the {ConfigEntryState.NOT_LOADED} state"
            )

        # Setup Component if not set up yet
        if entry.domain in self.hass.config.components:
            if _lock:
                async with entry.setup_lock:
                    await entry.async_setup(self.hass)
            else:
                await entry.async_setup(self.hass)
        else:
            # Setting up the component will set up all its config entries
            result = await async_setup_component(
                self.hass, entry.domain, self._hass_config
            )

            if not result:
                return result

        return (
            entry.state is ConfigEntryState.LOADED  # type: ignore[comparison-overlap]
        )
|
2019-03-01 04:27:20 +00:00
|
|
|
|
2024-05-11 23:20:08 +00:00
|
|
|
async def async_unload(self, entry_id: str, _lock: bool = True) -> bool:
|
2019-03-01 04:27:20 +00:00
|
|
|
"""Unload a config entry."""
|
2021-09-18 23:31:35 +00:00
|
|
|
if (entry := self.async_get_entry(entry_id)) is None:
|
2019-03-01 04:27:20 +00:00
|
|
|
raise UnknownEntry
|
|
|
|
|
2021-05-20 17:19:20 +00:00
|
|
|
if not entry.state.recoverable:
|
2022-09-17 17:52:28 +00:00
|
|
|
raise OperationNotAllowed(
|
2024-06-14 06:00:36 +00:00
|
|
|
f"The config entry '{entry.title}' ({entry.domain}) with entry_id"
|
|
|
|
f" '{entry.entry_id}' cannot be unloaded because it is in the non"
|
|
|
|
f" recoverable state {entry.state}"
|
2022-09-17 17:52:28 +00:00
|
|
|
)
|
2019-03-01 04:27:20 +00:00
|
|
|
|
2024-05-11 23:20:08 +00:00
|
|
|
if _lock:
|
|
|
|
async with entry.setup_lock:
|
|
|
|
return await entry.async_unload(self.hass)
|
|
|
|
|
2019-03-01 04:27:20 +00:00
|
|
|
return await entry.async_unload(self.hass)
|
|
|
|
|
2024-02-20 01:14:45 +00:00
|
|
|
@callback
|
|
|
|
def async_schedule_reload(self, entry_id: str) -> None:
|
|
|
|
"""Schedule a config entry to be reloaded."""
|
|
|
|
if (entry := self.async_get_entry(entry_id)) is None:
|
|
|
|
raise UnknownEntry
|
|
|
|
entry.async_cancel_retry_setup()
|
|
|
|
self.hass.async_create_task(
|
|
|
|
self.async_reload(entry_id),
|
|
|
|
f"config entry reload {entry.title} {entry.domain} {entry.entry_id}",
|
|
|
|
)
|
|
|
|
|
2019-03-01 04:27:20 +00:00
|
|
|
    async def async_reload(self, entry_id: str) -> bool:
        """Reload an entry.

        When reloading from an integration it is preferable to
        call async_schedule_reload instead of this method since
        it will cancel setup retry before starting this method
        in a task which eliminates a race condition where the
        setup retry can fire during the reload.

        If an entry was not loaded, will just load.

        Returns True if the entry ends up in the LOADED state.
        Raises UnknownEntry for an unknown entry_id.
        """
        if (entry := self.async_get_entry(entry_id)) is None:
            raise UnknownEntry

        # Cancel the setup retry task before waiting for the
        # reload lock to reduce the chance of concurrent reload
        # attempts.
        entry.async_cancel_retry_setup()

        if entry.domain not in self.hass.config.components:
            # If the component is not loaded, just load it as
            # the config entry will be loaded as well. We need
            # to do this before holding the lock to avoid a
            # deadlock.
            await async_setup_component(self.hass, entry.domain, self._hass_config)
            return entry.state is ConfigEntryState.LOADED

        async with entry.setup_lock:
            # Unload first; bail out early if the unload failed or the
            # entry is disabled (a disabled entry must stay unloaded).
            unload_result = await self.async_unload(entry_id, _lock=False)

            if not unload_result or entry.disabled_by:
                return unload_result

            # _lock=False: we already hold entry.setup_lock here.
            return await self.async_setup(entry_id, _lock=False)
|
2019-03-01 04:27:20 +00:00
|
|
|
|
2021-02-21 03:21:39 +00:00
|
|
|
    async def async_set_disabled_by(
        self, entry_id: str, disabled_by: ConfigEntryDisabler | None
    ) -> bool:
        """Disable an entry.

        If disabled_by is changed, the config entry will be reloaded.

        Returns True when nothing changed, otherwise the result of the
        reload. Raises UnknownEntry for an unknown entry_id.
        """
        if (entry := self.async_get_entry(entry_id)) is None:
            raise UnknownEntry

        _validate_item(disabled_by=disabled_by)
        if entry.disabled_by is disabled_by:
            # No change; nothing to do.
            return True

        entry.disabled_by = disabled_by
        self._async_schedule_save()

        dev_reg = device_registry.async_get(self.hass)
        ent_reg = entity_registry.async_get(self.hass)

        if not entry.disabled_by:
            # The config entry will no longer be disabled, enable devices and
            # entities (done before the reload below).
            device_registry.async_config_entry_disabled_by_changed(dev_reg, entry)
            entity_registry.async_config_entry_disabled_by_changed(ent_reg, entry)

        # Load or unload the config entry
        reload_result = await self.async_reload(entry_id)

        if entry.disabled_by:
            # The config entry has been disabled, disable devices and entities
            # (done after the reload/unload above).
            device_registry.async_config_entry_disabled_by_changed(dev_reg, entry)
            entity_registry.async_config_entry_disabled_by_changed(ent_reg, entry)

        return reload_result
|
2021-02-21 03:21:39 +00:00
|
|
|
|
2018-09-25 10:21:11 +00:00
|
|
|
@callback
|
2019-08-19 23:45:17 +00:00
|
|
|
def async_update_entry(
|
2019-10-28 20:36:26 +00:00
|
|
|
self,
|
|
|
|
entry: ConfigEntry,
|
|
|
|
*,
|
2022-01-14 03:06:19 +00:00
|
|
|
data: Mapping[str, Any] | UndefinedType = UNDEFINED,
|
2024-09-23 19:48:11 +00:00
|
|
|
discovery_keys: MappingProxyType[str, tuple[DiscoveryKey, ...]]
|
|
|
|
| UndefinedType = UNDEFINED,
|
2024-02-09 09:10:25 +00:00
|
|
|
minor_version: int | UndefinedType = UNDEFINED,
|
2021-04-15 17:17:07 +00:00
|
|
|
options: Mapping[str, Any] | UndefinedType = UNDEFINED,
|
2021-06-01 20:34:31 +00:00
|
|
|
pref_disable_new_entities: bool | UndefinedType = UNDEFINED,
|
|
|
|
pref_disable_polling: bool | UndefinedType = UNDEFINED,
|
2024-02-09 09:10:25 +00:00
|
|
|
title: str | UndefinedType = UNDEFINED,
|
|
|
|
unique_id: str | None | UndefinedType = UNDEFINED,
|
|
|
|
version: int | UndefinedType = UNDEFINED,
|
2020-08-08 18:23:56 +00:00
|
|
|
) -> bool:
|
|
|
|
"""Update a config entry.
|
|
|
|
|
|
|
|
If the entry was changed, the update_listeners are
|
|
|
|
fired and this function returns True
|
|
|
|
|
|
|
|
If the entry was not changed, the update_listeners are
|
|
|
|
not fired and this function returns False
|
|
|
|
"""
|
2024-02-16 16:15:05 +00:00
|
|
|
if entry.entry_id not in self._entries:
|
|
|
|
raise UnknownEntry(entry.entry_id)
|
|
|
|
|
2024-05-14 13:20:31 +00:00
|
|
|
self.hass.verify_event_loop_thread("hass.config_entries.async_update_entry")
|
2020-08-08 18:23:56 +00:00
|
|
|
changed = False
|
2024-02-16 16:15:05 +00:00
|
|
|
_setter = object.__setattr__
|
2020-08-08 18:23:56 +00:00
|
|
|
|
2024-01-13 20:34:15 +00:00
|
|
|
if unique_id is not UNDEFINED and entry.unique_id != unique_id:
|
2024-10-08 12:42:41 +00:00
|
|
|
# Deprecated in 2024.11, should fail in 2025.11
|
|
|
|
if (
|
|
|
|
unique_id is not None
|
|
|
|
and self.async_entry_for_domain_unique_id(entry.domain, unique_id)
|
|
|
|
is not None
|
|
|
|
):
|
|
|
|
report_issue = async_suggest_report_issue(
|
|
|
|
self.hass, integration_domain=entry.domain
|
|
|
|
)
|
|
|
|
_LOGGER.error(
|
|
|
|
(
|
|
|
|
"Unique id of config entry '%s' from integration %s changed to"
|
|
|
|
" '%s' which is already in use, please %s"
|
|
|
|
),
|
|
|
|
entry.title,
|
|
|
|
entry.domain,
|
|
|
|
unique_id,
|
|
|
|
report_issue,
|
|
|
|
)
|
2024-01-13 20:34:15 +00:00
|
|
|
# Reindex the entry if the unique_id has changed
|
2024-02-09 13:51:02 +00:00
|
|
|
self._entries.update_unique_id(entry, unique_id)
|
2024-01-13 20:34:15 +00:00
|
|
|
changed = True
|
|
|
|
|
2021-06-01 20:34:31 +00:00
|
|
|
for attr, value in (
|
2024-09-23 14:49:21 +00:00
|
|
|
("discovery_keys", discovery_keys),
|
2024-02-09 09:10:25 +00:00
|
|
|
("minor_version", minor_version),
|
2021-06-01 20:34:31 +00:00
|
|
|
("pref_disable_new_entities", pref_disable_new_entities),
|
|
|
|
("pref_disable_polling", pref_disable_polling),
|
2024-02-09 09:10:25 +00:00
|
|
|
("title", title),
|
|
|
|
("version", version),
|
2021-06-01 20:34:31 +00:00
|
|
|
):
|
2023-09-19 23:08:32 +00:00
|
|
|
if value is UNDEFINED or getattr(entry, attr) == value:
|
2021-06-01 20:34:31 +00:00
|
|
|
continue
|
2019-12-16 11:27:43 +00:00
|
|
|
|
2024-02-16 16:15:05 +00:00
|
|
|
_setter(entry, attr, value)
|
2020-08-08 18:23:56 +00:00
|
|
|
changed = True
|
2020-03-09 21:07:50 +00:00
|
|
|
|
2022-01-14 03:06:19 +00:00
|
|
|
if data is not UNDEFINED and entry.data != data:
|
2020-08-08 18:23:56 +00:00
|
|
|
changed = True
|
2024-02-16 16:15:05 +00:00
|
|
|
_setter(entry, "data", MappingProxyType(data))
|
2019-02-22 16:59:43 +00:00
|
|
|
|
2021-04-15 17:17:07 +00:00
|
|
|
if options is not UNDEFINED and entry.options != options:
|
2020-08-08 18:23:56 +00:00
|
|
|
changed = True
|
2024-02-16 16:15:05 +00:00
|
|
|
_setter(entry, "options", MappingProxyType(options))
|
2019-02-22 16:59:43 +00:00
|
|
|
|
2020-08-08 18:23:56 +00:00
|
|
|
if not changed:
|
|
|
|
return False
|
|
|
|
|
2024-07-29 20:08:46 +00:00
|
|
|
_setter(entry, "modified_at", utcnow())
|
|
|
|
|
2023-07-03 18:56:21 +00:00
|
|
|
for listener in entry.update_listeners:
|
|
|
|
self.hass.async_create_task(
|
|
|
|
listener(self.hass, entry),
|
|
|
|
f"config entry update listener {entry.title} {entry.domain} {entry.domain}",
|
|
|
|
)
|
2019-02-22 16:59:43 +00:00
|
|
|
|
2018-09-25 10:21:11 +00:00
|
|
|
self._async_schedule_save()
|
2024-10-03 17:51:09 +00:00
|
|
|
entry.clear_state_cache()
|
|
|
|
entry.clear_storage_cache()
|
2022-09-05 03:57:43 +00:00
|
|
|
self._async_dispatch(ConfigEntryChange.UPDATED, entry)
|
2020-08-08 18:23:56 +00:00
|
|
|
return True
|
|
|
|
|
2022-09-05 03:57:43 +00:00
|
|
|
    @callback
    def _async_dispatch(
        self, change_type: ConfigEntryChange, entry: ConfigEntry
    ) -> None:
        """Dispatch a config entry change.

        Sends SIGNAL_CONFIG_ENTRY_CHANGED on the dispatcher with the
        change type and the affected entry.
        """
        async_dispatcher_send_internal(
            self.hass, SIGNAL_CONFIG_ENTRY_CHANGED, change_type, entry
        )
|
2021-04-26 17:46:55 +00:00
|
|
|
|
2022-07-09 15:27:42 +00:00
|
|
|
    async def async_forward_entry_setups(
        self, entry: ConfigEntry, platforms: Iterable[Platform | str]
    ) -> None:
        """Forward the setup of an entry to platforms.

        This method should be awaited before async_setup_entry is finished
        in each integration. This is to ensure that all platforms are loaded
        before the entry is set up. This ensures that the config entry cannot
        be unloaded before all platforms are loaded.

        This method is more efficient than async_forward_entry_setup as
        it can load multiple platforms at once and does not require a separate
        import executor job for each platform.

        Raises OperationNotAllowed when called after setup (lock not held)
        while the entry is not in the LOADED state.
        """
        # Import all requested platform modules up front so each platform
        # does not need its own import executor job later.
        integration = await loader.async_get_integration(self.hass, entry.domain)
        if not integration.platforms_are_loaded(platforms):
            with async_pause_setup(self.hass, SetupPhases.WAIT_IMPORT_PLATFORMS):
                await integration.async_get_platforms(platforms)

        if not entry.setup_lock.locked():
            # Late forward (after the entry finished setting up): hold the
            # setup lock so the entry cannot be unloaded while its
            # platforms are being set up.
            async with entry.setup_lock:
                if entry.state is not ConfigEntryState.LOADED:
                    raise OperationNotAllowed(
                        f"The config entry '{entry.title}' ({entry.domain}) with "
                        f"entry_id '{entry.entry_id}' cannot forward setup for "
                        f"{platforms} because it is in state {entry.state}, but needs "
                        f"to be in the {ConfigEntryState.LOADED} state"
                    )
                await self._async_forward_entry_setups_locked(entry, platforms)
        else:
            # Forward during the entry's own setup: the lock is already held.
            await self._async_forward_entry_setups_locked(entry, platforms)
            # If the lock was held when we started, and it was released during
            # the platform setup, it means they did not await the setup call.
            if not entry.setup_lock.locked():
                _report_non_awaited_platform_forwards(
                    entry, "async_forward_entry_setups"
                )
|
|
|
|
|
|
|
|
async def _async_forward_entry_setups_locked(
|
|
|
|
self, entry: ConfigEntry, platforms: Iterable[Platform | str]
|
|
|
|
) -> None:
|
2022-07-09 15:27:42 +00:00
|
|
|
await asyncio.gather(
|
2023-03-08 15:19:36 +00:00
|
|
|
*(
|
2024-02-27 04:55:41 +00:00
|
|
|
create_eager_task(
|
2024-06-13 01:06:11 +00:00
|
|
|
self._async_forward_entry_setup(entry, platform, False),
|
2024-05-12 02:39:20 +00:00
|
|
|
name=(
|
|
|
|
f"config entry forward setup {entry.title} "
|
|
|
|
f"{entry.domain} {entry.entry_id} {platform}"
|
|
|
|
),
|
|
|
|
loop=self.hass.loop,
|
2023-03-08 15:19:36 +00:00
|
|
|
)
|
|
|
|
for platform in platforms
|
|
|
|
)
|
2022-07-09 15:27:42 +00:00
|
|
|
)
|
|
|
|
|
2022-01-09 05:08:04 +00:00
|
|
|
    async def async_forward_entry_setup(
        self, entry: ConfigEntry, domain: Platform | str
    ) -> bool:
        """Forward the setup of an entry to a different component.

        By default an entry is setup with the component it belongs to. If that
        component also has related platforms, the component will have to
        forward the entry to be setup by that component.

        This method is deprecated and will stop working in Home Assistant 2025.6.

        Instead, await async_forward_entry_setups as it can load
        multiple platforms at once and is more efficient since it
        does not require a separate import executor job for each platform.
        """
        # Log a deprecation report pointing at the calling integration.
        report(
            "calls async_forward_entry_setup for "
            f"integration, {entry.domain} with title: {entry.title} "
            f"and entry_id: {entry.entry_id}, which is deprecated and "
            "will stop working in Home Assistant 2025.6, "
            "await async_forward_entry_setups instead",
            error_if_core=False,
            error_if_integration=False,
        )
        if not entry.setup_lock.locked():
            # Late forward: hold the setup lock so the entry cannot be
            # unloaded while the platform is being set up.
            async with entry.setup_lock:
                if entry.state is not ConfigEntryState.LOADED:
                    raise OperationNotAllowed(
                        f"The config entry '{entry.title}' ({entry.domain}) with "
                        f"entry_id '{entry.entry_id}' cannot forward setup for "
                        f"{domain} because it is in state {entry.state}, but needs "
                        f"to be in the {ConfigEntryState.LOADED} state"
                    )
                return await self._async_forward_entry_setup(entry, domain, True)
        result = await self._async_forward_entry_setup(entry, domain, True)
        # If the lock was held when we started, and it was released during
        # the platform setup, it means they did not await the setup call.
        if not entry.setup_lock.locked():
            _report_non_awaited_platform_forwards(entry, "async_forward_entry_setup")
        return result
|
2024-03-23 19:26:38 +00:00
|
|
|
|
|
|
|
async def _async_forward_entry_setup(
|
Ensure config entries are not unloaded while their platforms are setting up (#118767)
* Report non-awaited/non-locked config entry platform forwards
Its currently possible for config entries to be reloaded while their platforms
are being forwarded if platform forwards are not awaited or done after the
config entry is setup since the lock will not be held in this case.
In https://developers.home-assistant.io/blog/2022/07/08/config_entry_forwards
we advised to await platform forwards to ensure this does not happen, however
for sleeping devices and late discovered devices, platform forwards may happen
later.
If config platform forwards are happening during setup, they should be awaited
If config entry platform forwards are not happening during setup, instead
async_late_forward_entry_setups should be used which will hold the lock to
prevent the config entry from being unloaded while its platforms are being
setup
* Report non-awaited/non-locked config entry platform forwards
Its currently possible for config entries to be reloaded while their platforms
are being forwarded if platform forwards are not awaited or done after the
config entry is setup since the lock will not be held in this case.
In https://developers.home-assistant.io/blog/2022/07/08/config_entry_forwards
we advised to await platform forwards to ensure this does not happen, however
for sleeping devices and late discovered devices, platform forwards may happen
later.
If config platform forwards are happening during setup, they should be awaited
If config entry platform forwards are not happening during setup, instead
async_late_forward_entry_setups should be used which will hold the lock to
prevent the config entry from being unloaded while its platforms are being
setup
* run with error on to find them
* cert_exp, hold lock
* cert_exp, hold lock
* shelly async_late_forward_entry_setups
* compact
* compact
* found another
* patch up mobileapp
* patch up hue tests
* patch up smartthings
* fix mqtt
* fix esphome
* zwave_js
* mqtt
* rework
* fixes
* fix mocking
* fix mocking
* do not call async_forward_entry_setup directly
* docstrings
* docstrings
* docstrings
* add comments
* doc strings
* fixed all in core, turn off strict
* coverage
* coverage
* missing
* coverage
2024-06-05 01:34:39 +00:00
|
|
|
self,
|
|
|
|
entry: ConfigEntry,
|
|
|
|
domain: Platform | str,
|
|
|
|
preload_platform: bool,
|
2024-03-23 19:26:38 +00:00
|
|
|
) -> bool:
|
|
|
|
"""Forward the setup of an entry to a different component."""
|
2018-04-09 14:09:08 +00:00
|
|
|
# Setup Component if not set up yet
|
2019-04-15 02:07:05 +00:00
|
|
|
if domain not in self.hass.config.components:
|
2024-03-19 01:45:34 +00:00
|
|
|
with async_pause_setup(self.hass, SetupPhases.WAIT_BASE_PLATFORM_SETUP):
|
|
|
|
result = await async_setup_component(
|
|
|
|
self.hass, domain, self._hass_config
|
|
|
|
)
|
2018-04-09 14:09:08 +00:00
|
|
|
|
|
|
|
if not result:
|
|
|
|
return False
|
|
|
|
|
2024-03-23 19:26:38 +00:00
|
|
|
if preload_platform:
|
|
|
|
# If this is a late setup, we need to make sure the platform is loaded
|
|
|
|
# so we do not end up waiting for when the EntityComponent calls
|
|
|
|
# async_prepare_setup_platform
|
|
|
|
integration = await loader.async_get_integration(self.hass, entry.domain)
|
|
|
|
if not integration.platforms_are_loaded((domain,)):
|
|
|
|
with async_pause_setup(self.hass, SetupPhases.WAIT_IMPORT_PLATFORMS):
|
|
|
|
await integration.async_get_platform(domain)
|
2019-04-15 02:07:05 +00:00
|
|
|
|
2024-05-11 01:30:34 +00:00
|
|
|
integration = loader.async_get_loaded_integration(self.hass, domain)
|
2019-04-15 02:07:05 +00:00
|
|
|
await entry.async_setup(self.hass, integration=integration)
|
2019-10-28 20:36:26 +00:00
|
|
|
return True
|
2018-04-09 14:09:08 +00:00
|
|
|
|
2021-04-26 17:46:55 +00:00
|
|
|
async def async_unload_platforms(
|
2022-01-09 05:08:04 +00:00
|
|
|
self, entry: ConfigEntry, platforms: Iterable[Platform | str]
|
2021-04-26 17:46:55 +00:00
|
|
|
) -> bool:
|
|
|
|
"""Forward the unloading of an entry to platforms."""
|
|
|
|
return all(
|
|
|
|
await asyncio.gather(
|
2021-07-19 08:46:09 +00:00
|
|
|
*(
|
2024-02-27 04:55:41 +00:00
|
|
|
create_eager_task(
|
2023-03-08 15:19:36 +00:00
|
|
|
self.async_forward_entry_unload(entry, platform),
|
2024-05-12 02:39:20 +00:00
|
|
|
name=(
|
|
|
|
f"config entry forward unload {entry.title} "
|
|
|
|
f"{entry.domain} {entry.entry_id} {platform}"
|
|
|
|
),
|
|
|
|
loop=self.hass.loop,
|
2023-03-08 15:19:36 +00:00
|
|
|
)
|
2021-04-26 17:46:55 +00:00
|
|
|
for platform in platforms
|
2021-07-19 08:46:09 +00:00
|
|
|
)
|
2021-04-26 17:46:55 +00:00
|
|
|
)
|
|
|
|
)
|
|
|
|
|
2022-01-09 05:08:04 +00:00
|
|
|
async def async_forward_entry_unload(
|
|
|
|
self, entry: ConfigEntry, domain: Platform | str
|
|
|
|
) -> bool:
|
Ensure config entries are not unloaded while their platforms are setting up (#118767)
* Report non-awaited/non-locked config entry platform forwards
Its currently possible for config entries to be reloaded while their platforms
are being forwarded if platform forwards are not awaited or done after the
config entry is setup since the lock will not be held in this case.
In https://developers.home-assistant.io/blog/2022/07/08/config_entry_forwards
we advised to await platform forwards to ensure this does not happen, however
for sleeping devices and late discovered devices, platform forwards may happen
later.
If config platform forwards are happening during setup, they should be awaited
If config entry platform forwards are not happening during setup, instead
async_late_forward_entry_setups should be used which will hold the lock to
prevent the config entry from being unloaded while its platforms are being
setup
* Report non-awaited/non-locked config entry platform forwards
Its currently possible for config entries to be reloaded while their platforms
are being forwarded if platform forwards are not awaited or done after the
config entry is setup since the lock will not be held in this case.
In https://developers.home-assistant.io/blog/2022/07/08/config_entry_forwards
we advised to await platform forwards to ensure this does not happen, however
for sleeping devices and late discovered devices, platform forwards may happen
later.
If config platform forwards are happening during setup, they should be awaited
If config entry platform forwards are not happening during setup, instead
async_late_forward_entry_setups should be used which will hold the lock to
prevent the config entry from being unloaded while its platforms are being
setup
* run with error on to find them
* cert_exp, hold lock
* cert_exp, hold lock
* shelly async_late_forward_entry_setups
* compact
* compact
* found another
* patch up mobileapp
* patch up hue tests
* patch up smartthings
* fix mqtt
* fix esphome
* zwave_js
* mqtt
* rework
* fixes
* fix mocking
* fix mocking
* do not call async_forward_entry_setup directly
* docstrings
* docstrings
* docstrings
* add comments
* doc strings
* fixed all in core, turn off strict
* coverage
* coverage
* missing
* coverage
2024-06-05 01:34:39 +00:00
|
|
|
"""Forward the unloading of an entry to a different component.
|
|
|
|
|
|
|
|
Its is preferred to call async_unload_platforms instead
|
|
|
|
of directly calling this method.
|
|
|
|
"""
|
2018-04-12 12:28:54 +00:00
|
|
|
# It was never loaded.
|
2019-04-15 02:07:05 +00:00
|
|
|
if domain not in self.hass.config.components:
|
2018-04-12 12:28:54 +00:00
|
|
|
return True
|
|
|
|
|
2024-05-11 01:30:34 +00:00
|
|
|
integration = loader.async_get_loaded_integration(self.hass, domain)
|
2019-04-15 02:07:05 +00:00
|
|
|
|
|
|
|
return await entry.async_unload(self.hass, integration=integration)
|
2018-04-12 12:28:54 +00:00
|
|
|
|
2020-02-14 18:00:22 +00:00
|
|
|
    @callback
    def _async_schedule_save(self) -> None:
        """Schedule a delayed save of the config entries to storage."""
        # SAVE_DELAY debounces bursts of updates into a single write.
        self._store.async_delay_save(self._data_to_save, SAVE_DELAY)
|
|
|
|
|
|
|
|
@callback
|
2021-03-17 16:34:55 +00:00
|
|
|
def _data_to_save(self) -> dict[str, list[dict[str, Any]]]:
|
2018-08-17 18:18:21 +00:00
|
|
|
"""Return data to save."""
|
2024-10-03 17:51:09 +00:00
|
|
|
# typing does not know that the storage fragment will serialize to a dict
|
|
|
|
return {
|
|
|
|
"entries": [entry.as_storage_fragment for entry in self._entries.values()] # type: ignore[misc]
|
|
|
|
}
|
2018-06-25 16:53:49 +00:00
|
|
|
|
2022-11-17 20:52:57 +00:00
|
|
|
async def async_wait_component(self, entry: ConfigEntry) -> bool:
|
|
|
|
"""Wait for an entry's component to load and return if the entry is loaded.
|
|
|
|
|
|
|
|
This is primarily intended for existing config entries which are loaded at
|
|
|
|
startup, awaiting this function will block until the component and all its
|
|
|
|
config entries are loaded.
|
|
|
|
Config entries which are created after Home Assistant is started can't be waited
|
|
|
|
for, the function will just return if the config entry is loaded or not.
|
|
|
|
"""
|
2024-05-07 08:53:13 +00:00
|
|
|
setup_done = self.hass.data.get(DATA_SETUP_DONE, {})
|
2024-02-22 22:34:46 +00:00
|
|
|
if setup_future := setup_done.get(entry.domain):
|
2024-02-19 09:28:50 +00:00
|
|
|
await setup_future
|
2022-11-17 20:52:57 +00:00
|
|
|
# The component was not loaded.
|
|
|
|
if entry.domain not in self.hass.config.components:
|
|
|
|
return False
|
2024-06-13 01:06:11 +00:00
|
|
|
return entry.state is ConfigEntryState.LOADED
|
2022-11-17 20:52:57 +00:00
|
|
|
|
2018-02-16 22:07:38 +00:00
|
|
|
|
2023-03-29 15:20:51 +00:00
|
|
|
@callback
def _async_abort_entries_match(
    other_entries: list[ConfigEntry], match_dict: dict[str, Any] | None = None
) -> None:
    """Abort if current entries match all data.

    Requires `already_configured` in strings.json in user visible flows.
    """
    # An empty (or omitted) match_dict matches any entry.
    wanted_items = ({} if match_dict is None else match_dict).items()
    for entry in other_entries:
        option_items = entry.options.items()
        entry_items = entry.data.items()
        # A key/value pair may live in either options or data.
        if all(
            pair in option_items or pair in entry_items for pair in wanted_items
        ):
            raise data_entry_flow.AbortFlow("already_configured")
|
|
|
|
|
|
|
|
|
2024-10-08 10:18:45 +00:00
|
|
|
class ConfigEntryBaseFlow(
    data_entry_flow.FlowHandler[ConfigFlowContext, ConfigFlowResult]
):
    """Base class for config and option flows."""

    # Tells the generic FlowHandler machinery which result type to use
    # when building flow results for config/options flows.
    _flow_result = ConfigFlowResult
|
|
|
|
|
|
|
|
|
|
|
|
class ConfigFlow(ConfigEntryBaseFlow):
|
2018-09-14 09:57:31 +00:00
|
|
|
"""Base class for config flows with some helpers."""
|
|
|
|
|
2022-03-25 22:14:48 +00:00
|
|
|
def __init_subclass__(cls, *, domain: str | None = None, **kwargs: Any) -> None:
|
2019-08-20 17:46:51 +00:00
|
|
|
"""Initialize a subclass, register if possible."""
|
2021-12-27 16:55:17 +00:00
|
|
|
super().__init_subclass__(**kwargs)
|
2019-08-20 17:46:51 +00:00
|
|
|
if domain is not None:
|
|
|
|
HANDLERS.register(domain)(cls)
|
|
|
|
|
2019-12-16 18:45:09 +00:00
|
|
|
@property
|
2021-03-17 16:34:55 +00:00
|
|
|
def unique_id(self) -> str | None:
|
2019-12-16 18:45:09 +00:00
|
|
|
"""Return unique ID if available."""
|
|
|
|
if not self.context:
|
|
|
|
return None
|
|
|
|
|
2024-10-08 10:18:45 +00:00
|
|
|
return self.context.get("unique_id")
|
2019-12-16 18:45:09 +00:00
|
|
|
|
2019-08-15 21:11:55 +00:00
|
|
|
    @staticmethod
    @callback
    def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow:
        """Get the options flow for this handler.

        Integrations that support options override this to return their
        OptionsFlow. The default raises UnknownHandler, which lets
        async_supports_options_flow detect that no override exists.
        """
        raise data_entry_flow.UnknownHandler
|
|
|
|
|
2021-12-07 20:50:34 +00:00
|
|
|
    @classmethod
    @callback
    def async_supports_options_flow(cls, config_entry: ConfigEntry) -> bool:
        """Return options flow support for this handler.

        Supported exactly when a subclass has overridden
        async_get_options_flow; detected by comparing against the base
        implementation.
        """
        return cls.async_get_options_flow is not ConfigFlow.async_get_options_flow
|
|
|
|
|
2021-05-11 20:00:12 +00:00
|
|
|
@callback
|
|
|
|
def _async_abort_entries_match(
|
|
|
|
self, match_dict: dict[str, Any] | None = None
|
|
|
|
) -> None:
|
2023-01-24 06:59:32 +00:00
|
|
|
"""Abort if current entries match all data.
|
|
|
|
|
|
|
|
Requires `already_configured` in strings.json in user visible flows.
|
|
|
|
"""
|
2023-03-29 15:20:51 +00:00
|
|
|
_async_abort_entries_match(
|
|
|
|
self._async_current_entries(include_ignore=False), match_dict
|
|
|
|
)
|
2021-05-11 20:00:12 +00:00
|
|
|
|
2024-10-08 08:07:36 +00:00
|
|
|
@callback
|
|
|
|
def _abort_if_unique_id_mismatch(
|
|
|
|
self,
|
|
|
|
*,
|
|
|
|
reason: str = "unique_id_mismatch",
|
2024-10-18 07:04:55 +00:00
|
|
|
description_placeholders: Mapping[str, str] | None = None,
|
2024-10-08 08:07:36 +00:00
|
|
|
) -> None:
|
|
|
|
"""Abort if the unique ID does not match the reauth/reconfigure context.
|
|
|
|
|
|
|
|
Requires strings.json entry corresponding to the `reason` parameter
|
|
|
|
in user visible flows.
|
|
|
|
"""
|
|
|
|
if (
|
|
|
|
self.source == SOURCE_REAUTH
|
|
|
|
and self._get_reauth_entry().unique_id != self.unique_id
|
|
|
|
) or (
|
|
|
|
self.source == SOURCE_RECONFIGURE
|
|
|
|
and self._get_reconfigure_entry().unique_id != self.unique_id
|
|
|
|
):
|
2024-10-18 07:04:55 +00:00
|
|
|
raise data_entry_flow.AbortFlow(reason, description_placeholders)
|
2024-10-08 08:07:36 +00:00
|
|
|
|
2019-12-16 18:45:09 +00:00
|
|
|
    @callback
    def _abort_if_unique_id_configured(
        self,
        updates: dict[str, Any] | None = None,
        reload_on_update: bool = True,
        *,
        error: str = "already_configured",
    ) -> None:
        """Abort if the unique ID is already configured.

        If `updates` is given, the existing entry's data is merged with it
        before aborting; `reload_on_update` controls whether a resulting
        data change schedules a reload of the entry.

        Requires strings.json entry corresponding to the `error` parameter
        in user visible flows.
        """
        # Nothing to compare against if this flow never set a unique id.
        if self.unique_id is None:
            return

        # No existing entry for this domain/unique id: nothing to abort on.
        if not (
            entry := self.hass.config_entries.async_entry_for_domain_unique_id(
                self.handler, self.unique_id
            )
        ):
            return

        should_reload = False
        # async_update_entry is truthy only when something actually changed,
        # so an update with identical data does not trigger a reload.
        if (
            updates is not None
            and self.hass.config_entries.async_update_entry(
                entry, data={**entry.data, **updates}
            )
            and reload_on_update
            and entry.state in (ConfigEntryState.LOADED, ConfigEntryState.SETUP_RETRY)
        ):
            # Existing config entry present, and the
            # entry data just changed
            should_reload = True
        elif (
            self.source in DISCOVERY_SOURCES
            and entry.state is ConfigEntryState.SETUP_RETRY
        ):
            # Existing config entry present in retry state, and we
            # just discovered the unique id so we know its online
            should_reload = True
        # Allow ignored entries to be configured on manual user step
        if entry.source == SOURCE_IGNORE and self.source == SOURCE_USER:
            return
        if should_reload:
            # Schedule (not await) the reload; this callback must not block.
            self.hass.config_entries.async_schedule_reload(entry.entry_id)
        raise data_entry_flow.AbortFlow(error)
|
2019-12-16 18:45:09 +00:00
|
|
|
|
2019-12-16 11:27:43 +00:00
|
|
|
    async def async_set_unique_id(
        self, unique_id: str | None = None, *, raise_on_progress: bool = True
    ) -> ConfigEntry | None:
        """Set a unique ID for the config flow.

        If `raise_on_progress` is True, abort when another flow for the same
        unique ID is already in progress.

        Returns optionally existing config entry with same ID.
        """
        if unique_id is None:
            # Explicitly clear any previously set unique id on the context.
            self.context["unique_id"] = None
            return None

        if raise_on_progress:
            if self._async_in_progress(
                include_uninitialized=True, match_context={"unique_id": unique_id}
            ):
                raise data_entry_flow.AbortFlow("already_in_progress")

        self.context["unique_id"] = unique_id

        # Abort discoveries done using the default discovery unique id
        if unique_id != DEFAULT_DISCOVERY_UNIQUE_ID:
            for progress in self._async_in_progress(
                include_uninitialized=True,
                match_context={"unique_id": DEFAULT_DISCOVERY_UNIQUE_ID},
            ):
                self.hass.config_entries.flow.async_abort(progress["flow_id"])

        return self.hass.config_entries.async_entry_for_domain_unique_id(
            self.handler, unique_id
        )
|
2019-12-16 11:27:43 +00:00
|
|
|
|
2021-03-08 18:54:51 +00:00
|
|
|
    @callback
    def _set_confirm_only(
        self,
    ) -> None:
        """Mark the config flow as only needing user confirmation to finish flow."""
        # NOTE(review): presumably consumed by the flow UI to render a
        # confirm-only step — confirm against the frontend handling.
        self.context["confirm_only"] = True
|
|
|
|
|
2018-09-14 09:57:31 +00:00
|
|
|
@callback
|
2021-03-22 04:57:49 +00:00
|
|
|
def _async_current_entries(
|
|
|
|
self, include_ignore: bool | None = None
|
|
|
|
) -> list[ConfigEntry]:
|
2021-02-04 10:08:10 +00:00
|
|
|
"""Return current entries.
|
|
|
|
|
2023-01-15 22:00:51 +00:00
|
|
|
If the flow is user initiated, filter out ignored entries,
|
|
|
|
unless include_ignore is True.
|
2021-02-04 10:08:10 +00:00
|
|
|
"""
|
2024-02-17 17:04:27 +00:00
|
|
|
return self.hass.config_entries.async_entries(
|
|
|
|
self.handler,
|
|
|
|
include_ignore or (include_ignore is None and self.source != SOURCE_USER),
|
|
|
|
)
|
2018-09-14 09:57:31 +00:00
|
|
|
|
2019-12-16 18:45:09 +00:00
|
|
|
@callback
|
2021-03-17 16:34:55 +00:00
|
|
|
def _async_current_ids(self, include_ignore: bool = True) -> set[str | None]:
|
2019-12-16 18:45:09 +00:00
|
|
|
"""Return current unique IDs."""
|
2020-04-04 18:05:15 +00:00
|
|
|
return {
|
2019-12-16 18:45:09 +00:00
|
|
|
entry.unique_id
|
|
|
|
for entry in self.hass.config_entries.async_entries(self.handler)
|
2019-12-18 06:41:01 +00:00
|
|
|
if include_ignore or entry.source != SOURCE_IGNORE
|
2020-04-04 18:05:15 +00:00
|
|
|
}
|
2019-12-16 18:45:09 +00:00
|
|
|
|
2018-09-14 09:57:31 +00:00
|
|
|
@callback
|
2021-04-15 17:17:07 +00:00
|
|
|
def _async_in_progress(
|
2023-06-11 08:41:38 +00:00
|
|
|
self,
|
|
|
|
include_uninitialized: bool = False,
|
|
|
|
match_context: dict[str, Any] | None = None,
|
2024-02-29 15:52:39 +00:00
|
|
|
) -> list[ConfigFlowResult]:
|
2018-09-14 09:57:31 +00:00
|
|
|
"""Return other in progress flows for current domain."""
|
2019-07-31 19:25:30 +00:00
|
|
|
return [
|
|
|
|
flw
|
2021-10-22 17:19:49 +00:00
|
|
|
for flw in self.hass.config_entries.flow.async_progress_by_handler(
|
2023-06-11 08:41:38 +00:00
|
|
|
self.handler,
|
|
|
|
include_uninitialized=include_uninitialized,
|
|
|
|
match_context=match_context,
|
2021-03-25 17:35:01 +00:00
|
|
|
)
|
2021-10-22 17:19:49 +00:00
|
|
|
if flw["flow_id"] != self.flow_id
|
2019-07-31 19:25:30 +00:00
|
|
|
]
|
2019-02-22 16:59:43 +00:00
|
|
|
|
2024-02-29 15:52:39 +00:00
|
|
|
    async def async_step_ignore(self, user_input: dict[str, Any]) -> ConfigFlowResult:
        """Ignore this config flow.

        Ignoring a config flow works by creating a config entry with source set to
        SOURCE_IGNORE.

        There will only be a single active discovery flow per device, also when the
        integration has multiple discovery sources for the same device. This method
        is called when the user ignores a discovered device or service, we then store
        the key for the flow being ignored.

        Once the ignore config entry is created, ConfigEntriesFlowManager.async_finish_flow
        will make sure the discovery key is kept up to date since it may not be stable
        unlike the unique id.
        """
        # raise_on_progress=False: this flow itself is the in-progress flow
        # for the unique id being ignored.
        await self.async_set_unique_id(user_input["unique_id"], raise_on_progress=False)
        return self.async_create_entry(title=user_input["title"], data={})
|
2019-12-18 06:41:01 +00:00
|
|
|
|
2020-06-15 11:38:38 +00:00
|
|
|
    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle a flow initiated by the user.

        Default implementation: integrations that do not override this
        abort the flow with `not_implemented`.
        """
        return self.async_abort(reason="not_implemented")
|
|
|
|
|
|
|
|
    async def _async_handle_discovery_without_unique_id(self) -> None:
        """Mark this flow discovered, without a unique identifier.

        If a flow initiated by discovery, doesn't have a unique ID, this can
        be used alternatively. It will ensure only 1 flow is started and only
        when the handler has no existing config entries.

        It ensures that the discovery can be ignored by the user.

        Requires `already_configured` and `already_in_progress` in strings.json
        in user visible flows.
        """
        # A real unique id was already assigned; nothing to do here.
        if self.unique_id is not None:
            return

        # Abort if the handler has config entries already
        if self._async_current_entries():
            raise data_entry_flow.AbortFlow("already_configured")

        # Use a special unique id to differentiate
        await self.async_set_unique_id(DEFAULT_DISCOVERY_UNIQUE_ID)
        self._abort_if_unique_id_configured()

        # Abort if any other flow for this handler is already in progress
        if self._async_in_progress(include_uninitialized=True):
            raise data_entry_flow.AbortFlow("already_in_progress")
|
|
|
|
|
Avoid converting discovery_info dataclasses to dict that will be thrown away in config flows (#75451)
* Avoid converting BluetoothServiceInfo to a dict for default discovery
Fixes
```
2022-07-19 09:46:48.303 ERROR (MainThread) [homeassistant] Error doing job: Task exception was never retrieved
Traceback (most recent call last):
File "/Users/bdraco/home-assistant/homeassistant/helpers/discovery_flow.py", line 74, in _async_process_pending_flows
await gather_with_concurrency(
File "/Users/bdraco/home-assistant/homeassistant/util/async_.py", line 201, in gather_with_concurrency
return await gather(
File "/Users/bdraco/home-assistant/homeassistant/util/async_.py", line 199, in sem_task
return await task
File "/Users/bdraco/home-assistant/homeassistant/data_entry_flow.py", line 222, in async_init
flow, result = await task
File "/Users/bdraco/home-assistant/homeassistant/data_entry_flow.py", line 249, in _async_init
result = await self._async_handle_step(flow, flow.init_step, data, init_done)
File "/Users/bdraco/home-assistant/homeassistant/data_entry_flow.py", line 359, in _async_handle_step
result: FlowResult = await getattr(flow, method)(user_input)
File "/Users/bdraco/home-assistant/homeassistant/config_entries.py", line 1484, in async_step_bluetooth
return await self.async_step_discovery(dataclasses.asdict(discovery_info))
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/dataclasses.py", line 1239, in asdict
return _asdict_inner(obj, dict_factory)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/dataclasses.py", line 1246, in _asdict_inner
value = _asdict_inner(getattr(obj, f.name), dict_factory)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/dataclasses.py", line 1280, in _asdict_inner
return copy.deepcopy(obj)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/copy.py", line 172, in deepcopy
y = _reconstruct(x, memo, *rv)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/copy.py", line 271, in _reconstruct
state = deepcopy(state, memo)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/copy.py", line 146, in deepcopy
y = copier(x, memo)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/copy.py", line 231, in _deepcopy_dict
y[deepcopy(key, memo)] = deepcopy(value, memo)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/copy.py", line 161, in deepcopy
rv = reductor(4)
TypeError: Cannot pickle Objective-C objects
```
* Avoid converting BluetoothServiceInfo to a dict for default discovery
Fixes
```
2022-07-19 09:46:48.303 ERROR (MainThread) [homeassistant] Error doing job: Task exception was never retrieved
Traceback (most recent call last):
File "/Users/bdraco/home-assistant/homeassistant/helpers/discovery_flow.py", line 74, in _async_process_pending_flows
await gather_with_concurrency(
File "/Users/bdraco/home-assistant/homeassistant/util/async_.py", line 201, in gather_with_concurrency
return await gather(
File "/Users/bdraco/home-assistant/homeassistant/util/async_.py", line 199, in sem_task
return await task
File "/Users/bdraco/home-assistant/homeassistant/data_entry_flow.py", line 222, in async_init
flow, result = await task
File "/Users/bdraco/home-assistant/homeassistant/data_entry_flow.py", line 249, in _async_init
result = await self._async_handle_step(flow, flow.init_step, data, init_done)
File "/Users/bdraco/home-assistant/homeassistant/data_entry_flow.py", line 359, in _async_handle_step
result: FlowResult = await getattr(flow, method)(user_input)
File "/Users/bdraco/home-assistant/homeassistant/config_entries.py", line 1484, in async_step_bluetooth
return await self.async_step_discovery(dataclasses.asdict(discovery_info))
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/dataclasses.py", line 1239, in asdict
return _asdict_inner(obj, dict_factory)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/dataclasses.py", line 1246, in _asdict_inner
value = _asdict_inner(getattr(obj, f.name), dict_factory)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/dataclasses.py", line 1280, in _asdict_inner
return copy.deepcopy(obj)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/copy.py", line 172, in deepcopy
y = _reconstruct(x, memo, *rv)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/copy.py", line 271, in _reconstruct
state = deepcopy(state, memo)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/copy.py", line 146, in deepcopy
y = copier(x, memo)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/copy.py", line 231, in _deepcopy_dict
y[deepcopy(key, memo)] = deepcopy(value, memo)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/copy.py", line 161, in deepcopy
rv = reductor(4)
TypeError: Cannot pickle Objective-C objects
```
2022-07-19 16:50:30 +00:00
|
|
|
    async def _async_step_discovery_without_unique_id(
        self,
    ) -> ConfigFlowResult:
        """Handle a flow initialized by discovery.

        Applies the default discovery checks, then continues into the
        user step.
        """
        await self._async_handle_discovery_without_unique_id()
        return await self.async_step_user()
|
|
|
|
|
Avoid converting discovery_info dataclasses to dict that will be thrown away in config flows (#75451)
* Avoid converting BluetoothServiceInfo to a dict for default discovery
Fixes
```
2022-07-19 09:46:48.303 ERROR (MainThread) [homeassistant] Error doing job: Task exception was never retrieved
Traceback (most recent call last):
File "/Users/bdraco/home-assistant/homeassistant/helpers/discovery_flow.py", line 74, in _async_process_pending_flows
await gather_with_concurrency(
File "/Users/bdraco/home-assistant/homeassistant/util/async_.py", line 201, in gather_with_concurrency
return await gather(
File "/Users/bdraco/home-assistant/homeassistant/util/async_.py", line 199, in sem_task
return await task
File "/Users/bdraco/home-assistant/homeassistant/data_entry_flow.py", line 222, in async_init
flow, result = await task
File "/Users/bdraco/home-assistant/homeassistant/data_entry_flow.py", line 249, in _async_init
result = await self._async_handle_step(flow, flow.init_step, data, init_done)
File "/Users/bdraco/home-assistant/homeassistant/data_entry_flow.py", line 359, in _async_handle_step
result: FlowResult = await getattr(flow, method)(user_input)
File "/Users/bdraco/home-assistant/homeassistant/config_entries.py", line 1484, in async_step_bluetooth
return await self.async_step_discovery(dataclasses.asdict(discovery_info))
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/dataclasses.py", line 1239, in asdict
return _asdict_inner(obj, dict_factory)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/dataclasses.py", line 1246, in _asdict_inner
value = _asdict_inner(getattr(obj, f.name), dict_factory)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/dataclasses.py", line 1280, in _asdict_inner
return copy.deepcopy(obj)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/copy.py", line 172, in deepcopy
y = _reconstruct(x, memo, *rv)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/copy.py", line 271, in _reconstruct
state = deepcopy(state, memo)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/copy.py", line 146, in deepcopy
y = copier(x, memo)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/copy.py", line 231, in _deepcopy_dict
y[deepcopy(key, memo)] = deepcopy(value, memo)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/copy.py", line 161, in deepcopy
rv = reductor(4)
TypeError: Cannot pickle Objective-C objects
```
* Avoid converting BluetoothServiceInfo to a dict for default discovery
Fixes
```
2022-07-19 09:46:48.303 ERROR (MainThread) [homeassistant] Error doing job: Task exception was never retrieved
Traceback (most recent call last):
File "/Users/bdraco/home-assistant/homeassistant/helpers/discovery_flow.py", line 74, in _async_process_pending_flows
await gather_with_concurrency(
File "/Users/bdraco/home-assistant/homeassistant/util/async_.py", line 201, in gather_with_concurrency
return await gather(
File "/Users/bdraco/home-assistant/homeassistant/util/async_.py", line 199, in sem_task
return await task
File "/Users/bdraco/home-assistant/homeassistant/data_entry_flow.py", line 222, in async_init
flow, result = await task
File "/Users/bdraco/home-assistant/homeassistant/data_entry_flow.py", line 249, in _async_init
result = await self._async_handle_step(flow, flow.init_step, data, init_done)
File "/Users/bdraco/home-assistant/homeassistant/data_entry_flow.py", line 359, in _async_handle_step
result: FlowResult = await getattr(flow, method)(user_input)
File "/Users/bdraco/home-assistant/homeassistant/config_entries.py", line 1484, in async_step_bluetooth
return await self.async_step_discovery(dataclasses.asdict(discovery_info))
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/dataclasses.py", line 1239, in asdict
return _asdict_inner(obj, dict_factory)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/dataclasses.py", line 1246, in _asdict_inner
value = _asdict_inner(getattr(obj, f.name), dict_factory)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/dataclasses.py", line 1280, in _asdict_inner
return copy.deepcopy(obj)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/copy.py", line 172, in deepcopy
y = _reconstruct(x, memo, *rv)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/copy.py", line 271, in _reconstruct
state = deepcopy(state, memo)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/copy.py", line 146, in deepcopy
y = copier(x, memo)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/copy.py", line 231, in _deepcopy_dict
y[deepcopy(key, memo)] = deepcopy(value, memo)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/copy.py", line 161, in deepcopy
rv = reductor(4)
TypeError: Cannot pickle Objective-C objects
```
2022-07-19 16:50:30 +00:00
|
|
|
    async def async_step_discovery(
        self, discovery_info: DiscoveryInfoType
    ) -> ConfigFlowResult:
        """Handle a flow initialized by discovery.

        Default implementation delegates to the shared
        discovery-without-unique-id handling; `discovery_info` is unused
        here but part of the step signature for overriding integrations.
        """
        return await self._async_step_discovery_without_unique_id()
|
|
|
|
|
2020-10-15 20:46:27 +00:00
|
|
|
@callback
|
|
|
|
def async_abort(
|
2022-05-31 08:33:34 +00:00
|
|
|
self,
|
|
|
|
*,
|
|
|
|
reason: str,
|
|
|
|
description_placeholders: Mapping[str, str] | None = None,
|
2024-02-29 15:52:39 +00:00
|
|
|
) -> ConfigFlowResult:
|
2020-10-15 20:46:27 +00:00
|
|
|
"""Abort the config flow."""
|
|
|
|
# Remove reauth notification if no reauth flows are in progress
|
|
|
|
if self.source == SOURCE_REAUTH and not any(
|
2023-06-11 08:41:38 +00:00
|
|
|
ent["flow_id"] != self.flow_id
|
2021-10-22 17:19:49 +00:00
|
|
|
for ent in self.hass.config_entries.flow.async_progress_by_handler(
|
2023-06-11 08:41:38 +00:00
|
|
|
self.handler, match_context={"source": SOURCE_REAUTH}
|
2021-10-22 17:19:49 +00:00
|
|
|
)
|
2020-10-15 20:46:27 +00:00
|
|
|
):
|
2022-01-11 16:24:59 +00:00
|
|
|
persistent_notification.async_dismiss(
|
|
|
|
self.hass, RECONFIGURE_NOTIFICATION_ID
|
2020-10-15 20:46:27 +00:00
|
|
|
)
|
|
|
|
|
|
|
|
return super().async_abort(
|
|
|
|
reason=reason, description_placeholders=description_placeholders
|
|
|
|
)
|
|
|
|
|
2022-07-08 23:55:31 +00:00
|
|
|
    async def async_step_bluetooth(
        self, discovery_info: BluetoothServiceInfoBleak
    ) -> ConfigFlowResult:
        """Handle a flow initialized by Bluetooth discovery.

        Default implementation: the discovery_info dataclass is deliberately
        not converted to a dict (it can contain objects that cannot be
        deep-copied/pickled); the flow is simply forwarded to the generic
        discovery step without assigning a unique id.
        """
        return await self._async_step_discovery_without_unique_id()
|
2022-07-08 23:55:31 +00:00
|
|
|
|
2022-02-11 09:31:51 +00:00
|
|
|
    async def async_step_dhcp(
        self, discovery_info: DhcpServiceInfo
    ) -> ConfigFlowResult:
        """Handle a flow initialized by DHCP discovery.

        Default implementation: forwards to the generic discovery step
        without assigning a unique id; discovery_info is intentionally
        kept as a dataclass rather than converted to a dict.
        """
        return await self._async_step_discovery_without_unique_id()
|
2022-02-11 09:31:51 +00:00
|
|
|
|
2021-04-17 10:42:31 +00:00
|
|
|
    async def async_step_hassio(
        self, discovery_info: HassioServiceInfo
    ) -> ConfigFlowResult:
        """Handle a flow initialized by HASS IO (Supervisor) discovery.

        Default implementation: forwards to the generic discovery step
        without assigning a unique id; discovery_info is intentionally
        kept as a dataclass rather than converted to a dict.
        """
        return await self._async_step_discovery_without_unique_id()
|
2021-04-17 10:42:31 +00:00
|
|
|
|
2022-02-11 09:31:51 +00:00
|
|
|
    async def async_step_integration_discovery(
        self, discovery_info: DiscoveryInfoType
    ) -> ConfigFlowResult:
        """Handle a flow initialized by integration-specific discovery.

        Default implementation: forwards to the generic discovery step
        without assigning a unique id.
        """
        return await self._async_step_discovery_without_unique_id()
|
2022-02-11 09:31:51 +00:00
|
|
|
|
2021-04-17 10:42:31 +00:00
|
|
|
    async def async_step_homekit(
        self, discovery_info: ZeroconfServiceInfo
    ) -> ConfigFlowResult:
        """Handle a flow initialized by HomeKit discovery.

        Default implementation: forwards to the generic discovery step
        without assigning a unique id; discovery_info is intentionally
        kept as a dataclass rather than converted to a dict.
        """
        return await self._async_step_discovery_without_unique_id()
|
2021-04-17 10:42:31 +00:00
|
|
|
|
|
|
|
async def async_step_mqtt(
|
2021-11-16 12:30:38 +00:00
|
|
|
self, discovery_info: MqttServiceInfo
|
2024-02-29 15:52:39 +00:00
|
|
|
) -> ConfigFlowResult:
|
2021-04-17 10:42:31 +00:00
|
|
|
"""Handle a flow initialized by MQTT discovery."""
|
Avoid converting discovery_info dataclasses to dict that will be thrown away in config flows (#75451)
* Avoid converting BluetoothServiceInfo to a dict for default discovery
Fixes
```
2022-07-19 09:46:48.303 ERROR (MainThread) [homeassistant] Error doing job: Task exception was never retrieved
Traceback (most recent call last):
File "/Users/bdraco/home-assistant/homeassistant/helpers/discovery_flow.py", line 74, in _async_process_pending_flows
await gather_with_concurrency(
File "/Users/bdraco/home-assistant/homeassistant/util/async_.py", line 201, in gather_with_concurrency
return await gather(
File "/Users/bdraco/home-assistant/homeassistant/util/async_.py", line 199, in sem_task
return await task
File "/Users/bdraco/home-assistant/homeassistant/data_entry_flow.py", line 222, in async_init
flow, result = await task
File "/Users/bdraco/home-assistant/homeassistant/data_entry_flow.py", line 249, in _async_init
result = await self._async_handle_step(flow, flow.init_step, data, init_done)
File "/Users/bdraco/home-assistant/homeassistant/data_entry_flow.py", line 359, in _async_handle_step
result: FlowResult = await getattr(flow, method)(user_input)
File "/Users/bdraco/home-assistant/homeassistant/config_entries.py", line 1484, in async_step_bluetooth
return await self.async_step_discovery(dataclasses.asdict(discovery_info))
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/dataclasses.py", line 1239, in asdict
return _asdict_inner(obj, dict_factory)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/dataclasses.py", line 1246, in _asdict_inner
value = _asdict_inner(getattr(obj, f.name), dict_factory)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/dataclasses.py", line 1280, in _asdict_inner
return copy.deepcopy(obj)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/copy.py", line 172, in deepcopy
y = _reconstruct(x, memo, *rv)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/copy.py", line 271, in _reconstruct
state = deepcopy(state, memo)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/copy.py", line 146, in deepcopy
y = copier(x, memo)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/copy.py", line 231, in _deepcopy_dict
y[deepcopy(key, memo)] = deepcopy(value, memo)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/copy.py", line 161, in deepcopy
rv = reductor(4)
TypeError: Cannot pickle Objective-C objects
```
* Avoid converting BluetoothServiceInfo to a dict for default discovery
Fixes
```
2022-07-19 09:46:48.303 ERROR (MainThread) [homeassistant] Error doing job: Task exception was never retrieved
Traceback (most recent call last):
File "/Users/bdraco/home-assistant/homeassistant/helpers/discovery_flow.py", line 74, in _async_process_pending_flows
await gather_with_concurrency(
File "/Users/bdraco/home-assistant/homeassistant/util/async_.py", line 201, in gather_with_concurrency
return await gather(
File "/Users/bdraco/home-assistant/homeassistant/util/async_.py", line 199, in sem_task
return await task
File "/Users/bdraco/home-assistant/homeassistant/data_entry_flow.py", line 222, in async_init
flow, result = await task
File "/Users/bdraco/home-assistant/homeassistant/data_entry_flow.py", line 249, in _async_init
result = await self._async_handle_step(flow, flow.init_step, data, init_done)
File "/Users/bdraco/home-assistant/homeassistant/data_entry_flow.py", line 359, in _async_handle_step
result: FlowResult = await getattr(flow, method)(user_input)
File "/Users/bdraco/home-assistant/homeassistant/config_entries.py", line 1484, in async_step_bluetooth
return await self.async_step_discovery(dataclasses.asdict(discovery_info))
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/dataclasses.py", line 1239, in asdict
return _asdict_inner(obj, dict_factory)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/dataclasses.py", line 1246, in _asdict_inner
value = _asdict_inner(getattr(obj, f.name), dict_factory)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/dataclasses.py", line 1280, in _asdict_inner
return copy.deepcopy(obj)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/copy.py", line 172, in deepcopy
y = _reconstruct(x, memo, *rv)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/copy.py", line 271, in _reconstruct
state = deepcopy(state, memo)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/copy.py", line 146, in deepcopy
y = copier(x, memo)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/copy.py", line 231, in _deepcopy_dict
y[deepcopy(key, memo)] = deepcopy(value, memo)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/copy.py", line 161, in deepcopy
rv = reductor(4)
TypeError: Cannot pickle Objective-C objects
```
2022-07-19 16:50:30 +00:00
|
|
|
return await self._async_step_discovery_without_unique_id()
|
2021-04-17 10:42:31 +00:00
|
|
|
|
|
|
|
async def async_step_ssdp(
|
2021-11-29 16:10:07 +00:00
|
|
|
self, discovery_info: SsdpServiceInfo
|
2024-02-29 15:52:39 +00:00
|
|
|
) -> ConfigFlowResult:
|
2021-04-17 10:42:31 +00:00
|
|
|
"""Handle a flow initialized by SSDP discovery."""
|
Avoid converting discovery_info dataclasses to dict that will be thrown away in config flows (#75451)
* Avoid converting BluetoothServiceInfo to a dict for default discovery
Fixes
```
2022-07-19 09:46:48.303 ERROR (MainThread) [homeassistant] Error doing job: Task exception was never retrieved
Traceback (most recent call last):
File "/Users/bdraco/home-assistant/homeassistant/helpers/discovery_flow.py", line 74, in _async_process_pending_flows
await gather_with_concurrency(
File "/Users/bdraco/home-assistant/homeassistant/util/async_.py", line 201, in gather_with_concurrency
return await gather(
File "/Users/bdraco/home-assistant/homeassistant/util/async_.py", line 199, in sem_task
return await task
File "/Users/bdraco/home-assistant/homeassistant/data_entry_flow.py", line 222, in async_init
flow, result = await task
File "/Users/bdraco/home-assistant/homeassistant/data_entry_flow.py", line 249, in _async_init
result = await self._async_handle_step(flow, flow.init_step, data, init_done)
File "/Users/bdraco/home-assistant/homeassistant/data_entry_flow.py", line 359, in _async_handle_step
result: FlowResult = await getattr(flow, method)(user_input)
File "/Users/bdraco/home-assistant/homeassistant/config_entries.py", line 1484, in async_step_bluetooth
return await self.async_step_discovery(dataclasses.asdict(discovery_info))
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/dataclasses.py", line 1239, in asdict
return _asdict_inner(obj, dict_factory)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/dataclasses.py", line 1246, in _asdict_inner
value = _asdict_inner(getattr(obj, f.name), dict_factory)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/dataclasses.py", line 1280, in _asdict_inner
return copy.deepcopy(obj)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/copy.py", line 172, in deepcopy
y = _reconstruct(x, memo, *rv)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/copy.py", line 271, in _reconstruct
state = deepcopy(state, memo)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/copy.py", line 146, in deepcopy
y = copier(x, memo)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/copy.py", line 231, in _deepcopy_dict
y[deepcopy(key, memo)] = deepcopy(value, memo)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/copy.py", line 161, in deepcopy
rv = reductor(4)
TypeError: Cannot pickle Objective-C objects
```
* Avoid converting BluetoothServiceInfo to a dict for default discovery
Fixes
```
2022-07-19 09:46:48.303 ERROR (MainThread) [homeassistant] Error doing job: Task exception was never retrieved
Traceback (most recent call last):
File "/Users/bdraco/home-assistant/homeassistant/helpers/discovery_flow.py", line 74, in _async_process_pending_flows
await gather_with_concurrency(
File "/Users/bdraco/home-assistant/homeassistant/util/async_.py", line 201, in gather_with_concurrency
return await gather(
File "/Users/bdraco/home-assistant/homeassistant/util/async_.py", line 199, in sem_task
return await task
File "/Users/bdraco/home-assistant/homeassistant/data_entry_flow.py", line 222, in async_init
flow, result = await task
File "/Users/bdraco/home-assistant/homeassistant/data_entry_flow.py", line 249, in _async_init
result = await self._async_handle_step(flow, flow.init_step, data, init_done)
File "/Users/bdraco/home-assistant/homeassistant/data_entry_flow.py", line 359, in _async_handle_step
result: FlowResult = await getattr(flow, method)(user_input)
File "/Users/bdraco/home-assistant/homeassistant/config_entries.py", line 1484, in async_step_bluetooth
return await self.async_step_discovery(dataclasses.asdict(discovery_info))
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/dataclasses.py", line 1239, in asdict
return _asdict_inner(obj, dict_factory)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/dataclasses.py", line 1246, in _asdict_inner
value = _asdict_inner(getattr(obj, f.name), dict_factory)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/dataclasses.py", line 1280, in _asdict_inner
return copy.deepcopy(obj)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/copy.py", line 172, in deepcopy
y = _reconstruct(x, memo, *rv)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/copy.py", line 271, in _reconstruct
state = deepcopy(state, memo)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/copy.py", line 146, in deepcopy
y = copier(x, memo)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/copy.py", line 231, in _deepcopy_dict
y[deepcopy(key, memo)] = deepcopy(value, memo)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/copy.py", line 161, in deepcopy
rv = reductor(4)
TypeError: Cannot pickle Objective-C objects
```
2022-07-19 16:50:30 +00:00
|
|
|
return await self._async_step_discovery_without_unique_id()
|
2021-04-17 10:42:31 +00:00
|
|
|
|
2024-02-29 15:52:39 +00:00
|
|
|
async def async_step_usb(self, discovery_info: UsbServiceInfo) -> ConfigFlowResult:
|
2021-08-20 19:04:18 +00:00
|
|
|
"""Handle a flow initialized by USB discovery."""
|
Avoid converting discovery_info dataclasses to dict that will be thrown away in config flows (#75451)
* Avoid converting BluetoothServiceInfo to a dict for default discovery
Fixes
```
2022-07-19 09:46:48.303 ERROR (MainThread) [homeassistant] Error doing job: Task exception was never retrieved
Traceback (most recent call last):
File "/Users/bdraco/home-assistant/homeassistant/helpers/discovery_flow.py", line 74, in _async_process_pending_flows
await gather_with_concurrency(
File "/Users/bdraco/home-assistant/homeassistant/util/async_.py", line 201, in gather_with_concurrency
return await gather(
File "/Users/bdraco/home-assistant/homeassistant/util/async_.py", line 199, in sem_task
return await task
File "/Users/bdraco/home-assistant/homeassistant/data_entry_flow.py", line 222, in async_init
flow, result = await task
File "/Users/bdraco/home-assistant/homeassistant/data_entry_flow.py", line 249, in _async_init
result = await self._async_handle_step(flow, flow.init_step, data, init_done)
File "/Users/bdraco/home-assistant/homeassistant/data_entry_flow.py", line 359, in _async_handle_step
result: FlowResult = await getattr(flow, method)(user_input)
File "/Users/bdraco/home-assistant/homeassistant/config_entries.py", line 1484, in async_step_bluetooth
return await self.async_step_discovery(dataclasses.asdict(discovery_info))
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/dataclasses.py", line 1239, in asdict
return _asdict_inner(obj, dict_factory)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/dataclasses.py", line 1246, in _asdict_inner
value = _asdict_inner(getattr(obj, f.name), dict_factory)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/dataclasses.py", line 1280, in _asdict_inner
return copy.deepcopy(obj)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/copy.py", line 172, in deepcopy
y = _reconstruct(x, memo, *rv)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/copy.py", line 271, in _reconstruct
state = deepcopy(state, memo)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/copy.py", line 146, in deepcopy
y = copier(x, memo)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/copy.py", line 231, in _deepcopy_dict
y[deepcopy(key, memo)] = deepcopy(value, memo)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/copy.py", line 161, in deepcopy
rv = reductor(4)
TypeError: Cannot pickle Objective-C objects
```
* Avoid converting BluetoothServiceInfo to a dict for default discovery
Fixes
```
2022-07-19 09:46:48.303 ERROR (MainThread) [homeassistant] Error doing job: Task exception was never retrieved
Traceback (most recent call last):
File "/Users/bdraco/home-assistant/homeassistant/helpers/discovery_flow.py", line 74, in _async_process_pending_flows
await gather_with_concurrency(
File "/Users/bdraco/home-assistant/homeassistant/util/async_.py", line 201, in gather_with_concurrency
return await gather(
File "/Users/bdraco/home-assistant/homeassistant/util/async_.py", line 199, in sem_task
return await task
File "/Users/bdraco/home-assistant/homeassistant/data_entry_flow.py", line 222, in async_init
flow, result = await task
File "/Users/bdraco/home-assistant/homeassistant/data_entry_flow.py", line 249, in _async_init
result = await self._async_handle_step(flow, flow.init_step, data, init_done)
File "/Users/bdraco/home-assistant/homeassistant/data_entry_flow.py", line 359, in _async_handle_step
result: FlowResult = await getattr(flow, method)(user_input)
File "/Users/bdraco/home-assistant/homeassistant/config_entries.py", line 1484, in async_step_bluetooth
return await self.async_step_discovery(dataclasses.asdict(discovery_info))
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/dataclasses.py", line 1239, in asdict
return _asdict_inner(obj, dict_factory)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/dataclasses.py", line 1246, in _asdict_inner
value = _asdict_inner(getattr(obj, f.name), dict_factory)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/dataclasses.py", line 1280, in _asdict_inner
return copy.deepcopy(obj)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/copy.py", line 172, in deepcopy
y = _reconstruct(x, memo, *rv)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/copy.py", line 271, in _reconstruct
state = deepcopy(state, memo)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/copy.py", line 146, in deepcopy
y = copier(x, memo)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/copy.py", line 231, in _deepcopy_dict
y[deepcopy(key, memo)] = deepcopy(value, memo)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/copy.py", line 161, in deepcopy
rv = reductor(4)
TypeError: Cannot pickle Objective-C objects
```
2022-07-19 16:50:30 +00:00
|
|
|
return await self._async_step_discovery_without_unique_id()
|
2021-08-20 19:04:18 +00:00
|
|
|
|
2022-02-11 09:31:51 +00:00
|
|
|
async def async_step_zeroconf(
|
|
|
|
self, discovery_info: ZeroconfServiceInfo
|
2024-02-29 15:52:39 +00:00
|
|
|
) -> ConfigFlowResult:
|
2022-02-11 09:31:51 +00:00
|
|
|
"""Handle a flow initialized by Zeroconf discovery."""
|
Avoid converting discovery_info dataclasses to dict that will be thrown away in config flows (#75451)
* Avoid converting BluetoothServiceInfo to a dict for default discovery
Fixes
```
2022-07-19 09:46:48.303 ERROR (MainThread) [homeassistant] Error doing job: Task exception was never retrieved
Traceback (most recent call last):
File "/Users/bdraco/home-assistant/homeassistant/helpers/discovery_flow.py", line 74, in _async_process_pending_flows
await gather_with_concurrency(
File "/Users/bdraco/home-assistant/homeassistant/util/async_.py", line 201, in gather_with_concurrency
return await gather(
File "/Users/bdraco/home-assistant/homeassistant/util/async_.py", line 199, in sem_task
return await task
File "/Users/bdraco/home-assistant/homeassistant/data_entry_flow.py", line 222, in async_init
flow, result = await task
File "/Users/bdraco/home-assistant/homeassistant/data_entry_flow.py", line 249, in _async_init
result = await self._async_handle_step(flow, flow.init_step, data, init_done)
File "/Users/bdraco/home-assistant/homeassistant/data_entry_flow.py", line 359, in _async_handle_step
result: FlowResult = await getattr(flow, method)(user_input)
File "/Users/bdraco/home-assistant/homeassistant/config_entries.py", line 1484, in async_step_bluetooth
return await self.async_step_discovery(dataclasses.asdict(discovery_info))
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/dataclasses.py", line 1239, in asdict
return _asdict_inner(obj, dict_factory)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/dataclasses.py", line 1246, in _asdict_inner
value = _asdict_inner(getattr(obj, f.name), dict_factory)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/dataclasses.py", line 1280, in _asdict_inner
return copy.deepcopy(obj)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/copy.py", line 172, in deepcopy
y = _reconstruct(x, memo, *rv)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/copy.py", line 271, in _reconstruct
state = deepcopy(state, memo)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/copy.py", line 146, in deepcopy
y = copier(x, memo)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/copy.py", line 231, in _deepcopy_dict
y[deepcopy(key, memo)] = deepcopy(value, memo)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/copy.py", line 161, in deepcopy
rv = reductor(4)
TypeError: Cannot pickle Objective-C objects
```
* Avoid converting BluetoothServiceInfo to a dict for default discovery
Fixes
```
2022-07-19 09:46:48.303 ERROR (MainThread) [homeassistant] Error doing job: Task exception was never retrieved
Traceback (most recent call last):
File "/Users/bdraco/home-assistant/homeassistant/helpers/discovery_flow.py", line 74, in _async_process_pending_flows
await gather_with_concurrency(
File "/Users/bdraco/home-assistant/homeassistant/util/async_.py", line 201, in gather_with_concurrency
return await gather(
File "/Users/bdraco/home-assistant/homeassistant/util/async_.py", line 199, in sem_task
return await task
File "/Users/bdraco/home-assistant/homeassistant/data_entry_flow.py", line 222, in async_init
flow, result = await task
File "/Users/bdraco/home-assistant/homeassistant/data_entry_flow.py", line 249, in _async_init
result = await self._async_handle_step(flow, flow.init_step, data, init_done)
File "/Users/bdraco/home-assistant/homeassistant/data_entry_flow.py", line 359, in _async_handle_step
result: FlowResult = await getattr(flow, method)(user_input)
File "/Users/bdraco/home-assistant/homeassistant/config_entries.py", line 1484, in async_step_bluetooth
return await self.async_step_discovery(dataclasses.asdict(discovery_info))
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/dataclasses.py", line 1239, in asdict
return _asdict_inner(obj, dict_factory)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/dataclasses.py", line 1246, in _asdict_inner
value = _asdict_inner(getattr(obj, f.name), dict_factory)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/dataclasses.py", line 1280, in _asdict_inner
return copy.deepcopy(obj)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/copy.py", line 172, in deepcopy
y = _reconstruct(x, memo, *rv)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/copy.py", line 271, in _reconstruct
state = deepcopy(state, memo)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/copy.py", line 146, in deepcopy
y = copier(x, memo)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/copy.py", line 231, in _deepcopy_dict
y[deepcopy(key, memo)] = deepcopy(value, memo)
File "/opt/homebrew/Cellar/python@3.10/3.10.5/Frameworks/Python.framework/Versions/3.10/lib/python3.10/copy.py", line 161, in deepcopy
rv = reductor(4)
TypeError: Cannot pickle Objective-C objects
```
2022-07-19 16:50:30 +00:00
|
|
|
return await self._async_step_discovery_without_unique_id()
|
2022-02-11 09:31:51 +00:00
|
|
|
|
2021-05-06 05:14:01 +00:00
|
|
|
@callback
|
2022-12-09 09:24:08 +00:00
|
|
|
def async_create_entry( # type: ignore[override]
|
2021-05-06 05:14:01 +00:00
|
|
|
self,
|
|
|
|
*,
|
|
|
|
title: str,
|
|
|
|
data: Mapping[str, Any],
|
|
|
|
description: str | None = None,
|
2022-05-31 08:33:34 +00:00
|
|
|
description_placeholders: Mapping[str, str] | None = None,
|
2021-05-06 05:14:01 +00:00
|
|
|
options: Mapping[str, Any] | None = None,
|
2024-02-29 15:52:39 +00:00
|
|
|
) -> ConfigFlowResult:
|
2021-05-06 05:14:01 +00:00
|
|
|
"""Finish config flow and create a config entry."""
|
2024-10-08 13:19:58 +00:00
|
|
|
if self.source in {SOURCE_REAUTH, SOURCE_RECONFIGURE}:
|
|
|
|
report_issue = async_suggest_report_issue(
|
|
|
|
self.hass, integration_domain=self.handler
|
|
|
|
)
|
|
|
|
_LOGGER.warning(
|
|
|
|
(
|
|
|
|
"Detected %s config flow creating a new entry, "
|
|
|
|
"when it is expected to update an existing entry and abort. "
|
|
|
|
"This will stop working in %s, please %s"
|
|
|
|
),
|
|
|
|
self.source,
|
|
|
|
"2025.11",
|
|
|
|
report_issue,
|
|
|
|
)
|
2021-05-06 05:14:01 +00:00
|
|
|
result = super().async_create_entry(
|
|
|
|
title=title,
|
|
|
|
data=data,
|
|
|
|
description=description,
|
|
|
|
description_placeholders=description_placeholders,
|
|
|
|
)
|
|
|
|
|
2024-02-29 15:52:39 +00:00
|
|
|
result["minor_version"] = self.MINOR_VERSION
|
2024-05-13 07:39:18 +00:00
|
|
|
result["options"] = options or {}
|
2024-02-29 15:52:39 +00:00
|
|
|
result["version"] = self.VERSION
|
2021-05-06 05:14:01 +00:00
|
|
|
|
|
|
|
return result
|
|
|
|
|
2024-01-22 16:40:20 +00:00
|
|
|
@callback
|
|
|
|
def async_update_reload_and_abort(
|
|
|
|
self,
|
|
|
|
entry: ConfigEntry,
|
2024-01-22 22:01:55 +00:00
|
|
|
*,
|
|
|
|
unique_id: str | None | UndefinedType = UNDEFINED,
|
2024-01-22 16:40:20 +00:00
|
|
|
title: str | UndefinedType = UNDEFINED,
|
|
|
|
data: Mapping[str, Any] | UndefinedType = UNDEFINED,
|
2024-10-09 05:47:18 +00:00
|
|
|
data_updates: Mapping[str, Any] | UndefinedType = UNDEFINED,
|
2024-01-22 16:40:20 +00:00
|
|
|
options: Mapping[str, Any] | UndefinedType = UNDEFINED,
|
2024-10-07 07:02:58 +00:00
|
|
|
reason: str | UndefinedType = UNDEFINED,
|
2024-04-24 13:13:33 +00:00
|
|
|
reload_even_if_entry_is_unchanged: bool = True,
|
2024-02-29 15:52:39 +00:00
|
|
|
) -> ConfigFlowResult:
|
2024-10-09 05:47:18 +00:00
|
|
|
"""Update config entry, reload config entry and finish config flow.
|
|
|
|
|
|
|
|
:param data: replace the entry data with new data
|
|
|
|
:param data_updates: add items from data_updates to entry data - existing keys
|
|
|
|
are overridden
|
|
|
|
:param options: replace the entry options with new options
|
|
|
|
:param title: replace the title of the entry
|
|
|
|
:param unique_id: replace the unique_id of the entry
|
|
|
|
|
|
|
|
:param reason: set the reason for the abort, defaults to
|
|
|
|
`reauth_successful` or `reconfigure_successful` based on flow source
|
|
|
|
|
|
|
|
:param reload_even_if_entry_is_unchanged: set this to `False` if the entry
|
|
|
|
should not be reloaded if it is unchanged
|
|
|
|
"""
|
|
|
|
if data_updates is not UNDEFINED:
|
|
|
|
if data is not UNDEFINED:
|
|
|
|
raise ValueError("Cannot set both data and data_updates")
|
|
|
|
data = entry.data | data_updates
|
2024-01-22 16:40:20 +00:00
|
|
|
result = self.hass.config_entries.async_update_entry(
|
|
|
|
entry=entry,
|
2024-01-22 22:01:55 +00:00
|
|
|
unique_id=unique_id,
|
2024-01-22 16:40:20 +00:00
|
|
|
title=title,
|
|
|
|
data=data,
|
|
|
|
options=options,
|
|
|
|
)
|
2024-04-24 13:13:33 +00:00
|
|
|
if reload_even_if_entry_is_unchanged or result:
|
2024-02-20 01:14:45 +00:00
|
|
|
self.hass.config_entries.async_schedule_reload(entry.entry_id)
|
2024-10-07 07:02:58 +00:00
|
|
|
if reason is UNDEFINED:
|
|
|
|
reason = "reauth_successful"
|
|
|
|
if self.source == SOURCE_RECONFIGURE:
|
|
|
|
reason = "reconfigure_successful"
|
2024-01-22 16:40:20 +00:00
|
|
|
return self.async_abort(reason=reason)
|
|
|
|
|
2024-09-27 08:51:36 +00:00
|
|
|
def is_matching(self, other_flow: Self) -> bool:
|
|
|
|
"""Return True if other_flow is matching this flow."""
|
|
|
|
raise NotImplementedError
|
|
|
|
|
2024-10-02 10:00:07 +00:00
|
|
|
@property
|
|
|
|
def _reauth_entry_id(self) -> str:
|
|
|
|
"""Return reauth entry id."""
|
|
|
|
if self.source != SOURCE_REAUTH:
|
|
|
|
raise ValueError(f"Source is {self.source}, expected {SOURCE_REAUTH}")
|
2024-10-08 10:18:45 +00:00
|
|
|
return self.context["entry_id"]
|
2024-10-02 10:00:07 +00:00
|
|
|
|
|
|
|
@callback
|
|
|
|
def _get_reauth_entry(self) -> ConfigEntry:
|
|
|
|
"""Return the reauth config entry linked to the current context."""
|
|
|
|
if entry := self.hass.config_entries.async_get_entry(self._reauth_entry_id):
|
|
|
|
return entry
|
|
|
|
raise UnknownEntry
|
|
|
|
|
|
|
|
@property
|
|
|
|
def _reconfigure_entry_id(self) -> str:
|
|
|
|
"""Return reconfigure entry id."""
|
|
|
|
if self.source != SOURCE_RECONFIGURE:
|
|
|
|
raise ValueError(f"Source is {self.source}, expected {SOURCE_RECONFIGURE}")
|
2024-10-08 10:18:45 +00:00
|
|
|
return self.context["entry_id"]
|
2024-10-02 10:00:07 +00:00
|
|
|
|
|
|
|
@callback
|
|
|
|
def _get_reconfigure_entry(self) -> ConfigEntry:
|
|
|
|
"""Return the reconfigure config entry linked to the current context."""
|
|
|
|
if entry := self.hass.config_entries.async_get_entry(
|
|
|
|
self._reconfigure_entry_id
|
|
|
|
):
|
|
|
|
return entry
|
|
|
|
raise UnknownEntry
|
|
|
|
|
2019-02-22 16:59:43 +00:00
|
|
|
|
2024-10-08 10:18:45 +00:00
|
|
|
class OptionsFlowManager(
|
|
|
|
data_entry_flow.FlowManager[ConfigFlowContext, ConfigFlowResult]
|
|
|
|
):
|
2019-02-22 16:59:43 +00:00
|
|
|
"""Flow to set options for a configuration entry."""
|
|
|
|
|
2024-02-29 15:52:39 +00:00
|
|
|
_flow_result = ConfigFlowResult
|
|
|
|
|
2023-08-22 08:29:16 +00:00
|
|
|
def _async_get_config_entry(self, config_entry_id: str) -> ConfigEntry:
|
|
|
|
"""Return config entry or raise if not found."""
|
|
|
|
entry = self.hass.config_entries.async_get_entry(config_entry_id)
|
|
|
|
if entry is None:
|
|
|
|
raise UnknownEntry(config_entry_id)
|
|
|
|
|
|
|
|
return entry
|
|
|
|
|
2020-01-03 10:52:01 +00:00
|
|
|
async def async_create_flow(
|
|
|
|
self,
|
2022-12-15 11:41:20 +00:00
|
|
|
handler_key: str,
|
2020-01-03 10:52:01 +00:00
|
|
|
*,
|
2024-10-08 10:18:45 +00:00
|
|
|
context: ConfigFlowContext | None = None,
|
2021-03-17 16:34:55 +00:00
|
|
|
data: dict[str, Any] | None = None,
|
2021-02-12 09:58:20 +00:00
|
|
|
) -> OptionsFlow:
|
2019-02-22 16:59:43 +00:00
|
|
|
"""Create an options flow for a config entry.
|
|
|
|
|
|
|
|
Entry_id and flow.handler is the same thing to map entry with flow.
|
|
|
|
"""
|
2023-08-22 08:29:16 +00:00
|
|
|
entry = self._async_get_config_entry(handler_key)
|
2023-08-07 06:25:03 +00:00
|
|
|
handler = await _async_get_flow_handler(self.hass, entry.domain, {})
|
|
|
|
return handler.async_get_options_flow(entry)
|
2019-02-22 16:59:43 +00:00
|
|
|
|
2020-01-03 10:52:01 +00:00
|
|
|
async def async_finish_flow(
|
2024-03-05 21:52:11 +00:00
|
|
|
self,
|
2024-10-08 10:18:45 +00:00
|
|
|
flow: data_entry_flow.FlowHandler[ConfigFlowContext, ConfigFlowResult],
|
2024-03-05 21:52:11 +00:00
|
|
|
result: ConfigFlowResult,
|
2024-02-29 15:52:39 +00:00
|
|
|
) -> ConfigFlowResult:
|
2019-02-22 16:59:43 +00:00
|
|
|
"""Finish an options flow and update options for configuration entry.
|
|
|
|
|
2024-09-18 16:19:13 +00:00
|
|
|
This method is called when a flow step returns FlowResultType.ABORT or
|
|
|
|
FlowResultType.CREATE_ENTRY.
|
|
|
|
|
2019-02-22 16:59:43 +00:00
|
|
|
Flow.handler and entry_id is the same thing to map flow with entry.
|
|
|
|
"""
|
2020-01-03 10:52:01 +00:00
|
|
|
flow = cast(OptionsFlow, flow)
|
|
|
|
|
2022-06-08 05:02:44 +00:00
|
|
|
if result["type"] != data_entry_flow.FlowResultType.CREATE_ENTRY:
|
2020-11-09 07:59:42 +00:00
|
|
|
return result
|
|
|
|
|
2019-02-22 16:59:43 +00:00
|
|
|
entry = self.hass.config_entries.async_get_entry(flow.handler)
|
|
|
|
if entry is None:
|
2020-01-03 10:52:01 +00:00
|
|
|
raise UnknownEntry(flow.handler)
|
2020-06-23 00:49:01 +00:00
|
|
|
if result["data"] is not None:
|
|
|
|
self.hass.config_entries.async_update_entry(entry, options=result["data"])
|
2019-02-22 16:59:43 +00:00
|
|
|
|
2019-07-31 19:25:30 +00:00
|
|
|
result["result"] = True
|
2019-02-22 16:59:43 +00:00
|
|
|
return result
|
2019-08-15 21:11:55 +00:00
|
|
|
|
2024-03-05 21:52:11 +00:00
|
|
|
async def _async_setup_preview(
|
2024-10-08 10:18:45 +00:00
|
|
|
self, flow: data_entry_flow.FlowHandler[ConfigFlowContext, ConfigFlowResult]
|
2024-03-05 21:52:11 +00:00
|
|
|
) -> None:
|
2023-08-22 08:29:16 +00:00
|
|
|
"""Set up preview for an option flow handler."""
|
|
|
|
entry = self._async_get_config_entry(flow.handler)
|
|
|
|
await _load_integration(self.hass, entry.domain, {})
|
|
|
|
if entry.domain not in self._preview:
|
|
|
|
self._preview.add(entry.domain)
|
2023-08-24 09:59:24 +00:00
|
|
|
await flow.async_setup_preview(self.hass)
|
2023-08-22 08:29:16 +00:00
|
|
|
|
2019-08-15 21:11:55 +00:00
|
|
|
|
2024-02-29 15:52:39 +00:00
|
|
|
class OptionsFlow(ConfigEntryBaseFlow):
    """Base class for config options flows."""

    # Holds the entry_id of the config entry the options flow belongs to.
    handler: str

    @callback
    def _async_abort_entries_match(
        self, match_dict: dict[str, Any] | None = None
    ) -> None:
        """Abort if another current entry matches all data.

        Requires `already_configured` in strings.json in user visible flows.
        """
        current = cast(
            ConfigEntry, self.hass.config_entries.async_get_entry(self.handler)
        )
        # Compare against every other non-ignored entry of the same domain.
        other_entries = [
            entry
            for entry in self.hass.config_entries.async_entries(current.domain)
            if entry is not current and entry.source != SOURCE_IGNORE
        ]
        _async_abort_entries_match(other_entries, match_dict)
|
|
|
|
|
2019-08-18 04:34:11 +00:00
|
|
|
|
2022-11-24 11:18:09 +00:00
|
|
|
class OptionsFlowWithConfigEntry(OptionsFlow):
    """Base class for options flows with config entry and options."""

    def __init__(self, config_entry: ConfigEntry) -> None:
        """Initialize options flow."""
        self._config_entry = config_entry
        # Deep copy so flow steps can freely mutate the options dict without
        # touching the config entry until the flow is finished.
        entry_options = dict(config_entry.options)
        self._options = deepcopy(entry_options)

    @property
    def config_entry(self) -> ConfigEntry:
        """Return the config entry."""
        return self._config_entry

    @property
    def options(self) -> dict[str, Any]:
        """Return a mutable copy of the config entry options."""
        return self._options
|
|
|
|
|
2022-11-24 11:18:09 +00:00
|
|
|
|
2019-08-23 00:32:43 +00:00
|
|
|
class EntityRegistryDisabledHandler:
    """Handler when entities related to config entries updated disabled_by.

    Reloads the affected config entries (debounced) when entities belonging
    to them are re-enabled, so the integration can pick the entities up.
    """

    def __init__(self, hass: HomeAssistant) -> None:
        """Initialize the handler."""
        self.hass = hass
        # Entity registry is resolved lazily on the first handled event.
        self.registry: entity_registry.EntityRegistry | None = None
        # Config entry ids queued for reload once the debounce delay expires.
        self.changed: set[str] = set()
        # Cancel callback for the currently scheduled delayed reload, if any.
        self._remove_call_later: Callable[[], None] | None = None

    @callback
    def async_setup(self) -> None:
        """Set up the disable handler."""
        # The event filter limits callbacks to relevant "disabled_by" updates
        # (see _handle_entry_updated_filter).
        self.hass.bus.async_listen(
            entity_registry.EVENT_ENTITY_REGISTRY_UPDATED,
            self._handle_entry_updated,
            event_filter=_handle_entry_updated_filter,
        )

    @callback
    def _handle_entry_updated(
        self, event: Event[entity_registry.EventEntityRegistryUpdatedData]
    ) -> None:
        """Handle entity registry entry update."""
        if self.registry is None:
            self.registry = entity_registry.async_get(self.hass)

        entity_entry = self.registry.async_get(event.data["entity_id"])

        if (
            # Stop if no entry found
            entity_entry is None
            # Stop if entry not connected to config entry
            or entity_entry.config_entry_id is None
            # Stop if the entry got disabled. In that case the entity handles it
            # themselves.
            or entity_entry.disabled_by
        ):
            return

        config_entry = self.hass.config_entries.async_get_entry(
            entity_entry.config_entry_id
        )
        assert config_entry is not None

        # Only queue config entries that support unloading; a reload would
        # otherwise not be possible.
        if config_entry.entry_id not in self.changed and config_entry.supports_unload:
            self.changed.add(config_entry.entry_id)

        if not self.changed:
            return

        # We are going to delay reloading on *every* entity registry change so that
        # if a user is happily clicking along, it will only reload at the end.

        # Reset the debounce timer: cancel any pending reload and reschedule.
        if self._remove_call_later:
            self._remove_call_later()

        self._remove_call_later = async_call_later(
            self.hass,
            RELOAD_AFTER_UPDATE_DELAY,
            HassJob(self._async_handle_reload, cancel_on_shutdown=True),
        )

    @callback
    def _async_handle_reload(self, _now: Any) -> None:
        """Handle a reload."""
        self._remove_call_later = None
        # Swap out the pending set so changes arriving during the reload
        # start a fresh batch.
        to_reload = self.changed
        self.changed = set()

        _LOGGER.info(
            (
                "Reloading configuration entries because disabled_by changed in entity"
                " registry: %s"
            ),
            ", ".join(to_reload),
        )
        for entry_id in to_reload:
            self.hass.config_entries.async_schedule_reload(entry_id)
|
2019-08-23 00:32:43 +00:00
|
|
|
|
|
|
|
|
2021-02-14 19:42:55 +00:00
|
|
|
@callback
def _handle_entry_updated_filter(
    event_data: entity_registry.EventEntityRegistryUpdatedData,
) -> bool:
    """Handle entity registry entry update filter.

    Only handle changes to "disabled_by".
    If "disabled_by" was CONFIG_ENTRY, reload is not needed.
    """
    return (
        event_data["action"] == "update"
        and "disabled_by" in event_data["changes"]
        and event_data["changes"]["disabled_by"]
        is not entity_registry.RegistryEntryDisabler.CONFIG_ENTRY
    )
|
2021-02-14 19:42:55 +00:00
|
|
|
|
|
|
|
|
2019-08-23 00:32:43 +00:00
|
|
|
async def support_entry_unload(hass: HomeAssistant, domain: str) -> bool:
    """Test if a domain supports entry unloading."""
    integration = await loader.async_get_integration(hass, domain)
    return hasattr(await integration.async_get_component(), "async_unload_entry")
|
2022-02-21 09:11:18 +00:00
|
|
|
|
|
|
|
|
|
|
|
async def support_remove_from_device(hass: HomeAssistant, domain: str) -> bool:
    """Test if a domain supports being removed from a device."""
    integration = await loader.async_get_integration(hass, domain)
    return hasattr(
        await integration.async_get_component(), "async_remove_config_entry_device"
    )
|
2023-03-22 19:10:10 +00:00
|
|
|
|
|
|
|
|
2024-02-26 18:00:33 +00:00
|
|
|
async def _support_single_config_entry_only(hass: HomeAssistant, domain: str) -> bool:
    """Test if a domain supports only a single config entry."""
    return (await loader.async_get_integration(hass, domain)).single_config_entry
|
|
|
|
|
|
|
|
|
2023-08-22 08:29:16 +00:00
|
|
|
async def _load_integration(
    hass: HomeAssistant, domain: str, hass_config: ConfigType
) -> None:
    """Load an integration and import its config_flow platform.

    Raises data_entry_flow.UnknownHandler if the integration does not exist
    or its config flow platform cannot be imported.
    """
    try:
        integration = await loader.async_get_integration(hass, domain)
    except loader.IntegrationNotFound as err:
        _LOGGER.error("Cannot find integration %s", domain)
        raise data_entry_flow.UnknownHandler from err

    # Requirements and dependencies must be resolved before the platform
    # import can succeed.
    await async_process_deps_reqs(hass, hass_config, integration)

    try:
        await integration.async_get_platform("config_flow")
    except ImportError as err:
        _LOGGER.error(
            "Error occurred loading flow for integration %s: %s", domain, err
        )
        raise data_entry_flow.UnknownHandler from err
|
2023-08-07 06:25:03 +00:00
|
|
|
|
2023-08-22 08:29:16 +00:00
|
|
|
|
|
|
|
async def _async_get_flow_handler(
    hass: HomeAssistant, domain: str, hass_config: ConfigType
) -> type[ConfigFlow]:
    """Get a flow handler for specified domain."""
    # Fast path: the config_flow module is already loaded and a handler
    # has been registered for the domain.
    if loader.is_component_module_loaded(hass, f"{domain}.config_flow"):
        registered = HANDLERS.get(domain)
        if registered is not None:
            return registered

    # Slow path: import the integration's config flow, which registers
    # the handler as a side effect.
    await _load_integration(hass, domain, hass_config)

    registered = HANDLERS.get(domain)
    if registered is not None:
        return registered

    raise data_entry_flow.UnknownHandler
|