"""Provide methods to bootstrap a Home Assistant instance."""

from __future__ import annotations

import asyncio
import contextlib
from datetime import timedelta
import logging
import logging.handlers
from operator import itemgetter
import os
import platform
import sys
import threading
from time import monotonic
from typing import TYPE_CHECKING, Any

# Import cryptography early since import openssl is not thread-safe
# _frozen_importlib._DeadlockError: deadlock detected by _ModuleLock('cryptography.hazmat.backends.openssl.backend')
import cryptography.hazmat.backends.openssl.backend  # noqa: F401
import voluptuous as vol
import yarl

from . import config as conf_util, config_entries, core, loader, requirements

# Pre-import frontend deps which have no requirements here to avoid
# loading them at run time and blocking the event loop. We do this ahead
# of time so that we do not have to flag frontend deps with `import_executor`
# as it would create a thundering herd of executor jobs trying to import
# frontend deps at the same time.
from .components import (
    api as api_pre_import,  # noqa: F401
    auth as auth_pre_import,  # noqa: F401
    config as config_pre_import,  # noqa: F401
    device_automation as device_automation_pre_import,  # noqa: F401
    diagnostics as diagnostics_pre_import,  # noqa: F401
    file_upload as file_upload_pre_import,  # noqa: F401
    http,
    lovelace as lovelace_pre_import,  # noqa: F401
    onboarding as onboarding_pre_import,  # noqa: F401
    repairs as repairs_pre_import,  # noqa: F401
    search as search_pre_import,  # noqa: F401
    system_log as system_log_pre_import,  # noqa: F401
    websocket_api as websocket_api_pre_import,  # noqa: F401
)
from .const import (
    FORMAT_DATETIME,
    KEY_DATA_LOGGING as DATA_LOGGING,
    REQUIRED_NEXT_PYTHON_HA_RELEASE,
    REQUIRED_NEXT_PYTHON_VER,
    SIGNAL_BOOTSTRAP_INTEGRATIONS,
)
from .exceptions import HomeAssistantError
from .helpers import (
    area_registry,
    config_validation as cv,
    device_registry,
    entity,
    entity_registry,
    floor_registry,
    issue_registry,
    label_registry,
    recorder,
    restore_state,
    template,
    translation,
)
from .helpers.dispatcher import async_dispatcher_send
from .helpers.typing import ConfigType
from .setup import (
    BASE_PLATFORMS,
    DATA_SETUP_STARTED,
    DATA_SETUP_TIME,
    async_notify_setup_error,
    async_set_domains_to_be_loaded,
    async_setup_component,
)
from .util.async_ import create_eager_task
from .util.logging import async_activate_log_queue_handler
from .util.package import async_get_user_site, is_virtual_env

if TYPE_CHECKING:
    from .runner import RuntimeConfig

_LOGGER = logging.getLogger(__name__)

ERROR_LOG_FILENAME = "home-assistant.log"

# hass.data key for logging information.
DATA_REGISTRIES_LOADED = "bootstrap_registries_loaded"

LOG_SLOW_STARTUP_INTERVAL = 60
SLOW_STARTUP_CHECK_INTERVAL = 1

STAGE_1_TIMEOUT = 120
STAGE_2_TIMEOUT = 300
WRAP_UP_TIMEOUT = 300
COOLDOWN_TIME = 60
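
# Note on the timeouts above: stage 1 gets STAGE_1_TIMEOUT (120s), stage 2 gets
# STAGE_2_TIMEOUT (300s), and after both stages bootstrap waits up to
# WRAP_UP_TIMEOUT (300s) for remaining setup tasks; each timeout zone is given
# a COOLDOWN_TIME (60s) cool-down. See _async_set_up_integrations below.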

MAX_LOAD_CONCURRENTLY = 6

DEBUGGER_INTEGRATIONS = {"debugpy"}
CORE_INTEGRATIONS = {"homeassistant", "persistent_notification"}
LOGGING_INTEGRATIONS = {
    # Set log levels
    "logger",
    # Error logging
    "system_log",
    "sentry",
}
FRONTEND_INTEGRATIONS = {
    # Get the frontend up and running as soon as possible so problem
    # integrations can be removed and database migration status is
    # visible in the frontend
    "frontend",
}
RECORDER_INTEGRATIONS = {
    # Set up after the frontend
    # To record data
    "recorder",
}
DISCOVERY_INTEGRATIONS = ("bluetooth", "dhcp", "ssdp", "usb", "zeroconf")
STAGE_1_INTEGRATIONS = {
    # We need to make sure discovery integrations update their deps before
    # stage 2 integrations load them inadvertently; otherwise an old version
    # of the dep is used, or worse (import errors).
    *DISCOVERY_INTEGRATIONS,
    # To make sure we forward data to other instances
    "mqtt_eventstream",
    # To provide account link implementations
    "cloud",
    # Ensure supervisor is available
    "hassio",
}
DEFAULT_INTEGRATIONS = {
    # These integrations are set up unless recovery mode is activated.
    #
    # Integrations providing core functionality:
    "application_credentials",
    "backup",
    "frontend",
    "hardware",
    "logger",
    "network",
    "system_health",
    #
    # Key features:
    "automation",
    "person",
    "scene",
    "script",
    "tag",
    "zone",
    #
    # Built-in helpers:
    "counter",
    "input_boolean",
    "input_button",
    "input_datetime",
    "input_number",
    "input_select",
    "input_text",
    "schedule",
    "timer",
}
DEFAULT_INTEGRATIONS_RECOVERY_MODE = {
    # These integrations are set up if recovery mode is activated.
    "frontend",
}
DEFAULT_INTEGRATIONS_SUPERVISOR = {
    # These integrations are set up if using the Supervisor.
    "hassio",
}
CRITICAL_INTEGRATIONS = {
    # Recovery mode is activated if these integrations fail to set up.
    "frontend",
}

SETUP_ORDER = {
    # Load logging as soon as possible
    "logging": LOGGING_INTEGRATIONS,
    # Set up the frontend
    "frontend": FRONTEND_INTEGRATIONS,
    # Set up the recorder
    "recorder": RECORDER_INTEGRATIONS,
    # Start up debuggers. Start these first in case they want to wait.
    "debugger": DEBUGGER_INTEGRATIONS,
}
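
# The groups in SETUP_ORDER are set up in dict order (logging, frontend,
# recorder, debugger) before the stage 1/stage 2 split is applied; see
# _async_set_up_integrations below, which intersects each group with the
# resolved domains before starting it.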


async def async_setup_hass(
    runtime_config: RuntimeConfig,
) -> core.HomeAssistant | None:
    """Set up Home Assistant."""
    hass = core.HomeAssistant(runtime_config.config_dir)

    async_enable_logging(
        hass,
        runtime_config.verbose,
        runtime_config.log_rotate_days,
        runtime_config.log_file,
        runtime_config.log_no_color,
    )

    hass.config.safe_mode = runtime_config.safe_mode
    hass.config.skip_pip = runtime_config.skip_pip
    hass.config.skip_pip_packages = runtime_config.skip_pip_packages
    if runtime_config.skip_pip or runtime_config.skip_pip_packages:
        _LOGGER.warning(
            "Skipping pip installation of required modules. This may cause issues"
        )

    if not await conf_util.async_ensure_config_exists(hass):
        _LOGGER.error("Error getting configuration path")
        return None

    _LOGGER.info("Config directory: %s", runtime_config.config_dir)

    loader.async_setup(hass)
    config_dict = None
    basic_setup_success = False

    if not (recovery_mode := runtime_config.recovery_mode):
        await hass.async_add_executor_job(conf_util.process_ha_config_upgrade, hass)

        try:
            config_dict = await conf_util.async_hass_config_yaml(hass)
        except HomeAssistantError as err:
            _LOGGER.error(
                "Failed to parse configuration.yaml: %s. Activating recovery mode",
                err,
            )
        else:
            if not is_virtual_env():
                await async_mount_local_lib_path(runtime_config.config_dir)

            basic_setup_success = (
                await async_from_config_dict(config_dict, hass) is not None
            )

    if config_dict is None:
        recovery_mode = True

    elif not basic_setup_success:
        _LOGGER.warning("Unable to set up core integrations. Activating recovery mode")
        recovery_mode = True

    elif any(domain not in hass.config.components for domain in CRITICAL_INTEGRATIONS):
        _LOGGER.warning(
            "Detected that %s did not load. Activating recovery mode",
            ",".join(CRITICAL_INTEGRATIONS),
        )

        # Ask integrations to shut down. It's messy but we can't
        # do a clean stop without knowing what is broken
        with contextlib.suppress(TimeoutError):
            async with hass.timeout.async_timeout(10):
                await hass.async_stop()

        recovery_mode = True
        old_config = hass.config
        old_logging = hass.data.get(DATA_LOGGING)

        hass = core.HomeAssistant(old_config.config_dir)
        if old_logging:
            hass.data[DATA_LOGGING] = old_logging
        hass.config.skip_pip = old_config.skip_pip
        hass.config.skip_pip_packages = old_config.skip_pip_packages
        hass.config.internal_url = old_config.internal_url
        hass.config.external_url = old_config.external_url
        # Set up the loader cache after the config dir has been set
        loader.async_setup(hass)

    if recovery_mode:
        _LOGGER.info("Starting in recovery mode")
        hass.config.recovery_mode = True

        http_conf = (await http.async_get_last_config(hass)) or {}

        await async_from_config_dict(
            {"recovery_mode": {}, "http": http_conf},
            hass,
        )
    elif hass.config.safe_mode:
        _LOGGER.info("Starting in safe mode")

    if runtime_config.open_ui:
        hass.add_job(open_hass_ui, hass)

    return hass
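
# A minimal usage sketch (hypothetical values; in practice the runner builds
# the RuntimeConfig and drives this coroutine):
#
#     runtime_config = RuntimeConfig(config_dir="/config", ...)
#     hass = await async_setup_hass(runtime_config)
#     if hass is None:
#         ...  # bootstrap failed before a usable instance existed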


def open_hass_ui(hass: core.HomeAssistant) -> None:
    """Open the UI."""
    import webbrowser  # pylint: disable=import-outside-toplevel

    if hass.config.api is None or "frontend" not in hass.config.components:
        _LOGGER.warning("Cannot launch the UI because the frontend is not loaded")
        return

    scheme = "https" if hass.config.api.use_ssl else "http"
    url = str(
        yarl.URL.build(scheme=scheme, host="127.0.0.1", port=hass.config.api.port)
    )

    if not webbrowser.open(url):
        _LOGGER.warning(
            "Unable to open the Home Assistant UI in a browser. Open it yourself at %s",
            url,
        )


async def async_load_base_functionality(hass: core.HomeAssistant) -> None:
    """Load the registries and cache the result of platform.uname().processor."""
    if DATA_REGISTRIES_LOADED in hass.data:
        return
    hass.data[DATA_REGISTRIES_LOADED] = None

    def _cache_uname_processor() -> None:
        """Cache the result of platform.uname().processor in the executor.

        Multiple modules call this function at startup which
        executes a blocking subprocess call. This is a problem for the
        asyncio event loop. By priming the cache of uname we can
        avoid the blocking call in the event loop.
        """
        platform.uname().processor  # pylint: disable=expression-not-assigned

    # Load the registries and cache the result of platform.uname().processor
    translation.async_setup(hass)
    entity.async_setup(hass)
    template.async_setup(hass)
    await asyncio.gather(
        create_eager_task(area_registry.async_load(hass)),
        create_eager_task(device_registry.async_load(hass)),
        create_eager_task(entity_registry.async_load(hass)),
        create_eager_task(floor_registry.async_load(hass)),
        create_eager_task(issue_registry.async_load(hass)),
        create_eager_task(label_registry.async_load(hass)),
        hass.async_add_executor_job(_cache_uname_processor),
        create_eager_task(template.async_load_custom_templates(hass)),
        create_eager_task(restore_state.async_load(hass)),
        create_eager_task(hass.config_entries.async_initialize()),
    )
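
    # The DATA_REGISTRIES_LOADED guard above makes this coroutine a no-op on
    # subsequent calls for the same instance, so callers do not need to
    # coordinate; create_eager_task starts each registry load immediately
    # instead of waiting for the next event loop iteration.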


async def async_from_config_dict(
    config: ConfigType, hass: core.HomeAssistant
) -> core.HomeAssistant | None:
    """Try to configure Home Assistant from a configuration dictionary.

    Dynamically loads required components and their dependencies.
    This method is a coroutine.
    """
    start = monotonic()

    hass.config_entries = config_entries.ConfigEntries(hass, config)
    await async_load_base_functionality(hass)

    # Set up core.
    _LOGGER.debug("Setting up %s", CORE_INTEGRATIONS)

    if not all(
        await asyncio.gather(
            *(
                create_eager_task(async_setup_component(hass, domain, config))
                for domain in CORE_INTEGRATIONS
            )
        )
    ):
        _LOGGER.error("Home Assistant core failed to initialize")
        return None

    _LOGGER.debug("Home Assistant core initialized")

    core_config = config.get(core.DOMAIN, {})

    try:
        await conf_util.async_process_ha_core_config(hass, core_config)
    except vol.Invalid as config_err:
        conf_util.async_log_schema_error(config_err, core.DOMAIN, core_config, hass)
        async_notify_setup_error(hass, core.DOMAIN)
        return None
    except HomeAssistantError:
        _LOGGER.error(
            "Home Assistant core failed to initialize. Further initialization aborted"
        )
        return None

    await _async_set_up_integrations(hass, config)

    stop = monotonic()
    _LOGGER.info("Home Assistant initialized in %.2fs", stop - start)

    if (
        REQUIRED_NEXT_PYTHON_HA_RELEASE
        and sys.version_info[:3] < REQUIRED_NEXT_PYTHON_VER
    ):
        current_python_version = ".".join(str(x) for x in sys.version_info[:3])
        required_python_version = ".".join(str(x) for x in REQUIRED_NEXT_PYTHON_VER[:2])
        _LOGGER.warning(
            (
                "Support for the running Python version %s is deprecated and "
                "will be removed in Home Assistant %s; "
                "Please upgrade Python to %s"
            ),
            current_python_version,
            REQUIRED_NEXT_PYTHON_HA_RELEASE,
            required_python_version,
        )
        issue_registry.async_create_issue(
            hass,
            core.DOMAIN,
            "python_version",
            is_fixable=False,
            severity=issue_registry.IssueSeverity.WARNING,
            breaks_in_ha_version=REQUIRED_NEXT_PYTHON_HA_RELEASE,
            translation_key="python_version",
            translation_placeholders={
                "current_python_version": current_python_version,
                "required_python_version": required_python_version,
                "breaks_in_ha_version": REQUIRED_NEXT_PYTHON_HA_RELEASE,
            },
        )

    return hass


@core.callback
def async_enable_logging(
    hass: core.HomeAssistant,
    verbose: bool = False,
    log_rotate_days: int | None = None,
    log_file: str | None = None,
    log_no_color: bool = False,
) -> None:
    """Set up the logging.

    This method must be run in the event loop.
    """
    fmt = (
        "%(asctime)s.%(msecs)03d %(levelname)s (%(threadName)s) [%(name)s] %(message)s"
    )
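
    # With this format string, a record renders roughly as (the exact date
    # portion depends on FORMAT_DATETIME):
    #   2024-03-03 12:00:00.123 WARNING (MainThread) [homeassistant.bootstrap] message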

    if not log_no_color:
        try:
            # pylint: disable-next=import-outside-toplevel
            from colorlog import ColoredFormatter

            # basicConfig must be called after importing colorlog in order to
            # ensure that the handlers it sets up wrap the correct streams.
            logging.basicConfig(level=logging.INFO)

            colorfmt = f"%(log_color)s{fmt}%(reset)s"
            logging.getLogger().handlers[0].setFormatter(
                ColoredFormatter(
                    colorfmt,
                    datefmt=FORMAT_DATETIME,
                    reset=True,
                    log_colors={
                        "DEBUG": "cyan",
                        "INFO": "green",
                        "WARNING": "yellow",
                        "ERROR": "red",
                        "CRITICAL": "red",
                    },
                )
            )
        except ImportError:
            pass

    # If the above initialization failed for any reason, set up the default
    # formatting. If the above succeeded, this will result in a no-op.
    logging.basicConfig(format=fmt, datefmt=FORMAT_DATETIME, level=logging.INFO)

    # Capture messages from warnings.warn(...) and friends in the logs.
    # The standard destination for them is stderr, which may end up unnoticed.
    # This way they're where other messages are, and can be filtered as usual.
    logging.captureWarnings(True)

    # Suppress overly verbose logs from libraries that aren't helpful
    logging.getLogger("requests").setLevel(logging.WARNING)
    logging.getLogger("urllib3").setLevel(logging.WARNING)
    logging.getLogger("aiohttp.access").setLevel(logging.WARNING)
    logging.getLogger("httpx").setLevel(logging.WARNING)

    sys.excepthook = lambda *args: logging.getLogger(None).exception(
        "Uncaught exception", exc_info=args
    )
    threading.excepthook = lambda args: logging.getLogger(None).exception(
        "Uncaught thread exception",
        exc_info=(  # type: ignore[arg-type]
            args.exc_type,
            args.exc_value,
            args.exc_traceback,
        ),
    )
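
    # Together these hooks route uncaught exceptions from both the main thread
    # (sys.excepthook) and worker threads (threading.excepthook) through the
    # root logger, so they also reach the error log file configured below.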

    # Log errors to a file if we have write access to the file or config dir
    if log_file is None:
        err_log_path = hass.config.path(ERROR_LOG_FILENAME)
    else:
        err_log_path = os.path.abspath(log_file)

    err_path_exists = os.path.isfile(err_log_path)
    err_dir = os.path.dirname(err_log_path)

    # Check if we can write to the error log if it exists or that
    # we can create files in the containing directory if not.
    if (err_path_exists and os.access(err_log_path, os.W_OK)) or (
        not err_path_exists and os.access(err_dir, os.W_OK)
    ):
        err_handler: (
            logging.handlers.RotatingFileHandler
            | logging.handlers.TimedRotatingFileHandler
        )
        if log_rotate_days:
            err_handler = logging.handlers.TimedRotatingFileHandler(
                err_log_path, when="midnight", backupCount=log_rotate_days
            )
        else:
            err_handler = logging.handlers.RotatingFileHandler(
                err_log_path, backupCount=1
            )

        try:
            err_handler.doRollover()
        except OSError as err:
            _LOGGER.error("Error rolling over log file: %s", err)

        err_handler.setLevel(logging.INFO if verbose else logging.WARNING)
        err_handler.setFormatter(logging.Formatter(fmt, datefmt=FORMAT_DATETIME))

        logger = logging.getLogger("")
        logger.addHandler(err_handler)
        logger.setLevel(logging.INFO if verbose else logging.WARNING)

        # Save the log file location for access by other components.
        hass.data[DATA_LOGGING] = err_log_path
    else:
        _LOGGER.error("Unable to set up error log %s (access denied)", err_log_path)

    async_activate_log_queue_handler(hass)


async def async_mount_local_lib_path(config_dir: str) -> str:
    """Add local library to the Python path.

    This function is a coroutine.
    """
    deps_dir = os.path.join(config_dir, "deps")
    if (lib_dir := await async_get_user_site(deps_dir)) not in sys.path:
        sys.path.insert(0, lib_dir)
    return deps_dir
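
# For example, with config_dir "/config" this prepends the user-site directory
# that async_get_user_site resolves under "/config/deps" (the exact layout is
# platform dependent) to sys.path, so requirements pip-installed into the deps
# dir become importable.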


@core.callback
def _get_domains(hass: core.HomeAssistant, config: dict[str, Any]) -> set[str]:
    """Get domains of components to set up."""
    # Filter out the repeating and common config section [homeassistant]
    domains = {
        domain for key in config if (domain := cv.domain_key(key)) != core.DOMAIN
    }

    # Add config entry and default domains
    if not hass.config.recovery_mode:
        domains.update(DEFAULT_INTEGRATIONS)
        domains.update(hass.config_entries.async_domains())
    else:
        domains.update(DEFAULT_INTEGRATIONS_RECOVERY_MODE)

    # Add domains depending on if the Supervisor is used or not
    if "SUPERVISOR" in os.environ:
        domains.update(DEFAULT_INTEGRATIONS_SUPERVISOR)

    return domains
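
# For example (assuming cv.domain_key maps a labeled key such as
# "template kitchen" to its domain "template"), a config with the keys
# {"homeassistant", "automation", "template kitchen"} yields
# {"automation", "template"}; DEFAULT_INTEGRATIONS and the config entry
# domains are then merged in unless recovery mode is active.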


class _WatchPendingSetups:
    """Periodically log and dispatch the setups that are pending."""

    def __init__(
        self, hass: core.HomeAssistant, setup_started: dict[str, float]
    ) -> None:
        """Initialize the WatchPendingSetups class."""
        self._hass = hass
        self._setup_started = setup_started
        self._duration_count = 0
        self._handle: asyncio.TimerHandle | None = None
        self._previous_was_empty = True
        self._loop = hass.loop

    def _async_watch(self) -> None:
        """Periodically log the setups that are pending."""
        now = monotonic()
        self._duration_count += SLOW_STARTUP_CHECK_INTERVAL

        remaining_with_setup_started = {
            domain: (now - start_time)
            for domain, start_time in self._setup_started.items()
        }
        _LOGGER.debug("Integrations remaining: %s", remaining_with_setup_started)
        self._async_dispatch(remaining_with_setup_started)
        if (
            self._setup_started
            and self._duration_count % LOG_SLOW_STARTUP_INTERVAL == 0
        ):
            # We log once every LOG_SLOW_STARTUP_INTERVAL (60s) of startup time
            # until all integrations are done setting up
            _LOGGER.warning(
                "Waiting on integrations to complete setup: %s",
                ", ".join(self._setup_started),
            )

        _LOGGER.debug("Running timeout zones: %s", self._hass.timeout.zones)
        self._async_schedule_next()

    def _async_dispatch(self, remaining_with_setup_started: dict[str, float]) -> None:
        """Dispatch the signal."""
        if remaining_with_setup_started or not self._previous_was_empty:
            async_dispatcher_send(
                self._hass, SIGNAL_BOOTSTRAP_INTEGRATIONS, remaining_with_setup_started
            )
        self._previous_was_empty = not remaining_with_setup_started

    def _async_schedule_next(self) -> None:
        """Schedule the next call."""
        self._handle = self._loop.call_later(
            SLOW_STARTUP_CHECK_INTERVAL, self._async_watch
        )

    def async_start(self) -> None:
        """Start watching."""
        self._async_schedule_next()

    def async_stop(self) -> None:
        """Stop watching."""
        self._async_dispatch({})
        if self._handle:
            self._handle.cancel()
            self._handle = None
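
# Lifecycle sketch, as used by _async_set_up_integrations below: async_start()
# schedules _async_watch() every SLOW_STARTUP_CHECK_INTERVAL (1s) via
# loop.call_later, and async_stop() sends one final empty dispatch so listeners
# of SIGNAL_BOOTSTRAP_INTEGRATIONS can clear their state:
#
#     watcher = _WatchPendingSetups(hass, setup_started)
#     watcher.async_start()
#     ...  # run the staged setup
#     watcher.async_stop()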


async def async_setup_multi_components(
    hass: core.HomeAssistant,
    domains: set[str],
    config: dict[str, Any],
) -> None:
    """Set up multiple domains. Log on failure."""
    # Avoid creating tasks for domains that were set up in a previous stage
    domains_not_yet_setup = domains - hass.config.components
    futures = {
        domain: hass.async_create_task(
            async_setup_component(hass, domain, config),
            f"setup component {domain}",
            eager_start=True,
        )
        for domain in domains_not_yet_setup
    }
    results = await asyncio.gather(*futures.values(), return_exceptions=True)
    for idx, domain in enumerate(futures):
        result = results[idx]
        if isinstance(result, BaseException):
            _LOGGER.error(
                "Error setting up integration %s - received exception",
                domain,
                exc_info=(type(result), result, result.__traceback__),
            )
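
# Note: gather(..., return_exceptions=True) keeps one failing domain from
# cancelling the others; the results list is aligned with the insertion order
# of `futures`, which is why enumerate() over the dict recovers the matching
# domain for each result.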


async def _async_resolve_domains_to_setup(
    hass: core.HomeAssistant, config: dict[str, Any]
) -> tuple[set[str], dict[str, loader.Integration]]:
    """Resolve all dependencies and return the set of domains to set up."""
    base_platforms_loaded = False
    domains_to_setup = _get_domains(hass, config)
    needed_requirements: set[str] = set()
    platform_integrations = conf_util.extract_platform_integrations(
        config, BASE_PLATFORMS
    )

    # Resolve all dependencies so we know all integrations
    # that will have to be loaded and start right away
    integration_cache: dict[str, loader.Integration] = {}
    to_resolve: set[str] = domains_to_setup
    while to_resolve:
        old_to_resolve: set[str] = to_resolve
        to_resolve = set()

        if not base_platforms_loaded:
            # Load base platforms right away since
            # we do not require the manifest to list
            # them as dependencies and we want
            # to avoid the lock contention when multiple
            # integrations try to resolve them at once
            base_platforms_loaded = True
            to_get = {*old_to_resolve, *BASE_PLATFORMS, *platform_integrations}
        else:
            to_get = old_to_resolve

        manifest_deps: set[str] = set()
        resolve_dependencies_tasks: list[asyncio.Task[bool]] = []
        integrations_to_process: list[loader.Integration] = []

        for domain, itg in (await loader.async_get_integrations(hass, to_get)).items():
            if not isinstance(itg, loader.Integration):
                continue
            integration_cache[domain] = itg
            needed_requirements.update(itg.requirements)
            if domain not in old_to_resolve:
                continue

            integrations_to_process.append(itg)
            manifest_deps.update(itg.dependencies)
            manifest_deps.update(itg.after_dependencies)
            if not itg.all_dependencies_resolved:
                resolve_dependencies_tasks.append(
                    create_eager_task(
                        itg.resolve_dependencies(),
                        name=f"resolve dependencies {domain}",
                        loop=hass.loop,
                    )
                )

        if unseen_deps := manifest_deps - integration_cache.keys():
            # If there are dependencies, try to preload all
            # the integration manifests at once and add them
            # to the list of requirements we need to install
            # so we can try to check if they are already installed
            # in a single call below, which avoids each integration
            # having to wait for the lock to do it individually
            deps = await loader.async_get_integrations(hass, unseen_deps)
            for dependant_domain, dependant_itg in deps.items():
                if isinstance(dependant_itg, loader.Integration):
                    integration_cache[dependant_domain] = dependant_itg
                    needed_requirements.update(dependant_itg.requirements)

        if resolve_dependencies_tasks:
            await asyncio.gather(*resolve_dependencies_tasks)

        for itg in integrations_to_process:
            for dep in itg.all_dependencies:
                if dep in domains_to_setup:
                    continue
                domains_to_setup.add(dep)
                to_resolve.add(dep)

    _LOGGER.info("Domains to be set up: %s", domains_to_setup)

    # Optimistically check if requirements are already installed
    # ahead of setting up the integrations so we can prime the cache.
    # We do not wait for this since it is an optimization only
    hass.async_create_background_task(
        requirements.async_load_installed_versions(hass, needed_requirements),
        "check installed requirements",
        eager_start=True,
    )
    # Start loading translations for all integrations we are going to set up
    # in the background so they are ready when we need them. This avoids a
    # lot of waiting for the translation load lock and a thundering herd of
    # tasks trying to load the same translations at the same time as each
    # integration is loaded.
    #
    # We do not wait for this since as soon as the task runs it will
    # hold the translation load lock and if anything is fast enough to
    # wait for the translation load lock, loading will be done by the
    # time it gets to it.
    hass.async_create_background_task(
        translation.async_load_integrations(
            hass, {*BASE_PLATFORMS, *platform_integrations, *domains_to_setup}
        ),
        "load translations",
        eager_start=True,
    )

    return domains_to_setup, integration_cache


async def _async_set_up_integrations(
    hass: core.HomeAssistant, config: dict[str, Any]
) -> None:
    """Set up all the integrations."""
    setup_started: dict[str, float] = {}
    hass.data[DATA_SETUP_STARTED] = setup_started
    setup_time: dict[str, timedelta] = hass.data.setdefault(DATA_SETUP_TIME, {})

    watcher = _WatchPendingSetups(hass, setup_started)
    watcher.async_start()

    domains_to_setup, integration_cache = await _async_resolve_domains_to_setup(
        hass, config
    )

    # Initialize recorder
    if "recorder" in domains_to_setup:
        recorder.async_initialize_recorder(hass)

    pre_stage_domains: dict[str, set[str]] = {
        name: domains_to_setup & domain_group
        for name, domain_group in SETUP_ORDER.items()
    }
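
    # For example, if domains_to_setup contains "logger" and "frontend" but no
    # recorder or debugger, this yields
    # {"logging": {"logger"}, "frontend": {"frontend"}, "recorder": set(),
    #  "debugger": set()}; empty groups are skipped in the loop below.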

    # Calculate which components to set up in which stage
    stage_1_domains: set[str] = set()

    # Find all dependencies of any dependency of any stage 1 integration that
    # we plan on loading and promote them to stage 1. This is done only to
    # avoid misleading log messages
    deps_promotion: set[str] = STAGE_1_INTEGRATIONS
    while deps_promotion:
        old_deps_promotion = deps_promotion
        deps_promotion = set()

        for domain in old_deps_promotion:
            if domain not in domains_to_setup or domain in stage_1_domains:
                continue

            stage_1_domains.add(domain)

            if (dep_itg := integration_cache.get(domain)) is None:
                continue

            deps_promotion.update(dep_itg.all_dependencies)
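
    # This loop runs to a fixed point: e.g. if a stage 1 integration such as
    # "cloud" is being set up and depends on "http", then "http" (and in turn
    # everything "http" depends on) is promoted into stage 1 as well.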

    stage_2_domains = domains_to_setup - stage_1_domains

    for name, domain_group in pre_stage_domains.items():
        if domain_group:
            stage_2_domains -= domain_group
            _LOGGER.info("Setting up %s: %s", name, domain_group)
            await async_setup_multi_components(hass, domain_group, config)

    # Enable after-dependencies when setting up stage 1 domains
    async_set_domains_to_be_loaded(hass, stage_1_domains)

    # Start setup
    if stage_1_domains:
        _LOGGER.info("Setting up stage 1: %s", stage_1_domains)
        try:
            async with hass.timeout.async_timeout(
                STAGE_1_TIMEOUT, cool_down=COOLDOWN_TIME
            ):
                await async_setup_multi_components(hass, stage_1_domains, config)
        except TimeoutError:
            _LOGGER.warning("Setup timed out for stage 1 - moving forward")

    # Add after-dependencies when setting up stage 2 domains
    async_set_domains_to_be_loaded(hass, stage_2_domains)

    if stage_2_domains:
        _LOGGER.info("Setting up stage 2: %s", stage_2_domains)
        try:
            async with hass.timeout.async_timeout(
                STAGE_2_TIMEOUT, cool_down=COOLDOWN_TIME
            ):
                await async_setup_multi_components(hass, stage_2_domains, config)
        except TimeoutError:
            _LOGGER.warning("Setup timed out for stage 2 - moving forward")

    # Wrap up startup
    _LOGGER.debug("Waiting for startup to wrap up")
    try:
        async with hass.timeout.async_timeout(WRAP_UP_TIMEOUT, cool_down=COOLDOWN_TIME):
            await hass.async_block_till_done()
    except TimeoutError:
        _LOGGER.warning("Setup timed out for bootstrap - moving forward")

    watcher.async_stop()

    _LOGGER.debug(
        "Integration setup times: %s",
        dict(sorted(setup_time.items(), key=itemgetter(1))),
    )