2017-04-30 05:04:49 +00:00
|
|
|
"""Provide methods to bootstrap a Home Assistant instance."""
|
2019-03-27 14:06:20 +00:00
|
|
|
import asyncio
|
2020-02-18 19:52:38 +00:00
|
|
|
import contextlib
|
2020-06-20 00:24:33 +00:00
|
|
|
from datetime import datetime
|
2013-10-22 05:06:22 +00:00
|
|
|
import logging
|
2015-09-04 22:22:42 +00:00
|
|
|
import logging.handlers
|
2015-11-15 10:05:46 +00:00
|
|
|
import os
|
|
|
|
import sys
|
2020-02-13 21:57:07 +00:00
|
|
|
from time import monotonic
|
2020-07-06 22:58:53 +00:00
|
|
|
from typing import TYPE_CHECKING, Any, Dict, Optional, Set
|
2013-10-13 17:42:22 +00:00
|
|
|
|
2020-02-18 19:52:38 +00:00
|
|
|
from async_timeout import timeout
|
2016-03-28 01:48:51 +00:00
|
|
|
import voluptuous as vol
|
2020-07-06 22:58:53 +00:00
|
|
|
import yarl
|
2016-03-28 01:48:51 +00:00
|
|
|
|
2019-12-09 15:42:10 +00:00
|
|
|
from homeassistant import config as conf_util, config_entries, core, loader
|
2020-01-14 21:03:02 +00:00
|
|
|
from homeassistant.components import http
|
2019-12-13 09:39:57 +00:00
|
|
|
from homeassistant.const import (
|
2020-02-18 19:52:38 +00:00
|
|
|
EVENT_HOMEASSISTANT_STOP,
|
2019-12-13 09:39:57 +00:00
|
|
|
REQUIRED_NEXT_PYTHON_DATE,
|
|
|
|
REQUIRED_NEXT_PYTHON_VER,
|
|
|
|
)
|
2019-12-09 15:42:10 +00:00
|
|
|
from homeassistant.exceptions import HomeAssistantError
|
2020-03-14 10:39:28 +00:00
|
|
|
from homeassistant.helpers.typing import ConfigType
|
2020-06-20 00:24:33 +00:00
|
|
|
from homeassistant.setup import (
|
|
|
|
DATA_SETUP,
|
|
|
|
DATA_SETUP_STARTED,
|
|
|
|
async_set_domains_to_be_loaded,
|
|
|
|
async_setup_component,
|
|
|
|
)
|
2020-05-16 14:29:58 +00:00
|
|
|
from homeassistant.util.logging import async_activate_log_queue_handler
|
2018-06-16 14:48:41 +00:00
|
|
|
from homeassistant.util.package import async_get_user_site, is_virtual_env
|
2016-08-20 19:39:56 +00:00
|
|
|
from homeassistant.util.yaml import clear_secret_cache
|
2014-01-24 05:34:08 +00:00
|
|
|
|
2020-07-06 22:58:53 +00:00
|
|
|
if TYPE_CHECKING:
    # Imported only for type checking to avoid a runtime import cycle.
    from .runner import RuntimeConfig


_LOGGER = logging.getLogger(__name__)

# File name of the error log written inside the config directory.
ERROR_LOG_FILENAME = "home-assistant.log"

# hass.data key for logging information.
DATA_LOGGING = "logging"

# Seconds between warnings about integrations whose setup is still pending.
LOG_SLOW_STARTUP_INTERVAL = 60

# Debugger integrations set up before everything else, in case they need to
# pause startup and wait for a debugger to attach.
DEBUGGER_INTEGRATIONS = {"debugpy", "ptvsd"}

# Core integrations set up before any other integration.
CORE_INTEGRATIONS = ("homeassistant", "persistent_notification")

# Integrations set up right after core so logging/recording is active
# while the rest of the integrations load.
LOGGING_INTEGRATIONS = {
    # Set log levels
    "logger",
    # Error logging
    "system_log",
    "sentry",
    # To record data
    "recorder",
}

# Integrations loaded in the first setup stage, ahead of everything else.
STAGE_1_INTEGRATIONS = {
    # To make sure we forward data to other instances
    "mqtt_eventstream",
    # To provide account link implementations
    "cloud",
    # Ensure supervisor is available
    "hassio",
    # Get the frontend up and running as soon
    # as possible so problem integrations can
    # be removed
    "frontend",
}
|
2015-05-12 05:23:20 +00:00
|
|
|
|
2015-01-09 08:07:58 +00:00
|
|
|
|
2020-01-14 21:03:02 +00:00
|
|
|
async def async_setup_hass(
    runtime_config: "RuntimeConfig",
) -> Optional[core.HomeAssistant]:
    """Set up Home Assistant.

    Creates the HomeAssistant instance, enables logging, loads the YAML
    configuration and sets up all integrations. Falls back to safe mode
    (config + http + frontend only) when the configuration cannot be parsed,
    core setup fails, or the frontend did not load.

    Returns the running HomeAssistant instance, or None when the
    configuration directory could not be prepared.
    """
    hass = core.HomeAssistant()
    hass.config.config_dir = runtime_config.config_dir

    # Logging is enabled first so everything below is captured.
    async_enable_logging(
        hass,
        runtime_config.verbose,
        runtime_config.log_rotate_days,
        runtime_config.log_file,
        runtime_config.log_no_color,
    )

    hass.config.skip_pip = runtime_config.skip_pip
    if runtime_config.skip_pip:
        _LOGGER.warning(
            "Skipping pip installation of required modules. This may cause issues"
        )

    # Make sure a configuration file exists (creates a default one if needed).
    if not await conf_util.async_ensure_config_exists(hass):
        _LOGGER.error("Error getting configuration path")
        return None

    _LOGGER.info("Config directory: %s", runtime_config.config_dir)

    config_dict = None
    basic_setup_success = False
    safe_mode = runtime_config.safe_mode

    if not safe_mode:
        # Config upgrade touches the filesystem, so run it in an executor.
        await hass.async_add_executor_job(conf_util.process_ha_config_upgrade, hass)

        try:
            config_dict = await conf_util.async_hass_config_yaml(hass)
        except HomeAssistantError as err:
            _LOGGER.error(
                "Failed to parse configuration.yaml: %s. Activating safe mode", err,
            )
        else:
            # Outside a venv, mount the user-site deps dir onto sys.path.
            if not is_virtual_env():
                await async_mount_local_lib_path(runtime_config.config_dir)

            basic_setup_success = (
                await async_from_config_dict(config_dict, hass) is not None
            )
        finally:
            # Secrets are cached during YAML load; drop them either way.
            clear_secret_cache()

    if config_dict is None:
        # YAML failed to parse (or safe mode was requested up front).
        safe_mode = True

    elif not basic_setup_success:
        _LOGGER.warning("Unable to set up core integrations. Activating safe mode")
        safe_mode = True

    elif (
        # Frontend was scheduled for setup but never finished loading.
        "frontend" in hass.data.get(DATA_SETUP, {})
        and "frontend" not in hass.config.components
    ):
        _LOGGER.warning("Detected that frontend did not load. Activating safe mode")

        # Ask integrations to shut down. It's messy but we can't
        # do a clean stop without knowing what is broken
        hass.async_track_tasks()
        hass.bus.async_fire(EVENT_HOMEASSISTANT_STOP, {})
        with contextlib.suppress(asyncio.TimeoutError):
            async with timeout(10):
                await hass.async_block_till_done()

        safe_mode = True
        # Start over with a fresh instance, carrying over only the
        # configuration values needed to reach the UI again.
        old_config = hass.config
        hass = core.HomeAssistant()
        hass.config.skip_pip = old_config.skip_pip
        hass.config.internal_url = old_config.internal_url
        hass.config.external_url = old_config.external_url
        hass.config.config_dir = old_config.config_dir

    if safe_mode:
        _LOGGER.info("Starting in safe mode")
        hass.config.safe_mode = True

        # Reuse the last known working HTTP configuration so the UI stays
        # reachable at the same host/port/SSL settings.
        http_conf = (await http.async_get_last_config(hass)) or {}

        await async_from_config_dict(
            {"safe_mode": {}, "http": http_conf}, hass,
        )

    if runtime_config.open_ui:
        hass.add_job(open_hass_ui, hass)

    return hass
|
|
|
|
|
|
|
|
|
2020-07-06 22:58:53 +00:00
|
|
|
def open_hass_ui(hass: core.HomeAssistant) -> None:
    """Open the UI."""
    import webbrowser  # pylint: disable=import-outside-toplevel

    # Without a running HTTP API and a loaded frontend there is nothing to open.
    api = hass.config.api
    if api is None or "frontend" not in hass.config.components:
        _LOGGER.warning("Cannot launch the UI because frontend not loaded")
        return

    if api.use_ssl:
        scheme = "https"
    else:
        scheme = "http"
    ui_url = str(yarl.URL.build(scheme=scheme, host="127.0.0.1", port=api.port))

    # webbrowser.open returns False when no browser could be launched.
    if webbrowser.open(ui_url):
        return
    _LOGGER.warning(
        "Unable to open the Home Assistant UI in a browser. Open it yourself at %s",
        ui_url,
    )
|
|
|
|
|
|
|
|
|
2020-01-14 21:03:02 +00:00
|
|
|
async def async_from_config_dict(
    config: ConfigType, hass: core.HomeAssistant
) -> Optional[core.HomeAssistant]:
    """Try to configure Home Assistant from a configuration dictionary.

    Dynamically loads required components and its dependencies.
    This method is a coroutine.

    Returns the hass instance on success, or None when the core
    integrations or the core config section fail to set up.
    """
    start = monotonic()

    hass.config_entries = config_entries.ConfigEntries(hass, config)
    await hass.config_entries.async_initialize()

    # Set up core.
    _LOGGER.debug("Setting up %s", CORE_INTEGRATIONS)

    if not all(
        await asyncio.gather(
            *(
                async_setup_component(hass, domain, config)
                for domain in CORE_INTEGRATIONS
            )
        )
    ):
        # Fixed: message previously ended with a stray ". " (trailing space).
        _LOGGER.error("Home Assistant core failed to initialize")
        return None

    _LOGGER.debug("Home Assistant core initialized")

    # The [homeassistant] section of the configuration.
    core_config = config.get(core.DOMAIN, {})

    try:
        await conf_util.async_process_ha_core_config(hass, core_config)
    except vol.Invalid as config_err:
        conf_util.async_log_exception(config_err, "homeassistant", core_config, hass)
        return None
    except HomeAssistantError:
        _LOGGER.error(
            "Home Assistant core failed to initialize. "
            "Further initialization aborted"
        )
        return None

    # Set up all remaining integrations (staged, with dependency resolution).
    await _async_set_up_integrations(hass, config)

    stop = monotonic()
    _LOGGER.info("Home Assistant initialized in %.2fs", stop - start)

    # Warn when running on a Python version that is scheduled for removal.
    if REQUIRED_NEXT_PYTHON_DATE and sys.version_info[:3] < REQUIRED_NEXT_PYTHON_VER:
        msg = (
            "Support for the running Python version "
            f"{'.'.join(str(x) for x in sys.version_info[:3])} is deprecated and will "
            f"be removed in the first release after {REQUIRED_NEXT_PYTHON_DATE}. "
            "Please upgrade Python to "
            f"{'.'.join(str(x) for x in REQUIRED_NEXT_PYTHON_VER)} or "
            "higher."
        )
        _LOGGER.warning(msg)
        hass.components.persistent_notification.async_create(
            msg, "Python version", "python_version"
        )

    return hass
|
2013-10-22 05:06:22 +00:00
|
|
|
|
2014-01-24 01:46:29 +00:00
|
|
|
|
2017-02-13 05:24:07 +00:00
|
|
|
@core.callback
def async_enable_logging(
    hass: core.HomeAssistant,
    verbose: bool = False,
    log_rotate_days: Optional[int] = None,
    log_file: Optional[str] = None,
    log_no_color: bool = False,
) -> None:
    """Set up the logging.

    This method must be run in the event loop.

    Configures the root logger with colored console output (when colorlog is
    available and log_no_color is False), installs a global excepthook, and
    attaches a file handler for errors when the target path is writable.

    hass: instance whose config dir hosts the default error log.
    verbose: log at INFO instead of WARNING.
    log_rotate_days: rotate the error log nightly, keeping this many backups.
    log_file: explicit error-log path; defaults to ERROR_LOG_FILENAME in the
        config dir.
    log_no_color: skip the colorlog console formatter.
    """
    fmt = "%(asctime)s %(levelname)s (%(threadName)s) [%(name)s] %(message)s"
    datefmt = "%Y-%m-%d %H:%M:%S"

    if not log_no_color:
        try:
            # pylint: disable=import-outside-toplevel
            from colorlog import ColoredFormatter

            # basicConfig must be called after importing colorlog in order to
            # ensure that the handlers it sets up wraps the correct streams.
            logging.basicConfig(level=logging.INFO)

            colorfmt = f"%(log_color)s{fmt}%(reset)s"
            logging.getLogger().handlers[0].setFormatter(
                ColoredFormatter(
                    colorfmt,
                    datefmt=datefmt,
                    reset=True,
                    log_colors={
                        "DEBUG": "cyan",
                        "INFO": "green",
                        "WARNING": "yellow",
                        "ERROR": "red",
                        "CRITICAL": "red",
                    },
                )
            )
        except ImportError:
            # colorlog is optional; fall through to plain formatting below.
            pass

    # If the above initialization failed for any reason, setup the default
    # formatting. If the above succeeds, this will result in a no-op.
    logging.basicConfig(format=fmt, datefmt=datefmt, level=logging.INFO)

    # Suppress overly verbose logs from libraries that aren't helpful
    logging.getLogger("requests").setLevel(logging.WARNING)
    logging.getLogger("urllib3").setLevel(logging.WARNING)
    logging.getLogger("aiohttp.access").setLevel(logging.WARNING)

    # Route any uncaught exception through the root logger instead of stderr.
    sys.excepthook = lambda *args: logging.getLogger(None).exception(
        "Uncaught exception", exc_info=args  # type: ignore
    )

    # Log errors to a file if we have write access to file or config dir
    if log_file is None:
        err_log_path = hass.config.path(ERROR_LOG_FILENAME)
    else:
        err_log_path = os.path.abspath(log_file)

    err_path_exists = os.path.isfile(err_log_path)
    err_dir = os.path.dirname(err_log_path)

    # Check if we can write to the error log if it exists or that
    # we can create files in the containing directory if not.
    if (err_path_exists and os.access(err_log_path, os.W_OK)) or (
        not err_path_exists and os.access(err_dir, os.W_OK)
    ):

        if log_rotate_days:
            # Nightly rotation, keeping log_rotate_days backups.
            err_handler: logging.FileHandler = logging.handlers.TimedRotatingFileHandler(
                err_log_path, when="midnight", backupCount=log_rotate_days
            )
        else:
            # delay=True: don't create the file until something is logged.
            err_handler = logging.FileHandler(err_log_path, mode="w", delay=True)

        err_handler.setLevel(logging.INFO if verbose else logging.WARNING)
        err_handler.setFormatter(logging.Formatter(fmt, datefmt=datefmt))

        logger = logging.getLogger("")
        logger.addHandler(err_handler)
        logger.setLevel(logging.INFO if verbose else logging.WARNING)

        # Save the log file location for access by other components.
        hass.data[DATA_LOGGING] = err_log_path
    else:
        _LOGGER.error("Unable to set up error log %s (access denied)", err_log_path)

    # Move log handling onto a queue so logging never blocks the event loop.
    async_activate_log_queue_handler(hass)
|
|
|
|
|
2015-01-30 16:26:06 +00:00
|
|
|
|
2018-06-16 14:48:41 +00:00
|
|
|
async def async_mount_local_lib_path(config_dir: str) -> str:
    """Add local library to Python Path.

    This function is a coroutine.
    """
    dependencies_dir = os.path.join(config_dir, "deps")
    # Resolve the user-site directory inside the deps dir.
    user_site = await async_get_user_site(dependencies_dir)
    already_mounted = user_site in sys.path
    if not already_mounted:
        sys.path.insert(0, user_site)
    return dependencies_dir
|
2019-03-19 18:33:50 +00:00
|
|
|
|
|
|
|
|
|
|
|
@core.callback
def _get_domains(hass: core.HomeAssistant, config: Dict[str, Any]) -> Set[str]:
    """Get domains of components to set up."""
    # Filter out the repeating and common config section [homeassistant]
    domains: Set[str] = set()
    for key in config:
        if key != core.DOMAIN:
            domains.add(key.split(" ")[0])

    # Add config entry domains
    if not hass.config.safe_mode:
        for entry_domain in hass.config_entries.async_domains():
            domains.add(entry_domain)

    # Make sure the Hass.io component is loaded
    if os.environ.get("HASSIO") is not None:
        domains.add("hassio")

    return domains
|
2019-04-16 20:40:21 +00:00
|
|
|
|
|
|
|
|
2020-06-20 00:24:33 +00:00
|
|
|
async def _async_log_pending_setups(
    domains: Set[str], setup_started: Dict[str, datetime]
) -> None:
    """Periodic log of setups that are pending for longer than LOG_SLOW_STARTUP_INTERVAL."""
    # Runs until cancelled by the caller.
    while True:
        await asyncio.sleep(LOG_SLOW_STARTUP_INTERVAL)

        still_pending = []
        for domain in domains:
            if domain in setup_started:
                still_pending.append(domain)

        if not still_pending:
            continue

        _LOGGER.warning(
            "Waiting on integrations to complete setup: %s", ", ".join(still_pending),
        )
|
|
|
|
|
|
|
|
|
|
|
|
async def async_setup_multi_components(
    hass: core.HomeAssistant,
    domains: Set[str],
    config: Dict[str, Any],
    setup_started: Dict[str, datetime],
) -> None:
    """Set up multiple domains. Log on failure.

    hass: instance to set the domains up on.
    domains: integration domains to set up concurrently.
    config: full configuration dict passed to each component setup.
    setup_started: shared map of in-progress setups, used by the slow-start
        warning task.
    """
    # Robustness fix: asyncio.wait() raises ValueError on an empty set of
    # awaitables, so bail out early when there is nothing to set up.
    if not domains:
        return

    futures = {
        domain: hass.async_create_task(async_setup_component(hass, domain, config))
        for domain in domains
    }
    # Warn periodically while any of these domains is still setting up.
    log_task = asyncio.create_task(_async_log_pending_setups(domains, setup_started))
    await asyncio.wait(futures.values())
    log_task.cancel()

    # Report every domain whose setup task raised.
    errors = [domain for domain in domains if futures[domain].exception()]
    for domain in errors:
        exception = futures[domain].exception()
        assert exception is not None
        _LOGGER.error(
            "Error setting up integration %s - received exception",
            domain,
            exc_info=(type(exception), exception, exception.__traceback__),
        )
|
|
|
|
|
|
|
|
|
2019-04-16 20:40:21 +00:00
|
|
|
async def _async_set_up_integrations(
    hass: core.HomeAssistant, config: Dict[str, Any]
) -> None:
    """Set up all the integrations.

    Resolves the full dependency closure of the configured domains, then
    sets them up in order: logging integrations, debuggers, stage 1
    (frontend/cloud/supervisor and their dependencies), and finally stage 2
    (everything else).
    """
    # Shared in-progress map consumed by the slow-startup warning task.
    setup_started = hass.data[DATA_SETUP_STARTED] = {}
    domains_to_setup = _get_domains(hass, config)

    # Resolve all dependencies so we know all integrations
    # that will have to be loaded and start rightaway
    integration_cache: Dict[str, loader.Integration] = {}
    to_resolve = domains_to_setup
    # Fixed-point loop: keep resolving until no new dependencies appear.
    while to_resolve:
        old_to_resolve = to_resolve
        to_resolve = set()

        # Load integration manifests concurrently; skip domains that fail
        # to load (return_exceptions keeps one failure from aborting all).
        integrations_to_process = [
            int_or_exc
            for int_or_exc in await asyncio.gather(
                *(
                    loader.async_get_integration(hass, domain)
                    for domain in old_to_resolve
                ),
                return_exceptions=True,
            )
            if isinstance(int_or_exc, loader.Integration)
        ]
        resolve_dependencies_tasks = [
            itg.resolve_dependencies()
            for itg in integrations_to_process
            if not itg.all_dependencies_resolved
        ]

        if resolve_dependencies_tasks:
            await asyncio.gather(*resolve_dependencies_tasks)

        for itg in integrations_to_process:
            integration_cache[itg.domain] = itg

            # Newly discovered dependencies go through the next iteration.
            for dep in itg.all_dependencies:
                if dep in domains_to_setup:
                    continue

                domains_to_setup.add(dep)
                to_resolve.add(dep)

    _LOGGER.info("Domains to be set up: %s", domains_to_setup)

    logging_domains = domains_to_setup & LOGGING_INTEGRATIONS

    # Load logging as soon as possible
    if logging_domains:
        _LOGGER.info("Setting up logging: %s", logging_domains)
        await async_setup_multi_components(hass, logging_domains, config, setup_started)

    # Start up debuggers. Start these first in case they want to wait.
    debuggers = domains_to_setup & DEBUGGER_INTEGRATIONS

    if debuggers:
        _LOGGER.debug("Setting up debuggers: %s", debuggers)
        await async_setup_multi_components(hass, debuggers, config, setup_started)

    # calculate what components to setup in what stage
    stage_1_domains = set()

    # Find all dependencies of any dependency of any stage 1 integration that
    # we plan on loading and promote them to stage 1
    deps_promotion = STAGE_1_INTEGRATIONS
    while deps_promotion:
        old_deps_promotion = deps_promotion
        deps_promotion = set()

        for domain in old_deps_promotion:
            if domain not in domains_to_setup or domain in stage_1_domains:
                continue

            stage_1_domains.add(domain)

            dep_itg = integration_cache.get(domain)

            # Not in the cache means the integration failed to load above.
            if dep_itg is None:
                continue

            deps_promotion.update(dep_itg.all_dependencies)

    stage_2_domains = domains_to_setup - logging_domains - debuggers - stage_1_domains

    # Kick off loading the registries. They don't need to be awaited.
    asyncio.gather(
        hass.helpers.device_registry.async_get_registry(),
        hass.helpers.entity_registry.async_get_registry(),
        hass.helpers.area_registry.async_get_registry(),
    )

    # Start setup
    if stage_1_domains:
        _LOGGER.info("Setting up stage 1: %s", stage_1_domains)
        await async_setup_multi_components(hass, stage_1_domains, config, setup_started)

    # Enables after dependencies
    async_set_domains_to_be_loaded(hass, stage_1_domains | stage_2_domains)

    if stage_2_domains:
        _LOGGER.info("Setting up stage 2: %s", stage_2_domains)
        await async_setup_multi_components(hass, stage_2_domains, config, setup_started)

    # Wrap up startup
    _LOGGER.debug("Waiting for startup to wrap up")
    await hass.async_block_till_done()
|