2017-04-30 05:04:49 +00:00
|
|
|
"""Provide methods to bootstrap a Home Assistant instance."""
|
2019-03-27 14:06:20 +00:00
|
|
|
import asyncio
|
2013-10-22 05:06:22 +00:00
|
|
|
import logging
|
2015-09-04 22:22:42 +00:00
|
|
|
import logging.handlers
|
2015-11-15 10:05:46 +00:00
|
|
|
import os
|
|
|
|
import sys
|
2017-03-01 04:33:19 +00:00
|
|
|
from time import time
|
2016-11-19 16:18:33 +00:00
|
|
|
from collections import OrderedDict
|
2019-03-20 14:49:27 +00:00
|
|
|
from typing import Any, Optional, Dict, Set
|
2013-10-13 17:42:22 +00:00
|
|
|
|
2016-03-28 01:48:51 +00:00
|
|
|
import voluptuous as vol
|
|
|
|
|
2019-03-26 12:38:33 +00:00
|
|
|
from homeassistant import core, config as conf_util, config_entries, loader
|
2017-02-13 05:24:07 +00:00
|
|
|
from homeassistant.const import EVENT_HOMEASSISTANT_CLOSE
|
2017-03-05 09:41:54 +00:00
|
|
|
from homeassistant.setup import async_setup_component
|
2016-12-16 23:51:06 +00:00
|
|
|
from homeassistant.util.logging import AsyncHandler
|
2018-06-16 14:48:41 +00:00
|
|
|
from homeassistant.util.package import async_get_user_site, is_virtual_env
|
2016-08-20 19:39:56 +00:00
|
|
|
from homeassistant.util.yaml import clear_secret_cache
|
2016-04-09 22:25:01 +00:00
|
|
|
from homeassistant.exceptions import HomeAssistantError
|
2019-01-26 22:09:41 +00:00
|
|
|
from homeassistant.helpers import config_validation as cv
|
2014-01-24 05:34:08 +00:00
|
|
|
|
2015-01-09 08:07:58 +00:00
|
|
|
_LOGGER = logging.getLogger(__name__)

# File name (relative to the config dir) used for the error log.
ERROR_LOG_FILENAME = 'home-assistant.log'

# hass.data key for logging information.
DATA_LOGGING = 'logging'

# Integrations that provide debugger support; set up before everything else
# so they get a chance to attach/wait before startup continues.
DEBUGGER_INTEGRATIONS = {'ptvsd', }

# Integrations that form the core and must initialize successfully before
# any other setup proceeds.
CORE_INTEGRATIONS = ('homeassistant', 'persistent_notification')

# Integrations set up right after core so logging is configured early.
LOGGING_INTEGRATIONS = {'logger', 'system_log'}

# Integrations set up before the general (stage 2) integrations.
STAGE_1_INTEGRATIONS = {
    # To record data
    'recorder',
    # To make sure we forward data to other instances
    'mqtt_eventstream',
}
|
2015-05-12 05:23:20 +00:00
|
|
|
|
2015-01-09 08:07:58 +00:00
|
|
|
|
2018-04-28 23:26:20 +00:00
|
|
|
async def async_from_config_dict(config: Dict[str, Any],
                                 hass: core.HomeAssistant,
                                 config_dir: Optional[str] = None,
                                 enable_log: bool = True,
                                 verbose: bool = False,
                                 skip_pip: bool = False,
                                 log_rotate_days: Any = None,
                                 log_file: Any = None,
                                 log_no_color: bool = False) \
        -> Optional[core.HomeAssistant]:
    """Try to configure Home Assistant from a configuration dictionary.

    Dynamically loads required components and its dependencies.
    This method is a coroutine.

    Returns the configured ``hass`` instance, or None when the core
    configuration is invalid or core initialization fails.
    """
    # Track wall-clock startup time for the info log at the end.
    start = time()

    # Logging may already have been configured by the caller
    # (e.g. async_from_config_file passes enable_log=False).
    if enable_log:
        async_enable_logging(hass, verbose, log_rotate_days, log_file,
                             log_no_color)

    hass.config.skip_pip = skip_pip
    if skip_pip:
        _LOGGER.warning("Skipping pip installation of required modules. "
                        "This may cause issues")

    # The [homeassistant] section plus legacy auth settings pulled from
    # the [http] section for backwards compatibility.
    core_config = config.get(core.DOMAIN, {})
    api_password = config.get('http', {}).get('api_password')
    trusted_networks = config.get('http', {}).get('trusted_networks')

    try:
        await conf_util.async_process_ha_core_config(
            hass, core_config, api_password, trusted_networks)
    except vol.Invalid as config_err:
        # Schema validation failure: log it in a user-friendly way and abort.
        conf_util.async_log_exception(
            config_err, 'homeassistant', core_config, hass)
        return None
    except HomeAssistantError:
        _LOGGER.error("Home Assistant core failed to initialize. "
                      "Further initialization aborted")
        return None

    # Make a copy because we are mutating it.
    config = OrderedDict(config)

    # Merge packages
    await conf_util.merge_packages_config(
        hass, config, core_config.get(conf_util.CONF_PACKAGES, {}))

    # Config entries must be loaded before integrations are set up so that
    # entry-backed domains are discovered by _get_domains.
    hass.config_entries = config_entries.ConfigEntries(hass, config)
    await hass.config_entries.async_initialize()

    # Set up all integrations found in the config and config entries.
    await _async_set_up_integrations(hass, config)

    stop = time()
    _LOGGER.info("Home Assistant initialized in %.2fs", stop-start)

    # TEMP: warn users for invalid slugs
    # Remove after 0.94 or 1.0
    if cv.INVALID_SLUGS_FOUND or cv.INVALID_ENTITY_IDS_FOUND:
        msg = []

        if cv.INVALID_ENTITY_IDS_FOUND:
            msg.append(
                "Your configuration contains invalid entity ID references. "
                "Please find and update the following. "
                "This will become a breaking change."
            )
            # Each item maps an invalid entity ID to its suggested fix.
            msg.append('\n'.join('- {} -> {}'.format(*item)
                                 for item
                                 in cv.INVALID_ENTITY_IDS_FOUND.items()))

        if cv.INVALID_SLUGS_FOUND:
            msg.append(
                "Your configuration contains invalid slugs. "
                "Please find and update the following. "
                "This will become a breaking change."
            )
            msg.append('\n'.join('- {} -> {}'.format(*item)
                                 for item in cv.INVALID_SLUGS_FOUND.items()))

        hass.components.persistent_notification.async_create(
            '\n\n'.join(msg), "Config Warning", "config_warning"
        )

    # TEMP: warn users of invalid extra keys
    # Remove after 0.92
    if cv.INVALID_EXTRA_KEYS_FOUND:
        msg = []
        msg.append(
            "Your configuration contains extra keys "
            "that the platform does not support (but were silently "
            "accepted before 0.88). Please find and remove the following."
            "This will become a breaking change."
        )
        msg.append('\n'.join('- {}'.format(it)
                             for it in cv.INVALID_EXTRA_KEYS_FOUND))

        hass.components.persistent_notification.async_create(
            '\n\n'.join(msg), "Config Warning", "config_warning"
        )

    return hass
|
2013-10-22 05:06:22 +00:00
|
|
|
|
2014-01-24 01:46:29 +00:00
|
|
|
|
2018-04-28 23:26:20 +00:00
|
|
|
async def async_from_config_file(config_path: str,
                                 hass: core.HomeAssistant,
                                 verbose: bool = False,
                                 skip_pip: bool = True,
                                 log_rotate_days: Any = None,
                                 log_file: Any = None,
                                 log_no_color: bool = False)\
        -> Optional[core.HomeAssistant]:
    """Read the configuration file and try to start all the functionality.

    Will add functionality to 'hass' parameter.
    This method is a coroutine.

    Returns the configured ``hass`` instance, or None when the YAML
    configuration file cannot be loaded.
    """
    # Set config dir to directory holding config file
    config_dir = os.path.abspath(os.path.dirname(config_path))
    hass.config.config_dir = config_dir

    # Inside a virtualenv the deps dir is not used; only mount the local
    # library path when running outside one.
    if not is_virtual_env():
        await async_mount_local_lib_path(config_dir)

    async_enable_logging(hass, verbose, log_rotate_days, log_file,
                         log_no_color)

    # Run the (blocking) config upgrade in the executor to keep the
    # event loop responsive.
    await hass.async_add_executor_job(
        conf_util.process_ha_config_upgrade, hass)

    try:
        # YAML parsing is blocking I/O; run it in the executor too.
        config_dict = await hass.async_add_executor_job(
            conf_util.load_yaml_config_file, config_path)
    except HomeAssistantError as err:
        _LOGGER.error("Error loading %s: %s", config_path, err)
        return None
    finally:
        # Secrets are cached during YAML load; always drop them so they
        # don't linger in memory after startup.
        clear_secret_cache()

    # Logging is already configured above, so disable it in the dict path.
    return await async_from_config_dict(
        config_dict, hass, enable_log=False, skip_pip=skip_pip)
|
2015-01-18 06:23:07 +00:00
|
|
|
|
|
|
|
|
2017-02-13 05:24:07 +00:00
|
|
|
@core.callback
def async_enable_logging(hass: core.HomeAssistant,
                         verbose: bool = False,
                         log_rotate_days: Optional[int] = None,
                         log_file: Optional[str] = None,
                         log_no_color: bool = False) -> None:
    """Set up the logging.

    This method must be run in the event loop.

    verbose: log at INFO instead of WARNING to the error log file.
    log_rotate_days: if set, rotate the error log nightly keeping this
        many backups; otherwise a plain (truncating) file handler is used.
    log_file: explicit error-log path; defaults to ERROR_LOG_FILENAME in
        the config dir.
    log_no_color: skip the colorlog-based colored console output.
    """
    fmt = ("%(asctime)s %(levelname)s (%(threadName)s) "
           "[%(name)s] %(message)s")
    datefmt = '%Y-%m-%d %H:%M:%S'

    if not log_no_color:
        try:
            from colorlog import ColoredFormatter
            # basicConfig must be called after importing colorlog in order to
            # ensure that the handlers it sets up wraps the correct streams.
            logging.basicConfig(level=logging.INFO)

            colorfmt = "%(log_color)s{}%(reset)s".format(fmt)
            logging.getLogger().handlers[0].setFormatter(ColoredFormatter(
                colorfmt,
                datefmt=datefmt,
                reset=True,
                log_colors={
                    'DEBUG': 'cyan',
                    'INFO': 'green',
                    'WARNING': 'yellow',
                    'ERROR': 'red',
                    'CRITICAL': 'red',
                }
            ))
        except ImportError:
            # colorlog is optional; fall through to plain formatting below.
            pass

    # If the above initialization failed for any reason, setup the default
    # formatting. If the above succeeds, this will result in a no-op.
    logging.basicConfig(format=fmt, datefmt=datefmt, level=logging.INFO)

    # Suppress overly verbose logs from libraries that aren't helpful
    logging.getLogger('requests').setLevel(logging.WARNING)
    logging.getLogger('urllib3').setLevel(logging.WARNING)
    logging.getLogger('aiohttp.access').setLevel(logging.WARNING)

    # Log errors to a file if we have write access to file or config dir
    if log_file is None:
        err_log_path = hass.config.path(ERROR_LOG_FILENAME)
    else:
        err_log_path = os.path.abspath(log_file)

    err_path_exists = os.path.isfile(err_log_path)
    err_dir = os.path.dirname(err_log_path)

    # Check if we can write to the error log if it exists or that
    # we can create files in the containing directory if not.
    if (err_path_exists and os.access(err_log_path, os.W_OK)) or \
       (not err_path_exists and os.access(err_dir, os.W_OK)):

        if log_rotate_days:
            err_handler = logging.handlers.TimedRotatingFileHandler(
                err_log_path, when='midnight',
                backupCount=log_rotate_days)  # type: logging.FileHandler
        else:
            # delay=True defers creating the file until the first record.
            err_handler = logging.FileHandler(
                err_log_path, mode='w', delay=True)

        err_handler.setLevel(logging.INFO if verbose else logging.WARNING)
        err_handler.setFormatter(logging.Formatter(fmt, datefmt=datefmt))

        # Wrap the file handler so writes happen off the event loop.
        async_handler = AsyncHandler(hass.loop, err_handler)

        async def async_stop_async_handler(_: Any) -> None:
            """Cleanup async handler."""
            logging.getLogger('').removeHandler(async_handler)  # type: ignore
            await async_handler.async_close(blocking=True)

        # Tear the handler down cleanly when Home Assistant closes.
        hass.bus.async_listen_once(
            EVENT_HOMEASSISTANT_CLOSE, async_stop_async_handler)

        logger = logging.getLogger('')
        logger.addHandler(async_handler)  # type: ignore
        logger.setLevel(logging.INFO)

        # Save the log file location for access by other components.
        hass.data[DATA_LOGGING] = err_log_path
    else:
        _LOGGER.error(
            "Unable to set up error log %s (access denied)", err_log_path)
|
2015-01-30 07:56:04 +00:00
|
|
|
|
2015-01-30 16:26:06 +00:00
|
|
|
|
2018-06-16 14:48:41 +00:00
|
|
|
async def async_mount_local_lib_path(config_dir: str) -> str:
    """Add the local 'deps' library directory to the Python path.

    This function is a coroutine.

    Returns the deps directory path inside the config directory.
    """
    dependencies_dir = os.path.join(config_dir, 'deps')
    user_site = await async_get_user_site(dependencies_dir)
    already_mounted = user_site in sys.path
    if not already_mounted:
        # Prepend so locally installed deps shadow system packages.
        sys.path.insert(0, user_site)
    return dependencies_dir
|
2019-03-19 18:33:50 +00:00
|
|
|
|
|
|
|
|
|
|
|
@core.callback
def _get_domains(hass: core.HomeAssistant, config: Dict[str, Any]) -> Set[str]:
    """Get domains of components to set up."""
    # Strip any suffix after the first space ("light 2" -> "light") and
    # filter out the repeating and common config section [homeassistant].
    result = {key.split(' ')[0] for key in config
              if key != core.DOMAIN}

    # Include domains backed by config entries as well.
    result.update(hass.config_entries.async_domains())  # type: ignore

    # When running under Hass.io, its component must always be loaded.
    if 'HASSIO' in os.environ:
        result.add('hassio')

    return result
|
2019-04-16 20:40:21 +00:00
|
|
|
|
|
|
|
|
|
|
|
async def _async_set_up_integrations(
        hass: core.HomeAssistant, config: Dict[str, Any]) -> None:
    """Set up all the integrations.

    Setup proceeds in stages: debuggers, core, logging integrations,
    stage 1 integrations, then the remaining (stage 2) integrations
    ordered by their after_dependencies.
    """
    domains = _get_domains(hass, config)

    # Start up debuggers. Start these first in case they want to wait.
    debuggers = domains & DEBUGGER_INTEGRATIONS
    if debuggers:
        _LOGGER.debug("Starting up debuggers %s", debuggers)
        await asyncio.gather(*[
            async_setup_component(hass, domain, config)
            for domain in debuggers])
        domains -= DEBUGGER_INTEGRATIONS

    # Resolve all dependencies of all components so we can find the logging
    # and integrations that need faster initialization.
    # Kicked off here (not awaited yet) so it runs concurrently with the
    # core setup below.
    resolved_domains_task = asyncio.gather(*[
        loader.async_component_dependencies(hass, domain)
        for domain in domains
    ], return_exceptions=True)

    # Set up core.
    _LOGGER.debug("Setting up %s", CORE_INTEGRATIONS)

    if not all(await asyncio.gather(*[
            async_setup_component(hass, domain, config)
            for domain in CORE_INTEGRATIONS
    ])):
        # Without a working core there is no point continuing.
        _LOGGER.error("Home Assistant core failed to initialize. "
                      "Further initialization aborted")
        return

    _LOGGER.debug("Home Assistant core initialized")

    # Finish resolving domains
    for dep_domains in await resolved_domains_task:
        # Result is either a set or an exception. We ignore exceptions
        # It will be properly handled during setup of the domain.
        if isinstance(dep_domains, set):
            domains.update(dep_domains)

    # setup components
    logging_domains = domains & LOGGING_INTEGRATIONS
    stage_1_domains = domains & STAGE_1_INTEGRATIONS
    stage_2_domains = domains - logging_domains - stage_1_domains

    if logging_domains:
        _LOGGER.info("Setting up %s", logging_domains)

        await asyncio.gather(*[
            async_setup_component(hass, domain, config)
            for domain in logging_domains
        ])

    # Kick off loading the registries. They don't need to be awaited.
    asyncio.gather(
        hass.helpers.device_registry.async_get_registry(),
        hass.helpers.entity_registry.async_get_registry(),
        hass.helpers.area_registry.async_get_registry())

    if stage_1_domains:
        await asyncio.gather(*[
            async_setup_component(hass, domain, config)
            for domain in stage_1_domains
        ])

    # Load all integrations
    after_dependencies = {}  # type: Dict[str, Set[str]]

    for int_or_exc in await asyncio.gather(*[
            loader.async_get_integration(hass, domain)
            for domain in stage_2_domains
    ], return_exceptions=True):
        # Exceptions are handled in async_setup_component.
        if (isinstance(int_or_exc, loader.Integration) and
                int_or_exc.after_dependencies):
            after_dependencies[int_or_exc.domain] = set(
                int_or_exc.after_dependencies
            )

    # Iteratively set up stage 2 integrations in waves: each wave contains
    # the domains whose after_dependencies are already loaded. Stop when no
    # progress is made (last_load guards against an infinite loop).
    last_load = None
    while stage_2_domains:
        domains_to_load = set()

        for domain in stage_2_domains:
            after_deps = after_dependencies.get(domain)
            # Load if integration has no after_dependencies or they are
            # all loaded
            if (not after_deps or
                    not after_deps-hass.config.components):
                domains_to_load.add(domain)

        if not domains_to_load or domains_to_load == last_load:
            break

        _LOGGER.debug("Setting up %s", domains_to_load)

        await asyncio.gather(*[
            async_setup_component(hass, domain, config)
            for domain in domains_to_load
        ])

        last_load = domains_to_load
        stage_2_domains -= domains_to_load

    # These are stage 2 domains that never have their after_dependencies
    # satisfied.
    if stage_2_domains:
        _LOGGER.debug("Final set up: %s", stage_2_domains)

        await asyncio.gather(*[
            async_setup_component(hass, domain, config)
            for domain in stage_2_domains
        ])

    # Wrap up startup
    await hass.async_block_till_done()
|