2016-03-07 23:06:04 +00:00
|
|
|
"""Module to help with parsing and generating configuration files."""
|
2017-01-14 06:01:47 +00:00
|
|
|
from collections import OrderedDict
|
2017-07-14 02:26:21 +00:00
|
|
|
# pylint: disable=no-name-in-module
|
|
|
|
from distutils.version import LooseVersion # pylint: disable=import-error
|
2015-04-26 17:05:01 +00:00
|
|
|
import logging
|
|
|
|
import os
|
2017-02-08 17:17:52 +00:00
|
|
|
import re
|
2016-06-27 16:02:45 +00:00
|
|
|
import shutil
|
2018-09-11 09:21:48 +00:00
|
|
|
from typing import ( # noqa: F401 pylint: disable=unused-import
|
2018-08-28 18:54:01 +00:00
|
|
|
Any, Tuple, Optional, Dict, List, Union, Callable, Sequence, Set)
|
2018-07-23 08:24:39 +00:00
|
|
|
from types import ModuleType
|
2016-03-28 01:48:51 +00:00
|
|
|
import voluptuous as vol
|
2017-03-01 04:33:19 +00:00
|
|
|
from voluptuous.humanize import humanize_error
|
2015-04-26 17:05:01 +00:00
|
|
|
|
2018-05-01 16:20:41 +00:00
|
|
|
from homeassistant import auth
|
2018-08-22 07:52:34 +00:00
|
|
|
from homeassistant.auth import providers as auth_providers,\
|
|
|
|
mfa_modules as auth_mfa_modules
|
2015-04-26 17:05:01 +00:00
|
|
|
from homeassistant.const import (
|
2018-03-27 02:50:29 +00:00
|
|
|
ATTR_FRIENDLY_NAME, ATTR_HIDDEN, ATTR_ASSUMED_STATE,
|
2017-01-14 06:01:47 +00:00
|
|
|
CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME, CONF_PACKAGES, CONF_UNIT_SYSTEM,
|
2017-02-16 03:47:30 +00:00
|
|
|
CONF_TIME_ZONE, CONF_ELEVATION, CONF_UNIT_SYSTEM_METRIC,
|
2016-07-31 20:24:49 +00:00
|
|
|
CONF_UNIT_SYSTEM_IMPERIAL, CONF_TEMPERATURE_UNIT, TEMP_CELSIUS,
|
2017-06-25 22:10:30 +00:00
|
|
|
__version__, CONF_CUSTOMIZE, CONF_CUSTOMIZE_DOMAIN, CONF_CUSTOMIZE_GLOB,
|
2018-08-22 07:52:34 +00:00
|
|
|
CONF_WHITELIST_EXTERNAL_DIRS, CONF_AUTH_PROVIDERS, CONF_AUTH_MFA_MODULES,
|
2018-08-28 18:54:01 +00:00
|
|
|
CONF_TYPE, CONF_ID)
|
2018-07-23 08:24:39 +00:00
|
|
|
from homeassistant.core import callback, DOMAIN as CONF_CORE, HomeAssistant
|
2016-02-19 05:27:50 +00:00
|
|
|
from homeassistant.exceptions import HomeAssistantError
|
2017-03-01 04:33:19 +00:00
|
|
|
from homeassistant.loader import get_component, get_platform
|
2017-10-05 16:10:29 +00:00
|
|
|
from homeassistant.util.yaml import load_yaml, SECRET_YAML
|
2016-03-28 01:48:51 +00:00
|
|
|
import homeassistant.helpers.config_validation as cv
|
2016-06-27 16:02:45 +00:00
|
|
|
from homeassistant.util import dt as date_util, location as loc_util
|
2016-08-09 03:42:25 +00:00
|
|
|
from homeassistant.util.unit_system import IMPERIAL_SYSTEM, METRIC_SYSTEM
|
2017-02-16 03:47:30 +00:00
|
|
|
from homeassistant.helpers.entity_values import EntityValues
|
2017-03-01 04:33:19 +00:00
|
|
|
from homeassistant.helpers import config_per_platform, extract_domain_configs
|
2015-04-26 17:05:01 +00:00
|
|
|
|
|
|
|
_LOGGER = logging.getLogger(__name__)
|
|
|
|
|
2017-03-01 04:33:19 +00:00
|
|
|
DATA_PERSISTENT_ERRORS = 'bootstrap_persistent_errors'
|
2018-01-12 14:29:58 +00:00
|
|
|
RE_YAML_ERROR = re.compile(r"homeassistant\.util\.yaml")
|
|
|
|
RE_ASCII = re.compile(r"\033\[[^m]*m")
|
2017-03-01 04:33:19 +00:00
|
|
|
HA_COMPONENT_URL = '[{}](https://home-assistant.io/components/{}/)'
|
2015-04-26 17:05:01 +00:00
|
|
|
YAML_CONFIG_FILE = 'configuration.yaml'
|
2016-06-27 16:02:45 +00:00
|
|
|
VERSION_FILE = '.HA_VERSION'
|
2015-08-30 01:11:24 +00:00
|
|
|
CONFIG_DIR_NAME = '.homeassistant'
|
2017-02-16 03:47:30 +00:00
|
|
|
DATA_CUSTOMIZE = 'hass_customize'
|
2015-06-02 05:50:57 +00:00
|
|
|
|
2018-03-09 03:34:24 +00:00
|
|
|
FILE_MIGRATION = (
|
|
|
|
('ios.conf', '.ios.conf'),
|
|
|
|
)
|
2017-06-02 06:50:04 +00:00
|
|
|
|
2016-07-30 17:40:51 +00:00
|
|
|
# Defaults used by create_default_config when rendering the initial
# configuration.yaml.  The auto-detect property (third element) names an
# attribute of the detected location info to prefer over the default.
DEFAULT_CORE_CONFIG = (
    # Tuples (attribute, default, auto detect property, description)
    (CONF_NAME, 'Home', None, 'Name of the location where Home Assistant is '
     'running'),
    (CONF_LATITUDE, 0, 'latitude', 'Location required to calculate the time'
     ' the sun rises and sets'),
    (CONF_LONGITUDE, 0, 'longitude', None),
    (CONF_ELEVATION, 0, None, 'Impacts weather/sunrise data'
     ' (altitude above sea level in meters)'),
    (CONF_UNIT_SYSTEM, CONF_UNIT_SYSTEM_METRIC, None,
     '{} for Metric, {} for Imperial'.format(CONF_UNIT_SYSTEM_METRIC,
                                             CONF_UNIT_SYSTEM_IMPERIAL)),
    (CONF_TIME_ZONE, 'UTC', 'time_zone', 'Pick yours from here: http://en.wiki'
     'pedia.org/wiki/List_of_tz_database_time_zones'),
    (CONF_CUSTOMIZE, '!include customize.yaml', None, 'Customization file'),
)  # type: Tuple[Tuple[str, Any, Any, Optional[str]], ...]
|
2016-07-30 17:40:51 +00:00
|
|
|
DEFAULT_CONFIG = """
|
|
|
|
# Show links to resources in log and frontend
|
|
|
|
introduction:
|
|
|
|
|
|
|
|
# Enables the frontend
|
|
|
|
frontend:
|
|
|
|
|
2017-02-12 19:31:46 +00:00
|
|
|
# Enables configuration UI
|
|
|
|
config:
|
|
|
|
|
2018-08-23 20:16:31 +00:00
|
|
|
# Uncomment this if you are using SSL/TLS, running in Docker container, etc.
|
|
|
|
# http:
|
|
|
|
# base_url: example.duckdns.org:8123
|
2016-07-30 17:40:51 +00:00
|
|
|
|
|
|
|
# Checks for available updates
|
2017-04-24 02:59:26 +00:00
|
|
|
# Note: This component will send some information about your system to
|
|
|
|
# the developers to assist with development of Home Assistant.
|
|
|
|
# For more information, please see:
|
|
|
|
# https://home-assistant.io/blog/2016/10/25/explaining-the-updater/
|
2016-07-30 17:40:51 +00:00
|
|
|
updater:
|
2017-06-16 05:31:22 +00:00
|
|
|
# Optional, allows Home Assistant developers to focus on popular components.
|
|
|
|
# include_used_components: true
|
2016-07-30 17:40:51 +00:00
|
|
|
|
|
|
|
# Discover some devices automatically
|
|
|
|
discovery:
|
|
|
|
|
|
|
|
# Allows you to issue voice commands from the frontend in enabled browsers
|
|
|
|
conversation:
|
|
|
|
|
2017-07-18 14:23:57 +00:00
|
|
|
# Enables support for tracking state changes over time
|
2016-07-30 17:40:51 +00:00
|
|
|
history:
|
|
|
|
|
|
|
|
# View all events in a logbook
|
|
|
|
logbook:
|
|
|
|
|
2017-10-11 15:45:55 +00:00
|
|
|
# Enables a map showing the location of tracked devices
|
|
|
|
map:
|
|
|
|
|
2016-07-30 17:40:51 +00:00
|
|
|
# Track the sun
|
|
|
|
sun:
|
|
|
|
|
2018-10-17 07:58:41 +00:00
|
|
|
# Sensors
|
2016-07-30 17:40:51 +00:00
|
|
|
sensor:
|
2018-10-17 07:58:41 +00:00
|
|
|
# Weather prediction
|
2017-07-18 14:23:57 +00:00
|
|
|
- platform: yr
|
2016-12-18 22:59:45 +00:00
|
|
|
|
|
|
|
# Text to speech
|
|
|
|
tts:
|
2017-07-18 14:23:57 +00:00
|
|
|
- platform: google
|
2016-12-18 22:59:45 +00:00
|
|
|
|
2017-12-16 08:42:25 +00:00
|
|
|
# Cloud
|
|
|
|
cloud:
|
|
|
|
|
2017-02-24 06:53:16 +00:00
|
|
|
group: !include groups.yaml
|
2017-05-10 01:44:00 +00:00
|
|
|
automation: !include automations.yaml
|
2017-08-16 05:09:10 +00:00
|
|
|
script: !include scripts.yaml
|
2016-07-30 17:40:51 +00:00
|
|
|
"""
|
2017-10-05 16:10:29 +00:00
|
|
|
DEFAULT_SECRETS = """
|
|
|
|
# Use this file to store secrets like usernames and passwords.
|
|
|
|
# Learn more at https://home-assistant.io/docs/configuration/secrets/
|
2018-08-23 20:16:31 +00:00
|
|
|
some_password: welcome
|
2017-10-05 16:10:29 +00:00
|
|
|
"""
|
2015-04-26 17:05:01 +00:00
|
|
|
|
|
|
|
|
2018-08-28 18:54:01 +00:00
|
|
|
def _no_duplicate_auth_provider(configs: Sequence[Dict[str, Any]]) \
        -> Sequence[Dict[str, Any]]:
    """Validate that no auth provider config appears twice in a list.

    A provider type may appear only once without an optional id; reusing
    the same type requires each entry to carry a unique id.
    """
    seen = set()  # type: Set[Tuple[str, Optional[str]]]
    for provider_conf in configs:
        identity = (provider_conf[CONF_TYPE], provider_conf.get(CONF_ID))
        if identity in seen:
            raise vol.Invalid(
                'Duplicate auth provider {} found. Please add unique IDs if '
                'you want to have the same auth provider twice'.format(
                    provider_conf[CONF_TYPE]
                ))
        seen.add(identity)
    return configs
|
|
|
|
|
|
|
|
|
|
|
|
def _no_duplicate_auth_mfa_module(configs: Sequence[Dict[str, Any]]) \
        -> Sequence[Dict[str, Any]]:
    """Validate that no mfa module config appears twice in a list.

    A module type may appear only once without an optional id; a globally
    unique id is required when the same type is configured multiple times.
    Note: this is different than auth provider
    """
    seen = set()  # type: Set[str]
    for module_conf in configs:
        # The id, when given, is the identity; otherwise the type is.
        identity = module_conf.get(CONF_ID, module_conf[CONF_TYPE])
        if identity in seen:
            raise vol.Invalid(
                'Duplicate mfa module {} found. Please add unique IDs if '
                'you want to have the same mfa module twice'.format(
                    module_conf[CONF_TYPE]
                ))
        seen.add(identity)
    return configs
|
|
|
|
|
|
|
|
|
2017-01-14 06:01:47 +00:00
|
|
|
# Schema for the homeassistant->packages mapping: package name -> a
# mapping of component name to its (unvalidated) configuration.
PACKAGES_CONFIG_SCHEMA = vol.Schema({
    cv.slug: vol.Schema(  # Package names are slugs
        {cv.string: vol.Any(dict, list, None)})  # Component configuration
})
|
|
|
|
|
2018-03-27 02:50:29 +00:00
|
|
|
# Per-entity customize payload.  Extra keys are allowed so components can
# define their own customizable attributes.
CUSTOMIZE_DICT_SCHEMA = vol.Schema({
    vol.Optional(ATTR_FRIENDLY_NAME): cv.string,
    vol.Optional(ATTR_HIDDEN): cv.boolean,
    vol.Optional(ATTR_ASSUMED_STATE): cv.boolean,
}, extra=vol.ALLOW_EXTRA)
|
|
|
|
|
2017-02-16 03:47:30 +00:00
|
|
|
# The three customize sections: exact entity ids, whole domains, and glob
# patterns.  All default to empty mappings.
CUSTOMIZE_CONFIG_SCHEMA = vol.Schema({
    vol.Optional(CONF_CUSTOMIZE, default={}):
        vol.Schema({cv.entity_id: CUSTOMIZE_DICT_SCHEMA}),
    vol.Optional(CONF_CUSTOMIZE_DOMAIN, default={}):
        vol.Schema({cv.string: CUSTOMIZE_DICT_SCHEMA}),
    vol.Optional(CONF_CUSTOMIZE_GLOB, default={}):
        vol.Schema({cv.string: CUSTOMIZE_DICT_SCHEMA}),
})
|
|
|
|
|
|
|
|
# Schema for the [homeassistant] core section; extends the customize
# schema with location, unit, auth provider and mfa module validation.
CORE_CONFIG_SCHEMA = CUSTOMIZE_CONFIG_SCHEMA.extend({
    CONF_NAME: vol.Coerce(str),
    CONF_LATITUDE: cv.latitude,
    CONF_LONGITUDE: cv.longitude,
    CONF_ELEVATION: vol.Coerce(int),
    # Deprecated in favour of CONF_UNIT_SYSTEM (see
    # async_process_ha_core_config).
    vol.Optional(CONF_TEMPERATURE_UNIT): cv.temperature_unit,
    CONF_UNIT_SYSTEM: cv.unit_system,
    CONF_TIME_ZONE: cv.time_zone,
    vol.Optional(CONF_WHITELIST_EXTERNAL_DIRS):
        # pylint: disable=no-value-for-parameter
        vol.All(cv.ensure_list, [vol.IsDir()]),
    vol.Optional(CONF_PACKAGES, default={}): PACKAGES_CONFIG_SCHEMA,
    # insecure_example is rejected outright; duplicates are rejected by
    # the helpers defined above.
    vol.Optional(CONF_AUTH_PROVIDERS):
        vol.All(cv.ensure_list,
                [auth_providers.AUTH_PROVIDER_SCHEMA.extend({
                    CONF_TYPE: vol.NotIn(['insecure_example'],
                                         'The insecure_example auth provider'
                                         ' is for testing only.')
                })],
                _no_duplicate_auth_provider),
    vol.Optional(CONF_AUTH_MFA_MODULES):
        vol.All(cv.ensure_list,
                [auth_mfa_modules.MULTI_FACTOR_AUTH_MODULE_SCHEMA.extend({
                    CONF_TYPE: vol.NotIn(['insecure_example'],
                                         'The insecure_example mfa module'
                                         ' is for testing only.')
                })],
                _no_duplicate_auth_mfa_module),
})
|
|
|
|
|
|
|
|
|
2016-07-21 05:38:52 +00:00
|
|
|
def get_default_config_dir() -> str:
    """Return the OS-appropriate default configuration directory."""
    if os.name == "nt":
        # Windows: configuration lives under %APPDATA%.
        base_dir = os.getenv('APPDATA')
    else:
        base_dir = os.path.expanduser('~')
    return os.path.join(base_dir, CONFIG_DIR_NAME)  # type: ignore
|
2015-08-30 01:11:24 +00:00
|
|
|
|
|
|
|
|
2018-07-13 17:14:45 +00:00
|
|
|
def ensure_config_exists(config_dir: str, detect_location: bool = True)\
        -> Optional[str]:
    """Make sure the configuration directory holds a configuration file.

    A default configuration is written when none exists yet.
    Return the path to the configuration file.
    """
    existing = find_config_file(config_dir)
    if existing is not None:
        return existing

    print("Unable to find configuration. Creating default one in",
          config_dir)
    return create_default_config(config_dir, detect_location)
|
|
|
|
|
|
|
|
|
2018-07-23 08:24:39 +00:00
|
|
|
def create_default_config(config_dir: str, detect_location: bool = True)\
        -> Optional[str]:
    """Create a default configuration file in given configuration directory.

    When detect_location is True, location/unit/time-zone defaults are
    filled in from a network-based location lookup.
    Return path to new config file if success, None if failed.
    This method needs to run in an executor.
    """
    # Function-scope imports — NOTE(review): presumably to avoid import
    # cycles between this module and the config components; confirm.
    from homeassistant.components.config.group import (
        CONFIG_PATH as GROUP_CONFIG_PATH)
    from homeassistant.components.config.automation import (
        CONFIG_PATH as AUTOMATION_CONFIG_PATH)
    from homeassistant.components.config.script import (
        CONFIG_PATH as SCRIPT_CONFIG_PATH)
    from homeassistant.components.config.customize import (
        CONFIG_PATH as CUSTOMIZE_CONFIG_PATH)

    # Paths of every file this function creates inside config_dir.
    config_path = os.path.join(config_dir, YAML_CONFIG_FILE)
    secret_path = os.path.join(config_dir, SECRET_YAML)
    version_path = os.path.join(config_dir, VERSION_FILE)
    group_yaml_path = os.path.join(config_dir, GROUP_CONFIG_PATH)
    automation_yaml_path = os.path.join(config_dir, AUTOMATION_CONFIG_PATH)
    script_yaml_path = os.path.join(config_dir, SCRIPT_CONFIG_PATH)
    customize_yaml_path = os.path.join(config_dir, CUSTOMIZE_CONFIG_PATH)

    # Start from the hard-coded defaults, then overlay detected values.
    info = {attr: default for attr, default, _, _ in DEFAULT_CORE_CONFIG}

    location_info = detect_location and loc_util.detect_location_info()

    if location_info:
        if location_info.use_metric:
            info[CONF_UNIT_SYSTEM] = CONF_UNIT_SYSTEM_METRIC
        else:
            info[CONF_UNIT_SYSTEM] = CONF_UNIT_SYSTEM_IMPERIAL

        # Overlay each auto-detectable attribute; fall back to the
        # default when the detected value is falsy.
        for attr, default, prop, _ in DEFAULT_CORE_CONFIG:
            if prop is None:
                continue
            info[attr] = getattr(location_info, prop) or default

        if location_info.latitude and location_info.longitude:
            info[CONF_ELEVATION] = loc_util.elevation(
                location_info.latitude, location_info.longitude)

    # Writing files with YAML does not create the most human readable results
    # So we're hard coding a YAML template.
    try:
        with open(config_path, 'wt') as config_file:
            config_file.write("homeassistant:\n")

            for attr, _, _, description in DEFAULT_CORE_CONFIG:
                if info[attr] is None:
                    continue
                elif description:
                    config_file.write("  # {}\n".format(description))
                config_file.write("  {}: {}\n".format(attr, info[attr]))

            config_file.write(DEFAULT_CONFIG)

        with open(secret_path, 'wt') as secret_file:
            secret_file.write(DEFAULT_SECRETS)

        # Record the running version so upgrade logic can compare later.
        with open(version_path, 'wt') as version_file:
            version_file.write(__version__)

        # Empty include files referenced by the default configuration.
        with open(group_yaml_path, 'wt'):
            pass

        with open(automation_yaml_path, 'wt') as fil:
            fil.write('[]')

        with open(script_yaml_path, 'wt'):
            pass

        with open(customize_yaml_path, 'wt'):
            pass

        return config_path

    except IOError:
        print("Unable to create default configuration file", config_path)
        return None
|
|
|
|
|
|
|
|
|
2018-07-23 08:24:39 +00:00
|
|
|
async def async_hass_config_yaml(hass: HomeAssistant) -> Dict:
    """Load YAML from a Home Assistant configuration file.

    This function allow a component inside the asyncio loop to reload its
    configuration by itself. Include package merge.

    This method is a coroutine.
    """
    def _read_and_merge() -> Dict:
        """Blocking helper: read the YAML file and merge in packages."""
        path = find_config_file(hass.config.config_dir)
        if path is None:
            raise HomeAssistantError(
                "Config file not found in: {}".format(hass.config.config_dir))
        config = load_yaml_config_file(path)
        merge_packages_config(
            hass, config, config.get(CONF_CORE, {}).get(CONF_PACKAGES, {}))
        return config

    return await hass.async_add_executor_job(_read_and_merge)
|
2016-10-27 07:16:23 +00:00
|
|
|
|
|
|
|
|
2018-07-23 08:24:39 +00:00
|
|
|
def find_config_file(config_dir: Optional[str]) -> Optional[str]:
    """Look in given directory for supported configuration files."""
    if config_dir is None:
        return None

    candidate = os.path.join(config_dir, YAML_CONFIG_FILE)
    if os.path.isfile(candidate):
        return candidate
    return None
|
2015-04-26 17:05:01 +00:00
|
|
|
|
|
|
|
|
2018-07-23 08:24:39 +00:00
|
|
|
def load_yaml_config_file(config_path: str) -> Dict[Any, Any]:
    """Parse a YAML configuration file.

    Raise HomeAssistantError when the file is missing or its top level is
    not a mapping.

    This method needs to run in an executor.
    """
    try:
        conf_dict = load_yaml(config_path)
    except FileNotFoundError as err:
        raise HomeAssistantError("Config file not found: {}".format(
            getattr(err, 'filename', err)))

    if not isinstance(conf_dict, dict):
        msg = "The configuration file {} does not contain a dictionary".format(
            os.path.basename(config_path))
        _LOGGER.error(msg)
        raise HomeAssistantError(msg)

    # Replace empty/None values with empty dicts, in place, so the loaded
    # mapping object (and any attached metadata) is preserved.
    for key in conf_dict:
        if not conf_dict[key]:
            conf_dict[key] = {}
    return conf_dict
|
2016-06-27 16:02:45 +00:00
|
|
|
|
|
|
|
|
2018-07-23 08:24:39 +00:00
|
|
|
def process_ha_config_upgrade(hass: HomeAssistant) -> None:
    """Upgrade configuration if necessary.

    Compares the version stored in the config directory with the running
    version and performs one-time migration steps when they differ.

    This method needs to run in an executor.
    """
    version_path = hass.config.path(VERSION_FILE)

    try:
        with open(version_path, 'rt') as inp:
            conf_version = inp.readline().strip()
    except FileNotFoundError:
        # Last version to not have this file
        conf_version = '0.7.7'

    # Nothing to do when the stored version already matches.
    if conf_version == __version__:
        return

    _LOGGER.info("Upgrading configuration directory from %s to %s",
                 conf_version, __version__)

    if LooseVersion(conf_version) < LooseVersion('0.50'):
        # 0.50 introduced persistent deps dir.
        lib_path = hass.config.path('deps')
        if os.path.isdir(lib_path):
            shutil.rmtree(lib_path)

    # Persist the new version so these steps run only once per upgrade.
    with open(version_path, 'wt') as outp:
        outp.write(__version__)

    _LOGGER.info("Migrating old system configuration files to new locations")
    for oldf, newf in FILE_MIGRATION:
        if os.path.isfile(hass.config.path(oldf)):
            _LOGGER.info("Migrating %s to %s", oldf, newf)
            os.rename(hass.config.path(oldf), hass.config.path(newf))
|
|
|
|
|
2016-06-27 16:02:45 +00:00
|
|
|
|
2017-03-01 04:33:19 +00:00
|
|
|
@callback
def async_log_exception(ex: vol.Invalid, domain: str, config: Dict,
                        hass: HomeAssistant) -> None:
    """Log a configuration validation error and notify the frontend.

    This method must be run in the event loop.
    """
    message = _format_config_error(ex, domain, config)
    if hass is not None:
        async_notify_setup_error(hass, domain, True)
    _LOGGER.error(message)
|
|
|
|
|
|
|
|
|
|
|
|
@callback
def _format_config_error(ex: vol.Invalid, domain: str, config: Dict) -> str:
    """Build a human readable message for a config validation error.

    This method must be run in the event loop.
    """
    parts = ["Invalid config for [{}]: ".format(domain)]

    if 'extra keys not allowed' in ex.error_message:
        parts.append(
            '[{option}] is an invalid option for [{domain}]. '
            'Check: {domain}->{path}.'.format(
                option=ex.path[-1], domain=domain,
                path='->'.join(str(m) for m in ex.path)))
    else:
        parts.append('{}.'.format(humanize_error(config, ex)))

    # Point at the file/line the offending section was loaded from, when
    # the YAML loader attached that metadata.
    domain_config = config.get(domain, config)
    parts.append(" (See {}, line {}). ".format(
        getattr(domain_config, '__config_file__', '?'),
        getattr(domain_config, '__line__', '?')))

    if domain != CONF_CORE:
        parts.append('Please check the docs at '
                     'https://home-assistant.io/components/{}/'.format(domain))

    return ''.join(parts)
|
2017-03-01 04:33:19 +00:00
|
|
|
|
|
|
|
|
2018-07-23 08:24:39 +00:00
|
|
|
async def async_process_ha_core_config(
        hass: HomeAssistant, config: Dict,
        has_api_password: bool = False,
        has_trusted_networks: bool = False) -> None:
    """Process the [homeassistant] section from the configuration.

    Validates the section, sets up auth (first call only), applies
    location/unit/customize settings to hass.config and auto-detects any
    missing location values.

    This method is a coroutine.
    """
    config = CORE_CONFIG_SCHEMA(config)

    # Only load auth during startup.
    if not hasattr(hass, 'auth'):
        auth_conf = config.get(CONF_AUTH_PROVIDERS)

        if auth_conf is None:
            # Default provider set; legacy/trusted providers are added
            # only when the corresponding http options are configured.
            auth_conf = [
                {'type': 'homeassistant'}
            ]
            if has_api_password:
                auth_conf.append({'type': 'legacy_api_password'})
            if has_trusted_networks:
                auth_conf.append({'type': 'trusted_networks'})

        # TOTP is the default mfa module when none are configured.
        mfa_conf = config.get(CONF_AUTH_MFA_MODULES, [
            {'type': 'totp', 'id': 'totp', 'name': 'Authenticator app'},
        ])

        setattr(hass, 'auth', await auth.auth_manager_from_config(
            hass,
            auth_conf,
            mfa_conf))

    hac = hass.config

    def set_time_zone(time_zone_str: Optional[str]) -> None:
        """Help to set the time zone."""
        if time_zone_str is None:
            return

        time_zone = date_util.get_time_zone(time_zone_str)

        if time_zone:
            hac.time_zone = time_zone
            date_util.set_default_time_zone(time_zone)
        else:
            _LOGGER.error("Received invalid time zone %s", time_zone_str)

    # Copy simple location attributes straight onto hass.config.
    for key, attr in ((CONF_LATITUDE, 'latitude'),
                      (CONF_LONGITUDE, 'longitude'),
                      (CONF_NAME, 'location_name'),
                      (CONF_ELEVATION, 'elevation')):
        if key in config:
            setattr(hac, attr, config[key])

    set_time_zone(config.get(CONF_TIME_ZONE))

    # Init whitelist external dir
    hac.whitelist_external_dirs = {hass.config.path('www')}
    if CONF_WHITELIST_EXTERNAL_DIRS in config:
        hac.whitelist_external_dirs.update(
            set(config[CONF_WHITELIST_EXTERNAL_DIRS]))

    # Customize
    cust_exact = dict(config[CONF_CUSTOMIZE])
    cust_domain = dict(config[CONF_CUSTOMIZE_DOMAIN])
    cust_glob = OrderedDict(config[CONF_CUSTOMIZE_GLOB])

    # Merge customize sections contributed by packages; invalid package
    # customize is logged and skipped, not fatal.
    for name, pkg in config[CONF_PACKAGES].items():
        pkg_cust = pkg.get(CONF_CORE)

        if pkg_cust is None:
            continue

        try:
            pkg_cust = CUSTOMIZE_CONFIG_SCHEMA(pkg_cust)
        except vol.Invalid:
            _LOGGER.warning("Package %s contains invalid customize", name)
            continue

        cust_exact.update(pkg_cust[CONF_CUSTOMIZE])
        cust_domain.update(pkg_cust[CONF_CUSTOMIZE_DOMAIN])
        cust_glob.update(pkg_cust[CONF_CUSTOMIZE_GLOB])

    hass.data[DATA_CUSTOMIZE] = \
        EntityValues(cust_exact, cust_domain, cust_glob)

    if CONF_UNIT_SYSTEM in config:
        if config[CONF_UNIT_SYSTEM] == CONF_UNIT_SYSTEM_IMPERIAL:
            hac.units = IMPERIAL_SYSTEM
        else:
            hac.units = METRIC_SYSTEM
    elif CONF_TEMPERATURE_UNIT in config:
        # Deprecated option: map the old temperature unit onto a unit
        # system and warn the user.
        unit = config[CONF_TEMPERATURE_UNIT]
        if unit == TEMP_CELSIUS:
            hac.units = METRIC_SYSTEM
        else:
            hac.units = IMPERIAL_SYSTEM
        _LOGGER.warning("Found deprecated temperature unit in core "
                        "configuration expected unit system. Replace '%s: %s' "
                        "with '%s: %s'", CONF_TEMPERATURE_UNIT, unit,
                        CONF_UNIT_SYSTEM, hac.units.name)

    # Shortcut if no auto-detection necessary
    if None not in (hac.latitude, hac.longitude, hac.units,
                    hac.time_zone, hac.elevation):
        return

    discovered = []  # type: List[Tuple[str, Any]]

    # If we miss some of the needed values, auto detect them
    if None in (hac.latitude, hac.longitude, hac.units,
                hac.time_zone):
        info = await hass.async_add_executor_job(
            loc_util.detect_location_info)

        if info is None:
            _LOGGER.error("Could not detect location information")
            return

        if hac.latitude is None and hac.longitude is None:
            hac.latitude, hac.longitude = (info.latitude, info.longitude)
            discovered.append(('latitude', hac.latitude))
            discovered.append(('longitude', hac.longitude))

        if hac.units is None:
            hac.units = METRIC_SYSTEM if info.use_metric else IMPERIAL_SYSTEM
            discovered.append((CONF_UNIT_SYSTEM, hac.units.name))

        if hac.location_name is None:
            hac.location_name = info.city
            discovered.append(('name', info.city))

        if hac.time_zone is None:
            set_time_zone(info.time_zone)
            discovered.append(('time_zone', info.time_zone))

    # Elevation lookup needs coordinates, whether configured or detected.
    if hac.elevation is None and hac.latitude is not None and \
            hac.longitude is not None:
        elevation = await hass.async_add_executor_job(
            loc_util.elevation, hac.latitude, hac.longitude)
        hac.elevation = elevation
        discovered.append(('elevation', elevation))

    if discovered:
        _LOGGER.warning(
            "Incomplete core configuration. Auto detected %s",
            ", ".join('{}: {}'.format(key, val) for key, val in discovered))
|
2017-01-14 06:01:47 +00:00
|
|
|
|
|
|
|
|
2018-07-23 08:24:39 +00:00
|
|
|
def _log_pkg_error(
        package: str, component: str, config: Dict, message: str) -> None:
    """Log an error that occurred while merging a package."""
    full_message = "Package {} setup failed. Component {} {}".format(
        package, component, message)

    # Point at the package's location in the YAML when available.
    pack_config = config[CONF_CORE][CONF_PACKAGES].get(package, config)
    full_message += " (See {}:{}). ".format(
        getattr(pack_config, '__config_file__', '?'),
        getattr(pack_config, '__line__', '?'))

    _LOGGER.error(full_message)
|
|
|
|
|
|
|
|
|
2018-07-23 08:24:39 +00:00
|
|
|
def _identify_config_schema(module: ModuleType) -> \
|
|
|
|
Tuple[Optional[str], Optional[Dict]]:
|
2017-01-14 06:01:47 +00:00
|
|
|
"""Extract the schema and identify list or dict based."""
|
|
|
|
try:
|
2018-07-23 08:24:39 +00:00
|
|
|
schema = module.CONFIG_SCHEMA.schema[module.DOMAIN] # type: ignore
|
2017-01-14 06:01:47 +00:00
|
|
|
except (AttributeError, KeyError):
|
2018-07-23 08:24:39 +00:00
|
|
|
return None, None
|
2017-01-14 06:01:47 +00:00
|
|
|
t_schema = str(schema)
|
2017-04-30 17:55:03 +00:00
|
|
|
if t_schema.startswith('{'):
|
2017-01-14 06:01:47 +00:00
|
|
|
return ('dict', schema)
|
2017-04-30 07:42:19 +00:00
|
|
|
if t_schema.startswith(('[', 'All(<function ensure_list')):
|
2017-01-14 06:01:47 +00:00
|
|
|
return ('list', schema)
|
|
|
|
return '', schema
|
|
|
|
|
|
|
|
|
2018-07-23 08:24:39 +00:00
|
|
|
def _recursive_merge(
|
|
|
|
conf: Dict[str, Any], package: Dict[str, Any]) -> Union[bool, str]:
|
2018-05-25 20:41:50 +00:00
|
|
|
"""Merge package into conf, recursively."""
|
2018-07-23 08:24:39 +00:00
|
|
|
error = False # type: Union[bool, str]
|
2018-05-25 20:41:50 +00:00
|
|
|
for key, pack_conf in package.items():
|
|
|
|
if isinstance(pack_conf, dict):
|
|
|
|
if not pack_conf:
|
|
|
|
continue
|
|
|
|
conf[key] = conf.get(key, OrderedDict())
|
2018-06-16 10:55:32 +00:00
|
|
|
error = _recursive_merge(conf=conf[key], package=pack_conf)
|
2018-05-25 20:41:50 +00:00
|
|
|
|
|
|
|
elif isinstance(pack_conf, list):
|
|
|
|
if not pack_conf:
|
|
|
|
continue
|
|
|
|
conf[key] = cv.ensure_list(conf.get(key))
|
|
|
|
conf[key].extend(cv.ensure_list(pack_conf))
|
|
|
|
|
|
|
|
else:
|
|
|
|
if conf.get(key) is not None:
|
2018-06-16 10:55:32 +00:00
|
|
|
return key
|
2018-07-23 08:16:05 +00:00
|
|
|
conf[key] = pack_conf
|
2018-06-16 10:55:32 +00:00
|
|
|
return error
|
2018-05-25 20:41:50 +00:00
|
|
|
|
|
|
|
|
2018-07-23 08:24:39 +00:00
|
|
|
def merge_packages_config(hass: HomeAssistant, config: Dict, packages: Dict,
                          _log_pkg_error: Callable = _log_pkg_error) -> Dict:
    """Merge packages into the top-level configuration. Mutate config.

    packages maps a package name to a dict of component name -> component
    config. Components whose schema is list-based (or platform-based) are
    merged by list concatenation; dict-based components are merged
    recursively via _recursive_merge. Problems are reported through
    _log_pkg_error and the offending component is skipped.
    """
    # pylint: disable=too-many-nested-blocks
    PACKAGES_CONFIG_SCHEMA(packages)
    for pack_name, pack_conf in packages.items():
        for comp_name, comp_conf in pack_conf.items():
            # Core config is merged elsewhere, never via packages.
            if comp_name == CONF_CORE:
                continue
            # If component name is given with a trailing description, remove it
            # when looking for component
            domain = comp_name.split(' ')[0]
            component = get_component(hass, domain)

            if component is None:
                _log_pkg_error(pack_name, comp_name, config, "does not exist")
                continue

            # Platform-based components accept a list of platform configs.
            if hasattr(component, 'PLATFORM_SCHEMA'):
                if not comp_conf:
                    continue  # Ensure we dont add Falsy items to list
                config[comp_name] = cv.ensure_list(config.get(comp_name))
                config[comp_name].extend(cv.ensure_list(comp_conf))
                continue

            if hasattr(component, 'CONFIG_SCHEMA'):
                merge_type, _ = _identify_config_schema(component)

                if merge_type == 'list':
                    if not comp_conf:
                        continue  # Ensure we dont add Falsy items to list
                    config[comp_name] = cv.ensure_list(config.get(comp_name))
                    config[comp_name].extend(cv.ensure_list(comp_conf))
                    continue

            # Fall through: merge as a dict. Treat a bare `component:` entry
            # as an empty dict.
            if comp_conf is None:
                comp_conf = OrderedDict()

            if not isinstance(comp_conf, dict):
                _log_pkg_error(
                    pack_name, comp_name, config,
                    "cannot be merged. Expected a dict.")
                continue

            if comp_name not in config or config[comp_name] is None:
                config[comp_name] = OrderedDict()

            if not isinstance(config[comp_name], dict):
                _log_pkg_error(
                    pack_name, comp_name, config,
                    "cannot be merged. Dict expected in main config.")
                continue

            # NOTE: a second `isinstance(comp_conf, dict)` check
            # ("Dict expected in package.") was removed here — comp_conf was
            # already coerced to / validated as a dict above, so that branch
            # was unreachable dead code.
            error = _recursive_merge(conf=config[comp_name],
                                     package=comp_conf)
            if error:
                _log_pkg_error(pack_name, comp_name, config,
                               "has duplicate key '{}'".format(error))

    return config
|
2017-01-27 06:26:49 +00:00
|
|
|
|
|
|
|
|
2017-03-01 04:33:19 +00:00
|
|
|
@callback
def async_process_component_config(
        hass: HomeAssistant, config: Dict, domain: str) -> Optional[Dict]:
    """Check component configuration and return processed configuration.

    Validates `config` against the component's CONFIG_SCHEMA, or — for
    platform-based components — validates each platform entry against the
    component's PLATFORM_SCHEMA and then the platform module's own
    PLATFORM_SCHEMA.

    Returns None on error.

    This method must be run in the event loop.
    """
    component = get_component(hass, domain)

    if hasattr(component, 'CONFIG_SCHEMA'):
        # Component validates its whole config dict in one shot.
        try:
            config = component.CONFIG_SCHEMA(config)  # type: ignore
        except vol.Invalid as ex:
            async_log_exception(ex, domain, config, hass)
            return None

    elif hasattr(component, 'PLATFORM_SCHEMA'):
        platforms = []
        for p_name, p_config in config_per_platform(config, domain):
            # Validate component specific platform schema
            try:
                p_validated = component.PLATFORM_SCHEMA(  # type: ignore
                    p_config)
            except vol.Invalid as ex:
                # Log and skip this platform entry; keep processing others.
                async_log_exception(ex, domain, config, hass)
                continue

            # Not all platform components follow same pattern for platforms
            # So if p_name is None we are not going to validate platform
            # (the automation component is one of them)
            if p_name is None:
                platforms.append(p_validated)
                continue

            platform = get_platform(hass, domain, p_name)

            # Unknown platform: drop the entry silently (import errors are
            # reported by get_platform itself).
            if platform is None:
                continue

            # Validate platform specific schema
            if hasattr(platform, 'PLATFORM_SCHEMA'):
                # pylint: disable=no-member
                try:
                    # Note: validates the already component-validated config.
                    p_validated = platform.PLATFORM_SCHEMA(  # type: ignore
                        p_validated)
                except vol.Invalid as ex:
                    async_log_exception(ex, '{}.{}'.format(domain, p_name),
                                        p_validated, hass)
                    continue

            platforms.append(p_validated)

        # Create a copy of the configuration with all config for current
        # component removed and add validated config back in.
        filter_keys = extract_domain_configs(config, domain)
        config = {key: value for key, value in config.items()
                  if key not in filter_keys}
        config[domain] = platforms

    return config
|
|
|
|
|
|
|
|
|
2018-07-23 08:24:39 +00:00
|
|
|
async def async_check_ha_config_file(hass: HomeAssistant) -> Optional[str]:
    """Check if Home Assistant configuration file is valid.

    Returns None when the config is valid, otherwise a newline-separated
    string with all error messages.

    This method is a coroutine.
    """
    # Imported here to avoid a circular import at module load time.
    from homeassistant.scripts.check_config import check_ha_config_file

    # The check does blocking file I/O, so run it in the executor.
    res = await hass.async_add_executor_job(check_ha_config_file, hass)

    if res.errors:
        return '\n'.join(err.message for err in res.errors)
    return None
|
2017-03-01 04:33:19 +00:00
|
|
|
|
|
|
|
|
|
|
|
@callback
def async_notify_setup_error(
        hass: HomeAssistant, component: str,
        display_link: bool = False) -> None:
    """Print a persistent notification.

    Records the failed component (and whether to link to its docs) in
    hass.data and re-renders a single persistent notification listing
    every component that failed so far.

    This method must be run in the event loop.
    """
    # Imported here to avoid a circular import at module load time.
    from homeassistant.components import persistent_notification

    # Accumulated failures live in hass.data across calls.
    errors = hass.data.setdefault(DATA_PERSISTENT_ERRORS, {})

    # Once a component has a docs link recorded, keep it.
    errors[component] = errors.get(component) or display_link

    lines = []
    for name, link in errors.items():
        if link:
            entry = HA_COMPONENT_URL.format(name.replace('_', '-'), name)
        else:
            entry = name
        lines.append(' - {}\n'.format(entry))

    message = 'The following components and platforms could not be set up:\n\n'
    message += ''.join(lines)
    message += '\nPlease check your config.'

    persistent_notification.async_create(
        hass, message, 'Invalid config', 'invalid_config')
|