"""Module to help with parsing and generating configuration files."""
from __future__ import annotations

from collections import OrderedDict
from collections.abc import Callable, Sequence
from contextlib import suppress
from dataclasses import dataclass
from enum import StrEnum
from functools import reduce
import logging
import operator
import os
from pathlib import Path
import re
import shutil
from types import ModuleType
from typing import TYPE_CHECKING, Any
from urllib.parse import urlparse

from awesomeversion import AwesomeVersion
import voluptuous as vol
from voluptuous.humanize import MAX_VALIDATION_ERROR_ITEM_LENGTH
from yaml.error import MarkedYAMLError

from . import auth
from .auth import mfa_modules as auth_mfa_modules, providers as auth_providers
from .const import (
    ATTR_ASSUMED_STATE,
    ATTR_FRIENDLY_NAME,
    ATTR_HIDDEN,
    CONF_ALLOWLIST_EXTERNAL_DIRS,
    CONF_ALLOWLIST_EXTERNAL_URLS,
    CONF_AUTH_MFA_MODULES,
    CONF_AUTH_PROVIDERS,
    CONF_COUNTRY,
    CONF_CURRENCY,
    CONF_CUSTOMIZE,
    CONF_CUSTOMIZE_DOMAIN,
    CONF_CUSTOMIZE_GLOB,
    CONF_ELEVATION,
    CONF_EXTERNAL_URL,
    CONF_ID,
    CONF_INTERNAL_URL,
    CONF_LANGUAGE,
    CONF_LATITUDE,
    CONF_LEGACY_TEMPLATES,
    CONF_LONGITUDE,
    CONF_MEDIA_DIRS,
    CONF_NAME,
    CONF_PACKAGES,
    CONF_TEMPERATURE_UNIT,
    CONF_TIME_ZONE,
    CONF_TYPE,
    CONF_UNIT_SYSTEM,
    LEGACY_CONF_WHITELIST_EXTERNAL_DIRS,
    __version__,
)
from .core import DOMAIN as CONF_CORE, ConfigSource, HomeAssistant, callback
from .exceptions import ConfigValidationError, HomeAssistantError
from .generated.currencies import HISTORIC_CURRENCIES
from .helpers import (
    config_per_platform,
    config_validation as cv,
    extract_domain_configs,
    issue_registry as ir,
)
from .helpers.entity_values import EntityValues
from .helpers.typing import ConfigType
from .loader import ComponentProtocol, Integration, IntegrationNotFound
from .requirements import RequirementsNotFound, async_get_integration_with_requirements
from .util.package import is_docker_env
from .util.unit_system import get_unit_system, validate_unit_system
from .util.yaml import SECRET_YAML, Secrets, load_yaml

_LOGGER = logging.getLogger(__name__)

RE_YAML_ERROR = re.compile(r"homeassistant\.util\.yaml")
RE_ASCII = re.compile(r"\033\[[^m]*m")
YAML_CONFIG_FILE = "configuration.yaml"
VERSION_FILE = ".HA_VERSION"
CONFIG_DIR_NAME = ".homeassistant"
DATA_CUSTOMIZE = "hass_customize"

AUTOMATION_CONFIG_PATH = "automations.yaml"
SCRIPT_CONFIG_PATH = "scripts.yaml"
SCENE_CONFIG_PATH = "scenes.yaml"

LOAD_EXCEPTIONS = (ImportError, FileNotFoundError)
INTEGRATION_LOAD_EXCEPTIONS = (IntegrationNotFound, RequirementsNotFound)

SAFE_MODE_FILENAME = "safe-mode"

DEFAULT_CONFIG = f"""
# Loads default set of integrations. Do not remove.
default_config:

# Load frontend themes from the themes folder
frontend:
  themes: !include_dir_merge_named themes

automation: !include {AUTOMATION_CONFIG_PATH}
script: !include {SCRIPT_CONFIG_PATH}
scene: !include {SCENE_CONFIG_PATH}
"""
DEFAULT_SECRETS = """
# Use this file to store secrets like usernames and passwords.
# Learn more at https://www.home-assistant.io/docs/configuration/secrets/
some_password: welcome
"""
TTS_PRE_92 = """
tts:
  - platform: google
"""
TTS_92 = """
tts:
  - platform: google_translate
    service_name: google_say
"""


class ConfigErrorTranslationKey(StrEnum):
    """Config error translation keys for config errors."""

    # translation keys with a generated config related message text
    CONFIG_VALIDATION_ERR = "config_validation_err"
    PLATFORM_CONFIG_VALIDATION_ERR = "platform_config_validation_err"

    # translation keys with a general static message text
    COMPONENT_IMPORT_ERR = "component_import_err"
    CONFIG_PLATFORM_IMPORT_ERR = "config_platform_import_err"
    CONFIG_VALIDATOR_UNKNOWN_ERR = "config_validator_unknown_err"
    CONFIG_SCHEMA_UNKNOWN_ERR = "config_schema_unknown_err"
    PLATFORM_VALIDATOR_UNKNOWN_ERR = "platform_validator_unknown_err"
    PLATFORM_COMPONENT_LOAD_ERR = "platform_component_load_err"
    PLATFORM_COMPONENT_LOAD_EXC = "platform_component_load_exc"
    PLATFORM_SCHEMA_VALIDATOR_ERR = "platform_schema_validator_err"

    # translation key in case multiple errors occurred
    INTEGRATION_CONFIG_ERROR = "integration_config_error"


@dataclass
class ConfigExceptionInfo:
    """Configuration exception info class."""

    exception: Exception
    translation_key: ConfigErrorTranslationKey
    platform_name: str
    config: ConfigType
    integration_link: str | None


@dataclass
class IntegrationConfigInfo:
    """Configuration for an integration and exception information."""

    config: ConfigType | None
    exception_info_list: list[ConfigExceptionInfo]


def _no_duplicate_auth_provider(
    configs: Sequence[dict[str, Any]]
) -> Sequence[dict[str, Any]]:
    """No duplicate auth provider config allowed in a list.

    Each type of auth provider can only have one config without an optional id.
    A unique id is required if the same type of auth provider is used multiple
    times.
    """
    config_keys: set[tuple[str, str | None]] = set()
    for config in configs:
        key = (config[CONF_TYPE], config.get(CONF_ID))
        if key in config_keys:
            raise vol.Invalid(
                f"Duplicate auth provider {config[CONF_TYPE]} found. "
                "Please add unique IDs "
                "if you want to have the same auth provider twice"
            )
        config_keys.add(key)
    return configs
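

# Illustrative example (entries invented for documentation): a list such as
#   [{"type": "homeassistant"}, {"type": "homeassistant"}]
# fails validation because both entries share a type and neither has an id,
# while giving each entry a unique "id" key makes the same list valid.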


def _no_duplicate_auth_mfa_module(
    configs: Sequence[dict[str, Any]]
) -> Sequence[dict[str, Any]]:
    """No duplicate auth mfa module item allowed in a list.

    Each type of mfa module can only have one config without an optional id.
    A globally unique id is required if the same type of mfa module is used
    multiple times.
    Note: this is different from the auth provider check, where an id only
    needs to be unique per provider type.
    """
    config_keys: set[str] = set()
    for config in configs:
        key = config.get(CONF_ID, config[CONF_TYPE])
        if key in config_keys:
            raise vol.Invalid(
                f"Duplicate mfa module {config[CONF_TYPE]} found. "
                "Please add unique IDs "
                "if you want to have the same mfa module twice"
            )
        config_keys.add(key)
    return configs


def _filter_bad_internal_external_urls(conf: dict) -> dict:
    """Filter internal/external URL with a path."""
    for key in CONF_INTERNAL_URL, CONF_EXTERNAL_URL:
        if key in conf and urlparse(conf[key]).path not in ("", "/"):
            # We warn but do not fix, because if this was incorrectly configured,
            # adjusting this value might impact security.
            _LOGGER.warning(
                "Invalid %s set. It's not allowed to have a path (/bla)", key
            )

    return conf
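

# Illustrative example (hypothetical values): {"external_url": "https://example.com/ha"}
# triggers the warning above because the URL carries the path "/ha", whereas
# "https://example.com" and "https://example.com/" pass through silently.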


PACKAGES_CONFIG_SCHEMA = cv.schema_with_slug_keys(  # Package names are slugs
    vol.Schema({cv.string: vol.Any(dict, list, None)})  # Component config
)
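
# Illustrative example (hypothetical package): the schema accepts a mapping of
# slug-named packages to per-component configs, e.g.
#   {"my_package": {"sun": None, "light": [{"platform": "demo"}]}}
# where each component value may be a dict, a list, or None.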

CUSTOMIZE_DICT_SCHEMA = vol.Schema(
    {
        vol.Optional(ATTR_FRIENDLY_NAME): cv.string,
        vol.Optional(ATTR_HIDDEN): cv.boolean,
        vol.Optional(ATTR_ASSUMED_STATE): cv.boolean,
    },
    extra=vol.ALLOW_EXTRA,
)

CUSTOMIZE_CONFIG_SCHEMA = vol.Schema(
    {
        vol.Optional(CONF_CUSTOMIZE, default={}): vol.Schema(
            {cv.entity_id: CUSTOMIZE_DICT_SCHEMA}
        ),
        vol.Optional(CONF_CUSTOMIZE_DOMAIN, default={}): vol.Schema(
            {cv.string: CUSTOMIZE_DICT_SCHEMA}
        ),
        vol.Optional(CONF_CUSTOMIZE_GLOB, default={}): vol.Schema(
            {cv.string: CUSTOMIZE_DICT_SCHEMA}
        ),
    }
)


def _raise_issue_if_historic_currency(hass: HomeAssistant, currency: str) -> None:
    if currency not in HISTORIC_CURRENCIES:
        ir.async_delete_issue(hass, "homeassistant", "historic_currency")
        return

    ir.async_create_issue(
        hass,
        "homeassistant",
        "historic_currency",
        is_fixable=False,
        learn_more_url="homeassistant://config/general",
        severity=ir.IssueSeverity.WARNING,
        translation_key="historic_currency",
        translation_placeholders={"currency": currency},
    )


def _raise_issue_if_no_country(hass: HomeAssistant, country: str | None) -> None:
    if country is not None:
        ir.async_delete_issue(hass, "homeassistant", "country_not_configured")
        return

    ir.async_create_issue(
        hass,
        "homeassistant",
        "country_not_configured",
        is_fixable=False,
        learn_more_url="homeassistant://config/general",
        severity=ir.IssueSeverity.WARNING,
        translation_key="country_not_configured",
    )


def _validate_currency(data: Any) -> Any:
    try:
        return cv.currency(data)
    except vol.InInvalid:
        with suppress(vol.InInvalid):
            currency = cv.historic_currency(data)
            return currency
        raise
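

# Illustrative example (codes shown are assumptions about the currency tables):
# _validate_currency("EUR") returns "EUR" via cv.currency; a historic code such
# as "NLG" falls through to cv.historic_currency; a string matching neither
# table re-raises the original vol.InInvalid from cv.currency.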


CORE_CONFIG_SCHEMA = vol.All(
    CUSTOMIZE_CONFIG_SCHEMA.extend(
        {
            CONF_NAME: vol.Coerce(str),
            CONF_LATITUDE: cv.latitude,
            CONF_LONGITUDE: cv.longitude,
            CONF_ELEVATION: vol.Coerce(int),
            vol.Remove(CONF_TEMPERATURE_UNIT): cv.temperature_unit,
            CONF_UNIT_SYSTEM: validate_unit_system,
            CONF_TIME_ZONE: cv.time_zone,
            vol.Optional(CONF_INTERNAL_URL): cv.url,
            vol.Optional(CONF_EXTERNAL_URL): cv.url,
            vol.Optional(CONF_ALLOWLIST_EXTERNAL_DIRS): vol.All(
                cv.ensure_list, [vol.IsDir()]
            ),
            vol.Optional(LEGACY_CONF_WHITELIST_EXTERNAL_DIRS): vol.All(
                cv.ensure_list, [vol.IsDir()]
            ),
            vol.Optional(CONF_ALLOWLIST_EXTERNAL_URLS): vol.All(
                cv.ensure_list, [cv.url]
            ),
            vol.Optional(CONF_PACKAGES, default={}): PACKAGES_CONFIG_SCHEMA,
            vol.Optional(CONF_AUTH_PROVIDERS): vol.All(
                cv.ensure_list,
                [
                    auth_providers.AUTH_PROVIDER_SCHEMA.extend(
                        {
                            CONF_TYPE: vol.NotIn(
                                ["insecure_example"],
                                (
                                    "The insecure_example auth provider"
                                    " is for testing only."
                                ),
                            )
                        }
                    )
                ],
                _no_duplicate_auth_provider,
            ),
            vol.Optional(CONF_AUTH_MFA_MODULES): vol.All(
                cv.ensure_list,
                [
                    auth_mfa_modules.MULTI_FACTOR_AUTH_MODULE_SCHEMA.extend(
                        {
                            CONF_TYPE: vol.NotIn(
                                ["insecure_example"],
                                "The insecure_example mfa module is for testing only.",
                            )
                        }
                    )
                ],
                _no_duplicate_auth_mfa_module,
            ),
            vol.Optional(CONF_MEDIA_DIRS): cv.schema_with_slug_keys(vol.IsDir()),
            vol.Optional(CONF_LEGACY_TEMPLATES): cv.boolean,
            vol.Optional(CONF_CURRENCY): _validate_currency,
            vol.Optional(CONF_COUNTRY): cv.country,
            vol.Optional(CONF_LANGUAGE): cv.language,
        }
    ),
    _filter_bad_internal_external_urls,
)


def get_default_config_dir() -> str:
    """Put together the default configuration directory based on the OS."""
    data_dir = os.path.expanduser("~")
    return os.path.join(data_dir, CONFIG_DIR_NAME)
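

# Illustrative example: on a Linux host with HOME=/home/user this resolves to
# "/home/user/.homeassistant"; the path is only computed here, not created.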


async def async_ensure_config_exists(hass: HomeAssistant) -> bool:
    """Ensure a configuration file exists in the given configuration directory.

    Create a default one if needed.
    Return a boolean indicating whether the configuration dir is ready to go.
    """
    config_path = hass.config.path(YAML_CONFIG_FILE)

    if os.path.isfile(config_path):
        return True

    print(  # noqa: T201
        "Unable to find configuration. Creating default one in", hass.config.config_dir
    )
    return await async_create_default_config(hass)


async def async_create_default_config(hass: HomeAssistant) -> bool:
    """Create a default configuration file in the given configuration directory.

    Return whether creation was successful.
    """
    return await hass.async_add_executor_job(
        _write_default_config, hass.config.config_dir
    )


def _write_default_config(config_dir: str) -> bool:
    """Write the default config."""
    config_path = os.path.join(config_dir, YAML_CONFIG_FILE)
    secret_path = os.path.join(config_dir, SECRET_YAML)
    version_path = os.path.join(config_dir, VERSION_FILE)
    automation_yaml_path = os.path.join(config_dir, AUTOMATION_CONFIG_PATH)
    script_yaml_path = os.path.join(config_dir, SCRIPT_CONFIG_PATH)
    scene_yaml_path = os.path.join(config_dir, SCENE_CONFIG_PATH)

    # Writing files with YAML does not create the most human readable results
    # So we're hard coding a YAML template.
    try:
        with open(config_path, "w", encoding="utf8") as config_file:
            config_file.write(DEFAULT_CONFIG)

        if not os.path.isfile(secret_path):
            with open(secret_path, "w", encoding="utf8") as secret_file:
                secret_file.write(DEFAULT_SECRETS)

        with open(version_path, "w", encoding="utf8") as version_file:
            version_file.write(__version__)

        if not os.path.isfile(automation_yaml_path):
            with open(automation_yaml_path, "w", encoding="utf8") as automation_file:
                automation_file.write("[]")

        if not os.path.isfile(script_yaml_path):
            with open(script_yaml_path, "w", encoding="utf8"):
                pass

        if not os.path.isfile(scene_yaml_path):
            with open(scene_yaml_path, "w", encoding="utf8"):
                pass

        return True

    except OSError:
        print(  # noqa: T201
            f"Unable to create default configuration file {config_path}"
        )
        return False


async def async_hass_config_yaml(hass: HomeAssistant) -> dict:
    """Load YAML from a Home Assistant configuration file.

    This function allows a component inside the asyncio loop to reload its
    configuration by itself. Packages are merged into the result.
    """
    secrets = Secrets(Path(hass.config.config_dir))

    # Not using async_add_executor_job because this is an internal method.
    try:
        config = await hass.loop.run_in_executor(
            None,
            load_yaml_config_file,
            hass.config.path(YAML_CONFIG_FILE),
            secrets,
        )
    except HomeAssistantError as exc:
        if not (base_exc := exc.__cause__) or not isinstance(base_exc, MarkedYAMLError):
            raise

        # Rewrite path to offending YAML file to be relative to the hass config dir
        if base_exc.context_mark and base_exc.context_mark.name:
            base_exc.context_mark.name = _relpath(hass, base_exc.context_mark.name)
        if base_exc.problem_mark and base_exc.problem_mark.name:
            base_exc.problem_mark.name = _relpath(hass, base_exc.problem_mark.name)
        raise

    core_config = config.get(CONF_CORE, {})
    await merge_packages_config(hass, config, core_config.get(CONF_PACKAGES, {}))
    return config


def load_yaml_config_file(
    config_path: str, secrets: Secrets | None = None
) -> dict[Any, Any]:
    """Parse a YAML configuration file.

    Raises FileNotFoundError or HomeAssistantError.

    This method needs to run in an executor.
    """
    conf_dict = load_yaml(config_path, secrets)

    if not isinstance(conf_dict, dict):
        msg = (
            f"The configuration file {os.path.basename(config_path)} "
            "does not contain a dictionary"
        )
        _LOGGER.error(msg)
        raise HomeAssistantError(msg)

    # Convert values to dictionaries if they are None
    for key, value in conf_dict.items():
        conf_dict[key] = value or {}
    return conf_dict
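

# Illustrative example: a file containing just the line "automation:" parses to
# {"automation": None}; the normalization loop above turns it into
# {"automation": {}} so callers can treat every top-level section as a mapping.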


def process_ha_config_upgrade(hass: HomeAssistant) -> None:
    """Upgrade configuration if necessary.

    This method needs to run in an executor.
    """
    version_path = hass.config.path(VERSION_FILE)

    try:
        with open(version_path, encoding="utf8") as inp:
            conf_version = inp.readline().strip()
    except FileNotFoundError:
        # Last version to not have this file
        conf_version = "0.7.7"

    if conf_version == __version__:
        return

    _LOGGER.info(
        "Upgrading configuration directory from %s to %s", conf_version, __version__
    )

    version_obj = AwesomeVersion(conf_version)

    if version_obj < AwesomeVersion("0.50"):
        # 0.50 introduced persistent deps dir.
        lib_path = hass.config.path("deps")
        if os.path.isdir(lib_path):
            shutil.rmtree(lib_path)

    if version_obj < AwesomeVersion("0.92"):
        # 0.92 moved google/tts.py to google_translate/tts.py
        config_path = hass.config.path(YAML_CONFIG_FILE)

        with open(config_path, encoding="utf-8") as config_file:
            config_raw = config_file.read()

        if TTS_PRE_92 in config_raw:
            _LOGGER.info("Migrating google tts to google_translate tts")
            config_raw = config_raw.replace(TTS_PRE_92, TTS_92)
            try:
                with open(config_path, "w", encoding="utf-8") as config_file:
                    config_file.write(config_raw)
            except OSError:
                _LOGGER.exception("Migrating to google_translate tts failed")

    if version_obj < AwesomeVersion("0.94") and is_docker_env():
        # In 0.94 we no longer install packages inside the deps folder when
        # running inside a Docker container.
        lib_path = hass.config.path("deps")
        if os.path.isdir(lib_path):
            shutil.rmtree(lib_path)

    with open(version_path, "w", encoding="utf8") as outp:
        outp.write(__version__)


@callback
def async_log_schema_error(
    exc: vol.Invalid,
    domain: str,
    config: dict,
    hass: HomeAssistant,
    link: str | None = None,
) -> None:
    """Log a schema validation error."""
    message = format_schema_error(hass, exc, domain, config, link)
    _LOGGER.error(message)


@callback
def async_log_config_validator_error(
    exc: vol.Invalid | HomeAssistantError,
    domain: str,
    config: dict,
    hass: HomeAssistant,
    link: str | None = None,
) -> None:
    """Log an error from a custom config validator."""
    if isinstance(exc, vol.Invalid):
        async_log_schema_error(exc, domain, config, hass, link)
        return

    message = format_homeassistant_error(hass, exc, domain, config, link)
    _LOGGER.error(message, exc_info=exc)


def _get_annotation(item: Any) -> tuple[str, int | str] | None:
    if not hasattr(item, "__config_file__"):
        return None

    return (getattr(item, "__config_file__"), getattr(item, "__line__", "?"))


def _get_by_path(data: dict | list, items: list[str | int]) -> Any:
    """Access a nested object in root by item sequence.

    Returns None in case of error.
    """
    try:
        return reduce(operator.getitem, items, data)  # type: ignore[arg-type]
    except (KeyError, IndexError, TypeError):
        return None


def find_annotation(
    config: dict | list, path: list[str | int]
) -> tuple[str, int | str] | None:
    """Find file/line annotation for a node in config pointed to by path.

    If the node pointed to is a dict or list, prefer the annotation for the key in
    the key/value pair defining the dict or list.
    If the node is not annotated, try the parent node.
    """

    def find_annotation_for_key(
        item: dict, path: list[str | int], tail: str | int
    ) -> tuple[str, int | str] | None:
        for key in item:
            if key == tail:
                if annotation := _get_annotation(key):
                    return annotation
                break
        return None

    def find_annotation_rec(
        config: dict | list, path: list[str | int], tail: str | int | None
    ) -> tuple[str, int | str] | None:
        item = _get_by_path(config, path)
        if isinstance(item, dict) and tail is not None:
            if tail_annotation := find_annotation_for_key(item, path, tail):
                return tail_annotation

        if (
            isinstance(item, (dict, list))
            and path
            and (
                key_annotation := find_annotation_for_key(
                    _get_by_path(config, path[:-1]), path[:-1], path[-1]
                )
            )
        ):
            return key_annotation

        if annotation := _get_annotation(item):
            return annotation

        if not path:
            return None

        tail = path.pop()
        if annotation := find_annotation_rec(config, path, tail):
            return annotation
        return _get_annotation(item)

    return find_annotation_rec(config, list(path), None)
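

# Illustrative example (file name and line are invented): for a config loaded
# from YAML whose nodes carry __config_file__/__line__ attributes,
# find_annotation(config, ["automation", 0]) might return
# ("configuration.yaml", 12), the annotation of the first automation entry,
# falling back to the enclosing key or a parent node when the entry itself is
# not annotated.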


def _relpath(hass: HomeAssistant, path: str) -> str:
    """Return path relative to the Home Assistant config dir."""
    return os.path.relpath(path, hass.config.config_dir)


def stringify_invalid(
    hass: HomeAssistant,
    exc: vol.Invalid,
    domain: str,
    config: dict,
    link: str | None,
    max_sub_error_length: int,
) -> str:
    """Stringify voluptuous.Invalid.

    This is an alternative to the custom __str__ implemented in
    voluptuous.error.Invalid. The modifications are:
    - Format the path delimited by -> instead of @data[]
    - Prefix with domain, file and line of the error
    - Suffix with a link to the documentation
    - Give a more user-friendly output for unknown options
    - Give a more user-friendly output for missing options
    """
    message_prefix = f"Invalid config for '{domain}'"
    if domain != CONF_CORE and link:
        message_suffix = f", please check the docs at {link}"
    else:
        message_suffix = ""
    if annotation := find_annotation(config, exc.path):
        message_prefix += f" at {_relpath(hass, annotation[0])}, line {annotation[1]}"
    path = "->".join(str(m) for m in exc.path)
    if exc.error_message == "extra keys not allowed":
        return (
            f"{message_prefix}: '{exc.path[-1]}' is an invalid option for '{domain}', "
            f"check: {path}{message_suffix}"
        )
    if exc.error_message == "required key not provided":
        return (
            f"{message_prefix}: required key '{exc.path[-1]}' not provided"
            f"{message_suffix}"
        )
    # This function is an alternative to the stringification done by
    # vol.Invalid.__str__, so we need to call Exception.__str__ here
    # instead of str(exc)
    output = Exception.__str__(exc)
    if error_type := exc.error_type:
        output += " for " + error_type
    offending_item_summary = repr(_get_by_path(config, exc.path))
    if len(offending_item_summary) > max_sub_error_length:
        offending_item_summary = (
            f"{offending_item_summary[: max_sub_error_length - 3]}..."
        )
    return (
        f"{message_prefix}: {output} '{path}', got {offending_item_summary}"
        f"{message_suffix}"
    )
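

# Illustrative example (option name and location are invented): an unknown
# option produces something like
#   Invalid config for 'light' at configuration.yaml, line 12: 'brightnes' is
#   an invalid option for 'light', check: 0->brightnes
# and ", please check the docs at <link>" is appended when a docs link is known.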


def humanize_error(
    hass: HomeAssistant,
    validation_error: vol.Invalid,
    domain: str,
    config: dict,
    link: str | None,
    max_sub_error_length: int = MAX_VALIDATION_ERROR_ITEM_LENGTH,
) -> str:
    """Provide a more helpful + complete validation error message.

    This is a modified version of voluptuous.error.Invalid.__str__,
    the modifications make some minor changes to the formatting.
    """
    if isinstance(validation_error, vol.MultipleInvalid):
        return "\n".join(
            sorted(
                humanize_error(
                    hass, sub_error, domain, config, link, max_sub_error_length
                )
                for sub_error in validation_error.errors
            )
        )
    return stringify_invalid(
        hass, validation_error, domain, config, link, max_sub_error_length
    )


@callback
def format_homeassistant_error(
    hass: HomeAssistant,
    exc: HomeAssistantError,
    domain: str,
    config: dict,
    link: str | None = None,
) -> str:
    """Format HomeAssistantError thrown by a custom config validator."""
    message_prefix = f"Invalid config for '{domain}'"
    # HomeAssistantError raised by custom config validator has no path to the
    # offending configuration key, use the domain key as path instead.
    if annotation := find_annotation(config, [domain]):
        message_prefix += f" at {_relpath(hass, annotation[0])}, line {annotation[1]}"
    message = f"{message_prefix}: {str(exc) or repr(exc)}"
    if domain != CONF_CORE and link:
        message += f", please check the docs at {link}"

    return message


@callback
def format_schema_error(
    hass: HomeAssistant,
    exc: vol.Invalid,
    domain: str,
    config: dict,
    link: str | None = None,
) -> str:
    """Format configuration validation error."""
    return humanize_error(hass, exc, domain, config, link)


async def async_process_ha_core_config(hass: HomeAssistant, config: dict) -> None:
    """Process the [homeassistant] section from the configuration.

    This method is a coroutine.
    """
    config = CORE_CONFIG_SCHEMA(config)

    # Only load auth during startup.
    if not hasattr(hass, "auth"):
        if (auth_conf := config.get(CONF_AUTH_PROVIDERS)) is None:
            auth_conf = [{"type": "homeassistant"}]

        mfa_conf = config.get(
            CONF_AUTH_MFA_MODULES,
            [{"type": "totp", "id": "totp", "name": "Authenticator app"}],
        )

        setattr(
            hass, "auth", await auth.auth_manager_from_config(hass, auth_conf, mfa_conf)
        )

    await hass.config.async_load()

    hac = hass.config

    if any(
        k in config
        for k in (
            CONF_LATITUDE,
            CONF_LONGITUDE,
            CONF_NAME,
            CONF_ELEVATION,
            CONF_TIME_ZONE,
            CONF_UNIT_SYSTEM,
            CONF_EXTERNAL_URL,
            CONF_INTERNAL_URL,
            CONF_CURRENCY,
            CONF_COUNTRY,
            CONF_LANGUAGE,
        )
    ):
        hac.config_source = ConfigSource.YAML

    for key, attr in (
        (CONF_LATITUDE, "latitude"),
        (CONF_LONGITUDE, "longitude"),
        (CONF_NAME, "location_name"),
        (CONF_ELEVATION, "elevation"),
        (CONF_INTERNAL_URL, "internal_url"),
        (CONF_EXTERNAL_URL, "external_url"),
        (CONF_MEDIA_DIRS, "media_dirs"),
        (CONF_LEGACY_TEMPLATES, "legacy_templates"),
        (CONF_CURRENCY, "currency"),
        (CONF_COUNTRY, "country"),
        (CONF_LANGUAGE, "language"),
    ):
        if key in config:
            setattr(hac, attr, config[key])

    _raise_issue_if_historic_currency(hass, hass.config.currency)
    _raise_issue_if_no_country(hass, hass.config.country)

    if CONF_TIME_ZONE in config:
        hac.set_time_zone(config[CONF_TIME_ZONE])

    if CONF_MEDIA_DIRS not in config:
        if is_docker_env():
            hac.media_dirs = {"local": "/media"}
        else:
            hac.media_dirs = {"local": hass.config.path("media")}

    # Init whitelist external dir
    hac.allowlist_external_dirs = {hass.config.path("www"), *hac.media_dirs.values()}
    if CONF_ALLOWLIST_EXTERNAL_DIRS in config:
        hac.allowlist_external_dirs.update(set(config[CONF_ALLOWLIST_EXTERNAL_DIRS]))

    elif LEGACY_CONF_WHITELIST_EXTERNAL_DIRS in config:
        _LOGGER.warning(
            "Key %s has been replaced with %s. Please update your config",
            LEGACY_CONF_WHITELIST_EXTERNAL_DIRS,
            CONF_ALLOWLIST_EXTERNAL_DIRS,
        )
        hac.allowlist_external_dirs.update(
            set(config[LEGACY_CONF_WHITELIST_EXTERNAL_DIRS])
        )

    # Init whitelist external URL list - make sure to add / to every URL that doesn't
    # already have it so that we can properly test "path ownership"
    if CONF_ALLOWLIST_EXTERNAL_URLS in config:
        hac.allowlist_external_urls.update(
            url if url.endswith("/") else f"{url}/"
            for url in config[CONF_ALLOWLIST_EXTERNAL_URLS]
        )

    # Customize
    cust_exact = dict(config[CONF_CUSTOMIZE])
    cust_domain = dict(config[CONF_CUSTOMIZE_DOMAIN])
    cust_glob = OrderedDict(config[CONF_CUSTOMIZE_GLOB])

    for name, pkg in config[CONF_PACKAGES].items():
        if (pkg_cust := pkg.get(CONF_CORE)) is None:
            continue

        try:
            pkg_cust = CUSTOMIZE_CONFIG_SCHEMA(pkg_cust)
        except vol.Invalid:
            _LOGGER.warning("Package %s contains invalid customize", name)
            continue

        cust_exact.update(pkg_cust[CONF_CUSTOMIZE])
        cust_domain.update(pkg_cust[CONF_CUSTOMIZE_DOMAIN])
        cust_glob.update(pkg_cust[CONF_CUSTOMIZE_GLOB])

    hass.data[DATA_CUSTOMIZE] = EntityValues(cust_exact, cust_domain, cust_glob)

    if CONF_UNIT_SYSTEM in config:
        hac.units = get_unit_system(config[CONF_UNIT_SYSTEM])


def _log_pkg_error(
    hass: HomeAssistant, package: str, component: str, config: dict, message: str
) -> None:
    """Log an error while merging packages."""
    message_prefix = f"Setup of package '{package}'"
    if annotation := find_annotation(config, [CONF_CORE, CONF_PACKAGES, package]):
        message_prefix += f" at {_relpath(hass, annotation[0])}, line {annotation[1]}"

    _LOGGER.error("%s failed: %s", message_prefix, message)


def _identify_config_schema(module: ComponentProtocol) -> str | None:
    """Extract the schema and identify list or dict based."""
    if not isinstance(module.CONFIG_SCHEMA, vol.Schema):
        return None

    schema = module.CONFIG_SCHEMA.schema

    if isinstance(schema, vol.All):
        for subschema in schema.validators:
            if isinstance(subschema, dict):
                schema = subschema
                break
        else:
            return None

    try:
        key = next(k for k in schema if k == module.DOMAIN)
    except (TypeError, AttributeError, StopIteration):
        return None
    except Exception:  # pylint: disable=broad-except
        _LOGGER.exception("Unexpected error identifying config schema")
        return None

    if hasattr(key, "default") and not isinstance(
        key.default, vol.schema_builder.Undefined
    ):
        default_value = module.CONFIG_SCHEMA({module.DOMAIN: key.default()})[
            module.DOMAIN
        ]

        if isinstance(default_value, dict):
            return "dict"

        if isinstance(default_value, list):
            return "list"

        return None

    domain_schema = schema[key]

    t_schema = str(domain_schema)
    if t_schema.startswith("{") or "schema_with_slug_keys" in t_schema:
        return "dict"
    if t_schema.startswith(("[", "All(<function ensure_list")):
        return "list"
    return None
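

# Illustrative example (schema is hypothetical): a component declaring
#   CONFIG_SCHEMA = vol.Schema({vol.Optional(DOMAIN, default=[]): [ITEM_SCHEMA]})
# is identified as "list" because the key's default evaluates to a list, so
# package merging concatenates its entries instead of dict-merging them.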


def _recursive_merge(conf: dict[str, Any], package: dict[str, Any]) -> str | None:
    """Merge package into conf, recursively."""
    duplicate_key: str | None = None
    for key, pack_conf in package.items():
        if isinstance(pack_conf, dict):
            if not pack_conf:
                continue
            conf[key] = conf.get(key, OrderedDict())
            duplicate_key = _recursive_merge(conf=conf[key], package=pack_conf)

        elif isinstance(pack_conf, list):
            conf[key] = cv.remove_falsy(
                cv.ensure_list(conf.get(key)) + cv.ensure_list(pack_conf)
            )

        else:
            if conf.get(key) is not None:
                return key
            conf[key] = pack_conf
    return duplicate_key
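

# Illustrative example (made-up dicts): merging package
#   {"a": {"x": 1}, "b": [1]}
# into conf
#   {"a": {"y": 2}, "b": [2]}
# yields {"a": {"y": 2, "x": 1}, "b": [2, 1]}; a package that sets a scalar key
# already holding a different non-None value returns that key as a duplicate
# instead of overwriting it.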


async def merge_packages_config(
    hass: HomeAssistant,
    config: dict,
    packages: dict[str, Any],
    _log_pkg_error: Callable[
        [HomeAssistant, str, str, dict, str], None
    ] = _log_pkg_error,
) -> dict:
    """Merge packages into the top-level configuration. Mutate config."""
    PACKAGES_CONFIG_SCHEMA(packages)
    for pack_name, pack_conf in packages.items():
        for comp_name, comp_conf in pack_conf.items():
            if comp_name == CONF_CORE:
                continue
            # If component name is given with a trailing description, remove it
            # when looking for component
            domain = comp_name.partition(" ")[0]

            try:
                integration = await async_get_integration_with_requirements(
                    hass, domain
                )
                component = integration.get_component()
            except LOAD_EXCEPTIONS as exc:
                _log_pkg_error(
                    hass,
                    pack_name,
                    comp_name,
                    config,
                    f"Integration {comp_name} caused error: {str(exc)}",
                )
                continue
            except INTEGRATION_LOAD_EXCEPTIONS as exc:
                _log_pkg_error(hass, pack_name, comp_name, config, str(exc))
                continue

            try:
                config_platform: ModuleType | None = integration.get_platform("config")
                # Test if config platform has a config validator
                if not hasattr(config_platform, "async_validate_config"):
                    config_platform = None
            except ImportError:
                config_platform = None

            merge_list = False

            # If integration has a custom config validator, it needs to provide a hint.
            if config_platform is not None:
                merge_list = config_platform.PACKAGE_MERGE_HINT == "list"

            if not merge_list:
                merge_list = hasattr(component, "PLATFORM_SCHEMA")

            if not merge_list and hasattr(component, "CONFIG_SCHEMA"):
                merge_list = _identify_config_schema(component) == "list"

            if merge_list:
                config[comp_name] = cv.remove_falsy(
                    cv.ensure_list(config.get(comp_name)) + cv.ensure_list(comp_conf)
                )
                continue

            if comp_conf is None:
                comp_conf = OrderedDict()

            if not isinstance(comp_conf, dict):
                _log_pkg_error(
                    hass,
                    pack_name,
                    comp_name,
                    config,
                    f"integration '{comp_name}' cannot be merged, expected a dict",
                )
                continue

            if comp_name not in config or config[comp_name] is None:
                config[comp_name] = OrderedDict()

            if not isinstance(config[comp_name], dict):
                _log_pkg_error(
                    hass,
                    pack_name,
                    comp_name,
                    config,
                    (
                        f"integration '{comp_name}' cannot be merged, dict expected in "
                        "main config"
                    ),
                )
                continue

            duplicate_key = _recursive_merge(conf=config[comp_name], package=comp_conf)
            if duplicate_key:
                _log_pkg_error(
                    hass,
                    pack_name,
                    comp_name,
                    config,
                    f"integration '{comp_name}' has duplicate key '{duplicate_key}'",
                )

    return config
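

# Illustrative example (YAML is hypothetical): given
#   homeassistant:
#     packages:
#       my_rooms:
#         light:
#           - platform: demo
# the demo light entry is merged into the top-level "light" list as if it had
# been declared there directly, since a component exposing PLATFORM_SCHEMA is
# list-merged by the logic above.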


@callback
def _get_log_message_and_stack_print_pref(
    hass: HomeAssistant, domain: str, platform_exception: ConfigExceptionInfo
) -> tuple[str | None, bool, dict[str, str]]:
    """Get message to log and print stack trace preference."""
    exception = platform_exception.exception
    platform_name = platform_exception.platform_name
    platform_config = platform_exception.config
    link = platform_exception.integration_link

    placeholders: dict[str, str] = {"domain": domain, "error": str(exception)}

    log_message_mapping: dict[ConfigErrorTranslationKey, tuple[str, bool]] = {
        ConfigErrorTranslationKey.COMPONENT_IMPORT_ERR: (
            f"Unable to import {domain}: {exception}",
            False,
        ),
        ConfigErrorTranslationKey.CONFIG_PLATFORM_IMPORT_ERR: (
            f"Error importing config platform {domain}: {exception}",
            False,
        ),
        ConfigErrorTranslationKey.CONFIG_VALIDATOR_UNKNOWN_ERR: (
            f"Unknown error calling {domain} config validator",
            True,
        ),
        ConfigErrorTranslationKey.CONFIG_SCHEMA_UNKNOWN_ERR: (
            f"Unknown error calling {domain} CONFIG_SCHEMA",
            True,
        ),
        ConfigErrorTranslationKey.PLATFORM_VALIDATOR_UNKNOWN_ERR: (
            f"Unknown error validating {platform_name} platform config with {domain} "
            "component platform schema",
            True,
        ),
        ConfigErrorTranslationKey.PLATFORM_COMPONENT_LOAD_ERR: (
            f"Platform error: {domain} - {exception}",
            False,
        ),
        ConfigErrorTranslationKey.PLATFORM_COMPONENT_LOAD_EXC: (
            f"Platform error: {domain} - {exception}",
            True,
        ),
        ConfigErrorTranslationKey.PLATFORM_SCHEMA_VALIDATOR_ERR: (
            f"Unknown error validating config for {platform_name} platform "
            f"for {domain} component with PLATFORM_SCHEMA",
            True,
        ),
    }
    log_message_show_stack_trace = log_message_mapping.get(
        platform_exception.translation_key
    )
    if log_message_show_stack_trace is None:
        # If no pre defined log_message is set, we generate an enriched error
        # message, so we can notify about it during setup
        show_stack_trace = False
        if isinstance(exception, vol.Invalid):
            log_message = format_schema_error(
                hass, exception, platform_name, platform_config, link
            )
            if annotation := find_annotation(platform_config, exception.path):
                placeholders["config_file"], line = annotation
                placeholders["line"] = str(line)
        else:
            if TYPE_CHECKING:
                assert isinstance(exception, HomeAssistantError)
            log_message = format_homeassistant_error(
                hass, exception, platform_name, platform_config, link
            )
            if annotation := find_annotation(platform_config, [platform_name]):
                placeholders["config_file"], line = annotation
                placeholders["line"] = str(line)
            show_stack_trace = True
        return (log_message, show_stack_trace, placeholders)

    assert isinstance(log_message_show_stack_trace, tuple)

    return (*log_message_show_stack_trace, placeholders)


async def async_process_component_and_handle_errors(
    hass: HomeAssistant,
    config: ConfigType,
    integration: Integration,
    raise_on_failure: bool = False,
) -> ConfigType | None:
    """Process component configuration and handle errors.

    In case of errors:
    - Print the error messages to the log.
    - Raise a ConfigValidationError if raise_on_failure is set.

    Returns the integration config or `None`.
    """
    integration_config_info = await async_process_component_config(
        hass, config, integration
    )
    return async_handle_component_errors(
        hass, integration_config_info, integration, raise_on_failure
    )
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
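# A usage sketch (assuming `hass`, `config` and `integration` exist in the
# caller's context; not part of this module):
#
#     conf = await async_process_component_and_handle_errors(
#         hass, config, integration, raise_on_failure=False
#     )
#     if conf is None:
#         ...  # errors were logged; the integration config is unusable
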
@callback
def async_handle_component_errors(
    hass: HomeAssistant,
    integration_config_info: IntegrationConfigInfo,
    integration: Integration,
    raise_on_failure: bool = False,
) -> ConfigType | None:
    """Handle component configuration errors from async_process_component_config.

    In case of errors:
    - Print the error messages to the log.
    - Raise a ConfigValidationError if raise_on_failure is set.

    Returns the integration config or `None`.
    """
    if not (config_exception_info := integration_config_info.exception_info_list):
        return integration_config_info.config

    platform_exception: ConfigExceptionInfo
    domain = integration.domain
    placeholders: dict[str, str]
    for platform_exception in config_exception_info:
        exception = platform_exception.exception
        (
            log_message,
            show_stack_trace,
            placeholders,
        ) = _get_log_message_and_stack_print_pref(hass, domain, platform_exception)
        _LOGGER.error(
            log_message,
            exc_info=exception if show_stack_trace else None,
        )

    if not raise_on_failure:
        return integration_config_info.config

    if len(config_exception_info) == 1:
        translation_key = platform_exception.translation_key
    else:
        translation_key = ConfigErrorTranslationKey.INTEGRATION_CONFIG_ERROR
        errors = str(len(config_exception_info))
        log_message = (
            f"Failed to process component config for integration {domain} "
            f"due to multiple errors ({errors}); check the logs for more information."
        )
        placeholders = {
            "domain": domain,
            "errors": errors,
        }
    raise ConfigValidationError(
        str(log_message),
        [platform_exception.exception for platform_exception in config_exception_info],
        translation_domain="homeassistant",
        translation_key=translation_key,
        translation_placeholders=placeholders,
    )


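# With raise_on_failure=True the caller handles the failure itself; a minimal
# sketch (assuming the same caller context as above):
#
#     try:
#         conf = await async_process_component_and_handle_errors(
#             hass, config, integration, raise_on_failure=True
#         )
#     except ConfigValidationError as err:
#         ...  # inspect err; it carries the translation info passed above
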
async def async_process_component_config(  # noqa: C901
    hass: HomeAssistant,
    config: ConfigType,
    integration: Integration,
) -> IntegrationConfigInfo:
    """Check component configuration.

    Returns processed configuration and exception information.

    This method must be run in the event loop.
    """
    domain = integration.domain
    integration_docs = integration.documentation
    config_exceptions: list[ConfigExceptionInfo] = []

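    # As used below, a ConfigExceptionInfo bundles the raised exception, a
    # translation key, the domain (or platform) name, the offending config,
    # and the integration documentation link, in that order.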
    try:
        component = integration.get_component()
    except LOAD_EXCEPTIONS as exc:
        exc_info = ConfigExceptionInfo(
            exc,
            ConfigErrorTranslationKey.COMPONENT_IMPORT_ERR,
            domain,
            config,
            integration_docs,
        )
        config_exceptions.append(exc_info)
        return IntegrationConfigInfo(None, config_exceptions)

    # Check if the integration has a custom config validator
    config_validator = None
    try:
        config_validator = integration.get_platform("config")
    except ImportError as err:
        # Only ignore the ImportError when the config platform itself is
        # missing. If the config platform exists but one of its imports is
        # broken, the failure must still be reported.
        if err.name != f"{integration.pkg_path}.config":
            exc_info = ConfigExceptionInfo(
                err,
                ConfigErrorTranslationKey.CONFIG_PLATFORM_IMPORT_ERR,
                domain,
                config,
                integration_docs,
            )
            config_exceptions.append(exc_info)
            return IntegrationConfigInfo(None, config_exceptions)

    if config_validator is not None and hasattr(
        config_validator, "async_validate_config"
    ):
        try:
            return IntegrationConfigInfo(
                await config_validator.async_validate_config(hass, config), []
            )
        except (vol.Invalid, HomeAssistantError) as exc:
            exc_info = ConfigExceptionInfo(
                exc,
                ConfigErrorTranslationKey.CONFIG_VALIDATION_ERR,
                domain,
                config,
                integration_docs,
            )
            config_exceptions.append(exc_info)
            return IntegrationConfigInfo(None, config_exceptions)
        except Exception as exc:  # pylint: disable=broad-except
            exc_info = ConfigExceptionInfo(
                exc,
                ConfigErrorTranslationKey.CONFIG_VALIDATOR_UNKNOWN_ERR,
                domain,
                config,
                integration_docs,
            )
            config_exceptions.append(exc_info)
            return IntegrationConfigInfo(None, config_exceptions)

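    # A custom validator is a `config` platform exposing the async hook used
    # above; a minimal sketch of such a platform module (illustrative, not
    # part of this module):
    #
    #     async def async_validate_config(
    #         hass: HomeAssistant, config: ConfigType
    #     ) -> ConfigType:
    #         """Validate and return the processed config."""
    #         ...
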
    # No custom config validator, proceed with schema validation
    if hasattr(component, "CONFIG_SCHEMA"):
        try:
            return IntegrationConfigInfo(component.CONFIG_SCHEMA(config), [])
        except vol.Invalid as exc:
            exc_info = ConfigExceptionInfo(
                exc,
                ConfigErrorTranslationKey.CONFIG_VALIDATION_ERR,
                domain,
                config,
                integration_docs,
            )
            config_exceptions.append(exc_info)
            return IntegrationConfigInfo(None, config_exceptions)
        except Exception as exc:  # pylint: disable=broad-except
            exc_info = ConfigExceptionInfo(
                exc,
                ConfigErrorTranslationKey.CONFIG_SCHEMA_UNKNOWN_ERR,
                domain,
                config,
                integration_docs,
            )
            config_exceptions.append(exc_info)
            return IntegrationConfigInfo(None, config_exceptions)

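    # CONFIG_SCHEMA above receives the full config mapping, so schemas are
    # typically keyed by the domain; a minimal sketch (hypothetical
    # "my_domain", illustrative only):
    #
    #     CONFIG_SCHEMA = vol.Schema(
    #         {"my_domain": vol.Schema({vol.Required("host"): cv.string})},
    #         extra=vol.ALLOW_EXTRA,
    #     )
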
    component_platform_schema = getattr(
        component, "PLATFORM_SCHEMA_BASE", getattr(component, "PLATFORM_SCHEMA", None)
    )
    if component_platform_schema is None:
        return IntegrationConfigInfo(config, [])

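    # Note the getattr chain above: PLATFORM_SCHEMA_BASE takes precedence
    # over PLATFORM_SCHEMA. A component commonly builds either one on the
    # shared helper schema, e.g. (illustrative sketch):
    #
    #     PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA.extend(
    #         {vol.Optional("name"): cv.string}
    #     )
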
    platforms: list[ConfigType] = []
    for p_name, p_config in config_per_platform(config, domain):
        # Validate component specific platform schema
        platform_name = f"{domain}.{p_name}"
        try:
            p_validated = component_platform_schema(p_config)
        except vol.Invalid as exc:
            exc_info = ConfigExceptionInfo(
                exc,
                ConfigErrorTranslationKey.PLATFORM_CONFIG_VALIDATION_ERR,
                domain,
                p_config,
                integration_docs,
            )
            config_exceptions.append(exc_info)
            continue
        except Exception as exc:  # pylint: disable=broad-except
            exc_info = ConfigExceptionInfo(
                exc,
                ConfigErrorTranslationKey.PLATFORM_SCHEMA_VALIDATOR_ERR,
                str(p_name),
                config,
                integration_docs,
            )
            config_exceptions.append(exc_info)
            continue

        # Not all platform components follow the same pattern for platforms,
        # so if p_name is None we do not validate the platform as such
        # (the automation component is one example).
        if p_name is None:
            platforms.append(p_validated)
            continue

        try:
            p_integration = await async_get_integration_with_requirements(hass, p_name)
        except (RequirementsNotFound, IntegrationNotFound) as exc:
            exc_info = ConfigExceptionInfo(
                exc,
                ConfigErrorTranslationKey.PLATFORM_COMPONENT_LOAD_ERR,
                platform_name,
                p_config,
                integration_docs,
            )
            config_exceptions.append(exc_info)
            continue

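        # get_platform(domain) below imports the platform module of the
        # resolved integration, e.g. homeassistant/components/hue/light.py
        # for domain "light" and platform "hue" (illustrative example).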
        try:
            platform = p_integration.get_platform(domain)
        except LOAD_EXCEPTIONS as exc:
            exc_info = ConfigExceptionInfo(
                exc,
                ConfigErrorTranslationKey.PLATFORM_COMPONENT_LOAD_EXC,
                platform_name,
                p_config,
                integration_docs,
            )
            config_exceptions.append(exc_info)
            continue

        # Validate platform specific schema
        if hasattr(platform, "PLATFORM_SCHEMA"):
            try:
                p_validated = platform.PLATFORM_SCHEMA(p_config)
            except vol.Invalid as exc:
                exc_info = ConfigExceptionInfo(
                    exc,
                    ConfigErrorTranslationKey.PLATFORM_CONFIG_VALIDATION_ERR,
                    platform_name,
                    p_config,
                    p_integration.documentation,
                )
                config_exceptions.append(exc_info)
                continue
            except Exception as exc:  # pylint: disable=broad-except
                exc_info = ConfigExceptionInfo(
                    exc,
                    ConfigErrorTranslationKey.PLATFORM_SCHEMA_VALIDATOR_ERR,
                    p_name,
                    p_config,
                    p_integration.documentation,
                )
                config_exceptions.append(exc_info)
                continue

        platforms.append(p_validated)

    # Create a copy of the configuration with all config for the current
    # component removed and add the validated config back in.
    config = config_without_domain(config, domain)
    config[domain] = platforms

    return IntegrationConfigInfo(config, config_exceptions)


@callback
def config_without_domain(config: ConfigType, domain: str) -> ConfigType:
    """Return a config with all configuration for a domain removed."""
    filter_keys = extract_domain_configs(config, domain)
    return {key: value for key, value in config.items() if key not in filter_keys}


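# An illustrative example: extract_domain_configs also matches numbered keys
# such as "light 2", so
#
#     config_without_domain(
#         {"light": [...], "light 2": [...], "sensor": [...]}, "light"
#     )
#
# would return {"sensor": [...]}.
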
async def async_check_ha_config_file(hass: HomeAssistant) -> str | None:
    """Check if the Home Assistant configuration file is valid.

    This method is a coroutine.
    """
    # pylint: disable-next=import-outside-toplevel
    from .helpers import check_config

    res = await check_config.async_check_ha_config_file(hass)

    if not res.errors:
        return None
    return res.error_str


def safe_mode_enabled(config_dir: str) -> bool:
    """Return if safe mode is enabled.

    If safe mode is enabled, the safe mode file will be removed.
    """
    safe_mode_path = os.path.join(config_dir, SAFE_MODE_FILENAME)
    safe_mode = os.path.exists(safe_mode_path)
    if safe_mode:
        os.remove(safe_mode_path)
    return safe_mode


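# Round trip between the two safe mode helpers (illustrative):
#
#     await async_enable_safe_mode(hass)
#     safe_mode_enabled(hass.config.config_dir)  # True; removes the marker
#     safe_mode_enabled(hass.config.config_dir)  # False
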
async def async_enable_safe_mode(hass: HomeAssistant) -> None:
    """Enable safe mode."""

    def _enable_safe_mode() -> None:
        Path(hass.config.path(SAFE_MODE_FILENAME)).touch()

    await hass.async_add_executor_job(_enable_safe_mode)