"""Helper to check the configuration file."""
from collections import OrderedDict, namedtuple
from typing import List

import attr
import voluptuous as vol

from homeassistant import loader, requirements
from homeassistant.core import HomeAssistant
from homeassistant.config import (
    CONF_CORE,
    CORE_CONFIG_SCHEMA,
    CONF_PACKAGES,
    merge_packages_config,
    _format_config_error,
    find_config_file,
    load_yaml_config_file,
    extract_domain_configs,
    config_per_platform,
)
import homeassistant.util.yaml.loader as yaml_loader
from homeassistant.exceptions import HomeAssistantError

# mypy: allow-incomplete-defs, allow-untyped-calls, allow-untyped-defs
# mypy: no-warn-return-any

CheckConfigError = namedtuple("CheckConfigError", "message domain config")


@attr.s
class HomeAssistantConfig(OrderedDict):
    """Configuration result with errors attribute."""

    errors = attr.ib(default=attr.Factory(list))  # type: List[CheckConfigError]

    def add_error(self, message, domain=None, config=None):
        """Add a single error."""
        self.errors.append(CheckConfigError(str(message), domain, config))
        return self

    @property
    def error_str(self) -> str:
        """Return errors as a string."""
        return "\n".join([err.message for err in self.errors])
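

# A minimal usage sketch (illustrative only; assumes a running ``hass``
# instance such as the one the check_config script sets up):
#
#     result = await async_check_ha_config_file(hass)
#     if result.errors:
#         print(result.error_str)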


async def async_check_ha_config_file(hass: HomeAssistant) -> HomeAssistantConfig:
    """Load and check if the Home Assistant configuration file is valid.

    This method is a coroutine.
    """
    config_dir = hass.config.config_dir
    result = HomeAssistantConfig()
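
    # The two closures below mirror _log_pkg_error and async_log_exception
    # from homeassistant.config, but they record problems on ``result``
    # instead of logging them; _pack_error is handed to merge_packages_config
    # further down.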

    def _pack_error(package, component, config, message):
        """Handle errors from packages: _log_pkg_error."""
        message = "Package {} setup failed. Component {} {}".format(
            package, component, message
        )
        domain = "homeassistant.packages.{}.{}".format(package, component)
        pack_config = core_config[CONF_PACKAGES].get(package, config)
        result.add_error(message, domain, pack_config)

    def _comp_error(ex, domain, config):
        """Handle errors from components: async_log_exception."""
        result.add_error(_format_config_error(ex, domain, config), domain, config)

    # Load configuration.yaml
    try:
        config_path = await hass.async_add_executor_job(find_config_file, config_dir)
        if not config_path:
            return result.add_error("File configuration.yaml not found.")
        config = await hass.async_add_executor_job(load_yaml_config_file, config_path)
    except FileNotFoundError:
        return result.add_error("File not found: {}".format(config_path))
    except HomeAssistantError as err:
        return result.add_error("Error loading {}: {}".format(config_path, err))
    finally:
        yaml_loader.clear_secret_cache()
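
    # clear_secret_cache() runs in the ``finally`` above so !secret values
    # cached from secrets.yaml are dropped even when we return early.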

    # Extract and validate core [homeassistant] config
    try:
        core_config = config.pop(CONF_CORE, {})
        core_config = CORE_CONFIG_SCHEMA(core_config)
        result[CONF_CORE] = core_config
    except vol.Invalid as err:
        result.add_error(err, CONF_CORE, core_config)
        core_config = {}

    # Merge packages
    await merge_packages_config(
        hass, config, core_config.get(CONF_PACKAGES, {}), _pack_error
    )
    core_config.pop(CONF_PACKAGES, None)

    # Filter out repeating config sections
    components = set(key.split(" ")[0] for key in config.keys())
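    # A key like "switch 2" refers to the "switch" domain; keeping only the
    # part before the space means each integration is validated exactly once.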

    # Process and validate config
    for domain in components:
        try:
            integration = await loader.async_get_integration(hass, domain)
        except loader.IntegrationNotFound:
            result.add_error("Integration not found: {}".format(domain))
            continue
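
        # Requirements are installed up front so the component can actually be
        # imported below; hass.config.skip_pip bypasses this step.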
        if (
            not hass.config.skip_pip
            and integration.requirements
            and not await requirements.async_process_requirements(
                hass, integration.domain, integration.requirements
            )
        ):
            result.add_error(
                "Unable to install all requirements: {}".format(
                    ", ".join(integration.requirements)
                )
            )
            continue

        try:
            component = integration.get_component()
        except ImportError:
            result.add_error("Component not found: {}".format(domain))
            continue
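
        # When the component defines CONFIG_SCHEMA it validates the full
        # configuration in one go; only this domain's validated section is
        # kept in the result.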
        config_schema = getattr(component, "CONFIG_SCHEMA", None)
        if config_schema is not None:
            try:
                config = config_schema(config)
                result[domain] = config[domain]
            except vol.Invalid as ex:
                _comp_error(ex, domain, config)
                continue
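
        # Platform-based components expose PLATFORM_SCHEMA_BASE (preferred) or
        # PLATFORM_SCHEMA; without either there are no platform entries to
        # validate for this domain.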
        component_platform_schema = getattr(
            component,
            "PLATFORM_SCHEMA_BASE",
            getattr(component, "PLATFORM_SCHEMA", None),
        )

        if component_platform_schema is None:
            continue
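
        # Each platform entry is validated in two passes: first against the
        # component's platform schema here, then against the platform module's
        # own PLATFORM_SCHEMA further below.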

        platforms = []
        for p_name, p_config in config_per_platform(config, domain):
            # Validate component specific platform schema
            try:
                p_validated = component_platform_schema(p_config)
            except vol.Invalid as ex:
                _comp_error(ex, domain, config)
                continue

            # Not all platform components follow the same pattern for
            # platforms (the automation component is one of them), so when
            # p_name is None the platform is not validated any further.
            if p_name is None:
                platforms.append(p_validated)
                continue

            try:
                p_integration = await loader.async_get_integration(hass, p_name)
            except loader.IntegrationNotFound:
                result.add_error(
                    "Integration {} not found when trying to verify its {} "
                    "platform.".format(p_name, domain)
                )
                continue

            if (
                not hass.config.skip_pip
                and p_integration.requirements
                and not await requirements.async_process_requirements(
                    hass, p_integration.domain, p_integration.requirements
                )
            ):
                result.add_error(
                    "Unable to install all requirements: {}".format(
                        ", ".join(p_integration.requirements)
                    )
                )
                continue

            try:
                platform = p_integration.get_platform(domain)
            except ImportError:
                result.add_error("Platform not found: {}.{}".format(domain, p_name))
                continue

            # Validate platform specific schema
            platform_schema = getattr(platform, "PLATFORM_SCHEMA", None)
            if platform_schema is not None:
                try:
                    p_validated = platform_schema(p_validated)
                except vol.Invalid as ex:
                    _comp_error(ex, "{}.{}".format(domain, p_name), p_validated)
                    continue

            platforms.append(p_validated)

        # Remove config for current component and add validated config back in.
        for filter_comp in extract_domain_configs(config, domain):
            del config[filter_comp]
        result[domain] = platforms

    return result