2018-03-09 03:34:24 +00:00
|
|
|
"""Script to check the configuration file."""
|
2018-02-25 11:38:46 +00:00
|
|
|
|
2016-08-23 04:42:05 +00:00
|
|
|
import argparse
|
2016-09-21 04:26:40 +00:00
|
|
|
import logging
|
2016-08-23 04:42:05 +00:00
|
|
|
import os
|
2018-03-09 03:34:24 +00:00
|
|
|
from collections import OrderedDict, namedtuple
|
2016-08-23 04:42:05 +00:00
|
|
|
from glob import glob
|
2018-09-11 09:21:48 +00:00
|
|
|
from typing import Dict, List, Sequence
|
2016-09-21 04:26:40 +00:00
|
|
|
from unittest.mock import patch
|
|
|
|
|
2018-03-09 03:34:24 +00:00
|
|
|
import attr
|
|
|
|
import voluptuous as vol
|
|
|
|
|
|
|
|
from homeassistant import bootstrap, core, loader
|
|
|
|
from homeassistant.config import (
|
|
|
|
get_default_config_dir, CONF_CORE, CORE_CONFIG_SCHEMA,
|
|
|
|
CONF_PACKAGES, merge_packages_config, _format_config_error,
|
2018-05-01 18:57:30 +00:00
|
|
|
find_config_file, load_yaml_config_file,
|
|
|
|
extract_domain_configs, config_per_platform)
|
2018-07-18 09:54:27 +00:00
|
|
|
from homeassistant.util import yaml
|
2016-09-21 04:26:40 +00:00
|
|
|
from homeassistant.exceptions import HomeAssistantError
|
2016-08-23 04:42:05 +00:00
|
|
|
|
2018-12-17 22:54:07 +00:00
|
|
|
# Script dependency; colorlog provides colored terminal output.
# color() below degrades gracefully to black-and-white when it is missing.
REQUIREMENTS = ('colorlog==4.0.2',)

_LOGGER = logging.getLogger(__name__)

# pylint: disable=protected-access
# Functions patched during check() so loading the YAML configuration records
# file names / secret lookups instead of hitting the real loaders.
# Key -> (dotted path to patch, original function kept as the side effect
# fallback).  A '*' suffix means "same mock, additional patch location".
MOCKS = {
    'load': ("homeassistant.util.yaml.load_yaml", yaml.load_yaml),
    'load*': ("homeassistant.config.load_yaml", yaml.load_yaml),
    'secrets': ("homeassistant.util.yaml.secret_yaml", yaml.secret_yaml),
}

# Functions patched to no-ops while checking, so the secret cache survives
# long enough for check() to copy it into the result.
SILENCE = (
    'homeassistant.scripts.check_config.yaml.clear_secret_cache',
)

# Active unittest.mock patchers, keyed by MOCKS/SILENCE name.
# NOTE(review): module-level mutable state populated by check().
PATCHES = {}

# Color used for section headings in the report output.
C_HEAD = 'bold'
# Pseudo-domain under which errors not tied to a component are collected.
ERROR_STR = 'General Errors'
|
|
|
|
|
|
|
|
|
|
|
|
def color(the_color, *args, reset=None):
    """Return *args* joined by spaces and wrapped in terminal color codes.

    With no positional arguments only the escape sequence for
    ``the_color`` is returned (``reset`` must then be None).  When
    colorlog is not installed the plain, uncolored text is returned.
    """
    try:
        from colorlog.escape_codes import escape_codes, parse_colors
    except ImportError:
        # We should fallback to black-and-white if colorlog is not installed
        return ' '.join(args)

    try:
        prefix = parse_colors(the_color)
        if not args:
            assert reset is None, "Cannot reset if nothing being printed"
            return prefix
        suffix = escape_codes[reset or 'reset']
        return prefix + ' '.join(args) + suffix
    except KeyError as k:
        # parse_colors/escape_codes raise KeyError for unknown color names.
        raise ValueError(
            "Invalid color {} in {}".format(str(k), the_color))
|
2016-08-23 04:42:05 +00:00
|
|
|
|
|
|
|
|
|
|
|
def run(script_args: List) -> int:
    """Handle ensure config commandline script.

    Prints a colored report of the configuration check and returns the
    number of failed domains (0 means the configuration is valid).
    NOTE(review): ``script_args`` is never read; arguments are re-parsed
    from ``sys.argv`` via ``parse_known_args()``.
    """
    parser = argparse.ArgumentParser(
        description="Check Home Assistant configuration.")
    parser.add_argument(
        '--script', choices=['check_config'])
    parser.add_argument(
        '-c', '--config',
        default=get_default_config_dir(),
        help="Directory that contains the Home Assistant configuration")
    parser.add_argument(
        '-i', '--info', nargs='?',
        default=None, const='all',
        help="Show a portion of the config")
    parser.add_argument(
        '-f', '--files',
        action='store_true',
        help="Show used configuration files")
    parser.add_argument(
        '-s', '--secrets',
        action='store_true',
        help="Show secret information")

    # Unknown arguments are reported but do not abort the check.
    args, unknown = parser.parse_known_args()
    if unknown:
        print(color('red', "Unknown arguments:", ', '.join(unknown)))

    config_dir = os.path.join(os.getcwd(), args.config)

    print(color('bold', "Testing configuration at", config_dir))

    # Run the actual check with the loaders mocked out.
    res = check(config_dir, args.secrets)

    domain_info = []
    if args.info:
        domain_info = args.info.split(',')

    if args.files:
        print(color(C_HEAD, 'yaml files'), '(used /',
              color('red', 'not used') + ')')
        # YAML files under deps/ belong to installed requirements, not to
        # the user's configuration, so exclude them from the listing.
        deps = os.path.join(config_dir, 'deps')
        yaml_files = [f for f in glob(os.path.join(config_dir, '**/*.yaml'),
                                      recursive=True)
                      if not f.startswith(deps)]

        for yfn in sorted(yaml_files):
            # Files never touched by the (mocked) loader show up in red.
            the_color = '' if yfn in res['yaml_files'] else 'red'
            print(color(the_color, '-', yfn))

    if res['except']:
        print(color('bold_white', 'Failed config'))
        for domain, config in res['except'].items():
            # Also dump the failed domain below under "Successful config".
            domain_info.append(domain)
            print(' ', color('bold_red', domain + ':'),
                  color('red', '', reset='red'))
            dump_dict(config, reset='red')
            print(color('reset'))

    if domain_info:
        if 'all' in domain_info:
            print(color('bold_white', 'Successful config (all)'))
            for domain, config in res['components'].items():
                print(' ', color(C_HEAD, domain + ':'))
                dump_dict(config)
        else:
            print(color('bold_white', 'Successful config (partial)'))
            for domain in domain_info:
                if domain == ERROR_STR:
                    continue
                print(' ', color(C_HEAD, domain + ':'))
                dump_dict(res['components'].get(domain, None))

    if args.secrets:
        # Map secret key -> file it was defined in, flagging duplicates.
        flatsecret = {}

        for sfn, sdict in res['secret_cache'].items():
            sss = []
            for skey in sdict:
                if skey in flatsecret:
                    _LOGGER.error('Duplicated secrets in files %s and %s',
                                  flatsecret[skey], sfn)
                flatsecret[skey] = sfn
                # Secrets actually referenced by the config appear in green.
                sss.append(color('green', skey) if skey in res['secrets']
                           else skey)
            print(color(C_HEAD, 'Secrets from', sfn + ':'), ', '.join(sss))

        print(color(C_HEAD, 'Used Secrets:'))
        for skey, sval in res['secrets'].items():
            if sval is None:
                print(' -', skey + ':', color('red', "not found"))
                continue
            # Secrets not found in any file fall back to the keyring.
            print(' -', skey + ':', sval, color('cyan', '[from:', flatsecret
                                                .get(skey, 'keyring') + ']'))

    return len(res['except'])
|
2016-08-23 04:42:05 +00:00
|
|
|
|
|
|
|
|
2018-03-09 03:34:24 +00:00
|
|
|
def check(config_dir, secrets=False):
    """Perform a check by mocking hass load functions.

    Patches the YAML/secret loaders (see MOCKS/SILENCE), runs
    check_ha_config_file() and returns a dict with the loaded yaml files,
    secrets, per-domain errors and the validated components.
    """
    logging.getLogger('homeassistant.loader').setLevel(logging.CRITICAL)
    res = {
        'yaml_files': OrderedDict(),  # yaml_files loaded
        'secrets': OrderedDict(),  # secret cache and secrets loaded
        'except': OrderedDict(),  # exceptions raised (with config)
        'components': None,  # successful components
        'secret_cache': None,
    }

    # pylint: disable=possibly-unused-variable
    def mock_load(filename):
        """Mock hass.util.load_yaml to save config file names."""
        res['yaml_files'][filename] = True
        # Delegate to the original loader saved in MOCKS.
        return MOCKS['load'][1](filename)

    # pylint: disable=possibly-unused-variable
    def mock_secrets(ldr, node):
        """Mock _get_secrets."""
        try:
            val = MOCKS['secrets'][1](ldr, node)
        except HomeAssistantError:
            # Missing secrets are recorded as None and reported later.
            val = None
        res['secrets'][node.value] = val
        return val

    # Patches to skip functions
    for sil in SILENCE:
        PATCHES[sil] = patch(sil)

    # Patches with local mock functions
    for key, val in MOCKS.items():
        if not secrets and key == 'secrets':
            continue
        # The * in the key is removed to find the mock_function (side_effect)
        # This allows us to use one side_effect to patch multiple locations
        mock_function = locals()['mock_' + key.replace('*', '')]
        PATCHES[key] = patch(val[0], side_effect=mock_function)

    # Start all patches
    for pat in PATCHES.values():
        pat.start()

    if secrets:
        # Ensure !secrets point to the patched function
        # (yaml.secret_yaml resolves to the mock while the patch is active).
        yaml.yaml.SafeLoader.add_constructor('!secret', yaml.secret_yaml)

    try:
        hass = core.HomeAssistant()
        hass.config.config_dir = config_dir

        res['components'] = check_ha_config_file(hass)
        # Copy the cache before it is cleared in the finally block.
        res['secret_cache'] = OrderedDict(yaml.__SECRET_CACHE)

        # Group errors by domain for the report.
        for err in res['components'].errors:
            domain = err.domain or ERROR_STR
            res['except'].setdefault(domain, []).append(err.message)
            if err.config:
                res['except'].setdefault(domain, []).append(err.config)

    except Exception as err:  # pylint: disable=broad-except
        # Placeholder message; the full traceback is what matters here.
        _LOGGER.exception("BURB")
        print(color('red', 'Fatal error while loading config:'), str(err))
        res['except'].setdefault(ERROR_STR, []).append(str(err))
    finally:
        # Stop all patches
        for pat in PATCHES.values():
            pat.stop()
        if secrets:
            # Ensure !secrets point to the original function
            # (same call as above, but the patch is stopped now).
            yaml.yaml.SafeLoader.add_constructor('!secret', yaml.secret_yaml)
        bootstrap.clear_secret_cache()

    return res
|
2016-08-23 04:42:05 +00:00
|
|
|
|
|
|
|
|
2017-01-27 05:42:14 +00:00
|
|
|
def line_info(obj, **kwargs):
    """Return a colored "[source file:line]" tag for a config node.

    Nodes loaded from YAML carry ``__config_file__`` / ``__line__``
    attributes; anything else yields a plain '?'.
    """
    if not hasattr(obj, '__config_file__'):
        return '?'
    source = "[source {}:{}]".format(
        obj.__config_file__, obj.__line__ or '?')
    return color('cyan', source, **kwargs)
|
|
|
|
|
|
|
|
|
2016-09-23 07:10:19 +00:00
|
|
|
def dump_dict(layer, indent_count=3, listi=False, **kwargs):
    """Display a dict.

    A friendly version of print yaml.yaml.dump(config).
    """
    def _order_key(item):
        """Sort key placing 'platform' first, the rest alphabetically."""
        lowered = str(item[0]).lower()
        if lowered == 'platform':
            return '0'
        return lowered

    prefix = ' ' * indent_count
    if listi or isinstance(layer, list):
        # Mark a list item with a YAML-style dash.
        prefix = prefix[:-1] + '-'

    if isinstance(layer, Dict):
        for key, value in sorted(layer.items(), key=_order_key):
            label = str(key) + ':'
            if isinstance(value, (dict, list)):
                print(prefix, label, line_info(value, **kwargs))
                dump_dict(value, indent_count + 2)
            else:
                print(prefix, label, value)
            # Only the first key of a list item keeps the dash marker.
            prefix = ' ' * indent_count

    if isinstance(layer, Sequence):
        for item in layer:
            if isinstance(item, dict):
                dump_dict(item, indent_count + 2, True)
            else:
                print(' ', prefix, item)
|
2018-03-09 03:34:24 +00:00
|
|
|
|
|
|
|
|
2018-06-25 17:05:07 +00:00
|
|
|
# A single validation problem: a human-readable message, the domain it
# belongs to (None for errors not tied to a component, reported under
# ERROR_STR) and the offending config portion (None when not applicable).
CheckConfigError = namedtuple(
    'CheckConfigError', "message domain config")
|
|
|
|
|
|
|
|
|
|
|
|
@attr.s
class HomeAssistantConfig(OrderedDict):
    """Configuration result with errors attribute.

    Behaves as the validated configuration mapping (domain -> config)
    while also accumulating CheckConfigError entries encountered during
    validation.
    """

    # CheckConfigError tuples gathered while checking the configuration.
    errors = attr.ib(default=attr.Factory(list))

    def add_error(self, message, domain=None, config=None):
        """Add a single error; returns self so callers can chain/return."""
        self.errors.append(CheckConfigError(str(message), domain, config))
        return self
|
|
|
|
|
|
|
|
|
2018-05-01 18:57:30 +00:00
|
|
|
def check_ha_config_file(hass):
    """Check if Home Assistant configuration file is valid.

    Loads configuration.yaml from hass.config.config_dir, validates the
    core section, merges packages and runs every component's (and
    platform's) schema.  Returns a HomeAssistantConfig mapping each
    validated domain to its config, with all problems collected on its
    ``errors`` attribute.
    """
    config_dir = hass.config.config_dir
    result = HomeAssistantConfig()

    def _pack_error(package, component, config, message):
        """Handle errors from packages: _log_pkg_error."""
        message = "Package {} setup failed. Component {} {}".format(
            package, component, message)
        domain = 'homeassistant.packages.{}.{}'.format(package, component)
        # Closure over core_config, which is bound later below.
        pack_config = core_config[CONF_PACKAGES].get(package, config)
        result.add_error(message, domain, pack_config)

    def _comp_error(ex, domain, config):
        """Handle errors from components: async_log_exception."""
        result.add_error(
            _format_config_error(ex, domain, config), domain, config)

    # Load configuration.yaml
    try:
        config_path = find_config_file(config_dir)
        if not config_path:
            return result.add_error("File configuration.yaml not found.")
        config = load_yaml_config_file(config_path)
    except HomeAssistantError as err:
        return result.add_error(
            "Error loading {}: {}".format(config_path, err))
    finally:
        # Always reset the secret cache after reading the top-level file.
        yaml.clear_secret_cache()

    # Extract and validate core [homeassistant] config
    try:
        core_config = config.pop(CONF_CORE, {})
        core_config = CORE_CONFIG_SCHEMA(core_config)
        result[CONF_CORE] = core_config
    except vol.Invalid as err:
        result.add_error(err, CONF_CORE, core_config)
        # Continue the check with an empty core config.
        core_config = {}

    # Merge packages
    merge_packages_config(
        hass, config, core_config.get(CONF_PACKAGES, {}), _pack_error)
    core_config.pop(CONF_PACKAGES, None)

    # Filter out repeating config sections
    # ('light one' and 'light two' both map to the 'light' domain).
    components = set(key.split(' ')[0] for key in config.keys())

    # Process and validate config
    for domain in components:
        component = loader.get_component(hass, domain)
        if not component:
            result.add_error("Component not found: {}".format(domain))
            continue

        if hasattr(component, 'CONFIG_SCHEMA'):
            try:
                # NOTE(review): CONFIG_SCHEMA validates (and rebinds) the
                # whole config mapping, not just this domain's section.
                config = component.CONFIG_SCHEMA(config)
                result[domain] = config[domain]
            except vol.Invalid as ex:
                _comp_error(ex, domain, config)
                continue

        if (not hasattr(component, 'PLATFORM_SCHEMA') and
                not hasattr(component, 'PLATFORM_SCHEMA_BASE')):
            continue

        platforms = []
        for p_name, p_config in config_per_platform(config, domain):
            # Validate component specific platform schema
            try:
                if hasattr(component, 'PLATFORM_SCHEMA_BASE'):
                    p_validated = \
                        component.PLATFORM_SCHEMA_BASE(  # type: ignore
                            p_config)
                else:
                    p_validated = component.PLATFORM_SCHEMA(  # type: ignore
                        p_config)
            except vol.Invalid as ex:
                _comp_error(ex, domain, config)
                continue

            # Not all platform components follow same pattern for platforms
            # So if p_name is None we are not going to validate platform
            # (the automation component is one of them)
            if p_name is None:
                platforms.append(p_validated)
                continue

            platform = loader.get_platform(hass, domain, p_name)

            if platform is None:
                result.add_error(
                    "Platform not found: {}.{}".format(domain, p_name))
                continue

            # Validate platform specific schema
            if hasattr(platform, 'PLATFORM_SCHEMA'):
                try:
                    p_validated = platform.PLATFORM_SCHEMA(p_validated)
                except vol.Invalid as ex:
                    _comp_error(
                        ex, '{}.{}'.format(domain, p_name), p_validated)
                    continue

            platforms.append(p_validated)

        # Remove config for current component and add validated config back in.
        for filter_comp in extract_domain_configs(config, domain):
            del config[filter_comp]
        result[domain] = platforms

    return result
|