"""Custom loader."""
import logging
import os
import sys
import fnmatch
from collections import OrderedDict
from typing import Union, List, Dict, Iterator, overload, TypeVar

import yaml

try:
    import keyring
except ImportError:
    keyring = None

try:
    import credstash
except ImportError:
    credstash = None

from homeassistant.exceptions import HomeAssistantError

from .const import _SECRET_NAMESPACE, SECRET_YAML
from .objects import NodeListClass, NodeStrClass

# mypy: allow-untyped-calls, no-warn-return-any

_LOGGER = logging.getLogger(__name__)
__SECRET_CACHE = {}  # type: Dict[str, JSON_TYPE]

JSON_TYPE = Union[List, Dict, str]  # pylint: disable=invalid-name
DICT_T = TypeVar('DICT_T', bound=Dict)  # pylint: disable=invalid-name


def clear_secret_cache() -> None:
    """Clear the secret cache.

    Async friendly.
    """
    __SECRET_CACHE.clear()


# pylint: disable=too-many-ancestors
class SafeLineLoader(yaml.SafeLoader):
    """Loader class that keeps track of line numbers."""

    def compose_node(self, parent: yaml.nodes.Node,
                     index: int) -> yaml.nodes.Node:
        """Annotate a node with the first line it was seen."""
        last_line = self.line  # type: int
        node = super(SafeLineLoader,
                     self).compose_node(parent, index)  # type: yaml.nodes.Node
        node.__line__ = last_line + 1  # type: ignore
        return node
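
# The __line__ annotation above, together with the __config_file__ attribute
# set by _add_reference() below, lets consumers of the loaded data (for
# example, config validation) report the exact file and line a value came
# from.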


def load_yaml(fname: str) -> JSON_TYPE:
    """Load a YAML file."""
    try:
        with open(fname, encoding='utf-8') as conf_file:
            # If configuration file is empty YAML returns None
            # We convert that to an empty dict
            return yaml.load(conf_file, Loader=SafeLineLoader) or OrderedDict()
    except yaml.YAMLError as exc:
        _LOGGER.error(str(exc))
        raise HomeAssistantError(exc)
    except UnicodeDecodeError as exc:
        _LOGGER.error("Unable to read file %s: %s", fname, exc)
        raise HomeAssistantError(exc)
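
# Illustrative usage (the path and values below are made up):
#
#     conf = load_yaml('/config/automations.yaml')
#     # An empty file comes back as an empty OrderedDict; parsed mappings and
#     # sequences carry __config_file__ and __line__ attributes, e.g.:
#     conf.__config_file__   # '/config/automations.yaml'
#     conf.__line__          # 0-based line of the top-level node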


# pylint: disable=pointless-statement
@overload
def _add_reference(obj: Union[list, NodeListClass],
                   loader: yaml.SafeLoader,
                   node: yaml.nodes.Node) -> NodeListClass: ...


@overload  # noqa: F811
def _add_reference(obj: Union[str, NodeStrClass],
                   loader: yaml.SafeLoader,
                   node: yaml.nodes.Node) -> NodeStrClass: ...


@overload  # noqa: F811
def _add_reference(obj: DICT_T,
                   loader: yaml.SafeLoader,
                   node: yaml.nodes.Node) -> DICT_T: ...
# pylint: enable=pointless-statement


def _add_reference(obj, loader: SafeLineLoader,  # type: ignore # noqa: F811
                   node: yaml.nodes.Node):
    """Add file reference information to an object."""
    if isinstance(obj, list):
        obj = NodeListClass(obj)
    if isinstance(obj, str):
        obj = NodeStrClass(obj)
    setattr(obj, '__config_file__', loader.name)
    setattr(obj, '__line__', node.start_mark.line)
    return obj


def _include_yaml(loader: SafeLineLoader,
                  node: yaml.nodes.Node) -> JSON_TYPE:
    """Load another YAML file and embed it using the !include tag.

    Example:
        device_tracker: !include device_tracker.yaml

    """
    fname = os.path.join(os.path.dirname(loader.name), node.value)
    return _add_reference(load_yaml(fname), loader, node)


def _is_file_valid(name: str) -> bool:
    """Decide if a file is valid."""
    return not name.startswith('.')


def _find_files(directory: str, pattern: str) -> Iterator[str]:
    """Recursively find files in a directory that match the pattern."""
    for root, dirs, files in os.walk(directory, topdown=True):
        dirs[:] = [d for d in dirs if _is_file_valid(d)]
        for basename in sorted(files):
            if _is_file_valid(basename) and fnmatch.fnmatch(basename, pattern):
                filename = os.path.join(root, basename)
                yield filename


def _include_dir_named_yaml(loader: SafeLineLoader,
                            node: yaml.nodes.Node) -> OrderedDict:
    """Load multiple files from directory as a dictionary."""
    mapping = OrderedDict()  # type: OrderedDict
    loc = os.path.join(os.path.dirname(loader.name), node.value)
    for fname in _find_files(loc, '*.yaml'):
        filename = os.path.splitext(os.path.basename(fname))[0]
        if os.path.basename(fname) == SECRET_YAML:
            continue
        mapping[filename] = load_yaml(fname)
    return _add_reference(mapping, loader, node)
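
# Illustrative sketch of !include_dir_named (directory and file names are
# made up).  Given a directory sensors/ containing living_room.yaml and
# kitchen.yaml, the configuration line
#
#     sensor_settings: !include_dir_named sensors
#
# yields an OrderedDict keyed by file name without the extension, e.g.
# {'living_room': ..., 'kitchen': ...}.  The secrets file (SECRET_YAML) is
# skipped.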


def _include_dir_merge_named_yaml(loader: SafeLineLoader,
                                  node: yaml.nodes.Node) -> OrderedDict:
    """Load multiple files from directory as a merged dictionary."""
    mapping = OrderedDict()  # type: OrderedDict
    loc = os.path.join(os.path.dirname(loader.name), node.value)
    for fname in _find_files(loc, '*.yaml'):
        if os.path.basename(fname) == SECRET_YAML:
            continue
        loaded_yaml = load_yaml(fname)
        if isinstance(loaded_yaml, dict):
            mapping.update(loaded_yaml)
    return _add_reference(mapping, loader, node)


def _include_dir_list_yaml(loader: SafeLineLoader,
                           node: yaml.nodes.Node) -> List[JSON_TYPE]:
    """Load multiple files from directory as a list."""
    loc = os.path.join(os.path.dirname(loader.name), node.value)
    return [load_yaml(f) for f in _find_files(loc, '*.yaml')
            if os.path.basename(f) != SECRET_YAML]


def _include_dir_merge_list_yaml(loader: SafeLineLoader,
                                 node: yaml.nodes.Node) -> JSON_TYPE:
    """Load multiple files from directory as a merged list."""
    loc = os.path.join(os.path.dirname(loader.name),
                       node.value)  # type: str
    merged_list = []  # type: List[JSON_TYPE]
    for fname in _find_files(loc, '*.yaml'):
        if os.path.basename(fname) == SECRET_YAML:
            continue
        loaded_yaml = load_yaml(fname)
        if isinstance(loaded_yaml, list):
            merged_list.extend(loaded_yaml)
    return _add_reference(merged_list, loader, node)
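
# Illustrative comparison of the directory include tags (names are made up):
#
#     automation: !include_dir_list automations
#         -> a list with one entry per file (each file's full content)
#     automation: !include_dir_merge_list automations
#         -> one flat list; the list items of every file are concatenated
#     sensor: !include_dir_merge_named sensors
#         -> one mapping; the keys of every file's mapping are merged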


def _ordered_dict(loader: SafeLineLoader,
                  node: yaml.nodes.MappingNode) -> OrderedDict:
    """Load YAML mappings into an ordered dictionary to preserve key order."""
    loader.flatten_mapping(node)
    nodes = loader.construct_pairs(node)

    seen = {}  # type: Dict
    for (key, _), (child_node, _) in zip(nodes, node.value):
        line = child_node.start_mark.line

        try:
            hash(key)
        except TypeError:
            fname = getattr(loader.stream, 'name', '')
            raise yaml.MarkedYAMLError(
                context="invalid key: \"{}\"".format(key),
                context_mark=yaml.Mark(fname, 0, line, -1, None, None)
            )

        if key in seen:
            fname = getattr(loader.stream, 'name', '')
            _LOGGER.error(
                'YAML file %s contains duplicate key "%s". '
                'Check lines %d and %d.', fname, key, seen[key], line)
        seen[key] = line

    return _add_reference(OrderedDict(nodes), loader, node)


def _construct_seq(loader: SafeLineLoader, node: yaml.nodes.Node) -> JSON_TYPE:
    """Add line number and file name to the loaded YAML sequence."""
    obj, = loader.construct_yaml_seq(node)
    return _add_reference(obj, loader, node)


def _env_var_yaml(loader: SafeLineLoader,
                  node: yaml.nodes.Node) -> str:
    """Load environment variable and embed it into the configuration YAML."""
    args = node.value.split()

    # Check for a default value
    if len(args) > 1:
        return os.getenv(args[0], ' '.join(args[1:]))
    if args[0] in os.environ:
        return os.environ[args[0]]
    _LOGGER.error("Environment variable %s not defined.", node.value)
    raise HomeAssistantError(node.value)
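
# Illustrative sketch of !env_var (the variable name is made up).  The node
# value is split on whitespace: the first token is the variable name and the
# remainder, if any, is the default used when the variable is not set:
#
#     password: !env_var MQTT_PASSWORD
#     password: !env_var MQTT_PASSWORD fallback value
#
# With no default and no such variable, a HomeAssistantError is raised.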


def _load_secret_yaml(secret_path: str) -> JSON_TYPE:
    """Load the secrets YAML from the given path."""
    secret_path = os.path.join(secret_path, SECRET_YAML)
    if secret_path in __SECRET_CACHE:
        return __SECRET_CACHE[secret_path]

    _LOGGER.debug('Loading %s', secret_path)
    try:
        secrets = load_yaml(secret_path)
        if not isinstance(secrets, dict):
            raise HomeAssistantError('Secrets is not a dictionary')
        if 'logger' in secrets:
            logger = str(secrets['logger']).lower()
            if logger == 'debug':
                _LOGGER.setLevel(logging.DEBUG)
            else:
                _LOGGER.error("secrets.yaml: 'logger: debug' expected,"
                              " but 'logger: %s' found", logger)
            del secrets['logger']
    except FileNotFoundError:
        secrets = {}
    __SECRET_CACHE[secret_path] = secrets
    return secrets


def secret_yaml(loader: SafeLineLoader,
                node: yaml.nodes.Node) -> JSON_TYPE:
    """Load a secret and embed it into the configuration YAML."""
    secret_path = os.path.dirname(loader.name)
    while True:
        secrets = _load_secret_yaml(secret_path)

        if node.value in secrets:
            _LOGGER.debug("Secret %s retrieved from secrets.yaml in "
                          "folder %s", node.value, secret_path)
            return secrets[node.value]

        if secret_path == os.path.dirname(sys.path[0]):
            break  # sys.path[0] set to config/deps folder by bootstrap

        secret_path = os.path.dirname(secret_path)
        if not os.path.exists(secret_path) or len(secret_path) < 5:
            break  # Somehow we got past the .homeassistant config folder

    if keyring:
        # Fall back to the system keyring
        pwd = keyring.get_password(_SECRET_NAMESPACE, node.value)
        if pwd:
            _LOGGER.debug("Secret %s retrieved from keyring", node.value)
            return pwd

    global credstash  # pylint: disable=invalid-name

    if credstash:
        # pylint: disable=no-member
        try:
            pwd = credstash.getSecret(node.value, table=_SECRET_NAMESPACE)
            if pwd:
                _LOGGER.debug("Secret %s retrieved from credstash", node.value)
                return pwd
        except credstash.ItemNotFound:
            pass
        except Exception:  # pylint: disable=broad-except
            # credstash is installed but not usable (e.g. no config);
            # stop trying it
            credstash = None

    raise HomeAssistantError("Secret {} not defined".format(node.value))


yaml.SafeLoader.add_constructor('!include', _include_yaml)
yaml.SafeLoader.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
                                _ordered_dict)
yaml.SafeLoader.add_constructor(
    yaml.resolver.BaseResolver.DEFAULT_SEQUENCE_TAG, _construct_seq)
yaml.SafeLoader.add_constructor('!env_var', _env_var_yaml)
yaml.SafeLoader.add_constructor('!secret', secret_yaml)
yaml.SafeLoader.add_constructor('!include_dir_list', _include_dir_list_yaml)
yaml.SafeLoader.add_constructor('!include_dir_merge_list',
                                _include_dir_merge_list_yaml)
yaml.SafeLoader.add_constructor('!include_dir_named', _include_dir_named_yaml)
yaml.SafeLoader.add_constructor('!include_dir_merge_named',
                                _include_dir_merge_named_yaml)