"""Template helper methods for rendering strings with Home Assistant data."""
from ast import literal_eval
import asyncio
import base64
import collections.abc
from datetime import datetime, timedelta
from functools import partial, wraps
import json
import logging
import math
from operator import attrgetter
import random
import re
from typing import Any, Dict, Generator, Iterable, Optional, Type, Union
from urllib.parse import urlencode as urllib_urlencode
import weakref

import jinja2
from jinja2 import contextfilter, contextfunction
from jinja2.sandbox import ImmutableSandboxedEnvironment
from jinja2.utils import Namespace  # type: ignore
import voluptuous as vol

from homeassistant.const import (
    ATTR_ENTITY_ID,
    ATTR_LATITUDE,
    ATTR_LONGITUDE,
    ATTR_UNIT_OF_MEASUREMENT,
    LENGTH_METERS,
    STATE_UNKNOWN,
)
from homeassistant.core import State, callback, split_entity_id, valid_entity_id
from homeassistant.exceptions import TemplateError
from homeassistant.helpers import config_validation as cv, location as loc_helper
from homeassistant.helpers.typing import HomeAssistantType, TemplateVarsType
from homeassistant.loader import bind_hass
from homeassistant.util import convert, dt as dt_util, location as loc_util
from homeassistant.util.async_ import run_callback_threadsafe
from homeassistant.util.thread import ThreadWithException

# mypy: allow-untyped-calls, allow-untyped-defs
# mypy: no-check-untyped-defs, no-warn-return-any

_LOGGER = logging.getLogger(__name__)
_SENTINEL = object()
DATE_STR_FORMAT = "%Y-%m-%d %H:%M:%S"

_RENDER_INFO = "template.render_info"
_ENVIRONMENT = "template.environment"

_RE_JINJA_DELIMITERS = re.compile(r"\{%|\{\{|\{#")

_RESERVED_NAMES = {"contextfunction", "evalcontextfunction", "environmentfunction"}

_GROUP_DOMAIN_PREFIX = "group."

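# State attributes that, when accessed through TemplateState.__getitem__,
# register the entity as a dependency of the current render (see _collect_state).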
_COLLECTABLE_STATE_ATTRIBUTES = {
    "state",
    "attributes",
    "last_changed",
    "last_updated",
    "context",
    "domain",
    "object_id",
    "name",
}

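# Default minimum intervals between re-renders, applied in RenderInfo._freeze
# when a template depends on all states or on entire domains.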
ALL_STATES_RATE_LIMIT = timedelta(minutes=1)
DOMAIN_STATES_RATE_LIMIT = timedelta(seconds=1)


@bind_hass
def attach(hass: HomeAssistantType, obj: Any) -> None:
    """Recursively attach hass to all template instances in list and dict."""
    if isinstance(obj, list):
        for child in obj:
            attach(hass, child)
    elif isinstance(obj, collections.abc.Mapping):
        for child_key, child_value in obj.items():
            attach(hass, child_key)
            attach(hass, child_value)
    elif isinstance(obj, Template):
        obj.hass = hass


def render_complex(value: Any, variables: TemplateVarsType = None) -> Any:
    """Recursively render a complex data structure containing templates."""
    if isinstance(value, list):
        return [render_complex(item, variables) for item in value]
    if isinstance(value, collections.abc.Mapping):
        return {
            render_complex(key, variables): render_complex(item, variables)
            for key, item in value.items()
        }
    if isinstance(value, Template):
        return value.async_render(variables)

    return value


def is_complex(value: Any) -> bool:
    """Test if data structure is a complex template."""
    if isinstance(value, Template):
        return True
    if isinstance(value, list):
        return any(is_complex(val) for val in value)
    if isinstance(value, collections.abc.Mapping):
        return any(is_complex(val) for val in value.keys()) or any(
            is_complex(val) for val in value.values()
        )
    return False


def is_template_string(maybe_template: str) -> bool:
    """Check if the input is a Jinja2 template."""
    return _RE_JINJA_DELIMITERS.search(maybe_template) is not None


class ResultWrapper:
    """Result wrapper class to store render result."""

    render_result: Optional[str]

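# gen_result_wrapper dynamically creates a subclass of a native container type
# (list, dict, set) that also carries the original rendered string, so callers
# that need the raw template output can still recover it from a parsed result.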
def gen_result_wrapper(kls):
    """Generate a result wrapper."""

    class Wrapper(kls, ResultWrapper):
        """Wrapper of a kls that can store render_result."""

        def __init__(self, *args: tuple, render_result: Optional[str] = None) -> None:
            super().__init__(*args)
            self.render_result = render_result

        def __str__(self) -> str:
            if self.render_result is None:
                # Can't get set repr to work
                if kls is set:
                    return str(set(self))

                return kls.__str__(self)

            return self.render_result

    return Wrapper

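# tuple is immutable, so its value has to be supplied in __new__ rather than
# __init__; that is why it gets this hand-written wrapper instead of one
# produced by gen_result_wrapper.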
class TupleWrapper(tuple, ResultWrapper):
    """Wrap a tuple."""

    # This is all magic to be allowed to subclass a tuple.

    def __new__(
        cls, value: tuple, *, render_result: Optional[str] = None
    ) -> "TupleWrapper":
        """Create a new tuple class."""
        return super().__new__(cls, tuple(value))

    # pylint: disable=super-init-not-called
    def __init__(self, value: tuple, *, render_result: Optional[str] = None):
        """Initialize a new tuple class."""
        self.render_result = render_result

    def __str__(self) -> str:
        """Return string representation."""
        if self.render_result is None:
            return super().__str__()

        return self.render_result

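# Map each supported container type to its wrapper class. _parse_result uses
# this to re-wrap literal_eval results while preserving the original rendered
# string.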
RESULT_WRAPPERS: Dict[Type, Type] = {
    kls: gen_result_wrapper(kls) for kls in (list, dict, set)
}
RESULT_WRAPPERS[tuple] = TupleWrapper


def _true(arg: Any) -> bool:
    return True


def _false(arg: Any) -> bool:
    return False

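# A RenderInfo instance lives in hass.data[_RENDER_INFO] for the duration of
# Template.async_render_to_info and collects which entities, domains and other
# inputs the template touched, so callers can decide when to re-render.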
class RenderInfo:
    """Holds information about a template render."""

    def __init__(self, template):
        """Initialise."""
        self.template = template
        # Will be set sensibly once frozen.
        self.filter_lifecycle = _true
        self.filter = _true
        self._result = None
        self.is_static = False
        self.exception = None
        self.all_states = False
        self.all_states_lifecycle = False
        self.domains = set()
        self.domains_lifecycle = set()
        self.entities = set()
        self.rate_limit = None
        self.has_time = False

    def __repr__(self) -> str:
        """Representation of RenderInfo."""
        return (
            f"<RenderInfo {self.template}"
            f" all_states={self.all_states}"
            f" all_states_lifecycle={self.all_states_lifecycle}"
            f" domains={self.domains}"
            f" domains_lifecycle={self.domains_lifecycle}"
            f" entities={self.entities}"
            f" rate_limit={self.rate_limit}"
            f" has_time={self.has_time}>"
        )

    def _filter_domains_and_entities(self, entity_id: str) -> bool:
        """Template should re-render if the entity state changes when we match specific domains or entities."""
        return (
            split_entity_id(entity_id)[0] in self.domains or entity_id in self.entities
        )

    def _filter_entities(self, entity_id: str) -> bool:
        """Template should re-render if the entity state changes when we match specific entities."""
        return entity_id in self.entities

    def _filter_lifecycle_domains(self, entity_id: str) -> bool:
        """Template should re-render if the entity is added or removed with domains watched."""
        return split_entity_id(entity_id)[0] in self.domains_lifecycle

    def result(self) -> str:
        """Results of the template computation."""
        if self.exception is not None:
            raise self.exception
        return self._result

    def _freeze_static(self) -> None:
        self.is_static = True
        self._freeze_sets()
        self.all_states = False

    def _freeze_sets(self) -> None:
        self.entities = frozenset(self.entities)
        self.domains = frozenset(self.domains)
        self.domains_lifecycle = frozenset(self.domains_lifecycle)

    def _freeze(self) -> None:
        self._freeze_sets()

        if self.rate_limit is None:
            if self.all_states or self.exception:
                self.rate_limit = ALL_STATES_RATE_LIMIT
            elif self.domains or self.domains_lifecycle:
                self.rate_limit = DOMAIN_STATES_RATE_LIMIT

        if self.exception:
            return

        if not self.all_states_lifecycle:
            if self.domains_lifecycle:
                self.filter_lifecycle = self._filter_lifecycle_domains
            else:
                self.filter_lifecycle = _false

        if self.all_states:
            return

        if self.domains:
            self.filter = self._filter_domains_and_entities
        elif self.entities:
            self.filter = self._filter_entities
        else:
            self.filter = _false

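# Typical use (illustrative): Template("{{ states('sensor.temperature') }}", hass)
# is compiled lazily on first render; async_render() must be called from the
# event loop, while render() may be used from other threads.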
class Template:
    """Class to hold a template and manage caching and rendering."""

    __slots__ = (
        "__weakref__",
        "template",
        "hass",
        "is_static",
        "_compiled_code",
        "_compiled",
    )

    def __init__(self, template, hass=None):
        """Instantiate a template."""
        if not isinstance(template, str):
            raise TypeError("Expected template to be a string")

        self.template: str = template.strip()
        self._compiled_code = None
        self._compiled = None
        self.hass = hass
        self.is_static = not is_template_string(template)

    @property
    def _env(self):
        if self.hass is None:
            return _NO_HASS_ENV
        ret = self.hass.data.get(_ENVIRONMENT)
        if ret is None:
            ret = self.hass.data[_ENVIRONMENT] = TemplateEnvironment(self.hass)
        return ret

    def ensure_valid(self):
        """Compile the template if needed and raise TemplateError if it is invalid."""
        if self._compiled_code is not None:
            return

        try:
            self._compiled_code = self._env.compile(self.template)
        except jinja2.TemplateError as err:
            raise TemplateError(err) from err

    def render(
        self,
        variables: TemplateVarsType = None,
        parse_result: bool = True,
        **kwargs: Any,
    ) -> Any:
        """Render given template."""
        if self.is_static:
            if self.hass.config.legacy_templates or not parse_result:
                return self.template
            return self._parse_result(self.template)

        return run_callback_threadsafe(
            self.hass.loop,
            partial(self.async_render, variables, parse_result, **kwargs),
        ).result()

    @callback
    def async_render(
        self,
        variables: TemplateVarsType = None,
        parse_result: bool = True,
        **kwargs: Any,
    ) -> Any:
        """Render given template.

        This method must be run in the event loop.
        """
        if self.is_static:
            if self.hass.config.legacy_templates or not parse_result:
                return self.template
            return self._parse_result(self.template)

        compiled = self._compiled or self._ensure_compiled()

        if variables is not None:
            kwargs.update(variables)

        try:
            render_result = compiled.render(kwargs)
        except Exception as err:  # pylint: disable=broad-except
            raise TemplateError(err) from err

        render_result = render_result.strip()

        if self.hass.config.legacy_templates or not parse_result:
            return render_result

        return self._parse_result(render_result)

    def _parse_result(self, render_result: str) -> Any:  # pylint: disable=no-self-use
        """Parse the result."""
        try:
            result = literal_eval(render_result)

            if type(result) in RESULT_WRAPPERS:
                result = RESULT_WRAPPERS[type(result)](
                    result, render_result=render_result
                )

            # If the literal_eval result is a string, use the original
            # render instead, by not returning right here. Evaluating a string
            # that produces another string affects quoting, which can lead to
            # unexpected output, so the original render is preferred.
            if not isinstance(result, str):
                return result
        except (ValueError, TypeError, SyntaxError, MemoryError):
            pass

        return render_result

    async def async_render_will_timeout(
        self, timeout: float, variables: TemplateVarsType = None, **kwargs: Any
    ) -> bool:
        """Check to see if rendering a template will timeout during render.

        This is intended to check for expensive templates
        that will make the system unstable. The template
        is rendered in a separate thread to ensure it does not
        tie up the event loop.

        This function is not a security control and is only
        intended to be used as a safety check when testing
        templates.

        This method must be run in the event loop.
        """
        assert self.hass

        if self.is_static:
            return False

        compiled = self._compiled or self._ensure_compiled()

        if variables is not None:
            kwargs.update(variables)

        finish_event = asyncio.Event()

        def _render_template():
            try:
                compiled.render(kwargs)
            except TimeoutError:
                pass
            finally:
                run_callback_threadsafe(self.hass.loop, finish_event.set)

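        # Render in a separate thread; if it has not finished within the
        # timeout, raise TimeoutError inside that thread via ThreadWithException
        # so the render aborts instead of blocking forever.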
        try:
            template_render_thread = ThreadWithException(target=_render_template)
            template_render_thread.start()
            await asyncio.wait_for(finish_event.wait(), timeout=timeout)
        except asyncio.TimeoutError:
            template_render_thread.raise_exc(TimeoutError)
            return True
        finally:
            template_render_thread.join()

        return False

    @callback
    def async_render_to_info(
        self, variables: TemplateVarsType = None, **kwargs: Any
    ) -> RenderInfo:
        """Render the template and collect an entity filter."""
        assert self.hass and _RENDER_INFO not in self.hass.data

        render_info = RenderInfo(self)

        # pylint: disable=protected-access
        if self.is_static:
            render_info._result = self.template.strip()
            render_info._freeze_static()
            return render_info

        self.hass.data[_RENDER_INFO] = render_info
        try:
            render_info._result = self.async_render(variables, **kwargs)
        except TemplateError as ex:
            render_info.exception = ex
        finally:
            del self.hass.data[_RENDER_INFO]

        render_info._freeze()
        return render_info

    def render_with_possible_json_value(self, value, error_value=_SENTINEL):
        """Render template with value exposed.

        If valid JSON will expose value_json too.
        """
        if self.is_static:
            return self.template

        return run_callback_threadsafe(
            self.hass.loop,
            self.async_render_with_possible_json_value,
            value,
            error_value,
        ).result()

    @callback
    def async_render_with_possible_json_value(
        self, value, error_value=_SENTINEL, variables=None
    ):
        """Render template with value exposed.

        If valid JSON will expose value_json too.

        This method must be run in the event loop.
        """
        if self.is_static:
            return self.template

        if self._compiled is None:
            self._ensure_compiled()

        variables = dict(variables or {})
        variables["value"] = value

        try:
            variables["value_json"] = json.loads(value)
        except (ValueError, TypeError):
            pass

        try:
            return self._compiled.render(variables).strip()
        except jinja2.TemplateError as ex:
            if error_value is _SENTINEL:
                _LOGGER.error(
                    "Error parsing value: %s (value: %s, template: %s)",
                    ex,
                    value,
                    self.template,
                )
            return value if error_value is _SENTINEL else error_value

    def _ensure_compiled(self):
        """Bind a template to a specific hass instance."""
        self.ensure_valid()

        assert self.hass is not None, "hass variable not set on template"

        env = self._env

        self._compiled = jinja2.Template.from_code(
            env, self._compiled_code, env.globals, None
        )

        return self._compiled

    def __eq__(self, other):
        """Compare template with another."""
        return (
            self.__class__ == other.__class__
            and self.template == other.template
            and self.hass == other.hass
        )

    def __hash__(self) -> int:
        """Hash code for template."""
        return hash(self.template)

    def __repr__(self) -> str:
        """Representation of Template."""
        return 'Template("' + self.template + '")'


class AllStates:
    """Class to expose all HA states as attributes."""

    def __init__(self, hass):
        """Initialize all states."""
        self._hass = hass

    def __getattr__(self, name):
        """Return the domain state."""
        if "." in name:
            return _get_state_if_valid(self._hass, name)

        if name in _RESERVED_NAMES:
            return None

        if not valid_entity_id(f"{name}.entity"):
            raise TemplateError(f"Invalid domain name '{name}'")

        return DomainStates(self._hass, name)

    # Jinja will try __getitem__ first and it avoids the need
    # to call is_safe_attribute
    __getitem__ = __getattr__

    def _collect_all(self) -> None:
        render_info = self._hass.data.get(_RENDER_INFO)
        if render_info is not None:
            render_info.all_states = True

    def _collect_all_lifecycle(self) -> None:
        render_info = self._hass.data.get(_RENDER_INFO)
        if render_info is not None:
            render_info.all_states_lifecycle = True

    def __iter__(self):
        """Return all states."""
        self._collect_all()
        return _state_generator(self._hass, None)

    def __len__(self) -> int:
        """Return number of states."""
        self._collect_all_lifecycle()
        return self._hass.states.async_entity_ids_count()

    def __call__(self, entity_id):
        """Return the states."""
        state = _get_state(self._hass, entity_id)
        return STATE_UNKNOWN if state is None else state.state

    def __repr__(self) -> str:
        """Representation of All States."""
        return "<template AllStates>"


class DomainStates:
    """Class to expose a specific HA domain as attributes."""

    def __init__(self, hass, domain):
        """Initialize the domain states."""
        self._hass = hass
        self._domain = domain

    def __getattr__(self, name):
        """Return the states."""
        return _get_state_if_valid(self._hass, f"{self._domain}.{name}")

    # Jinja will try __getitem__ first and it avoids the need
    # to call is_safe_attribute
    __getitem__ = __getattr__

    def _collect_domain(self) -> None:
        entity_collect = self._hass.data.get(_RENDER_INFO)
        if entity_collect is not None:
            entity_collect.domains.add(self._domain)

    def _collect_domain_lifecycle(self) -> None:
        entity_collect = self._hass.data.get(_RENDER_INFO)
        if entity_collect is not None:
            entity_collect.domains_lifecycle.add(self._domain)

    def __iter__(self):
        """Return the iteration over all the states."""
        self._collect_domain()
        return _state_generator(self._hass, self._domain)

    def __len__(self) -> int:
        """Return number of states."""
        self._collect_domain_lifecycle()
        return self._hass.states.async_entity_ids_count(self._domain)

    def __repr__(self) -> str:
        """Representation of Domain States."""
        return f"<template DomainStates('{self._domain}')>"

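# Accessing state, attributes and similar fields on a TemplateState registers
# the entity with the active RenderInfo (when one is being collected), so the
# template's dependencies are known; entity_id access intentionally does not.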
class TemplateState(State):
    """Class to represent a state object in a template."""

    __slots__ = ("_hass", "_state", "_collect")

    # Inheritance is done so functions that check against State keep working
    # pylint: disable=super-init-not-called
    def __init__(self, hass, state, collect=True):
        """Initialize template state."""
        self._hass = hass
        self._state = state
        self._collect = collect

    def _collect_state(self):
        if self._collect and _RENDER_INFO in self._hass.data:
            self._hass.data[_RENDER_INFO].entities.add(self._state.entity_id)

    # Jinja will try __getitem__ first and it avoids the need
    # to call is_safe_attribute
    def __getitem__(self, item):
        """Return a property as an attribute for jinja."""
        if item in _COLLECTABLE_STATE_ATTRIBUTES:
            # _collect_state inlined here for performance
            if self._collect and _RENDER_INFO in self._hass.data:
                self._hass.data[_RENDER_INFO].entities.add(self._state.entity_id)
            return getattr(self._state, item)
        if item == "entity_id":
            return self._state.entity_id
        if item == "state_with_unit":
            return self.state_with_unit
        raise KeyError

    @property
    def entity_id(self):
        """Wrap State.entity_id.

        Intentionally does not collect state.
        """
        return self._state.entity_id

    @property
    def state(self):
        """Wrap State.state."""
        self._collect_state()
        return self._state.state

    @property
    def attributes(self):
        """Wrap State.attributes."""
        self._collect_state()
        return self._state.attributes

    @property
    def last_changed(self):
        """Wrap State.last_changed."""
        self._collect_state()
        return self._state.last_changed

    @property
    def last_updated(self):
        """Wrap State.last_updated."""
        self._collect_state()
        return self._state.last_updated

    @property
    def context(self):
        """Wrap State.context."""
        self._collect_state()
        return self._state.context

    @property
    def domain(self):
        """Wrap State.domain."""
        self._collect_state()
        return self._state.domain

    @property
    def object_id(self):
        """Wrap State.object_id."""
        self._collect_state()
        return self._state.object_id

    @property
    def name(self):
        """Wrap State.name."""
        self._collect_state()
        return self._state.name

    @property
    def state_with_unit(self) -> str:
        """Return the state concatenated with the unit if available."""
        self._collect_state()
        unit = self._state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
        return f"{self._state.state} {unit}" if unit else self._state.state

    def __eq__(self, other: Any) -> bool:
        """Ensure we collect on equality check."""
        self._collect_state()
        return self._state.__eq__(other)

    def __repr__(self) -> str:
        """Representation of Template State."""
        return f"<template TemplateState({self._state.__repr__()})>"


def _collect_state(hass: HomeAssistantType, entity_id: str) -> None:
    entity_collect = hass.data.get(_RENDER_INFO)
    if entity_collect is not None:
        entity_collect.entities.add(entity_id)


def _state_generator(hass: HomeAssistantType, domain: Optional[str]) -> Generator:
    """State generator for a domain or all states."""
    for state in sorted(hass.states.async_all(domain), key=attrgetter("entity_id")):
        yield TemplateState(hass, state, collect=False)


def _get_state_if_valid(
    hass: HomeAssistantType, entity_id: str
) -> Optional[TemplateState]:
    state = hass.states.get(entity_id)
    if state is None and not valid_entity_id(entity_id):
        raise TemplateError(f"Invalid entity ID '{entity_id}'")  # type: ignore
    return _get_template_state_from_state(hass, entity_id, state)


def _get_state(hass: HomeAssistantType, entity_id: str) -> Optional[TemplateState]:
    return _get_template_state_from_state(hass, entity_id, hass.states.get(entity_id))


def _get_template_state_from_state(
    hass: HomeAssistantType, entity_id: str, state: Optional[State]
) -> Optional[TemplateState]:
    if state is None:
        # Only need to collect if none, if not none collect first actual
        # access to the state properties in the state wrapper.
        _collect_state(hass, entity_id)
        return None
    return TemplateState(hass, state)


def _resolve_state(
    hass: HomeAssistantType, entity_id_or_state: Any
) -> Union[State, TemplateState, None]:
    """Return state or entity_id if given."""
    if isinstance(entity_id_or_state, State):
        return entity_id_or_state
    if isinstance(entity_id_or_state, str):
        return _get_state(hass, entity_id_or_state)
    return None


def result_as_boolean(template_result: Optional[str]) -> bool:
    """Convert the template result to a boolean.

    True/not 0/'1'/'true'/'yes'/'on'/'enable' are considered truthy
    False/0/None/'0'/'false'/'no'/'off'/'disable' are considered falsy

    """
    try:
        return cv.boolean(template_result)
    except vol.Invalid:
        return False


def expand(hass: HomeAssistantType, *args: Any) -> Iterable[State]:
    """Expand out any groups into entity states."""
    search = list(args)
    found = {}
    while search:
        entity = search.pop()
        if isinstance(entity, str):
            entity_id = entity
            entity = _get_state(hass, entity)
            if entity is None:
                continue
        elif isinstance(entity, State):
            entity_id = entity.entity_id
        elif isinstance(entity, collections.abc.Iterable):
            search += entity
            continue
        else:
            # ignore other types
            continue

        if entity_id.startswith(_GROUP_DOMAIN_PREFIX):
            # Collect state will be called in here since it's wrapped
            group_entities = entity.attributes.get(ATTR_ENTITY_ID)
            if group_entities:
                search += group_entities
        else:
            _collect_state(hass, entity_id)
            found[entity_id] = entity

    return sorted(found.values(), key=lambda a: a.entity_id)


def closest(hass, *args):
    """Find closest entity.

    Closest to home:
        closest(states)
        closest(states.device_tracker)
        closest('group.children')
        closest(states.group.children)

    Closest to a point:
        closest(23.456, 23.456, 'group.children')
        closest('zone.school', 'group.children')
        closest(states.zone.school, 'group.children')

    As a filter:
        states | closest
        states.device_tracker | closest
        ['group.children', states.device_tracker] | closest
        'group.children' | closest(23.456, 23.456)
        states.device_tracker | closest('zone.school')
        'group.children' | closest(states.zone.school)

    """
    if len(args) == 1:
        latitude = hass.config.latitude
        longitude = hass.config.longitude
        entities = args[0]

    elif len(args) == 2:
        point_state = _resolve_state(hass, args[0])

        if point_state is None:
            _LOGGER.warning("Closest:Unable to find state %s", args[0])
            return None
        if not loc_helper.has_location(point_state):
            _LOGGER.warning(
                "Closest:State does not contain valid location: %s", point_state
            )
            return None

        latitude = point_state.attributes.get(ATTR_LATITUDE)
        longitude = point_state.attributes.get(ATTR_LONGITUDE)

        entities = args[1]

    else:
        latitude = convert(args[0], float)
        longitude = convert(args[1], float)

        if latitude is None or longitude is None:
            _LOGGER.warning(
                "Closest:Received invalid coordinates: %s, %s", args[0], args[1]
            )
            return None

        entities = args[2]

    states = expand(hass, entities)

    # state will already be wrapped here
    return loc_helper.closest(latitude, longitude, states)


def closest_filter(hass, *args):
    """Call closest as a filter. Need to reorder arguments."""
    new_args = list(args[1:])
    new_args.append(args[0])
    return closest(hass, *new_args)


def distance(hass, *args):
    """Calculate distance.

    Will calculate distance from home to a point or between points.
    Points can be passed in using state objects or lat/lng coordinates.
    """
    locations = []

    to_process = list(args)

    while to_process:
        value = to_process.pop(0)
        if isinstance(value, str) and not valid_entity_id(value):
            point_state = None
        else:
            point_state = _resolve_state(hass, value)

        if point_state is None:
            # We expect this and the next value to be latitude and longitude.
            if not to_process:
                _LOGGER.warning(
                    "Distance:Expected latitude and longitude, got %s", value
                )
                return None

            value_2 = to_process.pop(0)
            latitude = convert(value, float)
            longitude = convert(value_2, float)

            if latitude is None or longitude is None:
                _LOGGER.warning(
                    "Distance:Unable to process latitude and longitude: %s, %s",
                    value,
                    value_2,
                )
                return None

        else:
            if not loc_helper.has_location(point_state):
                _LOGGER.warning(
                    "Distance:State does not contain valid location: %s", point_state
                )
                return None

            latitude = point_state.attributes.get(ATTR_LATITUDE)
            longitude = point_state.attributes.get(ATTR_LONGITUDE)

        locations.append((latitude, longitude))

    if len(locations) == 1:
        return hass.config.distance(*locations[0])

    return hass.config.units.length(
        loc_util.distance(*locations[0] + locations[1]), LENGTH_METERS
    )


def is_state(hass: HomeAssistantType, entity_id: str, state: State) -> bool:
    """Test if a state is a specific value."""
    state_obj = _get_state(hass, entity_id)
    return state_obj is not None and state_obj.state == state


def is_state_attr(hass, entity_id, name, value):
    """Test if a state's attribute is a specific value."""
    attr = state_attr(hass, entity_id, name)
    return attr is not None and attr == value


def state_attr(hass, entity_id, name):
    """Get a specific attribute from a state."""
    state_obj = _get_state(hass, entity_id)
    if state_obj is not None:
        return state_obj.attributes.get(name)
    return None

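# now() and utcnow() mark the render as time dependent (RenderInfo.has_time),
# so callers that track render info can schedule periodic re-renders.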
def now(hass):
    """Record fetching now."""
    render_info = hass.data.get(_RENDER_INFO)
    if render_info is not None:
        render_info.has_time = True

    return dt_util.now()


def utcnow(hass):
    """Record fetching utcnow."""
    render_info = hass.data.get(_RENDER_INFO)
    if render_info is not None:
        render_info.has_time = True

    return dt_util.utcnow()

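# The filter helpers below are deliberately forgiving: when the input cannot be
# converted they return it unchanged (or None where noted) instead of raising,
# so a bad value does not break the whole template.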
def forgiving_round(value, precision=0, method="common"):
    """Round accepted strings."""
    try:
        # support rounding methods like jinja
        multiplier = float(10 ** precision)
        if method == "ceil":
            value = math.ceil(float(value) * multiplier) / multiplier
        elif method == "floor":
            value = math.floor(float(value) * multiplier) / multiplier
        elif method == "half":
            value = round(float(value) * 2) / 2
        else:
            # if method is common or something else, use common rounding
            value = round(float(value), precision)
        return int(value) if precision == 0 else value
    except (ValueError, TypeError):
        # If value can't be converted to float
        return value


def multiply(value, amount):
    """Filter to convert value to float and multiply it."""
    try:
        return float(value) * amount
    except (ValueError, TypeError):
        # If value can't be converted to float
        return value


def logarithm(value, base=math.e):
    """Filter to get logarithm of the value with a specific base."""
    try:
        return math.log(float(value), float(base))
    except (ValueError, TypeError):
        return value


def sine(value):
    """Filter to get sine of the value."""
    try:
        return math.sin(float(value))
    except (ValueError, TypeError):
        return value


def cosine(value):
    """Filter to get cosine of the value."""
    try:
        return math.cos(float(value))
    except (ValueError, TypeError):
        return value


def tangent(value):
    """Filter to get tangent of the value."""
    try:
        return math.tan(float(value))
    except (ValueError, TypeError):
        return value


def arc_sine(value):
    """Filter to get arc sine of the value."""
    try:
        return math.asin(float(value))
    except (ValueError, TypeError):
        return value


def arc_cosine(value):
    """Filter to get arc cosine of the value."""
    try:
        return math.acos(float(value))
    except (ValueError, TypeError):
        return value


def arc_tangent(value):
    """Filter to get arc tangent of the value."""
    try:
        return math.atan(float(value))
    except (ValueError, TypeError):
        return value


def arc_tangent2(*args):
    """Filter to calculate four quadrant arc tangent of y / x."""
    try:
        if len(args) == 1 and isinstance(args[0], (list, tuple)):
            args = args[0]

        return math.atan2(float(args[0]), float(args[1]))
    except (ValueError, TypeError):
        return args


def square_root(value):
    """Filter to get square root of the value."""
    try:
        return math.sqrt(float(value))
    except (ValueError, TypeError):
        return value


def timestamp_custom(value, date_format=DATE_STR_FORMAT, local=True):
    """Filter to convert given timestamp to format."""
    try:
        date = dt_util.utc_from_timestamp(value)

        if local:
            date = dt_util.as_local(date)

        return date.strftime(date_format)
    except (ValueError, TypeError):
        # If timestamp can't be converted
        return value


def timestamp_local(value):
    """Filter to convert given timestamp to local date/time."""
    try:
        return dt_util.as_local(dt_util.utc_from_timestamp(value)).strftime(
            DATE_STR_FORMAT
        )
    except (ValueError, TypeError):
        # If timestamp can't be converted
        return value


def timestamp_utc(value):
    """Filter to convert given timestamp to UTC date/time."""
    try:
        return dt_util.utc_from_timestamp(value).strftime(DATE_STR_FORMAT)
    except (ValueError, TypeError):
        # If timestamp can't be converted
        return value


def forgiving_as_timestamp(value):
    """Try to convert value to timestamp."""
    try:
        return dt_util.as_timestamp(value)
    except (ValueError, TypeError):
        return None


def strptime(string, fmt):
    """Parse a time string to datetime."""
    try:
        return datetime.strptime(string, fmt)
    except (ValueError, AttributeError, TypeError):
        return string


def fail_when_undefined(value):
    """Filter to force a failure when the value is undefined."""
    if isinstance(value, jinja2.Undefined):
        value()
    return value


def forgiving_float(value):
    """Try to convert value to a float."""
    try:
        return float(value)
    except (ValueError, TypeError):
        return value


def regex_match(value, find="", ignorecase=False):
    """Match value using regex."""
    if not isinstance(value, str):
        value = str(value)
    flags = re.I if ignorecase else 0
    return bool(re.match(find, value, flags))


def regex_replace(value="", find="", replace="", ignorecase=False):
    """Replace using regex."""
    if not isinstance(value, str):
        value = str(value)
    flags = re.I if ignorecase else 0
    regex = re.compile(find, flags)
    return regex.sub(replace, value)


def regex_search(value, find="", ignorecase=False):
    """Search using regex."""
    if not isinstance(value, str):
        value = str(value)
    flags = re.I if ignorecase else 0
    return bool(re.search(find, value, flags))


def regex_findall_index(value, find="", index=0, ignorecase=False):
    """Find all matches using regex and then pick specific match index."""
    if not isinstance(value, str):
        value = str(value)
    flags = re.I if ignorecase else 0
    return re.findall(find, value, flags)[index]


def bitwise_and(first_value, second_value):
    """Perform a bitwise and operation."""
    return first_value & second_value


def bitwise_or(first_value, second_value):
    """Perform a bitwise or operation."""
    return first_value | second_value


def base64_encode(value):
    """Perform base64 encode."""
    return base64.b64encode(value.encode("utf-8")).decode("utf-8")


def base64_decode(value):
    """Perform base64 decode."""
    return base64.b64decode(value).decode("utf-8")

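# Ordinal suffix lookup: a last digit of 1/2/3 maps to "st"/"nd"/"rd" and every
# other digit to "th", except that 11-13 always use "th".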
def ordinal(value):
    """Perform ordinal conversion."""
    return str(value) + (
        list(["th", "st", "nd", "rd"] + ["th"] * 6)[(int(str(value)[-1])) % 10]
        if int(str(value)[-2:]) % 100 not in range(11, 14)
        else "th"
    )


def from_json(value):
    """Convert a JSON string to an object."""
    return json.loads(value)


def to_json(value):
    """Convert an object to a JSON string."""
    return json.dumps(value)


@contextfilter
def random_every_time(context, values):
    """Choose a random value.

    Unlike Jinja's random filter,
    this is context-dependent to avoid caching the chosen value.
    """
    return random.choice(values)


def relative_time(value):
    """
    Take a datetime and return its "age" as a string.

    The age can be in second, minute, hour, day, month or year. Only the
    biggest unit is considered, e.g. if it's 2 days and 3 hours, "2 days" will
    be returned.
    If the datetime is in the future, the input is returned unmodified.

    If the input is not a datetime object, the input will be returned unmodified.
    """
    if not isinstance(value, datetime):
        return value
    if not value.tzinfo:
        value = dt_util.as_local(value)
    if dt_util.now() < value:
        return value
    return dt_util.get_age(value)


def urlencode(value):
    """Urlencode dictionary and return as UTF-8 encoded bytes."""
    return urllib_urlencode(value).encode("utf-8")


class TemplateEnvironment(ImmutableSandboxedEnvironment):
    """The Home Assistant template environment."""

    def __init__(self, hass):
        """Initialise template environment."""
        super().__init__()
        self.hass = hass
        self.template_cache = weakref.WeakValueDictionary()
        self.filters["round"] = forgiving_round
        self.filters["multiply"] = multiply
        self.filters["log"] = logarithm
        self.filters["sin"] = sine
        self.filters["cos"] = cosine
        self.filters["tan"] = tangent
        self.filters["asin"] = arc_sine
        self.filters["acos"] = arc_cosine
        self.filters["atan"] = arc_tangent
        self.filters["atan2"] = arc_tangent2
        self.filters["sqrt"] = square_root
        self.filters["as_timestamp"] = forgiving_as_timestamp
        self.filters["as_local"] = dt_util.as_local
        self.filters["timestamp_custom"] = timestamp_custom
        self.filters["timestamp_local"] = timestamp_local
        self.filters["timestamp_utc"] = timestamp_utc
        self.filters["to_json"] = to_json
        self.filters["from_json"] = from_json
        self.filters["is_defined"] = fail_when_undefined
        self.filters["max"] = max
        self.filters["min"] = min
        self.filters["random"] = random_every_time
        self.filters["base64_encode"] = base64_encode
        self.filters["base64_decode"] = base64_decode
        self.filters["ordinal"] = ordinal
        self.filters["regex_match"] = regex_match
        self.filters["regex_replace"] = regex_replace
        self.filters["regex_search"] = regex_search
        self.filters["regex_findall_index"] = regex_findall_index
        self.filters["bitwise_and"] = bitwise_and
        self.filters["bitwise_or"] = bitwise_or
        self.filters["ord"] = ord
        self.globals["log"] = logarithm
        self.globals["sin"] = sine
        self.globals["cos"] = cosine
        self.globals["tan"] = tangent
        self.globals["sqrt"] = square_root
        self.globals["pi"] = math.pi
        self.globals["tau"] = math.pi * 2
        self.globals["e"] = math.e
        self.globals["asin"] = arc_sine
        self.globals["acos"] = arc_cosine
        self.globals["atan"] = arc_tangent
        self.globals["atan2"] = arc_tangent2
        self.globals["float"] = forgiving_float
        self.globals["as_local"] = dt_util.as_local
        self.globals["as_timestamp"] = forgiving_as_timestamp
        self.globals["relative_time"] = relative_time
        self.globals["timedelta"] = timedelta
        self.globals["strptime"] = strptime
        self.globals["urlencode"] = urlencode
        if hass is None:
            return

        # We mark these as context functions to ensure they get
        # evaluated fresh with every execution, rather than executed
        # at compile time and the value stored. The context itself
        # can be discarded, we only need to get at the hass object.
        def hassfunction(func):
            """Wrap a function that depends on hass."""

            @wraps(func)
            def wrapper(*args, **kwargs):
                return func(hass, *args[1:], **kwargs)

            return contextfunction(wrapper)

        self.globals["expand"] = hassfunction(expand)
        self.filters["expand"] = contextfilter(self.globals["expand"])
        self.globals["closest"] = hassfunction(closest)
        self.filters["closest"] = contextfilter(hassfunction(closest_filter))
        self.globals["distance"] = hassfunction(distance)
        self.globals["is_state"] = hassfunction(is_state)
        self.globals["is_state_attr"] = hassfunction(is_state_attr)
        self.globals["state_attr"] = hassfunction(state_attr)
        self.globals["states"] = AllStates(hass)
        self.globals["utcnow"] = hassfunction(utcnow)
        self.globals["now"] = hassfunction(now)

    def is_safe_callable(self, obj):
        """Test if callback is safe."""
        return isinstance(obj, AllStates) or super().is_safe_callable(obj)

    def is_safe_attribute(self, obj, attr, value):
        """Test if attribute is safe."""
        if isinstance(obj, (AllStates, DomainStates, TemplateState)):
            return not attr[0] == "_"

        if isinstance(obj, Namespace):
            return True

        return super().is_safe_attribute(obj, attr, value)

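    # Compiled templates are cached per source string in a WeakValueDictionary,
    # so identical template strings share one compiled object while unused
    # entries can still be garbage collected.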
    def compile(self, source, name=None, filename=None, raw=False, defer_init=False):
        """Compile the template."""
        if (
            name is not None
            or filename is not None
            or raw is not False
            or defer_init is not False
        ):
            # If there are any non-default keyword args, we do
            # not cache. In production we currently do not have
            # any instance of this.
            return super().compile(source, name, filename, raw, defer_init)

        cached = self.template_cache.get(source)

        if cached is None:
            cached = self.template_cache[source] = super().compile(source)

        return cached

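# Shared environment for templates that have no hass attached (see Template._env).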
_NO_HASS_ENV = TemplateEnvironment(None)