2015-02-01 04:06:30 +00:00
|
|
|
"""
|
|
|
|
Provide pre-made queries on top of the recorder component.
|
2015-10-25 14:04:37 +00:00
|
|
|
|
|
|
|
For more details about this component, please refer to the documentation at
|
2015-11-09 12:12:18 +00:00
|
|
|
https://home-assistant.io/components/history/
|
2015-02-01 04:06:30 +00:00
|
|
|
"""
|
2016-10-24 06:48:01 +00:00
|
|
|
import asyncio
|
2016-02-19 05:27:50 +00:00
|
|
|
from collections import defaultdict
|
2015-04-29 02:12:05 +00:00
|
|
|
from datetime import timedelta
|
2015-02-02 02:00:30 +00:00
|
|
|
from itertools import groupby
|
2016-10-13 15:54:45 +00:00
|
|
|
import voluptuous as vol
|
2015-01-31 18:31:16 +00:00
|
|
|
|
2017-01-03 22:19:28 +00:00
|
|
|
from homeassistant.const import (
|
|
|
|
HTTP_BAD_REQUEST, CONF_DOMAINS, CONF_ENTITIES, CONF_EXCLUDE, CONF_INCLUDE)
|
2016-10-13 15:54:45 +00:00
|
|
|
import homeassistant.helpers.config_validation as cv
|
2016-02-19 05:27:50 +00:00
|
|
|
import homeassistant.util.dt as dt_util
|
2016-07-02 18:22:51 +00:00
|
|
|
from homeassistant.components import recorder, script
|
2016-07-17 05:32:25 +00:00
|
|
|
from homeassistant.components.frontend import register_built_in_panel
|
2016-05-14 07:58:36 +00:00
|
|
|
from homeassistant.components.http import HomeAssistantView
|
2016-10-13 15:54:45 +00:00
|
|
|
from homeassistant.const import ATTR_HIDDEN
|
2015-01-31 18:31:16 +00:00
|
|
|
|
|
|
|
# Component domain and the components that must be set up before this one.
DOMAIN = 'history'
DEPENDENCIES = ['recorder', 'http']

# Configuration schema: optional exclude/include sections, each holding a
# list of entity ids and a list of domain names used to filter history
# queries (combination rules are implemented in the Filters class below).
CONFIG_SCHEMA = vol.Schema({
    DOMAIN: vol.Schema({
        CONF_EXCLUDE: vol.Schema({
            vol.Optional(CONF_ENTITIES, default=[]): cv.entity_ids,
            vol.Optional(CONF_DOMAINS, default=[]):
                vol.All(cv.ensure_list, [cv.string])
        }),
        CONF_INCLUDE: vol.Schema({
            vol.Optional(CONF_ENTITIES, default=[]): cv.entity_ids,
            vol.Optional(CONF_DOMAINS, default=[]):
                vol.All(cv.ensure_list, [cv.string])
        })
    }),
}, extra=vol.ALLOW_EXTRA)

# Domains whose rows are always considered significant (they carry
# measurements such as current temperature) even without a state change.
SIGNIFICANT_DOMAINS = ('thermostat', 'climate')
# Domains never included in point-in-time state queries.
IGNORE_DOMAINS = ('zone', 'scene',)
|
2016-01-23 20:36:43 +00:00
|
|
|
|
2015-01-31 18:31:16 +00:00
|
|
|
|
|
|
|
def last_5_states(entity_id):
    """Return the last 5 states for entity_id."""
    entity_id = entity_id.lower()

    state_model = recorder.get_model('States')

    # Only rows where last_changed == last_updated represent real state
    # changes; newest first by primary key.
    query = recorder.query('States').filter(
        (state_model.entity_id == entity_id) &
        (state_model.last_changed == state_model.last_updated)
    ).order_by(state_model.state_id.desc()).limit(5)

    return recorder.execute(query)
|
2015-01-31 18:31:16 +00:00
|
|
|
|
|
|
|
|
2016-10-13 15:54:45 +00:00
|
|
|
def get_significant_states(start_time, end_time=None, entity_id=None,
                           filters=None):
    """
    Return states changes during UTC period start_time - end_time.

    Significant states are all states where there is a state change,
    as well as all states from certain domains (for instance
    thermostat so that we get current temperature in our graphs).

    Returns the entity_id -> [states] mapping built by states_to_json.
    """
    # A single requested entity is handed to the filters as a one-tuple.
    entity_ids = (entity_id.lower(), ) if entity_id is not None else None
    states = recorder.get_model('States')
    # Real state changes (last_changed == last_updated) plus every row from
    # the always-significant domains, restricted to after start_time.
    query = recorder.query('States').filter(
        (states.domain.in_(SIGNIFICANT_DOMAINS) |
         (states.last_changed == states.last_updated)) &
        (states.last_updated > start_time))
    if filters:
        query = filters.apply(query, entity_ids)

    if end_time is not None:
        query = query.filter(states.last_updated < end_time)

    # NOTE: the outer iterable of this generator expression is evaluated
    # eagerly, while 'states' still names the model class; only afterwards
    # is 'states' rebound to the generator itself.
    states = (
        state for state in recorder.execute(
            query.order_by(states.entity_id, states.last_updated))
        if (_is_significant(state) and
            not state.attributes.get(ATTR_HIDDEN, False)))

    return states_to_json(states, start_time, entity_id, filters)
|
2016-01-23 20:36:43 +00:00
|
|
|
|
|
|
|
|
2015-02-02 02:00:30 +00:00
|
|
|
def state_changes_during_period(start_time, end_time=None, entity_id=None):
    """Return states changes during UTC period start_time - end_time."""
    state_model = recorder.get_model('States')

    # Only genuine state changes (last_changed == last_updated) that
    # happened after start_time.
    query = recorder.query('States').filter(
        (state_model.last_changed == state_model.last_updated) &
        (state_model.last_changed > start_time))

    if entity_id is not None:
        query = query.filter_by(entity_id=entity_id.lower())

    if end_time is not None:
        query = query.filter(state_model.last_updated < end_time)

    ordered = query.order_by(state_model.entity_id, state_model.last_updated)

    return states_to_json(recorder.execute(ordered), start_time, entity_id)
|
2015-02-06 06:53:36 +00:00
|
|
|
|
|
|
|
|
2016-10-13 15:54:45 +00:00
|
|
|
def get_states(utc_point_in_time, entity_ids=None, run=None, filters=None):
    """Return the states at a specific point in time.

    Yields the most recently recorded state per entity as of
    utc_point_in_time, skipping entities marked hidden. 'run' is the
    recorder run covering that moment; looked up when not supplied.
    """
    if run is None:
        run = recorder.run_information(utc_point_in_time)

        # History did not run before utc_point_in_time
        if run is None:
            return []

    # NOTE(review): sqlalchemy is imported locally here — presumably to
    # defer the dependency until the recorder is actually set up; confirm.
    from sqlalchemy import and_, func

    states = recorder.get_model('States')
    # Subquery: highest state_id (i.e. most recent row) per entity created
    # within this run and strictly before utc_point_in_time.
    most_recent_state_ids = recorder.query(
        func.max(states.state_id).label('max_state_id')
    ).filter(
        (states.created >= run.start) &
        (states.created < utc_point_in_time) &
        (~states.domain.in_(IGNORE_DOMAINS)))
    if filters:
        most_recent_state_ids = filters.apply(most_recent_state_ids,
                                              entity_ids)

    most_recent_state_ids = most_recent_state_ids.group_by(
        states.entity_id).subquery()

    # Join back to the States table to fetch the full row for each of the
    # selected state ids.
    query = recorder.query('States').join(most_recent_state_ids, and_(
        states.state_id == most_recent_state_ids.c.max_state_id))

    for state in recorder.execute(query):
        if not state.attributes.get(ATTR_HIDDEN, False):
            yield state
|
2015-02-07 21:23:01 +00:00
|
|
|
|
|
|
|
|
2016-10-13 15:54:45 +00:00
|
|
|
def states_to_json(states, start_time, entity_id, filters=None):
    """Convert SQL results into JSON friendly data structure.

    This takes our state list and turns it into a JSON friendly data
    structure {'entity_id': [list of states], 'entity_id2': [list of states]}

    We also need to go back and create a synthetic zero data point for
    each list of states, otherwise our graphs won't start on the Y
    axis correctly.
    """
    result = defaultdict(list)

    entity_ids = [entity_id] if entity_id is not None else None

    # Seed every entity with its state at start_time so each series has a
    # well-defined starting point.
    for initial_state in get_states(start_time, entity_ids, filters=filters):
        initial_state.last_changed = start_time
        initial_state.last_updated = start_time
        result[initial_state.entity_id].append(initial_state)

    # Append all subsequent changes, grouped per entity (input is ordered
    # by entity_id, so groupby sees each entity exactly once).
    for ent_id, group in groupby(states, lambda state: state.entity_id):
        result[ent_id].extend(group)

    return result
|
|
|
|
|
|
|
|
|
2015-04-29 02:12:05 +00:00
|
|
|
def get_state(utc_point_in_time, entity_id, run=None):
    """Return a state at a specific point in time."""
    # get_states yields at most one state for a single-entity query; take
    # the first result or None when nothing was recorded.
    return next(iter(get_states(utc_point_in_time, (entity_id,), run)), None)
|
2015-02-02 02:00:30 +00:00
|
|
|
|
|
|
|
|
2015-04-07 08:01:23 +00:00
|
|
|
# pylint: disable=unused-argument
def setup(hass, config):
    """Setup the history hooks."""
    filters = Filters()
    conf = config[DOMAIN]

    exclude = conf.get(CONF_EXCLUDE)
    if exclude:
        filters.excluded_entities = exclude[CONF_ENTITIES]
        filters.excluded_domains = exclude[CONF_DOMAINS]

    include = conf.get(CONF_INCLUDE)
    if include:
        filters.included_entities = include[CONF_ENTITIES]
        filters.included_domains = include[CONF_DOMAINS]

    hass.http.register_view(Last5StatesView)
    hass.http.register_view(HistoryPeriodView(filters))
    register_built_in_panel(hass, 'history', 'History', 'mdi:poll-box')

    return True
|
2015-01-31 18:31:16 +00:00
|
|
|
|
|
|
|
|
2016-05-14 07:58:36 +00:00
|
|
|
class Last5StatesView(HomeAssistantView):
    """Handle last 5 state view requests."""

    url = '/api/history/entity/{entity_id}/recent_states'
    name = 'api:history:entity-recent-states'

    @asyncio.coroutine
    def get(self, request, entity_id):
        """Retrieve last 5 states of entity."""
        hass = request.app['hass']
        # last_5_states hits the database, so run it off the event loop.
        states = yield from hass.loop.run_in_executor(
            None, last_5_states, entity_id)
        return self.json(states)
|
2016-05-14 07:58:36 +00:00
|
|
|
|
2015-01-31 18:31:16 +00:00
|
|
|
|
2016-05-14 07:58:36 +00:00
|
|
|
class HistoryPeriodView(HomeAssistantView):
    """Handle history period requests."""

    url = '/api/history/period'
    name = 'api:history:view-period'
    extra_urls = ['/api/history/period/{datetime}']

    def __init__(self, filters):
        """Initialize the history period view."""
        self.filters = filters

    @asyncio.coroutine
    def get(self, request, datetime=None):
        """Return history over a period of time."""
        parsed = None
        if datetime:
            parsed = dt_util.parse_datetime(datetime)
            if parsed is None:
                return self.json_message('Invalid datetime', HTTP_BAD_REQUEST)

        now = dt_util.utcnow()
        period = timedelta(days=1)

        # Default window: the last 24 hours ending now.
        start_time = dt_util.as_utc(parsed) if parsed else now - period

        # A window entirely in the future has no recorded data.
        if start_time > now:
            return self.json([])

        end_time = start_time + period
        entity_id = request.GET.get('filter_entity_id')

        # The significant-states query hits the database, so run it off
        # the event loop.
        significant_states = yield from request.app['hass'].loop.run_in_executor(
            None, get_significant_states, start_time, end_time, entity_id,
            self.filters)

        return self.json(significant_states.values())
|
2016-10-13 15:54:45 +00:00
|
|
|
|
|
|
|
|
|
|
|
class Filters(object):
    """Container for the configured include and exclude filters."""

    def __init__(self):
        """Initialise the include and exclude filters."""
        # Entity ids and domain names to drop from results.
        self.excluded_entities = []
        self.excluded_domains = []
        # Entity ids and domain names to restrict results to.
        self.included_entities = []
        self.included_domains = []

    def apply(self, query, entity_ids=None):
        """Apply the include/exclude filter on domains and entities on query.

        Following rules apply:
        * only the include section is configured - just query the specified
          entities or domains.
        * only the exclude section is configured - filter the specified
          entities and domains from all the entities in the system.
        * if include and exclude is defined - select the entities specified in
          the include and filter out the ones from the exclude list.

        Returns the (possibly) narrowed SQLAlchemy query.
        """
        states = recorder.get_model('States')
        # specific entities requested - do not in/exclude anything
        if entity_ids is not None:
            return query.filter(states.entity_id.in_(entity_ids))
        # Always drop domains that are never interesting for history.
        query = query.filter(~states.domain.in_(IGNORE_DOMAINS))

        filter_query = None
        # filter if only excluded domain is configured
        if self.excluded_domains and not self.included_domains:
            filter_query = ~states.domain.in_(self.excluded_domains)
            if self.included_entities:
                filter_query &= states.entity_id.in_(self.included_entities)
        # filter if only included domain is configured
        elif not self.excluded_domains and self.included_domains:
            filter_query = states.domain.in_(self.included_domains)
            if self.included_entities:
                # Included entities widen the selection here (OR), unlike
                # the exclude-only case above where they narrow it (AND).
                filter_query |= states.entity_id.in_(self.included_entities)
        # filter if included and excluded domain is configured
        elif self.excluded_domains and self.included_domains:
            filter_query = ~states.domain.in_(self.excluded_domains)
            if self.included_entities:
                filter_query &= (states.domain.in_(self.included_domains) |
                                 states.entity_id.in_(self.included_entities))
            else:
                filter_query &= (states.domain.in_(self.included_domains) & ~
                                 states.domain.in_(self.excluded_domains))
        # no domain filter just included entities
        elif not self.excluded_domains and not self.included_domains and \
                self.included_entities:
            filter_query = states.entity_id.in_(self.included_entities)
        if filter_query is not None:
            query = query.filter(filter_query)
        # finally apply excluded entities filter if configured
        if self.excluded_entities:
            query = query.filter(~states.entity_id.in_(self.excluded_entities))
        return query
|
2016-03-05 18:28:48 +00:00
|
|
|
|
|
|
|
|
|
|
|
def _is_significant(state):
|
2016-03-08 16:55:57 +00:00
|
|
|
"""Test if state is significant for history charts.
|
2016-03-05 18:28:48 +00:00
|
|
|
|
|
|
|
Will only test for things that are not filtered out in SQL.
|
|
|
|
"""
|
|
|
|
# scripts that are not cancellable will never change state
|
|
|
|
return (state.domain != 'script' or
|
|
|
|
state.attributes.get(script.ATTR_CAN_CANCEL))
|