"""
homeassistant.components.history
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

Provide pre-made queries on top of the recorder component.
"""
import re
from datetime import datetime, timedelta
from itertools import groupby
from collections import defaultdict

import homeassistant.components.recorder as recorder

DOMAIN = 'history'
DEPENDENCIES = ['recorder', 'http']


def last_5_states(entity_id):
    """ Return the last 5 states for entity_id. """
    entity_id = entity_id.lower()

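    # Only match rows where the state value itself changed
    # (last_changed == last_updated), skipping attribute-only updates.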
    query = """
        SELECT * FROM states WHERE entity_id=? AND
            last_changed=last_updated
        ORDER BY last_changed DESC LIMIT 0, 5
    """

    return recorder.query_states(query, (entity_id, ))


def state_changes_during_period(start_time, end_time=None, entity_id=None):
    """
    Return state changes during the period start_time - end_time.
    """
    where = "last_changed=last_updated AND last_changed > ? "
    data = [start_time]

    if end_time is not None:
        where += "AND last_changed < ? "
        data.append(end_time)

    if entity_id is not None:
        where += "AND entity_id = ? "
        data.append(entity_id.lower())

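    # Order by entity_id so groupby() below sees each entity's rows as one
    # contiguous group; within an entity the rows stay in chronological order.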
query = ("SELECT * FROM states WHERE {} "
|
|
|
|
"ORDER BY entity_id, last_changed ASC").format(where)
|
|
|
|
|
|
|
|
states = recorder.query_states(query, data)
|
|
|
|
|
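    # Map each entity_id to the list of states it had during the period.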
    result = defaultdict(list)

    # Get the states at the start time
    for state in get_states(start_time):
        state.last_changed = start_time
        result[state.entity_id].append(state)

    # Append all changes to it
    for entity_id, group in groupby(states, lambda state: state.entity_id):
        result[entity_id].extend(group)

    return result


def get_states(point_in_time, entity_ids=None, run=None):
    """ Returns the states at a specific point in time. """
    if run is None:
        run = recorder.run_information(point_in_time)

        # History did not run before point_in_time
        if run is None:
            return []

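    # run.where_after_start_run limits the rows to the recorder run covering
    # point_in_time; "created < ?" then cuts off anything written after it.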
    where = run.where_after_start_run + "AND created < ? "
    where_data = [point_in_time]

    if entity_ids is not None:
        where += "AND entity_id IN ({}) ".format(
            ",".join(['?'] * len(entity_ids)))
        where_data.extend(entity_ids)

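    # The inner query picks the highest state_id (the most recent row) per
    # entity within the WHERE constraints; the join keeps only those rows.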
query = """
|
|
|
|
SELECT * FROM states
|
|
|
|
INNER JOIN (
|
|
|
|
SELECT max(state_id) AS max_state_id
|
|
|
|
FROM states WHERE {}
|
|
|
|
GROUP BY entity_id)
|
|
|
|
WHERE state_id = max_state_id
|
|
|
|
""".format(where)
|
|
|
|
|
|
|
|
return recorder.query_states(query, where_data)
|
|
|
|
|
|
|
|
|
|
|
|
def get_state(point_in_time, entity_id, run=None):
    """ Return a state at a specific point in time. """
    states = get_states(point_in_time, (entity_id,), run)

    return states[0] if states else None


def setup(hass, config):
    """ Set up the history hooks. """
    hass.http.register_path(
        'GET',
        re.compile(
            r'/api/history/entity/(?P<entity_id>[a-zA-Z\._0-9]+)/'
            r'recent_states'),
        _api_last_5_states)

    hass.http.register_path(
        'GET', re.compile(r'/api/history/period'), _api_history_period)

    return True


# pylint: disable=invalid-name
def _api_last_5_states(handler, path_match, data):
    """ Return the last 5 states for an entity id as JSON. """
    entity_id = path_match.group('entity_id')

    handler.write_json(last_5_states(entity_id))


def _api_history_period(handler, path_match, data):
    """ Return history over a period of time. """
    # Only the last day for now
    start_time = datetime.now() - timedelta(seconds=86400)

    entity_id = data.get('filter_entity_id')

    handler.write_json(
        state_changes_during_period(start_time, entity_id=entity_id).values())