core/homeassistant/components/logbook.py

"""
2016-02-23 20:06:50 +00:00
Event parser and human readable log generator.
2015-10-25 14:10:51 +00:00
For more details about this component, please refer to the documentation at
2015-11-09 12:12:18 +00:00
https://home-assistant.io/components/logbook/
2015-03-29 21:43:16 +00:00
"""
import asyncio
import logging
from datetime import timedelta
from itertools import groupby

import voluptuous as vol

from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
import homeassistant.util.dt as dt_util
from homeassistant.components import recorder, sun
from homeassistant.components.frontend import register_built_in_panel
from homeassistant.components.http import HomeAssistantView
from homeassistant.const import (EVENT_HOMEASSISTANT_START,
                                 EVENT_HOMEASSISTANT_STOP,
                                 EVENT_STATE_CHANGED, STATE_NOT_HOME,
                                 STATE_OFF, STATE_ON, ATTR_HIDDEN,
                                 HTTP_BAD_REQUEST)
from homeassistant.core import State, split_entity_id, DOMAIN as HA_DOMAIN
DOMAIN = "logbook"
DEPENDENCIES = ['recorder', 'frontend']

_LOGGER = logging.getLogger(__name__)

CONF_EXCLUDE = 'exclude'
CONF_INCLUDE = 'include'
CONF_ENTITIES = 'entities'
CONF_DOMAINS = 'domains'

CONFIG_SCHEMA = vol.Schema({
    DOMAIN: vol.Schema({
        CONF_EXCLUDE: vol.Schema({
            vol.Optional(CONF_ENTITIES, default=[]): cv.entity_ids,
            vol.Optional(CONF_DOMAINS, default=[]): vol.All(cv.ensure_list,
                                                            [cv.string])
        }),
        CONF_INCLUDE: vol.Schema({
            vol.Optional(CONF_ENTITIES, default=[]): cv.entity_ids,
            vol.Optional(CONF_DOMAINS, default=[]): vol.All(cv.ensure_list,
                                                            [cv.string])
        })
    }),
}, extra=vol.ALLOW_EXTRA)
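
# A minimal configuration.yaml sketch for the schema above; the domains and
# entity ids are illustrative placeholders, not defaults of this component:
#
#     logbook:
#       exclude:
#         domains:
#           - sun
#         entities:
#           - sensor.date
#       include:
#         domains:
#           - light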

EVENT_LOGBOOK_ENTRY = 'logbook_entry'

GROUP_BY_MINUTES = 15

ATTR_NAME = 'name'
ATTR_MESSAGE = 'message'
ATTR_DOMAIN = 'domain'
ATTR_ENTITY_ID = 'entity_id'

LOG_MESSAGE_SCHEMA = vol.Schema({
    vol.Required(ATTR_NAME): cv.string,
    vol.Required(ATTR_MESSAGE): cv.template,
    vol.Optional(ATTR_DOMAIN): cv.slug,
    vol.Optional(ATTR_ENTITY_ID): cv.entity_id,
})


def log_entry(hass, name, message, domain=None, entity_id=None):
    """Add an entry to the logbook."""
    hass.add_job(async_log_entry, hass, name, message, domain, entity_id)


def async_log_entry(hass, name, message, domain=None, entity_id=None):
    """Add an entry to the logbook."""
    data = {
        ATTR_NAME: name,
        ATTR_MESSAGE: message
    }

    if domain is not None:
        data[ATTR_DOMAIN] = domain
    if entity_id is not None:
        data[ATTR_ENTITY_ID] = entity_id
    hass.bus.async_fire(EVENT_LOGBOOK_ENTRY, data)
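
# Illustrative usage only: other components can add custom entries either by
# calling log_entry() directly or via the 'logbook.log' service registered in
# setup() below. The name, message and entity id here are made-up examples.
#
#     log_entry(hass, 'Paulus', 'arrived home',
#               domain='device_tracker',
#               entity_id='device_tracker.paulus')
#
# Equivalent 'logbook.log' service data (validated by LOG_MESSAGE_SCHEMA):
#
#     {"name": "Paulus", "message": "arrived home",
#      "entity_id": "device_tracker.paulus"}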


def setup(hass, config):
    """Set up the logbook component."""
    @callback
    def log_message(service):
        """Handle the logbook.log service call."""
        message = service.data[ATTR_MESSAGE]
        name = service.data[ATTR_NAME]
        domain = service.data.get(ATTR_DOMAIN)
        entity_id = service.data.get(ATTR_ENTITY_ID)

        message.hass = hass
        message = message.async_render()
        async_log_entry(hass, name, message, domain, entity_id)

    hass.http.register_view(LogbookView(config))

    register_built_in_panel(hass, 'logbook', 'Logbook',
                            'mdi:format-list-bulleted-type')

    hass.services.register(DOMAIN, 'log', log_message,
                           schema=LOG_MESSAGE_SCHEMA)

    return True


class LogbookView(HomeAssistantView):
    """Handle logbook view requests."""

    url = '/api/logbook'
    name = 'api:logbook'
    extra_urls = ['/api/logbook/{datetime}']

    def __init__(self, config):
        """Initialize the logbook view."""
        self.config = config

    @asyncio.coroutine
    def get(self, request, datetime=None):
        """Retrieve logbook entries."""
        if datetime:
            datetime = dt_util.parse_datetime(datetime)

            if datetime is None:
                return self.json_message('Invalid datetime', HTTP_BAD_REQUEST)
        else:
            datetime = dt_util.start_of_local_day()

        start_day = dt_util.as_utc(datetime)
        end_day = start_day + timedelta(days=1)

        def get_results():
            """Query DB for results."""
            events = recorder.get_model('Events')
            query = recorder.query('Events').order_by(
                events.time_fired).filter(
                    (events.time_fired > start_day) &
                    (events.time_fired < end_day))
            events = recorder.execute(query)
            return _exclude_events(events, self.config)

        events = yield from request.app['hass'].loop.run_in_executor(
            None, get_results)

        return self.json(humanify(events))
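
# Illustrative request against the view above (the URL pattern is real, the
# timestamp is an example value); entries for the 24 hours starting at the
# given datetime are returned as JSON:
#
#     GET /api/logbook/2016-12-01T00:00:00+02:00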


class Entry(object):
    """A human readable version of the log."""

    def __init__(self, when=None, name=None, message=None, domain=None,
                 entity_id=None):
        """Initialize the entry."""
        self.when = when
        self.name = name
        self.message = message
        self.domain = domain
        self.entity_id = entity_id

    def as_dict(self):
        """Convert entry to a dict to be used within JSON."""
        return {
            'when': self.when,
            'name': self.name,
            'message': self.message,
            'domain': self.domain,
            'entity_id': self.entity_id,
        }
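
# Sketch of the dict produced by as_dict() once rendered to JSON by the HTTP
# view; the field values are made-up examples:
#
#     {"when": "2016-12-01T18:15:00+00:00", "name": "Porch Light",
#      "message": "turned on", "domain": "light",
#      "entity_id": "light.porch"}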


def humanify(events):
    """Generator that converts a list of events into Entry objects.

    Will try to group events if possible:
    - if 2+ sensor updates in GROUP_BY_MINUTES, show last
    - if home assistant stop and start happen in same minute call it restarted
    """
    # Group events in batches of GROUP_BY_MINUTES
    for _, g_events in groupby(
            events,
            lambda event: event.time_fired.minute // GROUP_BY_MINUTES):

        events_batch = list(g_events)

        # Keep track of last sensor states
        last_sensor_event = {}

        # Group HA start/stop events
        # Maps minute of event to 1: stop, 2: stop + start
        start_stop_events = {}

        # Process events
        for event in events_batch:
            if event.event_type == EVENT_STATE_CHANGED:
                entity_id = event.data.get('entity_id')

                if entity_id is None:
                    continue

                if entity_id.startswith('sensor.'):
                    last_sensor_event[entity_id] = event

            elif event.event_type == EVENT_HOMEASSISTANT_STOP:
                if event.time_fired.minute in start_stop_events:
                    continue

                start_stop_events[event.time_fired.minute] = 1

            elif event.event_type == EVENT_HOMEASSISTANT_START:
                if event.time_fired.minute not in start_stop_events:
                    continue

                start_stop_events[event.time_fired.minute] = 2

        # Yield entries
        for event in events_batch:
            if event.event_type == EVENT_STATE_CHANGED:
                to_state = State.from_dict(event.data.get('new_state'))

                # If last_changed != last_updated only attributes have changed
                # we do not report on that yet. Also filter auto groups.
                if not to_state or \
                        to_state.last_changed != to_state.last_updated or \
                        to_state.domain == 'group' and \
                        to_state.attributes.get('auto', False):
                    continue

                domain = to_state.domain

                # Skip all but the last sensor state
                if domain == 'sensor' and \
                        event != last_sensor_event[to_state.entity_id]:
                    continue

                # Don't show continuous sensor value changes in the logbook
                if domain == 'sensor' and \
                        to_state.attributes.get('unit_of_measurement'):
                    continue

                yield Entry(
                    event.time_fired,
                    name=to_state.name,
                    message=_entry_message_from_state(domain, to_state),
                    domain=domain,
                    entity_id=to_state.entity_id)

            elif event.event_type == EVENT_HOMEASSISTANT_START:
                if start_stop_events.get(event.time_fired.minute) == 2:
                    continue

                yield Entry(
                    event.time_fired, "Home Assistant", "started",
                    domain=HA_DOMAIN)

            elif event.event_type == EVENT_HOMEASSISTANT_STOP:
                if start_stop_events.get(event.time_fired.minute) == 2:
                    action = "restarted"
                else:
                    action = "stopped"

                yield Entry(
                    event.time_fired, "Home Assistant", action,
                    domain=HA_DOMAIN)

            elif event.event_type == EVENT_LOGBOOK_ENTRY:
                domain = event.data.get(ATTR_DOMAIN)
                entity_id = event.data.get(ATTR_ENTITY_ID)
                if domain is None and entity_id is not None:
                    try:
                        domain = split_entity_id(str(entity_id))[0]
                    except IndexError:
                        pass

                yield Entry(
                    event.time_fired, event.data.get(ATTR_NAME),
                    event.data.get(ATTR_MESSAGE), domain,
                    entity_id)
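
# Rough sketch of the grouping behaviour above (times are invented): two state
# changes of the same unit-less sensor fired at 18:01 and 18:07 land in the
# same GROUP_BY_MINUTES batch, so only the 18:07 update is yielded; a stop
# event and a start event fired in the same minute are collapsed into a single
# "restarted" entry.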


def _exclude_events(events, config):
    """Filter events according to the configured include/exclude lists."""
    excluded_entities = []
    excluded_domains = []
    included_entities = []
    included_domains = []
    exclude = config[DOMAIN].get(CONF_EXCLUDE)
    if exclude:
        excluded_entities = exclude[CONF_ENTITIES]
        excluded_domains = exclude[CONF_DOMAINS]
    include = config[DOMAIN].get(CONF_INCLUDE)
    if include:
        included_entities = include[CONF_ENTITIES]
        included_domains = include[CONF_DOMAINS]

    filtered_events = []
    for event in events:
        domain, entity_id = None, None

        if event.event_type == EVENT_STATE_CHANGED:
            to_state = State.from_dict(event.data.get('new_state'))

            # Do not report on new entities
            if event.data.get('old_state') is None:
                continue

            # Do not report on entity removal
            if not to_state:
                continue

            # exclude entities which are customized hidden
            hidden = to_state.attributes.get(ATTR_HIDDEN, False)
            if hidden:
                continue

            domain = to_state.domain
            entity_id = to_state.entity_id

        elif event.event_type == EVENT_LOGBOOK_ENTRY:
            domain = event.data.get(ATTR_DOMAIN)
            entity_id = event.data.get(ATTR_ENTITY_ID)

        if domain or entity_id:
            # filter if only excluded is configured for this domain
            if excluded_domains and domain in excluded_domains and \
                    not included_domains:
                if (included_entities and entity_id not in included_entities) \
                        or not included_entities:
                    continue
            # filter if only included is configured for this domain
            elif not excluded_domains and included_domains and \
                    domain not in included_domains:
                if (included_entities and entity_id not in included_entities) \
                        or not included_entities:
                    continue
            # filter if included and excluded is configured for this domain
            elif excluded_domains and included_domains and \
                    (domain not in included_domains or
                     domain in excluded_domains):
                if (included_entities and entity_id not in included_entities) \
                        or not included_entities or \
                        domain in excluded_domains:
                    continue
            # filter if only included is configured for this entity
            elif not excluded_domains and not included_domains and \
                    included_entities and entity_id not in included_entities:
                continue
            # check if logbook entry is excluded for this entity
            if entity_id in excluded_entities:
                continue
        filtered_events.append(event)
    return filtered_events
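
# Illustrative outcome of the filtering above for a hypothetical configuration
# with exclude -> domains: [sun] and include -> domains: [light]: sun.sun state
# changes are dropped, light.* changes are kept, and events from other domains
# are kept only if their entity id is listed under include -> entities.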


# pylint: disable=too-many-return-statements
def _entry_message_from_state(domain, state):
    """Convert a state to a message for the logbook."""
    # We pass domain in so we don't have to split entity_id again
    if domain == 'device_tracker':
        if state.state == STATE_NOT_HOME:
            return 'is away'
        else:
            return 'is at {}'.format(state.state)

    elif domain == 'sun':
        if state.state == sun.STATE_ABOVE_HORIZON:
            return 'has risen'
        else:
            return 'has set'

    elif state.state == STATE_ON:
        # Future: combine groups and its entity entries ?
        return "turned on"

    elif state.state == STATE_OFF:
        return "turned off"

    return "changed to {}".format(state.state)