"""The tests the History component."""
# pylint: disable=protected-access,invalid-name
from datetime import timedelta
import unittest
from unittest.mock import patch, sentinel

from homeassistant.setup import setup_component, async_setup_component
import homeassistant.core as ha
import homeassistant.util.dt as dt_util
from homeassistant.components import history, recorder

from tests.common import (
    init_recorder_component,
    mock_state_change_event,
    get_test_home_assistant,
)
class TestComponentHistory(unittest.TestCase):
    """Test History component."""

    def setUp(self):  # pylint: disable=invalid-name
        """Set up things to be run when tests are started."""
        self.hass = get_test_home_assistant()

    def tearDown(self):  # pylint: disable=invalid-name
        """Stop everything that was started."""
        self.hass.stop()

    def init_recorder(self):
        """Initialize the recorder and wait until it is ready."""
        init_recorder_component(self.hass)
        self.hass.start()
        self.wait_recording_done()

    def wait_recording_done(self):
        """Block till recording is done."""
        self.hass.block_till_done()
        self.hass.data[recorder.DATA_INSTANCE].block_till_done()

    def test_setup(self):
        """Test setup method of history."""
        config = history.CONFIG_SCHEMA(
            {
                # ha.DOMAIN: {},
                history.DOMAIN: {
                    history.CONF_INCLUDE: {
                        history.CONF_DOMAINS: ["media_player"],
                        history.CONF_ENTITIES: ["thermostat.test"],
                    },
                    history.CONF_EXCLUDE: {
                        history.CONF_DOMAINS: ["thermostat"],
                        history.CONF_ENTITIES: ["media_player.test"],
                    },
                }
            }
        )
        self.init_recorder()
        assert setup_component(self.hass, history.DOMAIN, config)

    def test_get_states(self):
        """Test getting states at a specific point in time."""
        self.init_recorder()
        states = []

        now = dt_util.utcnow()
        with patch(
            "homeassistant.components.recorder.dt_util.utcnow", return_value=now
        ):
            for i in range(5):
                state = ha.State(
                    f"test.point_in_time_{i % 5}",
                    f"State {i}",
                    {"attribute_test": i},
                )
                mock_state_change_event(self.hass, state)
                states.append(state)

            self.wait_recording_done()

        future = now + timedelta(seconds=1)
        with patch(
            "homeassistant.components.recorder.dt_util.utcnow", return_value=future
        ):
            # Record a second, later batch for the same entities.
            for i in range(5):
                state = ha.State(
                    f"test.point_in_time_{i % 5}",
                    f"State {i}",
                    {"attribute_test": i},
                )
                mock_state_change_event(self.hass, state)

            self.wait_recording_done()

        # Get states returns everything before POINT
        retrieved = sorted(
            history.get_states(self.hass, future), key=lambda state: state.entity_id
        )
        for expected, actual in zip(states, retrieved):
            assert expected == actual

        # Test get_state here because we have a DB setup
        assert states[0] == history.get_state(self.hass, future, states[0].entity_id)

    def test_state_changes_during_period(self):
        """Test state change during period."""
        self.init_recorder()
        entity_id = "media_player.test"

        def set_state(state):
            """Set the state and return the recorded State object."""
            self.hass.states.set(entity_id, state)
            self.wait_recording_done()
            return self.hass.states.get(entity_id)

        start = dt_util.utcnow()
        point = start + timedelta(seconds=1)
        end = point + timedelta(seconds=1)

        with patch(
            "homeassistant.components.recorder.dt_util.utcnow", return_value=start
        ):
            set_state("idle")
            set_state("YouTube")

        with patch(
            "homeassistant.components.recorder.dt_util.utcnow", return_value=point
        ):
            states = [
                set_state("idle"),
                set_state("Netflix"),
                set_state("Plex"),
                set_state("YouTube"),
            ]

        with patch(
            "homeassistant.components.recorder.dt_util.utcnow", return_value=end
        ):
            set_state("Netflix")
            set_state("Plex")

        hist = history.state_changes_during_period(self.hass, start, end, entity_id)

        # Only the changes recorded inside [start, end) should be returned.
        assert states == hist[entity_id]

    def test_get_last_state_changes(self):
        """Test number of state changes."""
        self.init_recorder()
        entity_id = "sensor.test"

        def set_state(state):
            """Set the state and return the recorded State object."""
            self.hass.states.set(entity_id, state)
            self.wait_recording_done()
            return self.hass.states.get(entity_id)

        start = dt_util.utcnow() - timedelta(minutes=2)
        point = start + timedelta(minutes=1)
        point2 = point + timedelta(minutes=1)

        with patch(
            "homeassistant.components.recorder.dt_util.utcnow", return_value=start
        ):
            set_state("1")

        states = []
        with patch(
            "homeassistant.components.recorder.dt_util.utcnow", return_value=point
        ):
            states.append(set_state("2"))

        with patch(
            "homeassistant.components.recorder.dt_util.utcnow", return_value=point2
        ):
            states.append(set_state("3"))

        # Ask for the last two changes; the very first one should be dropped.
        hist = history.get_last_state_changes(self.hass, 2, entity_id)

        assert states == hist[entity_id]

    def test_get_significant_states(self):
        """Test that only significant states are returned.

        We should get back every thermostat change that
        includes an attribute change, but only the state updates for
        media player (attribute changes are not significant and not returned).
        """
        zero, four, states = self.record_states()
        hist = history.get_significant_states(
            self.hass, zero, four, filters=history.Filters()
        )
        assert states == hist

    def test_get_significant_states_with_initial(self):
        """Test that only significant states are returned.

        We should get back every thermostat change that
        includes an attribute change, but only the state updates for
        media player (attribute changes are not significant and not returned).
        """
        zero, four, states = self.record_states()
        one = zero + timedelta(seconds=1)
        one_and_half = zero + timedelta(seconds=1.5)
        for entity_id in states:
            if entity_id == "media_player.test":
                states[entity_id] = states[entity_id][1:]
            # States recorded at `one` are reported as of the window start.
            for state in states[entity_id]:
                if state.last_changed == one:
                    state.last_changed = one_and_half

        hist = history.get_significant_states(
            self.hass,
            one_and_half,
            four,
            filters=history.Filters(),
            include_start_time_state=True,
        )
        assert states == hist

    def test_get_significant_states_without_initial(self):
        """Test that only significant states are returned.

        We should get back every thermostat change that
        includes an attribute change, but only the state updates for
        media player (attribute changes are not significant and not returned).
        """
        zero, four, states = self.record_states()
        one = zero + timedelta(seconds=1)
        one_and_half = zero + timedelta(seconds=1.5)
        for entity_id in states:
            # Without the initial state, anything recorded at `one` is gone.
            states[entity_id] = [
                s for s in states[entity_id] if s.last_changed != one
            ]
        del states["media_player.test2"]

        hist = history.get_significant_states(
            self.hass,
            one_and_half,
            four,
            filters=history.Filters(),
            include_start_time_state=False,
        )
        assert states == hist

    def test_get_significant_states_entity_id(self):
        """Test that only significant states are returned for one entity."""
        zero, four, states = self.record_states()
        for entity_id in (
            "media_player.test2",
            "thermostat.test",
            "thermostat.test2",
            "script.can_cancel_this_one",
        ):
            del states[entity_id]

        hist = history.get_significant_states(
            self.hass, zero, four, ["media_player.test"], filters=history.Filters()
        )
        assert states == hist

    def test_get_significant_states_multiple_entity_ids(self):
        """Test that only significant states are returned for one entity."""
        zero, four, states = self.record_states()
        for entity_id in (
            "media_player.test2",
            "thermostat.test2",
            "script.can_cancel_this_one",
        ):
            del states[entity_id]

        hist = history.get_significant_states(
            self.hass,
            zero,
            four,
            ["media_player.test", "thermostat.test"],
            filters=history.Filters(),
        )
        assert states == hist

    def test_get_significant_states_exclude_domain(self):
        """Test if significant states are returned when excluding domains.

        We should get back every thermostat change that includes an attribute
        change, but no media player changes.
        """
        zero, four, states = self.record_states()
        for entity_id in ("media_player.test", "media_player.test2"):
            del states[entity_id]

        config = history.CONFIG_SCHEMA(
            {
                ha.DOMAIN: {},
                history.DOMAIN: {
                    history.CONF_EXCLUDE: {history.CONF_DOMAINS: ["media_player"]}
                },
            }
        )
        self.check_significant_states(zero, four, states, config)

    def test_get_significant_states_exclude_entity(self):
        """Test if significant states are returned when excluding entities.

        We should get back every thermostat and script changes, but no media
        player changes.
        """
        zero, four, states = self.record_states()
        del states["media_player.test"]

        config = history.CONFIG_SCHEMA(
            {
                ha.DOMAIN: {},
                history.DOMAIN: {
                    history.CONF_EXCLUDE: {history.CONF_ENTITIES: ["media_player.test"]}
                },
            }
        )
        self.check_significant_states(zero, four, states, config)

    def test_get_significant_states_exclude(self):
        """Test significant states when excluding entities and domains.

        We should not get back every thermostat and media player test changes.
        """
        zero, four, states = self.record_states()
        for entity_id in ("media_player.test", "thermostat.test", "thermostat.test2"):
            del states[entity_id]

        config = history.CONFIG_SCHEMA(
            {
                ha.DOMAIN: {},
                history.DOMAIN: {
                    history.CONF_EXCLUDE: {
                        history.CONF_DOMAINS: ["thermostat"],
                        history.CONF_ENTITIES: ["media_player.test"],
                    }
                },
            }
        )
        self.check_significant_states(zero, four, states, config)

    def test_get_significant_states_exclude_include_entity(self):
        """Test significant states when excluding domains and include entities.

        We should not get back every thermostat and media player test changes.
        """
        zero, four, states = self.record_states()
        for entity_id in (
            "media_player.test2",
            "thermostat.test",
            "thermostat.test2",
            "script.can_cancel_this_one",
        ):
            del states[entity_id]

        config = history.CONFIG_SCHEMA(
            {
                ha.DOMAIN: {},
                history.DOMAIN: {
                    history.CONF_INCLUDE: {
                        history.CONF_ENTITIES: ["media_player.test", "thermostat.test"]
                    },
                    history.CONF_EXCLUDE: {history.CONF_DOMAINS: ["thermostat"]},
                },
            }
        )
        self.check_significant_states(zero, four, states, config)

    def test_get_significant_states_include_domain(self):
        """Test if significant states are returned when including domains.

        We should get back every thermostat and script changes, but no media
        player changes.
        """
        zero, four, states = self.record_states()
        for entity_id in ("media_player.test", "media_player.test2"):
            del states[entity_id]

        config = history.CONFIG_SCHEMA(
            {
                ha.DOMAIN: {},
                history.DOMAIN: {
                    history.CONF_INCLUDE: {
                        history.CONF_DOMAINS: ["thermostat", "script"]
                    }
                },
            }
        )
        self.check_significant_states(zero, four, states, config)

    def test_get_significant_states_include_entity(self):
        """Test if significant states are returned when including entities.

        We should only get back changes of the media_player.test entity.
        """
        zero, four, states = self.record_states()
        for entity_id in (
            "media_player.test2",
            "thermostat.test",
            "thermostat.test2",
            "script.can_cancel_this_one",
        ):
            del states[entity_id]

        config = history.CONFIG_SCHEMA(
            {
                ha.DOMAIN: {},
                history.DOMAIN: {
                    history.CONF_INCLUDE: {history.CONF_ENTITIES: ["media_player.test"]}
                },
            }
        )
        self.check_significant_states(zero, four, states, config)

    def test_get_significant_states_include(self):
        """Test significant states when including domains and entities.

        We should only get back changes of the media_player.test entity and the
        thermostat domain.
        """
        zero, four, states = self.record_states()
        for entity_id in ("media_player.test2", "script.can_cancel_this_one"):
            del states[entity_id]

        config = history.CONFIG_SCHEMA(
            {
                ha.DOMAIN: {},
                history.DOMAIN: {
                    history.CONF_INCLUDE: {
                        history.CONF_DOMAINS: ["thermostat"],
                        history.CONF_ENTITIES: ["media_player.test"],
                    }
                },
            }
        )
        self.check_significant_states(zero, four, states, config)

    def test_get_significant_states_include_exclude_domain(self):
        """Test if significant states when excluding and including domains.

        We should not get back any changes since we include only the
        media_player domain but also exclude it.
        """
        zero, four, states = self.record_states()
        for entity_id in (
            "media_player.test",
            "media_player.test2",
            "thermostat.test",
            "thermostat.test2",
            "script.can_cancel_this_one",
        ):
            del states[entity_id]

        config = history.CONFIG_SCHEMA(
            {
                ha.DOMAIN: {},
                history.DOMAIN: {
                    history.CONF_INCLUDE: {history.CONF_DOMAINS: ["media_player"]},
                    history.CONF_EXCLUDE: {history.CONF_DOMAINS: ["media_player"]},
                },
            }
        )
        self.check_significant_states(zero, four, states, config)

    def test_get_significant_states_include_exclude_entity(self):
        """Test if significant states when excluding and including domains.

        We should not get back any changes since we include only
        media_player.test but also exclude it.
        """
        zero, four, states = self.record_states()
        for entity_id in (
            "media_player.test",
            "media_player.test2",
            "thermostat.test",
            "thermostat.test2",
            "script.can_cancel_this_one",
        ):
            del states[entity_id]

        config = history.CONFIG_SCHEMA(
            {
                ha.DOMAIN: {},
                history.DOMAIN: {
                    history.CONF_INCLUDE: {
                        history.CONF_ENTITIES: ["media_player.test"]
                    },
                    history.CONF_EXCLUDE: {
                        history.CONF_ENTITIES: ["media_player.test"]
                    },
                },
            }
        )
        self.check_significant_states(zero, four, states, config)

    def test_get_significant_states_include_exclude(self):
        """Test if significant states when in/excluding domains and entities.

        We should only get back changes of the media_player.test2 entity.
        """
        zero, four, states = self.record_states()
        for entity_id in (
            "media_player.test",
            "thermostat.test",
            "thermostat.test2",
            "script.can_cancel_this_one",
        ):
            del states[entity_id]

        config = history.CONFIG_SCHEMA(
            {
                ha.DOMAIN: {},
                history.DOMAIN: {
                    history.CONF_INCLUDE: {
                        history.CONF_DOMAINS: ["media_player"],
                        history.CONF_ENTITIES: ["thermostat.test"],
                    },
                    history.CONF_EXCLUDE: {
                        history.CONF_DOMAINS: ["thermostat"],
                        history.CONF_ENTITIES: ["media_player.test"],
                    },
                },
            }
        )
        self.check_significant_states(zero, four, states, config)

    def test_get_significant_states_are_ordered(self):
        """Test order of results from get_significant_states.

        When entity ids are given, the results should be returned with the data
        in the same order.
        """
        zero, four, states = self.record_states()

        entity_ids = ["media_player.test", "media_player.test2"]
        hist = history.get_significant_states(
            self.hass, zero, four, entity_ids, filters=history.Filters()
        )
        assert list(hist.keys()) == entity_ids

        entity_ids = ["media_player.test2", "media_player.test"]
        hist = history.get_significant_states(
            self.hass, zero, four, entity_ids, filters=history.Filters()
        )
        assert list(hist.keys()) == entity_ids

    def check_significant_states(self, zero, four, states, config):
        """Check if significant states are retrieved."""
        filters = history.Filters()
        exclude = config[history.DOMAIN].get(history.CONF_EXCLUDE)
        if exclude:
            filters.excluded_entities = exclude.get(history.CONF_ENTITIES, [])
            filters.excluded_domains = exclude.get(history.CONF_DOMAINS, [])
        include = config[history.DOMAIN].get(history.CONF_INCLUDE)
        if include:
            filters.included_entities = include.get(history.CONF_ENTITIES, [])
            filters.included_domains = include.get(history.CONF_DOMAINS, [])

        hist = history.get_significant_states(self.hass, zero, four, filters=filters)
        assert states == hist

    def record_states(self):
        """Record some test states.

        We inject a bunch of state updates from media player, zone and
        thermostat.
        """
        self.init_recorder()
        mp = "media_player.test"
        mp2 = "media_player.test2"
        therm = "thermostat.test"
        therm2 = "thermostat.test2"
        zone = "zone.home"
        script_nc = "script.cannot_cancel_this_one"
        script_c = "script.can_cancel_this_one"

        def set_state(entity_id, state, **kwargs):
            """Set the state and return the recorded State object."""
            self.hass.states.set(entity_id, state, **kwargs)
            self.wait_recording_done()
            return self.hass.states.get(entity_id)

        zero = dt_util.utcnow()
        one = zero + timedelta(seconds=1)
        two = one + timedelta(seconds=1)
        three = two + timedelta(seconds=1)
        four = three + timedelta(seconds=1)

        states = {therm: [], therm2: [], mp: [], mp2: [], script_c: []}
        with patch(
            "homeassistant.components.recorder.dt_util.utcnow", return_value=one
        ):
            states[mp].append(
                set_state(mp, "idle", attributes={"media_title": str(sentinel.mt1)})
            )
            states[mp].append(
                set_state(mp, "YouTube", attributes={"media_title": str(sentinel.mt2)})
            )
            states[mp2].append(
                set_state(mp2, "YouTube", attributes={"media_title": str(sentinel.mt2)})
            )
            states[therm].append(
                set_state(therm, 20, attributes={"current_temperature": 19.5})
            )

        with patch(
            "homeassistant.components.recorder.dt_util.utcnow", return_value=two
        ):
            # This state will be skipped only different in time
            set_state(mp, "YouTube", attributes={"media_title": str(sentinel.mt3)})
            # This state will be skipped because domain blacklisted
            set_state(zone, "zoning")
            set_state(script_nc, "off")
            states[script_c].append(
                set_state(script_c, "off", attributes={"can_cancel": True})
            )
            states[therm].append(
                set_state(therm, 21, attributes={"current_temperature": 19.8})
            )
            states[therm2].append(
                set_state(therm2, 20, attributes={"current_temperature": 19})
            )

        with patch(
            "homeassistant.components.recorder.dt_util.utcnow", return_value=three
        ):
            states[mp].append(
                set_state(mp, "Netflix", attributes={"media_title": str(sentinel.mt4)})
            )
            # Attributes changed even though state is the same
            states[therm].append(
                set_state(therm, 21, attributes={"current_temperature": 20})
            )
            # state will be skipped since entity is hidden
            set_state(therm, 22, attributes={"current_temperature": 21, "hidden": True})
        return zero, four, states
|
2018-03-14 21:29:51 +00:00
|
|
|
|
|
|
|
|
2018-11-27 09:41:44 +00:00
|
|
|
async def test_fetch_period_api(hass, hass_client):
    """Test the fetch period view for history."""
    await hass.async_add_job(init_recorder_component, hass)
    await async_setup_component(hass, "history", {})
    # Make sure the recorder has flushed before hitting the API.
    await hass.async_add_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
    client = await hass_client()
    url = f"/api/history/period/{dt_util.utcnow().isoformat()}"
    response = await client.get(url)
    assert response.status == 200
|