2016-03-09 09:25:50 +00:00
|
|
|
"""Test the helper method for writing tests."""
|
2016-09-13 02:16:14 +00:00
|
|
|
import asyncio
|
2018-07-13 13:31:20 +00:00
|
|
|
from collections import OrderedDict
|
2018-02-24 18:53:59 +00:00
|
|
|
from datetime import timedelta
|
2017-05-02 06:29:01 +00:00
|
|
|
import functools as ft
|
2018-06-29 02:14:26 +00:00
|
|
|
import json
|
2014-12-01 07:14:08 +00:00
|
|
|
import os
|
2016-11-02 20:53:52 +00:00
|
|
|
import sys
|
2017-05-02 06:29:01 +00:00
|
|
|
from unittest.mock import patch, MagicMock, Mock
|
2016-08-23 04:42:05 +00:00
|
|
|
from io import StringIO
|
|
|
|
import logging
|
2016-09-13 02:16:14 +00:00
|
|
|
import threading
|
2016-10-08 18:27:35 +00:00
|
|
|
from contextlib import contextmanager
|
2014-12-01 07:14:08 +00:00
|
|
|
|
2018-08-09 11:24:14 +00:00
|
|
|
from homeassistant import auth, core as ha, config_entries
|
2018-07-13 13:31:20 +00:00
|
|
|
from homeassistant.auth import (
|
|
|
|
models as auth_models, auth_store, providers as auth_providers)
|
2017-06-04 01:51:29 +00:00
|
|
|
from homeassistant.setup import setup_component, async_setup_component
|
2017-03-01 04:33:19 +00:00
|
|
|
from homeassistant.config import async_process_component_config
|
2018-02-08 11:16:51 +00:00
|
|
|
from homeassistant.helpers import (
|
2018-06-25 16:53:49 +00:00
|
|
|
intent, entity, restore_state, entity_registry,
|
2018-06-29 02:14:26 +00:00
|
|
|
entity_platform, storage)
|
2016-08-09 03:42:25 +00:00
|
|
|
from homeassistant.util.unit_system import METRIC_SYSTEM
|
2016-04-08 01:32:21 +00:00
|
|
|
import homeassistant.util.dt as date_util
|
2016-08-23 04:42:05 +00:00
|
|
|
import homeassistant.util.yaml as yaml
|
2015-04-30 05:26:54 +00:00
|
|
|
from homeassistant.const import (
|
2015-05-01 04:03:01 +00:00
|
|
|
STATE_ON, STATE_OFF, DEVICE_DEFAULT_NAME, EVENT_TIME_CHANGED,
|
2015-09-12 16:15:28 +00:00
|
|
|
EVENT_STATE_CHANGED, EVENT_PLATFORM_DISCOVERED, ATTR_SERVICE,
|
2017-04-06 06:23:02 +00:00
|
|
|
ATTR_DISCOVERED, SERVER_PORT, EVENT_HOMEASSISTANT_CLOSE)
|
2017-05-09 07:03:34 +00:00
|
|
|
from homeassistant.components import mqtt, recorder
|
2018-03-11 17:01:12 +00:00
|
|
|
from homeassistant.util.async_ import (
|
2017-04-06 06:23:02 +00:00
|
|
|
run_callback_threadsafe, run_coroutine_threadsafe)
|
2014-11-25 08:20:36 +00:00
|
|
|
|
2017-03-05 20:14:21 +00:00
|
|
|
# Port handed to freshly requested test instances; bumped once per call
# by get_test_instance_port().
_TEST_INSTANCE_PORT = SERVER_PORT

_LOGGER = logging.getLogger(__name__)

# Every HomeAssistant instance created by the helpers is tracked here so
# the test suite can detect instances that were never torn down.
INSTANCES = []

# Credentials used by tests that need an OAuth-style client.
CLIENT_ID = 'https://example.com/app'
CLIENT_REDIRECT_URI = 'https://example.com/app/callback'
|
2016-02-14 20:54:16 +00:00
|
|
|
|
2014-11-25 08:20:36 +00:00
|
|
|
|
2017-05-02 06:29:01 +00:00
|
|
|
def threadsafe_callback_factory(func):
    """Wrap a callback so it can be invoked from any thread.

    Callback needs to have `hass` as first argument.
    """
    @ft.wraps(func)
    def wrapper(*args, **kwargs):
        """Schedule func on the hass event loop and wait for the result."""
        hass = args[0]
        future = run_callback_threadsafe(
            hass.loop, ft.partial(func, *args, **kwargs))
        return future.result()

    return wrapper
|
|
|
|
|
|
|
|
|
2017-06-04 01:51:29 +00:00
|
|
|
def threadsafe_coroutine_factory(func):
    """Wrap a coroutine function so it can be invoked from any thread.

    Callback needs to have `hass` as first argument.
    """
    @ft.wraps(func)
    def wrapper(*args, **kwargs):
        """Run func on the hass event loop and wait for the result."""
        hass = args[0]
        future = run_coroutine_threadsafe(func(*args, **kwargs), hass.loop)
        return future.result()

    return wrapper
|
|
|
|
|
|
|
|
|
2016-08-23 04:42:05 +00:00
|
|
|
def get_test_config_dir(*add_path):
    """Return a path inside the test configuration directory."""
    base = os.path.dirname(__file__)
    return os.path.join(base, 'testing_config', *add_path)
|
2015-04-26 17:05:01 +00:00
|
|
|
|
|
|
|
|
2016-10-31 15:47:29 +00:00
|
|
|
def get_test_home_assistant():
    """Return a Home Assistant object pointing at test config directory."""
    if sys.platform == "win32":
        # Windows needs the proactor loop for subprocess/pipe support.
        loop = asyncio.ProactorEventLoop()
    else:
        loop = asyncio.new_event_loop()

    hass = loop.run_until_complete(async_test_home_assistant(loop))

    # Set once the event loop has fully shut down, so stop_hass() can wait
    # for the loop thread to finish before closing the loop.
    stop_event = threading.Event()

    def run_loop():
        """Run event loop."""
        # pylint: disable=protected-access
        # The loop runs in this helper thread, so record its thread ident
        # for asyncio's internal thread checks.
        loop._thread_ident = threading.get_ident()
        loop.run_forever()
        stop_event.set()

    orig_stop = hass.stop

    def start_hass(*mocks):
        """Start hass."""
        # Block the calling (test) thread until async_start completes on
        # the loop thread.
        run_coroutine_threadsafe(hass.async_start(), loop=hass.loop).result()

    def stop_hass():
        """Stop hass."""
        orig_stop()
        # Wait for run_loop() to exit before closing the loop.
        stop_event.wait()
        loop.close()

    # Replace the blocking start/stop with thread-safe variants that drive
    # the loop running in the background thread below.
    hass.start = start_hass
    hass.stop = stop_hass

    threading.Thread(name="LoopThread", target=run_loop, daemon=False).start()

    return hass
|
|
|
|
|
|
|
|
|
2016-11-18 22:05:03 +00:00
|
|
|
# pylint: disable=protected-access
@asyncio.coroutine
def async_test_home_assistant(loop):
    """Return a Home Assistant object pointing at test config dir."""
    hass = ha.HomeAssistant(loop)
    # Prevent the config from being loaded off disk.
    hass.config.async_load = Mock()
    store = auth_store.AuthStore(hass)
    hass.auth = auth.AuthManager(hass, store, {})
    ensure_auth_manager_loaded(hass.auth)
    INSTANCES.append(hass)

    orig_async_add_job = hass.async_add_job

    def async_add_job(target, *args):
        """Add a magic mock."""
        # Mocks are not awaitable; call them synchronously and wrap the
        # result so callers can still yield from / await the return value.
        if isinstance(target, Mock):
            return mock_coro(target(*args))
        return orig_async_add_job(target, *args)

    hass.async_add_job = async_add_job

    # Deterministic configuration for every test instance.
    hass.config.location_name = 'test home'
    hass.config.config_dir = get_test_config_dir()
    hass.config.latitude = 32.87336
    hass.config.longitude = -117.22743
    hass.config.elevation = 0
    hass.config.time_zone = date_util.get_time_zone('US/Pacific')
    hass.config.units = METRIC_SYSTEM
    hass.config.skip_pip = True

    hass.config_entries = config_entries.ConfigEntries(hass, {})
    hass.config_entries._entries = []
    # Avoid registering a stop listener that would write entries on close.
    hass.config_entries._store._async_ensure_stop_listener = lambda: None

    hass.state = ha.CoreState.running

    # Mock async_start
    orig_start = hass.async_start

    @asyncio.coroutine
    def mock_async_start():
        """Start the mocking."""
        # We only mock time during tests and we want to track tasks
        with patch('homeassistant.core._async_create_timer'), \
                patch.object(hass, 'async_stop_track_tasks'):
            yield from orig_start()

    hass.async_start = mock_async_start

    @ha.callback
    def clear_instance(event):
        """Clear global instance."""
        INSTANCES.remove(hass)

    hass.bus.async_listen_once(EVENT_HOMEASSISTANT_CLOSE, clear_instance)

    return hass
|
|
|
|
|
|
|
|
|
2017-03-05 20:14:21 +00:00
|
|
|
def get_test_instance_port():
    """Return unused port for running test instance.

    The socket that holds the default port does not get released when we stop
    HA in a different test case. Until I have figured out what is going on,
    let's run each test on a different port.
    """
    global _TEST_INSTANCE_PORT
    # Monotonically bump the module-level counter: a port is never reused
    # within one test session.
    _TEST_INSTANCE_PORT += 1
    return _TEST_INSTANCE_PORT
|
|
|
|
|
|
|
|
|
2017-06-25 17:53:15 +00:00
|
|
|
@ha.callback
def async_mock_service(hass, domain, service, schema=None):
    """Register a fake service and return the list of recorded calls."""
    recorded = []

    @ha.callback
    def record_call(call):
        """Append the incoming service call to the log."""
        recorded.append(call)

    hass.services.async_register(
        domain, service, record_call, schema=schema)

    return recorded
|
|
|
|
|
|
|
|
|
2017-06-25 17:53:15 +00:00
|
|
|
# Synchronous wrapper: call async_mock_service from a test worker thread.
mock_service = threadsafe_callback_factory(async_mock_service)
|
|
|
|
|
|
|
|
|
2017-07-22 04:38:53 +00:00
|
|
|
@ha.callback
def async_mock_intent(hass, intent_typ):
    """Set up a fake intent handler and return the handled intents list."""
    intents = []

    class MockIntentHandler(intent.IntentHandler):
        """Intent handler that records every intent it handles."""

        intent_type = intent_typ

        @asyncio.coroutine
        def async_handle(self, intent):
            """Handle the intent."""
            # NOTE: the ``intent`` parameter shadows the module of the same
            # name here; the calls below target the intent object.
            intents.append(intent)
            return intent.create_response()

    intent.async_register(hass, MockIntentHandler())

    return intents
|
|
|
|
|
|
|
|
|
2017-02-07 17:13:24 +00:00
|
|
|
@ha.callback
def async_fire_mqtt_message(hass, topic, payload, qos=0, retain=False):
    """Fire the MQTT message."""
    # The real client delivers payloads as bytes; mirror that here.
    if isinstance(payload, str):
        payload = payload.encode('utf-8')
    msg = mqtt.Message(topic, payload, qos, retain)
    # Inject the message through the component's paho on-message callback.
    # pylint: disable=protected-access
    hass.async_run_job(hass.data['mqtt']._mqtt_on_message, None, None, msg)
|
2015-08-11 06:11:46 +00:00
|
|
|
|
|
|
|
|
2017-05-02 06:29:01 +00:00
|
|
|
# Synchronous wrapper: call async_fire_mqtt_message from a worker thread.
fire_mqtt_message = threadsafe_callback_factory(async_fire_mqtt_message)
|
2017-02-07 17:13:24 +00:00
|
|
|
|
|
|
|
|
2017-05-02 06:29:01 +00:00
|
|
|
@ha.callback
def async_fire_time_changed(hass, time):
    """Fire a time changed event with ``time`` as the new time."""
    hass.bus.async_fire(EVENT_TIME_CHANGED, {'now': time})
|
|
|
|
|
|
|
|
|
|
|
|
# Synchronous wrapper: call async_fire_time_changed from a worker thread.
fire_time_changed = threadsafe_callback_factory(async_fire_time_changed)
|
2015-08-03 15:57:12 +00:00
|
|
|
|
|
|
|
|
2015-09-12 16:15:28 +00:00
|
|
|
def fire_service_discovered(hass, service, info):
    """Fire a service discovered event."""
    hass.bus.fire(EVENT_PLATFORM_DISCOVERED, {
        ATTR_SERVICE: service,
        ATTR_DISCOVERED: info
    })
|
2015-04-30 05:26:54 +00:00
|
|
|
|
|
|
|
|
2016-06-27 16:02:45 +00:00
|
|
|
def load_fixture(filename):
    """Return the contents of a file in the fixtures directory."""
    fixture_path = os.path.join(
        os.path.dirname(__file__), 'fixtures', filename)
    with open(fixture_path, encoding='utf-8') as fixture_file:
        return fixture_file.read()
|
2016-06-27 16:02:45 +00:00
|
|
|
|
|
|
|
|
2015-05-01 04:03:01 +00:00
|
|
|
def mock_state_change_event(hass, new_state, old_state=None):
    """Mock state change event."""
    event_data = {
        'entity_id': new_state.entity_id,
        'new_state': new_state,
    }

    # ``old_state`` is omitted entirely when falsy, matching a first-seen
    # state change.
    if old_state:
        event_data['old_state'] = old_state

    # Propagate the new state's context so listeners see a realistic event.
    hass.bus.fire(EVENT_STATE_CHANGED, event_data, context=new_state.context)
|
2015-05-01 04:03:01 +00:00
|
|
|
|
|
|
|
|
2017-06-04 01:51:29 +00:00
|
|
|
@asyncio.coroutine
def async_mock_mqtt_component(hass, config=None):
    """Mock the MQTT component."""
    if config is None:
        config = {mqtt.CONF_BROKER: 'mock-broker'}

    # Patch out paho-mqtt so no real network connection is attempted.
    with patch('paho.mqtt.client.Client') as mock_client:
        mock_client().connect.return_value = 0
        mock_client().subscribe.return_value = (0, 0)
        mock_client().publish.return_value = (0, 0)

        result = yield from async_setup_component(hass, mqtt.DOMAIN, {
            mqtt.DOMAIN: config
        })
        assert result

        # Wrap the MQTT instance in a MagicMock so tests can assert on
        # calls while still delegating to the real implementation.
        hass.data['mqtt'] = MagicMock(spec_set=hass.data['mqtt'],
                                      wraps=hass.data['mqtt'])

        return hass.data['mqtt']
|
2015-08-11 06:11:46 +00:00
|
|
|
|
|
|
|
|
2017-06-04 01:51:29 +00:00
|
|
|
# Synchronous wrapper: call async_mock_mqtt_component from a worker thread.
mock_mqtt_component = threadsafe_coroutine_factory(async_mock_mqtt_component)
|
|
|
|
|
|
|
|
|
2017-05-02 06:29:01 +00:00
|
|
|
@ha.callback
def mock_component(hass, component):
    """Mark *component* as set up on *hass*.

    Raises AssertionError when the component was already set up.
    """
    if component in hass.config.components:
        # Bug fix: the AssertionError used to be instantiated but never
        # raised, silently allowing duplicate setup.
        raise AssertionError(
            "Component {} is already setup".format(component))

    hass.config.components.add(component)
|
|
|
|
|
|
|
|
|
2018-02-11 17:16:01 +00:00
|
|
|
def mock_registry(hass, mock_entries=None):
    """Create and install a mock Entity Registry on *hass*.

    Returns the registry so tests can seed or inspect entries.
    """
    registry = entity_registry.EntityRegistry(hass)
    hass.data[entity_registry.DATA_REGISTRY] = registry
    registry.entities = mock_entries or {}
    return registry
|
|
|
|
|
|
|
|
|
2018-07-13 09:43:08 +00:00
|
|
|
class MockUser(auth_models.User):
    """Mock a user in Home Assistant."""

    def __init__(self, id=None, is_owner=False, is_active=True,
                 name='Mock User', system_generated=False):
        """Initialize mock user."""
        user_kwargs = {
            'is_owner': is_owner,
            'is_active': is_active,
            'name': name,
            'system_generated': system_generated,
        }
        # Only forward ``id`` when given so the base class can generate one.
        if id is not None:
            user_kwargs['id'] = id
        super().__init__(**user_kwargs)

    def add_to_hass(self, hass):
        """Test helper to add this user to the hass auth manager."""
        return self.add_to_auth_manager(hass.auth)

    def add_to_auth_manager(self, auth_mgr):
        """Test helper to add this user to an auth manager."""
        ensure_auth_manager_loaded(auth_mgr)
        auth_mgr._store._users[self.id] = self
        return self
|
|
|
|
|
|
|
|
|
2018-07-13 13:31:20 +00:00
|
|
|
async def register_auth_provider(hass, config):
    """Helper to register an auth provider."""
    provider = await auth_providers.auth_provider_from_config(
        hass, hass.auth._store, config)
    assert provider is not None, 'Invalid config specified'
    # Providers are keyed by (type, id); ids distinguish multiple
    # providers of the same type.
    key = (provider.type, provider.id)
    providers = hass.auth._providers

    if key in providers:
        raise ValueError('Provider already registered')

    providers[key] = provider
    return provider
|
|
|
|
|
|
|
|
|
2018-05-01 16:20:41 +00:00
|
|
|
@ha.callback
def ensure_auth_manager_loaded(auth_mgr):
    """Ensure an auth manager is considered loaded."""
    store = auth_mgr._store
    # A non-None _users dict marks the store as loaded, so seeding an empty
    # one keeps it from hitting storage.
    if store._users is None:
        store._users = OrderedDict()
|
2018-05-01 16:20:41 +00:00
|
|
|
|
|
|
|
|
2018-07-20 08:45:20 +00:00
|
|
|
class MockModule:
    """Representation of a fake module."""

    # pylint: disable=invalid-name
    def __init__(self, domain=None, dependencies=None, setup=None,
                 requirements=None, config_schema=None, platform_schema=None,
                 async_setup=None, async_setup_entry=None,
                 async_unload_entry=None):
        """Initialize the mock module.

        Optional attributes are only set when supplied, presumably so that
        hasattr() probes by the setup machinery behave like with a real
        component module — TODO confirm against the loader.
        """
        self.DOMAIN = domain
        self.DEPENDENCIES = dependencies or []
        self.REQUIREMENTS = requirements or []

        if config_schema is not None:
            self.CONFIG_SCHEMA = config_schema

        if platform_schema is not None:
            self.PLATFORM_SCHEMA = platform_schema

        if setup is not None:
            # We run this in executor, wrap it in function
            self.setup = lambda *args: setup(*args)

        if async_setup is not None:
            self.async_setup = async_setup

        # Default to a coroutine that reports successful setup when no
        # setup hook was provided at all.
        if setup is None and async_setup is None:
            self.async_setup = mock_coro_func(True)

        if async_setup_entry is not None:
            self.async_setup_entry = async_setup_entry

        if async_unload_entry is not None:
            self.async_unload_entry = async_unload_entry
|
|
|
|
|
2016-01-31 02:55:52 +00:00
|
|
|
|
2018-07-20 08:45:20 +00:00
|
|
|
class MockPlatform:
    """Provide a fake platform."""

    # pylint: disable=invalid-name
    def __init__(self, setup_platform=None, dependencies=None,
                 platform_schema=None, async_setup_platform=None,
                 async_setup_entry=None, scan_interval=None):
        """Initialize the platform.

        Optional attributes are only set when supplied, presumably so that
        hasattr() probes by the platform loader behave like with a real
        platform module — TODO confirm against the loader.
        """
        self.DEPENDENCIES = dependencies or []

        if platform_schema is not None:
            self.PLATFORM_SCHEMA = platform_schema

        if scan_interval is not None:
            self.SCAN_INTERVAL = scan_interval

        if setup_platform is not None:
            # We run this in executor, wrap it in function
            self.setup_platform = lambda *args: setup_platform(*args)

        if async_setup_platform is not None:
            self.async_setup_platform = async_setup_platform

        if async_setup_entry is not None:
            self.async_setup_entry = async_setup_entry

        # Default to a no-op coroutine when no setup hook was provided.
        if setup_platform is None and async_setup_platform is None:
            self.async_setup_platform = mock_coro_func()
|
2014-12-01 07:14:08 +00:00
|
|
|
|
|
|
|
|
2018-02-24 18:53:59 +00:00
|
|
|
class MockEntityPlatform(entity_platform.EntityPlatform):
    """Mock class with some mock defaults."""

    def __init__(
        self, hass,
        logger=None,
        domain='test_domain',
        platform_name='test_platform',
        platform=None,
        scan_interval=timedelta(seconds=15),
        entity_namespace=None,
        async_entities_added_callback=lambda: None
    ):
        """Initialize a mock entity platform with test-friendly defaults."""
        if logger is None:
            logger = logging.getLogger('homeassistant.helpers.entity_platform')

        # Otherwise the constructor will blow up.
        # A bare Mock platform would return a Mock for PARALLEL_UPDATES;
        # pin it to 0 so numeric handling in the base class works.
        if (isinstance(platform, Mock) and
                isinstance(platform.PARALLEL_UPDATES, Mock)):
            platform.PARALLEL_UPDATES = 0

        super().__init__(
            hass=hass,
            logger=logger,
            domain=domain,
            platform_name=platform_name,
            platform=platform,
            scan_interval=scan_interval,
            entity_namespace=entity_namespace,
            async_entities_added_callback=async_entities_added_callback,
        )
|
|
|
|
|
|
|
|
|
2017-07-22 04:38:53 +00:00
|
|
|
class MockToggleDevice(entity.ToggleEntity):
    """Provide a mock toggle device."""

    def __init__(self, name, state):
        """Initialize the mock device."""
        self._name = name or DEVICE_DEFAULT_NAME
        self._state = state
        # Log of (method_name, kwargs) tuples for every property access and
        # service call, so tests can assert on interactions.
        self.calls = []

    @property
    def name(self):
        """Return the name of the device if any."""
        self.calls.append(('name', {}))
        return self._name

    @property
    def state(self):
        """Return the state of the device if any."""
        self.calls.append(('state', {}))
        return self._state

    @property
    def is_on(self):
        """Return true if device is on."""
        self.calls.append(('is_on', {}))
        return self._state == STATE_ON

    def turn_on(self, **kwargs):
        """Turn the device on."""
        self.calls.append(('turn_on', kwargs))
        self._state = STATE_ON

    def turn_off(self, **kwargs):
        """Turn the device off."""
        self.calls.append(('turn_off', kwargs))
        self._state = STATE_OFF

    def last_call(self, method=None):
        """Return the last call, optionally filtered by method name."""
        if not self.calls:
            return None
        if method is None:
            return self.calls[-1]
        try:
            # Scan backwards for the most recent call to ``method``.
            return next(call for call in reversed(self.calls)
                        if call[0] == method)
        except StopIteration:
            return None
|
2016-08-23 04:42:05 +00:00
|
|
|
|
|
|
|
|
2018-02-16 22:07:38 +00:00
|
|
|
class MockConfigEntry(config_entries.ConfigEntry):
    """Helper for creating config entries that adds some defaults."""

    def __init__(self, *, domain='test', data=None, version=0, entry_id=None,
                 source=config_entries.SOURCE_USER, title='Mock Title',
                 state=None):
        """Initialize a mock config entry."""
        entry_kwargs = {
            'entry_id': entry_id or 'mock-id',
            'domain': domain,
            'data': data or {},
            'version': version,
            'title': title,
        }
        # Only forward optional values that were actually provided so the
        # base class can apply its own defaults.
        if source is not None:
            entry_kwargs['source'] = source
        if state is not None:
            entry_kwargs['state'] = state
        super().__init__(**entry_kwargs)

    def add_to_hass(self, hass):
        """Test helper to add this entry to hass."""
        hass.config_entries._entries.append(self)

    def add_to_manager(self, manager):
        """Test helper to add this entry to an entry manager."""
        manager._entries.append(self)
|
|
|
|
|
|
|
|
|
2016-08-23 04:42:05 +00:00
|
|
|
def patch_yaml_files(files_dict, endswith=True):
    """Patch load_yaml with a dictionary of yaml files.

    files_dict maps a full path (or, with ``endswith=True``, a path
    suffix) to the yaml text that should be returned for it.
    """
    # Match using endswith; try the longest suffixes first so the most
    # specific pattern wins. sorted() is ascending, hence reverse=True —
    # previously this sorted shortest-first, contradicting the intent.
    matchlist = sorted(files_dict.keys(), key=len, reverse=True) \
        if endswith else []

    def mock_open_f(fname, **_):
        """Mock open() in the yaml module, used by load_yaml."""
        # Return the mocked file on full match
        if fname in files_dict:
            _LOGGER.debug("patch_yaml_files match %s", fname)
            res = StringIO(files_dict[fname])
            setattr(res, 'name', fname)
            return res

        # Match using endswith
        for ends in matchlist:
            if fname.endswith(ends):
                _LOGGER.debug("patch_yaml_files end match %s: %s", ends, fname)
                res = StringIO(files_dict[ends])
                setattr(res, 'name', fname)
                return res

        # Fallback for hass.components (i.e. services.yaml)
        if 'homeassistant/components' in fname:
            _LOGGER.debug("patch_yaml_files using real file: %s", fname)
            return open(fname, encoding='utf-8')

        # Not found
        raise FileNotFoundError("File not found: {}".format(fname))

    return patch.object(yaml, 'open', mock_open_f, create=True)
|
2016-10-08 18:27:35 +00:00
|
|
|
|
|
|
|
|
2016-10-29 19:54:47 +00:00
|
|
|
def mock_coro(return_value=None):
    """Return a coroutine object that resolves to *return_value*.

    Implemented as a native coroutine because the generator-based
    ``asyncio.coroutine`` decorator was deprecated and removed in
    Python 3.11.
    """
    async def coro():
        """Fake coroutine."""
        return return_value

    return coro()
|
2017-02-07 09:19:08 +00:00
|
|
|
|
|
|
|
|
2017-02-14 05:34:36 +00:00
|
|
|
def mock_coro_func(return_value=None):
    """Return a coroutine function whose coroutines resolve to *return_value*.

    Implemented with ``async def`` because the generator-based
    ``asyncio.coroutine`` decorator was deprecated and removed in
    Python 3.11; the file already uses ``async def`` elsewhere.
    """
    async def coro(*args, **kwargs):
        """Fake coroutine that ignores its arguments."""
        return return_value

    return coro
|
|
|
|
|
|
|
|
|
2016-10-08 18:27:35 +00:00
|
|
|
@contextmanager
def assert_setup_component(count, domain=None):
    """Collect valid configuration from setup_component.

    - count: The amount of valid platforms that should be setup
    - domain: The domain to count is optional. It can be automatically
      determined most of the time

    Use as a context manager around setup.setup_component
        with assert_setup_component(0) as result_config:
            setup_component(hass, domain, start_config)
            # using result_config is optional
    """
    config = {}

    @ha.callback
    def mock_psc(hass, config_input, domain):
        """Mock the prepare_setup_component to capture config."""
        res = async_process_component_config(
            hass, config_input, domain)
        # None marks an invalid configuration for that domain.
        config[domain] = None if res is None else res.get(domain)
        _LOGGER.debug("Configuration for %s, Validated: %s, Original %s",
                      domain, config[domain], config_input.get(domain))
        return res

    assert isinstance(config, dict)
    with patch('homeassistant.config.async_process_component_config',
               mock_psc):
        yield config

    # Infer the domain when exactly one was captured.
    if domain is None:
        assert len(config) == 1, ('assert_setup_component requires DOMAIN: {}'
                                  .format(list(config.keys())))
        domain = list(config.keys())[0]

    res = config.get(domain)
    res_len = 0 if res is None else len(res)
    assert res_len == count, 'setup_component failed, expected {} got {}: {}' \
        .format(count, res_len, res)
|
2017-02-21 07:40:27 +00:00
|
|
|
|
|
|
|
|
2017-02-26 22:38:06 +00:00
|
|
|
def init_recorder_component(hass, add_config=None):
    """Initialize the recorder."""
    config = dict(add_config) if add_config else {}
    config[recorder.CONF_DB_URL] = 'sqlite://'  # In memory DB

    # Schema migration is patched out: tests always start from a fresh
    # in-memory database created at the current schema version.
    with patch('homeassistant.components.recorder.migration.migrate_schema'):
        assert setup_component(hass, recorder.DOMAIN,
                               {recorder.DOMAIN: config})
        assert recorder.DOMAIN in hass.config.components
    _LOGGER.info("In-memory recorder successfully started")
|
2017-02-22 08:15:48 +00:00
|
|
|
|
|
|
|
|
|
|
|
def mock_restore_cache(hass, states):
    """Mock the DATA_RESTORE_CACHE."""
    key = restore_state.DATA_RESTORE_CACHE
    hass.data[key] = {
        state.entity_id: state for state in states}
    _LOGGER.debug('Restore cache: %s', hass.data[key])
    # The dict comprehension above deduplicates on entity_id; a length
    # mismatch means the caller passed duplicates.
    assert len(hass.data[key]) == len(states), \
        "Duplicate entity_id? {}".format(states)
    # NOTE(review): restoring presumably only happens while HA is starting
    # with the recorder available — hence the two lines below; confirm
    # against the restore_state helper.
    hass.state = ha.CoreState.starting
    mock_component(hass, recorder.DOMAIN)
|
2017-05-14 04:25:54 +00:00
|
|
|
|
|
|
|
|
|
|
|
class MockDependency:
    """Decorator and context manager to mock install a dependency."""

    def __init__(self, root, *args):
        """Initialize decorator.

        root: top-level module name to mock.
        *args: dotted submodule paths (relative to root) to mock as well.
        """
        self.root = root
        self.submodules = args

    def __enter__(self):
        """Start mocking; return the MagicMock standing in for root."""
        def resolve(mock, path):
            """Walk attribute *path* down from *mock*."""
            if not path:
                return mock
            return resolve(getattr(mock, path[0]), path[1:])

        base = MagicMock()
        to_mock = {
            "{}.{}".format(self.root, tom): resolve(base, tom.split('.'))
            for tom in self.submodules
        }
        to_mock[self.root] = base

        self.patcher = patch.dict('sys.modules', to_mock)
        self.patcher.start()
        return base

    def __exit__(self, *exc):
        """Stop mocking; never suppress exceptions."""
        self.patcher.stop()
        return False

    def __call__(self, func):
        """Apply decorator: pass the mock as an extra last argument."""
        # Bug fixes: propagate func's return value (it used to be
        # silently dropped) and preserve func's metadata via ft.wraps.
        @ft.wraps(func)
        def run_mocked(*args, **kwargs):
            """Run with mocked dependencies."""
            with self as base:
                args = list(args) + [base]
                return func(*args, **kwargs)

        return run_mocked
|
2018-02-08 11:16:51 +00:00
|
|
|
|
|
|
|
|
|
|
|
class MockEntity(entity.Entity):
    """Mock Entity class."""

    def __init__(self, **values):
        """Initialize an entity with overridable attribute values."""
        self._values = values
        if 'entity_id' in values:
            self.entity_id = values['entity_id']

    @property
    def name(self):
        """Return the name of the entity."""
        return self._handle('name')

    @property
    def should_poll(self):
        """Return the polling state."""
        return self._handle('should_poll')

    @property
    def unique_id(self):
        """Return the unique ID of the entity."""
        return self._handle('unique_id')

    @property
    def available(self):
        """Return True if entity is available."""
        return self._handle('available')

    def _handle(self, attr):
        """Return the overridden value for *attr*, else the parent's."""
        try:
            return self._values[attr]
        except KeyError:
            return getattr(super(), attr)
|
2018-06-29 02:14:26 +00:00
|
|
|
|
|
|
|
|
|
|
|
@contextmanager
def mock_storage(data=None):
    """Mock storage.

    Data is a dict {'key': {'version': version, 'data': data}}

    Written data will be converted to JSON to ensure JSON parsing works.
    """
    if data is None:
        data = {}

    orig_load = storage.Store._async_load

    async def mock_async_load(store):
        """Mock version of load."""
        if store._data is None:
            # No data to load
            if store.key not in data:
                return None

            mock_data = data.get(store.key)

            if 'data' not in mock_data or 'version' not in mock_data:
                _LOGGER.error('Mock data needs "version" and "data"')
                raise ValueError('Mock data needs "version" and "data"')

            # Seed the store with the mocked payload before delegating.
            store._data = mock_data

        # Route through original load so that we trigger migration
        loaded = await orig_load(store)
        _LOGGER.info('Loading data for %s: %s', store.key, loaded)
        return loaded

    def mock_write_data(store, path, data_to_write):
        """Mock version of write data."""
        # To ensure that the data can be serialized
        _LOGGER.info('Writing data to %s: %s', store.key, data_to_write)
        data[store.key] = json.loads(json.dumps(data_to_write))

    with patch('homeassistant.helpers.storage.Store._async_load',
               side_effect=mock_async_load, autospec=True), \
        patch('homeassistant.helpers.storage.Store._write_data',
              side_effect=mock_write_data, autospec=True):
        yield data
|
|
|
|
|
|
|
|
|
|
|
|
async def flush_store(store):
    """Make sure all delayed writes of a store are written."""
    if store._data is None:
        # Nothing pending: the store never loaded or queued data.
        return

    await store._async_handle_write_data()
|