Add zwave_js.event automation trigger (#62828)
Co-authored-by: Martin Hjelmare <marhje52@gmail.com>pull/66931/head
parent
ddedaf6f70
commit
6e5ae3e2e4
|
@ -69,6 +69,9 @@ ATTR_PREVIOUS_VALUE_RAW = "previous_value_raw"
|
|||
ATTR_CURRENT_VALUE = "current_value"
ATTR_CURRENT_VALUE_RAW = "current_value_raw"
ATTR_DESCRIPTION = "description"
# Source of a zwave_js.event trigger; valid values are the keys of the
# trigger module's EVENT_MODEL_MAP ("controller", "driver", "node").
ATTR_EVENT_SOURCE = "event_source"
ATTR_CONFIG_ENTRY_ID = "config_entry_id"
# When True, dict values inside an event data filter may match on a
# subset of keys instead of requiring full equality.
ATTR_PARTIAL_DICT_MATCH = "partial_dict_match"

# service constants
SERVICE_SET_LOCK_USERCODE = "set_lock_usercode"
|
||||
|
|
|
@ -12,10 +12,11 @@ from homeassistant.const import CONF_PLATFORM
|
|||
from homeassistant.core import CALLBACK_TYPE, HomeAssistant
from homeassistant.helpers.typing import ConfigType

from .triggers import event, value_updated

# Map of trigger platform subtype -> submodule implementing it.  The
# subtype is the part after the dot in e.g. "zwave_js.event".
TRIGGERS = {
    "value_updated": value_updated,
    "event": event,
}
|
||||
|
||||
|
||||
|
|
|
@ -0,0 +1,232 @@
|
|||
"""Offer Z-Wave JS event listening automation trigger."""
|
||||
from __future__ import annotations
|
||||
|
||||
import functools
|
||||
|
||||
from pydantic import ValidationError
|
||||
import voluptuous as vol
|
||||
from zwave_js_server.client import Client
|
||||
from zwave_js_server.model.controller import CONTROLLER_EVENT_MODEL_MAP
|
||||
from zwave_js_server.model.driver import DRIVER_EVENT_MODEL_MAP
|
||||
from zwave_js_server.model.node import NODE_EVENT_MODEL_MAP, Node
|
||||
|
||||
from homeassistant.components.automation import (
|
||||
AutomationActionType,
|
||||
AutomationTriggerInfo,
|
||||
)
|
||||
from homeassistant.components.zwave_js.const import (
|
||||
ATTR_CONFIG_ENTRY_ID,
|
||||
ATTR_EVENT,
|
||||
ATTR_EVENT_DATA,
|
||||
ATTR_EVENT_SOURCE,
|
||||
ATTR_NODE_ID,
|
||||
ATTR_PARTIAL_DICT_MATCH,
|
||||
DATA_CLIENT,
|
||||
DOMAIN,
|
||||
)
|
||||
from homeassistant.components.zwave_js.helpers import (
|
||||
async_get_node_from_device_id,
|
||||
async_get_node_from_entity_id,
|
||||
get_device_id,
|
||||
get_home_and_node_id_from_device_entry,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntryState
|
||||
from homeassistant.const import ATTR_DEVICE_ID, ATTR_ENTITY_ID, CONF_PLATFORM
|
||||
from homeassistant.core import CALLBACK_TYPE, HassJob, HomeAssistant, callback
|
||||
from homeassistant.helpers import config_validation as cv, device_registry as dr
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
# Platform type should be <DOMAIN>.<SUBMODULE_NAME>
|
||||
PLATFORM_TYPE = f"{DOMAIN}.{__name__.rsplit('.', maxsplit=1)[-1]}"
|
||||
|
||||
EVENT_MODEL_MAP = {
|
||||
"controller": CONTROLLER_EVENT_MODEL_MAP,
|
||||
"driver": DRIVER_EVENT_MODEL_MAP,
|
||||
"node": NODE_EVENT_MODEL_MAP,
|
||||
}
|
||||
|
||||
|
||||
def validate_non_node_event_source(obj: dict) -> dict:
    """Validate that a trigger for a non node event source has a config entry."""
    # Guard clause: node-sourced triggers, and triggers missing a config
    # entry ID, are rejected here (node triggers are accepted by the
    # sibling device/entity validator in TRIGGER_SCHEMA's vol.Any).
    if obj[ATTR_EVENT_SOURCE] == "node" or ATTR_CONFIG_ENTRY_ID not in obj:
        raise vol.Invalid(
            f"Non node event triggers must contain {ATTR_CONFIG_ENTRY_ID}."
        )
    return obj
|
||||
|
||||
|
||||
def validate_event_name(obj: dict) -> dict:
    """Validate that a trigger has a valid event name."""
    # The keys of the event source's model map are the valid event names,
    # so membership there is the whole check.
    valid_event_names = EVENT_MODEL_MAP[obj[ATTR_EVENT_SOURCE]]
    vol.In(valid_event_names)(obj[ATTR_EVENT])
    return obj
|
||||
|
||||
|
||||
def validate_event_data(obj: dict) -> dict:
    """Validate that a trigger has a valid event data."""
    # Nothing to check when no event data filter was provided.
    if ATTR_EVENT_DATA not in obj:
        return obj

    # Look up the pydantic model for this source/event pair and try to
    # construct it from the provided filter data.
    event_model = EVENT_MODEL_MAP[obj[ATTR_EVENT_SOURCE]][obj[ATTR_EVENT]]
    try:
        event_model(**obj[ATTR_EVENT_DATA])
    except ValidationError as exc:
        # A filter is allowed to omit keys, so drop required-field errors;
        # anything left over is a genuine validation failure.
        remaining_errors = [
            error
            for error in exc.errors()
            if error["type"] != "value_error.missing"
        ]
        if remaining_errors:
            raise vol.MultipleInvalid(remaining_errors) from exc
    return obj
|
||||
|
||||
|
||||
# Schema for the zwave_js.event trigger.  Ordering inside vol.All matters:
# the base schema runs first so the keys the custom validators read
# (event_source, event, event_data) are guaranteed to exist.
TRIGGER_SCHEMA = vol.All(
    cv.TRIGGER_BASE_SCHEMA.extend(
        {
            vol.Required(CONF_PLATFORM): PLATFORM_TYPE,
            # Required for controller/driver sources, optional for node
            # sources — enforced by validate_non_node_event_source below.
            vol.Optional(ATTR_CONFIG_ENTRY_ID): str,
            vol.Optional(ATTR_DEVICE_ID): vol.All(cv.ensure_list, [cv.string]),
            vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
            vol.Required(ATTR_EVENT_SOURCE): vol.In(EVENT_MODEL_MAP),
            vol.Required(ATTR_EVENT): cv.string,
            vol.Optional(ATTR_EVENT_DATA): dict,
            vol.Optional(ATTR_PARTIAL_DICT_MATCH, default=False): bool,
        },
    ),
    validate_event_name,
    validate_event_data,
    # Either the trigger is a non-node trigger with a config entry ID, or
    # it must target at least one device or entity.
    vol.Any(
        validate_non_node_event_source,
        cv.has_at_least_one_key(ATTR_DEVICE_ID, ATTR_ENTITY_ID),
    ),
)
|
||||
|
||||
|
||||
async def async_validate_trigger_config(
    hass: HomeAssistant, config: ConfigType
) -> ConfigType:
    """Validate config."""
    # Schema validation first; raises vol.Invalid on a malformed trigger.
    config = TRIGGER_SCHEMA(config)

    entry_id = config.get(ATTR_CONFIG_ENTRY_ID)
    if entry_id is None:
        # Node-sourced triggers have no config entry to check.
        return config

    entry = hass.config_entries.async_get_entry(entry_id)
    if entry is None:
        raise vol.Invalid(f"Config entry '{entry_id}' not found")
    if entry.state is not ConfigEntryState.LOADED:
        raise vol.Invalid(f"Config entry '{entry_id}' not loaded")

    return config
|
||||
|
||||
|
||||
async def async_attach_trigger(
    hass: HomeAssistant,
    config: ConfigType,
    action: AutomationActionType,
    automation_info: AutomationTriggerInfo,
    *,
    platform_type: str = PLATFORM_TYPE,
) -> CALLBACK_TYPE:
    """Listen for state changes based on configuration.

    Resolves the configured device/entity IDs to Z-Wave JS nodes, subscribes
    async_on_event to the configured event on each node (or on the
    controller/driver when no nodes are targeted), and returns a callback
    that removes all subscriptions.
    """
    # Collect the set of nodes targeted via device IDs and/or entity IDs.
    nodes: set[Node] = set()
    if ATTR_DEVICE_ID in config:
        nodes.update(
            {
                async_get_node_from_device_id(hass, device_id)
                for device_id in config[ATTR_DEVICE_ID]
            }
        )
    if ATTR_ENTITY_ID in config:
        nodes.update(
            {
                async_get_node_from_entity_id(hass, entity_id)
                for entity_id in config[ATTR_ENTITY_ID]
            }
        )

    event_source = config[ATTR_EVENT_SOURCE]
    event_name = config[ATTR_EVENT]
    # Empty filter means every matching event fires the action.
    event_data_filter = config.get(ATTR_EVENT_DATA, {})

    unsubs = []
    job = HassJob(action)

    trigger_data = automation_info["trigger_data"]

    @callback
    def async_on_event(event_data: dict, device: dr.DeviceEntry | None = None) -> None:
        """Handle event."""
        # Apply the event data filter: every filter key must be present and
        # match; any mismatch silently skips the event.
        for key, val in event_data_filter.items():
            if key not in event_data:
                return
            if (
                config[ATTR_PARTIAL_DICT_MATCH]
                and isinstance(event_data[key], dict)
                and isinstance(event_data_filter[key], dict)
            ):
                # Partial dict match: only the keys listed in the filter's
                # nested dict need to be equal.
                for key2, val2 in event_data_filter[key].items():
                    if key2 not in event_data[key] or event_data[key][key2] != val2:
                        return
                continue
            if event_data[key] != val:
                return

        # Build the trigger payload that the automation action receives.
        payload = {
            **trigger_data,
            CONF_PLATFORM: platform_type,
            ATTR_EVENT_SOURCE: event_source,
            ATTR_EVENT: event_name,
            ATTR_EVENT_DATA: event_data,
        }

        primary_desc = f"Z-Wave JS '{event_source}' event '{event_name}' was emitted"

        if device:
            device_name = device.name_by_user or device.name
            payload[ATTR_DEVICE_ID] = device.id
            home_and_node_id = get_home_and_node_id_from_device_entry(device)
            assert home_and_node_id
            payload[ATTR_NODE_ID] = home_and_node_id[1]
            payload["description"] = f"{primary_desc} on {device_name}"
        else:
            payload["description"] = primary_desc

        payload[
            "description"
        ] = f"{payload['description']} with event data: {event_data}"

        hass.async_run_hass_job(job, {"trigger": payload})

    dev_reg = dr.async_get(hass)

    if not nodes:
        # No nodes targeted: listen on the controller or driver instead.
        # ATTR_CONFIG_ENTRY_ID is guaranteed present here by
        # validate_non_node_event_source in TRIGGER_SCHEMA.
        entry_id = config[ATTR_CONFIG_ENTRY_ID]
        client: Client = hass.data[DOMAIN][entry_id][DATA_CLIENT]
        if event_source == "controller":
            source = client.driver.controller
        else:
            source = client.driver
        unsubs.append(source.on(event_name, async_on_event))

    for node in nodes:
        device_identifier = get_device_id(node.client, node)
        device = dev_reg.async_get_device({device_identifier})
        assert device
        # We need to store the device for the callback
        unsubs.append(
            node.on(event_name, functools.partial(async_on_event, device=device))
        )

    @callback
    def async_remove() -> None:
        """Remove state listeners async."""
        for unsub in unsubs:
            unsub()
        unsubs.clear()

    return async_remove
|
|
@ -1,4 +1,4 @@
|
|||
"""Offer Z-Wave JS value updated listening automation rules."""
|
||||
"""Offer Z-Wave JS value updated listening automation trigger."""
|
||||
from __future__ import annotations
|
||||
|
||||
import functools
|
||||
|
|
|
@ -264,6 +264,440 @@ async def test_zwave_js_value_updated(hass, client, lock_schlage_be469, integrat
|
|||
await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True)
|
||||
|
||||
|
||||
async def test_zwave_js_event(hass, client, lock_schlage_be469, integration):
    """Test for zwave_js.event automation trigger.

    Sets up eight automations covering the node/controller/driver event
    sources, with and without event data filters (including partial dict
    matching), then fires events through the mock client and asserts
    exactly the expected automations trigger for each scenario.
    """
    trigger_type = f"{DOMAIN}.event"
    node: Node = lock_schlage_be469
    dev_reg = async_get_dev_reg(hass)
    device = async_entries_for_config_entry(dev_reg, integration.entry_id)[0]

    # One capture list per automation so each scenario can assert which
    # automations fired.
    node_no_event_data_filter = async_capture_events(hass, "node_no_event_data_filter")
    node_event_data_filter = async_capture_events(hass, "node_event_data_filter")
    controller_no_event_data_filter = async_capture_events(
        hass, "controller_no_event_data_filter"
    )
    controller_event_data_filter = async_capture_events(
        hass, "controller_event_data_filter"
    )
    driver_no_event_data_filter = async_capture_events(
        hass, "driver_no_event_data_filter"
    )
    driver_event_data_filter = async_capture_events(hass, "driver_event_data_filter")
    node_event_data_no_partial_dict_match_filter = async_capture_events(
        hass, "node_event_data_no_partial_dict_match_filter"
    )
    node_event_data_partial_dict_match_filter = async_capture_events(
        hass, "node_event_data_partial_dict_match_filter"
    )

    def clear_events():
        """Clear all events in the event list."""
        node_no_event_data_filter.clear()
        node_event_data_filter.clear()
        controller_no_event_data_filter.clear()
        controller_event_data_filter.clear()
        driver_no_event_data_filter.clear()
        driver_event_data_filter.clear()
        node_event_data_no_partial_dict_match_filter.clear()
        node_event_data_partial_dict_match_filter.clear()

    assert await async_setup_component(
        hass,
        automation.DOMAIN,
        {
            automation.DOMAIN: [
                # node filter: no event data
                {
                    "trigger": {
                        "platform": trigger_type,
                        "entity_id": SCHLAGE_BE469_LOCK_ENTITY,
                        "event_source": "node",
                        "event": "interview stage completed",
                    },
                    "action": {
                        "event": "node_no_event_data_filter",
                    },
                },
                # node filter: event data
                {
                    "trigger": {
                        "platform": trigger_type,
                        "device_id": device.id,
                        "event_source": "node",
                        "event": "interview stage completed",
                        "event_data": {"stageName": "ProtocolInfo"},
                    },
                    "action": {
                        "event": "node_event_data_filter",
                    },
                },
                # controller filter: no event data
                {
                    "trigger": {
                        "platform": trigger_type,
                        "config_entry_id": integration.entry_id,
                        "event_source": "controller",
                        "event": "inclusion started",
                    },
                    "action": {
                        "event": "controller_no_event_data_filter",
                    },
                },
                # controller filter: event data
                {
                    "trigger": {
                        "platform": trigger_type,
                        "config_entry_id": integration.entry_id,
                        "event_source": "controller",
                        "event": "inclusion started",
                        "event_data": {"secure": True},
                    },
                    "action": {
                        "event": "controller_event_data_filter",
                    },
                },
                # driver filter: no event data
                {
                    "trigger": {
                        "platform": trigger_type,
                        "config_entry_id": integration.entry_id,
                        "event_source": "driver",
                        "event": "logging",
                    },
                    "action": {
                        "event": "driver_no_event_data_filter",
                    },
                },
                # driver filter: event data
                {
                    "trigger": {
                        "platform": trigger_type,
                        "config_entry_id": integration.entry_id,
                        "event_source": "driver",
                        "event": "logging",
                        "event_data": {"message": "test"},
                    },
                    "action": {
                        "event": "driver_event_data_filter",
                    },
                },
                # node filter: event data, no partial dict match
                {
                    "trigger": {
                        "platform": trigger_type,
                        "entity_id": SCHLAGE_BE469_LOCK_ENTITY,
                        "event_source": "node",
                        "event": "value updated",
                        "event_data": {"args": {"commandClassName": "Door Lock"}},
                    },
                    "action": {
                        "event": "node_event_data_no_partial_dict_match_filter",
                    },
                },
                # node filter: event data, partial dict match
                {
                    "trigger": {
                        "platform": trigger_type,
                        "entity_id": SCHLAGE_BE469_LOCK_ENTITY,
                        "event_source": "node",
                        "event": "value updated",
                        "event_data": {"args": {"commandClassName": "Door Lock"}},
                        "partial_dict_match": True,
                    },
                    "action": {
                        "event": "node_event_data_partial_dict_match_filter",
                    },
                },
            ]
        },
    )

    # Test that `node no event data filter` is triggered and `node event data filter` is not
    # (stageName "NodeInfo" does not match the "ProtocolInfo" filter).
    event = Event(
        type="interview stage completed",
        data={
            "source": "node",
            "event": "interview stage completed",
            "stageName": "NodeInfo",
            "nodeId": node.node_id,
        },
    )
    node.receive_event(event)
    await hass.async_block_till_done()

    assert len(node_no_event_data_filter) == 1
    assert len(node_event_data_filter) == 0
    assert len(controller_no_event_data_filter) == 0
    assert len(controller_event_data_filter) == 0
    assert len(driver_no_event_data_filter) == 0
    assert len(driver_event_data_filter) == 0
    assert len(node_event_data_no_partial_dict_match_filter) == 0
    assert len(node_event_data_partial_dict_match_filter) == 0

    clear_events()

    # Test that `node no event data filter` and `node event data filter` are triggered
    event = Event(
        type="interview stage completed",
        data={
            "source": "node",
            "event": "interview stage completed",
            "stageName": "ProtocolInfo",
            "nodeId": node.node_id,
        },
    )
    node.receive_event(event)
    await hass.async_block_till_done()

    assert len(node_no_event_data_filter) == 1
    assert len(node_event_data_filter) == 1
    assert len(controller_no_event_data_filter) == 0
    assert len(controller_event_data_filter) == 0
    assert len(driver_no_event_data_filter) == 0
    assert len(driver_event_data_filter) == 0
    assert len(node_event_data_no_partial_dict_match_filter) == 0
    assert len(node_event_data_partial_dict_match_filter) == 0

    clear_events()

    # Test that `controller no event data filter` is triggered and `controller event data filter` is not
    event = Event(
        type="inclusion started",
        data={
            "source": "controller",
            "event": "inclusion started",
            "secure": False,
        },
    )
    client.driver.controller.receive_event(event)
    await hass.async_block_till_done()

    assert len(node_no_event_data_filter) == 0
    assert len(node_event_data_filter) == 0
    assert len(controller_no_event_data_filter) == 1
    assert len(controller_event_data_filter) == 0
    assert len(driver_no_event_data_filter) == 0
    assert len(driver_event_data_filter) == 0
    assert len(node_event_data_no_partial_dict_match_filter) == 0
    assert len(node_event_data_partial_dict_match_filter) == 0

    clear_events()

    # Test that both `controller no event data filter` and `controller event data filter` are triggered
    event = Event(
        type="inclusion started",
        data={
            "source": "controller",
            "event": "inclusion started",
            "secure": True,
        },
    )
    client.driver.controller.receive_event(event)
    await hass.async_block_till_done()

    assert len(node_no_event_data_filter) == 0
    assert len(node_event_data_filter) == 0
    assert len(controller_no_event_data_filter) == 1
    assert len(controller_event_data_filter) == 1
    assert len(driver_no_event_data_filter) == 0
    assert len(driver_event_data_filter) == 0
    assert len(node_event_data_no_partial_dict_match_filter) == 0
    assert len(node_event_data_partial_dict_match_filter) == 0

    clear_events()

    # Test that `driver no event data filter` is triggered and `driver event data filter` is not
    # (message "no test" does not match the "test" filter).
    event = Event(
        type="logging",
        data={
            "source": "driver",
            "event": "logging",
            "message": "no test",
            "formattedMessage": "test",
            "direction": ">",
            "level": "debug",
            "primaryTags": "tag",
            "secondaryTags": "tag2",
            "secondaryTagPadding": 0,
            "multiline": False,
            "timestamp": "time",
            "label": "label",
        },
    )
    client.driver.receive_event(event)
    await hass.async_block_till_done()

    assert len(node_no_event_data_filter) == 0
    assert len(node_event_data_filter) == 0
    assert len(controller_no_event_data_filter) == 0
    assert len(controller_event_data_filter) == 0
    assert len(driver_no_event_data_filter) == 1
    assert len(driver_event_data_filter) == 0
    assert len(node_event_data_no_partial_dict_match_filter) == 0
    assert len(node_event_data_partial_dict_match_filter) == 0

    clear_events()

    # Test that both `driver no event data filter` and `driver event data filter` are triggered
    event = Event(
        type="logging",
        data={
            "source": "driver",
            "event": "logging",
            "message": "test",
            "formattedMessage": "test",
            "direction": ">",
            "level": "debug",
            "primaryTags": "tag",
            "secondaryTags": "tag2",
            "secondaryTagPadding": 0,
            "multiline": False,
            "timestamp": "time",
            "label": "label",
        },
    )
    client.driver.receive_event(event)
    await hass.async_block_till_done()

    assert len(node_no_event_data_filter) == 0
    assert len(node_event_data_filter) == 0
    assert len(controller_no_event_data_filter) == 0
    assert len(controller_event_data_filter) == 0
    assert len(driver_no_event_data_filter) == 1
    assert len(driver_event_data_filter) == 1
    assert len(node_event_data_no_partial_dict_match_filter) == 0
    assert len(node_event_data_partial_dict_match_filter) == 0

    clear_events()

    # Test that only `node with event data and partial match dict filter` is triggered
    # (the event's "args" dict has extra keys, so only the partial matcher accepts it).
    event = Event(
        type="value updated",
        data={
            "source": "node",
            "event": "value updated",
            "nodeId": node.node_id,
            "args": {
                "commandClassName": "Door Lock",
                "commandClass": 49,
                "endpoint": 0,
                "property": "latchStatus",
                "newValue": "closed",
                "prevValue": "open",
                "propertyName": "latchStatus",
            },
        },
    )
    node.receive_event(event)
    await hass.async_block_till_done()

    assert len(node_no_event_data_filter) == 0
    assert len(node_event_data_filter) == 0
    assert len(controller_no_event_data_filter) == 0
    assert len(controller_event_data_filter) == 0
    assert len(driver_no_event_data_filter) == 0
    assert len(driver_event_data_filter) == 0
    assert len(node_event_data_no_partial_dict_match_filter) == 0
    assert len(node_event_data_partial_dict_match_filter) == 1

    clear_events()

    # Test that `node with event data and partial match dict filter` is not triggered
    # when partial dict doesn't match
    event = Event(
        type="value updated",
        data={
            "source": "node",
            "event": "value updated",
            "nodeId": node.node_id,
            "args": {
                "commandClassName": "fake command class name",
                "commandClass": 49,
                "endpoint": 0,
                "property": "latchStatus",
                "newValue": "closed",
                "prevValue": "open",
                "propertyName": "latchStatus",
            },
        },
    )
    node.receive_event(event)
    await hass.async_block_till_done()

    assert len(node_no_event_data_filter) == 0
    assert len(node_event_data_filter) == 0
    assert len(controller_no_event_data_filter) == 0
    assert len(controller_event_data_filter) == 0
    assert len(driver_no_event_data_filter) == 0
    assert len(driver_event_data_filter) == 0
    assert len(node_event_data_no_partial_dict_match_filter) == 0
    assert len(node_event_data_partial_dict_match_filter) == 0

    clear_events()

    # Reload with an empty config to exercise trigger teardown.
    with patch("homeassistant.config.load_yaml", return_value={}):
        await hass.services.async_call(automation.DOMAIN, SERVICE_RELOAD, blocking=True)
|
||||
|
||||
|
||||
async def test_zwave_js_event_invalid_config_entry_id(
    hass, client, integration, caplog
):
    """Test zwave_js.event automation trigger fails when config entry ID is invalid."""
    # Build the automation config up front so the setup call stays readable.
    trigger_config = {
        "platform": f"{DOMAIN}.event",
        "config_entry_id": "not_real_entry_id",
        "event_source": "controller",
        "event": "inclusion started",
    }
    automation_config = {
        automation.DOMAIN: [
            {
                "trigger": trigger_config,
                "action": {"event": "node_no_event_data_filter"},
            }
        ]
    }

    assert await async_setup_component(hass, automation.DOMAIN, automation_config)

    # Setup succeeds but the bad trigger is rejected and logged.
    assert "Config entry 'not_real_entry_id' not found" in caplog.text
    caplog.clear()
|
||||
|
||||
|
||||
async def test_zwave_js_event_unloaded_config_entry(hass, client, integration, caplog):
    """Test zwave_js.event automation trigger fails when config entry is unloaded."""
    # Unload the integration first so trigger validation sees a non-loaded entry.
    await hass.config_entries.async_unload(integration.entry_id)

    trigger_config = {
        "platform": f"{DOMAIN}.event",
        "config_entry_id": integration.entry_id,
        "event_source": "controller",
        "event": "inclusion started",
    }
    automation_config = {
        automation.DOMAIN: [
            {
                "trigger": trigger_config,
                "action": {"event": "node_no_event_data_filter"},
            }
        ]
    }

    assert await async_setup_component(hass, automation.DOMAIN, automation_config)

    # Setup succeeds but the trigger is rejected and logged.
    assert f"Config entry '{integration.entry_id}' not loaded" in caplog.text
|
||||
|
||||
|
||||
async def test_async_validate_trigger_config(hass):
|
||||
"""Test async_validate_trigger_config."""
|
||||
mock_platform = AsyncMock()
|
||||
|
|
Loading…
Reference in New Issue