Bump ZHA dependencies (#93989)
* Make `find_entity_id` synchronous
* Remove `tries`
* Use new `attribute_updated` event signature
* Validate attributes before creating entities
* Avoid swallowing exceptions when opening covers
* Bump ZHA dependencies
* Add a matcher for Sinope water leak sensors using a non-standard ZCL attribute
* Ensure handler matching is strict, not multi
* Add type annotations for newly-updated functions

pull/94362/head
parent 584967a35a
commit 22dfa8797f
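The change that repeats throughout the diff below is the cluster-handler `attribute_updated` callback gaining a third positional argument, which the ZHA handlers accept and ignore. A minimal sketch of the new shape follows; it assumes the `ClusterHandler` base class, the `SIGNAL_ATTR_UPDATED` constant, and import paths from the ZHA component as they appear in this diff, while the handler class itself and the signal payload are illustrative only and not part of this commit:

from typing import Any

from homeassistant.components.zha.core.cluster_handlers import ClusterHandler
from homeassistant.components.zha.core.const import SIGNAL_ATTR_UPDATED
from homeassistant.core import callback


class ExampleClusterHandler(ClusterHandler):
    """Hypothetical handler showing the updated callback signature."""

    @callback
    def attribute_updated(self, attrid: int, value: Any, _: Any) -> None:
        """Handle an attribute report; the third argument is ignored here."""
        self.async_send_signal(
            f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}", attrid, value
        )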
@@ -236,6 +236,15 @@ class IASZone(BinarySensor):
)


@STRICT_MATCH(cluster_handler_names=CLUSTER_HANDLER_ZONE, models={"WL4200", "WL4200S"})
class SinopeLeakStatus(BinarySensor):
    """Sinope water leak sensor."""

    SENSOR_ATTR = "leak_status"
    _attr_name = "Moisture"
    _attr_device_class = BinarySensorDeviceClass.MOISTURE


@MULTI_MATCH(
    cluster_handler_names="tuya_manufacturer",
    manufacturers={
@@ -3,11 +3,12 @@ from __future__ import annotations

import asyncio
from enum import Enum
from functools import partialmethod, wraps
from functools import partialmethod
import logging
from typing import TYPE_CHECKING, Any, TypedDict

import zigpy.exceptions
import zigpy.util
import zigpy.zcl
from zigpy.zcl.foundation import (
    CommandSchema,

@@ -45,6 +46,8 @@ if TYPE_CHECKING:

_LOGGER = logging.getLogger(__name__)

retry_request = zigpy.util.retryable_request(tries=3)


class AttrReportConfig(TypedDict, total=True):
    """Configuration to report for the attributes."""

@@ -73,35 +76,6 @@ def parse_and_log_command(cluster_handler, tsn, command_id, args):
    return name


def decorate_command(cluster_handler, command):
    """Wrap a cluster command to make it safe."""

    @wraps(command)
    async def wrapper(*args, **kwds):
        try:
            result = await command(*args, **kwds)
            cluster_handler.debug(
                "executed '%s' command with args: '%s' kwargs: '%s' result: %s",
                command.__name__,
                args,
                kwds,
                result,
            )
            return result

        except (zigpy.exceptions.ZigbeeException, asyncio.TimeoutError) as ex:
            cluster_handler.debug(
                "command failed: '%s' args: '%s' kwargs '%s' exception: '%s'",
                command.__name__,
                args,
                kwds,
                str(ex),
            )
            return ex

    return wrapper


class ClusterHandlerStatus(Enum):
    """Status of a cluster handler."""

@@ -119,7 +93,7 @@ class ClusterHandler(LogMixin):
    # Dict of attributes to read on cluster handler initialization.
    # Dict keys -- attribute ID or names, with bool value indicating whether a cached
    # attribute read is acceptable.
    ZCL_INIT_ATTRS: dict[int | str, bool] = {}
    ZCL_INIT_ATTRS: dict[str, bool] = {}

    def __init__(self, cluster: zigpy.zcl.Cluster, endpoint: Endpoint) -> None:
        """Initialize ClusterHandler."""

@@ -396,7 +370,7 @@ class ClusterHandler(LogMixin):
        """Handle commands received to this cluster."""

    @callback
    def attribute_updated(self, attrid, value):
    def attribute_updated(self, attrid: int, value: Any, _: Any) -> None:
        """Handle attribute updates on this cluster."""
        self.async_send_signal(
            f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}",

@@ -462,7 +436,7 @@ class ClusterHandler(LogMixin):
    async def _get_attributes(
        self,
        raise_exceptions: bool,
        attributes: list[int | str],
        attributes: list[str],
        from_cache: bool = True,
        only_cache: bool = True,
    ) -> dict[int | str, Any]:

@@ -510,7 +484,8 @@ class ClusterHandler(LogMixin):
        if hasattr(self._cluster, name) and callable(getattr(self._cluster, name)):
            command = getattr(self._cluster, name)
            command.__name__ = name
            return decorate_command(self, command)

            return retry_request(command)
        return self.__getattribute__(name)

@@ -568,7 +543,7 @@ class ClientClusterHandler(ClusterHandler):
    """ClusterHandler for Zigbee client (output) clusters."""

    @callback
    def attribute_updated(self, attrid, value):
    def attribute_updated(self, attrid: int, value: Any, _: Any) -> None:
        """Handle an attribute updated on this cluster."""

        try:
@@ -1,4 +1,6 @@
"""Closures cluster handlers module for Zigbee Home Automation."""
from typing import Any

from zigpy.zcl.clusters import closures

from homeassistant.core import callback

@@ -48,7 +50,7 @@ class DoorLockClusterHandler(ClusterHandler):
            )

    @callback
    def attribute_updated(self, attrid, value):
    def attribute_updated(self, attrid: int, value: Any, _: Any) -> None:
        """Handle attribute update from lock cluster."""
        attr_name = self._get_attribute_name(attrid)
        self.debug(

@@ -144,7 +146,7 @@ class WindowCovering(ClusterHandler):
            )

    @callback
    def attribute_updated(self, attrid, value):
    def attribute_updated(self, attrid: int, value: Any, _: Any) -> None:
        """Handle attribute update from window_covering cluster."""
        attr_name = self._get_attribute_name(attrid)
        self.debug(
@@ -297,7 +297,7 @@ class LevelControlClusterHandler(ClusterHandler):
            )

    @callback
    def attribute_updated(self, attrid, value):
    def attribute_updated(self, attrid: int, value: Any, _: Any) -> None:
        """Handle attribute updates on this cluster."""
        self.debug("received attribute: %s update with value: %s", attrid, value)
        if attrid == self.CURRENT_LEVEL:

@@ -358,7 +358,7 @@ class OnOffClusterHandler(ClusterHandler):
        super().__init__(cluster, endpoint)
        self._off_listener = None

        if self.cluster.endpoint.model in (
        if self.cluster.endpoint.model not in (
            "TS011F",
            "TS0121",
            "TS0001",

@@ -366,13 +366,19 @@ class OnOffClusterHandler(ClusterHandler):
            "TS0003",
            "TS0004",
        ):
            self.ZCL_INIT_ATTRS = (  # pylint: disable=invalid-name
                self.ZCL_INIT_ATTRS.copy()
            )
            self.ZCL_INIT_ATTRS["backlight_mode"] = True
            self.ZCL_INIT_ATTRS["power_on_state"] = True
            if self.cluster.endpoint.model == "TS011F":
                self.ZCL_INIT_ATTRS["child_lock"] = True
            return

        try:
            self.cluster.find_attribute("backlight_mode")
        except KeyError:
            return

        self.ZCL_INIT_ATTRS = self.ZCL_INIT_ATTRS.copy()  # pylint: disable=invalid-name
        self.ZCL_INIT_ATTRS["backlight_mode"] = True
        self.ZCL_INIT_ATTRS["power_on_state"] = True

        if self.cluster.endpoint.model == "TS011F":
            self.ZCL_INIT_ATTRS["child_lock"] = True

    @classmethod
    def matches(cls, cluster: zigpy.zcl.Cluster, endpoint: Endpoint) -> bool:

@@ -438,7 +444,7 @@ class OnOffClusterHandler(ClusterHandler):
        self.cluster.update_attribute(self.ON_OFF, t.Bool.false)

    @callback
    def attribute_updated(self, attrid, value):
    def attribute_updated(self, attrid: int, value: Any, _: Any) -> None:
        """Handle attribute updates on this cluster."""
        if attrid == self.ON_OFF:
            self.async_send_signal(
@@ -67,7 +67,7 @@ class FanClusterHandler(ClusterHandler):
        await self.get_attribute_value("fan_mode", from_cache=False)

    @callback
    def attribute_updated(self, attrid: int, value: Any) -> None:
    def attribute_updated(self, attrid: int, value: Any, _: Any) -> None:
        """Handle attribute update from fan cluster."""
        attr_name = self._get_attribute_name(attrid)
        self.debug(

@@ -109,7 +109,7 @@ class ThermostatClusterHandler(ClusterHandler):
        AttrReportConfig(attr="pi_cooling_demand", config=REPORT_CONFIG_CLIMATE_DEMAND),
        AttrReportConfig(attr="pi_heating_demand", config=REPORT_CONFIG_CLIMATE_DEMAND),
    )
    ZCL_INIT_ATTRS: dict[int | str, bool] = {
    ZCL_INIT_ATTRS: dict[str, bool] = {
        "abs_min_heat_setpoint_limit": True,
        "abs_max_heat_setpoint_limit": True,
        "abs_min_cool_setpoint_limit": True,

@@ -234,7 +234,7 @@ class ThermostatClusterHandler(ClusterHandler):
        return self.cluster.get("unoccupied_heating_setpoint")

    @callback
    def attribute_updated(self, attrid, value):
    def attribute_updated(self, attrid: int, value: Any, _: Any) -> None:
        """Handle attribute update cluster."""
        attr_name = self._get_attribute_name(attrid)
        self.debug(
@@ -198,7 +198,7 @@ class SmartThingsAcceleration(ClusterHandler):
    )

    @callback
    def attribute_updated(self, attrid, value):
    def attribute_updated(self, attrid: int, value: Any, _: Any) -> None:
        """Handle attribute updates on this cluster."""
        try:
            attr_name = self._cluster.attributes[attrid].name

@@ -229,7 +229,7 @@ class InovelliNotificationClusterHandler(ClientClusterHandler):
    """Inovelli Notification cluster handler."""

    @callback
    def attribute_updated(self, attrid, value):
    def attribute_updated(self, attrid: int, value: Any, _: Any) -> None:
        """Handle an attribute updated on this cluster."""

    @callback

@@ -363,7 +363,7 @@ class IkeaAirPurifierClusterHandler(ClusterHandler):
        await self.get_attribute_value("fan_mode", from_cache=False)

    @callback
    def attribute_updated(self, attrid: int, value: Any) -> None:
    def attribute_updated(self, attrid: int, value: Any, _: Any) -> None:
        """Handle attribute update from fan cluster."""
        attr_name = self._get_attribute_name(attrid)
        self.debug(
@@ -388,7 +388,7 @@ class IASZoneClusterHandler(ClusterHandler):
        self.debug("finished IASZoneClusterHandler configuration")

    @callback
    def attribute_updated(self, attrid, value):
    def attribute_updated(self, attrid: int, value: Any, _: Any) -> None:
        """Handle attribute updates on this cluster."""
        if attrid == IasZone.attributes_by_name["zone_status"].id:
            self.async_send_signal(
@@ -318,16 +318,12 @@ class KeenVent(Shade):
    async def async_open_cover(self, **kwargs: Any) -> None:
        """Open the cover."""
        position = self._position or 100
        tasks = [
        await asyncio.gather(
            self._level_cluster_handler.move_to_level_with_on_off(
                position * 255 / 100, 1
            ),
            self._on_off_cluster_handler.on(),
        ]
        results = await asyncio.gather(*tasks, return_exceptions=True)
        if any(isinstance(result, Exception) for result in results):
            self.debug("couldn't open cover")
            return
        )

        self._is_open = True
        self._position = position
@@ -23,9 +23,9 @@
    "bellows==0.35.5",
    "pyserial==3.5",
    "pyserial-asyncio==0.6",
    "zha-quirks==0.0.100",
    "zha-quirks==0.0.101",
    "zigpy-deconz==0.21.0",
    "zigpy==0.55.0",
    "zigpy==0.56.0",
    "zigpy-xbee==0.18.0",
    "zigpy-zigate==0.11.0",
    "zigpy-znp==0.11.1"
@@ -401,6 +401,7 @@ class ZHANumberConfigurationEntity(ZhaEntity, NumberEntity):
        cluster_handler = cluster_handlers[0]
        if (
            cls._zcl_attribute in cluster_handler.cluster.unsupported_attributes
            or cls._zcl_attribute not in cluster_handler.cluster.attributes_by_name
            or cluster_handler.cluster.get(cls._zcl_attribute) is None
        ):
            _LOGGER.debug(
@@ -176,6 +176,7 @@ class ZCLEnumSelectEntity(ZhaEntity, SelectEntity):
        cluster_handler = cluster_handlers[0]
        if (
            cls._select_attr in cluster_handler.cluster.unsupported_attributes
            or cls._select_attr not in cluster_handler.cluster.attributes_by_name
            or cluster_handler.cluster.get(cls._select_attr) is None
        ):
            _LOGGER.debug(
@@ -148,7 +148,10 @@ class Sensor(ZhaEntity, SensorEntity):
        Return entity if it is a supported configuration, otherwise return None
        """
        cluster_handler = cluster_handlers[0]
        if cls.SENSOR_ATTR in cluster_handler.cluster.unsupported_attributes:
        if (
            cls.SENSOR_ATTR in cluster_handler.cluster.unsupported_attributes
            or cls.SENSOR_ATTR not in cluster_handler.cluster.attributes_by_name
        ):
            return None

        return cls(unique_id, zha_device, cluster_handlers, **kwargs)

@@ -275,8 +278,14 @@ class ElectricalMeasurement(Sensor):
        attrs["measurement_type"] = self._cluster_handler.measurement_type

        max_attr_name = f"{self.SENSOR_ATTR}_max"
        if (max_v := self._cluster_handler.cluster.get(max_attr_name)) is not None:
            attrs[max_attr_name] = str(self.formatter(max_v))

        try:
            max_v = self._cluster_handler.cluster.get(max_attr_name)
        except KeyError:
            pass
        else:
            if max_v is not None:
                attrs[max_attr_name] = str(self.formatter(max_v))

        return attrs
@@ -192,6 +192,7 @@ class ZHASwitchConfigurationEntity(ZhaEntity, SwitchEntity):
        cluster_handler = cluster_handlers[0]
        if (
            cls._zcl_attribute in cluster_handler.cluster.unsupported_attributes
            or cls._zcl_attribute not in cluster_handler.cluster.attributes_by_name
            or cluster_handler.cluster.get(cls._zcl_attribute) is None
        ):
            _LOGGER.debug(
@@ -2742,7 +2742,7 @@ zeroconf==0.66.0
zeversolar==0.3.1

# homeassistant.components.zha
zha-quirks==0.0.100
zha-quirks==0.0.101

# homeassistant.components.zhong_hong
zhong-hong-hvac==1.0.9

@@ -2763,7 +2763,7 @@ zigpy-zigate==0.11.0
zigpy-znp==0.11.1

# homeassistant.components.zha
zigpy==0.55.0
zigpy==0.56.0

# homeassistant.components.zoneminder
zm-py==0.5.2
@@ -2006,7 +2006,7 @@ zeroconf==0.66.0
zeversolar==0.3.1

# homeassistant.components.zha
zha-quirks==0.0.100
zha-quirks==0.0.101

# homeassistant.components.zha
zigpy-deconz==0.21.0

@@ -2021,7 +2021,7 @@ zigpy-zigate==0.11.0
zigpy-znp==0.11.1

# homeassistant.components.zha
zigpy==0.55.0
zigpy==0.56.0

# homeassistant.components.zwave_js
zwave-js-server-python==0.49.0
@@ -133,7 +133,7 @@ async def send_attributes_report(hass, cluster: zigpy.zcl.Cluster, attributes: d
    await hass.async_block_till_done()


async def find_entity_id(domain, zha_device, hass, qualifier=None):
def find_entity_id(domain, zha_device, hass, qualifier=None):
    """Find the entity id under the testing.

    This is used to get the entity id in order to get the state from the state
@@ -65,7 +65,7 @@ async def test_alarm_control_panel(

    zha_device = await zha_device_joined_restored(zigpy_device)
    cluster = zigpy_device.endpoints.get(1).ias_ace
    entity_id = await find_entity_id(Platform.ALARM_CONTROL_PANEL, zha_device, hass)
    entity_id = find_entity_id(Platform.ALARM_CONTROL_PANEL, zha_device, hass)
    assert entity_id is not None
    assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED
    await async_enable_traffic(hass, [zha_device], enabled=False)
@@ -116,7 +116,7 @@ async def test_binary_sensor(
    """Test ZHA binary_sensor platform."""
    zigpy_device = zigpy_device_mock(device)
    zha_device = await zha_device_joined_restored(zigpy_device)
    entity_id = await find_entity_id(Platform.BINARY_SENSOR, zha_device, hass)
    entity_id = find_entity_id(Platform.BINARY_SENSOR, zha_device, hass)
    assert entity_id is not None

    assert hass.states.get(entity_id).state == STATE_OFF

@@ -192,7 +192,7 @@ async def test_binary_sensor_migration_not_migrated(

    zigpy_device = zigpy_device_mock(DEVICE_IAS)
    zha_device = await zha_device_restored(zigpy_device)
    entity_id = await find_entity_id(Platform.BINARY_SENSOR, zha_device, hass)
    entity_id = find_entity_id(Platform.BINARY_SENSOR, zha_device, hass)

    assert entity_id is not None
    assert hass.states.get(entity_id).state == restored_state

@@ -222,7 +222,7 @@ async def test_binary_sensor_migration_already_migrated(
    update_attribute_cache(cluster)

    zha_device = await zha_device_restored(zigpy_device)
    entity_id = await find_entity_id(Platform.BINARY_SENSOR, zha_device, hass)
    entity_id = find_entity_id(Platform.BINARY_SENSOR, zha_device, hass)

    assert entity_id is not None
    assert hass.states.get(entity_id).state == STATE_ON  # matches attribute cache

@@ -251,7 +251,7 @@ async def test_onoff_binary_sensor_restore_state(

    zigpy_device = zigpy_device_mock(DEVICE_ONOFF)
    zha_device = await zha_device_restored(zigpy_device)
    entity_id = await find_entity_id(Platform.BINARY_SENSOR, zha_device, hass)
    entity_id = find_entity_id(Platform.BINARY_SENSOR, zha_device, hass)

    assert entity_id is not None
    assert hass.states.get(entity_id).state == restored_state
@@ -10,6 +10,7 @@ from zhaquirks.const import (
    OUTPUT_CLUSTERS,
    PROFILE_ID,
)
from zhaquirks.tuya.ts0601_valve import ParksideTuyaValveManufCluster
from zigpy.const import SIG_EP_PROFILE
from zigpy.exceptions import ZigbeeException
import zigpy.profiles.zha as zha

@@ -49,6 +50,7 @@ def button_platform_only():
            Platform.NUMBER,
            Platform.SELECT,
            Platform.SENSOR,
            Platform.SWITCH,
        ),
    ):
        yield

@@ -107,13 +109,21 @@ async def tuya_water_valve(hass, zigpy_device_mock, zha_device_joined_restored):
    zigpy_device = zigpy_device_mock(
        {
            1: {
                SIG_EP_INPUT: [general.Basic.cluster_id],
                SIG_EP_OUTPUT: [],
                SIG_EP_TYPE: zha.DeviceType.ON_OFF_SWITCH,
            }
                PROFILE_ID: zha.PROFILE_ID,
                DEVICE_TYPE: zha.DeviceType.ON_OFF_SWITCH,
                INPUT_CLUSTERS: [
                    general.Basic.cluster_id,
                    general.Identify.cluster_id,
                    general.Groups.cluster_id,
                    general.Scenes.cluster_id,
                    general.OnOff.cluster_id,
                    ParksideTuyaValveManufCluster.cluster_id,
                ],
                OUTPUT_CLUSTERS: [general.Time.cluster_id, general.Ota.cluster_id],
            },
        },
        manufacturer="_TZE200_htnnfasr",
        quirk=FrostLockQuirk,
        model="TS0601",
    )

    zha_device = await zha_device_joined_restored(zigpy_device)

@@ -127,7 +137,7 @@ async def test_button(hass: HomeAssistant, contact_sensor) -> None:
    entity_registry = er.async_get(hass)
    zha_device, cluster = contact_sensor
    assert cluster is not None
    entity_id = await find_entity_id(DOMAIN, zha_device, hass)
    entity_id = find_entity_id(DOMAIN, zha_device, hass)
    assert entity_id is not None

    state = hass.states.get(entity_id)

@@ -167,7 +177,7 @@ async def test_frost_unlock(hass: HomeAssistant, tuya_water_valve) -> None:
    entity_registry = er.async_get(hass)
    zha_device, cluster = tuya_water_valve
    assert cluster is not None
    entity_id = await find_entity_id(DOMAIN, zha_device, hass)
    entity_id = find_entity_id(DOMAIN, zha_device, hass, qualifier="frost_lock_reset")
    assert entity_id is not None

    state = hass.states.get(entity_id)
@ -281,7 +281,7 @@ async def test_climate_local_temperature(hass: HomeAssistant, device_climate) ->
|
|||
"""Test local temperature."""
|
||||
|
||||
thrm_cluster = device_climate.device.endpoints[1].thermostat
|
||||
entity_id = await find_entity_id(Platform.CLIMATE, device_climate, hass)
|
||||
entity_id = find_entity_id(Platform.CLIMATE, device_climate, hass)
|
||||
|
||||
state = hass.states.get(entity_id)
|
||||
assert state.attributes[ATTR_CURRENT_TEMPERATURE] is None
|
||||
|
@ -297,8 +297,8 @@ async def test_climate_hvac_action_running_state(
|
|||
"""Test hvac action via running state."""
|
||||
|
||||
thrm_cluster = device_climate_sinope.device.endpoints[1].thermostat
|
||||
entity_id = await find_entity_id(Platform.CLIMATE, device_climate_sinope, hass)
|
||||
sensor_entity_id = await find_entity_id(
|
||||
entity_id = find_entity_id(Platform.CLIMATE, device_climate_sinope, hass)
|
||||
sensor_entity_id = find_entity_id(
|
||||
Platform.SENSOR, device_climate_sinope, hass, "hvac"
|
||||
)
|
||||
|
||||
|
@ -362,8 +362,8 @@ async def test_climate_hvac_action_running_state_zen(
|
|||
"""Test Zen hvac action via running state."""
|
||||
|
||||
thrm_cluster = device_climate_zen.device.endpoints[1].thermostat
|
||||
entity_id = await find_entity_id(Platform.CLIMATE, device_climate_zen, hass)
|
||||
sensor_entity_id = await find_entity_id(Platform.SENSOR, device_climate_zen, hass)
|
||||
entity_id = find_entity_id(Platform.CLIMATE, device_climate_zen, hass)
|
||||
sensor_entity_id = find_entity_id(Platform.SENSOR, device_climate_zen, hass)
|
||||
|
||||
state = hass.states.get(entity_id)
|
||||
assert ATTR_HVAC_ACTION not in state.attributes
|
||||
|
@ -449,7 +449,7 @@ async def test_climate_hvac_action_pi_demand(
|
|||
"""Test hvac action based on pi_heating/cooling_demand attrs."""
|
||||
|
||||
thrm_cluster = device_climate.device.endpoints[1].thermostat
|
||||
entity_id = await find_entity_id(Platform.CLIMATE, device_climate, hass)
|
||||
entity_id = find_entity_id(Platform.CLIMATE, device_climate, hass)
|
||||
|
||||
state = hass.states.get(entity_id)
|
||||
assert ATTR_HVAC_ACTION not in state.attributes
|
||||
|
@ -498,7 +498,7 @@ async def test_hvac_mode(
|
|||
"""Test HVAC mode."""
|
||||
|
||||
thrm_cluster = device_climate.device.endpoints[1].thermostat
|
||||
entity_id = await find_entity_id(Platform.CLIMATE, device_climate, hass)
|
||||
entity_id = find_entity_id(Platform.CLIMATE, device_climate, hass)
|
||||
|
||||
state = hass.states.get(entity_id)
|
||||
assert state.state == HVACMode.OFF
|
||||
|
@ -538,7 +538,7 @@ async def test_hvac_modes(
|
|||
device_climate = await device_climate_mock(
|
||||
CLIMATE, {"ctrl_sequence_of_oper": seq_of_op}
|
||||
)
|
||||
entity_id = await find_entity_id(Platform.CLIMATE, device_climate, hass)
|
||||
entity_id = find_entity_id(Platform.CLIMATE, device_climate, hass)
|
||||
state = hass.states.get(entity_id)
|
||||
assert set(state.attributes[ATTR_HVAC_MODES]) == modes
|
||||
|
||||
|
@ -569,7 +569,7 @@ async def test_target_temperature(
|
|||
manuf=MANUF_SINOPE,
|
||||
quirk=zhaquirks.sinope.thermostat.SinopeTechnologiesThermostat,
|
||||
)
|
||||
entity_id = await find_entity_id(Platform.CLIMATE, device_climate, hass)
|
||||
entity_id = find_entity_id(Platform.CLIMATE, device_climate, hass)
|
||||
if preset:
|
||||
await hass.services.async_call(
|
||||
CLIMATE_DOMAIN,
|
||||
|
@ -605,7 +605,7 @@ async def test_target_temperature_high(
|
|||
manuf=MANUF_SINOPE,
|
||||
quirk=zhaquirks.sinope.thermostat.SinopeTechnologiesThermostat,
|
||||
)
|
||||
entity_id = await find_entity_id(Platform.CLIMATE, device_climate, hass)
|
||||
entity_id = find_entity_id(Platform.CLIMATE, device_climate, hass)
|
||||
if preset:
|
||||
await hass.services.async_call(
|
||||
CLIMATE_DOMAIN,
|
||||
|
@ -641,7 +641,7 @@ async def test_target_temperature_low(
|
|||
manuf=MANUF_SINOPE,
|
||||
quirk=zhaquirks.sinope.thermostat.SinopeTechnologiesThermostat,
|
||||
)
|
||||
entity_id = await find_entity_id(Platform.CLIMATE, device_climate, hass)
|
||||
entity_id = find_entity_id(Platform.CLIMATE, device_climate, hass)
|
||||
if preset:
|
||||
await hass.services.async_call(
|
||||
CLIMATE_DOMAIN,
|
||||
|
@ -671,7 +671,7 @@ async def test_set_hvac_mode(
|
|||
"""Test setting hvac mode."""
|
||||
|
||||
thrm_cluster = device_climate.device.endpoints[1].thermostat
|
||||
entity_id = await find_entity_id(Platform.CLIMATE, device_climate, hass)
|
||||
entity_id = find_entity_id(Platform.CLIMATE, device_climate, hass)
|
||||
|
||||
state = hass.states.get(entity_id)
|
||||
assert state.state == HVACMode.OFF
|
||||
|
@ -712,7 +712,7 @@ async def test_set_hvac_mode(
|
|||
async def test_preset_setting(hass: HomeAssistant, device_climate_sinope) -> None:
|
||||
"""Test preset setting."""
|
||||
|
||||
entity_id = await find_entity_id(Platform.CLIMATE, device_climate_sinope, hass)
|
||||
entity_id = find_entity_id(Platform.CLIMATE, device_climate_sinope, hass)
|
||||
thrm_cluster = device_climate_sinope.device.endpoints[1].thermostat
|
||||
|
||||
state = hass.states.get(entity_id)
|
||||
|
@ -792,7 +792,7 @@ async def test_preset_setting_invalid(
|
|||
) -> None:
|
||||
"""Test invalid preset setting."""
|
||||
|
||||
entity_id = await find_entity_id(Platform.CLIMATE, device_climate_sinope, hass)
|
||||
entity_id = find_entity_id(Platform.CLIMATE, device_climate_sinope, hass)
|
||||
thrm_cluster = device_climate_sinope.device.endpoints[1].thermostat
|
||||
|
||||
state = hass.states.get(entity_id)
|
||||
|
@ -813,7 +813,7 @@ async def test_preset_setting_invalid(
|
|||
async def test_set_temperature_hvac_mode(hass: HomeAssistant, device_climate) -> None:
|
||||
"""Test setting HVAC mode in temperature service call."""
|
||||
|
||||
entity_id = await find_entity_id(Platform.CLIMATE, device_climate, hass)
|
||||
entity_id = find_entity_id(Platform.CLIMATE, device_climate, hass)
|
||||
thrm_cluster = device_climate.device.endpoints[1].thermostat
|
||||
|
||||
state = hass.states.get(entity_id)
|
||||
|
@ -855,7 +855,7 @@ async def test_set_temperature_heat_cool(
|
|||
manuf=MANUF_SINOPE,
|
||||
quirk=zhaquirks.sinope.thermostat.SinopeTechnologiesThermostat,
|
||||
)
|
||||
entity_id = await find_entity_id(Platform.CLIMATE, device_climate, hass)
|
||||
entity_id = find_entity_id(Platform.CLIMATE, device_climate, hass)
|
||||
thrm_cluster = device_climate.device.endpoints[1].thermostat
|
||||
|
||||
state = hass.states.get(entity_id)
|
||||
|
@ -941,7 +941,7 @@ async def test_set_temperature_heat(hass: HomeAssistant, device_climate_mock) ->
|
|||
manuf=MANUF_SINOPE,
|
||||
quirk=zhaquirks.sinope.thermostat.SinopeTechnologiesThermostat,
|
||||
)
|
||||
entity_id = await find_entity_id(Platform.CLIMATE, device_climate, hass)
|
||||
entity_id = find_entity_id(Platform.CLIMATE, device_climate, hass)
|
||||
thrm_cluster = device_climate.device.endpoints[1].thermostat
|
||||
|
||||
state = hass.states.get(entity_id)
|
||||
|
@ -1020,7 +1020,7 @@ async def test_set_temperature_cool(hass: HomeAssistant, device_climate_mock) ->
|
|||
manuf=MANUF_SINOPE,
|
||||
quirk=zhaquirks.sinope.thermostat.SinopeTechnologiesThermostat,
|
||||
)
|
||||
entity_id = await find_entity_id(Platform.CLIMATE, device_climate, hass)
|
||||
entity_id = find_entity_id(Platform.CLIMATE, device_climate, hass)
|
||||
thrm_cluster = device_climate.device.endpoints[1].thermostat
|
||||
|
||||
state = hass.states.get(entity_id)
|
||||
|
@ -1105,7 +1105,7 @@ async def test_set_temperature_wrong_mode(
|
|||
},
|
||||
manuf=MANUF_SINOPE,
|
||||
)
|
||||
entity_id = await find_entity_id(Platform.CLIMATE, device_climate, hass)
|
||||
entity_id = find_entity_id(Platform.CLIMATE, device_climate, hass)
|
||||
thrm_cluster = device_climate.device.endpoints[1].thermostat
|
||||
|
||||
state = hass.states.get(entity_id)
|
||||
|
@ -1128,7 +1128,7 @@ async def test_set_temperature_wrong_mode(
|
|||
async def test_occupancy_reset(hass: HomeAssistant, device_climate_sinope) -> None:
|
||||
"""Test away preset reset."""
|
||||
|
||||
entity_id = await find_entity_id(Platform.CLIMATE, device_climate_sinope, hass)
|
||||
entity_id = find_entity_id(Platform.CLIMATE, device_climate_sinope, hass)
|
||||
thrm_cluster = device_climate_sinope.device.endpoints[1].thermostat
|
||||
|
||||
state = hass.states.get(entity_id)
|
||||
|
@ -1155,7 +1155,7 @@ async def test_occupancy_reset(hass: HomeAssistant, device_climate_sinope) -> No
|
|||
async def test_fan_mode(hass: HomeAssistant, device_climate_fan) -> None:
|
||||
"""Test fan mode."""
|
||||
|
||||
entity_id = await find_entity_id(Platform.CLIMATE, device_climate_fan, hass)
|
||||
entity_id = find_entity_id(Platform.CLIMATE, device_climate_fan, hass)
|
||||
thrm_cluster = device_climate_fan.device.endpoints[1].thermostat
|
||||
|
||||
state = hass.states.get(entity_id)
|
||||
|
@ -1186,7 +1186,7 @@ async def test_set_fan_mode_not_supported(
|
|||
) -> None:
|
||||
"""Test fan setting unsupported mode."""
|
||||
|
||||
entity_id = await find_entity_id(Platform.CLIMATE, device_climate_fan, hass)
|
||||
entity_id = find_entity_id(Platform.CLIMATE, device_climate_fan, hass)
|
||||
fan_cluster = device_climate_fan.device.endpoints[1].fan
|
||||
|
||||
await hass.services.async_call(
|
||||
|
@ -1201,7 +1201,7 @@ async def test_set_fan_mode_not_supported(
|
|||
async def test_set_fan_mode(hass: HomeAssistant, device_climate_fan) -> None:
|
||||
"""Test fan mode setting."""
|
||||
|
||||
entity_id = await find_entity_id(Platform.CLIMATE, device_climate_fan, hass)
|
||||
entity_id = find_entity_id(Platform.CLIMATE, device_climate_fan, hass)
|
||||
fan_cluster = device_climate_fan.device.endpoints[1].fan
|
||||
|
||||
state = hass.states.get(entity_id)
|
||||
|
@ -1230,7 +1230,7 @@ async def test_set_fan_mode(hass: HomeAssistant, device_climate_fan) -> None:
|
|||
async def test_set_moes_preset(hass: HomeAssistant, device_climate_moes) -> None:
|
||||
"""Test setting preset for moes trv."""
|
||||
|
||||
entity_id = await find_entity_id(Platform.CLIMATE, device_climate_moes, hass)
|
||||
entity_id = find_entity_id(Platform.CLIMATE, device_climate_moes, hass)
|
||||
thrm_cluster = device_climate_moes.device.endpoints[1].thermostat
|
||||
|
||||
state = hass.states.get(entity_id)
|
||||
|
@ -1347,7 +1347,7 @@ async def test_set_moes_operation_mode(
|
|||
) -> None:
|
||||
"""Test setting preset for moes trv."""
|
||||
|
||||
entity_id = await find_entity_id(Platform.CLIMATE, device_climate_moes, hass)
|
||||
entity_id = find_entity_id(Platform.CLIMATE, device_climate_moes, hass)
|
||||
thrm_cluster = device_climate_moes.device.endpoints[1].thermostat
|
||||
|
||||
await send_attributes_report(hass, thrm_cluster, {"operation_preset": 0})
|
||||
|
@ -1391,7 +1391,7 @@ async def test_set_zonnsmart_preset(
|
|||
) -> None:
|
||||
"""Test setting preset from homeassistant for zonnsmart trv."""
|
||||
|
||||
entity_id = await find_entity_id(Platform.CLIMATE, device_climate_zonnsmart, hass)
|
||||
entity_id = find_entity_id(Platform.CLIMATE, device_climate_zonnsmart, hass)
|
||||
thrm_cluster = device_climate_zonnsmart.device.endpoints[1].thermostat
|
||||
|
||||
state = hass.states.get(entity_id)
|
||||
|
@ -1460,7 +1460,7 @@ async def test_set_zonnsmart_operation_mode(
|
|||
) -> None:
|
||||
"""Test setting preset from trv for zonnsmart trv."""
|
||||
|
||||
entity_id = await find_entity_id(Platform.CLIMATE, device_climate_zonnsmart, hass)
|
||||
entity_id = find_entity_id(Platform.CLIMATE, device_climate_zonnsmart, hass)
|
||||
thrm_cluster = device_climate_zonnsmart.device.endpoints[1].thermostat
|
||||
|
||||
await send_attributes_report(hass, thrm_cluster, {"operation_preset": 0})
|
||||
|
|
|
@@ -1,6 +1,6 @@
"""Test ZHA cover."""
import asyncio
from unittest.mock import AsyncMock, patch
from unittest.mock import patch

import pytest
import zigpy.profiles.zha

@@ -36,7 +36,7 @@ from .common import (
)
from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE

from tests.common import async_capture_events, mock_coro, mock_restore_cache
from tests.common import async_capture_events, mock_restore_cache


@pytest.fixture(autouse=True)

@@ -132,7 +132,7 @@ async def test_cover(
    assert cluster.read_attributes.call_count == 1
    assert "current_position_lift_percentage" in cluster.read_attributes.call_args[0][0]

    entity_id = await find_entity_id(Platform.COVER, zha_device, hass)
    entity_id = find_entity_id(Platform.COVER, zha_device, hass)
    assert entity_id is not None

    await async_enable_traffic(hass, [zha_device], enabled=False)

@@ -152,9 +152,7 @@ async def test_cover(
    assert hass.states.get(entity_id).state == STATE_OPEN

    # close from UI
    with patch(
        "zigpy.zcl.Cluster.request", return_value=mock_coro([0x1, zcl_f.Status.SUCCESS])
    ):
    with patch("zigpy.zcl.Cluster.request", return_value=[0x1, zcl_f.Status.SUCCESS]):
        await hass.services.async_call(
            COVER_DOMAIN, SERVICE_CLOSE_COVER, {"entity_id": entity_id}, blocking=True
        )

@@ -165,9 +163,7 @@ async def test_cover(
    assert cluster.request.call_args[1]["expect_reply"] is True

    # open from UI
    with patch(
        "zigpy.zcl.Cluster.request", return_value=mock_coro([0x0, zcl_f.Status.SUCCESS])
    ):
    with patch("zigpy.zcl.Cluster.request", return_value=[0x0, zcl_f.Status.SUCCESS]):
        await hass.services.async_call(
            COVER_DOMAIN, SERVICE_OPEN_COVER, {"entity_id": entity_id}, blocking=True
        )

@@ -178,9 +174,7 @@ async def test_cover(
    assert cluster.request.call_args[1]["expect_reply"] is True

    # set position UI
    with patch(
        "zigpy.zcl.Cluster.request", return_value=mock_coro([0x5, zcl_f.Status.SUCCESS])
    ):
    with patch("zigpy.zcl.Cluster.request", return_value=[0x5, zcl_f.Status.SUCCESS]):
        await hass.services.async_call(
            COVER_DOMAIN,
            SERVICE_SET_COVER_POSITION,

@@ -195,9 +189,7 @@ async def test_cover(
    assert cluster.request.call_args[1]["expect_reply"] is True

    # stop from UI
    with patch(
        "zigpy.zcl.Cluster.request", return_value=mock_coro([0x2, zcl_f.Status.SUCCESS])
    ):
    with patch("zigpy.zcl.Cluster.request", return_value=[0x2, zcl_f.Status.SUCCESS]):
        await hass.services.async_call(
            COVER_DOMAIN, SERVICE_STOP_COVER, {"entity_id": entity_id}, blocking=True
        )

@@ -223,7 +215,7 @@ async def test_shade(

    cluster_on_off = zigpy_shade_device.endpoints.get(1).on_off
    cluster_level = zigpy_shade_device.endpoints.get(1).level
    entity_id = await find_entity_id(Platform.COVER, zha_device, hass)
    entity_id = find_entity_id(Platform.COVER, zha_device, hass)
    assert entity_id is not None

    await async_enable_traffic(hass, [zha_device], enabled=False)

@@ -244,17 +236,19 @@ async def test_shade(

    # close from UI command fails
    with patch("zigpy.zcl.Cluster.request", side_effect=asyncio.TimeoutError):
        await hass.services.async_call(
            COVER_DOMAIN, SERVICE_CLOSE_COVER, {"entity_id": entity_id}, blocking=True
        )
        assert cluster_on_off.request.call_count == 1
        with pytest.raises(asyncio.TimeoutError):
            await hass.services.async_call(
                COVER_DOMAIN,
                SERVICE_CLOSE_COVER,
                {"entity_id": entity_id},
                blocking=True,
            )
        assert cluster_on_off.request.call_count == 3
        assert cluster_on_off.request.call_args[0][0] is False
        assert cluster_on_off.request.call_args[0][1] == 0x0000
    assert hass.states.get(entity_id).state == STATE_OPEN

    with patch(
        "zigpy.zcl.Cluster.request", AsyncMock(return_value=[0x1, zcl_f.Status.SUCCESS])
    ):
    with patch("zigpy.zcl.Cluster.request", return_value=[0x1, zcl_f.Status.SUCCESS]):
        await hass.services.async_call(
            COVER_DOMAIN, SERVICE_CLOSE_COVER, {"entity_id": entity_id}, blocking=True
        )

@@ -267,18 +261,20 @@ async def test_shade(
    assert ATTR_CURRENT_POSITION not in hass.states.get(entity_id).attributes
    await send_attributes_report(hass, cluster_level, {0: 0})
    with patch("zigpy.zcl.Cluster.request", side_effect=asyncio.TimeoutError):
        await hass.services.async_call(
            COVER_DOMAIN, SERVICE_OPEN_COVER, {"entity_id": entity_id}, blocking=True
        )
        assert cluster_on_off.request.call_count == 1
        with pytest.raises(asyncio.TimeoutError):
            await hass.services.async_call(
                COVER_DOMAIN,
                SERVICE_OPEN_COVER,
                {"entity_id": entity_id},
                blocking=True,
            )
        assert cluster_on_off.request.call_count == 3
        assert cluster_on_off.request.call_args[0][0] is False
        assert cluster_on_off.request.call_args[0][1] == 0x0001
    assert hass.states.get(entity_id).state == STATE_CLOSED

    # open from UI succeeds
    with patch(
        "zigpy.zcl.Cluster.request", AsyncMock(return_value=[0x0, zcl_f.Status.SUCCESS])
    ):
    with patch("zigpy.zcl.Cluster.request", return_value=[0x0, zcl_f.Status.SUCCESS]):
        await hass.services.async_call(
            COVER_DOMAIN, SERVICE_OPEN_COVER, {"entity_id": entity_id}, blocking=True
        )

@@ -289,22 +285,21 @@ async def test_shade(

    # set position UI command fails
    with patch("zigpy.zcl.Cluster.request", side_effect=asyncio.TimeoutError):
        await hass.services.async_call(
            COVER_DOMAIN,
            SERVICE_SET_COVER_POSITION,
            {"entity_id": entity_id, "position": 47},
            blocking=True,
        )
        assert cluster_level.request.call_count == 1
        with pytest.raises(asyncio.TimeoutError):
            await hass.services.async_call(
                COVER_DOMAIN,
                SERVICE_SET_COVER_POSITION,
                {"entity_id": entity_id, "position": 47},
                blocking=True,
            )
        assert cluster_level.request.call_count == 3
        assert cluster_level.request.call_args[0][0] is False
        assert cluster_level.request.call_args[0][1] == 0x0004
        assert int(cluster_level.request.call_args[0][3] * 100 / 255) == 47
    assert hass.states.get(entity_id).attributes[ATTR_CURRENT_POSITION] == 0

    # set position UI success
    with patch(
        "zigpy.zcl.Cluster.request", AsyncMock(return_value=[0x5, zcl_f.Status.SUCCESS])
    ):
    with patch("zigpy.zcl.Cluster.request", return_value=[0x5, zcl_f.Status.SUCCESS]):
        await hass.services.async_call(
            COVER_DOMAIN,
            SERVICE_SET_COVER_POSITION,

@@ -331,13 +326,14 @@ async def test_shade(

    # test cover stop
    with patch("zigpy.zcl.Cluster.request", side_effect=asyncio.TimeoutError):
        await hass.services.async_call(
            COVER_DOMAIN,
            SERVICE_STOP_COVER,
            {"entity_id": entity_id},
            blocking=True,
        )
        assert cluster_level.request.call_count == 1
        with pytest.raises(asyncio.TimeoutError):
            await hass.services.async_call(
                COVER_DOMAIN,
                SERVICE_STOP_COVER,
                {"entity_id": entity_id},
                blocking=True,
            )
        assert cluster_level.request.call_count == 3
        assert cluster_level.request.call_args[0][0] is False
        assert cluster_level.request.call_args[0][1] in (0x0003, 0x0007)

@@ -361,7 +357,7 @@ async def test_restore_state(
    hass.state = CoreState.starting

    zha_device = await zha_device_restored(zigpy_shade_device)
    entity_id = await find_entity_id(Platform.COVER, zha_device, hass)
    entity_id = find_entity_id(Platform.COVER, zha_device, hass)
    assert entity_id is not None

    # test that the cover was created and that it is unavailable

@@ -379,7 +375,7 @@ async def test_keen_vent(

    cluster_on_off = zigpy_keen_vent.endpoints.get(1).on_off
    cluster_level = zigpy_keen_vent.endpoints.get(1).level
    entity_id = await find_entity_id(Platform.COVER, zha_device, hass)
    entity_id = find_entity_id(Platform.COVER, zha_device, hass)
    assert entity_id is not None

    await async_enable_traffic(hass, [zha_device], enabled=False)

@@ -396,21 +392,25 @@ async def test_keen_vent(

    # open from UI command fails
    p1 = patch.object(cluster_on_off, "request", side_effect=asyncio.TimeoutError)
    p2 = patch.object(cluster_level, "request", AsyncMock(return_value=[4, 0]))
    p2 = patch.object(cluster_level, "request", return_value=[4, 0])

    with p1, p2:
        await hass.services.async_call(
            COVER_DOMAIN, SERVICE_OPEN_COVER, {"entity_id": entity_id}, blocking=True
        )
        assert cluster_on_off.request.call_count == 1
        with pytest.raises(asyncio.TimeoutError):
            await hass.services.async_call(
                COVER_DOMAIN,
                SERVICE_OPEN_COVER,
                {"entity_id": entity_id},
                blocking=True,
            )
        assert cluster_on_off.request.call_count == 3
        assert cluster_on_off.request.call_args[0][0] is False
        assert cluster_on_off.request.call_args[0][1] == 0x0001
        assert cluster_level.request.call_count == 1
        assert hass.states.get(entity_id).state == STATE_CLOSED

    # open from UI command success
    p1 = patch.object(cluster_on_off, "request", AsyncMock(return_value=[1, 0]))
    p2 = patch.object(cluster_level, "request", AsyncMock(return_value=[4, 0]))
    p1 = patch.object(cluster_on_off, "request", return_value=[1, 0])
    p2 = patch.object(cluster_level, "request", return_value=[4, 0])

    with p1, p2:
        await hass.services.async_call(
@@ -328,7 +328,6 @@ async def test_action(hass: HomeAssistant, device_ias, device_inovelli) -> None:
            5,
            expect_reply=False,
            manufacturer=4151,
            tries=1,
            tsn=None,
        )
        in cluster.request.call_args_list

@@ -345,7 +344,6 @@ async def test_action(hass: HomeAssistant, device_ias, device_inovelli) -> None:
            5,
            expect_reply=False,
            manufacturer=4151,
            tries=1,
            tsn=None,
        )
        in cluster.request.call_args_list
@@ -70,7 +70,7 @@ async def test_device_tracker(

    zha_device = await zha_device_joined_restored(zigpy_device_dt)
    cluster = zigpy_device_dt.endpoints.get(1).power
    entity_id = await find_entity_id(Platform.DEVICE_TRACKER, zha_device, hass)
    entity_id = find_entity_id(Platform.DEVICE_TRACKER, zha_device, hass)
    assert entity_id is not None

    assert hass.states.get(entity_id).state == STATE_NOT_HOME
@@ -15,20 +15,12 @@ import zigpy.zcl.clusters.general
import zigpy.zcl.clusters.security
import zigpy.zcl.foundation as zcl_f

import homeassistant.components.zha.binary_sensor
import homeassistant.components.zha.core.cluster_handlers as cluster_handlers
import homeassistant.components.zha.core.const as zha_const
from homeassistant.components.zha.core.device import ZHADevice
import homeassistant.components.zha.core.discovery as disc
from homeassistant.components.zha.core.endpoint import Endpoint
import homeassistant.components.zha.core.registries as zha_regs
import homeassistant.components.zha.cover
import homeassistant.components.zha.device_tracker
import homeassistant.components.zha.fan
import homeassistant.components.zha.light
import homeassistant.components.zha.lock
import homeassistant.components.zha.sensor
import homeassistant.components.zha.switch
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
import homeassistant.helpers.entity_registry as er

@@ -131,7 +123,6 @@ async def test_devices(
            ),
            expect_reply=True,
            manufacturer=None,
            tries=1,
            tsn=None,
        )
    ]

@@ -493,35 +484,3 @@ async def test_group_probe_cleanup_called(
    await config_entry.async_unload(hass_disable_services)
    await hass_disable_services.async_block_till_done()
    disc.GROUP_PROBE.cleanup.assert_called()


@patch(
    "zigpy.zcl.clusters.general.Identify.request",
    new=AsyncMock(return_value=[mock.sentinel.data, zcl_f.Status.SUCCESS]),
)
@patch(
    "homeassistant.components.zha.entity.ZhaEntity.entity_registry_enabled_default",
    new=Mock(return_value=True),
)
async def test_cluster_handler_with_empty_ep_attribute_cluster(
    hass_disable_services,
    zigpy_device_mock,
    zha_device_joined_restored,
) -> None:
    """Test device discovery for cluster which does not have em_attribute."""
    entity_registry = homeassistant.helpers.entity_registry.async_get(
        hass_disable_services
    )

    zigpy_device = zigpy_device_mock(
        {1: {SIG_EP_INPUT: [0x042E], SIG_EP_OUTPUT: [], SIG_EP_TYPE: 0x1234}},
        "00:11:22:33:44:55:66:77",
        "test manufacturer",
        "test model",
        patch_cluster=False,
    )
    zha_dev = await zha_device_joined_restored(zigpy_device)
    ha_entity_id = entity_registry.async_get_entity_id(
        "sensor", "zha", f"{zha_dev.ieee}-1-1070"
    )
    assert ha_entity_id is not None
@@ -167,7 +167,7 @@ async def test_fan(

    zha_device = await zha_device_joined_restored(zigpy_device)
    cluster = zigpy_device.endpoints.get(1).fan
    entity_id = await find_entity_id(Platform.FAN, zha_device, hass)
    entity_id = find_entity_id(Platform.FAN, zha_device, hass)
    assert entity_id is not None

    assert hass.states.get(entity_id).state == STATE_OFF

@@ -475,7 +475,7 @@ async def test_fan_init(
    cluster.PLUGGED_ATTR_READS = plug_read

    zha_device = await zha_device_joined_restored(zigpy_device)
    entity_id = await find_entity_id(Platform.FAN, zha_device, hass)
    entity_id = find_entity_id(Platform.FAN, zha_device, hass)
    assert entity_id is not None
    assert hass.states.get(entity_id).state == expected_state
    assert hass.states.get(entity_id).attributes[ATTR_PERCENTAGE] == expected_percentage

@@ -493,7 +493,7 @@ async def test_fan_update_entity(
    cluster.PLUGGED_ATTR_READS = {"fan_mode": 0}

    zha_device = await zha_device_joined_restored(zigpy_device)
    entity_id = await find_entity_id(Platform.FAN, zha_device, hass)
    entity_id = find_entity_id(Platform.FAN, zha_device, hass)
    assert entity_id is not None
    assert hass.states.get(entity_id).state == STATE_OFF
    assert hass.states.get(entity_id).attributes[ATTR_PERCENTAGE] == 0

@@ -562,7 +562,7 @@ async def test_fan_ikea(
    """Test ZHA fan Ikea platform."""
    zha_device = await zha_device_joined_restored(zigpy_device_ikea)
    cluster = zigpy_device_ikea.endpoints.get(1).ikea_airpurifier
    entity_id = await find_entity_id(Platform.FAN, zha_device, hass)
    entity_id = find_entity_id(Platform.FAN, zha_device, hass)
    assert entity_id is not None

    assert hass.states.get(entity_id).state == STATE_OFF

@@ -656,7 +656,7 @@ async def test_fan_ikea_init(
    cluster.PLUGGED_ATTR_READS = ikea_plug_read

    zha_device = await zha_device_joined_restored(zigpy_device_ikea)
    entity_id = await find_entity_id(Platform.FAN, zha_device, hass)
    entity_id = find_entity_id(Platform.FAN, zha_device, hass)
    assert entity_id is not None
    assert hass.states.get(entity_id).state == ikea_expected_state
    assert (

@@ -676,7 +676,7 @@ async def test_fan_ikea_update_entity(
    cluster.PLUGGED_ATTR_READS = {"fan_mode": 0}

    zha_device = await zha_device_joined_restored(zigpy_device_ikea)
    entity_id = await find_entity_id(Platform.FAN, zha_device, hass)
    entity_id = find_entity_id(Platform.FAN, zha_device, hass)
    assert entity_id is not None
    assert hass.states.get(entity_id).state == STATE_OFF
    assert hass.states.get(entity_id).attributes[ATTR_PERCENTAGE] == 0
@ -264,7 +264,7 @@ async def test_light_refresh(
|
|||
on_off_cluster = zigpy_device.endpoints[1].on_off
|
||||
on_off_cluster.PLUGGED_ATTR_READS = {"on_off": 0}
|
||||
zha_device = await zha_device_joined_restored(zigpy_device)
|
||||
entity_id = await find_entity_id(Platform.LIGHT, zha_device, hass)
|
||||
entity_id = find_entity_id(Platform.LIGHT, zha_device, hass)
|
||||
|
||||
# allow traffic to flow through the gateway and device
|
||||
await async_enable_traffic(hass, [zha_device])
|
||||
|
@ -326,7 +326,7 @@ async def test_light(
|
|||
# create zigpy devices
|
||||
zigpy_device = zigpy_device_mock(device)
|
||||
zha_device = await zha_device_joined_restored(zigpy_device)
|
||||
entity_id = await find_entity_id(Platform.LIGHT, zha_device, hass)
|
||||
entity_id = find_entity_id(Platform.LIGHT, zha_device, hass)
|
||||
|
||||
assert entity_id is not None
|
||||
|
||||
|
@ -446,7 +446,7 @@ async def test_light_initialization(
|
|||
|
||||
with patch_zha_config("light", config_override):
|
||||
zha_device = await zha_device_joined_restored(zigpy_device)
|
||||
entity_id = await find_entity_id(Platform.LIGHT, zha_device, hass)
|
||||
entity_id = find_entity_id(Platform.LIGHT, zha_device, hass)
|
||||
|
||||
assert entity_id is not None
|
||||
|
||||
|
@ -495,9 +495,9 @@ async def test_transitions(
|
|||
assert member.group == zha_group
|
||||
assert member.endpoint is not None
|
||||
|
||||
device_1_entity_id = await find_entity_id(Platform.LIGHT, device_light_1, hass)
|
||||
device_2_entity_id = await find_entity_id(Platform.LIGHT, device_light_2, hass)
|
||||
eWeLink_light_entity_id = await find_entity_id(Platform.LIGHT, eWeLink_light, hass)
|
||||
device_1_entity_id = find_entity_id(Platform.LIGHT, device_light_1, hass)
|
||||
device_2_entity_id = find_entity_id(Platform.LIGHT, device_light_2, hass)
|
||||
eWeLink_light_entity_id = find_entity_id(Platform.LIGHT, eWeLink_light, hass)
|
||||
assert device_1_entity_id != device_2_entity_id
|
||||
|
||||
group_entity_id = async_find_group_entity_id(hass, Platform.LIGHT, zha_group)
|
||||
|
@ -553,7 +553,6 @@ async def test_transitions(
|
|||
transition_time=0,
|
||||
expect_reply=True,
|
||||
manufacturer=None,
|
||||
tries=1,
|
||||
tsn=None,
|
||||
)
|
||||
|
||||
|
@ -589,7 +588,6 @@ async def test_transitions(
|
|||
transition_time=35,
|
||||
expect_reply=True,
|
||||
manufacturer=None,
|
||||
tries=1,
|
||||
tsn=None,
|
||||
)
|
||||
assert dev1_cluster_color.request.call_args == call(
|
||||
|
@ -600,7 +598,6 @@ async def test_transitions(
|
|||
transition_time=35,
|
||||
expect_reply=True,
|
||||
manufacturer=None,
|
||||
tries=1,
|
||||
tsn=None,
|
||||
)
|
||||
|
||||
|
@ -637,7 +634,6 @@ async def test_transitions(
|
|||
transition_time=0,
|
||||
expect_reply=True,
|
||||
manufacturer=None,
|
||||
tries=1,
|
||||
tsn=None,
|
||||
)
|
||||
|
||||
|
@ -674,7 +670,6 @@ async def test_transitions(
|
|||
transition_time=0,
|
||||
expect_reply=True,
|
||||
manufacturer=None,
|
||||
tries=1,
|
||||
tsn=None,
|
||||
)
|
||||
assert dev1_cluster_color.request.call_args == call(
|
||||
|
@ -685,7 +680,6 @@ async def test_transitions(
|
|||
transition_time=0, # no transition when new_color_provided_while_off
|
||||
expect_reply=True,
|
||||
manufacturer=None,
|
||||
tries=1,
|
||||
tsn=None,
|
||||
)
|
||||
assert dev1_cluster_level.request.call_args_list[1] == call(
|
||||
|
@ -696,7 +690,6 @@ async def test_transitions(
|
|||
transition_time=10,
|
||||
expect_reply=True,
|
||||
manufacturer=None,
|
||||
tries=1,
|
||||
tsn=None,
|
||||
)
|
||||
|
||||
|
@ -758,7 +751,6 @@ async def test_transitions(
|
|||
transition_time=0,
|
||||
expect_reply=True,
|
||||
manufacturer=None,
|
||||
tries=1,
|
||||
tsn=None,
|
||||
)
|
||||
assert dev1_cluster_color.request.call_args == call(
|
||||
|
@ -769,7 +761,6 @@ async def test_transitions(
|
|||
transition_time=0, # no transition when new_color_provided_while_off
|
||||
expect_reply=True,
|
||||
manufacturer=None,
|
||||
tries=1,
|
||||
tsn=None,
|
||||
)
|
||||
assert dev1_cluster_level.request.call_args_list[1] == call(
|
||||
|
@ -780,7 +771,6 @@ async def test_transitions(
|
|||
transition_time=0,
|
||||
expect_reply=True,
|
||||
manufacturer=None,
|
||||
tries=1,
|
||||
tsn=None,
|
||||
)
|
||||
|
||||
|
@ -838,7 +828,6 @@ async def test_transitions(
|
|||
dev1_cluster_on_off.commands_by_name["on"].schema,
|
||||
expect_reply=True,
|
||||
manufacturer=None,
|
||||
tries=1,
|
||||
tsn=None,
|
||||
)
|
||||
|
||||
|
@ -850,7 +839,6 @@ async def test_transitions(
|
|||
transition_time=0, # no transition when new_color_provided_while_off
|
||||
expect_reply=True,
|
||||
manufacturer=None,
|
||||
tries=1,
|
||||
tsn=None,
|
||||
)
|
||||
|
||||
|
@ -910,7 +898,6 @@ async def test_transitions(
|
|||
transition_time=1, # transition time - sengled light uses default minimum
expect_reply=True,
manufacturer=None,
tries=1,
tsn=None,
)

@@ -968,7 +955,6 @@ async def test_transitions(
transition_time=1,
expect_reply=True,
manufacturer=None,
tries=1,
tsn=None,
)
assert dev2_cluster_color.request.call_args == call(

@@ -979,7 +965,6 @@ async def test_transitions(
transition_time=1, # sengled transition == 1 when new_color_provided_while_off
expect_reply=True,
manufacturer=None,
tries=1,
tsn=None,
)
assert dev2_cluster_level.request.call_args_list[1] == call(

@@ -990,7 +975,6 @@ async def test_transitions(
transition_time=10,
expect_reply=True,
manufacturer=None,
tries=1,
tsn=None,
)

@@ -1055,7 +1039,6 @@ async def test_transitions(
transition_time=10, # sengled transition == 1 when new_color_provided_while_off
expect_reply=True,
manufacturer=None,
tries=1,
tsn=None,
)
assert group_level_cluster_handler.request.call_args == call(

@@ -1066,7 +1049,6 @@ async def test_transitions(
transition_time=10,
expect_reply=True,
manufacturer=None,
tries=1,
tsn=None,
)

@@ -1121,7 +1103,6 @@ async def test_transitions(
transition_time=20, # transition time
expect_reply=True,
manufacturer=None,
tries=1,
tsn=None,
)

@@ -1151,7 +1132,6 @@ async def test_transitions(
transition_time=1, # transition time - sengled light uses default minimum
expect_reply=True,
manufacturer=None,
tries=1,
tsn=None,
)

@@ -1184,7 +1164,6 @@ async def test_transitions(
eWeLink_cluster_on_off.commands_by_name["on"].schema,
expect_reply=True,
manufacturer=None,
tries=1,
tsn=None,
)
assert dev1_cluster_color.request.call_args == call(

@@ -1195,7 +1174,6 @@ async def test_transitions(
transition_time=0,
expect_reply=True,
manufacturer=None,
tries=1,
tsn=None,
)

@@ -1222,7 +1200,7 @@ async def test_transitions(
async def test_on_with_off_color(hass: HomeAssistant, device_light_1) -> None:
"""Test turning on the light and sending color commands before on/level commands for supporting lights."""

device_1_entity_id = await find_entity_id(Platform.LIGHT, device_light_1, hass)
device_1_entity_id = find_entity_id(Platform.LIGHT, device_light_1, hass)
dev1_cluster_on_off = device_light_1.device.endpoints[1].on_off
dev1_cluster_level = device_light_1.device.endpoints[1].level
dev1_cluster_color = device_light_1.device.endpoints[1].light_color

@@ -1261,7 +1239,6 @@ async def test_on_with_off_color(hass: HomeAssistant, device_light_1) -> None:
dev1_cluster_on_off.commands_by_name["on"].schema,
expect_reply=True,
manufacturer=None,
tries=1,
tsn=None,
)
assert dev1_cluster_color.request.call_args == call(

@@ -1272,7 +1249,6 @@ async def test_on_with_off_color(hass: HomeAssistant, device_light_1) -> None:
transition_time=0,
expect_reply=True,
manufacturer=None,
tries=1,
tsn=None,
)

@@ -1319,7 +1295,6 @@ async def test_on_with_off_color(hass: HomeAssistant, device_light_1) -> None:
transition_time=0,
expect_reply=True,
manufacturer=None,
tries=1,
tsn=None,
)
assert dev1_cluster_color.request.call_args == call(

@@ -1330,7 +1305,6 @@ async def test_on_with_off_color(hass: HomeAssistant, device_light_1) -> None:
transition_time=0,
expect_reply=True,
manufacturer=None,
tries=1,
tsn=None,
)
assert dev1_cluster_level.request.call_args_list[1] == call(

@@ -1341,7 +1315,6 @@ async def test_on_with_off_color(hass: HomeAssistant, device_light_1) -> None:
transition_time=0,
expect_reply=True,
manufacturer=None,
tries=1,
tsn=None,
)

@@ -1388,7 +1361,6 @@ async def async_test_on_off_from_hass(hass, cluster, entity_id):
cluster.commands_by_name["on"].schema,
expect_reply=True,
manufacturer=None,
tries=1,
tsn=None,
)

@@ -1411,7 +1383,6 @@ async def async_test_off_from_hass(hass, cluster, entity_id):
cluster.commands_by_name["off"].schema,
expect_reply=True,
manufacturer=None,
tries=1,
tsn=None,
)

@@ -1439,7 +1410,6 @@ async def async_test_level_on_off_from_hass(
on_off_cluster.commands_by_name["on"].schema,
expect_reply=True,
manufacturer=None,
tries=1,
tsn=None,
)
on_off_cluster.request.reset_mock()

@@ -1463,7 +1433,6 @@ async def async_test_level_on_off_from_hass(
on_off_cluster.commands_by_name["on"].schema,
expect_reply=True,
manufacturer=None,
tries=1,
tsn=None,
)
assert level_cluster.request.call_args == call(

@@ -1474,7 +1443,6 @@ async def async_test_level_on_off_from_hass(
transition_time=100,
expect_reply=True,
manufacturer=None,
tries=1,
tsn=None,
)
on_off_cluster.request.reset_mock()

@@ -1499,7 +1467,6 @@ async def async_test_level_on_off_from_hass(
transition_time=int(expected_default_transition),
expect_reply=True,
manufacturer=None,
tries=1,
tsn=None,
)
on_off_cluster.request.reset_mock()

@@ -1542,7 +1509,6 @@ async def async_test_flash_from_hass(hass, cluster, entity_id, flash):
effect_variant=general.Identify.EffectVariant.Default,
expect_reply=True,
manufacturer=None,
tries=1,
tsn=None,
)

@@ -1593,9 +1559,9 @@ async def test_zha_group_light_entity(
assert member.group == zha_group
assert member.endpoint is not None

device_1_entity_id = await find_entity_id(Platform.LIGHT, device_light_1, hass)
device_2_entity_id = await find_entity_id(Platform.LIGHT, device_light_2, hass)
device_3_entity_id = await find_entity_id(Platform.LIGHT, device_light_3, hass)
device_1_entity_id = find_entity_id(Platform.LIGHT, device_light_1, hass)
device_2_entity_id = find_entity_id(Platform.LIGHT, device_light_2, hass)
device_3_entity_id = find_entity_id(Platform.LIGHT, device_light_3, hass)

assert (
device_1_entity_id != device_2_entity_id

@@ -1833,8 +1799,8 @@ async def test_group_member_assume_state(
assert member.group == zha_group
assert member.endpoint is not None

device_1_entity_id = await find_entity_id(Platform.LIGHT, device_light_1, hass)
device_2_entity_id = await find_entity_id(Platform.LIGHT, device_light_2, hass)
device_1_entity_id = find_entity_id(Platform.LIGHT, device_light_1, hass)
device_2_entity_id = find_entity_id(Platform.LIGHT, device_light_2, hass)

assert device_1_entity_id != device_2_entity_id
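Throughout the hunks above, the expected `Cluster.request` calls drop the `tries=1` keyword; with the bumped dependencies the retry count is evidently no longer passed per request. A minimal, self-contained sketch of the new assertion shape (the `request` mock and its positional arguments are placeholders, not the real ZCL command parameters):

import asyncio
from unittest.mock import AsyncMock, call

# Hypothetical stand-in for a patched zigpy-style ``Cluster.request``; only the
# keyword shape of the expected call matters here.
request = AsyncMock(return_value=[0x00, 0x00])

asyncio.run(
    request(False, 0x01, None, expect_reply=True, manufacturer=None, tsn=None)
)

# The expected call no longer lists ``tries`` among its keyword arguments.
assert request.call_args == call(
    False, 0x01, None, expect_reply=True, manufacturer=None, tsn=None
)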
@@ -19,8 +19,6 @@ from homeassistant.core import HomeAssistant
from .common import async_enable_traffic, find_entity_id, send_attributes_report
from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_TYPE

from tests.common import mock_coro

LOCK_DOOR = 0
UNLOCK_DOOR = 1
SET_PIN_CODE = 5

@@ -64,7 +62,7 @@ async def test_lock(hass: HomeAssistant, lock) -> None:
"""Test ZHA lock platform."""

zha_device, cluster = lock
entity_id = await find_entity_id(Platform.LOCK, zha_device, hass)
entity_id = find_entity_id(Platform.LOCK, zha_device, hass)
assert entity_id is not None

assert hass.states.get(entity_id).state == STATE_UNLOCKED

@@ -107,9 +105,7 @@ async def test_lock(hass: HomeAssistant, lock) -> None:

async def async_lock(hass, cluster, entity_id):
"""Test lock functionality from hass."""
with patch(
"zigpy.zcl.Cluster.request", return_value=mock_coro([zcl_f.Status.SUCCESS])
):
with patch("zigpy.zcl.Cluster.request", return_value=[zcl_f.Status.SUCCESS]):
# lock via UI
await hass.services.async_call(
LOCK_DOMAIN, "lock", {"entity_id": entity_id}, blocking=True

@@ -121,9 +117,7 @@ async def async_lock(hass, cluster, entity_id):

async def async_unlock(hass, cluster, entity_id):
"""Test lock functionality from hass."""
with patch(
"zigpy.zcl.Cluster.request", return_value=mock_coro([zcl_f.Status.SUCCESS])
):
with patch("zigpy.zcl.Cluster.request", return_value=[zcl_f.Status.SUCCESS]):
# lock via UI
await hass.services.async_call(
LOCK_DOMAIN, "unlock", {"entity_id": entity_id}, blocking=True

@@ -135,9 +129,7 @@ async def async_unlock(hass, cluster, entity_id):

async def async_set_user_code(hass, cluster, entity_id):
"""Test set lock code functionality from hass."""
with patch(
"zigpy.zcl.Cluster.request", return_value=mock_coro([zcl_f.Status.SUCCESS])
):
with patch("zigpy.zcl.Cluster.request", return_value=[zcl_f.Status.SUCCESS]):
# set lock code via service call
await hass.services.async_call(
"zha",

@@ -158,9 +150,7 @@ async def async_set_user_code(hass, cluster, entity_id):

async def async_clear_user_code(hass, cluster, entity_id):
"""Test clear lock code functionality from hass."""
with patch(
"zigpy.zcl.Cluster.request", return_value=mock_coro([zcl_f.Status.SUCCESS])
):
with patch("zigpy.zcl.Cluster.request", return_value=[zcl_f.Status.SUCCESS]):
# set lock code via service call
await hass.services.async_call(
"zha",

@@ -179,9 +169,7 @@ async def async_clear_user_code(hass, cluster, entity_id):

async def async_enable_user_code(hass, cluster, entity_id):
"""Test enable lock code functionality from hass."""
with patch(
"zigpy.zcl.Cluster.request", return_value=mock_coro([zcl_f.Status.SUCCESS])
):
with patch("zigpy.zcl.Cluster.request", return_value=[zcl_f.Status.SUCCESS]):
# set lock code via service call
await hass.services.async_call(
"zha",

@@ -201,9 +189,7 @@ async def async_enable_user_code(hass, cluster, entity_id):

async def async_disable_user_code(hass, cluster, entity_id):
"""Test disable lock code functionality from hass."""
with patch(
"zigpy.zcl.Cluster.request", return_value=mock_coro([zcl_f.Status.SUCCESS])
):
with patch("zigpy.zcl.Cluster.request", return_value=[zcl_f.Status.SUCCESS]):
# set lock code via service call
await hass.services.async_call(
"zha",
@@ -114,7 +114,7 @@ async def test_number(
assert "engineering_units" in attr_reads
assert "application_type" in attr_reads

entity_id = await find_entity_id(Platform.NUMBER, zha_device, hass)
entity_id = find_entity_id(Platform.NUMBER, zha_device, hass)
assert entity_id is not None

await async_enable_traffic(hass, [zha_device], enabled=False)

@@ -211,7 +211,7 @@ async def test_level_control_number(
}
zha_device = await zha_device_joined(light)

entity_id = await find_entity_id(
entity_id = find_entity_id(
Platform.NUMBER,
zha_device,
hass,

@@ -344,7 +344,7 @@ async def test_color_number(
}
zha_device = await zha_device_joined(light)

entity_id = await find_entity_id(
entity_id = find_entity_id(
Platform.NUMBER,
zha_device,
hass,

@@ -127,7 +127,7 @@ async def test_select(hass: HomeAssistant, siren) -> None:
entity_registry = er.async_get(hass)
zha_device, cluster = siren
assert cluster is not None
entity_id = await find_entity_id(
entity_id = find_entity_id(
Platform.SELECT,
zha_device,
hass,

@@ -194,7 +194,7 @@ async def test_select_restore_state(
zha_device = await zha_device_restored(zigpy_device)
cluster = zigpy_device.endpoints[1].ias_wd
assert cluster is not None
entity_id = await find_entity_id(
entity_id = find_entity_id(
Platform.SELECT,
zha_device,
hass,

@@ -219,7 +219,7 @@ async def test_on_off_select_new_join(
}
zha_device = await zha_device_joined(light)
select_name = "start_up_behavior"
entity_id = await find_entity_id(
entity_id = find_entity_id(
Platform.SELECT,
zha_device,
hass,

@@ -304,7 +304,7 @@ async def test_on_off_select_restored(
)

select_name = "start_up_behavior"
entity_id = await find_entity_id(
entity_id = find_entity_id(
Platform.SELECT,
zha_device,
hass,

@@ -331,7 +331,7 @@ async def test_on_off_select_unsupported(
on_off_cluster.add_unsupported_attribute("start_up_on_off")
zha_device = await zha_device_joined_restored(light)
select_name = general.OnOff.StartUpOnOff.__name__
entity_id = await find_entity_id(
entity_id = find_entity_id(
Platform.SELECT,
zha_device,
hass,

@@ -400,7 +400,7 @@ async def test_on_off_select_attribute_report(

zha_device = await zha_device_restored(zigpy_device_aqara_sensor)
cluster = zigpy_device_aqara_sensor.endpoints.get(1).opple_cluster
entity_id = await find_entity_id(Platform.SELECT, zha_device, hass)
entity_id = find_entity_id(Platform.SELECT, zha_device, hass)
assert entity_id is not None

# allow traffic to flow through the gateway and device
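Across these platform tests `find_entity_id` is now called without `await`; a lookup that only inspects already-available state has nothing to await. A hypothetical sketch of a synchronous helper in that spirit (the real helper lives in the tests' `common` module and may differ):

def find_entity_id(domain: str, device_name: str, entity_ids: list[str]) -> str | None:
    """Return the first entity id for the device in the given domain.

    Purely synchronous: it only filters an in-memory list of entity ids.
    """
    prefix = f"{domain}.{device_name}"
    return next((eid for eid in entity_ids if eid.startswith(prefix)), None)


# Illustrative usage with made-up entity ids.
entity_ids = ["select.kitchen_start_up_behavior", "light.kitchen"]
assert find_entity_id("select", "kitchen", entity_ids) == "select.kitchen_start_up_behavior"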
@@ -569,7 +569,7 @@ async def test_temp_uom(
)
cluster = zigpy_device.endpoints[1].temperature
zha_device = await zha_device_restored(zigpy_device)
entity_id = await find_entity_id(Platform.SENSOR, zha_device, hass)
entity_id = find_entity_id(Platform.SENSOR, zha_device, hass)

if not restore:
await async_enable_traffic(hass, [zha_device], enabled=False)

@@ -613,7 +613,7 @@ async def test_electrical_measurement_init(
)
cluster = zigpy_device.endpoints[1].in_clusters[cluster_id]
zha_device = await zha_device_joined(zigpy_device)
entity_id = await find_entity_id(
entity_id = find_entity_id(
Platform.SENSOR, zha_device, hass, qualifier="active_power"
)

@@ -69,7 +69,7 @@ async def test_siren(hass: HomeAssistant, siren) -> None:

zha_device, cluster = siren
assert cluster is not None
entity_id = await find_entity_id(Platform.SIREN, zha_device, hass)
entity_id = find_entity_id(Platform.SIREN, zha_device, hass)
assert entity_id is not None

assert hass.states.get(entity_id).state == STATE_OFF

@@ -34,8 +34,6 @@ from .common import (
)
from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_TYPE

from tests.common import mock_coro

ON = 1
OFF = 0
IEEE_GROUPABLE_DEVICE = "01:2d:6f:00:0a:90:69:e8"

@@ -138,7 +136,7 @@ async def test_switch(

zha_device = await zha_device_joined_restored(zigpy_device)
cluster = zigpy_device.endpoints.get(1).on_off
entity_id = await find_entity_id(Platform.SWITCH, zha_device, hass)
entity_id = find_entity_id(Platform.SWITCH, zha_device, hass)
assert entity_id is not None

assert hass.states.get(entity_id).state == STATE_OFF

@@ -163,7 +161,7 @@ async def test_switch(
# turn on from HA
with patch(
"zigpy.zcl.Cluster.request",
return_value=mock_coro([0x00, zcl_f.Status.SUCCESS]),
return_value=[0x00, zcl_f.Status.SUCCESS],
):
# turn on via UI
await hass.services.async_call(

@@ -176,14 +174,13 @@ async def test_switch(
cluster.commands_by_name["on"].schema,
expect_reply=True,
manufacturer=None,
tries=1,
tsn=None,
)

# turn off from HA
with patch(
"zigpy.zcl.Cluster.request",
return_value=mock_coro([0x01, zcl_f.Status.SUCCESS]),
return_value=[0x01, zcl_f.Status.SUCCESS],
):
# turn off via UI
await hass.services.async_call(

@@ -196,7 +193,6 @@ async def test_switch(
cluster.commands_by_name["off"].schema,
expect_reply=True,
manufacturer=None,
tries=1,
tsn=None,
)

@@ -313,7 +309,7 @@ async def test_zha_group_switch_entity(
# turn on from HA
with patch(
"zigpy.zcl.Cluster.request",
return_value=mock_coro([0x00, zcl_f.Status.SUCCESS]),
return_value=[0x00, zcl_f.Status.SUCCESS],
):
# turn on via UI
await hass.services.async_call(

@@ -326,7 +322,6 @@ async def test_zha_group_switch_entity(
group_cluster_on_off.commands_by_name["on"].schema,
expect_reply=True,
manufacturer=None,
tries=1,
tsn=None,
)
assert hass.states.get(entity_id).state == STATE_ON

@@ -334,7 +329,7 @@ async def test_zha_group_switch_entity(
# turn off from HA
with patch(
"zigpy.zcl.Cluster.request",
return_value=mock_coro([0x01, zcl_f.Status.SUCCESS]),
return_value=[0x01, zcl_f.Status.SUCCESS],
):
# turn off via UI
await hass.services.async_call(

@@ -347,7 +342,6 @@ async def test_zha_group_switch_entity(
group_cluster_on_off.commands_by_name["off"].schema,
expect_reply=True,
manufacturer=None,
tries=1,
tsn=None,
)
assert hass.states.get(entity_id).state == STATE_OFF

@@ -386,7 +380,7 @@ async def test_switch_configurable(

zha_device = await zha_device_joined_restored(zigpy_device_tuya)
cluster = zigpy_device_tuya.endpoints.get(1).tuya_manufacturer
entity_id = await find_entity_id(Platform.SWITCH, zha_device, hass)
entity_id = find_entity_id(Platform.SWITCH, zha_device, hass)
assert entity_id is not None

assert hass.states.get(entity_id).state == STATE_OFF

@@ -411,7 +405,7 @@ async def test_switch_configurable(
# turn on from HA
with patch(
"zigpy.zcl.Cluster.write_attributes",
return_value=mock_coro([zcl_f.Status.SUCCESS, zcl_f.Status.SUCCESS]),
return_value=[zcl_f.Status.SUCCESS, zcl_f.Status.SUCCESS],
):
# turn on via UI
await hass.services.async_call(

@@ -425,7 +419,7 @@ async def test_switch_configurable(
# turn off from HA
with patch(
"zigpy.zcl.Cluster.write_attributes",
return_value=mock_coro([zcl_f.Status.SUCCESS, zcl_f.Status.SUCCESS]),
return_value=[zcl_f.Status.SUCCESS, zcl_f.Status.SUCCESS],
):
# turn off via UI
await hass.services.async_call(