2023-01-02 05:20:59 +00:00
|
|
|
"""Test ZHA fan."""
|
2024-03-08 13:44:56 +00:00
|
|
|
|
2021-01-01 21:31:56 +00:00
|
|
|
from unittest.mock import AsyncMock, call, patch
|
|
|
|
|
2020-02-10 02:45:35 +00:00
|
|
|
import pytest
|
2022-06-29 15:44:40 +00:00
|
|
|
import zhaquirks.ikea.starkvind
|
2023-08-28 21:24:12 +00:00
|
|
|
from zigpy.device import Device
|
2021-02-17 18:03:11 +00:00
|
|
|
from zigpy.exceptions import ZigbeeException
|
2022-06-29 15:44:40 +00:00
|
|
|
from zigpy.profiles import zha
|
|
|
|
from zigpy.zcl.clusters import general, hvac
|
2020-12-05 23:24:49 +00:00
|
|
|
import zigpy.zcl.foundation as zcl_f
|
2019-10-21 17:14:17 +00:00
|
|
|
|
2020-03-27 02:19:48 +00:00
|
|
|
from homeassistant.components.fan import (
|
2021-02-17 18:03:11 +00:00
|
|
|
ATTR_PERCENTAGE,
|
2021-02-20 05:42:14 +00:00
|
|
|
ATTR_PERCENTAGE_STEP,
|
2021-02-17 18:03:11 +00:00
|
|
|
ATTR_PRESET_MODE,
|
2022-02-03 13:16:35 +00:00
|
|
|
DOMAIN as FAN_DOMAIN,
|
2022-03-09 09:38:12 +00:00
|
|
|
SERVICE_SET_PERCENTAGE,
|
2021-02-17 18:03:11 +00:00
|
|
|
SERVICE_SET_PRESET_MODE,
|
2021-02-20 05:42:14 +00:00
|
|
|
NotValidPresetModeError,
|
2020-03-27 02:19:48 +00:00
|
|
|
)
|
2023-08-28 21:24:12 +00:00
|
|
|
from homeassistant.components.zha.core.device import ZHADevice
|
2020-03-27 02:19:48 +00:00
|
|
|
from homeassistant.components.zha.core.discovery import GROUP_PROBE
|
2020-12-05 23:24:49 +00:00
|
|
|
from homeassistant.components.zha.core.group import GroupMember
|
2023-09-11 19:39:33 +00:00
|
|
|
from homeassistant.components.zha.core.helpers import get_zha_gateway
|
2021-02-17 18:03:11 +00:00
|
|
|
from homeassistant.components.zha.fan import (
|
|
|
|
PRESET_MODE_AUTO,
|
|
|
|
PRESET_MODE_ON,
|
|
|
|
PRESET_MODE_SMART,
|
|
|
|
)
|
2019-10-21 17:14:17 +00:00
|
|
|
from homeassistant.const import (
|
|
|
|
ATTR_ENTITY_ID,
|
|
|
|
SERVICE_TURN_OFF,
|
|
|
|
SERVICE_TURN_ON,
|
|
|
|
STATE_OFF,
|
|
|
|
STATE_ON,
|
|
|
|
STATE_UNAVAILABLE,
|
2021-12-11 16:06:39 +00:00
|
|
|
Platform,
|
2019-10-21 17:14:17 +00:00
|
|
|
)
|
2023-02-17 17:54:26 +00:00
|
|
|
from homeassistant.core import HomeAssistant
|
2023-08-28 21:24:12 +00:00
|
|
|
from homeassistant.exceptions import HomeAssistantError
|
2020-12-05 23:24:49 +00:00
|
|
|
from homeassistant.setup import async_setup_component
|
2019-10-21 17:14:17 +00:00
|
|
|
|
2019-02-04 11:51:32 +00:00
|
|
|
from .common import (
|
2019-10-21 17:14:17 +00:00
|
|
|
async_enable_traffic,
|
2020-03-27 02:19:48 +00:00
|
|
|
async_find_group_entity_id,
|
2020-02-12 21:12:14 +00:00
|
|
|
async_test_rejoin,
|
2022-09-19 07:51:31 +00:00
|
|
|
async_wait_for_updates,
|
2019-10-31 16:31:06 +00:00
|
|
|
find_entity_id,
|
2020-03-13 23:17:50 +00:00
|
|
|
send_attributes_report,
|
2019-02-04 11:51:32 +00:00
|
|
|
)
|
2021-09-06 23:00:06 +00:00
|
|
|
from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE
|
2019-02-04 11:51:32 +00:00
|
|
|
|
2020-03-27 02:19:48 +00:00
|
|
|
# IEEE addresses for the two groupable fan devices used by the ZHA-group tests.
# Distinct addresses let the group tests tell the two members apart.
IEEE_GROUPABLE_DEVICE = "01:2d:6f:00:0a:90:69:e8"
IEEE_GROUPABLE_DEVICE2 = "02:2d:6f:00:0a:90:69:e8"
|
|
|
|
|
2019-02-04 11:51:32 +00:00
|
|
|
|
2022-06-17 16:41:10 +00:00
|
|
|
@pytest.fixture(autouse=True)
def fan_platform_only():
    """Only set up the fan and required base platforms to speed up tests."""
    # Restricting ZHA's PLATFORMS keeps each test from loading every platform.
    required_platforms = (
        Platform.BUTTON,
        Platform.BINARY_SENSOR,
        Platform.FAN,
        Platform.LIGHT,
        Platform.DEVICE_TRACKER,
        Platform.NUMBER,
        Platform.SENSOR,
        Platform.SELECT,
        Platform.SWITCH,
    )
    with patch("homeassistant.components.zha.PLATFORMS", required_platforms):
        yield
|
|
|
|
|
|
|
|
|
2020-02-10 02:45:35 +00:00
|
|
|
@pytest.fixture
def zigpy_device(zigpy_device_mock):
    """Fan zigpy device."""
    # Single endpoint exposing only the HVAC Fan input cluster.
    return zigpy_device_mock(
        {
            1: {
                SIG_EP_INPUT: [hvac.Fan.cluster_id],
                SIG_EP_OUTPUT: [],
                SIG_EP_TYPE: zha.DeviceType.ON_OFF_SWITCH,
                SIG_EP_PROFILE: zha.PROFILE_ID,
            }
        },
        node_descriptor=b"\x02@\x8c\x02\x10RR\x00\x00\x00R\x00\x00",
    )
|
2019-02-04 11:51:32 +00:00
|
|
|
|
|
|
|
|
2020-03-27 02:19:48 +00:00
|
|
|
@pytest.fixture
async def coordinator(hass, zigpy_device_mock, zha_device_joined):
    """Coordinator ZHA device (nwk 0x0000) for the group tests."""
    # Renamed local to avoid shadowing the module-level `zigpy_device` fixture.
    zigpy_dev = zigpy_device_mock(
        {
            1: {
                SIG_EP_INPUT: [general.Groups.cluster_id],
                SIG_EP_OUTPUT: [],
                SIG_EP_TYPE: zha.DeviceType.COLOR_DIMMABLE_LIGHT,
                SIG_EP_PROFILE: zha.PROFILE_ID,
            }
        },
        ieee="00:15:8d:00:02:32:4f:32",
        nwk=0x0000,
        node_descriptor=b"\xf8\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff",
    )
    zha_device = await zha_device_joined(zigpy_dev)
    zha_device.available = True
    return zha_device
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.fixture
async def device_fan_1(hass, zigpy_device_mock, zha_device_joined):
    """First groupable ZHA fan device (Groups + OnOff + Fan clusters)."""
    zigpy_dev = zigpy_device_mock(
        {
            1: {
                SIG_EP_INPUT: [
                    general.Groups.cluster_id,
                    general.OnOff.cluster_id,
                    hvac.Fan.cluster_id,
                ],
                SIG_EP_OUTPUT: [],
                SIG_EP_TYPE: zha.DeviceType.ON_OFF_LIGHT,
                SIG_EP_PROFILE: zha.PROFILE_ID,
            },
        },
        ieee=IEEE_GROUPABLE_DEVICE,
    )
    zha_device = await zha_device_joined(zigpy_dev)
    zha_device.available = True
    await hass.async_block_till_done()
    return zha_device
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.fixture
async def device_fan_2(hass, zigpy_device_mock, zha_device_joined):
    """Second groupable ZHA fan device (adds a LevelControl cluster)."""
    zigpy_dev = zigpy_device_mock(
        {
            1: {
                SIG_EP_INPUT: [
                    general.Groups.cluster_id,
                    general.OnOff.cluster_id,
                    hvac.Fan.cluster_id,
                    general.LevelControl.cluster_id,
                ],
                SIG_EP_OUTPUT: [],
                SIG_EP_TYPE: zha.DeviceType.ON_OFF_LIGHT,
                SIG_EP_PROFILE: zha.PROFILE_ID,
            },
        },
        ieee=IEEE_GROUPABLE_DEVICE2,
    )
    zha_device = await zha_device_joined(zigpy_dev)
    zha_device.available = True
    await hass.async_block_till_done()
    return zha_device
|
|
|
|
|
|
|
|
|
2023-02-17 17:54:26 +00:00
|
|
|
async def test_fan(
    hass: HomeAssistant, zha_device_joined_restored, zigpy_device
) -> None:
    """Test ZHA fan platform.

    Exercises the full round trip: device-side attribute reports drive HA
    state, and HA service calls result in fan_mode cluster writes.
    """
    zha_device = await zha_device_joined_restored(zigpy_device)
    cluster = zigpy_device.endpoints.get(1).fan
    entity_id = find_entity_id(Platform.FAN, zha_device, hass)
    assert entity_id is not None

    assert hass.states.get(entity_id).state == STATE_OFF
    await async_enable_traffic(hass, [zha_device], enabled=False)
    # test that the fan was created and that it is unavailable
    assert hass.states.get(entity_id).state == STATE_UNAVAILABLE

    # allow traffic to flow through the gateway and device
    await async_enable_traffic(hass, [zha_device])

    # test that the state has changed from unavailable to off
    assert hass.states.get(entity_id).state == STATE_OFF

    # turn on at fan: attribute 0 is fan_mode; 1 reports "on"
    await send_attributes_report(hass, cluster, {1: 2, 0: 1, 2: 3})
    assert hass.states.get(entity_id).state == STATE_ON

    # turn off at fan: fan_mode 0 reports "off"
    await send_attributes_report(hass, cluster, {1: 1, 0: 0, 2: 2})
    assert hass.states.get(entity_id).state == STATE_OFF

    # turn on from HA — default turn_on writes fan_mode 2 (medium)
    cluster.write_attributes.reset_mock()
    await async_turn_on(hass, entity_id)
    assert cluster.write_attributes.mock_calls == [
        call({"fan_mode": 2}, manufacturer=None)
    ]

    # turn off from HA — writes fan_mode 0
    cluster.write_attributes.reset_mock()
    await async_turn_off(hass, entity_id)
    assert cluster.write_attributes.mock_calls == [
        call({"fan_mode": 0}, manufacturer=None)
    ]

    # change speed from HA — 100% maps to fan_mode 3 (high)
    cluster.write_attributes.reset_mock()
    await async_set_percentage(hass, entity_id, percentage=100)
    assert cluster.write_attributes.mock_calls == [
        call({"fan_mode": 3}, manufacturer=None)
    ]

    # change preset_mode from HA — PRESET_MODE_ON maps to fan_mode 4
    cluster.write_attributes.reset_mock()
    await async_set_preset_mode(hass, entity_id, preset_mode=PRESET_MODE_ON)
    assert cluster.write_attributes.mock_calls == [
        call({"fan_mode": 4}, manufacturer=None)
    ]

    # set invalid preset_mode from HA — must raise and write nothing
    cluster.write_attributes.reset_mock()
    with pytest.raises(NotValidPresetModeError) as exc:
        await async_set_preset_mode(
            hass, entity_id, preset_mode="invalid does not exist"
        )
    assert exc.value.translation_key == "not_valid_preset_mode"
    assert len(cluster.write_attributes.mock_calls) == 0

    # test adding new fan to the network and HA
    await async_test_rejoin(hass, zigpy_device, [cluster], (1,))
|
2019-02-04 11:51:32 +00:00
|
|
|
|
|
|
|
|
2022-03-09 09:38:12 +00:00
|
|
|
async def async_turn_on(hass, entity_id, percentage=None):
    """Turn fan on."""
    # Build the service payload, omitting any argument left as None.
    data = {}
    if entity_id is not None:
        data[ATTR_ENTITY_ID] = entity_id
    if percentage is not None:
        data[ATTR_PERCENTAGE] = percentage

    await hass.services.async_call(Platform.FAN, SERVICE_TURN_ON, data, blocking=True)
|
2019-02-04 11:51:32 +00:00
|
|
|
|
|
|
|
|
|
|
|
async def async_turn_off(hass, entity_id):
    """Turn fan off."""
    # Target a specific entity when given; otherwise call the service bare.
    if entity_id:
        data = {ATTR_ENTITY_ID: entity_id}
    else:
        data = {}

    await hass.services.async_call(Platform.FAN, SERVICE_TURN_OFF, data, blocking=True)
|
2019-02-04 11:51:32 +00:00
|
|
|
|
|
|
|
|
2022-03-09 09:38:12 +00:00
|
|
|
async def async_set_percentage(hass, entity_id, percentage=None):
    """Set percentage for specified fan."""
    # Build the service payload, omitting any argument left as None.
    data = {}
    if entity_id is not None:
        data[ATTR_ENTITY_ID] = entity_id
    if percentage is not None:
        data[ATTR_PERCENTAGE] = percentage

    await hass.services.async_call(
        Platform.FAN, SERVICE_SET_PERCENTAGE, data, blocking=True
    )
|
2020-03-27 02:19:48 +00:00
|
|
|
|
|
|
|
|
2021-02-17 18:03:11 +00:00
|
|
|
async def async_set_preset_mode(hass, entity_id, preset_mode=None):
    """Set preset_mode for specified fan."""
    # Build the service payload, omitting any argument left as None.
    data = {}
    if entity_id is not None:
        data[ATTR_ENTITY_ID] = entity_id
    if preset_mode is not None:
        data[ATTR_PRESET_MODE] = preset_mode

    await hass.services.async_call(
        FAN_DOMAIN, SERVICE_SET_PRESET_MODE, data, blocking=True
    )
|
2021-02-17 18:03:11 +00:00
|
|
|
|
|
|
|
|
2020-12-05 23:24:49 +00:00
|
|
|
@patch(
    "zigpy.zcl.clusters.hvac.Fan.write_attributes",
    new=AsyncMock(return_value=zcl_f.WriteAttributesResponse.deserialize(b"\x00")[0]),
)
@patch(
    "homeassistant.components.zha.entity.DEFAULT_UPDATE_GROUP_FROM_CHILD_DELAY",
    new=0,
)
async def test_zha_group_fan_entity(
    hass: HomeAssistant, device_fan_1, device_fan_2, coordinator
) -> None:
    """Test the fan entity for a ZHA group.

    Fan.write_attributes is mocked to always succeed, and the
    child-to-group update delay is zeroed so group state recomputes
    immediately after member reports.
    """
    zha_gateway = get_zha_gateway(hass)
    assert zha_gateway is not None
    # Wire the gateway/coordinator references the group machinery expects.
    zha_gateway.coordinator_zha_device = coordinator
    coordinator._zha_gateway = zha_gateway
    device_fan_1._zha_gateway = zha_gateway
    device_fan_2._zha_gateway = zha_gateway
    member_ieee_addresses = [device_fan_1.ieee, device_fan_2.ieee]
    members = [GroupMember(device_fan_1.ieee, 1), GroupMember(device_fan_2.ieee, 1)]

    # test creating a group with 2 members
    zha_group = await zha_gateway.async_create_zigpy_group("Test Group", members)
    await hass.async_block_till_done()

    assert zha_group is not None
    assert len(zha_group.members) == 2
    for member in zha_group.members:
        assert member.device.ieee in member_ieee_addresses
        assert member.group == zha_group
        assert member.endpoint is not None

    # Members expose OnOff + Fan, so both light and fan domains are probed.
    entity_domains = GROUP_PROBE.determine_entity_domains(hass, zha_group)
    assert len(entity_domains) == 2

    assert Platform.LIGHT in entity_domains
    assert Platform.FAN in entity_domains

    entity_id = async_find_group_entity_id(hass, Platform.FAN, zha_group)
    assert hass.states.get(entity_id) is not None

    group_fan_cluster = zha_group.endpoint[hvac.Fan.cluster_id]

    dev1_fan_cluster = device_fan_1.device.endpoints[1].fan
    dev2_fan_cluster = device_fan_2.device.endpoints[1].fan

    await async_enable_traffic(hass, [device_fan_1, device_fan_2], enabled=False)
    await async_wait_for_updates(hass)
    # test that the fans were created and that they are unavailable
    assert hass.states.get(entity_id).state == STATE_UNAVAILABLE

    # allow traffic to flow through the gateway and device
    await async_enable_traffic(hass, [device_fan_1, device_fan_2])
    await async_wait_for_updates(hass)
    # test that the fan group entity was created and is off
    assert hass.states.get(entity_id).state == STATE_OFF

    # turn on from HA — group cluster gets a single fan_mode 2 write
    group_fan_cluster.write_attributes.reset_mock()
    await async_turn_on(hass, entity_id)
    await hass.async_block_till_done()
    assert len(group_fan_cluster.write_attributes.mock_calls) == 1
    assert group_fan_cluster.write_attributes.call_args[0][0] == {"fan_mode": 2}

    # turn off from HA
    group_fan_cluster.write_attributes.reset_mock()
    await async_turn_off(hass, entity_id)
    assert len(group_fan_cluster.write_attributes.mock_calls) == 1
    assert group_fan_cluster.write_attributes.call_args[0][0] == {"fan_mode": 0}

    # change speed from HA — 100% maps to fan_mode 3
    group_fan_cluster.write_attributes.reset_mock()
    await async_set_percentage(hass, entity_id, percentage=100)
    assert len(group_fan_cluster.write_attributes.mock_calls) == 1
    assert group_fan_cluster.write_attributes.call_args[0][0] == {"fan_mode": 3}

    # change preset mode from HA — PRESET_MODE_ON -> fan_mode 4
    group_fan_cluster.write_attributes.reset_mock()
    await async_set_preset_mode(hass, entity_id, preset_mode=PRESET_MODE_ON)
    assert len(group_fan_cluster.write_attributes.mock_calls) == 1
    assert group_fan_cluster.write_attributes.call_args[0][0] == {"fan_mode": 4}

    # change preset mode from HA — PRESET_MODE_AUTO -> fan_mode 5
    group_fan_cluster.write_attributes.reset_mock()
    await async_set_preset_mode(hass, entity_id, preset_mode=PRESET_MODE_AUTO)
    assert len(group_fan_cluster.write_attributes.mock_calls) == 1
    assert group_fan_cluster.write_attributes.call_args[0][0] == {"fan_mode": 5}

    # change preset mode from HA — PRESET_MODE_SMART -> fan_mode 6
    group_fan_cluster.write_attributes.reset_mock()
    await async_set_preset_mode(hass, entity_id, preset_mode=PRESET_MODE_SMART)
    assert len(group_fan_cluster.write_attributes.mock_calls) == 1
    assert group_fan_cluster.write_attributes.call_args[0][0] == {"fan_mode": 6}

    # test some of the group logic to make sure we key off states correctly
    await send_attributes_report(hass, dev1_fan_cluster, {0: 0})
    await send_attributes_report(hass, dev2_fan_cluster, {0: 0})
    await hass.async_block_till_done()

    # test that group fan is off
    assert hass.states.get(entity_id).state == STATE_OFF

    # one member reporting fan_mode 2 is enough to flip the group on
    await send_attributes_report(hass, dev2_fan_cluster, {0: 2})
    await async_wait_for_updates(hass)

    # test that group fan is speed medium
    assert hass.states.get(entity_id).state == STATE_ON

    await send_attributes_report(hass, dev2_fan_cluster, {0: 0})
    await async_wait_for_updates(hass)

    # test that group fan is now off
    assert hass.states.get(entity_id).state == STATE_OFF
|
2020-12-05 23:24:49 +00:00
|
|
|
|
|
|
|
|
2021-02-17 18:03:11 +00:00
|
|
|
@patch(
|
|
|
|
"zigpy.zcl.clusters.hvac.Fan.write_attributes",
|
|
|
|
new=AsyncMock(side_effect=ZigbeeException),
|
|
|
|
)
|
2021-03-16 21:38:16 +00:00
|
|
|
@patch(
|
Implement "group members assume state" option for ZHA (#84938)
* Initial "group members assume state" implementation for ZHA
* Remove left-over debug flag (where polling was disabled)
* Implement _send_member_assume_state_event() method and also use after turn_off
* Only assume updated arguments from service call to group
* Make code more readable and change checks slightly
* Move "send member assume state" events to LightGroup on/off calls
* Include new config option in tests
* Check that member is available before updating to assumed state
* Lower "update group from child delay" for debouncer to basically 0 when using assumed member state
* Allow "child to group" updates regardless of config option
This is not needed, as group members will not update their state, as long as they're transitioning. (If a group transitions, it also sets its members to transitioning mode)
This fixes multiple issues. Previously, the state of a group was completely wrong when:
- turn on group with 10 second transition
- turn on members individually
- turn off members individually
- group state would not update correctly
* Move "default update group from child delay" constant
* Change to new constant name in test
* Also update fan test to new constant name
* Decrease "update group from child delay" to 10ms
In my testing, 0.0 also works without any issues and correctly de-bounces child updates when using the "assume state option".
This is just for avoiding multiple state changes when changing the group -> children issue individual updates.
With 2 children in a group and delay 0, both child updates only cause one group re-calculation and state change.
0.01 (10ms) should be plenty for very slow systems to de-bounce the update (and in the worst case, it'll cause just another state change but nothing breaks)
* Also implement "assuming state" for effect
Not sure if anybody even uses this, but this one is a bit special because the effect is always deactivated if it's not provided in the light.turn_on call.
* Move shortened delay for "assuming members" to a constant
* Add basic test to verify that group members assume on/off state
* Move _assume_group_state function declaration out of async_added_to_hass
* Fix rare edge-case when rapidly toggling lights and light groups at the same time
This prevents an issue where either the group transition would unset the transition flag or the single light would unset the group transition status midst-transition.
Note: When a new individual transition is started, we want to unset the group flag, as we actually cancel that transition.
* Check that effect list exists, add return type
* Re-trigger CI due to timeout
* Increase ASSUME_UPDATE_GROUP_FROM_CHILD_DELAY slightly
The debouncer is used when updating group member states either by assuming them (in which case we want to barely have any delay), or between the time we get the results back from polling (where we want a slightly longer time).
As it's not easily possible to distinguish if a group member was updated via assuming the state of the group or by the polling that follows, 50 ms seems to be a good middle point.
* Add debug print for when updating group state
* Fix issues with "off brightness" when switching between group/members
This fixes a bunch of issues with "off brightness" and passes it down to the members correctly.
For example, if a light group is turned off with a transition (so bulbs get their level set to 1), this will also set the "off brightness" of all individual bulbs to the last level that they were at.
(It really fixes a lot of issues when using the "member assume group state" option. It's not really possible to fix them without that.)
Furthermore, issues where polling was previously needed to get the correct state after "playing with transitions", should now get be resolved and get correct state when using the "members assume group state" option.
Note: The only case which still can't be fixed is the following:
If individual lights have off_with_transition set, but not the group, and the group is then turned on without a level, individual lights might fall back to brightness level 1 (<- at least now shows correctly in UI even before polling).
Since all lights might need different brightness levels to be turned on, we can't use one group call. But making individual calls when turning on a ZHA group would cause a lot of traffic and thereby be counter-productive.
In this case, light.turn_on should just be called with a level (or individual calls to the lights should be made).
Another thing that was changed is to reset off_with_transition/off_brightness for a LightGroup when a member is turned on (even if the LightGroup wasn't turned on using its turn_on method).
off_with_transition/off_brightness for individual bulbs is now also turned off when a light is detected to be on during polling.
Lastly, the waiting for polled attributes could previously cause "invalid state" to be set (so mid-transition levels).
This could happen when group and members are repeatedly toggled at similar times. These "invalid states" could cause wrong "off brightness" levels if transitions are also used.
To fix this, we check after waiting for the polled attributes in async_get_state to see if a transition has started in the meanwhile. If so, the values can be discarded. A new poll will happen later and if using the "members assume group state" config option, the values should already be correct before the polling.
* Enable "group members assume state" config option by default
The config tests are also updated to expect the config option be enabled by default.
For all tests, the config option is generally disabled though:
There are only two group related tests. The one that tests this new feature overrides the config option to be enabled anyway.
The other tests works in a similar way but also "sends" attribute reports, so we want to disable the feature for that test.
(It would also run with it enabled (if the correct CHILD_UPDATE value is patched), but then it would test the same stuff as the other test, hence we're disabling the config option for that test.)
2023-01-16 15:48:18 +00:00
|
|
|
"homeassistant.components.zha.entity.DEFAULT_UPDATE_GROUP_FROM_CHILD_DELAY",
|
2021-03-16 21:38:16 +00:00
|
|
|
new=0,
|
|
|
|
)
|
2021-02-17 18:03:11 +00:00
|
|
|
async def test_zha_group_fan_entity_failure_state(
    hass: HomeAssistant,
    device_fan_1,
    device_fan_2,
    coordinator,
    caplog: pytest.LogCaptureFixture,
) -> None:
    """Test the fan entity for a ZHA group when writing attributes generates an exception.

    Builds a two-member ZHA group, then turns the group fan on from HA and
    asserts the failure surfaces as a HomeAssistantError after exactly one
    write_attributes attempt on the group cluster.
    """
    zha_gateway = get_zha_gateway(hass)
    assert zha_gateway is not None
    # Wire the coordinator and both member devices to the test gateway so the
    # group creation below can resolve them.
    zha_gateway.coordinator_zha_device = coordinator
    coordinator._zha_gateway = zha_gateway
    device_fan_1._zha_gateway = zha_gateway
    device_fan_2._zha_gateway = zha_gateway
    member_ieee_addresses = [device_fan_1.ieee, device_fan_2.ieee]
    # Both members join the group on endpoint 1.
    members = [GroupMember(device_fan_1.ieee, 1), GroupMember(device_fan_2.ieee, 1)]

    # test creating a group with 2 members
    zha_group = await zha_gateway.async_create_zigpy_group("Test Group", members)
    await hass.async_block_till_done()

    assert zha_group is not None
    assert len(zha_group.members) == 2
    for member in zha_group.members:
        assert member.device.ieee in member_ieee_addresses
        assert member.group == zha_group
        assert member.endpoint is not None

    # The group probe should discover exactly two platforms for these members.
    entity_domains = GROUP_PROBE.determine_entity_domains(hass, zha_group)
    assert len(entity_domains) == 2

    assert Platform.LIGHT in entity_domains
    assert Platform.FAN in entity_domains

    entity_id = async_find_group_entity_id(hass, Platform.FAN, zha_group)
    assert hass.states.get(entity_id) is not None

    # Group-level Fan cluster; the mock on its write_attributes is inspected
    # at the end of the test.
    group_fan_cluster = zha_group.endpoint[hvac.Fan.cluster_id]

    await async_enable_traffic(hass, [device_fan_1, device_fan_2], enabled=False)
    await async_wait_for_updates(hass)
    # test that the fans were created and that they are unavailable
    assert hass.states.get(entity_id).state == STATE_UNAVAILABLE

    # allow traffic to flow through the gateway and device
    await async_enable_traffic(hass, [device_fan_1, device_fan_2])
    await async_wait_for_updates(hass)
    # test that the fan group entity was created and is off
    assert hass.states.get(entity_id).state == STATE_OFF

    # turn on from HA
    group_fan_cluster.write_attributes.reset_mock()

    # NOTE(review): the write failure is presumably injected by a fixture or an
    # enclosing patch defined above this function -- confirm in full context.
    with pytest.raises(HomeAssistantError):
        await async_turn_on(hass, entity_id)

    await hass.async_block_till_done()
    # Exactly one write was attempted, and turn_on wrote fan_mode 2.
    assert len(group_fan_cluster.write_attributes.mock_calls) == 1
    assert group_fan_cluster.write_attributes.call_args[0][0] == {"fan_mode": 2}
2020-12-05 23:24:49 +00:00
|
|
|
@pytest.mark.parametrize(
    ("plug_read", "expected_state", "expected_percentage"),
    [
        (None, STATE_OFF, None),
        ({"fan_mode": 0}, STATE_OFF, 0),
        ({"fan_mode": 1}, STATE_ON, 33),
        ({"fan_mode": 2}, STATE_ON, 66),
        ({"fan_mode": 3}, STATE_ON, 100),
    ],
)
async def test_fan_init(
    hass: HomeAssistant,
    zha_device_joined_restored,
    zigpy_device,
    plug_read,
    expected_state,
    expected_percentage,
) -> None:
    """Test ZHA fan platform."""

    # Seed the Fan cluster with the attribute value that will be "read"
    # while the device joins/restores.
    fan_cluster = zigpy_device.endpoints.get(1).fan
    fan_cluster.PLUGGED_ATTR_READS = plug_read

    zha_device = await zha_device_joined_restored(zigpy_device)
    entity_id = find_entity_id(Platform.FAN, zha_device, hass)
    assert entity_id is not None

    # The entity state and percentage must reflect the plugged fan_mode;
    # no preset mode is expected for a plain fan.
    state = hass.states.get(entity_id)
    assert state.state == expected_state
    assert state.attributes[ATTR_PERCENTAGE] == expected_percentage
    assert state.attributes[ATTR_PRESET_MODE] is None
|
|
async def test_fan_update_entity(
    hass: HomeAssistant,
    zha_device_joined_restored,
    zigpy_device,
) -> None:
    """Test ZHA fan platform.

    Verifies that homeassistant.update_entity re-reads the fan_mode attribute
    from the Fan cluster and that the entity state follows the new value.
    """

    cluster = zigpy_device.endpoints.get(1).fan
    # Initial "read" result: fan is off.
    cluster.PLUGGED_ATTR_READS = {"fan_mode": 0}

    zha_device = await zha_device_joined_restored(zigpy_device)
    entity_id = find_entity_id(Platform.FAN, zha_device, hass)
    assert entity_id is not None
    assert hass.states.get(entity_id).state == STATE_OFF
    assert hass.states.get(entity_id).attributes[ATTR_PERCENTAGE] == 0
    assert hass.states.get(entity_id).attributes[ATTR_PRESET_MODE] is None
    # Three discrete speeds -> one step is a third of the range.
    assert hass.states.get(entity_id).attributes[ATTR_PERCENTAGE_STEP] == 100 / 3
    # A restored device performs extra attribute reads compared to a freshly
    # joined one, hence the two expected counts.
    if zha_device_joined_restored.name == "zha_device_joined":
        assert cluster.read_attributes.await_count == 2
    else:
        assert cluster.read_attributes.await_count == 4

    await async_setup_component(hass, "homeassistant", {})
    await hass.async_block_till_done()

    # update_entity triggers exactly one additional attribute read.
    await hass.services.async_call(
        "homeassistant", "update_entity", {"entity_id": entity_id}, blocking=True
    )
    assert hass.states.get(entity_id).state == STATE_OFF
    if zha_device_joined_restored.name == "zha_device_joined":
        assert cluster.read_attributes.await_count == 3
    else:
        assert cluster.read_attributes.await_count == 5

    # Change what the cluster will report and refresh again: the entity must
    # now show speed 1 (33%).
    cluster.PLUGGED_ATTR_READS = {"fan_mode": 1}
    await hass.services.async_call(
        "homeassistant", "update_entity", {"entity_id": entity_id}, blocking=True
    )
    assert hass.states.get(entity_id).state == STATE_ON
    assert hass.states.get(entity_id).attributes[ATTR_PERCENTAGE] == 33
    assert hass.states.get(entity_id).attributes[ATTR_PRESET_MODE] is None
    assert hass.states.get(entity_id).attributes[ATTR_PERCENTAGE_STEP] == 100 / 3
    if zha_device_joined_restored.name == "zha_device_joined":
        assert cluster.read_attributes.await_count == 4
    else:
        assert cluster.read_attributes.await_count == 6
|
|
@pytest.fixture
def zigpy_device_ikea(zigpy_device_mock):
    """Ikea fan zigpy device."""
    # Single endpoint with the basic clusters plus cluster id 64637 (0xFC7D),
    # presumably the IKEA manufacturer-specific air-purifier cluster that the
    # STARKVIND quirk exposes as ``ikea_airpurifier``.
    endpoints = {
        1: {
            SIG_EP_INPUT: [
                general.Basic.cluster_id,
                general.Identify.cluster_id,
                general.Groups.cluster_id,
                general.Scenes.cluster_id,
                64637,
            ],
            SIG_EP_OUTPUT: [],
            SIG_EP_TYPE: zha.DeviceType.COMBINED_INTERFACE,
            SIG_EP_PROFILE: zha.PROFILE_ID,
        },
    }
    return zigpy_device_mock(
        endpoints,
        manufacturer="IKEA of Sweden",
        model="STARKVIND Air purifier",
        quirk=zhaquirks.ikea.starkvind.IkeaSTARKVIND,
        node_descriptor=b"\x02@\x8c\x02\x10RR\x00\x00\x00R\x00\x00",
    )
2023-02-17 17:54:26 +00:00
|
|
|
async def test_fan_ikea(
    hass: HomeAssistant,
    zha_device_joined_restored: ZHADevice,
    zigpy_device_ikea: Device,
) -> None:
    """Test ZHA fan Ikea platform.

    Exercises availability, attribute reports from the device, and HA service
    calls (turn on/off, percentage, preset mode) against the STARKVIND quirk's
    ``ikea_airpurifier`` cluster.
    """
    zha_device = await zha_device_joined_restored(zigpy_device_ikea)
    cluster = zigpy_device_ikea.endpoints.get(1).ikea_airpurifier
    entity_id = find_entity_id(Platform.FAN, zha_device, hass)
    assert entity_id is not None

    assert hass.states.get(entity_id).state == STATE_OFF
    await async_enable_traffic(hass, [zha_device], enabled=False)
    # test that the fan was created and that it is unavailable
    assert hass.states.get(entity_id).state == STATE_UNAVAILABLE

    # allow traffic to flow through the gateway and device
    await async_enable_traffic(hass, [zha_device])

    # test that the state has changed from unavailable to off
    assert hass.states.get(entity_id).state == STATE_OFF

    # turn on at fan (attribute report: attribute 6 -> 1)
    await send_attributes_report(hass, cluster, {6: 1})
    assert hass.states.get(entity_id).state == STATE_ON

    # turn off at fan (attribute report: attribute 6 -> 0)
    await send_attributes_report(hass, cluster, {6: 0})
    assert hass.states.get(entity_id).state == STATE_OFF

    # turn on from HA -- writes fan_mode 1
    cluster.write_attributes.reset_mock()
    await async_turn_on(hass, entity_id)
    assert cluster.write_attributes.mock_calls == [
        call({"fan_mode": 1}, manufacturer=None)
    ]

    # turn off from HA -- writes fan_mode 0
    cluster.write_attributes.reset_mock()
    await async_turn_off(hass, entity_id)
    assert cluster.write_attributes.mock_calls == [
        call({"fan_mode": 0}, manufacturer=None)
    ]

    # change speed from HA -- 100% maps to fan_mode 10 on this device
    cluster.write_attributes.reset_mock()
    await async_set_percentage(hass, entity_id, percentage=100)
    assert cluster.write_attributes.mock_calls == [
        call({"fan_mode": 10}, manufacturer=None)
    ]

    # change preset_mode from HA -- auto maps to fan_mode 1
    cluster.write_attributes.reset_mock()
    await async_set_preset_mode(hass, entity_id, preset_mode=PRESET_MODE_AUTO)
    assert cluster.write_attributes.mock_calls == [
        call({"fan_mode": 1}, manufacturer=None)
    ]

    # set invalid preset_mode from HA -- must raise and write nothing
    cluster.write_attributes.reset_mock()
    with pytest.raises(NotValidPresetModeError) as exc:
        await async_set_preset_mode(
            hass, entity_id, preset_mode="invalid does not exist"
        )
    assert exc.value.translation_key == "not_valid_preset_mode"
    assert len(cluster.write_attributes.mock_calls) == 0

    # test adding new fan to the network and HA
    await async_test_rejoin(hass, zigpy_device_ikea, [cluster], (9,))
|
@pytest.mark.parametrize(
    (
        "ikea_plug_read",
        "ikea_expected_state",
        "ikea_expected_percentage",
        "ikea_preset_mode",
    ),
    [
        (None, STATE_OFF, None, None),
        ({"fan_mode": 0}, STATE_OFF, 0, None),
        ({"fan_mode": 1}, STATE_ON, 10, PRESET_MODE_AUTO),
        ({"fan_mode": 10}, STATE_ON, 20, "Speed 1"),
        ({"fan_mode": 15}, STATE_ON, 30, "Speed 1.5"),
        ({"fan_mode": 20}, STATE_ON, 40, "Speed 2"),
        ({"fan_mode": 25}, STATE_ON, 50, "Speed 2.5"),
        ({"fan_mode": 30}, STATE_ON, 60, "Speed 3"),
        ({"fan_mode": 35}, STATE_ON, 70, "Speed 3.5"),
        ({"fan_mode": 40}, STATE_ON, 80, "Speed 4"),
        ({"fan_mode": 45}, STATE_ON, 90, "Speed 4.5"),
        ({"fan_mode": 50}, STATE_ON, 100, "Speed 5"),
    ],
)
async def test_fan_ikea_init(
    hass: HomeAssistant,
    zha_device_joined_restored,
    zigpy_device_ikea,
    ikea_plug_read,
    ikea_expected_state,
    ikea_expected_percentage,
    ikea_preset_mode,
) -> None:
    """Test ZHA fan platform."""
    # Seed the quirk's air-purifier cluster with the fan_mode that will be
    # "read" while the device joins/restores.
    purifier_cluster = zigpy_device_ikea.endpoints.get(1).ikea_airpurifier
    purifier_cluster.PLUGGED_ATTR_READS = ikea_plug_read

    zha_device = await zha_device_joined_restored(zigpy_device_ikea)
    entity_id = find_entity_id(Platform.FAN, zha_device, hass)
    assert entity_id is not None

    # State, percentage and preset must all reflect the plugged fan_mode.
    state = hass.states.get(entity_id)
    assert state.state == ikea_expected_state
    assert state.attributes[ATTR_PERCENTAGE] == ikea_expected_percentage
    assert state.attributes[ATTR_PRESET_MODE] == ikea_preset_mode
|
|
async def test_fan_ikea_update_entity(
    hass: HomeAssistant,
    zha_device_joined_restored,
    zigpy_device_ikea,
) -> None:
    """Test ZHA fan platform.

    Verifies that homeassistant.update_entity re-reads fan_mode from the IKEA
    air-purifier cluster and that the entity follows the new value.
    """
    cluster = zigpy_device_ikea.endpoints.get(1).ikea_airpurifier
    # Initial "read" result: fan is off.
    cluster.PLUGGED_ATTR_READS = {"fan_mode": 0}

    zha_device = await zha_device_joined_restored(zigpy_device_ikea)
    entity_id = find_entity_id(Platform.FAN, zha_device, hass)
    assert entity_id is not None
    assert hass.states.get(entity_id).state == STATE_OFF
    assert hass.states.get(entity_id).attributes[ATTR_PERCENTAGE] == 0
    assert hass.states.get(entity_id).attributes[ATTR_PRESET_MODE] is None
    # Ten discrete speeds on this device -> 10% step.
    assert hass.states.get(entity_id).attributes[ATTR_PERCENTAGE_STEP] == 100 / 10
    # A restored device performs more reads than a freshly joined one.
    if zha_device_joined_restored.name == "zha_device_joined":
        assert cluster.read_attributes.await_count == 3
    else:
        assert cluster.read_attributes.await_count == 6

    await async_setup_component(hass, "homeassistant", {})
    await hass.async_block_till_done()

    # update_entity triggers exactly one additional attribute read.
    await hass.services.async_call(
        "homeassistant", "update_entity", {"entity_id": entity_id}, blocking=True
    )
    assert hass.states.get(entity_id).state == STATE_OFF
    if zha_device_joined_restored.name == "zha_device_joined":
        assert cluster.read_attributes.await_count == 4
    else:
        assert cluster.read_attributes.await_count == 7

    # fan_mode 1 means "auto" at 10% on this device.
    cluster.PLUGGED_ATTR_READS = {"fan_mode": 1}
    await hass.services.async_call(
        "homeassistant", "update_entity", {"entity_id": entity_id}, blocking=True
    )
    assert hass.states.get(entity_id).state == STATE_ON
    assert hass.states.get(entity_id).attributes[ATTR_PERCENTAGE] == 10
    # NOTE(review): ``is`` relies on string identity with PRESET_MODE_AUTO;
    # ``==`` would be the safer comparison here.
    assert hass.states.get(entity_id).attributes[ATTR_PRESET_MODE] is PRESET_MODE_AUTO
    assert hass.states.get(entity_id).attributes[ATTR_PERCENTAGE_STEP] == 100 / 10
    if zha_device_joined_restored.name == "zha_device_joined":
        assert cluster.read_attributes.await_count == 5
    else:
        assert cluster.read_attributes.await_count == 8
|
@pytest.fixture
def zigpy_device_kof(zigpy_device_mock):
    """Fan by King of Fans zigpy device."""
    # Ensure the King of Fans quirk module is actually loaded before it is
    # referenced below: the visible top-of-file imports only pull in
    # ``zhaquirks.ikea.starkvind``, which does not import the ``kof``
    # subpackage. Importing here is idempotent and keeps the fixture
    # self-sufficient even if the module-level import is missing.
    import zhaquirks.kof.kof_mr101z

    # Single endpoint with the basic clusters plus manufacturer-specific
    # cluster id 64637 (0xFC7D); the CeilingFan quirk supplies the actual
    # Fan cluster behavior used by the tests.
    endpoints = {
        1: {
            SIG_EP_INPUT: [
                general.Basic.cluster_id,
                general.Identify.cluster_id,
                general.Groups.cluster_id,
                general.Scenes.cluster_id,
                64637,
            ],
            SIG_EP_OUTPUT: [],
            SIG_EP_TYPE: zha.DeviceType.COMBINED_INTERFACE,
            SIG_EP_PROFILE: zha.PROFILE_ID,
        },
    }
    return zigpy_device_mock(
        endpoints,
        manufacturer="King Of Fans, Inc.",
        model="HBUniversalCFRemote",
        quirk=zhaquirks.kof.kof_mr101z.CeilingFan,
        node_descriptor=b"\x02@\x8c\x02\x10RR\x00\x00\x00R\x00\x00",
    )
|
async def test_fan_kof(
    hass: HomeAssistant,
    zha_device_joined_restored: ZHADevice,
    zigpy_device_kof: Device,
) -> None:
    """Test ZHA fan platform for King of Fans.

    Exercises availability, attribute reports, and HA service calls against
    the KOF quirk's Fan cluster, including the Smart preset.
    """
    zha_device = await zha_device_joined_restored(zigpy_device_kof)
    cluster = zigpy_device_kof.endpoints.get(1).fan
    entity_id = find_entity_id(Platform.FAN, zha_device, hass)
    assert entity_id is not None

    assert hass.states.get(entity_id).state == STATE_OFF
    await async_enable_traffic(hass, [zha_device], enabled=False)
    # test that the fan was created and that it is unavailable
    assert hass.states.get(entity_id).state == STATE_UNAVAILABLE

    # allow traffic to flow through the gateway and device
    await async_enable_traffic(hass, [zha_device])

    # test that the state has changed from unavailable to off
    assert hass.states.get(entity_id).state == STATE_OFF

    # turn on at fan -- attribute 0 (fan_mode) -> 1; the other ids are noise
    await send_attributes_report(hass, cluster, {1: 2, 0: 1, 2: 3})
    assert hass.states.get(entity_id).state == STATE_ON

    # turn off at fan -- attribute 0 (fan_mode) -> 0
    await send_attributes_report(hass, cluster, {1: 1, 0: 0, 2: 2})
    assert hass.states.get(entity_id).state == STATE_OFF

    # turn on from HA -- writes fan_mode 2
    cluster.write_attributes.reset_mock()
    await async_turn_on(hass, entity_id)
    assert cluster.write_attributes.mock_calls == [
        call({"fan_mode": 2}, manufacturer=None)
    ]

    # turn off from HA -- writes fan_mode 0
    cluster.write_attributes.reset_mock()
    await async_turn_off(hass, entity_id)
    assert cluster.write_attributes.mock_calls == [
        call({"fan_mode": 0}, manufacturer=None)
    ]

    # change speed from HA -- 100% maps to fan_mode 4 (top of 4 speeds)
    cluster.write_attributes.reset_mock()
    await async_set_percentage(hass, entity_id, percentage=100)
    assert cluster.write_attributes.mock_calls == [
        call({"fan_mode": 4}, manufacturer=None)
    ]

    # change preset_mode from HA -- Smart maps to fan_mode 6
    cluster.write_attributes.reset_mock()
    await async_set_preset_mode(hass, entity_id, preset_mode=PRESET_MODE_SMART)
    assert cluster.write_attributes.mock_calls == [
        call({"fan_mode": 6}, manufacturer=None)
    ]

    # set invalid preset_mode from HA -- auto is not supported by this device,
    # so the call must raise and write nothing
    cluster.write_attributes.reset_mock()
    with pytest.raises(NotValidPresetModeError) as exc:
        await async_set_preset_mode(hass, entity_id, preset_mode=PRESET_MODE_AUTO)
    assert exc.value.translation_key == "not_valid_preset_mode"
    assert len(cluster.write_attributes.mock_calls) == 0

    # test adding new fan to the network and HA
    await async_test_rejoin(hass, zigpy_device_kof, [cluster], (1,))
|
|
|
|
|
|
@pytest.mark.parametrize(
    ("plug_read", "expected_state", "expected_percentage", "expected_preset"),
    [
        (None, STATE_OFF, None, None),
        ({"fan_mode": 0}, STATE_OFF, 0, None),
        ({"fan_mode": 1}, STATE_ON, 25, None),
        ({"fan_mode": 2}, STATE_ON, 50, None),
        ({"fan_mode": 3}, STATE_ON, 75, None),
        ({"fan_mode": 4}, STATE_ON, 100, None),
        ({"fan_mode": 6}, STATE_ON, None, PRESET_MODE_SMART),
    ],
)
async def test_fan_kof_init(
    hass: HomeAssistant,
    zha_device_joined_restored,
    zigpy_device_kof,
    plug_read,
    expected_state,
    expected_percentage,
    expected_preset,
) -> None:
    """Test ZHA fan platform for King of Fans."""

    # Seed the Fan cluster with the attribute value that will be "read"
    # while the device joins/restores.
    fan_cluster = zigpy_device_kof.endpoints.get(1).fan
    fan_cluster.PLUGGED_ATTR_READS = plug_read

    zha_device = await zha_device_joined_restored(zigpy_device_kof)
    entity_id = find_entity_id(Platform.FAN, zha_device, hass)
    assert entity_id is not None

    # State, percentage, and preset must all reflect the plugged fan_mode;
    # fan_mode 6 is the Smart preset with no percentage.
    state = hass.states.get(entity_id)
    assert state.state == expected_state
    assert state.attributes[ATTR_PERCENTAGE] == expected_percentage
    assert state.attributes[ATTR_PRESET_MODE] == expected_preset
|
|
|
|
|
|
|
async def test_fan_kof_update_entity(
    hass: HomeAssistant,
    zha_device_joined_restored,
    zigpy_device_kof,
) -> None:
    """Test ZHA fan platform for King of Fans.

    Verifies that homeassistant.update_entity re-reads fan_mode from the KOF
    Fan cluster and that the entity state follows the new value.
    """

    cluster = zigpy_device_kof.endpoints.get(1).fan
    # Initial "read" result: fan is off.
    cluster.PLUGGED_ATTR_READS = {"fan_mode": 0}

    zha_device = await zha_device_joined_restored(zigpy_device_kof)
    entity_id = find_entity_id(Platform.FAN, zha_device, hass)
    assert entity_id is not None
    assert hass.states.get(entity_id).state == STATE_OFF
    assert hass.states.get(entity_id).attributes[ATTR_PERCENTAGE] == 0
    assert hass.states.get(entity_id).attributes[ATTR_PRESET_MODE] is None
    # Four discrete speeds on this device -> 25% step.
    assert hass.states.get(entity_id).attributes[ATTR_PERCENTAGE_STEP] == 100 / 4
    # A restored device performs more reads than a freshly joined one.
    if zha_device_joined_restored.name == "zha_device_joined":
        assert cluster.read_attributes.await_count == 2
    else:
        assert cluster.read_attributes.await_count == 4

    await async_setup_component(hass, "homeassistant", {})
    await hass.async_block_till_done()

    # update_entity triggers exactly one additional attribute read.
    await hass.services.async_call(
        "homeassistant", "update_entity", {"entity_id": entity_id}, blocking=True
    )
    assert hass.states.get(entity_id).state == STATE_OFF
    if zha_device_joined_restored.name == "zha_device_joined":
        assert cluster.read_attributes.await_count == 3
    else:
        assert cluster.read_attributes.await_count == 5

    # fan_mode 1 is speed 1 of 4 -> 25%.
    cluster.PLUGGED_ATTR_READS = {"fan_mode": 1}
    await hass.services.async_call(
        "homeassistant", "update_entity", {"entity_id": entity_id}, blocking=True
    )
    assert hass.states.get(entity_id).state == STATE_ON
    assert hass.states.get(entity_id).attributes[ATTR_PERCENTAGE] == 25
    assert hass.states.get(entity_id).attributes[ATTR_PRESET_MODE] is None
    assert hass.states.get(entity_id).attributes[ATTR_PERCENTAGE_STEP] == 100 / 4
    if zha_device_joined_restored.name == "zha_device_joined":
        assert cluster.read_attributes.await_count == 4
    else:
        assert cluster.read_attributes.await_count == 6