2023-01-02 05:20:59 +00:00
|
|
|
"""Test ZHA light."""
|
2020-03-11 11:17:53 +00:00
|
|
|
from datetime import timedelta
|
2021-09-06 23:00:06 +00:00
|
|
|
from unittest.mock import AsyncMock, call, patch, sentinel
|
2019-02-27 13:34:38 +00:00
|
|
|
|
2020-02-10 02:45:35 +00:00
|
|
|
import pytest
|
2020-03-25 11:23:54 +00:00
|
|
|
import zigpy.profiles.zha as zha
|
2019-10-21 23:30:56 +00:00
|
|
|
import zigpy.zcl.clusters.general as general
|
2020-02-10 02:45:35 +00:00
|
|
|
import zigpy.zcl.clusters.lighting as lighting
|
2019-10-21 23:30:56 +00:00
|
|
|
import zigpy.zcl.foundation as zcl_f
|
2019-10-21 17:14:17 +00:00
|
|
|
|
2022-02-03 13:16:35 +00:00
|
|
|
from homeassistant.components.light import (
|
|
|
|
DOMAIN as LIGHT_DOMAIN,
|
|
|
|
FLASH_LONG,
|
|
|
|
FLASH_SHORT,
|
2022-05-27 13:38:22 +00:00
|
|
|
ColorMode,
|
2022-02-03 13:16:35 +00:00
|
|
|
)
|
2022-09-07 15:10:24 +00:00
|
|
|
from homeassistant.components.zha.core.const import (
|
|
|
|
CONF_ALWAYS_PREFER_XY_COLOR_MODE,
|
Implement "group members assume state" option for ZHA (#84938)
* Initial "group members assume state" implementation for ZHA
* Remove left-over debug flag (where polling was disabled)
* Implement _send_member_assume_state_event() method and also use after turn_off
* Only assume updated arguments from service call to group
* Make code more readable and change checks slightly
* Move "send member assume state" events to LightGroup on/off calls
* Include new config option in tests
* Check that member is available before updating to assumed state
* Lower "update group from child delay" for debouncer to basically 0 when using assumed member state
* Allow "child to group" updates regardless of config option
This is not needed, as group members will not update their state, as long as they're transitioning. (If a group transitions, it also sets its members to transitioning mode)
This fixes multiple issues. Previously, the state of a group was completely wrong when:
- turn on group with 10 second transition
- turn on members individually
- turn off members individually
- group state would not update correctly
* Move "default update group from child delay" constant
* Change to new constant name in test
* Also update fan test to new constant name
* Decrease "update group from child delay" to 10ms
In my testing, 0.0 also works without any issues and correctly de-bounces child updates when using the "assume state option".
This is just for avoiding multiple state changes when changing the group -> children issue individual updates.
With 2 children in a group and delay 0, both child updates only cause one group re-calculation and state change.
0.01 (10ms) should be plenty for very slow systems to de-bounce the update (and in the worst case, it'll cause just another state change but nothing breaks)
* Also implement "assuming state" for effect
Not sure if anybody even uses this, but this one is a bit special because the effect is always deactivated if it's not provided in the light.turn_on call.
* Move shortened delay for "assuming members" to a constant
* Add basic test to verify that group members assume on/off state
* Move _assume_group_state function declaration out of async_added_to_hass
* Fix rare edge-case when rapidly toggling lights and light groups at the same time
This prevents an issue where either the group transition would unset the transition flag or the single light would unset the group transition status mid-transition.
Note: When a new individual transition is started, we want to unset the group flag, as we actually cancel that transition.
* Check that effect list exists, add return type
* Re-trigger CI due to timeout
* Increase ASSUME_UPDATE_GROUP_FROM_CHILD_DELAY slightly
The debouncer is used when updating group member states either by assuming them (in which case we want to barely have any delay), or between the time we get the results back from polling (where we want a slightly longer time).
As it's not easily possible to distinguish if a group member was updated via assuming the state of the group or by the polling that follows, 50 ms seems to be a good middle point.
* Add debug print for when updating group state
* Fix issues with "off brightness" when switching between group/members
This fixes a bunch of issues with "off brightness" and passes it down to the members correctly.
For example, if a light group is turned off with a transition (so bulbs get their level set to 1), this will also set the "off brightness" of all individual bulbs to the last level that they were at.
(It really fixes a lot of issues when using the "member assume group state" option. It's not really possible to fix them without that.)
Furthermore, issues where polling was previously needed to get the correct state after "playing with transitions" should now be resolved, and the correct state should be obtained when using the "members assume group state" option.
Note: The only case which still can't be fixed is the following:
If individual lights have off_with_transition set, but not the group, and the group is then turned on without a level, individual lights might fall back to brightness level 1 (<- at least now shows correctly in UI even before polling).
Since all lights might need different brightness levels to be turned on, we can't use one group call. But making individual calls when turning on a ZHA group would cause a lot of traffic and thereby be counter-productive.
In this case, light.turn_on should just be called with a level (or individual calls to the lights should be made).
Another thing that was changed is to reset off_with_transition/off_brightness for a LightGroup when a member is turned on (even if the LightGroup wasn't turned on using its turn_on method).
off_with_transition/off_brightness for individual bulbs is now also turned off when a light is detected to be on during polling.
Lastly, the waiting for polled attributes could previously cause "invalid state" to be set (so mid-transition levels).
This could happen when group and members are repeatedly toggled at similar times. These "invalid states" could cause wrong "off brightness" levels if transitions are also used.
To fix this, we check after waiting for the polled attributes in async_get_state to see if a transition has started in the meanwhile. If so, the values can be discarded. A new poll will happen later and if using the "members assume group state" config option, the values should already be correct before the polling.
* Enable "group members assume state" config option by default
The config tests are also updated to expect the config option be enabled by default.
For all tests, the config option is generally disabled though:
There are only two group related tests. The one that tests this new feature overrides the config option to be enabled anyway.
The other tests works in a similar way but also "sends" attribute reports, so we want to disable the feature for that test.
(It would also run with it enabled (if the correct CHILD_UPDATE value is patched), but then it would test the same stuff as the other test, hence we're disabling the config option for that test.)
2023-01-16 15:48:18 +00:00
|
|
|
CONF_GROUP_MEMBERS_ASSUME_STATE,
|
2022-09-07 15:10:24 +00:00
|
|
|
ZHA_OPTIONS,
|
|
|
|
)
|
2020-05-04 19:19:53 +00:00
|
|
|
from homeassistant.components.zha.core.group import GroupMember
|
2022-07-21 21:54:50 +00:00
|
|
|
from homeassistant.components.zha.light import FLASH_EFFECTS
|
2021-12-11 16:06:39 +00:00
|
|
|
from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE, Platform
|
2020-03-11 11:17:53 +00:00
|
|
|
import homeassistant.util.dt as dt_util
|
2019-02-27 13:34:38 +00:00
|
|
|
|
2019-02-04 11:51:47 +00:00
|
|
|
from .common import (
|
2019-07-31 19:25:30 +00:00
|
|
|
async_enable_traffic,
|
2020-03-25 11:23:54 +00:00
|
|
|
async_find_group_entity_id,
|
2022-07-27 00:03:17 +00:00
|
|
|
async_shift_time,
|
2020-02-12 21:12:14 +00:00
|
|
|
async_test_rejoin,
|
2022-09-19 07:51:31 +00:00
|
|
|
async_wait_for_updates,
|
2019-10-31 16:31:06 +00:00
|
|
|
find_entity_id,
|
2020-03-25 11:23:54 +00:00
|
|
|
get_zha_gateway,
|
2022-09-07 15:10:24 +00:00
|
|
|
patch_zha_config,
|
2020-03-13 23:17:50 +00:00
|
|
|
send_attributes_report,
|
2019-07-31 19:25:30 +00:00
|
|
|
)
|
2021-09-06 23:00:06 +00:00
|
|
|
from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE
|
2019-02-27 13:34:38 +00:00
|
|
|
|
2020-03-11 11:17:53 +00:00
|
|
|
from tests.common import async_fire_time_changed
|
|
|
|
|
2020-03-25 11:23:54 +00:00
|
|
|
# IEEE addresses for the mock groupable devices used by the group tests below.
IEEE_GROUPABLE_DEVICE = "01:2d:6f:00:0a:90:69:e8"
IEEE_GROUPABLE_DEVICE2 = "02:2d:6f:00:0a:90:69:e9"
IEEE_GROUPABLE_DEVICE3 = "03:2d:6f:00:0a:90:69:e7"


# Endpoint signature for a plain on/off light (no level or color support).
LIGHT_ON_OFF = {
    1: {
        SIG_EP_PROFILE: zha.PROFILE_ID,
        SIG_EP_TYPE: zha.DeviceType.ON_OFF_LIGHT,
        SIG_EP_INPUT: [
            general.Basic.cluster_id,
            general.Identify.cluster_id,
            general.OnOff.cluster_id,
        ],
        SIG_EP_OUTPUT: [general.Ota.cluster_id],
    }
}

# Endpoint signature for a dimmable light (on/off + level, no color).
LIGHT_LEVEL = {
    1: {
        SIG_EP_PROFILE: zha.PROFILE_ID,
        SIG_EP_TYPE: zha.DeviceType.DIMMABLE_LIGHT,
        SIG_EP_INPUT: [
            general.Basic.cluster_id,
            general.LevelControl.cluster_id,
            general.OnOff.cluster_id,
        ],
        SIG_EP_OUTPUT: [general.Ota.cluster_id],
    }
}

# Endpoint signature for a full color-dimmable light (on/off + level + color).
LIGHT_COLOR = {
    1: {
        SIG_EP_PROFILE: zha.PROFILE_ID,
        SIG_EP_TYPE: zha.DeviceType.COLOR_DIMMABLE_LIGHT,
        SIG_EP_INPUT: [
            general.Basic.cluster_id,
            general.Identify.cluster_id,
            general.LevelControl.cluster_id,
            general.OnOff.cluster_id,
            lighting.Color.cluster_id,
        ],
        SIG_EP_OUTPUT: [general.Ota.cluster_id],
    }
}
|
|
|
|
|
2019-02-04 11:51:47 +00:00
|
|
|
|
2022-06-17 16:41:10 +00:00
|
|
|
# autouse=True: applied to every test in this module without being requested.
@pytest.fixture(autouse=True)
def light_platform_only():
    """Only set up the light and required base platforms to speed up tests."""
    # Restrict ZHA to the platforms these tests actually exercise; the patch
    # is active for the duration of each test (yield) and undone afterwards.
    with patch(
        "homeassistant.components.zha.PLATFORMS",
        (
            Platform.BINARY_SENSOR,
            Platform.DEVICE_TRACKER,
            Platform.BUTTON,
            Platform.LIGHT,
            Platform.SENSOR,
            Platform.NUMBER,
            Platform.SELECT,
        ),
    ):
        yield
|
|
|
|
|
|
|
|
|
2020-03-25 11:23:54 +00:00
|
|
|
@pytest.fixture
async def coordinator(hass, zigpy_device_mock, zha_device_joined):
    """Mock a joined ZHA coordinator device (nwk 0x0000)."""
    endpoint_spec = {
        SIG_EP_INPUT: [general.Groups.cluster_id],
        SIG_EP_OUTPUT: [],
        SIG_EP_TYPE: zha.DeviceType.COLOR_DIMMABLE_LIGHT,
        SIG_EP_PROFILE: zha.PROFILE_ID,
    }
    mock_device = zigpy_device_mock(
        {1: endpoint_spec},
        ieee="00:15:8d:00:02:32:4f:32",
        nwk=0x0000,
        node_descriptor=b"\xf8\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff",
    )
    joined_device = await zha_device_joined(mock_device)
    # Mark available so the entity is usable immediately in tests.
    joined_device.available = True
    return joined_device
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.fixture
async def device_light_1(hass, zigpy_device_mock, zha_device_joined):
    """Mock the first groupable color-dimmable light device."""
    endpoint_spec = {
        SIG_EP_INPUT: [
            general.OnOff.cluster_id,
            general.LevelControl.cluster_id,
            lighting.Color.cluster_id,
            general.Groups.cluster_id,
            general.Identify.cluster_id,
        ],
        SIG_EP_OUTPUT: [],
        SIG_EP_TYPE: zha.DeviceType.COLOR_DIMMABLE_LIGHT,
        SIG_EP_PROFILE: zha.PROFILE_ID,
    }
    mock_device = zigpy_device_mock(
        {1: endpoint_spec},
        ieee=IEEE_GROUPABLE_DEVICE,
        nwk=0xB79D,
    )
    # Advertise color-temperature and XY color support on the color cluster.
    capabilities = (
        lighting.Color.ColorCapabilities.Color_temperature
        | lighting.Color.ColorCapabilities.XY_attributes
    )
    mock_device.endpoints[1].light_color.PLUGGED_ATTR_READS = {
        "color_capabilities": capabilities
    }
    joined_device = await zha_device_joined(mock_device)
    joined_device.available = True
    return joined_device
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.fixture
async def device_light_2(hass, zigpy_device_mock, zha_device_joined):
    """Mock the second groupable color-dimmable light (Sengled)."""
    endpoint_spec = {
        SIG_EP_INPUT: [
            general.OnOff.cluster_id,
            general.LevelControl.cluster_id,
            lighting.Color.cluster_id,
            general.Groups.cluster_id,
            general.Identify.cluster_id,
        ],
        SIG_EP_OUTPUT: [],
        SIG_EP_TYPE: zha.DeviceType.COLOR_DIMMABLE_LIGHT,
        SIG_EP_PROFILE: zha.PROFILE_ID,
    }
    # manufacturer="sengled" — presumably triggers Sengled-specific handling
    # in the ZHA light platform; confirm against the component code.
    mock_device = zigpy_device_mock(
        {1: endpoint_spec},
        ieee=IEEE_GROUPABLE_DEVICE2,
        manufacturer="sengled",
        nwk=0xC79E,
    )
    # Advertise color-temperature and XY color support on the color cluster.
    capabilities = (
        lighting.Color.ColorCapabilities.Color_temperature
        | lighting.Color.ColorCapabilities.XY_attributes
    )
    mock_device.endpoints[1].light_color.PLUGGED_ATTR_READS = {
        "color_capabilities": capabilities
    }
    joined_device = await zha_device_joined(mock_device)
    joined_device.available = True
    return joined_device
|
|
|
|
|
|
|
|
|
2020-03-29 00:38:48 +00:00
|
|
|
@pytest.fixture
async def device_light_3(hass, zigpy_device_mock, zha_device_joined):
    """Mock a third groupable color-dimmable light (no plugged color reads)."""
    endpoint_spec = {
        SIG_EP_INPUT: [
            general.OnOff.cluster_id,
            general.LevelControl.cluster_id,
            lighting.Color.cluster_id,
            general.Groups.cluster_id,
            general.Identify.cluster_id,
        ],
        SIG_EP_OUTPUT: [],
        SIG_EP_TYPE: zha.DeviceType.COLOR_DIMMABLE_LIGHT,
        SIG_EP_PROFILE: zha.PROFILE_ID,
    }
    mock_device = zigpy_device_mock(
        {1: endpoint_spec},
        ieee=IEEE_GROUPABLE_DEVICE3,
        nwk=0xB89F,
    )
    joined_device = await zha_device_joined(mock_device)
    joined_device.available = True
    return joined_device
|
|
|
|
|
|
|
|
|
2022-07-18 14:20:49 +00:00
|
|
|
@pytest.fixture
async def eWeLink_light(hass, zigpy_device_mock, zha_device_joined):
    """Mock eWeLink light."""
    endpoint_spec = {
        SIG_EP_INPUT: [
            general.OnOff.cluster_id,
            general.LevelControl.cluster_id,
            lighting.Color.cluster_id,
            general.Groups.cluster_id,
            general.Identify.cluster_id,
        ],
        SIG_EP_OUTPUT: [],
        SIG_EP_TYPE: zha.DeviceType.COLOR_DIMMABLE_LIGHT,
        SIG_EP_PROFILE: zha.PROFILE_ID,
    }
    mock_device = zigpy_device_mock(
        {1: endpoint_spec},
        ieee="03:2d:6f:00:0a:90:69:e3",
        manufacturer="eWeLink",
        nwk=0xB79D,
    )
    # Color temp + XY capable, but with 0/0 color-temp min/max — presumably
    # exercises handling of a bogus reported range; confirm against the
    # ZHA light platform's eWeLink handling.
    mock_device.endpoints[1].light_color.PLUGGED_ATTR_READS = {
        "color_capabilities": lighting.Color.ColorCapabilities.Color_temperature
        | lighting.Color.ColorCapabilities.XY_attributes,
        "color_temp_physical_min": 0,
        "color_temp_physical_max": 0,
    }
    joined_device = await zha_device_joined(mock_device)
    joined_device.available = True
    return joined_device
|
|
|
|
|
|
|
|
|
2020-03-11 11:17:53 +00:00
|
|
|
async def test_light_refresh(hass, zigpy_device_mock, zha_device_joined_restored):
    """Test ZHA light platform refresh."""
    # create zigpy devices
    zigpy_device = zigpy_device_mock(LIGHT_ON_OFF)
    on_off_cluster = zigpy_device.endpoints[1].on_off
    # Every polled read of the on/off attribute will return "off" (0).
    on_off_cluster.PLUGGED_ATTR_READS = {"on_off": 0}
    zha_device = await zha_device_joined_restored(zigpy_device)
    entity_id = await find_entity_id(Platform.LIGHT, zha_device, hass)

    # allow traffic to flow through the gateway and device
    await async_enable_traffic(hass, [zha_device])
    # Discard any reads performed during setup so the counts below start at 0.
    on_off_cluster.read_attributes.reset_mock()

    # not enough time passed — 20 minutes is below the refresh interval
    # (exact interval defined in the ZHA component; confirm there).
    async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=20))
    await hass.async_block_till_done()
    assert on_off_cluster.read_attributes.call_count == 0
    assert on_off_cluster.read_attributes.await_count == 0
    assert hass.states.get(entity_id).state == STATE_OFF

    # 1 interval - 1 call; the plugged read now reports "on", so the
    # refresh must both poll the cluster and update the entity state.
    on_off_cluster.PLUGGED_ATTR_READS = {"on_off": 1}
    async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=80))
    await hass.async_block_till_done()
    assert on_off_cluster.read_attributes.call_count == 1
    assert on_off_cluster.read_attributes.await_count == 1
    assert hass.states.get(entity_id).state == STATE_ON

    # 2 intervals - 2 calls; state flips back to off after the next poll.
    on_off_cluster.PLUGGED_ATTR_READS = {"on_off": 0}
    async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=80))
    await hass.async_block_till_done()
    assert on_off_cluster.read_attributes.call_count == 2
    assert on_off_cluster.read_attributes.await_count == 2
    assert hass.states.get(entity_id).state == STATE_OFF
|
|
|
|
|
|
|
|
|
2020-03-11 12:37:28 +00:00
|
|
|
# Patch all cluster request methods so no real Zigbee traffic is attempted;
# each returns a successful ZCL response.
@patch(
    "zigpy.zcl.clusters.lighting.Color.request",
    new=AsyncMock(return_value=[sentinel.data, zcl_f.Status.SUCCESS]),
)
@patch(
    "zigpy.zcl.clusters.general.Identify.request",
    new=AsyncMock(return_value=[sentinel.data, zcl_f.Status.SUCCESS]),
)
@patch(
    "zigpy.zcl.clusters.general.LevelControl.request",
    new=AsyncMock(return_value=[sentinel.data, zcl_f.Status.SUCCESS]),
)
@patch(
    "zigpy.zcl.clusters.general.OnOff.request",
    new=AsyncMock(return_value=[sentinel.data, zcl_f.Status.SUCCESS]),
)
# Run once per light flavor; `reporting` is the expected per-cluster
# report-configuration counts checked by async_test_rejoin.
@pytest.mark.parametrize(
    "device, reporting",
    [(LIGHT_ON_OFF, (1, 0, 0)), (LIGHT_LEVEL, (1, 1, 0)), (LIGHT_COLOR, (1, 1, 6))],
)
async def test_light(
    hass, zigpy_device_mock, zha_device_joined_restored, device, reporting
):
    """Test ZHA light platform."""
    # create zigpy devices
    zigpy_device = zigpy_device_mock(device)
    zha_device = await zha_device_joined_restored(zigpy_device)
    entity_id = await find_entity_id(Platform.LIGHT, zha_device, hass)

    assert entity_id is not None

    # Optional clusters are absent for the simpler light flavors, hence
    # getattr(..., None) instead of direct attribute access.
    cluster_on_off = zigpy_device.endpoints[1].on_off
    cluster_level = getattr(zigpy_device.endpoints[1], "level", None)
    cluster_color = getattr(zigpy_device.endpoints[1], "light_color", None)
    cluster_identify = getattr(zigpy_device.endpoints[1], "identify", None)

    assert hass.states.get(entity_id).state == STATE_OFF
    await async_enable_traffic(hass, [zha_device], enabled=False)
    # test that the lights were created and that they are unavailable
    assert hass.states.get(entity_id).state == STATE_UNAVAILABLE

    # allow traffic to flow through the gateway and device
    await async_enable_traffic(hass, [zha_device])

    # test that the lights were created and are off
    assert hass.states.get(entity_id).state == STATE_OFF

    # test turning the lights on and off from the light
    await async_test_on_off_from_light(hass, cluster_on_off, entity_id)

    # test turning the lights on and off from the HA
    await async_test_on_off_from_hass(hass, cluster_on_off, entity_id)

    # test short flashing the lights from the HA
    if cluster_identify:
        await async_test_flash_from_hass(hass, cluster_identify, entity_id, FLASH_SHORT)

    # test long flashing the lights from the HA
    if cluster_identify:
        await async_test_flash_from_hass(hass, cluster_identify, entity_id, FLASH_LONG)

    # test dimming the lights on and off from the HA
    if cluster_level:
        await async_test_level_on_off_from_hass(
            hass, cluster_on_off, cluster_level, entity_id
        )
        await async_shift_time(hass)

        # test getting a brightness change from the network
        await async_test_on_from_light(hass, cluster_on_off, entity_id)
        await async_test_dimmer_from_light(
            hass, cluster_level, entity_id, 150, STATE_ON
        )

    # test rejoin
    await async_test_off_from_hass(hass, cluster_on_off, entity_id)
    # Only the clusters this flavor actually has take part in the rejoin check.
    clusters = [c for c in (cluster_on_off, cluster_level, cluster_color) if c]
    await async_test_rejoin(hass, zigpy_device, clusters, reporting)
|
2019-02-04 11:51:47 +00:00
|
|
|
|
2022-09-07 15:10:24 +00:00
|
|
|
|
|
|
|
# Each case plugs different cached color-cluster attributes into the mock;
# expected_state is currently unused pending the TODO at the bottom.
@pytest.mark.parametrize(
    "plugged_attr_reads, config_override, expected_state",
    [
        # HS light without cached hue or saturation
        (
            {
                "color_capabilities": (
                    lighting.Color.ColorCapabilities.Hue_and_saturation
                ),
            },
            {(ZHA_OPTIONS, CONF_ALWAYS_PREFER_XY_COLOR_MODE): False},
            {},
        ),
        # HS light with cached hue
        (
            {
                "color_capabilities": (
                    lighting.Color.ColorCapabilities.Hue_and_saturation
                ),
                "current_hue": 100,
            },
            {(ZHA_OPTIONS, CONF_ALWAYS_PREFER_XY_COLOR_MODE): False},
            {},
        ),
        # HS light with cached saturation
        (
            {
                "color_capabilities": (
                    lighting.Color.ColorCapabilities.Hue_and_saturation
                ),
                "current_saturation": 100,
            },
            {(ZHA_OPTIONS, CONF_ALWAYS_PREFER_XY_COLOR_MODE): False},
            {},
        ),
        # HS light with both
        (
            {
                "color_capabilities": (
                    lighting.Color.ColorCapabilities.Hue_and_saturation
                ),
                "current_hue": 100,
                "current_saturation": 100,
            },
            {(ZHA_OPTIONS, CONF_ALWAYS_PREFER_XY_COLOR_MODE): False},
            {},
        ),
    ],
)
async def test_light_initialization(
    hass,
    zigpy_device_mock,
    zha_device_joined_restored,
    plugged_attr_reads,
    config_override,
    expected_state,
):
    """Test ZHA light initialization with cached attributes and color modes."""
    # create zigpy devices
    zigpy_device = zigpy_device_mock(LIGHT_COLOR)

    # mock attribute reads
    zigpy_device.endpoints[1].light_color.PLUGGED_ATTR_READS = plugged_attr_reads

    # Apply the per-case ZHA config override before the device joins, so
    # initialization runs with the overridden color-mode preference.
    with patch_zha_config("light", config_override):
        zha_device = await zha_device_joined_restored(zigpy_device)
        entity_id = await find_entity_id(Platform.LIGHT, zha_device, hass)

    assert entity_id is not None

    # TODO ensure hue and saturation are properly set on startup
|
2020-02-29 23:37:06 +00:00
|
|
|
|
2019-02-04 11:51:47 +00:00
|
|
|
|
2022-07-18 14:20:49 +00:00
|
|
|
@patch(
    "zigpy.zcl.clusters.lighting.Color.request",
    new=AsyncMock(return_value=[sentinel.data, zcl_f.Status.SUCCESS]),
)
@patch(
    "zigpy.zcl.clusters.general.Identify.request",
    new=AsyncMock(return_value=[sentinel.data, zcl_f.Status.SUCCESS]),
)
@patch(
    "zigpy.zcl.clusters.general.LevelControl.request",
    new=AsyncMock(return_value=[sentinel.data, zcl_f.Status.SUCCESS]),
)
@patch(
    "zigpy.zcl.clusters.general.OnOff.request",
    new=AsyncMock(return_value=[sentinel.data, zcl_f.Status.SUCCESS]),
)
async def test_transitions(
    hass, device_light_1, device_light_2, eWeLink_light, coordinator
):
    """Test ZHA light transition code.

    NOTE: each ``@patch`` above replaces the *class-level* ``request``
    attribute with a single ``AsyncMock``, so every cluster instance of that
    type (device 1, device 2, the eWeLink light, and the group endpoint)
    shares the same mock object. The per-device ``devN_cluster_*.request``
    names below therefore all alias one mock per cluster type; the frequent
    ``reset_mock()`` calls between sections are what keep the call-count
    assertions meaningful.
    """
    zha_gateway = get_zha_gateway(hass)
    assert zha_gateway is not None
    # Wire the gateway/coordinator/device references together manually so the
    # group machinery below can resolve its members.
    zha_gateway.coordinator_zha_device = coordinator
    coordinator._zha_gateway = zha_gateway
    device_light_1._zha_gateway = zha_gateway
    device_light_2._zha_gateway = zha_gateway
    member_ieee_addresses = [device_light_1.ieee, device_light_2.ieee]
    members = [GroupMember(device_light_1.ieee, 1), GroupMember(device_light_2.ieee, 1)]

    assert coordinator.is_coordinator

    # test creating a group with 2 members
    zha_group = await zha_gateway.async_create_zigpy_group("Test Group", members)
    await hass.async_block_till_done()

    assert zha_group is not None
    assert len(zha_group.members) == 2
    for member in zha_group.members:
        assert member.device.ieee in member_ieee_addresses
        assert member.group == zha_group
        assert member.endpoint is not None

    device_1_entity_id = await find_entity_id(Platform.LIGHT, device_light_1, hass)
    device_2_entity_id = await find_entity_id(Platform.LIGHT, device_light_2, hass)
    eWeLink_light_entity_id = await find_entity_id(Platform.LIGHT, eWeLink_light, hass)
    assert device_1_entity_id != device_2_entity_id

    group_entity_id = async_find_group_entity_id(hass, Platform.LIGHT, zha_group)
    assert hass.states.get(group_entity_id) is not None

    assert device_1_entity_id in zha_group.member_entity_ids
    assert device_2_entity_id in zha_group.member_entity_ids

    # Per-device handles on the (shared, class-level-patched) cluster mocks.
    dev1_cluster_on_off = device_light_1.device.endpoints[1].on_off
    dev2_cluster_on_off = device_light_2.device.endpoints[1].on_off
    eWeLink_cluster_on_off = eWeLink_light.device.endpoints[1].on_off

    dev1_cluster_level = device_light_1.device.endpoints[1].level
    dev2_cluster_level = device_light_2.device.endpoints[1].level
    eWeLink_cluster_level = eWeLink_light.device.endpoints[1].level

    dev1_cluster_color = device_light_1.device.endpoints[1].light_color
    dev2_cluster_color = device_light_2.device.endpoints[1].light_color
    eWeLink_cluster_color = eWeLink_light.device.endpoints[1].light_color

    # allow traffic to flow through the gateway and device
    await async_enable_traffic(hass, [device_light_1, device_light_2])
    await async_wait_for_updates(hass)

    # test that the lights were created and are off
    group_state = hass.states.get(group_entity_id)
    assert group_state.state == STATE_OFF
    light1_state = hass.states.get(device_1_entity_id)
    assert light1_state.state == STATE_OFF
    light2_state = hass.states.get(device_2_entity_id)
    assert light2_state.state == STATE_OFF

    # first test 0 length transition with no color provided
    dev1_cluster_on_off.request.reset_mock()
    dev1_cluster_level.request.reset_mock()
    await hass.services.async_call(
        LIGHT_DOMAIN,
        "turn_on",
        {"entity_id": device_1_entity_id, "transition": 0, "brightness": 50},
        blocking=True,
    )
    # A 0-length transition goes straight through move_to_level_with_on_off;
    # no separate OnOff or Color commands are issued.
    assert dev1_cluster_on_off.request.call_count == 0
    assert dev1_cluster_on_off.request.await_count == 0
    assert dev1_cluster_color.request.call_count == 0
    assert dev1_cluster_color.request.await_count == 0
    assert dev1_cluster_level.request.call_count == 1
    assert dev1_cluster_level.request.await_count == 1
    assert dev1_cluster_level.request.call_args == call(
        False,
        dev1_cluster_level.commands_by_name["move_to_level_with_on_off"].id,
        dev1_cluster_level.commands_by_name["move_to_level_with_on_off"].schema,
        level=50,
        transition_time=0,
        expect_reply=True,
        manufacturer=None,
        tries=1,
        tsn=None,
    )

    light1_state = hass.states.get(device_1_entity_id)
    assert light1_state.state == STATE_ON
    assert light1_state.attributes["brightness"] == 50

    dev1_cluster_level.request.reset_mock()

    # test non 0 length transition with color provided while light is on
    await hass.services.async_call(
        LIGHT_DOMAIN,
        "turn_on",
        {
            "entity_id": device_1_entity_id,
            "transition": 3,
            "brightness": 18,
            "color_temp": 432,
        },
        blocking=True,
    )
    assert dev1_cluster_on_off.request.call_count == 0
    assert dev1_cluster_on_off.request.await_count == 0
    assert dev1_cluster_color.request.call_count == 1
    assert dev1_cluster_color.request.await_count == 1
    assert dev1_cluster_level.request.call_count == 1
    assert dev1_cluster_level.request.await_count == 1
    # transition_time is in 1/10ths of a second, so 3 s -> 30.
    assert dev1_cluster_level.request.call_args == call(
        False,
        dev1_cluster_level.commands_by_name["move_to_level_with_on_off"].id,
        dev1_cluster_level.commands_by_name["move_to_level_with_on_off"].schema,
        level=18,
        transition_time=30,
        expect_reply=True,
        manufacturer=None,
        tries=1,
        tsn=None,
    )
    assert dev1_cluster_color.request.call_args == call(
        False,
        dev1_cluster_color.commands_by_name["move_to_color_temp"].id,
        dev1_cluster_color.commands_by_name["move_to_color_temp"].schema,
        color_temp_mireds=432,
        transition_time=30.0,
        expect_reply=True,
        manufacturer=None,
        tries=1,
        tsn=None,
    )

    light1_state = hass.states.get(device_1_entity_id)
    assert light1_state.state == STATE_ON
    assert light1_state.attributes["brightness"] == 18
    assert light1_state.attributes["color_temp"] == 432
    assert light1_state.attributes["color_mode"] == ColorMode.COLOR_TEMP

    dev1_cluster_level.request.reset_mock()
    dev1_cluster_color.request.reset_mock()

    # test 0 length transition to turn light off
    await hass.services.async_call(
        LIGHT_DOMAIN,
        "turn_off",
        {
            "entity_id": device_1_entity_id,
            "transition": 0,
        },
        blocking=True,
    )
    # turn_off with a transition uses move_to_level_with_on_off(level=0)
    # rather than the OnOff cluster's "off" command.
    assert dev1_cluster_on_off.request.call_count == 0
    assert dev1_cluster_on_off.request.await_count == 0
    assert dev1_cluster_color.request.call_count == 0
    assert dev1_cluster_color.request.await_count == 0
    assert dev1_cluster_level.request.call_count == 1
    assert dev1_cluster_level.request.await_count == 1
    assert dev1_cluster_level.request.call_args == call(
        False,
        dev1_cluster_level.commands_by_name["move_to_level_with_on_off"].id,
        dev1_cluster_level.commands_by_name["move_to_level_with_on_off"].schema,
        level=0,
        transition_time=0,
        expect_reply=True,
        manufacturer=None,
        tries=1,
        tsn=None,
    )

    light1_state = hass.states.get(device_1_entity_id)
    assert light1_state.state == STATE_OFF

    dev1_cluster_level.request.reset_mock()

    # test non 0 length transition and color temp while turning light on (new_color_provided_while_off)
    await hass.services.async_call(
        LIGHT_DOMAIN,
        "turn_on",
        {
            "entity_id": device_1_entity_id,
            "transition": 1,
            "brightness": 25,
            "color_temp": 235,
        },
        blocking=True,
    )
    # new_color_provided_while_off -> three-call dance: on at minimal
    # brightness, set color instantly, then fade brightness up.
    assert dev1_cluster_on_off.request.call_count == 0
    assert dev1_cluster_on_off.request.await_count == 0
    assert dev1_cluster_color.request.call_count == 1
    assert dev1_cluster_color.request.await_count == 1
    assert dev1_cluster_level.request.call_count == 2
    assert dev1_cluster_level.request.await_count == 2

    # first it comes on with no transition at 2 brightness
    assert dev1_cluster_level.request.call_args_list[0] == call(
        False,
        dev1_cluster_level.commands_by_name["move_to_level_with_on_off"].id,
        dev1_cluster_level.commands_by_name["move_to_level_with_on_off"].schema,
        level=2,
        transition_time=0,
        expect_reply=True,
        manufacturer=None,
        tries=1,
        tsn=None,
    )
    assert dev1_cluster_color.request.call_args == call(
        False,
        dev1_cluster_color.commands_by_name["move_to_color_temp"].id,
        dev1_cluster_color.commands_by_name["move_to_color_temp"].schema,
        color_temp_mireds=235,
        transition_time=0,  # no transition when new_color_provided_while_off
        expect_reply=True,
        manufacturer=None,
        tries=1,
        tsn=None,
    )
    assert dev1_cluster_level.request.call_args_list[1] == call(
        False,
        dev1_cluster_level.commands_by_name["move_to_level"].id,
        dev1_cluster_level.commands_by_name["move_to_level"].schema,
        level=25,
        transition_time=10,
        expect_reply=True,
        manufacturer=None,
        tries=1,
        tsn=None,
    )

    light1_state = hass.states.get(device_1_entity_id)
    assert light1_state.state == STATE_ON
    assert light1_state.attributes["brightness"] == 25
    assert light1_state.attributes["color_temp"] == 235
    assert light1_state.attributes["color_mode"] == ColorMode.COLOR_TEMP

    dev1_cluster_level.request.reset_mock()
    dev1_cluster_color.request.reset_mock()

    # turn light 1 back off
    await hass.services.async_call(
        LIGHT_DOMAIN,
        "turn_off",
        {
            "entity_id": device_1_entity_id,
        },
        blocking=True,
    )
    assert dev1_cluster_on_off.request.call_count == 1
    assert dev1_cluster_on_off.request.await_count == 1
    assert dev1_cluster_color.request.call_count == 0
    assert dev1_cluster_color.request.await_count == 0
    assert dev1_cluster_level.request.call_count == 0
    assert dev1_cluster_level.request.await_count == 0
    group_state = hass.states.get(group_entity_id)
    assert group_state.state == STATE_OFF

    dev1_cluster_on_off.request.reset_mock()
    dev1_cluster_color.request.reset_mock()
    dev1_cluster_level.request.reset_mock()

    # test no transition provided and color temp while turning light on (new_color_provided_while_off)
    await hass.services.async_call(
        LIGHT_DOMAIN,
        "turn_on",
        {
            "entity_id": device_1_entity_id,
            "brightness": 25,
            "color_temp": 236,
        },
        blocking=True,
    )
    assert dev1_cluster_on_off.request.call_count == 0
    assert dev1_cluster_on_off.request.await_count == 0
    assert dev1_cluster_color.request.call_count == 1
    assert dev1_cluster_color.request.await_count == 1
    assert dev1_cluster_level.request.call_count == 2
    assert dev1_cluster_level.request.await_count == 2

    # first it comes on with no transition at 2 brightness
    assert dev1_cluster_level.request.call_args_list[0] == call(
        False,
        dev1_cluster_level.commands_by_name["move_to_level_with_on_off"].id,
        dev1_cluster_level.commands_by_name["move_to_level_with_on_off"].schema,
        level=2,
        transition_time=0,
        expect_reply=True,
        manufacturer=None,
        tries=1,
        tsn=None,
    )
    assert dev1_cluster_color.request.call_args == call(
        False,
        dev1_cluster_color.commands_by_name["move_to_color_temp"].id,
        dev1_cluster_color.commands_by_name["move_to_color_temp"].schema,
        color_temp_mireds=236,
        transition_time=0,  # no transition when new_color_provided_while_off
        expect_reply=True,
        manufacturer=None,
        tries=1,
        tsn=None,
    )
    assert dev1_cluster_level.request.call_args_list[1] == call(
        False,
        dev1_cluster_level.commands_by_name["move_to_level"].id,
        dev1_cluster_level.commands_by_name["move_to_level"].schema,
        level=25,
        transition_time=0,
        expect_reply=True,
        manufacturer=None,
        tries=1,
        tsn=None,
    )

    light1_state = hass.states.get(device_1_entity_id)
    assert light1_state.state == STATE_ON
    assert light1_state.attributes["brightness"] == 25
    assert light1_state.attributes["color_temp"] == 236
    assert light1_state.attributes["color_mode"] == ColorMode.COLOR_TEMP

    dev1_cluster_level.request.reset_mock()
    dev1_cluster_color.request.reset_mock()

    # turn light 1 back off to setup group test
    await hass.services.async_call(
        LIGHT_DOMAIN,
        "turn_off",
        {
            "entity_id": device_1_entity_id,
        },
        blocking=True,
    )
    assert dev1_cluster_on_off.request.call_count == 1
    assert dev1_cluster_on_off.request.await_count == 1
    assert dev1_cluster_color.request.call_count == 0
    assert dev1_cluster_color.request.await_count == 0
    assert dev1_cluster_level.request.call_count == 0
    assert dev1_cluster_level.request.await_count == 0
    group_state = hass.states.get(group_entity_id)
    assert group_state.state == STATE_OFF

    dev1_cluster_on_off.request.reset_mock()
    dev1_cluster_color.request.reset_mock()
    dev1_cluster_level.request.reset_mock()

    # test no transition when the same color temp is provided from off
    await hass.services.async_call(
        LIGHT_DOMAIN,
        "turn_on",
        {
            "entity_id": device_1_entity_id,
            "color_temp": 236,
        },
        blocking=True,
    )
    # No brightness change requested, so the plain OnOff "on" command is used
    # instead of the level cluster.
    assert dev1_cluster_on_off.request.call_count == 1
    assert dev1_cluster_on_off.request.await_count == 1
    assert dev1_cluster_color.request.call_count == 1
    assert dev1_cluster_color.request.await_count == 1
    assert dev1_cluster_level.request.call_count == 0
    assert dev1_cluster_level.request.await_count == 0

    assert dev1_cluster_on_off.request.call_args == call(
        False,
        dev1_cluster_on_off.commands_by_name["on"].id,
        dev1_cluster_on_off.commands_by_name["on"].schema,
        expect_reply=True,
        manufacturer=None,
        tries=1,
        tsn=None,
    )

    assert dev1_cluster_color.request.call_args == call(
        False,
        dev1_cluster_color.commands_by_name["move_to_color_temp"].id,
        dev1_cluster_color.commands_by_name["move_to_color_temp"].schema,
        color_temp_mireds=236,
        transition_time=0,  # no transition when new_color_provided_while_off
        expect_reply=True,
        manufacturer=None,
        tries=1,
        tsn=None,
    )

    light1_state = hass.states.get(device_1_entity_id)
    assert light1_state.state == STATE_ON
    # brightness 25 is the last known level from the previous turn_on.
    assert light1_state.attributes["brightness"] == 25
    assert light1_state.attributes["color_temp"] == 236
    assert light1_state.attributes["color_mode"] == ColorMode.COLOR_TEMP

    dev1_cluster_on_off.request.reset_mock()
    dev1_cluster_color.request.reset_mock()

    # turn light 1 back off to setup group test
    await hass.services.async_call(
        LIGHT_DOMAIN,
        "turn_off",
        {
            "entity_id": device_1_entity_id,
        },
        blocking=True,
    )
    assert dev1_cluster_on_off.request.call_count == 1
    assert dev1_cluster_on_off.request.await_count == 1
    assert dev1_cluster_color.request.call_count == 0
    assert dev1_cluster_color.request.await_count == 0
    assert dev1_cluster_level.request.call_count == 0
    assert dev1_cluster_level.request.await_count == 0
    group_state = hass.states.get(group_entity_id)
    assert group_state.state == STATE_OFF

    dev1_cluster_on_off.request.reset_mock()
    dev1_cluster_color.request.reset_mock()
    dev1_cluster_level.request.reset_mock()

    # test sengled light uses default minimum transition time
    dev2_cluster_on_off.request.reset_mock()
    dev2_cluster_color.request.reset_mock()
    dev2_cluster_level.request.reset_mock()

    await hass.services.async_call(
        LIGHT_DOMAIN,
        "turn_on",
        {"entity_id": device_2_entity_id, "transition": 0, "brightness": 100},
        blocking=True,
    )
    assert dev2_cluster_on_off.request.call_count == 0
    assert dev2_cluster_on_off.request.await_count == 0
    assert dev2_cluster_color.request.call_count == 0
    assert dev2_cluster_color.request.await_count == 0
    assert dev2_cluster_level.request.call_count == 1
    assert dev2_cluster_level.request.await_count == 1
    assert dev2_cluster_level.request.call_args == call(
        False,
        dev2_cluster_level.commands_by_name["move_to_level_with_on_off"].id,
        dev2_cluster_level.commands_by_name["move_to_level_with_on_off"].schema,
        level=100,
        transition_time=1,  # transition time - sengled light uses default minimum
        expect_reply=True,
        manufacturer=None,
        tries=1,
        tsn=None,
    )

    light2_state = hass.states.get(device_2_entity_id)
    assert light2_state.state == STATE_ON
    assert light2_state.attributes["brightness"] == 100

    dev2_cluster_level.request.reset_mock()

    # turn the sengled light back off
    await hass.services.async_call(
        LIGHT_DOMAIN,
        "turn_off",
        {
            "entity_id": device_2_entity_id,
        },
        blocking=True,
    )
    assert dev2_cluster_on_off.request.call_count == 1
    assert dev2_cluster_on_off.request.await_count == 1
    assert dev2_cluster_color.request.call_count == 0
    assert dev2_cluster_color.request.await_count == 0
    assert dev2_cluster_level.request.call_count == 0
    assert dev2_cluster_level.request.await_count == 0
    light2_state = hass.states.get(device_2_entity_id)
    assert light2_state.state == STATE_OFF

    dev2_cluster_on_off.request.reset_mock()

    # test non 0 length transition and color temp while turning light on and sengled (new_color_provided_while_off)
    await hass.services.async_call(
        LIGHT_DOMAIN,
        "turn_on",
        {
            "entity_id": device_2_entity_id,
            "transition": 1,
            "brightness": 25,
            "color_temp": 235,
        },
        blocking=True,
    )
    assert dev2_cluster_on_off.request.call_count == 0
    assert dev2_cluster_on_off.request.await_count == 0
    assert dev2_cluster_color.request.call_count == 1
    assert dev2_cluster_color.request.await_count == 1
    assert dev2_cluster_level.request.call_count == 2
    assert dev2_cluster_level.request.await_count == 2

    # first it comes on with no transition at 2 brightness
    assert dev2_cluster_level.request.call_args_list[0] == call(
        False,
        dev2_cluster_level.commands_by_name["move_to_level_with_on_off"].id,
        dev2_cluster_level.commands_by_name["move_to_level_with_on_off"].schema,
        level=2,
        transition_time=1,
        expect_reply=True,
        manufacturer=None,
        tries=1,
        tsn=None,
    )
    assert dev2_cluster_color.request.call_args == call(
        False,
        dev2_cluster_color.commands_by_name["move_to_color_temp"].id,
        dev2_cluster_color.commands_by_name["move_to_color_temp"].schema,
        color_temp_mireds=235,
        transition_time=1,  # sengled transition == 1 when new_color_provided_while_off
        expect_reply=True,
        manufacturer=None,
        tries=1,
        tsn=None,
    )
    assert dev2_cluster_level.request.call_args_list[1] == call(
        False,
        dev2_cluster_level.commands_by_name["move_to_level"].id,
        dev2_cluster_level.commands_by_name["move_to_level"].schema,
        level=25,
        transition_time=10,
        expect_reply=True,
        manufacturer=None,
        tries=1,
        tsn=None,
    )

    light2_state = hass.states.get(device_2_entity_id)
    assert light2_state.state == STATE_ON
    assert light2_state.attributes["brightness"] == 25
    assert light2_state.attributes["color_temp"] == 235
    assert light2_state.attributes["color_mode"] == ColorMode.COLOR_TEMP

    dev2_cluster_level.request.reset_mock()
    dev2_cluster_color.request.reset_mock()

    # turn the sengled light back off
    await hass.services.async_call(
        LIGHT_DOMAIN,
        "turn_off",
        {
            "entity_id": device_2_entity_id,
        },
        blocking=True,
    )
    assert dev2_cluster_on_off.request.call_count == 1
    assert dev2_cluster_on_off.request.await_count == 1
    assert dev2_cluster_color.request.call_count == 0
    assert dev2_cluster_color.request.await_count == 0
    assert dev2_cluster_level.request.call_count == 0
    assert dev2_cluster_level.request.await_count == 0
    light2_state = hass.states.get(device_2_entity_id)
    assert light2_state.state == STATE_OFF

    dev2_cluster_on_off.request.reset_mock()

    # test non 0 length transition and color temp while turning group light on (new_color_provided_while_off)
    await hass.services.async_call(
        LIGHT_DOMAIN,
        "turn_on",
        {
            "entity_id": group_entity_id,
            "transition": 1,
            "brightness": 25,
            "color_temp": 235,
        },
        blocking=True,
    )

    group_on_off_channel = zha_group.endpoint[general.OnOff.cluster_id]
    group_level_channel = zha_group.endpoint[general.LevelControl.cluster_id]
    group_color_channel = zha_group.endpoint[lighting.Color.cluster_id]
    assert group_on_off_channel.request.call_count == 0
    assert group_on_off_channel.request.await_count == 0
    assert group_color_channel.request.call_count == 1
    assert group_color_channel.request.await_count == 1
    assert group_level_channel.request.call_count == 1
    assert group_level_channel.request.await_count == 1

    # groups are omitted from the 3 call dance for new_color_provided_while_off
    assert group_color_channel.request.call_args == call(
        False,
        dev2_cluster_color.commands_by_name["move_to_color_temp"].id,
        dev2_cluster_color.commands_by_name["move_to_color_temp"].schema,
        color_temp_mireds=235,
        transition_time=10,  # sengled transition == 1 when new_color_provided_while_off
        expect_reply=True,
        manufacturer=None,
        tries=1,
        tsn=None,
    )
    assert group_level_channel.request.call_args == call(
        False,
        dev2_cluster_level.commands_by_name["move_to_level_with_on_off"].id,
        dev2_cluster_level.commands_by_name["move_to_level_with_on_off"].schema,
        level=25,
        transition_time=10,
        expect_reply=True,
        manufacturer=None,
        tries=1,
        tsn=None,
    )

    group_state = hass.states.get(group_entity_id)
    assert group_state.state == STATE_ON
    assert group_state.attributes["brightness"] == 25
    assert group_state.attributes["color_temp"] == 235
    assert group_state.attributes["color_mode"] == ColorMode.COLOR_TEMP

    group_on_off_channel.request.reset_mock()
    group_color_channel.request.reset_mock()
    group_level_channel.request.reset_mock()

    # turn the sengled light back on
    await hass.services.async_call(
        LIGHT_DOMAIN,
        "turn_on",
        {
            "entity_id": device_2_entity_id,
        },
        blocking=True,
    )
    assert dev2_cluster_on_off.request.call_count == 1
    assert dev2_cluster_on_off.request.await_count == 1
    assert dev2_cluster_color.request.call_count == 0
    assert dev2_cluster_color.request.await_count == 0
    assert dev2_cluster_level.request.call_count == 0
    assert dev2_cluster_level.request.await_count == 0
    light2_state = hass.states.get(device_2_entity_id)
    assert light2_state.state == STATE_ON

    dev2_cluster_on_off.request.reset_mock()

    # turn the light off with a transition
    await hass.services.async_call(
        LIGHT_DOMAIN,
        "turn_off",
        {"entity_id": device_2_entity_id, "transition": 2},
        blocking=True,
    )
    assert dev2_cluster_on_off.request.call_count == 0
    assert dev2_cluster_on_off.request.await_count == 0
    assert dev2_cluster_color.request.call_count == 0
    assert dev2_cluster_color.request.await_count == 0
    assert dev2_cluster_level.request.call_count == 1
    assert dev2_cluster_level.request.await_count == 1
    assert dev2_cluster_level.request.call_args == call(
        False,
        dev2_cluster_level.commands_by_name["move_to_level_with_on_off"].id,
        dev2_cluster_level.commands_by_name["move_to_level_with_on_off"].schema,
        level=0,
        transition_time=20,  # transition time
        expect_reply=True,
        manufacturer=None,
        tries=1,
        tsn=None,
    )

    light2_state = hass.states.get(device_2_entity_id)
    assert light2_state.state == STATE_OFF

    dev2_cluster_level.request.reset_mock()

    # turn the light back on with no args should use a transition and last known brightness
    await hass.services.async_call(
        LIGHT_DOMAIN,
        "turn_on",
        {"entity_id": device_2_entity_id},
        blocking=True,
    )
    assert dev2_cluster_on_off.request.call_count == 0
    assert dev2_cluster_on_off.request.await_count == 0
    assert dev2_cluster_color.request.call_count == 0
    assert dev2_cluster_color.request.await_count == 0
    assert dev2_cluster_level.request.call_count == 1
    assert dev2_cluster_level.request.await_count == 1
    assert dev2_cluster_level.request.call_args == call(
        False,
        dev2_cluster_level.commands_by_name["move_to_level_with_on_off"].id,
        dev2_cluster_level.commands_by_name["move_to_level_with_on_off"].schema,
        level=25,
        transition_time=1,  # transition time - sengled light uses default minimum
        expect_reply=True,
        manufacturer=None,
        tries=1,
        tsn=None,
    )

    light2_state = hass.states.get(device_2_entity_id)
    assert light2_state.state == STATE_ON

    dev2_cluster_level.request.reset_mock()

    # test eWeLink color temp while turning light on from off (new_color_provided_while_off)
    await hass.services.async_call(
        LIGHT_DOMAIN,
        "turn_on",
        {
            "entity_id": eWeLink_light_entity_id,
            "color_temp": 235,
        },
        blocking=True,
    )
    assert eWeLink_cluster_on_off.request.call_count == 1
    assert eWeLink_cluster_on_off.request.await_count == 1
    assert eWeLink_cluster_color.request.call_count == 1
    assert eWeLink_cluster_color.request.await_count == 1
    assert eWeLink_cluster_level.request.call_count == 0
    assert eWeLink_cluster_level.request.await_count == 0

    # first it comes on
    assert eWeLink_cluster_on_off.request.call_args_list[0] == call(
        False,
        eWeLink_cluster_on_off.commands_by_name["on"].id,
        eWeLink_cluster_on_off.commands_by_name["on"].schema,
        expect_reply=True,
        manufacturer=None,
        tries=1,
        tsn=None,
    )
    # NOTE(review): this asserts on dev1's color cluster, but since the
    # class-level ``Color.request`` patch is shared by every instance this is
    # the same mock the eWeLink light just used — confirm it is intentional.
    assert dev1_cluster_color.request.call_args == call(
        False,
        dev1_cluster_color.commands_by_name["move_to_color_temp"].id,
        dev1_cluster_color.commands_by_name["move_to_color_temp"].schema,
        color_temp_mireds=235,
        transition_time=0,
        expect_reply=True,
        manufacturer=None,
        tries=1,
        tsn=None,
    )

    eWeLink_state = hass.states.get(eWeLink_light_entity_id)
    assert eWeLink_state.state == STATE_ON
    assert eWeLink_state.attributes["color_temp"] == 235
    assert eWeLink_state.attributes["color_mode"] == ColorMode.COLOR_TEMP
    assert eWeLink_state.attributes["min_mireds"] == 153
    assert eWeLink_state.attributes["max_mireds"] == 500
2019-02-04 11:51:47 +00:00
|
|
|
async def async_test_on_off_from_light(hass, cluster, entity_id):
    """Verify the entity follows on/off attribute reports from the device."""
    # Device reports on_off attribute (id 0) == 1: entity must show "on".
    await send_attributes_report(hass, cluster, {1: 0, 0: 1, 2: 3})
    await async_wait_for_updates(hass)
    light_state = hass.states.get(entity_id)
    assert light_state.state == STATE_ON

    # Device reports on_off attribute (id 0) == 0: entity must show "off".
    await send_attributes_report(hass, cluster, {1: 1, 0: 0, 2: 3})
    await async_wait_for_updates(hass)
    light_state = hass.states.get(entity_id)
    assert light_state.state == STATE_OFF
|
|
|
|
|
|
|
|
|
|
|
|
async def async_test_on_from_light(hass, cluster, entity_id):
    """Verify the entity reports on after the device sends an "on" report."""
    # Attribute id 0 (on_off) set to 1 marks the light on; the other
    # attribute ids in the report are irrelevant to this check.
    await send_attributes_report(hass, cluster, {1: -1, 0: 1, 2: 2})
    await async_wait_for_updates(hass)
    entity_state = hass.states.get(entity_id)
    assert entity_state.state == STATE_ON
|
|
|
|
|
|
|
|
|
|
|
|
async def async_test_on_off_from_hass(hass, cluster, entity_id):
    """Test on off functionality from hass.

    Issues a ``light.turn_on`` service call and asserts exactly one ZCL
    "on" command (with default request options) was sent to *cluster*,
    then delegates the matching "off" check to async_test_off_from_hass.
    """
    # turn on via UI
    # Reset so earlier test steps don't pollute the call counts below.
    cluster.request.reset_mock()
    await hass.services.async_call(
        LIGHT_DOMAIN, "turn_on", {"entity_id": entity_id}, blocking=True
    )
    assert cluster.request.call_count == 1
    assert cluster.request.await_count == 1
    # The single request must be the ZCL On command with default options.
    assert cluster.request.call_args == call(
        False,
        cluster.commands_by_name["on"].id,
        cluster.commands_by_name["on"].schema,
        expect_reply=True,
        manufacturer=None,
        tries=1,
        tsn=None,
    )

    await async_test_off_from_hass(hass, cluster, entity_id)
|
|
|
|
|
|
|
|
|
|
|
|
async def async_test_off_from_hass(hass, cluster, entity_id):
    """Test turning off the light from Home Assistant.

    Issues a ``light.turn_off`` service call and asserts exactly one ZCL
    "off" command (with default request options) was sent to *cluster*.
    """
    # turn off via UI
    # Reset so earlier test steps don't pollute the call counts below.
    cluster.request.reset_mock()
    await hass.services.async_call(
        LIGHT_DOMAIN, "turn_off", {"entity_id": entity_id}, blocking=True
    )
    assert cluster.request.call_count == 1
    assert cluster.request.await_count == 1
    # The single request must be the ZCL Off command with default options.
    assert cluster.request.call_args == call(
        False,
        cluster.commands_by_name["off"].id,
        cluster.commands_by_name["off"].schema,
        expect_reply=True,
        manufacturer=None,
        tries=1,
        tsn=None,
    )
|
2019-02-04 11:51:47 +00:00
|
|
|
|
|
|
|
|
2019-07-31 19:25:30 +00:00
|
|
|
async def async_test_level_on_off_from_hass(
    hass, on_off_cluster, level_cluster, entity_id, expected_default_transition: int = 0
):
    """Test on/off and brightness (level) functionality from hass.

    Covers three turn_on variants: plain (only the on/off cluster is used),
    with an explicit transition (both clusters used, level 254), and with a
    brightness value (only the level cluster is used). Finishes by turning
    the light off via async_test_off_from_hass.

    expected_default_transition: transition_time (in 1/10 s units) expected
    on the level command when the service call carries no transition.
    """
    on_off_cluster.request.reset_mock()
    level_cluster.request.reset_mock()
    await async_shift_time(hass)

    # turn on via UI
    await hass.services.async_call(
        LIGHT_DOMAIN, "turn_on", {"entity_id": entity_id}, blocking=True
    )
    # A plain turn_on uses only the on/off cluster, never the level cluster.
    assert on_off_cluster.request.call_count == 1
    assert on_off_cluster.request.await_count == 1
    assert level_cluster.request.call_count == 0
    assert level_cluster.request.await_count == 0
    assert on_off_cluster.request.call_args == call(
        False,
        on_off_cluster.commands_by_name["on"].id,
        on_off_cluster.commands_by_name["on"].schema,
        expect_reply=True,
        manufacturer=None,
        tries=1,
        tsn=None,
    )
    on_off_cluster.request.reset_mock()
    level_cluster.request.reset_mock()

    await async_shift_time(hass)

    # turn_on with an explicit transition uses both clusters: On, then
    # move_to_level_with_on_off at full brightness (254) over the
    # requested 10 s (transition_time is in 1/10 s units, hence 100).
    await hass.services.async_call(
        LIGHT_DOMAIN,
        "turn_on",
        {"entity_id": entity_id, "transition": 10},
        blocking=True,
    )
    assert on_off_cluster.request.call_count == 1
    assert on_off_cluster.request.await_count == 1
    assert level_cluster.request.call_count == 1
    assert level_cluster.request.await_count == 1
    assert on_off_cluster.request.call_args == call(
        False,
        on_off_cluster.commands_by_name["on"].id,
        on_off_cluster.commands_by_name["on"].schema,
        expect_reply=True,
        manufacturer=None,
        tries=1,
        tsn=None,
    )
    assert level_cluster.request.call_args == call(
        False,
        level_cluster.commands_by_name["move_to_level_with_on_off"].id,
        level_cluster.commands_by_name["move_to_level_with_on_off"].schema,
        level=254,
        transition_time=100,
        expect_reply=True,
        manufacturer=None,
        tries=1,
        tsn=None,
    )
    on_off_cluster.request.reset_mock()
    level_cluster.request.reset_mock()

    await hass.services.async_call(
        LIGHT_DOMAIN,
        "turn_on",
        {"entity_id": entity_id, "brightness": 10},
        blocking=True,
    )
    # the onoff cluster is now not used when brightness is present by default
    assert on_off_cluster.request.call_count == 0
    assert on_off_cluster.request.await_count == 0
    assert level_cluster.request.call_count == 1
    assert level_cluster.request.await_count == 1
    assert level_cluster.request.call_args == call(
        False,
        level_cluster.commands_by_name["move_to_level_with_on_off"].id,
        level_cluster.commands_by_name["move_to_level_with_on_off"].schema,
        level=10,
        transition_time=int(expected_default_transition),
        expect_reply=True,
        manufacturer=None,
        tries=1,
        tsn=None,
    )
    on_off_cluster.request.reset_mock()
    level_cluster.request.reset_mock()

    await async_test_off_from_hass(hass, on_off_cluster, entity_id)
|
2019-02-04 11:51:47 +00:00
|
|
|
|
|
|
|
|
2019-07-31 19:25:30 +00:00
|
|
|
async def async_test_dimmer_from_light(hass, cluster, entity_id, level, expected_state):
    """Verify the entity follows a brightness (level) report from the device."""
    # Attribute id 0 is current_level; the surrounding attribute ids carry
    # values the entity must ignore for this check.
    report = {1: level + 10, 0: level, 2: level - 10 or 22}
    await send_attributes_report(hass, cluster, report)
    await async_wait_for_updates(hass)
    assert hass.states.get(entity_id).state == expected_state
    # hass uses None for brightness of 0 in state attributes
    expected_brightness = level if level != 0 else None
    attributes = hass.states.get(entity_id).attributes
    assert attributes.get("brightness") == expected_brightness
|
2020-02-29 23:37:06 +00:00
|
|
|
|
|
|
|
|
|
|
|
async def async_test_flash_from_hass(hass, cluster, entity_id, flash):
    """Test flash functionality from hass.

    Issues ``light.turn_on`` with a flash value (FLASH_SHORT/FLASH_LONG)
    and asserts exactly one Identify ``trigger_effect`` command, mapped
    through FLASH_EFFECTS, was sent to *cluster*.
    """
    # turn on via UI
    # Reset so earlier test steps don't pollute the call counts below.
    cluster.request.reset_mock()
    await hass.services.async_call(
        LIGHT_DOMAIN,
        "turn_on",
        {"entity_id": entity_id, "flash": flash},
        blocking=True,
    )
    assert cluster.request.call_count == 1
    assert cluster.request.await_count == 1
    # Flash maps onto the Identify cluster's trigger_effect command.
    assert cluster.request.call_args == call(
        False,
        cluster.commands_by_name["trigger_effect"].id,
        cluster.commands_by_name["trigger_effect"].schema,
        effect_id=FLASH_EFFECTS[flash],
        effect_variant=general.Identify.EffectVariant.Default,
        expect_reply=True,
        manufacturer=None,
        tries=1,
        tsn=None,
    )
|
2020-03-25 11:23:54 +00:00
|
|
|
|
|
|
|
|
2020-05-04 19:19:53 +00:00
|
|
|
# All four cluster request mocks report ZCL SUCCESS so no command "fails";
# the child-update delay is zeroed so group state recomputes immediately.
@patch(
    "zigpy.zcl.clusters.lighting.Color.request",
    new=AsyncMock(return_value=[sentinel.data, zcl_f.Status.SUCCESS]),
)
@patch(
    "zigpy.zcl.clusters.general.Identify.request",
    new=AsyncMock(return_value=[sentinel.data, zcl_f.Status.SUCCESS]),
)
@patch(
    "zigpy.zcl.clusters.general.LevelControl.request",
    new=AsyncMock(return_value=[sentinel.data, zcl_f.Status.SUCCESS]),
)
@patch(
    "zigpy.zcl.clusters.general.OnOff.request",
    new=AsyncMock(return_value=[sentinel.data, zcl_f.Status.SUCCESS]),
)
@patch(
    "homeassistant.components.zha.entity.DEFAULT_UPDATE_GROUP_FROM_CHILD_DELAY",
    new=0,
)
async def test_zha_group_light_entity(
    hass, device_light_1, device_light_2, device_light_3, coordinator
):
    """Test the light entity for a ZHA group.

    Covers group creation, group-level service calls (on/off, level,
    flash), group state keyed off individual member reports, member
    add/remove (including the entity disappearing at <2 members while its
    registry entry survives), re-probing, and full group removal.
    """
    zha_gateway = get_zha_gateway(hass)
    assert zha_gateway is not None
    # Wire the fixtures to the gateway the way the runtime normally would.
    zha_gateway.coordinator_zha_device = coordinator
    coordinator._zha_gateway = zha_gateway
    device_light_1._zha_gateway = zha_gateway
    device_light_2._zha_gateway = zha_gateway
    member_ieee_addresses = [device_light_1.ieee, device_light_2.ieee]
    members = [GroupMember(device_light_1.ieee, 1), GroupMember(device_light_2.ieee, 1)]

    assert coordinator.is_coordinator

    # test creating a group with 2 members
    zha_group = await zha_gateway.async_create_zigpy_group("Test Group", members)
    await hass.async_block_till_done()

    assert zha_group is not None
    assert len(zha_group.members) == 2
    for member in zha_group.members:
        assert member.device.ieee in member_ieee_addresses
        assert member.group == zha_group
        assert member.endpoint is not None

    device_1_entity_id = await find_entity_id(Platform.LIGHT, device_light_1, hass)
    device_2_entity_id = await find_entity_id(Platform.LIGHT, device_light_2, hass)
    device_3_entity_id = await find_entity_id(Platform.LIGHT, device_light_3, hass)

    # The three member entities must all be distinct.
    assert (
        device_1_entity_id != device_2_entity_id
        and device_1_entity_id != device_3_entity_id
    )
    assert device_2_entity_id != device_3_entity_id

    group_entity_id = async_find_group_entity_id(hass, Platform.LIGHT, zha_group)
    assert hass.states.get(group_entity_id) is not None

    # light 3 is not a member yet and must not be tracked by the group.
    assert device_1_entity_id in zha_group.member_entity_ids
    assert device_2_entity_id in zha_group.member_entity_ids
    assert device_3_entity_id not in zha_group.member_entity_ids

    group_cluster_on_off = zha_group.endpoint[general.OnOff.cluster_id]
    group_cluster_level = zha_group.endpoint[general.LevelControl.cluster_id]
    group_cluster_identify = zha_group.endpoint[general.Identify.cluster_id]

    dev1_cluster_on_off = device_light_1.device.endpoints[1].on_off
    dev2_cluster_on_off = device_light_2.device.endpoints[1].on_off
    dev3_cluster_on_off = device_light_3.device.endpoints[1].on_off

    dev1_cluster_level = device_light_1.device.endpoints[1].level

    await async_enable_traffic(
        hass, [device_light_1, device_light_2, device_light_3], enabled=False
    )
    await async_wait_for_updates(hass)
    # test that the lights were created and that they are unavailable
    assert hass.states.get(group_entity_id).state == STATE_UNAVAILABLE

    # allow traffic to flow through the gateway and device
    await async_enable_traffic(hass, [device_light_1, device_light_2, device_light_3])
    await async_wait_for_updates(hass)

    # test that the lights were created and are off
    group_state = hass.states.get(group_entity_id)
    assert group_state.state == STATE_OFF
    assert group_state.attributes["supported_color_modes"] == [
        ColorMode.COLOR_TEMP,
        ColorMode.XY,
    ]
    # Light which is off has no color mode
    assert "color_mode" not in group_state.attributes

    # test turning the lights on and off from the HA
    await async_test_on_off_from_hass(hass, group_cluster_on_off, group_entity_id)

    await async_shift_time(hass)

    # test short flashing the lights from the HA
    await async_test_flash_from_hass(
        hass, group_cluster_identify, group_entity_id, FLASH_SHORT
    )

    await async_shift_time(hass)

    # test turning the lights on and off from the light
    await async_test_on_off_from_light(hass, dev1_cluster_on_off, group_entity_id)

    # test turning the lights on and off from the HA
    await async_test_level_on_off_from_hass(
        hass,
        group_cluster_on_off,
        group_cluster_level,
        group_entity_id,
        expected_default_transition=1,  # a Sengled light is in that group and needs a minimum 0.1s transition
    )

    await async_shift_time(hass)

    # test getting a brightness change from the network
    await async_test_on_from_light(hass, dev1_cluster_on_off, group_entity_id)
    await async_test_dimmer_from_light(
        hass, dev1_cluster_level, group_entity_id, 150, STATE_ON
    )
    # Check state
    group_state = hass.states.get(group_entity_id)
    assert group_state.state == STATE_ON
    assert group_state.attributes["supported_color_modes"] == [
        ColorMode.COLOR_TEMP,
        ColorMode.XY,
    ]
    assert group_state.attributes["color_mode"] == ColorMode.XY

    # test long flashing the lights from the HA
    await async_test_flash_from_hass(
        hass, group_cluster_identify, group_entity_id, FLASH_LONG
    )

    await async_shift_time(hass)

    assert len(zha_group.members) == 2
    # test some of the group logic to make sure we key off states correctly
    await send_attributes_report(hass, dev1_cluster_on_off, {0: 1})
    await send_attributes_report(hass, dev2_cluster_on_off, {0: 1})
    await hass.async_block_till_done()

    # test that group light is on
    assert hass.states.get(device_1_entity_id).state == STATE_ON
    assert hass.states.get(device_2_entity_id).state == STATE_ON
    assert hass.states.get(group_entity_id).state == STATE_ON

    await send_attributes_report(hass, dev1_cluster_on_off, {0: 0})
    await hass.async_block_till_done()

    # test that group light is still on
    # (group stays on while at least one member is on)
    assert hass.states.get(device_1_entity_id).state == STATE_OFF
    assert hass.states.get(device_2_entity_id).state == STATE_ON
    assert hass.states.get(group_entity_id).state == STATE_ON

    await send_attributes_report(hass, dev2_cluster_on_off, {0: 0})
    await async_wait_for_updates(hass)

    # test that group light is now off
    assert hass.states.get(device_1_entity_id).state == STATE_OFF
    assert hass.states.get(device_2_entity_id).state == STATE_OFF
    assert hass.states.get(group_entity_id).state == STATE_OFF

    await send_attributes_report(hass, dev1_cluster_on_off, {0: 1})
    await async_wait_for_updates(hass)

    # test that group light is now back on
    assert hass.states.get(device_1_entity_id).state == STATE_ON
    assert hass.states.get(device_2_entity_id).state == STATE_OFF
    assert hass.states.get(group_entity_id).state == STATE_ON

    # turn it off to test a new member add being tracked
    await send_attributes_report(hass, dev1_cluster_on_off, {0: 0})
    await async_wait_for_updates(hass)
    assert hass.states.get(device_1_entity_id).state == STATE_OFF
    assert hass.states.get(device_2_entity_id).state == STATE_OFF
    assert hass.states.get(group_entity_id).state == STATE_OFF

    # add a new member and test that his state is also tracked
    await zha_group.async_add_members([GroupMember(device_light_3.ieee, 1)])
    await send_attributes_report(hass, dev3_cluster_on_off, {0: 1})
    await async_wait_for_updates(hass)
    assert device_3_entity_id in zha_group.member_entity_ids
    assert len(zha_group.members) == 3

    assert hass.states.get(device_1_entity_id).state == STATE_OFF
    assert hass.states.get(device_2_entity_id).state == STATE_OFF
    assert hass.states.get(device_3_entity_id).state == STATE_ON
    assert hass.states.get(group_entity_id).state == STATE_ON

    # make the group have only 1 member and now there should be no entity
    await zha_group.async_remove_members(
        [GroupMember(device_light_2.ieee, 1), GroupMember(device_light_3.ieee, 1)]
    )
    assert len(zha_group.members) == 1
    assert hass.states.get(group_entity_id) is None
    assert device_2_entity_id not in zha_group.member_entity_ids
    assert device_3_entity_id not in zha_group.member_entity_ids

    # make sure the entity registry entry is still there
    assert zha_gateway.ha_entity_registry.async_get(group_entity_id) is not None

    # add a member back and ensure that the group entity was created again
    await zha_group.async_add_members([GroupMember(device_light_3.ieee, 1)])
    await send_attributes_report(hass, dev3_cluster_on_off, {0: 1})
    await async_wait_for_updates(hass)
    assert len(zha_group.members) == 2
    assert hass.states.get(group_entity_id).state == STATE_ON

    # add a 3rd member and ensure we still have an entity and we track the new one
    await send_attributes_report(hass, dev1_cluster_on_off, {0: 0})
    await send_attributes_report(hass, dev3_cluster_on_off, {0: 0})
    await async_wait_for_updates(hass)
    assert hass.states.get(group_entity_id).state == STATE_OFF

    # this will test that _reprobe_group is used correctly
    await zha_group.async_add_members(
        [GroupMember(device_light_2.ieee, 1), GroupMember(coordinator.ieee, 1)]
    )
    await send_attributes_report(hass, dev2_cluster_on_off, {0: 1})
    await async_wait_for_updates(hass)
    assert len(zha_group.members) == 4
    assert hass.states.get(group_entity_id).state == STATE_ON

    await zha_group.async_remove_members([GroupMember(coordinator.ieee, 1)])
    await hass.async_block_till_done()
    assert hass.states.get(group_entity_id).state == STATE_ON
    assert len(zha_group.members) == 3

    # remove the group and ensure that there is no entity and that the entity registry is cleaned up
    assert zha_gateway.ha_entity_registry.async_get(group_entity_id) is not None
    await zha_gateway.async_remove_zigpy_group(zha_group.group_id)
    assert hass.states.get(group_entity_id) is None
    assert zha_gateway.ha_entity_registry.async_get(group_entity_id) is None
|
Implement "group members assume state" option for ZHA (#84938)
* Initial "group members assume state" implementation for ZHA
* Remove left-over debug flag (where polling was disabled)
* Implement _send_member_assume_state_event() method and also use after turn_off
* Only assume updated arguments from service call to group
* Make code more readable and change checks slightly
* Move "send member assume state" events to LightGroup on/off calls
* Include new config option in tests
* Check that member is available before updating to assumed state
* Lower "update group from child delay" for debouncer to basically 0 when using assumed member state
* Allow "child to group" updates regardless of config option
This is not needed, as group members will not update their state, as long as they're transitioning. (If a group transitions, it also sets its members to transitioning mode)
This fixes multiple issues. Previously, the state of a group was completely wrong when:
- turn on group with 10 second transition
- turn on members individually
- turn off members individually
- group state would not update correctly
* Move "default update group from child delay" constant
* Change to new constant name in test
* Also update fan test to new constant name
* Decrease "update group from child delay" to 10ms
In my testing, 0.0 also works without any issues and correctly de-bounces child updates when using the "assume state option".
This is just for avoiding multiple state changes when changing the group -> children issue individual updates.
With 2 children in a group and delay 0, both child updates only cause one group re-calculation and state change.
0.01 (10ms) should be plenty for very slow systems to de-bounce the update (and in the worst case, it'll cause just another state change but nothing breaks)
* Also implement "assuming state" for effect
Not sure if anybody even uses this, but this one is a bit special because the effect is always deactivated if it's not provided in the light.turn_on call.
* Move shortened delay for "assuming members" to a constant
* Add basic test to verify that group members assume on/off state
* Move _assume_group_state function declaration out of async_added_to_hass
* Fix rare edge-case when rapidly toggling lights and light groups at the same time
This prevents an issue where either the group transition would unset the transition flag or the single light would unset the group transition status midst-transition.
Note: When a new individual transition is started, we want to unset the group flag, as we actually cancel that transition.
* Check that effect list exists, add return type
* Re-trigger CI due to timeout
* Increase ASSUME_UPDATE_GROUP_FROM_CHILD_DELAY slightly
The debouncer is used when updating group member states either by assuming them (in which case we want to barely have any delay), or between the time we get the results back from polling (where we want a slightly longer time).
As it's not easily possible to distinguish if a group member was updated via assuming the state of the group or by the polling that follows, 50 ms seems to be a good middle point.
* Add debug print for when updating group state
* Fix issues with "off brightness" when switching between group/members
This fixes a bunch of issues with "off brightness" and passes it down to the members correctly.
For example, if a light group is turned off with a transition (so bulbs get their level set to 1), this will also set the "off brightness" of all individual bulbs to the last level that they were at.
(It really fixes a lot of issues when using the "member assume group state" option. It's not really possible to fix them without that.)
Furthermore, issues where polling was previously needed to get the correct state after "playing with transitions" should now be resolved, and the correct state should be reported when using the "members assume group state" option.
Note: The only case which still can't be fixed is the following:
If individual lights have off_with_transition set, but not the group, and the group is then turned on without a level, individual lights might fall back to brightness level 1 (<- at least now shows correctly in UI even before polling).
Since all lights might need different brightness levels to be turned on, we can't use one group call. But making individual calls when turning on a ZHA group would cause a lot of traffic and thereby be counter-productive.
In this case, light.turn_on should just be called with a level (or individual calls to the lights should be made).
Another thing that was changed is to reset off_with_transition/off_brightness for a LightGroup when a member is turned on (even if the LightGroup wasn't turned on using its turn_on method).
off_with_transition/off_brightness for individual bulbs is now also turned off when a light is detected to be on during polling.
Lastly, the waiting for polled attributes could previously cause "invalid state" to be set (so mid-transition levels).
This could happen when group and members are repeatedly toggled at similar times. These "invalid states" could cause wrong "off brightness" levels if transitions are also used.
To fix this, we check after waiting for the polled attributes in async_get_state to see if a transition has started in the meanwhile. If so, the values can be discarded. A new poll will happen later and if using the "members assume group state" config option, the values should already be correct before the polling.
* Enable "group members assume state" config option by default
The config tests are also updated to expect the config option be enabled by default.
For all tests, the config option is generally disabled though:
There are only two group related tests. The one that tests this new feature overrides the config option to be enabled anyway.
The other tests works in a similar way but also "sends" attribute reports, so we want to disable the feature for that test.
(It would also run with it enabled (if the correct CHILD_UPDATE value is patched), but then it would test the same stuff as the other test, hence we're disabling the config option for that test.)
2023-01-16 15:48:18 +00:00
|
|
|
|
|
|
|
|
|
|
|
# Mock the Zigbee OnOff command so group on/off calls "succeed" without a radio.
@patch(
    "zigpy.zcl.clusters.general.OnOff.request",
    new=AsyncMock(return_value=[sentinel.data, zcl_f.Status.SUCCESS]),
)
# Zero out the debouncer delay used when members assume the group's state,
# so member state updates happen synchronously within the test.
@patch(
    "homeassistant.components.zha.light.ASSUME_UPDATE_GROUP_FROM_CHILD_DELAY",
    new=0,
)
async def test_group_member_assume_state(
    hass,
    zigpy_device_mock,
    zha_device_joined,
    coordinator,
    device_light_1,
    device_light_2,
):
    """Test that group members instantly assume the group's on/off state.

    With CONF_GROUP_MEMBERS_ASSUME_STATE enabled, turning a ZHA light group
    on/off must immediately update each member entity's state without waiting
    for attribute reports or polling.
    """
    # Enable the "group members assume state" ZHA config option for this test
    # (it is deliberately disabled in the shared test config).
    with patch_zha_config(
        "light", {(ZHA_OPTIONS, CONF_GROUP_MEMBERS_ASSUME_STATE): True}
    ):
        zha_gateway = get_zha_gateway(hass)
        assert zha_gateway is not None
        # Wire the mock coordinator and light devices into the gateway so
        # group creation and traffic simulation work.
        zha_gateway.coordinator_zha_device = coordinator
        coordinator._zha_gateway = zha_gateway
        device_light_1._zha_gateway = zha_gateway
        device_light_2._zha_gateway = zha_gateway
        member_ieee_addresses = [device_light_1.ieee, device_light_2.ieee]
        # Both devices join the group on endpoint 1.
        members = [
            GroupMember(device_light_1.ieee, 1),
            GroupMember(device_light_2.ieee, 1),
        ]

        assert coordinator.is_coordinator

        # test creating a group with 2 members
        zha_group = await zha_gateway.async_create_zigpy_group("Test Group", members)
        await hass.async_block_till_done()

        assert zha_group is not None
        assert len(zha_group.members) == 2
        for member in zha_group.members:
            assert member.device.ieee in member_ieee_addresses
            assert member.group == zha_group
            assert member.endpoint is not None

        device_1_entity_id = await find_entity_id(Platform.LIGHT, device_light_1, hass)
        device_2_entity_id = await find_entity_id(Platform.LIGHT, device_light_2, hass)

        assert device_1_entity_id != device_2_entity_id

        group_entity_id = async_find_group_entity_id(hass, Platform.LIGHT, zha_group)
        assert hass.states.get(group_entity_id) is not None

        assert device_1_entity_id in zha_group.member_entity_ids
        assert device_2_entity_id in zha_group.member_entity_ids

        group_cluster_on_off = zha_group.endpoint[general.OnOff.cluster_id]

        # Start with traffic disabled so the entities report unavailable.
        await async_enable_traffic(
            hass, [device_light_1, device_light_2], enabled=False
        )
        await async_wait_for_updates(hass)
        # test that the lights were created and that they are unavailable
        assert hass.states.get(group_entity_id).state == STATE_UNAVAILABLE

        # allow traffic to flow through the gateway and device
        await async_enable_traffic(hass, [device_light_1, device_light_2])
        await async_wait_for_updates(hass)

        # test that the lights were created and are off
        group_state = hass.states.get(group_entity_id)
        assert group_state.state == STATE_OFF

        group_cluster_on_off.request.reset_mock()
        await async_shift_time(hass)

        # turn on via UI
        await hass.services.async_call(
            LIGHT_DOMAIN, "turn_on", {"entity_id": group_entity_id}, blocking=True
        )

        # members also instantly assume STATE_ON — no attribute reports were
        # sent, so any ON state must come from the assume-state logic.
        assert hass.states.get(device_1_entity_id).state == STATE_ON
        assert hass.states.get(device_2_entity_id).state == STATE_ON
        assert hass.states.get(group_entity_id).state == STATE_ON

        # turn off via UI
        await hass.services.async_call(
            LIGHT_DOMAIN, "turn_off", {"entity_id": group_entity_id}, blocking=True
        )

        # members also instantly assume STATE_OFF
        assert hass.states.get(device_1_entity_id).state == STATE_OFF
        assert hass.states.get(device_2_entity_id).state == STATE_OFF
        assert hass.states.get(group_entity_id).state == STATE_OFF

        # remove the group and ensure that there is no entity and that the entity registry is cleaned up
        assert zha_gateway.ha_entity_registry.async_get(group_entity_id) is not None
        await zha_gateway.async_remove_zigpy_group(zha_group.group_id)
        assert hass.states.get(group_entity_id) is None
        assert zha_gateway.ha_entity_registry.async_get(group_entity_id) is None
|