commit dc7f0fb21c

@@ -3,7 +3,7 @@
   "name": "Airthings BLE",
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/airthings_ble",
-  "requirements": ["airthings-ble==0.5.2"],
+  "requirements": ["airthings-ble==0.5.3"],
   "dependencies": ["bluetooth"],
   "codeowners": ["@vincegio"],
   "iot_class": "local_polling",

@@ -3,6 +3,7 @@ from __future__ import annotations

 from asyncio import Future
 from collections.abc import Callable, Iterable
+import datetime
 import logging
 import platform
 from typing import TYPE_CHECKING, cast
@@ -21,6 +22,7 @@ from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback as hass_ca
 from homeassistant.exceptions import ConfigEntryNotReady
 from homeassistant.helpers import device_registry as dr, discovery_flow
 from homeassistant.helpers.debounce import Debouncer
+from homeassistant.helpers.event import async_call_later
 from homeassistant.helpers.issue_registry import (
     IssueSeverity,
     async_create_issue,
@@ -33,6 +35,7 @@ from .const import (
     ADAPTER_ADDRESS,
     ADAPTER_HW_VERSION,
     ADAPTER_SW_VERSION,
+    BLUETOOTH_DISCOVERY_COOLDOWN_SECONDS,
     CONF_ADAPTER,
     CONF_DETAILS,
     CONF_PASSIVE,
@@ -40,6 +43,7 @@ from .const import (
     DEFAULT_ADDRESS,
     DOMAIN,
     FALLBACK_MAXIMUM_STALE_ADVERTISEMENT_SECONDS,
+    LINUX_FIRMWARE_LOAD_FALLBACK_SECONDS,
     SOURCE_LOCAL,
     AdapterDetails,
 )
@@ -298,9 +302,17 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
     await async_discover_adapters(hass, discovered_adapters)

     discovery_debouncer = Debouncer(
-        hass, _LOGGER, cooldown=5, immediate=False, function=_async_rediscover_adapters
+        hass,
+        _LOGGER,
+        cooldown=BLUETOOTH_DISCOVERY_COOLDOWN_SECONDS,
+        immediate=False,
+        function=_async_rediscover_adapters,
     )

+    async def _async_call_debouncer(now: datetime.datetime) -> None:
+        """Call the debouncer at a later time."""
+        await discovery_debouncer.async_call()
+
     def _async_trigger_discovery() -> None:
         # There are so many bluetooth adapter models that
         # we check the bus whenever a usb device is plugged in
@@ -310,6 +322,17 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
         # present.
         _LOGGER.debug("Triggering bluetooth usb discovery")
         hass.async_create_task(discovery_debouncer.async_call())
+        # Because it can take 120s for the firmware loader
+        # fallback to timeout we need to wait that plus
+        # the debounce time to ensure we do not miss the
+        # adapter becoming available to DBus since otherwise
+        # we will never see the new adapter until
+        # Home Assistant is restarted
+        async_call_later(
+            hass,
+            BLUETOOTH_DISCOVERY_COOLDOWN_SECONDS + LINUX_FIRMWARE_LOAD_FALLBACK_SECONDS,
+            _async_call_debouncer,
+        )

     cancel = usb.async_register_scan_request_callback(hass, _async_trigger_discovery)
     hass.bus.async_listen_once(

@@ -59,6 +59,15 @@ SCANNER_WATCHDOG_TIMEOUT: Final = 90
 SCANNER_WATCHDOG_INTERVAL: Final = timedelta(seconds=30)


+# When the linux kernel is configured with
+# CONFIG_FW_LOADER_USER_HELPER_FALLBACK it
+# can take up to 120s before the USB device
+# is available if the firmware files
+# are not present
+LINUX_FIRMWARE_LOAD_FALLBACK_SECONDS = 120
+BLUETOOTH_DISCOVERY_COOLDOWN_SECONDS = 5
+
+
 class AdapterDetails(TypedDict, total=False):
     """Adapter details."""

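Annotation: the two bluetooth hunks above debounce adapter rediscovery and then
retry once more after the kernel firmware-load fallback window, so an adapter
whose firmware only settles after 120s is still discovered. A minimal asyncio
sketch of that retry pattern (function names here are illustrative stand-ins,
not the Home Assistant APIs):

    import asyncio

    BLUETOOTH_DISCOVERY_COOLDOWN_SECONDS = 5
    LINUX_FIRMWARE_LOAD_FALLBACK_SECONDS = 120

    async def rediscover_adapters() -> None:
        print("rescanning bluetooth adapters")  # stand-in for the real rescan

    async def on_usb_scan_request() -> None:
        # First attempt after the normal discovery cooldown ...
        await asyncio.sleep(BLUETOOTH_DISCOVERY_COOLDOWN_SECONDS)
        await rediscover_adapters()
        # ... and a second attempt after the firmware-load fallback window,
        # so a late-appearing adapter is not missed until the next restart.
        await asyncio.sleep(LINUX_FIRMWARE_LOAD_FALLBACK_SECONDS)
        await rediscover_adapters()

    asyncio.run(on_usb_scan_request())
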
@@ -7,7 +7,7 @@
   "quality_scale": "internal",
   "requirements": [
     "bleak==0.19.2",
-    "bleak-retry-connector==2.8.3",
+    "bleak-retry-connector==2.8.4",
     "bluetooth-adapters==0.7.0",
     "bluetooth-auto-recovery==0.3.6",
     "dbus-fast==1.61.1"

@@ -13,6 +13,7 @@ from aioesphomeapi import (
     BLEConnectionError,
 )
 from aioesphomeapi.connection import APIConnectionError, TimeoutAPIError
+from aioesphomeapi.core import BluetoothGATTAPIError
 import async_timeout
 from bleak.backends.characteristic import BleakGATTCharacteristic
 from bleak.backends.client import BaseBleakClient, NotifyCallback
@@ -83,6 +84,24 @@ def api_error_as_bleak_error(func: _WrapFuncType) -> _WrapFuncType:
             return await func(self, *args, **kwargs)
         except TimeoutAPIError as err:
             raise asyncio.TimeoutError(str(err)) from err
+        except BluetoothGATTAPIError as ex:
+            # If the device disconnects in the middle of an operation
+            # be sure to mark it as disconnected so any library using
+            # the proxy knows to reconnect.
+            #
+            # Because callbacks are delivered asynchronously it's possible
+            # that we find out about the disconnection during the operation
+            # before the callback is delivered.
+            if ex.error.error == -1:
+                _LOGGER.debug(
+                    "%s: %s - %s: BLE device disconnected during %s operation",
+                    self._source,  # pylint: disable=protected-access
+                    self._ble_device.name,  # pylint: disable=protected-access
+                    self._ble_device.address,  # pylint: disable=protected-access
+                    func.__name__,
+                )
+                self._async_ble_device_disconnected()  # pylint: disable=protected-access
+            raise BleakError(str(ex)) from ex
         except APIConnectionError as err:
             raise BleakError(str(err)) from err

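Annotation: the esphome hunk above extends an error-translation decorator so
GATT errors also surface as BleakError and a mid-operation disconnect is
recorded. A self-contained sketch of the decorator pattern, with stand-in
exception classes rather than the real aioesphomeapi/bleak types:

    import asyncio
    from functools import wraps

    class APIConnectionError(Exception): ...
    class TimeoutAPIError(APIConnectionError): ...
    class BleakError(Exception): ...

    def api_error_as_bleak_error(func):
        @wraps(func)
        async def _wrapper(*args, **kwargs):
            try:
                return await func(*args, **kwargs)
            except TimeoutAPIError as err:
                raise asyncio.TimeoutError(str(err)) from err
            except APIConnectionError as err:
                # Disconnect bookkeeping would happen here before
                # re-raising, as in the hunk above.
                raise BleakError(str(err)) from err
        return _wrapper
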
@@ -84,6 +84,7 @@ FIBARO_TYPEMAP = {
     "com.fibaro.thermostatDanfoss": Platform.CLIMATE,
     "com.fibaro.doorLock": Platform.LOCK,
     "com.fibaro.binarySensor": Platform.BINARY_SENSOR,
+    "com.fibaro.accelerometer": Platform.BINARY_SENSOR,
 }

 DEVICE_CONFIG_SCHEMA_ENTRY = vol.Schema(

@@ -1,6 +1,8 @@
 """Support for Fibaro binary sensors."""
 from __future__ import annotations

+from collections.abc import Mapping
 import json
+from typing import Any

 from homeassistant.components.binary_sensor import (
@@ -28,6 +30,11 @@ SENSOR_TYPES = {
     "com.fibaro.smokeSensor": ["Smoke", "mdi:smoking", BinarySensorDeviceClass.SMOKE],
     "com.fibaro.FGMS001": ["Motion", "mdi:run", BinarySensorDeviceClass.MOTION],
     "com.fibaro.heatDetector": ["Heat", "mdi:fire", BinarySensorDeviceClass.HEAT],
+    "com.fibaro.accelerometer": [
+        "Moving",
+        "mdi:axis-arrow",
+        BinarySensorDeviceClass.MOVING,
+    ],
 }

@@ -55,15 +62,50 @@ class FibaroBinarySensor(FibaroDevice, BinarySensorEntity):
         """Initialize the binary_sensor."""
         super().__init__(fibaro_device)
         self.entity_id = ENTITY_ID_FORMAT.format(self.ha_id)
-        stype = None
+        self._own_extra_state_attributes: Mapping[str, Any] = {}
+        self._fibaro_sensor_type = None
         if fibaro_device.type in SENSOR_TYPES:
-            stype = fibaro_device.type
+            self._fibaro_sensor_type = fibaro_device.type
         elif fibaro_device.baseType in SENSOR_TYPES:
-            stype = fibaro_device.baseType
-        if stype:
-            self._attr_device_class = SENSOR_TYPES[stype][2]
-            self._attr_icon = SENSOR_TYPES[stype][1]
+            self._fibaro_sensor_type = fibaro_device.baseType
+        if self._fibaro_sensor_type:
+            self._attr_device_class = SENSOR_TYPES[self._fibaro_sensor_type][2]
+            self._attr_icon = SENSOR_TYPES[self._fibaro_sensor_type][1]
+
+    @property
+    def extra_state_attributes(self) -> Mapping[str, Any] | None:
+        """Return the extra state attributes of the device."""
+        return super().extra_state_attributes | self._own_extra_state_attributes

     def update(self) -> None:
         """Get the latest data and update the state."""
-        self._attr_is_on = self.current_binary_state
+        if self._fibaro_sensor_type == "com.fibaro.accelerometer":
+            # Accelerator sensors have values for the three axis x, y and z
+            moving_values = self._get_moving_values()
+            self._attr_is_on = self._is_moving(moving_values)
+            self._own_extra_state_attributes = self._get_xyz_moving(moving_values)
+        else:
+            self._attr_is_on = self.current_binary_state
+
+    def _get_xyz_moving(self, moving_values: Mapping[str, Any]) -> Mapping[str, Any]:
+        """Return x y z values of the accelerator sensor value."""
+        attrs = {}
+        for axis_name in ("x", "y", "z"):
+            attrs[axis_name] = float(moving_values[axis_name])
+        return attrs
+
+    def _is_moving(self, moving_values: Mapping[str, Any]) -> bool:
+        """Return that a moving is detected when one axis reports a value."""
+        for axis_name in ("x", "y", "z"):
+            if float(moving_values[axis_name]) != 0:
+                return True
+        return False
+
+    def _get_moving_values(self) -> Mapping[str, Any]:
+        """Get the moving values of the accelerator sensor in a dict."""
+        value = self.fibaro_device.properties.value
+        if isinstance(value, str):
+            # HC2 returns dict as str
+            return json.loads(value)
+        # HC3 returns a real dict
+        return value

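Annotation: the fibaro change reads accelerometer values that arrive either as
a JSON string (HC2) or a real dict (HC3), and reports movement when any axis is
non-zero. A runnable sketch of that decision, independent of the integration:

    import json

    def is_moving(moving_values: dict) -> bool:
        """Any non-zero axis value counts as movement."""
        return any(float(moving_values[axis]) != 0 for axis in ("x", "y", "z"))

    hc2_value = '{"x": 0.0, "y": 3.5, "z": 0.0}'  # HC2: dict serialized as str
    hc3_value = {"x": 0.0, "y": 0.0, "z": 0.0}    # HC3: real dict

    assert is_moving(json.loads(hc2_value)) is True
    assert is_moving(hc3_value) is False
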
@@ -17,7 +17,7 @@ DEFAULT_NAME = "Flume Sensor"

 # Flume API limits individual endpoints to 120 queries per hour
 NOTIFICATION_SCAN_INTERVAL = timedelta(minutes=1)
-DEVICE_SCAN_INTERVAL = timedelta(minutes=1)
+DEVICE_SCAN_INTERVAL = timedelta(minutes=5)
 DEVICE_CONNECTION_SCAN_INTERVAL = timedelta(minutes=1)

 _LOGGER = logging.getLogger(__package__)

@@ -20,7 +20,7 @@ start_application:
     device:
       integration: fully_kiosk
  fields:
-    url:
+    application:
      name: Application
      description: Package name of the application to start.
      example: "de.ozerov.fully"

@@ -10,7 +10,7 @@ from typing import Any

 from gcal_sync.api import GoogleCalendarService, ListEventsRequest, SyncEventsRequest
 from gcal_sync.exceptions import ApiException
-from gcal_sync.model import DateOrDatetime, Event
+from gcal_sync.model import AccessRole, DateOrDatetime, Event
 from gcal_sync.store import ScopedCalendarStore
 from gcal_sync.sync import CalendarEventSyncManager
 from gcal_sync.timeline import Timeline
@@ -198,7 +198,13 @@ async def async_setup_entry(
             entity_entry.entity_id,
         )
     coordinator: CalendarSyncUpdateCoordinator | CalendarQueryUpdateCoordinator
-    if search := data.get(CONF_SEARCH):
+    # Prefer calendar sync down of resources when possible. However, sync does not work
+    # for search. Also free-busy calendars denormalize recurring events as individual
+    # events which is not efficient for sync
+    if (
+        search := data.get(CONF_SEARCH)
+        or calendar_item.access_role == AccessRole.FREE_BUSY_READER
+    ):
         coordinator = CalendarQueryUpdateCoordinator(
             hass,
             calendar_service,

@@ -4,7 +4,7 @@
   "config_flow": true,
   "dependencies": ["application_credentials"],
   "documentation": "https://www.home-assistant.io/integrations/calendar.google/",
-  "requirements": ["gcal-sync==4.0.0", "oauth2client==4.1.3"],
+  "requirements": ["gcal-sync==4.0.2", "oauth2client==4.1.3"],
   "codeowners": ["@allenporter"],
   "iot_class": "cloud_polling",
   "loggers": ["googleapiclient"]

@@ -7,6 +7,7 @@ import aiohttp
 from google.auth.exceptions import RefreshError
 from google.oauth2.credentials import Credentials
 from gspread import Client
+from gspread.utils import ValueInputOption
 import voluptuous as vol

 from homeassistant.config_entries import ConfigEntry, ConfigEntryState
@@ -100,7 +101,7 @@ async def async_setup_service(hass: HomeAssistant) -> None:
                 columns.append(key)
                 worksheet.update_cell(1, len(columns), key)
             row.append(value)
-        worksheet.append_row(row)
+        worksheet.append_row(row, value_input_option=ValueInputOption.user_entered)

     async def append_to_sheet(call: ServiceCall) -> None:
         """Append new line of data to a Google Sheets document."""

@@ -3,7 +3,7 @@
   "name": "Growatt",
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/growatt_server/",
-  "requirements": ["growattServer==1.2.3"],
+  "requirements": ["growattServer==1.2.4"],
   "codeowners": ["@indykoning", "@muppet3000", "@JasperPlant"],
   "iot_class": "cloud_polling",
   "loggers": ["growattServer"]

@@ -32,7 +32,7 @@ from .sensor_types.total import TOTAL_SENSOR_TYPES

 _LOGGER = logging.getLogger(__name__)

-SCAN_INTERVAL = datetime.timedelta(minutes=1)
+SCAN_INTERVAL = datetime.timedelta(minutes=5)


 def get_device_list(api, config):
@@ -159,7 +159,7 @@ class GrowattInverter(SensorEntity):
     @property
     def native_value(self):
         """Return the state of the sensor."""
-        result = self.probe.get_data(self.entity_description.api_key)
+        result = self.probe.get_data(self.entity_description)
         if self.entity_description.precision is not None:
             result = round(result, self.entity_description.precision)
         return result
@@ -168,7 +168,7 @@ class GrowattInverter(SensorEntity):
     def native_unit_of_measurement(self) -> str | None:
         """Return the unit of measurement of the sensor, if any."""
         if self.entity_description.currency:
-            return self.probe.get_data("currency")
+            return self.probe.get_currency()
         return super().native_unit_of_measurement

     def update(self) -> None:
@@ -187,6 +187,7 @@ class GrowattData:
         self.device_id = device_id
         self.plant_id = None
         self.data = {}
+        self.previous_values = {}
         self.username = username
         self.password = password

@@ -254,9 +255,61 @@ class GrowattData:
                     **mix_detail,
                     **dashboard_values_for_mix,
                 }
+            _LOGGER.debug(
+                "Finished updating data for %s (%s)",
+                self.device_id,
+                self.growatt_type,
+            )
         except json.decoder.JSONDecodeError:
             _LOGGER.error("Unable to fetch data from Growatt server")

-    def get_data(self, variable):
+    def get_currency(self):
+        """Get the currency."""
+        return self.data.get("currency")
+
+    def get_data(self, entity_description):
         """Get the data."""
-        return self.data.get(variable)
+        _LOGGER.debug(
+            "Data request for: %s",
+            entity_description.name,
+        )
+        variable = entity_description.api_key
+        api_value = self.data.get(variable)
+        previous_value = self.previous_values.get(variable)
+        return_value = api_value
+
+        # If we have a 'drop threshold' specified, then check it and correct if needed
+        if (
+            entity_description.previous_value_drop_threshold is not None
+            and previous_value is not None
+            and api_value is not None
+        ):
+            _LOGGER.debug(
+                "%s - Drop threshold specified (%s), checking for drop... API Value: %s, Previous Value: %s",
+                entity_description.name,
+                entity_description.previous_value_drop_threshold,
+                api_value,
+                previous_value,
+            )
+            diff = float(api_value) - float(previous_value)
+
+            # Check if the value has dropped (negative value i.e. < 0) and it has only dropped by a
+            # small amount, if so, use the previous value.
+            # Note - The energy dashboard takes care of drops within 10% of the current value,
+            # however if the value is low e.g. 0.2 and drops by 0.1 it classes as a reset.
+            if -(entity_description.previous_value_drop_threshold) <= diff < 0:
+                _LOGGER.debug(
+                    "Diff is negative, but only by a small amount therefore not a nightly reset, "
+                    "using previous value (%s) instead of api value (%s)",
+                    previous_value,
+                    api_value,
+                )
+                return_value = previous_value
+            else:
+                _LOGGER.debug(
+                    "%s - No drop detected, using API value", entity_description.name
+                )
+
+        self.previous_values[variable] = return_value
+
+        return return_value

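Annotation: the growatt `previous_value_drop_threshold` logic above smooths
small spurious dips while still letting real nightly resets through. The same
rule as a standalone function with worked values (0.2 kWh threshold, matching
the mix sensor description below):

    def correct_for_drop(api_value, previous_value, drop_threshold):
        """Keep the previous value when the API dips by at most the threshold."""
        if previous_value is None or drop_threshold is None or api_value is None:
            return api_value
        diff = float(api_value) - float(previous_value)
        if -drop_threshold <= diff < 0:
            return previous_value  # small dip: treat as API noise
        return api_value  # rise, or a big drop (a genuine reset)

    assert correct_for_drop(0.1, 0.2, 0.2) == 0.2  # 0.1 dip is smoothed away
    assert correct_for_drop(0.0, 5.0, 0.2) == 0.0  # large drop: real reset
    assert correct_for_drop(5.1, 5.0, 0.2) == 5.1  # increases pass through
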
@@ -241,5 +241,6 @@ MIX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
         native_unit_of_measurement=ENERGY_KILO_WATT_HOUR,
         device_class=SensorDeviceClass.ENERGY,
         state_class=SensorStateClass.TOTAL_INCREASING,
+        previous_value_drop_threshold=0.2,
     ),
 )

@@ -19,3 +19,4 @@ class GrowattSensorEntityDescription(SensorEntityDescription, GrowattRequiredKey

     precision: int | None = None
     currency: bool = False
+    previous_value_drop_threshold: float | None = None

@@ -23,6 +23,9 @@ from homeassistant.components.binary_sensor import (
     BinarySensorDeviceClass,
 )
 from homeassistant.components.camera import DOMAIN as CAMERA_DOMAIN
+from homeassistant.components.device_automation.trigger import (
+    async_validate_trigger_config,
+)
 from homeassistant.components.http import HomeAssistantView
 from homeassistant.components.humidifier import DOMAIN as HUMIDIFIER_DOMAIN
 from homeassistant.components.network import MDNS_TARGET_IP
@@ -906,29 +909,47 @@ class HomeKit:
         self.bridge = HomeBridge(self.hass, self.driver, self._name)
         for state in entity_states:
             self.add_bridge_accessory(state)
-        dev_reg = device_registry.async_get(self.hass)
         if self._devices:
-            valid_device_ids = []
-            for device_id in self._devices:
-                if not dev_reg.async_get(device_id):
-                    _LOGGER.warning(
-                        "HomeKit %s cannot add device %s because it is missing from the device registry",
-                        self._name,
-                        device_id,
-                    )
-                else:
-                    valid_device_ids.append(device_id)
-            for device_id, device_triggers in (
-                await device_automation.async_get_device_automations(
-                    self.hass,
-                    device_automation.DeviceAutomationType.TRIGGER,
-                    valid_device_ids,
-                )
-            ).items():
-                if device := dev_reg.async_get(device_id):
-                    self.add_bridge_triggers_accessory(device, device_triggers)
+            await self._async_add_trigger_accessories()
         return self.bridge

+    async def _async_add_trigger_accessories(self) -> None:
+        """Add devices with triggers to the bridge."""
+        dev_reg = device_registry.async_get(self.hass)
+        valid_device_ids = []
+        for device_id in self._devices:
+            if not dev_reg.async_get(device_id):
+                _LOGGER.warning(
+                    "HomeKit %s cannot add device %s because it is missing from the device registry",
+                    self._name,
+                    device_id,
+                )
+            else:
+                valid_device_ids.append(device_id)
+        for device_id, device_triggers in (
+            await device_automation.async_get_device_automations(
+                self.hass,
+                device_automation.DeviceAutomationType.TRIGGER,
+                valid_device_ids,
+            )
+        ).items():
+            device = dev_reg.async_get(device_id)
+            assert device is not None
+            valid_device_triggers: list[dict[str, Any]] = []
+            for trigger in device_triggers:
+                try:
+                    await async_validate_trigger_config(self.hass, trigger)
+                except vol.Invalid as ex:
+                    _LOGGER.debug(
+                        "%s: cannot add unsupported trigger %s because it requires additional inputs which are not supported by HomeKit: %s",
+                        self._name,
+                        trigger,
+                        ex,
+                    )
+                    continue
+                valid_device_triggers.append(trigger)
+            self.add_bridge_triggers_accessory(device, valid_device_triggers)
+
     async def _async_create_accessories(self) -> bool:
         """Create the accessories."""
         assert self.driver is not None

@@ -653,7 +653,7 @@ class HomeIIDManager(IIDManager):  # type: ignore[misc]
         """Get IID for object."""
         aid = obj.broker.aid
         if isinstance(obj, Characteristic):
-            service = obj.service
+            service: Service = obj.service
             iid = self._iid_storage.get_or_allocate_iid(
                 aid, service.type_id, service.unique_id, obj.type_id, obj.unique_id
             )

@@ -67,13 +67,16 @@ def _get_accessory_diagnostics(
     hass: HomeAssistant, accessory: HomeAccessory
 ) -> dict[str, Any]:
     """Return diagnostics for an accessory."""
-    return {
+    entity_state = None
+    if accessory.entity_id:
+        entity_state = hass.states.get(accessory.entity_id)
+    data = {
         "aid": accessory.aid,
         "config": accessory.config,
         "category": accessory.category,
         "name": accessory.display_name,
         "entity_id": accessory.entity_id,
-        "entity_state": async_redact_data(
-            hass.states.get(accessory.entity_id), TO_REDACT
-        ),
     }
+    if entity_state:
+        data["entity_state"] = async_redact_data(entity_state, TO_REDACT)
+    return data

@@ -7,9 +7,11 @@ from typing import Any
 from pyhap.const import CATEGORY_SENSOR

 from homeassistant.core import CALLBACK_TYPE, Context
+from homeassistant.helpers import entity_registry
 from homeassistant.helpers.trigger import async_initialize_triggers

 from .accessories import TYPES, HomeAccessory
+from .aidmanager import get_system_unique_id
 from .const import (
     CHAR_NAME,
     CHAR_PROGRAMMABLE_SWITCH_EVENT,
@@ -18,6 +20,7 @@ from .const import (
     SERV_SERVICE_LABEL,
     SERV_STATELESS_PROGRAMMABLE_SWITCH,
 )
+from .util import cleanup_name_for_homekit

 _LOGGER = logging.getLogger(__name__)

@@ -39,13 +42,22 @@ class DeviceTriggerAccessory(HomeAccessory):
         self._remove_triggers: CALLBACK_TYPE | None = None
         self.triggers = []
         assert device_triggers is not None
+        ent_reg = entity_registry.async_get(self.hass)
         for idx, trigger in enumerate(device_triggers):
-            type_ = trigger["type"]
-            subtype = trigger.get("subtype")
+            type_: str = trigger["type"]
+            subtype: str | None = trigger.get("subtype")
             unique_id = f'{type_}-{subtype or ""}'
-            trigger_name = (
-                f"{type_.title()} {subtype.title()}" if subtype else type_.title()
-            )
+            if (entity_id := trigger.get("entity_id")) and (
+                entry := ent_reg.async_get(entity_id)
+            ):
+                unique_id += f"-entity_unique_id:{get_system_unique_id(entry)}"
+            trigger_name_parts = []
+            if entity_id and (state := self.hass.states.get(entity_id)):
+                trigger_name_parts.append(state.name)
+            trigger_name_parts.append(type_.replace("_", " ").title())
+            if subtype:
+                trigger_name_parts.append(subtype.replace("_", " ").title())
+            trigger_name = cleanup_name_for_homekit(" ".join(trigger_name_parts))
             serv_stateless_switch = self.add_preload_service(
                 SERV_STATELESS_PROGRAMMABLE_SWITCH,
                 [CHAR_NAME, CHAR_SERVICE_LABEL_INDEX],

@@ -209,6 +209,7 @@ class HomeKitHeaterCoolerEntity(HomeKitBaseClimateEntity):
         )
         await self.async_put_characteristics(
             {
+                CharacteristicsTypes.ACTIVE: ActivationStateValues.ACTIVE,
                 CharacteristicsTypes.TARGET_HEATER_COOLER_STATE: TARGET_HEATER_COOLER_STATE_HASS_TO_HOMEKIT[
                     hvac_mode
                 ],

@@ -3,7 +3,7 @@
   "name": "HomeKit Controller",
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/homekit_controller",
-  "requirements": ["aiohomekit==2.2.18"],
+  "requirements": ["aiohomekit==2.2.19"],
   "zeroconf": ["_hap._tcp.local.", "_hap._udp.local."],
   "bluetooth": [{ "manufacturer_id": 76, "manufacturer_data_start": [6] }],
   "dependencies": ["bluetooth", "zeroconf"],

@@ -4,7 +4,7 @@
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/huawei_lte",
   "requirements": [
-    "huawei-lte-api==1.6.3",
+    "huawei-lte-api==1.6.7",
     "stringcase==1.2.0",
     "url-normalize==1.4.3"
   ],

@@ -396,7 +396,11 @@ class IBeaconCoordinator:
                 )
                 continue

-            if service_info.rssi != ibeacon_advertisement.rssi:
+            if (
+                service_info.rssi != ibeacon_advertisement.rssi
+                or service_info.source != ibeacon_advertisement.source
+            ):
+                ibeacon_advertisement.source = service_info.source
                 ibeacon_advertisement.update_rssi(service_info.rssi)
                 async_dispatcher_send(
                     self.hass,

@@ -3,11 +3,14 @@
 from datetime import timedelta
 import logging

+from aiohttp import ClientTimeout
+
 DOMAIN = "life360"
 LOGGER = logging.getLogger(__package__)

 ATTRIBUTION = "Data provided by life360.com"
-COMM_TIMEOUT = 10
 COMM_MAX_RETRIES = 3
+COMM_TIMEOUT = ClientTimeout(sock_connect=15, total=60)
 SPEED_FACTOR_MPH = 2.25
 SPEED_DIGITS = 1
 UPDATE_INTERVAL = timedelta(seconds=10)

@@ -26,6 +26,7 @@ from homeassistant.util.unit_conversion import DistanceConverter
 from homeassistant.util.unit_system import METRIC_SYSTEM

 from .const import (
+    COMM_MAX_RETRIES,
     COMM_TIMEOUT,
     CONF_AUTHORIZATION,
     DOMAIN,
@@ -106,6 +107,7 @@ class Life360DataUpdateCoordinator(DataUpdateCoordinator[Life360Data]):
         self._api = Life360(
             session=async_get_clientsession(hass),
             timeout=COMM_TIMEOUT,
+            max_retries=COMM_MAX_RETRIES,
             authorization=entry.data[CONF_AUTHORIZATION],
         )
         self._missing_loc_reason = hass.data[DOMAIN].missing_loc_reason

@@ -4,7 +4,7 @@
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/life360",
   "codeowners": ["@pnbruckner"],
-  "requirements": ["life360==5.1.1"],
+  "requirements": ["life360==5.3.0"],
   "iot_class": "cloud_polling",
   "loggers": ["life360"]
 }

@@ -16,6 +16,7 @@ from homeassistant.components.light import (
     ATTR_BRIGHTNESS,
     ATTR_BRIGHTNESS_PCT,
     ATTR_COLOR_NAME,
+    ATTR_COLOR_TEMP,
     ATTR_COLOR_TEMP_KELVIN,
     ATTR_HS_COLOR,
     ATTR_KELVIN,
@@ -114,6 +115,12 @@ def find_hsbk(hass: HomeAssistant, **kwargs: Any) -> list[float | int | None] |
         kelvin = kwargs.pop(ATTR_KELVIN)
         saturation = 0

+    if ATTR_COLOR_TEMP in kwargs:
+        kelvin = color_util.color_temperature_mired_to_kelvin(
+            kwargs.pop(ATTR_COLOR_TEMP)
+        )
+        saturation = 0
+
     if ATTR_COLOR_TEMP_KELVIN in kwargs:
         kelvin = kwargs.pop(ATTR_COLOR_TEMP_KELVIN)
         saturation = 0

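Annotation: the lifx hunk converts a mired color temperature to kelvin before
building the HSBK tuple. The conversion is just the reciprocal scaled by one
million; a simplified re-implementation of the helper used above (the real one
lives in homeassistant.util.color):

    def color_temperature_mired_to_kelvin(mired: float) -> int:
        """Mireds are one million divided by the temperature in kelvin."""
        return int(1000000 / mired)

    assert color_temperature_mired_to_kelvin(250) == 4000
    assert color_temperature_mired_to_kelvin(153) == 6535
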
@@ -196,19 +196,19 @@ class MqttUpdate(MqttEntity, UpdateEntity, RestoreEntity):
             self._attr_latest_version = json_payload["latest_version"]
             get_mqtt_data(self.hass).state_write_requests.write_state_request(self)

-        if CONF_TITLE in json_payload and not self._attr_title:
+        if CONF_TITLE in json_payload:
             self._attr_title = json_payload[CONF_TITLE]
             get_mqtt_data(self.hass).state_write_requests.write_state_request(self)

-        if CONF_RELEASE_SUMMARY in json_payload and not self._attr_release_summary:
+        if CONF_RELEASE_SUMMARY in json_payload:
             self._attr_release_summary = json_payload[CONF_RELEASE_SUMMARY]
             get_mqtt_data(self.hass).state_write_requests.write_state_request(self)

-        if CONF_RELEASE_URL in json_payload and not self._attr_release_url:
+        if CONF_RELEASE_URL in json_payload:
             self._attr_release_url = json_payload[CONF_RELEASE_URL]
             get_mqtt_data(self.hass).state_write_requests.write_state_request(self)

-        if CONF_ENTITY_PICTURE in json_payload and not self._entity_picture:
+        if CONF_ENTITY_PICTURE in json_payload:
             self._entity_picture = json_payload[CONF_ENTITY_PICTURE]
             get_mqtt_data(self.hass).state_write_requests.write_state_request(self)

@@ -378,7 +378,7 @@ class NexiaZone(NexiaThermostatZoneEntity, ClimateEntity):

     async def async_turn_aux_heat_on(self) -> None:
         """Turn Aux Heat on."""
-        self._thermostat.set_emergency_heat(True)
+        await self._thermostat.set_emergency_heat(True)
         self._signal_thermostat_update()

     async def async_turn_off(self) -> None:

@@ -8,7 +8,7 @@ import datetime as dt

 from httpx import RemoteProtocolError, TransportError
 from onvif import ONVIFCamera, ONVIFService
-from zeep.exceptions import Fault
+from zeep.exceptions import Fault, XMLParseError

 from homeassistant.core import CALLBACK_TYPE, CoreState, HomeAssistant, callback
 from homeassistant.helpers.event import async_call_later
@@ -20,6 +20,7 @@ from .parsers import PARSERS

 UNHANDLED_TOPICS = set()
 SUBSCRIPTION_ERRORS = (
+    XMLParseError,
     Fault,
     asyncio.TimeoutError,
     TransportError,
@@ -153,7 +154,8 @@ class EventManager:
             .isoformat(timespec="seconds")
             .replace("+00:00", "Z")
         )
-        await self._subscription.Renew(termination_time)
+        with suppress(*SUBSCRIPTION_ERRORS):
+            await self._subscription.Renew(termination_time)

     def async_schedule_pull(self) -> None:
         """Schedule async_pull_messages to run."""

@@ -48,7 +48,8 @@
       "onvif_devices": {
         "data": {
           "extra_arguments": "Extra FFMPEG arguments",
-          "rtsp_transport": "RTSP transport mechanism"
+          "rtsp_transport": "RTSP transport mechanism",
+          "use_wallclock_as_timestamps": "Use wall clock as timestamps"
         },
         "title": "ONVIF Device Options"
       }

@@ -48,7 +48,8 @@
       "onvif_devices": {
         "data": {
           "extra_arguments": "Extra FFMPEG arguments",
-          "rtsp_transport": "RTSP transport mechanism"
+          "rtsp_transport": "RTSP transport mechanism",
+          "use_wallclock_as_timestamps": "Use wall clock as timestamps"
         },
         "title": "ONVIF Device Options"
       }

@@ -8,7 +8,7 @@
       "manufacturer_id": 220
     }
   ],
-  "requirements": ["oralb-ble==0.13.0"],
+  "requirements": ["oralb-ble==0.14.2"],
   "dependencies": ["bluetooth"],
   "codeowners": ["@bdraco"],
   "iot_class": "local_push"

@@ -2,7 +2,7 @@
   "domain": "recorder",
   "name": "Recorder",
   "documentation": "https://www.home-assistant.io/integrations/recorder",
-  "requirements": ["sqlalchemy==1.4.42", "fnvhash==0.1.0"],
+  "requirements": ["sqlalchemy==1.4.44", "fnvhash==0.1.0"],
   "codeowners": ["@home-assistant/core"],
   "quality_scale": "internal",
   "iot_class": "local_push",

@@ -1216,11 +1216,29 @@ def _get_max_mean_min_statistic(
     return result


+def _first_statistic(
+    session: Session,
+    table: type[Statistics | StatisticsShortTerm],
+    metadata_id: int,
+) -> datetime | None:
+    """Return the data of the oldest statistic row for a given metadata id."""
+    stmt = lambda_stmt(
+        lambda: select(table.start)
+        .filter(table.metadata_id == metadata_id)
+        .order_by(table.start.asc())
+        .limit(1)
+    )
+    if stats := execute_stmt_lambda_element(session, stmt):
+        return process_timestamp(stats[0].start)  # type: ignore[no-any-return]
+    return None
+
+
 def _get_oldest_sum_statistic(
     session: Session,
     head_start_time: datetime | None,
     main_start_time: datetime | None,
     tail_start_time: datetime | None,
+    oldest_stat: datetime | None,
     tail_only: bool,
     metadata_id: int,
 ) -> float | None:
@@ -1231,10 +1249,10 @@ def _get_oldest_sum_statistic(
         start_time: datetime | None,
         table: type[Statistics | StatisticsShortTerm],
         metadata_id: int,
-    ) -> tuple[float | None, datetime | None]:
+    ) -> float | None:
         """Return the oldest non-NULL sum during the period."""
         stmt = lambda_stmt(
-            lambda: select(table.sum, table.start)
+            lambda: select(table.sum)
             .filter(table.metadata_id == metadata_id)
             .filter(table.sum.is_not(None))
             .order_by(table.start.asc())
@@ -1248,49 +1266,49 @@ def _get_oldest_sum_statistic(
         else:
             period = start_time.replace(minute=0, second=0, microsecond=0)
             prev_period = period - table.duration
-            stmt += lambda q: q.filter(table.start == prev_period)
+            stmt += lambda q: q.filter(table.start >= prev_period)
         stats = execute_stmt_lambda_element(session, stmt)
-        return (
-            (stats[0].sum, process_timestamp(stats[0].start)) if stats else (None, None)
-        )
+        return stats[0].sum if stats else None

-    oldest_start: datetime | None
-    oldest_sum: float | None = None
-
-    if head_start_time is not None:
-        oldest_sum, oldest_start = _get_oldest_sum_statistic_in_sub_period(
-            session, head_start_time, StatisticsShortTerm, metadata_id
+    # This function won't be called if tail_only is False and main_start_time is None
+    # the extra checks are added to satisfy MyPy
+    if not tail_only and main_start_time is not None and oldest_stat is not None:
+        period = main_start_time.replace(minute=0, second=0, microsecond=0)
+        prev_period = period - Statistics.duration
+        if prev_period < oldest_stat:
+            return 0
+
+    if (
+        head_start_time is not None
+        and (
+            oldest_sum := _get_oldest_sum_statistic_in_sub_period(
+                session, head_start_time, StatisticsShortTerm, metadata_id
+            )
         )
-        if (
-            oldest_start is not None
-            and oldest_start < head_start_time
-            and oldest_sum is not None
-        ):
-            return oldest_sum
+        is not None
+    ):
+        return oldest_sum

     if not tail_only:
         assert main_start_time is not None
-        oldest_sum, oldest_start = _get_oldest_sum_statistic_in_sub_period(
-            session, main_start_time, Statistics, metadata_id
-        )
-        if (
-            oldest_start is not None
-            and oldest_start < main_start_time
-            and oldest_sum is not None
-        ):
+        if (
+            oldest_sum := _get_oldest_sum_statistic_in_sub_period(
+                session, main_start_time, Statistics, metadata_id
+            )
+        ) is not None:
             return oldest_sum
         return 0

-    if tail_start_time is not None:
-        oldest_sum, oldest_start = _get_oldest_sum_statistic_in_sub_period(
-            session, tail_start_time, StatisticsShortTerm, metadata_id
+    if (
+        tail_start_time is not None
+        and (
+            oldest_sum := _get_oldest_sum_statistic_in_sub_period(
+                session, tail_start_time, StatisticsShortTerm, metadata_id
+            )
         )
-        if (
-            oldest_start is not None
-            and oldest_start < tail_start_time
-            and oldest_sum is not None
-        ):
-            return oldest_sum
+        is not None
+    ):
+        return oldest_sum

     return 0
@@ -1373,51 +1391,79 @@ def statistic_during_period(

     result: dict[str, Any] = {}

-    # To calculate the summary, data from the statistics (hourly) and short_term_statistics
-    # (5 minute) tables is combined
-    # - The short term statistics table is used for the head and tail of the period,
-    #   if the period it doesn't start or end on a full hour
-    # - The statistics table is used for the remainder of the time
-    now = dt_util.utcnow()
-    if end_time is not None and end_time > now:
-        end_time = now
-
-    tail_only = (
-        start_time is not None
-        and end_time is not None
-        and end_time - start_time < timedelta(hours=1)
-    )
-
-    # Calculate the head period
-    head_start_time: datetime | None = None
-    head_end_time: datetime | None = None
-    if not tail_only and start_time is not None and start_time.minute:
-        head_start_time = start_time
-        head_end_time = start_time.replace(
-            minute=0, second=0, microsecond=0
-        ) + timedelta(hours=1)
-
-    # Calculate the tail period
-    tail_start_time: datetime | None = None
-    tail_end_time: datetime | None = None
-    if end_time is None:
-        tail_start_time = now.replace(minute=0, second=0, microsecond=0)
-    elif end_time.minute:
-        tail_start_time = (
-            start_time
-            if tail_only
-            else end_time.replace(minute=0, second=0, microsecond=0)
-        )
-        tail_end_time = end_time
-
-    # Calculate the main period
-    main_start_time: datetime | None = None
-    main_end_time: datetime | None = None
-    if not tail_only:
-        main_start_time = start_time if head_end_time is None else head_end_time
-        main_end_time = end_time if tail_start_time is None else tail_start_time
-
     with session_scope(hass=hass) as session:
+        # Fetch metadata for the given statistic_id
+        if not (
+            metadata := get_metadata_with_session(session, statistic_ids=[statistic_id])
+        ):
+            return result
+
+        metadata_id = metadata[statistic_id][0]
+
+        oldest_stat = _first_statistic(session, Statistics, metadata_id)
+        oldest_5_min_stat = None
+        if not valid_statistic_id(statistic_id):
+            oldest_5_min_stat = _first_statistic(
+                session, StatisticsShortTerm, metadata_id
+            )
+
+        # To calculate the summary, data from the statistics (hourly) and
+        # short_term_statistics (5 minute) tables is combined
+        # - The short term statistics table is used for the head and tail of the period,
+        #   if the period it doesn't start or end on a full hour
+        # - The statistics table is used for the remainder of the time
+        now = dt_util.utcnow()
+        if end_time is not None and end_time > now:
+            end_time = now
+
+        tail_only = (
+            start_time is not None
+            and end_time is not None
+            and end_time - start_time < timedelta(hours=1)
+        )
+
+        # Calculate the head period
+        head_start_time: datetime | None = None
+        head_end_time: datetime | None = None
+        if (
+            not tail_only
+            and oldest_stat is not None
+            and oldest_5_min_stat is not None
+            and oldest_5_min_stat - oldest_stat < timedelta(hours=1)
+            and (start_time is None or start_time < oldest_5_min_stat)
+        ):
+            # To improve accuracy of averaged for statistics which were added within
+            # recorder's retention period.
+            head_start_time = oldest_5_min_stat
+            head_end_time = oldest_5_min_stat.replace(
+                minute=0, second=0, microsecond=0
+            ) + timedelta(hours=1)
+        elif not tail_only and start_time is not None and start_time.minute:
+            head_start_time = start_time
+            head_end_time = start_time.replace(
+                minute=0, second=0, microsecond=0
+            ) + timedelta(hours=1)
+
+        # Calculate the tail period
+        tail_start_time: datetime | None = None
+        tail_end_time: datetime | None = None
+        if end_time is None:
+            tail_start_time = now.replace(minute=0, second=0, microsecond=0)
+        elif end_time.minute:
+            tail_start_time = (
+                start_time
+                if tail_only
+                else end_time.replace(minute=0, second=0, microsecond=0)
+            )
+            tail_end_time = end_time
+
+        # Calculate the main period
+        main_start_time: datetime | None = None
+        main_end_time: datetime | None = None
+        if not tail_only:
+            main_start_time = start_time if head_end_time is None else head_end_time
+            main_end_time = end_time if tail_start_time is None else tail_start_time
+
-        # Fetch metadata for the given statistic_id
-        metadata = get_metadata_with_session(session, statistic_ids=[statistic_id])
-        if not metadata:
@@ -1449,6 +1495,7 @@ def statistic_during_period(
             head_start_time,
             main_start_time,
             tail_start_time,
+            oldest_stat,
             tail_only,
             metadata_id,
         )

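Annotation: the recorder change computes the statistics summary from three
sub-periods: a head and tail served by the 5-minute short-term table and a
main span served by the hourly table. A runnable sketch of that window split
(simplified; the real code above also widens the head using the oldest rows):

    from datetime import datetime, timedelta, timezone

    def split_period(start: datetime, end: datetime):
        """Split [start, end) into head, main and tail sub-periods."""
        if end - start < timedelta(hours=1):
            return None, None, (start, end)  # tail only: short-term data covers it
        head = tail = None
        main_start, main_end = start, end
        if start.minute:  # partial first hour -> head from short-term statistics
            main_start = start.replace(minute=0, second=0, microsecond=0) + timedelta(hours=1)
            head = (start, main_start)
        if end.minute:  # partial last hour -> tail from short-term statistics
            main_end = end.replace(minute=0, second=0, microsecond=0)
            tail = (main_end, end)
        return head, (main_start, main_end), tail

    head, main, tail = split_period(
        datetime(2022, 11, 1, 10, 20, tzinfo=timezone.utc),
        datetime(2022, 11, 1, 14, 45, tzinfo=timezone.utc),
    )
    print(head)  # (10:20, 11:00) read from the 5-minute table
    print(main)  # (11:00, 14:00) read from the hourly table
    print(tail)  # (14:00, 14:45) read from the 5-minute table
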
@@ -91,9 +91,8 @@ COMBINED_SCHEMA = vol.Schema(
 CONFIG_SCHEMA = vol.Schema(
     {
         DOMAIN: vol.All(
-            # convert empty dict to empty list
-            lambda x: [] if x == {} else x,
             cv.ensure_list,
+            cv.remove_falsy,
             [COMBINED_SCHEMA],
         )
     },

@@ -3,7 +3,7 @@
   "name": "Ridwell",
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/ridwell",
-  "requirements": ["aioridwell==2022.03.0"],
+  "requirements": ["aioridwell==2022.11.0"],
   "codeowners": ["@bachya"],
   "iot_class": "cloud_polling",
   "loggers": ["aioridwell"],

@@ -2,7 +2,7 @@
   "domain": "sql",
   "name": "SQL",
   "documentation": "https://www.home-assistant.io/integrations/sql",
-  "requirements": ["sqlalchemy==1.4.42"],
+  "requirements": ["sqlalchemy==1.4.44"],
   "codeowners": ["@dgomes", "@gjohansson-ST"],
   "config_flow": true,
   "iot_class": "local_polling"

@@ -61,6 +61,15 @@ class SwitchbotDataUpdateCoordinator(PassiveBluetoothDataUpdateCoordinator):
         self.base_unique_id = base_unique_id
         self.model = model
         self._ready_event = asyncio.Event()
+        self._was_unavailable = True
+
+    @callback
+    def _async_handle_unavailable(
+        self, service_info: bluetooth.BluetoothServiceInfoBleak
+    ) -> None:
+        """Handle the device going unavailable."""
+        super()._async_handle_unavailable(service_info)
+        self._was_unavailable = True

     @callback
     def _async_handle_bluetooth_event(
@@ -70,16 +79,20 @@ class SwitchbotDataUpdateCoordinator(PassiveBluetoothDataUpdateCoordinator):
     ) -> None:
         """Handle a Bluetooth event."""
         self.ble_device = service_info.device
-        if adv := switchbot.parse_advertisement_data(
-            service_info.device, service_info.advertisement
+        if not (
+            adv := switchbot.parse_advertisement_data(
+                service_info.device, service_info.advertisement
+            )
         ):
-            if "modelName" in adv.data:
-                self._ready_event.set()
-            _LOGGER.debug("%s: Switchbot data: %s", self.ble_device.address, self.data)
-            if not self.device.advertisement_changed(adv):
-                return
-            self.data = flatten_sensors_data(adv.data)
-            self.device.update_from_advertisement(adv)
+            return
+        if "modelName" in adv.data:
+            self._ready_event.set()
+        _LOGGER.debug("%s: Switchbot data: %s", self.ble_device.address, self.data)
+        if not self.device.advertisement_changed(adv) and not self._was_unavailable:
+            return
+        self._was_unavailable = False
+        self.data = flatten_sensors_data(adv.data)
+        self.device.update_from_advertisement(adv)
         super()._async_handle_bluetooth_event(service_info, change)

     async def async_wait_ready(self) -> bool:

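Annotation: the switchbot fix tracks whether the device was unavailable so the
first advertisement after it comes back always refreshes state, even when the
payload is identical. A stand-alone illustration of that flag (not the real
coordinator class):

    class AdvertisementTracker:
        def __init__(self):
            self._was_unavailable = True
            self.data = None

        def handle_unavailable(self):
            self._was_unavailable = True

        def handle_advertisement(self, adv) -> bool:
            """Return True when listeners should be notified."""
            if adv == self.data and not self._was_unavailable:
                return False  # duplicate while continuously available
            self._was_unavailable = False
            self.data = adv
            return True

    tracker = AdvertisementTracker()
    assert tracker.handle_advertisement({"battery": 90}) is True
    assert tracker.handle_advertisement({"battery": 90}) is False  # unchanged
    tracker.handle_unavailable()
    assert tracker.handle_advertisement({"battery": 90}) is True   # back online
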
@@ -2,7 +2,7 @@
   "domain": "switchbot",
   "name": "SwitchBot",
   "documentation": "https://www.home-assistant.io/integrations/switchbot",
-  "requirements": ["PySwitchbot==0.20.2"],
+  "requirements": ["PySwitchbot==0.20.5"],
   "config_flow": true,
   "dependencies": ["bluetooth"],
   "codeowners": [

@@ -367,8 +367,6 @@ class UnifiBlockClientSwitch(SwitchEntity):
             self.hass.async_create_task(self.remove_item({self._obj_id}))
             return

-        client = self.controller.api.clients[self._obj_id]
-        self._attr_is_on = not client.blocked
         self._attr_available = self.controller.available
         self.async_write_ha_state()

@@ -1090,11 +1090,17 @@ async def websocket_update_zha_configuration(
         ):
             data_to_save[CUSTOM_CONFIGURATION][section].pop(entry)

     # remove entire section block if empty
-    if not data_to_save[CUSTOM_CONFIGURATION][section]:
+    if (
+        not data_to_save[CUSTOM_CONFIGURATION].get(section)
+        and section in data_to_save[CUSTOM_CONFIGURATION]
+    ):
         data_to_save[CUSTOM_CONFIGURATION].pop(section)

     # remove entire custom_configuration block if empty
-    if not data_to_save[CUSTOM_CONFIGURATION]:
+    if (
+        not data_to_save.get(CUSTOM_CONFIGURATION)
+        and CUSTOM_CONFIGURATION in data_to_save
+    ):
         data_to_save.pop(CUSTOM_CONFIGURATION)

     _LOGGER.info(

@@ -221,11 +221,13 @@ def async_get_zha_config_value(
     )


-def async_cluster_exists(hass, cluster_id):
+def async_cluster_exists(hass, cluster_id, skip_coordinator=True):
     """Determine if a device containing the specified in cluster is paired."""
     zha_gateway = hass.data[DATA_ZHA][DATA_ZHA_GATEWAY]
     zha_devices = zha_gateway.devices.values()
     for zha_device in zha_devices:
+        if skip_coordinator and zha_device.is_coordinator:
+            continue
         clusters_by_endpoint = zha_device.async_get_clusters()
         for clusters in clusters_by_endpoint.values():
             if (

@ -7,7 +7,7 @@
|
|||
"bellows==0.34.2",
|
||||
"pyserial==3.5",
|
||||
"pyserial-asyncio==0.6",
|
||||
"zha-quirks==0.0.85",
|
||||
"zha-quirks==0.0.86",
|
||||
"zigpy-deconz==0.19.0",
|
||||
"zigpy==0.51.5",
|
||||
"zigpy-xbee==0.16.2",
|
||||
|
|
|
@@ -7,9 +7,6 @@ from zwave_js_server.client import Client as ZwaveClient
 from zwave_js_server.const import TARGET_STATE_PROPERTY, TARGET_VALUE_PROPERTY
 from zwave_js_server.const.command_class.barrier_operator import BarrierState
 from zwave_js_server.const.command_class.multilevel_switch import (
-    COVER_CLOSE_PROPERTY,
-    COVER_DOWN_PROPERTY,
-    COVER_OFF_PROPERTY,
     COVER_ON_PROPERTY,
     COVER_OPEN_PROPERTY,
     COVER_UP_PROPERTY,
@@ -156,23 +153,14 @@ class ZWaveCover(ZWaveBaseEntity, CoverEntity):

     async def async_stop_cover(self, **kwargs: Any) -> None:
         """Stop cover."""
-        open_value = (
+        cover_property = (
             self.get_zwave_value(COVER_OPEN_PROPERTY)
             or self.get_zwave_value(COVER_UP_PROPERTY)
             or self.get_zwave_value(COVER_ON_PROPERTY)
         )
-        if open_value:
-            # Stop the cover if it's opening
-            await self.info.node.async_set_value(open_value, False)
-
-        close_value = (
-            self.get_zwave_value(COVER_CLOSE_PROPERTY)
-            or self.get_zwave_value(COVER_DOWN_PROPERTY)
-            or self.get_zwave_value(COVER_OFF_PROPERTY)
-        )
-        if close_value:
-            # Stop the cover if it's closing
-            await self.info.node.async_set_value(close_value, False)
+        if cover_property:
+            # Stop the cover, will stop regardless of the actual direction of travel.
+            await self.info.node.async_set_value(cover_property, False)


 class ZWaveTiltCover(ZWaveCover):

@@ -8,7 +8,7 @@ from .backports.enum import StrEnum
 APPLICATION_NAME: Final = "HomeAssistant"
 MAJOR_VERSION: Final = 2022
 MINOR_VERSION: Final = 11
-PATCH_VERSION: Final = "2"
+PATCH_VERSION: Final = "3"
 __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
 __version__: Final = f"{__short_version__}.{PATCH_VERSION}"
 REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 9, 0)

@@ -159,21 +159,25 @@
   "integrations": {
     "alexa": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "cloud_push",
       "name": "Amazon Alexa"
     },
     "amazon_polly": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "cloud_push",
       "name": "Amazon Polly"
     },
     "aws": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "cloud_push",
       "name": "Amazon Web Services (AWS)"
     },
     "route53": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "cloud_push",
       "name": "AWS Route53"
     }
@@ -284,6 +288,7 @@
     },
     "itunes": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "local_polling",
       "name": "Apple iTunes"
     }
@@ -336,11 +341,13 @@
   "integrations": {
     "aruba": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "local_polling",
       "name": "Aruba"
     },
     "cppm_tracker": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "local_polling",
       "name": "Aruba ClearPass"
     }
@@ -363,11 +370,13 @@
   "integrations": {
     "asterisk_cdr": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "local_polling",
       "name": "Asterisk Call Detail Records"
     },
     "asterisk_mbox": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "local_push",
       "name": "Asterisk Voicemail"
     }
@@ -710,16 +719,19 @@
   "integrations": {
     "cisco_ios": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "local_polling",
       "name": "Cisco IOS"
     },
     "cisco_mobility_express": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "local_polling",
       "name": "Cisco Mobility Express"
     },
     "cisco_webex_teams": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "cloud_push",
       "name": "Cisco Webex Teams"
     }
@@ -748,11 +760,13 @@
   "integrations": {
     "clicksend": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "cloud_push",
       "name": "ClickSend SMS"
     },
     "clicksend_tts": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "cloud_push",
       "name": "ClickSend TTS"
     }
@@ -944,6 +958,7 @@
   "integrations": {
     "denon": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "local_polling",
       "name": "Denon Network Receivers"
     },
@@ -1245,6 +1260,7 @@
   "integrations": {
     "avea": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "local_polling",
       "name": "Elgato Avea"
     },
@@ -1291,11 +1307,13 @@
   "integrations": {
     "emoncms": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "local_polling",
       "name": "Emoncms"
     },
     "emoncms_history": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "local_polling",
       "name": "Emoncms History"
     }
@@ -1377,6 +1395,7 @@
     },
     "epsonworkforce": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "local_polling",
       "name": "Epson Workforce"
     }
@@ -1387,11 +1406,13 @@
   "integrations": {
     "eq3btsmart": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "local_polling",
       "name": "eQ-3 Bluetooth Smart Thermostats"
     },
     "maxcube": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "local_polling",
       "name": "eQ-3 MAX!"
     }
@@ -1480,15 +1501,18 @@
   "integrations": {
     "ffmpeg": {
+      "integration_type": "hub",
       "config_flow": false,
       "name": "FFmpeg"
     },
     "ffmpeg_motion": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "calculated",
       "name": "FFmpeg Motion"
     },
     "ffmpeg_noise": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "calculated",
       "name": "FFmpeg Noise"
     }
@@ -1871,11 +1895,13 @@
   "integrations": {
     "gc100": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "local_polling",
       "name": "Global Cach\u00e9 GC-100"
     },
     "itach": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "assumed_state",
       "name": "Global Cach\u00e9 iTach TCP/IP to IR"
     }
@@ -1910,26 +1936,31 @@
   "integrations": {
     "google_assistant": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "cloud_push",
       "name": "Google Assistant"
     },
     "google_cloud": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "cloud_push",
       "name": "Google Cloud Platform"
     },
     "google_domains": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "cloud_polling",
       "name": "Google Domains"
     },
     "google_maps": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "cloud_polling",
       "name": "Google Maps"
     },
     "google_pubsub": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "cloud_push",
       "name": "Google Pub/Sub"
     },
@@ -1941,6 +1972,7 @@
     },
     "google_translate": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "cloud_push",
       "name": "Google Translate Text-to-Speech"
     },
@@ -1951,6 +1983,7 @@
     },
     "google_wifi": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "local_polling",
       "name": "Google Wifi"
     },
@@ -2119,11 +2152,13 @@
   "integrations": {
     "hikvision": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "local_push",
       "name": "Hikvision"
     },
     "hikvisioncam": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "local_polling",
       "name": "Hikvision"
     }
@@ -2176,6 +2211,7 @@
   "integrations": {
     "homematic": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "local_push",
       "name": "Homematic"
     },
@@ -2204,6 +2240,7 @@
     },
     "evohome": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "cloud_polling",
       "name": "Honeywell Total Connect Comfort (Europe)"
     },
@@ -2297,11 +2334,13 @@
   "integrations": {
     "watson_iot": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "cloud_push",
       "name": "IBM Watson IoT Platform"
     },
     "watson_tts": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "cloud_push",
       "name": "IBM Watson TTS"
     }
@@ -2342,6 +2381,7 @@
   "integrations": {
     "symfonisk": {
+      "integration_type": "virtual",
       "config_flow": false,
       "supported_by": "sonos",
       "name": "IKEA SYMFONISK"
     },
@@ -2720,6 +2760,7 @@
   "integrations": {
     "lg_netcast": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "local_polling",
       "name": "LG Netcast"
     },
@@ -2855,6 +2896,7 @@
     },
     "ue_smart_radio": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "cloud_polling",
       "name": "Logitech UE Smart Radio"
     },
@@ -2901,6 +2943,7 @@
   "integrations": {
     "lutron": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "local_polling",
       "name": "Lutron"
     },
@@ -2912,6 +2955,7 @@
     },
     "homeworks": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "local_push",
       "name": "Lutron Homeworks"
     }
@@ -3021,6 +3065,7 @@
     },
     "raincloud": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "cloud_polling",
       "name": "Melnor RainCloud"
     }
@@ -3097,31 +3142,37 @@
     },
     "azure_service_bus": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "cloud_push",
       "name": "Azure Service Bus"
     },
     "microsoft_face_detect": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "cloud_push",
       "name": "Microsoft Face Detect"
     },
     "microsoft_face_identify": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "cloud_push",
       "name": "Microsoft Face Identify"
     },
     "microsoft_face": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "cloud_push",
       "name": "Microsoft Face"
     },
     "microsoft": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "cloud_push",
       "name": "Microsoft Text-to-Speech (TTS)"
     },
     "msteams": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "cloud_push",
       "name": "Microsoft Teams"
     },
@@ -3133,6 +3184,7 @@
     },
     "xbox_live": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "cloud_polling",
       "name": "Xbox Live"
     }
@@ -3260,6 +3312,7 @@
   "integrations": {
     "manual_mqtt": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "local_push",
       "name": "Manual MQTT Alarm Control Panel"
     },
@@ -3271,21 +3324,25 @@
     },
     "mqtt_eventstream": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "local_polling",
       "name": "MQTT Eventstream"
     },
     "mqtt_json": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "local_push",
       "name": "MQTT JSON"
     },
     "mqtt_room": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "local_push",
       "name": "MQTT Room Presence"
     },
     "mqtt_statestream": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "local_push",
       "name": "MQTT Statestream"
     }
@@ -3404,6 +3461,7 @@
     },
     "netgear_lte": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "local_polling",
       "name": "NETGEAR LTE"
     }
@@ -3765,11 +3823,13 @@
   "integrations": {
     "luci": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "local_polling",
       "name": "OpenWrt (luci)"
     },
     "ubus": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "local_polling",
       "name": "OpenWrt (ubus)"
     }
@@ -3846,6 +3906,7 @@
   "integrations": {
     "panasonic_bluray": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "local_polling",
       "name": "Panasonic Blu-Ray Player"
     },
@@ -4140,6 +4201,7 @@
   "integrations": {
     "qnap": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "local_polling",
       "name": "QNAP"
     },
@@ -4228,6 +4290,7 @@
   "integrations": {
     "rpi_camera": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "local_polling",
       "name": "Raspberry Pi Camera"
     },
@@ -4238,6 +4301,7 @@
     },
     "remote_rpi_gpio": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "local_push",
       "name": "Raspberry Pi Remote GPIO"
     }
@@ -4437,11 +4501,13 @@
   "integrations": {
     "russound_rio": {
+      "integration_type": "hub",
       "config_flow": false,
       "iot_class": "local_push",
       "name": "Russound RIO"
|
||||
},
|
||||
"russound_rnet": {
|
||||
"integration_type": "hub",
|
||||
"config_flow": false,
|
||||
"iot_class": "local_polling",
|
||||
"name": "Russound RNET"
|
||||
}
|
||||
|
@ -4464,6 +4530,7 @@
|
|||
"integrations": {
|
||||
"familyhub": {
|
||||
"integration_type": "hub",
|
||||
"config_flow": false,
|
||||
"iot_class": "local_polling",
|
||||
"name": "Samsung Family Hub"
|
||||
},
|
||||
|
@ -4845,6 +4912,7 @@
|
|||
},
|
||||
"solaredge_local": {
|
||||
"integration_type": "hub",
|
||||
"config_flow": false,
|
||||
"iot_class": "local_polling",
|
||||
"name": "SolarEdge Local"
|
||||
}
|
||||
|
@ -4908,6 +4976,7 @@
|
|||
},
|
||||
"sony_projector": {
|
||||
"integration_type": "hub",
|
||||
"config_flow": false,
|
||||
"iot_class": "local_polling",
|
||||
"name": "Sony Projector"
|
||||
},
|
||||
|
@ -5121,6 +5190,7 @@
|
|||
"integrations": {
|
||||
"synology_chat": {
|
||||
"integration_type": "hub",
|
||||
"config_flow": false,
|
||||
"iot_class": "cloud_push",
|
||||
"name": "Synology Chat"
|
||||
},
|
||||
|
@ -5132,6 +5202,7 @@
|
|||
},
|
||||
"synology_srm": {
|
||||
"integration_type": "hub",
|
||||
"config_flow": false,
|
||||
"iot_class": "local_polling",
|
||||
"name": "Synology SRM"
|
||||
}
|
||||
|
@ -5218,11 +5289,13 @@
|
|||
"integrations": {
|
||||
"telegram": {
|
||||
"integration_type": "hub",
|
||||
"config_flow": false,
|
||||
"iot_class": "cloud_polling",
|
||||
"name": "Telegram"
|
||||
},
|
||||
"telegram_bot": {
|
||||
"integration_type": "hub",
|
||||
"config_flow": false,
|
||||
"iot_class": "cloud_push",
|
||||
"name": "Telegram bot"
|
||||
}
|
||||
|
@ -5239,6 +5312,7 @@
|
|||
},
|
||||
"tellstick": {
|
||||
"integration_type": "hub",
|
||||
"config_flow": false,
|
||||
"iot_class": "assumed_state",
|
||||
"name": "TellStick"
|
||||
}
|
||||
|
@ -5522,11 +5596,13 @@
|
|||
},
|
||||
"twilio_call": {
|
||||
"integration_type": "hub",
|
||||
"config_flow": false,
|
||||
"iot_class": "cloud_push",
|
||||
"name": "Twilio Call"
|
||||
},
|
||||
"twilio_sms": {
|
||||
"integration_type": "hub",
|
||||
"config_flow": false,
|
||||
"iot_class": "cloud_push",
|
||||
"name": "Twilio SMS"
|
||||
}
|
||||
|
@ -5555,6 +5631,7 @@
|
|||
"integrations": {
|
||||
"ultraloq": {
|
||||
"integration_type": "virtual",
|
||||
"config_flow": false,
|
||||
"iot_standards": [
|
||||
"zwave"
|
||||
],
|
||||
|
@ -5573,11 +5650,13 @@
|
|||
},
|
||||
"unifi_direct": {
|
||||
"integration_type": "hub",
|
||||
"config_flow": false,
|
||||
"iot_class": "local_polling",
|
||||
"name": "UniFi AP"
|
||||
},
|
||||
"unifiled": {
|
||||
"integration_type": "hub",
|
||||
"config_flow": false,
|
||||
"iot_class": "local_polling",
|
||||
"name": "UniFi LED"
|
||||
},
|
||||
|
@ -5754,6 +5833,7 @@
|
|||
"integrations": {
|
||||
"vlc": {
|
||||
"integration_type": "hub",
|
||||
"config_flow": false,
|
||||
"iot_class": "local_polling",
|
||||
"name": "VLC media player"
|
||||
},
|
||||
|
@ -5978,11 +6058,13 @@
|
|||
},
|
||||
"xiaomi_tv": {
|
||||
"integration_type": "hub",
|
||||
"config_flow": false,
|
||||
"iot_class": "assumed_state",
|
||||
"name": "Xiaomi TV"
|
||||
},
|
||||
"xiaomi": {
|
||||
"integration_type": "hub",
|
||||
"config_flow": false,
|
||||
"iot_class": "local_polling",
|
||||
"name": "Xiaomi"
|
||||
}
|
||||
|
@ -6040,11 +6122,13 @@
|
|||
"integrations": {
|
||||
"yandex_transport": {
|
||||
"integration_type": "hub",
|
||||
"config_flow": false,
|
||||
"iot_class": "cloud_polling",
|
||||
"name": "Yandex Transport"
|
||||
},
|
||||
"yandextts": {
|
||||
"integration_type": "hub",
|
||||
"config_flow": false,
|
||||
"iot_class": "cloud_push",
|
||||
"name": "Yandex TTS"
|
||||
}
|
||||
|
@ -6061,6 +6145,7 @@
|
|||
},
|
||||
"yeelightsunflower": {
|
||||
"integration_type": "hub",
|
||||
"config_flow": false,
|
||||
"iot_class": "local_polling",
|
||||
"name": "Yeelight Sunflower"
|
||||
}
|
||||
|
|
|
@ -10,7 +10,7 @@ atomicwrites-homeassistant==1.4.1
attrs==21.2.0
awesomeversion==22.9.0
bcrypt==3.1.7
bleak-retry-connector==2.8.3
bleak-retry-connector==2.8.4
bleak==0.19.2
bluetooth-adapters==0.7.0
bluetooth-auto-recovery==0.3.6

@ -37,7 +37,7 @@ pyudev==0.23.2
pyyaml==6.0
requests==2.28.1
scapy==2.4.5
sqlalchemy==1.4.42
sqlalchemy==1.4.44
typing-extensions>=4.4.0,<5.0
voluptuous-serialize==2.5.0
voluptuous==0.13.1

@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"

[project]
name = "homeassistant"
version = "2022.11.2"
version = "2022.11.3"
license = {text = "Apache-2.0"}
description = "Open-source home automation platform running on Python 3."
readme = "README.rst"

@ -37,7 +37,7 @@ PyRMVtransport==0.3.3
PySocks==1.7.1

# homeassistant.components.switchbot
PySwitchbot==0.20.2
PySwitchbot==0.20.5

# homeassistant.components.transport_nsw
PyTransportNSW==0.1.1

@ -171,7 +171,7 @@ aioguardian==2022.07.0
aioharmony==0.2.9

# homeassistant.components.homekit_controller
aiohomekit==2.2.18
aiohomekit==2.2.19

# homeassistant.components.emulated_hue
# homeassistant.components.http

@ -246,7 +246,7 @@ aioqsw==0.2.2
aiorecollect==1.0.8

# homeassistant.components.ridwell
aioridwell==2022.03.0
aioridwell==2022.11.0

# homeassistant.components.senseme
aiosenseme==0.6.1

@ -294,7 +294,7 @@ aioymaps==1.2.2
airly==1.1.0

# homeassistant.components.airthings_ble
airthings-ble==0.5.2
airthings-ble==0.5.3

# homeassistant.components.airthings
airthings_cloud==0.1.0

@ -413,7 +413,7 @@ bimmer_connected==0.10.4
bizkaibus==0.1.1

# homeassistant.components.bluetooth
bleak-retry-connector==2.8.3
bleak-retry-connector==2.8.4

# homeassistant.components.bluetooth
bleak==0.19.2

@ -725,7 +725,7 @@ gTTS==2.2.4
garages-amsterdam==3.0.0

# homeassistant.components.google
gcal-sync==4.0.0
gcal-sync==4.0.2

# homeassistant.components.geniushub
geniushub-client==0.6.30

@ -804,7 +804,7 @@ greenwavereality==0.5.1
gridnet==4.0.0

# homeassistant.components.growatt_server
growattServer==1.2.3
growattServer==1.2.4

# homeassistant.components.google_sheets
gspread==5.5.0

@ -889,7 +889,7 @@ horimote==0.4.1
httplib2==0.20.4

# homeassistant.components.huawei_lte
huawei-lte-api==1.6.3
huawei-lte-api==1.6.7

# homeassistant.components.hydrawise
hydrawiser==0.2

@ -1006,7 +1006,7 @@ librouteros==3.2.0
libsoundtouch==0.8

# homeassistant.components.life360
life360==5.1.1
life360==5.3.0

# homeassistant.components.osramlightify
lightify==1.0.7.3

@ -1241,7 +1241,7 @@ openwrt-luci-rpc==1.1.11
openwrt-ubus-rpc==0.0.2

# homeassistant.components.oralb
oralb-ble==0.13.0
oralb-ble==0.14.2

# homeassistant.components.oru
oru==0.1.11

@ -2314,7 +2314,7 @@ spotipy==2.20.0

# homeassistant.components.recorder
# homeassistant.components.sql
sqlalchemy==1.4.42
sqlalchemy==1.4.44

# homeassistant.components.srp_energy
srpenergy==1.3.6

@ -2610,7 +2610,7 @@ zengge==0.2
zeroconf==0.39.4

# homeassistant.components.zha
zha-quirks==0.0.85
zha-quirks==0.0.86

# homeassistant.components.zhong_hong
zhong_hong_hvac==1.0.9

@ -33,7 +33,7 @@ PyRMVtransport==0.3.3
PySocks==1.7.1

# homeassistant.components.switchbot
PySwitchbot==0.20.2
PySwitchbot==0.20.5

# homeassistant.components.transport_nsw
PyTransportNSW==0.1.1

@ -155,7 +155,7 @@ aioguardian==2022.07.0
aioharmony==0.2.9

# homeassistant.components.homekit_controller
aiohomekit==2.2.18
aiohomekit==2.2.19

# homeassistant.components.emulated_hue
# homeassistant.components.http

@ -221,7 +221,7 @@ aioqsw==0.2.2
aiorecollect==1.0.8

# homeassistant.components.ridwell
aioridwell==2022.03.0
aioridwell==2022.11.0

# homeassistant.components.senseme
aiosenseme==0.6.1

@ -269,7 +269,7 @@ aioymaps==1.2.2
airly==1.1.0

# homeassistant.components.airthings_ble
airthings-ble==0.5.2
airthings-ble==0.5.3

# homeassistant.components.airthings
airthings_cloud==0.1.0

@ -337,7 +337,7 @@ bellows==0.34.2
bimmer_connected==0.10.4

# homeassistant.components.bluetooth
bleak-retry-connector==2.8.3
bleak-retry-connector==2.8.4

# homeassistant.components.bluetooth
bleak==0.19.2

@ -541,7 +541,7 @@ gTTS==2.2.4
garages-amsterdam==3.0.0

# homeassistant.components.google
gcal-sync==4.0.0
gcal-sync==4.0.2

# homeassistant.components.geocaching
geocachingapi==0.2.1

@ -599,7 +599,7 @@ greeneye_monitor==3.0.3
gridnet==4.0.0

# homeassistant.components.growatt_server
growattServer==1.2.3
growattServer==1.2.4

# homeassistant.components.google_sheets
gspread==5.5.0

@ -666,7 +666,7 @@ homepluscontrol==0.0.5
httplib2==0.20.4

# homeassistant.components.huawei_lte
huawei-lte-api==1.6.3
huawei-lte-api==1.6.7

# homeassistant.components.hyperion
hyperion-py==0.7.5

@ -744,7 +744,7 @@ librouteros==3.2.0
libsoundtouch==0.8

# homeassistant.components.life360
life360==5.1.1
life360==5.3.0

# homeassistant.components.logi_circle
logi_circle==0.2.3

@ -886,7 +886,7 @@ open-meteo==0.2.1
openerz-api==0.1.0

# homeassistant.components.oralb
oralb-ble==0.13.0
oralb-ble==0.14.2

# homeassistant.components.ovo_energy
ovoenergy==1.2.0

@ -1599,7 +1599,7 @@ spotipy==2.20.0

# homeassistant.components.recorder
# homeassistant.components.sql
sqlalchemy==1.4.42
sqlalchemy==1.4.44

# homeassistant.components.srp_energy
srpenergy==1.3.6

@ -1811,7 +1811,7 @@ zamg==0.1.1
zeroconf==0.39.4

# homeassistant.components.zha
zha-quirks==0.0.85
zha-quirks==0.0.86

# homeassistant.components.zha
zigpy-deconz==0.19.0

@ -113,8 +113,9 @@ def _populate_brand_integrations(
        metadata = {
            "integration_type": integration.integration_type,
        }
        if integration.config_flow:
            metadata["config_flow"] = integration.config_flow
        # Always set the config_flow key to avoid breaking the frontend
        # https://github.com/home-assistant/frontend/issues/14376
        metadata["config_flow"] = bool(integration.config_flow)
        if integration.iot_class:
            metadata["iot_class"] = integration.iot_class
        if integration.supported_by:

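A side note on the hassfest hunk above: the frontend expects every generated brand entry to carry a config_flow key, so the generator now coerces the value with bool() instead of writing the key conditionally. A minimal sketch of the effect, using a hypothetical Integration stand-in rather than the real hassfest model:

    # Hypothetical stand-in for hassfest's Integration; names are illustrative.
    from dataclasses import dataclass

    @dataclass
    class Integration:
        integration_type: str
        config_flow: bool | None = None  # manifests may omit the flag

    def build_metadata(integration: Integration) -> dict:
        metadata = {"integration_type": integration.integration_type}
        # bool(None) and bool(False) both become False, so the key is
        # always emitted instead of being dropped for falsy values.
        metadata["config_flow"] = bool(integration.config_flow)
        return metadata

    assert build_metadata(Integration("hub"))["config_flow"] is False
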
@ -20,9 +20,11 @@ from homeassistant.components.bluetooth import (
    scanner,
)
from homeassistant.components.bluetooth.const import (
    BLUETOOTH_DISCOVERY_COOLDOWN_SECONDS,
    CONF_PASSIVE,
    DEFAULT_ADDRESS,
    DOMAIN,
    LINUX_FIRMWARE_LOAD_FALLBACK_SECONDS,
    SOURCE_LOCAL,
    UNAVAILABLE_TRACK_SECONDS,
)

@ -2737,6 +2739,81 @@ async def test_discover_new_usb_adapters(hass, mock_bleak_scanner_start, one_ada
    assert len(hass.config_entries.flow.async_progress(DOMAIN)) == 1


async def test_discover_new_usb_adapters_with_firmware_fallback_delay(
    hass, mock_bleak_scanner_start, one_adapter
):
    """Test we can discover new usb adapters with a firmware fallback delay."""
    entry = MockConfigEntry(
        domain=bluetooth.DOMAIN, data={}, unique_id="00:00:00:00:00:01"
    )
    entry.add_to_hass(hass)

    saved_callback = None

    def _async_register_scan_request_callback(_hass, _callback):
        nonlocal saved_callback
        saved_callback = _callback
        return lambda: None

    with patch(
        "homeassistant.components.bluetooth.usb.async_register_scan_request_callback",
        _async_register_scan_request_callback,
    ):
        assert await async_setup_component(hass, bluetooth.DOMAIN, {})
        await hass.async_block_till_done()

    assert not hass.config_entries.flow.async_progress(DOMAIN)

    saved_callback()
    assert not hass.config_entries.flow.async_progress(DOMAIN)

    with patch(
        "homeassistant.components.bluetooth.util.platform.system", return_value="Linux"
    ), patch(
        "bluetooth_adapters.get_bluetooth_adapter_details",
        return_value={},
    ):
        async_fire_time_changed(
            hass,
            dt_util.utcnow() + timedelta(seconds=BLUETOOTH_DISCOVERY_COOLDOWN_SECONDS * 2),
        )
        await hass.async_block_till_done()

    assert len(hass.config_entries.flow.async_progress(DOMAIN)) == 0

    with patch(
        "homeassistant.components.bluetooth.util.platform.system", return_value="Linux"
    ), patch(
        "bluetooth_adapters.get_bluetooth_adapter_details",
        return_value={
            "hci0": {
                "org.bluez.Adapter1": {
                    "Address": "00:00:00:00:00:01",
                    "Name": "BlueZ 4.63",
                    "Modalias": "usbid:1234",
                }
            },
            "hci1": {
                "org.bluez.Adapter1": {
                    "Address": "00:00:00:00:00:02",
                    "Name": "BlueZ 4.63",
                    "Modalias": "usbid:1234",
                }
            },
        },
    ):
        async_fire_time_changed(
            hass,
            dt_util.utcnow()
            + timedelta(
                seconds=LINUX_FIRMWARE_LOAD_FALLBACK_SECONDS
                + (BLUETOOTH_DISCOVERY_COOLDOWN_SECONDS * 2)
            ),
        )
        await hass.async_block_till_done()

    assert len(hass.config_entries.flow.async_progress(DOMAIN)) == 1


async def test_issue_outdated_haos(
    hass, mock_bleak_scanner_start, one_adapter, operating_system_85
):

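One stdlib detail the timing assertions above depend on: datetime.timedelta treats its first positional argument as days, so second-based offsets in these tests must be spelled with the seconds= keyword (the first async_fire_time_changed call above originally passed a bare number). A standalone illustration:

    from datetime import timedelta

    # timedelta's positional parameters are (days, seconds, microseconds, ...),
    # so a bare number means days, not seconds.
    assert timedelta(5) == timedelta(days=5)
    assert timedelta(5) != timedelta(seconds=5)
    assert timedelta(seconds=5).total_seconds() == 5.0
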
@ -47,7 +47,6 @@ TEST_API_CALENDAR = {
    "id": CALENDAR_ID,
    "etag": '"3584134138943410"',
    "timeZone": "UTC",
    "accessRole": "reader",
    "foregroundColor": "#000000",
    "selected": True,
    "kind": "calendar#calendarListEntry",

@ -62,10 +61,19 @@ CLIENT_ID = "client-id"
CLIENT_SECRET = "client-secret"


@pytest.fixture(name="calendar_access_role")
def test_calendar_access_role() -> str:
    """Default access role to use for test_api_calendar in tests."""
    return "reader"


@pytest.fixture
def test_api_calendar():
def test_api_calendar(calendar_access_role: str):
    """Return a test calendar object used in API responses."""
    return TEST_API_CALENDAR
    return {
        **TEST_API_CALENDAR,
        "accessRole": calendar_access_role,
    }


@pytest.fixture

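The conftest change above routes test_api_calendar through a calendar_access_role fixture so individual test modules can override the role without duplicating the calendar payload. A self-contained sketch of that pytest mechanism with generic names (not the Home Assistant fixtures):

    import pytest

    @pytest.fixture(name="access_role")
    def access_role_fixture() -> str:
        """Default value; a test module can override this fixture."""
        return "reader"

    @pytest.fixture(name="api_calendar")
    def api_calendar_fixture(access_role: str) -> dict:
        """Payload built from whichever access_role is in scope."""
        return {"id": "calendar-id", "accessRole": access_role}

    def test_default_role(api_calendar: dict) -> None:
        assert api_calendar["accessRole"] == "reader"
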
@ -60,6 +60,14 @@ TEST_EVENT = {
}


@pytest.fixture(
    autouse=True, scope="module", params=["reader", "owner", "freeBusyReader"]
)
def calendar_access_role(request) -> str:
    """Fixture to exercise access roles in tests."""
    return request.param


@pytest.fixture(autouse=True)
def mock_test_setup(
    hass,

@ -724,12 +732,15 @@ async def test_invalid_unique_id_cleanup(


@pytest.mark.parametrize(
    "time_zone,event_order",
    "time_zone,event_order,calendar_access_role",
    # This only tests the reader role to force testing against the local
    # database filtering based on start/end time. (free busy reader would
    # just use the API response which this test is not exercising)
    [
        ("America/Los_Angeles", ["One", "Two", "All Day Event"]),
        ("America/Regina", ["One", "Two", "All Day Event"]),
        ("UTC", ["One", "All Day Event", "Two"]),
        ("Asia/Tokyo", ["All Day Event", "One", "Two"]),
        ("America/Los_Angeles", ["One", "Two", "All Day Event"], "reader"),
        ("America/Regina", ["One", "Two", "All Day Event"], "reader"),
        ("UTC", ["One", "All Day Event", "Two"], "reader"),
        ("Asia/Tokyo", ["All Day Event", "One", "Two"], "reader"),
    ],
)
async def test_all_day_iter_order(

@ -104,7 +104,7 @@ async def primary_calendar(
    """Fixture to return the primary calendar."""
    mock_calendar_get(
        "primary",
        {"id": primary_calendar_email, "summary": "Personal", "accessRole": "owner"},
        {"id": primary_calendar_email, "summary": "Personal"},
        exc=primary_calendar_error,
    )

@ -768,7 +768,7 @@ async def test_assign_unique_id(

    mock_calendar_get(
        "primary",
        {"id": EMAIL_ADDRESS, "summary": "Personal", "accessRole": "reader"},
        {"id": EMAIL_ADDRESS, "summary": "Personal"},
    )

    mock_calendars_list({"items": [test_api_calendar]})

@ -1,12 +1,14 @@
"""Test homekit diagnostics."""
from unittest.mock import ANY, patch
from unittest.mock import ANY, MagicMock, patch

from homeassistant.components.homekit.const import (
    CONF_DEVICES,
    CONF_HOMEKIT_MODE,
    DOMAIN,
    HOMEKIT_MODE_ACCESSORY,
)
from homeassistant.const import CONF_NAME, CONF_PORT, EVENT_HOMEASSISTANT_STARTED
from homeassistant.setup import async_setup_component

from .util import async_init_integration

@ -290,3 +292,321 @@ async def test_config_entry_accessory(
    ), patch("homeassistant.components.homekit.async_port_is_available"):
        assert await hass.config_entries.async_unload(entry.entry_id)
        await hass.async_block_till_done()


async def test_config_entry_with_trigger_accessory(
    hass,
    hass_client,
    hk_driver,
    mock_async_zeroconf,
    events,
    demo_cleanup,
    device_reg,
    entity_reg,
):
    """Test generating diagnostics for a bridge config entry with a trigger accessory."""
    assert await async_setup_component(hass, "demo", {"demo": {}})
    hk_driver.publish = MagicMock()

    demo_config_entry = MockConfigEntry(domain="domain")
    demo_config_entry.add_to_hass(hass)
    assert await async_setup_component(hass, "demo", {"demo": {}})
    await hass.async_block_till_done()

    entry = entity_reg.async_get("light.ceiling_lights")
    assert entry is not None
    device_id = entry.device_id

    entry = MockConfigEntry(
        domain=DOMAIN,
        data={
            CONF_NAME: "mock_name",
            CONF_PORT: 12345,
            CONF_DEVICES: [device_id],
            "filter": {
                "exclude_domains": [],
                "exclude_entities": [],
                "include_domains": [],
                "include_entities": ["light.none"],
            },
        },
    )
    entry.add_to_hass(hass)
    assert await hass.config_entries.async_setup(entry.entry_id)
    hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
    await hass.async_block_till_done()
    diag = await get_diagnostics_for_config_entry(hass, hass_client, entry)
    diag.pop("iid_storage")
    diag.pop("bridge")
    assert diag == {
        "accessories": [
            {
                "aid": 1,
                "services": [
                    {
                        "characteristics": [
                            {"format": "bool", "iid": 2, "perms": ["pw"], "type": "14"},
                            {
                                "format": "string",
                                "iid": 3,
                                "perms": ["pr"],
                                "type": "20",
                                "value": "Home Assistant",
                            },
                            {
                                "format": "string",
                                "iid": 4,
                                "perms": ["pr"],
                                "type": "21",
                                "value": "Bridge",
                            },
                            {
                                "format": "string",
                                "iid": 5,
                                "perms": ["pr"],
                                "type": "23",
                                "value": "mock_name",
                            },
                            {
                                "format": "string",
                                "iid": 6,
                                "perms": ["pr"],
                                "type": "30",
                                "value": "homekit.bridge",
                            },
                            {
                                "format": "string",
                                "iid": 7,
                                "perms": ["pr"],
                                "type": "52",
                                "value": ANY,
                            },
                        ],
                        "iid": 1,
                        "type": "3E",
                    },
                    {
                        "characteristics": [
                            {
                                "format": "string",
                                "iid": 9,
                                "perms": ["pr", "ev"],
                                "type": "37",
                                "value": "01.01.00",
                            }
                        ],
                        "iid": 8,
                        "type": "A2",
                    },
                ],
            },
            {
                "aid": ANY,
                "services": [
                    {
                        "characteristics": [
                            {"format": "bool", "iid": 2, "perms": ["pw"], "type": "14"},
                            {
                                "format": "string",
                                "iid": 3,
                                "perms": ["pr"],
                                "type": "20",
                                "value": "Demo",
                            },
                            {
                                "format": "string",
                                "iid": 4,
                                "perms": ["pr"],
                                "type": "21",
                                "value": "Home Assistant",
                            },
                            {
                                "format": "string",
                                "iid": 5,
                                "perms": ["pr"],
                                "type": "23",
                                "value": "Ceiling Lights",
                            },
                            {
                                "format": "string",
                                "iid": 6,
                                "perms": ["pr"],
                                "type": "30",
                                "value": ANY,
                            },
                            {
                                "format": "string",
                                "iid": 7,
                                "perms": ["pr"],
                                "type": "52",
                                "value": ANY,
                            },
                        ],
                        "iid": 1,
                        "type": "3E",
                    },
                    {
                        "characteristics": [
                            {
                                "format": "uint8",
                                "iid": 9,
                                "perms": ["pr", "ev"],
                                "type": "73",
                                "valid-values": [0],
                                "value": None,
                            },
                            {
                                "format": "string",
                                "iid": 10,
                                "perms": ["pr"],
                                "type": "23",
                                "value": "Ceiling Lights Changed States",
                            },
                            {
                                "format": "uint8",
                                "iid": 11,
                                "maxValue": 255,
                                "minStep": 1,
                                "minValue": 1,
                                "perms": ["pr"],
                                "type": "CB",
                                "value": 1,
                            },
                        ],
                        "iid": 8,
                        "linked": [12],
                        "type": "89",
                    },
                    {
                        "characteristics": [
                            {
                                "format": "uint8",
                                "iid": 13,
                                "perms": ["pr"],
                                "type": "CD",
                                "valid-values": [0, 1],
                                "value": 1,
                            }
                        ],
                        "iid": 12,
                        "type": "CC",
                    },
                    {
                        "characteristics": [
                            {
                                "format": "uint8",
                                "iid": 15,
                                "perms": ["pr", "ev"],
                                "type": "73",
                                "valid-values": [0],
                                "value": None,
                            },
                            {
                                "format": "string",
                                "iid": 16,
                                "perms": ["pr"],
                                "type": "23",
                                "value": "Ceiling Lights Turned Off",
                            },
                            {
                                "format": "uint8",
                                "iid": 17,
                                "maxValue": 255,
                                "minStep": 1,
                                "minValue": 1,
                                "perms": ["pr"],
                                "type": "CB",
                                "value": 2,
                            },
                        ],
                        "iid": 14,
                        "linked": [18],
                        "type": "89",
                    },
                    {
                        "characteristics": [
                            {
                                "format": "uint8",
                                "iid": 19,
                                "perms": ["pr"],
                                "type": "CD",
                                "valid-values": [0, 1],
                                "value": 1,
                            }
                        ],
                        "iid": 18,
                        "type": "CC",
                    },
                    {
                        "characteristics": [
                            {
                                "format": "uint8",
                                "iid": 21,
                                "perms": ["pr", "ev"],
                                "type": "73",
                                "valid-values": [0],
                                "value": None,
                            },
                            {
                                "format": "string",
                                "iid": 22,
                                "perms": ["pr"],
                                "type": "23",
                                "value": "Ceiling Lights Turned On",
                            },
                            {
                                "format": "uint8",
                                "iid": 23,
                                "maxValue": 255,
                                "minStep": 1,
                                "minValue": 1,
                                "perms": ["pr"],
                                "type": "CB",
                                "value": 3,
                            },
                        ],
                        "iid": 20,
                        "linked": [24],
                        "type": "89",
                    },
                    {
                        "characteristics": [
                            {
                                "format": "uint8",
                                "iid": 25,
                                "perms": ["pr"],
                                "type": "CD",
                                "valid-values": [0, 1],
                                "value": 1,
                            }
                        ],
                        "iid": 24,
                        "type": "CC",
                    },
                ],
            },
        ],
        "client_properties": {},
        "config-entry": {
            "data": {"name": "mock_name", "port": 12345},
            "options": {
                "devices": [device_id],
                "filter": {
                    "exclude_domains": [],
                    "exclude_entities": [],
                    "include_domains": [],
                    "include_entities": ["light.none"],
                },
            },
            "title": "Mock Title",
            "version": 1,
        },
        "config_version": 2,
        "pairing_id": ANY,
        "status": 1,
    }
    with patch("pyhap.accessory_driver.AccessoryDriver.async_start"), patch(
        "homeassistant.components.homekit.HomeKit.async_stop"
    ), patch("homeassistant.components.homekit.async_port_is_available"):
        assert await hass.config_entries.async_unload(entry.entry_id)
        await hass.async_block_till_done()

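The diagnostics test above compares a large nested dict against expected output and relies on unittest.mock.ANY for nondeterministic fields such as pairing ids and firmware values. A tiny illustration of why that matcher keeps the snapshot-style assert stable:

    from unittest.mock import ANY

    # ANY compares equal to any object, so volatile fields can sit inside
    # an otherwise exact expected structure.
    diag = {"pairing_id": "3D:2C:91:0F", "status": 1}
    assert diag == {"pairing_id": ANY, "status": 1}
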
@ -7,9 +7,17 @@ from homeassistant.components.homekit.const import (
    DOMAIN as DOMAIN_HOMEKIT,
    EVENT_HOMEKIT_CHANGED,
)
from homeassistant.const import ATTR_ENTITY_ID, ATTR_SERVICE
from homeassistant.config_entries import SOURCE_ZEROCONF, ConfigEntryState
from homeassistant.const import (
    ATTR_ENTITY_ID,
    ATTR_SERVICE,
    EVENT_HOMEASSISTANT_STARTED,
)
from homeassistant.setup import async_setup_component

from .util import PATH_HOMEKIT

from tests.common import MockConfigEntry
from tests.components.logbook.common import MockRow, mock_humanify


@ -52,3 +60,57 @@ async def test_humanify_homekit_changed_event(hass, hk_driver, mock_get_source_i
    assert event2["domain"] == DOMAIN_HOMEKIT
    assert event2["message"] == "send command set_cover_position to 75 for Window"
    assert event2["entity_id"] == "cover.window"


async def test_bridge_with_triggers(
    hass, hk_driver, mock_async_zeroconf, entity_reg, caplog
):
    """Test we can setup a bridge with triggers and we ignore numeric states.

    Since numeric states are not supported by HomeKit as they require
    an above or below additional configuration which we have no way
    to input, we ignore them.
    """
    assert await async_setup_component(hass, "demo", {"demo": {}})
    await hass.async_block_till_done()

    entry = entity_reg.async_get("cover.living_room_window")
    assert entry is not None
    device_id = entry.device_id

    entry = MockConfigEntry(
        domain=DOMAIN_HOMEKIT,
        source=SOURCE_ZEROCONF,
        data={
            "name": "HASS Bridge",
            "port": 12345,
        },
        options={
            "filter": {
                "exclude_domains": [],
                "exclude_entities": [],
                "include_domains": [],
                "include_entities": ["cover.living_room_window"],
            },
            "exclude_accessory_mode": True,
            "mode": "bridge",
            "devices": [device_id],
        },
    )
    entry.add_to_hass(hass)

    with patch(
        "homeassistant.components.network.async_get_source_ip", return_value="1.2.3.4"
    ), patch(f"{PATH_HOMEKIT}.async_port_is_available", return_value=True):
        assert await hass.config_entries.async_setup(entry.entry_id)
        await hass.async_block_till_done()
        hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
        await hass.async_block_till_done()

    assert entry.state == ConfigEntryState.LOADED
    await hass.config_entries.async_unload(entry.entry_id)
    await hass.async_block_till_done()

    assert (
        "requires additional inputs which are not supported by HomeKit" in caplog.text
    )

@ -760,6 +760,42 @@ async def test_heater_cooler_change_thermostat_state(hass, utcnow):
    )


async def test_can_turn_on_after_off(hass, utcnow):
    """
    Test that we always force device from inactive to active when setting mode.

    This is a regression test for #81863.
    """
    helper = await setup_test_component(hass, create_heater_cooler_service)

    await hass.services.async_call(
        DOMAIN,
        SERVICE_SET_HVAC_MODE,
        {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.OFF},
        blocking=True,
    )
    helper.async_assert_service_values(
        ServicesTypes.HEATER_COOLER,
        {
            CharacteristicsTypes.ACTIVE: ActivationStateValues.INACTIVE,
        },
    )

    await hass.services.async_call(
        DOMAIN,
        SERVICE_SET_HVAC_MODE,
        {"entity_id": "climate.testdevice", "hvac_mode": HVACMode.HEAT},
        blocking=True,
    )
    helper.async_assert_service_values(
        ServicesTypes.HEATER_COOLER,
        {
            CharacteristicsTypes.ACTIVE: ActivationStateValues.ACTIVE,
            CharacteristicsTypes.TARGET_HEATER_COOLER_STATE: TargetHeaterCoolerStateValues.HEAT,
        },
    )


async def test_heater_cooler_change_thermostat_temperature(hass, utcnow):
    """Test that we can change the target temperature."""
    helper = await setup_test_component(hass, create_heater_cooler_service)

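A sketch of the behaviour the regression test above guards, with illustrative constants rather than the homekit_controller source (the real values live in aiohomekit's CharacteristicsTypes and ActivationStateValues): when the HVAC mode changes, the ACTIVE characteristic must be written alongside the target state, or a device that was switched off stays inactive.

    # Illustrative constants and mapping; not the homekit_controller API.
    ACTIVE, INACTIVE = 1, 0
    MODE_TO_TARGET_STATE = {"heat": 1, "cool": 2}

    def characteristics_for_mode(hvac_mode: str) -> dict:
        """Characteristic writes for a mode change."""
        if hvac_mode == "off":
            return {"active": INACTIVE}
        # Always force the device back to active, even if it is believed
        # to be on already; that is the situation regression #81863 covers.
        return {"active": ACTIVE, "target_state": MODE_TO_TARGET_STATE[hvac_mode]}

    assert characteristics_for_mode("heat") == {"active": 1, "target_state": 1}
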
@ -3,16 +3,19 @@

from dataclasses import replace
from datetime import timedelta
import time

from bleak.backends.scanner import BLEDevice
import pytest

from homeassistant.components.ibeacon.const import DOMAIN, UPDATE_INTERVAL
from homeassistant.components.ibeacon.const import ATTR_SOURCE, DOMAIN, UPDATE_INTERVAL
from homeassistant.const import STATE_HOME
from homeassistant.helpers.service_info.bluetooth import BluetoothServiceInfo
from homeassistant.util import dt as dt_util

from . import (
    BLUECHARM_BEACON_SERVICE_INFO,
    BLUECHARM_BEACON_SERVICE_INFO_2,
    BLUECHARM_BEACON_SERVICE_INFO_DBUS,
    TESLA_TRANSIENT,
    TESLA_TRANSIENT_BLE_DEVICE,

@ -20,6 +23,8 @@ from . import (

from tests.common import MockConfigEntry, async_fire_time_changed
from tests.components.bluetooth import (
    generate_advertisement_data,
    inject_advertisement_with_time_and_source_connectable,
    inject_bluetooth_service_info,
    patch_all_discovered_devices,
)

@ -252,3 +257,65 @@ async def test_ignore_transient_devices_unless_we_see_them_a_few_times(hass):

    await hass.async_block_till_done()
    assert hass.states.get("device_tracker.s6da7c9389bd5452cc_cccc").state == STATE_HOME


async def test_changing_source_attribute(hass):
    """Test update of the source attribute."""
    entry = MockConfigEntry(
        domain=DOMAIN,
    )
    entry.add_to_hass(hass)

    assert await hass.config_entries.async_setup(entry.entry_id)
    await hass.async_block_till_done()

    now = time.monotonic()
    info = BLUECHARM_BEACON_SERVICE_INFO_2
    device = BLEDevice(
        address=info.address,
        name=info.name,
        details={},
    )
    advertisement_data = generate_advertisement_data(
        local_name=info.name,
        manufacturer_data=info.manufacturer_data,
        service_data=info.service_data,
        service_uuids=info.service_uuids,
        rssi=info.rssi,
    )

    inject_advertisement_with_time_and_source_connectable(
        hass,
        device,
        advertisement_data,
        now,
        "local",
        True,
    )
    await hass.async_block_till_done()

    attributes = hass.states.get(
        "sensor.bluecharm_177999_8105_estimated_distance"
    ).attributes
    assert attributes[ATTR_SOURCE] == "local"

    inject_advertisement_with_time_and_source_connectable(
        hass,
        device,
        advertisement_data,
        now,
        "proxy",
        True,
    )
    await hass.async_block_till_done()
    with patch_all_discovered_devices([BLUECHARM_BEACON_SERVICE_INFO_2]):
        async_fire_time_changed(
            hass,
            dt_util.utcnow() + timedelta(seconds=UPDATE_INTERVAL.total_seconds() * 2),
        )
        await hass.async_block_till_done()

    attributes = hass.states.get(
        "sensor.bluecharm_177999_8105_estimated_distance"
    ).attributes
    assert attributes[ATTR_SOURCE] == "proxy"

@ -1524,6 +1524,15 @@ async def test_lifx_set_state_kelvin(hass: HomeAssistant) -> None:
    assert bulb.set_color.calls[0][0][0] == [32000, 0, 25700, 2700]
    bulb.set_color.reset_mock()

    await hass.services.async_call(
        DOMAIN,
        "set_state",
        {ATTR_ENTITY_ID: entity_id, ATTR_BRIGHTNESS: 255, ATTR_COLOR_TEMP: 400},
        blocking=True,
    )
    assert bulb.set_color.calls[0][0][0] == [32000, 0, 65535, 2500]
    bulb.set_color.reset_mock()


async def test_infrared_color_bulb(hass: HomeAssistant) -> None:
    """Test setting infrared with a color bulb."""

@ -203,8 +203,9 @@ async def test_json_state_message(hass, mqtt_mock_entry_with_yaml_config):
        hass,
        state_topic,
        '{"installed_version":"1.9.0","latest_version":"1.9.0",'
        '"title":"Test Update Title","release_url":"https://example.com/release",'
        '"release_summary":"Test release summary"}',
        '"title":"Test Update 1 Title","release_url":"https://example.com/release1",'
        '"release_summary":"Test release summary 1",'
        '"entity_picture": "https://example.com/icon1.png"}',
    )

    await hass.async_block_till_done()

@ -213,14 +214,16 @@ async def test_json_state_message(hass, mqtt_mock_entry_with_yaml_config):
    assert state.state == STATE_OFF
    assert state.attributes.get("installed_version") == "1.9.0"
    assert state.attributes.get("latest_version") == "1.9.0"
    assert state.attributes.get("release_summary") == "Test release summary"
    assert state.attributes.get("release_url") == "https://example.com/release"
    assert state.attributes.get("title") == "Test Update Title"
    assert state.attributes.get("release_summary") == "Test release summary 1"
    assert state.attributes.get("release_url") == "https://example.com/release1"
    assert state.attributes.get("title") == "Test Update 1 Title"
    assert state.attributes.get("entity_picture") == "https://example.com/icon1.png"

    async_fire_mqtt_message(
        hass,
        state_topic,
        '{"installed_version":"1.9.0","latest_version":"2.0.0","title":"Test Update Title"}',
        '{"installed_version":"1.9.0","latest_version":"2.0.0",'
        '"title":"Test Update 2 Title","entity_picture":"https://example.com/icon2.png"}',
    )

    await hass.async_block_till_done()

@ -229,6 +232,7 @@ async def test_json_state_message(hass, mqtt_mock_entry_with_yaml_config):
    assert state.state == STATE_ON
    assert state.attributes.get("installed_version") == "1.9.0"
    assert state.attributes.get("latest_version") == "2.0.0"
    assert state.attributes.get("entity_picture") == "https://example.com/icon2.png"


async def test_json_state_message_with_template(hass, mqtt_mock_entry_with_yaml_config):

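For reference, the update entity's JSON state payload exercised above is a flat object with the version fields plus optional presentation keys. A minimal way to build one in a test (json is stdlib; the topic name is illustrative):

    import json

    payload = json.dumps(
        {
            "installed_version": "1.9.0",
            "latest_version": "2.0.0",
            "title": "Test Update 2 Title",
            "entity_picture": "https://example.com/icon2.png",
        }
    )
    # Then fire it at the configured state topic, e.g. with the
    # async_fire_mqtt_message helper already imported in this test module:
    # async_fire_mqtt_message(hass, state_topic, payload)
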
@ -182,7 +182,8 @@ async def test_statistics_during_period(recorder_mock, hass, hass_ws_client):


@freeze_time(datetime.datetime(2022, 10, 21, 7, 25, tzinfo=datetime.timezone.utc))
async def test_statistic_during_period(recorder_mock, hass, hass_ws_client):
@pytest.mark.parametrize("offset", (0, 1, 2))
async def test_statistic_during_period(recorder_mock, hass, hass_ws_client, offset):
    """Test statistic_during_period."""
    id = 1

@ -197,7 +198,9 @@ async def test_statistic_during_period(recorder_mock, hass, hass_ws_client):
    client = await hass_ws_client()

    zero = now
    start = zero.replace(minute=0, second=0, microsecond=0) + timedelta(hours=-3)
    start = zero.replace(minute=offset * 5, second=0, microsecond=0) + timedelta(
        hours=-3
    )

    imported_stats_5min = [
        {

@ -209,22 +212,37 @@ async def test_statistic_during_period(recorder_mock, hass, hass_ws_client):
        }
        for i in range(0, 39)
    ]
    imported_stats = [
    imported_stats = []
    slice_end = 12 - offset
    imported_stats.append(
        {
            "start": imported_stats_5min[i * 12]["start"],
            "max": max(
                stat["max"] for stat in imported_stats_5min[i * 12 : (i + 1) * 12]
            ),
            "mean": fmean(
                stat["mean"] for stat in imported_stats_5min[i * 12 : (i + 1) * 12]
            ),
            "min": min(
                stat["min"] for stat in imported_stats_5min[i * 12 : (i + 1) * 12]
            ),
            "sum": imported_stats_5min[i * 12 + 11]["sum"],
            "start": imported_stats_5min[0]["start"].replace(minute=0),
            "max": max(stat["max"] for stat in imported_stats_5min[0:slice_end]),
            "mean": fmean(stat["mean"] for stat in imported_stats_5min[0:slice_end]),
            "min": min(stat["min"] for stat in imported_stats_5min[0:slice_end]),
            "sum": imported_stats_5min[slice_end - 1]["sum"],
        }
        for i in range(0, 3)
    ]
    )
    for i in range(0, 2):
        slice_start = i * 12 + (12 - offset)
        slice_end = (i + 1) * 12 + (12 - offset)
        assert imported_stats_5min[slice_start]["start"].minute == 0
        imported_stats.append(
            {
                "start": imported_stats_5min[slice_start]["start"],
                "max": max(
                    stat["max"] for stat in imported_stats_5min[slice_start:slice_end]
                ),
                "mean": fmean(
                    stat["mean"] for stat in imported_stats_5min[slice_start:slice_end]
                ),
                "min": min(
                    stat["min"] for stat in imported_stats_5min[slice_start:slice_end]
                ),
                "sum": imported_stats_5min[slice_end - 1]["sum"],
            }
        )

    imported_metadata = {
        "has_mean": False,
        "has_sum": True,
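The slice arithmetic above is easier to see with concrete numbers: shifting the 5-minute rows by offset slots past the hour leaves only 12 - offset rows in the first hour bucket, and each later bucket starts at i * 12 + (12 - offset). A standalone check of that bookkeeping:

    # 5-minute rows shifted by `offset` slots past the hour boundary.
    for offset in (0, 1, 2):
        assert len(range(0, 12 - offset)) == 12 - offset  # partial first bucket
        for i in range(0, 2):
            slice_start = i * 12 + (12 - offset)
            slice_end = (i + 1) * 12 + (12 - offset)
            # Every later bucket holds exactly 12 rows and begins on an
            # hour boundary (slice_start + offset is a multiple of 12).
            assert slice_end - slice_start == 12
            assert (slice_start + offset) % 12 == 0
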
@ -285,8 +303,14 @@ async def test_statistic_during_period(recorder_mock, hass, hass_ws_client):
    }

    # This should also include imported_statistics_5min[:]
    start_time = "2022-10-21T04:00:00+00:00"
    end_time = "2022-10-21T07:15:00+00:00"
    start_time = (
        dt_util.parse_datetime("2022-10-21T04:00:00+00:00")
        + timedelta(minutes=5 * offset)
    ).isoformat()
    end_time = (
        dt_util.parse_datetime("2022-10-21T07:15:00+00:00")
        + timedelta(minutes=5 * offset)
    ).isoformat()
    await client.send_json(
        {
            "id": next_id(),

@ -308,8 +332,14 @@ async def test_statistic_during_period(recorder_mock, hass, hass_ws_client):
    }

    # This should also include imported_statistics_5min[:]
    start_time = "2022-10-20T04:00:00+00:00"
    end_time = "2022-10-21T08:20:00+00:00"
    start_time = (
        dt_util.parse_datetime("2022-10-21T04:00:00+00:00")
        + timedelta(minutes=5 * offset)
    ).isoformat()
    end_time = (
        dt_util.parse_datetime("2022-10-21T08:20:00+00:00")
        + timedelta(minutes=5 * offset)
    ).isoformat()
    await client.send_json(
        {
            "id": next_id(),

@ -331,7 +361,10 @@ async def test_statistic_during_period(recorder_mock, hass, hass_ws_client):
    }

    # This should include imported_statistics_5min[26:]
    start_time = "2022-10-21T06:10:00+00:00"
    start_time = (
        dt_util.parse_datetime("2022-10-21T06:10:00+00:00")
        + timedelta(minutes=5 * offset)
    ).isoformat()
    assert imported_stats_5min[26]["start"].isoformat() == start_time
    await client.send_json(
        {

@ -353,7 +386,10 @@ async def test_statistic_during_period(recorder_mock, hass, hass_ws_client):
    }

    # This should also include imported_statistics_5min[26:]
    start_time = "2022-10-21T06:09:00+00:00"
    start_time = (
        dt_util.parse_datetime("2022-10-21T06:09:00+00:00")
        + timedelta(minutes=5 * offset)
    ).isoformat()
    await client.send_json(
        {
            "id": next_id(),

@ -374,7 +410,10 @@ async def test_statistic_during_period(recorder_mock, hass, hass_ws_client):
    }

    # This should include imported_statistics_5min[:26]
    end_time = "2022-10-21T06:10:00+00:00"
    end_time = (
        dt_util.parse_datetime("2022-10-21T06:10:00+00:00")
        + timedelta(minutes=5 * offset)
    ).isoformat()
    assert imported_stats_5min[26]["start"].isoformat() == end_time
    await client.send_json(
        {

@ -396,9 +435,15 @@ async def test_statistic_during_period(recorder_mock, hass, hass_ws_client):
    }

    # This should include imported_statistics_5min[26:32] (less than a full hour)
    start_time = "2022-10-21T06:10:00+00:00"
    start_time = (
        dt_util.parse_datetime("2022-10-21T06:10:00+00:00")
        + timedelta(minutes=5 * offset)
    ).isoformat()
    assert imported_stats_5min[26]["start"].isoformat() == start_time
    end_time = "2022-10-21T06:40:00+00:00"
    end_time = (
        dt_util.parse_datetime("2022-10-21T06:40:00+00:00")
        + timedelta(minutes=5 * offset)
    ).isoformat()
    assert imported_stats_5min[32]["start"].isoformat() == end_time
    await client.send_json(
        {

@ -422,7 +467,7 @@ async def test_statistic_during_period(recorder_mock, hass, hass_ws_client):

    # This should include imported_statistics[2:] + imported_statistics_5min[36:]
    start_time = "2022-10-21T06:00:00+00:00"
    assert imported_stats_5min[24]["start"].isoformat() == start_time
    assert imported_stats_5min[24 - offset]["start"].isoformat() == start_time
    assert imported_stats[2]["start"].isoformat() == start_time
    await client.send_json(
        {

@ -437,10 +482,11 @@ async def test_statistic_during_period(recorder_mock, hass, hass_ws_client):
    response = await client.receive_json()
    assert response["success"]
    assert response["result"] == {
        "max": max(stat["max"] for stat in imported_stats_5min[24:]),
        "mean": fmean(stat["mean"] for stat in imported_stats_5min[24:]),
        "min": min(stat["min"] for stat in imported_stats_5min[24:]),
        "change": imported_stats_5min[-1]["sum"] - imported_stats_5min[23]["sum"],
        "max": max(stat["max"] for stat in imported_stats_5min[24 - offset :]),
        "mean": fmean(stat["mean"] for stat in imported_stats_5min[24 - offset :]),
        "min": min(stat["min"] for stat in imported_stats_5min[24 - offset :]),
        "change": imported_stats_5min[-1]["sum"]
        - imported_stats_5min[23 - offset]["sum"],
    }

    # This should also include imported_statistics[2:] + imported_statistics_5min[36:]

@ -457,10 +503,11 @@ async def test_statistic_during_period(recorder_mock, hass, hass_ws_client):
    response = await client.receive_json()
    assert response["success"]
    assert response["result"] == {
        "max": max(stat["max"] for stat in imported_stats_5min[24:]),
        "mean": fmean(stat["mean"] for stat in imported_stats_5min[24:]),
        "min": min(stat["min"] for stat in imported_stats_5min[24:]),
        "change": imported_stats_5min[-1]["sum"] - imported_stats_5min[23]["sum"],
        "max": max(stat["max"] for stat in imported_stats_5min[24 - offset :]),
        "mean": fmean(stat["mean"] for stat in imported_stats_5min[24 - offset :]),
        "min": min(stat["min"] for stat in imported_stats_5min[24 - offset :]),
        "change": imported_stats_5min[-1]["sum"]
        - imported_stats_5min[23 - offset]["sum"],
    }

    # This should include imported_statistics[2:3]

@ -477,11 +524,16 @@ async def test_statistic_during_period(recorder_mock, hass, hass_ws_client):
    )
    response = await client.receive_json()
    assert response["success"]
    slice_start = 24 - offset
    slice_end = 36 - offset
    assert response["result"] == {
        "max": max(stat["max"] for stat in imported_stats_5min[24:36]),
        "mean": fmean(stat["mean"] for stat in imported_stats_5min[24:36]),
        "min": min(stat["min"] for stat in imported_stats_5min[24:36]),
        "change": imported_stats_5min[35]["sum"] - imported_stats_5min[23]["sum"],
        "max": max(stat["max"] for stat in imported_stats_5min[slice_start:slice_end]),
        "mean": fmean(
            stat["mean"] for stat in imported_stats_5min[slice_start:slice_end]
        ),
        "min": min(stat["min"] for stat in imported_stats_5min[slice_start:slice_end]),
        "change": imported_stats_5min[slice_end - 1]["sum"]
        - imported_stats_5min[slice_start - 1]["sum"],
    }

    # Test we can get only selected types

@ -539,6 +591,167 @@ async def test_statistic_during_period(recorder_mock, hass, hass_ws_client):
    }


@freeze_time(datetime.datetime(2022, 10, 21, 7, 25, tzinfo=datetime.timezone.utc))
async def test_statistic_during_period_hole(recorder_mock, hass, hass_ws_client):
    """Test statistic_during_period when there are holes in the data."""
    id = 1

    def next_id():
        nonlocal id
        id += 1
        return id

    now = dt_util.utcnow()

    await async_recorder_block_till_done(hass)
    client = await hass_ws_client()

    zero = now
    start = zero.replace(minute=0, second=0, microsecond=0) + timedelta(hours=-18)

    imported_stats = [
        {
            "start": (start + timedelta(hours=3 * i)),
            "max": i * 2,
            "mean": i,
            "min": -76 + i * 2,
            "sum": i,
        }
        for i in range(0, 6)
    ]

    imported_metadata = {
        "has_mean": False,
        "has_sum": True,
        "name": "Total imported energy",
        "source": "recorder",
        "statistic_id": "sensor.test",
        "unit_of_measurement": "kWh",
    }

    recorder.get_instance(hass).async_import_statistics(
        imported_metadata,
        imported_stats,
        Statistics,
    )
    await async_wait_recording_done(hass)

    # This should include imported_stats[:]
    await client.send_json(
        {
            "id": next_id(),
            "type": "recorder/statistic_during_period",
            "statistic_id": "sensor.test",
        }
    )
    response = await client.receive_json()
    assert response["success"]
    assert response["result"] == {
        "max": max(stat["max"] for stat in imported_stats[:]),
        "mean": fmean(stat["mean"] for stat in imported_stats[:]),
        "min": min(stat["min"] for stat in imported_stats[:]),
        "change": imported_stats[-1]["sum"] - imported_stats[0]["sum"],
    }

    # This should also include imported_stats[:]
    start_time = "2022-10-20T13:00:00+00:00"
    end_time = "2022-10-21T05:00:00+00:00"
    assert imported_stats[0]["start"].isoformat() == start_time
    assert imported_stats[-1]["start"].isoformat() < end_time
    await client.send_json(
        {
            "id": next_id(),
            "type": "recorder/statistic_during_period",
            "statistic_id": "sensor.test",
            "fixed_period": {
                "start_time": start_time,
                "end_time": end_time,
            },
        }
    )
    response = await client.receive_json()
    assert response["success"]
    assert response["result"] == {
        "max": max(stat["max"] for stat in imported_stats[:]),
        "mean": fmean(stat["mean"] for stat in imported_stats[:]),
        "min": min(stat["min"] for stat in imported_stats[:]),
        "change": imported_stats[-1]["sum"] - imported_stats[0]["sum"],
    }

    # This should also include imported_stats[:]
    start_time = "2022-10-20T13:00:00+00:00"
    end_time = "2022-10-21T08:20:00+00:00"
    await client.send_json(
        {
            "id": next_id(),
            "type": "recorder/statistic_during_period",
            "statistic_id": "sensor.test",
            "fixed_period": {
                "start_time": start_time,
                "end_time": end_time,
            },
        }
    )
    response = await client.receive_json()
    assert response["success"]
    assert response["result"] == {
        "max": max(stat["max"] for stat in imported_stats[:]),
        "mean": fmean(stat["mean"] for stat in imported_stats[:]),
        "min": min(stat["min"] for stat in imported_stats[:]),
        "change": imported_stats[-1]["sum"] - imported_stats[0]["sum"],
    }

    # This should include imported_stats[1:4]
    start_time = "2022-10-20T16:00:00+00:00"
    end_time = "2022-10-20T23:00:00+00:00"
    assert imported_stats[1]["start"].isoformat() == start_time
    assert imported_stats[3]["start"].isoformat() < end_time
    await client.send_json(
        {
            "id": next_id(),
            "type": "recorder/statistic_during_period",
            "statistic_id": "sensor.test",
            "fixed_period": {
                "start_time": start_time,
                "end_time": end_time,
            },
        }
    )
    response = await client.receive_json()
    assert response["success"]
    assert response["result"] == {
        "max": max(stat["max"] for stat in imported_stats[1:4]),
        "mean": fmean(stat["mean"] for stat in imported_stats[1:4]),
        "min": min(stat["min"] for stat in imported_stats[1:4]),
        "change": imported_stats[3]["sum"] - imported_stats[1]["sum"],
    }

    # This should also include imported_stats[1:4]
    start_time = "2022-10-20T15:00:00+00:00"
    end_time = "2022-10-21T00:00:00+00:00"
    assert imported_stats[1]["start"].isoformat() > start_time
    assert imported_stats[3]["start"].isoformat() < end_time
    await client.send_json(
        {
            "id": next_id(),
            "type": "recorder/statistic_during_period",
            "statistic_id": "sensor.test",
            "fixed_period": {
                "start_time": start_time,
                "end_time": end_time,
            },
        }
    )
    response = await client.receive_json()
    assert response["success"]
    assert response["result"] == {
        "max": max(stat["max"] for stat in imported_stats[1:4]),
        "mean": fmean(stat["mean"] for stat in imported_stats[1:4]),
        "min": min(stat["min"] for stat in imported_stats[1:4]),
        "change": imported_stats[3]["sum"] - imported_stats[1]["sum"],
    }

@freeze_time(datetime.datetime(2022, 10, 21, 7, 25, tzinfo=datetime.timezone.utc))
@pytest.mark.parametrize(
    "calendar_period, start_time, end_time",

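One detail worth keeping in mind from test_statistic_during_period_hole above: with gaps in the data there is no row immediately before the queried window, so its assertions compute change as the last sum minus the first included sum, whereas the contiguous test earlier subtracts the sum of the row just before the window. A small numeric illustration of the two readings:

    sums = [10, 13, 17, 22]  # cumulative "sum" column of the included rows

    # Contiguous data: a row exists just before the window (sum == 8 here),
    # so change is measured against it.
    prev_sum = 8
    assert sums[-1] - prev_sum == 14

    # Data with holes: no preceding row, so change spans the included rows.
    assert sums[-1] - sums[0] == 12
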
@ -418,3 +418,19 @@ async def test_empty_config(hass: HomeAssistant) -> None:
|
|||
{DOMAIN: {}},
|
||||
)
|
||||
assert_setup_component(0, DOMAIN)
|
||||
|
||||
|
||||
async def test_config_schema_via_packages(hass: HomeAssistant) -> None:
|
||||
"""Test configuration via packages."""
|
||||
packages = {
|
||||
"pack_dict": {"rest": {}},
|
||||
"pack_11": {"rest": {"resource": "http://url1"}},
|
||||
"pack_list": {"rest": [{"resource": "http://url2"}]},
|
||||
}
|
||||
config = {hass_config.CONF_CORE: {hass_config.CONF_PACKAGES: packages}}
|
||||
await hass_config.merge_packages_config(hass, config, packages)
|
||||
|
||||
assert len(config) == 2
|
||||
assert len(config["rest"]) == 2
|
||||
assert config["rest"][0]["resource"] == "http://url1"
|
||||
assert config["rest"][1]["resource"] == "http://url2"
|
||||
|
|
|
@ -0,0 +1,153 @@
"""Test data for ZHA API tests."""

BASE_CUSTOM_CONFIGURATION = {
    "schemas": {
        "zha_options": [
            {
                "type": "integer",
                "valueMin": 0,
                "name": "default_light_transition",
                "optional": True,
                "default": 0,
            },
            {
                "type": "boolean",
                "name": "enhanced_light_transition",
                "required": True,
                "default": False,
            },
            {
                "type": "boolean",
                "name": "light_transitioning_flag",
                "required": True,
                "default": True,
            },
            {
                "type": "boolean",
                "name": "always_prefer_xy_color_mode",
                "required": True,
                "default": True,
            },
            {
                "type": "boolean",
                "name": "enable_identify_on_join",
                "required": True,
                "default": True,
            },
            {
                "type": "integer",
                "valueMin": 0,
                "name": "consider_unavailable_mains",
                "optional": True,
                "default": 7200,
            },
            {
                "type": "integer",
                "valueMin": 0,
                "name": "consider_unavailable_battery",
                "optional": True,
                "default": 21600,
            },
        ]
    },
    "data": {
        "zha_options": {
            "enhanced_light_transition": True,
            "default_light_transition": 0,
            "light_transitioning_flag": True,
            "always_prefer_xy_color_mode": True,
            "enable_identify_on_join": True,
            "consider_unavailable_mains": 7200,
            "consider_unavailable_battery": 21600,
        }
    },
}

CONFIG_WITH_ALARM_OPTIONS = {
    "schemas": {
        "zha_options": [
            {
                "type": "integer",
                "valueMin": 0,
                "name": "default_light_transition",
                "optional": True,
                "default": 0,
            },
            {
                "type": "boolean",
                "name": "enhanced_light_transition",
                "required": True,
                "default": False,
            },
            {
                "type": "boolean",
                "name": "light_transitioning_flag",
                "required": True,
                "default": True,
            },
            {
                "type": "boolean",
                "name": "always_prefer_xy_color_mode",
                "required": True,
                "default": True,
            },
            {
                "type": "boolean",
                "name": "enable_identify_on_join",
                "required": True,
                "default": True,
            },
            {
                "type": "integer",
                "valueMin": 0,
                "name": "consider_unavailable_mains",
                "optional": True,
                "default": 7200,
            },
            {
                "type": "integer",
                "valueMin": 0,
                "name": "consider_unavailable_battery",
                "optional": True,
                "default": 21600,
            },
        ],
        "zha_alarm_options": [
            {
                "type": "string",
                "name": "alarm_master_code",
                "required": True,
                "default": "1234",
            },
            {
                "type": "integer",
                "valueMin": 0,
                "name": "alarm_failed_tries",
                "required": True,
                "default": 3,
            },
            {
                "type": "boolean",
                "name": "alarm_arm_requires_code",
                "required": True,
                "default": False,
            },
        ],
    },
    "data": {
        "zha_options": {
            "enhanced_light_transition": True,
            "default_light_transition": 0,
            "light_transitioning_flag": True,
            "always_prefer_xy_color_mode": True,
            "enable_identify_on_join": True,
            "consider_unavailable_mains": 7200,
            "consider_unavailable_battery": 21600,
        },
        "zha_alarm_options": {
            "alarm_arm_requires_code": False,
            "alarm_master_code": "4321",
            "alarm_failed_tries": 2,
        },
    },
}
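Each schema entry above pairs a type name with optionality, bounds, and a default. A minimal checker that validates one configured value against one schema entry (entirely my own sketch, not ZHA code) shows how the two halves of these fixtures line up:

# Hypothetical helper, not part of ZHA: validate one value against one schema entry.
def check_option(schema: dict, value) -> bool:
    if schema["type"] == "integer":
        return isinstance(value, int) and value >= schema.get("valueMin", value)
    if schema["type"] == "boolean":
        return isinstance(value, bool)
    if schema["type"] == "string":
        return isinstance(value, str)
    return False

# e.g. every configured option in BASE_CUSTOM_CONFIGURATION passes its own schema:
for entry in BASE_CUSTOM_CONFIGURATION["schemas"]["zha_options"]:
    value = BASE_CUSTOM_CONFIGURATION["data"]["zha_options"][entry["name"]]
    assert check_option(entry, value)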
@ -1,5 +1,6 @@
"""Test ZHA API."""
from binascii import unhexlify
from copy import deepcopy
from unittest.mock import AsyncMock, patch

import pytest

@ -8,6 +9,7 @@ import zigpy.backups
import zigpy.profiles.zha
import zigpy.types
import zigpy.zcl.clusters.general as general
import zigpy.zcl.clusters.security as security

from homeassistant.components.websocket_api import const
from homeassistant.components.zha import DOMAIN

@ -50,6 +52,7 @@ from .conftest import (
    SIG_EP_PROFILE,
    SIG_EP_TYPE,
)
from .data import BASE_CUSTOM_CONFIGURATION, CONFIG_WITH_ALARM_OPTIONS

IEEE_SWITCH_DEVICE = "01:2d:6f:00:0a:90:69:e7"
IEEE_GROUPABLE_DEVICE = "01:2d:6f:00:0a:90:69:e8"

@ -61,6 +64,7 @@ def required_platform_only():
    with patch(
        "homeassistant.components.zha.PLATFORMS",
        (
            Platform.ALARM_CONTROL_PANEL,
            Platform.SELECT,
            Platform.SENSOR,
            Platform.SWITCH,

@ -89,6 +93,25 @@ async def device_switch(hass, zigpy_device_mock, zha_device_joined):
    return zha_device


@pytest.fixture
async def device_ias_ace(hass, zigpy_device_mock, zha_device_joined):
    """Test alarm control panel device."""

    zigpy_device = zigpy_device_mock(
        {
            1: {
                SIG_EP_INPUT: [security.IasAce.cluster_id],
                SIG_EP_OUTPUT: [],
                SIG_EP_TYPE: zigpy.profiles.zha.DeviceType.IAS_ANCILLARY_CONTROL,
                SIG_EP_PROFILE: zigpy.profiles.zha.PROFILE_ID,
            }
        },
    )
    zha_device = await zha_device_joined(zigpy_device)
    zha_device.available = True
    return zha_device
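Worth noting for the fixture above: the single input cluster is what marks the mock as an alarm device, which in turn is why joining it exposes the zha_alarm_options schema in the tests further down. A standalone check of the identifier involved (my own illustration, not part of the test file):

import zigpy.zcl.clusters.security as security

# 0x0501 is the IAS ACE ("Ancillary Control Equipment") server cluster,
# the cluster the device_ias_ace fixture advertises on endpoint 1.
assert security.IasAce.cluster_id == 0x0501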

@pytest.fixture
async def device_groupable(hass, zigpy_device_mock, zha_device_joined):
    """Test zha light platform."""
@ -225,6 +248,58 @@ async def test_list_devices(zha_client):
    assert device == device2


async def test_get_zha_config(zha_client):
    """Test getting zha custom configuration."""
    await zha_client.send_json({ID: 5, TYPE: "zha/configuration"})

    msg = await zha_client.receive_json()

    configuration = msg["result"]
    assert configuration == BASE_CUSTOM_CONFIGURATION


async def test_get_zha_config_with_alarm(hass, zha_client, device_ias_ace):
    """Test getting zha custom configuration."""
    await zha_client.send_json({ID: 5, TYPE: "zha/configuration"})

    msg = await zha_client.receive_json()

    configuration = msg["result"]
    assert configuration == CONFIG_WITH_ALARM_OPTIONS

    # test that the alarm options are not in the config when we remove the device
    device_ias_ace.gateway.device_removed(device_ias_ace.device)
    await hass.async_block_till_done()
    await zha_client.send_json({ID: 6, TYPE: "zha/configuration"})

    msg = await zha_client.receive_json()

    configuration = msg["result"]
    assert configuration == BASE_CUSTOM_CONFIGURATION


async def test_update_zha_config(zha_client, zigpy_app_controller):
    """Test updating zha custom configuration."""

    configuration = deepcopy(CONFIG_WITH_ALARM_OPTIONS)
    configuration["data"]["zha_options"]["default_light_transition"] = 10

    with patch(
        "bellows.zigbee.application.ControllerApplication.new",
        return_value=zigpy_app_controller,
    ):
        await zha_client.send_json(
            {ID: 5, TYPE: "zha/configuration/update", "data": configuration["data"]}
        )
        msg = await zha_client.receive_json()
        assert msg["success"]

    await zha_client.send_json({ID: 6, TYPE: "zha/configuration"})
    msg = await zha_client.receive_json()
    # the configuration read back should match the update we sent;
    # binding the result to a fresh name keeps the assertion meaningful
    updated_configuration = msg["result"]
    assert updated_configuration == configuration


async def test_device_not_found(zha_client):
    """Test not found response from get device API."""
    await zha_client.send_json(
@ -116,7 +116,7 @@ async def test_window_cover(hass, client, chain_actuator_zws12, integration):
        blocking=True,
    )

    assert len(client.async_send_command.call_args_list) == 2
    assert len(client.async_send_command.call_args_list) == 1
    open_args = client.async_send_command.call_args_list[0][0][0]
    assert open_args["command"] == "node.set_value"
    assert open_args["nodeId"] == 6

@ -127,16 +127,6 @@ async def test_window_cover(hass, client, chain_actuator_zws12, integration):
    }
    assert not open_args["value"]

    close_args = client.async_send_command.call_args_list[1][0][0]
    assert close_args["command"] == "node.set_value"
    assert close_args["nodeId"] == 6
    assert close_args["valueId"] == {
        "commandClass": 38,
        "endpoint": 0,
        "property": "Close",
    }
    assert not close_args["value"]

    # Test position update from value updated event
    event = Event(
        type="value updated",

@ -189,7 +179,7 @@ async def test_window_cover(hass, client, chain_actuator_zws12, integration):
        blocking=True,
    )

    assert len(client.async_send_command.call_args_list) == 2
    assert len(client.async_send_command.call_args_list) == 1
    open_args = client.async_send_command.call_args_list[0][0][0]
    assert open_args["command"] == "node.set_value"
    assert open_args["nodeId"] == 6

@ -200,16 +190,6 @@ async def test_window_cover(hass, client, chain_actuator_zws12, integration):
    }
    assert not open_args["value"]

    close_args = client.async_send_command.call_args_list[1][0][0]
    assert close_args["command"] == "node.set_value"
    assert close_args["nodeId"] == 6
    assert close_args["valueId"] == {
        "commandClass": 38,
        "endpoint": 0,
        "property": "Close",
    }
    assert not close_args["value"]

    client.async_send_command.reset_mock()

    event = Event(

@ -329,7 +309,7 @@ async def test_aeotec_nano_shutter_cover(
        blocking=True,
    )

    assert len(client.async_send_command.call_args_list) == 2
    assert len(client.async_send_command.call_args_list) == 1
    open_args = client.async_send_command.call_args_list[0][0][0]
    assert open_args["command"] == "node.set_value"
    assert open_args["nodeId"] == 3

@ -340,16 +320,6 @@ async def test_aeotec_nano_shutter_cover(
    }
    assert not open_args["value"]

    close_args = client.async_send_command.call_args_list[1][0][0]
    assert close_args["command"] == "node.set_value"
    assert close_args["nodeId"] == 3
    assert close_args["valueId"] == {
        "commandClass": 38,
        "endpoint": 0,
        "property": "Off",
    }
    assert not close_args["value"]

    # Test position update from value updated event
    event = Event(
        type="value updated",

@ -403,7 +373,7 @@ async def test_aeotec_nano_shutter_cover(
        blocking=True,
    )

    assert len(client.async_send_command.call_args_list) == 2
    assert len(client.async_send_command.call_args_list) == 1
    open_args = client.async_send_command.call_args_list[0][0][0]
    assert open_args["command"] == "node.set_value"
    assert open_args["nodeId"] == 3

@ -414,16 +384,6 @@ async def test_aeotec_nano_shutter_cover(
    }
    assert not open_args["value"]

    close_args = client.async_send_command.call_args_list[1][0][0]
    assert close_args["command"] == "node.set_value"
    assert close_args["nodeId"] == 3
    assert close_args["valueId"] == {
        "commandClass": 38,
        "endpoint": 0,
        "property": "Off",
    }
    assert not close_args["value"]
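Across all four hunks the pattern is the same: each cover service call is now expected to issue a single node.set_value command, and the asserts on a second close_args command are dropped. For reference, the one payload the updated assertions describe, with values copied from the Aeotec hunk above (the falsy "value" is inferred from the assert, so treat it as illustrative):

# The single command the updated assertions expect (Aeotec variant):
expected_call = {
    "command": "node.set_value",
    "nodeId": 3,
    "valueId": {
        "commandClass": 38,  # Multilevel Switch CC (0x26)
        "endpoint": 0,
        "property": "Off",
    },
    "value": False,  # 'assert not open_args["value"]' implies a falsy value
}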

async def test_blind_cover(hass, client, iblinds_v2, integration):
    """Test a blind cover entity."""