Bump ruff to 0.9.1 (#135197)
parent 2b51ab1c75
commit 8db63adc11
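Bumps ruff from 0.8.6 to 0.9.1 everywhere the version is pinned: the ruff-pre-commit hook revision, the `[tool.ruff]` required-version pin, the generated pre-commit requirements file, and the hassfest image. Everything else in the diff is mechanical output of the 0.9 formatter: long assert messages are now parenthesized after the condition instead of the condition being wrapped, over-long return annotations break inside the subscript rather than inside added parentheses, match-case alternatives are joined when they fit on one line, and quotes in and around f-strings are normalized.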
@@ -1,6 +1,6 @@
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.8.6
+    rev: v0.9.1
    hooks:
      - id: ruff
        args:
@@ -1472,9 +1472,9 @@ class PipelineInput:
         if stt_audio_buffer:
             # Send audio in the buffer first to speech-to-text, then move on to stt_stream.
             # This is basically an async itertools.chain.
-            async def buffer_then_audio_stream() -> (
-                AsyncGenerator[EnhancedAudioChunk]
-            ):
+            async def buffer_then_audio_stream() -> AsyncGenerator[
+                EnhancedAudioChunk
+            ]:
                 # Buffered audio
                 for chunk in stt_audio_buffer:
                     yield chunk
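The pipeline hunk above shows the first formatter change from 0.9: when a return annotation is too long for one line, the formatter now splits inside the subscript (`AsyncGenerator[...]`, `Callable[...]`, `tuple[...]`, `dict[...]`) instead of wrapping the whole annotation in parentheses. A minimal sketch of the two layouts (illustrative function, not from this commit; a real formatter run would only split a line that exceeds the length limit):

from collections.abc import AsyncGenerator


# Old layout (ruff <= 0.8): the whole annotation was parenthesized.
# async def stream() -> (
#     AsyncGenerator[bytes, None]
# ):

# New layout (ruff 0.9): the break happens inside the subscript.
async def stream() -> AsyncGenerator[
    bytes, None
]:
    # Yield one placeholder chunk so the sketch is runnable.
    yield b"chunk"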
@@ -978,9 +978,9 @@ class DeviceScanner:

     async def async_scan_devices(self) -> list[str]:
         """Scan for devices."""
-        assert (
-            self.hass is not None
-        ), "hass should be set by async_setup_scanner_platform"
+        assert self.hass is not None, (
+            "hass should be set by async_setup_scanner_platform"
+        )
         return await self.hass.async_add_executor_job(self.scan_devices)

     def get_device_name(self, device: str) -> str | None:
@@ -989,9 +989,9 @@ class DeviceScanner:

     async def async_get_device_name(self, device: str) -> str | None:
         """Get the name of a device."""
-        assert (
-            self.hass is not None
-        ), "hass should be set by async_setup_scanner_platform"
+        assert self.hass is not None, (
+            "hass should be set by async_setup_scanner_platform"
+        )
         return await self.hass.async_add_executor_job(self.get_device_name, device)

     def get_extra_attributes(self, device: str) -> dict:
@@ -1000,9 +1000,9 @@ class DeviceScanner:

     async def async_get_extra_attributes(self, device: str) -> dict:
         """Get the extra attributes of a device."""
-        assert (
-            self.hass is not None
-        ), "hass should be set by async_setup_scanner_platform"
+        assert self.hass is not None, (
+            "hass should be set by async_setup_scanner_platform"
+        )
         return await self.hass.async_add_executor_job(self.get_extra_attributes, device)

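The DeviceScanner hunks show the most common rewrite in this diff: for an assert whose message makes the statement too long, the 0.9 formatter keeps the condition on the `assert` line and parenthesizes the message, where 0.8 parenthesized the condition and let the message trail the closing parenthesis. A runnable sketch (hypothetical stand-in value):

hass = object()  # stand-in for the real attribute

# Old layout (ruff <= 0.8):
# assert (
#     hass is not None
# ), "hass should be set by async_setup_scanner_platform"

# New layout (ruff 0.9):
assert hass is not None, (
    "hass should be set by async_setup_scanner_platform"
)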
@@ -114,9 +114,9 @@ class GoogleCloudSpeechToTextEntity(SpeechToTextEntity):
             )
         )

-        async def request_generator() -> (
-            AsyncGenerator[speech_v1.StreamingRecognizeRequest]
-        ):
+        async def request_generator() -> AsyncGenerator[
+            speech_v1.StreamingRecognizeRequest
+        ]:
             # The first request must only contain a streaming_config
             yield speech_v1.StreamingRecognizeRequest(streaming_config=streaming_config)
             # All subsequent requests must only contain audio_content
@@ -115,9 +115,9 @@ class EventProcessor:
         include_entity_name: bool = True,
     ) -> None:
         """Init the event stream."""
-        assert not (
-            context_id and (entity_ids or device_ids)
-        ), "can't pass in both context_id and (entity_ids or device_ids)"
+        assert not (context_id and (entity_ids or device_ids)), (
+            "can't pass in both context_id and (entity_ids or device_ids)"
+        )
         self.hass = hass
         self.ent_reg = er.async_get(hass)
         self.event_types = event_types
@@ -310,13 +310,11 @@ class MatterClimate(MatterEntity, ClimateEntity):
         ):
             match running_state_value:
                 case (
-                    ThermostatRunningState.Heat
-                    | ThermostatRunningState.HeatStage2
+                    ThermostatRunningState.Heat | ThermostatRunningState.HeatStage2
                 ):
                     self._attr_hvac_action = HVACAction.HEATING
                 case (
-                    ThermostatRunningState.Cool
-                    | ThermostatRunningState.CoolStage2
+                    ThermostatRunningState.Cool | ThermostatRunningState.CoolStage2
                 ):
                     self._attr_hvac_action = HVACAction.COOLING
                 case (
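In the Matter climate hunk the same style pass also joins `case` pattern alternatives onto a single line when they fit inside the existing parentheses, so `ThermostatRunningState.Heat | ThermostatRunningState.HeatStage2` collapses from two lines to one; the surrounding `case (` and `):` lines are unchanged.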
@@ -98,9 +98,9 @@ class BasePlatform(Entity):
         def get_optional_numeric_config(config_name: str) -> int | float | None:
             if (val := entry.get(config_name)) is None:
                 return None
-            assert isinstance(
-                val, (float, int)
-            ), f"Expected float or int but {config_name} was {type(val)}"
+            assert isinstance(val, (float, int)), (
+                f"Expected float or int but {config_name} was {type(val)}"
+            )
             return val

         self._min_value = get_optional_numeric_config(CONF_MIN_VALUE)
@@ -84,7 +84,7 @@ class NMBSConfigFlow(ConfigFlow, domain=DOMAIN):
         )
         self._abort_if_unique_id_configured()

-        config_entry_name = f"Train from {station_from["standardname"]} to {station_to["standardname"]}"
+        config_entry_name = f"Train from {station_from['standardname']} to {station_to['standardname']}"
         return self.async_create_entry(
             title=config_entry_name,
             data=user_input,
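The NMBS hunks are a different class of change: quote normalization inside f-string replacement fields. Reusing the outer quote character inside a replacement field, as in `f"...{station_from["standardname"]}..."`, is only valid syntax on Python 3.12+ (PEP 701); ruff 0.9 rewrites the inner quotes so they differ from the outer ones. A sketch with a hypothetical dict:

station_from = {"standardname": "Brussel-Zuid"}  # hypothetical data

# Only parseable on Python 3.12+ (PEP 701):
#     f"Train from {station_from["standardname"]}"
# Style emitted by ruff 0.9, parseable everywhere:
name = f"Train from {station_from['standardname']}"
print(name)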
@@ -157,18 +157,18 @@ class NMBSConfigFlow(ConfigFlow, domain=DOMAIN):
         if entity_id := entity_registry.async_get_entity_id(
             Platform.SENSOR,
             DOMAIN,
-            f"{prefix}_{station_live["standardname"]}_{station_from["standardname"]}_{station_to["standardname"]}",
+            f"{prefix}_{station_live['standardname']}_{station_from['standardname']}_{station_to['standardname']}",
         ):
-            new_unique_id = f"{DOMAIN}_{prefix}_{station_live["id"]}_{station_from["id"]}_{station_to["id"]}"
+            new_unique_id = f"{DOMAIN}_{prefix}_{station_live['id']}_{station_from['id']}_{station_to['id']}"
             entity_registry.async_update_entity(
                 entity_id, new_unique_id=new_unique_id
             )
         if entity_id := entity_registry.async_get_entity_id(
             Platform.SENSOR,
             DOMAIN,
-            f"{prefix}_{station_live["name"]}_{station_from["name"]}_{station_to["name"]}",
+            f"{prefix}_{station_live['name']}_{station_from['name']}_{station_to['name']}",
         ):
-            new_unique_id = f"{DOMAIN}_{prefix}_{station_live["id"]}_{station_from["id"]}_{station_to["id"]}"
+            new_unique_id = f"{DOMAIN}_{prefix}_{station_live['id']}_{station_from['id']}_{station_to['id']}"
             entity_registry.async_update_entity(
                 entity_id, new_unique_id=new_unique_id
             )
@@ -201,15 +201,14 @@ class NMBSLiveBoard(SensorEntity):
     @property
     def name(self) -> str:
         """Return the sensor default name."""
-        return f"Trains in {self._station["standardname"]}"
+        return f"Trains in {self._station['standardname']}"

     @property
     def unique_id(self) -> str:
         """Return the unique ID."""

         unique_id = (
-            f"{self._station["id"]}_{self._station_from["id"]}_"
-            f"{self._station_to["id"]}"
+            f"{self._station['id']}_{self._station_from['id']}_{self._station_to['id']}"
         )
         return f"nmbs_live_{unique_id}"

@@ -302,7 +301,7 @@ class NMBSSensor(SensorEntity):
     @property
     def unique_id(self) -> str:
         """Return the unique ID."""
-        unique_id = f"{self._station_from["id"]}_{self._station_to["id"]}"
+        unique_id = f"{self._station_from['id']}_{self._station_to['id']}"

         return f"nmbs_connection_{unique_id}"

@@ -310,7 +309,7 @@ class NMBSSensor(SensorEntity):
     def name(self) -> str:
         """Return the name of the sensor."""
         if self._name is None:
-            return f"Train from {self._station_from["standardname"]} to {self._station_to["standardname"]}"
+            return f"Train from {self._station_from['standardname']} to {self._station_to['standardname']}"
         return self._name

     @property
@@ -252,9 +252,9 @@ class PullPointManager:

     async def async_start(self) -> bool:
         """Start pullpoint subscription."""
-        assert (
-            self.state == PullPointManagerState.STOPPED
-        ), "PullPoint manager already started"
+        assert self.state == PullPointManagerState.STOPPED, (
+            "PullPoint manager already started"
+        )
         LOGGER.debug("%s: Starting PullPoint manager", self._name)
         if not await self._async_start_pullpoint():
             self.state = PullPointManagerState.FAILED
@@ -501,9 +501,9 @@ class WebHookManager:
     async def async_start(self) -> bool:
         """Start polling events."""
         LOGGER.debug("%s: Starting webhook manager", self._name)
-        assert (
-            self.state == WebHookManagerState.STOPPED
-        ), "Webhook manager already started"
+        assert self.state == WebHookManagerState.STOPPED, (
+            "Webhook manager already started"
+        )
         assert self._webhook_url is None, "Webhook already registered"
         self._async_register_webhook()
         if not await self._async_start_webhook():
@@ -67,11 +67,9 @@ type _ReturnFuncType[_T, **_P, _R] = Callable[
 ]


-def catch_request_errors[_OpenhomeDeviceT: OpenhomeDevice, **_P, _R]() -> (
-    Callable[
-        [_FuncType[_OpenhomeDeviceT, _P, _R]], _ReturnFuncType[_OpenhomeDeviceT, _P, _R]
-    ]
-):
+def catch_request_errors[_OpenhomeDeviceT: OpenhomeDevice, **_P, _R]() -> Callable[
+    [_FuncType[_OpenhomeDeviceT, _P, _R]], _ReturnFuncType[_OpenhomeDeviceT, _P, _R]
+]:
     """Catch TimeoutError, aiohttp.ClientError, UpnpError errors."""

     def call_wrapper(
@@ -48,9 +48,9 @@ class QSSensor(QSEntity, SensorEntity):

         self._decode, self.unit = SENSORS[sensor_type]
         # this cannot happen because it only happens in bool and this should be redirected to binary_sensor
-        assert not isinstance(
-            self.unit, type
-        ), f"boolean sensor id={sensor['id']} name={sensor['name']}"
+        assert not isinstance(self.unit, type), (
+            f"boolean sensor id={sensor['id']} name={sensor['name']}"
+        )

     @callback
     def update_packet(self, packet):
@@ -2752,9 +2752,9 @@ class EventTypeIDMigration(BaseMigrationWithQuery, BaseOffLineMigration):
             for db_event_type in missing_db_event_types:
                 # We cannot add the assigned ids to the event_type_manager
                 # because the commit could get rolled back
-                assert (
-                    db_event_type.event_type is not None
-                ), "event_type should never be None"
+                assert db_event_type.event_type is not None, (
+                    "event_type should never be None"
+                )
                 event_type_to_id[db_event_type.event_type] = (
                     db_event_type.event_type_id
                 )
@@ -2830,9 +2830,9 @@ class EntityIDMigration(BaseMigrationWithQuery, BaseOffLineMigration):
             for db_states_metadata in missing_states_metadata:
                 # We cannot add the assigned ids to the event_type_manager
                 # because the commit could get rolled back
-                assert (
-                    db_states_metadata.entity_id is not None
-                ), "entity_id should never be None"
+                assert db_states_metadata.entity_id is not None, (
+                    "entity_id should never be None"
+                )
                 entity_id_to_metadata_id[db_states_metadata.entity_id] = (
                     db_states_metadata.metadata_id
                 )
@@ -47,9 +47,9 @@ class RecorderPool(SingletonThreadPool, NullPool):
     ) -> None:
         """Create the pool."""
         kw["pool_size"] = POOL_SIZE
-        assert (
-            recorder_and_worker_thread_ids is not None
-        ), "recorder_and_worker_thread_ids is required"
+        assert recorder_and_worker_thread_ids is not None, (
+            "recorder_and_worker_thread_ids is required"
+        )
         self.recorder_and_worker_thread_ids = recorder_and_worker_thread_ids
         SingletonThreadPool.__init__(self, creator, **kw)

@@ -968,12 +968,10 @@ def _reduce_statistics(
     return result


-def reduce_day_ts_factory() -> (
-    tuple[
-        Callable[[float, float], bool],
-        Callable[[float], tuple[float, float]],
-    ]
-):
+def reduce_day_ts_factory() -> tuple[
+    Callable[[float, float], bool],
+    Callable[[float], tuple[float, float]],
+]:
     """Return functions to match same day and day start end."""
     _lower_bound: float = 0
     _upper_bound: float = 0
@@ -1017,12 +1015,10 @@ def _reduce_statistics_per_day(
     )


-def reduce_week_ts_factory() -> (
-    tuple[
-        Callable[[float, float], bool],
-        Callable[[float], tuple[float, float]],
-    ]
-):
+def reduce_week_ts_factory() -> tuple[
+    Callable[[float, float], bool],
+    Callable[[float], tuple[float, float]],
+]:
     """Return functions to match same week and week start end."""
     _lower_bound: float = 0
     _upper_bound: float = 0
@@ -1075,12 +1071,10 @@ def _find_month_end_time(timestamp: datetime) -> datetime:
     )


-def reduce_month_ts_factory() -> (
-    tuple[
-        Callable[[float, float], bool],
-        Callable[[float], tuple[float, float]],
-    ]
-):
+def reduce_month_ts_factory() -> tuple[
+    Callable[[float, float], bool],
+    Callable[[float], tuple[float, float]],
+]:
     """Return functions to match same month and month start end."""
     _lower_bound: float = 0
     _upper_bound: float = 0
@@ -615,9 +615,9 @@ def info_from_service(service: AsyncServiceInfo) -> ZeroconfServiceInfo | None:
         return None

     if TYPE_CHECKING:
-        assert (
-            service.server is not None
-        ), "server cannot be none if there are addresses"
+        assert service.server is not None, (
+            "server cannot be none if there are addresses"
+        )
     return ZeroconfServiceInfo(
         ip_address=ip_address,
         ip_addresses=ip_addresses,
@@ -15,9 +15,9 @@ if TYPE_CHECKING:
 _function_cache: dict[str, Callable[[str, str, dict[str, str] | None], str]] = {}


-def import_async_get_exception_message() -> (
-    Callable[[str, str, dict[str, str] | None], str]
-):
+def import_async_get_exception_message() -> Callable[
+    [str, str, dict[str, str] | None], str
+]:
     """Return a method that can fetch a translated exception message.

     Defaults to English, requires translations to already be cached.
@@ -1480,9 +1480,9 @@ class Entity(

         if self.registry_entry is not None:
             # This is an assert as it should never happen, but helps in tests
-            assert (
-                not self.registry_entry.disabled_by
-            ), f"Entity '{self.entity_id}' is being added while it's disabled"
+            assert not self.registry_entry.disabled_by, (
+                f"Entity '{self.entity_id}' is being added while it's disabled"
+            )

             self.async_on_remove(
                 async_track_entity_registry_updated_event(
@@ -46,9 +46,9 @@ def request_handler_factory(
 ) -> Callable[[web.Request], Awaitable[web.StreamResponse]]:
     """Wrap the handler classes."""
     is_coroutinefunction = asyncio.iscoroutinefunction(handler)
-    assert is_coroutinefunction or is_callback(
-        handler
-    ), "Handler should be a coroutine or a callback."
+    assert is_coroutinefunction or is_callback(handler), (
+        "Handler should be a coroutine or a callback."
+    )

     async def handle(request: web.Request) -> web.StreamResponse:
         """Handle incoming request."""
@@ -841,16 +841,16 @@ class Template:
         self.ensure_valid()

         assert self.hass is not None, "hass variable not set on template"
-        assert (
-            self._limited is None or self._limited == limited
-        ), "can't change between limited and non limited template"
+        assert self._limited is None or self._limited == limited, (
+            "can't change between limited and non limited template"
+        )
-        assert (
-            self._strict is None or self._strict == strict
-        ), "can't change between strict and non strict template"
+        assert self._strict is None or self._strict == strict, (
+            "can't change between strict and non strict template"
+        )
         assert not (strict and limited), "can't combine strict and limited template"
-        assert (
-            self._log_fn is None or self._log_fn == log_fn
-        ), "can't change custom log function"
+        assert self._log_fn is None or self._log_fn == log_fn, (
+            "can't change custom log function"
+        )
         assert self._compiled_code is not None, "template code was not compiled"

         self._limited = limited
@@ -701,7 +701,7 @@ exclude_lines = [
 ]

 [tool.ruff]
-required-version = ">=0.8.0"
+required-version = ">=0.9.1"

 [tool.ruff.lint]
 select = [
@@ -1,5 +1,5 @@
 # Automatically generated from .pre-commit-config.yaml by gen_requirements_all.py, do not edit

 codespell==2.3.0
-ruff==0.8.6
+ruff==0.9.1
 yamllint==1.35.1
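The two pins above guard the toolchain itself: `required-version = ">=0.9.1"` makes ruff refuse to run under an older binary, so a stale local install fails loudly instead of silently re-wrapping files back to the 0.8 style, and the requirements file (regenerated from .pre-commit-config.yaml by gen_requirements_all.py, per its header) keeps CI on the same version.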
@@ -24,7 +24,7 @@ RUN --mount=from=ghcr.io/astral-sh/uv:0.5.18,source=/uv,target=/bin/uv \
     --no-cache \
     -c /usr/src/homeassistant/homeassistant/package_constraints.txt \
     -r /usr/src/homeassistant/requirements.txt \
-    stdlib-list==0.10.0 pipdeptree==2.23.4 tqdm==4.66.5 ruff==0.8.6 \
+    stdlib-list==0.10.0 pipdeptree==2.23.4 tqdm==4.66.5 ruff==0.9.1 \
     PyTurboJPEG==1.7.5 go2rtc-client==0.1.2 ha-ffmpeg==3.2.2 hassil==2.1.0 home-assistant-intents==2025.1.1 mutagen==1.47.0 pymicro-vad==1.0.1 pyspeex-noise==1.0.2

 LABEL "name"="hassfest"
@@ -1197,16 +1197,16 @@ def assert_setup_component(count, domain=None):
     yield config

     if domain is None:
-        assert (
-            len(config) == 1
-        ), f"assert_setup_component requires DOMAIN: {list(config.keys())}"
+        assert len(config) == 1, (
+            f"assert_setup_component requires DOMAIN: {list(config.keys())}"
+        )
         domain = list(config.keys())[0]

     res = config.get(domain)
     res_len = 0 if res is None else len(res)
-    assert (
-        res_len == count
-    ), f"setup_component failed, expected {count} got {res_len}: {res}"
+    assert res_len == count, (
+        f"setup_component failed, expected {count} got {res_len}: {res}"
+    )


 def mock_restore_cache(hass: HomeAssistant, states: Sequence[State]) -> None:
@@ -1814,9 +1814,9 @@ async def snapshot_platform(
     """Snapshot a platform."""
     entity_entries = er.async_entries_for_config_entry(entity_registry, config_entry_id)
     assert entity_entries
-    assert (
-        len({entity_entry.domain for entity_entry in entity_entries}) == 1
-    ), "Please limit the loaded platforms to 1 platform."
+    assert len({entity_entry.domain for entity_entry in entity_entries}) == 1, (
+        "Please limit the loaded platforms to 1 platform."
+    )
     for entity_entry in entity_entries:
         assert entity_entry == snapshot(name=f"{entity_entry.entity_id}-entry")
         assert entity_entry.disabled_by is None, "Please enable all entities."
@@ -127,9 +127,9 @@ async def test_template_variables(
         hass, "hello", None, context, agent_id="conversation.claude"
     )

-    assert (
-        result.response.response_type == intent.IntentResponseType.ACTION_DONE
-    ), result
+    assert result.response.response_type == intent.IntentResponseType.ACTION_DONE, (
+        result
+    )
     assert "The user name is Test User." in mock_create.mock_calls[1][2]["system"]
     assert "The user id is 12345." in mock_create.mock_calls[1][2]["system"]

@@ -123,9 +123,9 @@ async def test_sensors(hass: HomeAssistant, entity_registry: EntityRegistry) ->
     ]
     for entity_id, _ in sensors:
         assert not hass.states.get(entity_id)
-        assert (
-            entry := entity_registry.async_get(entity_id)
-        ), f"Entity registry entry for {entity_id} is missing"
+        assert (entry := entity_registry.async_get(entity_id)), (
+            f"Entity registry entry for {entity_id} is missing"
+        )
         assert entry.disabled
         assert entry.disabled_by is RegistryEntryDisabler.INTEGRATION

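When the asserted expression is a walrus assignment, as in the registry lookups above, the parentheses around the condition are required syntax and stay where they are; only the message moves into its own parenthesized block. A runnable sketch with a hypothetical registry:

registry = {"sensor.demo": object()}  # hypothetical registry

entity_id = "sensor.demo"
assert (entry := registry.get(entity_id)), (
    f"Entity registry entry for {entity_id} is missing"
)
print(entry)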
@@ -14,9 +14,9 @@ async def test_request_least_info(hass: HomeAssistant) -> None:
     """Test request config with least amount of data."""
     request_id = configurator.async_request_config(hass, "Test Request", lambda _: None)

-    assert (
-        len(hass.services.async_services().get(configurator.DOMAIN, [])) == 1
-    ), "No new service registered"
+    assert len(hass.services.async_services().get(configurator.DOMAIN, [])) == 1, (
+        "No new service registered"
+    )

     states = hass.states.async_all()

@@ -399,9 +399,9 @@ async def test_trigger_sentences(hass: HomeAssistant) -> None:
         result = await conversation.async_converse(hass, sentence, None, Context())
         assert callback.call_count == 1
         assert callback.call_args[0][0].text == sentence
-        assert (
-            result.response.response_type == intent.IntentResponseType.ACTION_DONE
-        ), sentence
+        assert result.response.response_type == intent.IntentResponseType.ACTION_DONE, (
+            sentence
+        )
         assert result.response.speech == {
             "plain": {"speech": trigger_response, "extra_data": None}
         }
@@ -412,9 +412,9 @@ async def test_trigger_sentences(hass: HomeAssistant) -> None:
     callback.reset_mock()
     for sentence in test_sentences:
         result = await conversation.async_converse(hass, sentence, None, Context())
-        assert (
-            result.response.response_type == intent.IntentResponseType.ERROR
-        ), sentence
+        assert result.response.response_type == intent.IntentResponseType.ERROR, (
+            sentence
+        )

     assert len(callback.mock_calls) == 0

@@ -159,9 +159,9 @@ async def test_duplicate_mac_dev_id(mock_warning, hass: HomeAssistant) -> None:
     ]
     legacy.DeviceTracker(hass, False, True, {}, devices)
     _LOGGER.debug(mock_warning.call_args_list)
-    assert (
-        mock_warning.call_count == 1
-    ), "The only warning call should be duplicates (check DEBUG)"
+    assert mock_warning.call_count == 1, (
+        "The only warning call should be duplicates (check DEBUG)"
+    )
     args, _ = mock_warning.call_args
     assert "Duplicate device MAC" in args[0], "Duplicate MAC warning expected"

@@ -177,9 +177,9 @@ async def test_duplicate_mac_dev_id(mock_warning, hass: HomeAssistant) -> None:
     legacy.DeviceTracker(hass, False, True, {}, devices)

     _LOGGER.debug(mock_warning.call_args_list)
-    assert (
-        mock_warning.call_count == 1
-    ), "The only warning call should be duplicates (check DEBUG)"
+    assert mock_warning.call_count == 1, (
+        "The only warning call should be duplicates (check DEBUG)"
+    )
     args, _ = mock_warning.call_args
     assert "Duplicate device IDs" in args[0], "Duplicate device IDs warning expected"

@@ -45,9 +45,9 @@ def dsmr_connection_fixture() -> Generator[tuple[MagicMock, MagicMock, MagicMock


 @pytest.fixture
-def rfxtrx_dsmr_connection_fixture() -> (
-    Generator[tuple[MagicMock, MagicMock, MagicMock]]
-):
+def rfxtrx_dsmr_connection_fixture() -> Generator[
+    tuple[MagicMock, MagicMock, MagicMock]
+]:
     """Fixture that mocks RFXtrx connection."""

     transport = MagicMock(spec=asyncio.Transport)
@@ -73,9 +73,9 @@ def rfxtrx_dsmr_connection_fixture() -> (


 @pytest.fixture
-def dsmr_connection_send_validate_fixture() -> (
-    Generator[tuple[MagicMock, MagicMock, MagicMock]]
-):
+def dsmr_connection_send_validate_fixture() -> Generator[
+    tuple[MagicMock, MagicMock, MagicMock]
+]:
     """Fixture that mocks serial connection."""

     transport = MagicMock(spec=asyncio.Transport)
@@ -156,9 +156,9 @@ def dsmr_connection_send_validate_fixture() -> (


 @pytest.fixture
-def rfxtrx_dsmr_connection_send_validate_fixture() -> (
-    Generator[tuple[MagicMock, MagicMock, MagicMock]]
-):
+def rfxtrx_dsmr_connection_send_validate_fixture() -> Generator[
+    tuple[MagicMock, MagicMock, MagicMock]
+]:
     """Fixture that mocks serial connection."""

     transport = MagicMock(spec=asyncio.Transport)
@@ -161,8 +161,8 @@ async def test_disabled_by_default_buttons(
     for entity_id in entity_ids:
         assert not hass.states.get(entity_id)

-        assert (
-            entry := entity_registry.async_get(entity_id)
-        ), f"Entity registry entry for {entity_id} is missing"
+        assert (entry := entity_registry.async_get(entity_id)), (
+            f"Entity registry entry for {entity_id} is missing"
+        )
         assert entry.disabled
         assert entry.disabled_by is er.RegistryEntryDisabler.INTEGRATION
@@ -116,6 +116,6 @@ async def test_all_entities_loaded(
     entities: int,
 ) -> None:
     """Test that all entities are loaded together."""
-    assert (
-        hass.states.async_entity_ids_count() == entities
-    ), f"loaded entities for {device_fixture}: {hass.states.async_entity_ids()}"
+    assert hass.states.async_entity_ids_count() == entities, (
+        f"loaded entities for {device_fixture}: {hass.states.async_entity_ids()}"
+    )
@@ -136,9 +136,9 @@ async def test_disabled_by_default_number_entities(
     for entity_id in entity_ids:
         assert not hass.states.get(entity_id)

-        assert (
-            entry := entity_registry.async_get(entity_id)
-        ), f"Entity registry entry for {entity_id} is missing"
+        assert (entry := entity_registry.async_get(entity_id)), (
+            f"Entity registry entry for {entity_id} is missing"
+        )
         assert entry.disabled
         assert entry.disabled_by is er.RegistryEntryDisabler.INTEGRATION

@@ -172,9 +172,9 @@ async def test_disabled_by_default_sensors(
     for entity_id in entity_ids:
         assert not hass.states.get(entity_id)

-        assert (
-            entry := entity_registry.async_get(entity_id)
-        ), f"Entity registry entry for {entity_id} is missing"
+        assert (entry := entity_registry.async_get(entity_id)), (
+            f"Entity registry entry for {entity_id} is missing"
+        )
         assert entry.disabled
         assert entry.disabled_by is er.RegistryEntryDisabler.INTEGRATION

@@ -214,8 +214,8 @@ async def test_disabled_by_default_switch_entities(
     for entity_id in entity_ids:
         assert not hass.states.get(entity_id)

-        assert (
-            entry := entity_registry.async_get(entity_id)
-        ), f"Entity registry entry for {entity_id} is missing"
+        assert (entry := entity_registry.async_get(entity_id)), (
+            f"Entity registry entry for {entity_id} is missing"
+        )
         assert entry.disabled
         assert entry.disabled_by is er.RegistryEntryDisabler.INTEGRATION
@@ -603,9 +603,9 @@ async def test_template_variables(
         hass, "hello", None, context, agent_id=mock_config_entry.entry_id
     )

-    assert (
-        result.response.response_type == intent.IntentResponseType.ACTION_DONE
-    ), result
+    assert result.response.response_type == intent.IntentResponseType.ACTION_DONE, (
+        result
+    )
     assert (
         "The user name is Test User."
         in mock_model.mock_calls[0][2]["system_instruction"]
@@ -192,16 +192,16 @@ async def test_cannot_access_with_trusted_ip(
     for remote_addr in UNTRUSTED_ADDRESSES:
         set_mock_ip(remote_addr)
         resp = await client.get("/")
-        assert (
-            resp.status == HTTPStatus.UNAUTHORIZED
-        ), f"{remote_addr} shouldn't be trusted"
+        assert resp.status == HTTPStatus.UNAUTHORIZED, (
+            f"{remote_addr} shouldn't be trusted"
+        )

     for remote_addr in TRUSTED_ADDRESSES:
         set_mock_ip(remote_addr)
         resp = await client.get("/")
-        assert (
-            resp.status == HTTPStatus.UNAUTHORIZED
-        ), f"{remote_addr} shouldn't be trusted"
+        assert resp.status == HTTPStatus.UNAUTHORIZED, (
+            f"{remote_addr} shouldn't be trusted"
+        )


 async def test_auth_active_access_with_access_token_in_header(
@@ -256,16 +256,16 @@ async def test_auth_active_access_with_trusted_ip(
     for remote_addr in UNTRUSTED_ADDRESSES:
         set_mock_ip(remote_addr)
         resp = await client.get("/")
-        assert (
-            resp.status == HTTPStatus.UNAUTHORIZED
-        ), f"{remote_addr} shouldn't be trusted"
+        assert resp.status == HTTPStatus.UNAUTHORIZED, (
+            f"{remote_addr} shouldn't be trusted"
+        )

     for remote_addr in TRUSTED_ADDRESSES:
         set_mock_ip(remote_addr)
         resp = await client.get("/")
-        assert (
-            resp.status == HTTPStatus.UNAUTHORIZED
-        ), f"{remote_addr} shouldn't be trusted"
+        assert resp.status == HTTPStatus.UNAUTHORIZED, (
+            f"{remote_addr} shouldn't be trusted"
+        )


 async def test_auth_legacy_support_api_password_cannot_access(
@@ -174,12 +174,12 @@ class KNXTestKit:
         )

         telegram = self._outgoing_telegrams.pop(0)
-        assert isinstance(
-            telegram.payload, apci_type
-        ), f"APCI type mismatch in {telegram} - Expected: {apci_type.__name__}"
+        assert isinstance(telegram.payload, apci_type), (
+            f"APCI type mismatch in {telegram} - Expected: {apci_type.__name__}"
+        )
-        assert (
-            telegram.destination_address == _expected_ga
-        ), f"Group address mismatch in {telegram} - Expected: {group_address}"
+        assert telegram.destination_address == _expected_ga, (
+            f"Group address mismatch in {telegram} - Expected: {group_address}"
+        )
         if payload is not None:
             assert (
                 telegram.payload.value.value == payload  # type: ignore[attr-defined]
@@ -1012,9 +1012,9 @@ async def test_media_permission_unauthorized(

     client = await hass_client()
     response = await client.get(media_url)
-    assert (
-        response.status == HTTPStatus.UNAUTHORIZED
-    ), f"Response not matched: {response}"
+    assert response.status == HTTPStatus.UNAUTHORIZED, (
+        f"Response not matched: {response}"
+    )


 async def test_multiple_devices(
@@ -1306,9 +1306,9 @@ async def test_media_store_load_filesystem_error(
     response = await client.get(
         f"/api/nest/event_media/{device.id}/{event_identifier}"
     )
-    assert (
-        response.status == HTTPStatus.NOT_FOUND
-    ), f"Response not matched: {response}"
+    assert response.status == HTTPStatus.NOT_FOUND, (
+        f"Response not matched: {response}"
+    )


 @pytest.mark.parametrize(("device_traits", "cache_size"), [(BATTERY_CAMERA_TRAITS, 5)])
@@ -68,7 +68,7 @@ async def test_full_flow(
     }
     assert (
         result["result"].unique_id
-        == f"{DUMMY_DATA["STAT_BRUSSELS_NORTH"]}_{DUMMY_DATA["STAT_BRUSSELS_SOUTH"]}"
+        == f"{DUMMY_DATA['STAT_BRUSSELS_NORTH']}_{DUMMY_DATA['STAT_BRUSSELS_SOUTH']}"
     )


@@ -247,14 +247,14 @@ async def test_sensor_id_migration_standardname(
 ) -> None:
     """Test migrating unique id."""
     old_unique_id = (
-        f"live_{DUMMY_DATA_IMPORT["STAT_BRUSSELS_NORTH"]}_"
-        f"{DUMMY_DATA_IMPORT["STAT_BRUSSELS_NORTH"]}_"
-        f"{DUMMY_DATA_IMPORT["STAT_BRUSSELS_SOUTH"]}"
+        f"live_{DUMMY_DATA_IMPORT['STAT_BRUSSELS_NORTH']}_"
+        f"{DUMMY_DATA_IMPORT['STAT_BRUSSELS_NORTH']}_"
+        f"{DUMMY_DATA_IMPORT['STAT_BRUSSELS_SOUTH']}"
     )
     new_unique_id = (
-        f"nmbs_live_{DUMMY_DATA["STAT_BRUSSELS_NORTH"]}_"
-        f"{DUMMY_DATA["STAT_BRUSSELS_NORTH"]}_"
-        f"{DUMMY_DATA["STAT_BRUSSELS_SOUTH"]}"
+        f"nmbs_live_{DUMMY_DATA['STAT_BRUSSELS_NORTH']}_"
+        f"{DUMMY_DATA['STAT_BRUSSELS_NORTH']}_"
+        f"{DUMMY_DATA['STAT_BRUSSELS_SOUTH']}"
     )
     old_entry = entity_registry.async_get_or_create(
         SENSOR_DOMAIN, DOMAIN, old_unique_id
@@ -287,14 +287,14 @@ async def test_sensor_id_migration_localized_name(
 ) -> None:
     """Test migrating unique id."""
     old_unique_id = (
-        f"live_{DUMMY_DATA_ALTERNATIVE_IMPORT["STAT_BRUSSELS_NORTH"]}_"
-        f"{DUMMY_DATA_ALTERNATIVE_IMPORT["STAT_BRUSSELS_NORTH"]}_"
-        f"{DUMMY_DATA_ALTERNATIVE_IMPORT["STAT_BRUSSELS_SOUTH"]}"
+        f"live_{DUMMY_DATA_ALTERNATIVE_IMPORT['STAT_BRUSSELS_NORTH']}_"
+        f"{DUMMY_DATA_ALTERNATIVE_IMPORT['STAT_BRUSSELS_NORTH']}_"
+        f"{DUMMY_DATA_ALTERNATIVE_IMPORT['STAT_BRUSSELS_SOUTH']}"
     )
     new_unique_id = (
-        f"nmbs_live_{DUMMY_DATA["STAT_BRUSSELS_NORTH"]}_"
-        f"{DUMMY_DATA["STAT_BRUSSELS_NORTH"]}_"
-        f"{DUMMY_DATA["STAT_BRUSSELS_SOUTH"]}"
+        f"nmbs_live_{DUMMY_DATA['STAT_BRUSSELS_NORTH']}_"
+        f"{DUMMY_DATA['STAT_BRUSSELS_NORTH']}_"
+        f"{DUMMY_DATA['STAT_BRUSSELS_SOUTH']}"
     )
     old_entry = entity_registry.async_get_or_create(
         SENSOR_DOMAIN, DOMAIN, old_unique_id
@@ -55,9 +55,9 @@ async def test_chat(
         Message(role="user", content="test message"),
     ]

-    assert (
-        result.response.response_type == intent.IntentResponseType.ACTION_DONE
-    ), result
+    assert result.response.response_type == intent.IntentResponseType.ACTION_DONE, (
+        result
+    )
     assert result.response.speech["plain"]["speech"] == "test response"

     # Test Conversation tracing
@@ -106,9 +106,9 @@ async def test_template_variables(
         hass, "hello", None, context, agent_id=mock_config_entry.entry_id
     )

-    assert (
-        result.response.response_type == intent.IntentResponseType.ACTION_DONE
-    ), result
+    assert result.response.response_type == intent.IntentResponseType.ACTION_DONE, (
+        result
+    )

     args = mock_chat.call_args.kwargs
     prompt = args["messages"][0]["content"]
@@ -463,9 +463,9 @@ async def test_message_history_pruning(
         context=Context(),
         agent_id=mock_config_entry.entry_id,
     )
-    assert (
-        result.response.response_type == intent.IntentResponseType.ACTION_DONE
-    ), result
+    assert result.response.response_type == intent.IntentResponseType.ACTION_DONE, (
+        result
+    )

     # Only the most recent histories should remain
     assert len(agent._history) == 2
@@ -136,9 +136,9 @@ async def test_template_variables(
         hass, "hello", None, context, agent_id=mock_config_entry.entry_id
     )

-    assert (
-        result.response.response_type == intent.IntentResponseType.ACTION_DONE
-    ), result
+    assert result.response.response_type == intent.IntentResponseType.ACTION_DONE, (
+        result
+    )
     assert (
         "The user name is Test User."
         in mock_create.mock_calls[0][2]["messages"][0]["content"]
@@ -178,9 +178,9 @@ async def test_extra_systen_prompt(
         extra_system_prompt=extra_system_prompt,
     )

-    assert (
-        result.response.response_type == intent.IntentResponseType.ACTION_DONE
-    ), result
+    assert result.response.response_type == intent.IntentResponseType.ACTION_DONE, (
+        result
+    )
     assert mock_create.mock_calls[0][2]["messages"][0]["content"].endswith(
         extra_system_prompt
     )
@@ -201,9 +201,9 @@ async def test_extra_systen_prompt(
         extra_system_prompt=None,
     )

-    assert (
-        result.response.response_type == intent.IntentResponseType.ACTION_DONE
-    ), result
+    assert result.response.response_type == intent.IntentResponseType.ACTION_DONE, (
+        result
+    )
     assert mock_create.mock_calls[0][2]["messages"][0]["content"].endswith(
         extra_system_prompt
     )
@@ -222,9 +222,9 @@ async def test_extra_systen_prompt(
         extra_system_prompt=extra_system_prompt2,
     )

-    assert (
-        result.response.response_type == intent.IntentResponseType.ACTION_DONE
-    ), result
+    assert result.response.response_type == intent.IntentResponseType.ACTION_DONE, (
+        result
+    )
     assert mock_create.mock_calls[0][2]["messages"][0]["content"].endswith(
         extra_system_prompt2
     )
@@ -242,9 +242,9 @@ async def test_extra_systen_prompt(
         agent_id=mock_config_entry.entry_id,
     )

-    assert (
-        result.response.response_type == intent.IntentResponseType.ACTION_DONE
-    ), result
+    assert result.response.response_type == intent.IntentResponseType.ACTION_DONE, (
+        result
+    )
     assert mock_create.mock_calls[0][2]["messages"][0]["content"].endswith(
         extra_system_prompt2
     )
@@ -36,9 +36,9 @@ async def test_mapping_integrity() -> None:
     for sensor_map in maps:
         assert sensor_map.attribute in ATTRIBUTES, sensor_map.attribute
         if sensor_map.device_class:
-            assert (
-                sensor_map.device_class in DEVICE_CLASSES
-            ), sensor_map.device_class
+            assert sensor_map.device_class in DEVICE_CLASSES, (
+                sensor_map.device_class
+            )
         if sensor_map.state_class:
             assert sensor_map.state_class in STATE_CLASSES, sensor_map.state_class

@@ -99,9 +99,9 @@ def make_segment_with_parts(
     if discontinuity:
         response.append("#EXT-X-DISCONTINUITY")
     response.extend(
-        f'#EXT-X-PART:DURATION={TEST_PART_DURATION:.3f},'
+        f"#EXT-X-PART:DURATION={TEST_PART_DURATION:.3f},"
         f'URI="./segment/{segment}.{i}.m4s"'
-        f'{",INDEPENDENT=YES" if i % independent_period == 0 else ""}'
+        f"{',INDEPENDENT=YES' if i % independent_period == 0 else ''}"
         for i in range(num_parts)
     )
     response.append(
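The stream-test hunk shows the outer side of the quote rule: the formatter prefers double quotes for the f-string literal itself and flips embedded literals to single quotes where the text is unaffected, while the one part that genuinely needs embedded double quotes (the `URI="..."` segment) keeps its single-quoted prefix. A runnable sketch with hypothetical values:

segment, i = 3, 0  # hypothetical values
TEST_PART_DURATION = 0.75

line = (
    f"#EXT-X-PART:DURATION={TEST_PART_DURATION:.3f},"  # was f'...'
    f'URI="./segment/{segment}.{i}.m4s"'  # unchanged: contains double quotes
    f"{',INDEPENDENT=YES' if i % 2 == 0 else ''}"  # inner quotes flipped
)
print(line)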
@@ -124,9 +124,9 @@ async def _test_sensors(
     for entity in entities_and_expected_values:
         state = hass.states.get(entity.entity_id)
         assert state, f"Unable to get state of {entity.entity_id}"
-        assert (
-            state.state == entity.first_value
-        ), f"First update: {entity.entity_id} is expected to have state {entity.first_value} but has {state.state}"
+        assert state.state == entity.first_value, (
+            f"First update: {entity.entity_id} is expected to have state {entity.first_value} but has {state.state}"
+        )

     # Simulate second data update
     with (
@@ -147,6 +147,6 @@ async def _test_sensors(
     # Verify expected vs actual values of second update
     for entity in entities_and_expected_values:
         state = hass.states.get(entity.entity_id)
-        assert (
-            state.state == entity.second_value
-        ), f"Second update: {entity.entity_id} is expected to have state {entity.second_value} but has {state.state}"
+        assert state.state == entity.second_value, (
+            f"Second update: {entity.entity_id} is expected to have state {entity.second_value} but has {state.state}"
+        )
@@ -79,15 +79,15 @@ async def snapshot_platform(
     device_entries = dr.async_entries_for_config_entry(device_registry, config_entry_id)
     assert device_entries
     for device_entry in device_entries:
-        assert device_entry == snapshot(
-            name=f"{device_entry.name}-entry"
-        ), f"device entry snapshot failed for {device_entry.name}"
+        assert device_entry == snapshot(name=f"{device_entry.name}-entry"), (
+            f"device entry snapshot failed for {device_entry.name}"
+        )

     entity_entries = er.async_entries_for_config_entry(entity_registry, config_entry_id)
     assert entity_entries
-    assert (
-        len({entity_entry.domain for entity_entry in entity_entries}) == 1
-    ), "Please limit the loaded platforms to 1 platform."
+    assert len({entity_entry.domain for entity_entry in entity_entries}) == 1, (
+        "Please limit the loaded platforms to 1 platform."
+    )

     translations = await async_get_translations(hass, "en", "entity", [DOMAIN])
     unique_device_classes = []
@@ -99,18 +99,18 @@ async def snapshot_platform(
         if entity_entry.original_device_class not in unique_device_classes:
             single_device_class_translation = True
             unique_device_classes.append(entity_entry.original_device_class)
-        assert (
-            (key in translations) or single_device_class_translation
-        ), f"No translation or non unique device_class for entity {entity_entry.unique_id}, expected {key}"
+        assert (key in translations) or single_device_class_translation, (
+            f"No translation or non unique device_class for entity {entity_entry.unique_id}, expected {key}"
+        )
-        assert entity_entry == snapshot(
-            name=f"{entity_entry.entity_id}-entry"
-        ), f"entity entry snapshot failed for {entity_entry.entity_id}"
+        assert entity_entry == snapshot(name=f"{entity_entry.entity_id}-entry"), (
+            f"entity entry snapshot failed for {entity_entry.entity_id}"
+        )
         if entity_entry.disabled_by is None:
             state = hass.states.get(entity_entry.entity_id)
             assert state, f"State not found for {entity_entry.entity_id}"
-            assert state == snapshot(
-                name=f"{entity_entry.entity_id}-state"
-            ), f"state snapshot failed for {entity_entry.entity_id}"
+            assert state == snapshot(name=f"{entity_entry.entity_id}-state"), (
+                f"state snapshot failed for {entity_entry.entity_id}"
+            )


 async def setup_automation(hass: HomeAssistant, alias: str, entity_id: str) -> None:
@@ -242,12 +242,12 @@ def _mocked_feature(
     feature.name = name or id.upper()
     feature.set_value = AsyncMock()
     if not (fixture := FEATURES_FIXTURE.get(id)):
-        assert (
-            require_fixture is False
-        ), f"No fixture defined for feature {id} and require_fixture is True"
+        assert require_fixture is False, (
+            f"No fixture defined for feature {id} and require_fixture is True"
+        )
-        assert (
-            value is not UNDEFINED
-        ), f"Value must be provided if feature {id} not defined in features.json"
+        assert value is not UNDEFINED, (
+            f"Value must be provided if feature {id} not defined in features.json"
+        )
         fixture = {"value": value, "category": "Primary", "type": "Sensor"}
     elif value is not UNDEFINED:
         fixture["value"] = value
|
||||||
effect.effect_list = ["Off", "Effect1", "Effect2"]
|
effect.effect_list = ["Off", "Effect1", "Effect2"]
|
||||||
|
|
||||||
async def _set_effect(effect_name, *_, **__):
|
async def _set_effect(effect_name, *_, **__):
|
||||||
assert (
|
assert effect_name in effect.effect_list, (
|
||||||
effect_name in effect.effect_list
|
f"set_effect '{effect_name}' not in {effect.effect_list}"
|
||||||
), f"set_effect '{effect_name}' not in {effect.effect_list}"
|
)
|
||||||
assert device.modules[
|
assert device.modules[Module.Light], (
|
||||||
Module.Light
|
"Need a light module to test set_effect method"
|
||||||
], "Need a light module to test set_effect method"
|
)
|
||||||
device.modules[Module.Light].state.light_on = True
|
device.modules[Module.Light].state.light_on = True
|
||||||
effect.effect = effect_name
|
effect.effect = effect_name
|
||||||
|
|
||||||
|
|
|
@@ -132,7 +132,7 @@ async def test_turn_on_off(
     with (
         expectation,
         patch(
-            f"pyvesync.vesyncfan.VeSyncHumid200300S.{"turn_on" if turn_on else "turn_off"}",
+            f"pyvesync.vesyncfan.VeSyncHumid200300S.{'turn_on' if turn_on else 'turn_off'}",
             return_value=api_response,
         ) as method_mock,
     ):
@@ -133,9 +133,9 @@ def climate_radio_thermostat_ct100_plus_state_fixture() -> dict[str, Any]:
     name="climate_radio_thermostat_ct100_plus_different_endpoints_state",
     scope="package",
 )
-def climate_radio_thermostat_ct100_plus_different_endpoints_state_fixture() -> (
-    dict[str, Any]
-):
+def climate_radio_thermostat_ct100_plus_different_endpoints_state_fixture() -> dict[
+    str, Any
+]:
     """Load the thermostat fixture state with values on different endpoints.

     This device is a radio thermostat ct100.
|
||||||
@pytest.fixture(
|
@pytest.fixture(
|
||||||
name="climate_radio_thermostat_ct101_multiple_temp_units_state", scope="package"
|
name="climate_radio_thermostat_ct101_multiple_temp_units_state", scope="package"
|
||||||
)
|
)
|
||||||
def climate_radio_thermostat_ct101_multiple_temp_units_state_fixture() -> (
|
def climate_radio_thermostat_ct101_multiple_temp_units_state_fixture() -> dict[
|
||||||
dict[str, Any]
|
str, Any
|
||||||
):
|
]:
|
||||||
"""Load the climate multiple temp units node state fixture data."""
|
"""Load the climate multiple temp units node state fixture data."""
|
||||||
return load_json_object_fixture(
|
return load_json_object_fixture(
|
||||||
"climate_radio_thermostat_ct101_multiple_temp_units_state.json", DOMAIN
|
"climate_radio_thermostat_ct101_multiple_temp_units_state.json", DOMAIN
|
||||||
|
|
|
@@ -412,7 +412,9 @@ def verify_cleanup(

     try:
         # Verify respx.mock has been cleaned up
-        assert not respx.mock.routes, "respx.mock routes not cleaned up, maybe the test needs to be decorated with @respx.mock"
+        assert not respx.mock.routes, (
+            "respx.mock routes not cleaned up, maybe the test needs to be decorated with @respx.mock"
+        )
     finally:
         # Clear mock routes not break subsequent tests
         respx.mock.clear()