Bump ruff to 0.9.1 (#135197)
parent 2b51ab1c75
commit 8db63adc11
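Most of this diff is mechanical: ruff 0.9 stabilized the formatter's 2025 style guide, so bumping the pin reformats code without changing behavior. The most frequent change is how an over-long `assert` wraps: the condition now stays on one line and the message is parenthesized, instead of the condition. A minimal sketch (the variable and message are invented for illustration):

```python
config: dict | None = {}

# ruff 0.8 broke inside the condition:
#     assert (
#         config is not None
#     ), "config should be set before startup"
# ruff 0.9 keeps the condition intact and wraps the message:
assert config is not None, (
    "config should be set before startup"
)
```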
@@ -1,6 +1,6 @@
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.8.6
+    rev: v0.9.1
     hooks:
       - id: ruff
         args:
@@ -1472,9 +1472,9 @@ class PipelineInput:
         if stt_audio_buffer:
             # Send audio in the buffer first to speech-to-text, then move on to stt_stream.
             # This is basically an async itertools.chain.
-            async def buffer_then_audio_stream() -> (
-                AsyncGenerator[EnhancedAudioChunk]
-            ):
+            async def buffer_then_audio_stream() -> AsyncGenerator[
+                EnhancedAudioChunk
+            ]:
                 # Buffered audio
                 for chunk in stt_audio_buffer:
                     yield chunk
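Several hunks like the one above show a second 0.9 change: an over-long return annotation that is a single subscript is no longer wrapped in parentheses; the formatter splits inside the subscript's brackets instead. A short sketch (the generator below is invented, and a signature this short would normally fit on one line; the split form is shown for illustration):

```python
from collections.abc import AsyncGenerator

# ruff 0.8 parenthesized the annotation:
#     async def stream() -> (
#         AsyncGenerator[int, None]
#     ):
# ruff 0.9 splits inside the subscript instead:
async def stream() -> AsyncGenerator[
    int, None
]:
    yield 1
```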
@@ -978,9 +978,9 @@ class DeviceScanner:

     async def async_scan_devices(self) -> list[str]:
         """Scan for devices."""
-        assert (
-            self.hass is not None
-        ), "hass should be set by async_setup_scanner_platform"
+        assert self.hass is not None, (
+            "hass should be set by async_setup_scanner_platform"
+        )
         return await self.hass.async_add_executor_job(self.scan_devices)

     def get_device_name(self, device: str) -> str | None:
@@ -989,9 +989,9 @@ class DeviceScanner:

     async def async_get_device_name(self, device: str) -> str | None:
         """Get the name of a device."""
-        assert (
-            self.hass is not None
-        ), "hass should be set by async_setup_scanner_platform"
+        assert self.hass is not None, (
+            "hass should be set by async_setup_scanner_platform"
+        )
         return await self.hass.async_add_executor_job(self.get_device_name, device)

     def get_extra_attributes(self, device: str) -> dict:
@@ -1000,9 +1000,9 @@ class DeviceScanner:

     async def async_get_extra_attributes(self, device: str) -> dict:
         """Get the extra attributes of a device."""
-        assert (
-            self.hass is not None
-        ), "hass should be set by async_setup_scanner_platform"
+        assert self.hass is not None, (
+            "hass should be set by async_setup_scanner_platform"
+        )
         return await self.hass.async_add_executor_job(self.get_extra_attributes, device)


@@ -114,9 +114,9 @@ class GoogleCloudSpeechToTextEntity(SpeechToTextEntity):
             )
         )

-        async def request_generator() -> (
-            AsyncGenerator[speech_v1.StreamingRecognizeRequest]
-        ):
+        async def request_generator() -> AsyncGenerator[
+            speech_v1.StreamingRecognizeRequest
+        ]:
             # The first request must only contain a streaming_config
             yield speech_v1.StreamingRecognizeRequest(streaming_config=streaming_config)
             # All subsequent requests must only contain audio_content
@@ -115,9 +115,9 @@ class EventProcessor:
         include_entity_name: bool = True,
     ) -> None:
         """Init the event stream."""
-        assert not (
-            context_id and (entity_ids or device_ids)
-        ), "can't pass in both context_id and (entity_ids or device_ids)"
+        assert not (context_id and (entity_ids or device_ids)), (
+            "can't pass in both context_id and (entity_ids or device_ids)"
+        )
         self.hass = hass
         self.ent_reg = er.async_get(hass)
         self.event_types = event_types
@@ -310,13 +310,11 @@ class MatterClimate(MatterEntity, ClimateEntity):
         ):
             match running_state_value:
                 case (
-                    ThermostatRunningState.Heat
-                    | ThermostatRunningState.HeatStage2
+                    ThermostatRunningState.Heat | ThermostatRunningState.HeatStage2
                 ):
                     self._attr_hvac_action = HVACAction.HEATING
                 case (
-                    ThermostatRunningState.Cool
-                    | ThermostatRunningState.CoolStage2
+                    ThermostatRunningState.Cool | ThermostatRunningState.CoolStage2
                 ):
                     self._attr_hvac_action = HVACAction.COOLING
                 case (
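The hunk above reflects 0.9's handling of `match` patterns: `|`-joined alternatives are collapsed onto a single line when they fit. A runnable sketch with an invented enum:

```python
from enum import Enum

class RunningState(Enum):
    HEAT = 1
    HEAT_STAGE2 = 2

def is_heating(state: RunningState) -> bool:
    match state:
        # ruff 0.8 put each alternative on its own line; 0.9 joins
        # them when the combined pattern fits the line length.
        case RunningState.HEAT | RunningState.HEAT_STAGE2:
            return True
        case _:
            return False
```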
@@ -98,9 +98,9 @@ class BasePlatform(Entity):
         def get_optional_numeric_config(config_name: str) -> int | float | None:
             if (val := entry.get(config_name)) is None:
                 return None
-            assert isinstance(
-                val, (float, int)
-            ), f"Expected float or int but {config_name} was {type(val)}"
+            assert isinstance(val, (float, int)), (
+                f"Expected float or int but {config_name} was {type(val)}"
+            )
             return val

         self._min_value = get_optional_numeric_config(CONF_MIN_VALUE)
@@ -84,7 +84,7 @@ class NMBSConfigFlow(ConfigFlow, domain=DOMAIN):
         )
         self._abort_if_unique_id_configured()

-        config_entry_name = f"Train from {station_from["standardname"]} to {station_to["standardname"]}"
+        config_entry_name = f"Train from {station_from['standardname']} to {station_to['standardname']}"
         return self.async_create_entry(
             title=config_entry_name,
             data=user_input,
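The NMBS hunks here and below show the stabilized f-string formatting: double quotes nested inside a double-quoted f-string (syntax that PEP 701 only allows on Python 3.12+) are normalized to single quotes, which also keeps the code parseable on older interpreters. A sketch with a stand-in dict:

```python
station = {"standardname": "Brussel-Zuid"}

# before (valid only on Python 3.12+):
#     f"Train from {station["standardname"]}"
# after ruff 0.9's quote normalization:
label = f"Train from {station['standardname']}"
print(label)
```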
@@ -157,18 +157,18 @@ class NMBSConfigFlow(ConfigFlow, domain=DOMAIN):
         if entity_id := entity_registry.async_get_entity_id(
             Platform.SENSOR,
             DOMAIN,
-            f"{prefix}_{station_live["standardname"]}_{station_from["standardname"]}_{station_to["standardname"]}",
+            f"{prefix}_{station_live['standardname']}_{station_from['standardname']}_{station_to['standardname']}",
         ):
-            new_unique_id = f"{DOMAIN}_{prefix}_{station_live["id"]}_{station_from["id"]}_{station_to["id"]}"
+            new_unique_id = f"{DOMAIN}_{prefix}_{station_live['id']}_{station_from['id']}_{station_to['id']}"
             entity_registry.async_update_entity(
                 entity_id, new_unique_id=new_unique_id
             )
         if entity_id := entity_registry.async_get_entity_id(
             Platform.SENSOR,
             DOMAIN,
-            f"{prefix}_{station_live["name"]}_{station_from["name"]}_{station_to["name"]}",
+            f"{prefix}_{station_live['name']}_{station_from['name']}_{station_to['name']}",
         ):
-            new_unique_id = f"{DOMAIN}_{prefix}_{station_live["id"]}_{station_from["id"]}_{station_to["id"]}"
+            new_unique_id = f"{DOMAIN}_{prefix}_{station_live['id']}_{station_from['id']}_{station_to['id']}"
             entity_registry.async_update_entity(
                 entity_id, new_unique_id=new_unique_id
             )
@@ -201,15 +201,14 @@ class NMBSLiveBoard(SensorEntity):
     @property
     def name(self) -> str:
         """Return the sensor default name."""
-        return f"Trains in {self._station["standardname"]}"
+        return f"Trains in {self._station['standardname']}"

     @property
     def unique_id(self) -> str:
         """Return the unique ID."""

         unique_id = (
-            f"{self._station["id"]}_{self._station_from["id"]}_"
-            f"{self._station_to["id"]}"
+            f"{self._station['id']}_{self._station_from['id']}_{self._station_to['id']}"
         )
         return f"nmbs_live_{unique_id}"

@@ -302,7 +301,7 @@ class NMBSSensor(SensorEntity):
     @property
     def unique_id(self) -> str:
         """Return the unique ID."""
-        unique_id = f"{self._station_from["id"]}_{self._station_to["id"]}"
+        unique_id = f"{self._station_from['id']}_{self._station_to['id']}"

         return f"nmbs_connection_{unique_id}"

@@ -310,7 +309,7 @@ class NMBSSensor(SensorEntity):
     def name(self) -> str:
         """Return the name of the sensor."""
         if self._name is None:
-            return f"Train from {self._station_from["standardname"]} to {self._station_to["standardname"]}"
+            return f"Train from {self._station_from['standardname']} to {self._station_to['standardname']}"
         return self._name

     @property
@@ -252,9 +252,9 @@ class PullPointManager:

     async def async_start(self) -> bool:
         """Start pullpoint subscription."""
-        assert (
-            self.state == PullPointManagerState.STOPPED
-        ), "PullPoint manager already started"
+        assert self.state == PullPointManagerState.STOPPED, (
+            "PullPoint manager already started"
+        )
         LOGGER.debug("%s: Starting PullPoint manager", self._name)
         if not await self._async_start_pullpoint():
             self.state = PullPointManagerState.FAILED
@@ -501,9 +501,9 @@ class WebHookManager:
     async def async_start(self) -> bool:
         """Start polling events."""
         LOGGER.debug("%s: Starting webhook manager", self._name)
-        assert (
-            self.state == WebHookManagerState.STOPPED
-        ), "Webhook manager already started"
+        assert self.state == WebHookManagerState.STOPPED, (
+            "Webhook manager already started"
+        )
         assert self._webhook_url is None, "Webhook already registered"
         self._async_register_webhook()
         if not await self._async_start_webhook():
@@ -67,11 +67,9 @@ type _ReturnFuncType[_T, **_P, _R] = Callable[
 ]


-def catch_request_errors[_OpenhomeDeviceT: OpenhomeDevice, **_P, _R]() -> (
-    Callable[
-        [_FuncType[_OpenhomeDeviceT, _P, _R]], _ReturnFuncType[_OpenhomeDeviceT, _P, _R]
-    ]
-):
+def catch_request_errors[_OpenhomeDeviceT: OpenhomeDevice, **_P, _R]() -> Callable[
+    [_FuncType[_OpenhomeDeviceT, _P, _R]], _ReturnFuncType[_OpenhomeDeviceT, _P, _R]
+]:
     """Catch TimeoutError, aiohttp.ClientError, UpnpError errors."""

     def call_wrapper(
@@ -48,9 +48,9 @@ class QSSensor(QSEntity, SensorEntity):

         self._decode, self.unit = SENSORS[sensor_type]
         # this cannot happen because it only happens in bool and this should be redirected to binary_sensor
-        assert not isinstance(
-            self.unit, type
-        ), f"boolean sensor id={sensor['id']} name={sensor['name']}"
+        assert not isinstance(self.unit, type), (
+            f"boolean sensor id={sensor['id']} name={sensor['name']}"
+        )

     @callback
     def update_packet(self, packet):
@@ -2752,9 +2752,9 @@ class EventTypeIDMigration(BaseMigrationWithQuery, BaseOffLineMigration):
             for db_event_type in missing_db_event_types:
                 # We cannot add the assigned ids to the event_type_manager
                 # because the commit could get rolled back
-                assert (
-                    db_event_type.event_type is not None
-                ), "event_type should never be None"
+                assert db_event_type.event_type is not None, (
+                    "event_type should never be None"
+                )
                 event_type_to_id[db_event_type.event_type] = (
                     db_event_type.event_type_id
                 )
@@ -2830,9 +2830,9 @@ class EntityIDMigration(BaseMigrationWithQuery, BaseOffLineMigration):
             for db_states_metadata in missing_states_metadata:
                 # We cannot add the assigned ids to the event_type_manager
                 # because the commit could get rolled back
-                assert (
-                    db_states_metadata.entity_id is not None
-                ), "entity_id should never be None"
+                assert db_states_metadata.entity_id is not None, (
+                    "entity_id should never be None"
+                )
                 entity_id_to_metadata_id[db_states_metadata.entity_id] = (
                     db_states_metadata.metadata_id
                 )
@@ -47,9 +47,9 @@ class RecorderPool(SingletonThreadPool, NullPool):
     ) -> None:
         """Create the pool."""
         kw["pool_size"] = POOL_SIZE
-        assert (
-            recorder_and_worker_thread_ids is not None
-        ), "recorder_and_worker_thread_ids is required"
+        assert recorder_and_worker_thread_ids is not None, (
+            "recorder_and_worker_thread_ids is required"
+        )
         self.recorder_and_worker_thread_ids = recorder_and_worker_thread_ids
         SingletonThreadPool.__init__(self, creator, **kw)

@@ -968,12 +968,10 @@ def _reduce_statistics(
     return result


-def reduce_day_ts_factory() -> (
-    tuple[
-        Callable[[float, float], bool],
-        Callable[[float], tuple[float, float]],
-    ]
-):
+def reduce_day_ts_factory() -> tuple[
+    Callable[[float, float], bool],
+    Callable[[float], tuple[float, float]],
+]:
     """Return functions to match same day and day start end."""
     _lower_bound: float = 0
     _upper_bound: float = 0
@@ -1017,12 +1015,10 @@ def _reduce_statistics_per_day(
     )


-def reduce_week_ts_factory() -> (
-    tuple[
-        Callable[[float, float], bool],
-        Callable[[float], tuple[float, float]],
-    ]
-):
+def reduce_week_ts_factory() -> tuple[
+    Callable[[float, float], bool],
+    Callable[[float], tuple[float, float]],
+]:
     """Return functions to match same week and week start end."""
     _lower_bound: float = 0
     _upper_bound: float = 0
@@ -1075,12 +1071,10 @@ def _find_month_end_time(timestamp: datetime) -> datetime:
     )


-def reduce_month_ts_factory() -> (
-    tuple[
-        Callable[[float, float], bool],
-        Callable[[float], tuple[float, float]],
-    ]
-):
+def reduce_month_ts_factory() -> tuple[
+    Callable[[float, float], bool],
+    Callable[[float], tuple[float, float]],
+]:
     """Return functions to match same month and month start end."""
     _lower_bound: float = 0
     _upper_bound: float = 0
@@ -615,9 +615,9 @@ def info_from_service(service: AsyncServiceInfo) -> ZeroconfServiceInfo | None:
         return None

     if TYPE_CHECKING:
-        assert (
-            service.server is not None
-        ), "server cannot be none if there are addresses"
+        assert service.server is not None, (
+            "server cannot be none if there are addresses"
+        )
     return ZeroconfServiceInfo(
         ip_address=ip_address,
         ip_addresses=ip_addresses,
@@ -15,9 +15,9 @@ if TYPE_CHECKING:
 _function_cache: dict[str, Callable[[str, str, dict[str, str] | None], str]] = {}


-def import_async_get_exception_message() -> (
-    Callable[[str, str, dict[str, str] | None], str]
-):
+def import_async_get_exception_message() -> Callable[
+    [str, str, dict[str, str] | None], str
+]:
     """Return a method that can fetch a translated exception message.

     Defaults to English, requires translations to already be cached.
@@ -1480,9 +1480,9 @@ class Entity(

         if self.registry_entry is not None:
             # This is an assert as it should never happen, but helps in tests
-            assert (
-                not self.registry_entry.disabled_by
-            ), f"Entity '{self.entity_id}' is being added while it's disabled"
+            assert not self.registry_entry.disabled_by, (
+                f"Entity '{self.entity_id}' is being added while it's disabled"
+            )

         self.async_on_remove(
             async_track_entity_registry_updated_event(
@@ -46,9 +46,9 @@ def request_handler_factory(
 ) -> Callable[[web.Request], Awaitable[web.StreamResponse]]:
     """Wrap the handler classes."""
     is_coroutinefunction = asyncio.iscoroutinefunction(handler)
-    assert is_coroutinefunction or is_callback(
-        handler
-    ), "Handler should be a coroutine or a callback."
+    assert is_coroutinefunction or is_callback(handler), (
+        "Handler should be a coroutine or a callback."
+    )

     async def handle(request: web.Request) -> web.StreamResponse:
         """Handle incoming request."""
@@ -841,16 +841,16 @@ class Template:
         self.ensure_valid()

         assert self.hass is not None, "hass variable not set on template"
-        assert (
-            self._limited is None or self._limited == limited
-        ), "can't change between limited and non limited template"
-        assert (
-            self._strict is None or self._strict == strict
-        ), "can't change between strict and non strict template"
+        assert self._limited is None or self._limited == limited, (
+            "can't change between limited and non limited template"
+        )
+        assert self._strict is None or self._strict == strict, (
+            "can't change between strict and non strict template"
+        )
         assert not (strict and limited), "can't combine strict and limited template"
-        assert (
-            self._log_fn is None or self._log_fn == log_fn
-        ), "can't change custom log function"
+        assert self._log_fn is None or self._log_fn == log_fn, (
+            "can't change custom log function"
+        )
         assert self._compiled_code is not None, "template code was not compiled"

         self._limited = limited
@@ -701,7 +701,7 @@ exclude_lines = [
 ]

 [tool.ruff]
-required-version = ">=0.8.0"
+required-version = ">=0.9.1"

 [tool.ruff.lint]
 select = [
@@ -1,5 +1,5 @@
 # Automatically generated from .pre-commit-config.yaml by gen_requirements_all.py, do not edit

 codespell==2.3.0
-ruff==0.8.6
+ruff==0.9.1
 yamllint==1.35.1
@@ -24,7 +24,7 @@ RUN --mount=from=ghcr.io/astral-sh/uv:0.5.18,source=/uv,target=/bin/uv \
         --no-cache \
         -c /usr/src/homeassistant/homeassistant/package_constraints.txt \
         -r /usr/src/homeassistant/requirements.txt \
-        stdlib-list==0.10.0 pipdeptree==2.23.4 tqdm==4.66.5 ruff==0.8.6 \
+        stdlib-list==0.10.0 pipdeptree==2.23.4 tqdm==4.66.5 ruff==0.9.1 \
         PyTurboJPEG==1.7.5 go2rtc-client==0.1.2 ha-ffmpeg==3.2.2 hassil==2.1.0 home-assistant-intents==2025.1.1 mutagen==1.47.0 pymicro-vad==1.0.1 pyspeex-noise==1.0.2

 LABEL "name"="hassfest"
@@ -1197,16 +1197,16 @@ def assert_setup_component(count, domain=None):
     yield config

     if domain is None:
-        assert (
-            len(config) == 1
-        ), f"assert_setup_component requires DOMAIN: {list(config.keys())}"
+        assert len(config) == 1, (
+            f"assert_setup_component requires DOMAIN: {list(config.keys())}"
+        )
         domain = list(config.keys())[0]

     res = config.get(domain)
     res_len = 0 if res is None else len(res)
-    assert (
-        res_len == count
-    ), f"setup_component failed, expected {count} got {res_len}: {res}"
+    assert res_len == count, (
+        f"setup_component failed, expected {count} got {res_len}: {res}"
+    )


 def mock_restore_cache(hass: HomeAssistant, states: Sequence[State]) -> None:
@@ -1814,9 +1814,9 @@ async def snapshot_platform(
     """Snapshot a platform."""
     entity_entries = er.async_entries_for_config_entry(entity_registry, config_entry_id)
     assert entity_entries
-    assert (
-        len({entity_entry.domain for entity_entry in entity_entries}) == 1
-    ), "Please limit the loaded platforms to 1 platform."
+    assert len({entity_entry.domain for entity_entry in entity_entries}) == 1, (
+        "Please limit the loaded platforms to 1 platform."
+    )
     for entity_entry in entity_entries:
         assert entity_entry == snapshot(name=f"{entity_entry.entity_id}-entry")
         assert entity_entry.disabled_by is None, "Please enable all entities."
@@ -127,9 +127,9 @@ async def test_template_variables(
         hass, "hello", None, context, agent_id="conversation.claude"
     )

-    assert (
-        result.response.response_type == intent.IntentResponseType.ACTION_DONE
-    ), result
+    assert result.response.response_type == intent.IntentResponseType.ACTION_DONE, (
+        result
+    )
     assert "The user name is Test User." in mock_create.mock_calls[1][2]["system"]
     assert "The user id is 12345." in mock_create.mock_calls[1][2]["system"]

@@ -123,9 +123,9 @@ async def test_sensors(hass: HomeAssistant, entity_registry: EntityRegistry) ->
     ]
     for entity_id, _ in sensors:
         assert not hass.states.get(entity_id)
-        assert (
-            entry := entity_registry.async_get(entity_id)
-        ), f"Entity registry entry for {entity_id} is missing"
+        assert (entry := entity_registry.async_get(entity_id)), (
+            f"Entity registry entry for {entity_id} is missing"
+        )
         assert entry.disabled
         assert entry.disabled_by is RegistryEntryDisabler.INTEGRATION

@@ -14,9 +14,9 @@ async def test_request_least_info(hass: HomeAssistant) -> None:
     """Test request config with least amount of data."""
     request_id = configurator.async_request_config(hass, "Test Request", lambda _: None)

-    assert (
-        len(hass.services.async_services().get(configurator.DOMAIN, [])) == 1
-    ), "No new service registered"
+    assert len(hass.services.async_services().get(configurator.DOMAIN, [])) == 1, (
+        "No new service registered"
+    )

     states = hass.states.async_all()

@@ -399,9 +399,9 @@ async def test_trigger_sentences(hass: HomeAssistant) -> None:
         result = await conversation.async_converse(hass, sentence, None, Context())
         assert callback.call_count == 1
         assert callback.call_args[0][0].text == sentence
-        assert (
-            result.response.response_type == intent.IntentResponseType.ACTION_DONE
-        ), sentence
+        assert result.response.response_type == intent.IntentResponseType.ACTION_DONE, (
+            sentence
+        )
         assert result.response.speech == {
             "plain": {"speech": trigger_response, "extra_data": None}
         }
@@ -412,9 +412,9 @@ async def test_trigger_sentences(hass: HomeAssistant) -> None:
     callback.reset_mock()
     for sentence in test_sentences:
         result = await conversation.async_converse(hass, sentence, None, Context())
-        assert (
-            result.response.response_type == intent.IntentResponseType.ERROR
-        ), sentence
+        assert result.response.response_type == intent.IntentResponseType.ERROR, (
+            sentence
+        )

     assert len(callback.mock_calls) == 0

@@ -159,9 +159,9 @@ async def test_duplicate_mac_dev_id(mock_warning, hass: HomeAssistant) -> None:
     ]
     legacy.DeviceTracker(hass, False, True, {}, devices)
     _LOGGER.debug(mock_warning.call_args_list)
-    assert (
-        mock_warning.call_count == 1
-    ), "The only warning call should be duplicates (check DEBUG)"
+    assert mock_warning.call_count == 1, (
+        "The only warning call should be duplicates (check DEBUG)"
+    )
     args, _ = mock_warning.call_args
     assert "Duplicate device MAC" in args[0], "Duplicate MAC warning expected"

@@ -177,9 +177,9 @@ async def test_duplicate_mac_dev_id(mock_warning, hass: HomeAssistant) -> None:
     legacy.DeviceTracker(hass, False, True, {}, devices)

     _LOGGER.debug(mock_warning.call_args_list)
-    assert (
-        mock_warning.call_count == 1
-    ), "The only warning call should be duplicates (check DEBUG)"
+    assert mock_warning.call_count == 1, (
+        "The only warning call should be duplicates (check DEBUG)"
+    )
     args, _ = mock_warning.call_args
     assert "Duplicate device IDs" in args[0], "Duplicate device IDs warning expected"

@@ -45,9 +45,9 @@ def dsmr_connection_fixture() -> Generator[tuple[MagicMock, MagicMock, MagicMock


 @pytest.fixture
-def rfxtrx_dsmr_connection_fixture() -> (
-    Generator[tuple[MagicMock, MagicMock, MagicMock]]
-):
+def rfxtrx_dsmr_connection_fixture() -> Generator[
+    tuple[MagicMock, MagicMock, MagicMock]
+]:
     """Fixture that mocks RFXtrx connection."""

     transport = MagicMock(spec=asyncio.Transport)
@@ -73,9 +73,9 @@ def rfxtrx_dsmr_connection_fixture() -> (


 @pytest.fixture
-def dsmr_connection_send_validate_fixture() -> (
-    Generator[tuple[MagicMock, MagicMock, MagicMock]]
-):
+def dsmr_connection_send_validate_fixture() -> Generator[
+    tuple[MagicMock, MagicMock, MagicMock]
+]:
     """Fixture that mocks serial connection."""

     transport = MagicMock(spec=asyncio.Transport)
@@ -156,9 +156,9 @@ def dsmr_connection_send_validate_fixture() -> (


 @pytest.fixture
-def rfxtrx_dsmr_connection_send_validate_fixture() -> (
-    Generator[tuple[MagicMock, MagicMock, MagicMock]]
-):
+def rfxtrx_dsmr_connection_send_validate_fixture() -> Generator[
+    tuple[MagicMock, MagicMock, MagicMock]
+]:
     """Fixture that mocks serial connection."""

     transport = MagicMock(spec=asyncio.Transport)
@@ -161,8 +161,8 @@ async def test_disabled_by_default_buttons(
     for entity_id in entity_ids:
         assert not hass.states.get(entity_id)

-        assert (
-            entry := entity_registry.async_get(entity_id)
-        ), f"Entity registry entry for {entity_id} is missing"
+        assert (entry := entity_registry.async_get(entity_id)), (
+            f"Entity registry entry for {entity_id} is missing"
+        )
         assert entry.disabled
         assert entry.disabled_by is er.RegistryEntryDisabler.INTEGRATION
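Where the condition is a named expression, as above, the parentheses around the walrus assignment are syntactically required and stay put; only the message moves into its own parenthesized block. A runnable sketch with a faked registry:

```python
registry = {"sensor.demo": "entry"}
entity_id = "sensor.demo"

# Only the message wrapping changes; `(entry := ...)` must keep its parens.
assert (entry := registry.get(entity_id)), (
    f"Entity registry entry for {entity_id} is missing"
)
print(entry)
```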
@@ -116,6 +116,6 @@ async def test_all_entities_loaded(
     entities: int,
 ) -> None:
     """Test that all entities are loaded together."""
-    assert (
-        hass.states.async_entity_ids_count() == entities
-    ), f"loaded entities for {device_fixture}: {hass.states.async_entity_ids()}"
+    assert hass.states.async_entity_ids_count() == entities, (
+        f"loaded entities for {device_fixture}: {hass.states.async_entity_ids()}"
+    )
@@ -136,9 +136,9 @@ async def test_disabled_by_default_number_entities(
     for entity_id in entity_ids:
         assert not hass.states.get(entity_id)

-        assert (
-            entry := entity_registry.async_get(entity_id)
-        ), f"Entity registry entry for {entity_id} is missing"
+        assert (entry := entity_registry.async_get(entity_id)), (
+            f"Entity registry entry for {entity_id} is missing"
+        )
         assert entry.disabled
         assert entry.disabled_by is er.RegistryEntryDisabler.INTEGRATION

@@ -172,9 +172,9 @@ async def test_disabled_by_default_sensors(
     for entity_id in entity_ids:
         assert not hass.states.get(entity_id)

-        assert (
-            entry := entity_registry.async_get(entity_id)
-        ), f"Entity registry entry for {entity_id} is missing"
+        assert (entry := entity_registry.async_get(entity_id)), (
+            f"Entity registry entry for {entity_id} is missing"
+        )
         assert entry.disabled
         assert entry.disabled_by is er.RegistryEntryDisabler.INTEGRATION

@@ -214,8 +214,8 @@ async def test_disabled_by_default_switch_entities(
     for entity_id in entity_ids:
         assert not hass.states.get(entity_id)

-        assert (
-            entry := entity_registry.async_get(entity_id)
-        ), f"Entity registry entry for {entity_id} is missing"
+        assert (entry := entity_registry.async_get(entity_id)), (
+            f"Entity registry entry for {entity_id} is missing"
+        )
         assert entry.disabled
         assert entry.disabled_by is er.RegistryEntryDisabler.INTEGRATION
@@ -603,9 +603,9 @@ async def test_template_variables(
         hass, "hello", None, context, agent_id=mock_config_entry.entry_id
     )

-    assert (
-        result.response.response_type == intent.IntentResponseType.ACTION_DONE
-    ), result
+    assert result.response.response_type == intent.IntentResponseType.ACTION_DONE, (
+        result
+    )
     assert (
         "The user name is Test User."
         in mock_model.mock_calls[0][2]["system_instruction"]
@@ -192,16 +192,16 @@ async def test_cannot_access_with_trusted_ip(
     for remote_addr in UNTRUSTED_ADDRESSES:
         set_mock_ip(remote_addr)
         resp = await client.get("/")
-        assert (
-            resp.status == HTTPStatus.UNAUTHORIZED
-        ), f"{remote_addr} shouldn't be trusted"
+        assert resp.status == HTTPStatus.UNAUTHORIZED, (
+            f"{remote_addr} shouldn't be trusted"
+        )

     for remote_addr in TRUSTED_ADDRESSES:
         set_mock_ip(remote_addr)
         resp = await client.get("/")
-        assert (
-            resp.status == HTTPStatus.UNAUTHORIZED
-        ), f"{remote_addr} shouldn't be trusted"
+        assert resp.status == HTTPStatus.UNAUTHORIZED, (
+            f"{remote_addr} shouldn't be trusted"
+        )


 async def test_auth_active_access_with_access_token_in_header(
@@ -256,16 +256,16 @@ async def test_auth_active_access_with_trusted_ip(
     for remote_addr in UNTRUSTED_ADDRESSES:
         set_mock_ip(remote_addr)
         resp = await client.get("/")
-        assert (
-            resp.status == HTTPStatus.UNAUTHORIZED
-        ), f"{remote_addr} shouldn't be trusted"
+        assert resp.status == HTTPStatus.UNAUTHORIZED, (
+            f"{remote_addr} shouldn't be trusted"
+        )

     for remote_addr in TRUSTED_ADDRESSES:
         set_mock_ip(remote_addr)
         resp = await client.get("/")
-        assert (
-            resp.status == HTTPStatus.UNAUTHORIZED
-        ), f"{remote_addr} shouldn't be trusted"
+        assert resp.status == HTTPStatus.UNAUTHORIZED, (
+            f"{remote_addr} shouldn't be trusted"
+        )


 async def test_auth_legacy_support_api_password_cannot_access(
@@ -174,12 +174,12 @@ class KNXTestKit:
         )

         telegram = self._outgoing_telegrams.pop(0)
-        assert isinstance(
-            telegram.payload, apci_type
-        ), f"APCI type mismatch in {telegram} - Expected: {apci_type.__name__}"
-        assert (
-            telegram.destination_address == _expected_ga
-        ), f"Group address mismatch in {telegram} - Expected: {group_address}"
+        assert isinstance(telegram.payload, apci_type), (
+            f"APCI type mismatch in {telegram} - Expected: {apci_type.__name__}"
+        )
+        assert telegram.destination_address == _expected_ga, (
+            f"Group address mismatch in {telegram} - Expected: {group_address}"
+        )
         if payload is not None:
             assert (
                 telegram.payload.value.value == payload  # type: ignore[attr-defined]
@@ -1012,9 +1012,9 @@ async def test_media_permission_unauthorized(

     client = await hass_client()
     response = await client.get(media_url)
-    assert (
-        response.status == HTTPStatus.UNAUTHORIZED
-    ), f"Response not matched: {response}"
+    assert response.status == HTTPStatus.UNAUTHORIZED, (
+        f"Response not matched: {response}"
+    )


 async def test_multiple_devices(
@@ -1306,9 +1306,9 @@ async def test_media_store_load_filesystem_error(
     response = await client.get(
         f"/api/nest/event_media/{device.id}/{event_identifier}"
     )
-    assert (
-        response.status == HTTPStatus.NOT_FOUND
-    ), f"Response not matched: {response}"
+    assert response.status == HTTPStatus.NOT_FOUND, (
+        f"Response not matched: {response}"
+    )


 @pytest.mark.parametrize(("device_traits", "cache_size"), [(BATTERY_CAMERA_TRAITS, 5)])
@@ -68,7 +68,7 @@ async def test_full_flow(
     }
     assert (
         result["result"].unique_id
-        == f"{DUMMY_DATA["STAT_BRUSSELS_NORTH"]}_{DUMMY_DATA["STAT_BRUSSELS_SOUTH"]}"
+        == f"{DUMMY_DATA['STAT_BRUSSELS_NORTH']}_{DUMMY_DATA['STAT_BRUSSELS_SOUTH']}"
     )


|
@ -247,14 +247,14 @@ async def test_sensor_id_migration_standardname(
|
|||
) -> None:
|
||||
"""Test migrating unique id."""
|
||||
old_unique_id = (
|
||||
f"live_{DUMMY_DATA_IMPORT["STAT_BRUSSELS_NORTH"]}_"
|
||||
f"{DUMMY_DATA_IMPORT["STAT_BRUSSELS_NORTH"]}_"
|
||||
f"{DUMMY_DATA_IMPORT["STAT_BRUSSELS_SOUTH"]}"
|
||||
f"live_{DUMMY_DATA_IMPORT['STAT_BRUSSELS_NORTH']}_"
|
||||
f"{DUMMY_DATA_IMPORT['STAT_BRUSSELS_NORTH']}_"
|
||||
f"{DUMMY_DATA_IMPORT['STAT_BRUSSELS_SOUTH']}"
|
||||
)
|
||||
new_unique_id = (
|
||||
f"nmbs_live_{DUMMY_DATA["STAT_BRUSSELS_NORTH"]}_"
|
||||
f"{DUMMY_DATA["STAT_BRUSSELS_NORTH"]}_"
|
||||
f"{DUMMY_DATA["STAT_BRUSSELS_SOUTH"]}"
|
||||
f"nmbs_live_{DUMMY_DATA['STAT_BRUSSELS_NORTH']}_"
|
||||
f"{DUMMY_DATA['STAT_BRUSSELS_NORTH']}_"
|
||||
f"{DUMMY_DATA['STAT_BRUSSELS_SOUTH']}"
|
||||
)
|
||||
old_entry = entity_registry.async_get_or_create(
|
||||
SENSOR_DOMAIN, DOMAIN, old_unique_id
|
||||
|
@@ -287,14 +287,14 @@ async def test_sensor_id_migration_localized_name(
 ) -> None:
     """Test migrating unique id."""
     old_unique_id = (
-        f"live_{DUMMY_DATA_ALTERNATIVE_IMPORT["STAT_BRUSSELS_NORTH"]}_"
-        f"{DUMMY_DATA_ALTERNATIVE_IMPORT["STAT_BRUSSELS_NORTH"]}_"
-        f"{DUMMY_DATA_ALTERNATIVE_IMPORT["STAT_BRUSSELS_SOUTH"]}"
+        f"live_{DUMMY_DATA_ALTERNATIVE_IMPORT['STAT_BRUSSELS_NORTH']}_"
+        f"{DUMMY_DATA_ALTERNATIVE_IMPORT['STAT_BRUSSELS_NORTH']}_"
+        f"{DUMMY_DATA_ALTERNATIVE_IMPORT['STAT_BRUSSELS_SOUTH']}"
     )
     new_unique_id = (
-        f"nmbs_live_{DUMMY_DATA["STAT_BRUSSELS_NORTH"]}_"
-        f"{DUMMY_DATA["STAT_BRUSSELS_NORTH"]}_"
-        f"{DUMMY_DATA["STAT_BRUSSELS_SOUTH"]}"
+        f"nmbs_live_{DUMMY_DATA['STAT_BRUSSELS_NORTH']}_"
+        f"{DUMMY_DATA['STAT_BRUSSELS_NORTH']}_"
+        f"{DUMMY_DATA['STAT_BRUSSELS_SOUTH']}"
     )
     old_entry = entity_registry.async_get_or_create(
         SENSOR_DOMAIN, DOMAIN, old_unique_id
@@ -55,9 +55,9 @@ async def test_chat(
         Message(role="user", content="test message"),
     ]

-    assert (
-        result.response.response_type == intent.IntentResponseType.ACTION_DONE
-    ), result
+    assert result.response.response_type == intent.IntentResponseType.ACTION_DONE, (
+        result
+    )
     assert result.response.speech["plain"]["speech"] == "test response"

     # Test Conversation tracing
@@ -106,9 +106,9 @@ async def test_template_variables(
         hass, "hello", None, context, agent_id=mock_config_entry.entry_id
     )

-    assert (
-        result.response.response_type == intent.IntentResponseType.ACTION_DONE
-    ), result
+    assert result.response.response_type == intent.IntentResponseType.ACTION_DONE, (
+        result
+    )

     args = mock_chat.call_args.kwargs
     prompt = args["messages"][0]["content"]
@@ -463,9 +463,9 @@ async def test_message_history_pruning(
         context=Context(),
         agent_id=mock_config_entry.entry_id,
     )
-    assert (
-        result.response.response_type == intent.IntentResponseType.ACTION_DONE
-    ), result
+    assert result.response.response_type == intent.IntentResponseType.ACTION_DONE, (
+        result
+    )

     # Only the most recent histories should remain
     assert len(agent._history) == 2
@@ -136,9 +136,9 @@ async def test_template_variables(
         hass, "hello", None, context, agent_id=mock_config_entry.entry_id
     )

-    assert (
-        result.response.response_type == intent.IntentResponseType.ACTION_DONE
-    ), result
+    assert result.response.response_type == intent.IntentResponseType.ACTION_DONE, (
+        result
+    )
     assert (
         "The user name is Test User."
         in mock_create.mock_calls[0][2]["messages"][0]["content"]
@@ -178,9 +178,9 @@ async def test_extra_systen_prompt(
         extra_system_prompt=extra_system_prompt,
     )

-    assert (
-        result.response.response_type == intent.IntentResponseType.ACTION_DONE
-    ), result
+    assert result.response.response_type == intent.IntentResponseType.ACTION_DONE, (
+        result
+    )
     assert mock_create.mock_calls[0][2]["messages"][0]["content"].endswith(
         extra_system_prompt
     )
@@ -201,9 +201,9 @@ async def test_extra_systen_prompt(
         extra_system_prompt=None,
     )

-    assert (
-        result.response.response_type == intent.IntentResponseType.ACTION_DONE
-    ), result
+    assert result.response.response_type == intent.IntentResponseType.ACTION_DONE, (
+        result
+    )
     assert mock_create.mock_calls[0][2]["messages"][0]["content"].endswith(
         extra_system_prompt
     )
@@ -222,9 +222,9 @@ async def test_extra_systen_prompt(
         extra_system_prompt=extra_system_prompt2,
     )

-    assert (
-        result.response.response_type == intent.IntentResponseType.ACTION_DONE
-    ), result
+    assert result.response.response_type == intent.IntentResponseType.ACTION_DONE, (
+        result
+    )
     assert mock_create.mock_calls[0][2]["messages"][0]["content"].endswith(
         extra_system_prompt2
     )
@@ -242,9 +242,9 @@ async def test_extra_systen_prompt(
         agent_id=mock_config_entry.entry_id,
     )

-    assert (
-        result.response.response_type == intent.IntentResponseType.ACTION_DONE
-    ), result
+    assert result.response.response_type == intent.IntentResponseType.ACTION_DONE, (
+        result
+    )
     assert mock_create.mock_calls[0][2]["messages"][0]["content"].endswith(
         extra_system_prompt2
     )
@@ -36,9 +36,9 @@ async def test_mapping_integrity() -> None:
     for sensor_map in maps:
         assert sensor_map.attribute in ATTRIBUTES, sensor_map.attribute
         if sensor_map.device_class:
-            assert (
-                sensor_map.device_class in DEVICE_CLASSES
-            ), sensor_map.device_class
+            assert sensor_map.device_class in DEVICE_CLASSES, (
+                sensor_map.device_class
+            )
         if sensor_map.state_class:
             assert sensor_map.state_class in STATE_CLASSES, sensor_map.state_class

@@ -99,9 +99,9 @@ def make_segment_with_parts(
     if discontinuity:
         response.append("#EXT-X-DISCONTINUITY")
     response.extend(
-        f'#EXT-X-PART:DURATION={TEST_PART_DURATION:.3f},'
+        f"#EXT-X-PART:DURATION={TEST_PART_DURATION:.3f},"
         f'URI="./segment/{segment}.{i}.m4s"'
-        f'{",INDEPENDENT=YES" if i % independent_period == 0 else ""}'
+        f"{',INDEPENDENT=YES' if i % independent_period == 0 else ''}"
         for i in range(num_parts)
     )
     response.append(
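This hunk shows quote normalization applied across an implicitly concatenated f-string: outer quotes become double quotes, and string literals inside replacement fields flip to single quotes, while the part containing a literal `"` keeps its single-quoted outer form. A condensed, runnable sketch:

```python
part_duration = 1.0
independent = True

playlist_line = (
    f"#EXT-X-PART:DURATION={part_duration:.3f},"        # was f'...' before 0.9
    f'URI="./segment/0.m4s"'                            # unchanged: holds a literal "
    f"{',INDEPENDENT=YES' if independent else ''}"      # inner quotes now single
)
print(playlist_line)
```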
@@ -124,9 +124,9 @@ async def _test_sensors(
     for entity in entities_and_expected_values:
         state = hass.states.get(entity.entity_id)
         assert state, f"Unable to get state of {entity.entity_id}"
-        assert (
-            state.state == entity.first_value
-        ), f"First update: {entity.entity_id} is expected to have state {entity.first_value} but has {state.state}"
+        assert state.state == entity.first_value, (
+            f"First update: {entity.entity_id} is expected to have state {entity.first_value} but has {state.state}"
+        )

     # Simulate second data update
     with (
@@ -147,6 +147,6 @@ async def _test_sensors(
     # Verify expected vs actual values of second update
     for entity in entities_and_expected_values:
         state = hass.states.get(entity.entity_id)
-        assert (
-            state.state == entity.second_value
-        ), f"Second update: {entity.entity_id} is expected to have state {entity.second_value} but has {state.state}"
+        assert state.state == entity.second_value, (
+            f"Second update: {entity.entity_id} is expected to have state {entity.second_value} but has {state.state}"
+        )
@@ -79,15 +79,15 @@ async def snapshot_platform(
     device_entries = dr.async_entries_for_config_entry(device_registry, config_entry_id)
     assert device_entries
     for device_entry in device_entries:
-        assert device_entry == snapshot(
-            name=f"{device_entry.name}-entry"
-        ), f"device entry snapshot failed for {device_entry.name}"
+        assert device_entry == snapshot(name=f"{device_entry.name}-entry"), (
+            f"device entry snapshot failed for {device_entry.name}"
+        )

     entity_entries = er.async_entries_for_config_entry(entity_registry, config_entry_id)
     assert entity_entries
-    assert (
-        len({entity_entry.domain for entity_entry in entity_entries}) == 1
-    ), "Please limit the loaded platforms to 1 platform."
+    assert len({entity_entry.domain for entity_entry in entity_entries}) == 1, (
+        "Please limit the loaded platforms to 1 platform."
+    )

     translations = await async_get_translations(hass, "en", "entity", [DOMAIN])
     unique_device_classes = []
@@ -99,18 +99,18 @@ async def snapshot_platform(
             if entity_entry.original_device_class not in unique_device_classes:
                 single_device_class_translation = True
                 unique_device_classes.append(entity_entry.original_device_class)
-            assert (
-                (key in translations) or single_device_class_translation
-            ), f"No translation or non unique device_class for entity {entity_entry.unique_id}, expected {key}"
-        assert entity_entry == snapshot(
-            name=f"{entity_entry.entity_id}-entry"
-        ), f"entity entry snapshot failed for {entity_entry.entity_id}"
+            assert (key in translations) or single_device_class_translation, (
+                f"No translation or non unique device_class for entity {entity_entry.unique_id}, expected {key}"
+            )
+        assert entity_entry == snapshot(name=f"{entity_entry.entity_id}-entry"), (
+            f"entity entry snapshot failed for {entity_entry.entity_id}"
+        )
         if entity_entry.disabled_by is None:
             state = hass.states.get(entity_entry.entity_id)
             assert state, f"State not found for {entity_entry.entity_id}"
-            assert state == snapshot(
-                name=f"{entity_entry.entity_id}-state"
-            ), f"state snapshot failed for {entity_entry.entity_id}"
+            assert state == snapshot(name=f"{entity_entry.entity_id}-state"), (
+                f"state snapshot failed for {entity_entry.entity_id}"
+            )


 async def setup_automation(hass: HomeAssistant, alias: str, entity_id: str) -> None:
@@ -242,12 +242,12 @@ def _mocked_feature(
     feature.name = name or id.upper()
     feature.set_value = AsyncMock()
     if not (fixture := FEATURES_FIXTURE.get(id)):
-        assert (
-            require_fixture is False
-        ), f"No fixture defined for feature {id} and require_fixture is True"
-        assert (
-            value is not UNDEFINED
-        ), f"Value must be provided if feature {id} not defined in features.json"
+        assert require_fixture is False, (
+            f"No fixture defined for feature {id} and require_fixture is True"
+        )
+        assert value is not UNDEFINED, (
+            f"Value must be provided if feature {id} not defined in features.json"
+        )
         fixture = {"value": value, "category": "Primary", "type": "Sensor"}
     elif value is not UNDEFINED:
         fixture["value"] = value
@@ -318,12 +318,12 @@ def _mocked_light_effect_module(device) -> LightEffect:
     effect.effect_list = ["Off", "Effect1", "Effect2"]

     async def _set_effect(effect_name, *_, **__):
-        assert (
-            effect_name in effect.effect_list
-        ), f"set_effect '{effect_name}' not in {effect.effect_list}"
-        assert device.modules[
-            Module.Light
-        ], "Need a light module to test set_effect method"
+        assert effect_name in effect.effect_list, (
+            f"set_effect '{effect_name}' not in {effect.effect_list}"
+        )
+        assert device.modules[Module.Light], (
+            "Need a light module to test set_effect method"
+        )
         device.modules[Module.Light].state.light_on = True
         effect.effect = effect_name

@@ -132,7 +132,7 @@ async def test_turn_on_off(
     with (
         expectation,
         patch(
-            f"pyvesync.vesyncfan.VeSyncHumid200300S.{"turn_on" if turn_on else "turn_off"}",
+            f"pyvesync.vesyncfan.VeSyncHumid200300S.{'turn_on' if turn_on else 'turn_off'}",
            return_value=api_response,
         ) as method_mock,
     ):
@@ -133,9 +133,9 @@ def climate_radio_thermostat_ct100_plus_state_fixture() -> dict[str, Any]:
     name="climate_radio_thermostat_ct100_plus_different_endpoints_state",
     scope="package",
 )
-def climate_radio_thermostat_ct100_plus_different_endpoints_state_fixture() -> (
-    dict[str, Any]
-):
+def climate_radio_thermostat_ct100_plus_different_endpoints_state_fixture() -> dict[
+    str, Any
+]:
     """Load the thermostat fixture state with values on different endpoints.

     This device is a radio thermostat ct100.
@@ -336,9 +336,9 @@ def lock_id_lock_as_id150_state_fixture() -> dict[str, Any]:
 @pytest.fixture(
     name="climate_radio_thermostat_ct101_multiple_temp_units_state", scope="package"
 )
-def climate_radio_thermostat_ct101_multiple_temp_units_state_fixture() -> (
-    dict[str, Any]
-):
+def climate_radio_thermostat_ct101_multiple_temp_units_state_fixture() -> dict[
+    str, Any
+]:
     """Load the climate multiple temp units node state fixture data."""
     return load_json_object_fixture(
         "climate_radio_thermostat_ct101_multiple_temp_units_state.json", DOMAIN
@@ -412,7 +412,9 @@ def verify_cleanup(

     try:
         # Verify respx.mock has been cleaned up
-        assert not respx.mock.routes, "respx.mock routes not cleaned up, maybe the test needs to be decorated with @respx.mock"
+        assert not respx.mock.routes, (
+            "respx.mock routes not cleaned up, maybe the test needs to be decorated with @respx.mock"
+        )
     finally:
         # Clear mock routes not break subsequent tests
         respx.mock.clear()