Enable RUF021 (#135832)
parent 85b4be2f16
commit b4f4b06f29
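RUF021 is the Ruff rule described in the `select = [` hunk near the end of this diff as "Parenthesize a and b expressions when chaining and and or together, to make the precedence clear". Because `and` binds tighter than `or` in Python, none of the rewrites in this commit change behaviour; they only spell out the grouping the interpreter already uses. A minimal illustrative sketch (the function and names below are made up, not taken from this diff):

```python
# Illustrative only: the shape of rewrite RUF021 asks for.
# `and` binds tighter than `or`, so both forms evaluate identically;
# the parentheses just make the existing grouping explicit.
def allowed(is_admin: bool, is_owner: bool, is_public: bool) -> bool:
    before = is_admin and is_owner or is_public  # flagged by RUF021
    after = (is_admin and is_owner) or is_public  # RUF021-clean
    assert before == after
    return after
```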
@@ -474,25 +474,30 @@ class ClimateCapabilities(AlexaEntity):
# If we support two modes, one being off, we allow turning on too.
supported_features = self.entity.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
if (
self.entity.domain == climate.DOMAIN
and climate.HVACMode.OFF
in (self.entity.attributes.get(climate.ATTR_HVAC_MODES) or [])
or self.entity.domain == climate.DOMAIN
and (
supported_features
& (
climate.ClimateEntityFeature.TURN_ON
| climate.ClimateEntityFeature.TURN_OFF
(
self.entity.domain == climate.DOMAIN
and climate.HVACMode.OFF
in (self.entity.attributes.get(climate.ATTR_HVAC_MODES) or [])
)
or (
self.entity.domain == climate.DOMAIN
and (
supported_features
& (
climate.ClimateEntityFeature.TURN_ON
| climate.ClimateEntityFeature.TURN_OFF
)
)
)
or self.entity.domain == water_heater.DOMAIN
and (supported_features & water_heater.WaterHeaterEntityFeature.ON_OFF)
or (
self.entity.domain == water_heater.DOMAIN
and (supported_features & water_heater.WaterHeaterEntityFeature.ON_OFF)
)
):
yield AlexaPowerController(self.entity)

if (
self.entity.domain == climate.DOMAIN
or self.entity.domain == water_heater.DOMAIN
if self.entity.domain == climate.DOMAIN or (
self.entity.domain == water_heater.DOMAIN
and (
supported_features
& water_heater.WaterHeaterEntityFeature.OPERATION_MODE

@@ -317,9 +317,8 @@ async def async_enable_proactive_mode(
if should_doorbell:
old_state = data["old_state"]
if (
new_state.domain == event.DOMAIN
or new_state.state == STATE_ON
if new_state.domain == event.DOMAIN or (
new_state.state == STATE_ON
and (old_state is None or old_state.state != STATE_ON)
):
await async_send_doorbell_event_message(

@@ -115,11 +115,8 @@ class BondEntity(Entity):
def _async_update_if_bpup_not_alive(self, now: datetime) -> None:
"""Fetch via the API if BPUP is not alive."""
self._async_schedule_bpup_alive_or_poll()
if (
self.hass.is_stopping
or self._bpup_subs.alive
and self._initialized
and self.available
if self.hass.is_stopping or (
self._bpup_subs.alive and self._initialized and self.available
):
return
if self._update_lock.locked():

@@ -26,7 +26,7 @@ def async_describe_events(
"""Describe bthome logbook event."""
data = event.data
device = dev_reg.async_get(data["device_id"])
name = device and device.name or f"BTHome {data['address']}"
name = (device and device.name) or f"BTHome {data['address']}"
if properties := data["event_properties"]:
message = f"{data['event_class']} {data['event_type']}: {properties}"
else:

@@ -186,12 +186,12 @@ class CalDavUpdateCoordinator(DataUpdateCoordinator[CalendarEvent | None]):
pattern = re.compile(search)
return (
hasattr(vevent, "summary")
and pattern.match(vevent.summary.value)
or hasattr(vevent, "location")
and pattern.match(vevent.location.value)
or hasattr(vevent, "description")
and pattern.match(vevent.description.value)
(hasattr(vevent, "summary") and pattern.match(vevent.summary.value))
or (hasattr(vevent, "location") and pattern.match(vevent.location.value))
or (
hasattr(vevent, "description")
and pattern.match(vevent.description.value)
)
)

@staticmethod

@@ -101,7 +101,8 @@ async def update_listener(hass: HomeAssistant, config_entry: ConfigEntry) -> Non
if (
"xe" in entity.unique_id
and currency not in config_entry.options.get(CONF_EXCHANGE_RATES, [])
or "wallet" in entity.unique_id
) or (
"wallet" in entity.unique_id
and currency not in config_entry.options.get(CONF_CURRENCIES, [])
):
registry.async_remove(entity.entity_id)

@@ -279,9 +279,8 @@ def websocket_update_entity(
result: dict[str, Any] = {"entity_entry": entity_entry.extended_dict}
if "disabled_by" in changes and changes["disabled_by"] is None:
# Enabling an entity requires a config entry reload, or HA restart
if (
not (config_entry_id := entity_entry.config_entry_id)
or (config_entry := hass.config_entries.async_get_entry(config_entry_id))
if not (config_entry_id := entity_entry.config_entry_id) or (
(config_entry := hass.config_entries.async_get_entry(config_entry_id))
and not config_entry.supports_unload
):
result["require_restart"] = True

@@ -362,12 +362,11 @@ class EnergyCostSensor(SensorEntity):
return

if (
(
state_class != SensorStateClass.TOTAL_INCREASING
and energy_state.attributes.get(ATTR_LAST_RESET)
!= self._last_energy_sensor_state.attributes.get(ATTR_LAST_RESET)
)
or state_class == SensorStateClass.TOTAL_INCREASING
state_class != SensorStateClass.TOTAL_INCREASING
and energy_state.attributes.get(ATTR_LAST_RESET)
!= self._last_energy_sensor_state.attributes.get(ATTR_LAST_RESET)
) or (
state_class == SensorStateClass.TOTAL_INCREASING
and reset_detected(
self.hass,
cast(str, self._config[self._adapter.stat_energy_key]),

@@ -83,13 +83,8 @@ async def async_attach_trigger(
)
to_match = condition.zone(hass, zone_state, to_state) if to_state else False

if (
trigger_event == EVENT_ENTER
and not from_match
and to_match
or trigger_event == EVENT_LEAVE
and from_match
and not to_match
if (trigger_event == EVENT_ENTER and not from_match and to_match) or (
trigger_event == EVENT_LEAVE and from_match and not to_match
):
hass.async_run_hass_job(
job,

@@ -375,6 +375,8 @@ class GlancesSensor(CoordinatorEntity[GlancesDataUpdateCoordinator], SensorEntit
self._data_valid = self._attr_native_value is not None and (
not self._numeric_state_expected
or isinstance(self._attr_native_value, (int, float))
or isinstance(self._attr_native_value, str)
and self._attr_native_value.isnumeric()
or (
isinstance(self._attr_native_value, str)
and self._attr_native_value.isnumeric()
)
)

@@ -49,10 +49,8 @@ class AsyncConfigEntryAuth:
"OAuth session is not valid, reauth required"
) from ex
raise ConfigEntryNotReady from ex
if (
isinstance(ex, RefreshError)
or hasattr(ex, "status")
and ex.status == 400
if isinstance(ex, RefreshError) or (
hasattr(ex, "status") and ex.status == 400
):
self.oauth_session.config_entry.async_start_reauth(
self.oauth_session.hass

@@ -440,10 +440,8 @@ class Group(Entity):
if not self._on_off:
return

if (
tr_state is None
or self._assumed_state
and not tr_state.attributes.get(ATTR_ASSUMED_STATE)
if tr_state is None or (
self._assumed_state and not tr_state.attributes.get(ATTR_ASSUMED_STATE)
):
self._assumed_state = self.mode(self._assumed.values())

@@ -111,10 +111,12 @@ class HistoryPeriodView(HomeAssistantView):
# end_time. If it's false, we know there are no states in the
# database up until end_time.
(end_time and not has_states_before(hass, end_time))
or not include_start_time_state
and entity_ids
and not entities_may_have_state_changes_after(
hass, entity_ids, start_time, no_attributes
or (
not include_start_time_state
and entity_ids
and not entities_may_have_state_changes_after(
hass, entity_ids, start_time, no_attributes
)
)
):
return self.json([])

@@ -146,10 +146,12 @@ async def ws_get_history_during_period(
# end_time. If it's false, we know there are no states in the
# database up until end_time.
(end_time and not has_states_before(hass, end_time))
or not include_start_time_state
and entity_ids
and not entities_may_have_state_changes_after(
hass, entity_ids, start_time, no_attributes
or (
not include_start_time_state
and entity_ids
and not entities_may_have_state_changes_after(
hass, entity_ids, start_time, no_attributes
)
)
):
connection.send_result(msg["id"], {})

@@ -409,11 +409,8 @@ class WindowCoveringBasic(OpeningDeviceBase, HomeAccessory):
"""Move cover to value if call came from HomeKit."""
_LOGGER.debug("%s: Set position to %d", self.entity_id, value)

if (
self._supports_stop
and value > 70
or not self._supports_stop
and value >= 50
if (self._supports_stop and value > 70) or (
not self._supports_stop and value >= 50
):
service, position = (SERVICE_OPEN_COVER, 100)
elif value < 30 or not self._supports_stop:

@@ -643,7 +643,8 @@ def state_needs_accessory_mode(state: State) -> bool:
state.domain == MEDIA_PLAYER_DOMAIN
and state.attributes.get(ATTR_DEVICE_CLASS)
in (MediaPlayerDeviceClass.TV, MediaPlayerDeviceClass.RECEIVER)
or state.domain == REMOTE_DOMAIN
) or (
state.domain == REMOTE_DOMAIN
and state.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
& RemoteEntityFeature.ACTIVITY
)

@@ -160,10 +160,8 @@ class LidarrSensor(LidarrEntity[T], SensorEntity):
def queue_str(item: LidarrQueueItem) -> str:
"""Return string description of queue item."""
if (
item.sizeleft > 0
and item.timeleft == "00:00:00"
or not hasattr(item, "trackedDownloadState")
if (item.sizeleft > 0 and item.timeleft == "00:00:00") or not hasattr(
item, "trackedDownloadState"
):
return "stopped"
return item.trackedDownloadState

@@ -126,9 +126,7 @@ class FlowHandler(ConfigFlow, domain=DOMAIN):
HTTPStatus.UNAUTHORIZED,
HTTPStatus.FORBIDDEN,
)
or isinstance(err, AttributeError)
and err.name == "get"
):
) or (isinstance(err, AttributeError) and err.name == "get"):
errors["base"] = "invalid_auth"
else:
errors["base"] = "cannot_connect"

@@ -165,9 +163,7 @@ class FlowHandler(ConfigFlow, domain=DOMAIN):
HTTPStatus.UNAUTHORIZED,
HTTPStatus.FORBIDDEN,
)
or isinstance(err, AttributeError)
and err.name == "get"
):
) or (isinstance(err, AttributeError) and err.name == "get"):
errors["base"] = "invalid_auth"
else:
errors["base"] = "cannot_connect"

@@ -768,11 +768,8 @@ async def async_get_broker_settings(
validated_user_input.update(user_input)
client_certificate_id: str | None = user_input.get(CONF_CLIENT_CERT)
client_key_id: str | None = user_input.get(CONF_CLIENT_KEY)
if (
client_certificate_id
and not client_key_id
or not client_certificate_id
and client_key_id
if (client_certificate_id and not client_key_id) or (
not client_certificate_id and client_key_id
):
errors["base"] = "invalid_inclusion"
return False

@@ -782,14 +779,20 @@ async def async_get_broker_settings(
# Return to form for file upload CA cert or client cert and key
if (
not client_certificate
and user_input.get(SET_CLIENT_CERT)
and not client_certificate_id
or not certificate
and user_input.get(SET_CA_CERT, "off") == "custom"
and not certificate_id
or user_input.get(CONF_TRANSPORT) == TRANSPORT_WEBSOCKETS
and CONF_WS_PATH not in user_input
(
not client_certificate
and user_input.get(SET_CLIENT_CERT)
and not client_certificate_id
)
or (
not certificate
and user_input.get(SET_CA_CERT, "off") == "custom"
and not certificate_id
)
or (
user_input.get(CONF_TRANSPORT) == TRANSPORT_WEBSOCKETS
and CONF_WS_PATH not in user_input
)
):
return False

@@ -26,10 +26,8 @@ def find_matching_platform(
if len(device_point.enum_values) > 0 and device_point.writable:
return Platform.SELECT

if (
description
and description.native_unit_of_measurement == "DM"
or (device_point.raw["maxValue"] and device_point.raw["minValue"])
if (description and description.native_unit_of_measurement == "DM") or (
device_point.raw["maxValue"] and device_point.raw["minValue"]
):
if device_point.writable:
return Platform.NUMBER

@@ -257,7 +257,6 @@ async def async_remove_config_entry_device(
return not any(
identifier
for identifier in device_entry.identifiers
if identifier[0] == DOMAIN
and identifier[1] in modules
if (identifier[0] == DOMAIN and identifier[1] in modules)
or identifier[1] in rooms
)

@@ -80,7 +80,7 @@ class OctoprintDataUpdateCoordinator(DataUpdateCoordinator):
"""Device info."""
unique_id = cast(str, self.config_entry.unique_id)
configuration_url = URL.build(
scheme=self.config_entry.data[CONF_SSL] and "https" or "http",
scheme=(self.config_entry.data[CONF_SSL] and "https") or "http",
host=self.config_entry.data[CONF_HOST],
port=self.config_entry.data[CONF_PORT],
path=self.config_entry.data[CONF_PATH],
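A note on the hunks that use `and`/`or` to pick a value rather than to test a condition (the OctoPrint `scheme=... and "https" or "http"` line above, and the SNMP `errindex and ... or "?"` lines below): RUF021 only adds parentheses here; it does not remove the idiom's caveat that the right-hand operand is chosen whenever the middle value is falsy. A small sketch with made-up values, not taken from the diff:

```python
# (cond and a) or b returns a only when both cond and a are truthy;
# otherwise it falls through to b. A conditional expression avoids
# the falsy-value pitfall entirely.
use_ssl = False
assert ((use_ssl and "https") or "http") == "http"
use_ssl = True
assert ((use_ssl and "https") or "http") == "https"
# Equivalent and explicit:
assert ("https" if use_ssl else "http") == "https"
```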
@@ -534,8 +534,7 @@ class OverkizStateSensor(OverkizDescriptiveEntity, SensorEntity):
# This is probably incorrect and should be fixed in a follow up PR.
# To ensure measurement sensors do not get an `unknown` state on
# a falsy value (e.g. 0 or 0.0) we also check the state_class.
or self.state_class != SensorStateClass.MEASUREMENT
and not state.value
or (self.state_class != SensorStateClass.MEASUREMENT and not state.value)
):
return None

@@ -64,10 +64,8 @@ class DomesticHotWaterProduction(OverkizEntity, WaterHeaterEntity):
for param, mode in OVERKIZ_TO_OPERATION_MODE.items():
# Filter only for mode allowed by this device
# or allow all if no mode definition found
if (
not state_mode_definition
or state_mode_definition.values
and param in state_mode_definition.values
if not state_mode_definition or (
state_mode_definition.values and param in state_mode_definition.values
):
self.operation_mode_to_overkiz[mode] = param
self._attr_operation_list.append(param)

@@ -239,20 +239,13 @@ def execute(
if name.startswith("async_"):
raise ScriptError("Not allowed to access async methods")
if (
obj is hass
and name not in ALLOWED_HASS
or obj is hass.bus
and name not in ALLOWED_EVENTBUS
or obj is hass.states
and name not in ALLOWED_STATEMACHINE
or obj is hass.services
and name not in ALLOWED_SERVICEREGISTRY
or obj is dt_util
and name not in ALLOWED_DT_UTIL
or obj is datetime
and name not in ALLOWED_DATETIME
or isinstance(obj, TimeWrapper)
and name not in ALLOWED_TIME
(obj is hass and name not in ALLOWED_HASS)
or (obj is hass.bus and name not in ALLOWED_EVENTBUS)
or (obj is hass.states and name not in ALLOWED_STATEMACHINE)
or (obj is hass.services and name not in ALLOWED_SERVICEREGISTRY)
or (obj is dt_util and name not in ALLOWED_DT_UTIL)
or (obj is datetime and name not in ALLOWED_DATETIME)
or (isinstance(obj, TimeWrapper) and name not in ALLOWED_TIME)
):
raise ScriptError(f"Not allowed to access {obj.__class__.__name__}.{name}")

@@ -35,8 +35,7 @@ def supported(event: rfxtrxmod.RFXtrxEvent) -> bool:
isinstance(event.device, rfxtrxmod.LightingDevice)
and not event.device.known_to_be_dimmable
and not event.device.known_to_be_rollershutter
or isinstance(event.device, rfxtrxmod.RfyDevice)
)
) or isinstance(event.device, rfxtrxmod.RfyDevice)


async def async_setup_entry(

@@ -271,11 +271,9 @@ class RMVDepartureData:
if not dest_found:
continue

if (
self._lines
and journey["number"] not in self._lines
or journey["minutes"] < self._time_offset
):
if (self._lines and journey["number"] not in self._lines) or journey[
"minutes"
] < self._time_offset:
continue

for attr in ("direction", "departure_time", "product", "minutes"):

@@ -120,9 +120,8 @@ def async_setup_block_entry(
relay_blocks = []
assert coordinator.device.blocks
for block in coordinator.device.blocks:
if (
block.type != "relay"
or block.channel is not None
if block.type != "relay" or (
block.channel is not None
and is_block_channel_type_light(
coordinator.device.settings, int(block.channel)
)

@@ -68,10 +68,8 @@ def process_turn_on_params(
isinstance(siren.available_tones, dict)
and tone in siren.available_tones.values()
)
if (
not siren.available_tones
or tone not in siren.available_tones
and not is_tone_dict_value
if not siren.available_tones or (
tone not in siren.available_tones and not is_tone_dict_value
):
raise ValueError(
f"Invalid tone specified for entity {siren.entity_id}: {tone}, "

@@ -172,7 +172,7 @@ class SnmpScanner(DeviceScanner):
_LOGGER.error(
"SNMP error: %s at %s",
errstatus.prettyPrint(),
errindex and res[int(errindex) - 1][0] or "?",
(errindex and res[int(errindex) - 1][0]) or "?",
)
return None

@@ -264,7 +264,7 @@ class SnmpSwitch(SwitchEntity):
_LOGGER.error(
"SNMP error: %s at %s",
errstatus.prettyPrint(),
errindex and restable[-1][int(errindex) - 1] or "?",
(errindex and restable[-1][int(errindex) - 1]) or "?",
)
else:
for resrow in restable:

@@ -105,9 +105,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: SqueezeboxConfigEntry) -
lms.name = (
(STATUS_QUERY_LIBRARYNAME in status and status[STATUS_QUERY_LIBRARYNAME])
and status[STATUS_QUERY_LIBRARYNAME]
or host
)
version = STATUS_QUERY_VERSION in status and status[STATUS_QUERY_VERSION] or None
) or host
version = (STATUS_QUERY_VERSION in status and status[STATUS_QUERY_VERSION]) or None
# mac can be missing
mac_connect = (
{(CONNECTION_NETWORK_MAC, format_mac(status[STATUS_QUERY_MAC]))}

@@ -273,7 +273,7 @@ async def async_build_source_set(hass: HomeAssistant) -> set[IPv4Address | IPv6A
for source_ip in await network.async_get_enabled_source_ips(hass)
if not source_ip.is_loopback
and not source_ip.is_global
and (source_ip.version == 6 and source_ip.scope_id or source_ip.version == 4)
and ((source_ip.version == 6 and source_ip.scope_id) or source_ip.version == 4)
}

@@ -1052,10 +1052,8 @@ class TextToSpeechUrlView(HomeAssistantView):
data = await request.json()
except ValueError:
return self.json_message("Invalid JSON specified", HTTPStatus.BAD_REQUEST)
if (
not data.get("engine_id")
and not data.get(ATTR_PLATFORM)
or not data.get(ATTR_MESSAGE)
if (not data.get("engine_id") and not data.get(ATTR_PLATFORM)) or not data.get(
ATTR_MESSAGE
):
return self.json_message(
"Must specify platform and message", HTTPStatus.BAD_REQUEST

@@ -89,9 +89,8 @@ class TuyaVacuumEntity(TuyaEntity, StateVacuumEntity):
if self.find_dpcode(DPCode.PAUSE, prefer_function=True):
self._attr_supported_features |= VacuumEntityFeature.PAUSE

if (
self.find_dpcode(DPCode.SWITCH_CHARGE, prefer_function=True)
or (
if self.find_dpcode(DPCode.SWITCH_CHARGE, prefer_function=True) or (
(
enum_type := self.find_dpcode(
DPCode.MODE, dptype=DPType.ENUM, prefer_function=True
)

@@ -69,8 +69,7 @@ async def async_reconnect_client(hass: HomeAssistant, data: Mapping[str, Any]) -
for config_entry in hass.config_entries.async_entries(UNIFI_DOMAIN):
if config_entry.state is not ConfigEntryState.LOADED or (
(hub := config_entry.runtime_data)
and not hub.available
((hub := config_entry.runtime_data) and not hub.available)
or (client := hub.api.clients.get(mac)) is None
or client.is_wired
):

@@ -87,10 +86,8 @@ async def async_remove_clients(hass: HomeAssistant, data: Mapping[str, Any]) ->
- Neither IP, hostname nor name is configured.
"""
for config_entry in hass.config_entries.async_entries(UNIFI_DOMAIN):
if (
config_entry.state is not ConfigEntryState.LOADED
or (hub := config_entry.runtime_data)
and not hub.available
if config_entry.state is not ConfigEntryState.LOADED or (
(hub := config_entry.runtime_data) and not hub.available
):
continue

@@ -291,9 +291,8 @@ class WasherDryerTimeClass(RestoreSensor):
seconds=int(self._wd.get_attribute("Cavity_TimeStatusEstTimeRemaining"))
)

if (
self._attr_native_value is None
or isinstance(self._attr_native_value, datetime)
if self._attr_native_value is None or (
isinstance(self._attr_native_value, datetime)
and abs(new_timestamp - self._attr_native_value) > timedelta(seconds=60)
):
self._attr_native_value = new_timestamp

@@ -85,11 +85,8 @@ async def async_attach_trigger(
from_s = zone_event.data["old_state"]
to_s = zone_event.data["new_state"]

if (
from_s
and not location.has_location(from_s)
or to_s
and not location.has_location(to_s)
if (from_s and not location.has_location(from_s)) or (
to_s and not location.has_location(to_s)
):
return

@@ -107,13 +104,8 @@ async def async_attach_trigger(
from_match = condition.zone(hass, zone_state, from_s) if from_s else False
to_match = condition.zone(hass, zone_state, to_s) if to_s else False

if (
event == EVENT_ENTER
and not from_match
and to_match
or event == EVENT_LEAVE
and from_match
and not to_match
if (event == EVENT_ENTER and not from_match and to_match) or (
event == EVENT_LEAVE and from_match and not to_match
):
description = f"{entity} {_EVENT_DESCRIPTION[event]} {zone_state.attributes[ATTR_FRIENDLY_NAME]}"
hass.async_run_hass_job(

@@ -458,7 +458,7 @@ class ZwaveLight(ZWaveBaseEntity, LightEntity):
if warm_white and cool_white:
self._supports_color_temp = True
# only one white channel (warm white or cool white) = rgbw support
elif red and green and blue and warm_white or cool_white:
elif (red and green and blue and warm_white) or cool_white:
self._supports_rgbw = True

@callback

@@ -884,10 +884,8 @@ def time(
condition_trace_update_result(weekday=weekday, now_weekday=now_weekday)
if (
isinstance(weekday, str)
and weekday != now_weekday
or now_weekday not in weekday
):
isinstance(weekday, str) and weekday != now_weekday
) or now_weekday not in weekday:
return False

return True

@@ -951,8 +951,7 @@ def async_track_template(
if (
not isinstance(last_result, TemplateError)
and result_as_boolean(last_result)
or not result_as_boolean(result)
):
) or not result_as_boolean(result):
return

hass.async_run_hass_job(

@@ -756,10 +756,8 @@ class _ScriptRun:
)

running_script = (
params[CONF_DOMAIN] == "automation"
and params[CONF_SERVICE] == "trigger"
or params[CONF_DOMAIN] in ("python_script", "script")
)
params[CONF_DOMAIN] == "automation" and params[CONF_SERVICE] == "trigger"
) or params[CONF_DOMAIN] in ("python_script", "script")
trace_set_result(params=params, running_script=running_script)
response_data = await self._async_run_long_action(
self._hass.async_create_task_internal(

@@ -601,7 +601,7 @@ class Template:
or filter depending on hass or the state machine.
"""
if self.is_static:
if not parse_result or self.hass and self.hass.config.legacy_templates:
if not parse_result or (self.hass and self.hass.config.legacy_templates):
return self.template
return self._parse_result(self.template)
assert self.hass is not None, "hass variable not set on template"

@@ -630,7 +630,7 @@ class Template:
self._renders += 1

if self.is_static:
if not parse_result or self.hass and self.hass.config.legacy_templates:
if not parse_result or (self.hass and self.hass.config.legacy_templates):
return self.template
return self._parse_result(self.template)

@@ -651,7 +651,7 @@ class Template:
render_result = render_result.strip()

if not parse_result or self.hass and self.hass.config.legacy_templates:
if not parse_result or (self.hass and self.hass.config.legacy_templates):
return render_result

return self._parse_result(render_result)

@@ -826,7 +826,7 @@ class Template:
)
return value if error_value is _SENTINEL else error_value

if not parse_result or self.hass and self.hass.config.legacy_templates:
if not parse_result or (self.hass and self.hass.config.legacy_templates):
return render_result

return self._parse_result(render_result)

@@ -1873,7 +1873,8 @@ def is_state(hass: HomeAssistant, entity_id: str, state: str | list[str]) -> boo
"""Test if a state is a specific value."""
state_obj = _get_state(hass, entity_id)
return state_obj is not None and (
state_obj.state == state or isinstance(state, list) and state_obj.state in state
state_obj.state == state
or (isinstance(state, list) and state_obj.state in state)
)

@@ -359,7 +359,7 @@ class DataUpdateCoordinator(BaseDataUpdateCoordinatorProtocol, Generic[_DataT]):
self._async_unsub_refresh()
self._debounced_refresh.async_cancel()

if self._shutdown_requested or scheduled and self.hass.is_stopping:
if self._shutdown_requested or (scheduled and self.hass.is_stopping):
return

if log_timing := self.logger.isEnabledFor(logging.DEBUG):

@@ -55,10 +55,14 @@ class TypeHintMatch:
"""Confirm if function should be checked."""
return (
self.function_name == node.name
or self.has_async_counterpart
and node.name == f"async_{self.function_name}"
or self.function_name.endswith("*")
and node.name.startswith(self.function_name[:-1])
or (
self.has_async_counterpart
and node.name == f"async_{self.function_name}"
)
or (
self.function_name.endswith("*")
and node.name.startswith(self.function_name[:-1])
)
)

@@ -2998,8 +3002,8 @@ def _is_valid_type(
isinstance(node, nodes.Subscript)
and isinstance(node.value, nodes.Name)
and node.value.name in _KNOWN_GENERIC_TYPES
or isinstance(node, nodes.Name)
and node.name.endswith(_KNOWN_GENERIC_TYPES_TUPLE)
) or (
isinstance(node, nodes.Name) and node.name.endswith(_KNOWN_GENERIC_TYPES_TUPLE)
):
return True

@@ -268,9 +268,8 @@ class HassImportsFormatChecker(BaseChecker):
self, current_package: str, node: nodes.ImportFrom
) -> None:
"""Check for improper 'from ._ import _' invocations."""
if (
node.level <= 1
or not current_package.startswith("homeassistant.components.")
if node.level <= 1 or (
not current_package.startswith("homeassistant.components.")
and not current_package.startswith("tests.components.")
):
return

@@ -763,6 +763,7 @@ select = [
"RUF018", # Avoid assignment expressions in assert statements
"RUF019", # Unnecessary key check before dictionary access
"RUF020", # {never_like} | T is equivalent to T
"RUF021", # Parenthesize a and b expressions when chaining and and or together, to make the precedence clear
"RUF022", # Sort __all__
"RUF024", # Do not pass mutable objects as values to dict.fromkeys
"RUF026", # default_factory is a positional-only argument to defaultdict

@@ -70,8 +70,10 @@ def run():
# If we want to only add references to own integrations
# but not include entity integrations
if (
args.limit_reference
and (key_integration != key_to_reference_integration and not is_common)
(
args.limit_reference
and (key_integration != key_to_reference_integration and not is_common)
)
# Do not create self-references in entity integrations
or key_integration in Platform.__members__.values()
):

@@ -316,7 +316,7 @@ async def test_sync_notifications(agents) -> None:
config, "async_sync_notification", return_value=HTTPStatus.NO_CONTENT
) as mock:
await config.async_sync_notification_all("1234", {})
assert not agents or bool(mock.mock_calls) and agents
assert not agents or (bool(mock.mock_calls) and agents)


@pytest.mark.parametrize(

@@ -171,11 +171,8 @@ async def test_receiving_message_successfully(
assert data["subject"] == "Test subject"
assert data["uid"] == "1"
assert "Test body" in data["text"]
assert (
valid_date
and isinstance(data["date"], datetime)
or not valid_date
and data["date"] is None
assert (valid_date and isinstance(data["date"], datetime)) or (
not valid_date and data["date"] is None
)

@@ -581,11 +578,8 @@ async def test_reset_last_message(
assert data["subject"] == "Test subject"
assert data["text"]
assert data["initial"]
assert (
valid_date
and isinstance(data["date"], datetime)
or not valid_date
and data["date"] is None
assert (valid_date and isinstance(data["date"], datetime)) or (
not valid_date and data["date"] is None
)

# Simulate an update where no messages are found (needed for pushed coordinator)

@@ -42,9 +42,8 @@ class CommandTestParameters:
Commands that are named with 'Subset' are expected not to be read from Room A.
"""

if (
self.expected_event_data_extra is None
or "Subset" in self.expected_event_data_extra["command"]
if self.expected_event_data_extra is None or (
"Subset" in self.expected_event_data_extra["command"]
and self.room_id not in SUBSET_ROOMS
):
return None

@@ -25,10 +25,8 @@ def setup_owproxy_mock_devices(owproxy: MagicMock, device_ids: list[str]) -> Non
if (side_effect := dir_side_effect.get(path)) is None:
raise NotImplementedError(f"Unexpected _dir call: {path}")
result = side_effect.pop(0)
if (
isinstance(result, Exception)
or isinstance(result, type)
and issubclass(result, Exception)
if isinstance(result, Exception) or (
isinstance(result, type) and issubclass(result, Exception)
):
raise result
return result

@@ -39,10 +37,8 @@ def setup_owproxy_mock_devices(owproxy: MagicMock, device_ids: list[str]) -> Non
if len(side_effect) == 0:
raise ProtocolError(f"Missing injected value for: {path}")
result = side_effect.pop(0)
if (
isinstance(result, Exception)
or isinstance(result, type)
and issubclass(result, Exception)
if isinstance(result, Exception) or (
isinstance(result, type) and issubclass(result, Exception)
):
raise result
return result