Remove unused noqas (#135583)
parent f57640c2cd
commit 8a35261fd8
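An unused noqa is a suppression comment for a rule that no longer fires on the annotated line, which is what a checker such as Ruff's unused-noqa rule (RUF100) reports; the codes dropped in the hunks below (E721, BLE001, C901, RET504, S608, F401, SLF001, …) are the rules whose suppressions had become stale. A minimal before/after sketch, reusing a line from one of the hunks that follow (the surrounding `args` variable belongs to that hunk, not to this note):

# Before: the directive no longer silences anything on this line.
path = args[0] if type(args[0]) is str else str(args[0])  # noqa: E721
# After: identical behavior, without the stale suppression.
path = args[0] if type(args[0]) is str else str(args[0])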
@@ -308,7 +308,7 @@ class AuthStore:
         credentials.data = data
         self._async_schedule_save()

-    async def async_load(self) -> None:  # noqa: C901
+    async def async_load(self) -> None:
         """Load the users."""
         if self._loaded:
             raise RuntimeError("Auth storage is already loaded")
@@ -31,7 +31,7 @@ def _check_import_call_allowed(mapped_args: dict[str, Any]) -> bool:
 def _check_file_allowed(mapped_args: dict[str, Any]) -> bool:
     # If the file is in /proc we can ignore it.
     args = mapped_args["args"]
-    path = args[0] if type(args[0]) is str else str(args[0])  # noqa: E721
+    path = args[0] if type(args[0]) is str else str(args[0])
     return path.startswith(ALLOWED_FILE_PREFIXES)


@@ -132,7 +132,7 @@ class ActiveBluetoothDataUpdateCoordinator[_T](PassiveBluetoothDataUpdateCoordin
                 )
                 self.last_poll_successful = False
             return
-        except Exception:  # noqa: BLE001
+        except Exception:
             if self.last_poll_successful:
                 self.logger.exception("%s: Failure while polling", self.address)
                 self.last_poll_successful = False
@@ -127,7 +127,7 @@ class ActiveBluetoothProcessorCoordinator[_DataT](
                 )
                 self.last_poll_successful = False
             return
-        except Exception:  # noqa: BLE001
+        except Exception:
             if self.last_poll_successful:
                 self.logger.exception("%s: Failure while polling", self.address)
                 self.last_poll_successful = False
@@ -523,7 +523,7 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
         Remove this compatibility shim in 2025.1 or later.
         """
         features = self.supported_features
-        if type(features) is int:  # noqa: E721
+        if type(features) is int:
             new_features = CameraEntityFeature(features)
             self._report_deprecated_supported_features_values(new_features)
             return new_features
@@ -65,7 +65,7 @@ async def _get_services(hass: HomeAssistant) -> list[dict[str, Any]]:
     services: list[dict[str, Any]]
     if DATA_SERVICES in hass.data:
         services = hass.data[DATA_SERVICES]
-        return services  # noqa: RET504
+        return services

     try:
         services = await account_link.async_fetch_available_services(
@@ -300,7 +300,7 @@ class CoverEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
     def supported_features(self) -> CoverEntityFeature:
         """Flag supported features."""
         if (features := self._attr_supported_features) is not None:
-            if type(features) is int:  # noqa: E721
+            if type(features) is int:
                 new_features = CoverEntityFeature(features)
                 self._report_deprecated_supported_features_values(new_features)
                 return new_features
@@ -41,7 +41,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: DelugeConfigEntry) -> bo
         await hass.async_add_executor_job(api.connect)
     except (ConnectionRefusedError, TimeoutError, SSLError) as ex:
         raise ConfigEntryNotReady("Connection to Deluge Daemon failed") from ex
-    except Exception as ex:  # noqa: BLE001
+    except Exception as ex:
         if type(ex).__name__ == "BadLoginError":
             raise ConfigEntryAuthFailed(
                 "Credentials for Deluge client are not valid"
@@ -865,7 +865,7 @@ def state_supports_hue_brightness(
         return False
     features = state.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
     enum = ENTITY_FEATURES_BY_DOMAIN[domain]
-    features = enum(features) if type(features) is int else features  # noqa: E721
+    features = enum(features) if type(features) is int else features
     return required_feature in features


@@ -441,7 +441,7 @@ class FritzBoxTools(DataUpdateCoordinator[UpdateCoordinatorDataType]):
             hosts_info = await self.hass.async_add_executor_job(
                 self.fritz_hosts.get_hosts_info
             )
-        except Exception as ex:  # noqa: BLE001
+        except Exception as ex:
             if not self.hass.is_stopping:
                 raise HomeAssistantError(
                     translation_domain=DOMAIN,
@@ -78,7 +78,7 @@ class GeniusHubConfigFlow(ConfigFlow, domain=DOMAIN):
                 errors["base"] = "invalid_host"
             except (TimeoutError, aiohttp.ClientConnectionError):
                 errors["base"] = "cannot_connect"
-            except Exception:  # noqa: BLE001
+            except Exception:
                 _LOGGER.exception("Unexpected exception")
                 errors["base"] = "unknown"
             else:
@@ -113,7 +113,7 @@ class GeniusHubConfigFlow(ConfigFlow, domain=DOMAIN):
                 errors["base"] = "invalid_host"
             except (TimeoutError, aiohttp.ClientConnectionError):
                 errors["base"] = "cannot_connect"
-            except Exception:  # noqa: BLE001
+            except Exception:
                 _LOGGER.exception("Unexpected exception")
                 errors["base"] = "unknown"
             else:
@@ -342,7 +342,7 @@ def get_next_departure(
         {tomorrow_order}
         origin_stop_time.departure_time
         LIMIT :limit
-        """  # noqa: S608
+        """
     result = schedule.engine.connect().execute(
         text(sql_query),
         {
@@ -115,7 +115,7 @@ from .coordinator import (
     get_supervisor_info,  # noqa: F401
     get_supervisor_stats,  # noqa: F401
 )
-from .discovery import async_setup_discovery_view  # noqa: F401
+from .discovery import async_setup_discovery_view
 from .handler import (  # noqa: F401
     HassIO,
     HassioAPIError,
@@ -93,7 +93,7 @@ ILLUMINATION_DEVICE_ATTRIBUTES = {
 }


-async def async_setup_entry(  # noqa: C901
+async def async_setup_entry(
     hass: HomeAssistant,
     config_entry: ConfigEntry,
     async_add_entities: AddEntitiesCallback,
@@ -78,7 +78,7 @@ class LetPotConfigFlow(ConfigFlow, domain=DOMAIN):
                 errors["base"] = "cannot_connect"
             except LetPotAuthenticationException:
                 errors["base"] = "invalid_auth"
-            except Exception:  # noqa: BLE001
+            except Exception:
                 _LOGGER.exception("Unexpected exception")
                 errors["base"] = "unknown"
             else:
@@ -83,7 +83,7 @@ class SkyType(IntEnum):
     CLOUDS = 2


-class LIFXUpdateCoordinator(DataUpdateCoordinator[None]):  # noqa: PLR0904
+class LIFXUpdateCoordinator(DataUpdateCoordinator[None]):
     """DataUpdateCoordinator to gather data for a specific lifx device."""

     def __init__(
@@ -456,7 +456,7 @@ class LIFXUpdateCoordinator(DataUpdateCoordinator[None]):  # noqa: PLR0904
         )
         self.active_effect = FirmwareEffect[effect.upper()]

-    async def async_set_matrix_effect(  # noqa: PLR0917
+    async def async_set_matrix_effect(
         self,
         effect: str,
         palette: list[tuple[int, int, int, int]] | None = None,
@@ -1388,7 +1388,7 @@ class LightEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
         Remove this compatibility shim in 2025.1 or later.
         """
         features = self.supported_features
-        if type(features) is not int:  # noqa: E721
+        if type(features) is not int:
             return features
         new_features = LightEntityFeature(features)
         if self._deprecated_supported_features_reported is True:
@@ -178,7 +178,7 @@ async def _client_listen(
         if entry.state != ConfigEntryState.LOADED:
             raise
         LOGGER.error("Failed to listen: %s", err)
-    except Exception as err:  # noqa: BLE001
+    except Exception as err:
         # We need to guard against unknown exceptions to not crash this task.
         LOGGER.exception("Unexpected exception: %s", err)
         if entry.state != ConfigEntryState.LOADED:
@@ -780,7 +780,7 @@ class MediaPlayerEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
         Remove this compatibility shim in 2025.1 or later.
         """
         features = self.supported_features
-        if type(features) is int:  # noqa: E721
+        if type(features) is int:
             new_features = MediaPlayerEntityFeature(features)
             self._report_deprecated_supported_features_values(new_features)
             return new_features
@@ -386,7 +386,7 @@ async def async_start(  # noqa: C901
         _async_add_component(discovery_payload)

     @callback
-    def async_discovery_message_received(msg: ReceiveMessage) -> None:  # noqa: C901
+    def async_discovery_message_received(msg: ReceiveMessage) -> None:
         """Process the received message."""
         mqtt_data.last_discovery = msg.timestamp
         payload = msg.payload
@@ -587,7 +587,7 @@ class MqttLight(MqttEntity, LightEntity, RestoreEntity):
             self._attr_xy_color = cast(tuple[float, float], xy_color)

     @callback
-    def _prepare_subscribe_topics(self) -> None:  # noqa: C901
+    def _prepare_subscribe_topics(self) -> None:
         """(Re)Subscribe to topics."""
         self.add_subscription(CONF_STATE_TOPIC, self._state_received, {"_attr_is_on"})
         self.add_subscription(
@@ -50,7 +50,7 @@ class NiceGOConfigFlow(ConfigFlow, domain=DOMAIN):
                 )
             except AuthFailedError:
                 errors["base"] = "invalid_auth"
-            except Exception:  # noqa: BLE001
+            except Exception:
                 _LOGGER.exception("Unexpected exception")
                 errors["base"] = "unknown"
             else:
@@ -92,7 +92,7 @@ class NiceGOConfigFlow(ConfigFlow, domain=DOMAIN):
                 )
             except AuthFailedError:
                 errors["base"] = "invalid_auth"
-            except Exception:  # noqa: BLE001
+            except Exception:
                 _LOGGER.exception("Unexpected exception")
                 errors["base"] = "unknown"
             else:
@@ -105,7 +105,7 @@ async def verify_connection(
         errors[CONF_BASE] = "response_error"
     except UnsupportedDeviceError:
         errors[CONF_BASE] = "unsupported"
-    except Exception:  # noqa: BLE001
+    except Exception:
         _LOGGER.exception(
             "Unknown exception while verifying connection with your Plugwise Smile"
         )
@@ -63,7 +63,7 @@ class SlideConfigFlow(ConfigFlow, domain=DOMAIN):
             return {"base": "cannot_connect"}
         except (AuthenticationFailed, DigestAuthCalcError):
             return {"base": "invalid_auth"}
-        except Exception:  # noqa: BLE001
+        except Exception:
             _LOGGER.exception("Exception occurred during connection test")
             return {"base": "unknown"}

@@ -85,7 +85,7 @@ class SlideConfigFlow(ConfigFlow, domain=DOMAIN):
             return {"base": "cannot_connect"}
         except (AuthenticationFailed, DigestAuthCalcError):
             return {"base": "invalid_auth"}
-        except Exception:  # noqa: BLE001
+        except Exception:
            _LOGGER.exception("Exception occurred during connection test")
            return {"base": "unknown"}

@@ -98,7 +98,7 @@ def async_get_energy_site_for_entry(
     return energy_data


-def async_register_services(hass: HomeAssistant) -> None:  # noqa: C901
+def async_register_services(hass: HomeAssistant) -> None:
     """Set up the Teslemetry services."""

     async def navigate_gps_request(call: ServiceCall) -> None:
@@ -376,7 +376,7 @@ class StateVacuumEntity(
         Remove this compatibility shim in 2025.1 or later.
         """
         features = self.supported_features
-        if type(features) is int:  # noqa: E721
+        if type(features) is int:
             new_features = VacuumEntityFeature(features)
             self._report_deprecated_supported_features_values(new_features)
             return new_features
@@ -189,13 +189,13 @@ class ActiveConnection:
         if (
             # Not using isinstance as we don't care about children
             # as these are always coming from JSON
-            type(msg) is not dict  # noqa: E721
+            type(msg) is not dict
             or (
                 not (cur_id := msg.get("id"))
-                or type(cur_id) is not int  # noqa: E721
+                or type(cur_id) is not int
                 or cur_id < 0
                 or not (type_ := msg.get("type"))
-                or type(type_) is not str  # noqa: E721
+                or type(type_) is not str
             )
         ):
             self.logger.error("Received invalid command: %s", msg)
@@ -197,7 +197,7 @@ class WebSocketHandler:
             # max pending messages.
             return

-        if type(message) is not bytes:  # noqa: E721
+        if type(message) is not bytes:
            if isinstance(message, dict):
                message = message_to_json_bytes(message)
            elif isinstance(message, str):
@@ -490,7 +490,7 @@ class WebSocketHandler:
             )

             # command_msg_data is always deserialized from JSON as a list
-            if type(command_msg_data) is not list:  # noqa: E721
+            if type(command_msg_data) is not list:
                 async_handle_str(command_msg_data)
                 continue

@@ -260,10 +260,10 @@ class XiaomiGenericSelector(XiaomiSelector):

         if description.options_map:
             self._options_map = {}
-            for key, val in enum_class._member_map_.items():  # noqa: SLF001
+            for key, val in enum_class._member_map_.items():
                 self._options_map[description.options_map[key]] = val
         else:
-            self._options_map = enum_class._member_map_  # noqa: SLF001
+            self._options_map = enum_class._member_map_
         self._reverse_map = {val: key for key, val in self._options_map.items()}
         self._enum_class = enum_class

@@ -1170,7 +1170,7 @@ def async_add_entities(
         # broad exception to prevent a single entity from preventing an entire platform from loading
         # this can potentially be caused by a misbehaving device or a bad quirk. Not ideal but the
         # alternative is adding try/catch to each entity class __init__ method with a specific exception
-        except Exception:  # noqa: BLE001
+        except Exception:
             _LOGGER.exception(
                 "Error while adding entity from entity data: %s", entity_data
             )
@@ -220,7 +220,7 @@ async def async_check_ha_config_file(  # noqa: C901
         except (vol.Invalid, HomeAssistantError) as ex:
             _comp_error(ex, domain, config, config[domain])
             continue
-        except Exception as err:  # noqa: BLE001
+        except Exception as err:
             logging.getLogger(__name__).exception(
                 "Unexpected error validating config"
             )
@@ -674,11 +674,7 @@ def string(value: Any) -> str:
         raise vol.Invalid("string value is None")

     # This is expected to be the most common case, so check it first.
-    if (
-        type(value) is str  # noqa: E721
-        or type(value) is NodeStrClass
-        or isinstance(value, str)
-    ):
+    if type(value) is str or type(value) is NodeStrClass or isinstance(value, str):
         return value

     if isinstance(value, template_helper.ResultWrapper):
@@ -1028,7 +1028,7 @@ class Entity(
             return STATE_UNAVAILABLE
         if (state := self.state) is None:
             return STATE_UNKNOWN
-        if type(state) is str:  # noqa: E721
+        if type(state) is str:
             # fast path for strings
             return state
         if isinstance(state, float):
@@ -12,7 +12,7 @@ from typing import TYPE_CHECKING, Any, Final
 import orjson

 from homeassistant.util.file import write_utf8_file, write_utf8_file_atomic
-from homeassistant.util.json import (  # noqa: F401
+from homeassistant.util.json import (
     JSON_DECODE_EXCEPTIONS as _JSON_DECODE_EXCEPTIONS,
     JSON_ENCODE_EXCEPTIONS as _JSON_ENCODE_EXCEPTIONS,
     SerializationError,
@@ -502,7 +502,7 @@ def _has_match(ids: str | list[str] | None) -> TypeGuard[str | list[str]]:


 @bind_hass
-def async_extract_referenced_entity_ids(  # noqa: C901
+def async_extract_referenced_entity_ids(
     hass: HomeAssistant, service_call: ServiceCall, expand_group: bool = True
 ) -> SelectedEntities:
     """Extract referenced entity IDs from a service call."""
@@ -46,7 +46,7 @@ def json_loads_array(obj: bytes | bytearray | memoryview | str, /) -> JsonArrayT
     """Parse JSON data and ensure result is a list."""
     value: JsonValueType = json_loads(obj)
     # Avoid isinstance overhead as we are not interested in list subclasses
-    if type(value) is list:  # noqa: E721
+    if type(value) is list:
         return value
     raise ValueError(f"Expected JSON to be parsed as a list got {type(value)}")

@@ -55,7 +55,7 @@ def json_loads_object(obj: bytes | bytearray | memoryview | str, /) -> JsonObjec
     """Parse JSON data and ensure result is a dictionary."""
     value: JsonValueType = json_loads(obj)
     # Avoid isinstance overhead as we are not interested in dict subclasses
-    if type(value) is dict:  # noqa: E721
+    if type(value) is dict:
         return value
     raise ValueError(f"Expected JSON to be parsed as a dict got {type(value)}")

@@ -95,7 +95,7 @@ def load_json_array(
         default = []
     value: JsonValueType = load_json(filename, default=default)
     # Avoid isinstance overhead as we are not interested in list subclasses
-    if type(value) is list:  # noqa: E721
+    if type(value) is list:
         return value
     _LOGGER.exception(
         "Expected JSON to be parsed as a list got %s in: %s", {type(value)}, filename
@@ -115,7 +115,7 @@ def load_json_object(
         default = {}
     value: JsonValueType = load_json(filename, default=default)
     # Avoid isinstance overhead as we are not interested in dict subclasses
-    if type(value) is dict:  # noqa: E721
+    if type(value) is dict:
         return value
     _LOGGER.exception(
         "Expected JSON to be parsed as a dict got %s in: %s", {type(value)}, filename
@@ -41,7 +41,7 @@ GENERAL_SETTINGS: Final[dict[str, str]] = {
     ),
     "show_error_codes": "true",
     "follow_imports": "normal",
-    # "enable_incomplete_feature": ", ".join(  # noqa: FLY002
+    # "enable_incomplete_feature": ", ".join(
     #     []
     # ),
     # Enable some checks globally.
@@ -454,7 +454,7 @@ ONBOARDING_SCHEMA = vol.Schema(
 )


-def validate_translation_file(  # noqa: C901
+def validate_translation_file(
     config: Config,
     integration: Integration,
     all_strings: dict[str, Any] | None,
@@ -47,7 +47,7 @@ async def test_full_flow(
     result = await hass.config_entries.flow.async_init(
         DOMAIN, context={"source": config_entries.SOURCE_USER}
     )
-    state = config_entry_oauth2_flow._encode_jwt(  # noqa: SLF001
+    state = config_entry_oauth2_flow._encode_jwt(
         hass,
         {
             "flow_id": result["flow_id"],
@@ -570,7 +570,7 @@ async def test_unit_translation_key_without_platform_raises(
         match="cannot have a translation key for unit of measurement before "
         "being added to the entity platform",
     ):
-        unit = entity0.unit_of_measurement  # noqa: F841
+        unit = entity0.unit_of_measurement

     setup_test_component_platform(hass, sensor.DOMAIN, [entity0])

@@ -580,7 +580,7 @@ async def test_unit_translation_key_without_platform_raises(
     await hass.async_block_till_done()

     # Should not raise after being added to the platform
-    unit = entity0.unit_of_measurement  # noqa: F841
+    unit = entity0.unit_of_measurement
     assert unit == "Tests"


@@ -43,7 +43,7 @@ from homeassistant import block_async_io
 from homeassistant.exceptions import ServiceNotFound

 # Setup patching of recorder functions before any other Home Assistant imports
-from . import patch_recorder  # noqa: F401, isort:skip
+from . import patch_recorder

 # Setup patching of dt_util time functions before any other Home Assistant imports
 from . import patch_time  # noqa: F401, isort:skip
@@ -6,7 +6,7 @@ from contextlib import contextmanager
 import sys

 # Patch recorder util session scope
-from homeassistant.helpers import recorder as recorder_helper  # noqa: E402
+from homeassistant.helpers import recorder as recorder_helper

 # Make sure homeassistant.components.recorder.util is not already imported
 assert "homeassistant.components.recorder.util" not in sys.modules
@@ -261,7 +261,7 @@ async def test_protect_path_read_bytes(caplog: pytest.LogCaptureFixture) -> None
     block_async_io.enable()
     with (
         contextlib.suppress(FileNotFoundError),
-        Path("/config/data_not_exist").read_bytes(),  # noqa: ASYNC230
+        Path("/config/data_not_exist").read_bytes(),
     ):
         pass

@@ -274,7 +274,7 @@ async def test_protect_path_read_text(caplog: pytest.LogCaptureFixture) -> None:
     block_async_io.enable()
     with (
         contextlib.suppress(FileNotFoundError),
-        Path("/config/data_not_exist").read_text(encoding="utf8"),  # noqa: ASYNC230
+        Path("/config/data_not_exist").read_text(encoding="utf8"),
     ):
         pass

@@ -287,7 +287,7 @@ async def test_protect_path_write_bytes(caplog: pytest.LogCaptureFixture) -> Non
     block_async_io.enable()
     with (
         contextlib.suppress(FileNotFoundError),
-        Path("/config/data/not/exist").write_bytes(b"xxx"),  # noqa: ASYNC230
+        Path("/config/data/not/exist").write_bytes(b"xxx"),
     ):
         pass

@@ -300,7 +300,7 @@ async def test_protect_path_write_text(caplog: pytest.LogCaptureFixture) -> None
     block_async_io.enable()
     with (
         contextlib.suppress(FileNotFoundError),
-        Path("/config/data/not/exist").write_text("xxx", encoding="utf8"),  # noqa: ASYNC230
+        Path("/config/data/not/exist").write_text("xxx", encoding="utf8"),
     ):
         pass