Bump ruff to 0.8.0 (#131273)

pull/131294/head
Sid 2024-11-22 16:53:26 +01:00 committed by GitHub
parent d3f3fdc7ef
commit 9e98e446a2
18 changed files with 62 additions and 64 deletions

View File

@@ -1,6 +1,6 @@
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.7.4
+    rev: v0.8.0
     hooks:
       - id: ruff
         args:

View File

@@ -173,17 +173,17 @@ class AlarmControlPanelEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
         # setting the state directly.
         cls.__alarm_legacy_state = True

-    def __setattr__(self, __name: str, __value: Any) -> None:
+    def __setattr__(self, name: str, value: Any, /) -> None:
         """Set attribute.

         Deprecation warning if setting '_attr_state' directly
         unless already reported.
         """
-        if __name == "_attr_state":
+        if name == "_attr_state":
             if self.__alarm_legacy_state_reported is not True:
                 self._report_deprecated_alarm_state_handling()
                 self.__alarm_legacy_state_reported = True
-        return super().__setattr__(__name, __value)
+        return super().__setattr__(name, value)

     @callback
     def add_to_platform_start(
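This hunk (and several below) applies the same migration: parameters whose leading double underscore only marked them as "positional-only by convention" are renamed, and the intent is made explicit with PEP 570's `/` marker instead. A minimal standalone sketch of the difference, not taken from this PR:

def legacy(__value: str) -> str:
    """Old convention: the __ prefix hints that callers should not pass a keyword (nothing enforces it)."""
    return __value.upper()


def modern(value: str, /) -> str:
    """PEP 570: every parameter before `/` is positional-only, enforced at call time."""
    return value.upper()


modern("hi")          # fine
# modern(value="hi")  # would raise TypeError: positional-only argument passed as keyword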

View File

@@ -314,14 +314,14 @@ class ClimateEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
     # once migrated and set the feature flags TURN_ON/TURN_OFF as needed.
     _enable_turn_on_off_backwards_compatibility: bool = True

-    def __getattribute__(self, __name: str) -> Any:
+    def __getattribute__(self, name: str, /) -> Any:
         """Get attribute.

         Modify return of `supported_features` to
         include `_mod_supported_features` if attribute is set.
         """
-        if __name != "supported_features":
-            return super().__getattribute__(__name)
+        if name != "supported_features":
+            return super().__getattribute__(name)

         # Convert the supported features to ClimateEntityFeature.
         # Remove this compatibility shim in 2025.1 or later.

View File

@@ -245,14 +245,14 @@ class FanEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
     # once migrated and set the feature flags TURN_ON/TURN_OFF as needed.
     _enable_turn_on_off_backwards_compatibility: bool = True

-    def __getattribute__(self, __name: str) -> Any:
+    def __getattribute__(self, name: str, /) -> Any:
         """Get attribute.

         Modify return of `supported_features` to
         include `_mod_supported_features` if attribute is set.
         """
-        if __name != "supported_features":
-            return super().__getattribute__(__name)
+        if name != "supported_features":
+            return super().__getattribute__(name)

         # Convert the supported features to ClimateEntityFeature.
         # Remove this compatibility shim in 2025.1 or later.

View File

@@ -36,7 +36,7 @@ class CecEntity(Entity):
         """Initialize the device."""
         self._device = device
         self._logical_address = logical
-        self.entity_id = "%s.%d" % (DOMAIN, self._logical_address)
+        self.entity_id = f"{DOMAIN}.{self._logical_address}"
         self._set_attr_name()
         self._attr_icon = ICONS_BY_TYPE.get(self._device.type, ICON_UNKNOWN)
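The changed line above also drops printf-style formatting in favour of an f-string, the kind of cleanup the pyupgrade-family rules such as UP031 flag. A tiny illustration with made-up values, not from the integration:

domain = "media_player"
logical_address = 4

old_style = "%s.%d" % (domain, logical_address)  # printf-style formatting
new_style = f"{domain}.{logical_address}"        # equivalent f-string
assert old_style == new_style == "media_player.4"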

View File

@@ -320,15 +320,15 @@ class ShoppingData:
             # Remove the item from mapping after it's appended in the result array.
             del all_items_mapping[item_id]
         # Append the rest of the items
-        for key in all_items_mapping:
+        for value in all_items_mapping.values():
             # All the unchecked items must be passed in the item_ids array,
             # so all items left in the mapping should be checked items.
-            if all_items_mapping[key]["complete"] is False:
+            if value["complete"] is False:
                 raise vol.Invalid(
                     "The item ids array doesn't contain all the unchecked shopping list"
                     " items."
                 )
-            new_items.append(all_items_mapping[key])
+            new_items.append(value)
         self.items = new_items
         self.hass.async_add_executor_job(self.save)
         self._async_notify()
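This hunk is one instance of a broader cleanup in the PR: loops that index a dict with its own key are rewritten to iterate `.values()` (or `.items()` when the key is still needed), which is what Ruff's Pylint-derived dict-index lint nudges toward (the exact rule code is an assumption here, likely PLC0206). A self-contained sketch:

items = {"milk": {"complete": False}, "bread": {"complete": True}}

# Before: every access goes back through the key.
for key in items:
    if items[key]["complete"] is False:
        print(f"{key} still needs to be bought")

# After: .items() yields key and value in one pass.
for name, entry in items.items():
    if entry["complete"] is False:
        print(f"{name} still needs to be bought")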

View File

@@ -60,9 +60,9 @@ class SteamDataUpdateCoordinator(
             for player in response["response"]["players"]["player"]
             if player["steamid"] in _ids
         }
-        for k in players:
-            data = self.player_interface.GetSteamLevel(steamid=players[k]["steamid"])
-            players[k]["level"] = data["response"].get("player_level")
+        for value in players.values():
+            data = self.player_interface.GetSteamLevel(steamid=value["steamid"])
+            value["level"] = data["response"].get("player_level")
         return players

     async def _async_update_data(self) -> dict[str, dict[str, str | int]]:

View File

@@ -30,32 +30,30 @@ class SerializationError(HomeAssistantError):
     """Error serializing the data to JSON."""


-def json_loads(__obj: bytes | bytearray | memoryview | str) -> JsonValueType:
+def json_loads(obj: bytes | bytearray | memoryview | str, /) -> JsonValueType:
     """Parse JSON data.

     This adds a workaround for orjson not handling subclasses of str,
     https://github.com/ijl/orjson/issues/445.
     """
     # Avoid isinstance overhead for the common case
-    if type(__obj) not in (bytes, bytearray, memoryview, str) and isinstance(
-        __obj, str
-    ):
-        return orjson.loads(str(__obj))  # type:ignore[no-any-return]
-    return orjson.loads(__obj)  # type:ignore[no-any-return]
+    if type(obj) not in (bytes, bytearray, memoryview, str) and isinstance(obj, str):
+        return orjson.loads(str(obj))  # type:ignore[no-any-return]
+    return orjson.loads(obj)  # type:ignore[no-any-return]


-def json_loads_array(__obj: bytes | bytearray | memoryview | str) -> JsonArrayType:
+def json_loads_array(obj: bytes | bytearray | memoryview | str, /) -> JsonArrayType:
     """Parse JSON data and ensure result is a list."""
-    value: JsonValueType = json_loads(__obj)
+    value: JsonValueType = json_loads(obj)
     # Avoid isinstance overhead as we are not interested in list subclasses
     if type(value) is list:  # noqa: E721
         return value
     raise ValueError(f"Expected JSON to be parsed as a list got {type(value)}")


-def json_loads_object(__obj: bytes | bytearray | memoryview | str) -> JsonObjectType:
+def json_loads_object(obj: bytes | bytearray | memoryview | str, /) -> JsonObjectType:
     """Parse JSON data and ensure result is a dictionary."""
-    value: JsonValueType = json_loads(__obj)
+    value: JsonValueType = json_loads(obj)
     # Avoid isinstance overhead as we are not interested in dict subclasses
     if type(value) is dict:  # noqa: E721
         return value
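A hedged usage sketch of the helpers above, assuming `homeassistant.util.json` is importable in your checkout; the behaviour follows directly from the code shown:

from homeassistant.util.json import json_loads, json_loads_object

obj = json_loads_object('{"state": "on", "brightness": 128}')
print(obj["brightness"])  # 128

print(json_loads("[1, 2, 3]"))  # [1, 2, 3]

# json_loads_object('[1, 2, 3]') would raise ValueError instead, because the
# helper insists on a dict at the top level.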

View File

@@ -700,7 +700,7 @@ exclude_lines = [
 ]

 [tool.ruff]
-required-version = ">=0.6.8"
+required-version = ">=0.8.0"

 [tool.ruff.lint]
 select = [
@@ -783,7 +783,7 @@ select = [
     "SLOT", # flake8-slots
     "T100", # Trace found: {name} used
     "T20", # flake8-print
-    "TCH", # flake8-type-checking
+    "TC", # flake8-type-checking
     "TID", # Tidy imports
     "TRY", # tryceratops
     "UP", # pyupgrade
@@ -807,7 +807,6 @@ ignore = [
     "PLR0915", # Too many statements ({statements} > {max_statements})
     "PLR2004", # Magic value used in comparison, consider replacing {value} with a constant variable
     "PLW2901", # Outer {outer_kind} variable {name} overwritten by inner {inner_kind} target
-    "PT004", # Fixture {fixture} does not return anything, add leading underscore
     "PT011", # pytest.raises({exception}) is too broad, set the `match` parameter or use a more specific exception
     "PT018", # Assertion should be broken down into multiple parts
     "RUF001", # String contains ambiguous unicode character.
@@ -820,9 +819,9 @@ ignore = [
     "SIM115", # Use context handler for opening files
     # Moving imports into type-checking blocks can mess with pytest.patch()
-    "TCH001", # Move application import {} into a type-checking block
-    "TCH002", # Move third-party import {} into a type-checking block
-    "TCH003", # Move standard library import {} into a type-checking block
+    "TC001", # Move application import {} into a type-checking block
+    "TC002", # Move third-party import {} into a type-checking block
+    "TC003", # Move standard library import {} into a type-checking block
     "TRY003", # Avoid specifying long messages outside the exception class
     "TRY400", # Use `logging.exception` instead of `logging.error`

View File

@@ -1,5 +1,5 @@
 # Automatically generated from .pre-commit-config.yaml by gen_requirements_all.py, do not edit

 codespell==2.3.0
-ruff==0.7.4
+ruff==0.8.0
 yamllint==1.35.1

View File

@@ -350,8 +350,8 @@ def gather_modules() -> dict[str, list[str]] | None:
     gather_requirements_from_manifests(errors, reqs)
     gather_requirements_from_modules(errors, reqs)

-    for key in reqs:
-        reqs[key] = sorted(reqs[key], key=lambda name: (len(name.split(".")), name))
+    for value in reqs.values():
+        value = sorted(value, key=lambda name: (len(name.split(".")), name))

     if errors:
         print("******* ERROR")

View File

@@ -22,7 +22,7 @@ RUN --mount=from=ghcr.io/astral-sh/uv:0.5.0,source=/uv,target=/bin/uv \
     --no-cache \
     -c /usr/src/homeassistant/homeassistant/package_constraints.txt \
     -r /usr/src/homeassistant/requirements.txt \
-    stdlib-list==0.10.0 pipdeptree==2.23.4 tqdm==4.66.5 ruff==0.7.4 \
+    stdlib-list==0.10.0 pipdeptree==2.23.4 tqdm==4.66.5 ruff==0.8.0 \
     PyTurboJPEG==1.7.5 go2rtc-client==0.1.1 ha-ffmpeg==3.2.2 hassil==2.0.2 home-assistant-intents==2024.11.13 mutagen==1.47.0 pymicro-vad==1.0.1 pyspeex-noise==1.0.2

 LABEL "name"="hassfest"

View File

@@ -55,19 +55,19 @@ def generate_and_validate(integrations: dict[str, Integration]) -> str:
     # HomeKit models are matched on starting string, make sure none overlap.
     warned = set()
-    for key in homekit_dict:
+    for key, value in homekit_dict.items():
         if key in warned:
             continue

         # n^2 yoooo
-        for key_2 in homekit_dict:
+        for key_2, value_2 in homekit_dict.items():
             if key == key_2 or key_2 in warned:
                 continue

             if key.startswith(key_2) or key_2.startswith(key):
                 integration.add_error(
                     "zeroconf",
-                    f"Integrations {homekit_dict[key]} and {homekit_dict[key_2]} "
+                    f"Integrations {value} and {value_2} "
                     "have overlapping HomeKit models",
                 )
                 warned.add(key)

View File

@@ -222,8 +222,8 @@ async def test_config_flow(
     state = hass.states.get(f"{template_type}.my_template")
     assert state.state == template_state
-    for key in extra_attrs:
-        assert state.attributes[key] == extra_attrs[key]
+    for key, value in extra_attrs.items():
+        assert state.attributes[key] == value


 @pytest.mark.parametrize(

View File

@@ -182,8 +182,8 @@ def test_exclude_none_values(
     result = exclude_none_values(obj)
     assert result == expected_output

-    for key in expected_output:
-        assert expected_output[key] == obj[key]
+    for key, value in expected_output.items():
+        assert value == obj[key]


 async def test_create_zha_config_remove_unused(

View File

@@ -510,30 +510,31 @@ def aiohttp_client(
     clients = []

     async def go(
-        __param: Application | BaseTestServer,
+        param: Application | BaseTestServer,
+        /,
         *args: Any,
         server_kwargs: dict[str, Any] | None = None,
         **kwargs: Any,
     ) -> TestClient:
-        if isinstance(__param, Callable) and not isinstance(  # type: ignore[arg-type]
-            __param, (Application, BaseTestServer)
+        if isinstance(param, Callable) and not isinstance(  # type: ignore[arg-type]
+            param, (Application, BaseTestServer)
         ):
-            __param = __param(loop, *args, **kwargs)
+            param = param(loop, *args, **kwargs)
             kwargs = {}
         else:
             assert not args, "args should be empty"

         client: TestClient
-        if isinstance(__param, Application):
+        if isinstance(param, Application):
             server_kwargs = server_kwargs or {}
-            server = TestServer(__param, loop=loop, **server_kwargs)
+            server = TestServer(param, loop=loop, **server_kwargs)
             # Registering a view after starting the server should still work.
             server.app._router.freeze = lambda: None
             client = CoalescingClient(server, loop=loop, **kwargs)
-        elif isinstance(__param, BaseTestServer):
-            client = TestClient(__param, loop=loop, **kwargs)
+        elif isinstance(param, BaseTestServer):
+            client = TestClient(param, loop=loop, **kwargs)
         else:
-            raise TypeError(f"Unknown argument type: {type(__param)!r}")
+            raise TypeError(f"Unknown argument type: {type(param)!r}")

         await client.start_server()
         clients.append(client)

View File

@@ -4387,8 +4387,8 @@ async def test_call_later(hass: HomeAssistant) -> None:
     schedule_utctime = dt_util.utcnow()

     @callback
-    def action(__utcnow: datetime):
-        _current_delay = __utcnow.timestamp() - schedule_utctime.timestamp()
+    def action(utcnow: datetime, /):
+        _current_delay = utcnow.timestamp() - schedule_utctime.timestamp()
         future.set_result(delay < _current_delay < (delay + delay_tolerance))

     async_call_later(hass, delay, action)
@@ -4407,8 +4407,8 @@ async def test_async_call_later(hass: HomeAssistant) -> None:
     schedule_utctime = dt_util.utcnow()

     @callback
-    def action(__utcnow: datetime):
-        _current_delay = __utcnow.timestamp() - schedule_utctime.timestamp()
+    def action(utcnow: datetime, /):
+        _current_delay = utcnow.timestamp() - schedule_utctime.timestamp()
         future.set_result(delay < _current_delay < (delay + delay_tolerance))

     remove = async_call_later(hass, delay, action)
@@ -4429,8 +4429,8 @@ async def test_async_call_later_timedelta(hass: HomeAssistant) -> None:
     schedule_utctime = dt_util.utcnow()

     @callback
-    def action(__utcnow: datetime):
-        _current_delay = __utcnow.timestamp() - schedule_utctime.timestamp()
+    def action(utcnow: datetime, /):
+        _current_delay = utcnow.timestamp() - schedule_utctime.timestamp()
         future.set_result(delay < _current_delay < (delay + delay_tolerance))

     remove = async_call_later(hass, timedelta(seconds=delay), action)
@@ -4450,7 +4450,7 @@ async def test_async_call_later_cancel(hass: HomeAssistant) -> None:
     delay_tolerance = 0.1

     @callback
-    def action(__now: datetime):
+    def action(now: datetime, /):
         future.set_result(False)

     remove = async_call_later(hass, delay, action)

View File

@@ -5697,8 +5697,8 @@ async def test_starting_config_flow_on_single_config_entry(
         "comp", context=context, data=user_input
     )

-    for key in expected_result:
-        assert result[key] == expected_result[key]
+    for key, value in expected_result.items():
+        assert result[key] == value


 @pytest.mark.parametrize(
@@ -5778,8 +5778,8 @@ async def test_starting_config_flow_on_single_config_entry_2(
         "comp", context=context, data=user_input
     )

-    for key in expected_result:
-        assert result[key] == expected_result[key]
+    for key, value in expected_result.items():
+        assert result[key] == value


 async def test_avoid_adding_second_config_entry_on_single_config_entry(