Bump ruff to 0.8.0 (#131273)
parent d3f3fdc7ef
commit 9e98e446a2
@@ -1,6 +1,6 @@
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.7.4
+    rev: v0.8.0
     hooks:
      - id: ruff
        args:

@@ -173,17 +173,17 @@ class AlarmControlPanelEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
         # setting the state directly.
         cls.__alarm_legacy_state = True

-    def __setattr__(self, __name: str, __value: Any) -> None:
+    def __setattr__(self, name: str, value: Any, /) -> None:
         """Set attribute.

         Deprecation warning if setting '_attr_state' directly
         unless already reported.
         """
-        if __name == "_attr_state":
+        if name == "_attr_state":
             if self.__alarm_legacy_state_reported is not True:
                 self._report_deprecated_alarm_state_handling()
             self.__alarm_legacy_state_reported = True
-        return super().__setattr__(__name, __value)
+        return super().__setattr__(name, value)

     @callback
     def add_to_platform_start(
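
Several hunks in this commit apply the same mechanical rewrite: parameters spelled with a leading double underscore (the old convention for hinting "positional-only") become true positional-only parameters using PEP 570's bare "/" marker. A minimal sketch of the pattern, using hypothetical classes rather than the Home Assistant entities above:

from typing import Any

class LegacyStyle:
    # Pre-PEP 570 convention: the double underscore only *suggests*
    # that callers should pass these arguments positionally.
    def __setattr__(self, __name: str, __value: Any) -> None:
        super().__setattr__(__name, __value)

class ModernStyle:
    # Everything before the bare "/" is enforced as positional-only,
    # so the parameters can use normal, readable names.
    def __setattr__(self, name: str, value: Any, /) -> None:
        super().__setattr__(name, value)
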
@@ -314,14 +314,14 @@ class ClimateEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
     # once migrated and set the feature flags TURN_ON/TURN_OFF as needed.
     _enable_turn_on_off_backwards_compatibility: bool = True

-    def __getattribute__(self, __name: str) -> Any:
+    def __getattribute__(self, name: str, /) -> Any:
         """Get attribute.

         Modify return of `supported_features` to
         include `_mod_supported_features` if attribute is set.
         """
-        if __name != "supported_features":
-            return super().__getattribute__(__name)
+        if name != "supported_features":
+            return super().__getattribute__(name)

         # Convert the supported features to ClimateEntityFeature.
         # Remove this compatibility shim in 2025.1 or later.
@@ -245,14 +245,14 @@ class FanEntity(ToggleEntity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
     # once migrated and set the feature flags TURN_ON/TURN_OFF as needed.
     _enable_turn_on_off_backwards_compatibility: bool = True

-    def __getattribute__(self, __name: str) -> Any:
+    def __getattribute__(self, name: str, /) -> Any:
         """Get attribute.

         Modify return of `supported_features` to
         include `_mod_supported_features` if attribute is set.
         """
-        if __name != "supported_features":
-            return super().__getattribute__(__name)
+        if name != "supported_features":
+            return super().__getattribute__(name)

         # Convert the supported features to ClimateEntityFeature.
         # Remove this compatibility shim in 2025.1 or later.
@@ -36,7 +36,7 @@ class CecEntity(Entity):
         """Initialize the device."""
         self._device = device
         self._logical_address = logical
-        self.entity_id = "%s.%d" % (DOMAIN, self._logical_address)
+        self.entity_id = f"{DOMAIN}.{self._logical_address}"
         self._set_attr_name()
         self._attr_icon = ICONS_BY_TYPE.get(self._device.type, ICON_UNKNOWN)

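
The hdmi_cec change above swaps percent-formatting for an f-string, the rewrite suggested by ruff's pyupgrade-derived checks (UP031 and friends). A small stand-alone illustration with made-up values:

DOMAIN = "hdmi_cec"
logical_address = 4

# Percent-formatting builds the id from a separate tuple of arguments.
old_entity_id = "%s.%d" % (DOMAIN, logical_address)

# The f-string is equivalent here; the %d conversion is unnecessary
# because formatting an int already yields its decimal representation.
new_entity_id = f"{DOMAIN}.{logical_address}"

assert old_entity_id == new_entity_id == "hdmi_cec.4"
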
@@ -320,15 +320,15 @@ class ShoppingData:
             # Remove the item from mapping after it's appended in the result array.
             del all_items_mapping[item_id]
         # Append the rest of the items
-        for key in all_items_mapping:
+        for value in all_items_mapping.values():
             # All the unchecked items must be passed in the item_ids array,
             # so all items left in the mapping should be checked items.
-            if all_items_mapping[key]["complete"] is False:
+            if value["complete"] is False:
                 raise vol.Invalid(
                     "The item ids array doesn't contain all the unchecked shopping list"
                     " items."
                 )
-            new_items.append(all_items_mapping[key])
+            new_items.append(value)
         self.items = new_items
         self.hass.async_add_executor_job(self.save)
         self._async_notify()
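
This hunk and several later ones (steam_online, gen_requirements_all.py, the zeroconf generator script, and the tests) replace "for key in mapping:" loops that immediately index mapping[key] with direct iteration over .values() or .items(), the rewrite favored by pylint-style dictionary-iteration checks (the diff itself does not name the specific rule). A minimal before/after sketch with a made-up mapping:

stock = {"apples": 3, "pears": 0}

# Indexing by key inside the loop repeats a dictionary lookup on every pass.
for key in stock:
    if stock[key] == 0:
        print(f"{key} is sold out")

# Iterating .items() (or .values() when the key is unused) reads better
# and avoids the redundant lookup.
for name, count in stock.items():
    if count == 0:
        print(f"{name} is sold out")
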
@@ -60,9 +60,9 @@ class SteamDataUpdateCoordinator(
             for player in response["response"]["players"]["player"]
             if player["steamid"] in _ids
         }
-        for k in players:
-            data = self.player_interface.GetSteamLevel(steamid=players[k]["steamid"])
-            players[k]["level"] = data["response"].get("player_level")
+        for value in players.values():
+            data = self.player_interface.GetSteamLevel(steamid=value["steamid"])
+            value["level"] = data["response"].get("player_level")
         return players

     async def _async_update_data(self) -> dict[str, dict[str, str | int]]:

@@ -30,32 +30,30 @@ class SerializationError(HomeAssistantError):
     """Error serializing the data to JSON."""


-def json_loads(__obj: bytes | bytearray | memoryview | str) -> JsonValueType:
+def json_loads(obj: bytes | bytearray | memoryview | str, /) -> JsonValueType:
     """Parse JSON data.

     This adds a workaround for orjson not handling subclasses of str,
     https://github.com/ijl/orjson/issues/445.
     """
     # Avoid isinstance overhead for the common case
-    if type(__obj) not in (bytes, bytearray, memoryview, str) and isinstance(
-        __obj, str
-    ):
-        return orjson.loads(str(__obj))  # type:ignore[no-any-return]
-    return orjson.loads(__obj)  # type:ignore[no-any-return]
+    if type(obj) not in (bytes, bytearray, memoryview, str) and isinstance(obj, str):
+        return orjson.loads(str(obj))  # type:ignore[no-any-return]
+    return orjson.loads(obj)  # type:ignore[no-any-return]


-def json_loads_array(__obj: bytes | bytearray | memoryview | str) -> JsonArrayType:
+def json_loads_array(obj: bytes | bytearray | memoryview | str, /) -> JsonArrayType:
     """Parse JSON data and ensure result is a list."""
-    value: JsonValueType = json_loads(__obj)
+    value: JsonValueType = json_loads(obj)
     # Avoid isinstance overhead as we are not interested in list subclasses
     if type(value) is list:  # noqa: E721
         return value
     raise ValueError(f"Expected JSON to be parsed as a list got {type(value)}")


-def json_loads_object(__obj: bytes | bytearray | memoryview | str) -> JsonObjectType:
+def json_loads_object(obj: bytes | bytearray | memoryview | str, /) -> JsonObjectType:
     """Parse JSON data and ensure result is a dictionary."""
-    value: JsonValueType = json_loads(__obj)
+    value: JsonValueType = json_loads(obj)
     # Avoid isinstance overhead as we are not interested in dict subclasses
     if type(value) is dict:  # noqa: E721
         return value

@@ -700,7 +700,7 @@ exclude_lines = [
 ]

 [tool.ruff]
-required-version = ">=0.6.8"
+required-version = ">=0.8.0"

 [tool.ruff.lint]
 select = [

@@ -783,7 +783,7 @@ select = [
     "SLOT", # flake8-slots
     "T100", # Trace found: {name} used
     "T20", # flake8-print
-    "TCH", # flake8-type-checking
+    "TC", # flake8-type-checking
     "TID", # Tidy imports
     "TRY", # tryceratops
     "UP", # pyupgrade

@@ -807,7 +807,6 @@ ignore = [
     "PLR0915", # Too many statements ({statements} > {max_statements})
     "PLR2004", # Magic value used in comparison, consider replacing {value} with a constant variable
     "PLW2901", # Outer {outer_kind} variable {name} overwritten by inner {inner_kind} target
-    "PT004", # Fixture {fixture} does not return anything, add leading underscore
     "PT011", # pytest.raises({exception}) is too broad, set the `match` parameter or use a more specific exception
     "PT018", # Assertion should be broken down into multiple parts
     "RUF001", # String contains ambiguous unicode character.
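
PT004 is dropped from the ignore list because ruff 0.8.0 removes the rule (it had previously been deprecated), so the entry would no longer have any effect. For context, a hedged sketch of the kind of side-effect-only fixture PT004 used to flag:

import pytest

@pytest.fixture
def enable_debug_logging(caplog: pytest.LogCaptureFixture) -> None:
    # A side-effect-only fixture: it configures state but returns nothing,
    # which is the shape PT004 used to flag (asking for a leading underscore).
    caplog.set_level("DEBUG")
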
@@ -820,9 +819,9 @@ ignore = [
     "SIM115", # Use context handler for opening files

     # Moving imports into type-checking blocks can mess with pytest.patch()
-    "TCH001", # Move application import {} into a type-checking block
-    "TCH002", # Move third-party import {} into a type-checking block
-    "TCH003", # Move standard library import {} into a type-checking block
+    "TC001", # Move application import {} into a type-checking block
+    "TC002", # Move third-party import {} into a type-checking block
+    "TC003", # Move standard library import {} into a type-checking block

     "TRY003", # Avoid specifying long messages outside the exception class
     "TRY400", # Use `logging.exception` instead of `logging.error`
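
The TCH to TC edits in both the select and ignore lists track ruff 0.8.0's renaming of the flake8-type-checking rule codes; the project-specific reason for ignoring TC001-TC003 (imports moved into type-checking blocks interfering with pytest.patch()) is unchanged. For reference, a minimal example of the refactor those rules normally suggest, with hypothetical names:

from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Imported only for annotations, so it is deferred to type-checking time;
    # this is the move that TC001-TC003 (formerly TCH001-TCH003) recommend.
    from collections.abc import Sequence

def total(values: Sequence[int]) -> int:
    return sum(values)
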
@@ -1,5 +1,5 @@
 # Automatically generated from .pre-commit-config.yaml by gen_requirements_all.py, do not edit

 codespell==2.3.0
-ruff==0.7.4
+ruff==0.8.0
 yamllint==1.35.1

@@ -350,8 +350,8 @@ def gather_modules() -> dict[str, list[str]] | None:
     gather_requirements_from_manifests(errors, reqs)
     gather_requirements_from_modules(errors, reqs)

-    for key in reqs:
-        reqs[key] = sorted(reqs[key], key=lambda name: (len(name.split(".")), name))
+    for value in reqs.values():
+        value = sorted(value, key=lambda name: (len(name.split(".")), name))

     if errors:
         print("******* ERROR")

@@ -22,7 +22,7 @@ RUN --mount=from=ghcr.io/astral-sh/uv:0.5.0,source=/uv,target=/bin/uv \
     --no-cache \
     -c /usr/src/homeassistant/homeassistant/package_constraints.txt \
     -r /usr/src/homeassistant/requirements.txt \
-    stdlib-list==0.10.0 pipdeptree==2.23.4 tqdm==4.66.5 ruff==0.7.4 \
+    stdlib-list==0.10.0 pipdeptree==2.23.4 tqdm==4.66.5 ruff==0.8.0 \
     PyTurboJPEG==1.7.5 go2rtc-client==0.1.1 ha-ffmpeg==3.2.2 hassil==2.0.2 home-assistant-intents==2024.11.13 mutagen==1.47.0 pymicro-vad==1.0.1 pyspeex-noise==1.0.2

 LABEL "name"="hassfest"

@@ -55,19 +55,19 @@ def generate_and_validate(integrations: dict[str, Integration]) -> str:

     # HomeKit models are matched on starting string, make sure none overlap.
     warned = set()
-    for key in homekit_dict:
+    for key, value in homekit_dict.items():
         if key in warned:
             continue

         # n^2 yoooo
-        for key_2 in homekit_dict:
+        for key_2, value_2 in homekit_dict.items():
             if key == key_2 or key_2 in warned:
                 continue

             if key.startswith(key_2) or key_2.startswith(key):
                 integration.add_error(
                     "zeroconf",
-                    f"Integrations {homekit_dict[key]} and {homekit_dict[key_2]} "
+                    f"Integrations {value} and {value_2} "
                     "have overlapping HomeKit models",
                 )
                 warned.add(key)

@@ -222,8 +222,8 @@ async def test_config_flow(

     state = hass.states.get(f"{template_type}.my_template")
     assert state.state == template_state
-    for key in extra_attrs:
-        assert state.attributes[key] == extra_attrs[key]
+    for key, value in extra_attrs.items():
+        assert state.attributes[key] == value


 @pytest.mark.parametrize(

@@ -182,8 +182,8 @@ def test_exclude_none_values(
     result = exclude_none_values(obj)
     assert result == expected_output

-    for key in expected_output:
-        assert expected_output[key] == obj[key]
+    for key, value in expected_output.items():
+        assert value == obj[key]


 async def test_create_zha_config_remove_unused(

@@ -510,30 +510,31 @@ def aiohttp_client(
     clients = []

     async def go(
-        __param: Application | BaseTestServer,
+        param: Application | BaseTestServer,
+        /,
         *args: Any,
         server_kwargs: dict[str, Any] | None = None,
         **kwargs: Any,
     ) -> TestClient:
-        if isinstance(__param, Callable) and not isinstance(  # type: ignore[arg-type]
-            __param, (Application, BaseTestServer)
+        if isinstance(param, Callable) and not isinstance(  # type: ignore[arg-type]
+            param, (Application, BaseTestServer)
         ):
-            __param = __param(loop, *args, **kwargs)
+            param = param(loop, *args, **kwargs)
             kwargs = {}
         else:
             assert not args, "args should be empty"

         client: TestClient
-        if isinstance(__param, Application):
+        if isinstance(param, Application):
             server_kwargs = server_kwargs or {}
-            server = TestServer(__param, loop=loop, **server_kwargs)
+            server = TestServer(param, loop=loop, **server_kwargs)
             # Registering a view after starting the server should still work.
             server.app._router.freeze = lambda: None
             client = CoalescingClient(server, loop=loop, **kwargs)
-        elif isinstance(__param, BaseTestServer):
-            client = TestClient(__param, loop=loop, **kwargs)
+        elif isinstance(param, BaseTestServer):
+            client = TestClient(param, loop=loop, **kwargs)
         else:
-            raise TypeError(f"Unknown argument type: {type(__param)!r}")
+            raise TypeError(f"Unknown argument type: {type(param)!r}")

         await client.start_server()
         clients.append(client)

@@ -4387,8 +4387,8 @@ async def test_call_later(hass: HomeAssistant) -> None:
     schedule_utctime = dt_util.utcnow()

     @callback
-    def action(__utcnow: datetime):
-        _current_delay = __utcnow.timestamp() - schedule_utctime.timestamp()
+    def action(utcnow: datetime, /):
+        _current_delay = utcnow.timestamp() - schedule_utctime.timestamp()
         future.set_result(delay < _current_delay < (delay + delay_tolerance))

     async_call_later(hass, delay, action)
@@ -4407,8 +4407,8 @@ async def test_async_call_later(hass: HomeAssistant) -> None:
     schedule_utctime = dt_util.utcnow()

     @callback
-    def action(__utcnow: datetime):
-        _current_delay = __utcnow.timestamp() - schedule_utctime.timestamp()
+    def action(utcnow: datetime, /):
+        _current_delay = utcnow.timestamp() - schedule_utctime.timestamp()
         future.set_result(delay < _current_delay < (delay + delay_tolerance))

     remove = async_call_later(hass, delay, action)
@@ -4429,8 +4429,8 @@ async def test_async_call_later_timedelta(hass: HomeAssistant) -> None:
     schedule_utctime = dt_util.utcnow()

     @callback
-    def action(__utcnow: datetime):
-        _current_delay = __utcnow.timestamp() - schedule_utctime.timestamp()
+    def action(utcnow: datetime, /):
+        _current_delay = utcnow.timestamp() - schedule_utctime.timestamp()
        future.set_result(delay < _current_delay < (delay + delay_tolerance))

     remove = async_call_later(hass, timedelta(seconds=delay), action)
@@ -4450,7 +4450,7 @@ async def test_async_call_later_cancel(hass: HomeAssistant) -> None:
     delay_tolerance = 0.1

     @callback
-    def action(__now: datetime):
+    def action(now: datetime, /):
         future.set_result(False)

     remove = async_call_later(hass, delay, action)

@@ -5697,8 +5697,8 @@ async def test_starting_config_flow_on_single_config_entry(
         "comp", context=context, data=user_input
     )

-    for key in expected_result:
-        assert result[key] == expected_result[key]
+    for key, value in expected_result.items():
+        assert result[key] == value


 @pytest.mark.parametrize(
@@ -5778,8 +5778,8 @@ async def test_starting_config_flow_on_single_config_entry_2(
         "comp", context=context, data=user_input
     )

-    for key in expected_result:
-        assert result[key] == expected_result[key]
+    for key, value in expected_result.items():
+        assert result[key] == value


 async def test_avoid_adding_second_config_entry_on_single_config_entry(