Switch formatting from black to ruff-format (#102893)

Co-authored-by: Franck Nijhof <git@frenck.dev>
pull/104592/head
Aarni Koskela 2023-11-27 15:38:59 +02:00 committed by GitHub
parent cf9b0e804f
commit 706add4a57
161 changed files with 530 additions and 607 deletions

View File

@@ -10,7 +10,7 @@
   "customizations": {
     "vscode": {
       "extensions": [
-        "ms-python.black-formatter",
+        "charliermarsh.ruff",
         "ms-python.pylint",
         "ms-python.vscode-pylance",
         "visualstudioexptteam.vscodeintellicode",
@@ -39,7 +39,10 @@
         "!include_dir_list scalar",
         "!include_dir_merge_list scalar",
         "!include_dir_merge_named scalar"
-      ]
+      ],
+      "[python]": {
+        "editor.defaultFormatter": "charliermarsh.ruff"
+      }
     }
   }
 }

View File

@@ -60,7 +60,7 @@
 - [ ] There is no commented out code in this PR.
 - [ ] I have followed the [development checklist][dev-checklist]
 - [ ] I have followed the [perfect PR recommendations][perfect-pr]
-- [ ] The code has been formatted using Black (`black --fast homeassistant tests`)
+- [ ] The code has been formatted using Ruff (`ruff format homeassistant tests`)
 - [ ] Tests have been added to verify that the new code works.

 If user exposed functionality or configuration variables are added/changed:

View File

@@ -36,7 +36,6 @@ env:
   CACHE_VERSION: 5
   PIP_CACHE_VERSION: 4
   MYPY_CACHE_VERSION: 6
-  BLACK_CACHE_VERSION: 1
   HA_SHORT_VERSION: "2023.12"
   DEFAULT_PYTHON: "3.11"
   ALL_PYTHON_VERSIONS: "['3.11', '3.12']"
@@ -58,7 +57,6 @@ env:
   POSTGRESQL_VERSIONS: "['postgres:12.14','postgres:15.2']"
   PRE_COMMIT_CACHE: ~/.cache/pre-commit
   PIP_CACHE: /tmp/pip-cache
-  BLACK_CACHE: /tmp/black-cache
   SQLALCHEMY_WARN_20: 1
   PYTHONASYNCIODEBUG: 1
   HASS_CI: 1
@@ -261,8 +259,8 @@ jobs:
           . venv/bin/activate
           pre-commit install-hooks

-  lint-black:
-    name: Check black
+  lint-ruff-format:
+    name: Check ruff-format
     runs-on: ubuntu-22.04
     needs:
       - info
@@ -276,13 +274,6 @@ jobs:
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
-      - name: Generate partial black restore key
-        id: generate-black-key
-        run: |
-          black_version=$(cat requirements_test_pre_commit.txt | grep black | cut -d '=' -f 3)
-          echo "version=$black_version" >> $GITHUB_OUTPUT
-          echo "key=black-${{ env.BLACK_CACHE_VERSION }}-$black_version-${{
-            env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
       - name: Restore base Python virtual environment
         id: cache-venv
         uses: actions/cache/restore@v3.3.2
@@ -301,33 +292,17 @@ jobs:
          key: >-
            ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
            needs.info.outputs.pre-commit_cache_key }}
-      - name: Restore black cache
-        uses: actions/cache@v3.3.2
-        with:
-          path: ${{ env.BLACK_CACHE }}
-          key: >-
-            ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
-            steps.generate-black-key.outputs.key }}
-          restore-keys: |
-            ${{ runner.os }}-${{ steps.python.outputs.python-version }}-black-${{
-            env.BLACK_CACHE_VERSION }}-${{ steps.generate-black-key.outputs.version }}-${{
-            env.HA_SHORT_VERSION }}-
-      - name: Run black (fully)
-        if: needs.info.outputs.test_full_suite == 'true'
-        env:
-          BLACK_CACHE_DIR: ${{ env.BLACK_CACHE }}
+      - name: Run ruff-format (fully)
         run: |
           . venv/bin/activate
-          pre-commit run --hook-stage manual black --all-files --show-diff-on-failure
-      - name: Run black (partially)
+          pre-commit run --hook-stage manual ruff-format --all-files --show-diff-on-failure
+      - name: Run ruff-format (partially)
         if: needs.info.outputs.test_full_suite == 'false'
         shell: bash
-        env:
-          BLACK_CACHE_DIR: ${{ env.BLACK_CACHE }}
         run: |
           . venv/bin/activate
           shopt -s globstar
-          pre-commit run --hook-stage manual black --files {homeassistant,tests}/components/${{ needs.info.outputs.integrations_glob }}/{*,**/*} --show-diff-on-failure
+          pre-commit run --hook-stage manual ruff-format --files {homeassistant,tests}/components/${{ needs.info.outputs.integrations_glob }}/{*,**/*} --show-diff-on-failure

   lint-ruff:
     name: Check ruff

View File

@@ -1,16 +1,11 @@
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.1.1
+    rev: v0.1.6
     hooks:
       - id: ruff
         args:
           - --fix
-  - repo: https://github.com/psf/black-pre-commit-mirror
-    rev: 23.11.0
-    hooks:
-      - id: black
-        args:
-          - --quiet
+      - id: ruff-format
         files: ^((homeassistant|pylint|script|tests)/.+)?[^/]+\.py$
   - repo: https://github.com/codespell-project/codespell
     rev: v2.2.2
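Note: with the `ruff-format` hook swapped in above, the formatter can be exercised locally the same way the CI workflow in this commit does, assuming pre-commit is installed in the activated virtualenv:

    pre-commit run --hook-stage manual ruff-format --all-files --show-diff-on-failure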

View File

@@ -1,3 +1,7 @@
 {
-  "recommendations": ["esbenp.prettier-vscode", "ms-python.python"]
+  "recommendations": [
+    "charliermarsh.ruff",
+    "esbenp.prettier-vscode",
+    "ms-python.python"
+  ]
 }

View File

@@ -5,8 +5,7 @@ SHELL ["/bin/bash", "-o", "pipefail", "-c"]
 # Uninstall pre-installed formatting and linting tools
 # They would conflict with our pinned versions
 RUN \
-    pipx uninstall black \
-    && pipx uninstall pydocstyle \
+    pipx uninstall pydocstyle \
     && pipx uninstall pycodestyle \
     && pipx uninstall mypy \
     && pipx uninstall pylint

View File

@@ -5,9 +5,7 @@ from collections.abc import Mapping
 ValueType = (
     # Example: entities.all = { read: true, control: true }
-    Mapping[str, bool]
-    | bool
-    | None
+    Mapping[str, bool] | bool | None
 )

 # Example: entities.domains = { light: … }

View File

@@ -1315,9 +1315,9 @@ class PipelineInput:
         if stt_audio_buffer:
             # Send audio in the buffer first to speech-to-text, then move on to stt_stream.
             # This is basically an async itertools.chain.
-            async def buffer_then_audio_stream() -> AsyncGenerator[
-                ProcessedAudioChunk, None
-            ]:
+            async def buffer_then_audio_stream() -> (
+                AsyncGenerator[ProcessedAudioChunk, None]
+            ):
                 # Buffered audio
                 for chunk in stt_audio_buffer:
                     yield chunk

View File

@@ -417,8 +417,7 @@ async def websocket_device_capture(
     # single sample (16 bits) per queue item.
     max_queue_items = (
         # +1 for None to signal end
-        int(math.ceil(timeout_seconds * CAPTURE_RATE))
-        + 1
+        int(math.ceil(timeout_seconds * CAPTURE_RATE)) + 1
     )

     audio_queue = DeviceAudioQueue(queue=asyncio.Queue(maxsize=max_queue_items))

View File

@@ -44,7 +44,8 @@ SELECT_TYPES: dict[str, BMWSelectEntityDescription] = {
         translation_key="ac_limit",
         is_available=lambda v: v.is_remote_set_ac_limit_enabled,
         dynamic_options=lambda v: [
-            str(lim) for lim in v.charging_profile.ac_available_limits  # type: ignore[union-attr]
+            str(lim)
+            for lim in v.charging_profile.ac_available_limits  # type: ignore[union-attr]
         ],
         current_option=lambda v: str(v.charging_profile.ac_current_limit),  # type: ignore[union-attr]
         remote_service=lambda v, o: v.remote_services.trigger_charging_settings_update(

View File

@@ -140,7 +140,7 @@ def _ws_handle_cloud_errors(
     handler: Callable[
         [HomeAssistant, websocket_api.ActiveConnection, dict[str, Any]],
         Coroutine[None, None, None],
-    ]
+    ],
 ) -> Callable[
     [HomeAssistant, websocket_api.ActiveConnection, dict[str, Any]],
     Coroutine[None, None, None],
@@ -362,8 +362,11 @@ def _require_cloud_login(
     handler: Callable[
         [HomeAssistant, websocket_api.ActiveConnection, dict[str, Any]],
         None,
-    ]
-) -> Callable[[HomeAssistant, websocket_api.ActiveConnection, dict[str, Any]], None,]:
+    ],
+) -> Callable[
+    [HomeAssistant, websocket_api.ActiveConnection, dict[str, Any]],
+    None,
+]:
     """Websocket decorator that requires cloud to be logged in."""

     @wraps(handler)

View File

@@ -129,9 +129,8 @@ class DeconzDevice(DeconzBase[_DeviceT], Entity):
         if self.gateway.ignore_state_updates:
             return

-        if (
-            self._update_keys is not None
-            and not self._device.changed_keys.intersection(self._update_keys)
+        if self._update_keys is not None and not self._device.changed_keys.intersection(
+            self._update_keys
         ):
             return

View File

@@ -63,7 +63,8 @@ async def async_setup_entry(  # noqa: C901
         )
         await device.async_connect(session_instance=async_client)
         device.password = entry.data.get(
-            CONF_PASSWORD, ""  # This key was added in HA Core 2022.6
+            CONF_PASSWORD,
+            "",  # This key was added in HA Core 2022.6
         )
     except DeviceNotFound as err:
         raise ConfigEntryNotReady(

View File

@@ -453,10 +453,9 @@ class DlnaDmrEntity(MediaPlayerEntity):
         for state_variable in state_variables:
             # Force a state refresh when player begins or pauses playback
             # to update the position info.
-            if (
-                state_variable.name == "TransportState"
-                and state_variable.value
-                in (TransportState.PLAYING, TransportState.PAUSED_PLAYBACK)
+            if state_variable.name == "TransportState" and state_variable.value in (
+                TransportState.PLAYING,
+                TransportState.PAUSED_PLAYBACK,
             ):
                 force_refresh = True

View File

@@ -441,9 +441,7 @@ async def async_setup_entry(
             description,
             entry,
             telegram,
-            *device_class_and_uom(
-                telegram, description
-            ),  # type: ignore[arg-type]
+            *device_class_and_uom(telegram, description),  # type: ignore[arg-type]
         )
         for description in all_sensors
         if (

View File

@@ -18,13 +18,11 @@ from .const import DOMAIN

 _LOGGER = logging.getLogger(__name__)

-_COMMAND_BY_MOTION_STATUS = (
-    {  # Maps the stop command to use for every cover motion status
-        CoverStatus.DOWN: CoverCommand.DOWN,
-        CoverStatus.UP: CoverCommand.UP,
-        CoverStatus.IDLE: None,
-    }
-)
+_COMMAND_BY_MOTION_STATUS = {  # Maps the stop command to use for every cover motion status
+    CoverStatus.DOWN: CoverCommand.DOWN,
+    CoverStatus.UP: CoverCommand.UP,
+    CoverStatus.IDLE: None,
+}


 async def async_setup_entry(

View File

@@ -14,9 +14,7 @@ class EsphomeEnumMapper(Generic[_EnumT, _ValT]):
     def __init__(self, mapping: dict[_EnumT, _ValT]) -> None:
         """Construct a EsphomeEnumMapper."""
         # Add none mapping
-        augmented_mapping: dict[
-            _EnumT | None, _ValT | None
-        ] = mapping  # type: ignore[assignment]
+        augmented_mapping: dict[_EnumT | None, _ValT | None] = mapping  # type: ignore[assignment]
         augmented_mapping[None] = None

         self._mapping = augmented_mapping

View File

@@ -117,7 +117,8 @@ class EsphomeFan(EsphomeEntity[FanInfo, FanState], FanEntity):
         """Return the current speed percentage."""
         if not self._supports_speed_levels:
             return ordered_list_item_to_percentage(
-                ORDERED_NAMED_FAN_SPEEDS, self._state.speed  # type: ignore[misc]
+                ORDERED_NAMED_FAN_SPEEDS,
+                self._state.speed,  # type: ignore[misc]
             )

         return ranged_value_to_percentage(

View File

@@ -124,10 +124,13 @@ def convert_dict(dictionary: dict[str, Any]) -> dict[str, Any]:
     def convert_key(key: str) -> str:
         """Convert a string to snake_case."""
         string = re.sub(r"[\-\.\s]", "_", str(key))
-        return (string[0]).lower() + re.sub(
-            r"[A-Z]",
-            lambda matched: f"_{matched.group(0).lower()}",  # type:ignore[str-bytes-safe]
-            string[1:],
+        return (
+            (string[0]).lower()
+            + re.sub(
+                r"[A-Z]",
+                lambda matched: f"_{matched.group(0).lower()}",  # type:ignore[str-bytes-safe]
+                string[1:],
+            )
         )

     return {

View File

@@ -79,12 +79,12 @@ _ICONS: dict[SensorKind, str] = {
 class GoodweSensorEntityDescription(SensorEntityDescription):
     """Class describing Goodwe sensor entities."""

-    value: Callable[
-        [GoodweUpdateCoordinator, str], Any
-    ] = lambda coordinator, sensor: coordinator.sensor_value(sensor)
-    available: Callable[
-        [GoodweUpdateCoordinator], bool
-    ] = lambda coordinator: coordinator.last_update_success
+    value: Callable[[GoodweUpdateCoordinator, str], Any] = (
+        lambda coordinator, sensor: coordinator.sensor_value(sensor)
+    )
+    available: Callable[[GoodweUpdateCoordinator], bool] = (
+        lambda coordinator: coordinator.last_update_success
+    )


 _DESCRIPTIONS: dict[str, GoodweSensorEntityDescription] = {

View File

@@ -59,7 +59,11 @@ LOCAL_SDK_MIN_VERSION = AwesomeVersion("2.1.5")
 @callback
 def _get_registry_entries(
     hass: HomeAssistant, entity_id: str
-) -> tuple[er.RegistryEntry | None, dr.DeviceEntry | None, ar.AreaEntry | None,]:
+) -> tuple[
+    er.RegistryEntry | None,
+    dr.DeviceEntry | None,
+    ar.AreaEntry | None,
+]:
     """Get registry entries."""
     ent_reg = er.async_get(hass)
     dev_reg = dr.async_get(hass)

View File

@@ -93,7 +93,8 @@ class GoogleTaskTodoListEntity(
             summary=item["title"],
             uid=item["id"],
             status=TODO_STATUS_MAP.get(
-                item.get("status"), TodoItemStatus.NEEDS_ACTION  # type: ignore[arg-type]
+                item.get("status"),  # type: ignore[arg-type]
+                TodoItemStatus.NEEDS_ACTION,
             ),
         )
         for item in _order_tasks(self.coordinator.data)

View File

@@ -195,9 +195,7 @@ def setup(hass: HomeAssistant, base_config: ConfigType) -> bool:  # noqa: C901
     loop = (
         # Create own thread if more than 1 CPU
-        hass.loop
-        if multiprocessing.cpu_count() < 2
-        else None
+        hass.loop if multiprocessing.cpu_count() < 2 else None
     )
     host = base_config[DOMAIN].get(CONF_HOST)
     display_name = base_config[DOMAIN].get(CONF_DISPLAY_NAME, DEFAULT_DISPLAY_NAME)

View File

@@ -124,12 +124,15 @@ class Fan(HomeAccessory):
                 ),
             )

+            setter_callback = (
+                lambda value, preset_mode=preset_mode: self.set_preset_mode(
+                    value, preset_mode
+                )
+            )
             self.preset_mode_chars[preset_mode] = preset_serv.configure_char(
                 CHAR_ON,
                 value=False,
-                setter_callback=lambda value, preset_mode=preset_mode: self.set_preset_mode(
-                    value, preset_mode
-                ),
+                setter_callback=setter_callback,
             )

         if CHAR_SWING_MODE in self.chars:

View File

@@ -116,5 +116,6 @@ class PowerViewSelect(ShadeEntity, SelectEntity):
     async def async_select_option(self, option: str) -> None:
         """Change the selected option."""
         await self.entity_description.select_fn(self._shade, option)
-        await self._shade.refresh()  # force update data to ensure new info is in coordinator
+        # force update data to ensure new info is in coordinator
+        await self._shade.refresh()
         self.async_write_ha_state()

View File

@@ -66,8 +66,6 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
         coordinator: ImapPushDataUpdateCoordinator | ImapPollingDataUpdateCoordinator = hass.data[
             DOMAIN
-        ].pop(
-            entry.entry_id
-        )
+        ].pop(entry.entry_id)
         await coordinator.shutdown()
     return unload_ok

View File

@@ -259,7 +259,8 @@ class KrakenSensor(
             return
         try:
             self._attr_native_value = self.entity_description.value_fn(
-                self.coordinator, self.tracked_asset_pair_wsname  # type: ignore[arg-type]
+                self.coordinator,  # type: ignore[arg-type]
+                self.tracked_asset_pair_wsname,
             )
             self._received_data_at_least_once = True
         except KeyError:

View File

@@ -316,7 +316,9 @@ class HeatMeterSensor(
         """Set up the sensor with the initial values."""
         super().__init__(coordinator)
         self.key = description.key
-        self._attr_unique_id = f"{coordinator.config_entry.data['device_number']}_{description.key}"  # type: ignore[union-attr]
+        self._attr_unique_id = (
+            f"{coordinator.config_entry.data['device_number']}_{description.key}"  # type: ignore[union-attr]
+        )
         self._attr_name = f"Heat Meter {description.name}"
         self.entity_description = description
         self._attr_device_info = device

View File

@@ -118,7 +118,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     push_coordinator = LookinPushCoordinator(entry.title)

     if lookin_device.model >= 2:
-        meteo_coordinator = LookinDataUpdateCoordinator[MeteoSensor](
+        coordinator_class = LookinDataUpdateCoordinator[MeteoSensor]
+        meteo_coordinator = coordinator_class(
             hass,
             push_coordinator,
             name=entry.title,

View File

@@ -348,7 +348,10 @@ class MatrixBot:
             self._access_tokens[self._mx_id] = token

         await self.hass.async_add_executor_job(
-            save_json, self._session_filepath, self._access_tokens, True  # private=True
+            save_json,
+            self._session_filepath,
+            self._access_tokens,
+            True,  # private=True
         )

     async def _login(self) -> None:

View File

@@ -104,9 +104,11 @@ class MatterEventEntity(MatterEntity, EventEntity):
         """Call when Node attribute(s) changed."""

     @callback
-    def _on_matter_node_event(
-        self, event: EventType, data: MatterNodeEvent
-    ) -> None:  # noqa: F821
+    def _on_matter_node_event(  # noqa: F821
+        self,
+        event: EventType,
+        data: MatterNodeEvent,
+    ) -> None:
         """Call on NodeEvent."""
         if data.endpoint_id != self._endpoint.endpoint_id:
             return

View File

@@ -1137,8 +1137,7 @@ class MediaPlayerImageView(HomeAssistantView):
     extra_urls = [
         # Need to modify the default regex for media_content_id as it may
         # include arbitrary characters including '/','{', or '}'
-        url
-        + "/browse_media/{media_content_type}/{media_content_id:.+}",
+        url + "/browse_media/{media_content_type}/{media_content_id:.+}",
     ]

     def __init__(self, component: EntityComponent[MediaPlayerEntity]) -> None:

View File

@@ -470,9 +470,10 @@ class MqttTemperatureControlEntity(MqttEntity, ABC):
         except ValueError:
             _LOGGER.error("Could not parse %s from %s", template_name, payload)

-    def prepare_subscribe_topics(
-        self, topics: dict[str, dict[str, Any]]
-    ) -> None:  # noqa: C901
+    def prepare_subscribe_topics(  # noqa: C901
+        self,
+        topics: dict[str, dict[str, Any]],
+    ) -> None:
         """(Re)Subscribe to topics."""

         @callback

View File

@@ -63,9 +63,8 @@ async def async_wait_for_mqtt_client(hass: HomeAssistant) -> bool:

     state_reached_future: asyncio.Future[bool]
     if DATA_MQTT_AVAILABLE not in hass.data:
-        hass.data[
-            DATA_MQTT_AVAILABLE
-        ] = state_reached_future = hass.loop.create_future()
+        state_reached_future = hass.loop.create_future()
+        hass.data[DATA_MQTT_AVAILABLE] = state_reached_future
     else:
         state_reached_future = hass.data[DATA_MQTT_AVAILABLE]

     if state_reached_future.done():

View File

@@ -34,9 +34,9 @@ UNIT_OF_LOAD: Final[str] = "load"
 class NextcloudSensorEntityDescription(SensorEntityDescription):
     """Describes Nextcloud sensor entity."""

-    value_fn: Callable[
-        [str | int | float], str | int | float | datetime
-    ] = lambda value: value
+    value_fn: Callable[[str | int | float], str | int | float | datetime] = (
+        lambda value: value
+    )


 SENSORS: Final[list[NextcloudSensorEntityDescription]] = [

View File

@@ -32,8 +32,7 @@ class ONVIFBaseEntity(Entity):
         See: https://github.com/home-assistant/core/issues/35883
         """
         return (
-            self.device.info.mac
-            or self.device.info.serial_number  # type:ignore[return-value]
+            self.device.info.mac or self.device.info.serial_number  # type:ignore[return-value]
         )

     @property

View File

@@ -245,12 +245,13 @@ class HitachiAirToAirHeatPumpHLRRWIFI(OverkizEntity, ClimateEntity):
             MODE_CHANGE_STATE,
             OverkizCommandParam.AUTO,
         ).lower()  # Overkiz can return states that have uppercase characters which are not accepted back as commands

-        if hvac_mode.replace(
-            " ", ""
-        ) in [  # Overkiz can return states like 'auto cooling' or 'autoHeating' that are not valid commands and need to be converted to 'auto'
-            OverkizCommandParam.AUTOCOOLING,
-            OverkizCommandParam.AUTOHEATING,
-        ]:
+        if (
+            hvac_mode.replace(" ", "")
+            in [  # Overkiz can return states like 'auto cooling' or 'autoHeating' that are not valid commands and need to be converted to 'auto'
+                OverkizCommandParam.AUTOCOOLING,
+                OverkizCommandParam.AUTOHEATING,
+            ]
+        ):
             hvac_mode = OverkizCommandParam.AUTO

         swing_mode = self._control_backfill(

View File

@@ -83,13 +83,17 @@ SENSOR_DESCRIPTIONS = (
         native_unit_of_measurement=UnitOfTime.SECONDS,
         entity_registry_enabled_default=False,
         entity_category=EntityCategory.DIAGNOSTIC,
-        value_fn=lambda hass, service_info: bluetooth.async_get_learned_advertising_interval(
-            hass, service_info.address
-        )
-        or bluetooth.async_get_fallback_availability_interval(
-            hass, service_info.address
-        )
-        or bluetooth.FALLBACK_MAXIMUM_STALE_ADVERTISEMENT_SECONDS,
+        value_fn=(
+            lambda hass, service_info: (
+                bluetooth.async_get_learned_advertising_interval(
+                    hass, service_info.address
+                )
+                or bluetooth.async_get_fallback_availability_interval(
+                    hass, service_info.address
+                )
+                or bluetooth.FALLBACK_MAXIMUM_STALE_ADVERTISEMENT_SECONDS
+            )
+        ),
         suggested_display_precision=1,
     ),
 )

View File

@@ -101,9 +101,8 @@ def _validate_table_schema_has_correct_collation(
     collate = (
         dialect_kwargs.get("mysql_collate")
-        or dialect_kwargs.get(
-            "mariadb_collate"
-        )  # pylint: disable-next=protected-access
+        or dialect_kwargs.get("mariadb_collate")
+        # pylint: disable-next=protected-access
         or connection.dialect._fetch_setting(connection, "collation_server")  # type: ignore[attr-defined]
     )
     if collate and collate != "utf8mb4_unicode_ci":

View File

@@ -176,13 +176,17 @@ class NativeLargeBinary(LargeBinary):

 # For MariaDB and MySQL we can use an unsigned integer type since it will fit 2**32
 # for sqlite and postgresql we use a bigint
 UINT_32_TYPE = BigInteger().with_variant(
-    mysql.INTEGER(unsigned=True), "mysql", "mariadb"  # type: ignore[no-untyped-call]
+    mysql.INTEGER(unsigned=True),  # type: ignore[no-untyped-call]
+    "mysql",
+    "mariadb",
 )
 JSON_VARIANT_CAST = Text().with_variant(
-    postgresql.JSON(none_as_null=True), "postgresql"  # type: ignore[no-untyped-call]
+    postgresql.JSON(none_as_null=True),  # type: ignore[no-untyped-call]
+    "postgresql",
 )
 JSONB_VARIANT_CAST = Text().with_variant(
-    postgresql.JSONB(none_as_null=True), "postgresql"  # type: ignore[no-untyped-call]
+    postgresql.JSONB(none_as_null=True),  # type: ignore[no-untyped-call]
+    "postgresql",
 )
 DATETIME_TYPE = (
     DateTime(timezone=True)

View File

@@ -244,7 +244,8 @@ class Filters:
             ),
             # Needs https://github.com/bdraco/home-assistant/commit/bba91945006a46f3a01870008eb048e4f9cbb1ef
             self._generate_filter_for_columns(
-                (ENTITY_ID_IN_EVENT, OLD_ENTITY_ID_IN_EVENT), _encoder  # type: ignore[arg-type]
+                (ENTITY_ID_IN_EVENT, OLD_ENTITY_ID_IN_EVENT),  # type: ignore[arg-type]
+                _encoder,
             ).self_group(),
         )

View File

@@ -532,7 +532,9 @@ def _update_states_table_with_foreign_key_options(
     states_key_constraints = Base.metadata.tables[TABLE_STATES].foreign_key_constraints
     old_states_table = Table(  # noqa: F841
-        TABLE_STATES, MetaData(), *(alter["old_fk"] for alter in alters)  # type: ignore[arg-type]
+        TABLE_STATES,
+        MetaData(),
+        *(alter["old_fk"] for alter in alters),  # type: ignore[arg-type]
     )

     for alter in alters:

View File

@@ -89,9 +89,9 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b
         async with asyncio.timeout(host.api.timeout * (RETRY_ATTEMPTS + 2)):
             await host.renew()

-    async def async_check_firmware_update() -> str | Literal[
-        False
-    ] | NewSoftwareVersion:
+    async def async_check_firmware_update() -> (
+        str | Literal[False] | NewSoftwareVersion
+    ):
         """Check for firmware updates."""
         if not host.api.supported(None, "update"):
             return False

View File

@@ -566,10 +566,9 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
         ports = await self.hass.async_add_executor_job(serial.tools.list_ports.comports)
         list_of_ports = {}
         for port in ports:
-            list_of_ports[
-                port.device
-            ] = f"{port}, s/n: {port.serial_number or 'n/a'}" + (
-                f" - {port.manufacturer}" if port.manufacturer else ""
+            list_of_ports[port.device] = (
+                f"{port}, s/n: {port.serial_number or 'n/a'}"
+                + (f" - {port.manufacturer}" if port.manufacturer else "")
             )
         list_of_ports[CONF_MANUAL_PATH] = CONF_MANUAL_PATH

View File

@@ -280,9 +280,9 @@ class SonosMediaPlayerEntity(SonosEntity, MediaPlayerEntity):
     async def _async_fallback_poll(self) -> None:
         """Retrieve latest state by polling."""
-        await self.hass.data[DATA_SONOS].favorites[
-            self.speaker.household_id
-        ].async_poll()
+        await (
+            self.hass.data[DATA_SONOS].favorites[self.speaker.household_id].async_poll()
+        )
         await self.hass.async_add_executor_job(self._update)

     def _update(self) -> None:

View File

@@ -78,7 +78,9 @@ class RecorderOutput(StreamOutput):
         def write_segment(segment: Segment) -> None:
             """Write a segment to output."""
+            # fmt: off
             nonlocal output, output_v, output_a, last_stream_id, running_duration, last_sequence
+            # fmt: on
             # Because the stream_worker is in a different thread from the record service,
             # the lookback segments may still have some overlap with the recorder segments
             if segment.sequence <= last_sequence:
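Note: `# fmt: off` / `# fmt: on` are formatter suppression markers that ruff-format honours just as black did, so the long `nonlocal` line above (and the hand-aligned Vincenty expression later in this commit) keeps its existing layout. A minimal illustration of the directive pair, not taken from this commit:

    # fmt: off
    IDENTITY = [
        1, 0,
        0, 1,
    ]  # hand-aligned; the formatter leaves this region untouched
    # fmt: on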

View File

@@ -153,7 +153,9 @@ class SynoDSMCamera(SynologyDSMBaseEntity[SynologyDSMCameraUpdateCoordinator], C
         if not self.available:
             return None
         try:
-            return await self._api.surveillance_station.get_camera_image(self.entity_description.key, self.snapshot_quality)  # type: ignore[no-any-return]
+            return await self._api.surveillance_station.get_camera_image(  # type: ignore[no-any-return]
+                self.entity_description.key, self.snapshot_quality
+            )
         except (
             SynologyDSMAPIErrorException,
             SynologyDSMRequestException,

View File

@@ -57,7 +57,8 @@ from .template_entity import TemplateEntity, rewrite_common_legacy_to_modern_con
 from .trigger_entity import TriggerEntity

 CHECK_FORECAST_KEYS = (
-    set().union(Forecast.__annotations__.keys())
+    set()
+    .union(Forecast.__annotations__.keys())
     # Manually add the forecast resulting attributes that only exists
     # as native_* in the Forecast definition
     .union(("apparent_temperature", "wind_gust_speed", "dew_point"))

View File

@@ -119,9 +119,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
         """Handle Memo Text service call."""
         memo_text = call.data[CONF_MEMO_TEXT]
         memo_text.hass = hass
-        await hass.data[DOMAIN][call.data[CONF_INTERFACE]]["cntrl"].get_module(
-            call.data[CONF_ADDRESS]
-        ).set_memo_text(memo_text.async_render())
+        await (
+            hass.data[DOMAIN][call.data[CONF_INTERFACE]]["cntrl"]
+            .get_module(call.data[CONF_ADDRESS])
+            .set_memo_text(memo_text.async_render())
+        )

     hass.services.async_register(
         DOMAIN,
View File

@@ -48,12 +48,12 @@ class VeSyncSensorEntityDescription(
 ):
     """Describe VeSync sensor entity."""

-    exists_fn: Callable[
-        [VeSyncAirBypass | VeSyncOutlet | VeSyncSwitch], bool
-    ] = lambda _: True
-    update_fn: Callable[
-        [VeSyncAirBypass | VeSyncOutlet | VeSyncSwitch], None
-    ] = lambda _: None
+    exists_fn: Callable[[VeSyncAirBypass | VeSyncOutlet | VeSyncSwitch], bool] = (
+        lambda _: True
+    )
+    update_fn: Callable[[VeSyncAirBypass | VeSyncOutlet | VeSyncSwitch], None] = (
+        lambda _: None
+    )


 def update_energy(device):

View File

@@ -28,9 +28,9 @@ NOT_AVAILABLE: list = ["", "N/A", "0.0.0.0"]
 class VodafoneStationBaseEntityDescription:
     """Vodafone Station entity base description."""

-    value: Callable[
-        [Any, Any], Any
-    ] = lambda coordinator, key: coordinator.data.sensors[key]
+    value: Callable[[Any, Any], Any] = (
+        lambda coordinator, key: coordinator.data.sensors[key]
+    )
     is_suitable: Callable[[dict], bool] = lambda val: True

View File

@@ -111,11 +111,13 @@ class HassVoipDatagramProtocol(VoipDatagramProtocol):
             valid_protocol_factory=lambda call_info, rtcp_state: make_protocol(
                 hass, devices, call_info, rtcp_state
             ),
-            invalid_protocol_factory=lambda call_info, rtcp_state: PreRecordMessageProtocol(
-                hass,
-                "not_configured.pcm",
-                opus_payload_type=call_info.opus_payload_type,
-                rtcp_state=rtcp_state,
+            invalid_protocol_factory=(
+                lambda call_info, rtcp_state: PreRecordMessageProtocol(
+                    hass,
+                    "not_configured.pcm",
+                    opus_payload_type=call_info.opus_payload_type,
+                    rtcp_state=rtcp_state,
+                )
             ),
         )
         self.hass = hass

View File

@@ -95,9 +95,7 @@ class MusicCastFlowHandler(ConfigFlow, domain=DOMAIN):
             self.upnp_description = discovery_info.ssdp_location

             # ssdp_location and hostname have been checked in check_yamaha_ssdp so it is safe to ignore type assignment
-            self.host = urlparse(
-                discovery_info.ssdp_location
-            ).hostname  # type: ignore[assignment]
+            self.host = urlparse(discovery_info.ssdp_location).hostname  # type: ignore[assignment]

             await self.async_set_unique_id(self.serial_number)
             self._abort_if_unique_id_configured(

View File

@@ -276,9 +276,7 @@ async def async_setup_entry(
             if state_key == "0":
                 continue

-            notification_description: NotificationZWaveJSEntityDescription | None = (
-                None
-            )
+            notification_description: NotificationZWaveJSEntityDescription | None = None

             for description in NOTIFICATION_SENSOR_MAPPINGS:
                 if (

View File

@@ -344,7 +344,8 @@ class ZWaveNodeFirmwareUpdate(UpdateEntity):
             is not None
             and (extra_data := await self.async_get_last_extra_data())
             and (
-                latest_version_firmware := ZWaveNodeFirmwareUpdateExtraStoredData.from_dict(
+                latest_version_firmware
+                := ZWaveNodeFirmwareUpdateExtraStoredData.from_dict(
                     extra_data.as_dict()
                 ).latest_version_firmware
             )

View File

@@ -251,7 +251,9 @@ def async_track_state_change(
         return async_track_state_change_event(hass, entity_ids, state_change_listener)

     return hass.bus.async_listen(
-        EVENT_STATE_CHANGED, state_change_dispatcher, event_filter=state_change_filter  # type: ignore[arg-type]
+        EVENT_STATE_CHANGED,
+        state_change_dispatcher,  # type: ignore[arg-type]
+        event_filter=state_change_filter,  # type: ignore[arg-type]
     )
@@ -761,7 +763,8 @@ class _TrackStateChangeFiltered:
     @callback
     def _setup_all_listener(self) -> None:
         self._listeners[_ALL_LISTENER] = self.hass.bus.async_listen(
-            EVENT_STATE_CHANGED, self._action  # type: ignore[arg-type]
+            EVENT_STATE_CHANGED,
+            self._action,  # type: ignore[arg-type]
         )
@@ -1335,7 +1338,8 @@ def async_track_same_state(
     if entity_ids == MATCH_ALL:
         async_remove_state_for_cancel = hass.bus.async_listen(
-            EVENT_STATE_CHANGED, state_for_cancel_listener  # type: ignore[arg-type]
+            EVENT_STATE_CHANGED,
+            state_for_cancel_listener,  # type: ignore[arg-type]
         )
     else:
         async_remove_state_for_cancel = async_track_state_change_event(

View File

@@ -190,7 +190,8 @@ class RestoreStateData:
                 state, self.entities[state.entity_id].extra_restore_state_data, now
             )
             for state in all_states
-            if state.entity_id in self.entities and
+            if state.entity_id in self.entities
+            and
             # Ignore all states that are entity registry placeholders
             not state.attributes.get(ATTR_RESTORED)
         ]

View File

@@ -99,8 +99,7 @@ class DataUpdateCoordinator(BaseDataUpdateCoordinatorProtocol, Generic[_DataT]):
         # Pick a random microsecond in range 0.05..0.50 to stagger the refreshes
         # and avoid a thundering herd.
         self._microsecond = (
-            randint(event.RANDOM_MICROSECOND_MIN, event.RANDOM_MICROSECOND_MAX)
-            / 10**6
+            randint(event.RANDOM_MICROSECOND_MIN, event.RANDOM_MICROSECOND_MAX) / 10**6
         )

         self._listeners: dict[CALLBACK_TYPE, tuple[CALLBACK_TYPE, object | None]] = {}

View File

@@ -403,9 +403,7 @@ async def async_get_zeroconf(
     hass: HomeAssistant,
 ) -> dict[str, list[dict[str, str | dict[str, str]]]]:
     """Return cached list of zeroconf types."""
-    zeroconf: dict[
-        str, list[dict[str, str | dict[str, str]]]
-    ] = ZEROCONF.copy()  # type: ignore[assignment]
+    zeroconf: dict[str, list[dict[str, str | dict[str, str]]]] = ZEROCONF.copy()  # type: ignore[assignment]

     integrations = await async_get_custom_components(hass)
     for integration in integrations.values():
@@ -1013,9 +1011,7 @@ def _load_file(
     Async friendly.
     """
     with suppress(KeyError):
-        return hass.data[DATA_COMPONENTS][  # type: ignore[no-any-return]
-            comp_or_platform
-        ]
+        return hass.data[DATA_COMPONENTS][comp_or_platform]  # type: ignore[no-any-return]

     cache = hass.data[DATA_COMPONENTS]

View File

@@ -57,7 +57,8 @@ def json_loads_object(__obj: bytes | bytearray | memoryview | str) -> JsonObject

 def load_json(
-    filename: str | PathLike, default: JsonValueType = _SENTINEL  # type: ignore[assignment]
+    filename: str | PathLike,
+    default: JsonValueType = _SENTINEL,  # type: ignore[assignment]
 ) -> JsonValueType:
     """Load JSON data from a file.

@@ -79,7 +80,8 @@ def load_json(

 def load_json_array(
-    filename: str | PathLike, default: JsonArrayType = _SENTINEL  # type: ignore[assignment]
+    filename: str | PathLike,
+    default: JsonArrayType = _SENTINEL,  # type: ignore[assignment]
 ) -> JsonArrayType:
     """Load JSON data from a file and return as list.

@@ -98,7 +100,8 @@ def load_json_array(

 def load_json_object(
-    filename: str | PathLike, default: JsonObjectType = _SENTINEL  # type: ignore[assignment]
+    filename: str | PathLike,
+    default: JsonObjectType = _SENTINEL,  # type: ignore[assignment]
 ) -> JsonObjectType:
     """Load JSON data from a file and return as dict.

View File

@@ -129,6 +129,7 @@ def vincenty(
     uSq = cosSqAlpha * (AXIS_A**2 - AXIS_B**2) / (AXIS_B**2)
     A = 1 + uSq / 16384 * (4096 + uSq * (-768 + uSq * (320 - 175 * uSq)))
     B = uSq / 1024 * (256 + uSq * (-128 + uSq * (74 - 47 * uSq)))
+    # fmt: off
     deltaSigma = (
         B
         * sinSigma
@@ -141,11 +142,12 @@ def vincenty(
             - B
             / 6
             * cos2SigmaM
-            * (-3 + 4 * sinSigma**2)
-            * (-3 + 4 * cos2SigmaM**2)
+            * (-3 + 4 * sinSigma ** 2)
+            * (-3 + 4 * cos2SigmaM ** 2)
             )
         )
     )
+    # fmt: on
     s = AXIS_B * A * (sigma - deltaSigma)

     s /= 1000  # Conversion of meters to kilometers

View File

@@ -340,7 +340,12 @@ def _handle_mapping_tag(
         raise yaml.MarkedYAMLError(
             context=f'invalid key: "{key}"',
             context_mark=yaml.Mark(
-                fname, 0, line, -1, None, None  # type: ignore[arg-type]
+                fname,
+                0,
+                line,
+                -1,
+                None,
+                None,  # type: ignore[arg-type]
             ),
         ) from exc

View File

@@ -79,9 +79,6 @@ include-package-data = true
 [tool.setuptools.packages.find]
 include = ["homeassistant*"]

-[tool.black]
-extend-exclude = "/generated/"
-
 [tool.pylint.MAIN]
 py-version = "3.11"
 ignore = [

View File

@@ -1,6 +1,5 @@
 # Automatically generated from .pre-commit-config.yaml by gen_requirements_all.py, do not edit
-black==23.11.0
 codespell==2.2.2
-ruff==0.1.1
+ruff==0.1.6
 yamllint==1.32.0

View File

@@ -1,10 +1,10 @@
 #!/bin/sh
-# Format code with black.
+# Format code with ruff-format.

 cd "$(dirname "$0")/.."

-black \
+ruff \
+  format \
   --check \
-  --fast \
   --quiet \
   homeassistant tests script *.py
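Note: the script keeps `--check`, so it only reports differences. Dropping `--check` makes ruff rewrite the files in place, which is roughly the local replacement for the old `black --fast homeassistant tests`:

    ruff format homeassistant tests script *.py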

View File

@@ -192,6 +192,7 @@ IGNORE_PRE_COMMIT_HOOK_ID = (
     "no-commit-to-branch",
     "prettier",
     "python-typing-update",
+    "ruff-format",  # it's just ruff
 )

 PACKAGE_REGEX = re.compile(r"^(?:--.+\s)?([-_\.\w\d]+).*==.+$")
@@ -394,7 +395,8 @@ def requirements_test_all_output(reqs: dict[str, list[str]]) -> str:
         for requirement, modules in reqs.items()
         if any(
             # Always install requirements that are not part of integrations
-            not mdl.startswith("homeassistant.components.") or
+            not mdl.startswith("homeassistant.components.")
+            or
             # Install tests for integrations that have tests
             has_tests(mdl)
             for mdl in modules

View File

@@ -2,11 +2,10 @@
 from __future__ import annotations

 from collections.abc import Collection, Iterable, Mapping
+import shutil
+import subprocess
 from typing import Any

-import black
-from black.mode import Mode
-
 DEFAULT_GENERATOR = "script.hassfest"
@@ -72,7 +71,14 @@ To update, run python3 -m {generator}
 {content}
 """

-    return black.format_str(content.strip(), mode=Mode())
+    ruff = shutil.which("ruff")
+    if not ruff:
+        raise RuntimeError("ruff not found")
+    return subprocess.check_output(
+        [ruff, "format", "-"],
+        input=content.strip(),
+        encoding="utf-8",
+    )


 def format_python_namespace(
def format_python_namespace( def format_python_namespace(

View File

@@ -267,7 +267,7 @@ async def async_test_home_assistant(event_loop, load_registries=True):
         "homeassistant.helpers.restore_state.RestoreStateData.async_setup_dump",
         return_value=None,
     ), patch(
-        "homeassistant.helpers.restore_state.start.async_at_start"
+        "homeassistant.helpers.restore_state.start.async_at_start",
     ):
         await asyncio.gather(
             ar.async_load(hass),

View File

@@ -78,9 +78,7 @@ async def setup_airvisual_pro_fixture(hass, config, pro):
         "homeassistant.components.airvisual_pro.config_flow.NodeSamba", return_value=pro
     ), patch(
         "homeassistant.components.airvisual_pro.NodeSamba", return_value=pro
-    ), patch(
-        "homeassistant.components.airvisual.PLATFORMS", []
-    ):
+    ), patch("homeassistant.components.airvisual.PLATFORMS", []):
         assert await async_setup_component(hass, DOMAIN, config)
         await hass.async_block_till_done()
         yield

View File

@@ -180,9 +180,11 @@ async def test_send_base_with_supervisor(
         "homeassistant.components.hassio.is_hassio",
         side_effect=Mock(return_value=True),
     ), patch(
-        "uuid.UUID.hex", new_callable=PropertyMock
+        "uuid.UUID.hex",
+        new_callable=PropertyMock,
     ) as hex, patch(
-        "homeassistant.components.analytics.analytics.HA_VERSION", MOCK_VERSION
+        "homeassistant.components.analytics.analytics.HA_VERSION",
+        MOCK_VERSION,
     ):
         hex.return_value = MOCK_UUID

         await analytics.load()
@@ -289,7 +291,8 @@ async def test_send_usage_with_supervisor(
         "homeassistant.components.hassio.is_hassio",
         side_effect=Mock(return_value=True),
     ), patch(
-        "homeassistant.components.analytics.analytics.HA_VERSION", MOCK_VERSION
+        "homeassistant.components.analytics.analytics.HA_VERSION",
+        MOCK_VERSION,
     ):
         await analytics.send_analytics()
     assert (
@@ -492,7 +495,8 @@ async def test_send_statistics_with_supervisor(
         "homeassistant.components.hassio.is_hassio",
         side_effect=Mock(return_value=True),
     ), patch(
-        "homeassistant.components.analytics.analytics.HA_VERSION", MOCK_VERSION
+        "homeassistant.components.analytics.analytics.HA_VERSION",
+        MOCK_VERSION,
     ):
         await analytics.send_analytics()
     assert "'addon_count': 1" in caplog.text

View File

@@ -51,7 +51,7 @@ async def async_init_integration(
     ) as update_patch, patch(
         "homeassistant.components.anova.AnovaApi.authenticate"
     ), patch(
-        "homeassistant.components.anova.AnovaApi.get_devices"
+        "homeassistant.components.anova.AnovaApi.get_devices",
     ) as device_patch:
         update_patch.return_value = ONLINE_UPDATE
         device_patch.return_value = [

View File

@@ -92,7 +92,8 @@ async def test_load_backups(hass: HomeAssistant) -> None:
             "date": TEST_BACKUP.date,
         },
     ), patch(
-        "pathlib.Path.stat", return_value=MagicMock(st_size=TEST_BACKUP.size)
+        "pathlib.Path.stat",
+        return_value=MagicMock(st_size=TEST_BACKUP.size),
     ):
         await manager.load_backups()
     backups = await manager.get_backups()

View File

@@ -120,7 +120,8 @@ async def test_form_2fa_connect_error(hass: HomeAssistant) -> None:
         "homeassistant.components.blink.config_flow.Blink.setup_urls",
         side_effect=BlinkSetupError,
     ), patch(
-        "homeassistant.components.blink.async_setup_entry", return_value=True
+        "homeassistant.components.blink.async_setup_entry",
+        return_value=True,
     ):
         result3 = await hass.config_entries.flow.async_configure(
             result2["flow_id"], {"pin": "1234"}
@@ -161,7 +162,8 @@ async def test_form_2fa_invalid_key(hass: HomeAssistant) -> None:
         "homeassistant.components.blink.config_flow.Blink.setup_urls",
         return_value=True,
     ), patch(
-        "homeassistant.components.blink.async_setup_entry", return_value=True
+        "homeassistant.components.blink.async_setup_entry",
+        return_value=True,
     ):
         result3 = await hass.config_entries.flow.async_configure(
             result2["flow_id"], {"pin": "1234"}
@@ -200,7 +202,8 @@ async def test_form_2fa_unknown_error(hass: HomeAssistant) -> None:
         "homeassistant.components.blink.config_flow.Blink.setup_urls",
         side_effect=KeyError,
     ), patch(
-        "homeassistant.components.blink.async_setup_entry", return_value=True
+        "homeassistant.components.blink.async_setup_entry",
+        return_value=True,
     ):
         result3 = await hass.config_entries.flow.async_configure(
             result2["flow_id"], {"pin": "1234"}

View File

@@ -47,12 +47,14 @@ def mock_operating_system_90():
 def macos_adapter():
     """Fixture that mocks the macos adapter."""
     with patch("bleak.get_platform_scanner_backend_type"), patch(
-        "homeassistant.components.bluetooth.platform.system", return_value="Darwin"
+        "homeassistant.components.bluetooth.platform.system",
+        return_value="Darwin",
     ), patch(
         "homeassistant.components.bluetooth.scanner.platform.system",
         return_value="Darwin",
     ), patch(
-        "bluetooth_adapters.systems.platform.system", return_value="Darwin"
+        "bluetooth_adapters.systems.platform.system",
+        return_value="Darwin",
     ):
         yield
@@ -71,14 +73,16 @@ def windows_adapter():
 def no_adapter_fixture():
     """Fixture that mocks no adapters on Linux."""
     with patch(
-        "homeassistant.components.bluetooth.platform.system", return_value="Linux"
+        "homeassistant.components.bluetooth.platform.system",
+        return_value="Linux",
     ), patch(
         "homeassistant.components.bluetooth.scanner.platform.system",
         return_value="Linux",
     ), patch(
-        "bluetooth_adapters.systems.platform.system", return_value="Linux"
+        "bluetooth_adapters.systems.platform.system",
+        return_value="Linux",
     ), patch(
-        "bluetooth_adapters.systems.linux.LinuxAdapters.refresh"
+        "bluetooth_adapters.systems.linux.LinuxAdapters.refresh",
     ), patch(
         "bluetooth_adapters.systems.linux.LinuxAdapters.adapters",
         {},
@@ -90,14 +94,16 @@ def no_adapter_fixture():
 def one_adapter_fixture():
     """Fixture that mocks one adapter on Linux."""
     with patch(
-        "homeassistant.components.bluetooth.platform.system", return_value="Linux"
+        "homeassistant.components.bluetooth.platform.system",
+        return_value="Linux",
    ), patch(
         "homeassistant.components.bluetooth.scanner.platform.system",
         return_value="Linux",
     ), patch(
-        "bluetooth_adapters.systems.platform.system", return_value="Linux"
+        "bluetooth_adapters.systems.platform.system",
+        return_value="Linux",
     ), patch(
-        "bluetooth_adapters.systems.linux.LinuxAdapters.refresh"
+        "bluetooth_adapters.systems.linux.LinuxAdapters.refresh",
     ), patch(
         "bluetooth_adapters.systems.linux.LinuxAdapters.adapters",
         {
@@ -124,9 +130,7 @@ def two_adapters_fixture():
     ), patch(
         "homeassistant.components.bluetooth.scanner.platform.system",
         return_value="Linux",
-    ), patch(
-        "bluetooth_adapters.systems.platform.system", return_value="Linux"
-    ), patch(
+    ), patch("bluetooth_adapters.systems.platform.system", return_value="Linux"), patch(
         "bluetooth_adapters.systems.linux.LinuxAdapters.refresh"
     ), patch(
         "bluetooth_adapters.systems.linux.LinuxAdapters.adapters",
@@ -166,9 +170,7 @@ def one_adapter_old_bluez():
     ), patch(
         "homeassistant.components.bluetooth.scanner.platform.system",
         return_value="Linux",
-    ), patch(
-        "bluetooth_adapters.systems.platform.system", return_value="Linux"
-    ), patch(
+    ), patch("bluetooth_adapters.systems.platform.system", return_value="Linux"), patch(
         "bluetooth_adapters.systems.linux.LinuxAdapters.refresh"
     ), patch(
         "bluetooth_adapters.systems.linux.LinuxAdapters.adapters",

View File

@@ -67,13 +67,9 @@ async def setup_bond_entity(
         enabled=patch_token
     ), patch_bond_version(enabled=patch_version), patch_bond_device_ids(
         enabled=patch_device_ids
-    ), patch_setup_entry(
-        "cover", enabled=patch_platforms
-    ), patch_setup_entry(
+    ), patch_setup_entry("cover", enabled=patch_platforms), patch_setup_entry(
         "fan", enabled=patch_platforms
-    ), patch_setup_entry(
-        "light", enabled=patch_platforms
-    ), patch_setup_entry(
+    ), patch_setup_entry("light", enabled=patch_platforms), patch_setup_entry(
         "switch", enabled=patch_platforms
     ):
         return await hass.config_entries.async_setup(config_entry.entry_id)
@@ -102,15 +98,11 @@ async def setup_platform(
         "homeassistant.components.bond.PLATFORMS", [platform]
     ), patch_bond_version(return_value=bond_version), patch_bond_bridge(
         return_value=bridge
-    ), patch_bond_token(
-        return_value=token
-    ), patch_bond_device_ids(
+    ), patch_bond_token(return_value=token), patch_bond_device_ids(
         return_value=[bond_device_id]
     ), patch_start_bpup(), patch_bond_device(
         return_value=discovered_device
-    ), patch_bond_device_properties(
-        return_value=props
-    ), patch_bond_device_state(
+    ), patch_bond_device_properties(return_value=props), patch_bond_device_state(
         return_value=state
     ):
         assert await async_setup_component(hass, BOND_DOMAIN, {})

View File

@@ -184,9 +184,7 @@ async def test_old_identifiers_are_removed(
             "name": "test1",
             "type": DeviceType.GENERIC_DEVICE,
         }
-    ), patch_bond_device_properties(
-        return_value={}
-    ), patch_bond_device_state(
+    ), patch_bond_device_properties(return_value={}), patch_bond_device_state(
         return_value={}
     ):
         assert await hass.config_entries.async_setup(config_entry.entry_id) is True
@@ -228,9 +226,7 @@ async def test_smart_by_bond_device_suggested_area(
             "type": DeviceType.GENERIC_DEVICE,
             "location": "Den",
         }
-    ), patch_bond_device_properties(
-        return_value={}
-    ), patch_bond_device_state(
+    ), patch_bond_device_properties(return_value={}), patch_bond_device_state(
         return_value={}
     ):
         assert await hass.config_entries.async_setup(config_entry.entry_id) is True
@@ -275,9 +271,7 @@ async def test_bridge_device_suggested_area(
             "type": DeviceType.GENERIC_DEVICE,
             "location": "Bathroom",
         }
-    ), patch_bond_device_properties(
-        return_value={}
-    ), patch_bond_device_state(
+    ), patch_bond_device_properties(return_value={}), patch_bond_device_state(
         return_value={}
     ):
         assert await hass.config_entries.async_setup(config_entry.entry_id) is True

View File

@@ -19,7 +19,7 @@ async def test_creating_entry_sets_up_media_player(hass: HomeAssistant) -> None:
     ) as mock_setup, patch(
         "pychromecast.discovery.discover_chromecasts", return_value=(True, None)
     ), patch(
-        "pychromecast.discovery.stop_discovery"
+        "pychromecast.discovery.stop_discovery",
     ):
         result = await hass.config_entries.flow.async_init(
             cast.DOMAIN, context={"source": config_entries.SOURCE_USER}

View File

@@ -24,7 +24,7 @@ async def test_user(hass: HomeAssistant) -> None:
     ), patch(
         "homeassistant.components.comelit.async_setup_entry"
     ) as mock_setup_entry, patch(
-        "requests.get"
+        "requests.get",
     ) as mock_request_get:
         mock_request_get.return_value.status_code = 200
@@ -70,7 +70,7 @@ async def test_exception_connection(hass: HomeAssistant, side_effect, error) ->
     ), patch(
         "aiocomelit.api.ComeliteSerialBridgeApi.logout",
     ), patch(
-        "homeassistant.components.comelit.async_setup_entry"
+        "homeassistant.components.comelit.async_setup_entry",
     ):
         result = await hass.config_entries.flow.async_configure(
             result["flow_id"], user_input=MOCK_USER_DATA
@@ -135,9 +135,7 @@ async def test_reauth_not_successful(hass: HomeAssistant, side_effect, error) ->
         "aiocomelit.api.ComeliteSerialBridgeApi.login", side_effect=side_effect
     ), patch(
         "aiocomelit.api.ComeliteSerialBridgeApi.logout",
-    ), patch(
-        "homeassistant.components.comelit.async_setup_entry"
-    ):
+    ), patch("homeassistant.components.comelit.async_setup_entry"):
         result = await hass.config_entries.flow.async_init(
             DOMAIN,
             context={"source": SOURCE_REAUTH, "entry_id": mock_config.entry_id},

View File

@@ -23,7 +23,9 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None:
 @pytest.fixture
 async def setup_automation(
-    hass, automation_config, stub_blueprint_populate  # noqa: F811
+    hass,
+    automation_config,
+    stub_blueprint_populate,  # noqa: F811
 ):
     """Set up automation integration."""
     assert await async_setup_component(

View File

@@ -65,7 +65,8 @@ def denonavr_connect_fixture():
         "homeassistant.components.denonavr.receiver.DenonAVR.receiver_type",
         TEST_RECEIVER_TYPE,
     ), patch(
-        "homeassistant.components.denonavr.async_setup_entry", return_value=True
+        "homeassistant.components.denonavr.async_setup_entry",
+        return_value=True,
     ):
         yield

View File

@@ -151,8 +151,11 @@ async def _async_get_handle_dhcp_packet(hass, integration_matchers):
     with patch(
         "homeassistant.components.dhcp._verify_l2socket_setup",
     ), patch(
-        "scapy.arch.common.compile_filter"
-    ), patch("scapy.sendrecv.AsyncSniffer", _mock_sniffer):
+        "scapy.arch.common.compile_filter",
+    ), patch(
+        "scapy.sendrecv.AsyncSniffer",
+        _mock_sniffer,
+    ):
         await dhcp_watcher.async_start()

     return async_handle_dhcp_packet

View File

@@ -198,9 +198,7 @@ async def test_import_flow_triggered_with_ecobee_conf_and_valid_data_and_stale_t
         return_value=MOCK_ECOBEE_CONF,
     ), patch(
         "homeassistant.components.ecobee.config_flow.Ecobee"
-    ) as mock_ecobee, patch.object(
-        flow, "async_step_user"
-    ) as mock_async_step_user:
+    ) as mock_ecobee, patch.object(flow, "async_step_user") as mock_async_step_user:
         mock_ecobee = mock_ecobee.return_value
         mock_ecobee.refresh_tokens.return_value = False

View File

@@ -55,7 +55,8 @@ async def test_one_time_password(hass: HomeAssistant):
         "electrasmart.api.ElectraAPI.validate_one_time_password",
         return_value=mock_otp_response,
     ), patch(
-        "electrasmart.api.ElectraAPI.fetch_devices", return_value=[]
+        "electrasmart.api.ElectraAPI.fetch_devices",
+        return_value=[],
     ):
         result = await hass.config_entries.flow.async_init(
             DOMAIN,

View File

@@ -229,9 +229,7 @@ async def test_form_user_with_insecure_elk_times_out(hass: HomeAssistant) -> Non
         0,
     ), patch(
         "homeassistant.components.elkm1.config_flow.LOGIN_TIMEOUT", 0
-    ), _patch_discovery(), _patch_elk(
-        elk=mocked_elk
-    ):
+    ), _patch_discovery(), _patch_elk(elk=mocked_elk):
         result2 = await hass.config_entries.flow.async_configure(
             result["flow_id"],
             {

View File

@@ -89,7 +89,8 @@ async def setup_enphase_envoy_fixture(hass, config, mock_envoy):
         "homeassistant.components.enphase_envoy.Envoy",
         return_value=mock_envoy,
     ), patch(
-        "homeassistant.components.enphase_envoy.PLATFORMS", []
+        "homeassistant.components.enphase_envoy.PLATFORMS",
+        [],
     ):
         assert await async_setup_component(hass, DOMAIN, config)
         await hass.async_block_till_done()

View File

@@ -38,7 +38,7 @@ async def test_set_unique_id(
     ), patch(
         "homeassistant.components.epson.Projector.get_serial_number", return_value="123"
     ), patch(
-        "homeassistant.components.epson.Projector.get_property"
+        "homeassistant.components.epson.Projector.get_property",
     ):
         freezer.tick(timedelta(seconds=30))
         async_fire_time_changed(hass)

View File

@@ -100,7 +100,8 @@ async def test_update_entity(
     ) as mock_compile, patch(
         "esphome_dashboard_api.ESPHomeDashboardAPI.upload", return_value=True
     ) as mock_upload, pytest.raises(
-        HomeAssistantError, match="compiling"
+        HomeAssistantError,
+        match="compiling",
     ):
         await hass.services.async_call(
             "update",
@@ -120,7 +121,8 @@ async def test_update_entity(
     ) as mock_compile, patch(
         "esphome_dashboard_api.ESPHomeDashboardAPI.upload", return_value=False
     ) as mock_upload, pytest.raises(
-        HomeAssistantError, match="OTA"
+        HomeAssistantError,
+        match="OTA",
     ):
         await hass.services.async_call(
             "update",

View File

@@ -51,7 +51,8 @@ async def setup_evil_genius_labs(
         "pyevilgenius.EvilGeniusDevice.get_product",
         return_value=product_fixture,
     ), patch(
-        "homeassistant.components.evil_genius_labs.PLATFORMS", platforms
+        "homeassistant.components.evil_genius_labs.PLATFORMS",
+        platforms,
     ):
         assert await async_setup_component(hass, "evil_genius_labs", {})
         await hass.async_block_till_done()

View File

@@ -48,9 +48,9 @@ async def test_user(hass: HomeAssistant, fc_class_mock, mock_get_source_ip) -> N
     ), patch(
         "homeassistant.components.fritz.async_setup_entry"
     ) as mock_setup_entry, patch(
-        "requests.get"
+        "requests.get",
     ) as mock_request_get, patch(
-        "requests.post"
+        "requests.post",
     ) as mock_request_post, patch(
         "homeassistant.components.fritz.config_flow.socket.gethostbyname",
         return_value=MOCK_IPS["fritz.box"],
@@ -98,9 +98,9 @@ async def test_user_already_configured(
         "homeassistant.components.fritz.common.FritzBoxTools._update_device_info",
         return_value=MOCK_FIRMWARE_INFO,
     ), patch(
-        "requests.get"
+        "requests.get",
     ) as mock_request_get, patch(
-        "requests.post"
+        "requests.post",
     ) as mock_request_post, patch(
         "homeassistant.components.fritz.config_flow.socket.gethostbyname",
         return_value=MOCK_IPS["fritz.box"],
@@ -211,11 +211,11 @@ async def test_reauth_successful(
         "homeassistant.components.fritz.common.FritzBoxTools._update_device_info",
         return_value=MOCK_FIRMWARE_INFO,
     ), patch(
-        "homeassistant.components.fritz.async_setup_entry"
+        "homeassistant.components.fritz.async_setup_entry",
     ) as mock_setup_entry, patch(
-        "requests.get"
+        "requests.get",
     ) as mock_request_get, patch(
-        "requests.post"
+        "requests.post",
     ) as mock_request_post:
         mock_request_get.return_value.status_code = 200
         mock_request_get.return_value.content = MOCK_REQUEST
@@ -399,9 +399,7 @@ async def test_ssdp(hass: HomeAssistant, fc_class_mock, mock_get_source_ip) -> N
         return_value=MOCK_FIRMWARE_INFO,
     ), patch(
         "homeassistant.components.fritz.async_setup_entry"
-    ) as mock_setup_entry, patch(
-        "requests.get"
-    ) as mock_request_get, patch(
+    ) as mock_setup_entry, patch("requests.get") as mock_request_get, patch(
         "requests.post"
     ) as mock_request_post:
         mock_request_get.return_value.status_code = 200

View File

@@ -43,7 +43,8 @@ async def init_integration(
         "homeassistant.components.gios.Gios._get_all_sensors",
         return_value=sensors,
     ), patch(
-        "homeassistant.components.gios.Gios._get_indexes", return_value=indexes
+        "homeassistant.components.gios.Gios._get_indexes",
+        return_value=indexes,
     ):
         entry.add_to_hass(hass)
         await hass.config_entries.async_setup(entry.entry_id)

View File

@@ -55,7 +55,8 @@ async def test_invalid_sensor_data(hass: HomeAssistant) -> None:
         "homeassistant.components.gios.Gios._get_station",
         return_value=json.loads(load_fixture("gios/station.json")),
     ), patch(
-        "homeassistant.components.gios.Gios._get_sensor", return_value={}
+        "homeassistant.components.gios.Gios._get_sensor",
+        return_value={},
     ):
         flow = config_flow.GiosFlowHandler()
         flow.hass = hass
@@ -83,7 +84,8 @@ async def test_cannot_connect(hass: HomeAssistant) -> None:
 async def test_create_entry(hass: HomeAssistant) -> None:
     """Test that the user step works."""
     with patch(
-        "homeassistant.components.gios.Gios._get_stations", return_value=STATIONS
+        "homeassistant.components.gios.Gios._get_stations",
+        return_value=STATIONS,
     ), patch(
         "homeassistant.components.gios.Gios._get_station",
         return_value=json.loads(load_fixture("gios/station.json")),

View File

@@ -82,9 +82,7 @@ async def test_migrate_device_and_config_entry(
     ), patch(
         "homeassistant.components.gios.Gios._get_all_sensors",
         return_value=sensors,
-    ), patch(
-        "homeassistant.components.gios.Gios._get_indexes", return_value=indexes
-    ):
+    ), patch("homeassistant.components.gios.Gios._get_indexes", return_value=indexes):
         config_entry.add_to_hass(hass)

         device_entry = device_registry.async_get_or_create(

View File

@@ -92,7 +92,7 @@ async def test_update_access_token(hass: HomeAssistant) -> None:
     ) as mock_get_token, patch(
         "homeassistant.components.google_assistant.http._get_homegraph_jwt"
     ) as mock_get_jwt, patch(
-        "homeassistant.core.dt_util.utcnow"
+        "homeassistant.core.dt_util.utcnow",
     ) as mock_utcnow:
         mock_utcnow.return_value = base_time
         mock_get_jwt.return_value = jwt

View File

@@ -66,7 +66,12 @@ async def test_broadcast_no_targets(
             "Anuncia en el salón Es hora de hacer los deberes",
         ),
         ("ko-KR", "숙제할 시간이야", "거실", "숙제할 시간이야 라고 거실에 방송해 줘"),
-        ("ja-JP", "宿題の時間だよ", "リビング", "宿題の時間だよとリビングにブロードキャストして"),
+        (
+            "ja-JP",
+            "宿題の時間だよ",
+            "リビング",
+            "宿題の時間だよとリビングにブロードキャストして",
+        ),
     ],
     ids=["english", "spanish", "korean", "japanese"],
 )

View File

@@ -131,9 +131,10 @@ async def setup_guardian_fixture(
         "aioguardian.commands.wifi.WiFiCommands.status",
         return_value=data_wifi_status,
     ), patch(
-        "aioguardian.client.Client.disconnect"
+        "aioguardian.client.Client.disconnect",
     ), patch(
-        "homeassistant.components.guardian.PLATFORMS", []
+        "homeassistant.components.guardian.PLATFORMS",
+        [],
     ):
         assert await async_setup_component(hass, DOMAIN, config)
         await hass.async_block_till_done()

View File

@@ -54,9 +54,9 @@ def hassio_stubs(hassio_env, hass, hass_client, aioclient_mock):
         "homeassistant.components.hassio.HassIO.get_ingress_panels",
         return_value={"panels": []},
     ), patch(
-        "homeassistant.components.hassio.issues.SupervisorIssues.setup"
+        "homeassistant.components.hassio.issues.SupervisorIssues.setup",
     ), patch(
-        "homeassistant.components.hassio.HassIO.refresh_updates"
+        "homeassistant.components.hassio.HassIO.refresh_updates",
     ):
         hass.state = CoreState.starting
         hass.loop.run_until_complete(async_setup_component(hass, "hassio", {}))

View File

@@ -31,7 +31,7 @@ def run_driver(hass, event_loop, iid_storage):
     ), patch("pyhap.accessory_driver.HAPServer"), patch(
         "pyhap.accessory_driver.AccessoryDriver.publish"
     ), patch(
-        "pyhap.accessory_driver.AccessoryDriver.persist"
+        "pyhap.accessory_driver.AccessoryDriver.persist",
     ):
         yield HomeDriver(
             hass,
@@ -53,9 +53,9 @@ def hk_driver(hass, event_loop, iid_storage):
     ), patch("pyhap.accessory_driver.HAPServer.async_stop"), patch(
         "pyhap.accessory_driver.HAPServer.async_start"
     ), patch(
-        "pyhap.accessory_driver.AccessoryDriver.publish"
+        "pyhap.accessory_driver.AccessoryDriver.publish",
     ), patch(
-        "pyhap.accessory_driver.AccessoryDriver.persist"
+        "pyhap.accessory_driver.AccessoryDriver.persist",
     ):
         yield HomeDriver(
             hass,
@@ -77,13 +77,13 @@ def mock_hap(hass, event_loop, iid_storage, mock_zeroconf):
     ), patch("pyhap.accessory_driver.HAPServer.async_stop"), patch(
         "pyhap.accessory_driver.HAPServer.async_start"
     ), patch(
-        "pyhap.accessory_driver.AccessoryDriver.publish"
+        "pyhap.accessory_driver.AccessoryDriver.publish",
     ), patch(
-        "pyhap.accessory_driver.AccessoryDriver.async_start"
+        "pyhap.accessory_driver.AccessoryDriver.async_start",
     ), patch(
-        "pyhap.accessory_driver.AccessoryDriver.async_stop"
+        "pyhap.accessory_driver.AccessoryDriver.async_stop",
     ), patch(
-        "pyhap.accessory_driver.AccessoryDriver.persist"
+        "pyhap.accessory_driver.AccessoryDriver.persist",
     ):
         yield HomeDriver(
             hass,

View File

@@ -1202,9 +1202,7 @@ async def test_homekit_reset_accessories_not_supported(
         "pyhap.accessory_driver.AccessoryDriver.async_update_advertisement"
     ) as hk_driver_async_update_advertisement, patch(
         "pyhap.accessory_driver.AccessoryDriver.async_start"
-    ), patch.object(
-        homekit_base, "_HOMEKIT_CONFIG_UPDATE_TIME", 0
-    ):
+    ), patch.object(homekit_base, "_HOMEKIT_CONFIG_UPDATE_TIME", 0):
         await async_init_entry(hass, entry)

         acc_mock = MagicMock()
@@ -1247,9 +1245,7 @@ async def test_homekit_reset_accessories_state_missing(
         "pyhap.accessory_driver.AccessoryDriver.config_changed"
     ) as hk_driver_config_changed, patch(
         "pyhap.accessory_driver.AccessoryDriver.async_start"
-    ), patch.object(
-        homekit_base, "_HOMEKIT_CONFIG_UPDATE_TIME", 0
-    ):
+    ), patch.object(homekit_base, "_HOMEKIT_CONFIG_UPDATE_TIME", 0):
         await async_init_entry(hass, entry)

         acc_mock = MagicMock()
@@ -1291,9 +1287,7 @@ async def test_homekit_reset_accessories_not_bridged(
         "pyhap.accessory_driver.AccessoryDriver.async_update_advertisement"
     ) as hk_driver_async_update_advertisement, patch(
         "pyhap.accessory_driver.AccessoryDriver.async_start"
-    ), patch.object(
-        homekit_base, "_HOMEKIT_CONFIG_UPDATE_TIME", 0
-    ):
+    ), patch.object(homekit_base, "_HOMEKIT_CONFIG_UPDATE_TIME", 0):
         await async_init_entry(hass, entry)

         assert hk_driver_async_update_advertisement.call_count == 0
@@ -1338,7 +1332,7 @@ async def test_homekit_reset_single_accessory(
     ) as hk_driver_async_update_advertisement, patch(
         "pyhap.accessory_driver.AccessoryDriver.async_start"
     ), patch(
-        f"{PATH_HOMEKIT}.accessories.HomeAccessory.run"
+        f"{PATH_HOMEKIT}.accessories.HomeAccessory.run",
     ) as mock_run:
         await async_init_entry(hass, entry)
         homekit.status = STATUS_RUNNING
@@ -2071,9 +2065,9 @@ async def test_reload(hass: HomeAssistant, mock_async_zeroconf: None) -> None:
     ) as mock_homekit2, patch.object(homekit.bridge, "add_accessory"), patch(
         f"{PATH_HOMEKIT}.async_show_setup_message"
     ), patch(
-        f"{PATH_HOMEKIT}.get_accessory"
+        f"{PATH_HOMEKIT}.get_accessory",
     ), patch(
-        "pyhap.accessory_driver.AccessoryDriver.async_start"
+        "pyhap.accessory_driver.AccessoryDriver.async_start",
     ), patch(
         "homeassistant.components.network.async_get_source_ip", return_value="1.2.3.4"
     ):

View File

@@ -102,7 +102,7 @@ async def test_hmip_add_device(
     ), patch.object(reloaded_hap, "async_connect"), patch.object(
         reloaded_hap, "get_hap", return_value=mock_hap.home
     ), patch(
-        "homeassistant.components.homematicip_cloud.hap.asyncio.sleep"
+        "homeassistant.components.homematicip_cloud.hap.asyncio.sleep",
     ):
         mock_hap.home.fire_create_event(event_type=EventType.DEVICE_ADDED)
         await hass.async_block_till_done()

View File

@@ -53,7 +53,8 @@ async def test_auth_auth_check_and_register(hass: HomeAssistant) -> None:
     ), patch.object(
         hmip_auth.auth, "requestAuthToken", return_value="ABC"
     ), patch.object(
-        hmip_auth.auth, "confirmAuthToken"
+        hmip_auth.auth,
+        "confirmAuthToken",
     ):
         assert await hmip_auth.async_checkbutton()
         assert await hmip_auth.async_register() == "ABC"

Some files were not shown because too many files have changed in this diff.