Enable Ruff B905 (#114197)

pull/115567/head
Sid 2024-04-14 07:14:26 +02:00 committed by GitHub
parent b70edb89bf
commit 3799d20d43
46 changed files with 116 additions and 79 deletions
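
Ruff rule B905 (zip-without-explicit-strict) flags every zip() call that omits the strict= keyword, which Python 3.10 added so that silent truncation at the shortest iterable has to be opted into explicitly. A minimal sketch of the difference the keyword makes (the sample lists are illustrative, not taken from this commit):

    # strict=False keeps classic zip() behavior: stop at the shortest input.
    pairs = list(zip([1, 2, 3], ["a", "b"], strict=False))
    assert pairs == [(1, "a"), (2, "b")]  # the trailing 3 is silently dropped

    # strict=True raises instead of truncating, surfacing length bugs early.
    try:
        list(zip([1, 2, 3], ["a", "b"], strict=True))
    except ValueError:
        pass  # "zip() argument 2 is shorter than argument 1"

Accordingly, this commit applies the behavior-preserving fix, strict=False, at almost every call site; a single helper in zwave_js opts into strict=True where equal lengths are guaranteed.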


@@ -85,10 +85,10 @@ def handle_errors_and_zip(
             return data
         if isinstance(data, dict):
-            return dict(zip(keys, list(data.values())))
+            return dict(zip(keys, list(data.values()), strict=False))
         if not isinstance(data, (list, tuple)):
             raise UpdateFailed("Received invalid data type")
-        return dict(zip(keys, data))
+        return dict(zip(keys, data, strict=False))
     return _wrapper


@@ -137,7 +137,7 @@ def queued_event_fetcher(
     # time span, but need to be triggered later when the end happens.
     results = []
     for trigger_time, event in zip(
-        map(get_trigger_time, active_events), active_events
+        map(get_trigger_time, active_events), active_events, strict=False
     ):
         if trigger_time not in offset_timespan:
             continue
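
The call above pairs each event with a key computed by mapping over the same list, so both sides of the zip are equal-length by construction and strict=False merely preserves the old behavior. The idiom in isolation, with stand-in data (len replaces get_trigger_time purely for illustration):

    events = ["start", "stop", "restart"]
    for trigger_key, event in zip(map(len, events), events, strict=False):
        print(trigger_key, event)  # 5 start, 4 stop, 7 restart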


@@ -90,7 +90,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
     sorted_coefficients = sorted(initial_coefficients, key=itemgetter(0))
     # get x values and y values from the x,y point pairs
-    x_values, y_values = zip(*initial_coefficients)
+    x_values, y_values = zip(*initial_coefficients, strict=False)
     # try to get valid coefficients for a polynomial
     coefficients = None


@@ -222,7 +222,9 @@ def get_forecast(ec_data, hourly) -> list[Forecast] | None:
         forecast_array.append(today)
-        for day, high, low in zip(range(1, 6), range(0, 9, 2), range(1, 10, 2)):
+        for day, high, low in zip(
+            range(1, 6), range(0, 9, 2), range(1, 10, 2), strict=False
+        ):
             forecast_array.append(
                 {
                     ATTR_FORECAST_TIME: (


@@ -262,7 +262,7 @@ async def handle_devices_execute(
                 ),
                 EXECUTE_LIMIT,
             )
-            for entity_id, result in zip(executions, execute_results):
+            for entity_id, result in zip(executions, execute_results, strict=False):
                 if result is not None:
                     results[entity_id] = result
         except TimeoutError:


@@ -29,7 +29,7 @@ def mean_int(*args: Any) -> int:
 def mean_tuple(*args: Any) -> tuple[float | Any, ...]:
     """Return the mean values along the columns of the supplied values."""
-    return tuple(sum(x) / len(x) for x in zip(*args))
+    return tuple(sum(x) / len(x) for x in zip(*args, strict=False))
 def attribute_equal(states: list[State], key: str) -> bool:
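
zip(*args) transposes the supplied rows into columns, so the comprehension above computes a column-wise mean. A self-contained illustration with made-up member values:

    rows = [(1.0, 10.0), (3.0, 30.0)]
    means = tuple(sum(col) / len(col) for col in zip(*rows, strict=False))
    assert means == (2.0, 20.0)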


@@ -421,7 +421,7 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):  # pylint: disable=has
             updates[DATA_SUPERVISOR_STATS] = hassio.get_supervisor_stats()
         results = await asyncio.gather(*updates.values())
-        for key, result in zip(updates, results):
+        for key, result in zip(updates, results, strict=False):
             data[key] = result
         _addon_data = data[DATA_SUPERVISOR_INFO].get("addons", [])


@@ -9,7 +9,9 @@ def get_tradable_asset_pairs(kraken_api: KrakenAPI) -> dict[str, str]:
     """Get a list of tradable asset pairs."""
    tradable_asset_pairs = {}
    asset_pairs_df = kraken_api.get_tradable_asset_pairs()
-    for pair in zip(asset_pairs_df.index.values, asset_pairs_df["wsname"]):
+    for pair in zip(
+        asset_pairs_df.index.values, asset_pairs_df["wsname"], strict=False
+    ):
         # Remove darkpools
         # https://support.kraken.com/hc/en-us/articles/360001391906-Introducing-the-Kraken-Dark-Pool
         if not pair[0].endswith(".d"):


@@ -299,7 +299,9 @@ class SendKeys(LcnServiceCall):
         keys = [[False] * 8 for i in range(4)]
-        key_strings = zip(service.data[CONF_KEYS][::2], service.data[CONF_KEYS][1::2])
+        key_strings = zip(
+            service.data[CONF_KEYS][::2], service.data[CONF_KEYS][1::2], strict=False
+        )
         for table, key in key_strings:
             table_id = ord(table) - 65
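
zip(seq[::2], seq[1::2]) splits a flat sequence into (even-index, odd-index) pairs, here interpreted as (key table, key). A sketch of the idiom with an invented key string (the real value comes from service.data[CONF_KEYS]):

    keys_cfg = "A1B5"
    for table, key in zip(keys_cfg[::2], keys_cfg[1::2], strict=False):
        print(ord(table) - 65, key)  # table "A" -> 0, table "B" -> 1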


@@ -131,7 +131,7 @@ class LgTVDevice(MediaPlayerEntity):
                     channel_name = channel.find("chname")
                     if channel_name is not None:
                         channel_names.append(str(channel_name.text))
-                self._sources = dict(zip(channel_names, channel_list))
+                self._sources = dict(zip(channel_names, channel_list, strict=False))
                 # sort source names by the major channel number
                 source_tuples = [
                     (k, source.find("major").text)


@@ -149,7 +149,7 @@ def merge_hsbk(
     Hue, Saturation, Brightness, Kelvin
     """
-    return [b if c is None else c for b, c in zip(base, change)]
+    return [b if c is None else c for b, c in zip(base, change, strict=False)]
 def _get_mac_offset(mac_addr: str, offset: int) -> str:
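
merge_hsbk() overlays a partial color change onto a base HSBK value, keeping each base component where the change holds None. With made-up values:

    base = [30000, 65535, 32768, 3500]  # hue, saturation, brightness, kelvin
    change = [None, None, 65535, None]  # only brightness overridden
    merged = [b if c is None else c for b, c in zip(base, change, strict=False)]
    assert merged == [30000, 65535, 65535, 3500]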


@@ -712,7 +712,7 @@ class MqttLight(MqttEntity, LightEntity, RestoreEntity):
                 keys.append("white")
             elif color_mode == ColorMode.RGBWW:
                 keys.extend(["cold_white", "warm_white"])
-            variables = dict(zip(keys, color))
+            variables = dict(zip(keys, color, strict=False))
             return self._command_templates[template](rgb_color_str, variables)
     def set_optimistic(


@@ -492,7 +492,7 @@ def _modify_columns(
     if engine.dialect.name == SupportedDialect.POSTGRESQL:
         columns_def = [
             "ALTER {column} TYPE {type}".format(
-                **dict(zip(["column", "type"], col_def.split(" ", 1)))
+                **dict(zip(["column", "type"], col_def.split(" ", 1), strict=False))
             )
             for col_def in columns_def
         ]


@@ -694,7 +694,7 @@ def _purge_filtered_states(
     )
     if not to_purge:
         return True
-    state_ids, attributes_ids, event_ids = zip(*to_purge)
+    state_ids, attributes_ids, event_ids = zip(*to_purge, strict=False)
     filtered_event_ids = {id_ for id_ in event_ids if id_ is not None}
     _LOGGER.debug(
         "Selected %s state_ids to remove that should be filtered", len(state_ids)
@@ -735,7 +735,7 @@ def _purge_filtered_events(
     )
     if not to_purge:
         return True
-    event_ids, data_ids = zip(*to_purge)
+    event_ids, data_ids = zip(*to_purge, strict=False)
     event_ids_set = set(event_ids)
     _LOGGER.debug(
         "Selected %s event_ids to remove that should be filtered", len(event_ids_set)


@@ -412,7 +412,7 @@ def find_possible_pt2262_device(device_ids: set[str], device_id: str) -> str | None:
     for dev_id in device_ids:
         if len(dev_id) == len(device_id):
             size = None
-            for i, (char1, char2) in enumerate(zip(dev_id, device_id)):
+            for i, (char1, char2) in enumerate(zip(dev_id, device_id, strict=False)):
                 if char1 != char2:
                     break
                 size = i


@@ -73,7 +73,9 @@ async def async_setup_entry(
         return_exceptions=True,
     )
     valid_entities: list[RoborockNumberEntity] = []
-    for (coordinator, description), result in zip(possible_entities, results):
+    for (coordinator, description), result in zip(
+        possible_entities, results, strict=False
+    ):
         if result is None or isinstance(result, RoborockException):
             _LOGGER.debug("Not adding entity because of %s", result)
         else:


@@ -121,7 +121,9 @@ async def async_setup_entry(
         return_exceptions=True,
     )
     valid_entities: list[RoborockSwitch] = []
-    for (coordinator, description), result in zip(possible_entities, results):
+    for (coordinator, description), result in zip(
+        possible_entities, results, strict=False
+    ):
         if result is None or isinstance(result, Exception):
             _LOGGER.debug("Not adding entity because of %s", result)
         else:


@@ -137,7 +137,9 @@ async def async_setup_entry(
         return_exceptions=True,
     )
     valid_entities: list[RoborockTimeEntity] = []
-    for (coordinator, description), result in zip(possible_entities, results):
+    for (coordinator, description), result in zip(
+        possible_entities, results, strict=False
+    ):
         if result is None or isinstance(result, RoborockException):
             _LOGGER.debug("Not adding entity because of %s", result)
         else:


@@ -511,9 +511,13 @@ def compile_statistics(  # noqa: C901
         # Make calculations
         stat: StatisticData = {"start": start}
         if "max" in wanted_statistics[entity_id]:
-            stat["max"] = max(*itertools.islice(zip(*valid_float_states), 1))
+            stat["max"] = max(
+                *itertools.islice(zip(*valid_float_states, strict=False), 1)
+            )
         if "min" in wanted_statistics[entity_id]:
-            stat["min"] = min(*itertools.islice(zip(*valid_float_states), 1))
+            stat["min"] = min(
+                *itertools.islice(zip(*valid_float_states, strict=False), 1)
+            )
         if "mean" in wanted_statistics[entity_id]:
             stat["mean"] = _time_weighted_average(valid_float_states, start, end)
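
The max/min pattern above first transposes valid_float_states with zip(*...), then itertools.islice(..., 1) keeps just the first row of the transpose, i.e. the tuple of float values, which max()/min() then reduce. A standalone sketch, assuming each entry is a (value, state) pair as the slicing implies (the sample data is invented):

    import itertools

    valid_float_states = [(21.5, "s1"), (23.0, "s2"), (22.1, "s3")]
    highest = max(*itertools.islice(zip(*valid_float_states, strict=False), 1))
    assert highest == 23.0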


@@ -262,7 +262,7 @@ class SlackNotificationService(BaseNotificationService):
         }
         results = await asyncio.gather(*tasks.values(), return_exceptions=True)
-        for target, result in zip(tasks, results):
+        for target, result in zip(tasks, results, strict=False):
             if isinstance(result, SlackApiError):
                 _LOGGER.error(
                     "There was a Slack API error while sending to %s: %r",


@@ -763,7 +763,8 @@ class StatisticsSensor(SensorEntity):
     def _stat_sum_differences(self) -> StateType:
         if len(self.states) >= 2:
             return sum(
-                abs(j - i) for i, j in zip(list(self.states), list(self.states)[1:])
+                abs(j - i)
+                for i, j in zip(list(self.states), list(self.states)[1:], strict=False)
             )
         return None
@@ -771,7 +772,7 @@
         if len(self.states) >= 2:
             return sum(
                 (j - i if j >= i else j - 0)
-                for i, j in zip(list(self.states), list(self.states)[1:])
+                for i, j in zip(list(self.states), list(self.states)[1:], strict=False)
             )
         return None
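
Zipping a list against its own one-shorter tail is a case where the lengths differ by design, so strict=False (the pre-3.10 behavior) is the only correct choice; strict=True would raise on every call. The sliding-pair idiom in isolation:

    states = [3.0, 1.0, 4.0, 1.5]
    total = sum(abs(j - i) for i, j in zip(states, states[1:], strict=False))
    assert total == 7.5  # |1-3| + |4-1| + |1.5-4|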


@@ -127,6 +127,7 @@ async def handle_info(
                 for registration in registrations.values()
             )
         ),
+        strict=False,
     ):
         for key, value in domain_data["info"].items():
             if asyncio.iscoroutine(value):


@@ -66,7 +66,7 @@ def reset_devices():
     This assumes the same sensor devices are present in the same order.
     """
     temper_devices = get_temper_devices()
-    for sensor, device in zip(TEMPER_SENSORS, temper_devices):
+    for sensor, device in zip(TEMPER_SENSORS, temper_devices, strict=False):
         sensor.set_temper_device(device)


@@ -376,7 +376,7 @@ class TensorFlowImageProcessor(ImageProcessingEntity):
             matches = {}
             total_matches = 0
-            for box, score, obj_class in zip(boxes, scores, classes):
+            for box, score, obj_class in zip(boxes, scores, classes, strict=False):
                 score = score * 100
                 boxes = box.tolist()


@@ -224,7 +224,7 @@ class BaseZhaFlow(ConfigEntryBaseFlow):
         default_port = vol.UNDEFINED
         if self._radio_mgr.device_path is not None:
-            for description, port in zip(list_of_ports, ports):
+            for description, port in zip(list_of_ports, ports, strict=False):
                 if port.device == self._radio_mgr.device_path:
                     default_port = description
                     break


@@ -996,7 +996,7 @@ class ZHADevice(LogMixin):
             )
         )
         res = await asyncio.gather(*(t[0] for t in tasks), return_exceptions=True)
-        for outcome, log_msg in zip(res, tasks):
+        for outcome, log_msg in zip(res, tasks, strict=False):
             if isinstance(outcome, Exception):
                 fmt = f"{log_msg[1]} failed: %s"
             else:


@@ -198,7 +198,7 @@ class Endpoint:
         gather = functools.partial(gather_with_limited_concurrency, max_concurrency)
         results = await gather(*tasks, return_exceptions=True)
-        for cluster_handler, outcome in zip(cluster_handlers, results):
+        for cluster_handler, outcome in zip(cluster_handlers, results, strict=False):
             if isinstance(outcome, Exception):
                 cluster_handler.debug(
                     "'%s' stage failed: %s", func_name, str(outcome), exc_info=outcome


@@ -292,7 +292,7 @@ def mean_int(*args):
 def mean_tuple(*args):
     """Return the mean values along the columns of the supplied values."""
-    return tuple(sum(x) / len(x) for x in zip(*args))
+    return tuple(sum(x) / len(x) for x in zip(*args, strict=False))
 def reduce_attribute(


@@ -1034,7 +1034,7 @@ async def async_binding_operation(
             )
         )
     res = await asyncio.gather(*(t[0] for t in bind_tasks), return_exceptions=True)
-    for outcome, log_msg in zip(res, bind_tasks):
+    for outcome, log_msg in zip(res, bind_tasks, strict=False):
         if isinstance(outcome, Exception):
             fmt = f"{log_msg[1]} failed: %s"
         else:


@@ -96,7 +96,7 @@ def value_matches_matcher(
     return all(
         redacted_field_val is None or redacted_field_val == zwave_value_field_val
         for redacted_field_val, zwave_value_field_val in zip(
-            astuple(matcher), astuple(zwave_value_id)
+            astuple(matcher), astuple(zwave_value_id), strict=False
         )
     )


@@ -85,7 +85,7 @@ def get_valid_responses_from_results(
     zwave_objects: Sequence[T], results: Sequence[Any]
 ) -> Generator[tuple[T, Any], None, None]:
     """Return valid responses from a list of results."""
-    for zwave_object, result in zip(zwave_objects, results):
+    for zwave_object, result in zip(zwave_objects, results, strict=False):
         if not isinstance(result, Exception):
             yield zwave_object, result
@@ -96,7 +96,9 @@ def raise_exceptions_from_results(
     """Raise list of exceptions from a list of results."""
     errors: Sequence[tuple[T, Any]]
     if errors := [
-        tup for tup in zip(zwave_objects, results) if isinstance(tup[1], Exception)
+        tup
+        for tup in zip(zwave_objects, results, strict=True)
+        if isinstance(tup[1], Exception)
     ]:
         lines = [
             *(
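
This is the one call site in the commit that opts into strict=True: the results sequence here is typically produced by asyncio.gather(..., return_exceptions=True) over the same objects, and gather returns exactly one result per awaitable, in order, so a length mismatch would signal a real bug worth raising on. A sketch of that invariant with placeholder coroutines (fetch and the sample objects are invented):

    import asyncio

    async def main() -> None:
        async def fetch(n: int) -> int:
            return n * 2

        objs = [1, 2, 3]
        results = await asyncio.gather(
            *(fetch(o) for o in objs), return_exceptions=True
        )
        # gather preserves order and length, so strict=True cannot fire here.
        for obj, result in zip(objs, results, strict=True):
            print(obj, result)

    asyncio.run(main())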


@@ -610,7 +610,9 @@ class DynamicServiceIntentHandler(IntentHandler):
         # Handle service calls in parallel, noting failures as they occur.
         failed_results: list[IntentResponseTarget] = []
-        for state, service_coro in zip(states, asyncio.as_completed(service_coros)):
+        for state, service_coro in zip(
+            states, asyncio.as_completed(service_coros), strict=False
+        ):
             target = IntentResponseTarget(
                 type=IntentResponseTargetType.ENTITY,
                 name=state.name,


@@ -709,7 +709,7 @@ async def async_get_all_descriptions(
         contents = await hass.async_add_executor_job(
             _load_services_files, hass, integrations
         )
-        loaded = dict(zip(domains_with_missing_services, contents))
+        loaded = dict(zip(domains_with_missing_services, contents, strict=False))
     # Load translations for all service domains
     translations = await translation.async_get_translations(
@@ -993,7 +993,7 @@ async def entity_service_call(
     )
     response_data: EntityServiceResponse = {}
-    for entity, result in zip(entities, results):
+    for entity, result in zip(entities, results, strict=False):
         if isinstance(result, BaseException):
             raise result from None
         response_data[entity.entity_id] = result


@@ -423,7 +423,7 @@ def _handle_mapping_tag(
     nodes = loader.construct_pairs(node)
     seen: dict = {}
-    for (key, _), (child_node, _) in zip(nodes, node.value):
+    for (key, _), (child_node, _) in zip(nodes, node.value, strict=False):
         line = child_node.start_mark.line
         try:


@@ -673,6 +673,7 @@ select = [
     "B026", # Star-arg unpacking after a keyword argument is strongly discouraged
     "B032", # Possible unintentional type annotation (using :). Did you mean to assign (using =)?
     "B904", # Use raise from to specify exception cause
+    "B905", # zip() without an explicit strict= parameter
     "C", # complexity
     "COM818", # Trailing comma on bare tuple prohibited
     "D", # docstrings

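With B905 in the select list, Ruff flags any zip() call that leaves strict= implicit. An illustrative trio (names and values are invented):

    names = ["a", "b"]
    values = [1, 2, 3]
    zipped = zip(names, values)                # B905: zip() without an explicit strict=
    zipped = zip(names, values, strict=False)  # ok: truncating behavior, now explicit
    zipped = zip(names, values, strict=True)   # ok: raises ValueError once consumed past the shorter input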

@@ -651,7 +651,7 @@ async def test_probability_updates(hass: HomeAssistant) -> None:
     prob_given_false = [0.7, 0.4, 0.2]
     prior = 0.5
-    for p_t, p_f in zip(prob_given_true, prob_given_false):
+    for p_t, p_f in zip(prob_given_true, prob_given_false, strict=False):
         prior = bayesian.update_probability(prior, p_t, p_f)
     assert round(abs(0.720000 - prior), 7) == 0
@@ -660,7 +660,7 @@ async def test_probability_updates(hass: HomeAssistant) -> None:
     prob_given_false = [0.6, 0.4, 0.2]
     prior = 0.7
-    for p_t, p_f in zip(prob_given_true, prob_given_false):
+    for p_t, p_f in zip(prob_given_true, prob_given_false, strict=False):
         prior = bayesian.update_probability(prior, p_t, p_f)
     assert round(abs(0.9130434782608695 - prior), 7) == 0


@@ -74,7 +74,7 @@ async def setup_tests(hass, config, times, values, expected_state):
     # Testing a energy sensor with non-monotonic intervals and values
     base = dt_util.utcnow()
     with freeze_time(base) as freezer:
-        for time, value in zip(times, values):
+        for time, value in zip(times, values, strict=False):
             freezer.move_to(base + timedelta(seconds=time))
             hass.states.async_set(entity_id, value, {}, force_update=True)
             await hass.async_block_till_done()
@@ -175,7 +175,7 @@ async def test_data_moving_average_for_discrete_sensor(hass: HomeAssistant) -> None:
     base = dt_util.utcnow()
     with freeze_time(base) as freezer:
-        for time, value in zip(times, temperature_values):
+        for time, value in zip(times, temperature_values, strict=False):
             now = base + timedelta(seconds=time)
             freezer.move_to(now)
             hass.states.async_set(entity_id, value, {}, force_update=True)
@@ -219,7 +219,7 @@ async def test_data_moving_average_for_irregular_times(hass: HomeAssistant) -> None:
     base = dt_util.utcnow()
     with freeze_time(base) as freezer:
-        for time, value in zip(times, temperature_values):
+        for time, value in zip(times, temperature_values, strict=False):
             now = base + timedelta(seconds=time)
             freezer.move_to(now)
             hass.states.async_set(entity_id, value, {}, force_update=True)
@@ -257,7 +257,7 @@ async def test_double_signal_after_delay(hass: HomeAssistant) -> None:
     base = dt_util.utcnow()
     previous = 0
     with freeze_time(base) as freezer:
-        for time, value in zip(times, temperature_values):
+        for time, value in zip(times, temperature_values, strict=False):
             now = base + timedelta(seconds=time)
             freezer.move_to(now)
             hass.states.async_set(entity_id, value, {}, force_update=True)


@@ -249,7 +249,7 @@ async def test_resolve_media_path(hass: HomeAssistant, dms_device_mock: Mock) -> None:
     res_mime: Final = "audio/mpeg"
     search_directory_result = []
-    for ob_id, ob_title in zip(object_ids, path.split("/")):
+    for ob_id, ob_title in zip(object_ids, path.split("/"), strict=False):
         didl_item = didl_lite.Item(
             id=ob_id,
             restricted="false",
@@ -274,7 +274,9 @@ async def test_resolve_media_path(hass: HomeAssistant, dms_device_mock: Mock) -> None:
             metadata_filter=["id", "upnp:class", "dc:title"],
             requested_count=1,
         )
-        for parent_id, title in zip(["0"] + object_ids[:-1], path.split("/"))
+        for parent_id, title in zip(
+            ["0"] + object_ids[:-1], path.split("/"), strict=False
+        )
     ]
     assert result.url == res_abs_url
     assert result.mime_type == res_mime
@@ -290,7 +292,9 @@ async def test_resolve_media_path(hass: HomeAssistant, dms_device_mock: Mock) -> None:
             metadata_filter=["id", "upnp:class", "dc:title"],
             requested_count=1,
         )
-        for parent_id, title in zip(["0"] + object_ids[:-1], path.split("/"))
+        for parent_id, title in zip(
+            ["0"] + object_ids[:-1], path.split("/"), strict=False
+        )
     ]
     assert result.url == res_abs_url
     assert result.mime_type == res_mime
@@ -305,7 +309,7 @@ async def test_resolve_path_browsed(hass: HomeAssistant, dms_device_mock: Mock) -> None:
     # Setup expected calls
     search_directory_result = []
-    for ob_id, ob_title in zip(object_ids, path.split("/")):
+    for ob_id, ob_title in zip(object_ids, path.split("/"), strict=False):
         didl_item = didl_lite.Item(
             id=ob_id,
             restricted="false",
@@ -346,7 +350,9 @@ async def test_resolve_path_browsed(hass: HomeAssistant, dms_device_mock: Mock) -> None:
             metadata_filter=["id", "upnp:class", "dc:title"],
             requested_count=1,
         )
-        for parent_id, title in zip(["0"] + object_ids[:-1], path.split("/"))
+        for parent_id, title in zip(
+            ["0"] + object_ids[:-1], path.split("/"), strict=False
+        )
     ]
     assert result.didl_metadata.id == object_ids[-1]
     # 2nd level should also be browsed
@@ -608,7 +614,7 @@ async def test_browse_media_object(hass: HomeAssistant, dms_device_mock: Mock) -> None:
     assert not result.can_play
     assert result.can_expand
     assert result.children
-    for child, title in zip(result.children, child_titles):
+    for child, title in zip(result.children, child_titles, strict=False):
         assert isinstance(child, BrowseMediaSource)
         assert child.identifier == f"{MOCK_SOURCE_ID}/:{title}_id"
         assert child.title == title
@@ -746,7 +752,7 @@ async def test_browse_media_search(hass: HomeAssistant, dms_device_mock: Mock) -> None:
     assert result.title == "Search results"
     assert result.children
-    for obj, child in zip(object_details, result.children):
+    for obj, child in zip(object_details, result.children, strict=False):
         assert isinstance(child, BrowseMediaSource)
         assert child.identifier == f"{MOCK_SOURCE_ID}/:{obj[0]}"
         assert child.title == obj[1]


@@ -155,6 +155,7 @@ async def test_sync_request(
     for dev, demo in zip(
         sorted(devices, key=lambda d: d["id"]),
         sorted(DEMO_DEVICES, key=lambda d: d["id"]),
+        strict=False,
     ):
         assert dev["name"] == demo["name"]
         assert set(dev["traits"]) == set(demo["traits"])


@@ -85,7 +85,7 @@ async def test_sensors2(
     entity_ids = config["sensor"]["entities"]
-    for entity_id, value in dict(zip(entity_ids, VALUES)).items():
+    for entity_id, value in dict(zip(entity_ids, VALUES, strict=False)).items():
         hass.states.async_set(
             entity_id,
             value,
@@ -135,7 +135,7 @@ async def test_sensors_attributes_defined(hass: HomeAssistant) -> None:
     entity_ids = config["sensor"]["entities"]
-    for entity_id, value in dict(zip(entity_ids, VALUES)).items():
+    for entity_id, value in dict(zip(entity_ids, VALUES, strict=False)).items():
         hass.states.async_set(
             entity_id,
             value,
@@ -269,7 +269,7 @@ async def test_sensor_incorrect_state_with_ignore_non_numeric(
     entity_ids = config["sensor"]["entities"]
     # Check that the final sensor value ignores the non numeric input
-    for entity_id, value in dict(zip(entity_ids, VALUES_ERROR)).items():
+    for entity_id, value in dict(zip(entity_ids, VALUES_ERROR, strict=False)).items():
         hass.states.async_set(entity_id, value)
         await hass.async_block_till_done()
@@ -280,7 +280,7 @@ async def test_sensor_incorrect_state_with_ignore_non_numeric(
     )
     # Check that the final sensor value with all numeric inputs
-    for entity_id, value in dict(zip(entity_ids, VALUES)).items():
+    for entity_id, value in dict(zip(entity_ids, VALUES, strict=False)).items():
         hass.states.async_set(entity_id, value)
         await hass.async_block_till_done()
@@ -310,7 +310,7 @@ async def test_sensor_incorrect_state_with_not_ignore_non_numeric(
     entity_ids = config["sensor"]["entities"]
     # Check that the final sensor value is unavailable if a non numeric input exists
-    for entity_id, value in dict(zip(entity_ids, VALUES_ERROR)).items():
+    for entity_id, value in dict(zip(entity_ids, VALUES_ERROR, strict=False)).items():
         hass.states.async_set(entity_id, value)
         await hass.async_block_till_done()
@@ -319,7 +319,7 @@ async def test_sensor_incorrect_state_with_not_ignore_non_numeric(
     assert "Unable to use state. Only numerical states are supported" in caplog.text
     # Check that the final sensor value is correct with all numeric inputs
-    for entity_id, value in dict(zip(entity_ids, VALUES)).items():
+    for entity_id, value in dict(zip(entity_ids, VALUES, strict=False)).items():
         hass.states.async_set(entity_id, value)
         await hass.async_block_till_done()
@@ -346,7 +346,7 @@ async def test_sensor_require_all_states(hass: HomeAssistant) -> None:
     entity_ids = config["sensor"]["entities"]
-    for entity_id, value in dict(zip(entity_ids, VALUES_ERROR)).items():
+    for entity_id, value in dict(zip(entity_ids, VALUES_ERROR, strict=False)).items():
         hass.states.async_set(entity_id, value)
         await hass.async_block_till_done()
@@ -755,7 +755,7 @@ async def test_last_sensor(hass: HomeAssistant) -> None:
     entity_ids = config["sensor"]["entities"]
-    for entity_id, value in dict(zip(entity_ids, VALUES)).items():
+    for entity_id, value in dict(zip(entity_ids, VALUES, strict=False)).items():
         hass.states.async_set(entity_id, value)
         await hass.async_block_till_done()
     state = hass.states.get("sensor.test_last")


@@ -123,7 +123,13 @@ ENTITY_ID_STATES = {
 @pytest.mark.parametrize("appliance", [TEST_HC_APP], indirect=True)
 @pytest.mark.parametrize(
     ("states", "event_run"),
-    list(zip(list(zip(*ENTITY_ID_STATES.values())), PROGRAM_SEQUENCE_EVENTS)),
+    list(
+        zip(
+            list(zip(*ENTITY_ID_STATES.values(), strict=False)),
+            PROGRAM_SEQUENCE_EVENTS,
+            strict=False,
+        )
+    ),
 )
 async def test_event_sensors(
     appliance: Mock,
@@ -150,7 +156,7 @@ async def test_event_sensors(
     assert config_entry.state == ConfigEntryState.LOADED
     appliance.status.update(event_run)
-    for entity_id, state in zip(entity_ids, states):
+    for entity_id, state in zip(entity_ids, states, strict=False):
         await async_update_entity(hass, entity_id)
         await hass.async_block_till_done()
         assert hass.states.is_state(entity_id, state)
@@ -197,7 +203,7 @@ async def test_remaining_prog_time_edge_cases(
     for (
         event,
         expected_state,
-    ) in zip(PROGRAM_SEQUENCE_EDGE_CASE, ENTITY_ID_EDGE_CASE_STATES):
+    ) in zip(PROGRAM_SEQUENCE_EDGE_CASE, ENTITY_ID_EDGE_CASE_STATES, strict=False):
         appliance.status.update(event)
         await async_update_entity(hass, entity_id)
         await hass.async_block_till_done()


@@ -190,6 +190,7 @@ async def test_ip_ban_manager_never_started(
             BANNED_IPS_WITH_SUPERVISOR,
             [1, 1, 0],
             [HTTPStatus.FORBIDDEN, HTTPStatus.FORBIDDEN, HTTPStatus.UNAUTHORIZED],
+            strict=False,
         )
     ),
 )


@@ -51,7 +51,7 @@ async def test_default_name_sensor(hass: HomeAssistant) -> None:
     entity_ids = config["sensor"]["entity_ids"]
-    for entity_id, value in dict(zip(entity_ids, VALUES)).items():
+    for entity_id, value in dict(zip(entity_ids, VALUES, strict=False)).items():
         hass.states.async_set(entity_id, value)
         await hass.async_block_till_done()
@@ -80,7 +80,7 @@ async def test_min_sensor(
     entity_ids = config["sensor"]["entity_ids"]
-    for entity_id, value in dict(zip(entity_ids, VALUES)).items():
+    for entity_id, value in dict(zip(entity_ids, VALUES, strict=False)).items():
         hass.states.async_set(entity_id, value)
         await hass.async_block_till_done()
@@ -110,7 +110,7 @@ async def test_max_sensor(hass: HomeAssistant) -> None:
     entity_ids = config["sensor"]["entity_ids"]
-    for entity_id, value in dict(zip(entity_ids, VALUES)).items():
+    for entity_id, value in dict(zip(entity_ids, VALUES, strict=False)).items():
         hass.states.async_set(entity_id, value)
         await hass.async_block_till_done()
@@ -137,7 +137,7 @@ async def test_mean_sensor(hass: HomeAssistant) -> None:
     entity_ids = config["sensor"]["entity_ids"]
-    for entity_id, value in dict(zip(entity_ids, VALUES)).items():
+    for entity_id, value in dict(zip(entity_ids, VALUES, strict=False)).items():
         hass.states.async_set(entity_id, value)
         await hass.async_block_till_done()
@@ -164,7 +164,7 @@ async def test_mean_1_digit_sensor(hass: HomeAssistant) -> None:
     entity_ids = config["sensor"]["entity_ids"]
-    for entity_id, value in dict(zip(entity_ids, VALUES)).items():
+    for entity_id, value in dict(zip(entity_ids, VALUES, strict=False)).items():
         hass.states.async_set(entity_id, value)
         await hass.async_block_till_done()
@@ -190,7 +190,7 @@ async def test_mean_4_digit_sensor(hass: HomeAssistant) -> None:
     entity_ids = config["sensor"]["entity_ids"]
-    for entity_id, value in dict(zip(entity_ids, VALUES)).items():
+    for entity_id, value in dict(zip(entity_ids, VALUES, strict=False)).items():
         hass.states.async_set(entity_id, value)
         await hass.async_block_till_done()
@@ -215,7 +215,7 @@ async def test_median_sensor(hass: HomeAssistant) -> None:
     entity_ids = config["sensor"]["entity_ids"]
-    for entity_id, value in dict(zip(entity_ids, VALUES)).items():
+    for entity_id, value in dict(zip(entity_ids, VALUES, strict=False)).items():
         hass.states.async_set(entity_id, value)
         await hass.async_block_till_done()
@@ -242,7 +242,7 @@ async def test_range_4_digit_sensor(hass: HomeAssistant) -> None:
     entity_ids = config["sensor"]["entity_ids"]
-    for entity_id, value in dict(zip(entity_ids, VALUES)).items():
+    for entity_id, value in dict(zip(entity_ids, VALUES, strict=False)).items():
         hass.states.async_set(entity_id, value)
         await hass.async_block_till_done()
@@ -268,7 +268,7 @@ async def test_range_1_digit_sensor(hass: HomeAssistant) -> None:
     entity_ids = config["sensor"]["entity_ids"]
-    for entity_id, value in dict(zip(entity_ids, VALUES)).items():
+    for entity_id, value in dict(zip(entity_ids, VALUES, strict=False)).items():
         hass.states.async_set(entity_id, value)
         await hass.async_block_till_done()
@@ -394,7 +394,7 @@ async def test_last_sensor(hass: HomeAssistant) -> None:
     entity_ids = config["sensor"]["entity_ids"]
-    for entity_id, value in dict(zip(entity_ids, VALUES)).items():
+    for entity_id, value in dict(zip(entity_ids, VALUES, strict=False)).items():
         hass.states.async_set(entity_id, value)
         await hass.async_block_till_done()
     state = hass.states.get("sensor.test_last")
@@ -462,7 +462,7 @@ async def test_sensor_incorrect_state(
     entity_ids = config["sensor"]["entity_ids"]
-    for entity_id, value in dict(zip(entity_ids, VALUES_ERROR)).items():
+    for entity_id, value in dict(zip(entity_ids, VALUES_ERROR, strict=False)).items():
         hass.states.async_set(entity_id, value)
         await hass.async_block_till_done()
@@ -491,7 +491,7 @@ async def test_sum_sensor(
     entity_ids = config["sensor"]["entity_ids"]
-    for entity_id, value in dict(zip(entity_ids, VALUES)).items():
+    for entity_id, value in dict(zip(entity_ids, VALUES, strict=False)).items():
         hass.states.async_set(entity_id, value)
         await hass.async_block_till_done()
@@ -521,7 +521,7 @@ async def test_sum_sensor_no_state(hass: HomeAssistant) -> None:
     entity_ids = config["sensor"]["entity_ids"]
-    for entity_id, value in dict(zip(entity_ids, VALUES_ERROR)).items():
+    for entity_id, value in dict(zip(entity_ids, VALUES_ERROR, strict=False)).items():
         hass.states.async_set(entity_id, value)
         await hass.async_block_till_done()


@@ -203,7 +203,7 @@ async def async_test_rejoin(hass, zigpy_device, clusters, report_counts, ep_id=1):
     zha_gateway = get_zha_gateway(hass)
     await zha_gateway.async_device_initialized(zigpy_device)
     await hass.async_block_till_done()
-    for cluster, reports in zip(clusters, report_counts):
+    for cluster, reports in zip(clusters, report_counts, strict=False):
         assert cluster.bind.call_count == 1
         assert cluster.bind.await_count == 1
         if reports:


@@ -420,7 +420,7 @@ def _test_single_input_cluster_device_class(probe_mock):
         (Platform.BINARY_SENSOR, ias_ch),
         (Platform.SENSOR, analog_ch),
     )
-    for call, details in zip(probe_mock.call_args_list, probes):
+    for call, details in zip(probe_mock.call_args_list, probes, strict=False):
         platform, ch = details
         assert call[0][0] == platform
         assert call[0][1] == ch


@@ -803,7 +803,7 @@ async def test_saving_and_loading(
     # Ensure same order
     for orig, loaded in zip(
-        hass.config_entries.async_entries(), manager.async_entries()
+        hass.config_entries.async_entries(), manager.async_entries(), strict=False
     ):
         assert orig.as_dict() == loaded.as_dict()