Update Ruff to v0.0.247 (#88210)
* Update Ruff to v0.0.247
* Fix newly found violations

pull/88215/head
parent a1efd59760
commit d3ed8de232
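Most of the hunks below fall into two patterns flagged by the newer Ruff release. The first is quoted type annotations such as `self: "ESPHomeClient"` or `bridge: "HueBridge"`, where the quotes are redundant and get dropped; this looks like Ruff's pyupgrade-derived "remove quotes from type annotation" check, though the commit does not name the rule code. A minimal sketch of that pattern, using a made-up class name rather than code from this commit:

from __future__ import annotations


class FakeClient:
    """Hypothetical class standing in for ESPHomeClient / HueBridge in the diff."""

    # Before the Ruff bump the annotation was written as a string:
    #     def ping(self: "FakeClient") -> bool:
    # With postponed evaluation of annotations the quotes are redundant:
    def ping(self: FakeClient) -> bool:
        return True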
@@ -1,6 +1,6 @@
 repos:
 - repo: https://github.com/charliermarsh/ruff-pre-commit
-rev: v0.0.241
+rev: v0.0.247
 hooks:
 - id: ruff
 args:
@@ -61,7 +61,7 @@ def verify_connected(func: _WrapFuncType) -> _WrapFuncType:
 """Define a wrapper throw BleakError if not connected."""

 async def _async_wrap_bluetooth_connected_operation(
-self: "ESPHomeClient", *args: Any, **kwargs: Any
+self: ESPHomeClient, *args: Any, **kwargs: Any
 ) -> Any:
 disconnected_event = (
 self._disconnected_event  # pylint: disable=protected-access

@@ -94,7 +94,7 @@ def api_error_as_bleak_error(func: _WrapFuncType) -> _WrapFuncType:
 """Define a wrapper throw esphome api errors as BleakErrors."""

 async def _async_wrap_bluetooth_operation(
-self: "ESPHomeClient", *args: Any, **kwargs: Any
+self: ESPHomeClient, *args: Any, **kwargs: Any
 ) -> Any:
 try:
 return await func(self, *args, **kwargs)
@@ -119,7 +119,7 @@ def _get_hue_event_from_device_id(hass, device_id):


 async def async_validate_trigger_config(
-bridge: "HueBridge", device_entry: DeviceEntry, config: ConfigType
+bridge: HueBridge, device_entry: DeviceEntry, config: ConfigType
 ) -> ConfigType:
 """Validate config."""
 config = TRIGGER_SCHEMA(config)

@@ -144,7 +144,7 @@ async def async_validate_trigger_config(


 async def async_attach_trigger(
-bridge: "HueBridge",
+bridge: HueBridge,
 device_entry: DeviceEntry,
 config: ConfigType,
 action: TriggerActionType,
@@ -62,7 +62,7 @@ DEVICE_SPECIFIC_EVENT_TYPES = {


 async def async_validate_trigger_config(
-bridge: "HueBridge",
+bridge: HueBridge,
 device_entry: DeviceEntry,
 config: ConfigType,
 ) -> ConfigType:

@@ -71,7 +71,7 @@ async def async_validate_trigger_config(


 async def async_attach_trigger(
-bridge: "HueBridge",
+bridge: HueBridge,
 device_entry: DeviceEntry,
 config: ConfigType,
 action: TriggerActionType,
@@ -238,7 +238,7 @@ def _parse_custom_effects(effects_config) -> dict[str, dict[str, Any]]:
 def _async_cmd(func):
 """Define a wrapper to catch exceptions from the bulb."""

-async def _async_wrap(self: "YeelightGenericLight", *args, **kwargs):
+async def _async_wrap(self: YeelightGenericLight, *args, **kwargs):
 for attempts in range(2):
 try:
 _LOGGER.debug("Calling %s with %s %s", func, args, kwargs)
@@ -14,5 +14,5 @@ pycodestyle==2.10.0
 pydocstyle==6.2.3
 pyflakes==3.0.1
 pyupgrade==3.3.1
-ruff==0.0.241
+ruff==0.0.247
 yamllint==1.28.0
@@ -53,7 +53,7 @@ from .conftest import (
 ],
 )
 @pytest.mark.parametrize(
-"response,errors",
+("response", "errors"),
 [
 (AsyncMock(side_effect=AirVisualError), {"base": "unknown"}),
 (AsyncMock(side_effect=InvalidKeyError), {CONF_API_KEY: "invalid_api_key"}),

@@ -2202,7 +2202,7 @@ async def test_trigger_condition_explicit_id(hass: HomeAssistant, calls) -> None
 ),
 )
 @pytest.mark.parametrize(
-"script_mode,script_warning_msg",
+("script_mode", "script_warning_msg"),
 (
 (SCRIPT_MODE_PARALLEL, "script1: Maximum number of runs exceeded"),
 (SCRIPT_MODE_QUEUED, "script1: Disallowed recursion detected"),
@@ -138,10 +138,10 @@ async def test_cost_sensor_attributes(
 ("initial_energy", "initial_cost"), [(0, "0.0"), (None, "unknown")]
 )
 @pytest.mark.parametrize(
-"price_entity,fixed_price", [("sensor.energy_price", None), (None, 1)]
+("price_entity", "fixed_price"), [("sensor.energy_price", None), (None, 1)]
 )
 @pytest.mark.parametrize(
-"usage_sensor_entity_id,cost_sensor_entity_id,flow_type",
+("usage_sensor_entity_id", "cost_sensor_entity_id", "flow_type"),
 [
 ("sensor.energy_consumption", "sensor.energy_consumption_cost", "flow_from"),
 (

@@ -342,10 +342,10 @@ async def test_cost_sensor_price_entity_total_increasing(
 ("initial_energy", "initial_cost"), [(0, "0.0"), (None, "unknown")]
 )
 @pytest.mark.parametrize(
-"price_entity,fixed_price", [("sensor.energy_price", None), (None, 1)]
+("price_entity", "fixed_price"), [("sensor.energy_price", None), (None, 1)]
 )
 @pytest.mark.parametrize(
-"usage_sensor_entity_id,cost_sensor_entity_id,flow_type",
+("usage_sensor_entity_id", "cost_sensor_entity_id", "flow_type"),
 [
 ("sensor.energy_consumption", "sensor.energy_consumption_cost", "flow_from"),
 (

@@ -550,10 +550,10 @@ async def test_cost_sensor_price_entity_total(
 ("initial_energy", "initial_cost"), [(0, "0.0"), (None, "unknown")]
 )
 @pytest.mark.parametrize(
-"price_entity,fixed_price", [("sensor.energy_price", None), (None, 1)]
+("price_entity", "fixed_price"), [("sensor.energy_price", None), (None, 1)]
 )
 @pytest.mark.parametrize(
-"usage_sensor_entity_id,cost_sensor_entity_id,flow_type",
+("usage_sensor_entity_id", "cost_sensor_entity_id", "flow_type"),
 [
 ("sensor.energy_consumption", "sensor.energy_consumption_cost", "flow_from"),
 (
@@ -116,7 +116,7 @@ async def test_config_flow(
 ("hide_members", "hidden_by"), ((False, None), (True, "integration"))
 )
 @pytest.mark.parametrize(
-"group_type,extra_input",
+("group_type", "extra_input"),
 (
 ("binary_sensor", {"all": False}),
 ("cover", {}),

@@ -371,7 +371,7 @@ async def test_all_options(
 ),
 )
 @pytest.mark.parametrize(
-"group_type,extra_input",
+("group_type", "extra_input"),
 (
 ("binary_sensor", {"all": False}),
 ("cover", {}),

@@ -1508,7 +1508,7 @@ async def test_setup_and_remove_config_entry(
 ),
 )
 @pytest.mark.parametrize(
-"group_type,extra_options",
+("group_type", "extra_options"),
 (
 ("binary_sensor", {"all": False}),
 ("cover", {}),
@@ -291,7 +291,7 @@ async def test_service_update_devices(hass: HomeAssistant, create_hdmi_network)
 ),
 ],
 )
-@pytest.mark.parametrize("direction,key", [("up", 65), ("down", 66)])
+@pytest.mark.parametrize(("direction", "key"), [("up", 65), ("down", 66)])
 async def test_service_volume_x_times(
 hass: HomeAssistant, create_hdmi_network, count, calls, direction, key
 ) -> None:
@@ -481,7 +481,7 @@ SHABBAT_TEST_IDS = [

 @pytest.mark.parametrize("language", ["english", "hebrew"])
 @pytest.mark.parametrize(
-[
+(
 "now",
 "candle_lighting",
 "havdalah",

@@ -490,7 +490,7 @@ SHABBAT_TEST_IDS = [
 "latitude",
 "longitude",
 "result",
-],
+),
 SHABBAT_PARAMS,
 ids=SHABBAT_TEST_IDS,
 )
@@ -117,7 +117,7 @@ async def test_config_binary_sensor(hass: HomeAssistant, mock_modbus) -> None:
 ],
 )
 @pytest.mark.parametrize(
-"register_words,do_exception,expected",
+("register_words", "do_exception", "expected"),
 [
 (
 [True] * 8,

@@ -188,7 +188,7 @@ async def test_all_binary_sensor(hass: HomeAssistant, expected, mock_do_cycle) -
 ],
 )
 @pytest.mark.parametrize(
-"register_words,do_exception,start_expect,end_expect",
+("register_words", "do_exception", "start_expect", "end_expect"),
 [
 (
 [False * 16],

@@ -347,7 +347,7 @@ async def test_config_slave_binary_sensor(hass: HomeAssistant, mock_modbus) -> N
 ],
 )
 @pytest.mark.parametrize(
-"config_addon,register_words,expected, slaves",
+("config_addon", "register_words", "expected", "slaves"),
 [
 (
 {CONF_SLAVE_COUNT: 1, CONF_UNIQUE_ID: SLAVE_UNIQUE_ID},
@@ -185,7 +185,7 @@ async def test_config_hvac_onoff_register(hass: HomeAssistant, mock_modbus) -> N
 ],
 )
 @pytest.mark.parametrize(
-"register_words,expected",
+("register_words", "expected"),
 [
 (
 [0x00, 0x00],

@@ -485,7 +485,7 @@ async def test_restore_state_climate(
 ],
 )
 @pytest.mark.parametrize(
-"register_words,do_exception,start_expect,end_expect",
+("register_words", "do_exception", "start_expect", "end_expect"),
 [
 (
 [0x8000],

@@ -525,7 +525,7 @@ async def test_lazy_error_climate(
 ],
 )
 @pytest.mark.parametrize(
-"config_addon,register_words",
+("config_addon", "register_words"),
 [
 (
 {
@@ -84,7 +84,7 @@ async def test_config_cover(hass: HomeAssistant, mock_modbus) -> None:
 ],
 )
 @pytest.mark.parametrize(
-"register_words,expected",
+("register_words", "expected"),
 [
 (
 [0x00],

@@ -131,7 +131,7 @@ async def test_coil_cover(hass: HomeAssistant, expected, mock_do_cycle) -> None:
 ],
 )
 @pytest.mark.parametrize(
-"register_words,do_exception, start_expect,end_expect",
+("register_words", "do_exception", "start_expect", "end_expect"),
 [
 (
 [0x00],

@@ -168,7 +168,7 @@ async def test_lazy_error_cover(
 ],
 )
 @pytest.mark.parametrize(
-"register_words,expected",
+("register_words", "expected"),
 [
 (
 [0x00],
@@ -154,7 +154,7 @@ async def test_config_fan(hass: HomeAssistant, mock_modbus) -> None:
 ],
 )
 @pytest.mark.parametrize(
-"register_words,do_exception,config_addon,expected",
+("register_words", "do_exception", "config_addon", "expected"),
 [
 (
 [0x00],

@@ -603,7 +603,7 @@ async def mock_modbus_read_pymodbus_fixture(
 ],
 )
 @pytest.mark.parametrize(
-"do_return,do_exception,do_expect_state,do_expect_value",
+("do_return", "do_exception", "do_expect_state", "do_expect_value"),
 [
 [ReadResult([1]), None, STATE_ON, "1"],
 [IllegalFunctionRequest(0x99), None, STATE_UNAVAILABLE, STATE_UNAVAILABLE],
@@ -154,7 +154,7 @@ async def test_config_light(hass: HomeAssistant, mock_modbus) -> None:
 ],
 )
 @pytest.mark.parametrize(
-"register_words,do_exception,config_addon,expected",
+("register_words", "do_exception", "config_addon", "expected"),
 [
 (
 [0x00],
@@ -153,7 +153,7 @@ async def test_config_sensor(hass: HomeAssistant, mock_modbus) -> None:

 @pytest.mark.parametrize("check_config_loaded", [False])
 @pytest.mark.parametrize(
-"do_config,error_message",
+("do_config", "error_message"),
 [
 (
 {

@@ -270,7 +270,7 @@ async def test_config_wrong_struct_sensor(
 ],
 )
 @pytest.mark.parametrize(
-"config_addon,register_words,do_exception,expected",
+("config_addon", "register_words", "do_exception", "expected"),
 [
 (
 {

@@ -612,7 +612,7 @@ async def test_all_sensor(hass: HomeAssistant, mock_do_cycle, expected) -> None:
 ],
 )
 @pytest.mark.parametrize(
-"config_addon,register_words,do_exception,expected",
+("config_addon", "register_words", "do_exception", "expected"),
 [
 (
 {

@@ -703,7 +703,7 @@ async def test_slave_sensor(hass: HomeAssistant, mock_do_cycle, expected) -> Non
 ],
 )
 @pytest.mark.parametrize(
-"config_addon,register_words",
+("config_addon", "register_words"),
 [
 (
 {

@@ -740,7 +740,7 @@ async def test_wrong_unpack(hass: HomeAssistant, mock_do_cycle) -> None:
 ],
 )
 @pytest.mark.parametrize(
-"register_words,do_exception,start_expect,end_expect",
+("register_words", "do_exception", "start_expect", "end_expect"),
 [
 (
 [0x8000],

@@ -779,7 +779,7 @@ async def test_lazy_error_sensor(
 ],
 )
 @pytest.mark.parametrize(
-"config_addon,register_words,expected",
+("config_addon", "register_words", "expected"),
 [
 (
 {

@@ -833,7 +833,7 @@ async def test_struct_sensor(hass: HomeAssistant, mock_do_cycle, expected) -> No
 ],
 )
 @pytest.mark.parametrize(
-"config_addon,register_words,expected",
+("config_addon", "register_words", "expected"),
 [
 (
 {
@@ -168,7 +168,7 @@ async def test_config_switch(hass: HomeAssistant, mock_modbus) -> None:
 ],
 )
 @pytest.mark.parametrize(
-"register_words,do_exception,config_addon,expected",
+("register_words", "do_exception", "config_addon", "expected"),
 [
 (
 [0x00],

@@ -226,7 +226,7 @@ async def test_all_switch(hass: HomeAssistant, mock_do_cycle, expected) -> None:
 ],
 )
 @pytest.mark.parametrize(
-"register_words,do_exception,start_expect,end_expect",
+("register_words", "do_exception", "start_expect", "end_expect"),
 [
 (
 [0x00],
@@ -311,7 +311,7 @@ async def test_reauth(hass: HomeAssistant) -> None:
 ],
 )
 @pytest.mark.parametrize(
-"nanoleaf_conf_file, remove_config",
+("nanoleaf_conf_file", "remove_config"),
 [
 ({TEST_DEVICE_ID: {"token": TEST_TOKEN}}, True),
 ({TEST_HOST: {"token": TEST_TOKEN}}, True),

@@ -25,7 +25,7 @@ TEST_LONGITUDE = -0.2416796
 ],
 )
 @pytest.mark.parametrize(
-"get_nearby_sensors_mock,get_nearby_sensors_errors",
+("get_nearby_sensors_mock", "get_nearby_sensors_errors"),
 [
 (AsyncMock(return_value=[]), {"base": "no_sensors_near_coordinates"}),
 (AsyncMock(side_effect=Exception), {"base": "unknown"}),
@@ -491,7 +491,7 @@ def test_statistics_duplicated(hass_recorder, caplog):

 @pytest.mark.parametrize("last_reset_str", ("2022-01-01T00:00:00+02:00", None))
 @pytest.mark.parametrize(
-"source, statistic_id, import_fn",
+("source", "statistic_id", "import_fn"),
 (
 ("test", "test:total_energy_import", async_add_external_statistics),
 ("recorder", "sensor.total_energy_import", async_import_statistics),

@@ -1546,10 +1546,10 @@ async def test_validate_db_schema_fix_utf8_issue(
 @pytest.mark.parametrize("enable_statistics_table_validation", [True])
 @pytest.mark.parametrize("db_engine", ("mysql", "postgresql"))
 @pytest.mark.parametrize(
-"table, replace_index", (("statistics", 0), ("statistics_short_term", 1))
+("table", "replace_index"), (("statistics", 0), ("statistics_short_term", 1))
 )
 @pytest.mark.parametrize(
-"column, value",
+("column", "value"),
 (("max", 1.0), ("mean", 1.0), ("min", 1.0), ("state", 1.0), ("sum", 1.0)),
 )
 async def test_validate_db_schema_fix_float_issue(

@@ -1616,7 +1616,7 @@ async def test_validate_db_schema_fix_float_issue(

 @pytest.mark.parametrize("enable_statistics_table_validation", [True])
 @pytest.mark.parametrize(
-"db_engine, modification",
+("db_engine", "modification"),
 (
 ("mysql", ["last_reset DATETIME(6)", "start DATETIME(6)"]),
 (

@@ -1629,10 +1629,10 @@ async def test_validate_db_schema_fix_float_issue(
 ),
 )
 @pytest.mark.parametrize(
-"table, replace_index", (("statistics", 0), ("statistics_short_term", 1))
+("table", "replace_index"), (("statistics", 0), ("statistics_short_term", 1))
 )
 @pytest.mark.parametrize(
-"column, value",
+("column", "value"),
 (
 ("last_reset", "2020-10-06T00:00:00+00:00"),
 ("start", "2020-10-06T00:00:00+00:00"),
@@ -1641,7 +1641,7 @@ async def test_device_classes_with_invalid_unit_of_measurement(
 ],
 )
 @pytest.mark.parametrize(
-"native_value,expected",
+("native_value", "expected"),
 [
 ("abc", "abc"),
 ("13.7.1", "13.7.1"),

@@ -1687,7 +1687,7 @@ async def test_non_numeric_validation_warn(
 ("device_class", "state_class", "unit", "precision"), ((None, None, None, 1),)
 )
 @pytest.mark.parametrize(
-"native_value,expected",
+("native_value", "expected"),
 [
 ("abc", "abc"),
 ("13.7.1", "13.7.1"),

@@ -1737,7 +1737,7 @@ async def test_non_numeric_validation_raise(
 ],
 )
 @pytest.mark.parametrize(
-"native_value,expected",
+("native_value", "expected"),
 [
 (13, "13"),
 (17.50, "17.5"),
@@ -429,7 +429,15 @@ def test_compile_hourly_statistics_wrong_unit(

 @pytest.mark.parametrize("state_class", ["total"])
 @pytest.mark.parametrize(
-"units, device_class, state_unit, display_unit, statistics_unit, unit_class, factor",
+(
+"units",
+"device_class",
+"state_unit",
+"display_unit",
+"statistics_unit",
+"unit_class",
+"factor",
+),
 [
 (US_CUSTOMARY_SYSTEM, "distance", "m", "m", "m", "distance", 1),
 (US_CUSTOMARY_SYSTEM, "distance", "mi", "mi", "mi", "distance", 1),

@@ -622,7 +630,14 @@ async def test_compile_hourly_sum_statistics_amount(

 @pytest.mark.parametrize("state_class", ["total"])
 @pytest.mark.parametrize(
-"device_class, state_unit, display_unit, statistics_unit, unit_class, factor",
+(
+"device_class",
+"state_unit",
+"display_unit",
+"statistics_unit",
+"unit_class",
+"factor",
+),
 [
 ("energy", "kWh", "kWh", "kWh", "energy", 1),
 ("energy", "Wh", "Wh", "Wh", "energy", 1),

@@ -741,7 +756,14 @@ def test_compile_hourly_sum_statistics_amount_reset_every_state_change(

 @pytest.mark.parametrize("state_class", ["total"])
 @pytest.mark.parametrize(
-"device_class, state_unit, display_unit, statistics_unit, unit_class, factor",
+(
+"device_class",
+"state_unit",
+"display_unit",
+"statistics_unit",
+"unit_class",
+"factor",
+),
 [
 ("energy", "kWh", "kWh", "kWh", "energy", 1),
 ],

@@ -831,7 +853,14 @@ def test_compile_hourly_sum_statistics_amount_invalid_last_reset(

 @pytest.mark.parametrize("state_class", ["total"])
 @pytest.mark.parametrize(
-"device_class, state_unit, display_unit, statistics_unit, unit_class, factor",
+(
+"device_class",
+"state_unit",
+"display_unit",
+"statistics_unit",
+"unit_class",
+"factor",
+),
 [
 ("energy", "kWh", "kWh", "kWh", "energy", 1),
 ],
@@ -775,7 +775,7 @@ async def test_indexed_sensor_attributes(hass, mqtt_mock, setup_tasmota):

 @pytest.mark.parametrize("status_sensor_disabled", [False])
 @pytest.mark.parametrize(
-"sensor_name, disabled, disabled_by",
+("sensor_name", "disabled", "disabled_by"),
 [
 ("tasmota_firmware_version", True, er.RegistryEntryDisabler.INTEGRATION),
 ("tasmota_ip", True, er.RegistryEntryDisabler.INTEGRATION),
@@ -161,7 +161,7 @@ async def test_optimistic_states(hass, start_ha):

 @pytest.mark.parametrize(("count", "domain"), [(0, "alarm_control_panel")])
 @pytest.mark.parametrize(
-"config,msg",
+("config", "msg"),
 [
 (
 {

@@ -317,7 +317,7 @@ async def test_unique_id(hass, start_ha):

 @pytest.mark.parametrize(("count", "domain"), [(1, "alarm_control_panel")])
 @pytest.mark.parametrize(
-"config,code_format,code_arm_required",
+("config", "code_format", "code_arm_required"),
 [
 (
 {
@@ -34,7 +34,7 @@ OFF = "off"

 @pytest.mark.parametrize("count", [1])
 @pytest.mark.parametrize(
-"config,domain,entity_id,name,attributes",
+("config", "domain", "entity_id", "name", "attributes"),
 [
 (
 {

@@ -78,7 +78,7 @@ async def test_setup_minimal(hass, start_ha, entity_id, name, attributes):

 @pytest.mark.parametrize("count", [1])
 @pytest.mark.parametrize(
-"config,domain,entity_id",
+("config", "domain", "entity_id"),
 [
 (
 {

@@ -122,7 +122,7 @@ async def test_setup(hass, start_ha, entity_id):

 @pytest.mark.parametrize("count", [0])
 @pytest.mark.parametrize(
-"config,domain",
+("config", "domain"),
 [
 # No legacy binary sensors
 (

@@ -185,7 +185,7 @@ async def test_setup_invalid_sensors(hass, count, start_ha):

 @pytest.mark.parametrize("count", [1])
 @pytest.mark.parametrize(
-"config,domain,entity_id",
+("config", "domain", "entity_id"),
 [
 (
 {

@@ -237,7 +237,7 @@ async def test_icon_template(hass, start_ha, entity_id):

 @pytest.mark.parametrize("count", [1])
 @pytest.mark.parametrize(
-"config,domain,entity_id",
+("config", "domain", "entity_id"),
 [
 (
 {

@@ -289,7 +289,7 @@ async def test_entity_picture_template(hass, start_ha, entity_id):

 @pytest.mark.parametrize("count", [1])
 @pytest.mark.parametrize(
-"config,domain,entity_id",
+("config", "domain", "entity_id"),
 [
 (
 {

@@ -548,7 +548,7 @@ async def test_template_delay_on_off(hass, start_ha):

 @pytest.mark.parametrize("count", [1])
 @pytest.mark.parametrize(
-"config,domain,entity_id",
+("config", "domain", "entity_id"),
 [
 (
 {

@@ -593,7 +593,7 @@ async def test_available_without_availability_template(hass, start_ha, entity_id

 @pytest.mark.parametrize("count", [1])
 @pytest.mark.parametrize(
-"config,domain,entity_id",
+("config", "domain", "entity_id"),
 [
 (
 {

@@ -860,7 +860,7 @@ async def test_template_validation_error(hass, caplog, start_ha):

 @pytest.mark.parametrize("count", [1])
 @pytest.mark.parametrize(
-"config,domain,entity_id",
+("config", "domain", "entity_id"),
 [
 (
 {

@@ -934,7 +934,7 @@ async def test_availability_icon_picture(hass, start_ha, entity_id):
 ],
 )
 @pytest.mark.parametrize(
-"extra_config, restored_state, initial_state",
+("extra_config", "restored_state", "initial_state"),
 [
 ({}, ON, OFF),
 ({}, OFF, OFF),

@@ -1156,7 +1156,7 @@ async def test_template_with_trigger_templated_delay_on(hass, start_ha):
 ],
 )
 @pytest.mark.parametrize(
-"restored_state, initial_state, initial_attributes",
+("restored_state", "initial_state", "initial_attributes"),
 [
 (ON, ON, ["entity_picture", "icon", "plus_one"]),
 (OFF, OFF, ["entity_picture", "icon", "plus_one"]),
@@ -48,7 +48,7 @@ OPEN_CLOSE_COVER_CONFIG = {

 @pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)])
 @pytest.mark.parametrize(
-"config, states",
+("config", "states"),
 [
 (
 {

@@ -509,7 +509,7 @@ async def test_set_position(hass, start_ha, calls):
 ],
 )
 @pytest.mark.parametrize(
-"service,attr,tilt_position",
+("service", "attr", "tilt_position"),
 [
 (
 SERVICE_SET_COVER_TILT_POSITION,
@@ -186,7 +186,7 @@ async def test_templates_with_entities(hass, start_ha):

 @pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)])
 @pytest.mark.parametrize(
-"config,entity,tests",
+("config", "entity", "tests"),
 [
 (
 {

@@ -281,7 +281,7 @@ async def test_availability_template_with_entities(hass, start_ha):

 @pytest.mark.parametrize(("count", "domain"), [(1, DOMAIN)])
 @pytest.mark.parametrize(
-"config, states",
+("config", "states"),
 [
 (
 {
@@ -110,7 +110,7 @@ async def setup_light(hass, count, light_config):

 @pytest.mark.parametrize("count", [1])
 @pytest.mark.parametrize(
-"supported_features,supported_color_modes",
+("supported_features", "supported_color_modes"),
 [(0, [ColorMode.BRIGHTNESS])],
 )
 @pytest.mark.parametrize(

@@ -170,7 +170,7 @@ async def test_template_state_text(hass, setup_light):

 @pytest.mark.parametrize("count", [1])
 @pytest.mark.parametrize(
-"value_template,expected_state,expected_color_mode",
+("value_template", "expected_state", "expected_color_mode"),
 [
 (
 "{{ 1 == 1 }}",

@@ -591,7 +591,7 @@ async def test_level_action_no_template(

 @pytest.mark.parametrize("count", [1])
 @pytest.mark.parametrize(
-"expected_level,level_template,expected_color_mode",
+("expected_level", "level_template", "expected_color_mode"),
 [
 (255, "{{255}}", ColorMode.BRIGHTNESS),
 (None, "{{256}}", ColorMode.BRIGHTNESS),

@@ -631,7 +631,7 @@ async def test_level_template(

 @pytest.mark.parametrize("count", [1])
 @pytest.mark.parametrize(
-"expected_temp,temperature_template,expected_color_mode",
+("expected_temp", "temperature_template", "expected_color_mode"),
 [
 (500, "{{500}}", ColorMode.COLOR_TEMP),
 (None, "{{501}}", ColorMode.COLOR_TEMP),

@@ -831,7 +831,7 @@ async def test_color_action_no_template(

 @pytest.mark.parametrize("count", [1])
 @pytest.mark.parametrize(
-"expected_hs,color_template,expected_color_mode",
+("expected_hs", "color_template", "expected_color_mode"),
 [
 ((360, 100), "{{(360, 100)}}", ColorMode.HS),
 ((359.9, 99.9), "{{(359.9, 99.9)}}", ColorMode.HS),

@@ -1076,7 +1076,7 @@ async def test_effect_action_invalid_effect(hass, setup_light, calls):

 @pytest.mark.parametrize("count", [1])
 @pytest.mark.parametrize(
-"expected_effect_list,effect_list_template",
+("expected_effect_list", "effect_list_template"),
 [
 (
 ["Strobe color", "Police", "Christmas", "RGB", "Random Loop"],

@@ -1121,7 +1121,7 @@ async def test_effect_list_template(

 @pytest.mark.parametrize("count", [1])
 @pytest.mark.parametrize(
-"expected_effect,effect_template",
+("expected_effect", "effect_template"),
 [
 (None, "Disco"),
 (None, "None"),

@@ -1157,7 +1157,7 @@ async def test_effect_template(hass, expected_effect, count, effect_template):

 @pytest.mark.parametrize("count", [1])
 @pytest.mark.parametrize(
-"expected_min_mireds,min_mireds_template",
+("expected_min_mireds", "min_mireds_template"),
 [
 (118, "{{118}}"),
 (153, "{{x - 12}}"),

@@ -1194,7 +1194,7 @@ async def test_min_mireds_template(

 @pytest.mark.parametrize("count", [1])
 @pytest.mark.parametrize(
-"expected_max_mireds,max_mireds_template",
+("expected_max_mireds", "max_mireds_template"),
 [
 (488, "{{488}}"),
 (500, "{{x - 12}}"),

@@ -1231,7 +1231,7 @@ async def test_max_mireds_template(

 @pytest.mark.parametrize("count", [1])
 @pytest.mark.parametrize(
-"expected_supports_transition,supports_transition_template",
+("expected_supports_transition", "supports_transition_template"),
 [
 (True, "{{true}}"),
 (True, "{{1 == 1}}"),
@@ -120,7 +120,7 @@ async def test_entity_picture_template(hass, start_ha):

 @pytest.mark.parametrize(("count", "domain"), [(1, sensor.DOMAIN)])
 @pytest.mark.parametrize(
-"attribute,config,expected",
+("attribute", "config", "expected"),
 [
 (
 "friendly_name",

@@ -1449,7 +1449,7 @@ async def test_entity_device_class_errors_works(hass: HomeAssistant) -> None:
 ],
 )
 @pytest.mark.parametrize(
-"restored_state, restored_native_value, initial_state, initial_attributes",
+("restored_state", "restored_native_value", "initial_state", "initial_attributes"),
 [
 # the native value should be used, not the state
 ("dog", 10, "10", ["entity_picture", "icon", "plus_one"]),
@@ -64,7 +64,7 @@ async def test_if_fires_on_change_bool(hass, start_ha, calls):

 @pytest.mark.parametrize(("count", "domain"), [(1, automation.DOMAIN)])
 @pytest.mark.parametrize(
-"config, call_setup",
+("config", "call_setup"),
 [
 (
 {

@@ -275,7 +275,7 @@ async def test_general(hass, call_setup, start_ha, calls):

 @pytest.mark.parametrize(("count", "domain"), [(1, automation.DOMAIN)])
 @pytest.mark.parametrize(
-"config, call_setup",
+("config", "call_setup"),
 [
 (
 {
@@ -27,7 +27,7 @@ _BATTERY_LEVEL_INPUT_NUMBER = "input_number.battery_level"

 @pytest.mark.parametrize(("count", "domain"), [(1, "vacuum")])
 @pytest.mark.parametrize(
-"parm1,parm2,config",
+("parm1", "parm2", "config"),
 [
 (
 STATE_UNKNOWN,

@@ -98,7 +98,7 @@ async def test_abort_already_configured(hass: HomeAssistant, source: str) -> Non

 @pytest.mark.parametrize("source", [config_entries.SOURCE_USER])
 @pytest.mark.parametrize(
-"error, connect_side_effect, login_side_effect",
+("error", "connect_side_effect", "login_side_effect"),
 [
 ("invalid_auth", None, AuthError),
 ("cannot_connect", ConnectError, None),
@@ -137,7 +137,7 @@ async def create_entity(hass: HomeAssistant, **kwargs):
 "native_unit", (UnitOfTemperature.FAHRENHEIT, UnitOfTemperature.CELSIUS)
 )
 @pytest.mark.parametrize(
-"state_unit, unit_system",
+("state_unit", "unit_system"),
 (
 (UnitOfTemperature.CELSIUS, METRIC_SYSTEM),
 (UnitOfTemperature.FAHRENHEIT, US_CUSTOMARY_SYSTEM),

@@ -173,7 +173,7 @@ async def test_temperature(

 @pytest.mark.parametrize("native_unit", (None,))
 @pytest.mark.parametrize(
-"state_unit, unit_system",
+("state_unit", "unit_system"),
 (
 (UnitOfTemperature.CELSIUS, METRIC_SYSTEM),
 (UnitOfTemperature.FAHRENHEIT, US_CUSTOMARY_SYSTEM),

@@ -209,7 +209,7 @@ async def test_temperature_no_unit(

 @pytest.mark.parametrize("native_unit", (UnitOfPressure.INHG, UnitOfPressure.INHG))
 @pytest.mark.parametrize(
-"state_unit, unit_system",
+("state_unit", "unit_system"),
 ((UnitOfPressure.HPA, METRIC_SYSTEM), (UnitOfPressure.INHG, US_CUSTOMARY_SYSTEM)),
 )
 async def test_pressure(

@@ -239,7 +239,7 @@ async def test_pressure(

 @pytest.mark.parametrize("native_unit", (None,))
 @pytest.mark.parametrize(
-"state_unit, unit_system",
+("state_unit", "unit_system"),
 ((UnitOfPressure.HPA, METRIC_SYSTEM), (UnitOfPressure.INHG, US_CUSTOMARY_SYSTEM)),
 )
 async def test_pressure_no_unit(

@@ -276,7 +276,7 @@ async def test_pressure_no_unit(
 ),
 )
 @pytest.mark.parametrize(
-"state_unit, unit_system",
+("state_unit", "unit_system"),
 (
 (UnitOfSpeed.KILOMETERS_PER_HOUR, METRIC_SYSTEM),
 (UnitOfSpeed.MILES_PER_HOUR, US_CUSTOMARY_SYSTEM),

@@ -312,7 +312,7 @@ async def test_wind_speed(

 @pytest.mark.parametrize("native_unit", (None,))
 @pytest.mark.parametrize(
-"state_unit, unit_system",
+("state_unit", "unit_system"),
 (
 (UnitOfSpeed.KILOMETERS_PER_HOUR, METRIC_SYSTEM),
 (UnitOfSpeed.MILES_PER_HOUR, US_CUSTOMARY_SYSTEM),

@@ -348,7 +348,7 @@ async def test_wind_speed_no_unit(

 @pytest.mark.parametrize("native_unit", (UnitOfLength.MILES, UnitOfLength.KILOMETERS))
 @pytest.mark.parametrize(
-"state_unit, unit_system",
+("state_unit", "unit_system"),
 (
 (UnitOfLength.KILOMETERS, METRIC_SYSTEM),
 (UnitOfLength.MILES, US_CUSTOMARY_SYSTEM),

@@ -379,7 +379,7 @@ async def test_visibility(

 @pytest.mark.parametrize("native_unit", (None,))
 @pytest.mark.parametrize(
-"state_unit, unit_system",
+("state_unit", "unit_system"),
 (
 (UnitOfLength.KILOMETERS, METRIC_SYSTEM),
 (UnitOfLength.MILES, US_CUSTOMARY_SYSTEM),

@@ -410,7 +410,7 @@ async def test_visibility_no_unit(

 @pytest.mark.parametrize("native_unit", (UnitOfLength.INCHES, UnitOfLength.MILLIMETERS))
 @pytest.mark.parametrize(
-"state_unit, unit_system",
+("state_unit", "unit_system"),
 (
 (UnitOfLength.MILLIMETERS, METRIC_SYSTEM),
 (UnitOfLength.INCHES, US_CUSTOMARY_SYSTEM),

@@ -443,7 +443,7 @@ async def test_precipitation(

 @pytest.mark.parametrize("native_unit", (None,))
 @pytest.mark.parametrize(
-"state_unit, unit_system",
+("state_unit", "unit_system"),
 (
 (UnitOfLength.MILLIMETERS, METRIC_SYSTEM),
 (UnitOfLength.INCHES, US_CUSTOMARY_SYSTEM),
@@ -351,7 +351,7 @@ async def test_xiaomi_vacuum_services(hass, mock_mirobo_is_got_error):
 [(None, STATUS_CALLS), (DeviceException("dummy exception"), [])],
 )
 @pytest.mark.parametrize(
-"service, service_data, device_method, device_method_call",
+("service", "service_data", "device_method", "device_method_call"),
 [
 (
 SERVICE_START_REMOTE_CONTROL,

@@ -240,7 +240,7 @@ async def slow_server_version(*args):
 ],
 )
 @pytest.mark.parametrize(
-"url, server_version_side_effect, server_version_timeout, error",
+("url", "server_version_side_effect", "server_version_timeout", "error"),
 [
 (
 "not-ws-url",
@@ -3829,7 +3829,7 @@ async def test_script_mode_single(hass, caplog):

 @pytest.mark.parametrize("max_exceeded", [None, "WARNING", "INFO", "ERROR", "SILENT"])
 @pytest.mark.parametrize(
-"script_mode,max_runs", [("single", 1), ("parallel", 2), ("queued", 2)]
+("script_mode", "max_runs"), [("single", 1), ("parallel", 2), ("queued", 2)]
 )
 async def test_max_exceeded(hass, caplog, max_exceeded, script_mode, max_runs):
 """Test max_exceeded option."""
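The second, far more common pattern in the test-suite hunks above is the `pytest.mark.parametrize` argument-names string being split into a tuple with one string per argument. Pytest accepts both forms; the tuple form is the style that flake8-pytest-style checks prefer, and it appears to be enforced here through the newer Ruff (assumed rule family, the commit does not name a rule code). A standalone sketch with invented test names, not code from this commit:

import pytest


# Before: @pytest.mark.parametrize("value,expected", [(1, "1"), (2, "2")])
# After: the argument names are passed as a tuple, one string per parameter.
@pytest.mark.parametrize(("value", "expected"), [(1, "1"), (2, "2")])
def test_str_roundtrip(value: int, expected: str) -> None:
    assert str(value) == expected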