Add flake8 comprehensions checks to pre-commit & CI (#48111)
parent fa5ce70af1
commit 8a56dbf587
@@ -32,6 +32,7 @@ repos:
           # default yet due to https://github.com/plinss/flake8-noqa/issues/1
           # - flake8-noqa==1.1.0
           - pydocstyle==5.1.1
+          - flake8-comprehensions==3.4.0
         files: ^(homeassistant|script|tests)/.+\.py$
   - repo: https://github.com/PyCQA/bandit
     rev: 1.7.0
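For context: flake8-comprehensions adds the C400-C416 family of checks for redundant `list`/`set`/`dict`/`tuple` calls and comprehensions, and the hunks below are the cleanups it demanded at 3.4.0. A minimal sketch (illustrative snippet, not code from this PR) of the three patterns that dominate this diff, with the codes the plugin reports:

```python
data = {"b", "a"}

sorted_copy = sorted(list(data))  # C414: unnecessary list() call within sorted()
sorted_copy = sorted(data)        # fix: sorted() accepts any iterable

empty = list()                    # C408: unnecessary list() call
empty = []                        # fix: use the literal

pair = tuple((1.0, 2.0))          # C409: unnecessary tuple passed to tuple()
pair = (1.0, 2.0)                 # fix: the parentheses already build the tuple
```

Locally, something like `flake8 --select=C4 homeassistant` or `pre-commit run flake8 --all-files` should reproduce what CI now flags.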
@@ -230,7 +230,7 @@ class DenonDevice(MediaPlayerEntity):
     @property
     def source_list(self):
         """Return the list of available input sources."""
-        return sorted(list(self._source_list))
+        return sorted(self._source_list)

     @property
     def media_title(self):
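This pattern recurs throughout the diff: `sorted()` builds a fresh list from any iterable, so the inner `list()` only allocates a throwaway copy (reported as C414). A standalone sketch:

```python
# sorted() consumes any iterable (set, dict keys, generator) and
# always returns a new list, so pre-converting is pure overhead.
sources = {"HDMI 2", "HDMI 1", "Tuner"}

assert sorted(list(sources)) == sorted(sources)  # identical results
assert sorted(sources) == ["HDMI 1", "HDMI 2", "Tuner"]
```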
@@ -391,11 +391,9 @@ class HueOneLightChangeView(HomeAssistantView):
             return self.json_message("Bad request", HTTP_BAD_REQUEST)
         if HUE_API_STATE_XY in request_json:
             try:
-                parsed[STATE_XY] = tuple(
-                    (
-                        float(request_json[HUE_API_STATE_XY][0]),
-                        float(request_json[HUE_API_STATE_XY][1]),
-                    )
-                )
+                parsed[STATE_XY] = (
+                    float(request_json[HUE_API_STATE_XY][0]),
+                    float(request_json[HUE_API_STATE_XY][1]),
+                )
             except ValueError:
                 _LOGGER.error("Unable to parse data (2): %s", request_json)
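Here the two floats are already wrapped in parentheses, so they form a tuple before `tuple()` ever runs; the outer call just copies it (C409). A sketch with a hypothetical payload shape standing in for the real request:

```python
request_json = {"xy": ["0.31", "0.32"]}  # hypothetical Hue-style payload

xy = (float(request_json["xy"][0]), float(request_json["xy"][1]))
# The removed form produced an identical tuple, via one extra copy:
assert xy == tuple((float(request_json["xy"][0]), float(request_json["xy"][1])))
assert xy == (0.31, 0.32)
```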
@@ -98,7 +98,7 @@ TRANSITION_GRADUAL = "gradual"
 TRANSITION_JUMP = "jump"
 TRANSITION_STROBE = "strobe"

-FLUX_EFFECT_LIST = sorted(list(EFFECT_MAP)) + [EFFECT_RANDOM]
+FLUX_EFFECT_LIST = sorted(EFFECT_MAP) + [EFFECT_RANDOM]

 CUSTOM_EFFECT_SCHEMA = vol.Schema(
     {
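Worth noting: iterating a dict yields its keys, so `sorted(EFFECT_MAP)` is the sorted key list directly; the `list()` call added nothing but an intermediate copy. A sketch with stand-in values (the real EFFECT_MAP entries are different):

```python
# Stand-in mapping of effect name -> device code, for illustration only.
EFFECT_MAP = {"colorloop": 0x25, "red_fade": 0x26, "blue_fade": 0x27}
EFFECT_RANDOM = "random"

FLUX_EFFECT_LIST = sorted(EFFECT_MAP) + [EFFECT_RANDOM]
assert FLUX_EFFECT_LIST == ["blue_fade", "colorloop", "red_fade", "random"]
```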
@@ -610,7 +610,7 @@ class LightEntity(ToggleEntity):
             data[ATTR_EFFECT_LIST] = self.effect_list

         data[ATTR_SUPPORTED_COLOR_MODES] = sorted(
-            list(self._light_internal_supported_color_modes)
+            self._light_internal_supported_color_modes
         )

         return data
@@ -163,7 +163,7 @@ class NAD(MediaPlayerEntity):
     @property
     def source_list(self):
         """List of available input sources."""
-        return sorted(list(self._reverse_mapping))
+        return sorted(self._reverse_mapping)

     @property
     def available(self):
@@ -43,7 +43,7 @@ class Gateway:
         )
         entries = self.get_and_delete_all_sms(state_machine)
         _LOGGER.debug("SMS entries:%s", entries)
-        data = list()
+        data = []

         for entry in entries:
             decoded_entry = gammu.DecodeSMS(entry)
@@ -78,7 +78,7 @@ class Gateway:
         start_remaining = remaining
         # Get all sms
         start = True
-        entries = list()
+        entries = []
         all_parts = -1
         all_parts_arrived = False
         _LOGGER.debug("Start remaining:%i", start_remaining)
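`list()` and `[]` build the same empty list, but the literal skips a global name lookup and a function call, which is why C408 prefers it (same story for `dict()` vs `{}`). A quick, rough way to see the difference:

```python
import timeit

# The literal avoids looking up the `list` builtin and calling it;
# in microbenchmarks it typically measures a few times faster.
print(timeit.timeit("list()"))
print(timeit.timeit("[]"))
```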
@@ -32,9 +32,7 @@ CONF_DESTINATION = "destination"

 _QUERY_SCHEME = vol.Schema(
     {
-        vol.Required(CONF_MODE): vol.All(
-            cv.ensure_list, [vol.In(list(["bus", "train"]))]
-        ),
+        vol.Required(CONF_MODE): vol.All(cv.ensure_list, [vol.In(["bus", "train"])]),
         vol.Required(CONF_ORIGIN): cv.string,
         vol.Required(CONF_DESTINATION): cv.string,
     }
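`list(["bus", "train"])` copies a list literal into an identical new list (C410), and `vol.In` only needs a container to test membership against, so the literal suffices. A minimal sketch using voluptuous the same way the component does:

```python
import voluptuous as vol

schema = vol.Schema(vol.In(["bus", "train"]))
assert schema("bus") == "bus"  # accepted values pass through unchanged
# schema("tram") would raise vol.Invalid, since it is not in the container.
```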
@@ -319,7 +319,7 @@ class UnifiOptionsFlowHandler(config_entries.OptionsFlow):
                 if "name" in wlan
             }
         )
-        ssid_filter = {ssid: ssid for ssid in sorted(list(ssids))}
+        ssid_filter = {ssid: ssid for ssid in sorted(ssids)}

         return self.async_show_form(
             step_id="device_tracker",
@@ -271,7 +271,7 @@ class LgWebOSMediaPlayerEntity(MediaPlayerEntity):
     @property
     def source_list(self):
         """List of available input sources."""
-        return sorted(list(self._source_list))
+        return sorted(self._source_list)

     @property
     def media_content_type(self):
@@ -227,7 +227,7 @@ def parse_time_expression(parameter: Any, min_value: int, max_value: int) -> lis
     elif not hasattr(parameter, "__iter__"):
         res = [int(parameter)]
     else:
-        res = list(sorted(int(x) for x in parameter))
+        res = sorted(int(x) for x in parameter)

     for val in res:
         if val < min_value or val > max_value:
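`list(sorted(...))` is the mirror-image check, C413: `sorted()` already returns a list, so the outer `list()` re-copies it. Note the fixed line also feeds the generator expression straight into `sorted()` without materializing it first:

```python
parameter = ["30", 10, "20"]

res = sorted(int(x) for x in parameter)  # generator consumed directly
assert res == [10, 20, 30]
assert isinstance(res, list)  # already a list; no outer list() needed
```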
@@ -3,6 +3,7 @@
 bandit==1.7.0
 black==20.8b1
 codespell==2.0.0
+flake8-comprehensions==3.4.0
 flake8-docstrings==1.5.0
 flake8==3.8.4
 isort==5.7.0
@@ -766,7 +766,7 @@ class MockConfigEntry(config_entries.ConfigEntry):
 def patch_yaml_files(files_dict, endswith=True):
     """Patch load_yaml with a dictionary of yaml files."""
     # match using endswith, start search with longest string
-    matchlist = sorted(list(files_dict.keys()), key=len) if endswith else []
+    matchlist = sorted(files_dict.keys(), key=len) if endswith else []

     def mock_open_f(fname, **_):
         """Mock open() in the yaml module, used by load_yaml."""
@@ -32,7 +32,7 @@ def test_json_encoder(hass):

     # Test serializing a set()
     data = {"milk", "beer"}
-    assert sorted(ha_json_enc.default(data)) == sorted(list(data))
+    assert sorted(ha_json_enc.default(data)) == sorted(data)

     # Test serializing an object which implements as_dict
     assert ha_json_enc.default(state) == state.as_dict()
@@ -579,7 +579,7 @@ async def test_service_group_set_group_remove_group(hass):
     assert group_state.attributes[group.ATTR_AUTO]
     assert group_state.attributes["friendly_name"] == "Test2"
     assert group_state.attributes["icon"] == "mdi:camera"
-    assert sorted(list(group_state.attributes["entity_id"])) == sorted(
+    assert sorted(group_state.attributes["entity_id"]) == sorted(
         ["test.entity_bla1", "test.entity_id2"]
     )

@@ -861,7 +861,7 @@ async def test_entity_discovery(

     assert values.primary is value_class.primary
     assert len(list(values)) == 3
-    assert sorted(list(values), key=lambda a: id(a)) == sorted(
+    assert sorted(values, key=lambda a: id(a)) == sorted(
         [value_class.primary, None, None], key=lambda a: id(a)
     )

@@ -885,7 +885,7 @@ async def test_entity_discovery(

     assert values.secondary is value_class.secondary
     assert len(list(values)) == 3
-    assert sorted(list(values), key=lambda a: id(a)) == sorted(
+    assert sorted(values, key=lambda a: id(a)) == sorted(
         [value_class.primary, value_class.secondary, None], key=lambda a: id(a)
     )

@@ -902,7 +902,7 @@ async def test_entity_discovery(

     assert values.optional is value_class.optional
     assert len(list(values)) == 3
-    assert sorted(list(values), key=lambda a: id(a)) == sorted(
+    assert sorted(values, key=lambda a: id(a)) == sorted(
         [value_class.primary, value_class.secondary, value_class.optional],
         key=lambda a: id(a),
     )
@@ -961,7 +961,7 @@ async def test_entity_existing_values(
     assert values.secondary is value_class.secondary
     assert values.optional is value_class.optional
     assert len(list(values)) == 3
-    assert sorted(list(values), key=lambda a: id(a)) == sorted(
+    assert sorted(values, key=lambda a: id(a)) == sorted(
         [value_class.primary, value_class.secondary, value_class.optional],
         key=lambda a: id(a),
     )
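Note what the plugin leaves alone here: the `len(list(values))` lines survive because `values` is only iterable and `len()` requires an object with `__len__`, so that `list()` is a genuine materialization, not a redundancy. Only the copy feeding `sorted()`, which accepts any iterable, is stripped. A sketch of the distinction:

```python
def values():
    """Stand-in for the iterable-only values object in these tests."""
    yield from (3, 1, 2)

assert sorted(values()) == [1, 2, 3]  # fine: sorted() takes any iterable
# len(values()) would raise TypeError, so here list() is actually needed:
assert len(list(values())) == 3
```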
@@ -17,7 +17,7 @@ def test_json_encoder(hass):

     # Test serializing a set()
     data = {"milk", "beer"}
-    assert sorted(ha_json_enc.default(data)) == sorted(list(data))
+    assert sorted(ha_json_enc.default(data)) == sorted(data)

     # Test serializing an object which implements as_dict
     assert ha_json_enc.default(state) == state.as_dict()
@@ -177,14 +177,14 @@ def test_get_age():

 def test_parse_time_expression():
     """Test parse_time_expression."""
-    assert [x for x in range(60)] == dt_util.parse_time_expression("*", 0, 59)
-    assert [x for x in range(60)] == dt_util.parse_time_expression(None, 0, 59)
+    assert list(range(60)) == dt_util.parse_time_expression("*", 0, 59)
+    assert list(range(60)) == dt_util.parse_time_expression(None, 0, 59)

-    assert [x for x in range(0, 60, 5)] == dt_util.parse_time_expression("/5", 0, 59)
+    assert list(range(0, 60, 5)) == dt_util.parse_time_expression("/5", 0, 59)

     assert [1, 2, 3] == dt_util.parse_time_expression([2, 1, 3], 0, 59)

-    assert [x for x in range(24)] == dt_util.parse_time_expression("*", 0, 23)
+    assert list(range(24)) == dt_util.parse_time_expression("*", 0, 23)

     assert [42] == dt_util.parse_time_expression(42, 0, 59)
     assert [42] == dt_util.parse_time_expression("42", 0, 59)
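`[x for x in iterable]` is an identity comprehension (C416); `list(range(...))` says the same thing without the dummy loop variable. A quick equivalence check:

```python
# Both forms build the same list; the comprehension just adds noise.
assert [x for x in range(5)] == list(range(5)) == [0, 1, 2, 3, 4]
assert list(range(0, 60, 5))[:3] == [0, 5, 10]  # step form used in the test
```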