Add flake8 comprehensions checks to pre-commit & CI (#48111)

This commit is contained in:
Franck Nijhof 2021-03-19 13:41:09 +01:00 committed by GitHub
parent fa5ce70af1
commit 8a56dbf587
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
18 changed files with 27 additions and 29 deletions

View file

@@ -32,6 +32,7 @@ repos:
# default yet due to https://github.com/plinss/flake8-noqa/issues/1 # default yet due to https://github.com/plinss/flake8-noqa/issues/1
# - flake8-noqa==1.1.0 # - flake8-noqa==1.1.0
- pydocstyle==5.1.1 - pydocstyle==5.1.1
- flake8-comprehensions==3.4.0
files: ^(homeassistant|script|tests)/.+\.py$ files: ^(homeassistant|script|tests)/.+\.py$
- repo: https://github.com/PyCQA/bandit - repo: https://github.com/PyCQA/bandit
rev: 1.7.0 rev: 1.7.0

View file

@@ -230,7 +230,7 @@ class DenonDevice(MediaPlayerEntity):
@property @property
def source_list(self): def source_list(self):
"""Return the list of available input sources.""" """Return the list of available input sources."""
return sorted(list(self._source_list)) return sorted(self._source_list)
@property @property
def media_title(self): def media_title(self):

View file

@@ -391,11 +391,9 @@ class HueOneLightChangeView(HomeAssistantView):
return self.json_message("Bad request", HTTP_BAD_REQUEST) return self.json_message("Bad request", HTTP_BAD_REQUEST)
if HUE_API_STATE_XY in request_json: if HUE_API_STATE_XY in request_json:
try: try:
parsed[STATE_XY] = tuple( parsed[STATE_XY] = (
( float(request_json[HUE_API_STATE_XY][0]),
float(request_json[HUE_API_STATE_XY][0]), float(request_json[HUE_API_STATE_XY][1]),
float(request_json[HUE_API_STATE_XY][1]),
)
) )
except ValueError: except ValueError:
_LOGGER.error("Unable to parse data (2): %s", request_json) _LOGGER.error("Unable to parse data (2): %s", request_json)

View file

@@ -98,7 +98,7 @@ TRANSITION_GRADUAL = "gradual"
TRANSITION_JUMP = "jump" TRANSITION_JUMP = "jump"
TRANSITION_STROBE = "strobe" TRANSITION_STROBE = "strobe"
FLUX_EFFECT_LIST = sorted(list(EFFECT_MAP)) + [EFFECT_RANDOM] FLUX_EFFECT_LIST = sorted(EFFECT_MAP) + [EFFECT_RANDOM]
CUSTOM_EFFECT_SCHEMA = vol.Schema( CUSTOM_EFFECT_SCHEMA = vol.Schema(
{ {

View file

@@ -610,7 +610,7 @@ class LightEntity(ToggleEntity):
data[ATTR_EFFECT_LIST] = self.effect_list data[ATTR_EFFECT_LIST] = self.effect_list
data[ATTR_SUPPORTED_COLOR_MODES] = sorted( data[ATTR_SUPPORTED_COLOR_MODES] = sorted(
list(self._light_internal_supported_color_modes) self._light_internal_supported_color_modes
) )
return data return data

View file

@@ -163,7 +163,7 @@ class NAD(MediaPlayerEntity):
@property @property
def source_list(self): def source_list(self):
"""List of available input sources.""" """List of available input sources."""
return sorted(list(self._reverse_mapping)) return sorted(self._reverse_mapping)
@property @property
def available(self): def available(self):

View file

@@ -43,7 +43,7 @@ class Gateway:
) )
entries = self.get_and_delete_all_sms(state_machine) entries = self.get_and_delete_all_sms(state_machine)
_LOGGER.debug("SMS entries:%s", entries) _LOGGER.debug("SMS entries:%s", entries)
data = list() data = []
for entry in entries: for entry in entries:
decoded_entry = gammu.DecodeSMS(entry) decoded_entry = gammu.DecodeSMS(entry)
@@ -78,7 +78,7 @@ class Gateway:
start_remaining = remaining start_remaining = remaining
# Get all sms # Get all sms
start = True start = True
entries = list() entries = []
all_parts = -1 all_parts = -1
all_parts_arrived = False all_parts_arrived = False
_LOGGER.debug("Start remaining:%i", start_remaining) _LOGGER.debug("Start remaining:%i", start_remaining)

View file

@@ -32,9 +32,7 @@ CONF_DESTINATION = "destination"
_QUERY_SCHEME = vol.Schema( _QUERY_SCHEME = vol.Schema(
{ {
vol.Required(CONF_MODE): vol.All( vol.Required(CONF_MODE): vol.All(cv.ensure_list, [vol.In(["bus", "train"])]),
cv.ensure_list, [vol.In(list(["bus", "train"]))]
),
vol.Required(CONF_ORIGIN): cv.string, vol.Required(CONF_ORIGIN): cv.string,
vol.Required(CONF_DESTINATION): cv.string, vol.Required(CONF_DESTINATION): cv.string,
} }

View file

@@ -319,7 +319,7 @@ class UnifiOptionsFlowHandler(config_entries.OptionsFlow):
if "name" in wlan if "name" in wlan
} }
) )
ssid_filter = {ssid: ssid for ssid in sorted(list(ssids))} ssid_filter = {ssid: ssid for ssid in sorted(ssids)}
return self.async_show_form( return self.async_show_form(
step_id="device_tracker", step_id="device_tracker",

View file

@@ -271,7 +271,7 @@ class LgWebOSMediaPlayerEntity(MediaPlayerEntity):
@property @property
def source_list(self): def source_list(self):
"""List of available input sources.""" """List of available input sources."""
return sorted(list(self._source_list)) return sorted(self._source_list)
@property @property
def media_content_type(self): def media_content_type(self):

View file

@@ -227,7 +227,7 @@ def parse_time_expression(parameter: Any, min_value: int, max_value: int) -> lis
elif not hasattr(parameter, "__iter__"): elif not hasattr(parameter, "__iter__"):
res = [int(parameter)] res = [int(parameter)]
else: else:
res = list(sorted(int(x) for x in parameter)) res = sorted(int(x) for x in parameter)
for val in res: for val in res:
if val < min_value or val > max_value: if val < min_value or val > max_value:

View file

@@ -3,6 +3,7 @@
bandit==1.7.0 bandit==1.7.0
black==20.8b1 black==20.8b1
codespell==2.0.0 codespell==2.0.0
flake8-comprehensions==3.4.0
flake8-docstrings==1.5.0 flake8-docstrings==1.5.0
flake8==3.8.4 flake8==3.8.4
isort==5.7.0 isort==5.7.0

View file

@@ -766,7 +766,7 @@ class MockConfigEntry(config_entries.ConfigEntry):
def patch_yaml_files(files_dict, endswith=True): def patch_yaml_files(files_dict, endswith=True):
"""Patch load_yaml with a dictionary of yaml files.""" """Patch load_yaml with a dictionary of yaml files."""
# match using endswith, start search with longest string # match using endswith, start search with longest string
matchlist = sorted(list(files_dict.keys()), key=len) if endswith else [] matchlist = sorted(files_dict.keys(), key=len) if endswith else []
def mock_open_f(fname, **_): def mock_open_f(fname, **_):
"""Mock open() in the yaml module, used by load_yaml.""" """Mock open() in the yaml module, used by load_yaml."""

View file

@@ -32,7 +32,7 @@ def test_json_encoder(hass):
# Test serializing a set() # Test serializing a set()
data = {"milk", "beer"} data = {"milk", "beer"}
assert sorted(ha_json_enc.default(data)) == sorted(list(data)) assert sorted(ha_json_enc.default(data)) == sorted(data)
# Test serializing object which implements as_dict # Test serializing object which implements as_dict
assert ha_json_enc.default(state) == state.as_dict() assert ha_json_enc.default(state) == state.as_dict()

View file

@@ -579,7 +579,7 @@ async def test_service_group_set_group_remove_group(hass):
assert group_state.attributes[group.ATTR_AUTO] assert group_state.attributes[group.ATTR_AUTO]
assert group_state.attributes["friendly_name"] == "Test2" assert group_state.attributes["friendly_name"] == "Test2"
assert group_state.attributes["icon"] == "mdi:camera" assert group_state.attributes["icon"] == "mdi:camera"
assert sorted(list(group_state.attributes["entity_id"])) == sorted( assert sorted(group_state.attributes["entity_id"]) == sorted(
["test.entity_bla1", "test.entity_id2"] ["test.entity_bla1", "test.entity_id2"]
) )

View file

@@ -861,7 +861,7 @@ async def test_entity_discovery(
assert values.primary is value_class.primary assert values.primary is value_class.primary
assert len(list(values)) == 3 assert len(list(values)) == 3
assert sorted(list(values), key=lambda a: id(a)) == sorted( assert sorted(values, key=lambda a: id(a)) == sorted(
[value_class.primary, None, None], key=lambda a: id(a) [value_class.primary, None, None], key=lambda a: id(a)
) )
@@ -885,7 +885,7 @@ async def test_entity_discovery(
assert values.secondary is value_class.secondary assert values.secondary is value_class.secondary
assert len(list(values)) == 3 assert len(list(values)) == 3
assert sorted(list(values), key=lambda a: id(a)) == sorted( assert sorted(values, key=lambda a: id(a)) == sorted(
[value_class.primary, value_class.secondary, None], key=lambda a: id(a) [value_class.primary, value_class.secondary, None], key=lambda a: id(a)
) )
@@ -902,7 +902,7 @@ async def test_entity_discovery(
assert values.optional is value_class.optional assert values.optional is value_class.optional
assert len(list(values)) == 3 assert len(list(values)) == 3
assert sorted(list(values), key=lambda a: id(a)) == sorted( assert sorted(values, key=lambda a: id(a)) == sorted(
[value_class.primary, value_class.secondary, value_class.optional], [value_class.primary, value_class.secondary, value_class.optional],
key=lambda a: id(a), key=lambda a: id(a),
) )
@@ -961,7 +961,7 @@ async def test_entity_existing_values(
assert values.secondary is value_class.secondary assert values.secondary is value_class.secondary
assert values.optional is value_class.optional assert values.optional is value_class.optional
assert len(list(values)) == 3 assert len(list(values)) == 3
assert sorted(list(values), key=lambda a: id(a)) == sorted( assert sorted(values, key=lambda a: id(a)) == sorted(
[value_class.primary, value_class.secondary, value_class.optional], [value_class.primary, value_class.secondary, value_class.optional],
key=lambda a: id(a), key=lambda a: id(a),
) )

View file

@@ -17,7 +17,7 @@ def test_json_encoder(hass):
# Test serializing a set() # Test serializing a set()
data = {"milk", "beer"} data = {"milk", "beer"}
assert sorted(ha_json_enc.default(data)) == sorted(list(data)) assert sorted(ha_json_enc.default(data)) == sorted(data)
# Test serializing an object which implements as_dict # Test serializing an object which implements as_dict
assert ha_json_enc.default(state) == state.as_dict() assert ha_json_enc.default(state) == state.as_dict()

View file

@@ -177,14 +177,14 @@ def test_get_age():
def test_parse_time_expression(): def test_parse_time_expression():
"""Test parse_time_expression.""" """Test parse_time_expression."""
assert [x for x in range(60)] == dt_util.parse_time_expression("*", 0, 59) assert list(range(60)) == dt_util.parse_time_expression("*", 0, 59)
assert [x for x in range(60)] == dt_util.parse_time_expression(None, 0, 59) assert list(range(60)) == dt_util.parse_time_expression(None, 0, 59)
assert [x for x in range(0, 60, 5)] == dt_util.parse_time_expression("/5", 0, 59) assert list(range(0, 60, 5)) == dt_util.parse_time_expression("/5", 0, 59)
assert [1, 2, 3] == dt_util.parse_time_expression([2, 1, 3], 0, 59) assert [1, 2, 3] == dt_util.parse_time_expression([2, 1, 3], 0, 59)
assert [x for x in range(24)] == dt_util.parse_time_expression("*", 0, 23) assert list(range(24)) == dt_util.parse_time_expression("*", 0, 23)
assert [42] == dt_util.parse_time_expression(42, 0, 59) assert [42] == dt_util.parse_time_expression(42, 0, 59)
assert [42] == dt_util.parse_time_expression("42", 0, 59) assert [42] == dt_util.parse_time_expression("42", 0, 59)