Add ruff rule PIE808 (#113621)

Author: Sid · 2024-03-17 09:56:26 +01:00 · committed by GitHub
parent 1a70dbfd94
commit d5fd005db8
42 changed files with 61 additions and 69 deletions
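
Ruff's PIE808 (unnecessary-range-start) flags `range()` calls that pass an explicit start of 0, since `range(n)` already starts at 0. A minimal sketch of the pattern this commit rewrites across the codebase (illustrative Python, not taken from any of the changed files):

    # Flagged by PIE808: the explicit 0 start is redundant.
    for day in range(0, 5):
        print(day)

    # Equivalent, and what the autofix produces.
    for day in range(5):
        print(day)

    # Only a literal 0 start is flagged; ranges with a non-zero start are left alone.
    for tray in range(1, 6):
        print(tray)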

View file

@@ -52,7 +52,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     # Remove ozone sensors from registry if they exist
     ent_reg = er.async_get(hass)
-    for day in range(0, 5):
+    for day in range(5):
         unique_id = f"{coordinator.location_key}-ozone-{day}"
         if entity_id := ent_reg.async_get_entity_id(SENSOR_PLATFORM, DOMAIN, unique_id):
             _LOGGER.debug("Removing ozone sensor entity %s", entity_id)

View file

@@ -98,7 +98,7 @@ class DdWrtDeviceScanner(DeviceScanner):
         elements = cleaned_str.split(",")
         num_clients = int(len(elements) / 5)
         self.mac2name = {}
-        for idx in range(0, num_clients):
+        for idx in range(num_clients):
             # The data is a single array
             # every 5 elements represents one host, the MAC
             # is the third element and the name is the first.

View file

@@ -34,7 +34,7 @@ class DemoMailbox(Mailbox):
         super().__init__(hass, name)
         self._messages: dict[str, dict[str, Any]] = {}
         txt = "Lorem ipsum dolor sit amet, consectetur adipiscing elit. "
-        for idx in range(0, 10):
+        for idx in range(10):
             msgtime = int(
                 dt_util.as_timestamp(dt_util.utcnow()) - 3600 * 24 * (10 - idx)
             )

View file

@@ -709,7 +709,7 @@ class Thermostat(ClimateEntity):
     def set_humidity(self, humidity: int) -> None:
         """Set the humidity level."""
-        if humidity not in range(0, 101):
+        if humidity not in range(101):
             raise ValueError(
                 f"Invalid set_humidity value (must be in range 0-100): {humidity}"
             )

View file

@@ -172,7 +172,7 @@ class EcobeeWeather(WeatherEntity):
         forecasts: list[Forecast] = []
         date = dt_util.utcnow()
-        for day in range(0, 5):
+        for day in range(5):
             forecast = _process_forecast(self.weather["forecasts"][day])
             if forecast is None:
                 continue

View file

@@ -151,7 +151,7 @@ PRODUCTION_PHASE_SENSORS = {
         )
         for sensor in list(PRODUCTION_SENSORS)
     ]
-    for phase in range(0, 3)
+    for phase in range(3)
 }
@@ -221,7 +221,7 @@ CONSUMPTION_PHASE_SENSORS = {
         )
         for sensor in list(CONSUMPTION_SENSORS)
     ]
-    for phase in range(0, 3)
+    for phase in range(3)
 }
@@ -326,7 +326,7 @@ CT_NET_CONSUMPTION_PHASE_SENSORS = {
         )
         for sensor in list(CT_NET_CONSUMPTION_SENSORS)
     ]
-    for phase in range(0, 3)
+    for phase in range(3)
 }

 CT_PRODUCTION_SENSORS = (
@@ -361,7 +361,7 @@ CT_PRODUCTION_PHASE_SENSORS = {
         )
         for sensor in list(CT_PRODUCTION_SENSORS)
     ]
-    for phase in range(0, 3)
+    for phase in range(3)
 }

View file

@@ -253,7 +253,7 @@ def setup(hass: HomeAssistant, base_config: ConfigType) -> bool: # noqa: C901
                 hdmi_network.send_command(KeyReleaseCommand(dst=ADDR_AUDIOSYSTEM))
             else:
                 att = 1 if att == "" else int(att)
-                for _ in range(0, att):
+                for _ in range(att):
                     hdmi_network.send_command(KeyPressCommand(cmd, dst=ADDR_AUDIOSYSTEM))
                     hdmi_network.send_command(KeyReleaseCommand(dst=ADDR_AUDIOSYSTEM))

View file

@@ -384,7 +384,7 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
         await homekit.async_stop()

     logged_shutdown_wait = False
-    for _ in range(0, SHUTDOWN_TIMEOUT):
+    for _ in range(SHUTDOWN_TIMEOUT):
         if async_port_is_available(entry.data[CONF_PORT]):
             break

View file

@@ -75,8 +75,8 @@ def _migrate_to_new_unique_id(
     phase_list = ["A", "B", "C", "NET"]
     id_phase_range = 1 if model == DEVICE_3080 else 4
     id_name_range = 5 if model == DEVICE_3080 else 7
-    for row in range(0, id_phase_range):
-        for idx in range(0, id_name_range):
+    for row in range(id_phase_range):
+        for idx in range(id_name_range):
             old_unique_id = f"{serial_number}-{row}-{idx}"
             new_unique_id = (
                 f"{serial_number}_{name_list[idx]}"

View file

@@ -79,7 +79,7 @@ ADD_DEFAULT_LINKS_SCHEMA = vol.Schema({vol.Required(CONF_ENTITY_ID): cv.entity_i
 def normalize_byte_entry_to_int(entry: int | bytes | str):
     """Format a hex entry value."""
     if isinstance(entry, int):
-        if entry in range(0, 256):
+        if entry in range(256):
             return entry
         raise ValueError("Must be single byte")
     if isinstance(entry, str):

View file

@@ -92,7 +92,7 @@ MOTION_ENERGY_GATES = [
         entity_registry_enabled_default=False,
         native_unit_of_measurement="Target Energy",
     )
-    for i in range(0, 9)
+    for i in range(9)
 ]

 STATIC_ENERGY_GATES = [
@@ -103,7 +103,7 @@ STATIC_ENERGY_GATES = [
         entity_registry_enabled_default=False,
         native_unit_of_measurement="Target Energy",
     )
-    for i in range(0, 9)
+    for i in range(9)
 ]

 SENSOR_DESCRIPTIONS = [

View file

@@ -418,7 +418,7 @@ class LIFXMultiZone(LIFXColor):
                 await super().set_color(hsbk, kwargs, duration)
                 return
-            zones = list(range(0, num_zones))
+            zones = list(range(num_zones))
         else:
             zones = [x for x in set(zones) if x < num_zones]

View file

@@ -185,7 +185,7 @@ class BaseStructPlatform(BasePlatform, RestoreEntity):
         """Do swap as needed."""
         if slave_count:
             swapped = []
-            for i in range(0, self._slave_count + 1):
+            for i in range(self._slave_count + 1):
                 inx = i * self._slave_size
                 inx2 = inx + self._slave_size
                 swapped.extend(self._swap_registers(registers[inx:inx2], 0))

View file

@@ -94,7 +94,7 @@ class ModbusBinarySensor(BasePlatform, RestoreEntity, BinarySensorEntity):
         )
         return [
-            SlaveSensor(self._coordinator, idx, entry) for idx in range(0, slave_count)
+            SlaveSensor(self._coordinator, idx, entry) for idx in range(slave_count)
         ]

     async def async_added_to_hass(self) -> None:

View file

@@ -95,7 +95,7 @@ class ModbusRegisterSensor(BaseStructPlatform, RestoreSensor, SensorEntity):
         )
         return [
-            SlaveSensor(self._coordinator, idx, entry) for idx in range(0, slave_count)
+            SlaveSensor(self._coordinator, idx, entry) for idx in range(slave_count)
         ]

     async def async_added_to_hass(self) -> None:

View file

@@ -47,7 +47,7 @@ async def async_setup_entry(
     async_add_entities(
         NINAMessage(coordinator, ent, regions[ent], i + 1, config_entry)
         for ent in coordinator.data
-        for i in range(0, message_slots)
+        for i in range(message_slots)
     )

View file

@@ -225,7 +225,7 @@ class OptionsFlowHandler(OptionsFlow):
             removed_entities_slots = [
                 f"{region}-{slot_id}"
                 for region in self.data[CONF_REGIONS]
-                for slot_id in range(0, self.data[CONF_MESSAGE_SLOTS] + 1)
+                for slot_id in range(self.data[CONF_MESSAGE_SLOTS] + 1)
                 if slot_id > user_input[CONF_MESSAGE_SLOTS]
             ]

View file

@@ -170,7 +170,7 @@ class OpenHardwareMonitorData:
         result = devices.copy()
         if json[OHM_CHILDREN]:
-            for child_index in range(0, len(json[OHM_CHILDREN])):
+            for child_index in range(len(json[OHM_CHILDREN])):
                 child_path = path.copy()
                 child_path.append(child_index)

View file

@@ -160,7 +160,7 @@ def execute(
     This method also retries a few times in the case of stale connections.
     """
     debug = _LOGGER.isEnabledFor(logging.DEBUG)
-    for tryno in range(0, RETRIES):
+    for tryno in range(RETRIES):
         try:
             if debug:
                 timer_start = time.perf_counter()

View file

@@ -251,7 +251,7 @@ class IRobotVacuum(IRobotEntity, StateVacuumEntity):
         """Set the vacuum cleaner to return to the dock."""
         if self.state == STATE_CLEANING:
             await self.async_pause()
-            for _ in range(0, 10):
+            for _ in range(10):
                 if self.state == STATE_PAUSED:
                     break
                 await asyncio.sleep(1)

View file

@@ -92,7 +92,7 @@ def get_codec_string(mp4_bytes: bytes) -> str:
                 stsd_box[112:116], byteorder="big"
             )
             reverse = 0
-            for i in range(0, 32):
+            for i in range(32):
                 reverse |= general_profile_compatibility & 1
                 if i == 31:
                     break

View file

@@ -22,7 +22,7 @@ COLORS = ["black", "cyan", "magenta", "yellow"]
 DRUM_COLORS = COLORS
 TONER_COLORS = COLORS
 TRAYS = range(1, 6)
-OUTPUT_TRAYS = range(0, 6)
+OUTPUT_TRAYS = range(6)
 DEFAULT_MONITORED_CONDITIONS = []
 DEFAULT_MONITORED_CONDITIONS.extend([f"toner_{key}" for key in TONER_COLORS])
 DEFAULT_MONITORED_CONDITIONS.extend([f"drum_{key}" for key in DRUM_COLORS])

View file

@@ -56,10 +56,7 @@ def setup_platform(
     py_touchline = PyTouchline()
     number_of_devices = int(py_touchline.get_number_of_devices(host))
     add_entities(
-        (
-            Touchline(PyTouchline(device_id))
-            for device_id in range(0, number_of_devices)
-        ),
+        (Touchline(PyTouchline(device_id)) for device_id in range(number_of_devices)),
         True,
     )

View file

@@ -203,7 +203,7 @@ class ZWaveBaseEntity(Entity):
                     property_key=primary_value.property_key,
                 )
                 in self.info.node.values
-                for endpoint_idx in range(0, primary_value.endpoint)
+                for endpoint_idx in range(primary_value.endpoint)
             ):
                 name += f" ({primary_value.endpoint})"

View file

@@ -605,13 +605,8 @@ select = [
     "N815", # Variable {name} in class scope should not be mixedCase
     "PERF", # Perflint
     "PGH004", # Use specific rule codes when using noqa
-    "PIE800", # Unnecessary dictionary unpacking operators
+    "PIE", # flake8-pie
     "PL", # pylint
-    "PIE804", # Unnecessary dict kwargs
-    "PIE790", # Unnecessary pass statement
-    "PIE794", # Class field is defined multiple times
-    "PIE807", # Prefer list/dict over useless lambda
-    "PIE810", # Call startswith/endswith once with a tuple
     "RUF005", # Consider iterable unpacking instead of concatenation
     "RUF006", # Store a reference to the return value of asyncio.create_task
     # "RUF100", # Unused `noqa` directive; temporarily every now and then to clean them up

View file

@@ -236,9 +236,9 @@ def mock_monitor(serial_number: int) -> MagicMock:
     monitor = mock_with_listeners()
     monitor.serial_number = serial_number
     monitor.voltage_sensor = mock_voltage_sensor()
-    monitor.pulse_counters = [mock_pulse_counter() for i in range(0, 4)]
-    monitor.temperature_sensors = [mock_temperature_sensor() for i in range(0, 8)]
-    monitor.channels = [mock_channel() for i in range(0, 32)]
+    monitor.pulse_counters = [mock_pulse_counter() for i in range(4)]
+    monitor.temperature_sensors = [mock_temperature_sensor() for i in range(8)]
+    monitor.channels = [mock_channel() for i in range(32)]
     return monitor

View file

@@ -49,7 +49,7 @@ async def test_aid_generation(
     aid_storage = AccessoryAidStorage(hass, config_entry)
     await aid_storage.async_initialize()
-    for _ in range(0, 2):
+    for _ in range(2):
         assert (
             aid_storage.get_or_allocate_aid_for_entity_id(light_ent.entity_id)
             == 1953095294
@@ -72,7 +72,7 @@ async def test_aid_generation(
     aid_storage.delete_aid(get_system_unique_id(remote_ent, remote_ent.unique_id))
     aid_storage.delete_aid("non-existent-one")
-    for _ in range(0, 2):
+    for _ in range(2):
         assert (
             aid_storage.get_or_allocate_aid_for_entity_id(light_ent.entity_id)
             == 1953095294
@@ -112,7 +112,7 @@ async def test_no_aid_collision(
     seen_aids = set()
-    for unique_id in range(0, 202):
+    for unique_id in range(202):
         ent = entity_registry.async_get_or_create(
             "light", "device", unique_id, device_id=device_entry.id
         )
@@ -141,7 +141,7 @@ async def test_aid_generation_no_unique_ids_handles_collision(
     seen_aids = set()
     collisions = []
-    for light_id in range(0, 220):
+    for light_id in range(220):
         entity_id = f"light.light{light_id}"
         hass.states.async_set(entity_id, "on")
         expected_aid = fnv1a_32(entity_id.encode("utf-8"))

View file

@@ -550,7 +550,7 @@ async def test_grouped_lights(
     # PUT request should have been sent to ALL group lights with correct params
     assert len(mock_bridge_v2.mock_requests) == 3
-    for index in range(0, 3):
+    for index in range(3):
         assert (
             mock_bridge_v2.mock_requests[index]["json"]["identify"]["action"]
             == "identify"
@@ -588,7 +588,7 @@ async def test_grouped_lights(
     # PUT request should have been sent to ALL group lights with correct params
     assert len(mock_bridge_v2.mock_requests) == 3
-    for index in range(0, 3):
+    for index in range(3):
         assert (
             mock_bridge_v2.mock_requests[index]["json"]["identify"]["action"]
             == "identify"

View file

@@ -185,7 +185,7 @@ async def test_bulk_remove(
     ws_get_items: Callable[[], Awaitable[dict[str, str]]],
 ) -> None:
     """Test removing multiple todo items."""
-    for i in range(0, 5):
+    for i in range(5):
         await hass.services.async_call(
             TODO_DOMAIN,
             "add_item",

View file

@@ -49,7 +49,7 @@ class TestMailbox(mailbox.Mailbox):
         """Initialize Test mailbox."""
         super().__init__(hass, name)
         self._messages: dict[str, dict[str, Any]] = {}
-        for idx in range(0, 10):
+        for idx in range(10):
            msg = _create_message(idx)
            msgsha = msg["sha"]
            self._messages[msgsha] = msg

View file

@@ -910,7 +910,7 @@ async def test_virtual_sensor(
     hass: HomeAssistant, entity_registry: er.EntityRegistry, mock_do_cycle, expected
 ) -> None:
     """Run test for sensor."""
-    for i in range(0, len(expected)):
+    for i in range(len(expected)):
         entity_id = f"{SENSOR_DOMAIN}.{TEST_ENTITY_NAME}".replace(" ", "_")
         unique_id = f"{SLAVE_UNIQUE_ID}"
         if i:
@@ -1080,7 +1080,7 @@ async def test_virtual_swap_sensor(
     hass: HomeAssistant, mock_do_cycle, expected
 ) -> None:
     """Run test for sensor."""
-    for i in range(0, len(expected)):
+    for i in range(len(expected)):
         entity_id = f"{SENSOR_DOMAIN}.{TEST_ENTITY_NAME}".replace(" ", "_")
         if i:
             entity_id = f"{entity_id}_{i}"

View file

@@ -1327,7 +1327,7 @@ async def help_test_entity_debug_info_max_messages(
     start_dt = datetime(2019, 1, 1, 0, 0, 0, tzinfo=dt_util.UTC)
     with freeze_time(start_dt):
-        for i in range(0, debug_info.STORED_MESSAGES + 1):
+        for i in range(debug_info.STORED_MESSAGES + 1):
             async_fire_mqtt_message(hass, "test-topic", f"{i}")

     debug_info_data = debug_info.info_for_device(hass, device.id)

View file

@@ -4130,7 +4130,7 @@ async def test_multi_platform_discovery(
         },
     }
     for platform, config in entity_configs.items():
-        for set_number in range(0, 2):
+        for set_number in range(2):
            set_config = deepcopy(config)
            set_config["name"] = f"test_{set_number}"
            topic = f"homeassistant/{platform}/bla_{set_number}/config"
@@ -4139,7 +4139,7 @@ async def test_multi_platform_discovery(
         topic = f"homeassistant/{platform}/bla/config"
         async_fire_mqtt_message(hass, topic, json.dumps(config))
     await hass.async_block_till_done()
-    for set_number in range(0, 2):
+    for set_number in range(2):
         for platform in entity_configs:
             entity_id = f"{platform}.test_{set_number}"
             state = hass.states.get(entity_id)

View file

@@ -983,7 +983,7 @@ async def test_controlling_the_state_with_legacy_color_handling(
     assert state.attributes.get("xy_color") is None
     assert not state.attributes.get(ATTR_ASSUMED_STATE)
-    for _ in range(0, 2):
+    for _ in range(2):
         # Returned state after the light was turned on
         # Receiving legacy color mode: rgb.
         async_fire_mqtt_message(

View file

@@ -1053,7 +1053,7 @@ async def test_multiple_devices(
     assert len(browse.children) == 0
     # Send events for device #1
-    for i in range(0, 5):
+    for i in range(5):
         auth.responses = [
             aiohttp.web.json_response(GENERATE_IMAGE_URL_RESPONSE),
             aiohttp.web.Response(body=IMAGE_BYTES_FROM_EVENT),
@@ -1078,7 +1078,7 @@ async def test_multiple_devices(
     assert len(browse.children) == 0
     # Send events for device #2
-    for i in range(0, 3):
+    for i in range(3):
         auth.responses = [
             aiohttp.web.json_response(GENERATE_IMAGE_URL_RESPONSE),
             aiohttp.web.Response(body=IMAGE_BYTES_FROM_EVENT),
@@ -1340,7 +1340,7 @@ async def test_camera_event_media_eviction(
     assert len(browse.children) == 0
     event_timestamp = dt_util.now()
-    for i in range(0, 7):
+    for i in range(7):
         auth.responses = [aiohttp.web.Response(body=f"image-bytes-{i}".encode())]
         ts = event_timestamp + datetime.timedelta(seconds=i)
         await subscriber.async_receive_event(

View file

@@ -964,7 +964,7 @@ async def test_mobile_exit_move_beacon(hass: HomeAssistant, context) -> None:
 async def test_mobile_multiple_async_enter_exit(hass: HomeAssistant, context) -> None:
     """Test the multiple entering."""
     # Test race condition
-    for _ in range(0, 20):
+    for _ in range(20):
         async_fire_mqtt_message(
             hass, EVENT_TOPIC, json.dumps(MOBILE_BEACON_ENTER_EVENT_MESSAGE)
         )

View file

@@ -72,7 +72,7 @@ async def test_schema_update_calls(recorder_db_url: str, hass: HomeAssistant) ->
     update.assert_has_calls(
         [
             call(instance, hass, engine, session_maker, version + 1, 0)
-            for version in range(0, db_schema.SCHEMA_VERSION)
+            for version in range(db_schema.SCHEMA_VERSION)
         ]
     )

View file

@@ -242,7 +242,7 @@ async def test_statistic_during_period(
             "min": -76 + i * 2,
             "sum": i,
         }
-        for i in range(0, 39)
+        for i in range(39)
     ]
     imported_stats = []
     slice_end = 12 - offset
@@ -255,7 +255,7 @@ async def test_statistic_during_period(
             "sum": imported_stats_5min[slice_end - 1]["sum"],
         }
     )
-    for i in range(0, 2):
+    for i in range(2):
         slice_start = i * 12 + (12 - offset)
         slice_end = (i + 1) * 12 + (12 - offset)
         assert imported_stats_5min[slice_start]["start"].minute == 0
@@ -664,7 +664,7 @@ async def test_statistic_during_period_hole(
             "min": -76 + i * 2,
             "sum": i,
         }
-        for i in range(0, 6)
+        for i in range(6)
     ]
     imported_metadata = {

View file

@@ -166,7 +166,7 @@ async def test_bulk_remove(
 ) -> None:
     """Test removing a todo item."""
-    for _i in range(0, 5):
+    for _i in range(5):
         await hass.services.async_call(
             TODO_DOMAIN,
             "add_item",

View file

@@ -71,7 +71,7 @@ class MockedInterface(dict):
         fake_friends = [{"steamid": ACCOUNT_2}]
         fake_friends.extend(
             {"steamid": "".join(random.choices(string.digits, k=len(ACCOUNT_1)))}
-            for _ in range(0, 4)
+            for _ in range(4)
         )
         return {"friendslist": {"friends": fake_friends}}

View file

@@ -460,7 +460,7 @@ async def test_skip_initial_bad_packets(hass: HomeAssistant) -> None:
     num_packets = LONGER_TEST_SEQUENCE_LENGTH
     packets = list(PacketSequence(num_packets))
     num_bad_packets = MAX_MISSING_DTS - 1
-    for i in range(0, num_bad_packets):
+    for i in range(num_bad_packets):
         packets[i].dts = None
     decoded_stream = await async_decode_stream(hass, packets)
@@ -490,7 +490,7 @@ async def test_too_many_initial_bad_packets_fails(hass: HomeAssistant) -> None:
     num_packets = LONGER_TEST_SEQUENCE_LENGTH
     packets = list(PacketSequence(num_packets))
     num_bad_packets = MAX_MISSING_DTS + 1
-    for i in range(0, num_bad_packets):
+    for i in range(num_bad_packets):
         packets[i].dts = None
     py_av = MockPyAv()

View file

@@ -278,7 +278,7 @@ async def test_different_endpoint_migration_status_sensor(
     assert entity_entry.unique_id == old_unique_id
     # Do this twice to make sure re-interview doesn't do anything weird
-    for _ in range(0, 2):
+    for _ in range(2):
         # Add a ready node, unique ID should be migrated
         event = {"node": node}
         driver.controller.emit("node added", event)
@@ -387,7 +387,7 @@ async def test_old_entity_migration_notification_binary_sensor(
     assert entity_entry.unique_id == old_unique_id
     # Do this twice to make sure re-interview doesn't do anything weird
-    for _ in range(0, 2):
+    for _ in range(2):
         # Add a ready node, unique ID should be migrated
         event = {"node": node}
         driver.controller.emit("node added", event)