Upgrade pyupgrade to 2.21.2, apply its changes (#52987)

Authored by Ville Skyttä on 2021-07-19 11:46:09 +03:00; committed by GitHub
parent c96f01df1f
commit 470f2dd73f
73 changed files with 152 additions and 162 deletions

View file

@ -1,6 +1,6 @@
repos:
- repo: https://github.com/asottile/pyupgrade
rev: v2.16.0
rev: v2.21.2
hooks:
- id: pyupgrade
args: [--py38-plus]
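Almost every Python change in this commit comes from pyupgrade rules added between the two pinned versions: a list comprehension that is consumed immediately (star-unpacked into a call, or fed to reducers such as sum(), max(), or sorted()) is rewritten as a generator expression, skipping the temporary list. A minimal runnable sketch of the dominant pattern, using placeholder names (refresh, device_ids) rather than code from this repository:

    import asyncio

    async def refresh(device_id):
        # Placeholder coroutine standing in for the per-device work in the hunks below.
        return device_id

    async def main(device_ids):
        # Before pyupgrade: a throwaway list is built, then unpacked into gather().
        before = await asyncio.gather(*[refresh(d) for d in device_ids])
        # After pyupgrade: the generator expression feeds gather() directly.
        after = await asyncio.gather(*(refresh(d) for d in device_ids))
        assert before == after == list(device_ids)

    asyncio.run(main([1, 2, 3]))

The file-by-file hunks that follow are presumably just the output of running the bumped hook over the tree (for example, pre-commit run pyupgrade --all-files).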

View file

@ -171,10 +171,10 @@ class Analytics:
ATTR_STATISTICS, False
):
configured_integrations = await asyncio.gather(
*[
*(
async_get_integration(self.hass, domain)
for domain in async_get_loaded_integrations(self.hass)
],
),
return_exceptions=True,
)
@ -201,10 +201,10 @@ class Analytics:
if supervisor_info is not None:
installed_addons = await asyncio.gather(
*[
*(
hassio.async_get_addon_info(self.hass, addon[ATTR_SLUG])
for addon in supervisor_info[ATTR_ADDONS]
]
)
)
for addon in installed_addons:
addons.append(

View file

@ -57,10 +57,10 @@ async def async_setup_entry(hass, entry):
async def setup_platforms():
"""Set up platforms and initiate connection."""
await asyncio.gather(
*[
*(
hass.config_entries.async_forward_entry_setup(entry, platform)
for platform in PLATFORMS
]
)
)
await manager.init()

View file

@ -43,7 +43,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType):
async def _stop(_):
asyncio.gather(
*[_await_cancel(task) for task in hass.data[DOMAIN_DATA_TASKS].values()]
*(_await_cancel(task) for task in hass.data[DOMAIN_DATA_TASKS].values())
)
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _stop)

View file

@ -173,10 +173,10 @@ class AugustData(AugustSubscriberMixin):
async def _async_refresh_device_detail_by_ids(self, device_ids_list):
await asyncio.gather(
*[
*(
self._async_refresh_device_detail_by_id(device_id)
for device_id in device_ids_list
]
)
)
async def _async_refresh_device_detail_by_id(self, device_id):

View file

@ -98,10 +98,10 @@ class ActivityStream(AugustSubscriberMixin):
async def _async_update_device_activities(self, time):
_LOGGER.debug("Start retrieving device activities")
await asyncio.gather(
*[
*(
self._update_debounce[house_id].async_call()
for house_id in self._house_ids
]
)
)
self._last_update_time = time

View file

@ -75,10 +75,10 @@ async def async_validate_config_item(hass, config, full_config=None):
if CONF_CONDITION in config:
config[CONF_CONDITION] = await asyncio.gather(
*[
*(
async_validate_condition_config(hass, cond)
for cond in config[CONF_CONDITION]
]
)
)
config[CONF_ACTION] = await script.async_validate_actions_config(

View file

@ -64,7 +64,7 @@ class AwairDataUpdateCoordinator(DataUpdateCoordinator):
user = await self._awair.user()
devices = await user.devices()
results = await gather(
*[self._fetch_air_data(device) for device in devices]
*(self._fetch_air_data(device) for device in devices)
)
return {result.device.uuid: result for result in results}
except AuthError as err:

View file

@ -226,12 +226,12 @@ class AxisNetworkDevice:
async def start_platforms():
await asyncio.gather(
*[
*(
self.hass.config_entries.async_forward_entry_setup(
self.config_entry, platform
)
for platform in PLATFORMS
]
)
)
if self.option_events:
self.api.stream.connection_status_callback.append(

View file

@ -223,10 +223,10 @@ class ClimaCellDataUpdateCoordinator(DataUpdateCoordinator):
CC_V3_ATTR_WIND_GUST,
CC_V3_ATTR_CLOUD_COVER,
CC_V3_ATTR_PRECIPITATION_TYPE,
*[
*(
sensor_type[ATTR_FIELD]
for sensor_type in CC_V3_SENSOR_TYPES
],
),
]
)
data[FORECASTS][HOURLY] = await self._api.forecast_hourly(
@ -283,7 +283,7 @@ class ClimaCellDataUpdateCoordinator(DataUpdateCoordinator):
CC_ATTR_WIND_GUST,
CC_ATTR_CLOUD_COVER,
CC_ATTR_PRECIPITATION_TYPE,
*[sensor_type[ATTR_FIELD] for sensor_type in CC_SENSOR_TYPES],
*(sensor_type[ATTR_FIELD] for sensor_type in CC_SENSOR_TYPES),
],
[
CC_ATTR_TEMPERATURE_LOW,

View file

@ -148,7 +148,7 @@ class CloudClient(Interface):
tasks.append(enable_google)
if tasks:
await asyncio.gather(*[task(None) for task in tasks])
await asyncio.gather(*(task(None) for task in tasks))
async def cleanups(self) -> None:
"""Cleanup some stuff after logout."""

View file

@ -82,10 +82,10 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
unload = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
await asyncio.gather(
*[
*(
hass.async_add_executor_job(gateway.websocket_disconnect)
for gateway in hass.data[DOMAIN][entry.entry_id]["gateways"]
]
)
)
hass.data[DOMAIN][entry.entry_id]["listener"]()
hass.data[DOMAIN].pop(entry.entry_id)

View file

@ -212,10 +212,10 @@ class AbstractConfig(ABC):
async def async_sync_entities_all(self):
"""Sync all entities to Google for all registered agents."""
res = await gather(
*[
*(
self.async_sync_entities(agent_user_id)
for agent_user_id in self._store.agent_user_ids
]
)
)
return max(res, default=204)

View file

@ -213,10 +213,10 @@ async def handle_devices_execute(hass, data, payload):
executions[entity_id] = [execution]
execute_results = await asyncio.gather(
*[
*(
_entity_execute(entities[entity_id], data, execution)
for entity_id, execution in executions.items()
]
)
)
for entity_id, result in zip(executions, execute_results):

View file

@ -140,10 +140,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def start_platforms():
"""Continue setting up the platforms."""
await asyncio.gather(
*[
*(
hass.config_entries.async_forward_entry_setup(entry, platform)
for platform in PLATFORMS
]
)
)
# Only refresh the coordinator after all platforms are loaded.
await coordinator.async_refresh()

View file

@ -248,10 +248,10 @@ async def async_setup(hass: ha.HomeAssistant, config: dict) -> bool: # noqa: C9
if not reload_entries:
raise ValueError("There were no matching config entries to reload")
await asyncio.gather(
*[
*(
hass.config_entries.async_reload(config_entry_id)
for config_entry_id in reload_entries
]
)
)
hass.helpers.service.async_register_admin_service(

View file

@ -228,10 +228,10 @@ async def async_setup(hass, config):
async def _async_stop_homekit_controller(event):
await asyncio.gather(
*[
*(
connection.async_unload()
for connection in hass.data[KNOWN_DEVICES].values()
]
)
)
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, _async_stop_homekit_controller)

View file

@ -273,10 +273,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def setup_then_listen() -> None:
await asyncio.gather(
*[
*(
hass.config_entries.async_forward_entry_setup(entry, platform)
for platform in PLATFORMS
]
)
)
assert hyperion_client
if hyperion_client.instances is not None:
@ -306,12 +306,12 @@ async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) ->
# Disconnect the shared instance clients.
await asyncio.gather(
*[
*(
config_data[CONF_INSTANCE_CLIENTS][
instance_num
].async_client_disconnect()
for instance_num in config_data[CONF_INSTANCE_CLIENTS]
]
)
)
# Disconnect the root client.

View file

@ -245,7 +245,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
await knx_module.xknx.stop()
await asyncio.gather(
*[platform.async_reset() for platform in async_get_platforms(hass, DOMAIN)]
*(platform.async_reset() for platform in async_get_platforms(hass, DOMAIN))
)
await async_setup(hass, config)

View file

@ -71,7 +71,7 @@ async def build_item_response(media_library, payload, get_thumbnail_url=None):
return None
children = await asyncio.gather(
*[item_payload(item, get_thumbnail_url) for item in media]
*(item_payload(item, get_thumbnail_url) for item in media)
)
if search_type in (MEDIA_TYPE_TVSHOW, MEDIA_TYPE_MOVIE) and search_id == "":
@ -209,7 +209,7 @@ async def library_payload():
}
library_info.children = await asyncio.gather(
*[
*(
item_payload(
{
"label": item["label"],
@ -220,7 +220,7 @@ async def library_payload():
for item in [
{"label": name, "type": type_} for type_, name in library.items()
]
]
)
)
return library_info

View file

@ -574,10 +574,10 @@ def _apply_event_types_filter(hass, query, event_types):
def _apply_event_entity_id_matchers(events_query, entity_ids):
return events_query.filter(
sqlalchemy.or_(
*[
*(
Events.event_data.contains(ENTITY_ID_JSON_TEMPLATE.format(entity_id))
for entity_id in entity_ids
]
)
)
)

View file

@ -22,10 +22,10 @@ async def system_health_info(hass):
health_info.update(await hass.data[DOMAIN]["resources"].async_get_info())
dashboards_info = await asyncio.gather(
*[
*(
hass.data[DOMAIN]["dashboards"][dashboard].async_get_info()
for dashboard in hass.data[DOMAIN]["dashboards"]
]
)
)
modes = set()

View file

@ -352,10 +352,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def setup_then_listen() -> None:
await asyncio.gather(
*[
*(
hass.config_entries.async_forward_entry_setup(entry, platform)
for platform in PLATFORMS
]
)
)
entry.async_on_unload(
coordinator.async_add_listener(_async_process_motioneye_cameras)

View file

@ -231,10 +231,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def finish() -> None:
await asyncio.gather(
*[
*(
hass.config_entries.async_forward_entry_setup(entry, platform)
for platform in PLATFORMS_WITH_ENTRY_SUPPORT
]
)
)
await finish_setup(hass, entry, gateway)

View file

@ -252,7 +252,7 @@ class NiluSensor(AirQualityEntity):
sensors = self._api.data.sensors.values()
if sensors:
max_index = max([s.pollution_index for s in sensors])
max_index = max(s.pollution_index for s in sensors)
self._max_aqi = max_index
self._attrs[ATTR_POLLUTION_INDEX] = POLLUTION_INDEX[self._max_aqi]

View file

@ -60,7 +60,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
password = config[DOMAIN].get(CONF_PASSWORD)
timeout = config[DOMAIN].get(CONF_TIMEOUT)
auth_str = base64.b64encode(f"{user}:{password}".encode("utf-8"))
auth_str = base64.b64encode(f"{user}:{password}".encode())
session = hass.helpers.aiohttp_client.async_get_clientsession()
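The encode() change above relies on str.encode() defaulting to UTF-8 on Python 3, so the explicit codec argument is redundant. A quick check with a made-up credential string:

    # Identical bytes either way; "utf-8" is already the default codec.
    assert "user:secret".encode() == "user:secret".encode("utf-8")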

View file

@ -53,10 +53,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Start platforms and cleanup devices."""
# wait until all required platforms are ready
await asyncio.gather(
*[
*(
hass.config_entries.async_forward_entry_setup(entry, platform)
for platform in PLATFORMS
]
)
)
await cleanup_registry()

View file

@ -264,10 +264,10 @@ async def async_setup_entry( # noqa: C901
async def start_platforms():
await asyncio.gather(
*[
*(
hass.config_entries.async_forward_entry_setup(entry, platform)
for platform in PLATFORMS
]
)
)
if entry.data.get(CONF_USE_ADDON):
mqtt_client_task = asyncio.create_task(mqtt_client.start_client(manager))

View file

@ -102,14 +102,14 @@ async def async_setup_scanner(hass, config, async_see, discovery_info=None):
"""Update all the hosts on every interval time."""
results = await gather_with_concurrency(
CONCURRENT_PING_LIMIT,
*[hass.async_add_executor_job(host.update) for host in hosts],
*(hass.async_add_executor_job(host.update) for host in hosts),
)
await asyncio.gather(
*[
*(
async_see(dev_id=host.dev_id, source_type=SOURCE_TYPE_ROUTER)
for idx, host in enumerate(hosts)
if results[idx]
]
)
)
else:
@ -124,11 +124,11 @@ async def async_setup_scanner(hass, config, async_see, discovery_info=None):
)
_LOGGER.debug("Multiping responses: %s", responses)
await asyncio.gather(
*[
*(
async_see(dev_id=dev_id, source_type=SOURCE_TYPE_ROUTER)
for idx, dev_id in enumerate(ip_to_dev_id.values())
if responses[idx].is_alive
]
)
)
async def _async_update_interval(now):

View file

@ -207,7 +207,7 @@ class RadarrSensor(SensorEntity):
filter(lambda x: x["path"] in self.included, res.json())
)
self._state = "{:.2f}".format(
to_unit(sum([data["freeSpace"] for data in self.data]), self._unit)
to_unit(sum(data["freeSpace"] for data in self.data), self._unit)
)
elif self.type == "status":
self.data = res.json()
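sum() here, like max() and sorted() in other hunks, accepts any iterable, so the surrounding brackets only allocate an intermediate list. An illustration with made-up numbers:

    data = [{"freeSpace": 10}, {"freeSpace": 32}]
    # Both forms yield 42; the generator version avoids building a temporary list first.
    assert sum([d["freeSpace"] for d in data]) == sum(d["freeSpace"] for d in data) == 42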

View file

@ -304,7 +304,7 @@ def _update_states_table_with_foreign_key_options(connection, engine):
states_key_constraints = Base.metadata.tables[TABLE_STATES].foreign_key_constraints
old_states_table = Table( # noqa: F841 pylint: disable=unused-variable
TABLE_STATES, MetaData(), *[alter["old_fk"] for alter in alters]
TABLE_STATES, MetaData(), *(alter["old_fk"] for alter in alters)
)
for alter in alters:

View file

@ -57,10 +57,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def start_platforms():
await asyncio.gather(
*[
*(
hass.config_entries.async_forward_entry_setup(entry, platform)
for platform in PLATFORMS
]
)
)
await events_coordinator.async_refresh()

View file

@ -71,10 +71,10 @@ async def async_validate_config_item(hass, config, full_config=None):
config = SCRIPT_ENTITY_SCHEMA(config)
config[CONF_SEQUENCE] = await asyncio.gather(
*[
*(
async_validate_action_config(hass, action)
for action in config[CONF_SEQUENCE]
]
)
)
return config

View file

@ -69,7 +69,7 @@ class SharkIqUpdateCoordinator(DataUpdateCoordinator):
_LOGGER.debug("Updating sharkiq data")
online_vacs = (self.shark_vacs[dsn] for dsn in self.online_dsns)
await asyncio.gather(*[self._async_update_vacuum(v) for v in online_vacs])
await asyncio.gather(*(self._async_update_vacuum(v) for v in online_vacs))
except (
SharkIqAuthError,
SharkIqNotAuthedError,

View file

@ -162,7 +162,7 @@ class SonarrDiskspaceSensor(SonarrSensor):
"""Update entity."""
app = await self.sonarr.update()
self._disks = app.disks
self._total_free = sum([disk.free for disk in self._disks])
self._total_free = sum(disk.free for disk in self._disks)
@property
def extra_state_attributes(self) -> dict[str, Any] | None:

View file

@ -172,7 +172,7 @@ class SonosDiscoveryManager:
async def _async_stop_event_listener(self, event: Event) -> None:
await asyncio.gather(
*[speaker.async_unsubscribe() for speaker in self.data.discovered.values()],
*(speaker.async_unsubscribe() for speaker in self.data.discovered.values()),
return_exceptions=True,
)
if events_asyncio.event_listener:
@ -285,10 +285,10 @@ class SonosDiscoveryManager:
async def setup_platforms_and_discovery(self):
"""Set up platforms and discovery."""
await asyncio.gather(
*[
*(
self.hass.config_entries.async_forward_entry_setup(self.entry, platform)
for platform in PLATFORMS
]
)
)
self.entry.async_on_unload(
self.hass.bus.async_listen_once(

View file

@ -352,7 +352,7 @@ class SonosSpeaker:
"""Cancel all subscriptions."""
_LOGGER.debug("Unsubscribing from events for %s", self.zone_name)
await asyncio.gather(
*[subscription.unsubscribe() for subscription in self._subscriptions],
*(subscription.unsubscribe() for subscription in self._subscriptions),
return_exceptions=True,
)
self._subscriptions = []

View file

@ -257,7 +257,7 @@ class Scanner:
EVENT_HOMEASSISTANT_STARTED, self.flow_dispatcher.async_start
)
await asyncio.gather(
*[listener.async_start() for listener in self._ssdp_listeners]
*(listener.async_start() for listener in self._ssdp_listeners)
)
self._cancel_scan = async_track_time_interval(
self.hass, self.async_scan, SCAN_INTERVAL

View file

@ -48,10 +48,10 @@ async def async_setup_entry(
BridgeCpuSpeedSensor(coordinator, bridge),
BridgeCpuTemperatureSensor(coordinator, bridge),
BridgeCpuVoltageSensor(coordinator, bridge),
*[
*(
BridgeFilesystemSensor(coordinator, bridge, key)
for key, _ in bridge.filesystem.fsSize.items()
],
),
BridgeMemoryFreeSensor(coordinator, bridge),
BridgeMemoryUsedSensor(coordinator, bridge),
BridgeMemoryUsedPercentageSensor(coordinator, bridge),

View file

@ -96,10 +96,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def start_platforms() -> None:
await device_automation.async_setup_entry(hass, entry)
await asyncio.gather(
*[
*(
hass.config_entries.async_forward_entry_setup(entry, platform)
for platform in PLATFORMS
]
)
)
discovery_prefix = entry.data[CONF_DISCOVERY_PREFIX]

View file

@ -159,10 +159,10 @@ class VizioOptionsConfigFlow(config_entries.OptionsFlow):
): cv.multi_select(
[
APP_HOME["name"],
*[
*(
app["name"]
for app in self.hass.data[DOMAIN][CONF_APPS].data
],
),
]
),
}

View file

@ -381,17 +381,17 @@ class VizioDevice(MediaPlayerEntity):
# show the combination with , otherwise just return inputs
if self._available_apps:
return [
*[
*(
_input
for _input in self._available_inputs
if _input not in INPUT_APPS
],
),
*self._available_apps,
*[
*(
app
for app in self._get_additional_app_names()
if app not in self._available_apps
],
),
]
return self._available_inputs

View file

@ -268,7 +268,7 @@ async def handle_manifest_list(
"""Handle integrations command."""
loaded_integrations = async_get_loaded_integrations(hass)
integrations = await asyncio.gather(
*[async_get_integration(hass, domain) for domain in loaded_integrations]
*(async_get_integration(hass, domain) for domain in loaded_integrations)
)
connection.send_result(
msg["id"], [integration.manifest for integration in integrations]

View file

@ -235,12 +235,12 @@ class WemoDiscovery:
_LOGGER.debug("Adding statically configured WeMo devices")
for device in await gather_with_concurrency(
MAX_CONCURRENCY,
*[
*(
self._hass.async_add_executor_job(
validate_static_config, host, port
)
for host, port in self._static_config
],
),
):
if device:
await self._wemo_dispatcher.async_add_unique_device(

View file

@ -21,10 +21,10 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
async_dispatcher_connect(hass, f"{WEMO_DOMAIN}.binary_sensor", _discovered_wemo)
await asyncio.gather(
*[
*(
_discovered_wemo(device)
for device in hass.data[WEMO_DOMAIN]["pending"].pop("binary_sensor")
]
)
)

View file

@ -75,10 +75,10 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
async_dispatcher_connect(hass, f"{WEMO_DOMAIN}.fan", _discovered_wemo)
await asyncio.gather(
*[
*(
_discovered_wemo(device)
for device in hass.data[WEMO_DOMAIN]["pending"].pop("fan")
]
)
)
platform = entity_platform.async_get_current_platform()

View file

@ -50,10 +50,10 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
async_dispatcher_connect(hass, f"{WEMO_DOMAIN}.light", _discovered_wemo)
await asyncio.gather(
*[
*(
_discovered_wemo(device)
for device in hass.data[WEMO_DOMAIN]["pending"].pop("light")
]
)
)

View file

@ -40,10 +40,10 @@ async def async_setup_entry(hass, config_entry, async_add_entities):
async_dispatcher_connect(hass, f"{WEMO_DOMAIN}.switch", _discovered_wemo)
await asyncio.gather(
*[
*(
_discovered_wemo(device)
for device in hass.data[WEMO_DOMAIN]["pending"].pop("switch")
]
)
)

View file

@ -673,21 +673,17 @@ class DataManager:
response = await self._hass.async_add_executor_job(self._api.notify_list)
subscribed_applis = frozenset(
[
profile.appli
for profile in response.profiles
if profile.callbackurl == self._webhook_config.url
]
)
# Determine what subscriptions need to be created.
ignored_applis = frozenset({NotifyAppli.USER, NotifyAppli.UNKNOWN})
to_add_applis = frozenset(
[
appli
for appli in NotifyAppli
if appli not in subscribed_applis and appli not in ignored_applis
]
)
# Subscribe to each one.
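With the removed-line markers lost in this view, the change above is easy to miss: the hunk header (-21/+17 lines) indicates that only the four bracket lines around the two comprehensions were deleted, so each list comprehension becomes a generator expression consumed directly by frozenset(). Roughly, with placeholder data:

    profiles = [("WEIGHT", "https://example.invalid/hook"), ("SLEEP", "https://example.invalid/other")]
    # frozenset() takes any iterable, so the comprehension no longer needs surrounding brackets.
    subscribed = frozenset(
        appli
        for appli, callbackurl in profiles
        if callbackurl == "https://example.invalid/hook"
    )
    assert subscribed == {"WEIGHT"}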

View file

@ -509,10 +509,8 @@ class MusicCastMediaPlayer(MusicCastDeviceEntity, MediaPlayerEntity):
def get_distribution_num(self) -> int:
"""Return the distribution_num (number of clients in the whole musiccast system)."""
return sum(
[
len(server.coordinator.data.group_client_list)
for server in self.get_all_server_entities()
]
)
def is_part_of_group(self, group_server) -> bool:

View file

@ -145,10 +145,10 @@ async def async_unload_entry(hass, config_entry):
# our components don't have unload methods so no need to look at return values
await asyncio.gather(
*[
*(
hass.config_entries.async_forward_entry_unload(config_entry, platform)
for platform in PLATFORMS
]
)
)
hass.data[DATA_ZHA][DATA_ZHA_SHUTDOWN_TASK]()

View file

@ -217,20 +217,20 @@ class ZHAGateway:
_LOGGER.debug("Loading battery powered devices")
await asyncio.gather(
*[
*(
_throttle(dev, cached=True)
for dev in self.devices.values()
if not dev.is_mains_powered
]
)
)
_LOGGER.debug("Loading mains powered devices")
await asyncio.gather(
*[
*(
_throttle(dev, cached=False)
for dev in self.devices.values()
if dev.is_mains_powered
]
)
)
def device_joined(self, device):

View file

@ -419,7 +419,7 @@ class Light(BaseLight, ZhaEntity):
self.async_accept_signal(
self._level_channel, SIGNAL_SET_LEVEL, self.set_level
)
refresh_interval = random.randint(*[x * 60 for x in self._REFRESH_INTERVAL])
refresh_interval = random.randint(*(x * 60 for x in self._REFRESH_INTERVAL))
self._cancel_refresh_handle = async_track_time_interval(
self.hass, self._refresh, timedelta(seconds=refresh_interval)
)

View file

@ -426,10 +426,10 @@ async def async_setup_entry( # noqa: C901
# run discovery on all ready nodes
await asyncio.gather(
*[
*(
async_on_node_added(node)
for node in client.driver.controller.nodes.values()
]
)
)
# listen for new nodes being added to the mesh

View file

@ -447,4 +447,4 @@ class ZWaveServices:
async def async_ping(self, service: ServiceCall) -> None:
"""Ping node(s)."""
nodes: set[ZwaveNode] = service.data[const.ATTR_NODES]
await asyncio.gather(*[node.async_ping() for node in nodes])
await asyncio.gather(*(node.async_ping() for node in nodes))

View file

@ -850,7 +850,7 @@ class ConfigEntries:
async def _async_shutdown(self, event: Event) -> None:
"""Call when Home Assistant is stopping."""
await asyncio.gather(
*[entry.async_shutdown() for entry in self._entries.values()]
*(entry.async_shutdown() for entry in self._entries.values())
)
await self.flow.async_shutdown()
@ -1082,10 +1082,10 @@ class ConfigEntries:
"""Forward the unloading of an entry to platforms."""
return all(
await asyncio.gather(
*[
*(
self.async_forward_entry_unload(entry, platform)
for platform in platforms
]
)
)
)
@ -1506,7 +1506,7 @@ class EntityRegistryDisabledHandler:
)
await asyncio.gather(
*[self.hass.config_entries.async_reload(entry_id) for entry_id in to_reload]
*(self.hass.config_entries.async_reload(entry_id) for entry_id in to_reload)
)

View file

@ -139,15 +139,15 @@ class ObservableCollection(ABC):
async def notify_changes(self, change_sets: Iterable[CollectionChangeSet]) -> None:
"""Notify listeners of a change."""
await asyncio.gather(
*[
*(
listener(change_set.change_type, change_set.item_id, change_set.item)
for listener in self.listeners
for change_set in change_sets
],
*[
),
*(
change_set_listener(change_sets)
for change_set_listener in self.change_set_listeners
],
),
)
@ -368,10 +368,10 @@ def sync_entity_lifecycle(
new_entities = [
entity
for entity in await asyncio.gather(
*[
*(
_func_map[change_set.change_type](change_set)
for change_set in grouped
]
)
)
if entity is not None
]

View file

@ -331,5 +331,5 @@ class EntityComponent:
async def _async_shutdown(self, event: Event) -> None:
"""Call when Home Assistant is stopping."""
await asyncio.gather(
*[platform.async_shutdown() for platform in chain(self._platforms.values())]
*(platform.async_shutdown() for platform in chain(self._platforms.values()))
)

View file

@ -239,7 +239,7 @@ async def async_validate_actions_config(
) -> list[ConfigType]:
"""Validate a list of actions."""
return await asyncio.gather(
*[async_validate_action_config(hass, action) for action in actions]
*(async_validate_action_config(hass, action) for action in actions)
)
@ -880,10 +880,10 @@ async def _async_stop_scripts_after_shutdown(hass, point_in_time):
names = ", ".join([script["instance"].name for script in running_scripts])
_LOGGER.warning("Stopping scripts running too long after shutdown: %s", names)
await asyncio.gather(
*[
*(
script["instance"].async_stop(update_state=False)
for script in running_scripts
]
)
)
@ -902,7 +902,7 @@ async def _async_stop_scripts_at_shutdown(hass, event):
names = ", ".join([script["instance"].name for script in running_scripts])
_LOGGER.debug("Stopping scripts running at shutdown: %s", names)
await asyncio.gather(
*[script["instance"].async_stop() for script in running_scripts]
*(script["instance"].async_stop() for script in running_scripts)
)

View file

@ -155,7 +155,7 @@ async def async_get_component_strings(
domains,
await gather_with_concurrency(
MAX_LOAD_CONCURRENTLY,
*[async_get_integration(hass, domain) for domain in domains],
*(async_get_integration(hass, domain) for domain in domains),
),
)
)
@ -234,10 +234,10 @@ class _TranslationCache:
# Fetch the English resources, as a fallback for missing keys
languages = [LOCALE_EN] if language == LOCALE_EN else [LOCALE_EN, language]
for translation_strings in await asyncio.gather(
*[
*(
async_get_component_strings(self.hass, lang, components)
for lang in languages
]
)
):
self._build_category_cache(language, components, translation_strings)

View file

@ -118,10 +118,10 @@ async def _async_process_integration(
return
results = await asyncio.gather(
*[
*(
async_get_integration_with_requirements(hass, dep, done)
for dep in deps_to_check
],
),
return_exceptions=True,
)
for result in results:

View file

@ -280,10 +280,10 @@ async def _async_setup_component(
await hass.config_entries.flow.async_wait_init_flow_finish(domain)
await asyncio.gather(
*[
*(
entry.async_setup(hass, integration=integration)
for entry in hass.config_entries.async_entries(domain)
]
)
)
hass.config.components.add(domain)

View file

@ -12,5 +12,5 @@ mccabe==0.6.1
pycodestyle==2.7.0
pydocstyle==6.0.0
pyflakes==2.3.1
pyupgrade==2.16.0
pyupgrade==2.21.2
yamllint==1.26.1

View file

@ -199,9 +199,9 @@ async def test_sound_mode(player, state, mode, mode_sel, mode_2ch, mode_mch):
async def test_sound_mode_list(player, state):
"""Test sound mode list."""
player._get_2ch = Mock(return_value=True) # pylint: disable=W0212
assert sorted(player.sound_mode_list) == sorted([x.name for x in DecodeMode2CH])
assert sorted(player.sound_mode_list) == sorted(x.name for x in DecodeMode2CH)
player._get_2ch = Mock(return_value=False) # pylint: disable=W0212
assert sorted(player.sound_mode_list) == sorted([x.name for x in DecodeModeMCH])
assert sorted(player.sound_mode_list) == sorted(x.name for x in DecodeModeMCH)
async def test_sound_mode_zone_x(player, state):

View file

@ -42,7 +42,7 @@ async def test_list_devices(hass, client, registry):
await client.send_json({"id": 5, "type": "config/device_registry/list"})
msg = await client.receive_json()
dev1, dev2 = [entry.pop("id") for entry in msg["result"]]
dev1, dev2 = (entry.pop("id") for entry in msg["result"])
assert msg["result"] == [
{

View file

@ -134,8 +134,8 @@ async def test_sync_request(hass_fixture, assistant_client, auth_header):
body = await result.json()
assert body.get("requestId") == reqid
devices = body["payload"]["devices"]
assert sorted([dev["id"] for dev in devices]) == sorted(
[dev["id"] for dev in DEMO_DEVICES]
assert sorted(dev["id"] for dev in devices) == sorted(
dev["id"] for dev in DEMO_DEVICES
)
for dev in devices:

View file

@ -223,9 +223,7 @@ async def test_report_state_all(agents):
data = {}
with patch.object(config, "async_report_state") as mock:
await config.async_report_state_all(data)
assert sorted(mock.mock_calls) == sorted(
[call(data, agent) for agent in agents]
)
assert sorted(mock.mock_calls) == sorted(call(data, agent) for agent in agents)
@pytest.mark.parametrize(
@ -241,7 +239,7 @@ async def test_sync_entities_all(agents, result):
side_effect=lambda agent_user_id: agents[agent_user_id],
) as mock:
res = await config.async_sync_entities_all()
assert sorted(mock.mock_calls) == sorted([call(agent) for agent in agents])
assert sorted(mock.mock_calls) == sorted(call(agent) for agent in agents)
assert res == result

View file

@ -74,7 +74,7 @@ async def test_get_triggers(hass, mock_bridge, device_reg):
}
expected_triggers = [
trigger_batt,
*[
*(
{
"platform": "device",
"domain": hue.DOMAIN,
@ -83,7 +83,7 @@ async def test_get_triggers(hass, mock_bridge, device_reg):
"subtype": t_subtype,
}
for t_type, t_subtype in device_trigger.HUE_DIMMER_REMOTE.keys()
],
),
]
assert_lists_same(triggers, expected_triggers)

View file

@ -187,7 +187,7 @@ async def test_onboarding_user(hass, hass_storage, aiohttp_client):
# Validate created areas
area_registry = ar.async_get(hass)
assert len(area_registry.areas) == 3
assert sorted([area.name for area in area_registry.async_list_areas()]) == [
assert sorted(area.name for area in area_registry.async_list_areas()) == [
"Bedroom",
"Kitchen",
"Living Room",

View file

@ -292,11 +292,9 @@ def get_config_entries_for_user_id(
) -> tuple[ConfigEntry]:
"""Get a list of config entries that apply to a specific withings user."""
return tuple(
[
config_entry
for config_entry in hass.config_entries.async_entries(const.DOMAIN)
if config_entry.data.get("token", {}).get("userid") == user_id
]
)

View file

@ -266,7 +266,7 @@ async def test_discover_endpoint(device_info, channels_mock, hass):
)
assert device_info["event_channels"] == sorted(
[ch.id for pool in channels.pools for ch in pool.client_channels.values()]
ch.id for pool in channels.pools for ch in pool.client_channels.values()
)
assert new_ent.call_count == len(
[

View file

@ -315,9 +315,9 @@ def test_event_eq():
now = dt_util.utcnow()
data = {"some": "attr"}
context = ha.Context()
event1, event2 = [
event1, event2 = (
ha.Event("some_type", data, time_fired=now, context=context) for _ in range(2)
]
)
assert event1 == event2

View file

@ -165,7 +165,7 @@ async def test_gather_with_concurrency():
return runs
results = await hasync.gather_with_concurrency(
2, *[_increment_runs_if_in_time() for i in range(4)]
2, *(_increment_runs_if_in_time() for i in range(4))
)
assert results == [2, 2, -1, -1]