Improve setup_time typing (#66509)
parent dbd26c7faf
commit b2ee7cebc9
3 changed files with 15 additions and 11 deletions
homeassistant/bootstrap.py
@@ -3,7 +3,7 @@ from __future__ import annotations
 
 import asyncio
 import contextlib
-from datetime import datetime
+from datetime import datetime, timedelta
 import logging
 import logging.handlers
 import os
@@ -450,7 +450,7 @@ async def _async_set_up_integrations(
 ) -> None:
     """Set up all the integrations."""
     hass.data[DATA_SETUP_STARTED] = {}
-    setup_time = hass.data[DATA_SETUP_TIME] = {}
+    setup_time: dict[str, timedelta] = hass.data.setdefault(DATA_SETUP_TIME, {})
 
     watch_task = asyncio.create_task(_async_watch_pending_setups(hass))
 
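This is the core of the change: hass.data is an untyped store, so a value created with plain chained assignment comes back as Any. Annotating the local and switching to setdefault stores and returns the same mapping, now with a concrete type the checker can track. A minimal standalone sketch (a plain data dict stands in for hass.data; the key name is illustrative):

    from datetime import timedelta
    from typing import Any

    # Stand-in for hass.data, which is effectively dict[str, Any].
    data: dict[str, Any] = {}

    # Plain assignment (the old code's shape) leaves the local untyped:
    #     setup_time = data["setup_time"] = {}
    # The annotated setdefault form gives the checker a concrete type.
    setup_time: dict[str, timedelta] = data.setdefault("setup_time", {})
    setup_time["light"] = timedelta(seconds=2.5)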
@@ -459,9 +459,9 @@ async def _async_set_up_integrations(
     # Resolve all dependencies so we know all integrations
     # that will have to be loaded and start rightaway
     integration_cache: dict[str, loader.Integration] = {}
-    to_resolve = domains_to_setup
+    to_resolve: set[str] = domains_to_setup
     while to_resolve:
-        old_to_resolve = to_resolve
+        old_to_resolve: set[str] = to_resolve
         to_resolve = set()
 
         integrations_to_process = [
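Both to_resolve and old_to_resolve get annotations because to_resolve is drained and reassigned to a fresh set() on every pass of this fixed-point loop; the same pattern covers the stage-1 deps_promotion loop in the next hunk. A standalone sketch of the loop's shape, with a hypothetical dependency table:

    # Hypothetical dependency table for illustration only.
    deps: dict[str, set[str]] = {"hue": {"light"}, "light": {"http"}, "http": set()}

    to_resolve: set[str] = {"hue"}
    resolved: set[str] = set()
    while to_resolve:
        # to_resolve is rebuilt each pass, hence the explicit annotations.
        old_to_resolve: set[str] = to_resolve
        to_resolve = set()
        for domain in old_to_resolve:
            resolved.add(domain)
            to_resolve.update(d for d in deps[domain] if d not in resolved)

    print(resolved)  # {'hue', 'light', 'http'} (set order may vary)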
@@ -508,11 +508,11 @@ async def _async_set_up_integrations(
     await async_setup_multi_components(hass, debuggers, config)
 
     # calculate what components to setup in what stage
-    stage_1_domains = set()
+    stage_1_domains: set[str] = set()
 
     # Find all dependencies of any dependency of any stage 1 integration that
     # we plan on loading and promote them to stage 1
-    deps_promotion = STAGE_1_INTEGRATIONS
+    deps_promotion: set[str] = STAGE_1_INTEGRATIONS
     while deps_promotion:
         old_deps_promotion = deps_promotion
         deps_promotion = set()
 
@@ -577,7 +577,7 @@ async def _async_set_up_integrations(
         {
             integration: timedelta.total_seconds()
             for integration, timedelta in sorted(
-                setup_time.items(), key=lambda item: item[1].total_seconds()  # type: ignore
+                setup_time.items(), key=lambda item: item[1].total_seconds()
             )
         },
     )
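With setup_time now declared as dict[str, timedelta] earlier in the function, mypy can infer the type of item in the sort key on its own, which is why the type: ignore comes off. A standalone sketch:

    from datetime import timedelta

    setup_time: dict[str, timedelta] = {
        "hue": timedelta(seconds=3),
        "mqtt": timedelta(seconds=1),
    }
    # item is known to be tuple[str, timedelta]; no ignore needed.
    ordered = sorted(setup_time.items(), key=lambda item: item[1].total_seconds())
    print({domain: td.total_seconds() for domain, td in ordered})
    # {'mqtt': 1.0, 'hue': 3.0}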
homeassistant/components/websocket_api/commands.py
@@ -3,8 +3,9 @@ from __future__ import annotations
 
 import asyncio
 from collections.abc import Callable
+import datetime as dt
 import json
-from typing import Any
+from typing import Any, cast
 
 import voluptuous as vol
 
@@ -305,7 +306,9 @@ async def handle_integration_setup_info(
         msg["id"],
         [
             {"domain": integration, "seconds": timedelta.total_seconds()}
-            for integration, timedelta in hass.data[DATA_SETUP_TIME].items()
+            for integration, timedelta in cast(
+                dict[str, dt.timedelta], hass.data[DATA_SETUP_TIME]
+            ).items()
         ],
     )
 
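The websocket handler is on the reading side: it pulls DATA_SETUP_TIME back out of the untyped hass.data, so the declared type has to be restored with typing.cast, which is a no-op at runtime and only informs the checker. A standalone sketch with a stand-in data dict:

    import datetime as dt
    from typing import Any, cast

    # Stand-in for hass.data.
    data: dict[str, Any] = {"setup_time": {"hue": dt.timedelta(seconds=3)}}

    # cast() returns its second argument unchanged; it exists purely so
    # the checker knows the value is dict[str, dt.timedelta].
    result = [
        {"domain": domain, "seconds": td.total_seconds()}
        for domain, td in cast(dict[str, dt.timedelta], data["setup_time"]).items()
    ]
    print(result)  # [{'domain': 'hue', 'seconds': 3.0}]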
homeassistant/setup.py
@@ -4,6 +4,7 @@ from __future__ import annotations
 import asyncio
 from collections.abc import Awaitable, Callable, Generator, Iterable
 import contextlib
+from datetime import timedelta
 import logging.handlers
 from timeit import default_timer as timer
 from types import ModuleType
@@ -436,7 +437,7 @@ def async_start_setup(
     """Keep track of when setup starts and finishes."""
     setup_started = hass.data.setdefault(DATA_SETUP_STARTED, {})
     started = dt_util.utcnow()
-    unique_components = {}
+    unique_components: dict[str, str] = {}
     for domain in components:
         unique = ensure_unique_string(domain, setup_started)
         unique_components[unique] = domain
@@ -444,7 +445,7 @@ def async_start_setup(
 
     yield
 
-    setup_time = hass.data.setdefault(DATA_SETUP_TIME, {})
+    setup_time: dict[str, timedelta] = hass.data.setdefault(DATA_SETUP_TIME, {})
     time_taken = dt_util.utcnow() - started
     for unique, domain in unique_components.items():
         del setup_started[unique]
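For context, async_start_setup is the context manager that writes into the now-typed DATA_SETUP_TIME mapping: it records a start time, yields to the setup block, then stores the elapsed timedelta. A reduced standalone sketch of that shape (hypothetical names; the real code also de-duplicates concurrent setups of the same domain via unique keys):

    from collections.abc import Iterator
    from contextlib import contextmanager
    from datetime import datetime, timedelta

    @contextmanager
    def start_setup(setup_time: dict[str, timedelta], domain: str) -> Iterator[None]:
        # Record the start, run the wrapped block, then store the elapsed time.
        started = datetime.now()
        yield
        setup_time[domain] = datetime.now() - started

    setup_time: dict[str, timedelta] = {}
    with start_setup(setup_time, "light"):
        pass  # a component's setup would run here
    print(setup_time["light"].total_seconds())  # small positive float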