Core code styling improvements (#85963)

Franck Nijhof 2023-01-15 23:00:51 +01:00 committed by GitHub
parent 8e5236528f
commit 64c2340fab
13 changed files with 106 additions and 53 deletions

View file

@@ -555,7 +555,9 @@ class AuthStore:
                 "client_icon": refresh_token.client_icon,
                 "token_type": refresh_token.token_type,
                 "created_at": refresh_token.created_at.isoformat(),
-                "access_token_expiration": refresh_token.access_token_expiration.total_seconds(),
+                "access_token_expiration": (
+                    refresh_token.access_token_expiration.total_seconds()
+                ),
                 "token": refresh_token.token,
                 "jwt_key": refresh_token.jwt_key,
                 "last_used_at": refresh_token.last_used_at.isoformat()

View file

@@ -8,7 +8,9 @@ from .util.async_ import protect_loop
 def enable() -> None:
     """Enable the detection of blocking calls in the event loop."""
     # Prevent urllib3 and requests doing I/O in event loop
-    HTTPConnection.putrequest = protect_loop(HTTPConnection.putrequest)  # type: ignore[assignment]
+    HTTPConnection.putrequest = protect_loop(  # type: ignore[assignment]
+        HTTPConnection.putrequest
+    )

     # Prevent sleeping in event loop. Non-strict since 2022.02
     time.sleep = protect_loop(time.sleep, strict=False)
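
Note on the pattern patched here: `protect_loop` wraps a blocking callable so that invoking it from inside the running event loop is reported (or rejected). The snippet below is only a minimal sketch of that general technique, using a hypothetical `report_blocking_call` wrapper; it is not the real `protect_loop` from `homeassistant.util.async_`.

import asyncio
import functools
import logging
from collections.abc import Callable
from typing import Any

_LOGGER = logging.getLogger(__name__)


def report_blocking_call(
    func: Callable[..., Any], strict: bool = True
) -> Callable[..., Any]:
    """Wrap a blocking function and complain when it runs inside the event loop."""

    @functools.wraps(func)
    def wrapper(*args: Any, **kwargs: Any) -> Any:
        try:
            asyncio.get_running_loop()
        except RuntimeError:
            # No running event loop in this thread: the blocking call is fine.
            return func(*args, **kwargs)
        if strict:
            raise RuntimeError(
                f"Blocking call to {func.__name__} inside the event loop"
            )
        _LOGGER.warning("Blocking call to %s inside the event loop", func.__name__)
        return func(*args, **kwargs)

    return wrapper

Monkey-patching callables such as HTTPConnection.putrequest and time.sleep with a wrapper like this is what makes accidental blocking I/O inside coroutines visible.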

View file

@@ -385,7 +385,11 @@ def async_enable_logging(
     )
     threading.excepthook = lambda args: logging.getLogger(None).exception(
         "Uncaught thread exception",
-        exc_info=(args.exc_type, args.exc_value, args.exc_traceback),  # type: ignore[arg-type]
+        exc_info=(  # type: ignore[arg-type]
+            args.exc_type,
+            args.exc_value,
+            args.exc_traceback,
+        ),
     )

     # Log errors to a file if we have write access to file or config dir
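
For illustration only, this self-contained snippet (with a throwaway `boom` thread and demo message) shows what the `threading.excepthook` assignment above achieves: uncaught exceptions in worker threads end up in the logging system instead of the default stderr traceback.

import logging
import threading

logging.basicConfig(level=logging.INFO)

# Same pattern as the hunk above: route uncaught thread exceptions to logging.
threading.excepthook = lambda args: logging.getLogger(None).exception(
    "Uncaught thread exception",
    exc_info=(args.exc_type, args.exc_value, args.exc_traceback),
)


def boom() -> None:
    raise ValueError("demo failure")


thread = threading.Thread(target=boom)
thread.start()
thread.join()  # The traceback is logged rather than printed by the default hook.
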
@@ -403,7 +407,10 @@ def async_enable_logging(
         not err_path_exists and os.access(err_dir, os.W_OK)
     ):

-        err_handler: logging.handlers.RotatingFileHandler | logging.handlers.TimedRotatingFileHandler
+        err_handler: (
+            logging.handlers.RotatingFileHandler
+            | logging.handlers.TimedRotatingFileHandler
+        )
         if log_rotate_days:
             err_handler = logging.handlers.TimedRotatingFileHandler(
                 err_log_path, when="midnight", backupCount=log_rotate_days
@@ -462,7 +469,10 @@ def _get_domains(hass: core.HomeAssistant, config: dict[str, Any]) -> set[str]:

 async def _async_watch_pending_setups(hass: core.HomeAssistant) -> None:
-    """Periodic log of setups that are pending for longer than LOG_SLOW_STARTUP_INTERVAL."""
+    """Periodic log of setups that are pending.
+
+    Pending for longer than LOG_SLOW_STARTUP_INTERVAL.
+    """
     loop_count = 0
     setup_started: dict[str, datetime] = hass.data[DATA_SETUP_STARTED]
     previous_was_empty = True

View file

@@ -864,8 +864,8 @@ async def async_process_component_config(  # noqa: C901
         config_validator, "async_validate_config"
     ):
         try:
-            return await config_validator.async_validate_config(  # type: ignore[no-any-return]
-                hass, config
+            return (  # type: ignore[no-any-return]
+                await config_validator.async_validate_config(hass, config)
             )
         except (vol.Invalid, HomeAssistantError) as ex:
             async_log_exception(ex, domain, config, hass, integration.documentation)

View file

@@ -63,14 +63,14 @@ SOURCE_USB = "usb"
 SOURCE_USER = "user"
 SOURCE_ZEROCONF = "zeroconf"

-# If a user wants to hide a discovery from the UI they can "Ignore" it. The config_entries/ignore_flow
-# websocket command creates a config entry with this source and while it exists normal discoveries
-# with the same unique id are ignored.
+# If a user wants to hide a discovery from the UI they can "Ignore" it. The
+# config_entries/ignore_flow websocket command creates a config entry with this
+# source and while it exists normal discoveries with the same unique id are ignored.
 SOURCE_IGNORE = "ignore"

 # This is used when a user uses the "Stop Ignoring" button in the UI (the
-# config_entries/ignore_flow websocket command). It's triggered after the "ignore" config entry has
-# been removed and unloaded.
+# config_entries/ignore_flow websocket command). It's triggered after the
+# "ignore" config entry has been removed and unloaded.
 SOURCE_UNIGNORE = "unignore"

 # This is used to signal that re-authentication is required by the user.
@@ -643,7 +643,8 @@ class ConfigEntry:
         Returns function to unlisten.
         """
         weak_listener: Any
-        # weakref.ref is not applicable to a bound method, e.g. method of a class instance, as reference will die immediately
+        # weakref.ref is not applicable to a bound method, e.g.,
+        # method of a class instance, as reference will die immediately.
         if hasattr(listener, "__self__"):
             weak_listener = weakref.WeakMethod(cast(MethodType, listener))
         else:
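
The comment about `weakref.ref` and bound methods is easy to demonstrate in isolation; the sketch below uses a made-up `Listener` class and is not part of the change itself.

import weakref


class Listener:
    def on_update(self) -> None:
        print("updated")


obj = Listener()

# A plain weakref to a bound method dies immediately: the bound-method object
# produced by `obj.on_update` is a temporary and is collected right away.
dead_ref = weakref.ref(obj.on_update)
print(dead_ref())  # None

# WeakMethod re-creates the bound method on demand and stays alive as long as
# the instance itself does.
weak = weakref.WeakMethod(obj.on_update)
print(weak())  # <bound method Listener.on_update of ...>
weak()()  # prints "updated"
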
@@ -994,10 +995,10 @@ class ConfigEntries:
         ):
             self.hass.config_entries.flow.async_abort(progress_flow["flow_id"])

-        # After we have fully removed an "ignore" config entry we can try and rediscover it so that a
-        # user is able to immediately start configuring it. We do this by starting a new flow with
-        # the 'unignore' step. If the integration doesn't implement async_step_unignore then
-        # this will be a no-op.
+        # After we have fully removed an "ignore" config entry we can try and rediscover
+        # it so that a user is able to immediately start configuring it. We do this by
+        # starting a new flow with the 'unignore' step. If the integration doesn't
+        # implement async_step_unignore then this will be a no-op.
         if entry.source == SOURCE_IGNORE:
             self.hass.async_create_task(
                 self.hass.config_entries.flow.async_init(
@@ -1040,7 +1041,8 @@ class ConfigEntries:
         for entry in config["entries"]:
             pref_disable_new_entities = entry.get("pref_disable_new_entities")

-            # Between 0.98 and 2021.6 we stored 'disable_new_entities' in a system options dictionary
+            # Between 0.98 and 2021.6 we stored 'disable_new_entities' in a
+            # system options dictionary.
             if pref_disable_new_entities is None and "system_options" in entry:
                 pref_disable_new_entities = entry.get("system_options", {}).get(
                     "disable_new_entities"
@@ -1100,7 +1102,9 @@ class ConfigEntries:
         if not result:
             return result

-        return entry.state is ConfigEntryState.LOADED  # type: ignore[comparison-overlap]  # mypy bug?
+        return (
+            entry.state is ConfigEntryState.LOADED  # type: ignore[comparison-overlap]
+        )

     async def async_unload(self, entry_id: str) -> bool:
         """Unload a config entry."""
@@ -1382,7 +1386,11 @@ class ConfigFlow(data_entry_flow.FlowHandler):
             match_dict = {}  # Match any entry
         for entry in self._async_current_entries(include_ignore=False):
             if all(
-                item in ChainMap(entry.options, entry.data).items()  # type: ignore[arg-type]
+                item
+                in ChainMap(
+                    entry.options,  # type: ignore[arg-type]
+                    entry.data,  # type: ignore[arg-type]
+                ).items()
                 for item in match_dict.items()
             ):
                 raise data_entry_flow.AbortFlow("already_configured")
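
For reference, `ChainMap(entry.options, entry.data)` means options shadow data when both define a key; the flow aborts only if every item in `match_dict` is present in that merged view. A small sketch with invented option/data values:

from collections import ChainMap

options = {"host": "10.0.0.2"}
data = {"host": "10.0.0.1", "port": 80}
match_dict = {"host": "10.0.0.2", "port": 80}

# Options take precedence over data for the "host" key.
merged = ChainMap(options, data)
already_configured = all(item in merged.items() for item in match_dict.items())
print(already_configured)  # True
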
@@ -1474,7 +1482,8 @@ class ConfigFlow(data_entry_flow.FlowHandler):
     ) -> list[ConfigEntry]:
         """Return current entries.

-        If the flow is user initiated, filter out ignored entries unless include_ignore is True.
+        If the flow is user initiated, filter out ignored entries,
+        unless include_ignore is True.
         """
         config_entries = self.hass.config_entries.async_entries(self.handler)
@@ -1737,7 +1746,7 @@ class OptionsFlowWithConfigEntry(OptionsFlow):

 class EntityRegistryDisabledHandler:
-    """Handler to handle when entities related to config entries updating disabled_by."""
+    """Handler when entities related to config entries updated disabled_by."""

     def __init__(self, hass: HomeAssistant) -> None:
         """Initialize the handler."""

View file

@@ -2041,8 +2041,8 @@ class Config:
         if not (data := await self._store.async_load()):
             return

-        # In 2021.9 we fixed validation to disallow a path (because that's never correct)
-        # but this data still lives in storage, so we print a warning.
+        # In 2021.9 we fixed validation to disallow a path (because that's never
+        # correct) but this data still lives in storage, so we print a warning.
         if data.get("external_url") and urlparse(data["external_url"]).path not in (
             "",
             "/",
@@ -2125,7 +2125,8 @@ class Config:
             if data["unit_system_v2"] == _CONF_UNIT_SYSTEM_IMPERIAL:
                 data["unit_system_v2"] = _CONF_UNIT_SYSTEM_US_CUSTOMARY
         if old_major_version == 1 and old_minor_version < 3:
-            # In 1.3, we add the key "language", initialize it from the owner account
+            # In 1.3, we add the key "language", initialize it from the
+            # owner account.
             data["language"] = "en"
             try:
                 owner = await self.hass.auth.async_get_owner()

View file

@@ -175,7 +175,10 @@ class FlowManager(abc.ABC):
     def async_has_matching_flow(
         self, handler: str, context: dict[str, Any], data: Any
     ) -> bool:
-        """Check if an existing matching flow is in progress with the same handler, context, and data."""
+        """Check if an existing matching flow is in progress.
+
+        A flow with the same handler, context, and data.
+        """
         return any(
             flow
             for flow in self._async_progress_by_handler(handler)

View file

@@ -378,9 +378,9 @@ class Template:
             wanted_env = _ENVIRONMENT
         ret: TemplateEnvironment | None = self.hass.data.get(wanted_env)
         if ret is None:
-            ret = self.hass.data[wanted_env] = TemplateEnvironment(  # type: ignore[no-untyped-call]
+            ret = self.hass.data[wanted_env] = TemplateEnvironment(
                 self.hass,
-                self._limited,
+                self._limited,  # type: ignore[no-untyped-call]
                 self._strict,
             )
         return ret

View file

@@ -123,8 +123,9 @@ class Manifest(TypedDict, total=False):
     """
     Integration manifest.

-    Note that none of the attributes are marked Optional here. However, some of them may be optional in manifest.json
-    in the sense that they can be omitted altogether. But when present, they should not have null values in it.
+    Note that none of the attributes are marked Optional here. However, some of
+    them may be optional in manifest.json in the sense that they can be omitted
+    altogether. But when present, they should not have null values in it.
     """

     name: str
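
The reworded docstring hinges on `total=False` semantics: keys may be left out entirely, but a present key should not be null. A trimmed-down stand-in (not the real `Manifest`) makes the distinction a type checker enforces explicit:

from typing import TypedDict


class DemoManifest(TypedDict, total=False):
    """Simplified stand-in for the real Manifest TypedDict."""

    name: str
    version: str


ok: DemoManifest = {"name": "demo"}  # "version" omitted altogether: fine
bad: DemoManifest = {"name": "demo", "version": None}  # mypy rejects the null value
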
@@ -338,7 +339,9 @@ async def async_get_zeroconf(
     hass: HomeAssistant,
 ) -> dict[str, list[dict[str, str | dict[str, str]]]]:
     """Return cached list of zeroconf types."""
-    zeroconf: dict[str, list[dict[str, str | dict[str, str]]]] = ZEROCONF.copy()  # type: ignore[assignment]
+    zeroconf: dict[
+        str, list[dict[str, str | dict[str, str]]]
+    ] = ZEROCONF.copy()  # type: ignore[assignment]

     integrations = await async_get_custom_components(hass)
     for integration in integrations.values():
@@ -496,7 +499,8 @@ class Integration:
             (
                 "The custom integration '%s' does not have a version key in the"
                 " manifest file and was blocked from loading. See"
-                " https://developers.home-assistant.io/blog/2021/01/29/custom-integration-changes#versions"
+                " https://developers.home-assistant.io"
+                "/blog/2021/01/29/custom-integration-changes#versions"
                 " for more details"
             ),
             integration.domain,
@@ -518,7 +522,8 @@ class Integration:
             (
                 "The custom integration '%s' does not have a valid version key"
                 " (%s) in the manifest file and was blocked from loading. See"
-                " https://developers.home-assistant.io/blog/2021/01/29/custom-integration-changes#versions"
+                " https://developers.home-assistant.io"
+                "/blog/2021/01/29/custom-integration-changes#versions"
                 " for more details"
             ),
             integration.domain,
@@ -895,7 +900,9 @@ def _load_file(
     Async friendly.
     """
     with suppress(KeyError):
-        return hass.data[DATA_COMPONENTS][comp_or_platform]  # type: ignore[no-any-return]
+        return hass.data[DATA_COMPONENTS][  # type: ignore[no-any-return]
+            comp_or_platform
+        ]

     if (cache := hass.data.get(DATA_COMPONENTS)) is None:
         if not _async_mount_config_dir(hass):

View file

@ -15,7 +15,8 @@ from .helpers.typing import UNDEFINED, UndefinedType
from .loader import Integration, IntegrationNotFound, async_get_integration from .loader import Integration, IntegrationNotFound, async_get_integration
from .util import package as pkg_util from .util import package as pkg_util
PIP_TIMEOUT = 60 # The default is too low when the internet connection is satellite or high latency # The default is too low when the internet connection is satellite or high latency
PIP_TIMEOUT = 60
MAX_INSTALL_FAILURES = 3 MAX_INSTALL_FAILURES = 3
DATA_REQUIREMENTS_MANAGER = "requirements_manager" DATA_REQUIREMENTS_MANAGER = "requirements_manager"
CONSTRAINT_FILE = "package_constraints.txt" CONSTRAINT_FILE = "package_constraints.txt"
@@ -132,7 +133,7 @@ class RequirementsManager:
     async def async_get_integration_with_requirements(
         self, domain: str, done: set[str] | None = None
     ) -> Integration:
-        """Get an integration with all requirements installed, including the dependencies.
+        """Get an integration with all requirements installed, including dependencies.

         This can raise IntegrationNotFound if manifest or integration
         is invalid, RequirementNotFound if there was some type of
@@ -257,7 +258,11 @@ class RequirementsManager:
     def _raise_for_failed_requirements(
         self, integration: str, missing: list[str]
     ) -> None:
-        """Raise RequirementsNotFound so we do not keep trying requirements that have already failed."""
+        """Raise for failed installing integration requirements.
+
+        Raise RequirementsNotFound so we do not keep trying requirements
+        that have already failed.
+        """
         for req in missing:
             if req in self.install_failure_history:
                 _LOGGER.info(

View file

@@ -90,11 +90,18 @@ def _async_loop_exception_handler(_: Any, context: dict[str, Any]) -> None:
     if source_traceback := context.get("source_traceback"):
         stack_summary = "".join(traceback.format_list(source_traceback))
         logger.error(
-            "Error doing job: %s: %s", context["message"], stack_summary, **kwargs  # type: ignore[arg-type]
+            "Error doing job: %s: %s",
+            context["message"],
+            stack_summary,
+            **kwargs,  # type: ignore[arg-type]
         )
         return

-    logger.error("Error doing job: %s", context["message"], **kwargs)  # type: ignore[arg-type]
+    logger.error(
+        "Error doing job: %s",
+        context["message"],
+        **kwargs,  # type: ignore[arg-type]
+    )


 async def setup_and_run_hass(runtime_config: RuntimeConfig) -> int:
@@ -105,7 +112,8 @@ async def setup_and_run_hass(runtime_config: RuntimeConfig) -> int:
         return 1

     # threading._shutdown can deadlock forever
-    threading._shutdown = deadlock_safe_shutdown  # type: ignore[attr-defined]  # pylint: disable=protected-access
+    # pylint: disable=protected-access
+    threading._shutdown = deadlock_safe_shutdown  # type: ignore[attr-defined]

     return await hass.async_run()

View file

@@ -191,7 +191,10 @@ async def state_changed_event_helper(hass):

 @benchmark
 async def state_changed_event_filter_helper(hass):
-    """Run a million events through state changed event helper with 1000 entities that all get filtered."""
+    """Run a million events through state changed event helper.
+
+    With 1000 entities that all get filtered.
+    """
     count = 0
     entity_id = "light.kitchen"
     events_to_fire = 10**6

View file

@@ -30,24 +30,27 @@ ATTR_COMPONENT = "component"

 BASE_PLATFORMS = {platform.value for platform in Platform}

 # DATA_SETUP is a dict[str, asyncio.Task[bool]], indicating domains which are currently
-# being setup or which failed to setup
-# - Tasks are added to DATA_SETUP by `async_setup_component`, the key is the domain being setup
-#   and the Task is the `_async_setup_component` helper.
-# - Tasks are removed from DATA_SETUP if setup was successful, that is, the task returned True
+# being setup or which failed to setup:
+# - Tasks are added to DATA_SETUP by `async_setup_component`, the key is the domain
+#   being setup and the Task is the `_async_setup_component` helper.
+# - Tasks are removed from DATA_SETUP if setup was successful, that is,
+#   the task returned True.
 DATA_SETUP = "setup_tasks"

-# DATA_SETUP_DONE is a dict [str, asyncio.Event], indicating components which will be setup
-# - Events are added to DATA_SETUP_DONE during bootstrap by async_set_domains_to_be_loaded,
-#   the key is the domain which will be loaded
-# - Events are set and removed from DATA_SETUP_DONE when async_setup_component is finished,
-#   regardless of if the setup was successful or not.
+# DATA_SETUP_DONE is a dict [str, asyncio.Event], indicating components which
+# will be setup:
+# - Events are added to DATA_SETUP_DONE during bootstrap by
+#   async_set_domains_to_be_loaded, the key is the domain which will be loaded.
+# - Events are set and removed from DATA_SETUP_DONE when async_setup_component
+#   is finished, regardless of if the setup was successful or not.
 DATA_SETUP_DONE = "setup_done"

-# DATA_SETUP_DONE is a dict [str, datetime], indicating when an attempt to setup a component
-# started
+# DATA_SETUP_DONE is a dict [str, datetime], indicating when an attempt
+# to setup a component started.
 DATA_SETUP_STARTED = "setup_started"

-# DATA_SETUP_TIME is a dict [str, timedelta], indicating how time was spent setting up a component
+# DATA_SETUP_TIME is a dict [str, timedelta], indicating how time was spent
+# setting up a component.
 DATA_SETUP_TIME = "setup_time"

 DATA_DEPS_REQS = "deps_reqs_processed"
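
Taken together, the comments above describe four bookkeeping mappings kept in `hass.data`. The aliases below merely illustrate those shapes and are not definitions used by Home Assistant:

import asyncio
from datetime import datetime, timedelta

SetupTasks = dict[str, asyncio.Task[bool]]  # DATA_SETUP: domains being set up or failed
SetupDone = dict[str, asyncio.Event]        # DATA_SETUP_DONE: domains that will be set up
SetupStarted = dict[str, datetime]          # DATA_SETUP_STARTED: when setup began
SetupTime = dict[str, timedelta]            # DATA_SETUP_TIME: time spent on setup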