Prepare for upcoming mypy update (#70800)

Marc Mueller 2022-04-26 16:41:52 +02:00 committed by GitHub
parent 7d2363ad22
commit 97af164858
5 changed files with 8 additions and 7 deletions

@@ -130,7 +130,7 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN):
         )
         self.context[CONF_HOST] = self._host
-        if ipaddress.ip_address(self._host).is_link_local:
+        if not self._host or ipaddress.ip_address(self._host).is_link_local:
             return self.async_abort(reason="ignore_ip6_link_local")
         if uuid := discovery_info.upnp.get(ssdp.ATTR_UPNP_UDN):
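
Note: the extra `not self._host` guard is needed because `self._host` is typed as `str | None`, and the newer mypy rejects passing a possibly-None value to `ipaddress.ip_address()` (which would also raise at runtime). A minimal standalone sketch of the pattern, not the actual config-flow code:

from __future__ import annotations

from ipaddress import ip_address

def should_ignore(host: str | None) -> bool:
    # The truthiness check narrows host from "str | None" to "str" for mypy
    # and avoids ip_address(None), which raises ValueError at runtime.
    return not host or ip_address(host).is_link_local

print(should_ignore(None))             # True - no host, abort early
print(should_ignore("fe80::1"))        # True - IPv6 link-local address
print(should_ignore("192.168.178.1"))  # False - routable address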

@@ -236,7 +236,7 @@ class StatisticsSensor(SensorEntity):
         samples_max_age: timedelta | None,
         precision: int,
         quantile_intervals: int,
-        quantile_method: str,
+        quantile_method: Literal["exclusive", "inclusive"],
     ) -> None:
         """Initialize the Statistics sensor."""
         self._attr_icon: str = ICON
@@ -252,7 +252,7 @@ class StatisticsSensor(SensorEntity):
         self._samples_max_age: timedelta | None = samples_max_age
         self._precision: int = precision
         self._quantile_intervals: int = quantile_intervals
-        self._quantile_method: str = quantile_method
+        self._quantile_method: Literal["exclusive", "inclusive"] = quantile_method
         self._value: StateType | datetime = None
         self._unit_of_measurement: str | None = None
         self._available: bool = False
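
Note: the narrower annotation mirrors what `statistics.quantiles()` accepts for its `method` keyword, so mypy can check the value instead of treating it as an arbitrary string. A minimal sketch of the idea, independent of the sensor class:

from __future__ import annotations

import statistics
from typing import Literal

def quartiles(
    samples: list[float],
    method: Literal["exclusive", "inclusive"] = "exclusive",
) -> list[float]:
    # statistics.quantiles() only understands these two method names;
    # the Literal annotation lets mypy reject any other string at check time.
    return statistics.quantiles(samples, n=4, method=method)

print(quartiles([1.0, 2.0, 3.0, 4.0, 5.0], method="inclusive"))  # [2.0, 3.0, 4.0]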

@@ -109,7 +109,7 @@ def report_integration(
         integration,
         found_frame.filename[index:],
         found_frame.lineno,
-        found_frame.line.strip(),
+        (found_frame.line or "?").strip(),
     )
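
Note: `traceback.FrameSummary.line` is an optional string (the source line may not be available), so the newer mypy flags calling `.strip()` on it directly; the `or "?"` fallback satisfies mypy and covers the rare runtime case. A small sketch using a hypothetical frame:

import traceback

def describe_frame(frame: traceback.FrameSummary) -> str:
    # frame.line can be None or empty when the source cannot be loaded;
    # fall back to "?" instead of calling .strip() on None.
    return f"{frame.filename}, line {frame.lineno}: {(frame.line or '?').strip()}"

frame = traceback.FrameSummary("<string>", 1, "<module>", line=None)
print(describe_frame(frame))  # <string>, line 1: ?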

@@ -236,7 +236,8 @@ def _get_internal_url(
             scheme="http", host=hass.config.api.local_ip, port=hass.config.api.port
         )
         if (
-            not is_loopback(ip_address(ip_url.host))
+            ip_url.host
+            and not is_loopback(ip_address(ip_url.host))
             and (not require_current_request or ip_url.host == _get_request_host())
             and (not require_standard_port or ip_url.is_default_port())
         ):
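
Note: `yarl.URL.host` is typed as `str | None` (relative URLs have no host), so mypy rejects passing it straight to `ip_address()`; putting `ip_url.host` first in the `and` chain narrows the type and short-circuits the remaining checks. A standalone sketch of the same guard, using the stdlib `is_loopback` property rather than Home Assistant's helper:

from ipaddress import ip_address

from yarl import URL

def has_usable_host(url: str) -> bool:
    ip_url = URL(url)
    # URL.host is Optional[str]; checking it first narrows the type for mypy
    # and guarantees ip_address() never receives None at runtime.
    return bool(ip_url.host and not ip_address(ip_url.host).is_loopback)

print(has_usable_host("http://192.168.1.20:8123"))  # True
print(has_usable_host("http://127.0.0.1:8123"))     # False - loopback
print(has_usable_host("/relative/path"))            # False - no host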

@@ -155,13 +155,13 @@ def check_loop(func: Callable[..., Any], strict: bool = True) -> None:
         integration,
         found_frame.filename[index:],
         found_frame.lineno,
-        found_frame.line.strip(),
+        (found_frame.line or "?").strip(),
     )
     if strict:
         raise RuntimeError(
             "Blocking calls must be done in the executor or a separate thread; "
             "Use `await hass.async_add_executor_job()` "
-            f"at {found_frame.filename[index:]}, line {found_frame.lineno}: {found_frame.line.strip()}"
+            f"at {found_frame.filename[index:]}, line {found_frame.lineno}: {(found_frame.line or '?').strip()}"
         )
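
Note: this is the same `found_frame.line` fallback as in the logging call above, applied inside the f-string as well so neither code path calls `.strip()` on None. A small sketch of how the message is built, using a hypothetical helper name and example path:

from __future__ import annotations

def blocking_call_message(filename: str, lineno: int, line: str | None) -> str:
    # line may be None when the offending source cannot be inspected;
    # the (line or '?') fallback keeps the f-string safe for mypy and runtime.
    return (
        "Blocking calls must be done in the executor or a separate thread; "
        "Use `await hass.async_add_executor_job()` "
        f"at {filename}, line {lineno}: {(line or '?').strip()}"
    )

print(blocking_call_message("custom_components/demo/light.py", 23, None))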