Spelling updates (#82867)

Marc Mueller 2022-11-28 16:51:43 +01:00 committed by GitHub
parent d72802cfb0
commit 63d519c1a8
25 changed files with 27 additions and 27 deletions

@@ -225,7 +225,7 @@ class AmcrestCam(Camera):
         # Amcrest cameras only support one snapshot command at a time.
         # Hence need to wait if a previous snapshot has not yet finished.
         # Also need to check that camera is online and turned on before each wait
-        # and before initiating shapshot.
+        # and before initiating snapshot.
         while self._snapshot_task:
             self._check_snapshot_ok()
             _LOGGER.debug("Waiting for previous snapshot from %s", self._name)

@@ -111,7 +111,7 @@ class EbusdData:
             raise RuntimeError(err) from err

     def write(self, call: ServiceCall) -> None:
-        """Call write methon on ebusd."""
+        """Call write method on ebusd."""
         name = call.data.get("name")
         value = call.data.get("value")

@@ -183,7 +183,7 @@ class Router:
         if not self.subscriptions.get(key):
             return
         if key in self.inflight_gets:
-            _LOGGER.debug("Skipping already inflight get for %s", key)
+            _LOGGER.debug("Skipping already in-flight get for %s", key)
             return
         self.inflight_gets.add(key)
         _LOGGER.debug("Getting %s for subscribers %s", key, self.subscriptions[key])

@@ -267,7 +267,7 @@ class JewishCalendarSensor(SensorEntity):

 class JewishCalendarTimeSensor(JewishCalendarSensor):
-    """Implement attrbutes for sensors returning times."""
+    """Implement attributes for sensors returning times."""

     _attr_device_class = SensorDeviceClass.TIMESTAMP

@@ -134,7 +134,7 @@ def number_limit_sub_validator(entity_config: OrderedDict) -> OrderedDict:
     if dpt_class is None:
         raise vol.Invalid(f"'type: {value_type}' is not a valid numeric sensor type.")
-    # Inifinity is not supported by Home Assistant frontend so user defined
+    # Infinity is not supported by Home Assistant frontend so user defined
     # config is required if if xknx DPTNumeric subclass defines it as limit.
     if min_config is None and dpt_class.value_min == float("-inf"):
         raise vol.Invalid(f"'min' key required for value type '{value_type}'")

@@ -178,7 +178,7 @@ class MeteoFranceWeather(CoordinatorEntity, WeatherEntity):
             )
         else:
             for forecast in self.coordinator.data.daily_forecast:
-                # stop when we don't have a weather condition (can happen around last days of forcast, max 14)
+                # stop when we don't have a weather condition (can happen around last days of forecast, max 14)
                 if not forecast.get("weather12H"):
                     break
                 forecast_data.append(

@@ -122,7 +122,7 @@ class NextBusDepartureSensor(SensorEntity):
     both the route and the stop.

     This is possibly a little convoluted to provide as it requires making a
-    request to the service to get these values. Perhaps it can be simplifed in
+    request to the service to get these values. Perhaps it can be simplified in
     the future using fuzzy logic and matching.
     """

@@ -176,7 +176,7 @@ class EventManager:
                 ).total_seconds() < 7200:
                     await self.async_renew()
         except RemoteProtocolError:
-            # Likley a shutdown event, nothing to see here
+            # Likely a shutdown event, nothing to see here
             return
         except SUBSCRIPTION_ERRORS as err:
             LOGGER.warning(

@@ -73,7 +73,7 @@ class RTSPToWebRTCConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
         return None

     async def async_step_hassio(self, discovery_info: HassioServiceInfo) -> FlowResult:
-        """Prepare confiugration for the RTSPtoWebRTC server add-on discovery."""
+        """Prepare configuration for the RTSPtoWebRTC server add-on discovery."""
         if self._async_current_entries():
             return self.async_abort(reason="single_instance_allowed")

@@ -112,7 +112,7 @@ def create_stream(
     dynamic_stream_settings: DynamicStreamSettings,
     stream_label: str | None = None,
 ) -> Stream:
-    """Create a stream with the specified identfier based on the source url.
+    """Create a stream with the specified identifier based on the source url.

     The stream_source is typically an rtsp url (though any url accepted by ffmpeg is fine) and
     options (see STREAM_OPTIONS_SCHEMA) are converted and passed into pyav / ffmpeg.

@@ -19,7 +19,7 @@ class Diagnostics:
         self._values: dict[str, Any] = {}

     def increment(self, key: str) -> None:
-        """Increment a counter for the spcified key/event."""
+        """Increment a counter for the specified key/event."""
         self._counter.update(Counter({key: 1}))

     def set_value(self, key: str, value: Any) -> None:

@@ -102,7 +102,7 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     for platform in PLATFORMS:
         hass.data.pop(DATA_REMOVE_DISCOVER_COMPONENT.format(platform))()

-    # deattach device triggers
+    # detach device triggers
     device_registry = dr.async_get(hass)
     devices = async_entries_for_config_entry(device_registry, entry.entry_id)
     for device in devices:

@@ -43,7 +43,7 @@ def wilight_trigger(value: Any) -> str | None:
     if (step == 6) & result_60:
         step = 7
-        err_desc = "Active part shoul be less than 2"
+        err_desc = "Active part should be less than 2"

     if (step == 7) & result_2:
         return value

@@ -437,7 +437,7 @@ def _load_services_file(hass: HomeAssistant, integration: Integration) -> JSON_T
 def _load_services_files(
     hass: HomeAssistant, integrations: Iterable[Integration]
 ) -> list[JSON_TYPE]:
-    """Load service files for multiple intergrations."""
+    """Load service files for multiple integrations."""
     return [_load_services_file(hass, integration) for integration in integrations]

@@ -177,7 +177,7 @@ def find_non_referenced_integrations(
     integration: Integration,
     references: dict[Path, set[str]],
 ) -> set[str]:
-    """Find intergrations that are not allowed to be referenced."""
+    """Find integrations that are not allowed to be referenced."""
     allowed_references = calc_allowed_references(integration)
     referenced = set()
     for path, refs in references.items():

@@ -27,7 +27,7 @@ def mock_config_entry() -> MockConfigEntry:
 def mock_cpuinfo_config_flow() -> Generator[MagicMock, None, None]:
     """Return a mocked get_cpu_info.

-    It is only used to check thruthy or falsy values, so it is mocked
+    It is only used to check truthy or falsy values, so it is mocked
     to return True.
     """
     with patch(

@@ -56,7 +56,7 @@ async def test_step_import_existing_host(hass):
     mock_data.update({CONF_SK_NUM_TRIES: 3, CONF_DIM_MODE: 50})
     mock_entry = MockConfigEntry(domain=DOMAIN, data=mock_data)
     mock_entry.add_to_hass(hass)
-    # Inititalize a config flow with different data but same host address
+    # Initialize a config flow with different data but same host address
     with patch("pypck.connection.PchkConnectionManager.async_connect"):
         imported_data = IMPORT_DATA.copy()
         result = await hass.config_entries.flow.async_init(

@@ -913,7 +913,7 @@ async def test_subscribe_unsubscribe_logbook_stream_included_entities(
 async def test_logbook_stream_excluded_entities_inherits_filters_from_recorder(
     recorder_mock, hass, hass_ws_client
 ):
-    """Test subscribe/unsubscribe logbook stream inherts filters from recorder."""
+    """Test subscribe/unsubscribe logbook stream inherits filters from recorder."""
     now = dt_util.utcnow()
     await asyncio.gather(
         *[

@@ -654,7 +654,7 @@ async def test_pubsub_subscriber_config_entry_reauth(
     result = await oauth.async_reauth(config_entry)
     await oauth.async_oauth_web_flow(result)

-    # Entering an updated access token refreshs the config entry.
+    # Entering an updated access token refreshes the config entry.
     entry = await oauth.async_finish_setup(result, {"code": "1234"})
     entry.data["token"].pop("expires_at")
     assert entry.unique_id == PROJECT_ID

@@ -1,4 +1,4 @@
-"""Tests for RTSPtoWebRTC inititalization."""
+"""Tests for RTSPtoWebRTC initialization."""
 from __future__ import annotations

@@ -1,4 +1,4 @@
-"""Tests for RTSPtoWebRTC inititalization."""
+"""Tests for RTSPtoWebRTC initialization."""
 from __future__ import annotations

@@ -486,7 +486,7 @@ async def test_climate_hvac_action_pi_demand(hass, device_climate):
     ),
 )
 async def test_hvac_mode(hass, device_climate, sys_mode, hvac_mode):
-    """Test HVAC modee."""
+    """Test HVAC mode."""
     thrm_cluster = device_climate.device.endpoints[1].thermostat
     entity_id = await find_entity_id(Platform.CLIMATE, device_climate, hass)

@@ -186,11 +186,11 @@ def test_with_include_domain_glob_filtering_case4a_include_strong():
     )
     assert testfilter("sensor.working")
-    assert testfilter("sensor.notworking") is True  # iclude is stronger
+    assert testfilter("sensor.notworking") is True  # include is stronger
     assert testfilter("light.test")
-    assert testfilter("light.notworking") is True  # iclude is stronger
+    assert testfilter("light.notworking") is True  # include is stronger
     assert testfilter("light.ignoreme") is False
-    assert testfilter("binary_sensor.not_working") is True  # iclude is stronger
+    assert testfilter("binary_sensor.not_working") is True  # include is stronger
     assert testfilter("binary_sensor.another") is False
     assert testfilter("binary_sensor.specificly_included") is True
     assert testfilter("sun.sun") is False

@@ -327,7 +327,7 @@ async def test_external_step(hass, manager):
         "refresh": True,
     }

-    # Frontend refreshses the flow
+    # Frontend refreshes the flow
     result = await manager.async_configure(result["flow_id"])
     assert result["type"] == data_entry_flow.FlowResultType.CREATE_ENTRY
     assert result["title"] == "Hello"

@@ -639,7 +639,7 @@ async def test_integration_logs_is_custom(hass, caplog):
 async def test_async_get_loaded_integrations(hass):
-    """Test we can enumerate loaded integations."""
+    """Test we can enumerate loaded integrations."""
     hass.config.components.add("notbase")
     hass.config.components.add("switch")
     hass.config.components.add("notbase.switch")