Have pylint warn when user-visible log messages do not start with a capital letter or end with a period (#48064)
Co-authored-by: Martin Hjelmare <marhje52@gmail.com>
parent 3742f175ad
commit c820dd4cb5
107 changed files with 243 additions and 157 deletions
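The enforcing checker itself is not shown in this truncated diff. As a rough, illustrative sketch of how such a rule can be expressed as a pylint plugin (assuming pylint 2.x-era astroid visitors; the class name HassLoggerFormatChecker, the message id W9999, and the LOGGER_NAMES set are assumptions, not the actual Home Assistant plugin):

"""Rough sketch of a pylint checker for the log message style enforced here.

Illustrative only: names, message id, and the logger-name list are assumptions,
not the plugin that this commit actually relies on.
"""
from astroid import nodes
from pylint.checkers import BaseChecker
from pylint.interfaces import IAstroidChecker

LOGGER_NAMES = {"LOGGER", "_LOGGER"}
LOG_METHODS = {"debug", "info", "warning", "error", "exception", "critical"}


class HassLoggerFormatChecker(BaseChecker):
    """Warn when a user visible log message is badly formatted."""

    __implements__ = IAstroidChecker  # pylint 2.x style registration
    name = "hass_logger"
    msgs = {
        "W9999": (
            "Log message should start with a capital letter and not end with a period",
            "hass-logger-format",
            "User visible log messages follow the agreed style.",
        )
    }

    def visit_call(self, node: nodes.Call) -> None:
        """Inspect calls such as _LOGGER.warning("...")."""
        func = node.func
        # Only look at <logger>.<level>("message", ...) calls.
        if not isinstance(func, nodes.Attribute) or not isinstance(func.expr, nodes.Name):
            return
        if func.expr.name not in LOGGER_NAMES or func.attrname not in LOG_METHODS:
            return
        if not node.args:
            return
        first_arg = node.args[0]
        if not isinstance(first_arg, nodes.Const) or not isinstance(first_arg.value, str):
            return
        message = first_arg.value.strip()
        if not message:
            return
        # Flag lowercase first character or a trailing period (covers "..." too).
        if message[0].islower() or message.endswith("."):
            self.add_message("hass-logger-format", node=node)


def register(linter):
    """Standard pylint plugin entry point."""
    linter.register_checker(HassLoggerFormatChecker(linter))

Loaded with pylint's --load-plugins option (assuming the sketch is importable), a checker like this would flag the removed lines in the hunks below while accepting their replacements.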
@@ -163,7 +163,7 @@ class AbodeFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
     async def async_step_import(self, import_config):
         """Import a config entry from configuration.yaml."""
         if self._async_current_entries():
-            LOGGER.warning("Already configured. Only a single configuration possible.")
+            LOGGER.warning("Already configured; Only a single configuration possible")
             return self.async_abort(reason="single_instance_allowed")
 
         self._polling = import_config.get(CONF_POLLING, False)

@@ -53,7 +53,7 @@ class ActiontecDeviceScanner(DeviceScanner):
         self.last_results = []
         data = self.get_actiontec_data()
         self.success_init = data is not None
-        _LOGGER.info("canner initialized")
+        _LOGGER.info("Scanner initialized")
 
     def scan_devices(self):
         """Scan for new devices and return a list with found device IDs."""

@@ -90,7 +90,7 @@ def format_condition(condition: str) -> str:
     for key, value in CONDITIONS_MAP.items():
         if condition in value:
             return key
-    _LOGGER.error('condition "%s" not found in CONDITIONS_MAP', condition)
+    _LOGGER.error('Condition "%s" not found in CONDITIONS_MAP', condition)
     return condition
 
 
@@ -175,14 +175,14 @@ class WeatherUpdateCoordinator(DataUpdateCoordinator):
         )
         if self._town:
             _LOGGER.debug(
-                "town found for coordinates [%s, %s]: %s",
+                "Town found for coordinates [%s, %s]: %s",
                 self._latitude,
                 self._longitude,
                 self._town,
             )
         if not self._town:
             _LOGGER.error(
-                "town not found for coordinates [%s, %s]",
+                "Town not found for coordinates [%s, %s]",
                 self._latitude,
                 self._longitude,
             )
@@ -197,7 +197,7 @@ class WeatherUpdateCoordinator(DataUpdateCoordinator):
         daily = self._aemet.get_specific_forecast_town_daily(self._town[AEMET_ATTR_ID])
         if not daily:
             _LOGGER.error(
-                'error fetching daily data for town "%s"', self._town[AEMET_ATTR_ID]
+                'Error fetching daily data for town "%s"', self._town[AEMET_ATTR_ID]
             )
 
         hourly = self._aemet.get_specific_forecast_town_hourly(
@@ -205,7 +205,7 @@ class WeatherUpdateCoordinator(DataUpdateCoordinator):
         )
         if not hourly:
             _LOGGER.error(
-                'error fetching hourly data for town "%s"', self._town[AEMET_ATTR_ID]
+                'Error fetching hourly data for town "%s"', self._town[AEMET_ATTR_ID]
             )
 
         station = None
@@ -215,7 +215,7 @@ class WeatherUpdateCoordinator(DataUpdateCoordinator):
             )
             if not station:
                 _LOGGER.error(
-                    'error fetching data for station "%s"',
+                    'Error fetching data for station "%s"',
                     self._station[AEMET_ATTR_IDEMA],
                 )
 
@@ -197,7 +197,7 @@ class AmcrestCam(Camera):
         # and before initiating shapshot.
         while self._snapshot_task:
             self._check_snapshot_ok()
-            _LOGGER.debug("Waiting for previous snapshot from %s ...", self._name)
+            _LOGGER.debug("Waiting for previous snapshot from %s", self._name)
             await self._snapshot_task
             self._check_snapshot_ok()
         # Run snapshot command in separate Task that can't be cancelled so

@@ -42,7 +42,7 @@ PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
 async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
     """Import Awair configuration from YAML."""
     LOGGER.warning(
-        "Loading Awair via platform setup is deprecated. Please remove it from your configuration."
+        "Loading Awair via platform setup is deprecated; Please remove it from your configuration"
     )
     hass.async_create_task(
         hass.config_entries.flow.async_init(

@@ -310,7 +310,7 @@ async def get_device(hass, host, port, username, password):
         return device
 
     except axis.Unauthorized as err:
-        LOGGER.warning("Connected to device at %s but not registered.", host)
+        LOGGER.warning("Connected to device at %s but not registered", host)
         raise AuthenticationRequired from err
 
    except (asyncio.TimeoutError, axis.RequestError) as err:
@@ -102,7 +102,7 @@ class AzureDevOpsEntity(Entity):
         else:
             if self._available:
                 _LOGGER.debug(
-                    "An error occurred while updating Azure DevOps sensor.",
+                    "An error occurred while updating Azure DevOps sensor",
                     exc_info=True,
                 )
                 self._available = False

@@ -75,7 +75,7 @@ class BboxDeviceScanner(DeviceScanner):
 
         Returns boolean if scanning successful.
         """
-        _LOGGER.info("Scanning...")
+        _LOGGER.info("Scanning")
 
         box = pybbox.Bbox(ip=self.host)
         result = box.get_all_connected_devices()

@@ -60,7 +60,7 @@ class BloomSky:
         self._endpoint_argument = "unit=intl" if is_metric else ""
         self.devices = {}
         self.is_metric = is_metric
-        _LOGGER.debug("Initial BloomSky device load...")
+        _LOGGER.debug("Initial BloomSky device load")
         self.refresh_devices()
 
     @Throttle(MIN_TIME_BETWEEN_UPDATES)
@@ -309,7 +309,7 @@ class BrSensor(Entity):
             try:
                 condition = data.get(FORECAST)[fcday].get(CONDITION)
             except IndexError:
-                _LOGGER.warning("No forecast for fcday=%s...", fcday)
+                _LOGGER.warning("No forecast for fcday=%s", fcday)
                 return False
 
             if condition:
@@ -339,7 +339,7 @@ class BrSensor(Entity):
                     self._state = round(self._state * 3.6, 1)
                 return True
             except IndexError:
-                _LOGGER.warning("No forecast for fcday=%s...", fcday)
+                _LOGGER.warning("No forecast for fcday=%s", fcday)
                 return False
 
         # update all other sensors
@@ -347,7 +347,7 @@ class BrSensor(Entity):
                 self._state = data.get(FORECAST)[fcday].get(self.type[:-3])
                 return True
             except IndexError:
-                _LOGGER.warning("No forecast for fcday=%s...", fcday)
+                _LOGGER.warning("No forecast for fcday=%s", fcday)
                 return False
 
         if self.type == SYMBOL or self.type.startswith(CONDITION):

@@ -82,7 +82,7 @@ class BrData:
 
     async def get_data(self, url):
         """Load data from specified url."""
-        _LOGGER.debug("Calling url: %s...", url)
+        _LOGGER.debug("Calling url: %s", url)
         result = {SUCCESS: False, MESSAGE: None}
         resp = None
         try:
@@ -47,7 +47,7 @@ class CiscoDeviceScanner(DeviceScanner):
         self.last_results = {}
 
         self.success_init = self._update_info()
-        _LOGGER.info("cisco_ios scanner initialized")
+        _LOGGER.info("Initialized cisco_ios scanner")
 
     def get_device_name(self, device):
         """Get the firmware doesn't save the name of the wireless device."""
@@ -131,8 +131,7 @@ class CiscoDeviceScanner(DeviceScanner):
 
             return devices_result.decode("utf-8")
         except pxssh.ExceptionPxssh as px_e:
-            _LOGGER.error("pxssh failed on login")
-            _LOGGER.error(px_e)
+            _LOGGER.error("Failed to login via pxssh: %s", px_e)
 
         return None
 

@@ -62,7 +62,7 @@ def retry(method):
                 return method(device, *args, **kwargs)
             except (decora.decoraException, AttributeError, BTLEException):
                 _LOGGER.warning(
-                    "Decora connect error for device %s. Reconnecting...",
+                    "Decora connect error for device %s. Reconnecting",
                     device.name,
                 )
                 # pylint: disable=protected-access
@@ -80,7 +80,7 @@ def setup(hass, config):
 
             if req.status_code != HTTP_OK:
                 _LOGGER.warning(
-                    "downloading '%s' failed, status_code=%d", url, req.status_code
+                    "Downloading '%s' failed, status_code=%d", url, req.status_code
                 )
                 hass.bus.fire(
                     f"{DOMAIN}_{DOWNLOAD_FAILED_EVENT}",

@@ -103,7 +103,7 @@ async def _update_duckdns(session, domain, token, *, txt=_SENTINEL, clear=False)
 def async_track_time_interval_backoff(hass, action, intervals) -> CALLBACK_TYPE:
     """Add a listener that fires repetitively at every timedelta interval."""
     if not iscoroutinefunction:
-        _LOGGER.error("action needs to be a coroutine and return True/False")
+        _LOGGER.error("Action needs to be a coroutine and return True/False")
         return
 
     if not isinstance(intervals, (list, tuple)):

@@ -179,7 +179,7 @@ class Monitor:
     def stop(self):
         """Signal runner to stop and join thread."""
         if self.scanning:
-            _LOGGER.debug("Stopping...")
+            _LOGGER.debug("Stopping")
             self.scanner.stop()
             _LOGGER.debug("Stopped")
             self.scanning = False
@@ -144,9 +144,7 @@ async def async_setup(hass, config):
     @callback
     def connection_fail_callback(data):
         """Network failure callback."""
-        _LOGGER.error(
-            "Could not establish a connection with the Envisalink- retrying..."
-        )
+        _LOGGER.error("Could not establish a connection with the Envisalink- retrying")
         if not sync_connect.done():
             hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, stop_envisalink)
             sync_connect.set_result(True)
@@ -162,13 +160,13 @@ async def async_setup(hass, config):
     @callback
     def zones_updated_callback(data):
         """Handle zone timer updates."""
-        _LOGGER.debug("Envisalink sent a zone update event. Updating zones...")
+        _LOGGER.debug("Envisalink sent a zone update event. Updating zones")
         async_dispatcher_send(hass, SIGNAL_ZONE_UPDATE, data)
 
     @callback
     def alarm_data_updated_callback(data):
         """Handle non-alarm based info updates."""
-        _LOGGER.debug("Envisalink sent new alarm info. Updating alarms...")
+        _LOGGER.debug("Envisalink sent new alarm info. Updating alarms")
         async_dispatcher_send(hass, SIGNAL_KEYPAD_UPDATE, data)
 
     @callback

@@ -230,7 +230,7 @@ class FanEntity(ToggleEntity):
     async def async_set_speed_deprecated(self, speed: str):
         """Set the speed of the fan."""
         _LOGGER.warning(
-            "fan.set_speed is deprecated, use fan.set_percentage or fan.set_preset_mode instead."
+            "The fan.set_speed service is deprecated, use fan.set_percentage or fan.set_preset_mode instead"
         )
         await self.async_set_speed(speed)
 
@@ -368,7 +368,7 @@ class FanEntity(ToggleEntity):
             percentage = None
         elif speed is not None:
             _LOGGER.warning(
-                "Calling fan.turn_on with the speed argument is deprecated, use percentage or preset_mode instead."
+                "Calling fan.turn_on with the speed argument is deprecated, use percentage or preset_mode instead"
             )
             if speed in self.preset_modes:
                 preset_mode = speed
@@ -75,7 +75,7 @@ def setup_platform(hass, config, add_entities, discovery_info=None):
 
     for account in balance_accounts:
         if config[CONF_ACCOUNTS] and account.iban not in account_config:
-            _LOGGER.info("skipping account %s for bank %s", account.iban, fints_name)
+            _LOGGER.info("Skipping account %s for bank %s", account.iban, fints_name)
             continue
 
         account_name = account_config.get(account.iban)
@@ -87,7 +87,7 @@ def setup_platform(hass, config, add_entities, discovery_info=None):
     for account in holdings_accounts:
         if config[CONF_HOLDINGS] and account.accountnumber not in holdings_config:
             _LOGGER.info(
-                "skipping holdings %s for bank %s", account.accountnumber, fints_name
+                "Skipping holdings %s for bank %s", account.accountnumber, fints_name
             )
             continue
 

@@ -45,7 +45,7 @@ def setup_platform(hass, config, add_entities, discovery_info=None):
     path = config.get(CONF_FOLDER_PATHS)
 
     if not hass.config.is_allowed_path(path):
-        _LOGGER.error("folder %s is not valid or allowed", path)
+        _LOGGER.error("Folder %s is not valid or allowed", path)
     else:
         folder = Folder(path, config.get(CONF_FILTER))
         add_entities([folder], True)

@@ -43,7 +43,7 @@ def setup(hass, config):
         path = watcher[CONF_FOLDER]
         patterns = watcher[CONF_PATTERNS]
         if not hass.config.is_allowed_path(path):
-            _LOGGER.error("folder %s is not valid or allowed", path)
+            _LOGGER.error("Folder %s is not valid or allowed", path)
             return False
         Watcher(path, patterns, hass)
 

@@ -68,7 +68,7 @@ PTZ_GOTO_PRESET_COMMAND = "ptz_goto_preset"
 async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
     """Set up a Foscam IP Camera."""
     LOGGER.warning(
-        "Loading foscam via platform config is deprecated, it will be automatically imported. Please remove it afterwards."
+        "Loading foscam via platform config is deprecated, it will be automatically imported; Please remove it afterwards"
     )
 
     config_new = {
@@ -127,16 +127,16 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
             return await self._validate_and_create(import_config)
 
         except CannotConnect:
-            LOGGER.error("Error importing foscam platform config: cannot connect.")
+            LOGGER.error("Error importing foscam platform config: cannot connect")
             return self.async_abort(reason="cannot_connect")
 
         except InvalidAuth:
-            LOGGER.error("Error importing foscam platform config: invalid auth.")
+            LOGGER.error("Error importing foscam platform config: invalid auth")
             return self.async_abort(reason="invalid_auth")
 
         except InvalidResponse:
             LOGGER.exception(
-                "Error importing foscam platform config: invalid response from camera."
+                "Error importing foscam platform config: invalid response from camera"
             )
             return self.async_abort(reason="invalid_response")
 
@@ -145,7 +145,7 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
 
         except Exception:  # pylint: disable=broad-except
             LOGGER.exception(
-                "Error importing foscam platform config: unexpected exception."
+                "Error importing foscam platform config: unexpected exception"
             )
             return self.async_abort(reason="unknown")
 

@@ -33,8 +33,8 @@ async def async_setup_platform(
 ) -> None:
     """Convert old style file configs to new style configs."""
     _LOGGER.warning(
-        "Loading gogogate2 via platform config is deprecated. The configuration"
-        " has been migrated to a config entry and can be safely removed."
+        "Loading gogogate2 via platform config is deprecated; The configuration"
+        " has been migrated to a config entry and can be safely removed"
     )
     hass.async_create_task(
         hass.config_entries.flow.async_init(
@@ -399,7 +399,7 @@ def setup_platform(hass, config, add_entities, discovery_info=None):
             sensors = STORAGE_SENSOR_TYPES
         else:
            _LOGGER.debug(
-                "Device type %s was found but is not supported right now.",
+                "Device type %s was found but is not supported right now",
                 device["deviceType"],
             )
 

@@ -82,7 +82,7 @@ class GstreamerDevice(MediaPlayerEntity):
     def play_media(self, media_type, media_id, **kwargs):
         """Play media."""
         if media_type != MEDIA_TYPE_MUSIC:
-            _LOGGER.error("invalid media type")
+            _LOGGER.error("Invalid media type")
             return
         self._player.queue(media_id)
 

@@ -96,8 +96,8 @@ class HabitipyData:
         except ClientResponseError as error:
             if error.status == HTTP_TOO_MANY_REQUESTS:
                 _LOGGER.warning(
-                    "Sensor data update for %s has too many API requests."
-                    " Skipping the update.",
+                    "Sensor data update for %s has too many API requests;"
+                    " Skipping the update",
                     DOMAIN,
                 )
             else:
@@ -113,8 +113,8 @@ class HabitipyData:
         except ClientResponseError as error:
             if error.status == HTTP_TOO_MANY_REQUESTS:
                 _LOGGER.warning(
-                    "Sensor data update for %s has too many API requests."
-                    " Skipping the update.",
+                    "Sensor data update for %s has too many API requests;"
+                    " Skipping the update",
                     DOMAIN,
                 )
             else:
@@ -221,7 +221,7 @@ class HangoutsBot:
     async def _on_disconnect(self):
         """Handle disconnecting."""
         if self._connected:
-            _LOGGER.debug("Connection lost! Reconnect...")
+            _LOGGER.debug("Connection lost! Reconnect")
             await self.async_connect()
         else:
             dispatcher.async_dispatcher_send(self.hass, EVENT_HANGOUTS_DISCONNECTED)

@@ -218,7 +218,7 @@ def setup(hass: HomeAssistant, base_config):
         _LOGGER.debug("Reached _adapter_watchdog")
         event.async_call_later(hass, WATCHDOG_INTERVAL, _adapter_watchdog)
         if not adapter.initialized:
-            _LOGGER.info("Adapter not initialized. Trying to restart.")
+            _LOGGER.info("Adapter not initialized; Trying to restart")
             hass.bus.fire(EVENT_HDMI_CEC_UNAVAILABLE)
             adapter.init()
 

@@ -81,7 +81,7 @@ class HitronCODADeviceScanner(DeviceScanner):
 
     def _login(self):
         """Log in to the router. This is required for subsequent api calls."""
-        _LOGGER.info("Logging in to CODA...")
+        _LOGGER.info("Logging in to CODA")
 
         try:
             data = [("user", self._username), (self._type, self._password)]
@@ -101,7 +101,7 @@ class HitronCODADeviceScanner(DeviceScanner):
 
     def _update_info(self):
         """Get ARP from router."""
-        _LOGGER.info("Fetching...")
+        _LOGGER.info("Fetching")
 
         if self._userid is None:
             if not self._login():
@@ -58,7 +58,8 @@ async def async_setup(hass: ha.HomeAssistant, config: dict) -> bool:
         # Generic turn on/off method requires entity id
         if not all_referenced:
             _LOGGER.error(
-                "homeassistant.%s cannot be called without a target", service.service
+                "The service homeassistant.%s cannot be called without a target",
+                service.service,
             )
             return
 

@@ -536,7 +536,7 @@ class HomeKit:
                 "The bridge %s has entity %s. For best performance, "
                 "and to prevent unexpected unavailability, create and "
                 "pair a separate HomeKit instance in accessory mode for "
-                "this entity.",
+                "this entity",
                 self._name,
                 state.entity_id,
             )

@@ -63,6 +63,6 @@ class TurboJPEGSingleton:
             TurboJPEGSingleton.__instance = TurboJPEG()
         except Exception:  # pylint: disable=broad-except
             _LOGGER.exception(
-                "libturbojpeg is not installed, cameras may impact HomeKit performance"
+                "Error loading libturbojpeg; Cameras may impact HomeKit performance"
             )
             TurboJPEGSingleton.__instance = False
@@ -132,8 +132,8 @@ class HomeKitHeaterCoolerEntity(HomeKitEntity, ClimateEntity):
         else:
             hvac_mode = TARGET_HEATER_COOLER_STATE_HOMEKIT_TO_HASS.get(state)
             _LOGGER.warning(
-                "HomeKit device %s: Setting temperature in %s mode is not supported yet."
-                " Consider raising a ticket if you have this device and want to help us implement this feature.",
+                "HomeKit device %s: Setting temperature in %s mode is not supported yet;"
+                " Consider raising a ticket if you have this device and want to help us implement this feature",
                 self.entity_id,
                 hvac_mode,
             )
@@ -147,8 +147,8 @@ class HomeKitHeaterCoolerEntity(HomeKitEntity, ClimateEntity):
             return
         if hvac_mode not in {HVAC_MODE_HEAT, HVAC_MODE_COOL}:
             _LOGGER.warning(
-                "HomeKit device %s: Setting temperature in %s mode is not supported yet."
-                " Consider raising a ticket if you have this device and want to help us implement this feature.",
+                "HomeKit device %s: Setting temperature in %s mode is not supported yet;"
+                " Consider raising a ticket if you have this device and want to help us implement this feature",
                 self.entity_id,
                 hvac_mode,
             )

@@ -184,9 +184,7 @@ class HorizonDevice(MediaPlayerEntity):
             elif channel:
                 self._client.select_channel(channel)
         except OSError as msg:
-            _LOGGER.error(
-                "%s disconnected: %s. Trying to reconnect...", self._name, msg
-            )
+            _LOGGER.error("%s disconnected: %s. Trying to reconnect", self._name, msg)
 
             # for reconnect, first gracefully disconnect
             self._client.disconnect()
@@ -197,14 +197,14 @@ class Router:
                 self.subscriptions.pop(key)
         except ResponseErrorLoginRequiredException:
             if isinstance(self.connection, AuthorizedConnection):
-                _LOGGER.debug("Trying to authorize again...")
+                _LOGGER.debug("Trying to authorize again")
                 if self.connection.enforce_authorized_connection():
                     _LOGGER.debug(
-                        "...success, %s will be updated by a future periodic run",
+                        "success, %s will be updated by a future periodic run",
                         key,
                     )
                 else:
-                    _LOGGER.debug("...failed")
+                    _LOGGER.debug("failed")
                 return
             _LOGGER.info(
                 "%s requires authorization, excluding from future updates", key

@@ -252,7 +252,7 @@ class HueBridge:
             # we already created a new config flow, no need to do it again
             return
         LOGGER.error(
-            "Unable to authorize to bridge %s, setup the linking again.", self.host
+            "Unable to authorize to bridge %s, setup the linking again", self.host
         )
         self.authorized = False
         create_config_flow(self.hass, self.host)

@@ -100,7 +100,7 @@ async def async_update_huisbaasje(huisbaasje):
         # handled by the data update coordinator.
         async with async_timeout.timeout(FETCH_TIMEOUT):
             if not huisbaasje.is_authenticated():
-                _LOGGER.warning("Huisbaasje is unauthenticated. Reauthenticating...")
+                _LOGGER.warning("Huisbaasje is unauthenticated. Reauthenticating")
                 await huisbaasje.authenticate()
 
             current_measurements = await huisbaasje.current_measurements()
@@ -125,9 +125,9 @@ class IcloudAccount:
             # Login failed which means credentials need to be updated.
             _LOGGER.error(
                 (
-                    "Your password for '%s' is no longer working. Go to the "
+                    "Your password for '%s' is no longer working; Go to the "
                     "Integrations menu and click on Configure on the discovered Apple "
-                    "iCloud card to login again."
+                    "iCloud card to login again"
                 ),
                 self._config_entry.data[CONF_USERNAME],
             )

@@ -65,10 +65,10 @@ async def _async_connect(**kwargs):
     """Connect to the Insteon modem."""
     try:
         await async_connect(**kwargs)
-        _LOGGER.info("Connected to Insteon modem.")
+        _LOGGER.info("Connected to Insteon modem")
         return True
     except ConnectionError:
-        _LOGGER.error("Could not connect to Insteon modem.")
+        _LOGGER.error("Could not connect to Insteon modem")
         return False
 
 

@@ -144,7 +144,7 @@ async def async_setup_entry(
         https = True
         port = host.port or 443
     else:
-        _LOGGER.error("isy994 host value in configuration is invalid")
+        _LOGGER.error("The isy994 host value in configuration is invalid")
         return False
 
     # Connect to ISY controller.
@@ -60,7 +60,7 @@ async def validate_input(hass: core.HomeAssistant, data):
         https = True
         port = host.port or 443
     else:
-        _LOGGER.error("isy994 host value in configuration is invalid")
+        _LOGGER.error("The isy994 host value in configuration is invalid")
         raise InvalidHost
 
     # Connect to ISY controller.

@@ -233,7 +233,7 @@ class KebaHandler(KebaKeContact):
             self._set_fast_polling()
         except (KeyError, ValueError) as ex:
             _LOGGER.warning(
-                "failsafe_timeout, failsafe_fallback and/or "
-                "failsafe_persist value are not correct. %s",
+                "Values are not correct for: failsafe_timeout, failsafe_fallback and/or "
+                "failsafe_persist: %s",
                 ex,
             )

@@ -167,7 +167,7 @@ class KeeneticRouter:
 
     def _update_devices(self):
         """Get ARP from keenetic router."""
-        _LOGGER.debug("Fetching devices from router...")
+        _LOGGER.debug("Fetching devices from router")
 
         try:
             _response = self._client.get_devices(

@@ -102,7 +102,7 @@ class KiwiLock(LockEntity):
         try:
             self._client.open_door(self.lock_id)
         except KiwiException:
-            _LOGGER.error("failed to open door")
+            _LOGGER.error("Failed to open door")
         else:
             self._state = STATE_UNLOCKED
             self.hass.add_job(
@@ -376,7 +376,7 @@ class AlarmPanel:
             self.async_desired_settings_payload()
             != self.async_current_settings_payload()
         ):
-            _LOGGER.info("pushing settings to device %s", self.device_id)
+            _LOGGER.info("Pushing settings to device %s", self.device_id)
             await self.client.put_settings(**self.async_desired_settings_payload())
 
 

@@ -59,7 +59,7 @@ def setup_platform(hass, config, add_entities, discovery_info=None):
     """Set up the LIFX platform."""
     _LOGGER.warning(
         "The LIFX Legacy platform is deprecated and will be removed in "
-        "Home Assistant Core 2021.6.0. Use the LIFX integration instead."
+        "Home Assistant Core 2021.6.0; Use the LIFX integration instead"
     )
 
     server_addr = config.get(CONF_SERVER)

@@ -56,7 +56,7 @@ class MerakiView(HomeAssistantView):
             return self.json_message("Invalid JSON", HTTP_BAD_REQUEST)
         _LOGGER.debug("Meraki Data from Post: %s", json.dumps(data))
         if not data.get("secret", False):
-            _LOGGER.error("secret invalid")
+            _LOGGER.error("The secret is invalid")
             return self.json_message("No secret", HTTP_UNPROCESSABLE_ENTITY)
         if data["secret"] != self.secret:
             _LOGGER.error("Invalid Secret received from Meraki")
@@ -159,7 +159,7 @@ async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry) -> bool
             )
         else:
             _LOGGER.warning(
-                "Weather alert not available: The city %s is not in metropolitan France or Andorre.",
+                "Weather alert not available: The city %s is not in metropolitan France or Andorre",
                 entry.title,
             )
 
@@ -189,7 +189,7 @@ async def async_unload_entry(hass: HomeAssistantType, entry: ConfigEntry):
         ].data.position.get("dept")
         hass.data[DOMAIN][department] = False
         _LOGGER.debug(
-            "Weather alert for depatment %s unloaded and released. It can be added now by another city.",
+            "Weather alert for depatment %s unloaded and released. It can be added now by another city",
             department,
         )
 

@@ -59,7 +59,7 @@ async def async_setup_entry(
         True,
     )
     _LOGGER.debug(
-        "Weather entity (%s) added for %s.",
+        "Weather entity (%s) added for %s",
         entry.options.get(CONF_MODE, FORECAST_MODE_DAILY),
         coordinator.data.position["name"],
     )

@@ -30,7 +30,7 @@ class ConnectMotionGateway:
 
     async def async_connect_gateway(self, host, key):
         """Connect to the Motion Gateway."""
-        _LOGGER.debug("Initializing with host %s (key %s...)", host, key[:3])
+        _LOGGER.debug("Initializing with host %s (key %s)", host, key[:3])
         self._gateway_device = MotionGateway(
             ip=host, key=key, multicast=self._multicast
         )
@@ -120,7 +120,7 @@ def validate_options(value):
         and CONF_VALUE_TEMPLATE in value
     ):
         _LOGGER.warning(
-            "using 'value_template' for 'position_topic' is deprecated "
+            "Using 'value_template' for 'position_topic' is deprecated "
             "and will be removed from Home Assistant in version 2021.6, "
             "please replace it with 'position_template'"
         )

@@ -145,7 +145,7 @@ class MyStromLight(LightEntity):
         try:
             await self._bulb.set_off()
         except MyStromConnectionError:
-            _LOGGER.warning("myStrom bulb not online")
+            _LOGGER.warning("The myStrom bulb not online")
 
     async def async_update(self):
         """Fetch new state data for this light."""

@@ -207,7 +207,7 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
         await hass.async_add_executor_job(
             hass.data[DOMAIN][entry.entry_id][AUTH].dropwebhook
         )
-        _LOGGER.info("Unregister Netatmo webhook.")
+        _LOGGER.info("Unregister Netatmo webhook")
 
     await hass.data[DOMAIN][entry.entry_id][DATA_HANDLER].async_cleanup()
 
@@ -123,10 +123,10 @@ async def async_setup_entry(hass, entry, async_add_entities):
         entities = []
 
         for home_id in get_all_home_ids(home_data):
-            _LOGGER.debug("Setting up home %s ...", home_id)
+            _LOGGER.debug("Setting up home %s", home_id)
             for room_id in home_data.rooms[home_id].keys():
                 room_name = home_data.rooms[home_id][room_id]["name"]
-                _LOGGER.debug("Setting up room %s (%s) ...", room_name, room_id)
+                _LOGGER.debug("Setting up room %s (%s)", room_name, room_id)
                 signal_name = f"{HOMESTATUS_DATA_CLASS_NAME}-{home_id}"
                 await data_handler.register_data_class(
                     HOMESTATUS_DATA_CLASS_NAME, signal_name, None, home_id=home_id

@@ -135,7 +135,7 @@ def setup(hass, config):
 
     def setup_leaf(car_config):
         """Set up a car."""
-        _LOGGER.debug("Logging into You+Nissan...")
+        _LOGGER.debug("Logging into You+Nissan")
 
         username = car_config[CONF_USERNAME]
         password = car_config[CONF_PASSWORD]

@@ -89,7 +89,7 @@ class NmapDeviceScanner(DeviceScanner):
 
         Returns boolean if scanning successful.
         """
-        _LOGGER.debug("Scanning...")
+        _LOGGER.debug("Scanning")
 
         scanner = PortScanner()
 
@@ -50,7 +50,7 @@ def wsdiscovery() -> list[Service]:
 
 async def async_discovery(hass) -> bool:
     """Return if there are devices that can be discovered."""
-    LOGGER.debug("Starting ONVIF discovery...")
+    LOGGER.debug("Starting ONVIF discovery")
     services = await hass.async_add_executor_job(wsdiscovery)
 
     devices = []

@@ -439,7 +439,7 @@ class ONVIFDevice:
                 await ptz_service.Stop(req)
         except ONVIFError as err:
             if "Bad Request" in err.reason:
-                LOGGER.warning("Device '%s' doesn't support PTZ.", self.name)
+                LOGGER.warning("Device '%s' doesn't support PTZ", self.name)
             else:
                 LOGGER.error("Error trying to perform PTZ action: %s", err)
 

@@ -132,7 +132,7 @@ class EventManager:
 
         if not restarted:
             LOGGER.warning(
-                "Failed to restart ONVIF PullPoint subscription for '%s'. Retrying...",
+                "Failed to restart ONVIF PullPoint subscription for '%s'. Retrying",
                 self.unique_id,
             )
             # Try again in a minute
@@ -44,7 +44,7 @@ def setup_platform(hass, config, add_entities_callback, discovery_info=None):
     switch_conf = config.get(CONF_SWITCHES, [config])
 
     if config.get(CONF_DISCOVERY):
-        _LOGGER.info("Discovering S20 switches ...")
+        _LOGGER.info("Discovering S20 switches")
         switch_data.update(discover())
 
     for switch in switch_conf:

@@ -94,7 +94,7 @@ async def async_setup_entry(hass, config_entry):
     unique_id = config_entry.unique_id
     if device_info is None:
         _LOGGER.error(
-            "Couldn't gather device info. Please restart Home Assistant with your TV turned on and connected to your network."
+            "Couldn't gather device info; Please restart Home Assistant with your TV turned on and connected to your network"
        )
     else:
         unique_id = device_info[ATTR_UDN]

@@ -418,9 +418,11 @@ class PlexServer:
             """Connect to a plex.tv resource and return a Plex client."""
             try:
                 client = resource.connect(timeout=3)
-                _LOGGER.debug("plex.tv resource connection successful: %s", client)
+                _LOGGER.debug("Resource connection successful to plex.tv: %s", client)
             except NotFound:
-                _LOGGER.error("plex.tv resource connection failed: %s", resource.name)
+                _LOGGER.error(
+                    "Resource connection failed to plex.tv: %s", resource.name
+                )
             else:
                 client.proxyThroughServer(value=False, server=self._plex_server)
                 self._client_device_cache[client.machineIdentifier] = client
@@ -38,7 +38,7 @@ async def async_setup(hass: HomeAssistant, config: dict):
 
     conf = config[DOMAIN]
 
-    _LOGGER.info("Found Plum Lightpad configuration in config, importing...")
+    _LOGGER.info("Found Plum Lightpad configuration in config, importing")
     hass.async_create_task(
         hass.config_entries.flow.async_init(
             DOMAIN, context={"source": SOURCE_IMPORT}, data=conf

@@ -118,7 +118,7 @@ class PoolSenseDataUpdateCoordinator(DataUpdateCoordinator):
             try:
                 data = await self.poolsense.get_poolsense_data()
             except (PoolSenseError) as error:
-                _LOGGER.error("PoolSense query did not complete.")
+                _LOGGER.error("PoolSense query did not complete")
                 raise UpdateFailed(error) from error
 
             return data

@@ -93,7 +93,7 @@ class PushsaferNotificationService(BaseNotificationService):
                 _LOGGER.debug("Loading image from file %s", local_path)
                 picture1_encoded = self.load_from_file(local_path)
             else:
-                _LOGGER.warning("missing url or local_path for picture1")
+                _LOGGER.warning("Missing url or local_path for picture1")
         else:
             _LOGGER.debug("picture1 is not specified")
 
@@ -143,7 +143,7 @@ class PushsaferNotificationService(BaseNotificationService):
         else:
             response = requests.get(url, timeout=CONF_TIMEOUT)
             return self.get_base64(response.content, response.headers["content-type"])
-        _LOGGER.warning("url not found in param")
+        _LOGGER.warning("No url was found in param")
 
         return None
 
@@ -372,6 +372,6 @@ class RadioThermostat(ClimateEntity):
             self.device.program_mode = PRESET_MODE_TO_CODE[preset_mode]
         else:
             _LOGGER.error(
-                "preset_mode %s not in PRESET_MODES",
+                "Preset_mode %s not in PRESET_MODES",
                 preset_mode,
             )

@@ -57,8 +57,8 @@ async def async_setup_platform(
 ):
     """Import Recollect Waste configuration from YAML."""
     LOGGER.warning(
-        "Loading ReCollect Waste via platform setup is deprecated. "
-        "Please remove it from your configuration."
+        "Loading ReCollect Waste via platform setup is deprecated; "
+        "Please remove it from your configuration"
     )
     hass.async_create_task(
         hass.config_entries.flow.async_init(

@@ -175,7 +175,7 @@ def validate_sqlite_database(dbpath: str, db_integrity_check: bool) -> bool:
         run_checks_on_open_db(dbpath, conn.cursor(), db_integrity_check)
         conn.close()
     except sqlite3.DatabaseError:
-        _LOGGER.exception("The database at %s is corrupt or malformed.", dbpath)
+        _LOGGER.exception("The database at %s is corrupt or malformed", dbpath)
         return False
 
     return True
@@ -210,7 +210,7 @@ def run_checks_on_open_db(dbpath, cursor, db_integrity_check):
 
     if not last_run_was_clean:
         _LOGGER.warning(
-            "The system could not validate that the sqlite3 database at %s was shutdown cleanly.",
+            "The system could not validate that the sqlite3 database at %s was shutdown cleanly",
             dbpath,
         )
 
@@ -246,7 +246,7 @@ async def async_setup(hass, config):
 
         # If HA is not stopping, initiate new connection
         if hass.state != CoreState.stopping:
-            _LOGGER.warning("disconnected from Rflink, reconnecting")
+            _LOGGER.warning("Disconnected from Rflink, reconnecting")
             hass.async_create_task(connect())
 
     async def connect():

@@ -428,7 +428,7 @@ def find_possible_pt2262_device(device_ids, device_id):
         if size is not None:
             size = len(dev_id) - size - 1
             _LOGGER.info(
-                "rfxtrx: found possible device %s for %s "
+                "Found possible device %s for %s "
                 "with the following configuration:\n"
                 "data_bits=%d\n"
                 "command_on=0x%s\n"

@@ -46,7 +46,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
     if mac in discovered_gateways:
         connect_info = discovered_gateways[mac]
     else:
-        _LOGGER.warning("Gateway rediscovery failed.")
+        _LOGGER.warning("Gateway rediscovery failed")
         # Static connection defined or fallback from discovery
         connect_info = {
             SL_GATEWAY_NAME: name_for_mac(mac),
@@ -47,10 +47,10 @@ class ScreenLogicSwitch(ScreenlogicEntity, SwitchEntity):
         if await self.hass.async_add_executor_job(
             self.gateway.set_circuit, self._data_key, circuit_value
         ):
-            _LOGGER.info("screenlogic turn %s %s", circuit_value, self._data_key)
+            _LOGGER.debug("Screenlogic turn %s %s", circuit_value, self._data_key)
             await self.coordinator.async_request_refresh()
         else:
-            _LOGGER.info("screenlogic turn %s %s error", circuit_value, self._data_key)
+            _LOGGER.info("Screenlogic turn %s %s error", circuit_value, self._data_key)
 
     @property
     def circuit(self):

@@ -105,7 +105,7 @@ class ScreenLogicWaterHeater(ScreenlogicEntity, WaterHeaterEntity):
         ):
             await self.coordinator.async_request_refresh()
         else:
-            _LOGGER.error("screenlogic set_temperature error")
+            _LOGGER.error("Screenlogic set_temperature error")
 
     async def async_set_operation_mode(self, operation_mode) -> None:
         """Set the operation mode."""
@@ -115,7 +115,7 @@ class ScreenLogicWaterHeater(ScreenlogicEntity, WaterHeaterEntity):
         ):
             await self.coordinator.async_request_refresh()
         else:
-            _LOGGER.error("screenlogic set_operation_mode error")
+            _LOGGER.error("Screenlogic set_operation_mode error")
 
     @property
     def body(self):

@@ -47,7 +47,7 @@ def setup_platform(hass, config, add_entities, discovery_info=None):
     """Set up the Skybeacon sensor."""
     name = config.get(CONF_NAME)
     mac = config.get(CONF_MAC)
-    _LOGGER.debug("Setting up...")
+    _LOGGER.debug("Setting up")
 
     mon = Monitor(hass, mac, name)
     add_entities([SkybeaconTemp(name, mon)])
@@ -59,12 +59,12 @@ def setup(hass, config):
 
     def poll_device_update(event_time):
         """Update Smarty device."""
-        _LOGGER.debug("Updating Smarty device...")
+        _LOGGER.debug("Updating Smarty device")
         if smarty.update():
-            _LOGGER.debug("Update success...")
+            _LOGGER.debug("Update success")
             dispatcher_send(hass, SIGNAL_UPDATE_SMARTY)
         else:
-            _LOGGER.debug("Update failed...")
+            _LOGGER.debug("Update failed")
 
     track_time_interval(hass, poll_device_update, timedelta(seconds=30))
 

@@ -101,7 +101,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
 
     if not mylink_status or "error" in mylink_status:
         _LOGGER.error(
-            "mylink failed to setup because of an error: %s",
+            "Somfy Mylink failed to setup because of an error: %s",
             mylink_status.get("error", {}).get(
                 "message", "Empty response from mylink device"
             ),

@@ -1330,7 +1330,7 @@ class SonosEntity(MediaPlayerEntity):
             if one_alarm._alarm_id == str(alarm_id):
                 alarm = one_alarm
         if alarm is None:
-            _LOGGER.warning("did not find alarm with id %s", alarm_id)
+            _LOGGER.warning("Did not find alarm with id %s", alarm_id)
             return
         if time is not None:
             alarm.start_time = time

@@ -116,7 +116,7 @@ class StarlineAccount:
 
     def unload(self):
         """Unload StarLine API."""
-        _LOGGER.debug("Unloading StarLine API.")
+        _LOGGER.debug("Unloading StarLine API")
         if self._unsubscribe_auto_updater is not None:
             self._unsubscribe_auto_updater()
             self._unsubscribe_auto_updater = None
@@ -107,7 +107,7 @@ class SubaruConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
         )
         _LOGGER.debug("Using subarulink %s", self.controller.version)
         _LOGGER.debug(
-            "Setting up first time connection to Subuaru API. This may take up to 20 seconds."
+            "Setting up first time connection to Subuaru API; This may take up to 20 seconds"
         )
         if await self.controller.connect():
             _LOGGER.debug("Successfully authenticated and authorized with Subaru API")

@@ -41,7 +41,7 @@ async def async_setup_platform(hass, config, async_add_entities, discovery_info=
     """Set up the SyncThru component."""
     _LOGGER.warning(
         "Loading syncthru via platform config is deprecated and no longer "
-        "necessary as of 0.113. Please remove it from your configuration YAML."
+        "necessary as of 0.113; Please remove it from your configuration YAML"
    )
     hass.async_create_task(
         hass.config_entries.flow.async_init(

@@ -36,7 +36,7 @@ def async_register_info(
     Deprecated.
     """
     _LOGGER.warning(
-        "system_health.async_register_info is deprecated. Add a system_health platform instead."
+        "Calling system_health.async_register_info is deprecated; Add a system_health platform instead"
     )
     hass.data.setdefault(DOMAIN, {})
     SystemHealthRegistration(hass, domain).async_register_info(info_callback)

@@ -174,7 +174,7 @@ def setup_platform(hass, config, add_entities, discovery_info=None):
         # If not, do not create the entity and add a warning to the log
         if resource[CONF_TYPE] == "processor_temperature":
             if SystemMonitorSensor.read_cpu_temperature() is None:
-                _LOGGER.warning("Cannot read CPU / processor temperature information.")
+                _LOGGER.warning("Cannot read CPU / processor temperature information")
                 continue
 
         dev.append(SystemMonitorSensor(resource[CONF_TYPE], resource[CONF_ARG]))
@@ -42,7 +42,7 @@ async def async_setup_platform(hass, config):
     if (last_error_date is not None) and (isinstance(last_error_date, int)):
         last_error_date = dt.datetime.fromtimestamp(last_error_date)
         _LOGGER.info(
-            "telegram webhook last_error_date: %s. Status: %s",
+            "Telegram webhook last_error_date: %s. Status: %s",
             last_error_date,
             current_status,
         )

@@ -336,7 +336,7 @@ class TensorFlowImageProcessor(ImageProcessingEntity):
         """Process the image."""
         model = self.hass.data[DOMAIN][CONF_MODEL]
         if not model:
-            _LOGGER.debug("Model not yet ready.")
+            _LOGGER.debug("Model not yet ready")
            return
 
         start = time.perf_counter()

@@ -201,7 +201,7 @@ class TwitterNotificationService(BaseNotificationService):
             method_override="GET",
         )
         if resp.status_code != HTTP_OK:
-            _LOGGER.error("media processing error: %s", resp.json())
+            _LOGGER.error("Media processing error: %s", resp.json())
         processing_info = resp.json()["processing_info"]
 
         _LOGGER.debug("media processing %s status: %s", media_id, processing_info)

@@ -127,7 +127,7 @@ async def async_setup(hass: HomeAssistantType, config) -> bool:
 
     _LOGGER.warning(
         "Loading upcloud via top level config is deprecated and no longer "
-        "necessary as of 0.117. Please remove it from your YAML configuration."
+        "necessary as of 0.117; Please remove it from your YAML configuration"
     )
     hass.async_create_task(
         hass.config_entries.flow.async_init(
@@ -273,7 +273,8 @@ class VizioConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
         if errors and self.context["source"] == SOURCE_IMPORT:
             # Log an error message if import config flow fails since otherwise failure is silent
             _LOGGER.error(
-                "configuration.yaml import failure: %s", ", ".join(errors.values())
+                "Importing from configuration.yaml failed: %s",
+                ", ".join(errors.values()),
             )
 
         return self.async_show_form(step_id="user", data_schema=schema, errors=errors)

@@ -89,7 +89,7 @@ async def async_handle_webhook(hass, webhook_id, request):
         # Look at content to provide some context for received webhook
         # Limit to 64 chars to avoid flooding the log
         content = await request.content.read(64)
-        _LOGGER.debug("%s...", content)
+        _LOGGER.debug("%s", content)
         return Response(status=HTTP_OK)
 
     try:

@@ -386,7 +386,7 @@ class LgWebOSMediaPlayerEntity(MediaPlayerEntity):
         _LOGGER.debug("Call play media type <%s>, Id <%s>", media_type, media_id)
 
         if media_type == MEDIA_TYPE_CHANNEL:
-            _LOGGER.debug("Searching channel...")
+            _LOGGER.debug("Searching channel")
             partial_match_channel_id = None
             perfect_match_channel_id = None
 
@@ -113,7 +113,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
 
     static_conf = config.get(CONF_STATIC, [])
     if static_conf:
-        _LOGGER.debug("Adding statically configured WeMo devices...")
+        _LOGGER.debug("Adding statically configured WeMo devices")
         for device in await asyncio.gather(
             *[
                 hass.async_add_executor_job(validate_static_config, host, port)
@@ -190,7 +190,7 @@ class WemoDiscovery:
 
     async def async_discover_and_schedule(self, *_) -> None:
         """Periodically scan the network looking for WeMo devices."""
-        _LOGGER.debug("Scanning network for WeMo devices...")
+        _LOGGER.debug("Scanning network for WeMo devices")
         try:
             for device in await self._hass.async_add_executor_job(
                 pywemo.discover_devices

@@ -150,7 +150,7 @@ class WirelessTagPlatform:
 
     def handle_update_tags_event(self, event):
         """Handle push event from wireless tag manager."""
-        _LOGGER.info("push notification for update arrived: %s", event)
+        _LOGGER.info("Push notification for update arrived: %s", event)
         try:
             tag_id = event.data.get("id")
             mac = event.data.get("mac")

@@ -127,7 +127,7 @@ SERVICE_TO_METHOD = {
 async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
     """Import Miio configuration from YAML."""
     _LOGGER.warning(
-        "Loading Xiaomi Miio Switch via platform setup is deprecated. Please remove it from your configuration."
+        "Loading Xiaomi Miio Switch via platform setup is deprecated; Please remove it from your configuration"
     )
     hass.async_create_task(
         hass.config_entries.flow.async_init(
@@ -122,7 +122,7 @@ STATE_CODE_TO_STATE = {
 async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
     """Import Miio configuration from YAML."""
     _LOGGER.warning(
-        "Loading Xiaomi Miio Vacuum via platform setup is deprecated. Please remove it from your configuration."
+        "Loading Xiaomi Miio Vacuum via platform setup is deprecated; Please remove it from your configuration"
     )
     hass.async_create_task(
         hass.config_entries.flow.async_init(

@@ -68,7 +68,7 @@ def setup(hass, config):
         )
         return False
 
-    _LOGGER.debug("Establishing connection to XS1 gateway and retrieving data...")
+    _LOGGER.debug("Establishing connection to XS1 gateway and retrieving data")
 
     hass.data[DOMAIN] = {}
 
@@ -78,7 +78,7 @@ def setup(hass, config):
     hass.data[DOMAIN][ACTUATORS] = actuators
     hass.data[DOMAIN][SENSORS] = sensors
 
-    _LOGGER.debug("Loading platforms for XS1 integration...")
+    _LOGGER.debug("Loading platforms for XS1 integration")
     # Load platforms for supported devices
     for platform in PLATFORMS:
         discovery.load_platform(hass, platform, DOMAIN, {}, config)

@@ -37,7 +37,7 @@ def setup_platform(hass, config, add_entities, discovery_info=None):
 
     zapi = hass.data[zabbix.DOMAIN]
     if not zapi:
-        _LOGGER.error("zapi is None. Zabbix integration hasn't been loaded?")
+        _LOGGER.error("Zabbix integration hasn't been loaded? zapi is None")
         return False
 
     _LOGGER.info("Connected to Zabbix API Version %s", zapi.api_version())
@@ -43,7 +43,7 @@ def _report(what: str) -> None:
 
     if not integration_frame:
         _LOGGER.warning(
-            "Detected code that %s. Please report this issue.", what, stack_info=True
+            "Detected code that %s; Please report this issue", what, stack_info=True
         )
         return
 

@@ -889,9 +889,7 @@ async def async_setup_entry(hass, config_entry):
                 continue
             network.manager.pressButton(value.value_id)
             network.manager.releaseButton(value.value_id)
-            _LOGGER.info(
-                "Resetting meters on node %s instance %s....", node_id, instance
-            )
+            _LOGGER.info("Resetting meters on node %s instance %s", node_id, instance)
             return
         _LOGGER.info(
             "Node %s on instance %s does not have resettable meters", node_id, instance
@@ -915,7 +913,7 @@ async def async_setup_entry(hass, config_entry):
 
     def start_zwave(_service_or_event):
         """Startup Z-Wave network."""
-        _LOGGER.info("Starting Z-Wave network...")
+        _LOGGER.info("Starting Z-Wave network")
         network.start()
         hass.bus.fire(const.EVENT_NETWORK_START)
 
@@ -939,7 +937,7 @@ async def async_setup_entry(hass, config_entry):
                     "Z-Wave not ready after %d seconds, continuing anyway", waited
                 )
                 _LOGGER.info(
-                    "final network state: %d %s", network.state, network.state_str
+                    "Final network state: %d %s", network.state, network.state_str
                 )
                 break
 
@@ -143,7 +143,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
             async_on_node_ready(node)
             return
         # if node is not yet ready, register one-time callback for ready state
-        LOGGER.debug("Node added: %s - waiting for it to become ready.", node.node_id)
+        LOGGER.debug("Node added: %s - waiting for it to become ready", node.node_id)
         node.once(
             "ready",
             lambda event: async_on_node_ready(event["node"]),

@@ -36,8 +36,8 @@ def async_migrate_entity(
     except ValueError:
         _LOGGER.debug(
             (
-                "Entity %s can't be migrated because the unique ID is taken. "
-                "Cleaning it up since it is likely no longer valid."
+                "Entity %s can't be migrated because the unique ID is taken; "
+                "Cleaning it up since it is likely no longer valid"
             ),
             entity_id,
         )
Some files were not shown because too many files have changed in this diff.