String formatting and max line length - Part 4

Franck Nijhof 2022-12-22 11:55:28 +01:00
parent cb13418bab
commit 8ccba5a2c3
GPG key ID: D62583BA8AB11CA3 (no known key found for this signature in database)
51 changed files with 289 additions and 113 deletions
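The change applied throughout this commit is the same in every file: an over-long string literal is split into adjacent fragments that Python joins through implicit concatenation, wrapped in parentheses so each physical line stays under the formatter's line-length limit. The snippet below is a minimal illustration of that pattern only, not code from this commit; the logger, entity id and range values are made up for the example.

    import logging

    _LOGGER = logging.getLogger(__name__)

    # Hypothetical values, for illustration only.
    entity_id = "input_number.target_temperature"
    minimum, maximum = 7, 35

    # Before: one f-string that overruns the line-length limit.
    # message = f"Invalid value for {entity_id}: out of range ({minimum} - {maximum})"

    # After: the string is split with implicit concatenation inside parentheses.
    # The continuation fragment starts with a leading space so the joined text
    # stays identical to the original.
    message = (
        f"Invalid value for {entity_id}: out of range"
        f" ({minimum} - {maximum})"
    )

    # For lazy %-style logging calls the fragments get their own parentheses;
    # this only groups the string literals, it does not create a tuple.
    _LOGGER.warning(
        (
            "Invalid value received for %s, the update has been"
            " ignored"
        ),
        entity_id,
    )

    print(message)

One pitfall worth noting: dropping the space at the split point silently changes the message, which is why the continuation fragments in this commit all begin with a leading space.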

View file

@@ -307,7 +307,8 @@ class InputNumber(collection.CollectionEntity, RestoreEntity):
         if num_value < self._minimum or num_value > self._maximum:
             raise vol.Invalid(
-                f"Invalid value for {self.entity_id}: {value} (range {self._minimum} - {self._maximum})"
+                f"Invalid value for {self.entity_id}: {value} (range {self._minimum} -"
+                f" {self._maximum})"
             )
         self._current_value = num_value

View file

@ -73,7 +73,10 @@ def _remove_duplicates(options: list[str], name: str | None) -> list[str]:
# Reject YAML configured input_select with duplicates from 2022.6 # Reject YAML configured input_select with duplicates from 2022.6
if len(unique_options) != len(options): if len(unique_options) != len(options):
_LOGGER.warning( _LOGGER.warning(
"Input select '%s' with options %s had duplicated options, the duplicates have been removed", (
"Input select '%s' with options %s had duplicated options, the"
" duplicates have been removed"
),
name or "<unnamed>", name or "<unnamed>",
options, options,
) )

View file

@@ -83,9 +83,15 @@ class InsteonEntity(Entity):
         return DeviceInfo(
             identifiers={(DOMAIN, str(self._insteon_device.address))},
             manufacturer="SmartLabs, Inc",
-            model=f"{self._insteon_device.model} ({self._insteon_device.cat!r}, 0x{self._insteon_device.subcat:02x})",
+            model=(
+                f"{self._insteon_device.model} ({self._insteon_device.cat!r},"
+                f" 0x{self._insteon_device.subcat:02x})"
+            ),
             name=f"{self._insteon_device.description} {self._insteon_device.address}",
-            sw_version=f"{self._insteon_device.firmware:02x} Engine Version: {self._insteon_device.engine_version}",
+            sw_version=(
+                f"{self._insteon_device.firmware:02x} Engine Version:"
+                f" {self._insteon_device.engine_version}"
+            ),
             via_device=(DOMAIN, str(devices.modem.address)),
         )

View file

@@ -158,7 +158,10 @@ class IPMAWeather(WeatherEntity):
     @property
     def unique_id(self) -> str:
         """Return a unique id."""
-        return f"{self._location.station_latitude}, {self._location.station_longitude}, {self._mode}"
+        return (
+            f"{self._location.station_latitude}, {self._location.station_longitude},"
+            f" {self._mode}"
+        )

     @property
     def name(self):

View file

@@ -90,7 +90,8 @@ class IPPFlowHandler(ConfigFlow, domain=DOMAIN):
         if not unique_id and info[CONF_SERIAL]:
             _LOGGER.debug(
-                "Printer UUID is missing from IPP response. Falling back to IPP serial number"
+                "Printer UUID is missing from IPP response. Falling back to IPP serial"
+                " number"
             )
             unique_id = info[CONF_SERIAL]
         elif not unique_id:
@@ -154,7 +155,8 @@ class IPPFlowHandler(ConfigFlow, domain=DOMAIN):
             unique_id = self.discovery_info[CONF_UUID] = info[CONF_UUID]
         elif not unique_id and info[CONF_SERIAL]:
             _LOGGER.debug(
-                "Printer UUID is missing from discovery info and IPP response. Falling back to IPP serial number"
+                "Printer UUID is missing from discovery info and IPP response. Falling"
+                " back to IPP serial number"
             )
             unique_id = info[CONF_SERIAL]
         elif not unique_id:

View file

@@ -101,7 +101,9 @@ class IPPMarkerSensor(IPPSensor):
             unique_id=unique_id,
             icon="mdi:water",
             key=f"marker_{marker_index}",
-            name=f"{coordinator.data.info.name} {coordinator.data.markers[marker_index].name}",
+            name=(
+                f"{coordinator.data.info.name} {coordinator.data.markers[marker_index].name}"
+            ),
             unit_of_measurement=PERCENTAGE,
         )

View file

@@ -105,11 +105,13 @@ class IslamicPrayerClient:
         if now > dt_util.as_utc(midnight_dt):
             next_update_at = midnight_dt + timedelta(days=1, minutes=1)
             _LOGGER.debug(
-                "Midnight is after day the changes so schedule update for after Midnight the next day"
+                "Midnight is after day the changes so schedule update for after"
+                " Midnight the next day"
             )
         else:
             _LOGGER.debug(
-                "Midnight is before the day changes so schedule update for the next start of day"
+                "Midnight is before the day changes so schedule update for the next"
+                " start of day"
             )
             next_update_at = dt_util.start_of_local_day(now + timedelta(days=1))

View file

@@ -180,7 +180,8 @@ async def async_setup_entry(
         await isy.initialize()
     except asyncio.TimeoutError as err:
         raise ConfigEntryNotReady(
-            f"Timed out initializing the ISY; device may be busy, trying again later: {err}"
+            "Timed out initializing the ISY; device may be busy, trying again later:"
+            f" {err}"
         ) from err
     except ISYInvalidAuthError as err:
         raise ConfigEntryAuthFailed(f"Invalid credentials for the ISY: {err}") from err
@@ -190,7 +191,8 @@ async def async_setup_entry(
         ) from err
     except ISYResponseParseError as err:
         raise ConfigEntryNotReady(
-            f"Invalid XML response from ISY; Ensure the ISY is running the latest firmware: {err}"
+            "Invalid XML response from ISY; Ensure the ISY is running the latest"
+            f" firmware: {err}"
         ) from err
     except TypeError as err:
         raise ConfigEntryNotReady(

View file

@@ -116,8 +116,10 @@ async def async_setup_entry(
         parent_entity = entities_by_address.get(node.parent_node.address)
         if not parent_entity:
             _LOGGER.error(
-                "Node %s has a parent node %s, but no device "
-                "was created for the parent. Skipping",
+                (
+                    "Node %s has a parent node %s, but no device "
+                    "was created for the parent. Skipping"
+                ),
                 node.address,
                 node.parent_node,
             )

View file

@@ -209,7 +209,8 @@ class ISYNodeEntity(ISYEntity):
         """Respond to an entity service command to request a Z-Wave device parameter from the ISY."""
         if not hasattr(self._node, "protocol") or self._node.protocol != PROTO_ZWAVE:
             raise HomeAssistantError(
-                f"Invalid service call: cannot request Z-Wave Parameter for non-Z-Wave device {self.entity_id}"
+                "Invalid service call: cannot request Z-Wave Parameter for non-Z-Wave"
+                f" device {self.entity_id}"
             )
         await self._node.get_zwave_parameter(parameter)
@@ -219,7 +220,8 @@ class ISYNodeEntity(ISYEntity):
         """Respond to an entity service command to set a Z-Wave device parameter via the ISY."""
         if not hasattr(self._node, "protocol") or self._node.protocol != PROTO_ZWAVE:
             raise HomeAssistantError(
-                f"Invalid service call: cannot set Z-Wave Parameter for non-Z-Wave device {self.entity_id}"
+                "Invalid service call: cannot set Z-Wave Parameter for non-Z-Wave"
+                f" device {self.entity_id}"
             )
         await self._node.set_zwave_parameter(parameter, value, size)
         await self._node.get_zwave_parameter(parameter)

View file

@@ -352,7 +352,10 @@ def _categorize_programs(hass_isy_data: dict, programs: Programs) -> None:
             actions = entity_folder.get_by_name(KEY_ACTIONS)
             if not actions or actions.protocol != PROTO_PROGRAM:
                 _LOGGER.warning(
-                    "Program %s entity '%s' not loaded, invalid/missing actions program",
+                    (
+                        "Program %s entity '%s' not loaded, invalid/missing actions"
+                        " program"
+                    ),
                     platform,
                     entity_folder.name,
                 )

View file

@@ -299,8 +299,10 @@ def async_setup_services(hass: HomeAssistant) -> None:  # noqa: C901
                 entity_registry.async_remove(entity_id)
         _LOGGER.debug(
-            "Cleaning up ISY994 Entities and devices: Config Entries: %s, Current Entries: %s, "
-            "Extra Entries Removed: %s",
+            (
+                "Cleaning up ISY994 Entities and devices: Config Entries: %s, Current"
+                " Entries: %s, Extra Entries Removed: %s"
+            ),
             len(config_ids),
             len(current_unique_ids),
             len(extra_entities),

View file

@ -31,7 +31,9 @@ class JuiceNetDevice(CoordinatorEntity):
def device_info(self) -> DeviceInfo: def device_info(self) -> DeviceInfo:
"""Return device information about this JuiceNet Device.""" """Return device information about this JuiceNet Device."""
return DeviceInfo( return DeviceInfo(
configuration_url=f"https://home.juice.net/Portal/Details?unitID={self.device.id}", configuration_url=(
f"https://home.juice.net/Portal/Details?unitID={self.device.id}"
),
identifiers={(DOMAIN, self.device.id)}, identifiers={(DOMAIN, self.device.id)},
manufacturer="JuiceNet", manufacturer="JuiceNet",
name=self.device.name, name=self.device.name,

View file

@@ -235,7 +235,9 @@ class KebaHandler(KebaKeContact):
             self._set_fast_polling()
         except (KeyError, ValueError) as ex:
             _LOGGER.warning(
-                "Values are not correct for: failsafe_timeout, failsafe_fallback and/or "
-                "failsafe_persist: %s",
+                (
+                    "Values are not correct for: failsafe_timeout, failsafe_fallback"
+                    " and/or failsafe_persist: %s"
+                ),
                 ex,
             )

View file

@@ -495,7 +495,10 @@ class KNXModule:
                 value = transcoder.from_knx(data)
             except ConversionError as err:
                 _LOGGER.warning(
-                    "Error in `knx_event` at decoding type '%s' from telegram %s\n%s",
+                    (
+                        "Error in `knx_event` at decoding type '%s' from"
+                        " telegram %s\n%s"
+                    ),
                     transcoder.__name__,
                     telegram,
                     err,
@@ -584,7 +587,10 @@ class KNXModule:
         if group_address in self.service_exposures:
             replaced_exposure = self.service_exposures.pop(group_address)
             _LOGGER.warning(
-                "Service exposure_register replacing already registered exposure for '%s' - %s",
+                (
+                    "Service exposure_register replacing already registered exposure"
+                    " for '%s' - %s"
+                ),
                 group_address,
                 replaced_exposure.device.name,
             )

View file

@@ -399,7 +399,10 @@ class KNXCommonFlow(ABC, FlowHandler):
                 ],
             )
             return self.finish_flow(
-                title=f"Secure Routing as {self.new_entry_data[CONF_KNX_INDIVIDUAL_ADDRESS]}"
+                title=(
+                    "Secure Routing as"
+                    f" {self.new_entry_data[CONF_KNX_INDIVIDUAL_ADDRESS]}"
+                )
             )

         fields = {
@@ -464,7 +467,10 @@ class KNXCommonFlow(ABC, FlowHandler):
                 self.new_entry_data[CONF_KNX_CONNECTION_TYPE]
                 == CONF_KNX_ROUTING_SECURE
             ):
-                title = f"Secure Routing as {self.new_entry_data[CONF_KNX_INDIVIDUAL_ADDRESS]}"
+                title = (
+                    "Secure Routing as"
+                    f" {self.new_entry_data[CONF_KNX_INDIVIDUAL_ADDRESS]}"
+                )
             else:
                 title = f"Secure Tunneling @ {self.new_entry_data[CONF_HOST]}"
             return self.finish_flow(title=title)

View file

@@ -74,7 +74,8 @@ def dpt_subclass_validator(dpt_base_class: type[DPTBase]) -> Callable[[Any], str
         ):
             return value
         raise vol.Invalid(
-            f"type '{value}' is not a valid DPT identifier for {dpt_base_class.__name__}."
+            f"type '{value}' is not a valid DPT identifier for"
+            f" {dpt_base_class.__name__}."
         )

     return dpt_value_validator
@@ -94,8 +95,9 @@ def ga_validator(value: Any) -> str | int:
         except CouldNotParseAddress:
             pass
     raise vol.Invalid(
-        f"value '{value}' is not a valid KNX group address '<main>/<middle>/<sub>', '<main>/<sub>' "
-        "or '<free>' (eg.'1/2/3', '9/234', '123'), nor xknx internal address 'i-<string>'."
+        f"value '{value}' is not a valid KNX group address '<main>/<middle>/<sub>',"
+        " '<main>/<sub>' or '<free>' (eg.'1/2/3', '9/234', '123'), nor xknx internal"
+        " address 'i-<string>'."
     )
@@ -104,7 +106,10 @@ ga_list_validator = vol.All(cv.ensure_list, [ga_validator])
 ia_validator = vol.Any(
     vol.All(str, str.strip, cv.matches_regex(IndividualAddress.ADDRESS_RE.pattern)),
     vol.All(vol.Coerce(int), vol.Range(min=1, max=65535)),
-    msg="value does not match pattern for KNX individual address '<area>.<line>.<device>' (eg.'1.1.100')",
+    msg=(
+        "value does not match pattern for KNX individual address"
+        " '<area>.<line>.<device>' (eg.'1.1.100')"
+    ),
 )
@@ -120,7 +125,8 @@ def ip_v4_validator(value: Any, multicast: bool | None = None) -> str:
         raise vol.Invalid(f"value '{value}' is not a valid IPv4 address: {ex}") from ex
     if multicast is not None and address.is_multicast != multicast:
         raise vol.Invalid(
-            f"value '{value}' is not a valid IPv4 {'multicast' if multicast else 'unicast'} address"
+            f"value '{value}' is not a valid IPv4"
+            f" {'multicast' if multicast else 'unicast'} address"
        )
     return str(address)
@@ -433,14 +439,18 @@ class ClimateSchema(KNXPlatformSchema):
             vol.Inclusive(
                 CONF_SETPOINT_SHIFT_ADDRESS,
                 "setpoint_shift",
-                msg="'setpoint_shift_address' and 'setpoint_shift_state_address' "
-                "are required for setpoint_shift configuration",
+                msg=(
+                    "'setpoint_shift_address' and 'setpoint_shift_state_address' "
+                    "are required for setpoint_shift configuration"
+                ),
             ): ga_list_validator,
             vol.Inclusive(
                 CONF_SETPOINT_SHIFT_STATE_ADDRESS,
                 "setpoint_shift",
-                msg="'setpoint_shift_address' and 'setpoint_shift_state_address' "
-                "are required for setpoint_shift configuration",
+                msg=(
+                    "'setpoint_shift_address' and 'setpoint_shift_state_address' "
+                    "are required for setpoint_shift configuration"
+                ),
             ): ga_list_validator,
             vol.Optional(CONF_SETPOINT_SHIFT_MODE): vol.Maybe(
                 vol.All(vol.Upper, cv.enum(SetpointShiftMode))
@@ -509,7 +519,10 @@ class CoverSchema(KNXPlatformSchema):
             {
                 vol.Required(
                     vol.Any(CONF_MOVE_LONG_ADDRESS, CONF_POSITION_ADDRESS),
-                    msg=f"At least one of '{CONF_MOVE_LONG_ADDRESS}' or '{CONF_POSITION_ADDRESS}' is required.",
+                    msg=(
+                        f"At least one of '{CONF_MOVE_LONG_ADDRESS}' or"
+                        f" '{CONF_POSITION_ADDRESS}' is required."
+                    ),
                 ): object,
             },
             extra=vol.ALLOW_EXTRA,
@@ -669,17 +682,26 @@ class LightSchema(KNXPlatformSchema):
                     vol.Inclusive(
                         CONF_RED,
                         "individual_colors",
-                        msg="'red', 'green' and 'blue' are required for individual colors configuration",
+                        msg=(
+                            "'red', 'green' and 'blue' are required for individual"
+                            " colors configuration"
+                        ),
                     ): INDIVIDUAL_COLOR_SCHEMA,
                     vol.Inclusive(
                         CONF_GREEN,
                         "individual_colors",
-                        msg="'red', 'green' and 'blue' are required for individual colors configuration",
+                        msg=(
+                            "'red', 'green' and 'blue' are required for individual"
+                            " colors configuration"
+                        ),
                     ): INDIVIDUAL_COLOR_SCHEMA,
                     vol.Inclusive(
                         CONF_BLUE,
                         "individual_colors",
-                        msg="'red', 'green' and 'blue' are required for individual colors configuration",
+                        msg=(
+                            "'red', 'green' and 'blue' are required for individual"
+                            " colors configuration"
+                        ),
                     ): INDIVIDUAL_COLOR_SCHEMA,
                     vol.Optional(CONF_WHITE): INDIVIDUAL_COLOR_SCHEMA,
                 },

View file

@@ -145,8 +145,10 @@ class AlarmPanel:
         self.connect_attempts = 0
         self.connected = True
         _LOGGER.info(
-            "Set up Konnected device %s. Open http://%s:%s in a "
-            "web browser to view device status",
+            (
+                "Set up Konnected device %s. Open http://%s:%s in a "
+                "web browser to view device status"
+            ),
             self.device_id,
             self.host,
             self.port,

View file

@@ -78,7 +78,8 @@ class KrakenData:
         except pykrakenapi.pykrakenapi.KrakenAPIError as error:
             if "Unknown asset pair" in str(error):
                 _LOGGER.info(
-                    "Kraken.com reported an unknown asset pair. Refreshing list of tradable asset pairs"
+                    "Kraken.com reported an unknown asset pair. Refreshing list of"
+                    " tradable asset pairs"
                 )
                 await self._async_refresh_tradable_asset_pairs()
             else:
@@ -87,7 +88,8 @@ class KrakenData:
             ) from error
         except pykrakenapi.pykrakenapi.CallRateLimitError:
             _LOGGER.warning(
-                "Exceeded the Kraken.com call rate limit. Increase the update interval to prevent this error"
+                "Exceeded the Kraken.com call rate limit. Increase the update interval"
+                " to prevent this error"
             )
         return None

View file

@@ -111,7 +111,8 @@ async def async_setup_entry(
         if description is None:
             message = (
                 f"Unsupported sensor field: {field}\nPlease create an issue on "
-                "GitHub. https://github.com/home-assistant/core/issues/new?assignees=&la"
+                "GitHub."
+                " https://github.com/home-assistant/core/issues/new?assignees=&la"
                 "bels=&template=bug_report.yml&integration_name=LaCrosse%20View&integrat"
                 "ion_link=https://www.home-assistant.io/integrations/lacrosse_view/&addi"
                 f"tional_information=Field:%20{field}%0ASensor%20Model:%20{sensor.model}&"

View file

@@ -99,8 +99,10 @@ async def async_setup_entry(
         return False
     except pypck.connection.PchkLicenseError:
         _LOGGER.warning(
-            'Maximum number of connections on PCHK "%s" was '
-            "reached. An additional license key is required",
+            (
+                'Maximum number of connections on PCHK "%s" was '
+                "reached. An additional license key is required"
+            ),
             config_entry.title,
         )
         return False
@@ -269,7 +271,10 @@ class LcnEntity(Entity):
     def device_info(self) -> DeviceInfo | None:
         """Return device specific attributes."""
         address = f"{'g' if self.address[2] else 'm'}{self.address[0]:03d}{self.address[1]:03d}"
-        model = f"LCN resource ({get_device_model(self.config[CONF_DOMAIN], self.config[CONF_DOMAIN_DATA])})"
+        model = (
+            "LCN resource"
+            f" ({get_device_model(self.config[CONF_DOMAIN], self.config[CONF_DOMAIN_DATA])})"
+        )

         return {
             "identifiers": {(DOMAIN, self.unique_id)},

View file

@@ -81,8 +81,10 @@ class LcnFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
         return self.async_abort(reason="authentication_error")
     except pypck.connection.PchkLicenseError:
         _LOGGER.warning(
-            'Maximum number of connections on PCHK "%s" was '
-            "reached. An additional license key is required",
+            (
+                'Maximum number of connections on PCHK "%s" was '
+                "reached. An additional license key is required"
+            ),
             host_name,
         )
         return self.async_abort(reason="license_error")

View file

@@ -104,7 +104,9 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
             {
                 vol.Required(CONF_ADDRESS): vol.In(
                     {
-                        service_info.address: f"{service_info.name} ({service_info.address})"
+                        service_info.address: (
+                            f"{service_info.name} ({service_info.address})"
+                        )
                         for service_info in self._discovered_devices.values()
                     }
                 ),

View file

@@ -152,16 +152,20 @@ class Life360DeviceTracker(
         if bad_last_seen or bad_accuracy:
             if bad_last_seen:
                 LOGGER.warning(
-                    "%s: Ignoring location update because "
-                    "last_seen (%s) < previous last_seen (%s)",
+                    (
+                        "%s: Ignoring location update because "
+                        "last_seen (%s) < previous last_seen (%s)"
+                    ),
                     self.entity_id,
                     last_seen,
                     prev_seen,
                 )
             if bad_accuracy:
                 LOGGER.warning(
-                    "%s: Ignoring location update because "
-                    "expected GPS accuracy (%0.1f) is not met: %i",
+                    (
+                        "%s: Ignoring location update because "
+                        "expected GPS accuracy (%0.1f) is not met: %i"
+                    ),
                     self.entity_id,
                     max_gps_acc,
                     self.location_accuracy,

View file

@@ -143,7 +143,10 @@ class LIFXDiscoveryManager:
         if migration_complete and migrating_was_in_progress:
             self.migrating = False
             _LOGGER.debug(
-                "LIFX migration complete, switching to normal discovery interval: %s",
+                (
+                    "LIFX migration complete, switching to normal discovery"
+                    " interval: %s"
+                ),
                 DISCOVERY_INTERVAL,
             )
             self.async_setup_discovery_interval()

View file

@@ -221,7 +221,10 @@ class LIFXLight(LIFXEntity, LightEntity):
                     Platform.SELECT, INFRARED_BRIGHTNESS
                 )
                 _LOGGER.warning(
-                    "The 'infrared' attribute of 'lifx.set_state' is deprecated: call 'select.select_option' targeting '%s' instead",
+                    (
+                        "The 'infrared' attribute of 'lifx.set_state' is deprecated:"
+                        " call 'select.select_option' targeting '%s' instead"
+                    ),
                     infrared_entity_id,
                 )
                 bulb.set_infrared(convert_8_to_16(kwargs[ATTR_INFRARED]))

View file

@@ -110,7 +110,8 @@ def find_hsbk(hass: HomeAssistant, **kwargs: Any) -> list[float | int | None] |
     if ATTR_KELVIN in kwargs:
         _LOGGER.warning(
-            "The 'kelvin' parameter is deprecated. Please use 'color_temp_kelvin' for all service calls"
+            "The 'kelvin' parameter is deprecated. Please use 'color_temp_kelvin' for"
+            " all service calls"
         )
         kelvin = kwargs.pop(ATTR_KELVIN)
         saturation = 0

View file

@@ -89,8 +89,9 @@ DEPRECATED_GROUP = [
 ]

 DEPRECATION_WARNING = (
-    "The use of other attributes than device state attributes is deprecated and will be removed in a future release. "
-    "Invalid attributes are %s. Read the logs for further details: https://www.home-assistant.io/integrations/scene/"
+    "The use of other attributes than device state attributes is deprecated and will be"
+    " removed in a future release. Invalid attributes are %s. Read the logs for further"
+    " details: https://www.home-assistant.io/integrations/scene/"
 )

View file

@@ -144,8 +144,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
         persistent_notification.create(
             hass,
             (
-                f"Error: The cached access tokens are missing from {DEFAULT_CACHEDB}.<br />"
-                f"Please unload then re-add the Logi Circle integration to resolve."
+                "Error: The cached access tokens are missing from"
+                f" {DEFAULT_CACHEDB}.<br />Please unload then re-add the Logi Circle"
+                " integration to resolve."
             ),
             title=NOTIFICATION_TITLE,
             notification_id=NOTIFICATION_ID,
@@ -160,10 +161,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     except AuthorizationFailed:
         persistent_notification.create(
             hass,
-            "Error: Failed to obtain an access token from the cached "
-            "refresh token.<br />"
-            "Token may have expired or been revoked.<br />"
-            "Please unload then re-add the Logi Circle integration to resolve",
+            (
+                "Error: Failed to obtain an access token from the cached "
+                "refresh token.<br />"
+                "Token may have expired or been revoked.<br />"
+                "Please unload then re-add the Logi Circle integration to resolve"
+            ),
             title=NOTIFICATION_TITLE,
             notification_id=NOTIFICATION_ID,
         )

View file

@@ -113,7 +113,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
         if yaml_resources is not None:
             _LOGGER.warning(
-                "Lovelace is running in storage mode. Define resources via user interface"
+                "Lovelace is running in storage mode. Define resources via user"
+                " interface"
             )

         resource_collection = resources.ResourceStorageCollection(hass, default_config)
@@ -220,7 +221,8 @@ async def create_yaml_resource_col(hass, yaml_resources):
     else:
         if CONF_RESOURCES in ll_conf:
             _LOGGER.warning(
-                "Resources need to be specified in your configuration.yaml. Please see the docs"
+                "Resources need to be specified in your configuration.yaml. Please"
+                " see the docs"
             )
             yaml_resources = ll_conf[CONF_RESOURCES]

View file

@@ -116,7 +116,9 @@ class SensorCommunitySensor(CoordinatorEntity, SensorEntity):
             ATTR_SENSOR_ID: sensor_id,
         }
         self._attr_device_info = DeviceInfo(
-            configuration_url=f"https://devices.sensor.community/sensors/{sensor_id}/settings",
+            configuration_url=(
+                f"https://devices.sensor.community/sensors/{sensor_id}/settings"
+            ),
             identifiers={(DOMAIN, str(sensor_id))},
             name=f"Sensor {sensor_id}",
             manufacturer="Sensor.Community",

View file

@@ -51,7 +51,9 @@ def async_describe_events(
         if rev_button_map is None:
             return {
                 LOGBOOK_ENTRY_NAME: f"{data[ATTR_AREA_NAME]} {data[ATTR_DEVICE_NAME]}",
-                LOGBOOK_ENTRY_MESSAGE: f"{data[ATTR_ACTION]} Error retrieving button description",
+                LOGBOOK_ENTRY_MESSAGE: (
+                    f"{data[ATTR_ACTION]} Error retrieving button description"
+                ),
             }

         button_description = rev_button_map.get(leap_button_number)

View file

@@ -277,7 +277,8 @@ class LyricClimate(LyricDeviceEntity, ClimateEntity):
         if device.changeableValues.autoChangeoverActive:
             if target_temp_low is None or target_temp_high is None:
                 raise HomeAssistantError(
-                    "Could not find target_temp_low and/or target_temp_high in arguments"
+                    "Could not find target_temp_low and/or target_temp_high in"
+                    " arguments"
                 )
             _LOGGER.debug("Set temperature: %s - %s", target_temp_low, target_temp_high)
             try:

View file

@@ -168,12 +168,18 @@ class MagicSeaweedSensor(SensorEntity):
         elif sensor_type == "max_breaking_swell":
             self._attr_native_value = forecast.swell_maxBreakingHeight
         elif sensor_type == "swell_forecast":
-            summary = f"{forecast.swell_minBreakingHeight} - {forecast.swell_maxBreakingHeight}"
+            summary = (
+                f"{forecast.swell_minBreakingHeight} -"
+                f" {forecast.swell_maxBreakingHeight}"
+            )
             self._attr_native_value = summary
             if self.hour is None:
                 for hour, data in self.data.hourly.items():
                     occurs = hour
-                    hr_summary = f"{data.swell_minBreakingHeight} - {data.swell_maxBreakingHeight} {data.swell_unit}"
+                    hr_summary = (
+                        f"{data.swell_minBreakingHeight} -"
+                        f" {data.swell_maxBreakingHeight} {data.swell_unit}"
+                    )
                     self._attr_extra_state_attributes[occurs] = hr_summary

         if sensor_type != "swell_forecast":

View file

@ -7,7 +7,9 @@ config_entry_flow.register_webhook_flow(
DOMAIN, DOMAIN,
"Mailgun Webhook", "Mailgun Webhook",
{ {
"mailgun_url": "https://documentation.mailgun.com/en/latest/user_manual.html#webhooks", "mailgun_url": (
"https://documentation.mailgun.com/en/latest/user_manual.html#webhooks"
),
"docs_url": "https://www.home-assistant.io/integrations/mailgun/", "docs_url": "https://www.home-assistant.io/integrations/mailgun/",
}, },
) )

View file

@@ -69,7 +69,10 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool:
         _LOGGER.error("Unable to connect to Max!Cube gateway: %s", str(ex))
         persistent_notification.create(
             hass,
-            f"Error: {ex}<br />You will need to restart Home Assistant after fixing.",
+            (
+                f"Error: {ex}<br />You will need to restart Home Assistant after"
+                " fixing."
+            ),
             title=NOTIFICATION_TITLE,
             notification_id=NOTIFICATION_ID,
         )

View file

@@ -393,12 +393,14 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
     elif value[ATTR_MEDIA_ENQUEUE] is True:
         value[ATTR_MEDIA_ENQUEUE] = MediaPlayerEnqueue.ADD
         _LOGGER.warning(
-            "Playing media with enqueue set to True is deprecated. Use 'add' instead"
+            "Playing media with enqueue set to True is deprecated. Use 'add'"
+            " instead"
         )
     elif value[ATTR_MEDIA_ENQUEUE] is False:
         value[ATTR_MEDIA_ENQUEUE] = MediaPlayerEnqueue.PLAY
         _LOGGER.warning(
-            "Playing media with enqueue set to False is deprecated. Use 'play' instead"
+            "Playing media with enqueue set to False is deprecated. Use 'play'"
+            " instead"
         )
     return value

View file

@ -240,7 +240,8 @@ class AtaDeviceClimate(MelCloudClimate):
"""Set horizontal vane position.""" """Set horizontal vane position."""
if position not in self._device.vane_horizontal_positions: if position not in self._device.vane_horizontal_positions:
raise ValueError( raise ValueError(
f"Invalid horizontal vane position {position}. Valid positions: [{self._device.vane_horizontal_positions}]." f"Invalid horizontal vane position {position}. Valid positions:"
f" [{self._device.vane_horizontal_positions}]."
) )
await self._device.set({ata.PROPERTY_VANE_HORIZONTAL: position}) await self._device.set({ata.PROPERTY_VANE_HORIZONTAL: position})
@ -248,7 +249,8 @@ class AtaDeviceClimate(MelCloudClimate):
"""Set vertical vane position.""" """Set vertical vane position."""
if position not in self._device.vane_vertical_positions: if position not in self._device.vane_vertical_positions:
raise ValueError( raise ValueError(
f"Invalid vertical vane position {position}. Valid positions: [{self._device.vane_vertical_positions}]." f"Invalid vertical vane position {position}. Valid positions:"
f" [{self._device.vane_vertical_positions}]."
) )
await self._device.set({ata.PROPERTY_VANE_VERTICAL: position}) await self._device.set({ata.PROPERTY_VANE_VERTICAL: position})

View file

@@ -36,8 +36,7 @@ from . import MetDataUpdateCoordinator
 from .const import ATTR_MAP, CONDITIONS_MAP, CONF_TRACK_HOME, DOMAIN, FORECAST_MAP

 ATTRIBUTION = (
-    "Weather forecast from met.no, delivered by the Norwegian "
-    "Meteorological Institute."
+    "Weather forecast from met.no, delivered by the Norwegian Meteorological Institute."
 )
 DEFAULT_NAME = "Met.no"

View file

@@ -139,13 +139,19 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
             hass.data[DOMAIN][department] = True
         else:
             _LOGGER.warning(
-                "Weather alert for department %s won't be added with city %s, as it has already been added within another city",
+                (
+                    "Weather alert for department %s won't be added with city %s, as it"
+                    " has already been added within another city"
+                ),
                 department,
                 entry.title,
             )
     else:
         _LOGGER.warning(
-            "Weather alert not available: The city %s is not in metropolitan France or Andorre",
+            (
+                "Weather alert not available: The city %s is not in metropolitan France"
+                " or Andorre"
+            ),
             entry.title,
         )
@@ -172,7 +178,10 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
         ].data.position.get("dept")
         hass.data[DOMAIN][department] = False
         _LOGGER.debug(
-            "Weather alert for depatment %s unloaded and released. It can be added now by another city",
+            (
+                "Weather alert for depatment %s unloaded and released. It can be added"
+                " now by another city"
+            ),
             department,
         )

View file

@ -143,7 +143,10 @@ class MinecraftServer:
self.online = True self.online = True
except OSError as error: except OSError as error:
_LOGGER.debug( _LOGGER.debug(
"Error occurred while trying to check the connection to '%s:%s' - OSError: %s", (
"Error occurred while trying to check the connection to '%s:%s' -"
" OSError: %s"
),
self.host, self.host,
self.port, self.port,
error, error,

View file

@@ -268,8 +268,10 @@ async def webhook_call_service(
         )
     except (vol.Invalid, ServiceNotFound, Exception) as ex:
         _LOGGER.error(
-            "Error when calling service during mobile_app "
-            "webhook (device name: %s): %s",
+            (
+                "Error when calling service during mobile_app "
+                "webhook (device name: %s): %s"
+            ),
             config_entry.data[ATTR_DEVICE_NAME],
             ex,
         )

View file

@@ -322,7 +322,10 @@ class BaseSwitch(BasePlatform, ToggleEntity, RestoreEntity):
             self._attr_is_on = False
         elif value is not None:
             _LOGGER.error(
-                "Unexpected response from modbus device slave %s register %s, got 0x%2x",
+                (
+                    "Unexpected response from modbus device slave %s register %s,"
+                    " got 0x%2x"
+                ),
                 self._slave,
                 self._verify_address,
                 value,

View file

@@ -111,7 +111,7 @@ def struct_validator(config: dict[str, Any]) -> dict[str, Any]:
     if count < regs_needed or (count % regs_needed) != 0:
         raise vol.Invalid(
             f"Error in sensor {name} swap({swap_type}) "
-            f"not possible due to the registers "
+            "not possible due to the registers "
             f"count: {count}, needed: {regs_needed}"
         )
@@ -153,8 +153,10 @@ def scan_interval_validator(config: dict) -> dict:
             continue
         if scan_interval < 5:
             _LOGGER.warning(
-                "%s %s scan_interval(%d) is lower than 5 seconds, "
-                "which may cause Home Assistant stability issues",
+                (
+                    "%s %s scan_interval(%d) is lower than 5 seconds, "
+                    "which may cause Home Assistant stability issues"
+                ),
                 component,
                 entry.get(CONF_NAME),
                 scan_interval,
@@ -198,11 +200,17 @@ def duplicate_entity_validator(config: dict) -> dict:
                     addr += "_" + str(entry[CONF_COMMAND_OFF])
                 addr += "_" + str(entry.get(CONF_SLAVE, 0))
                 if addr in addresses:
-                    err = f"Modbus {component}/{name} address {addr} is duplicate, second entry not loaded!"
+                    err = (
+                        f"Modbus {component}/{name} address {addr} is duplicate, second"
+                        " entry not loaded!"
+                    )
                     _LOGGER.warning(err)
                     errors.append(index)
                 elif name in names:
-                    err = f"Modbus {component}/{name}  is duplicate, second entry not loaded!"
+                    err = (
+                        f"Modbus {component}/{name}  is duplicate, second entry not"
+                        " loaded!"
+                    )
                     _LOGGER.warning(err)
                     errors.append(index)
                 else:

View file

@ -145,5 +145,8 @@ class ModernFormsDeviceEntity(CoordinatorEntity[ModernFormsDataUpdateCoordinator
name=self.coordinator.data.info.device_name, name=self.coordinator.data.info.device_name,
manufacturer="Modern Forms", manufacturer="Modern Forms",
model=self.coordinator.data.info.fan_type, model=self.coordinator.data.info.fan_type,
sw_version=f"{self.coordinator.data.info.firmware_version} / {self.coordinator.data.info.main_mcu_firmware_version}", sw_version=(
f"{self.coordinator.data.info.firmware_version} /"
f" {self.coordinator.data.info.main_mcu_firmware_version}"
),
) )

View file

@@ -46,7 +46,10 @@ class Alpha2HeatControlValveOpeningSensor(
         self._attr_unique_id = f"{heat_control_id}:valve_opening"
         heat_control = self.coordinator.data["heat_controls"][heat_control_id]
         heat_area = self.coordinator.data["heat_areas"][heat_control["_HEATAREA_ID"]]
-        self._attr_name = f"{heat_area['HEATAREA_NAME']} heat control {heat_control['NR']} valve opening"
+        self._attr_name = (
+            f"{heat_area['HEATAREA_NAME']} heat control {heat_control['NR']} valve"
+            " opening"
+        )

     @property
     def native_value(self) -> int:

View file

@@ -135,8 +135,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
         data = {**entry.data, CONF_INTERFACE: working_interface}
         hass.config_entries.async_update_entry(entry, data=data)
         _LOGGER.debug(
-            "Motion Blinds interface updated from %s to %s, "
-            "this should only occur after a network change",
+            (
+                "Motion Blinds interface updated from %s to %s, "
+                "this should only occur after a network change"
+            ),
             multicast_interface,
             working_interface,
         )

View file

@@ -133,7 +133,10 @@ class ConnectMotionGateway:
                 return interface

         _LOGGER.error(
-            "Could not find working interface for Motion Blinds host %s, using interface '%s'",
+            (
+                "Could not find working interface for Motion Blinds host %s, using"
+                " interface '%s'"
+            ),
             host,
             self._interface,
         )

View file

@@ -89,8 +89,8 @@ SERVICE_SET_TEXT_OVERLAY: Final = "set_text_overlay"
 SERVICE_ACTION: Final = "action"
 SERVICE_SNAPSHOT: Final = "snapshot"

-SIGNAL_CAMERA_ADD: Final = f"{DOMAIN}_camera_add_signal." "{}"
-SIGNAL_CAMERA_REMOVE: Final = f"{DOMAIN}_camera_remove_signal." "{}"
+SIGNAL_CAMERA_ADD: Final = f"{DOMAIN}_camera_add_signal.{{}}"
+SIGNAL_CAMERA_REMOVE: Final = f"{DOMAIN}_camera_remove_signal.{{}}"

 TYPE_MOTIONEYE_ACTION_SENSOR = f"{DOMAIN}_action_sensor"
 TYPE_MOTIONEYE_MJPEG_CAMERA: Final = "motioneye_mjpeg_camera"

View file

@@ -240,8 +240,10 @@ def _filter_entry_config(hass: HomeAssistant, entry: ConfigEntry) -> None:
     }
     if entry.data.keys() != filtered_data.keys():
         _LOGGER.warning(
-            "The following unsupported configuration options were removed from the "
-            "MQTT config entry: %s",
+            (
+                "The following unsupported configuration options were removed from the "
+                "MQTT config entry: %s"
+            ),
             entry.data.keys() - filtered_data.keys(),
         )
         hass.config_entries.async_update_entry(entry, data=filtered_data)
@@ -329,8 +331,10 @@ async def async_fetch_config(
             override[CONF_CLIENT_KEY] = "-----PRIVATE KEY-----"
         if override:
             _LOGGER.warning(
-                "Deprecated configuration settings found in configuration.yaml. "
-                "These settings from your configuration entry will override: %s",
+                (
+                    "Deprecated configuration settings found in configuration.yaml. "
+                    "These settings from your configuration entry will override: %s"
+                ),
                 override,
             )

     # Register a repair issue
@@ -389,16 +393,20 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
             msg_topic = valid_publish_topic(rendered_topic)
         except (jinja2.TemplateError, TemplateError) as exc:
             _LOGGER.error(
-                "Unable to publish: rendering topic template of %s "
-                "failed because %s",
+                (
+                    "Unable to publish: rendering topic template of %s "
+                    "failed because %s"
+                ),
                 msg_topic_template,
                 exc,
             )
             return
         except vol.Invalid as err:
             _LOGGER.error(
-                "Unable to publish: topic template '%s' produced an "
-                "invalid topic '%s' after rendering (%s)",
+                (
+                    "Unable to publish: topic template '%s' produced an "
+                    "invalid topic '%s' after rendering (%s)"
+                ),
                 msg_topic_template,
                 rendered_topic,
                 err,
@@ -412,8 +420,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
             ).async_render()
         except (jinja2.TemplateError, TemplateError) as exc:
             _LOGGER.error(
-                "Unable to publish to %s: rendering payload template of "
-                "%s failed because %s",
+                (
+                    "Unable to publish to %s: rendering payload template of "
+                    "%s failed because %s"
+                ),
                 msg_topic,
                 payload_template,
                 exc,

View file

@@ -145,7 +145,10 @@ class MqttBinarySensor(MqttEntity, BinarySensorEntity, RestoreEntity):
                 self.hass, self._value_is_expired, expiration_at
             )
             _LOGGER.debug(
-                "State recovered after reload for %s, remaining time before expiring %s",
+                (
+                    "State recovered after reload for %s, remaining time before"
+                    " expiring %s"
+                ),
                 self.entity_id,
                 expiration_at - time_now,
             )
@@ -215,7 +218,10 @@ class MqttBinarySensor(MqttEntity, BinarySensorEntity, RestoreEntity):
             payload = self._value_template(msg.payload)
             if not payload.strip():  # No output from template, ignore
                 _LOGGER.debug(
-                    "Empty template output for entity: %s with state topic: %s. Payload: '%s', with value template '%s'",
+                    (
+                        "Empty template output for entity: %s with state topic: %s."
+                        " Payload: '%s', with value template '%s'"
+                    ),
                     self._config[CONF_NAME],
                     self._config[CONF_STATE_TOPIC],
                     msg.payload,
@@ -232,9 +238,15 @@ class MqttBinarySensor(MqttEntity, BinarySensorEntity, RestoreEntity):
             else:  # Payload is not for this entity
                 template_info = ""
                 if self._config.get(CONF_VALUE_TEMPLATE) is not None:
-                    template_info = f", template output: '{str(payload)}', with value template '{str(self._config.get(CONF_VALUE_TEMPLATE))}'"
+                    template_info = (
+                        f", template output: '{str(payload)}', with value template"
+                        f" '{str(self._config.get(CONF_VALUE_TEMPLATE))}'"
+                    )
                 _LOGGER.info(
-                    "No matching payload found for entity: %s with state topic: %s. Payload: '%s'%s",
+                    (
+                        "No matching payload found for entity: %s with state topic: %s."
+                        " Payload: '%s'%s"
+                    ),
                     self._config[CONF_NAME],
                     self._config[CONF_STATE_TOPIC],
                     msg.payload,