Collection of code styling tweaks (#87381)

This commit is contained in:
Franck Nijhof 2023-02-04 18:52:26 +01:00 committed by GitHub
parent e55f11296e
commit 3d557b5583
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
52 changed files with 274 additions and 156 deletions

View file

@ -129,8 +129,8 @@ async def async_setup_entry(
hass, calendar_item.dict(exclude_unset=True)
)
new_calendars.append(calendar_info)
# Yaml calendar config may map one calendar to multiple entities with extra options like
# offsets or search criteria.
# Yaml calendar config may map one calendar to multiple entities
# with extra options like offsets or search criteria.
num_entities = len(calendar_info[CONF_ENTITIES])
for data in calendar_info[CONF_ENTITIES]:
entity_enabled = data.get(CONF_TRACK, True)
@ -141,15 +141,17 @@ async def async_setup_entry(
" removed from google_calendars.yaml"
)
entity_name = data[CONF_DEVICE_ID]
# The unique id is based on the config entry and calendar id since multiple accounts
# can have a common calendar id (e.g. `en.usa#holiday@group.v.calendar.google.com`).
# When using google_calendars.yaml with multiple entities for a single calendar, we
# have no way to set a unique id.
# The unique id is based on the config entry and calendar id since
# multiple accounts can have a common calendar id
# (e.g. `en.usa#holiday@group.v.calendar.google.com`).
# When using google_calendars.yaml with multiple entities for a
# single calendar, we have no way to set a unique id.
if num_entities > 1:
unique_id = None
else:
unique_id = f"{config_entry.unique_id}-{calendar_id}"
# Migrate to new unique_id format which supports multiple config entries as of 2022.7
# Migrate to new unique_id format which supports
# multiple config entries as of 2022.7
for old_unique_id in (calendar_id, f"{calendar_id}-{entity_name}"):
if not (entity_entry := entity_entry_map.get(old_unique_id)):
continue
@ -173,9 +175,9 @@ async def async_setup_entry(
entity_entry.entity_id,
)
coordinator: CalendarSyncUpdateCoordinator | CalendarQueryUpdateCoordinator
# Prefer calendar sync down of resources when possible. However, sync does not work
# for search. Also free-busy calendars denormalize recurring events as individual
# events which is not efficient for sync
# Prefer calendar sync down of resources when possible. However,
# sync does not work for search. Also free-busy calendars denormalize
# recurring events as individual events which is not efficient for sync
support_write = (
calendar_item.access_role.is_writer
and get_feature_access(hass, config_entry) is FeatureAccess.read_write

View file

@ -134,7 +134,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
DATA_GROUP_MANAGER: group_manager,
DATA_SOURCE_MANAGER: source_manager,
Platform.MEDIA_PLAYER: players,
# Maps player_id to entity_id. Populated by the individual HeosMediaPlayer entities.
# Maps player_id to entity_id. Populated by the individual
# HeosMediaPlayer entities.
DATA_ENTITY_ID_MAP: {},
}
@ -251,11 +252,11 @@ class GroupManager:
self.controller = controller
def _get_entity_id_to_player_id_map(self) -> dict:
"""Return a dictionary which maps all HeosMediaPlayer entity_ids to player_ids."""
"""Return mapping of all HeosMediaPlayer entity_ids to player_ids."""
return {v: k for k, v in self._hass.data[DOMAIN][DATA_ENTITY_ID_MAP].items()}
async def async_get_group_membership(self):
"""Return a dictionary which contains all group members for each player as entity_ids."""
"""Return all group members for each player as entity_ids."""
group_info_by_entity_id = {
player_entity_id: []
for player_entity_id in self._get_entity_id_to_player_id_map()
@ -287,7 +288,7 @@ class GroupManager:
async def async_join_players(
self, leader_entity_id: str, member_entity_ids: list[str]
) -> None:
"""Create a group with `leader_entity_id` as group leader and `member_entity_ids` as member players."""
"""Create a group a group leader and member players."""
entity_id_to_player_id_map = self._get_entity_id_to_player_id_map()
leader_id = entity_id_to_player_id_map.get(leader_entity_id)
if not leader_id:

View file

@ -93,7 +93,8 @@ def find_existing_host(
def ensure_pin_format(pin: str, allow_insecure_setup_codes: Any = None) -> str:
"""Ensure a pin code is correctly formatted.
Ensures a pin code is in the format 111-11-111. Handles codes with and without dashes.
Ensures a pin code is in the format 111-11-111.
Handles codes with and without dashes.
If incorrect code is entered, an exception is raised.
"""
@ -284,7 +285,8 @@ class HomekitControllerFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
if self.controller is None:
await self._async_setup_controller()
# mypy can't see that self._async_setup_controller() always sets self.controller or throws
# mypy can't see that self._async_setup_controller() always
# sets self.controller or throws
assert self.controller
pairing = self.controller.load_pairing(
@ -344,7 +346,8 @@ class HomekitControllerFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
if model in HOMEKIT_IGNORE:
return self.async_abort(reason="ignored_model")
# If this is a HomeKit bridge/accessory exported by *this* HA instance ignore it.
# If this is a HomeKit bridge/accessory exported
# by *this* HA instance ignore it.
if await self._hkid_is_homekit(hkid):
return self.async_abort(reason="ignored_model")
@ -366,12 +369,11 @@ class HomekitControllerFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
return self.async_abort(reason="ignored_model")
# Late imports in case BLE is not available
from aiohomekit.controller.ble.discovery import ( # pylint: disable=import-outside-toplevel
BleDiscovery,
)
from aiohomekit.controller.ble.manufacturer_data import ( # pylint: disable=import-outside-toplevel
HomeKitAdvertisement,
)
# pylint: disable-next=import-outside-toplevel
from aiohomekit.controller.ble.discovery import BleDiscovery
# pylint: disable-next=import-outside-toplevel
from aiohomekit.controller.ble.manufacturer_data import HomeKitAdvertisement
await self.async_set_unique_id(discovery_info.address)
self._abort_if_unique_id_configured()

View file

@ -817,8 +817,8 @@ class PowerViewShadeDualOverlappedCombined(PowerViewShadeDualOverlappedBase):
@callback
def _get_shade_move(self, target_hass_position: int) -> PowerviewShadeMove:
position_shade = hass_position_to_hd(target_hass_position, MAX_POSITION)
# note we set POS_KIND_VANE: MIN_POSITION here even with shades without tilt so no additional
# override is required for differences between type 8/9/10
# note we set POS_KIND_VANE: MIN_POSITION here even with shades without
# tilt so no additional override is required for differences between type 8/9/10
# this just stores the value in the coordinator for future reference
if target_hass_position <= 50:
target_hass_position = target_hass_position * 2
@ -846,11 +846,14 @@ class PowerViewShadeDualOverlappedFront(PowerViewShadeDualOverlappedBase):
This equates to two shades being controlled by one motor.
The front shade must be completely down before the rear shade will move.
Sibling Class: PowerViewShadeDualOverlappedCombined, PowerViewShadeDualOverlappedRear
API Class: ShadeDualOverlapped + ShadeDualOverlappedTilt90 + ShadeDualOverlappedTilt180
Sibling Class:
PowerViewShadeDualOverlappedCombined, PowerViewShadeDualOverlappedRear
API Class:
ShadeDualOverlapped + ShadeDualOverlappedTilt90 + ShadeDualOverlappedTilt180
Type 8 - Duolite (front and rear shades)
Type 9 - Duolite with 90° Tilt (front bottom up shade that also tilts plus a rear opaque (non-tilting) shade)
Type 9 - Duolite with 90° Tilt (front bottom up shade that also tilts
plus a rear opaque (non-tilting) shade)
Type 10 - Duolite with 180° Tilt
"""
@ -907,11 +910,14 @@ class PowerViewShadeDualOverlappedRear(PowerViewShadeDualOverlappedBase):
This equates to two shades being controlled by one motor.
The front shade must be completely down before the rear shade will move.
Sibling Class: PowerViewShadeDualOverlappedCombined, PowerViewShadeDualOverlappedFront
API Class: ShadeDualOverlapped + ShadeDualOverlappedTilt90 + ShadeDualOverlappedTilt180
Sibling Class:
PowerViewShadeDualOverlappedCombined, PowerViewShadeDualOverlappedFront
API Class:
ShadeDualOverlapped + ShadeDualOverlappedTilt90 + ShadeDualOverlappedTilt180
Type 8 - Duolite (front and rear shades)
Type 9 - Duolite with 90° Tilt (front bottom up shade that also tilts plus a rear opaque (non-tilting) shade)
Type 9 - Duolite with 90° Tilt (front bottom up shade that also tilts plus
a rear opaque (non-tilting) shade)
Type 10 - Duolite with 180° Tilt
"""

View file

@ -41,7 +41,8 @@ OPTIONS = "options"
async def async_get_device_config(hass, config_entry):
"""Initiate the connection and services."""
# Make a copy of addresses due to edge case where the list of devices could change during status update
# Make a copy of addresses due to edge case where the list of devices could
# change during status update
# Cannot be done concurrently due to issues with the underlying protocol.
for address in list(devices):
if devices[address].is_battery:

View file

@ -72,7 +72,12 @@ async def async_setup_entry(
child_nodes: list[
tuple[Node, BinarySensorDeviceClass | None, str | None, DeviceInfo | None]
] = []
entity: ISYInsteonBinarySensorEntity | ISYBinarySensorEntity | ISYBinarySensorHeartbeat | ISYBinarySensorProgramEntity
entity: (
ISYInsteonBinarySensorEntity
| ISYBinarySensorEntity
| ISYBinarySensorHeartbeat
| ISYBinarySensorProgramEntity
)
isy_data = hass.data[DOMAIN][entry.entry_id]
devices: dict[str, DeviceInfo] = isy_data.devices

View file

@ -51,8 +51,8 @@ async def async_setup_entry(
entities.append(ISYSwitchProgramEntity(name, status, actions))
for node, control in isy_data.aux_properties[Platform.SWITCH]:
# Currently only used for enable switches, will need to be updated for NS support
# by making sure control == TAG_ENABLED
# Currently only used for enable switches, will need to be updated for
# NS support by making sure control == TAG_ENABLED
description = SwitchEntityDescription(
key=control,
device_class=SwitchDeviceClass.SWITCH,

View file

@ -273,8 +273,9 @@ class ControllerDevice(ClimateEntity):
),
"control_zone": self._controller.zone_ctrl,
"control_zone_name": self.control_zone_name,
# Feature ClimateEntityFeature.TARGET_TEMPERATURE controls both displaying target temp & setting it
# As the feature is turned off for zone control, report target temp as extra state attribute
# Feature ClimateEntityFeature.TARGET_TEMPERATURE controls both displaying
# target temp & setting it. As the feature is turned off for zone control,
# report target temp as extra state attribute
"control_zone_setpoint": show_temp(
self.hass,
self.control_zone_setpoint,

View file

@ -224,7 +224,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Load a config entry."""
# `config` is None when reloading the integration or no `knx` key in configuration.yaml
# `config` is None when reloading the integration
# or no `knx` key in configuration.yaml
if (config := hass.data.get(DATA_KNX_CONFIG)) is None:
_conf = await async_integration_yaml_config(hass, DOMAIN)
if not _conf or DOMAIN not in _conf:
@ -526,7 +527,10 @@ class KNXModule:
transcoder := DPTBase.parse_transcoder(dpt)
):
self._address_filter_transcoder.update(
{_filter: transcoder for _filter in _filters} # type: ignore[type-abstract]
{
_filter: transcoder # type: ignore[type-abstract]
for _filter in _filters
}
)
return self.xknx.telegram_queue.register_telegram_received_cb(
@ -558,7 +562,10 @@ class KNXModule:
transcoder := DPTBase.parse_transcoder(dpt)
):
self._group_address_transcoder.update(
{_address: transcoder for _address in group_addresses} # type: ignore[type-abstract]
{
_address: transcoder # type: ignore[type-abstract]
for _address in group_addresses
}
)
for group_address in group_addresses:
if group_address in self._knx_event_callback.group_addresses:

View file

@ -9,8 +9,8 @@ def get_tradable_asset_pairs(kraken_api: KrakenAPI) -> dict[str, str]:
tradable_asset_pairs = {}
asset_pairs_df = kraken_api.get_tradable_asset_pairs()
for pair in zip(asset_pairs_df.index.values, asset_pairs_df["wsname"]):
if not pair[0].endswith(
".d"
): # Remove darkpools https://support.kraken.com/hc/en-us/articles/360001391906-Introducing-the-Kraken-Dark-Pool
# Remove darkpools
# https://support.kraken.com/hc/en-us/articles/360001391906-Introducing-the-Kraken-Dark-Pool
if not pair[0].endswith(".d"):
tradable_asset_pairs[pair[1]] = pair[0]
return tradable_asset_pairs

View file

@ -122,7 +122,8 @@ async def get_usb_ports(hass: HomeAssistant) -> dict[str, str]:
ports = await hass.async_add_executor_job(list_ports.comports)
port_descriptions = {}
for port in ports:
# this prevents an issue with usb_device_from_port not working for ports without vid on RPi
# this prevents an issue with usb_device_from_port
# not working for ports without vid on RPi
if port.vid:
usb_device = usb.usb_device_from_port(port)
dev_path = usb.get_serial_by_id(usb_device.device)

View file

@ -537,7 +537,12 @@ class LutronCasetaDevice(Entity):
# here. Since it would be a breaking change to change the identifier
# we are ignoring the type error here until it can be migrated to
# a string in a future release.
identifiers={(DOMAIN, self._handle_none_serial(self.serial))}, # type: ignore[arg-type]
identifiers={
(
DOMAIN,
self._handle_none_serial(self.serial), # type: ignore[arg-type]
)
},
manufacturer=MANUFACTURER,
model=f"{device['model']} ({device['type']})",
name=full_name,

View file

@ -52,7 +52,8 @@ async def async_setup_entry(
.title()
)
# Append the child device name to the end of the parent keypad name to create the entity name
# Append the child device name to the end of the parent keypad
# name to create the entity name
full_name = f'{parent_device_info.get("name")} {device_name}'
# Set the device_info to the same as the Parent Keypad
# The entities will be nested inside the keypad device

View file

@ -314,10 +314,11 @@ class LyricClimate(LyricDeviceEntity, ClimateEntity):
_LOGGER.debug("HVAC mode: %s", hvac_mode)
try:
if LYRIC_HVAC_MODES[hvac_mode] == LYRIC_HVAC_MODE_HEAT_COOL:
# If the system is off, turn it to Heat first then to Auto, otherwise it turns to
# Auto briefly and then reverts to Off (perhaps related to heatCoolMode). This is the
# behavior that happens with the native app as well, so likely a bug in the api itself
# If the system is off, turn it to Heat first then to Auto,
# otherwise it turns to
# Auto briefly and then reverts to Off (perhaps related to
# heatCoolMode). This is the behavior that happens with the
# native app as well, so likely a bug in the api itself
if HVAC_MODES[self.device.changeableValues.mode] == HVACMode.OFF:
_LOGGER.debug(
"HVAC mode passed to lyric: %s",

View file

@ -64,7 +64,8 @@ async def handle_refresh_vehicle_status(
class MazdaButtonEntityDescription(ButtonEntityDescription):
"""Describes a Mazda button entity."""
# Function to determine whether the vehicle supports this button, given the coordinator data
# Function to determine whether the vehicle supports this button,
# given the coordinator data
is_supported: Callable[[dict[str, Any]], bool] = lambda data: True
async_press: Callable[

View file

@ -25,7 +25,8 @@ from .const import DATA_CLIENT, DATA_COORDINATOR, DOMAIN
class MazdaSensorRequiredKeysMixin:
"""Mixin for required keys."""
# Function to determine the value for this sensor, given the coordinator data and the configured unit system
# Function to determine the value for this sensor, given the coordinator data
# and the configured unit system
value: Callable[[dict[str, Any]], StateType]
@ -35,7 +36,8 @@ class MazdaSensorEntityDescription(
):
"""Describes a Mazda sensor entity."""
# Function to determine whether the vehicle supports this sensor, given the coordinator data
# Function to determine whether the vehicle supports this sensor,
# given the coordinator data
is_supported: Callable[[dict[str, Any]], bool] = lambda data: True

View file

@ -144,10 +144,14 @@ def setup_mysensors_platform(
) -> list[MySensorsDevice] | None:
"""Set up a MySensors platform.
Sets up a bunch of instances of a single platform that is supported by this integration.
The function is given a list of device ids, each one describing an instance to set up.
The function is also given a class.
A new instance of the class is created for every device id, and the device id is given to the constructor of the class
Sets up a bunch of instances of a single platform that is supported by this
integration.
The function is given a list of device ids, each one describing an instance
to set up. The function is also given a class.
A new instance of the class is created for every device id, and the device
id is given to the constructor of the class.
"""
if device_args is None:
device_args = ()

View file

@ -56,12 +56,15 @@ GatewayId = str
# a unique id generated by config_flow.py and stored in the ConfigEntry as the entry id.
DevId = tuple[GatewayId, int, int, int]
# describes the backend of a hass entity. Contents are: GatewayId, node_id, child_id, v_type as int
# describes the backend of a hass entity.
# Contents are: GatewayId, node_id, child_id, v_type as int
#
# The string version of v_type can be looked up in the enum gateway.const.SetReq of the appropriate BaseAsyncGateway
# The string version of v_type can be looked up in the enum gateway.const.SetReq
# of the appropriate BaseAsyncGateway
# Home Assistant Entities are quite limited and only ever do one thing.
# MySensors Nodes have multiple child_ids, each with an s_type and several associated v_types
# The MySensors integration brings these together by creating an entity for every v_type of every child_id of every node.
# The MySensors integration brings these together by creating an entity for every v_type
# of every child_id of every node.
# The DevId tuple perfectly captures this.
BINARY_SENSOR_TYPES: dict[SensorType, set[ValueType]] = {

View file

@ -53,7 +53,8 @@ class MySensorsDevice(ABC):
self.gateway: BaseAsyncGateway = gateway
self.node_id: int = node_id
self.child_id: int = child_id
self.value_type: int = value_type # value_type as int. string variant can be looked up in gateway consts
# value_type as int. string variant can be looked up in gateway consts
self.value_type: int = value_type
self.child_type = self._child.type
self._values: dict[int, Any] = {}
self._debouncer: Debouncer | None = None

View file

@ -28,7 +28,7 @@ class ConfigEntryAuth(pybotvac.OAuthSession): # type: ignore[misc]
super().__init__(self.session.token, vendor=pybotvac.Neato())
def refresh_tokens(self) -> str:
"""Refresh and return new Neato Botvac tokens using Home Assistant OAuth2 session."""
"""Refresh and return new Neato Botvac tokens."""
run_coroutine_threadsafe(
self.session.async_ensure_token_valid(), self.hass.loop
).result()
@ -39,7 +39,8 @@ class ConfigEntryAuth(pybotvac.OAuthSession): # type: ignore[misc]
class NeatoImplementation(AuthImplementation):
"""Neato implementation of LocalOAuth2Implementation.
We need this class because we have to add client_secret and scope to the authorization request.
We need this class because we have to add client_secret
and scope to the authorization request.
"""
@property

View file

@ -229,7 +229,8 @@ class NSDepartureSensor(SensorEntity):
self._trips = None
return
# Set the search parameter to search from a specific trip time or to just search for next trip.
# Set the search parameter to search from a specific trip time
# or to just search for next trip.
if self._time:
trip_time = (
datetime.today()

View file

@ -115,10 +115,11 @@ class NestCamera(Camera):
@property
def available(self) -> bool:
"""Return True if entity is available."""
# Cameras are marked unavailable on stream errors in #54659 however nest streams have
# a high error rate (#60353). Given nest streams are so flaky, marking the stream
# unavailable has other side effects like not showing the camera image which sometimes
# are still able to work. Until the streams are fixed, just leave the streams as available.
# Cameras are marked unavailable on stream errors in #54659 however nest
# streams have a high error rate (#60353). Given nest streams are so flaky,
# marking the stream unavailable has other side effects like not showing
# the camera image which sometimes are still able to work. Until the
# streams are fixed, just leave the streams as available.
return True
async def stream_source(self) -> str | None:

View file

@ -363,8 +363,8 @@ class NestFlowHandler(
) -> FlowResult:
"""Verify any last pre-requisites before sending user through OAuth flow."""
if user_input is None and self._upgrade:
# During app auth upgrade we need the user to update their device access project
# before we redirect to the authentication flow.
# During app auth upgrade we need the user to update their device
# access project before we redirect to the authentication flow.
return await self.async_step_device_project_upgrade()
return await super().async_step_auth(user_input)

View file

@ -357,10 +357,11 @@ class OnkyoDevice(MediaPlayerEntity):
def set_volume_level(self, volume: float) -> None:
"""Set volume level, input is range 0..1.
However full volume on the amp is usually far too loud so allow the user to specify the upper range
with CONF_MAX_VOLUME. we change as per max_volume set by user. This means that if max volume is 80 then full
volume in HA will give 80% volume on the receiver. Then we convert
that to the correct scale for the receiver.
However full volume on the amp is usually far too loud so allow the user to
specify the upper range with CONF_MAX_VOLUME. We change as per max_volume
set by user. This means that if max volume is 80 then full volume in HA will
give 80% volume on the receiver. Then we convert that to the correct scale
for the receiver.
"""
# HA_VOL * (MAX VOL / 100) * MAX_RECEIVER_VOL
self.command(
@ -524,10 +525,11 @@ class OnkyoDeviceZone(OnkyoDevice):
def set_volume_level(self, volume: float) -> None:
"""Set volume level, input is range 0..1.
However full volume on the amp is usually far too loud so allow the user to specify the upper range
with CONF_MAX_VOLUME. we change as per max_volume set by user. This means that if max volume is 80 then full
volume in HA will give 80% volume on the receiver. Then we convert
that to the correct scale for the receiver.
However full volume on the amp is usually far too loud so allow the user to
specify the upper range with CONF_MAX_VOLUME. We change as per max_volume
set by user. This means that if max volume is 80 then full volume in HA
will give 80% volume on the receiver. Then we convert that to the correct
scale for the receiver.
"""
# HA_VOL * (MAX VOL / 100) * MAX_RECEIVER_VOL
self.command(

View file

@ -53,11 +53,12 @@ class OverkizEntity(CoordinatorEntity[OverkizDataUpdateCoordinator]):
def generate_device_info(self) -> DeviceInfo:
"""Return device registry information for this entity."""
# Some devices, such as the Smart Thermostat have several devices in one physical device,
# with same device url, terminated by '#' and a number.
# Some devices, such as the Smart Thermostat have several devices
# in one physical device, with same device url, terminated by '#' and a number.
# In this case, we use the base device url as the device identifier.
if self.is_sub_device:
# Only return the url of the base device, to inherit device name and model from parent device.
# Only return the url of the base device, to inherit device name
# and model from parent device.
return {
"identifiers": {(DOMAIN, self.executor.base_device_url)},
}
@ -114,5 +115,6 @@ class OverkizDescriptiveEntity(OverkizEntity):
self._attr_unique_id = f"{super().unique_id}-{self.entity_description.key}"
if self.is_sub_device:
# In case of sub device, use the provided label and append the name of the type of entity
# In case of sub device, use the provided label
# and append the name of the type of entity
self._attr_name = f"{self.device.label} {description.name}"

View file

@ -86,7 +86,9 @@ SENSOR_DESCRIPTIONS: list[OverkizSensorDescription] = [
icon="mdi:shower-head",
state_class=SensorStateClass.MEASUREMENT,
),
# V40 is measured in litres (L) and shows the amount of warm (mixed) water with a temperature of 40 C, which can be drained from a switched off electric water heater.
# V40 is measured in litres (L) and shows the amount of warm (mixed) water
# with a temperature of 40 C, which can be drained from
# a switched off electric water heater.
OverkizSensorDescription(
key=OverkizState.CORE_V40_WATER_VOLUME_ESTIMATION,
name="Water volume estimation at 40 °C",
@ -150,7 +152,8 @@ SENSOR_DESCRIPTIONS: list[OverkizSensorDescription] = [
key=OverkizState.CORE_LUMINANCE,
name="Luminance",
device_class=SensorDeviceClass.ILLUMINANCE,
native_unit_of_measurement=LIGHT_LUX, # core:MeasuredValueType = core:LuminanceInLux
# core:MeasuredValueType = core:LuminanceInLux
native_unit_of_measurement=LIGHT_LUX,
state_class=SensorStateClass.MEASUREMENT,
),
# ElectricitySensor/CumulativeElectricPowerConsumptionSensor
@ -158,21 +161,27 @@ SENSOR_DESCRIPTIONS: list[OverkizSensorDescription] = [
key=OverkizState.CORE_ELECTRIC_ENERGY_CONSUMPTION,
name="Electric energy consumption",
device_class=SensorDeviceClass.ENERGY,
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, # core:MeasuredValueType = core:ElectricalEnergyInWh (not for modbus:YutakiV2DHWElectricalEnergyConsumptionComponent)
state_class=SensorStateClass.TOTAL_INCREASING, # core:MeasurementCategory attribute = electric/overall
# core:MeasuredValueType = core:ElectricalEnergyInWh
# (not for modbus:YutakiV2DHWElectricalEnergyConsumptionComponent)
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
# core:MeasurementCategory attribute = electric/overall
state_class=SensorStateClass.TOTAL_INCREASING,
),
OverkizSensorDescription(
key=OverkizState.CORE_ELECTRIC_POWER_CONSUMPTION,
name="Electric power consumption",
device_class=SensorDeviceClass.POWER,
native_unit_of_measurement=UnitOfPower.WATT, # core:MeasuredValueType = core:ElectricalEnergyInWh (not for modbus:YutakiV2DHWElectricalEnergyConsumptionComponent)
# core:MeasuredValueType = core:ElectricalEnergyInWh
# (not for modbus:YutakiV2DHWElectricalEnergyConsumptionComponent)
native_unit_of_measurement=UnitOfPower.WATT,
state_class=SensorStateClass.MEASUREMENT,
),
OverkizSensorDescription(
key=OverkizState.CORE_CONSUMPTION_TARIFF1,
name="Consumption tariff 1",
device_class=SensorDeviceClass.ENERGY,
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, # core:MeasuredValueType = core:ElectricalEnergyInWh
# core:MeasuredValueType = core:ElectricalEnergyInWh
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
entity_registry_enabled_default=False,
state_class=SensorStateClass.MEASUREMENT,
),
@ -180,7 +189,8 @@ SENSOR_DESCRIPTIONS: list[OverkizSensorDescription] = [
key=OverkizState.CORE_CONSUMPTION_TARIFF2,
name="Consumption tariff 2",
device_class=SensorDeviceClass.ENERGY,
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, # core:MeasuredValueType = core:ElectricalEnergyInWh
# core:MeasuredValueType = core:ElectricalEnergyInWh
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
entity_registry_enabled_default=False,
state_class=SensorStateClass.MEASUREMENT,
),
@ -188,7 +198,8 @@ SENSOR_DESCRIPTIONS: list[OverkizSensorDescription] = [
key=OverkizState.CORE_CONSUMPTION_TARIFF3,
name="Consumption tariff 3",
device_class=SensorDeviceClass.ENERGY,
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, # core:MeasuredValueType = core:ElectricalEnergyInWh
# core:MeasuredValueType = core:ElectricalEnergyInWh
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
entity_registry_enabled_default=False,
state_class=SensorStateClass.MEASUREMENT,
),
@ -196,7 +207,8 @@ SENSOR_DESCRIPTIONS: list[OverkizSensorDescription] = [
key=OverkizState.CORE_CONSUMPTION_TARIFF4,
name="Consumption tariff 4",
device_class=SensorDeviceClass.ENERGY,
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, # core:MeasuredValueType = core:ElectricalEnergyInWh
# core:MeasuredValueType = core:ElectricalEnergyInWh
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
entity_registry_enabled_default=False,
state_class=SensorStateClass.MEASUREMENT,
),
@ -204,7 +216,8 @@ SENSOR_DESCRIPTIONS: list[OverkizSensorDescription] = [
key=OverkizState.CORE_CONSUMPTION_TARIFF5,
name="Consumption tariff 5",
device_class=SensorDeviceClass.ENERGY,
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, # core:MeasuredValueType = core:ElectricalEnergyInWh
# core:MeasuredValueType = core:ElectricalEnergyInWh
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
entity_registry_enabled_default=False,
state_class=SensorStateClass.MEASUREMENT,
),
@ -212,7 +225,8 @@ SENSOR_DESCRIPTIONS: list[OverkizSensorDescription] = [
key=OverkizState.CORE_CONSUMPTION_TARIFF6,
name="Consumption tariff 6",
device_class=SensorDeviceClass.ENERGY,
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, # core:MeasuredValueType = core:ElectricalEnergyInWh
# core:MeasuredValueType = core:ElectricalEnergyInWh
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
entity_registry_enabled_default=False,
state_class=SensorStateClass.MEASUREMENT,
),
@ -220,7 +234,8 @@ SENSOR_DESCRIPTIONS: list[OverkizSensorDescription] = [
key=OverkizState.CORE_CONSUMPTION_TARIFF7,
name="Consumption tariff 7",
device_class=SensorDeviceClass.ENERGY,
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, # core:MeasuredValueType = core:ElectricalEnergyInWh
# core:MeasuredValueType = core:ElectricalEnergyInWh
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
entity_registry_enabled_default=False,
state_class=SensorStateClass.MEASUREMENT,
),
@ -228,7 +243,8 @@ SENSOR_DESCRIPTIONS: list[OverkizSensorDescription] = [
key=OverkizState.CORE_CONSUMPTION_TARIFF8,
name="Consumption tariff 8",
device_class=SensorDeviceClass.ENERGY,
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, # core:MeasuredValueType = core:ElectricalEnergyInWh
# core:MeasuredValueType = core:ElectricalEnergyInWh
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
entity_registry_enabled_default=False,
state_class=SensorStateClass.MEASUREMENT,
),
@ -236,7 +252,8 @@ SENSOR_DESCRIPTIONS: list[OverkizSensorDescription] = [
key=OverkizState.CORE_CONSUMPTION_TARIFF9,
name="Consumption tariff 9",
device_class=SensorDeviceClass.ENERGY,
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, # core:MeasuredValueType = core:ElectricalEnergyInWh
# core:MeasuredValueType = core:ElectricalEnergyInWh
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
entity_registry_enabled_default=False,
state_class=SensorStateClass.MEASUREMENT,
),
@ -246,7 +263,8 @@ SENSOR_DESCRIPTIONS: list[OverkizSensorDescription] = [
name="Relative humidity",
native_value=lambda value: round(cast(float, value), 2),
device_class=SensorDeviceClass.HUMIDITY,
native_unit_of_measurement=PERCENTAGE, # core:MeasuredValueType = core:RelativeValueInPercentage
# core:MeasuredValueType = core:RelativeValueInPercentage
native_unit_of_measurement=PERCENTAGE,
state_class=SensorStateClass.MEASUREMENT,
),
# TemperatureSensor/TemperatureSensor
@ -255,7 +273,8 @@ SENSOR_DESCRIPTIONS: list[OverkizSensorDescription] = [
name="Temperature",
native_value=lambda value: round(cast(float, value), 2),
device_class=SensorDeviceClass.TEMPERATURE,
native_unit_of_measurement=UnitOfTemperature.CELSIUS, # core:MeasuredValueType = core:TemperatureInCelcius
# core:MeasuredValueType = core:TemperatureInCelcius
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
state_class=SensorStateClass.MEASUREMENT,
),
# WeatherSensor/WeatherForecastSensor
@ -478,7 +497,8 @@ class OverkizHomeKitSetupCodeSensor(OverkizEntity, SensorEntity):
def device_info(self) -> DeviceInfo:
"""Return device registry information for this entity."""
# By default this sensor will be listed at a virtual HomekitStack device,
# but it makes more sense to show this at the gateway device in the entity registry.
# but it makes more sense to show this at the gateway device
# in the entity registry.
return {
"identifiers": {(DOMAIN, self.executor.get_gateway_id())},
}

View file

@ -68,8 +68,8 @@ class PandoraMediaPlayer(MediaPlayerEntity):
"""A media player that uses the Pianobar interface to Pandora."""
_attr_media_content_type = MediaType.MUSIC
# MediaPlayerEntityFeature.VOLUME_SET is close to available but we need volume up/down
# controls in the GUI.
# MediaPlayerEntityFeature.VOLUME_SET is close to available
# but we need volume up/down controls in the GUI.
_attr_supported_features = (
MediaPlayerEntityFeature.PAUSE
| MediaPlayerEntityFeature.TURN_ON

View file

@ -77,7 +77,8 @@ class PilightLight(PilightBaseDevice, LightEntity):
# Calculate pilight brightness (as a range of 0 to 15)
# By creating a percentage
percentage = self._brightness / 255
# Then calculate the dimmer range (aka amount of available brightness steps).
# Then calculate the dimmer range (aka amount
# of available brightness steps).
dimrange = self._dimlevel_max - self._dimlevel_min
# Finally calculate the pilight brightness.
# We add dimlevel_min back in to ensure the minimum is always reached.

View file

@ -122,7 +122,8 @@ class PlugwiseClimateEntity(PlugwiseEntity, ClimateEntity):
# When control_state is present, prefer this data
if (control_state := self.device.get("control_state")) == "cooling":
return HVACAction.COOLING
# Support preheating state as heating, until preheating is added as a separate state
# Support preheating state as heating,
# until preheating is added as a separate state
if control_state in ["heating", "preheating"]:
return HVACAction.HEATING
if control_state == "off":

View file

@ -65,7 +65,8 @@ def migrate_sensor_entities(
"""Migrate Sensors if needed."""
ent_reg = er.async_get(hass)
# Migrating opentherm_outdoor_temperature to opentherm_outdoor_air_temperature sensor
# Migrating opentherm_outdoor_temperature
# to opentherm_outdoor_air_temperature sensor
for device_id, device in coordinator.data.devices.items():
if device.get("dev_class") != "heater_central":
continue

View file

@ -96,7 +96,8 @@ class PowerwallDataManager:
raise UpdateFailed("Unable to fetch data from powerwall") from err
except MissingAttributeError as err:
_LOGGER.error("The powerwall api has changed: %s", str(err))
# The error might include some important information about what exactly changed.
# The error might include some important information
# about what exactly changed.
persistent_notification.create(
self.hass, API_CHANGED_ERROR_BODY, API_CHANGED_TITLE
)
@ -109,7 +110,8 @@ class PowerwallDataManager:
if self.password is None:
raise ConfigEntryAuthFailed from err
_LOGGER.debug("Access denied, trying to reauthenticate")
# there is still an attempt left to authenticate, so we continue in the loop
# there is still an attempt left to authenticate,
# so we continue in the loop
except APIError as err:
raise UpdateFailed(f"Updated failed due to {err}, will retry") from err
else:

View file

@ -262,4 +262,4 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
class WrongVersion(exceptions.HomeAssistantError):
"""Error to indicate the powerwall uses a software version we cannot interact with."""
"""Error indicating we cannot interact with the powerwall software version."""

View file

@ -309,7 +309,10 @@ class ProxmoxClient:
self._connection_start_time = None
def build_client(self):
"""Construct the ProxmoxAPI client. Allows inserting the realm within the `user` value."""
"""Construct the ProxmoxAPI client.
Allows inserting the realm within the `user` value.
"""
if "@" in self._user:
user_id = self._user

View file

@ -54,7 +54,8 @@ COUNTRYCODE_NAMES = {
"LU": "Luxembourg",
"MT": "Malta",
"MX": "Mexico",
"MY": "Maylasia", # spelling error compatibility with pyps4_2ndscreen.media_art.COUNTRIES
# spelling error compatibility with pyps4_2ndscreen.media_art.COUNTRIES
"MY": "Maylasia",
"NI": "Nicaragua",
"NL": "Nederland",
"NO": "Norway",

View file

@ -142,7 +142,10 @@ class QswBinarySensor(QswSensorEntity, BinarySensorEntity):
super().__init__(coordinator, entry, type_id)
self._attr_name = f"{self.product} {description.name}"
self._attr_unique_id = f"{entry.unique_id}_{description.key}{description.sep_key}{description.subkey}"
self._attr_unique_id = (
f"{entry.unique_id}_{description.key}"
f"{description.sep_key}{description.subkey}"
)
self.entity_description = description
self._async_update_attrs()

View file

@ -364,7 +364,10 @@ class QswSensor(QswSensorEntity, SensorEntity):
super().__init__(coordinator, entry, type_id)
self._attr_name = f"{self.product} {description.name}"
self._attr_unique_id = f"{entry.unique_id}_{description.key}{description.sep_key}{description.subkey}"
self._attr_unique_id = (
f"{entry.unique_id}_{description.key}"
f"{description.sep_key}{description.subkey}"
)
self.entity_description = description
self._async_update_attrs()

View file

@ -533,7 +533,8 @@ def _apply_update( # noqa: C901
_drop_index(session_maker, "states", "states__state_changes")
_drop_index(session_maker, "states", "states__significant_changes")
_drop_index(session_maker, "states", "ix_states_entity_id_created")
# This used to create ix_states_entity_id_last_updated, but it was removed in version 32
# This used to create ix_states_entity_id_last_updated,
# but it was removed in version 32
elif new_version == 5:
# Create supporting index for States.event_id foreign key
_create_index(session_maker, "states", "ix_states_event_id")
@ -544,21 +545,25 @@ def _apply_update( # noqa: C901
["context_id CHARACTER(36)", "context_user_id CHARACTER(36)"],
)
_create_index(session_maker, "events", "ix_events_context_id")
# This used to create ix_events_context_user_id, but it was removed in version 28
# This used to create ix_events_context_user_id,
# but it was removed in version 28
_add_columns(
session_maker,
"states",
["context_id CHARACTER(36)", "context_user_id CHARACTER(36)"],
)
_create_index(session_maker, "states", "ix_states_context_id")
# This used to create ix_states_context_user_id, but it was removed in version 28
# This used to create ix_states_context_user_id,
# but it was removed in version 28
elif new_version == 7:
# There used to be a ix_states_entity_id index here, but it was removed in later schema
# There used to be a ix_states_entity_id index here,
# but it was removed in later schema
pass
elif new_version == 8:
_add_columns(session_maker, "events", ["context_parent_id CHARACTER(36)"])
_add_columns(session_maker, "states", ["old_state_id INTEGER"])
# This used to create ix_events_context_parent_id, but it was removed in version 28
# This used to create ix_events_context_parent_id,
# but it was removed in version 28
elif new_version == 9:
# We now get the context from events with a join
# since its always there on state_changed events
@ -576,7 +581,8 @@ def _apply_update( # noqa: C901
# Redundant keys on composite index:
# We already have ix_states_entity_id_last_updated
_drop_index(session_maker, "states", "ix_states_entity_id")
# This used to create ix_events_event_type_time_fired, but it was removed in version 32
# This used to create ix_events_event_type_time_fired,
# but it was removed in version 32
_drop_index(session_maker, "events", "ix_events_event_type")
elif new_version == 10:
# Now done in step 11

View file

@ -280,7 +280,8 @@ class SamsungTVLegacyBridge(SamsungTVBridge):
CONF_HOST: self.host,
CONF_METHOD: self.method,
CONF_PORT: None,
# We need this high timeout because waiting for auth popup is just an open socket
# We need this high timeout because waiting for auth popup
# is just an open socket
CONF_TIMEOUT: TIMEOUT_REQUEST,
}
try:
@ -310,7 +311,8 @@ class SamsungTVLegacyBridge(SamsungTVBridge):
LOGGER.debug("Create SamsungTVLegacyBridge for %s", self.host)
self._remote = Remote(self.config.copy())
# This is only happening when the auth was switched to DENY
# A removed auth will lead to socket timeout because waiting for auth popup is just an open socket
# A removed auth will lead to socket timeout because waiting
# for auth popup is just an open socket
except AccessDenied:
self._notify_reauth_callback()
raise
@ -483,7 +485,8 @@ class SamsungTVWSBridge(
CONF_HOST: self.host,
CONF_METHOD: self.method,
CONF_PORT: self.port,
# We need this high timeout because waiting for auth popup is just an open socket
# We need this high timeout because waiting for auth popup
# is just an open socket
CONF_TIMEOUT: TIMEOUT_REQUEST,
}

View file

@ -494,7 +494,7 @@ class ScriptEntity(ToggleEntity, RestoreEntity):
self.script.last_triggered = parse_datetime(last_triggered)
async def async_will_remove_from_hass(self):
"""Stop script and remove service when it will be removed from Home Assistant."""
"""Stop script and remove service when it will be removed from HA."""
await self.script.async_stop()
# remove service

View file

@ -117,9 +117,13 @@ class SIABaseEntity(RestoreEntity):
def async_handle_event(self, sia_event: SIAEvent) -> None:
"""Listen to dispatcher events for this port and account and update state and attributes.
If the event is for either the zone or the 0 zone (hub zone), then handle it further.
If the event had a code that was relevant for the entity, then update the attributes.
If the event had a code that was relevant or it was a availability event then update the availability and schedule the next unavailability check.
If the event is for either the zone or the 0 zone (hub zone),
then handle it further.
If the event had a code that was relevant for the entity,
then update the attributes.
If the event had a code that was relevant or it was a availability event
then update the availability and schedule the next unavailability check.
"""
_LOGGER.debug("Received event: %s", sia_event)
if int(sia_event.ri) not in (self.zone, SIA_HUB_ZONE):

View file

@ -224,7 +224,9 @@ class SimpliSafeAlarm(SimpliSafeEntity, AlarmControlPanelEntity):
self._attr_extra_state_attributes.update(
{
ATTR_ALARM_DURATION: self._system.alarm_duration,
ATTR_BATTERY_BACKUP_POWER_LEVEL: self._system.battery_backup_power_level,
ATTR_BATTERY_BACKUP_POWER_LEVEL: (
self._system.battery_backup_power_level
),
ATTR_ENTRY_DELAY_AWAY: self._system.entry_delay_away,
ATTR_ENTRY_DELAY_HOME: self._system.entry_delay_home,
ATTR_EXIT_DELAY_AWAY: self._system.exit_delay_away,

View file

@ -44,9 +44,9 @@ async def async_setup_entry(
async def async_add_player(player: SlimClient) -> None:
"""Add MediaPlayerEntity from SlimClient."""
# we delay adding the player a small bit because the player name may be received
# just a bit after connect. This way we can create a device reg entry with the correct name
# the name will either be available within a few milliseconds after connect or not at all
# (its an optional data packet)
# just a bit after connect. This way we can create a device reg entry with the
# correct name the name will either be available within a few milliseconds after
# connect or not at all (its an optional data packet)
for _ in range(10):
if player.player_id not in player.name:
break

View file

@ -54,7 +54,8 @@ class SmartTubConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
# this is a reauth attempt
if self._reauth_entry.unique_id != self.unique_id:
# there is a config entry matching this account, but it is not the one we were trying to reauth
# there is a config entry matching this account,
# but it is not the one we were trying to reauth
return self.async_abort(reason="already_configured")
self.hass.config_entries.async_update_entry(
self._reauth_entry, data=user_input

View file

@ -102,7 +102,7 @@ class SonosMedia:
@soco_error()
def poll_track_info(self) -> dict[str, Any]:
"""Poll the speaker for current track info, add converted position values, and return."""
"""Poll the speaker for current track info, add converted position values."""
track_info: dict[str, Any] = self.soco.get_current_track_info()
track_info[DURATION_SECONDS] = _timespan_secs(track_info.get("duration"))
track_info[POSITION_SECONDS] = _timespan_secs(track_info.get("position"))

View file

@ -261,7 +261,8 @@ class SonosMediaPlayerEntity(SonosEntity, MediaPlayerEntity):
"STOPPED",
):
# Sonos can consider itself "paused" but without having media loaded
# (happens if playing Spotify and via Spotify app you pick another device to play on)
# (happens if playing Spotify and via Spotify app
# you pick another device to play on)
if self.media.title is None:
return MediaPlayerState.IDLE
return MediaPlayerState.PAUSED

View file

@ -359,7 +359,8 @@ class SonosSpeaker:
if any(isinstance(result, Exception) for result in results):
raise SonosSubscriptionsFailed
# Create a polling task in case subscriptions fail or callback events do not arrive
# Create a polling task in case subscriptions fail
# or callback events do not arrive
if not self._poll_timer:
self._poll_timer = async_track_time_interval(
self.hass,

View file

@ -407,16 +407,19 @@ class SoundTouchMediaPlayer(MediaPlayerEntity):
return None
# Client devices do NOT return their siblings as part of the "slaves" list.
# Only the master has the full list of slaves. To compensate for this shortcoming
# we have to fetch the zone info from the master when the current device is a slave.
# Only the master has the full list of slaves. To compensate for this
# shortcoming we have to fetch the zone info from the master when the current
# device is a slave.
# In addition to this shortcoming, libsoundtouch seems to report the "is_master"
# property wrong on some slaves, so the only reliable way to detect if the current
# devices is the master, is by comparing the master_id of the zone with the device_id.
# property wrong on some slaves, so the only reliable way to detect
# if the current devices is the master, is by comparing the master_id
# of the zone with the device_id.
if zone_status.master_id == self._device.config.device_id:
return self._build_zone_info(self.entity_id, zone_status.slaves)
# The master device has to be searched by it's ID and not IP since libsoundtouch / BOSE API
# do not return the IP of the master for some slave objects/responses
# The master device has to be searched by it's ID and not IP since
# libsoundtouch / BOSE API do not return the IP of the master
# for some slave objects/responses
master_instance = self._get_instance_by_id(zone_status.master_id)
if master_instance is not None:
master_zone_status = master_instance.device.zone_status()
@ -424,8 +427,9 @@ class SoundTouchMediaPlayer(MediaPlayerEntity):
master_instance.entity_id, master_zone_status.slaves
)
# We should never end up here since this means we haven't found a master device to get the
# correct zone info from. In this case, assume current device is master
# We should never end up here since this means we haven't found a master
# device to get the correct zone info from. In this case,
# assume current device is master
return self._build_zone_info(self.entity_id, zone_status.slaves)
def _get_instance_by_ip(self, ip_address):

View file

@ -146,8 +146,9 @@ class Segment:
"""Render the HLS playlist section for the Segment.
The Segment may still be in progress.
This method stores intermediate data in hls_playlist_parts, hls_num_parts_rendered,
and hls_playlist_complete to avoid redoing work on subsequent calls.
This method stores intermediate data in hls_playlist_parts,
hls_num_parts_rendered, and hls_playlist_complete to avoid redoing
work on subsequent calls.
"""
if self.hls_playlist_complete:
return self.hls_playlist_template[0]
@ -164,12 +165,14 @@ class Segment:
):
self.hls_playlist_parts.append(
f"#EXT-X-PART:DURATION={part.duration:.3f},URI="
f'"./segment/{self.sequence}.{part_num}.m4s"{",INDEPENDENT=YES" if part.has_keyframe else ""}'
f'"./segment/{self.sequence}.{part_num}.m4s"'
f'{",INDEPENDENT=YES" if part.has_keyframe else ""}'
)
if self.complete:
# Construct the final playlist_template. The placeholder will share a line with
# the first element to avoid an extra newline when we don't render any parts.
# Append an empty string to create a trailing newline when we do render parts
# Construct the final playlist_template. The placeholder will share a
# line with the first element to avoid an extra newline when we don't
# render any parts. Append an empty string to create a trailing newline
# when we do render parts
self.hls_playlist_parts.append("")
self.hls_playlist_template = (
[] if last_stream_id == self.stream_id else ["#EXT-X-DISCONTINUITY"]
@ -204,9 +207,9 @@ class Segment:
)
if not add_hint:
return playlist
# Preload hints help save round trips by informing the client about the next part.
# The next part will usually be in this segment but will be first part of the next
# segment if this segment is already complete.
# Preload hints help save round trips by informing the client about the
# next part. The next part will usually be in this segment but will be
# first part of the next segment if this segment is already complete.
if self.complete: # Next part belongs to next segment
sequence = self.sequence + 1
part_num = 0
@ -434,7 +437,8 @@ class KeyFrameConverter:
) -> None:
"""Initialize."""
# Keep import here so that we can import stream integration without installing reqs
# Keep import here so that we can import stream integration
# without installingreqs
# pylint: disable-next=import-outside-toplevel
from homeassistant.components.camera.img_util import TurboJPEGSingleton
@ -456,7 +460,8 @@ class KeyFrameConverter:
if self._codec_context:
return
# Keep import here so that we can import stream integration without installing reqs
# Keep import here so that we can import stream integration without
# installing reqs
# pylint: disable-next=import-outside-toplevel
from av import CodecContext

View file

@ -69,7 +69,8 @@ class TriggerInstance:
event_config = event_trigger.TRIGGER_SCHEMA(event_config)
if self.remove:
self.remove()
# Note: No lock needed, event_trigger.async_attach_trigger is an synchronous function
# Note: No lock needed, event_trigger.async_attach_trigger
# is an synchronous function
self.remove = await event_trigger.async_attach_trigger(
self.trigger.hass,
event_config,

View file

@ -124,8 +124,9 @@ class TasmotaLight(
light_type = self._tasmota_entity.light_type
if light_type in [LIGHT_TYPE_RGB, LIGHT_TYPE_RGBW, LIGHT_TYPE_RGBCW]:
# Mark HS support for RGBW light because we don't have direct control over the
# white channel, so the base component's RGB->RGBW translation does not work
# Mark HS support for RGBW light because we don't have direct
# control over the white channel, so the base component's RGB->RGBW
# translation does not work
self._supported_color_modes.add(ColorMode.HS)
self._color_mode = ColorMode.HS

View file

@ -174,7 +174,8 @@ class TasmotaDiscoveryUpdate(TasmotaEntity):
# Unchanged payload: Ignore to avoid changing states
_LOGGER.debug("Ignoring unchanged update for: %s", self.entity_id)
# Set in case the entity has been removed and is re-added, for example when changing entity_id
# Set in case the entity has been removed and is re-added,
# for example when changing entity_id
set_discovery_hash(self.hass, self._discovery_hash)
self.async_on_remove(
async_dispatcher_connect(

View file

@ -207,7 +207,9 @@ SENSOR_DEVICE_CLASS_ICON_MAP: dict[str, dict[str, Any]] = {
}
SENSOR_UNIT_MAP = {
hc.CONCENTRATION_MICROGRAMS_PER_CUBIC_METER: CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
hc.CONCENTRATION_MICROGRAMS_PER_CUBIC_METER: (
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER
),
hc.CONCENTRATION_PARTS_PER_BILLION: CONCENTRATION_PARTS_PER_BILLION,
hc.CONCENTRATION_PARTS_PER_MILLION: CONCENTRATION_PARTS_PER_MILLION,
hc.ELECTRICAL_CURRENT_AMPERE: UnitOfElectricCurrent.AMPERE,