Merge of nested IF-IF cases - K-N (#48370)
parent 9737480742
commit 86212db71d

12 changed files with 86 additions and 65 deletions
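Every hunk below applies the same mechanical change: an outer `if` whose entire body is a single nested `if` is collapsed into one condition joined with `and`. As a minimal, illustrative sketch (the names are invented for the example, not taken from any of the touched integrations), mirroring the shape of the first hunk:

```python
# Before: the outer "if" exists only to guard the inner one.
def cancel_repeat_nested(event_value, event_code, repeat_tasks):
    if event_value == "key_up":
        if event_code in repeat_tasks:
            repeat_tasks[event_code].cancel()
            del repeat_tasks[event_code]


# After: a single condition; "and" short-circuits, so the membership
# test only runs once the first comparison has already passed.
def cancel_repeat_merged(event_value, event_code, repeat_tasks):
    if event_value == "key_up" and event_code in repeat_tasks:
        repeat_tasks[event_code].cancel()
        del repeat_tasks[event_code]
```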
@@ -312,10 +312,12 @@ class KeyboardRemote:
                             self.emulate_key_hold_repeat,
                         )
                     )
-                elif event.value == KEY_VALUE["key_up"]:
-                    if event.code in repeat_tasks:
-                        repeat_tasks[event.code].cancel()
-                        del repeat_tasks[event.code]
+                elif (
+                    event.value == KEY_VALUE["key_up"]
+                    and event.code in repeat_tasks
+                ):
+                    repeat_tasks[event.code].cancel()
+                    del repeat_tasks[event.code]
         except (OSError, PermissionError, asyncio.CancelledError):
             # cancel key repeat tasks
             for task in repeat_tasks.values():
@@ -51,11 +51,14 @@ async def handle_webhook(hass, webhook_id, request):
     except ValueError:
         return None
 
-    if isinstance(data, dict) and "signature" in data:
-        if await verify_webhook(hass, **data["signature"]):
-            data["webhook_id"] = webhook_id
-            hass.bus.async_fire(MESSAGE_RECEIVED, data)
-            return
+    if (
+        isinstance(data, dict)
+        and "signature" in data
+        and await verify_webhook(hass, **data["signature"])
+    ):
+        data["webhook_id"] = webhook_id
+        hass.bus.async_fire(MESSAGE_RECEIVED, data)
+        return
 
     _LOGGER.warning(
         "Mailgun webhook received an unauthenticated message - webhook_id: %s",
@@ -230,14 +230,13 @@ class MetWeather(CoordinatorEntity, WeatherEntity):
                 for k, v in FORECAST_MAP.items()
                 if met_item.get(v) is not None
             }
-            if not self._is_metric:
-                if ATTR_FORECAST_PRECIPITATION in ha_item:
-                    precip_inches = convert_distance(
-                        ha_item[ATTR_FORECAST_PRECIPITATION],
-                        LENGTH_MILLIMETERS,
-                        LENGTH_INCHES,
-                    )
-                    ha_item[ATTR_FORECAST_PRECIPITATION] = round(precip_inches, 2)
+            if not self._is_metric and ATTR_FORECAST_PRECIPITATION in ha_item:
+                precip_inches = convert_distance(
+                    ha_item[ATTR_FORECAST_PRECIPITATION],
+                    LENGTH_MILLIMETERS,
+                    LENGTH_INCHES,
+                )
+                ha_item[ATTR_FORECAST_PRECIPITATION] = round(precip_inches, 2)
             if ha_item.get(ATTR_FORECAST_CONDITION):
                 ha_item[ATTR_FORECAST_CONDITION] = format_condition(
                     ha_item[ATTR_FORECAST_CONDITION]
@@ -276,9 +276,8 @@ class MikrotikData:
 
     def update(self):
         """Update device_tracker from Mikrotik API."""
-        if not self.available or not self.api:
-            if not self.connect_to_hub():
-                return
+        if (not self.available or not self.api) and not self.connect_to_hub():
+            return
         _LOGGER.debug("updating network devices for host: %s", self._host)
         self.update_devices()
 
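The merged Mikrotik condition leans on `and` short-circuiting: `connect_to_hub()` is still evaluated only when the hub is flagged unavailable or the API handle is missing, so the reconnect side effect fires exactly as often as in the nested version. A small self-contained sketch of that ordering, with a hypothetical `connect_to_hub` stub standing in for the real method:

```python
calls = []


def connect_to_hub():
    """Hypothetical stand-in for MikrotikData.connect_to_hub()."""
    calls.append("connect")
    return False  # pretend the reconnect failed


def update(available, api):
    # connect_to_hub() is evaluated only when the left operand is True,
    # exactly like the nested "if" it replaces.
    if (not available or not api) and not connect_to_hub():
        return "skipped"
    return "updated"


assert update(available=True, api=object()) == "updated"
assert calls == []  # hub reachable: no reconnect attempt
assert update(available=False, api=None) == "skipped"
assert calls == ["connect"]  # hub unreachable: one reconnect attempt
```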
@@ -84,9 +84,10 @@ def calc_min(sensor_values):
     val = None
     entity_id = None
     for sensor_id, sensor_value in sensor_values:
-        if sensor_value not in [STATE_UNKNOWN, STATE_UNAVAILABLE]:
-            if val is None or val > sensor_value:
-                entity_id, val = sensor_id, sensor_value
+        if sensor_value not in [STATE_UNKNOWN, STATE_UNAVAILABLE] and (
+            val is None or val > sensor_value
+        ):
+            entity_id, val = sensor_id, sensor_value
     return entity_id, val
 
 
@@ -95,30 +96,35 @@ def calc_max(sensor_values):
     val = None
     entity_id = None
     for sensor_id, sensor_value in sensor_values:
-        if sensor_value not in [STATE_UNKNOWN, STATE_UNAVAILABLE]:
-            if val is None or val < sensor_value:
-                entity_id, val = sensor_id, sensor_value
+        if sensor_value not in [STATE_UNKNOWN, STATE_UNAVAILABLE] and (
+            val is None or val < sensor_value
+        ):
+            entity_id, val = sensor_id, sensor_value
     return entity_id, val
 
 
 def calc_mean(sensor_values, round_digits):
     """Calculate mean value, honoring unknown states."""
-    result = []
-    for _, sensor_value in sensor_values:
-        if sensor_value not in [STATE_UNKNOWN, STATE_UNAVAILABLE]:
-            result.append(sensor_value)
-    if len(result) == 0:
+    result = [
+        sensor_value
+        for _, sensor_value in sensor_values
+        if sensor_value not in [STATE_UNKNOWN, STATE_UNAVAILABLE]
+    ]
+
+    if not result:
         return None
     return round(sum(result) / len(result), round_digits)
 
 
 def calc_median(sensor_values, round_digits):
     """Calculate median value, honoring unknown states."""
-    result = []
-    for _, sensor_value in sensor_values:
-        if sensor_value not in [STATE_UNKNOWN, STATE_UNAVAILABLE]:
-            result.append(sensor_value)
-    if len(result) == 0:
+    result = [
+        sensor_value
+        for _, sensor_value in sensor_values
+        if sensor_value not in [STATE_UNKNOWN, STATE_UNAVAILABLE]
+    ]
+
+    if not result:
         return None
     result.sort()
     if len(result) % 2 == 0:
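In `calc_mean` and `calc_median` the change goes a step beyond merging conditions: the accumulate-in-a-loop filter becomes a list comprehension, and `len(result) == 0` becomes the truthiness check `not result`. A short sketch of the equivalence, using stand-in state constants instead of the homeassistant imports:

```python
# Stand-ins for homeassistant.const.STATE_UNKNOWN / STATE_UNAVAILABLE.
STATE_UNKNOWN = "unknown"
STATE_UNAVAILABLE = "unavailable"

sensor_values = [("a", 3.0), ("b", STATE_UNKNOWN), ("c", 5.0), ("d", STATE_UNAVAILABLE)]

# Old shape: build the filtered list with an explicit loop.
looped = []
for _, sensor_value in sensor_values:
    if sensor_value not in [STATE_UNKNOWN, STATE_UNAVAILABLE]:
        looped.append(sensor_value)

# New shape: the same filter as a list comprehension.
comprehended = [
    sensor_value
    for _, sensor_value in sensor_values
    if sensor_value not in [STATE_UNKNOWN, STATE_UNAVAILABLE]
]

assert looped == comprehended == [3.0, 5.0]
assert (len(looped) == 0) == (not looped)  # the two emptiness checks agree
```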
@@ -147,9 +147,8 @@ class MinecraftServerPlayersOnlineSensor(MinecraftServerSensorEntity):
         extra_state_attributes = None
         players_list = self._server.players_list
 
-        if players_list is not None:
-            if len(players_list) != 0:
-                extra_state_attributes = {ATTR_PLAYERS_LIST: self._server.players_list}
+        if players_list is not None and len(players_list) != 0:
+            extra_state_attributes = {ATTR_PLAYERS_LIST: self._server.players_list}
 
         self._extra_state_attributes = extra_state_attributes
 
@@ -98,9 +98,12 @@ class MjpegCamera(Camera):
         self._still_image_url = device_info.get(CONF_STILL_IMAGE_URL)
 
         self._auth = None
-        if self._username and self._password:
-            if self._authentication == HTTP_BASIC_AUTHENTICATION:
-                self._auth = aiohttp.BasicAuth(self._username, password=self._password)
+        if (
+            self._username
+            and self._password
+            and self._authentication == HTTP_BASIC_AUTHENTICATION
+        ):
+            self._auth = aiohttp.BasicAuth(self._username, password=self._password)
         self._verify_ssl = device_info.get(CONF_VERIFY_SSL)
 
     async def async_camera_image(self):
@@ -105,10 +105,12 @@ class MobileAppNotificationService(BaseNotificationService):
         """Send a message to the Lambda APNS gateway."""
         data = {ATTR_MESSAGE: message}
 
-        if kwargs.get(ATTR_TITLE) is not None:
-            # Remove default title from notifications.
-            if kwargs.get(ATTR_TITLE) != ATTR_TITLE_DEFAULT:
-                data[ATTR_TITLE] = kwargs.get(ATTR_TITLE)
+        # Remove default title from notifications.
+        if (
+            kwargs.get(ATTR_TITLE) is not None
+            and kwargs.get(ATTR_TITLE) != ATTR_TITLE_DEFAULT
+        ):
+            data[ATTR_TITLE] = kwargs.get(ATTR_TITLE)
 
         targets = kwargs.get(ATTR_TARGET)
 
@@ -600,9 +600,8 @@ class MqttLight(MqttEntity, LightEntity, RestoreEntity):
         # If brightness is being used instead of an on command, make sure
         # there is a brightness input. Either set the brightness to our
         # saved value or the maximum value if this is the first call
-        elif on_command_type == "brightness":
-            if ATTR_BRIGHTNESS not in kwargs:
-                kwargs[ATTR_BRIGHTNESS] = self._brightness if self._brightness else 255
+        elif on_command_type == "brightness" and ATTR_BRIGHTNESS not in kwargs:
+            kwargs[ATTR_BRIGHTNESS] = self._brightness if self._brightness else 255
 
         if ATTR_HS_COLOR in kwargs and self._topic[CONF_RGB_COMMAND_TOPIC] is not None:
 
@@ -60,13 +60,13 @@ async def async_setup(hass, config):
 
         # Filter out the events that were triggered by publishing
         # to the MQTT topic, or you will end up in an infinite loop.
-        if event.event_type == EVENT_CALL_SERVICE:
-            if (
-                event.data.get("domain") == mqtt.DOMAIN
-                and event.data.get("service") == mqtt.SERVICE_PUBLISH
-                and event.data[ATTR_SERVICE_DATA].get("topic") == pub_topic
-            ):
-                return
+        if (
+            event.event_type == EVENT_CALL_SERVICE
+            and event.data.get("domain") == mqtt.DOMAIN
+            and event.data.get("service") == mqtt.SERVICE_PUBLISH
+            and event.data[ATTR_SERVICE_DATA].get("topic") == pub_topic
+        ):
+            return
 
         event_info = {"event_type": event.event_type, "event_data": event.data}
         msg = json.dumps(event_info, cls=JSONEncoder)
@@ -180,9 +180,11 @@ class ThermostatEntity(ClimateEntity):
     @property
     def _target_temperature_trait(self):
         """Return the correct trait with a target temp depending on mode."""
-        if self.preset_mode == PRESET_ECO:
-            if ThermostatEcoTrait.NAME in self._device.traits:
-                return self._device.traits[ThermostatEcoTrait.NAME]
+        if (
+            self.preset_mode == PRESET_ECO
+            and ThermostatEcoTrait.NAME in self._device.traits
+        ):
+            return self._device.traits[ThermostatEcoTrait.NAME]
         if ThermostatTemperatureSetpointTrait.NAME in self._device.traits:
             return self._device.traits[ThermostatTemperatureSetpointTrait.NAME]
         return None
@@ -334,12 +334,19 @@ class NexiaZone(NexiaThermostatZoneEntity, ClimateEntity):
             new_cool_temp = min_temp + deadband
 
         # Check that we're within the deadband range, fix it if we're not
-        if new_heat_temp and new_heat_temp != cur_heat_temp:
-            if new_cool_temp - new_heat_temp < deadband:
-                new_cool_temp = new_heat_temp + deadband
-        if new_cool_temp and new_cool_temp != cur_cool_temp:
-            if new_cool_temp - new_heat_temp < deadband:
-                new_heat_temp = new_cool_temp - deadband
+        if (
+            new_heat_temp
+            and new_heat_temp != cur_heat_temp
+            and new_cool_temp - new_heat_temp < deadband
+        ):
+            new_cool_temp = new_heat_temp + deadband
+
+        if (
+            new_cool_temp
+            and new_cool_temp != cur_cool_temp
+            and new_cool_temp - new_heat_temp < deadband
+        ):
+            new_heat_temp = new_cool_temp - deadband
 
         self._zone.set_heat_cool_temp(
             heat_temperature=new_heat_temp,