Use assignment expressions 29 (#58713)

parent 887d04be60
commit b1d49b3b66

15 changed files with 27 additions and 62 deletions
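Every hunk below applies the same refactor: a regex or parse result that was bound on one line and tested on the next is collapsed into a single assignment expression (the "walrus" operator, PEP 572, Python 3.8+). A minimal sketch of the pattern, with hypothetical names (pattern, text, use):

    # Before: bind, then test on a separate line.
    match = pattern.match(text)
    if match:
        use(match)

    # After: bind and test in one expression.
    if match := pattern.match(text):
        use(match)

The bound name stays in scope after the if block, so later uses such as match.group(1) are unaffected.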
@@ -111,8 +111,7 @@ class AcerSwitch(SwitchEntity):
         """Write msg, obtain answer and format output."""
         # answers are formatted as ***\answer\r***
         awns = self._write_read(msg)
-        match = re.search(r"\r(.+)\r", awns)
-        if match:
+        if match := re.search(r"\r(.+)\r", awns):
             return match.group(1)
         return STATE_UNKNOWN

@@ -59,9 +59,7 @@ def _get_github_import_url(url: str) -> str:
     if url.startswith("https://raw.githubusercontent.com/"):
         return url

-    match = GITHUB_FILE_PATTERN.match(url)
-
-    if match is None:
+    if (match := GITHUB_FILE_PATTERN.match(url)) is None:
         raise UnsupportedUrl("Not a GitHub file url")

     repo, path = match.groups()
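Note the parentheses in the added line: the walrus operator binds more loosely than a comparison, so the assignment must be wrapped when its result is compared to None. A sketch reusing the pattern name from this hunk:

    # Correct: bind the match object, then compare it to None.
    if (match := GITHUB_FILE_PATTERN.match(url)) is None:
        ...

    # Without parentheses the *boolean* comparison result would be bound:
    # match := (GITHUB_FILE_PATTERN.match(url) is None)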
@@ -74,8 +72,7 @@ def _get_community_post_import_url(url: str) -> str:

     Async friendly.
     """
-    match = COMMUNITY_TOPIC_PATTERN.match(url)
-    if match is None:
+    if (match := COMMUNITY_TOPIC_PATTERN.match(url)) is None:
         raise UnsupportedUrl("Not a topic url")

     _topic, post = match.groups()
@@ -118,9 +118,7 @@ class DefaultAgent(AbstractConversationAgent):

         for intent_type, matchers in intents.items():
             for matcher in matchers:
-                match = matcher.match(text)
-
-                if not match:
+                if not (match := matcher.match(text)):
                     continue

                 return await intent.async_handle(
@@ -130,16 +130,14 @@ async def ws_get_statistics_during_period(
     start_time_str = msg["start_time"]
     end_time_str = msg.get("end_time")

-    start_time = dt_util.parse_datetime(start_time_str)
-    if start_time:
+    if start_time := dt_util.parse_datetime(start_time_str):
         start_time = dt_util.as_utc(start_time)
     else:
         connection.send_error(msg["id"], "invalid_start_time", "Invalid start_time")
         return

     if end_time_str:
-        end_time = dt_util.parse_datetime(end_time_str)
-        if end_time:
+        if end_time := dt_util.parse_datetime(end_time_str):
             end_time = dt_util.as_utc(end_time)
         else:
             connection.send_error(msg["id"], "invalid_end_time", "Invalid end_time")
@@ -194,11 +192,8 @@ class HistoryPeriodView(HomeAssistantView):
     ) -> web.Response:
         """Return history over a period of time."""
         datetime_ = None
-        if datetime:
-            datetime_ = dt_util.parse_datetime(datetime)
-
-            if datetime_ is None:
-                return self.json_message("Invalid datetime", HTTPStatus.BAD_REQUEST)
+        if datetime and (datetime_ := dt_util.parse_datetime(datetime)) is None:
+            return self.json_message("Invalid datetime", HTTPStatus.BAD_REQUEST)

         now = dt_util.utcnow()

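Because "and" short-circuits, the assignment expression in this hunk only runs when datetime is truthy; otherwise datetime_ keeps its earlier None default. A minimal sketch of that behaviour, with a hypothetical falsy value:

    datetime_ = None
    datetime = ""          # falsy: dt_util.parse_datetime is never called
    if datetime and (datetime_ := dt_util.parse_datetime(datetime)) is None:
        ...                # error path only for a truthy but unparseable value
    # here datetime_ is still None for falsy input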
@@ -212,8 +207,7 @@ class HistoryPeriodView(HomeAssistantView):
             return self.json([])

         if end_time_str := request.query.get("end_time"):
-            end_time = dt_util.parse_datetime(end_time_str)
-            if end_time:
+            if end_time := dt_util.parse_datetime(end_time_str):
                 end_time = dt_util.as_utc(end_time)
             else:
                 return self.json_message("Invalid end_time", HTTPStatus.BAD_REQUEST)
@@ -87,19 +87,16 @@ def valid_initial(conf):
         return conf

     if conf[CONF_HAS_DATE] and conf[CONF_HAS_TIME]:
-        parsed_value = dt_util.parse_datetime(initial)
-        if parsed_value is not None:
+        if dt_util.parse_datetime(initial) is not None:
             return conf
         raise vol.Invalid(f"Initial value '{initial}' can't be parsed as a datetime")

     if conf[CONF_HAS_DATE]:
-        parsed_value = dt_util.parse_date(initial)
-        if parsed_value is not None:
+        if dt_util.parse_date(initial) is not None:
             return conf
         raise vol.Invalid(f"Initial value '{initial}' can't be parsed as a date")

-    parsed_value = dt_util.parse_time(initial)
-    if parsed_value is not None:
+    if dt_util.parse_time(initial) is not None:
         return conf
     raise vol.Invalid(f"Initial value '{initial}' can't be parsed as a time")

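This hunk is the one place the refactor drops the binding instead of keeping it: parsed_value was never read after the test, so the parse result is checked inline with no walrus at all. As a rule of thumb (a hedged sketch with hypothetical names, not taken from the diff):

    # Value used later -> keep the name with a walrus.
    if (value := parse(text)) is not None:
        consume(value)

    # Value only tested -> no binding needed.
    if parse(text) is not None:
        ...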
@@ -282,15 +279,13 @@ class InputDatetime(RestoreEntity):
                     current_datetime = date_time

             elif self.has_date:
-                date = dt_util.parse_date(old_state.state)
-                if date is None:
+                if (date := dt_util.parse_date(old_state.state)) is None:
                     current_datetime = dt_util.parse_datetime(default_value)
                 else:
                     current_datetime = py_datetime.datetime.combine(date, DEFAULT_TIME)

             else:
-                time = dt_util.parse_time(old_state.state)
-                if time is None:
+                if (time := dt_util.parse_time(old_state.state)) is None:
                     current_datetime = dt_util.parse_datetime(default_value)
                 else:
                     current_datetime = py_datetime.datetime.combine(
@@ -385,8 +385,7 @@ def is_address(value: str) -> tuple[AddressType, str]:
     myhome.0.g11
     myhome.s0.g11
     """
-    matcher = PATTERN_ADDRESS.match(value)
-    if matcher:
+    if matcher := PATTERN_ADDRESS.match(value):
         is_group = matcher.group("type") == "g"
         addr = (int(matcher.group("seg_id")), int(matcher.group("id")), is_group)
         conn_id = matcher.group("conn_id")
@@ -99,9 +99,7 @@ class LitterRobotControlEntity(LitterRobotEntity):
     @staticmethod
     def parse_time_at_default_timezone(time_str: str) -> time | None:
         """Parse a time string and add default timezone."""
-        parsed_time = dt_util.parse_time(time_str)
-
-        if parsed_time is None:
+        if (parsed_time := dt_util.parse_time(time_str)) is None:
             return None

         return (
@@ -22,8 +22,7 @@ def normalize_metadata(metadata: dict) -> dict:
     """Normalize object metadata by stripping the prefix."""
     new_metadata = {}
     for meta_key, meta_value in metadata.items():
-        match = _METADATA_RE.match(meta_key)
-        if not match:
+        if not (match := _METADATA_RE.match(meta_key)):
             continue

         new_metadata[match.group(1).lower()] = meta_value
@@ -97,9 +97,8 @@ async def async_start(  # noqa: C901
         payload = msg.payload
         topic = msg.topic
         topic_trimmed = topic.replace(f"{discovery_topic}/", "", 1)
-        match = TOPIC_MATCHER.match(topic_trimmed)

-        if not match:
+        if not (match := TOPIC_MATCHER.match(topic_trimmed)):
             if topic_trimmed.endswith("config"):
                 _LOGGER.warning(
                     "Received message on illegal discovery topic '%s'", topic
@@ -157,8 +157,7 @@ class OpenUvSensor(OpenUvEntity, SensorEntity):
                 self._attr_native_value = UV_LEVEL_LOW
         elif self.entity_description.key == TYPE_MAX_UV_INDEX:
             self._attr_native_value = data["uv_max"]
-            uv_max_time = parse_datetime(data["uv_max_time"])
-            if uv_max_time:
+            if uv_max_time := parse_datetime(data["uv_max_time"]):
                 self._attr_extra_state_attributes.update(
                     {ATTR_MAX_UV_TIME: as_local(uv_max_time)}
                 )
@@ -96,9 +96,7 @@ def entities_in_script(hass: HomeAssistant, entity_id: str) -> list[str]:

     component = hass.data[DOMAIN]

-    script_entity = component.get_entity(entity_id)
-
-    if script_entity is None:
+    if (script_entity := component.get_entity(entity_id)) is None:
         return []

     return list(script_entity.script.referenced_entities)
@@ -127,9 +125,7 @@ def devices_in_script(hass: HomeAssistant, entity_id: str) -> list[str]:

     component = hass.data[DOMAIN]

-    script_entity = component.get_entity(entity_id)
-
-    if script_entity is None:
+    if (script_entity := component.get_entity(entity_id)) is None:
         return []

     return list(script_entity.script.referenced_devices)
@@ -158,9 +154,7 @@ def areas_in_script(hass: HomeAssistant, entity_id: str) -> list[str]:

     component = hass.data[DOMAIN]

-    script_entity = component.get_entity(entity_id)
-
-    if script_entity is None:
+    if (script_entity := component.get_entity(entity_id)) is None:
         return []

     return list(script_entity.script.referenced_areas)
@@ -462,8 +462,7 @@ class SpeechManager:

         This method is a coroutine.
         """
-        record = _RE_VOICE_FILE.match(filename.lower())
-        if not record:
+        if not (record := _RE_VOICE_FILE.match(filename.lower())):
             raise HomeAssistantError("Wrong tts file format!")

         key = KEY_PATTERN.format(
@@ -571,8 +570,7 @@ def _get_cache_files(cache_dir):

     folder_data = os.listdir(cache_dir)
     for file_data in folder_data:
-        record = _RE_VOICE_FILE.match(file_data)
-        if record:
+        if record := _RE_VOICE_FILE.match(file_data):
             key = KEY_PATTERN.format(
                 record.group(1), record.group(2), record.group(3), record.group(4)
             )
@@ -161,8 +161,7 @@ class XiaomiMiioFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
         self.mac = discovery_info.get("properties", {}).get("mac")
         if self.mac is None:
             poch = discovery_info.get("properties", {}).get("poch", "")
-            result = search(r"mac=\w+", poch)
-            if result is not None:
+            if (result := search(r"mac=\w+", poch)) is not None:
                 self.mac = result.group(0).split("=")[1]

         if not name or not self.host or not self.mac:
@@ -49,9 +49,7 @@ async def async_update_entity(hass: HomeAssistant, entity_id: str) -> None:
         )
         return

-    entity_obj = entity_comp.get_entity(entity_id)
-
-    if entity_obj is None:
+    if (entity_obj := entity_comp.get_entity(entity_id)) is None:
         logging.getLogger(__name__).warning(
             "Forced update failed. Entity %s not found.", entity_id
         )
@@ -168,8 +168,7 @@ def _fuzzymatch(name: str, items: Iterable[T], key: Callable[[T], str]) -> T | None:
     pattern = ".*?".join(name)
     regex = re.compile(pattern, re.IGNORECASE)
     for idx, item in enumerate(items):
-        match = regex.search(key(item))
-        if match:
+        if match := regex.search(key(item)):
             # Add key length so we prefer shorter keys with the same group and start.
             # Add index so we pick first match in case same group, start, and key length.
             matches.append(
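For context on the unchanged lines in this last hunk: ".*?".join(name) builds a lazy subsequence regex, so each character of the query may be separated by arbitrary text in the candidate key. A hedged sketch with hypothetical inputs:

    import re

    pattern = ".*?".join("lr")                # -> "l.*?r"
    regex = re.compile(pattern, re.IGNORECASE)
    print(bool(regex.search("Living Room")))  # True: 'l' then 'r', in order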