Use assignment expressions 11 (#57792)
parent 238b488642
commit aa7dc78a1e

25 changed files with 45 additions and 111 deletions
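Every change in this commit follows the same shape: a value that was assigned to a temporary and then tested on the following line is collapsed into a single `if` using an assignment expression (the "walrus" operator, PEP 572, available since Python 3.8). A minimal sketch of the pattern, using hypothetical `config`/`value` names rather than any code from the diff:

# Before: separate assignment and test.
config = {"answer": 42}
value = config.get("answer")
if value is not None:
    print(value)

# After: the assignment expression folds the lookup into the condition.
if (value := config.get("answer")) is not None:
    print(value)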
@@ -211,8 +211,7 @@ class HistoryPeriodView(HomeAssistantView):
         if start_time > now:
             return self.json([])

-        end_time_str = request.query.get("end_time")
-        if end_time_str:
+        if end_time_str := request.query.get("end_time"):
             end_time = dt_util.parse_datetime(end_time_str)
             if end_time:
                 end_time = dt_util.as_utc(end_time)

@@ -304,13 +303,11 @@ class HistoryPeriodView(HomeAssistantView):
 def sqlalchemy_filter_from_include_exclude_conf(conf):
     """Build a sql filter from config."""
     filters = Filters()
-    exclude = conf.get(CONF_EXCLUDE)
-    if exclude:
+    if exclude := conf.get(CONF_EXCLUDE):
         filters.excluded_entities = exclude.get(CONF_ENTITIES, [])
         filters.excluded_domains = exclude.get(CONF_DOMAINS, [])
         filters.excluded_entity_globs = exclude.get(CONF_ENTITY_GLOBS, [])
-    include = conf.get(CONF_INCLUDE)
-    if include:
+    if include := conf.get(CONF_INCLUDE):
         filters.included_entities = include.get(CONF_ENTITIES, [])
         filters.included_domains = include.get(CONF_DOMAINS, [])
         filters.included_entity_globs = include.get(CONF_ENTITY_GLOBS, [])
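Each conversion keeps the original test exactly as it was: truthiness checks stay truthiness checks (`if exclude := conf.get(CONF_EXCLUDE):`), while the `is None` / `is not None` checks in later hunks keep the explicit comparison, which requires parentheses around the assignment expression so the whole lookup result, not the comparison, is bound. A small illustrative sketch (hypothetical `settings` dict, not code from the diff):

settings = {"mode": ""}

# Truthiness test: an empty string or a missing key skips the block.
if mode := settings.get("mode"):
    print(f"mode is {mode}")

# Identity test: only a missing key (None) skips the block; "" still enters.
if (mode := settings.get("mode")) is not None:
    print(f"mode is set, possibly empty: {mode!r}")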
@@ -150,9 +150,7 @@ def entities_in_scene(hass: HomeAssistant, entity_id: str) -> list[str]:

     platform = hass.data[DATA_PLATFORM]

-    entity = platform.entities.get(entity_id)
-
-    if entity is None:
+    if (entity := platform.entities.get(entity_id)) is None:
         return []

     return list(entity.scene_config.states)

@@ -233,8 +231,7 @@ async def async_setup_platform(hass, config, async_add_entities, discovery_info=
         entities = call.data[CONF_ENTITIES]

         for entity_id in snapshot:
-            state = hass.states.get(entity_id)
-            if state is None:
+            if (state := hass.states.get(entity_id)) is None:
                 _LOGGER.warning(
                     "Entity %s does not exist and therefore cannot be snapshotted",
                     entity_id,

@@ -248,8 +245,7 @@ async def async_setup_platform(hass, config, async_add_entities, discovery_info=

         scene_config = SceneConfig(None, call.data[CONF_SCENE_ID], None, entities)
         entity_id = f"{SCENE_DOMAIN}.{scene_config.name}"
-        old = platform.entities.get(entity_id)
-        if old is not None:
+        if (old := platform.entities.get(entity_id)) is not None:
             if not old.from_service:
                 _LOGGER.warning("The scene %s already exists", entity_id)
                 return

@@ -263,10 +259,8 @@ async def async_setup_platform(hass, config, async_add_entities, discovery_info=

 def _process_scenes_config(hass, async_add_entities, config):
     """Process multiple scenes and add them."""
-    scene_config = config[STATES]
-
     # Check empty list
-    if not scene_config:
+    if not (scene_config := config[STATES]):
         return

     async_add_entities(
@@ -79,13 +79,11 @@ async def async_attach_trigger(hass, config, action, automation_info):

         # Check state of entity. If valid, set up a listener.
         if new_state.domain == "input_datetime":
-            has_date = new_state.attributes["has_date"]
-            if has_date:
+            if has_date := new_state.attributes["has_date"]:
                 year = new_state.attributes["year"]
                 month = new_state.attributes["month"]
                 day = new_state.attributes["day"]
-            has_time = new_state.attributes["has_time"]
-            if has_time:
+            if has_time := new_state.attributes["has_time"]:
                 hour = new_state.attributes["hour"]
                 minute = new_state.attributes["minute"]
                 second = new_state.attributes["second"]
@@ -29,9 +29,7 @@ def async_sign_path(
     hass: HomeAssistant, refresh_token_id: str, path: str, expiration: timedelta
 ) -> str:
     """Sign a path for temporary access without auth header."""
-    secret = hass.data.get(DATA_SIGN_SECRET)
-
-    if secret is None:
+    if (secret := hass.data.get(DATA_SIGN_SECRET)) is None:
         secret = hass.data[DATA_SIGN_SECRET] = secrets.token_hex()

     now = dt_util.utcnow()

@@ -80,14 +78,10 @@ def setup_auth(hass: HomeAssistant, app: Application) -> None:

     async def async_validate_signed_request(request: Request) -> bool:
         """Validate a signed request."""
-        secret = hass.data.get(DATA_SIGN_SECRET)
-
-        if secret is None:
+        if (secret := hass.data.get(DATA_SIGN_SECRET)) is None:
             return False

-        signature = request.query.get(SIGN_QUERY_PARAM)
-
-        if signature is None:
+        if (signature := request.query.get(SIGN_QUERY_PARAM)) is None:
             return False

         try:
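A name bound by an assignment expression lives in the enclosing function scope, not in the `if` block, so code after the conditional can keep using it. That is why the signing helpers above still work: `secret` is bound inside the condition, possibly replaced by a fresh token inside the block, and then used by the signing logic that follows `now = dt_util.utcnow()`. A standalone sketch of the same scoping behavior, with a hypothetical `cache` dict in place of `hass.data`:

import secrets

def get_secret(cache):
    # The walrus binds "secret" in the function scope, not in the if-block.
    if (secret := cache.get("sign_secret")) is None:
        secret = cache["sign_secret"] = secrets.token_hex()
    # "secret" is still defined here, whichever branch ran.
    return secret

cache = {}
print(get_secret(cache) == get_secret(cache))  # True: the second call hits the cache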
@@ -40,8 +40,7 @@ class HomeAssistantView:
     @staticmethod
     def context(request: web.Request) -> Context:
         """Generate a context from a request."""
-        user = request.get("hass_user")
-        if user is None:
+        if (user := request.get("hass_user")) is None:
             return Context()

         return Context(user_id=user.id)
@@ -28,9 +28,7 @@ async def _async_reproduce_states(
     reproduce_options: dict[str, Any] | None = None,
 ) -> None:
     """Reproduce input boolean states."""
-    cur_state = hass.states.get(state.entity_id)
-
-    if cur_state is None:
+    if (cur_state := hass.states.get(state.entity_id)) is None:
         _LOGGER.warning("Unable to find entity %s", state.entity_id)
         return

@@ -83,8 +83,7 @@ def has_date_or_time(conf):

 def valid_initial(conf):
     """Check the initial value is valid."""
-    initial = conf.get(CONF_INITIAL)
-    if not initial:
+    if not (initial := conf.get(CONF_INITIAL)):
         return conf

     if conf[CONF_HAS_DATE] and conf[CONF_HAS_TIME]:

@@ -226,8 +225,7 @@ class InputDatetime(RestoreEntity):
         self.editable = True
         self._current_datetime = None

-        initial = config.get(CONF_INITIAL)
-        if not initial:
+        if not (initial := config.get(CONF_INITIAL)):
             return

         if self.has_date and self.has_time:
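When the negated truthiness form is converted, the parentheses around the assignment expression are required: `if not (initial := conf.get(CONF_INITIAL)):` first binds the lookup result to `initial` and then negates it, whereas `if not initial := conf.get(CONF_INITIAL):` is rejected by the parser because `not initial` is not a valid assignment target. A tiny sketch with a hypothetical `conf` dict:

conf = {"initial": ""}

# Parentheses bind the value first; "not" then tests it.
if not (initial := conf.get("initial")):
    print("no usable initial value")
else:
    print(f"initial = {initial!r}")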
@@ -41,9 +41,7 @@ async def _async_reproduce_state(
     reproduce_options: dict[str, Any] | None = None,
 ) -> None:
     """Reproduce a single state."""
-    cur_state = hass.states.get(state.entity_id)
-
-    if cur_state is None:
+    if (cur_state := hass.states.get(state.entity_id)) is None:
         _LOGGER.warning("Unable to find entity %s", state.entity_id)
         return

@@ -31,10 +31,8 @@ async def _async_reproduce_state(
     reproduce_options: dict[str, Any] | None = None,
 ) -> None:
     """Reproduce a single state."""
-    cur_state = hass.states.get(state.entity_id)
-
     # Return if we can't find entity
-    if cur_state is None:
+    if (cur_state := hass.states.get(state.entity_id)) is None:
         _LOGGER.warning("Unable to find entity %s", state.entity_id)
         return

@@ -22,10 +22,8 @@ async def _async_reproduce_state(
     reproduce_options: dict[str, Any] | None = None,
 ) -> None:
     """Reproduce a single state."""
-    cur_state = hass.states.get(state.entity_id)
-
     # Return if we can't find the entity
-    if cur_state is None:
+    if (cur_state := hass.states.get(state.entity_id)) is None:
         _LOGGER.warning("Unable to find entity %s", state.entity_id)
         return

@@ -125,13 +125,11 @@ def get_supported_color_modes(hass: HomeAssistant, entity_id: str) -> set | None
     First try the statemachine, then entity registry.
     This is the equivalent of entity helper get_supported_features.
     """
-    state = hass.states.get(entity_id)
-    if state:
+    if state := hass.states.get(entity_id):
         return state.attributes.get(ATTR_SUPPORTED_COLOR_MODES)

     entity_registry = er.async_get(hass)
-    entry = entity_registry.async_get(entity_id)
-    if not entry:
+    if not (entry := entity_registry.async_get(entity_id)):
         raise HomeAssistantError(f"Unknown entity {entity_id}")
     if not entry.capabilities:
         return None

@@ -629,9 +627,7 @@ class Profiles:
     @callback
     def apply_profile(self, name: str, params: dict) -> None:
         """Apply a profile."""
-        profile = self.data.get(name)
-
-        if profile is None:
+        if (profile := self.data.get(name)) is None:
             return

         if profile.hs_color is not None:
@@ -123,9 +123,7 @@ async def _async_reproduce_state(
     reproduce_options: dict[str, Any] | None = None,
 ) -> None:
     """Reproduce a single state."""
-    cur_state = hass.states.get(state.entity_id)
-
-    if cur_state is None:
+    if (cur_state := hass.states.get(state.entity_id)) is None:
         _LOGGER.warning("Unable to find entity %s", state.entity_id)
         return

@@ -150,9 +150,7 @@ async def async_setup(hass, config):
         "logbook", "logbook", "hass:format-list-bulleted-type"
     )

-    conf = config.get(DOMAIN, {})
-
-    if conf:
+    if conf := config.get(DOMAIN, {}):
         filters = sqlalchemy_filter_from_include_exclude_conf(conf)
         entities_filter = convert_include_exclude_filter(conf)
     else:

@@ -202,8 +200,7 @@ class LogbookView(HomeAssistantView):
         else:
             datetime = dt_util.start_of_local_day()

-        period = request.query.get("period")
-        if period is None:
+        if (period := request.query.get("period")) is None:
             period = 1
         else:
             period = int(period)

@@ -218,8 +215,7 @@ class LogbookView(HomeAssistantView):
                 "Format should be <domain>.<object_id>"
             ) from vol.Invalid

-        end_time = request.query.get("end_time")
-        if end_time is None:
+        if (end_time := request.query.get("end_time")) is None:
             start_day = dt_util.as_utc(datetime) - timedelta(days=period - 1)
             end_day = start_day + timedelta(days=period)
         else:

@@ -605,9 +601,7 @@ def _keep_event(hass, event, entities_filter):
 def _augment_data_with_context(
     data, entity_id, event, context_lookup, entity_attr_cache, external_events
 ):
-    context_event = context_lookup.get(event.context_id)
-
-    if not context_event:
+    if not (context_event := context_lookup.get(event.context_id)):
         return

     if event == context_event:

@@ -663,8 +657,7 @@ def _augment_data_with_context(
     if event_type in external_events:
         domain, describe_event = external_events[event_type]
         data["context_domain"] = domain
-        name = describe_event(context_event).get(ATTR_NAME)
-        if name:
+        if name := describe_event(context_event).get(ATTR_NAME):
             data["context_name"] = name


@@ -789,8 +782,7 @@ class EntityAttributeCache:
         else:
             self._cache[entity_id] = {}

-        current_state = self._hass.states.get(entity_id)
-        if current_state:
+        if current_state := self._hass.states.get(entity_id):
             # Try the current state as its faster than decoding the
             # attributes
             self._cache[entity_id][attribute] = current_state.attributes.get(attribute)
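The logbook hunk above also shows why the assignment expression is preferable to simply inlining the call: the looked-up value is still needed inside the block (`data["context_name"] = name`), so inlining `describe_event(context_event).get(ATTR_NAME)` into the `if` would force a second call, while the walrus evaluates it once and keeps the result. A minimal sketch with a hypothetical `describe` function standing in for the real lookup:

def describe(event):
    # Stand-in for a potentially expensive lookup; returns a dict of attributes.
    print("describe() called")
    return {"name": "kitchen light"}

data = {}
event = object()

# One call, result reused inside the block.
if name := describe(event).get("name"):
    data["context_name"] = name

print(data)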
@@ -984,8 +984,7 @@ class MediaPlayerEntity(Entity):
             response = await websession.get(url)
             if response.status == HTTP_OK:
                 content = await response.read()
-                content_type = response.headers.get(CONTENT_TYPE)
-                if content_type:
+                if content_type := response.headers.get(CONTENT_TYPE):
                     content_type = content_type.split(";")[0]

         if content is None:
@@ -22,9 +22,7 @@ async def _async_reproduce_state(
     reproduce_options: dict[str, Any] | None = None,
 ) -> None:
     """Reproduce a single state."""
-    cur_state = hass.states.get(state.entity_id)
-
-    if cur_state is None:
+    if (cur_state := hass.states.get(state.entity_id)) is None:
         _LOGGER.warning("Unable to find entity %s", state.entity_id)
         return

@@ -71,8 +71,7 @@ def async_create(
     context: Context | None = None,
 ) -> None:
     """Generate a notification."""
-    notifications = hass.data.get(DOMAIN)
-    if notifications is None:
+    if (notifications := hass.data.get(DOMAIN)) is None:
         notifications = hass.data[DOMAIN] = {}

     if notification_id is not None:

@@ -134,8 +133,7 @@ def async_dismiss(
     hass: HomeAssistant, notification_id: str, *, context: Context | None = None
 ) -> None:
     """Remove a notification."""
-    notifications = hass.data.get(DOMAIN)
-    if notifications is None:
+    if (notifications := hass.data.get(DOMAIN)) is None:
         notifications = hass.data[DOMAIN] = {}

     entity_id = ENTITY_ID_FORMAT.format(slugify(notification_id))
@@ -226,9 +226,7 @@ class PersonStorageCollection(collection.StorageCollection):
         """Validate the config is valid."""
         data = self.CREATE_SCHEMA(data)

-        user_id = data.get(CONF_USER_ID)
-
-        if user_id is not None:
+        if (user_id := data.get(CONF_USER_ID)) is not None:
             await self._validate_user_id(user_id)

         return data

@@ -410,8 +408,7 @@ class Person(RestoreEntity):
             data[ATTR_GPS_ACCURACY] = self._gps_accuracy
         if self._source is not None:
             data[ATTR_SOURCE] = self._source
-        user_id = self._config.get(CONF_USER_ID)
-        if user_id is not None:
+        if (user_id := self._config.get(CONF_USER_ID)) is not None:
             data[ATTR_USER_ID] = user_id
         return data

@@ -448,9 +445,7 @@ class Person(RestoreEntity):
             self._unsub_track_device()
             self._unsub_track_device = None

-        trackers = self._config[CONF_DEVICE_TRACKERS]
-
-        if trackers:
+        if trackers := self._config[CONF_DEVICE_TRACKERS]:
             _LOGGER.debug("Subscribe to device trackers for %s", self.entity_id)

             self._unsub_track_device = async_track_state_change_event(
@@ -463,9 +463,7 @@ class Recorder(threading.Thread):
         if event.event_type in self.exclude_t:
             return False

-        entity_id = event.data.get(ATTR_ENTITY_ID)
-
-        if entity_id is None:
+        if (entity_id := event.data.get(ATTR_ENTITY_ID)) is None:
             return True

         if isinstance(entity_id, str):

@@ -496,8 +494,7 @@ class Recorder(threading.Thread):

     def do_adhoc_statistics(self, **kwargs):
         """Trigger an adhoc statistics run."""
-        start = kwargs.get("start")
-        if not start:
+        if not (start := kwargs.get("start")):
             start = statistics.get_start_time()
         self.queue.put(StatisticsTask(start))

@@ -523,8 +523,7 @@ def list_statistic_ids(
         metadata = get_metadata_with_session(hass, session, None, statistic_type)

         for _, meta in metadata.values():
-            unit = meta["unit_of_measurement"]
-            if unit is not None:
+            if (unit := meta["unit_of_measurement"]) is not None:
                 # Display unit according to user settings
                 unit = _configured_unit(unit, units)
             meta["unit_of_measurement"] = unit
@@ -30,9 +30,7 @@ async def _async_reproduce_state(
     reproduce_options: dict[str, Any] | None = None,
 ) -> None:
     """Reproduce a single state."""
-    cur_state = hass.states.get(state.entity_id)
-
-    if cur_state is None:
+    if (cur_state := hass.states.get(state.entity_id)) is None:
         _LOGGER.warning("Unable to find entity %s", state.entity_id)
         return

@@ -33,9 +33,7 @@ async def _async_reproduce_state(
     reproduce_options: dict[str, Any] | None = None,
 ) -> None:
     """Reproduce a single state."""
-    cur_state = hass.states.get(state.entity_id)
-
-    if cur_state is None:
+    if (cur_state := hass.states.get(state.entity_id)) is None:
         _LOGGER.warning("Unable to find entity %s", state.entity_id)
         return

@@ -50,9 +50,7 @@ async def _async_reproduce_state(
     reproduce_options: dict[str, Any] | None = None,
 ) -> None:
     """Reproduce a single state."""
-    cur_state = hass.states.get(state.entity_id)
-
-    if cur_state is None:
+    if (cur_state := hass.states.get(state.entity_id)) is None:
         _LOGGER.warning("Unable to find entity %s", state.entity_id)
         return

@@ -81,10 +81,9 @@ def async_generate_path(webhook_id: str) -> str:
 async def async_handle_webhook(hass, webhook_id, request):
     """Handle a webhook."""
     handlers = hass.data.setdefault(DOMAIN, {})
-    webhook = handlers.get(webhook_id)

     # Always respond successfully to not give away if a hook exists or not.
-    if webhook is None:
+    if (webhook := handlers.get(webhook_id)) is None:
         if isinstance(request, MockRequest):
             received_from = request.mock_source
         else:
@@ -58,8 +58,7 @@ def async_register_command(
         schema = handler._ws_schema  # type: ignore[attr-defined]
     else:
         command = command_or_handler
-    handlers = hass.data.get(DOMAIN)
-    if handlers is None:
+    if (handlers := hass.data.get(DOMAIN)) is None:
         handlers = hass.data[DOMAIN] = {}
     handlers[command] = (handler, schema)

@@ -420,9 +420,7 @@ def handle_entity_source(
                 perm_category=CAT_ENTITIES,
             )

-        source = raw_sources.get(entity_id)
-
-        if source is None:
+        if (source := raw_sources.get(entity_id)) is None:
             connection.send_error(msg["id"], ERR_NOT_FOUND, "Entity not found")
             return
