Reduce number of columns selected for logbook (#91718)

J. Nick Koston 2023-04-22 07:25:22 -05:00 committed by GitHub
parent 34b824a27b
commit b76551cf35
5 changed files with 20 additions and 33 deletions

View file

@@ -71,7 +71,7 @@ class LazyEventPartialState:
             # json decode process as we already have the data
             self.data = row.data
             return
-        source = cast(str, self.row.shared_data or self.row.event_data)
+        source = cast(str, self.row.event_data)
         if not source:
             self.data = {}
         elif event_data := self._event_data_cache.get(source):
@@ -105,17 +105,14 @@ class EventAsRow:
     context: Context
     context_id_bin: bytes
     time_fired_ts: float
-    state_id: int
+    row_id: int
     event_data: str | None = None
-    old_format_icon: None = None
-    event_id: None = None
     entity_id: str | None = None
     icon: str | None = None
     context_user_id_bin: bytes | None = None
     context_parent_id_bin: bytes | None = None
     event_type: str | None = None
     state: str | None = None
-    shared_data: str | None = None
     context_only: None = None
@ -132,7 +129,7 @@ def async_event_to_row(event: Event) -> EventAsRow:
context_user_id_bin=uuid_hex_to_bytes_or_none(context.user_id), context_user_id_bin=uuid_hex_to_bytes_or_none(context.user_id),
context_parent_id_bin=ulid_to_bytes_or_none(context.parent_id), context_parent_id_bin=ulid_to_bytes_or_none(context.parent_id),
time_fired_ts=dt_util.utc_to_timestamp(event.time_fired), time_fired_ts=dt_util.utc_to_timestamp(event.time_fired),
state_id=hash(event), row_id=hash(event),
) )
# States are prefiltered so we never get states # States are prefiltered so we never get states
# that are missing new_state or old_state # that are missing new_state or old_state
@@ -148,6 +145,6 @@ def async_event_to_row(event: Event) -> EventAsRow:
         context_user_id_bin=uuid_hex_to_bytes_or_none(context.user_id),
         context_parent_id_bin=ulid_to_bytes_or_none(context.parent_id),
         time_fired_ts=dt_util.utc_to_timestamp(new_state.last_updated),
-        state_id=hash(event),
+        row_id=hash(event),
         icon=new_state.attributes.get(ATTR_ICON),
     )
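Note: both branches of async_event_to_row now fill a single row_id field where the old code carried separate state_id and event_id attributes, using hash(event) as a synthetic key for live events that were never written to the database. A minimal, hypothetical sketch of the idea (stand-in classes, not the real Event/EventAsRow):

from dataclasses import dataclass

@dataclass(frozen=True)
class FakeEvent:
    """Hypothetical stand-in for homeassistant.core.Event."""
    event_type: str

@dataclass
class RowLike:
    """Hypothetical stand-in for EventAsRow."""
    event_type: str
    row_id: int

def event_to_row(event: FakeEvent) -> RowLike:
    # Live events have no database primary key, so hash(event) serves as a
    # synthetic row_id; rows read from the database instead get
    # Events.event_id / States.state_id labelled as "row_id" by the queries.
    return RowLike(event_type=event.event_type, row_id=hash(event))

print(event_to_row(FakeEvent("sun_set")).row_id)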

View file

@@ -227,7 +227,7 @@ def _humanify(
             }
             if include_entity_name:
                 data[LOGBOOK_ENTRY_NAME] = entity_name_cache.get(entity_id)
-            if icon := row.icon or row.old_format_icon:
+            if icon := row.icon:
                 data[LOGBOOK_ENTRY_ICON] = icon
             context_augmenter.augment(data, row, context_id_bin)
@@ -358,15 +358,9 @@ class ContextAugmenter:
 def _rows_match(row: Row | EventAsRow, other_row: Row | EventAsRow) -> bool:
     """Check of rows match by using the same method as Events __hash__."""
-    if (
-        row is other_row
-        or (state_id := row.state_id)
-        and state_id == other_row.state_id
-        or (event_id := row.event_id)
-        and event_id == other_row.event_id
-    ):
-        return True
-    return False
+    return bool(
+        row is other_row or (row_id := row.row_id) and row_id == other_row.row_id
+    )


 def _row_time_fired_isoformat(row: Row | EventAsRow) -> str:
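Note: because `and` binds tighter than `or`, the reduced check reads "same object, or both rows carry the same non-falsy row_id". An equivalent spelled-out sketch of the same logic:

def rows_match(row, other_row) -> bool:
    """Spelled-out equivalent of the reduced _rows_match (sketch only)."""
    if row is other_row:
        return True
    row_id = row.row_id
    # A falsy row_id (0 or None) never matches, mirroring the walrus form.
    return bool(row_id) and row_id == other_row.row_id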

View file

@ -14,6 +14,7 @@ from homeassistant.components.recorder.db_schema import (
OLD_FORMAT_ATTRS_JSON, OLD_FORMAT_ATTRS_JSON,
OLD_STATE, OLD_STATE,
SHARED_ATTRS_JSON, SHARED_ATTRS_JSON,
SHARED_DATA_OR_LEGACY_EVENT_DATA,
STATES_CONTEXT_ID_BIN_INDEX, STATES_CONTEXT_ID_BIN_INDEX,
EventData, EventData,
Events, Events,
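Note: SHARED_DATA_OR_LEGACY_EVENT_DATA is defined in recorder's db_schema and is not shown in this diff. Assuming it coalesces the shared event_data table with the legacy per-event column, a plausible sketch (using the EventData and Events models imported above; the real definition may differ):

import sqlalchemy

# Sketch only -- the actual helper lives in
# homeassistant.components.recorder.db_schema.
SHARED_DATA_OR_LEGACY_EVENT_DATA = sqlalchemy.case(
    (EventData.shared_data.is_(None), Events.event_data),
    else_=EventData.shared_data,
).label("event_data")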
@@ -36,6 +37,11 @@ ALWAYS_CONTINUOUS_ENTITY_ID_LIKE = like_domain_matchers(ALWAYS_CONTINUOUS_DOMAIN
 UNIT_OF_MEASUREMENT_JSON = '"unit_of_measurement":'
 UNIT_OF_MEASUREMENT_JSON_LIKE = f"%{UNIT_OF_MEASUREMENT_JSON}%"
+
+ICON_OR_OLD_FORMAT_ICON_JSON = sqlalchemy.case(
+    (SHARED_ATTRS_JSON["icon"].is_(None), OLD_FORMAT_ATTRS_JSON["icon"].as_string()),
+    else_=SHARED_ATTRS_JSON["icon"].as_string(),
+).label("icon")

 PSEUDO_EVENT_STATE_CHANGED: Final = None
 # Since we don't store event_types and None
 # and we don't store state_changed in events
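Note: ICON_OR_OLD_FORMAT_ICON_JSON folds what used to be two selected columns (icon and old_format_icon) into a single CASE expression evaluated in the database, so the legacy fallback no longer costs an extra column per row. A self-contained sketch of the same pattern against a hypothetical table (names are stand-ins, not the recorder schema):

import sqlalchemy
from sqlalchemy import JSON, Column, Integer
from sqlalchemy.dialects import sqlite
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class Attrs(Base):
    """Hypothetical table, not the recorder schema."""
    __tablename__ = "attrs"
    attrs_id = Column(Integer, primary_key=True)
    shared_attrs = Column(JSON)   # new-format attributes
    legacy_attrs = Column(JSON)   # old-format attributes

# Prefer the new-format icon; fall back to the legacy column only when the
# new one is NULL, and expose the result under a single "icon" label.
ICON = sqlalchemy.case(
    (Attrs.shared_attrs["icon"].is_(None), Attrs.legacy_attrs["icon"].as_string()),
    else_=Attrs.shared_attrs["icon"].as_string(),
).label("icon")

print(sqlalchemy.select(ICON).compile(dialect=sqlite.dialect()))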
@@ -45,9 +51,9 @@ PSEUDO_EVENT_STATE_CHANGED: Final = None
 # in the payload

 EVENT_COLUMNS = (
-    Events.event_id.label("event_id"),
+    Events.event_id.label("row_id"),
     EventTypes.event_type.label("event_type"),
-    Events.event_data.label("event_data"),
+    SHARED_DATA_OR_LEGACY_EVENT_DATA,
     Events.time_fired_ts.label("time_fired_ts"),
     Events.context_id_bin.label("context_id_bin"),
     Events.context_user_id_bin.label("context_user_id_bin"),
@@ -55,23 +61,19 @@ EVENT_COLUMNS = (
 )

 STATE_COLUMNS = (
-    States.state_id.label("state_id"),
     States.state.label("state"),
     StatesMeta.entity_id.label("entity_id"),
-    SHARED_ATTRS_JSON["icon"].as_string().label("icon"),
-    OLD_FORMAT_ATTRS_JSON["icon"].as_string().label("old_format_icon"),
+    ICON_OR_OLD_FORMAT_ICON_JSON,
 )

 STATE_CONTEXT_ONLY_COLUMNS = (
-    States.state_id.label("state_id"),
     States.state.label("state"),
     StatesMeta.entity_id.label("entity_id"),
     literal(value=None, type_=sqlalchemy.String).label("icon"),
-    literal(value=None, type_=sqlalchemy.String).label("old_format_icon"),
 )

 EVENT_COLUMNS_FOR_STATE_SELECT = (
-    literal(value=None, type_=sqlalchemy.Text).label("event_id"),
+    States.state_id.label("row_id"),
     # We use PSEUDO_EVENT_STATE_CHANGED aka None for
     # state_changed events since it takes up less
     # space in the response and every row has to be
@ -84,21 +86,17 @@ EVENT_COLUMNS_FOR_STATE_SELECT = (
States.context_id_bin.label("context_id_bin"), States.context_id_bin.label("context_id_bin"),
States.context_user_id_bin.label("context_user_id_bin"), States.context_user_id_bin.label("context_user_id_bin"),
States.context_parent_id_bin.label("context_parent_id_bin"), States.context_parent_id_bin.label("context_parent_id_bin"),
literal(value=None, type_=sqlalchemy.Text).label("shared_data"),
) )
EMPTY_STATE_COLUMNS = ( EMPTY_STATE_COLUMNS = (
literal(value=0, type_=sqlalchemy.Integer).label("state_id"),
literal(value=None, type_=sqlalchemy.String).label("state"), literal(value=None, type_=sqlalchemy.String).label("state"),
literal(value=None, type_=sqlalchemy.String).label("entity_id"), literal(value=None, type_=sqlalchemy.String).label("entity_id"),
literal(value=None, type_=sqlalchemy.String).label("icon"), literal(value=None, type_=sqlalchemy.String).label("icon"),
literal(value=None, type_=sqlalchemy.String).label("old_format_icon"),
) )
EVENT_ROWS_NO_STATES = ( EVENT_ROWS_NO_STATES = (
*EVENT_COLUMNS, *EVENT_COLUMNS,
EventData.shared_data.label("shared_data"),
*EMPTY_STATE_COLUMNS, *EMPTY_STATE_COLUMNS,
) )
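Note: dropping the literal NULL placeholders and the duplicated shared_data/event_data pair means every row the logbook queries return has fewer columns to fetch and deserialize. The tuples above are splatted into select() statements elsewhere in queries/; a simplified sketch of that composition, assuming the real builders add their own joins, filters and ordering:

from sqlalchemy import select, union_all

# Sketch only: illustrative composition of the column tuples defined above.
events_stmt = select(*EVENT_ROWS_NO_STATES)
states_stmt = select(*EVENT_COLUMNS_FOR_STATE_SELECT, *STATE_COLUMNS)
combined = union_all(events_stmt, states_stmt)  # both sides share one row shape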

View file

@ -29,7 +29,7 @@ class MockRow:
): ):
"""Init the fake row.""" """Init the fake row."""
self.event_type = event_type self.event_type = event_type
self.shared_data = json.dumps(data, cls=JSONEncoder) self.event_data = json.dumps(data, cls=JSONEncoder)
self.data = data self.data = data
self.time_fired = dt_util.utcnow() self.time_fired = dt_util.utcnow()
self.time_fired_ts = dt_util.utc_to_timestamp(self.time_fired) self.time_fired_ts = dt_util.utc_to_timestamp(self.time_fired)
@@ -42,8 +42,7 @@ class MockRow:
         self.context_id_bin = ulid_to_bytes_or_none(context.id) if context else None
         self.state = None
         self.entity_id = None
-        self.state_id = None
-        self.event_id = None
+        self.row_id = None
         self.shared_attrs = None
         self.attributes = None
         self.context_only = False

View file

@ -352,7 +352,6 @@ def create_state_changed_event_from_old_new(
row.context_id_bin = None row.context_id_bin = None
row.friendly_name = None row.friendly_name = None
row.icon = None row.icon = None
row.old_format_icon = None
row.context_user_id_bin = None row.context_user_id_bin = None
row.context_parent_id_bin = None row.context_parent_id_bin = None
row.old_state_id = old_state and 1 row.old_state_id = old_state and 1