Fix logbook with empty filter and remove unused code (#40565)

This commit is contained in:
J. Nick Koston 2020-09-30 06:10:11 -05:00 committed by GitHub
parent 729ed1cab6
commit aada6a1d88
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
5 changed files with 126 additions and 82 deletions

View file

@ -127,8 +127,14 @@ def _get_significant_states(
else:
baked_query += lambda q: q.filter(States.last_updated > bindparam("start_time"))
if entity_ids is not None:
baked_query += lambda q: q.filter(
States.entity_id.in_(bindparam("entity_ids", expanding=True))
)
else:
baked_query += lambda q: q.filter(~States.domain.in_(IGNORE_DOMAINS))
if filters:
filters.bake(baked_query, entity_ids)
filters.bake(baked_query)
if end_time is not None:
baked_query += lambda q: q.filter(States.last_updated < bindparam("end_time"))
@ -296,10 +302,14 @@ def _get_states_with_session(
query = query.join(
most_recent_state_ids,
States.state_id == most_recent_state_ids.c.max_state_id,
).filter(~States.domain.in_(IGNORE_DOMAINS))
)
if entity_ids is not None:
query = query.filter(States.entity_id.in_(entity_ids))
else:
query = query.filter(~States.domain.in_(IGNORE_DOMAINS))
if filters:
query = filters.apply(query, entity_ids)
query = filters.apply(query)
return [LazyState(row) for row in execute(query)]
@ -539,7 +549,7 @@ class HistoryPeriodView(HomeAssistantView):
# Optionally reorder the result to respect the ordering given
# by any entities explicitly included in the configuration.
if self.use_include_order:
if self.filters and self.use_include_order:
sorted_result = []
for order_entity in self.filters.included_entities:
for state_list in result:
@ -566,7 +576,8 @@ def sqlalchemy_filter_from_include_exclude_conf(conf):
filters.included_entities = include.get(CONF_ENTITIES, [])
filters.included_domains = include.get(CONF_DOMAINS, [])
filters.included_entity_globs = include.get(CONF_ENTITY_GLOBS, [])
return filters
return filters if filters.has_config else None
class Filters:
@ -582,42 +593,16 @@ class Filters:
self.included_domains = []
self.included_entity_globs = []
def apply(self, query, entity_ids=None):
"""Apply the include/exclude filter on domains and entities on query.
Following rules apply:
* only the include section is configured - just query the specified
entities or domains.
* only the exclude section is configured - filter the specified
entities and domains from all the entities in the system.
* if include and exclude is defined - select the entities specified in
the include and filter out the ones from the exclude list.
"""
# specific entities requested - do not in/exclude anything
if entity_ids is not None:
return query.filter(States.entity_id.in_(entity_ids))
query = query.filter(~States.domain.in_(IGNORE_DOMAINS))
entity_filter = self.entity_filter()
if entity_filter is not None:
query = query.filter(entity_filter)
def apply(self, query):
"""Apply the entity filter."""
if not self.has_config:
return query
def bake(self, baked_query, entity_ids=None):
"""Update a baked query.
Works the same as apply on a baked_query.
"""
if entity_ids is not None:
baked_query += lambda q: q.filter(
States.entity_id.in_(bindparam("entity_ids", expanding=True))
)
return
baked_query += lambda q: q.filter(~States.domain.in_(IGNORE_DOMAINS))
return query.filter(self.entity_filter())
@property
def has_config(self):
"""Determine if there is any filter configuration."""
if (
self.excluded_entities
or self.excluded_domains
@ -626,6 +611,18 @@ class Filters:
or self.included_domains
or self.included_entity_globs
):
return True
return False
def bake(self, baked_query):
    """Append the configured entity filter to a baked query.

    Works the same as apply() does on a plain query: when no
    include/exclude configuration exists, the baked query is left
    untouched so no needless WHERE clause is added.
    """
    # Empty filter configuration -> nothing to append.
    if not self.has_config:
        return
    # Added lazily; self.entity_filter() is evaluated when the baked
    # query is built, not at bake time.
    baked_query += lambda q: q.filter(self.entity_filter())
def entity_filter(self):

View file

@ -17,7 +17,6 @@ from homeassistant.components.http import HomeAssistantView
from homeassistant.components.recorder.models import (
Events,
States,
process_timestamp,
process_timestamp_to_utc_isoformat,
)
from homeassistant.components.recorder.util import session_scope
@ -266,6 +265,7 @@ def humanify(hass, events, entity_attr_cache, context_lookup):
- if 2+ sensor updates in GROUP_BY_MINUTES, show last
- if Home Assistant stop and start happen in same minute call it restarted
"""
external_events = hass.data.get(DOMAIN, {})
# Group events in batches of GROUP_BY_MINUTES
for _, g_events in groupby(
@ -300,27 +300,7 @@ def humanify(hass, events, entity_attr_cache, context_lookup):
start_stop_events[event.time_fired_minute] = 2
# Yield entries
external_events = hass.data.get(DOMAIN, {})
for event in events_batch:
if event.event_type in external_events:
domain, describe_event = external_events[event.event_type]
data = describe_event(event)
data["when"] = event.time_fired_isoformat
data["domain"] = domain
if event.context_user_id:
data["context_user_id"] = event.context_user_id
context_event = context_lookup.get(event.context_id)
if context_event:
_augment_data_with_context(
data,
data.get(ATTR_ENTITY_ID),
event,
context_event,
entity_attr_cache,
external_events,
)
yield data
if event.event_type == EVENT_STATE_CHANGED:
entity_id = event.entity_id
domain = event.domain
@ -365,6 +345,25 @@ def humanify(hass, events, entity_attr_cache, context_lookup):
yield data
elif event.event_type in external_events:
domain, describe_event = external_events[event.event_type]
data = describe_event(event)
data["when"] = event.time_fired_isoformat
data["domain"] = domain
if event.context_user_id:
data["context_user_id"] = event.context_user_id
context_event = context_lookup.get(event.context_id)
if context_event:
_augment_data_with_context(
data,
data.get(ATTR_ENTITY_ID),
event,
context_event,
entity_attr_cache,
external_events,
)
yield data
elif event.event_type == EVENT_HOMEASSISTANT_START:
if start_stop_events.get(event.time_fired_minute) == 2:
continue
@ -764,7 +763,6 @@ class LazyEventPartialState:
__slots__ = [
"_row",
"_event_data",
"_time_fired",
"_time_fired_isoformat",
"_attributes",
"event_type",
@ -780,7 +778,6 @@ class LazyEventPartialState:
"""Init the lazy event."""
self._row = row
self._event_data = None
self._time_fired = None
self._time_fired_isoformat = None
self._attributes = None
self.event_type = self._row.event_type
@ -841,25 +838,14 @@ class LazyEventPartialState:
self._event_data = json.loads(self._row.event_data)
return self._event_data
@property
def time_fired(self):
"""Time event was fired in utc."""
if not self._time_fired:
self._time_fired = (
process_timestamp(self._row.time_fired) or dt_util.utcnow()
)
return self._time_fired
@property
def time_fired_isoformat(self):
"""Time event was fired in utc isoformat."""
if not self._time_fired_isoformat:
if self._time_fired:
self._time_fired_isoformat = self._time_fired.isoformat()
else:
self._time_fired_isoformat = process_timestamp_to_utc_isoformat(
self._row.time_fired or dt_util.utcnow()
)
return self._time_fired_isoformat

View file

@ -218,7 +218,8 @@ def process_timestamp_to_utc_isoformat(ts):
"""Process a timestamp into UTC isotime."""
if ts is None:
return None
if ts.tzinfo == dt_util.UTC:
return ts.isoformat()
if ts.tzinfo is None:
return f"{ts.isoformat()}{DB_TIMEZONE}"
return dt_util.as_utc(ts).isoformat()
return ts.astimezone(dt_util.UTC).isoformat()

View file

@ -940,3 +940,33 @@ async def test_fetch_period_api_with_entity_glob_include_and_exclude(hass, hass_
assert response_json[0][0]["entity_id"] == "light.match"
assert response_json[1][0]["entity_id"] == "media_player.test"
assert response_json[2][0]["entity_id"] == "switch.match"
async def test_entity_ids_limit_via_api(hass, hass_client):
    """Test limiting history to entity_ids."""
    # Recorder init is blocking, so it runs in the executor.
    await hass.async_add_executor_job(init_recorder_component, hass)
    await async_setup_component(
        hass,
        "history",
        {"history": {}},
    )
    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
    # Three entities exist; only two are requested via filter_entity_id below.
    hass.states.async_set("light.kitchen", "on")
    hass.states.async_set("light.cow", "on")
    hass.states.async_set("light.nomatch", "on")
    await hass.async_block_till_done()
    # Force a DB commit and wait for the recorder queue to drain so the
    # API query below can see the states.
    await hass.async_add_executor_job(trigger_db_commit, hass)
    await hass.async_block_till_done()
    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
    client = await hass_client()
    response = await client.get(
        f"/api/history/period/{dt_util.utcnow().isoformat()}?filter_entity_id=light.kitchen,light.cow",
    )
    assert response.status == 200
    response_json = await response.json()
    # Only the two requested entities come back (light.nomatch excluded),
    # in the order they were requested.
    assert len(response_json) == 2
    assert response_json[0][0]["entity_id"] == "light.kitchen"
    assert response_json[1][0]["entity_id"] == "light.cow"

View file

@ -2223,6 +2223,36 @@ async def test_include_exclude_events_with_glob_filters(hass, hass_client):
_assert_entry(entries[2], name="included", domain="light", entity_id=entity_id4)
async def test_empty_config(hass, hass_client):
    """Test we can handle an empty entity filter."""
    entity_id = "sensor.blu"
    # Both the core and logbook sections are empty, i.e. no
    # include/exclude filter is configured at all.
    config = logbook.CONFIG_SCHEMA(
        {
            ha.DOMAIN: {},
            logbook.DOMAIN: {},
        }
    )
    await hass.async_add_executor_job(init_recorder_component, hass)
    await async_setup_component(hass, "logbook", config)
    await hass.async_add_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
    # Fire start events and a state change so the logbook has entries
    # to return.
    hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
    hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
    hass.states.async_set(entity_id, None)
    hass.states.async_set(entity_id, 10)
    await _async_commit_and_wait(hass)
    client = await hass_client()
    entries = await _async_fetch_logbook(client)
    # With an empty filter nothing is excluded: the restart entry and the
    # sensor state change both appear.
    assert len(entries) == 2
    _assert_entry(
        entries[0], name="Home Assistant", message="started", domain=ha.DOMAIN
    )
    _assert_entry(entries[1], name="blu", domain="sensor", entity_id=entity_id)
async def _async_fetch_logbook(client):
# Today time 00:00:00