Ensure removed entities are not displayed in logbook (#37395)

J. Nick Koston 2020-07-04 01:08:46 -05:00 committed by GitHub
parent 045cdee30c
commit 4b2ebf5487
5 changed files with 121 additions and 98 deletions


@@ -355,7 +355,7 @@ def _get_events(
"""Yield Events that are not filtered away."""
for row in query.yield_per(1000):
event = LazyEventPartialState(row)
if _keep_event(hass, event, entities_filter, entity_attr_cache):
if _keep_event(hass, event, entities_filter):
yield event
with session_scope(hass=hass) as session:
@@ -375,12 +375,10 @@ def _get_events(
Events.event_data,
Events.time_fired,
Events.context_user_id,
States.state_id,
States.state,
States.entity_id,
States.domain,
States.attributes,
old_state.state_id.label("old_state_id"),
)
.order_by(Events.time_fired)
.outerjoin(States, (Events.event_id == States.event_id))
@@ -400,6 +398,7 @@ def _get_events(
| (
(States.state_id.isnot(None))
& (old_state.state_id.isnot(None))
& (States.state.isnot(None))
& (States.state != old_state.state)
)
)
@@ -444,12 +443,9 @@ def _get_events(
return list(humanify(hass, yield_events(query), entity_attr_cache, prev_states))
def _keep_event(hass, event, entities_filter, entity_attr_cache):
def _keep_event(hass, event, entities_filter):
if event.event_type == EVENT_STATE_CHANGED:
entity_id = event.entity_id
if entity_id is None:
return False
# Do not report on new entities
# Do not report on entity removal
if not event.has_old_and_new_state:
@@ -650,9 +646,12 @@ class LazyEventPartialState:
# Delete this check once all states are saved in the v8 schema
# format or later (they have the old_state_id column).
# New events in v8 schema format
# New events in v8+ schema format
if self._row.event_data == EMPTY_JSON_OBJECT:
return self._row.state_id is not None and self._row.old_state_id is not None
# Events are already pre-filtered in sql
# to exclude missing old and new state
# if they are in v8+ format
return True
# Old events not in v8 schema format
return (
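
To clarify what the logbook hunks above do, here is a minimal sketch, not the actual logbook query, of the SQL-level condition that now excludes removed entities: the recorder writes a NULL state for a removal, so a state_changed row is only kept when both an old and a new state row exist, the new state is not NULL, and the state actually changed. The outer joins that populate States and the old_state alias appear in the hunk above and are omitted here; names follow the recorder models.

from sqlalchemy import and_, or_
from sqlalchemy.orm import aliased

from homeassistant.components.recorder.models import Events, States
from homeassistant.const import EVENT_STATE_CHANGED

old_state = aliased(States, name="old_state")

# Keep non-state events unconditionally; keep state_changed rows only when an
# old and a new state row exist, the new state is not NULL (NULL marks an
# entity removal), and the state actually changed.
keep_event = or_(
    Events.event_type != EVENT_STATE_CHANGED,
    and_(
        States.state_id.isnot(None),
        old_state.state_id.isnot(None),
        States.state.isnot(None),
        States.state != old_state.state,
    ),
)

Because this filtering now happens in SQL for rows in the v8+ schema, LazyEventPartialState.has_old_and_new_state can simply return True for them instead of re-checking the columns, which is what the comment in the hunk above refers to.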


@@ -386,11 +386,14 @@ class Recorder(threading.Thread):
if dbevent and event.event_type == EVENT_STATE_CHANGED:
try:
dbstate = States.from_event(event)
has_new_state = event.data.get("new_state")
dbstate.old_state_id = self._old_state_ids.get(dbstate.entity_id)
if not has_new_state:
dbstate.state = None
dbstate.event_id = dbevent.event_id
self.event_session.add(dbstate)
self.event_session.flush()
if "new_state" in event.data:
if has_new_state:
self._old_state_ids[dbstate.entity_id] = dbstate.state_id
elif dbstate.entity_id in self._old_state_ids:
del self._old_state_ids[dbstate.entity_id]
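
The recorder hunk above is what makes the NULL-state marker possible: when a state_changed event carries no new_state (the entity was removed), the stored row keeps its entity_id but gets state = None, and the cached state_id for that entity is dropped so a re-added entity is not linked to its pre-removal row. Below is a standalone sketch of that bookkeeping, using a hypothetical helper rather than the Recorder class itself.

# Mirrors Recorder._old_state_ids: entity_id -> state_id of the most recently
# written States row for that entity.
old_state_ids = {}

def link_old_state(entity_id, new_state, state_id):
    """Return the old_state_id to store on the new row, then update the cache."""
    old_state_id = old_state_ids.get(entity_id)
    if new_state:
        # Normal update: this row becomes the "old state" of the next one.
        old_state_ids[entity_id] = state_id
    else:
        # Removal: the row is stored with state = None, and the cache entry is
        # dropped so the entity starts fresh if it is ever added back.
        old_state_ids.pop(entity_id, None)
    return old_state_id

# Example: the first write has no predecessor, the removal links back to it.
assert link_old_state("climate.bla", "off", 41) is None
assert link_old_state("climate.bla", None, 42) == 41
assert "climate.bla" not in old_state_ids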


@@ -186,7 +186,7 @@ async def _logbook_filtering(hass, last_changed, last_updated):
def yield_events(event):
for _ in range(10 ** 5):
# pylint: disable=protected-access
if logbook._keep_event(hass, event, entities_filter, entity_attr_cache):
if logbook._keep_event(hass, event, entities_filter):
yield event
start = timer()


@@ -165,83 +165,6 @@ class TestComponentLogbook(unittest.TestCase):
entries[1], pointC, "bla", domain="sensor", entity_id=entity_id
)
def test_exclude_new_entities(self):
"""Test if events are excluded on first update."""
entity_id = "sensor.bla"
entity_id2 = "sensor.blu"
pointA = dt_util.utcnow()
pointB = pointA + timedelta(minutes=logbook.GROUP_BY_MINUTES)
entity_attr_cache = logbook.EntityAttributeCache(self.hass)
state_on = ha.State(
entity_id, "on", {"brightness": 200}, pointA, pointA
).as_dict()
eventA = self.create_state_changed_event_from_old_new(
entity_id, pointA, None, state_on
)
eventB = self.create_state_changed_event(pointB, entity_id2, 20)
entities_filter = convert_include_exclude_filter(
logbook.CONFIG_SCHEMA({logbook.DOMAIN: {}})[logbook.DOMAIN]
)
events = [
e
for e in (
MockLazyEventPartialState(EVENT_HOMEASSISTANT_STOP),
eventA,
eventB,
)
if logbook._keep_event(self.hass, e, entities_filter, entity_attr_cache)
]
entries = list(logbook.humanify(self.hass, events, entity_attr_cache))
assert len(entries) == 2
self.assert_entry(
entries[0], name="Home Assistant", message="stopped", domain=ha.DOMAIN
)
self.assert_entry(
entries[1], pointB, "blu", domain="sensor", entity_id=entity_id2
)
def test_exclude_removed_entities(self):
"""Test if events are excluded on last update."""
entity_id = "sensor.bla"
entity_id2 = "sensor.blu"
pointA = dt_util.utcnow()
pointB = pointA + timedelta(minutes=logbook.GROUP_BY_MINUTES)
entity_attr_cache = logbook.EntityAttributeCache(self.hass)
state_on = ha.State(
entity_id, "on", {"brightness": 200}, pointA, pointA
).as_dict()
eventA = self.create_state_changed_event_from_old_new(
None, pointA, state_on, None,
)
eventB = self.create_state_changed_event(pointB, entity_id2, 20)
entities_filter = convert_include_exclude_filter(
logbook.CONFIG_SCHEMA({logbook.DOMAIN: {}})[logbook.DOMAIN]
)
events = [
e
for e in (
MockLazyEventPartialState(EVENT_HOMEASSISTANT_STOP),
eventA,
eventB,
)
if logbook._keep_event(self.hass, e, entities_filter, entity_attr_cache)
]
entries = list(logbook.humanify(self.hass, events, entity_attr_cache))
assert len(entries) == 2
self.assert_entry(
entries[0], name="Home Assistant", message="stopped", domain=ha.DOMAIN
)
self.assert_entry(
entries[1], pointB, "blu", domain="sensor", entity_id=entity_id2
)
def test_exclude_events_entity(self):
"""Test if events are filtered if entity is excluded in config."""
entity_id = "sensor.bla"
@@ -267,7 +190,7 @@ class TestComponentLogbook(unittest.TestCase):
eventA,
eventB,
)
if logbook._keep_event(self.hass, e, entities_filter, entity_attr_cache)
if logbook._keep_event(self.hass, e, entities_filter)
]
entries = list(logbook.humanify(self.hass, events, entity_attr_cache))
@@ -305,7 +228,7 @@ class TestComponentLogbook(unittest.TestCase):
eventA,
eventB,
)
if logbook._keep_event(self.hass, e, entities_filter, entity_attr_cache)
if logbook._keep_event(self.hass, e, entities_filter)
]
entries = list(logbook.humanify(self.hass, events, entity_attr_cache))
@@ -352,7 +275,7 @@ class TestComponentLogbook(unittest.TestCase):
eventB,
eventC,
)
if logbook._keep_event(self.hass, e, entities_filter, entity_attr_cache)
if logbook._keep_event(self.hass, e, entities_filter)
]
entries = list(logbook.humanify(self.hass, events, entity_attr_cache))
@@ -394,7 +317,7 @@ class TestComponentLogbook(unittest.TestCase):
eventA,
eventB,
)
if logbook._keep_event(self.hass, e, entities_filter, entity_attr_cache)
if logbook._keep_event(self.hass, e, entities_filter)
]
entries = list(logbook.humanify(self.hass, events, entity_attr_cache))
@@ -440,7 +363,7 @@ class TestComponentLogbook(unittest.TestCase):
eventA,
eventB,
)
if logbook._keep_event(self.hass, e, entities_filter, entity_attr_cache)
if logbook._keep_event(self.hass, e, entities_filter)
]
entries = list(logbook.humanify(self.hass, events, entity_attr_cache))
@@ -494,7 +417,7 @@ class TestComponentLogbook(unittest.TestCase):
eventB,
eventC,
)
if logbook._keep_event(self.hass, e, entities_filter, entity_attr_cache)
if logbook._keep_event(self.hass, e, entities_filter)
]
entries = list(logbook.humanify(self.hass, events, entity_attr_cache))
@@ -551,7 +474,7 @@ class TestComponentLogbook(unittest.TestCase):
eventB1,
eventB2,
)
if logbook._keep_event(self.hass, e, entities_filter, entity_attr_cache)
if logbook._keep_event(self.hass, e, entities_filter)
]
entries = list(logbook.humanify(self.hass, events, entity_attr_cache))
@@ -625,7 +548,7 @@ class TestComponentLogbook(unittest.TestCase):
eventC2,
eventC3,
)
if logbook._keep_event(self.hass, e, entities_filter, entity_attr_cache)
if logbook._keep_event(self.hass, e, entities_filter)
]
entries = list(logbook.humanify(self.hass, events, entity_attr_cache))
@@ -677,7 +600,7 @@ class TestComponentLogbook(unittest.TestCase):
events = [
e
for e in (eventA, eventB)
if logbook._keep_event(self.hass, e, entities_filter, entity_attr_cache)
if logbook._keep_event(self.hass, e, entities_filter)
]
entries = list(logbook.humanify(self.hass, events, entity_attr_cache))
@@ -1835,6 +1758,83 @@ async def test_filter_continuous_sensor_values(hass, hass_client):
assert response_json[1]["entity_id"] == entity_id_third
async def test_exclude_new_entities(hass, hass_client):
"""Test if events are excluded on first update."""
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(hass, "logbook", {})
await hass.async_add_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
entity_id = "climate.bla"
entity_id2 = "climate.blu"
hass.states.async_set(entity_id, STATE_OFF)
hass.states.async_set(entity_id2, STATE_ON)
hass.states.async_set(entity_id2, STATE_OFF)
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
await hass.async_add_job(partial(trigger_db_commit, hass))
await hass.async_block_till_done()
await hass.async_add_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
client = await hass_client()
# Today time 00:00:00
start = dt_util.utcnow().date()
start_date = datetime(start.year, start.month, start.day)
# Test today entries without filters
response = await client.get(f"/api/logbook/{start_date.isoformat()}")
assert response.status == 200
response_json = await response.json()
assert len(response_json) == 2
assert response_json[0]["entity_id"] == entity_id2
assert response_json[1]["domain"] == "homeassistant"
assert response_json[1]["message"] == "started"
async def test_exclude_removed_entities(hass, hass_client):
"""Test if events are excluded on last update."""
await hass.async_add_executor_job(init_recorder_component, hass)
await async_setup_component(hass, "logbook", {})
await hass.async_add_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
entity_id = "climate.bla"
entity_id2 = "climate.blu"
hass.states.async_set(entity_id, STATE_ON)
hass.states.async_set(entity_id, STATE_OFF)
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
hass.states.async_set(entity_id2, STATE_ON)
hass.states.async_set(entity_id2, STATE_OFF)
hass.states.async_remove(entity_id)
hass.states.async_remove(entity_id2)
await hass.async_add_job(partial(trigger_db_commit, hass))
await hass.async_block_till_done()
await hass.async_add_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
client = await hass_client()
# Today time 00:00:00
start = dt_util.utcnow().date()
start_date = datetime(start.year, start.month, start.day)
# Test today entries without filters
response = await client.get(f"/api/logbook/{start_date.isoformat()}")
assert response.status == 200
response_json = await response.json()
assert len(response_json) == 3
assert response_json[0]["entity_id"] == entity_id
assert response_json[1]["domain"] == "homeassistant"
assert response_json[1]["message"] == "started"
assert response_json[2]["entity_id"] == entity_id2
class MockLazyEventPartialState(ha.Event):
"""Minimal mock of a Lazy event."""


@@ -16,7 +16,7 @@ from homeassistant.components.recorder import (
from homeassistant.components.recorder.const import DATA_INSTANCE
from homeassistant.components.recorder.models import Events, RecorderRuns, States
from homeassistant.components.recorder.util import session_scope
from homeassistant.const import MATCH_ALL
from homeassistant.const import MATCH_ALL, STATE_LOCKED, STATE_UNLOCKED
from homeassistant.core import ATTR_NOW, EVENT_TIME_CHANGED, Context, callback
from homeassistant.setup import async_setup_component
from homeassistant.util import dt as dt_util
@@ -261,6 +261,27 @@ def test_saving_state_include_domain_glob_exclude_entity(hass_recorder):
assert _state_empty_context(hass, "test.ok").state == "state2"
def test_saving_state_and_removing_entity(hass, hass_recorder):
"""Test saving the state of a removed entity."""
hass = hass_recorder()
entity_id = "lock.mine"
hass.states.set(entity_id, STATE_LOCKED)
hass.states.set(entity_id, STATE_UNLOCKED)
hass.states.async_remove(entity_id)
wait_recording_done(hass)
with session_scope(hass=hass) as session:
states = list(session.query(States))
assert len(states) == 3
assert states[0].entity_id == entity_id
assert states[0].state == STATE_LOCKED
assert states[1].entity_id == entity_id
assert states[1].state == STATE_UNLOCKED
assert states[2].entity_id == entity_id
assert states[2].state is None
def test_recorder_setup_failure():
"""Test some exceptions."""
hass = get_test_home_assistant()