Allow filtering the logbook by context_id (#47783)

This commit is contained in:
Paulus Schoutsen 2021-03-12 09:04:02 -08:00 committed by GitHub
parent bf5028df2b
commit 04b335afe9
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
2 changed files with 59 additions and 6 deletions

View file

@@ -231,6 +231,12 @@ class LogbookView(HomeAssistantView):
hass = request.app["hass"]
entity_matches_only = "entity_matches_only" in request.query
context_id = request.query.get("context_id")
if entity_ids and context_id:
return self.json_message(
"Can't combine entity with context_id", HTTP_BAD_REQUEST
)
def json_events():
"""Fetch events and generate JSON."""
@@ -243,6 +249,7 @@ class LogbookView(HomeAssistantView):
self.filters,
self.entities_filter,
entity_matches_only,
context_id,
)
)
@@ -413,8 +420,13 @@ def _get_events(
filters=None,
entities_filter=None,
entity_matches_only=False,
context_id=None,
):
"""Get events for a period of time."""
assert not (
entity_ids and context_id
), "can't pass in both entity_ids and context_id"
entity_attr_cache = EntityAttributeCache(hass)
context_lookup = {None: None}
@@ -466,6 +478,9 @@ def _get_events(
filters.entity_filter() | (Events.event_type != EVENT_STATE_CHANGED)
)
if context_id is not None:
query = query.filter(Events.context_id == context_id)
query = query.order_by(Events.time_fired)
return list(

View file

@@ -1801,17 +1801,52 @@ async def test_empty_config(hass, hass_client):
_assert_entry(entries[1], name="blu", entity_id=entity_id)
async def _async_fetch_logbook(client):
async def test_context_filter(hass, hass_client):
    """Test the logbook can be narrowed to a single context id."""
    await hass.async_add_executor_job(init_recorder_component, hass)
    assert await async_setup_component(hass, "logbook", {})
    await hass.async_add_executor_job(
        hass.data[recorder.DATA_INSTANCE].block_till_done
    )

    entity_id = "switch.blu"
    context = ha.Context()

    # Interleave events that carry the context under test with ones that don't.
    hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
    hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
    hass.states.async_set(entity_id, None)
    hass.states.async_set(entity_id, "on", context=context)
    hass.states.async_set(entity_id, "off")
    hass.states.async_set(entity_id, "unknown", context=context)

    await _async_commit_and_wait(hass)
    client = await hass_client()

    # Only the two state changes fired with `context` should be returned.
    matching = await _async_fetch_logbook(client, {"context_id": context.id})
    assert len(matching) == 2
    _assert_entry(matching[0], entity_id=entity_id, state="on")
    _assert_entry(matching[1], entity_id=entity_id, state="unknown")

    # Combining a context filter with an entity filter is rejected by the API.
    response = await client.get(
        "/api/logbook", params={"context_id": context.id, "entity": entity_id}
    )
    assert response.status == 400
async def _async_fetch_logbook(client, params=None):
    """Fetch logbook entries via the HTTP API and return the decoded JSON.

    The query window starts 24 hours before today's midnight (UTC); unless the
    caller supplies an explicit ``end_time``, it defaults to 48 hours after
    today's date so the window fully covers "today".
    """
    if params is None:
        params = {}

    # Today at 00:00:00, then back up one day for the start of the window.
    start = dt_util.utcnow().date()
    start_date = datetime(start.year, start.month, start.day) - timedelta(hours=24)

    if "end_time" not in params:
        # date + timedelta(hours=48) advances two calendar days.
        params["end_time"] = str(start + timedelta(hours=48))

    # NOTE(review): the diff residue contained an earlier, superseded request
    # whose response was discarded; only this single GET is performed.
    response = await client.get(
        f"/api/logbook/{start_date.isoformat()}", params=params
    )
    assert response.status == 200
    return await response.json()
@@ -1825,7 +1860,7 @@ async def _async_commit_and_wait(hass):
def _assert_entry(
entry, when=None, name=None, message=None, domain=None, entity_id=None
entry, when=None, name=None, message=None, domain=None, entity_id=None, state=None
):
"""Assert an entry is what is expected."""
if when:
@@ -1843,6 +1878,9 @@ def _assert_entry(
if entity_id:
assert entity_id == entry["entity_id"]
if state:
assert state == entry["state"]
class MockLazyEventPartialState(ha.Event):
"""Minimal mock of a Lazy event."""