Refactor history component (#50287)

* Refactor history component

* Update tests

* Address review comments

* Correct deprecated functions
Erik Montnemery, 2021-05-11 09:21:57 +02:00 (committed by GitHub)
parent d5e39e8748
commit 973f59e423
12 changed files with 1008 additions and 670 deletions
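The core of the refactor: the history query helpers move to homeassistant.components.recorder.history, and the old entry points in homeassistant.components.history become thin wrappers marked with deprecated_function, so existing callers keep working but log a warning. A minimal sketch of that forwarding pattern; the decorator below is an illustrative stand-in for the real helper in homeassistant.helpers.deprecation, not its actual implementation:

import functools
import logging

_LOGGER = logging.getLogger(__name__)


def deprecated_function(replacement: str):
    """Illustrative stand-in: log a deprecation warning, then delegate."""

    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            _LOGGER.warning(
                "%s is a deprecated function. Use %s instead",
                func.__name__,
                replacement,
            )
            return func(*args, **kwargs)

        return wrapper

    return decorator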

View file

@@ -2,7 +2,7 @@
"domain": "filter",
"name": "Filter",
"documentation": "https://www.home-assistant.io/integrations/filter",
"dependencies": ["history"],
"dependencies": ["recorder"],
"codeowners": ["@dgomes"],
"quality_scale": "internal",
"iot_class": "local_push"

View file

@@ -11,9 +11,9 @@ import statistics
import voluptuous as vol
from homeassistant.components import history
from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN
from homeassistant.components.input_number import DOMAIN as INPUT_NUMBER_DOMAIN
from homeassistant.components.recorder import history
from homeassistant.components.sensor import (
DEVICE_CLASSES as SENSOR_DEVICE_CLASSES,
DOMAIN as SENSOR_DOMAIN,

View file

@@ -1,28 +1,20 @@
"""Provide pre-made queries on top of the recorder component."""
from __future__ import annotations
from collections import defaultdict
from collections.abc import Iterable
from datetime import datetime as dt, timedelta
from itertools import groupby
import json
import logging
import time
from typing import cast
from aiohttp import web
from sqlalchemy import and_, bindparam, func, not_, or_
from sqlalchemy.ext import baked
from sqlalchemy import not_, or_
import voluptuous as vol
from homeassistant.components import recorder
from homeassistant.components.http import HomeAssistantView
from homeassistant.components.recorder.models import (
States,
process_timestamp,
process_timestamp_to_utc_isoformat,
)
from homeassistant.components.recorder.util import execute, session_scope
from homeassistant.components.recorder import history
from homeassistant.components.recorder.models import States
from homeassistant.components.recorder.util import session_scope
from homeassistant.const import (
CONF_DOMAINS,
CONF_ENTITIES,
@@ -30,8 +22,9 @@ from homeassistant.const import (
CONF_INCLUDE,
HTTP_BAD_REQUEST,
)
from homeassistant.core import Context, HomeAssistant, State, split_entity_id
from homeassistant.core import HomeAssistant
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.deprecation import deprecated_function
from homeassistant.helpers.entityfilter import (
CONF_ENTITY_GLOBS,
INCLUDE_EXCLUDE_BASE_FILTER_SCHEMA,
@@ -45,9 +38,6 @@ _LOGGER = logging.getLogger(__name__)
DOMAIN = "history"
CONF_ORDER = "use_include_order"
STATE_KEY = "state"
LAST_CHANGED_KEY = "last_changed"
GLOB_TO_SQL_CHARS = {
42: "%", # *
46: "_", # .
@@ -62,375 +52,41 @@ CONFIG_SCHEMA = vol.Schema(
extra=vol.ALLOW_EXTRA,
)
SIGNIFICANT_DOMAINS = (
"climate",
"device_tracker",
"humidifier",
"thermostat",
"water_heater",
)
IGNORE_DOMAINS = ("zone", "scene")
NEED_ATTRIBUTE_DOMAINS = {
"climate",
"humidifier",
"input_datetime",
"thermostat",
"water_heater",
}
QUERY_STATES = [
States.domain,
States.entity_id,
States.state,
States.attributes,
States.last_changed,
States.last_updated,
]
HISTORY_BAKERY = "history_bakery"
@deprecated_function("homeassistant.components.recorder.history.get_significant_states")
def get_significant_states(hass, *args, **kwargs):
"""Wrap _get_significant_states with a sql session."""
with session_scope(hass=hass) as session:
return _get_significant_states(hass, session, *args, **kwargs)
def _get_significant_states(
hass,
session,
start_time,
end_time=None,
entity_ids=None,
filters=None,
include_start_time_state=True,
significant_changes_only=True,
minimal_response=False,
):
"""
Return states changes during UTC period start_time - end_time.
Significant states are all states where there is a state change,
as well as all states from certain domains (for instance
thermostat so that we get current temperature in our graphs).
"""
timer_start = time.perf_counter()
baked_query = hass.data[HISTORY_BAKERY](
lambda session: session.query(*QUERY_STATES)
)
if significant_changes_only:
baked_query += lambda q: q.filter(
(
States.domain.in_(SIGNIFICANT_DOMAINS)
| (States.last_changed == States.last_updated)
)
& (States.last_updated > bindparam("start_time"))
)
else:
baked_query += lambda q: q.filter(States.last_updated > bindparam("start_time"))
if entity_ids is not None:
baked_query += lambda q: q.filter(
States.entity_id.in_(bindparam("entity_ids", expanding=True))
)
else:
baked_query += lambda q: q.filter(~States.domain.in_(IGNORE_DOMAINS))
if filters:
filters.bake(baked_query)
if end_time is not None:
baked_query += lambda q: q.filter(States.last_updated < bindparam("end_time"))
baked_query += lambda q: q.order_by(States.entity_id, States.last_updated)
states = execute(
baked_query(session).params(
start_time=start_time, end_time=end_time, entity_ids=entity_ids
)
)
if _LOGGER.isEnabledFor(logging.DEBUG):
elapsed = time.perf_counter() - timer_start
_LOGGER.debug("get_significant_states took %fs", elapsed)
return _sorted_states_to_json(
hass,
session,
states,
start_time,
entity_ids,
filters,
include_start_time_state,
minimal_response,
)
return history.get_significant_states(hass, *args, **kwargs)
@deprecated_function(
"homeassistant.components.recorder.history.state_changes_during_period"
)
def state_changes_during_period(hass, start_time, end_time=None, entity_id=None):
"""Return states changes during UTC period start_time - end_time."""
with session_scope(hass=hass) as session:
baked_query = hass.data[HISTORY_BAKERY](
lambda session: session.query(*QUERY_STATES)
)
baked_query += lambda q: q.filter(
(States.last_changed == States.last_updated)
& (States.last_updated > bindparam("start_time"))
)
if end_time is not None:
baked_query += lambda q: q.filter(
States.last_updated < bindparam("end_time")
)
if entity_id is not None:
baked_query += lambda q: q.filter_by(entity_id=bindparam("entity_id"))
entity_id = entity_id.lower()
baked_query += lambda q: q.order_by(States.entity_id, States.last_updated)
states = execute(
baked_query(session).params(
start_time=start_time, end_time=end_time, entity_id=entity_id
)
)
entity_ids = [entity_id] if entity_id is not None else None
return _sorted_states_to_json(hass, session, states, start_time, entity_ids)
return history.state_changes_during_period(
hass, start_time, end_time=end_time, entity_id=entity_id
)
@deprecated_function("homeassistant.components.recorder.history.get_last_state_changes")
def get_last_state_changes(hass, number_of_states, entity_id):
"""Return the last number_of_states."""
start_time = dt_util.utcnow()
with session_scope(hass=hass) as session:
baked_query = hass.data[HISTORY_BAKERY](
lambda session: session.query(*QUERY_STATES)
)
baked_query += lambda q: q.filter(States.last_changed == States.last_updated)
if entity_id is not None:
baked_query += lambda q: q.filter_by(entity_id=bindparam("entity_id"))
entity_id = entity_id.lower()
baked_query += lambda q: q.order_by(
States.entity_id, States.last_updated.desc()
)
baked_query += lambda q: q.limit(bindparam("number_of_states"))
states = execute(
baked_query(session).params(
number_of_states=number_of_states, entity_id=entity_id
)
)
entity_ids = [entity_id] if entity_id is not None else None
return _sorted_states_to_json(
hass,
session,
reversed(states),
start_time,
entity_ids,
include_start_time_state=False,
)
return history.get_last_state_changes(hass, number_of_states, entity_id)
@deprecated_function("homeassistant.components.recorder.history.get_states")
def get_states(hass, utc_point_in_time, entity_ids=None, run=None, filters=None):
"""Return the states at a specific point in time."""
if run is None:
run = recorder.run_information_from_instance(hass, utc_point_in_time)
# History did not run before utc_point_in_time
if run is None:
return []
with session_scope(hass=hass) as session:
return _get_states_with_session(
hass, session, utc_point_in_time, entity_ids, run, filters
)
def _get_states_with_session(
hass, session, utc_point_in_time, entity_ids=None, run=None, filters=None
):
"""Return the states at a specific point in time."""
if entity_ids and len(entity_ids) == 1:
return _get_single_entity_states_with_session(
hass, session, utc_point_in_time, entity_ids[0]
)
if run is None:
run = recorder.run_information_with_session(session, utc_point_in_time)
# History did not run before utc_point_in_time
if run is None:
return []
# We have more than one entity to look at (most commonly we want
# all entities,) so we need to do a search on all states since the
# last recorder run started.
query = session.query(*QUERY_STATES)
most_recent_states_by_date = session.query(
States.entity_id.label("max_entity_id"),
func.max(States.last_updated).label("max_last_updated"),
).filter(
(States.last_updated >= run.start) & (States.last_updated < utc_point_in_time)
return history.get_states(
hass, utc_point_in_time, entity_ids=entity_ids, run=run, filters=filters
)
if entity_ids:
most_recent_states_by_date.filter(States.entity_id.in_(entity_ids))
most_recent_states_by_date = most_recent_states_by_date.group_by(States.entity_id)
most_recent_states_by_date = most_recent_states_by_date.subquery()
most_recent_state_ids = session.query(
func.max(States.state_id).label("max_state_id")
).join(
most_recent_states_by_date,
and_(
States.entity_id == most_recent_states_by_date.c.max_entity_id,
States.last_updated == most_recent_states_by_date.c.max_last_updated,
),
)
most_recent_state_ids = most_recent_state_ids.group_by(States.entity_id)
most_recent_state_ids = most_recent_state_ids.subquery()
query = query.join(
most_recent_state_ids,
States.state_id == most_recent_state_ids.c.max_state_id,
)
if entity_ids is not None:
query = query.filter(States.entity_id.in_(entity_ids))
else:
query = query.filter(~States.domain.in_(IGNORE_DOMAINS))
if filters:
query = filters.apply(query)
return [LazyState(row) for row in execute(query)]
def _get_single_entity_states_with_session(hass, session, utc_point_in_time, entity_id):
# Use an entirely different (and extremely fast) query if we only
# have a single entity id
baked_query = hass.data[HISTORY_BAKERY](
lambda session: session.query(*QUERY_STATES)
)
baked_query += lambda q: q.filter(
States.last_updated < bindparam("utc_point_in_time"),
States.entity_id == bindparam("entity_id"),
)
baked_query += lambda q: q.order_by(States.last_updated.desc())
baked_query += lambda q: q.limit(1)
query = baked_query(session).params(
utc_point_in_time=utc_point_in_time, entity_id=entity_id
)
return [LazyState(row) for row in execute(query)]
def _sorted_states_to_json(
hass,
session,
states,
start_time,
entity_ids,
filters=None,
include_start_time_state=True,
minimal_response=False,
):
"""Convert SQL results into JSON friendly data structure.
This takes our state list and turns it into a JSON friendly data
structure {'entity_id': [list of states], 'entity_id2': [list of states]}
States must be sorted by entity_id and last_updated
We also need to go back and create a synthetic zero data point for
each list of states, otherwise our graphs won't start on the Y
axis correctly.
"""
result = defaultdict(list)
# Set all entity IDs to empty lists in result set to maintain the order
if entity_ids is not None:
for ent_id in entity_ids:
result[ent_id] = []
# Get the states at the start time
timer_start = time.perf_counter()
if include_start_time_state:
run = recorder.run_information_from_instance(hass, start_time)
for state in _get_states_with_session(
hass, session, start_time, entity_ids, run=run, filters=filters
):
state.last_changed = start_time
state.last_updated = start_time
result[state.entity_id].append(state)
if _LOGGER.isEnabledFor(logging.DEBUG):
elapsed = time.perf_counter() - timer_start
_LOGGER.debug("getting %d first datapoints took %fs", len(result), elapsed)
# Called in a tight loop so cache the function
# here
_process_timestamp_to_utc_isoformat = process_timestamp_to_utc_isoformat
# Append all changes to it
for ent_id, group in groupby(states, lambda state: state.entity_id):
domain = split_entity_id(ent_id)[0]
ent_results = result[ent_id]
if not minimal_response or domain in NEED_ATTRIBUTE_DOMAINS:
ent_results.extend(LazyState(db_state) for db_state in group)
# With minimal response we only provide a native
# State for the first and last response. All the states
# in-between only provide the "state" and the
# "last_changed".
if not ent_results:
ent_results.append(LazyState(next(group)))
prev_state = ent_results[-1]
initial_state_count = len(ent_results)
for db_state in group:
# With minimal response we do not care about attribute
# changes so we can filter out duplicate states
if db_state.state == prev_state.state:
continue
ent_results.append(
{
STATE_KEY: db_state.state,
LAST_CHANGED_KEY: _process_timestamp_to_utc_isoformat(
db_state.last_changed
),
}
)
prev_state = db_state
if prev_state and len(ent_results) != initial_state_count:
# There was at least one state change
# replace the last minimal state with
# a full state
ent_results[-1] = LazyState(prev_state)
# Filter out the empty lists if some states had 0 results.
return {key: val for key, val in result.items() if val}
@deprecated_function("homeassistant.components.recorder.history.get_state")
def get_state(hass, utc_point_in_time, entity_id, run=None):
"""Return a state at a specific point in time."""
states = get_states(hass, utc_point_in_time, (entity_id,), run)
return states[0] if states else None
return history.get_state(hass, utc_point_in_time, entity_id, run=run)
async def async_setup(hass, config):
@@ -439,8 +95,6 @@ async def async_setup(hass, config):
filters = sqlalchemy_filter_from_include_exclude_conf(conf)
hass.data[HISTORY_BAKERY] = baked.bakery()
use_include_order = conf.get(CONF_ORDER)
hass.http.register_view(HistoryPeriodView(filters, use_include_order))
@@ -542,16 +196,18 @@ class HistoryPeriodView(HomeAssistantView):
timer_start = time.perf_counter()
with session_scope(hass=hass) as session:
result = _get_significant_states(
hass,
session,
start_time,
end_time,
entity_ids,
self.filters,
include_start_time_state,
significant_changes_only,
minimal_response,
result = (
history._get_significant_states( # pylint: disable=protected-access
hass,
session,
start_time,
end_time,
entity_ids,
self.filters,
include_start_time_state,
significant_changes_only,
minimal_response,
)
)
result = list(result.values())
@@ -683,116 +339,3 @@ def _entities_may_have_state_changes_after(
return True
return False
class LazyState(State):
"""A lazy version of core State."""
__slots__ = [
"_row",
"entity_id",
"state",
"_attributes",
"_last_changed",
"_last_updated",
"_context",
]
def __init__(self, row): # pylint: disable=super-init-not-called
"""Init the lazy state."""
self._row = row
self.entity_id = self._row.entity_id
self.state = self._row.state or ""
self._attributes = None
self._last_changed = None
self._last_updated = None
self._context = None
@property # type: ignore
def attributes(self):
"""State attributes."""
if not self._attributes:
try:
self._attributes = json.loads(self._row.attributes)
except ValueError:
# When json.loads fails
_LOGGER.exception("Error converting row to state: %s", self._row)
self._attributes = {}
return self._attributes
@attributes.setter
def attributes(self, value):
"""Set attributes."""
self._attributes = value
@property # type: ignore
def context(self):
"""State context."""
if not self._context:
self._context = Context(id=None)
return self._context
@context.setter
def context(self, value):
"""Set context."""
self._context = value
@property # type: ignore
def last_changed(self):
"""Last changed datetime."""
if not self._last_changed:
self._last_changed = process_timestamp(self._row.last_changed)
return self._last_changed
@last_changed.setter
def last_changed(self, value):
"""Set last changed datetime."""
self._last_changed = value
@property # type: ignore
def last_updated(self):
"""Last updated datetime."""
if not self._last_updated:
self._last_updated = process_timestamp(self._row.last_updated)
return self._last_updated
@last_updated.setter
def last_updated(self, value):
"""Set last updated datetime."""
self._last_updated = value
def as_dict(self):
"""Return a dict representation of the LazyState.
Async friendly.
To be used for JSON serialization.
"""
if self._last_changed:
last_changed_isoformat = self._last_changed.isoformat()
else:
last_changed_isoformat = process_timestamp_to_utc_isoformat(
self._row.last_changed
)
if self._last_updated:
last_updated_isoformat = self._last_updated.isoformat()
else:
last_updated_isoformat = process_timestamp_to_utc_isoformat(
self._row.last_updated
)
return {
"entity_id": self.entity_id,
"state": self.state,
"attributes": self._attributes or self.attributes,
"last_changed": last_changed_isoformat,
"last_updated": last_updated_isoformat,
}
def __eq__(self, other):
"""Return the comparison."""
return (
other.__class__ in [self.__class__, State]
and self.entity_id == other.entity_id
and self.state == other.state
and self.attributes == other.attributes
)
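For code built on these helpers, the visible change is the import path; the signatures are unchanged and the deprecated wrappers keep forwarding for now. A hedged before/after sketch — hass, the entity id, and the time window are invented for illustration:

from datetime import timedelta

from homeassistant.components import history  # deprecated path, logs a warning
from homeassistant.components.recorder import history as recorder_history  # new path
import homeassistant.util.dt as dt_util

start = dt_util.utcnow() - timedelta(hours=1)

# Old call site, still functional but deprecated:
old = history.state_changes_during_period(hass, start, entity_id="sensor.demo")

# Equivalent new call site:
new = recorder_history.state_changes_during_period(
    hass, start, entity_id="sensor.demo"
)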

View file

@@ -2,7 +2,7 @@
"domain": "history_stats",
"name": "History Stats",
"documentation": "https://www.home-assistant.io/integrations/history_stats",
"dependencies": ["history"],
"dependencies": ["recorder"],
"codeowners": [],
"quality_scale": "internal",
"iot_class": "local_polling"

View file

@@ -5,7 +5,7 @@ import math
import voluptuous as vol
from homeassistant.components import history
from homeassistant.components.recorder import history
from homeassistant.components.sensor import PLATFORM_SCHEMA, SensorEntity
from homeassistant.const import (
CONF_ENTITY_ID,

View file

@@ -40,7 +40,7 @@ from homeassistant.helpers.typing import ConfigType
from homeassistant.loader import bind_hass
import homeassistant.util.dt as dt_util
from . import migration, purge
from . import history, migration, purge
from .const import CONF_DB_INTEGRITY_CHECK, DATA_INSTANCE, DOMAIN, SQLITE_URL_PREFIX
from .models import Base, Events, RecorderRuns, States
from .pool import RecorderPool
@@ -220,6 +220,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
instance.async_initialize()
instance.start()
_async_register_services(hass, instance)
history.async_setup(hass)
return await instance.async_db_ready

View file

@@ -0,0 +1,403 @@
"""Provide pre-made queries on top of the recorder component."""
from __future__ import annotations
from collections import defaultdict
from itertools import groupby
import logging
import time
from sqlalchemy import and_, bindparam, func
from sqlalchemy.ext import baked
from homeassistant.components import recorder
from homeassistant.components.recorder.models import (
States,
process_timestamp_to_utc_isoformat,
)
from homeassistant.components.recorder.util import execute, session_scope
from homeassistant.core import split_entity_id
import homeassistant.util.dt as dt_util
from .models import LazyState
# mypy: allow-untyped-defs, no-check-untyped-defs
_LOGGER = logging.getLogger(__name__)
STATE_KEY = "state"
LAST_CHANGED_KEY = "last_changed"
SIGNIFICANT_DOMAINS = (
"climate",
"device_tracker",
"humidifier",
"thermostat",
"water_heater",
)
IGNORE_DOMAINS = ("zone", "scene")
NEED_ATTRIBUTE_DOMAINS = {
"climate",
"humidifier",
"input_datetime",
"thermostat",
"water_heater",
}
QUERY_STATES = [
States.domain,
States.entity_id,
States.state,
States.attributes,
States.last_changed,
States.last_updated,
]
HISTORY_BAKERY = "history_bakery"
def async_setup(hass):
"""Set up the history hooks."""
hass.data[HISTORY_BAKERY] = baked.bakery()
def get_significant_states(hass, *args, **kwargs):
"""Wrap _get_significant_states with a sql session."""
with session_scope(hass=hass) as session:
return _get_significant_states(hass, session, *args, **kwargs)
def _get_significant_states(
hass,
session,
start_time,
end_time=None,
entity_ids=None,
filters=None,
include_start_time_state=True,
significant_changes_only=True,
minimal_response=False,
):
"""
Return state changes during the UTC period start_time - end_time.
Significant states are all states where there is a state change,
as well as all states from certain domains (for instance
thermostat so that we get current temperature in our graphs).
"""
timer_start = time.perf_counter()
baked_query = hass.data[HISTORY_BAKERY](
lambda session: session.query(*QUERY_STATES)
)
if significant_changes_only:
baked_query += lambda q: q.filter(
(
States.domain.in_(SIGNIFICANT_DOMAINS)
| (States.last_changed == States.last_updated)
)
& (States.last_updated > bindparam("start_time"))
)
else:
baked_query += lambda q: q.filter(States.last_updated > bindparam("start_time"))
if entity_ids is not None:
baked_query += lambda q: q.filter(
States.entity_id.in_(bindparam("entity_ids", expanding=True))
)
else:
baked_query += lambda q: q.filter(~States.domain.in_(IGNORE_DOMAINS))
if filters:
filters.bake(baked_query)
if end_time is not None:
baked_query += lambda q: q.filter(States.last_updated < bindparam("end_time"))
baked_query += lambda q: q.order_by(States.entity_id, States.last_updated)
states = execute(
baked_query(session).params(
start_time=start_time, end_time=end_time, entity_ids=entity_ids
)
)
if _LOGGER.isEnabledFor(logging.DEBUG):
elapsed = time.perf_counter() - timer_start
_LOGGER.debug("get_significant_states took %fs", elapsed)
return _sorted_states_to_dict(
hass,
session,
states,
start_time,
entity_ids,
filters,
include_start_time_state,
minimal_response,
)
def state_changes_during_period(hass, start_time, end_time=None, entity_id=None):
"""Return states changes during UTC period start_time - end_time."""
with session_scope(hass=hass) as session:
baked_query = hass.data[HISTORY_BAKERY](
lambda session: session.query(*QUERY_STATES)
)
baked_query += lambda q: q.filter(
(States.last_changed == States.last_updated)
& (States.last_updated > bindparam("start_time"))
)
if end_time is not None:
baked_query += lambda q: q.filter(
States.last_updated < bindparam("end_time")
)
if entity_id is not None:
baked_query += lambda q: q.filter_by(entity_id=bindparam("entity_id"))
entity_id = entity_id.lower()
baked_query += lambda q: q.order_by(States.entity_id, States.last_updated)
states = execute(
baked_query(session).params(
start_time=start_time, end_time=end_time, entity_id=entity_id
)
)
entity_ids = [entity_id] if entity_id is not None else None
return _sorted_states_to_dict(hass, session, states, start_time, entity_ids)
def get_last_state_changes(hass, number_of_states, entity_id):
"""Return the last number_of_states."""
start_time = dt_util.utcnow()
with session_scope(hass=hass) as session:
baked_query = hass.data[HISTORY_BAKERY](
lambda session: session.query(*QUERY_STATES)
)
baked_query += lambda q: q.filter(States.last_changed == States.last_updated)
if entity_id is not None:
baked_query += lambda q: q.filter_by(entity_id=bindparam("entity_id"))
entity_id = entity_id.lower()
baked_query += lambda q: q.order_by(
States.entity_id, States.last_updated.desc()
)
baked_query += lambda q: q.limit(bindparam("number_of_states"))
states = execute(
baked_query(session).params(
number_of_states=number_of_states, entity_id=entity_id
)
)
entity_ids = [entity_id] if entity_id is not None else None
return _sorted_states_to_dict(
hass,
session,
reversed(states),
start_time,
entity_ids,
include_start_time_state=False,
)
def get_states(hass, utc_point_in_time, entity_ids=None, run=None, filters=None):
"""Return the states at a specific point in time."""
if run is None:
run = recorder.run_information_from_instance(hass, utc_point_in_time)
# History did not run before utc_point_in_time
if run is None:
return []
with session_scope(hass=hass) as session:
return _get_states_with_session(
hass, session, utc_point_in_time, entity_ids, run, filters
)
def _get_states_with_session(
hass, session, utc_point_in_time, entity_ids=None, run=None, filters=None
):
"""Return the states at a specific point in time."""
if entity_ids and len(entity_ids) == 1:
return _get_single_entity_states_with_session(
hass, session, utc_point_in_time, entity_ids[0]
)
if run is None:
run = recorder.run_information_with_session(session, utc_point_in_time)
# History did not run before utc_point_in_time
if run is None:
return []
# We have more than one entity to look at (most commonly we want
# all entities), so we need to do a search on all states since the
# last recorder run started.
query = session.query(*QUERY_STATES)
most_recent_states_by_date = session.query(
States.entity_id.label("max_entity_id"),
func.max(States.last_updated).label("max_last_updated"),
).filter(
(States.last_updated >= run.start) & (States.last_updated < utc_point_in_time)
)
if entity_ids:
most_recent_states_by_date = most_recent_states_by_date.filter(States.entity_id.in_(entity_ids))
most_recent_states_by_date = most_recent_states_by_date.group_by(States.entity_id)
most_recent_states_by_date = most_recent_states_by_date.subquery()
most_recent_state_ids = session.query(
func.max(States.state_id).label("max_state_id")
).join(
most_recent_states_by_date,
and_(
States.entity_id == most_recent_states_by_date.c.max_entity_id,
States.last_updated == most_recent_states_by_date.c.max_last_updated,
),
)
most_recent_state_ids = most_recent_state_ids.group_by(States.entity_id)
most_recent_state_ids = most_recent_state_ids.subquery()
query = query.join(
most_recent_state_ids,
States.state_id == most_recent_state_ids.c.max_state_id,
)
if entity_ids is not None:
query = query.filter(States.entity_id.in_(entity_ids))
else:
query = query.filter(~States.domain.in_(IGNORE_DOMAINS))
if filters:
query = filters.apply(query)
return [LazyState(row) for row in execute(query)]
def _get_single_entity_states_with_session(hass, session, utc_point_in_time, entity_id):
# Use an entirely different (and extremely fast) query if we only
# have a single entity id
baked_query = hass.data[HISTORY_BAKERY](
lambda session: session.query(*QUERY_STATES)
)
baked_query += lambda q: q.filter(
States.last_updated < bindparam("utc_point_in_time"),
States.entity_id == bindparam("entity_id"),
)
baked_query += lambda q: q.order_by(States.last_updated.desc())
baked_query += lambda q: q.limit(1)
query = baked_query(session).params(
utc_point_in_time=utc_point_in_time, entity_id=entity_id
)
return [LazyState(row) for row in execute(query)]
def _sorted_states_to_dict(
hass,
session,
states,
start_time,
entity_ids,
filters=None,
include_start_time_state=True,
minimal_response=False,
):
"""Convert SQL results into JSON friendly data structure.
This takes our state list and turns it into a JSON friendly data
structure {'entity_id': [list of states], 'entity_id2': [list of states]}
States must be sorted by entity_id and last_updated
We also need to go back and create a synthetic zero data point for
each list of states, otherwise our graphs won't start on the Y
axis correctly.
"""
result = defaultdict(list)
# Set all entity IDs to empty lists in result set to maintain the order
if entity_ids is not None:
for ent_id in entity_ids:
result[ent_id] = []
# Get the states at the start time
timer_start = time.perf_counter()
if include_start_time_state:
run = recorder.run_information_from_instance(hass, start_time)
for state in _get_states_with_session(
hass, session, start_time, entity_ids, run=run, filters=filters
):
state.last_changed = start_time
state.last_updated = start_time
result[state.entity_id].append(state)
if _LOGGER.isEnabledFor(logging.DEBUG):
elapsed = time.perf_counter() - timer_start
_LOGGER.debug("getting %d first datapoints took %fs", len(result), elapsed)
# Called in a tight loop so cache the function
# here
_process_timestamp_to_utc_isoformat = process_timestamp_to_utc_isoformat
# Append all changes to it
for ent_id, group in groupby(states, lambda state: state.entity_id):
domain = split_entity_id(ent_id)[0]
ent_results = result[ent_id]
if not minimal_response or domain in NEED_ATTRIBUTE_DOMAINS:
ent_results.extend(LazyState(db_state) for db_state in group)
# With minimal response we only provide a native
# State for the first and last response. All the states
# in-between only provide the "state" and the
# "last_changed".
if not ent_results:
ent_results.append(LazyState(next(group)))
prev_state = ent_results[-1]
initial_state_count = len(ent_results)
for db_state in group:
# With minimal response we do not care about attribute
# changes so we can filter out duplicate states
if db_state.state == prev_state.state:
continue
ent_results.append(
{
STATE_KEY: db_state.state,
LAST_CHANGED_KEY: _process_timestamp_to_utc_isoformat(
db_state.last_changed
),
}
)
prev_state = db_state
if prev_state and len(ent_results) != initial_state_count:
# There was at least one state change
# replace the last minimal state with
# a full state
ent_results[-1] = LazyState(prev_state)
# Filter out the empty lists if some states had 0 results.
return {key: val for key, val in result.items() if val}
def get_state(hass, utc_point_in_time, entity_id, run=None):
"""Return a state at a specific point in time."""
states = get_states(hass, utc_point_in_time, (entity_id,), run)
return states[0] if states else None
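All of these queries go through SQLAlchemy's baked-query extension: each query is assembled from lambdas, compiled to SQL once, cached in the bakery that async_setup stores at hass.data[HISTORY_BAKERY], and later invocations only rebind parameters. A self-contained sketch of the pattern, assuming SQLAlchemy 1.4-style imports; the model, table, and session below are invented stand-ins, not the recorder's real States model:

from datetime import datetime

from sqlalchemy import Column, DateTime, Integer, String, bindparam, create_engine
from sqlalchemy.ext import baked
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class DemoState(Base):
    """Stand-in for the recorder States model."""

    __tablename__ = "demo_states"
    state_id = Column(Integer, primary_key=True)
    entity_id = Column(String)
    last_updated = Column(DateTime)


bakery = baked.bakery()


def changes_after(session: Session, cutoff: datetime):
    """Fetch rows updated after cutoff via a cached (baked) query."""
    # The lambdas are hashed and compiled once; later calls reuse the
    # cached SQL and only rebind the "cutoff" parameter.
    baked_query = bakery(lambda s: s.query(DemoState))
    baked_query += lambda q: q.filter(DemoState.last_updated > bindparam("cutoff"))
    baked_query += lambda q: q.order_by(DemoState.entity_id, DemoState.last_updated)
    return baked_query(session).params(cutoff=cutoff).all()


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
with Session(engine) as session:
    rows = changes_after(session, datetime(2021, 5, 11))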

View file

@@ -286,3 +286,116 @@ def process_timestamp_to_utc_isoformat(ts):
if ts.tzinfo is None:
return f"{ts.isoformat()}{DB_TIMEZONE}"
return ts.astimezone(dt_util.UTC).isoformat()
class LazyState(State):
"""A lazy version of core State."""
__slots__ = [
"_row",
"entity_id",
"state",
"_attributes",
"_last_changed",
"_last_updated",
"_context",
]
def __init__(self, row): # pylint: disable=super-init-not-called
"""Init the lazy state."""
self._row = row
self.entity_id = self._row.entity_id
self.state = self._row.state or ""
self._attributes = None
self._last_changed = None
self._last_updated = None
self._context = None
@property # type: ignore
def attributes(self):
"""State attributes."""
if not self._attributes:
try:
self._attributes = json.loads(self._row.attributes)
except ValueError:
# When json.loads fails
_LOGGER.exception("Error converting row to state: %s", self._row)
self._attributes = {}
return self._attributes
@attributes.setter
def attributes(self, value):
"""Set attributes."""
self._attributes = value
@property # type: ignore
def context(self):
"""State context."""
if not self._context:
self._context = Context(id=None)
return self._context
@context.setter
def context(self, value):
"""Set context."""
self._context = value
@property # type: ignore
def last_changed(self):
"""Last changed datetime."""
if not self._last_changed:
self._last_changed = process_timestamp(self._row.last_changed)
return self._last_changed
@last_changed.setter
def last_changed(self, value):
"""Set last changed datetime."""
self._last_changed = value
@property # type: ignore
def last_updated(self):
"""Last updated datetime."""
if not self._last_updated:
self._last_updated = process_timestamp(self._row.last_updated)
return self._last_updated
@last_updated.setter
def last_updated(self, value):
"""Set last updated datetime."""
self._last_updated = value
def as_dict(self):
"""Return a dict representation of the LazyState.
Async friendly.
To be used for JSON serialization.
"""
if self._last_changed:
last_changed_isoformat = self._last_changed.isoformat()
else:
last_changed_isoformat = process_timestamp_to_utc_isoformat(
self._row.last_changed
)
if self._last_updated:
last_updated_isoformat = self._last_updated.isoformat()
else:
last_updated_isoformat = process_timestamp_to_utc_isoformat(
self._row.last_updated
)
return {
"entity_id": self.entity_id,
"state": self.state,
"attributes": self._attributes or self.attributes,
"last_changed": last_changed_isoformat,
"last_updated": last_updated_isoformat,
}
def __eq__(self, other):
"""Return the comparison."""
return (
other.__class__ in [self.__class__, State]
and self.entity_id == other.entity_id
and self.state == other.state
and self.attributes == other.attributes
)
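The point of LazyState is to defer the expensive per-row work: attribute JSON is decoded and timestamps converted only on first access, which matters when a history query yields thousands of rows and most fields are never touched. A small usage sketch — the row below is a hypothetical stand-in for a States ORM row; real rows come from the queries in recorder/history.py:

from datetime import datetime, timezone
from types import SimpleNamespace

from homeassistant.components.recorder.models import LazyState

# Hypothetical row object mimicking the columns LazyState reads.
row = SimpleNamespace(
    entity_id="sensor.demo",
    state="21.5",
    attributes='{"unit_of_measurement": "°C"}',
    last_changed=datetime(2021, 5, 11, 7, 0, tzinfo=timezone.utc),
    last_updated=datetime(2021, 5, 11, 7, 0, tzinfo=timezone.utc),
)

state = LazyState(row)
print(state.entity_id)   # set eagerly in __init__
print(state.attributes)  # json.loads runs here, on first access, then is cached
print(state.as_dict())   # formats timestamps from the raw row if never accessed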

View file

@@ -81,7 +81,6 @@ async def test_chain(hass, values):
async def test_chain_history(hass, values, missing=False):
"""Test if filter chaining works."""
config = {
"history": {},
"sensor": {
"platform": "filter",
"name": "test",
@@ -94,7 +93,6 @@ async def test_chain_history(hass, values, missing=False):
},
}
await async_init_recorder_component(hass)
assert_setup_component(1, "history")
t_0 = dt_util.utcnow() - timedelta(minutes=1)
t_1 = dt_util.utcnow() - timedelta(minutes=2)
@@ -114,10 +112,10 @@
}
with patch(
"homeassistant.components.history.state_changes_during_period",
"homeassistant.components.recorder.history.state_changes_during_period",
return_value=fake_states,
), patch(
"homeassistant.components.history.get_last_state_changes",
"homeassistant.components.recorder.history.get_last_state_changes",
return_value=fake_states,
):
with assert_setup_component(1, "sensor"):
@@ -208,7 +206,6 @@ async def test_chain_history_missing(hass, values):
async def test_history_time(hass):
"""Test loading from history based on a time window."""
config = {
"history": {},
"sensor": {
"platform": "filter",
"name": "test",
@@ -217,7 +214,6 @@ async def test_history_time(hass):
},
}
await async_init_recorder_component(hass)
assert_setup_component(1, "history")
t_0 = dt_util.utcnow() - timedelta(minutes=1)
t_1 = dt_util.utcnow() - timedelta(minutes=2)
@@ -231,10 +227,10 @@
]
}
with patch(
"homeassistant.components.history.state_changes_during_period",
"homeassistant.components.recorder.history.state_changes_during_period",
return_value=fake_states,
), patch(
"homeassistant.components.history.get_last_state_changes",
"homeassistant.components.recorder.history.get_last_state_changes",
return_value=fake_states,
):
with assert_setup_component(1, "sensor"):

View file

@@ -1,6 +1,5 @@
"""The tests the History component."""
# pylint: disable=protected-access,invalid-name
from copy import copy
from datetime import timedelta
import json
from unittest.mock import patch, sentinel
@@ -8,13 +7,14 @@ from unittest.mock import patch, sentinel
import pytest
from homeassistant.components import history, recorder
from homeassistant.components.recorder.history import get_significant_states
from homeassistant.components.recorder.models import process_timestamp
import homeassistant.core as ha
from homeassistant.helpers.json import JSONEncoder
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util
from tests.common import init_recorder_component, mock_state_change_event
from tests.common import init_recorder_component
from tests.components.recorder.common import trigger_db_commit, wait_recording_done
@@ -25,151 +25,6 @@ def test_setup():
pass
def test_get_states(hass_history):
"""Test getting states at a specific point in time."""
hass = hass_history
states = []
now = dt_util.utcnow()
with patch("homeassistant.components.recorder.dt_util.utcnow", return_value=now):
for i in range(5):
state = ha.State(
f"test.point_in_time_{i % 5}",
f"State {i}",
{"attribute_test": i},
)
mock_state_change_event(hass, state)
states.append(state)
wait_recording_done(hass)
future = now + timedelta(seconds=1)
with patch("homeassistant.components.recorder.dt_util.utcnow", return_value=future):
for i in range(5):
state = ha.State(
f"test.point_in_time_{i % 5}",
f"State {i}",
{"attribute_test": i},
)
mock_state_change_event(hass, state)
wait_recording_done(hass)
# Get states returns everything before POINT
for state1, state2 in zip(
states,
sorted(history.get_states(hass, future), key=lambda state: state.entity_id),
):
assert state1 == state2
# Test get_state here because we have a DB setup
assert states[0] == history.get_state(hass, future, states[0].entity_id)
time_before_recorder_ran = now - timedelta(days=1000)
assert history.get_states(hass, time_before_recorder_ran) == []
assert history.get_state(hass, time_before_recorder_ran, "demo.id") is None
def test_state_changes_during_period(hass_history):
"""Test state change during period."""
hass = hass_history
entity_id = "media_player.test"
def set_state(state):
"""Set the state."""
hass.states.set(entity_id, state)
wait_recording_done(hass)
return hass.states.get(entity_id)
start = dt_util.utcnow()
point = start + timedelta(seconds=1)
end = point + timedelta(seconds=1)
with patch("homeassistant.components.recorder.dt_util.utcnow", return_value=start):
set_state("idle")
set_state("YouTube")
with patch("homeassistant.components.recorder.dt_util.utcnow", return_value=point):
states = [
set_state("idle"),
set_state("Netflix"),
set_state("Plex"),
set_state("YouTube"),
]
with patch("homeassistant.components.recorder.dt_util.utcnow", return_value=end):
set_state("Netflix")
set_state("Plex")
hist = history.state_changes_during_period(hass, start, end, entity_id)
assert states == hist[entity_id]
def test_get_last_state_changes(hass_history):
"""Test number of state changes."""
hass = hass_history
entity_id = "sensor.test"
def set_state(state):
"""Set the state."""
hass.states.set(entity_id, state)
wait_recording_done(hass)
return hass.states.get(entity_id)
start = dt_util.utcnow() - timedelta(minutes=2)
point = start + timedelta(minutes=1)
point2 = point + timedelta(minutes=1)
with patch("homeassistant.components.recorder.dt_util.utcnow", return_value=start):
set_state("1")
states = []
with patch("homeassistant.components.recorder.dt_util.utcnow", return_value=point):
states.append(set_state("2"))
with patch("homeassistant.components.recorder.dt_util.utcnow", return_value=point2):
states.append(set_state("3"))
hist = history.get_last_state_changes(hass, 2, entity_id)
assert states == hist[entity_id]
def test_ensure_state_can_be_copied(hass_history):
"""Ensure a state can pass though copy().
The filter integration uses copy() on states
from history.
"""
hass = hass_history
entity_id = "sensor.test"
def set_state(state):
"""Set the state."""
hass.states.set(entity_id, state)
wait_recording_done(hass)
return hass.states.get(entity_id)
start = dt_util.utcnow() - timedelta(minutes=2)
point = start + timedelta(minutes=1)
with patch("homeassistant.components.recorder.dt_util.utcnow", return_value=start):
set_state("1")
with patch("homeassistant.components.recorder.dt_util.utcnow", return_value=point):
set_state("2")
hist = history.get_last_state_changes(hass, 2, entity_id)
assert copy(hist[entity_id][0]) == hist[entity_id][0]
assert copy(hist[entity_id][1]) == hist[entity_id][1]
def test_get_significant_states(hass_history):
"""Test that only significant states are returned.
@@ -179,7 +34,7 @@ def test_get_significant_states(hass_history):
"""
hass = hass_history
zero, four, states = record_states(hass)
hist = history.get_significant_states(hass, zero, four, filters=history.Filters())
hist = get_significant_states(hass, zero, four, filters=history.Filters())
assert states == hist
@@ -195,7 +50,7 @@ def test_get_significant_states_minimal_response(hass_history):
"""
hass = hass_history
zero, four, states = record_states(hass)
hist = history.get_significant_states(
hist = get_significant_states(
hass, zero, four, filters=history.Filters(), minimal_response=True
)
@@ -236,7 +91,7 @@ def test_get_significant_states_with_initial(hass_history):
if state.last_changed == one:
state.last_changed = one_and_half
hist = history.get_significant_states(
hist = get_significant_states(
hass,
one_and_half,
four,
@@ -263,7 +118,7 @@ def test_get_significant_states_without_initial(hass_history):
)
del states["media_player.test2"]
hist = history.get_significant_states(
hist = get_significant_states(
hass,
one_and_half,
four,
@@ -283,7 +138,7 @@ def test_get_significant_states_entity_id(hass_history):
del states["thermostat.test2"]
del states["script.can_cancel_this_one"]
hist = history.get_significant_states(
hist = get_significant_states(
hass, zero, four, ["media_player.test"], filters=history.Filters()
)
assert states == hist
@@ -298,7 +153,7 @@ def test_get_significant_states_multiple_entity_ids(hass_history):
del states["thermostat.test2"]
del states["script.can_cancel_this_one"]
hist = history.get_significant_states(
hist = get_significant_states(
hass,
zero,
four,
@@ -570,12 +425,12 @@ def test_get_significant_states_are_ordered(hass_history):
hass = hass_history
zero, four, _states = record_states(hass)
entity_ids = ["media_player.test", "media_player.test2"]
hist = history.get_significant_states(
hist = get_significant_states(
hass, zero, four, entity_ids, filters=history.Filters()
)
assert list(hist.keys()) == entity_ids
entity_ids = ["media_player.test2", "media_player.test"]
hist = history.get_significant_states(
hist = get_significant_states(
hass, zero, four, entity_ids, filters=history.Filters()
)
assert list(hist.keys()) == entity_ids
@@ -619,14 +474,14 @@ def test_get_significant_states_only(hass_history):
# everything is different
states.append(set_state("412", attributes={"attribute": 54.23}))
hist = history.get_significant_states(hass, start, significant_changes_only=True)
hist = get_significant_states(hass, start, significant_changes_only=True)
assert len(hist[entity_id]) == 2
assert states[0] not in hist[entity_id]
assert states[1] in hist[entity_id]
assert states[2] in hist[entity_id]
hist = history.get_significant_states(hass, start, significant_changes_only=False)
hist = get_significant_states(hass, start, significant_changes_only=False)
assert len(hist[entity_id]) == 3
assert states == hist[entity_id]
@@ -644,7 +499,7 @@ def check_significant_states(hass, zero, four, states, config):
filters.included_entities = include.get(history.CONF_ENTITIES, [])
filters.included_domains = include.get(history.CONF_DOMAINS, [])
hist = history.get_significant_states(hass, zero, four, filters=filters)
hist = get_significant_states(hass, zero, four, filters=filters)
assert states == hist

View file

@@ -35,7 +35,6 @@ class TestHistoryStatsSensor(unittest.TestCase):
"""Test the history statistics sensor setup."""
self.init_recorder()
config = {
"history": {},
"sensor": {
"platform": "history_stats",
"entity_id": "binary_sensor.test_id",
@@ -57,7 +56,6 @@ class TestHistoryStatsSensor(unittest.TestCase):
"""Test the history statistics sensor setup for multiple states."""
self.init_recorder()
config = {
"history": {},
"sensor": {
"platform": "history_stats",
"entity_id": "binary_sensor.test_id",
@@ -146,7 +144,6 @@ class TestHistoryStatsSensor(unittest.TestCase):
"""Test when duration value is not a timedelta."""
self.init_recorder()
config = {
"history": {},
"sensor": {
"platform": "history_stats",
"entity_id": "binary_sensor.test_id",
@@ -187,7 +184,6 @@ class TestHistoryStatsSensor(unittest.TestCase):
"""Test config when not enough arguments provided."""
self.init_recorder()
config = {
"history": {},
"sensor": {
"platform": "history_stats",
"entity_id": "binary_sensor.test_id",
@@ -206,7 +202,6 @@ class TestHistoryStatsSensor(unittest.TestCase):
"""Test config when too many arguments provided."""
self.init_recorder()
config = {
"history": {},
"sensor": {
"platform": "history_stats",
"entity_id": "binary_sensor.test_id",
@@ -344,9 +339,9 @@ async def test_measure_multiple(hass):
)
with patch(
"homeassistant.components.history.state_changes_during_period",
"homeassistant.components.recorder.history.state_changes_during_period",
return_value=fake_states,
), patch("homeassistant.components.history.get_state", return_value=None):
), patch("homeassistant.components.recorder.history.get_state", return_value=None):
for i in range(1, 5):
await hass.helpers.entity_component.async_update_entity(f"sensor.sensor{i}")
await hass.async_block_till_done()
@@ -421,9 +416,9 @@ async def async_test_measure(hass):
)
with patch(
"homeassistant.components.history.state_changes_during_period",
"homeassistant.components.recorder.history.state_changes_during_period",
return_value=fake_states,
), patch("homeassistant.components.history.get_state", return_value=None):
), patch("homeassistant.components.recorder.history.get_state", return_value=None):
for i in range(1, 5):
await hass.helpers.entity_component.async_update_entity(f"sensor.sensor{i}")
await hass.async_block_till_done()

View file

@@ -0,0 +1,432 @@
"""The tests the History component."""
# pylint: disable=protected-access,invalid-name
from copy import copy
from datetime import timedelta
import json
from unittest.mock import patch, sentinel
from homeassistant.components.recorder import history
from homeassistant.components.recorder.models import process_timestamp
import homeassistant.core as ha
from homeassistant.helpers.json import JSONEncoder
import homeassistant.util.dt as dt_util
from tests.common import mock_state_change_event
from tests.components.recorder.common import wait_recording_done
def test_get_states(hass_recorder):
"""Test getting states at a specific point in time."""
hass = hass_recorder()
states = []
now = dt_util.utcnow()
with patch("homeassistant.components.recorder.dt_util.utcnow", return_value=now):
for i in range(5):
state = ha.State(
f"test.point_in_time_{i % 5}",
f"State {i}",
{"attribute_test": i},
)
mock_state_change_event(hass, state)
states.append(state)
wait_recording_done(hass)
future = now + timedelta(seconds=1)
with patch("homeassistant.components.recorder.dt_util.utcnow", return_value=future):
for i in range(5):
state = ha.State(
f"test.point_in_time_{i % 5}",
f"State {i}",
{"attribute_test": i},
)
mock_state_change_event(hass, state)
wait_recording_done(hass)
# Get states returns everything before POINT
for state1, state2 in zip(
states,
sorted(history.get_states(hass, future), key=lambda state: state.entity_id),
):
assert state1 == state2
# Test get_state here because we have a DB setup
assert states[0] == history.get_state(hass, future, states[0].entity_id)
time_before_recorder_ran = now - timedelta(days=1000)
assert history.get_states(hass, time_before_recorder_ran) == []
assert history.get_state(hass, time_before_recorder_ran, "demo.id") is None
def test_state_changes_during_period(hass_recorder):
"""Test state change during period."""
hass = hass_recorder()
entity_id = "media_player.test"
def set_state(state):
"""Set the state."""
hass.states.set(entity_id, state)
wait_recording_done(hass)
return hass.states.get(entity_id)
start = dt_util.utcnow()
point = start + timedelta(seconds=1)
end = point + timedelta(seconds=1)
with patch("homeassistant.components.recorder.dt_util.utcnow", return_value=start):
set_state("idle")
set_state("YouTube")
with patch("homeassistant.components.recorder.dt_util.utcnow", return_value=point):
states = [
set_state("idle"),
set_state("Netflix"),
set_state("Plex"),
set_state("YouTube"),
]
with patch("homeassistant.components.recorder.dt_util.utcnow", return_value=end):
set_state("Netflix")
set_state("Plex")
hist = history.state_changes_during_period(hass, start, end, entity_id)
assert states == hist[entity_id]
def test_get_last_state_changes(hass_recorder):
"""Test number of state changes."""
hass = hass_recorder()
entity_id = "sensor.test"
def set_state(state):
"""Set the state."""
hass.states.set(entity_id, state)
wait_recording_done(hass)
return hass.states.get(entity_id)
start = dt_util.utcnow() - timedelta(minutes=2)
point = start + timedelta(minutes=1)
point2 = point + timedelta(minutes=1)
with patch("homeassistant.components.recorder.dt_util.utcnow", return_value=start):
set_state("1")
states = []
with patch("homeassistant.components.recorder.dt_util.utcnow", return_value=point):
states.append(set_state("2"))
with patch("homeassistant.components.recorder.dt_util.utcnow", return_value=point2):
states.append(set_state("3"))
hist = history.get_last_state_changes(hass, 2, entity_id)
assert states == hist[entity_id]
def test_ensure_state_can_be_copied(hass_recorder):
"""Ensure a state can pass though copy().
The filter integration uses copy() on states
from history.
"""
hass = hass_recorder()
entity_id = "sensor.test"
def set_state(state):
"""Set the state."""
hass.states.set(entity_id, state)
wait_recording_done(hass)
return hass.states.get(entity_id)
start = dt_util.utcnow() - timedelta(minutes=2)
point = start + timedelta(minutes=1)
with patch("homeassistant.components.recorder.dt_util.utcnow", return_value=start):
set_state("1")
with patch("homeassistant.components.recorder.dt_util.utcnow", return_value=point):
set_state("2")
hist = history.get_last_state_changes(hass, 2, entity_id)
assert copy(hist[entity_id][0]) == hist[entity_id][0]
assert copy(hist[entity_id][1]) == hist[entity_id][1]
def test_get_significant_states(hass_recorder):
"""Test that only significant states are returned.
We should get back every thermostat change that
includes an attribute change, but only the state updates for
media player (attribute changes are not significant and not returned).
"""
hass = hass_recorder()
zero, four, states = record_states(hass)
hist = history.get_significant_states(hass, zero, four)
assert states == hist
def test_get_significant_states_minimal_response(hass_recorder):
"""Test that only significant states are returned.
When minimal_response is set, only the first and
last states return a complete state.
We should get back every thermostat change that
includes an attribute change, but only the state updates for
media player (attribute changes are not significant and not returned).
"""
hass = hass_recorder()
zero, four, states = record_states(hass)
hist = history.get_significant_states(hass, zero, four, minimal_response=True)
# The second media_player.test state is reduced
# down to last_changed and state when minimal_response
# is set. We use JSONEncoder to make sure that our
# pre-encoded last_changed is always the same as what
# will happen with encoding a native state.
input_state = states["media_player.test"][1]
orig_last_changed = json.dumps(
process_timestamp(input_state.last_changed),
cls=JSONEncoder,
).replace('"', "")
orig_state = input_state.state
states["media_player.test"][1] = {
"last_changed": orig_last_changed,
"state": orig_state,
}
assert states == hist
def test_get_significant_states_with_initial(hass_recorder):
"""Test that only significant states are returned.
We should get back every thermostat change that
includes an attribute change, but only the state updates for
media player (attribute changes are not significant and not returned).
"""
hass = hass_recorder()
zero, four, states = record_states(hass)
one = zero + timedelta(seconds=1)
one_and_half = zero + timedelta(seconds=1.5)
for entity_id in states:
if entity_id == "media_player.test":
states[entity_id] = states[entity_id][1:]
for state in states[entity_id]:
if state.last_changed == one:
state.last_changed = one_and_half
hist = history.get_significant_states(
hass,
one_and_half,
four,
include_start_time_state=True,
)
assert states == hist
def test_get_significant_states_without_initial(hass_recorder):
"""Test that only significant states are returned.
We should get back every thermostat change that
includes an attribute change, but only the state updates for
media player (attribute changes are not significant and not returned).
"""
hass = hass_recorder()
zero, four, states = record_states(hass)
one = zero + timedelta(seconds=1)
one_and_half = zero + timedelta(seconds=1.5)
for entity_id in states:
states[entity_id] = list(
filter(lambda s: s.last_changed != one, states[entity_id])
)
del states["media_player.test2"]
hist = history.get_significant_states(
hass,
one_and_half,
four,
include_start_time_state=False,
)
assert states == hist
def test_get_significant_states_entity_id(hass_recorder):
"""Test that only significant states are returned for one entity."""
hass = hass_recorder()
zero, four, states = record_states(hass)
del states["media_player.test2"]
del states["media_player.test3"]
del states["thermostat.test"]
del states["thermostat.test2"]
del states["script.can_cancel_this_one"]
hist = history.get_significant_states(hass, zero, four, ["media_player.test"])
assert states == hist
def test_get_significant_states_multiple_entity_ids(hass_recorder):
"""Test that only significant states are returned for one entity."""
hass = hass_recorder()
zero, four, states = record_states(hass)
del states["media_player.test2"]
del states["media_player.test3"]
del states["thermostat.test2"]
del states["script.can_cancel_this_one"]
hist = history.get_significant_states(
hass,
zero,
four,
["media_player.test", "thermostat.test"],
)
assert states == hist
def test_get_significant_states_are_ordered(hass_recorder):
"""Test order of results from get_significant_states.
When entity ids are given, the results should be returned with the data
in the same order.
"""
hass = hass_recorder()
zero, four, _states = record_states(hass)
entity_ids = ["media_player.test", "media_player.test2"]
hist = history.get_significant_states(hass, zero, four, entity_ids)
assert list(hist.keys()) == entity_ids
entity_ids = ["media_player.test2", "media_player.test"]
hist = history.get_significant_states(hass, zero, four, entity_ids)
assert list(hist.keys()) == entity_ids
def test_get_significant_states_only(hass_recorder):
"""Test significant states when significant_states_only is set."""
hass = hass_recorder()
entity_id = "sensor.test"
def set_state(state, **kwargs):
"""Set the state."""
hass.states.set(entity_id, state, **kwargs)
wait_recording_done(hass)
return hass.states.get(entity_id)
start = dt_util.utcnow() - timedelta(minutes=4)
points = []
for i in range(1, 4):
points.append(start + timedelta(minutes=i))
states = []
with patch("homeassistant.components.recorder.dt_util.utcnow", return_value=start):
set_state("123", attributes={"attribute": 10.64})
with patch(
"homeassistant.components.recorder.dt_util.utcnow", return_value=points[0]
):
# Attributes are different, state not
states.append(set_state("123", attributes={"attribute": 21.42}))
with patch(
"homeassistant.components.recorder.dt_util.utcnow", return_value=points[1]
):
# state is different, attributes not
states.append(set_state("32", attributes={"attribute": 21.42}))
with patch(
"homeassistant.components.recorder.dt_util.utcnow", return_value=points[2]
):
# everything is different
states.append(set_state("412", attributes={"attribute": 54.23}))
hist = history.get_significant_states(hass, start, significant_changes_only=True)
assert len(hist[entity_id]) == 2
assert states[0] not in hist[entity_id]
assert states[1] in hist[entity_id]
assert states[2] in hist[entity_id]
hist = history.get_significant_states(hass, start, significant_changes_only=False)
assert len(hist[entity_id]) == 3
assert states == hist[entity_id]
def record_states(hass):
"""Record some test states.
We inject a bunch of state updates from media player, zone and
thermostat.
"""
mp = "media_player.test"
mp2 = "media_player.test2"
mp3 = "media_player.test3"
therm = "thermostat.test"
therm2 = "thermostat.test2"
zone = "zone.home"
script_c = "script.can_cancel_this_one"
def set_state(entity_id, state, **kwargs):
"""Set the state."""
hass.states.set(entity_id, state, **kwargs)
wait_recording_done(hass)
return hass.states.get(entity_id)
zero = dt_util.utcnow()
one = zero + timedelta(seconds=1)
two = one + timedelta(seconds=1)
three = two + timedelta(seconds=1)
four = three + timedelta(seconds=1)
states = {therm: [], therm2: [], mp: [], mp2: [], mp3: [], script_c: []}
with patch("homeassistant.components.recorder.dt_util.utcnow", return_value=one):
states[mp].append(
set_state(mp, "idle", attributes={"media_title": str(sentinel.mt1)})
)
states[mp].append(
set_state(mp, "YouTube", attributes={"media_title": str(sentinel.mt2)})
)
states[mp2].append(
set_state(mp2, "YouTube", attributes={"media_title": str(sentinel.mt2)})
)
states[mp3].append(
set_state(mp3, "idle", attributes={"media_title": str(sentinel.mt1)})
)
states[therm].append(
set_state(therm, 20, attributes={"current_temperature": 19.5})
)
with patch("homeassistant.components.recorder.dt_util.utcnow", return_value=two):
# This state will be skipped because it only differs in time
set_state(mp, "YouTube", attributes={"media_title": str(sentinel.mt3)})
# This state will be skipped because domain is excluded
set_state(zone, "zoning")
states[script_c].append(
set_state(script_c, "off", attributes={"can_cancel": True})
)
states[therm].append(
set_state(therm, 21, attributes={"current_temperature": 19.8})
)
states[therm2].append(
set_state(therm2, 20, attributes={"current_temperature": 19})
)
with patch("homeassistant.components.recorder.dt_util.utcnow", return_value=three):
states[mp].append(
set_state(mp, "Netflix", attributes={"media_title": str(sentinel.mt4)})
)
states[mp3].append(
set_state(mp3, "Netflix", attributes={"media_title": str(sentinel.mt3)})
)
# Attributes changed even though state is the same
states[therm].append(
set_state(therm, 21, attributes={"current_temperature": 20})
)
return zero, four, states