Require a list of entity ids when fetching history (#90992)

J. Nick Koston 2023-04-08 16:14:44 -10:00 committed by GitHub
parent 667a00e7f9
commit d0d4ab6056
44 changed files with 1570 additions and 1464 deletions
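In short: history can no longer be fetched through recorder include/exclude filters; every query path (REST view, websocket API, and the recorder history helpers) now requires an explicit list of entity ids. A minimal sketch of the new calling convention, assuming a set-up recorder (entity names are placeholders):

    from homeassistant.components.recorder import history

    # entity_ids is now mandatory: omitting it raises ValueError, and
    # passing the old filters argument raises NotImplementedError.
    states = history.get_significant_states(
        hass,
        start_time,
        end_time,
        entity_ids=["light.kitchen", "sensor.power"],
    )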


@ -3,8 +3,6 @@ from __future__ import annotations
from datetime import datetime as dt, timedelta
from http import HTTPStatus
import logging
import time
from typing import cast
from aiohttp import web
@ -12,68 +10,40 @@ import voluptuous as vol
from homeassistant.components import frontend
from homeassistant.components.http import HomeAssistantView
from homeassistant.components.recorder import (
DOMAIN as RECORDER_DOMAIN,
get_instance,
history,
)
from homeassistant.components.recorder.filters import (
Filters,
extract_include_exclude_filter_conf,
merge_include_exclude_filters,
sqlalchemy_filter_from_include_exclude_conf,
)
from homeassistant.components.recorder import get_instance, history
from homeassistant.components.recorder.util import session_scope
from homeassistant.core import HomeAssistant
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entityfilter import (
INCLUDE_EXCLUDE_BASE_FILTER_SCHEMA,
convert_include_exclude_filter,
)
from homeassistant.helpers.entityfilter import INCLUDE_EXCLUDE_BASE_FILTER_SCHEMA
from homeassistant.helpers.typing import ConfigType
import homeassistant.util.dt as dt_util
from . import websocket_api
from .const import DOMAIN
from .helpers import entities_may_have_state_changes_after
from .models import HistoryConfig
_LOGGER = logging.getLogger(__name__)
CONF_ORDER = "use_include_order"
_ONE_DAY = timedelta(days=1)
CONFIG_SCHEMA = vol.Schema(
vol.All(
cv.deprecated(DOMAIN),
{
DOMAIN: vol.All(
cv.deprecated(CONF_ORDER),
INCLUDE_EXCLUDE_BASE_FILTER_SCHEMA.extend(
{vol.Optional(CONF_ORDER, default=False): cv.boolean}
),
)
},
),
extra=vol.ALLOW_EXTRA,
)
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the history hooks."""
conf = config.get(DOMAIN, {})
recorder_conf = config.get(RECORDER_DOMAIN, {})
history_conf = config.get(DOMAIN, {})
recorder_filter = extract_include_exclude_filter_conf(recorder_conf)
logbook_filter = extract_include_exclude_filter_conf(history_conf)
merged_filter = merge_include_exclude_filters(recorder_filter, logbook_filter)
possible_merged_entities_filter = convert_include_exclude_filter(merged_filter)
sqlalchemy_filter = None
entity_filter = None
if not possible_merged_entities_filter.empty_filter:
sqlalchemy_filter = sqlalchemy_filter_from_include_exclude_conf(conf)
entity_filter = possible_merged_entities_filter
hass.data[DOMAIN] = HistoryConfig(sqlalchemy_filter, entity_filter)
hass.http.register_view(HistoryPeriodView(sqlalchemy_filter))
hass.http.register_view(HistoryPeriodView())
frontend.async_register_built_in_panel(hass, "history", "history", "hass:chart-box")
websocket_api.async_setup(hass)
return True
@ -86,44 +56,42 @@ class HistoryPeriodView(HomeAssistantView):
name = "api:history:view-period"
extra_urls = ["/api/history/period/{datetime}"]
def __init__(self, filters: Filters | None) -> None:
"""Initialize the history period view."""
self.filters = filters
async def get(
self, request: web.Request, datetime: str | None = None
) -> web.Response:
"""Return history over a period of time."""
datetime_ = None
query = request.query
if datetime and (datetime_ := dt_util.parse_datetime(datetime)) is None:
return self.json_message("Invalid datetime", HTTPStatus.BAD_REQUEST)
now = dt_util.utcnow()
if not (entity_ids_str := query.get("filter_entity_id")) or not (
entity_ids := entity_ids_str.strip().lower().split(",")
):
return self.json_message(
"filter_entity_id is missing", HTTPStatus.BAD_REQUEST
)
one_day = timedelta(days=1)
now = dt_util.utcnow()
if datetime_:
start_time = dt_util.as_utc(datetime_)
else:
start_time = now - one_day
start_time = now - _ONE_DAY
if start_time > now:
return self.json([])
if end_time_str := request.query.get("end_time"):
if end_time_str := query.get("end_time"):
if end_time := dt_util.parse_datetime(end_time_str):
end_time = dt_util.as_utc(end_time)
else:
return self.json_message("Invalid end_time", HTTPStatus.BAD_REQUEST)
else:
end_time = start_time + one_day
entity_ids_str = request.query.get("filter_entity_id")
entity_ids = None
if entity_ids_str:
entity_ids = entity_ids_str.lower().split(",")
include_start_time_state = "skip_initial_state" not in request.query
significant_changes_only = (
request.query.get("significant_changes_only", "1") != "0"
)
end_time = start_time + _ONE_DAY
include_start_time_state = "skip_initial_state" not in query
significant_changes_only = query.get("significant_changes_only", "1") != "0"
minimal_response = "minimal_response" in request.query
no_attributes = "no_attributes" in request.query
@ -159,33 +127,27 @@ class HistoryPeriodView(HomeAssistantView):
hass: HomeAssistant,
start_time: dt,
end_time: dt,
entity_ids: list[str] | None,
entity_ids: list[str],
include_start_time_state: bool,
significant_changes_only: bool,
minimal_response: bool,
no_attributes: bool,
) -> web.Response:
"""Fetch significant stats from the database as json."""
timer_start = time.perf_counter()
with session_scope(hass=hass, read_only=True) as session:
states = history.get_significant_states_with_session(
return self.json(
list(
history.get_significant_states_with_session(
hass,
session,
start_time,
end_time,
entity_ids,
self.filters,
None,
include_start_time_state,
significant_changes_only,
minimal_response,
no_attributes,
).values()
)
if _LOGGER.isEnabledFor(logging.DEBUG):
elapsed = time.perf_counter() - timer_start
_LOGGER.debug(
"Extracted %d states in %fs", sum(map(len, states.values())), elapsed
)
return self.json(list(states.values()))
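With the view changes above, the period view rejects requests that do not name entities: filter_entity_id is required, and the response is built directly from get_significant_states_with_session without any recorder filters. A sketch of a valid request (host and entity ids are placeholders):

    GET /api/history/period/2023-04-08T00:00:00+00:00?filter_entity_id=light.kitchen,light.cow&end_time=2023-04-09T00:00:00+00:00

Requests without filter_entity_id get HTTP 400 with {"message": "filter_entity_id is missing"}, as exercised by the tests further down.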


@ -1,15 +0,0 @@
"""Models for the history integration."""
from __future__ import annotations
from dataclasses import dataclass
from homeassistant.components.recorder.filters import Filters
from homeassistant.helpers.entityfilter import EntityFilter
@dataclass
class HistoryConfig:
"""Configuration for the history integration."""
sqlalchemy_filter: Filters | None = None
entity_filter: EntityFilter | None = None


@ -12,7 +12,6 @@ import voluptuous as vol
from homeassistant.components import websocket_api
from homeassistant.components.recorder import get_instance, history
from homeassistant.components.recorder.filters import Filters
from homeassistant.components.websocket_api import messages
from homeassistant.components.websocket_api.connection import ActiveConnection
from homeassistant.const import (
@ -20,7 +19,6 @@ from homeassistant.const import (
COMPRESSED_STATE_LAST_CHANGED,
COMPRESSED_STATE_LAST_UPDATED,
COMPRESSED_STATE_STATE,
EVENT_STATE_CHANGED,
)
from homeassistant.core import (
CALLBACK_TYPE,
@ -30,7 +28,6 @@ from homeassistant.core import (
callback,
is_callback,
)
from homeassistant.helpers.entityfilter import EntityFilter
from homeassistant.helpers.event import (
async_track_point_in_utc_time,
async_track_state_change_event,
@ -38,9 +35,8 @@ from homeassistant.helpers.event import (
from homeassistant.helpers.json import JSON_DUMP
import homeassistant.util.dt as dt_util
from .const import DOMAIN, EVENT_COALESCE_TIME, MAX_PENDING_HISTORY_STATES
from .const import EVENT_COALESCE_TIME, MAX_PENDING_HISTORY_STATES
from .helpers import entities_may_have_state_changes_after
from .models import HistoryConfig
_LOGGER = logging.getLogger(__name__)
@ -69,7 +65,6 @@ def _ws_get_significant_states(
start_time: dt,
end_time: dt | None,
entity_ids: list[str] | None,
filters: Filters | None,
include_start_time_state: bool,
significant_changes_only: bool,
minimal_response: bool,
@ -84,7 +79,7 @@ def _ws_get_significant_states(
start_time,
end_time,
entity_ids,
filters,
None,
include_start_time_state,
significant_changes_only,
minimal_response,
@ -150,7 +145,6 @@ async def ws_get_history_during_period(
significant_changes_only = msg["significant_changes_only"]
minimal_response = msg["minimal_response"]
history_config: HistoryConfig = hass.data[DOMAIN]
connection.send_message(
await get_instance(hass).async_add_executor_job(
@ -160,7 +154,6 @@ async def ws_get_history_during_period(
start_time,
end_time,
entity_ids,
history_config.sqlalchemy_filter,
include_start_time_state,
significant_changes_only,
minimal_response,
@ -214,7 +207,6 @@ def _generate_historical_response(
start_time: dt,
end_time: dt,
entity_ids: list[str] | None,
filters: Filters | None,
include_start_time_state: bool,
significant_changes_only: bool,
minimal_response: bool,
@ -229,7 +221,7 @@ def _generate_historical_response(
start_time,
end_time,
entity_ids,
filters,
None,
include_start_time_state,
significant_changes_only,
minimal_response,
@ -270,7 +262,6 @@ async def _async_send_historical_states(
start_time: dt,
end_time: dt,
entity_ids: list[str] | None,
filters: Filters | None,
include_start_time_state: bool,
significant_changes_only: bool,
minimal_response: bool,
@ -286,7 +277,6 @@ async def _async_send_historical_states(
start_time,
end_time,
entity_ids,
filters,
include_start_time_state,
significant_changes_only,
minimal_response,
@ -365,8 +355,7 @@ def _async_subscribe_events(
hass: HomeAssistant,
subscriptions: list[CALLBACK_TYPE],
target: Callable[[Event], None],
entities_filter: EntityFilter | None,
entity_ids: list[str] | None,
entity_ids: list[str],
significant_changes_only: bool,
minimal_response: bool,
) -> None:
@ -386,7 +375,7 @@ def _async_subscribe_events(
return
assert isinstance(new_state, State)
assert isinstance(old_state, State)
if (entities_filter and not entities_filter(new_state.entity_id)) or (
if (
(significant_changes_only or minimal_response)
and new_state.state == old_state.state
and new_state.domain not in history.SIGNIFICANT_DOMAINS
@ -394,21 +383,8 @@ def _async_subscribe_events(
return
target(event)
if entity_ids:
subscriptions.append(
async_track_state_change_event(
hass, entity_ids, _forward_state_events_filtered
)
)
return
# We want the firehose
subscriptions.append(
hass.bus.async_listen(
EVENT_STATE_CHANGED,
_forward_state_events_filtered,
run_immediately=True,
)
async_track_state_change_event(hass, entity_ids, _forward_state_events_filtered)
)
@ -417,7 +393,7 @@ def _async_subscribe_events(
vol.Required("type"): "history/stream",
vol.Required("start_time"): str,
vol.Optional("end_time"): str,
vol.Optional("entity_ids"): [str],
vol.Required("entity_ids"): [str],
vol.Optional("include_start_time_state", default=True): bool,
vol.Optional("significant_changes_only", default=True): bool,
vol.Optional("minimal_response", default=False): bool,
@ -431,15 +407,7 @@ async def ws_stream(
"""Handle history stream websocket command."""
start_time_str = msg["start_time"]
msg_id: int = msg["id"]
entity_ids: list[str] | None = msg.get("entity_ids")
utc_now = dt_util.utcnow()
filters: Filters | None = None
entities_filter: EntityFilter | None = None
if not entity_ids:
history_config: HistoryConfig = hass.data[DOMAIN]
filters = history_config.sqlalchemy_filter
entities_filter = history_config.entity_filter
if start_time := dt_util.parse_datetime(start_time_str):
start_time = dt_util.as_utc(start_time)
@ -459,7 +427,7 @@ async def ws_stream(
connection.send_error(msg_id, "invalid_end_time", "Invalid end_time")
return
entity_ids = msg.get("entity_ids")
entity_ids: list[str] = msg["entity_ids"]
include_start_time_state = msg["include_start_time_state"]
significant_changes_only = msg["significant_changes_only"]
no_attributes = msg["no_attributes"]
@ -485,7 +453,6 @@ async def ws_stream(
start_time,
end_time,
entity_ids,
filters,
include_start_time_state,
significant_changes_only,
minimal_response,
@ -535,7 +502,6 @@ async def ws_stream(
hass,
subscriptions,
_queue_or_cancel,
entities_filter,
entity_ids,
significant_changes_only=significant_changes_only,
minimal_response=minimal_response,
@ -551,7 +517,6 @@ async def ws_stream(
start_time,
subscriptions_setup_complete_time,
entity_ids,
filters,
include_start_time_state,
significant_changes_only,
minimal_response,
@ -593,7 +558,6 @@ async def ws_stream(
last_event_time or start_time,
subscriptions_setup_complete_time,
entity_ids,
filters,
False, # We don't want the start time state again
significant_changes_only,
minimal_response,
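
Since entity_ids is now vol.Required in the history/stream schema above (and the firehose subscription path is gone), a stream subscription must name its entities up front. A sketch of the subscribe payload as a Python dict (field values are placeholders):

    message = {
        "id": 1,
        "type": "history/stream",
        "start_time": "2023-04-08T00:00:00+00:00",
        "entity_ids": ["light.kitchen"],  # required by the schema
        "minimal_response": True,
    }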


@ -13,7 +13,6 @@ SIGNIFICANT_DOMAINS = {
}
SIGNIFICANT_DOMAINS_ENTITY_ID_LIKE = [f"{domain}.%" for domain in SIGNIFICANT_DOMAINS]
IGNORE_DOMAINS = {"zone", "scene"}
IGNORE_DOMAINS_ENTITY_ID_LIKE = [f"{domain}.%" for domain in IGNORE_DOMAINS]
NEED_ATTRIBUTE_DOMAINS = {
"climate",
"humidifier",


@ -5,7 +5,6 @@ from collections import defaultdict
from collections.abc import Callable, Iterable, Iterator, MutableMapping
from datetime import datetime
from itertools import groupby
import logging
from operator import attrgetter
import time
from typing import Any, cast
@ -13,7 +12,6 @@ from typing import Any, cast
from sqlalchemy import Column, Text, and_, func, lambda_stmt, or_, select
from sqlalchemy.engine.row import Row
from sqlalchemy.orm.properties import MappedColumn
from sqlalchemy.orm.query import Query
from sqlalchemy.orm.session import Session
from sqlalchemy.sql.expression import literal
from sqlalchemy.sql.lambdas import StatementLambdaElement
@ -36,7 +34,6 @@ from ..models.legacy import LazyStatePreSchema31, row_to_compressed_state_pre_sc
from ..util import execute_stmt_lambda_element, session_scope
from .common import _schema_version
from .const import (
IGNORE_DOMAINS_ENTITY_ID_LIKE,
LAST_CHANGED_KEY,
NEED_ATTRIBUTE_DOMAINS,
SIGNIFICANT_DOMAINS,
@ -44,9 +41,6 @@ from .const import (
STATE_KEY,
)
_LOGGER = logging.getLogger(__name__)
_BASE_STATES = (
States.entity_id,
States.state,
@ -229,24 +223,11 @@ def get_significant_states(
)
def _ignore_domains_filter(query: Query) -> Query:
"""Add a filter to ignore domains we do not fetch history for."""
return query.filter(
and_(
*[
~States.entity_id.like(entity_domain)
for entity_domain in IGNORE_DOMAINS_ENTITY_ID_LIKE
]
)
)
def _significant_states_stmt(
schema_version: int,
start_time: datetime,
end_time: datetime | None,
entity_ids: list[str] | None,
filters: Filters | None,
entity_ids: list[str],
significant_changes_only: bool,
no_attributes: bool,
) -> StatementLambdaElement:
@ -255,8 +236,7 @@ def _significant_states_stmt(
schema_version, no_attributes, include_last_changed=not significant_changes_only
)
if (
entity_ids
and len(entity_ids) == 1
len(entity_ids) == 1
and significant_changes_only
and split_entity_id(entity_ids[0])[0] not in SIGNIFICANT_DOMAINS
):
@ -297,18 +277,7 @@ def _significant_states_stmt(
),
)
)
if entity_ids:
stmt += lambda q: q.filter(
# https://github.com/python/mypy/issues/2608
States.entity_id.in_(entity_ids) # type:ignore[arg-type]
)
else:
stmt += _ignore_domains_filter
if filters and filters.has_config:
stmt = stmt.add_criteria(
lambda q: q.filter(filters.states_entity_filter()), track_on=[filters] # type: ignore[union-attr]
)
stmt += lambda q: q.filter(States.entity_id.in_(entity_ids))
if schema_version >= 31:
start_time_ts = start_time.timestamp()
@ -356,25 +325,25 @@ def get_significant_states_with_session(
as well as all states from certain domains (for instance
thermostat so that we get current temperature in our graphs).
"""
if filters is not None:
raise NotImplementedError("Filters are no longer supported")
if not entity_ids:
raise ValueError("entity_ids must be provided")
stmt = _significant_states_stmt(
_schema_version(hass),
start_time,
end_time,
entity_ids,
filters,
significant_changes_only,
no_attributes,
)
states = execute_stmt_lambda_element(
session, stmt, None if entity_ids else start_time, end_time
)
states = execute_stmt_lambda_element(session, stmt, None, end_time)
return _sorted_states_to_dict(
hass,
session,
states,
start_time,
entity_ids,
filters,
include_start_time_state,
minimal_response,
no_attributes,
@ -419,7 +388,7 @@ def _state_changed_during_period_stmt(
schema_version: int,
start_time: datetime,
end_time: datetime | None,
entity_id: str | None,
entity_id: str,
no_attributes: bool,
descending: bool,
limit: int | None,
@ -450,7 +419,6 @@ def _state_changed_during_period_stmt(
stmt += lambda q: q.filter(States.last_updated_ts < end_time_ts)
else:
stmt += lambda q: q.filter(States.last_updated < end_time)
if entity_id:
stmt += lambda q: q.filter(States.entity_id == entity_id)
if join_attributes:
stmt += lambda q: q.outerjoin(
@ -484,9 +452,9 @@ def state_changes_during_period(
include_start_time_state: bool = True,
) -> MutableMapping[str, list[State]]:
"""Return states changes during UTC period start_time - end_time."""
entity_id = entity_id.lower() if entity_id is not None else None
entity_ids = [entity_id] if entity_id is not None else None
if not entity_id:
raise ValueError("entity_id must be provided")
entity_ids = [entity_id.lower()]
with session_scope(hass=hass, read_only=True) as session:
stmt = _state_changed_during_period_stmt(
_schema_version(hass),
@ -497,9 +465,7 @@ def state_changes_during_period(
descending,
limit,
)
states = execute_stmt_lambda_element(
session, stmt, None if entity_id else start_time, end_time
)
states = execute_stmt_lambda_element(session, stmt, None, end_time)
return cast(
MutableMapping[str, list[State]],
_sorted_states_to_dict(
@ -647,93 +613,17 @@ def _get_states_for_entities_stmt(
return stmt
def _get_states_for_all_stmt(
schema_version: int,
run_start: datetime,
utc_point_in_time: datetime,
filters: Filters | None,
no_attributes: bool,
) -> StatementLambdaElement:
"""Baked query to get states for all entities."""
stmt, join_attributes = _lambda_stmt_and_join_attributes(
schema_version, no_attributes, include_last_changed=True
)
# We did not get an include-list of entities, query all states in the inner
# query, then filter out unwanted domains as well as applying the custom filter.
# This filtering can't be done in the inner query because the domain column is
# not indexed and we can't control what's in the custom filter.
if schema_version >= 31:
run_start_ts = process_timestamp(run_start).timestamp()
utc_point_in_time_ts = dt_util.utc_to_timestamp(utc_point_in_time)
stmt += lambda q: q.join(
(
most_recent_states_by_date := (
select(
States.entity_id.label("max_entity_id"),
# https://github.com/sqlalchemy/sqlalchemy/issues/9189
# pylint: disable-next=not-callable
func.max(States.last_updated_ts).label("max_last_updated"),
)
.filter(
(States.last_updated_ts >= run_start_ts)
& (States.last_updated_ts < utc_point_in_time_ts)
)
.group_by(States.entity_id)
.subquery()
)
),
and_(
States.entity_id == most_recent_states_by_date.c.max_entity_id,
States.last_updated_ts == most_recent_states_by_date.c.max_last_updated,
),
)
else:
stmt += lambda q: q.join(
(
most_recent_states_by_date := (
select(
States.entity_id.label("max_entity_id"),
# https://github.com/sqlalchemy/sqlalchemy/issues/9189
# pylint: disable-next=not-callable
func.max(States.last_updated).label("max_last_updated"),
)
.filter(
(States.last_updated >= run_start)
& (States.last_updated < utc_point_in_time)
)
.group_by(States.entity_id)
.subquery()
)
),
and_(
States.entity_id == most_recent_states_by_date.c.max_entity_id,
States.last_updated == most_recent_states_by_date.c.max_last_updated,
),
)
stmt += _ignore_domains_filter
if filters and filters.has_config:
stmt = stmt.add_criteria(
lambda q: q.filter(filters.states_entity_filter()), track_on=[filters] # type: ignore[union-attr]
)
if join_attributes:
stmt += lambda q: q.outerjoin(
StateAttributes, (States.attributes_id == StateAttributes.attributes_id)
)
return stmt
def _get_rows_with_session(
hass: HomeAssistant,
session: Session,
utc_point_in_time: datetime,
entity_ids: list[str] | None = None,
entity_ids: list[str],
run: RecorderRuns | None = None,
filters: Filters | None = None,
no_attributes: bool = False,
) -> Iterable[Row]:
"""Return the states at a specific point in time."""
schema_version = _schema_version(hass)
if entity_ids and len(entity_ids) == 1:
if len(entity_ids) == 1:
return execute_stmt_lambda_element(
session,
_get_single_entity_states_stmt(
@ -750,15 +640,9 @@ def _get_rows_with_session(
# We have more than one entity to look at so we need to do a query on states
# since the last recorder run started.
if entity_ids:
stmt = _get_states_for_entities_stmt(
schema_version, run.start, utc_point_in_time, entity_ids, no_attributes
)
else:
stmt = _get_states_for_all_stmt(
schema_version, run.start, utc_point_in_time, filters, no_attributes
)
return execute_stmt_lambda_element(session, stmt)
@ -804,8 +688,7 @@ def _sorted_states_to_dict(
session: Session,
states: Iterable[Row],
start_time: datetime,
entity_ids: list[str] | None,
filters: Filters | None = None,
entity_ids: list[str],
include_start_time_state: bool = True,
minimal_response: bool = False,
no_attributes: bool = False,
@ -847,12 +730,11 @@ def _sorted_states_to_dict(
result: dict[str, list[State | dict[str, Any]]] = defaultdict(list)
# Set all entity IDs to empty lists in result set to maintain the order
if entity_ids is not None:
for ent_id in entity_ids:
result[ent_id] = []
# Get the states at the start time
timer_start = time.perf_counter()
initial_states: dict[str, Row] = {}
if include_start_time_state:
initial_states = {
@ -862,16 +744,11 @@ def _sorted_states_to_dict(
session,
start_time,
entity_ids,
filters=filters,
no_attributes=no_attributes,
)
}
if _LOGGER.isEnabledFor(logging.DEBUG):
elapsed = time.perf_counter() - timer_start
_LOGGER.debug("getting %d first datapoints took %fs", len(result), elapsed)
if entity_ids and len(entity_ids) == 1:
if len(entity_ids) == 1:
states_iter: Iterable[tuple[str, Iterator[Row]]] = (
(entity_ids[0], iter(states)),
)
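
Both legacy entry points now validate their input instead of falling back to a whole-database scan: get_significant_states_with_session raises NotImplementedError when filters are passed and ValueError when entity_ids is empty, and state_changes_during_period raises ValueError without an entity_id. A sketch of the surviving single-entity path (entity name is a placeholder):

    from homeassistant.components.recorder import history

    changes = history.state_changes_during_period(
        hass,
        start_time,
        end_time,
        "light.kitchen",  # now required; lowercased internally
    )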


@ -8,10 +8,9 @@ from itertools import groupby
from operator import itemgetter
from typing import Any, cast
from sqlalchemy import Column, and_, func, lambda_stmt, or_, select
from sqlalchemy import Column, and_, func, lambda_stmt, select
from sqlalchemy.engine.row import Row
from sqlalchemy.orm.properties import MappedColumn
from sqlalchemy.orm.query import Query
from sqlalchemy.orm.session import Session
from sqlalchemy.sql.expression import literal
from sqlalchemy.sql.lambdas import StatementLambdaElement
@ -21,7 +20,7 @@ from homeassistant.core import HomeAssistant, State, split_entity_id
import homeassistant.util.dt as dt_util
from ... import recorder
from ..db_schema import RecorderRuns, StateAttributes, States, StatesMeta
from ..db_schema import RecorderRuns, StateAttributes, States
from ..filters import Filters
from ..models import (
LazyState,
@ -31,11 +30,9 @@ from ..models import (
)
from ..util import execute_stmt_lambda_element, session_scope
from .const import (
IGNORE_DOMAINS_ENTITY_ID_LIKE,
LAST_CHANGED_KEY,
NEED_ATTRIBUTE_DOMAINS,
SIGNIFICANT_DOMAINS,
SIGNIFICANT_DOMAINS_ENTITY_ID_LIKE,
STATE_KEY,
)
@ -73,7 +70,7 @@ _FIELD_MAP = {
def _lambda_stmt_and_join_attributes(
no_attributes: bool, include_last_changed: bool = True
) -> tuple[StatementLambdaElement, bool]:
) -> StatementLambdaElement:
"""Return the lambda_stmt and if StateAttributes should be joined.
Because these are lambda_stmt the values inside the lambdas need
@ -84,18 +81,12 @@ def _lambda_stmt_and_join_attributes(
# state_attributes table
if no_attributes:
if include_last_changed:
return (
lambda_stmt(lambda: select(*_QUERY_STATE_NO_ATTR)),
False,
)
return (
lambda_stmt(lambda: select(*_QUERY_STATE_NO_ATTR_NO_LAST_CHANGED)),
False,
)
return lambda_stmt(lambda: select(*_QUERY_STATE_NO_ATTR))
return lambda_stmt(lambda: select(*_QUERY_STATE_NO_ATTR_NO_LAST_CHANGED))
if include_last_changed:
return lambda_stmt(lambda: select(*_QUERY_STATES)), True
return lambda_stmt(lambda: select(*_QUERY_STATES_NO_LAST_CHANGED)), True
return lambda_stmt(lambda: select(*_QUERY_STATES))
return lambda_stmt(lambda: select(*_QUERY_STATES_NO_LAST_CHANGED))
def get_significant_states(
@ -127,33 +118,19 @@ def get_significant_states(
)
def _ignore_domains_filter(query: Query) -> Query:
"""Add a filter to ignore domains we do not fetch history for."""
return query.filter(
and_(
*[
~StatesMeta.entity_id.like(entity_domain)
for entity_domain in IGNORE_DOMAINS_ENTITY_ID_LIKE
]
)
)
def _significant_states_stmt(
start_time: datetime,
end_time: datetime | None,
metadata_ids: list[int] | None,
metadata_ids: list[int],
metadata_ids_in_significant_domains: list[int],
filters: Filters | None,
significant_changes_only: bool,
no_attributes: bool,
) -> StatementLambdaElement:
"""Query the database for significant state changes."""
stmt, join_attributes = _lambda_stmt_and_join_attributes(
stmt = _lambda_stmt_and_join_attributes(
no_attributes, include_last_changed=not significant_changes_only
)
join_states_meta = False
if metadata_ids and significant_changes_only:
if significant_changes_only:
# Since we are filtering on entity_id (metadata_id) we can avoid
# the join of the states_meta table since we already know which
# metadata_ids are in the significant domains.
@ -162,52 +139,13 @@ def _significant_states_stmt(
| (States.last_changed_ts == States.last_updated_ts)
| States.last_changed_ts.is_(None)
)
elif significant_changes_only:
# This is the case where we are not filtering on entity_id
# so we need to join the states_meta table to filter out
# the domains we do not care about. This query path was
# only used by the old history page to show all entities
# in the UI. The new history page filters on entity_id
# so this query path is not used anymore except for third
# party integrations that use the history API.
stmt += lambda q: q.filter(
or_(
*[
StatesMeta.entity_id.like(entity_domain)
for entity_domain in SIGNIFICANT_DOMAINS_ENTITY_ID_LIKE
],
(
(States.last_changed_ts == States.last_updated_ts)
| States.last_changed_ts.is_(None)
),
)
)
join_states_meta = True
if metadata_ids:
stmt += lambda q: q.filter(
# https://github.com/python/mypy/issues/2608
States.metadata_id.in_(metadata_ids) # type:ignore[arg-type]
)
else:
stmt += _ignore_domains_filter
if filters and filters.has_config:
stmt = stmt.add_criteria(
lambda q: q.filter(filters.states_metadata_entity_filter()), # type: ignore[union-attr]
track_on=[filters],
)
join_states_meta = True
stmt += lambda q: q.filter(States.metadata_id.in_(metadata_ids))
start_time_ts = start_time.timestamp()
stmt += lambda q: q.filter(States.last_updated_ts > start_time_ts)
if end_time:
end_time_ts = end_time.timestamp()
stmt += lambda q: q.filter(States.last_updated_ts < end_time_ts)
if join_states_meta:
stmt += lambda q: q.outerjoin(
StatesMeta, States.metadata_id == StatesMeta.metadata_id
)
if join_attributes:
if not no_attributes:
stmt += lambda q: q.outerjoin(
StateAttributes, States.attributes_id == StateAttributes.attributes_id
)
@ -239,10 +177,13 @@ def get_significant_states_with_session(
as well as all states from certain domains (for instance
thermostat so that we get current temperature in our graphs).
"""
if filters is not None:
raise NotImplementedError("Filters are no longer supported")
if not entity_ids:
raise ValueError("entity_ids must be provided")
metadata_ids: list[int] | None = None
entity_id_to_metadata_id: dict[str, int | None] | None = None
metadata_ids_in_significant_domains: list[int] = []
if entity_ids:
instance = recorder.get_instance(hass)
if not (
entity_id_to_metadata_id := instance.states_meta_manager.get_many(
@ -262,13 +203,10 @@ def get_significant_states_with_session(
end_time,
metadata_ids,
metadata_ids_in_significant_domains,
filters,
significant_changes_only,
no_attributes,
)
states = execute_stmt_lambda_element(
session, stmt, None if entity_ids else start_time, end_time
)
states = execute_stmt_lambda_element(session, stmt, None, end_time)
return _sorted_states_to_dict(
hass,
session,
@ -276,7 +214,6 @@ def get_significant_states_with_session(
start_time,
entity_ids,
entity_id_to_metadata_id,
filters,
include_start_time_state,
minimal_response,
no_attributes,
@ -325,9 +262,7 @@ def _state_changed_during_period_stmt(
descending: bool,
limit: int | None,
) -> StatementLambdaElement:
stmt, join_attributes = _lambda_stmt_and_join_attributes(
no_attributes, include_last_changed=False
)
stmt = _lambda_stmt_and_join_attributes(no_attributes, include_last_changed=False)
start_time_ts = start_time.timestamp()
stmt += lambda q: q.filter(
(
@ -341,7 +276,7 @@ def _state_changed_during_period_stmt(
stmt += lambda q: q.filter(States.last_updated_ts < end_time_ts)
if metadata_id:
stmt += lambda q: q.filter(States.metadata_id == metadata_id)
if join_attributes:
if not no_attributes:
stmt += lambda q: q.outerjoin(
StateAttributes, States.attributes_id == StateAttributes.attributes_id
)
@ -365,16 +300,18 @@ def state_changes_during_period(
include_start_time_state: bool = True,
) -> MutableMapping[str, list[State]]:
"""Return states changes during UTC period start_time - end_time."""
entity_id = entity_id.lower() if entity_id is not None else None
entity_ids = [entity_id] if entity_id is not None else None
if not entity_id:
raise ValueError("entity_id must be provided")
entity_ids = [entity_id.lower()]
with session_scope(hass=hass, read_only=True) as session:
metadata_id: int | None = None
entity_id_to_metadata_id = None
if entity_id:
instance = recorder.get_instance(hass)
metadata_id = instance.states_meta_manager.get(entity_id, session, False)
entity_id_to_metadata_id = {entity_id: metadata_id}
if not (
metadata_id := instance.states_meta_manager.get(entity_id, session, False)
):
return {}
entity_id_to_metadata_id: dict[str, int | None] = {entity_id: metadata_id}
stmt = _state_changed_during_period_stmt(
start_time,
end_time,
@ -383,9 +320,7 @@ def state_changes_during_period(
descending,
limit,
)
states = execute_stmt_lambda_element(
session, stmt, None if entity_id else start_time, end_time
)
states = execute_stmt_lambda_element(session, stmt, None, end_time)
return cast(
MutableMapping[str, list[State]],
_sorted_states_to_dict(
@ -403,9 +338,7 @@ def state_changes_during_period(
def _get_last_state_changes_stmt(
number_of_states: int, metadata_id: int
) -> StatementLambdaElement:
stmt, join_attributes = _lambda_stmt_and_join_attributes(
False, include_last_changed=False
)
stmt = _lambda_stmt_and_join_attributes(False, include_last_changed=False)
if number_of_states == 1:
stmt += lambda q: q.join(
(
@ -438,12 +371,9 @@ def _get_last_state_changes_stmt(
.subquery()
).c.state_id
)
if join_attributes:
stmt += lambda q: q.outerjoin(
StateAttributes, States.attributes_id == StateAttributes.attributes_id
)
stmt += lambda q: q.order_by(States.state_id.desc())
).order_by(States.state_id.desc())
return stmt
@ -488,9 +418,7 @@ def _get_states_for_entities_stmt(
no_attributes: bool,
) -> StatementLambdaElement:
"""Baked query to get states for specific entities."""
stmt, join_attributes = _lambda_stmt_and_join_attributes(
no_attributes, include_last_changed=True
)
stmt = _lambda_stmt_and_join_attributes(no_attributes, include_last_changed=True)
# We got an include-list of entities, accelerate the query by filtering already
# in the inner query.
run_start_ts = process_timestamp(run_start).timestamp()
@ -520,79 +448,24 @@ def _get_states_for_entities_stmt(
== most_recent_states_for_entities_by_date.c.max_last_updated,
),
)
if join_attributes:
if not no_attributes:
stmt += lambda q: q.outerjoin(
StateAttributes, (States.attributes_id == StateAttributes.attributes_id)
)
return stmt
def _get_states_for_all_stmt(
run_start: datetime,
utc_point_in_time: datetime,
filters: Filters | None,
no_attributes: bool,
) -> StatementLambdaElement:
"""Baked query to get states for all entities."""
stmt, join_attributes = _lambda_stmt_and_join_attributes(
no_attributes, include_last_changed=True
)
# We did not get an include-list of entities, query all states in the inner
# query, then filter out unwanted domains as well as applying the custom filter.
# This filtering can't be done in the inner query because the domain column is
# not indexed and we can't control what's in the custom filter.
run_start_ts = process_timestamp(run_start).timestamp()
utc_point_in_time_ts = dt_util.utc_to_timestamp(utc_point_in_time)
stmt += lambda q: q.join(
(
most_recent_states_by_date := (
select(
States.metadata_id.label("max_metadata_id"),
# https://github.com/sqlalchemy/sqlalchemy/issues/9189
# pylint: disable-next=not-callable
func.max(States.last_updated_ts).label("max_last_updated"),
)
.filter(
(States.last_updated_ts >= run_start_ts)
& (States.last_updated_ts < utc_point_in_time_ts)
)
.group_by(States.metadata_id)
.subquery()
)
),
and_(
States.metadata_id == most_recent_states_by_date.c.max_metadata_id,
States.last_updated_ts == most_recent_states_by_date.c.max_last_updated,
),
)
stmt += _ignore_domains_filter
if filters and filters.has_config:
stmt = stmt.add_criteria(
lambda q: q.filter(filters.states_metadata_entity_filter()), # type: ignore[union-attr]
track_on=[filters],
)
if join_attributes:
stmt += lambda q: q.outerjoin(
StateAttributes, (States.attributes_id == StateAttributes.attributes_id)
)
stmt += lambda q: q.outerjoin(
StatesMeta, States.metadata_id == StatesMeta.metadata_id
)
return stmt
def _get_rows_with_session(
hass: HomeAssistant,
session: Session,
utc_point_in_time: datetime,
entity_ids: list[str] | None = None,
entity_ids: list[str],
entity_id_to_metadata_id: dict[str, int | None] | None = None,
run: RecorderRuns | None = None,
filters: Filters | None = None,
no_attributes: bool = False,
) -> Iterable[Row]:
"""Return the states at a specific point in time."""
if entity_ids and len(entity_ids) == 1:
if len(entity_ids) == 1:
if not entity_id_to_metadata_id or not (
metadata_id := entity_id_to_metadata_id.get(entity_ids[0])
):
@ -613,7 +486,6 @@ def _get_rows_with_session(
# We have more than one entity to look at so we need to do a query on states
# since the last recorder run started.
if entity_ids:
if not entity_id_to_metadata_id or not (
metadata_ids := extract_metadata_ids(entity_id_to_metadata_id)
):
@ -621,11 +493,6 @@ def _get_rows_with_session(
stmt = _get_states_for_entities_stmt(
run.start, utc_point_in_time, metadata_ids, no_attributes
)
else:
stmt = _get_states_for_all_stmt(
run.start, utc_point_in_time, filters, no_attributes
)
return execute_stmt_lambda_element(session, stmt)
@ -636,9 +503,7 @@ def _get_single_entity_states_stmt(
) -> StatementLambdaElement:
# Use an entirely different (and extremely fast) query if we only
# have a single entity id
stmt, join_attributes = _lambda_stmt_and_join_attributes(
no_attributes, include_last_changed=True
)
stmt = _lambda_stmt_and_join_attributes(no_attributes, include_last_changed=True)
utc_point_in_time_ts = dt_util.utc_to_timestamp(utc_point_in_time)
stmt += (
lambda q: q.filter(
@ -648,7 +513,7 @@ def _get_single_entity_states_stmt(
.order_by(States.last_updated_ts.desc())
.limit(1)
)
if join_attributes:
if not no_attributes:
stmt += lambda q: q.outerjoin(
StateAttributes, States.attributes_id == StateAttributes.attributes_id
)
@ -660,9 +525,8 @@ def _sorted_states_to_dict(
session: Session,
states: Iterable[Row],
start_time: datetime,
entity_ids: list[str] | None,
entity_id_to_metadata_id: dict[str, int | None] | None,
filters: Filters | None = None,
entity_ids: list[str],
entity_id_to_metadata_id: dict[str, int | None],
include_start_time_state: bool = True,
minimal_response: bool = False,
no_attributes: bool = False,
@ -697,19 +561,12 @@ def _sorted_states_to_dict(
metadata_id_idx = field_map["metadata_id"]
# Set all entity IDs to empty lists in result set to maintain the order
if entity_ids is not None:
for ent_id in entity_ids:
result[ent_id] = []
if entity_id_to_metadata_id:
metadata_id_to_entity_id = {
v: k for k, v in entity_id_to_metadata_id.items() if v is not None
}
else:
metadata_id_to_entity_id = recorder.get_instance(
hass
).states_meta_manager.get_metadata_id_to_entity_id(session)
# Get the states at the start time
initial_states: dict[int, Row] = {}
if include_start_time_state:
@ -721,16 +578,13 @@ def _sorted_states_to_dict(
start_time,
entity_ids,
entity_id_to_metadata_id,
filters=filters,
no_attributes=no_attributes,
)
}
if entity_ids and len(entity_ids) == 1:
if not entity_id_to_metadata_id or not (
metadata_id := entity_id_to_metadata_id.get(entity_ids[0])
):
return {}
if len(entity_ids) == 1:
metadata_id = entity_id_to_metadata_id[entity_ids[0]]
assert metadata_id is not None # should not be possible if we got here
states_iter: Iterable[tuple[int, Iterator[Row]]] = (
(metadata_id, iter(states)),
)
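
In the states-meta schema the required entity ids are first resolved to metadata_ids, and an entity the recorder has never seen short-circuits to an empty result (see the states_meta_manager.get/get_many calls above). A rough sketch of that resolution step, using the internal recorder API purely for illustration:

    instance = recorder.get_instance(hass)
    # Maps each entity_id to its metadata_id; a never-recorded entity maps to None.
    entity_id_to_metadata_id = instance.states_meta_manager.get_many(
        entity_ids, session, False
    )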


@ -50,7 +50,9 @@ async def test_exclude_attributes(
assert ["hello.world"] == calls[0].data.get(ATTR_ENTITY_ID)
await async_wait_recording_done(hass)
states = await hass.async_add_executor_job(get_significant_states, hass, now)
states = await hass.async_add_executor_job(
get_significant_states, hass, now, None, hass.states.async_entity_ids()
)
assert len(states) == 1
for entity_states in states.values():
for state in entity_states:


@ -28,7 +28,9 @@ async def test_exclude_attributes(recorder_mock: Recorder, hass: HomeAssistant)
await hass.async_block_till_done()
await async_wait_recording_done(hass)
states = await hass.async_add_executor_job(get_significant_states, hass, now)
states = await hass.async_add_executor_job(
get_significant_states, hass, now, None, hass.states.async_entity_ids()
)
assert len(states) > 1
for entity_states in states.values():
for state in entity_states:


@ -31,7 +31,9 @@ async def test_exclude_attributes(recorder_mock: Recorder, hass: HomeAssistant)
await hass.async_block_till_done()
await async_wait_recording_done(hass)
states = await hass.async_add_executor_job(get_significant_states, hass, now)
states = await hass.async_add_executor_job(
get_significant_states, hass, now, None, hass.states.async_entity_ids()
)
assert len(states) > 1
for entity_states in states.values():
for state in entity_states:


@ -37,7 +37,9 @@ async def test_exclude_attributes(recorder_mock: Recorder, hass: HomeAssistant)
await hass.async_block_till_done()
await async_wait_recording_done(hass)
states = await hass.async_add_executor_job(get_significant_states, hass, now)
states = await hass.async_add_executor_job(
get_significant_states, hass, now, None, hass.states.async_entity_ids()
)
assert len(states) > 1
for entity_states in states.values():
for state in entity_states:


@ -25,7 +25,9 @@ async def test_exclude_attributes(recorder_mock: Recorder, hass: HomeAssistant)
await hass.async_block_till_done()
await async_wait_recording_done(hass)
states = await hass.async_add_executor_job(get_significant_states, hass, now)
states = await hass.async_add_executor_job(
get_significant_states, hass, now, None, hass.states.async_entity_ids()
)
assert len(states) > 1
for entity_states in states.values():
for state in entity_states:


@ -38,7 +38,9 @@ async def test_exclude_attributes(recorder_mock: Recorder, hass: HomeAssistant)
await hass.async_block_till_done()
await async_wait_recording_done(hass)
states = await hass.async_add_executor_job(get_significant_states, hass, now)
states = await hass.async_add_executor_job(
get_significant_states, hass, now, None, hass.states.async_entity_ids()
)
assert len(states) > 1
for entity_states in states.values():
for state in entity_states:


@ -11,14 +11,7 @@ from homeassistant.components import history
from homeassistant.components.recorder import Recorder
from homeassistant.components.recorder.history import get_significant_states
from homeassistant.components.recorder.models import process_timestamp
from homeassistant.const import (
CONF_DOMAINS,
CONF_ENTITIES,
CONF_EXCLUDE,
CONF_INCLUDE,
EVENT_HOMEASSISTANT_FINAL_WRITE,
)
import homeassistant.core as ha
from homeassistant.const import EVENT_HOMEASSISTANT_FINAL_WRITE
from homeassistant.core import HomeAssistant
from homeassistant.helpers.json import JSONEncoder
from homeassistant.setup import async_setup_component
@ -59,7 +52,7 @@ def test_get_significant_states(hass_history) -> None:
"""
hass = hass_history
zero, four, states = record_states(hass)
hist = get_significant_states(hass, zero, four, filters=history.Filters())
hist = get_significant_states(hass, zero, four, entity_ids=list(states))
assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
@ -76,7 +69,7 @@ def test_get_significant_states_minimal_response(hass_history) -> None:
hass = hass_history
zero, four, states = record_states(hass)
hist = get_significant_states(
hass, zero, four, filters=history.Filters(), minimal_response=True
hass, zero, four, minimal_response=True, entity_ids=list(states)
)
entites_with_reducable_states = [
"media_player.test",
@ -147,11 +140,7 @@ def test_get_significant_states_with_initial(hass_history) -> None:
state.last_changed = one_and_half
hist = get_significant_states(
hass,
one_and_half,
four,
filters=history.Filters(),
include_start_time_state=True,
hass, one_and_half, four, include_start_time_state=True, entity_ids=list(states)
)
assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
@ -182,8 +171,8 @@ def test_get_significant_states_without_initial(hass_history) -> None:
hass,
one_and_half,
four,
filters=history.Filters(),
include_start_time_state=False,
entity_ids=list(states),
)
assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
@ -198,9 +187,7 @@ def test_get_significant_states_entity_id(hass_history) -> None:
del states["thermostat.test2"]
del states["script.can_cancel_this_one"]
hist = get_significant_states(
hass, zero, four, ["media_player.test"], filters=history.Filters()
)
hist = get_significant_states(hass, zero, four, ["media_player.test"])
assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
@ -218,247 +205,10 @@ def test_get_significant_states_multiple_entity_ids(hass_history) -> None:
zero,
four,
["media_player.test", "thermostat.test"],
filters=history.Filters(),
)
assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
def test_get_significant_states_exclude_domain(hass_history) -> None:
"""Test if significant states are returned when excluding domains.
We should get back every thermostat change that includes an attribute
change, but no media player changes.
"""
hass = hass_history
zero, four, states = record_states(hass)
del states["media_player.test"]
del states["media_player.test2"]
del states["media_player.test3"]
config = history.CONFIG_SCHEMA(
{
ha.DOMAIN: {},
history.DOMAIN: {CONF_EXCLUDE: {CONF_DOMAINS: ["media_player"]}},
}
)
check_significant_states(hass, zero, four, states, config)
def test_get_significant_states_exclude_entity(hass_history) -> None:
"""Test if significant states are returned when excluding entities.
We should get back every thermostat and script changes, but no media
player changes.
"""
hass = hass_history
zero, four, states = record_states(hass)
del states["media_player.test"]
config = history.CONFIG_SCHEMA(
{
ha.DOMAIN: {},
history.DOMAIN: {CONF_EXCLUDE: {CONF_ENTITIES: ["media_player.test"]}},
}
)
check_significant_states(hass, zero, four, states, config)
def test_get_significant_states_exclude(hass_history) -> None:
"""Test significant states when excluding entities and domains.
We should not get back every thermostat and media player test changes.
"""
hass = hass_history
zero, four, states = record_states(hass)
del states["media_player.test"]
del states["thermostat.test"]
del states["thermostat.test2"]
config = history.CONFIG_SCHEMA(
{
ha.DOMAIN: {},
history.DOMAIN: {
CONF_EXCLUDE: {
CONF_DOMAINS: ["thermostat"],
CONF_ENTITIES: ["media_player.test"],
}
},
}
)
check_significant_states(hass, zero, four, states, config)
def test_get_significant_states_exclude_include_entity(hass_history) -> None:
"""Test significant states when excluding domains and include entities.
We should not get back every thermostat change unless it's specifically included
"""
hass = hass_history
zero, four, states = record_states(hass)
del states["thermostat.test2"]
config = history.CONFIG_SCHEMA(
{
ha.DOMAIN: {},
history.DOMAIN: {
CONF_INCLUDE: {CONF_ENTITIES: ["media_player.test", "thermostat.test"]},
CONF_EXCLUDE: {CONF_DOMAINS: ["thermostat"]},
},
}
)
check_significant_states(hass, zero, four, states, config)
def test_get_significant_states_include_domain(hass_history) -> None:
"""Test if significant states are returned when including domains.
We should get back every thermostat and script changes, but no media
player changes.
"""
hass = hass_history
zero, four, states = record_states(hass)
del states["media_player.test"]
del states["media_player.test2"]
del states["media_player.test3"]
config = history.CONFIG_SCHEMA(
{
ha.DOMAIN: {},
history.DOMAIN: {CONF_INCLUDE: {CONF_DOMAINS: ["thermostat", "script"]}},
}
)
check_significant_states(hass, zero, four, states, config)
def test_get_significant_states_include_entity(hass_history) -> None:
"""Test if significant states are returned when including entities.
We should only get back changes of the media_player.test entity.
"""
hass = hass_history
zero, four, states = record_states(hass)
del states["media_player.test2"]
del states["media_player.test3"]
del states["thermostat.test"]
del states["thermostat.test2"]
del states["script.can_cancel_this_one"]
config = history.CONFIG_SCHEMA(
{
ha.DOMAIN: {},
history.DOMAIN: {CONF_INCLUDE: {CONF_ENTITIES: ["media_player.test"]}},
}
)
check_significant_states(hass, zero, four, states, config)
def test_get_significant_states_include(hass_history) -> None:
"""Test significant states when including domains and entities.
We should only get back changes of the media_player.test entity and the
thermostat domain.
"""
hass = hass_history
zero, four, states = record_states(hass)
del states["media_player.test2"]
del states["media_player.test3"]
del states["script.can_cancel_this_one"]
config = history.CONFIG_SCHEMA(
{
ha.DOMAIN: {},
history.DOMAIN: {
CONF_INCLUDE: {
CONF_DOMAINS: ["thermostat"],
CONF_ENTITIES: ["media_player.test"],
}
},
}
)
check_significant_states(hass, zero, four, states, config)
def test_get_significant_states_include_exclude_domain(hass_history) -> None:
"""Test if significant states when excluding and including domains.
We should get back all the media_player domain changes
only since the include wins over the exclude but will
exclude everything else.
"""
hass = hass_history
zero, four, states = record_states(hass)
del states["thermostat.test"]
del states["thermostat.test2"]
del states["script.can_cancel_this_one"]
config = history.CONFIG_SCHEMA(
{
ha.DOMAIN: {},
history.DOMAIN: {
CONF_INCLUDE: {CONF_DOMAINS: ["media_player"]},
CONF_EXCLUDE: {CONF_DOMAINS: ["media_player"]},
},
}
)
check_significant_states(hass, zero, four, states, config)
def test_get_significant_states_include_exclude_entity(hass_history) -> None:
"""Test if significant states when excluding and including domains.
We should not get back any changes since we include only
media_player.test but also exclude it.
"""
hass = hass_history
zero, four, states = record_states(hass)
del states["media_player.test2"]
del states["media_player.test3"]
del states["thermostat.test"]
del states["thermostat.test2"]
del states["script.can_cancel_this_one"]
config = history.CONFIG_SCHEMA(
{
ha.DOMAIN: {},
history.DOMAIN: {
CONF_INCLUDE: {CONF_ENTITIES: ["media_player.test"]},
CONF_EXCLUDE: {CONF_ENTITIES: ["media_player.test"]},
},
}
)
check_significant_states(hass, zero, four, states, config)
def test_get_significant_states_include_exclude(hass_history) -> None:
"""Test if significant states when in/excluding domains and entities.
We should get back changes of the media_player.test2, media_player.test3,
and thermostat.test.
"""
hass = hass_history
zero, four, states = record_states(hass)
del states["media_player.test"]
del states["thermostat.test2"]
del states["script.can_cancel_this_one"]
config = history.CONFIG_SCHEMA(
{
ha.DOMAIN: {},
history.DOMAIN: {
CONF_INCLUDE: {
CONF_DOMAINS: ["media_player"],
CONF_ENTITIES: ["thermostat.test"],
},
CONF_EXCLUDE: {
CONF_DOMAINS: ["thermostat"],
CONF_ENTITIES: ["media_player.test"],
},
},
}
)
check_significant_states(hass, zero, four, states, config)
def test_get_significant_states_are_ordered(hass_history) -> None:
"""Test order of results from get_significant_states.
@ -468,14 +218,10 @@ def test_get_significant_states_are_ordered(hass_history) -> None:
hass = hass_history
zero, four, _states = record_states(hass)
entity_ids = ["media_player.test", "media_player.test2"]
hist = get_significant_states(
hass, zero, four, entity_ids, filters=history.Filters()
)
hist = get_significant_states(hass, zero, four, entity_ids)
assert list(hist.keys()) == entity_ids
entity_ids = ["media_player.test2", "media_player.test"]
hist = get_significant_states(
hass, zero, four, entity_ids, filters=history.Filters()
)
hist = get_significant_states(hass, zero, four, entity_ids)
assert list(hist.keys()) == entity_ids
@ -522,7 +268,12 @@ def test_get_significant_states_only(hass_history) -> None:
# everything is different
states.append(set_state("412", attributes={"attribute": 54.23}))
hist = get_significant_states(hass, start, significant_changes_only=True)
hist = get_significant_states(
hass,
start,
significant_changes_only=True,
entity_ids=list({state.entity_id for state in states}),
)
assert len(hist[entity_id]) == 2
assert not any(
@ -535,7 +286,12 @@ def test_get_significant_states_only(hass_history) -> None:
state.last_updated == states[2].last_updated for state in hist[entity_id]
)
hist = get_significant_states(hass, start, significant_changes_only=False)
hist = get_significant_states(
hass,
start,
significant_changes_only=False,
entity_ids=list({state.entity_id for state in states}),
)
assert len(hist[entity_id]) == 3
assert_multiple_states_equal_without_context_and_last_changed(
@ -545,16 +301,7 @@ def test_get_significant_states_only(hass_history) -> None:
def check_significant_states(hass, zero, four, states, config):
"""Check if significant states are retrieved."""
domain_config = config[history.DOMAIN]
exclude = domain_config.get(CONF_EXCLUDE, {})
include = domain_config.get(CONF_INCLUDE, {})
filters = history.Filters(
excluded_entities=exclude.get(CONF_ENTITIES, []),
excluded_domains=exclude.get(CONF_DOMAINS, []),
included_entities=include.get(CONF_ENTITIES, []),
included_domains=include.get(CONF_DOMAINS, []),
)
hist = get_significant_states(hass, zero, four, filters=filters)
hist = get_significant_states(hass, zero, four)
assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
@ -649,7 +396,9 @@ async def test_fetch_period_api(
"""Test the fetch period view for history."""
await async_setup_component(hass, "history", {})
client = await hass_client()
response = await client.get(f"/api/history/period/{dt_util.utcnow().isoformat()}")
response = await client.get(
f"/api/history/period/{dt_util.utcnow().isoformat()}?filter_entity_id=sensor.power"
)
assert response.status == HTTPStatus.OK
@ -661,7 +410,9 @@ async def test_fetch_period_api_with_use_include_order(
hass, "history", {history.DOMAIN: {history.CONF_ORDER: True}}
)
client = await hass_client()
response = await client.get(f"/api/history/period/{dt_util.utcnow().isoformat()}")
response = await client.get(
f"/api/history/period/{dt_util.utcnow().isoformat()}?filter_entity_id=sensor.power"
)
assert response.status == HTTPStatus.OK
@ -713,7 +464,7 @@ async def test_fetch_period_api_with_no_timestamp(
"""Test the fetch period view for history with no timestamp."""
await async_setup_component(hass, "history", {})
client = await hass_client()
response = await client.get("/api/history/period")
response = await client.get("/api/history/period?filter_entity_id=sensor.power")
assert response.status == HTTPStatus.OK
@ -739,119 +490,6 @@ async def test_fetch_period_api_with_include_order(
assert response.status == HTTPStatus.OK
async def test_fetch_period_api_with_entity_glob_include(
recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator
) -> None:
"""Test the fetch period view for history."""
await async_setup_component(
hass,
"history",
{
"history": {
"include": {"entity_globs": ["light.k*"]},
}
},
)
hass.states.async_set("light.kitchen", "on")
hass.states.async_set("light.cow", "on")
hass.states.async_set("light.nomatch", "on")
await async_wait_recording_done(hass)
client = await hass_client()
response = await client.get(
f"/api/history/period/{dt_util.utcnow().isoformat()}",
)
assert response.status == HTTPStatus.OK
response_json = await response.json()
assert response_json[0][0]["entity_id"] == "light.kitchen"
async def test_fetch_period_api_with_entity_glob_exclude(
recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator
) -> None:
"""Test the fetch period view for history."""
await async_setup_component(
hass,
"history",
{
"history": {
"exclude": {
"entity_globs": ["light.k*", "binary_sensor.*_?"],
"domains": "switch",
"entities": "media_player.test",
},
}
},
)
hass.states.async_set("light.kitchen", "on")
hass.states.async_set("light.cow", "on")
hass.states.async_set("light.match", "on")
hass.states.async_set("switch.match", "on")
hass.states.async_set("media_player.test", "on")
hass.states.async_set("binary_sensor.sensor_l", "on")
hass.states.async_set("binary_sensor.sensor_r", "on")
hass.states.async_set("binary_sensor.sensor", "on")
await async_wait_recording_done(hass)
client = await hass_client()
response = await client.get(
f"/api/history/period/{dt_util.utcnow().isoformat()}",
)
assert response.status == HTTPStatus.OK
response_json = await response.json()
assert len(response_json) == 3
entities = {state[0]["entity_id"] for state in response_json}
assert entities == {"binary_sensor.sensor", "light.cow", "light.match"}
async def test_fetch_period_api_with_entity_glob_include_and_exclude(
recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator
) -> None:
"""Test the fetch period view for history."""
await async_setup_component(
hass,
"history",
{
"history": {
"exclude": {
"entity_globs": ["light.many*", "binary_sensor.*"],
},
"include": {
"entity_globs": ["light.m*"],
"domains": "switch",
"entities": "media_player.test",
},
}
},
)
hass.states.async_set("light.kitchen", "on")
hass.states.async_set("light.cow", "on")
hass.states.async_set("light.match", "on")
hass.states.async_set("light.many_state_changes", "on")
hass.states.async_set("switch.match", "on")
hass.states.async_set("media_player.test", "on")
hass.states.async_set("binary_sensor.exclude", "on")
await async_wait_recording_done(hass)
client = await hass_client()
response = await client.get(
f"/api/history/period/{dt_util.utcnow().isoformat()}",
)
assert response.status == HTTPStatus.OK
response_json = await response.json()
assert len(response_json) == 4
entities = {state[0]["entity_id"] for state in response_json}
assert entities == {
"light.many_state_changes",
"light.match",
"media_player.test",
"switch.match",
}
async def test_entity_ids_limit_via_api(
recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator
) -> None:
@ -910,3 +548,148 @@ async def test_entity_ids_limit_via_api_with_skip_initial_state(
assert len(response_json) == 2
assert response_json[0][0]["entity_id"] == "light.kitchen"
assert response_json[1][0]["entity_id"] == "light.cow"
async def test_fetch_period_api_before_history_started(
recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator
) -> None:
"""Test the fetch period view for history for the far past."""
await async_setup_component(
hass,
"history",
{},
)
await async_wait_recording_done(hass)
far_past = dt_util.utcnow() - timedelta(days=365)
client = await hass_client()
response = await client.get(
f"/api/history/period/{far_past.isoformat()}?filter_entity_id=light.kitchen",
)
assert response.status == HTTPStatus.OK
response_json = await response.json()
assert response_json == []
async def test_fetch_period_api_far_future(
recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator
) -> None:
"""Test the fetch period view for history for the far future."""
await async_setup_component(
hass,
"history",
{},
)
await async_wait_recording_done(hass)
far_future = dt_util.utcnow() + timedelta(days=365)
client = await hass_client()
response = await client.get(
f"/api/history/period/{far_future.isoformat()}?filter_entity_id=light.kitchen",
)
assert response.status == HTTPStatus.OK
response_json = await response.json()
assert response_json == []
async def test_fetch_period_api_with_invalid_datetime(
recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator
) -> None:
"""Test the fetch period view for history with an invalid date time."""
await async_setup_component(
hass,
"history",
{},
)
await async_wait_recording_done(hass)
client = await hass_client()
response = await client.get(
"/api/history/period/INVALID?filter_entity_id=light.kitchen",
)
assert response.status == HTTPStatus.BAD_REQUEST
response_json = await response.json()
assert response_json == {"message": "Invalid datetime"}
async def test_fetch_period_api_invalid_end_time(
recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator
) -> None:
"""Test the fetch period view for history with an invalid end time."""
await async_setup_component(
hass,
"history",
{},
)
await async_wait_recording_done(hass)
far_past = dt_util.utcnow() - timedelta(days=365)
client = await hass_client()
response = await client.get(
f"/api/history/period/{far_past.isoformat()}",
params={"filter_entity_id": "light.kitchen", "end_time": "INVALID"},
)
assert response.status == HTTPStatus.BAD_REQUEST
response_json = await response.json()
assert response_json == {"message": "Invalid end_time"}
async def test_entity_ids_limit_via_api_with_end_time(
recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator
) -> None:
"""Test limiting history to entity_ids with end_time."""
await async_setup_component(
hass,
"history",
{"history": {}},
)
start = dt_util.utcnow()
hass.states.async_set("light.kitchen", "on")
hass.states.async_set("light.cow", "on")
hass.states.async_set("light.nomatch", "on")
await async_wait_recording_done(hass)
end_time = start + timedelta(minutes=1)
future_second = dt_util.utcnow() + timedelta(seconds=1)
client = await hass_client()
response = await client.get(
f"/api/history/period/{future_second.isoformat()}",
params={
"filter_entity_id": "light.kitchen,light.cow",
"end_time": end_time.isoformat(),
},
)
assert response.status == HTTPStatus.OK
response_json = await response.json()
assert len(response_json) == 0
when = start - timedelta(minutes=1)
response = await client.get(
f"/api/history/period/{when.isoformat()}",
params={
"filter_entity_id": "light.kitchen,light.cow",
"end_time": end_time.isoformat(),
},
)
assert response.status == HTTPStatus.OK
response_json = await response.json()
assert len(response_json) == 2
assert response_json[0][0]["entity_id"] == "light.kitchen"
assert response_json[1][0]["entity_id"] == "light.cow"
async def test_fetch_period_api_with_no_entity_ids(
recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator
) -> None:
"""Test the fetch period view for history with minimal_response."""
await async_setup_component(hass, "history", {})
await async_wait_recording_done(hass)
yesterday = dt_util.utcnow() - timedelta(days=1)
client = await hass_client()
response = await client.get(f"/api/history/period/{yesterday.isoformat()}")
assert response.status == HTTPStatus.BAD_REQUEST
response_json = await response.json()
assert response_json == {"message": "filter_entity_id is missing"}

View file

@ -13,12 +13,10 @@ import pytest
from sqlalchemy import create_engine
from sqlalchemy.orm import Session
from homeassistant.components import history, recorder
from homeassistant.components import recorder
from homeassistant.components.recorder import Recorder, core, statistics
from homeassistant.components.recorder.history import get_significant_states
from homeassistant.components.recorder.models import process_timestamp
from homeassistant.const import CONF_DOMAINS, CONF_ENTITIES, CONF_EXCLUDE, CONF_INCLUDE
import homeassistant.core as ha
from homeassistant.core import HomeAssistant
from homeassistant.helpers.json import JSONEncoder
from homeassistant.setup import async_setup_component
@ -79,6 +77,8 @@ def db_schema_30():
core, "Events", old_db_schema.Events
), patch.object(
core, "StateAttributes", old_db_schema.StateAttributes
), patch.object(
core, "EntityIDMigrationTask", core.RecorderTask
), patch(
CREATE_ENGINE_TARGET, new=_create_engine_test
):
@ -108,7 +108,7 @@ def test_get_significant_states(legacy_hass_history) -> None:
"""
hass = legacy_hass_history
zero, four, states = record_states(hass)
hist = get_significant_states(hass, zero, four, filters=history.Filters())
hist = get_significant_states(hass, zero, four, entity_ids=list(states))
assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
@ -125,7 +125,7 @@ def test_get_significant_states_minimal_response(legacy_hass_history) -> None:
hass = legacy_hass_history
zero, four, states = record_states(hass)
hist = get_significant_states(
hass, zero, four, filters=history.Filters(), minimal_response=True
hass, zero, four, minimal_response=True, entity_ids=list(states)
)
entites_with_reducable_states = [
"media_player.test",
@ -202,8 +202,8 @@ def test_get_significant_states_with_initial(legacy_hass_history) -> None:
hass,
one_and_half,
four,
filters=history.Filters(),
include_start_time_state=True,
entity_ids=list(states),
)
assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
@ -234,8 +234,8 @@ def test_get_significant_states_without_initial(legacy_hass_history) -> None:
hass,
one_and_half,
four,
filters=history.Filters(),
include_start_time_state=False,
entity_ids=list(states),
)
assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
@ -253,9 +253,7 @@ def test_get_significant_states_entity_id(hass_history) -> None:
del states["thermostat.test2"]
del states["script.can_cancel_this_one"]
hist = get_significant_states(
hass, zero, four, ["media_player.test"], filters=history.Filters()
)
hist = get_significant_states(hass, zero, four, ["media_player.test"])
assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
@ -273,247 +271,10 @@ def test_get_significant_states_multiple_entity_ids(legacy_hass_history) -> None
zero,
four,
["media_player.test", "thermostat.test"],
filters=history.Filters(),
)
assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
def test_get_significant_states_exclude_domain(legacy_hass_history) -> None:
"""Test if significant states are returned when excluding domains.
We should get back every thermostat change that includes an attribute
change, but no media player changes.
"""
hass = legacy_hass_history
zero, four, states = record_states(hass)
del states["media_player.test"]
del states["media_player.test2"]
del states["media_player.test3"]
config = history.CONFIG_SCHEMA(
{
ha.DOMAIN: {},
history.DOMAIN: {CONF_EXCLUDE: {CONF_DOMAINS: ["media_player"]}},
}
)
check_significant_states(hass, zero, four, states, config)
def test_get_significant_states_exclude_entity(legacy_hass_history) -> None:
"""Test if significant states are returned when excluding entities.
We should get back every thermostat and script change, but no
media_player.test changes.
"""
hass = legacy_hass_history
zero, four, states = record_states(hass)
del states["media_player.test"]
config = history.CONFIG_SCHEMA(
{
ha.DOMAIN: {},
history.DOMAIN: {CONF_EXCLUDE: {CONF_ENTITIES: ["media_player.test"]}},
}
)
check_significant_states(hass, zero, four, states, config)
def test_get_significant_states_exclude(legacy_hass_history) -> None:
"""Test significant states when excluding entities and domains.
We should not get back any thermostat changes or media_player.test changes.
"""
hass = legacy_hass_history
zero, four, states = record_states(hass)
del states["media_player.test"]
del states["thermostat.test"]
del states["thermostat.test2"]
config = history.CONFIG_SCHEMA(
{
ha.DOMAIN: {},
history.DOMAIN: {
CONF_EXCLUDE: {
CONF_DOMAINS: ["thermostat"],
CONF_ENTITIES: ["media_player.test"],
}
},
}
)
check_significant_states(hass, zero, four, states, config)
def test_get_significant_states_exclude_include_entity(legacy_hass_history) -> None:
"""Test significant states when excluding domains and include entities.
We should not get back every thermostat change unless its specifically included
"""
hass = legacy_hass_history
zero, four, states = record_states(hass)
del states["thermostat.test2"]
config = history.CONFIG_SCHEMA(
{
ha.DOMAIN: {},
history.DOMAIN: {
CONF_INCLUDE: {CONF_ENTITIES: ["media_player.test", "thermostat.test"]},
CONF_EXCLUDE: {CONF_DOMAINS: ["thermostat"]},
},
}
)
check_significant_states(hass, zero, four, states, config)
def test_get_significant_states_include_domain(legacy_hass_history) -> None:
"""Test if significant states are returned when including domains.
We should get back every thermostat and script change, but no media
player changes.
"""
hass = legacy_hass_history
zero, four, states = record_states(hass)
del states["media_player.test"]
del states["media_player.test2"]
del states["media_player.test3"]
config = history.CONFIG_SCHEMA(
{
ha.DOMAIN: {},
history.DOMAIN: {CONF_INCLUDE: {CONF_DOMAINS: ["thermostat", "script"]}},
}
)
check_significant_states(hass, zero, four, states, config)
def test_get_significant_states_include_entity(legacy_hass_history) -> None:
"""Test if significant states are returned when including entities.
We should only get back changes of the media_player.test entity.
"""
hass = legacy_hass_history
zero, four, states = record_states(hass)
del states["media_player.test2"]
del states["media_player.test3"]
del states["thermostat.test"]
del states["thermostat.test2"]
del states["script.can_cancel_this_one"]
config = history.CONFIG_SCHEMA(
{
ha.DOMAIN: {},
history.DOMAIN: {CONF_INCLUDE: {CONF_ENTITIES: ["media_player.test"]}},
}
)
check_significant_states(hass, zero, four, states, config)
def test_get_significant_states_include(legacy_hass_history) -> None:
"""Test significant states when including domains and entities.
We should only get back changes of the media_player.test entity and the
thermostat domain.
"""
hass = legacy_hass_history
zero, four, states = record_states(hass)
del states["media_player.test2"]
del states["media_player.test3"]
del states["script.can_cancel_this_one"]
config = history.CONFIG_SCHEMA(
{
ha.DOMAIN: {},
history.DOMAIN: {
CONF_INCLUDE: {
CONF_DOMAINS: ["thermostat"],
CONF_ENTITIES: ["media_player.test"],
}
},
}
)
check_significant_states(hass, zero, four, states, config)
def test_get_significant_states_include_exclude_domain(legacy_hass_history) -> None:
"""Test if significant states when excluding and including domains.
We should get back all the media_player domain changes
only since the include wins over the exclude but will
exclude everything else.
"""
hass = legacy_hass_history
zero, four, states = record_states(hass)
del states["thermostat.test"]
del states["thermostat.test2"]
del states["script.can_cancel_this_one"]
config = history.CONFIG_SCHEMA(
{
ha.DOMAIN: {},
history.DOMAIN: {
CONF_INCLUDE: {CONF_DOMAINS: ["media_player"]},
CONF_EXCLUDE: {CONF_DOMAINS: ["media_player"]},
},
}
)
check_significant_states(hass, zero, four, states, config)
def test_get_significant_states_include_exclude_entity(legacy_hass_history) -> None:
"""Test if significant states when excluding and including domains.
We should not get back any changes since we include only
media_player.test but also exclude it.
"""
hass = legacy_hass_history
zero, four, states = record_states(hass)
del states["media_player.test2"]
del states["media_player.test3"]
del states["thermostat.test"]
del states["thermostat.test2"]
del states["script.can_cancel_this_one"]
config = history.CONFIG_SCHEMA(
{
ha.DOMAIN: {},
history.DOMAIN: {
CONF_INCLUDE: {CONF_ENTITIES: ["media_player.test"]},
CONF_EXCLUDE: {CONF_ENTITIES: ["media_player.test"]},
},
}
)
check_significant_states(hass, zero, four, states, config)
def test_get_significant_states_include_exclude(legacy_hass_history) -> None:
"""Test if significant states when in/excluding domains and entities.
We should get back changes of the media_player.test2, media_player.test3,
and thermostat.test.
"""
hass = legacy_hass_history
zero, four, states = record_states(hass)
del states["media_player.test"]
del states["thermostat.test2"]
del states["script.can_cancel_this_one"]
config = history.CONFIG_SCHEMA(
{
ha.DOMAIN: {},
history.DOMAIN: {
CONF_INCLUDE: {
CONF_DOMAINS: ["media_player"],
CONF_ENTITIES: ["thermostat.test"],
},
CONF_EXCLUDE: {
CONF_DOMAINS: ["thermostat"],
CONF_ENTITIES: ["media_player.test"],
},
},
}
)
check_significant_states(hass, zero, four, states, config)
def test_get_significant_states_are_ordered(legacy_hass_history) -> None:
"""Test order of results from get_significant_states.
@ -523,14 +284,10 @@ def test_get_significant_states_are_ordered(legacy_hass_history) -> None:
hass = legacy_hass_history
zero, four, _states = record_states(hass)
entity_ids = ["media_player.test", "media_player.test2"]
hist = get_significant_states(
hass, zero, four, entity_ids, filters=history.Filters()
)
hist = get_significant_states(hass, zero, four, entity_ids)
assert list(hist.keys()) == entity_ids
entity_ids = ["media_player.test2", "media_player.test"]
hist = get_significant_states(
hass, zero, four, entity_ids, filters=history.Filters()
)
hist = get_significant_states(hass, zero, four, entity_ids)
assert list(hist.keys()) == entity_ids
@ -577,7 +334,12 @@ def test_get_significant_states_only(legacy_hass_history) -> None:
# everything is different
states.append(set_state("412", attributes={"attribute": 54.23}))
hist = get_significant_states(hass, start, significant_changes_only=True)
hist = get_significant_states(
hass,
start,
significant_changes_only=True,
entity_ids=list({state.entity_id for state in states}),
)
assert len(hist[entity_id]) == 2
assert not any(
@ -590,7 +352,12 @@ def test_get_significant_states_only(legacy_hass_history) -> None:
state.last_updated == states[2].last_updated for state in hist[entity_id]
)
hist = get_significant_states(hass, start, significant_changes_only=False)
hist = get_significant_states(
hass,
start,
significant_changes_only=False,
entity_ids=list({state.entity_id for state in states}),
)
assert len(hist[entity_id]) == 3
assert_multiple_states_equal_without_context_and_last_changed(
@ -600,16 +367,7 @@ def test_get_significant_states_only(legacy_hass_history) -> None:
def check_significant_states(hass, zero, four, states, config):
"""Check if significant states are retrieved."""
domain_config = config[history.DOMAIN]
exclude = domain_config.get(CONF_EXCLUDE, {})
include = domain_config.get(CONF_INCLUDE, {})
filters = history.Filters(
excluded_entities=exclude.get(CONF_ENTITIES, []),
excluded_domains=exclude.get(CONF_DOMAINS, []),
included_entities=include.get(CONF_ENTITIES, []),
included_domains=include.get(CONF_DOMAINS, []),
)
hist = get_significant_states(hass, zero, four, filters=filters)
hist = get_significant_states(hass, zero, four)
assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
@ -707,23 +465,7 @@ async def test_fetch_period_api(
with patch.object(instance.states_meta_manager, "active", False):
client = await hass_client()
response = await client.get(
f"/api/history/period/{dt_util.utcnow().isoformat()}"
)
assert response.status == HTTPStatus.OK
async def test_fetch_period_api_with_use_include_order(
recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator
) -> None:
"""Test the fetch period view for history with include order."""
await async_setup_component(
hass, "history", {history.DOMAIN: {history.CONF_ORDER: True}}
)
instance = recorder.get_instance(hass)
with patch.object(instance.states_meta_manager, "active", False):
client = await hass_client()
response = await client.get(
f"/api/history/period/{dt_util.utcnow().isoformat()}"
f"/api/history/period/{dt_util.utcnow().isoformat()}?filter_entity_id=sensor.power"
)
assert response.status == HTTPStatus.OK
@ -779,7 +521,7 @@ async def test_fetch_period_api_with_no_timestamp(
instance = recorder.get_instance(hass)
with patch.object(instance.states_meta_manager, "active", False):
client = await hass_client()
response = await client.get("/api/history/period")
response = await client.get("/api/history/period?filter_entity_id=sensor.power")
assert response.status == HTTPStatus.OK
@ -807,125 +549,6 @@ async def test_fetch_period_api_with_include_order(
assert response.status == HTTPStatus.OK
async def test_fetch_period_api_with_entity_glob_include(
recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator
) -> None:
"""Test the fetch period view for history."""
await async_setup_component(
hass,
"history",
{
"history": {
"include": {"entity_globs": ["light.k*"]},
}
},
)
instance = recorder.get_instance(hass)
with patch.object(instance.states_meta_manager, "active", False):
hass.states.async_set("light.kitchen", "on")
hass.states.async_set("light.cow", "on")
hass.states.async_set("light.nomatch", "on")
await async_wait_recording_done(hass)
client = await hass_client()
response = await client.get(
f"/api/history/period/{dt_util.utcnow().isoformat()}",
)
assert response.status == HTTPStatus.OK
response_json = await response.json()
assert response_json[0][0]["entity_id"] == "light.kitchen"
async def test_fetch_period_api_with_entity_glob_exclude(
recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator
) -> None:
"""Test the fetch period view for history."""
await async_setup_component(
hass,
"history",
{
"history": {
"exclude": {
"entity_globs": ["light.k*", "binary_sensor.*_?"],
"domains": "switch",
"entities": "media_player.test",
},
}
},
)
instance = recorder.get_instance(hass)
with patch.object(instance.states_meta_manager, "active", False):
hass.states.async_set("light.kitchen", "on")
hass.states.async_set("light.cow", "on")
hass.states.async_set("light.match", "on")
hass.states.async_set("switch.match", "on")
hass.states.async_set("media_player.test", "on")
hass.states.async_set("binary_sensor.sensor_l", "on")
hass.states.async_set("binary_sensor.sensor_r", "on")
hass.states.async_set("binary_sensor.sensor", "on")
await async_wait_recording_done(hass)
client = await hass_client()
response = await client.get(
f"/api/history/period/{dt_util.utcnow().isoformat()}",
)
assert response.status == HTTPStatus.OK
response_json = await response.json()
assert len(response_json) == 3
entities = {state[0]["entity_id"] for state in response_json}
assert entities == {"binary_sensor.sensor", "light.cow", "light.match"}
async def test_fetch_period_api_with_entity_glob_include_and_exclude(
recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator
) -> None:
"""Test the fetch period view for history."""
await async_setup_component(
hass,
"history",
{
"history": {
"exclude": {
"entity_globs": ["light.many*", "binary_sensor.*"],
},
"include": {
"entity_globs": ["light.m*"],
"domains": "switch",
"entities": "media_player.test",
},
}
},
)
instance = recorder.get_instance(hass)
with patch.object(instance.states_meta_manager, "active", False):
hass.states.async_set("light.kitchen", "on")
hass.states.async_set("light.cow", "on")
hass.states.async_set("light.match", "on")
hass.states.async_set("light.many_state_changes", "on")
hass.states.async_set("switch.match", "on")
hass.states.async_set("media_player.test", "on")
hass.states.async_set("binary_sensor.exclude", "on")
await async_wait_recording_done(hass)
client = await hass_client()
response = await client.get(
f"/api/history/period/{dt_util.utcnow().isoformat()}",
)
assert response.status == HTTPStatus.OK
response_json = await response.json()
assert len(response_json) == 4
entities = {state[0]["entity_id"] for state in response_json}
assert entities == {
"light.many_state_changes",
"light.match",
"media_player.test",
"switch.match",
}
async def test_entity_ids_limit_via_api(
recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator
) -> None:

View file

@ -461,19 +461,10 @@ async def test_history_stream_historical_only(
) -> None:
"""Test history stream."""
now = dt_util.utcnow()
sort_order = ["sensor.two", "sensor.four", "sensor.one"]
await async_setup_component(
hass,
"history",
{
history.DOMAIN: {
history.CONF_ORDER: True,
CONF_INCLUDE: {
CONF_ENTITIES: sort_order,
CONF_DOMAINS: ["sensor"],
},
}
},
{},
)
await async_setup_component(hass, "sensor", {})
await async_recorder_block_till_done(hass)
@ -500,6 +491,7 @@ async def test_history_stream_historical_only(
{
"id": 1,
"type": "history/stream",
"entity_ids": ["sensor.one", "sensor.two", "sensor.three", "sensor.four"],
"start_time": now.isoformat(),
"end_time": end_time.isoformat(),
"include_start_time_state": True,
@ -755,6 +747,7 @@ async def test_history_stream_bad_start_time(
{
"id": 1,
"type": "history/stream",
"entity_ids": ["climate.test"],
"start_time": "cats",
}
)
@ -781,6 +774,7 @@ async def test_history_stream_end_time_before_start_time(
{
"id": 1,
"type": "history/stream",
"entity_ids": ["climate.test"],
"start_time": start_time.isoformat(),
"end_time": end_time.isoformat(),
}
@ -807,6 +801,7 @@ async def test_history_stream_bad_end_time(
{
"id": 1,
"type": "history/stream",
"entity_ids": ["climate.test"],
"start_time": now.isoformat(),
"end_time": "dogs",
}
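The websocket contract changes the same way across these hunks: every history/stream subscription must now carry an explicit entity_ids list. A minimal sketch, assuming the same websocket client fixture these tests use:

# Sketch of a valid subscription under the new contract; entity_ids
# is now required alongside start_time.
await client.send_json(
    {
        "id": 1,
        "type": "history/stream",
        "entity_ids": ["climate.test"],  # now required
        "start_time": dt_util.utcnow().isoformat(),
    }
)
response = await client.receive_json()
assert response["success"]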
@ -821,19 +816,10 @@ async def test_history_stream_live_no_attributes_minimal_response(
) -> None:
"""Test history stream with history and live data and no_attributes and minimal_response."""
now = dt_util.utcnow()
sort_order = ["sensor.two", "sensor.four", "sensor.one"]
await async_setup_component(
hass,
"history",
{
history.DOMAIN: {
history.CONF_ORDER: True,
CONF_INCLUDE: {
CONF_ENTITIES: sort_order,
CONF_DOMAINS: ["sensor"],
},
}
},
{},
)
await async_setup_component(hass, "sensor", {})
await async_recorder_block_till_done(hass)
@ -853,6 +839,7 @@ async def test_history_stream_live_no_attributes_minimal_response(
{
"id": 1,
"type": "history/stream",
"entity_ids": ["sensor.one", "sensor.two"],
"start_time": now.isoformat(),
"include_start_time_state": True,
"significant_changes_only": False,
@ -910,19 +897,10 @@ async def test_history_stream_live(
) -> None:
"""Test history stream with history and live data."""
now = dt_util.utcnow()
sort_order = ["sensor.two", "sensor.four", "sensor.one"]
await async_setup_component(
hass,
"history",
{
history.DOMAIN: {
history.CONF_ORDER: True,
CONF_INCLUDE: {
CONF_ENTITIES: sort_order,
CONF_DOMAINS: ["sensor"],
},
}
},
{},
)
await async_setup_component(hass, "sensor", {})
await async_recorder_block_till_done(hass)
@ -942,6 +920,7 @@ async def test_history_stream_live(
{
"id": 1,
"type": "history/stream",
"entity_ids": ["sensor.one", "sensor.two"],
"start_time": now.isoformat(),
"include_start_time_state": True,
"significant_changes_only": False,
@ -1021,19 +1000,10 @@ async def test_history_stream_live_minimal_response(
) -> None:
"""Test history stream with history and live data and minimal_response."""
now = dt_util.utcnow()
sort_order = ["sensor.two", "sensor.four", "sensor.one"]
await async_setup_component(
hass,
"history",
{
history.DOMAIN: {
history.CONF_ORDER: True,
CONF_INCLUDE: {
CONF_ENTITIES: sort_order,
CONF_DOMAINS: ["sensor"],
},
}
},
{},
)
await async_setup_component(hass, "sensor", {})
await async_recorder_block_till_done(hass)
@ -1053,6 +1023,7 @@ async def test_history_stream_live_minimal_response(
{
"id": 1,
"type": "history/stream",
"entity_ids": ["sensor.one", "sensor.two"],
"start_time": now.isoformat(),
"include_start_time_state": True,
"significant_changes_only": False,
@ -1126,19 +1097,10 @@ async def test_history_stream_live_no_attributes(
) -> None:
"""Test history stream with history and live data and no_attributes."""
now = dt_util.utcnow()
sort_order = ["sensor.two", "sensor.four", "sensor.one"]
await async_setup_component(
hass,
"history",
{
history.DOMAIN: {
history.CONF_ORDER: True,
CONF_INCLUDE: {
CONF_ENTITIES: sort_order,
CONF_DOMAINS: ["sensor"],
},
}
},
{},
)
await async_setup_component(hass, "sensor", {})
await async_recorder_block_till_done(hass)
@ -1159,6 +1121,7 @@ async def test_history_stream_live_no_attributes(
"id": 1,
"type": "history/stream",
"start_time": now.isoformat(),
"entity_ids": ["sensor.one", "sensor.two"],
"include_start_time_state": True,
"significant_changes_only": False,
"no_attributes": True,

View file

@ -31,7 +31,9 @@ async def test_exclude_attributes(recorder_mock: Recorder, hass: HomeAssistant)
await hass.async_block_till_done()
await async_wait_recording_done(hass)
states = await hass.async_add_executor_job(get_significant_states, hass, now)
states = await hass.async_add_executor_job(
get_significant_states, hass, now, None, hass.states.async_entity_ids()
)
assert len(states) > 1
for entity_states in states.values():
for state in entity_states:
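The same two-line change repeats across the platform test files that follow; condensed, the new calling convention is:

# get_significant_states is a blocking database call, so it runs in
# the executor; the entity id list (4th positional argument) is now
# required instead of being inferred from filters.
states = await hass.async_add_executor_job(
    get_significant_states,
    hass,
    now,                             # start time
    None,                            # end time (None = up to now)
    hass.states.async_entity_ids(),  # explicit entity ids
)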

View file

@ -30,7 +30,9 @@ async def test_exclude_attributes(
async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=5))
await hass.async_block_till_done()
await async_wait_recording_done(hass)
states = await hass.async_add_executor_job(get_significant_states, hass, now)
states = await hass.async_add_executor_job(
get_significant_states, hass, now, None, hass.states.async_entity_ids()
)
assert len(states) >= 1
for entity_states in states.values():
for state in entity_states:

View file

@ -31,7 +31,9 @@ async def test_exclude_attributes(
await hass.async_block_till_done()
await async_wait_recording_done(hass)
states = await hass.async_add_executor_job(get_significant_states, hass, now)
states = await hass.async_add_executor_job(
get_significant_states, hass, now, None, hass.states.async_entity_ids()
)
assert len(states) >= 1
for entity_states in states.values():
for state in entity_states:

View file

@ -35,7 +35,9 @@ async def test_exclude_attributes(
await hass.async_block_till_done()
await async_wait_recording_done(hass)
states = await hass.async_add_executor_job(get_significant_states, hass, now)
states = await hass.async_add_executor_job(
get_significant_states, hass, now, None, hass.states.async_entity_ids()
)
assert len(states) >= 1
for entity_states in states.values():
for state in entity_states:

View file

@ -43,7 +43,9 @@ async def test_exclude_attributes(
await hass.async_block_till_done()
await async_wait_recording_done(hass)
states = await hass.async_add_executor_job(get_significant_states, hass, now)
states = await hass.async_add_executor_job(
get_significant_states, hass, now, None, hass.states.async_entity_ids()
)
assert len(states) >= 1
for entity_states in states.values():
for state in entity_states:

View file

@ -42,7 +42,9 @@ async def test_exclude_attributes(
await hass.async_block_till_done()
await async_wait_recording_done(hass)
states = await hass.async_add_executor_job(get_significant_states, hass, now)
states = await hass.async_add_executor_job(
get_significant_states, hass, now, None, hass.states.async_entity_ids()
)
assert len(states) >= 1
for entity_states in states.values():
for state in entity_states:

View file

@ -42,7 +42,9 @@ async def test_exclude_attributes(
await hass.async_block_till_done()
await async_wait_recording_done(hass)
states = await hass.async_add_executor_job(get_significant_states, hass, now)
states = await hass.async_add_executor_job(
get_significant_states, hass, now, None, hass.states.async_entity_ids()
)
assert len(states) >= 1
for entity_states in states.values():
for state in entity_states:

View file

@ -34,7 +34,9 @@ async def test_exclude_attributes(recorder_mock: Recorder, hass: HomeAssistant)
await hass.async_block_till_done()
await async_wait_recording_done(hass)
states = await hass.async_add_executor_job(get_significant_states, hass, now)
states = await hass.async_add_executor_job(
get_significant_states, hass, now, None, hass.states.async_entity_ids()
)
assert len(states) >= 1
for entity_states in states.values():
for state in entity_states:

View file

@ -33,7 +33,9 @@ async def test_exclude_attributes(recorder_mock: Recorder, hass: HomeAssistant)
await hass.async_block_till_done()
await async_wait_recording_done(hass)
states = await hass.async_add_executor_job(get_significant_states, hass, now)
states = await hass.async_add_executor_job(
get_significant_states, hass, now, None, hass.states.async_entity_ids()
)
assert len(states) >= 1
for entity_states in states.values():
for state in entity_states:

View file

@ -27,7 +27,9 @@ async def test_exclude_attributes(recorder_mock: Recorder, hass: HomeAssistant)
await hass.async_block_till_done()
await async_wait_recording_done(hass)
states = await hass.async_add_executor_job(get_significant_states, hass, now)
states = await hass.async_add_executor_job(
get_significant_states, hass, now, None, hass.states.async_entity_ids()
)
assert len(states) > 1
for entity_states in states.values():
for state in entity_states:

View file

@ -59,7 +59,7 @@ ALL_DOMAIN_EXCLUDE_ATTRS = {ATTR_ATTRIBUTION, ATTR_RESTORED, ATTR_SUPPORTED_FEAT
# pylint: disable=invalid-name
Base = declarative_base()
SCHEMA_VERSION = 30
SCHEMA_VERSION = 32
_LOGGER = logging.getLogger(__name__)
@ -253,7 +253,8 @@ class Events(Base): # type: ignore[misc,valid-type]
event_type=event.event_type,
event_data=None,
origin_idx=EVENT_ORIGIN_TO_IDX.get(event.origin),
time_fired=event.time_fired,
time_fired=None,
time_fired_ts=dt_util.utc_to_timestamp(event.time_fired),
context_id=event.context.id,
context_user_id=event.context.user_id,
context_parent_id=event.context.parent_id,
@ -268,12 +269,12 @@ class Events(Base): # type: ignore[misc,valid-type]
)
try:
return Event(
self.event_type,
self.event_type or "",
json_loads(self.event_data) if self.event_data else {},
EventOrigin(self.origin)
if self.origin
else EVENT_ORIGIN_ORDER[self.origin_idx],
process_timestamp(self.time_fired),
else EVENT_ORIGIN_ORDER[self.origin_idx or 0],
dt_util.utc_from_timestamp(self.time_fired_ts or 0),
context=context,
)
except JSON_DECODE_EXCEPTIONS:
@ -419,21 +420,22 @@ class States(Base): # type: ignore[misc,valid-type]
context_user_id=event.context.user_id,
context_parent_id=event.context.parent_id,
origin_idx=EVENT_ORIGIN_TO_IDX.get(event.origin),
last_updated=None,
last_changed=None,
)
# None state means the state was removed from the state machine
if state is None:
dbstate.state = ""
dbstate.last_updated = event.time_fired
dbstate.last_changed = None
dbstate.last_updated_ts = dt_util.utc_to_timestamp(event.time_fired)
dbstate.last_changed_ts = None
return dbstate
dbstate.state = state.state
dbstate.last_updated = state.last_updated
dbstate.last_updated_ts = dt_util.utc_to_timestamp(state.last_updated)
if state.last_updated == state.last_changed:
dbstate.last_changed = None
dbstate.last_changed_ts = None
else:
dbstate.last_changed = state.last_changed
dbstate.last_changed_ts = dt_util.utc_to_timestamp(state.last_changed)
return dbstate
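The schema change above stores times as epoch floats (time_fired_ts, last_updated_ts, last_changed_ts) instead of datetimes. A stdlib sketch of the round-trip; dt_util's utc_to_timestamp/utc_from_timestamp behave the same way for UTC datetimes:

# Epoch-float round-trip backing the new *_ts columns (stdlib
# stand-ins for the dt_util helpers).
from datetime import datetime, timezone

now = datetime.now(timezone.utc).replace(microsecond=0)
ts = now.timestamp()                                    # cf. utc_to_timestamp
restored = datetime.fromtimestamp(ts, tz=timezone.utc)  # cf. utc_from_timestamp
assert restored == now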
@ -450,14 +452,16 @@ class States(Base): # type: ignore[misc,valid-type]
# When json_loads fails
_LOGGER.exception("Error converting row to state: %s", self)
return None
if self.last_changed is None or self.last_changed == self.last_updated:
last_changed = last_updated = process_timestamp(self.last_updated)
if self.last_changed_ts is None or self.last_changed_ts == self.last_updated_ts:
last_changed = last_updated = dt_util.utc_from_timestamp(
self.last_updated_ts or 0
)
else:
last_updated = process_timestamp(self.last_updated)
last_changed = process_timestamp(self.last_changed)
last_updated = dt_util.utc_from_timestamp(self.last_updated_ts or 0)
last_changed = dt_util.utc_from_timestamp(self.last_changed_ts or 0)
return State(
self.entity_id,
self.state,
self.entity_id or "",
self.state, # type: ignore[arg-type]
# Join the state_attributes table on attributes_id to get the attributes
# for newer states
attrs,

View file

@ -60,7 +60,9 @@ def test_rename_entity_without_collision(
hass.block_till_done()
zero, four, states = record_states(hass)
hist = history.get_significant_states(hass, zero, four)
hist = history.get_significant_states(
hass, zero, four, list(set(states) | {"sensor.test99", "sensor.test1"})
)
assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
@ -71,13 +73,20 @@ def test_rename_entity_without_collision(
hass.add_job(rename_entry)
wait_recording_done(hass)
hist = history.get_significant_states(hass, zero, four)
hist = history.get_significant_states(
hass, zero, four, list(set(states) | {"sensor.test99", "sensor.test1"})
)
states["sensor.test99"] = states.pop("sensor.test1")
assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
hass.states.set("sensor.test99", "post_migrate")
wait_recording_done(hass)
new_hist = history.get_significant_states(hass, zero, dt_util.utcnow())
new_hist = history.get_significant_states(
hass,
zero,
dt_util.utcnow(),
list(set(states) | {"sensor.test99", "sensor.test1"}),
)
assert not new_hist.get("sensor.test1")
assert new_hist["sensor.test99"][-1].state == "post_migrate"
@ -207,7 +216,9 @@ def test_rename_entity_collision(
hass.block_till_done()
zero, four, states = record_states(hass)
hist = history.get_significant_states(hass, zero, four)
hist = history.get_significant_states(
hass, zero, four, list(set(states) | {"sensor.test99", "sensor.test1"})
)
assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
assert len(hist["sensor.test1"]) == 3
@ -225,7 +236,9 @@ def test_rename_entity_collision(
wait_recording_done(hass)
# History is not migrated on collision
hist = history.get_significant_states(hass, zero, four)
hist = history.get_significant_states(
hass, zero, four, list(set(states) | {"sensor.test99", "sensor.test1"})
)
assert len(hist["sensor.test1"]) == 3
assert len(hist["sensor.test99"]) == 2
@ -234,7 +247,12 @@ def test_rename_entity_collision(
hass.states.set("sensor.test99", "post_migrate")
wait_recording_done(hass)
new_hist = history.get_significant_states(hass, zero, dt_util.utcnow())
new_hist = history.get_significant_states(
hass,
zero,
dt_util.utcnow(),
list(set(states) | {"sensor.test99", "sensor.test1"}),
)
assert new_hist["sensor.test99"][-1].state == "post_migrate"
assert len(hist["sensor.test99"]) == 2

View file

@ -21,6 +21,7 @@ from homeassistant.components.recorder.db_schema import (
States,
StatesMeta,
)
from homeassistant.components.recorder.filters import Filters
from homeassistant.components.recorder.history import legacy
from homeassistant.components.recorder.models import LazyState, process_timestamp
from homeassistant.components.recorder.models.legacy import LazyStatePreSchema31
@ -68,7 +69,6 @@ async def _async_get_states(
utc_point_in_time,
entity_ids,
run,
None,
no_attributes,
)
]
@ -463,7 +463,7 @@ def test_get_significant_states(hass_recorder: Callable[..., HomeAssistant]) ->
"""
hass = hass_recorder()
zero, four, states = record_states(hass)
hist = history.get_significant_states(hass, zero, four)
hist = history.get_significant_states(hass, zero, four, entity_ids=list(states))
assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
@ -481,7 +481,9 @@ def test_get_significant_states_minimal_response(
"""
hass = hass_recorder()
zero, four, states = record_states(hass)
hist = history.get_significant_states(hass, zero, four, minimal_response=True)
hist = history.get_significant_states(
hass, zero, four, minimal_response=True, entity_ids=list(states)
)
entites_with_reducable_states = [
"media_player.test",
"media_player.test3",
@ -556,10 +558,7 @@ def test_get_significant_states_with_initial(
state.last_changed = one_and_half
hist = history.get_significant_states(
hass,
one_and_half,
four,
include_start_time_state=True,
hass, one_and_half, four, include_start_time_state=True, entity_ids=list(states)
)
assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
@ -593,6 +592,7 @@ def test_get_significant_states_without_initial(
one_and_half,
four,
include_start_time_state=False,
entity_ids=list(states),
)
assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
@ -698,7 +698,12 @@ def test_get_significant_states_only(
# everything is different
states.append(set_state("412", attributes={"attribute": 54.23}))
hist = history.get_significant_states(hass, start, significant_changes_only=True)
hist = history.get_significant_states(
hass,
start,
significant_changes_only=True,
entity_ids=list({state.entity_id for state in states}),
)
assert len(hist[entity_id]) == 2
assert not any(
@ -711,7 +716,12 @@ def test_get_significant_states_only(
state.last_updated == states[2].last_updated for state in hist[entity_id]
)
hist = history.get_significant_states(hass, start, significant_changes_only=False)
hist = history.get_significant_states(
hass,
start,
significant_changes_only=False,
entity_ids=list({state.entity_id for state in states}),
)
assert len(hist[entity_id]) == 3
assert_multiple_states_equal_without_context_and_last_changed(
@ -737,7 +747,11 @@ async def test_get_significant_states_only_minimal_response(
await async_wait_recording_done(hass)
hist = history.get_significant_states(
hass, now, minimal_response=True, significant_changes_only=False
hass,
now,
minimal_response=True,
significant_changes_only=False,
entity_ids=["sensor.test"],
)
assert len(hist["sensor.test"]) == 3
@ -1113,18 +1127,10 @@ def test_state_changes_during_period_multiple_entities_single_test(
wait_recording_done(hass)
end = dt_util.utcnow()
hist = history.state_changes_during_period(hass, start, end, None)
for entity_id, value in test_entites.items():
hist[entity_id][0].state == value
for entity_id, value in test_entites.items():
hist = history.state_changes_during_period(hass, start, end, entity_id)
assert len(hist) == 1
hist[entity_id][0].state == value
hist = history.state_changes_during_period(hass, start, end, None)
for entity_id, value in test_entites.items():
hist[entity_id][0].state == value
assert hist[entity_id][0].state == value
@pytest.mark.freeze_time("2039-01-19 03:14:07.555555-00:00")
@ -1161,3 +1167,63 @@ async def test_get_full_significant_states_past_year_2038(
assert_states_equal_without_context(sensor_one_states[1], state1)
assert sensor_one_states[0].last_changed == past_2038_time
assert sensor_one_states[0].last_updated == past_2038_time
def test_get_significant_states_without_entity_ids_raises(
hass_recorder: Callable[..., HomeAssistant]
) -> None:
"""Test at least one entity id is required for get_significant_states."""
hass = hass_recorder()
now = dt_util.utcnow()
with pytest.raises(ValueError, match="entity_ids must be provided"):
history.get_significant_states(hass, now, None)
def test_state_changes_during_period_without_entity_ids_raises(
hass_recorder: Callable[..., HomeAssistant]
) -> None:
"""Test at least one entity id is required for state_changes_during_period."""
hass = hass_recorder()
now = dt_util.utcnow()
with pytest.raises(ValueError, match="entity_id must be provided"):
history.state_changes_during_period(hass, now, None)
def test_get_significant_states_with_filters_raises(
hass_recorder: Callable[..., HomeAssistant]
) -> None:
"""Test passing filters is no longer supported."""
hass = hass_recorder()
now = dt_util.utcnow()
with pytest.raises(NotImplementedError, match="Filters are no longer supported"):
history.get_significant_states(
hass, now, None, ["media_player.test"], Filters()
)
def test_get_significant_states_with_non_existent_entity_ids_returns_empty(
hass_recorder: Callable[..., HomeAssistant]
) -> None:
"""Test get_significant_states returns an empty dict when entities not in the db."""
hass = hass_recorder()
now = dt_util.utcnow()
assert history.get_significant_states(hass, now, None, ["nonexistent.entity"]) == {}
def test_state_changes_during_period_with_non_existent_entity_ids_returns_empty(
hass_recorder: Callable[..., HomeAssistant]
) -> None:
"""Test state_changes_during_period returns an empty dict when entities not in the db."""
hass = hass_recorder()
now = dt_util.utcnow()
assert (
history.state_changes_during_period(hass, now, None, "nonexistent.entity") == {}
)
def test_get_last_state_changes_with_non_existent_entity_ids_returns_empty(
hass_recorder: Callable[..., HomeAssistant]
) -> None:
"""Test get_last_state_changes returns an empty dict when entities not in the db."""
hass = hass_recorder()
assert history.get_last_state_changes(hass, 1, "nonexistent.entity") == {}
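For callers migrating to the new contract, the fix is to resolve any include/exclude logic to concrete entity ids before querying. A hedged sketch; my_filter is a hypothetical predicate standing in for whatever filtering the caller previously delegated to Filters:

# Hypothetical migration sketch: resolve filtering to concrete ids
# first, then query; passing a Filters object now raises
# NotImplementedError, and passing no ids raises ValueError.
entity_ids = [
    entity_id
    for entity_id in hass.states.async_entity_ids()
    if my_filter(entity_id)  # hypothetical predicate
]
hist = history.get_significant_states(hass, start, end, entity_ids)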

View file

@ -17,6 +17,7 @@ from sqlalchemy.orm import Session
from homeassistant.components import recorder
from homeassistant.components.recorder import core, history, statistics
from homeassistant.components.recorder.filters import Filters
from homeassistant.components.recorder.models import process_timestamp
from homeassistant.components.recorder.util import session_scope
from homeassistant.core import HomeAssistant, State
@ -75,6 +76,8 @@ def db_schema_30():
core, "Events", old_db_schema.Events
), patch.object(
core, "StateAttributes", old_db_schema.StateAttributes
), patch.object(
core, "EntityIDMigrationTask", core.RecorderTask
), patch(
CREATE_ENGINE_TARGET, new=_create_engine_test
):
@ -357,7 +360,7 @@ def test_get_significant_states(hass_recorder: Callable[..., HomeAssistant]) ->
instance = recorder.get_instance(hass)
with patch.object(instance.states_meta_manager, "active", False):
zero, four, states = record_states(hass)
hist = history.get_significant_states(hass, zero, four)
hist = history.get_significant_states(hass, zero, four, entity_ids=list(states))
assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
@ -376,7 +379,9 @@ def test_get_significant_states_minimal_response(
instance = recorder.get_instance(hass)
with patch.object(instance.states_meta_manager, "active", False):
zero, four, states = record_states(hass)
hist = history.get_significant_states(hass, zero, four, minimal_response=True)
hist = history.get_significant_states(
hass, zero, four, minimal_response=True, entity_ids=list(states)
)
entites_with_reducable_states = [
"media_player.test",
"media_player.test3",
@ -460,6 +465,7 @@ def test_get_significant_states_with_initial(
one_and_half,
four,
include_start_time_state=True,
entity_ids=list(states),
)
assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
@ -495,6 +501,7 @@ def test_get_significant_states_without_initial(
one_and_half,
four,
include_start_time_state=False,
entity_ids=list(states),
)
assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
@ -613,7 +620,10 @@ def test_get_significant_states_only(
states.append(set_state("412", attributes={"attribute": 54.23}))
hist = history.get_significant_states(
hass, start, significant_changes_only=True
hass,
start,
significant_changes_only=True,
entity_ids=list({state.entity_id for state in states}),
)
assert len(hist[entity_id]) == 2
@ -628,7 +638,10 @@ def test_get_significant_states_only(
)
hist = history.get_significant_states(
hass, start, significant_changes_only=False
hass,
start,
significant_changes_only=False,
entity_ids=list({state.entity_id for state in states}),
)
assert len(hist[entity_id]) == 3
@ -741,15 +754,67 @@ def test_state_changes_during_period_multiple_entities_single_test(
wait_recording_done(hass)
end = dt_util.utcnow()
hist = history.state_changes_during_period(hass, start, end, None)
for entity_id, value in test_entites.items():
hist[entity_id][0].state == value
for entity_id, value in test_entites.items():
hist = history.state_changes_during_period(hass, start, end, entity_id)
assert len(hist) == 1
hist[entity_id][0].state == value
assert hist[entity_id][0].state == value
hist = history.state_changes_during_period(hass, start, end, None)
for entity_id, value in test_entites.items():
hist[entity_id][0].state == value
def test_get_significant_states_without_entity_ids_raises(
hass_recorder: Callable[..., HomeAssistant]
) -> None:
"""Test at least one entity id is required for get_significant_states."""
hass = hass_recorder()
now = dt_util.utcnow()
with pytest.raises(ValueError, match="entity_ids must be provided"):
history.get_significant_states(hass, now, None)
def test_state_changes_during_period_without_entity_ids_raises(
hass_recorder: Callable[..., HomeAssistant]
) -> None:
"""Test at least one entity id is required for state_changes_during_period."""
hass = hass_recorder()
now = dt_util.utcnow()
with pytest.raises(ValueError, match="entity_id must be provided"):
history.state_changes_during_period(hass, now, None)
def test_get_significant_states_with_filters_raises(
hass_recorder: Callable[..., HomeAssistant]
) -> None:
"""Test passing filters is no longer supported."""
hass = hass_recorder()
now = dt_util.utcnow()
with pytest.raises(NotImplementedError, match="Filters are no longer supported"):
history.get_significant_states(
hass, now, None, ["media_player.test"], Filters()
)
def test_get_significant_states_with_non_existent_entity_ids_returns_empty(
hass_recorder: Callable[..., HomeAssistant]
) -> None:
"""Test get_significant_states returns an empty dict when entities not in the db."""
hass = hass_recorder()
now = dt_util.utcnow()
assert history.get_significant_states(hass, now, None, ["nonexistent.entity"]) == {}
def test_state_changes_during_period_with_non_existent_entity_ids_returns_empty(
hass_recorder: Callable[..., HomeAssistant]
) -> None:
"""Test state_changes_during_period returns an empty dict when entities not in the db."""
hass = hass_recorder()
now = dt_util.utcnow()
assert (
history.state_changes_during_period(hass, now, None, "nonexistent.entity") == {}
)
def test_get_last_state_changes_with_non_existent_entity_ids_returns_empty(
hass_recorder: Callable[..., HomeAssistant]
) -> None:
"""Test get_last_state_changes returns an empty dict when entities not in the db."""
hass = hass_recorder()
assert history.get_last_state_changes(hass, 1, "nonexistent.entity") == {}

View file

@ -0,0 +1,811 @@
"""The tests the History component."""
from __future__ import annotations
from collections.abc import Callable
# pylint: disable=invalid-name
from copy import copy
from datetime import datetime, timedelta
import importlib
import json
import sys
from unittest.mock import patch, sentinel
import pytest
from sqlalchemy import create_engine
from sqlalchemy.orm import Session
from homeassistant.components import recorder
from homeassistant.components.recorder import core, history, statistics
from homeassistant.components.recorder.filters import Filters
from homeassistant.components.recorder.models import process_timestamp
from homeassistant.components.recorder.util import session_scope
from homeassistant.core import HomeAssistant, State
from homeassistant.helpers.json import JSONEncoder
import homeassistant.util.dt as dt_util
from .common import (
assert_dict_of_states_equal_without_context_and_last_changed,
assert_multiple_states_equal_without_context,
assert_multiple_states_equal_without_context_and_last_changed,
assert_states_equal_without_context,
wait_recording_done,
)
CREATE_ENGINE_TARGET = "homeassistant.components.recorder.core.create_engine"
SCHEMA_MODULE = "tests.components.recorder.db_schema_32"
def _create_engine_test(*args, **kwargs):
"""Test version of create_engine that initializes with old schema.
This simulates an existing db with the old schema.
"""
importlib.import_module(SCHEMA_MODULE)
old_db_schema = sys.modules[SCHEMA_MODULE]
engine = create_engine(*args, **kwargs)
old_db_schema.Base.metadata.create_all(engine)
with Session(engine) as session:
session.add(
recorder.db_schema.StatisticsRuns(start=statistics.get_start_time())
)
session.add(
recorder.db_schema.SchemaChanges(
schema_version=old_db_schema.SCHEMA_VERSION
)
)
session.commit()
return engine
@pytest.fixture(autouse=True)
def db_schema_32():
"""Fixture to initialize the db with the old schema."""
importlib.import_module(SCHEMA_MODULE)
old_db_schema = sys.modules[SCHEMA_MODULE]
with patch.object(recorder, "db_schema", old_db_schema), patch.object(
recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION
), patch.object(core, "StatesMeta", old_db_schema.StatesMeta), patch.object(
core, "EventTypes", old_db_schema.EventTypes
), patch.object(
core, "EventData", old_db_schema.EventData
), patch.object(
core, "States", old_db_schema.States
), patch.object(
core, "Events", old_db_schema.Events
), patch.object(
core, "StateAttributes", old_db_schema.StateAttributes
), patch.object(
core, "EntityIDMigrationTask", core.RecorderTask
), patch(
CREATE_ENGINE_TARGET, new=_create_engine_test
):
yield
def test_get_full_significant_states_with_session_entity_no_matches(
hass_recorder: Callable[..., HomeAssistant]
) -> None:
"""Test getting states at a specific point in time for entities that never have been recorded."""
hass = hass_recorder()
now = dt_util.utcnow()
time_before_recorder_ran = now - timedelta(days=1000)
instance = recorder.get_instance(hass)
with session_scope(hass=hass) as session, patch.object(
instance.states_meta_manager, "active", False
):
assert (
history.get_full_significant_states_with_session(
hass, session, time_before_recorder_ran, now, entity_ids=["demo.id"]
)
== {}
)
assert (
history.get_full_significant_states_with_session(
hass,
session,
time_before_recorder_ran,
now,
entity_ids=["demo.id", "demo.id2"],
)
== {}
)
def test_significant_states_with_session_entity_minimal_response_no_matches(
hass_recorder: Callable[..., HomeAssistant],
) -> None:
"""Test getting states at a specific point in time for entities that never have been recorded."""
hass = hass_recorder()
now = dt_util.utcnow()
time_before_recorder_ran = now - timedelta(days=1000)
instance = recorder.get_instance(hass)
with session_scope(hass=hass) as session, patch.object(
instance.states_meta_manager, "active", False
):
assert (
history.get_significant_states_with_session(
hass,
session,
time_before_recorder_ran,
now,
entity_ids=["demo.id"],
minimal_response=True,
)
== {}
)
assert (
history.get_significant_states_with_session(
hass,
session,
time_before_recorder_ran,
now,
entity_ids=["demo.id", "demo.id2"],
minimal_response=True,
)
== {}
)
@pytest.mark.parametrize(
("attributes", "no_attributes", "limit"),
[
({"attr": True}, False, 5000),
({}, True, 5000),
({"attr": True}, False, 3),
({}, True, 3),
],
)
def test_state_changes_during_period(
hass_recorder: Callable[..., HomeAssistant], attributes, no_attributes, limit
) -> None:
"""Test state change during period."""
hass = hass_recorder()
entity_id = "media_player.test"
instance = recorder.get_instance(hass)
with patch.object(instance.states_meta_manager, "active", False):
def set_state(state):
"""Set the state."""
hass.states.set(entity_id, state, attributes)
wait_recording_done(hass)
return hass.states.get(entity_id)
start = dt_util.utcnow()
point = start + timedelta(seconds=1)
end = point + timedelta(seconds=1)
with patch(
"homeassistant.components.recorder.core.dt_util.utcnow", return_value=start
):
set_state("idle")
set_state("YouTube")
with patch(
"homeassistant.components.recorder.core.dt_util.utcnow", return_value=point
):
states = [
set_state("idle"),
set_state("Netflix"),
set_state("Plex"),
set_state("YouTube"),
]
with patch(
"homeassistant.components.recorder.core.dt_util.utcnow", return_value=end
):
set_state("Netflix")
set_state("Plex")
hist = history.state_changes_during_period(
hass, start, end, entity_id, no_attributes, limit=limit
)
assert_multiple_states_equal_without_context(states[:limit], hist[entity_id])
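The parametrization above also exercises limit; condensed, the guarantee is that at most limit states come back for the entity within the window. A sketch under the same fixtures:

# Sketch (same fixtures as above): limit caps the number of returned
# states per entity.
hist = history.state_changes_during_period(
    hass, start, end, entity_id, no_attributes=True, limit=3
)
assert len(hist[entity_id]) <= 3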
def test_state_changes_during_period_descending(
hass_recorder: Callable[..., HomeAssistant]
) -> None:
"""Test state change during period descending."""
hass = hass_recorder()
entity_id = "media_player.test"
instance = recorder.get_instance(hass)
with patch.object(instance.states_meta_manager, "active", False):
def set_state(state):
"""Set the state."""
hass.states.set(entity_id, state, {"any": 1})
wait_recording_done(hass)
return hass.states.get(entity_id)
start = dt_util.utcnow()
point = start + timedelta(seconds=1)
point2 = start + timedelta(seconds=1, microseconds=2)
point3 = start + timedelta(seconds=1, microseconds=3)
point4 = start + timedelta(seconds=1, microseconds=4)
end = point + timedelta(seconds=1)
with patch(
"homeassistant.components.recorder.core.dt_util.utcnow", return_value=start
):
set_state("idle")
set_state("YouTube")
with patch(
"homeassistant.components.recorder.core.dt_util.utcnow", return_value=point
):
states = [set_state("idle")]
with patch(
"homeassistant.components.recorder.core.dt_util.utcnow", return_value=point2
):
states.append(set_state("Netflix"))
with patch(
"homeassistant.components.recorder.core.dt_util.utcnow", return_value=point3
):
states.append(set_state("Plex"))
with patch(
"homeassistant.components.recorder.core.dt_util.utcnow", return_value=point4
):
states.append(set_state("YouTube"))
with patch(
"homeassistant.components.recorder.core.dt_util.utcnow", return_value=end
):
set_state("Netflix")
set_state("Plex")
hist = history.state_changes_during_period(
hass, start, end, entity_id, no_attributes=False, descending=False
)
assert_multiple_states_equal_without_context(states, hist[entity_id])
hist = history.state_changes_during_period(
hass, start, end, entity_id, no_attributes=False, descending=True
)
assert_multiple_states_equal_without_context(
states, list(reversed(list(hist[entity_id])))
)
def test_get_last_state_changes(hass_recorder: Callable[..., HomeAssistant]) -> None:
"""Test number of state changes."""
hass = hass_recorder()
entity_id = "sensor.test"
instance = recorder.get_instance(hass)
with patch.object(instance.states_meta_manager, "active", False):
def set_state(state):
"""Set the state."""
hass.states.set(entity_id, state)
wait_recording_done(hass)
return hass.states.get(entity_id)
start = dt_util.utcnow() - timedelta(minutes=2)
point = start + timedelta(minutes=1)
point2 = point + timedelta(minutes=1, seconds=1)
with patch(
"homeassistant.components.recorder.core.dt_util.utcnow", return_value=start
):
set_state("1")
states = []
with patch(
"homeassistant.components.recorder.core.dt_util.utcnow", return_value=point
):
states.append(set_state("2"))
with patch(
"homeassistant.components.recorder.core.dt_util.utcnow", return_value=point2
):
states.append(set_state("3"))
hist = history.get_last_state_changes(hass, 2, entity_id)
assert_multiple_states_equal_without_context(states, hist[entity_id])
def test_ensure_state_can_be_copied(
hass_recorder: Callable[..., HomeAssistant]
) -> None:
"""Ensure a state can pass though copy().
The filter integration uses copy() on states
from history.
"""
hass = hass_recorder()
entity_id = "sensor.test"
instance = recorder.get_instance(hass)
with patch.object(instance.states_meta_manager, "active", False):
def set_state(state):
"""Set the state."""
hass.states.set(entity_id, state)
wait_recording_done(hass)
return hass.states.get(entity_id)
start = dt_util.utcnow() - timedelta(minutes=2)
point = start + timedelta(minutes=1)
with patch(
"homeassistant.components.recorder.core.dt_util.utcnow", return_value=start
):
set_state("1")
with patch(
"homeassistant.components.recorder.core.dt_util.utcnow", return_value=point
):
set_state("2")
hist = history.get_last_state_changes(hass, 2, entity_id)
assert_states_equal_without_context(
copy(hist[entity_id][0]), hist[entity_id][0]
)
assert_states_equal_without_context(
copy(hist[entity_id][1]), hist[entity_id][1]
)
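Stated outside the recorder fixtures, the invariant this test protects is small; a minimal sketch, assuming State equality compares entity id, state, and attributes:

# History results must survive copy.copy(), because downstream
# integrations (e.g. filter) duplicate the State objects they get.
from copy import copy

from homeassistant.core import State

original = State("sensor.test", "42")
assert copy(original) == original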
def test_get_significant_states(hass_recorder: Callable[..., HomeAssistant]) -> None:
"""Test that only significant states are returned.
We should get back every thermostat change that
includes an attribute change, but only the state updates for
media player (attribute changes are not significant and not returned).
"""
hass = hass_recorder()
instance = recorder.get_instance(hass)
with patch.object(instance.states_meta_manager, "active", False):
zero, four, states = record_states(hass)
hist = history.get_significant_states(hass, zero, four, entity_ids=list(states))
assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
def test_get_significant_states_minimal_response(
hass_recorder: Callable[..., HomeAssistant]
) -> None:
"""Test that only significant states are returned.
When minimal responses is set only the first and
last states return a complete state.
We should get back every thermostat change that
includes an attribute change, but only the state updates for
media player (attribute changes are not significant and not returned).
"""
hass = hass_recorder()
instance = recorder.get_instance(hass)
with patch.object(instance.states_meta_manager, "active", False):
zero, four, states = record_states(hass)
hist = history.get_significant_states(
hass, zero, four, minimal_response=True, entity_ids=list(states)
)
entities_with_reducible_states = [
"media_player.test",
"media_player.test3",
]
# All states for media_player.test are reduced down to last_changed
# and state when minimal_response is set, except for the first state.
# We use JSONEncoder to make sure that our pre-encoded last_changed
# is always the same as what will happen when encoding a native state.
for entity_id in entities_with_reducible_states:
entity_states = states[entity_id]
for state_idx in range(1, len(entity_states)):
input_state = entity_states[state_idx]
orig_last_changed = json.dumps(
process_timestamp(input_state.last_changed),
cls=JSONEncoder,
).replace('"', "")
orig_state = input_state.state
entity_states[state_idx] = {
"last_changed": orig_last_changed,
"state": orig_state,
}
assert len(hist) == len(states)
assert_states_equal_without_context(
states["media_player.test"][0], hist["media_player.test"][0]
)
assert states["media_player.test"][1] == hist["media_player.test"][1]
assert states["media_player.test"][2] == hist["media_player.test"][2]
assert_multiple_states_equal_without_context(
states["media_player.test2"], hist["media_player.test2"]
)
assert_states_equal_without_context(
states["media_player.test3"][0], hist["media_player.test3"][0]
)
assert states["media_player.test3"][1] == hist["media_player.test3"][1]
assert_multiple_states_equal_without_context(
states["script.can_cancel_this_one"], hist["script.can_cancel_this_one"]
)
assert_multiple_states_equal_without_context_and_last_changed(
states["thermostat.test"], hist["thermostat.test"]
)
assert_multiple_states_equal_without_context_and_last_changed(
states["thermostat.test2"], hist["thermostat.test2"]
)
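

# Illustrative sketch (the helper is ours, not part of the suite) of the
# minimal_response contract asserted above: the first state is a full State
# object, while every later state is reduced to a dict holding only
# "last_changed" and "state".
def _example_minimal_response(hass, start, end, entity_id):
    hist = history.get_significant_states(
        hass, start, end, minimal_response=True, entity_ids=[entity_id]
    )
    first, *rest = hist[entity_id]
    return first, [(item["last_changed"], item["state"]) for item in rest]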


@pytest.mark.parametrize("time_zone", ["Europe/Berlin", "US/Hawaii", "UTC"])
def test_get_significant_states_with_initial(
    time_zone, hass_recorder: Callable[..., HomeAssistant]
) -> None:
    """Test that only significant states are returned.

    We should get back every thermostat change that
    includes an attribute change, but only the state updates for
    media player (attribute changes are not significant and not returned).
    """
hass = hass_recorder()
hass.config.set_time_zone(time_zone)
zero, four, states = record_states(hass)
one = zero + timedelta(seconds=1)
one_and_half = zero + timedelta(seconds=1.5)
for entity_id in states:
if entity_id == "media_player.test":
states[entity_id] = states[entity_id][1:]
for state in states[entity_id]:
if state.last_changed == one:
state.last_changed = one_and_half
hist = history.get_significant_states(
hass, one_and_half, four, include_start_time_state=True, entity_ids=list(states)
)
assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
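

# Hedged sketch of the include_start_time_state flag used above: when True,
# the result also contains the state each entity held at the window start,
# anchoring the timeline even if nothing changed inside the window. The
# helper name is illustrative only.
def _example_window_with_initial(hass, start, end, entity_ids):
    return history.get_significant_states(
        hass, start, end, include_start_time_state=True, entity_ids=entity_ids
    )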


def test_get_significant_states_without_initial(
    hass_recorder: Callable[..., HomeAssistant]
) -> None:
    """Test that only significant states are returned.

    We should get back every thermostat change that
    includes an attribute change, but only the state updates for
    media player (attribute changes are not significant and not returned).
    """
hass = hass_recorder()
instance = recorder.get_instance(hass)
with patch.object(instance.states_meta_manager, "active", False):
zero, four, states = record_states(hass)
one = zero + timedelta(seconds=1)
one_with_microsecond = zero + timedelta(seconds=1, microseconds=1)
one_and_half = zero + timedelta(seconds=1.5)
for entity_id in states:
states[entity_id] = list(
filter(
lambda s: s.last_changed != one
and s.last_changed != one_with_microsecond,
states[entity_id],
)
)
del states["media_player.test2"]
hist = history.get_significant_states(
hass,
one_and_half,
four,
include_start_time_state=False,
entity_ids=list(states),
)
assert_dict_of_states_equal_without_context_and_last_changed(states, hist)


def test_get_significant_states_entity_id(
    hass_recorder: Callable[..., HomeAssistant]
) -> None:
    """Test that only significant states are returned for one entity."""
hass = hass_recorder()
instance = recorder.get_instance(hass)
with patch.object(instance.states_meta_manager, "active", False):
zero, four, states = record_states(hass)
del states["media_player.test2"]
del states["media_player.test3"]
del states["thermostat.test"]
del states["thermostat.test2"]
del states["script.can_cancel_this_one"]
hist = history.get_significant_states(hass, zero, four, ["media_player.test"])
assert_dict_of_states_equal_without_context_and_last_changed(states, hist)


def test_get_significant_states_multiple_entity_ids(
    hass_recorder: Callable[..., HomeAssistant]
) -> None:
    """Test that only significant states are returned for multiple entities."""
hass = hass_recorder()
instance = recorder.get_instance(hass)
with patch.object(instance.states_meta_manager, "active", False):
zero, four, states = record_states(hass)
del states["media_player.test2"]
del states["media_player.test3"]
del states["thermostat.test2"]
del states["script.can_cancel_this_one"]
hist = history.get_significant_states(
hass,
zero,
four,
["media_player.test", "thermostat.test"],
)
assert_multiple_states_equal_without_context_and_last_changed(
states["media_player.test"], hist["media_player.test"]
)
assert_multiple_states_equal_without_context_and_last_changed(
states["thermostat.test"], hist["thermostat.test"]
)


def test_get_significant_states_are_ordered(
    hass_recorder: Callable[..., HomeAssistant]
) -> None:
    """Test order of results from get_significant_states.

    When entity ids are given, the results should be returned with the data
    in the same order.
    """
hass = hass_recorder()
instance = recorder.get_instance(hass)
with patch.object(instance.states_meta_manager, "active", False):
zero, four, _states = record_states(hass)
entity_ids = ["media_player.test", "media_player.test2"]
hist = history.get_significant_states(hass, zero, four, entity_ids)
assert list(hist.keys()) == entity_ids
entity_ids = ["media_player.test2", "media_player.test"]
hist = history.get_significant_states(hass, zero, four, entity_ids)
assert list(hist.keys()) == entity_ids
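

# Sketch (illustrative only) of the ordering guarantee the test above pins
# down: the returned mapping is keyed in the same order as the requested ids.
def _example_result_order(hass, start, end):
    requested = ["media_player.test2", "media_player.test"]
    hist = history.get_significant_states(hass, start, end, requested)
    assert list(hist) == requested
    return hist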


def test_get_significant_states_only(
    hass_recorder: Callable[..., HomeAssistant]
) -> None:
    """Test significant states when significant_changes_only is set."""
hass = hass_recorder()
entity_id = "sensor.test"
instance = recorder.get_instance(hass)
with patch.object(instance.states_meta_manager, "active", False):

        def set_state(state, **kwargs):
"""Set the state."""
hass.states.set(entity_id, state, **kwargs)
wait_recording_done(hass)
return hass.states.get(entity_id)

        start = dt_util.utcnow() - timedelta(minutes=4)
points = []
for i in range(1, 4):
points.append(start + timedelta(minutes=i))
states = []
with patch(
"homeassistant.components.recorder.core.dt_util.utcnow", return_value=start
):
set_state("123", attributes={"attribute": 10.64})
with patch(
"homeassistant.components.recorder.core.dt_util.utcnow",
return_value=points[0],
):
# Attributes are different, state not
states.append(set_state("123", attributes={"attribute": 21.42}))
with patch(
"homeassistant.components.recorder.core.dt_util.utcnow",
return_value=points[1],
):
# state is different, attributes not
states.append(set_state("32", attributes={"attribute": 21.42}))
with patch(
"homeassistant.components.recorder.core.dt_util.utcnow",
return_value=points[2],
):
# everything is different
states.append(set_state("412", attributes={"attribute": 54.23}))
hist = history.get_significant_states(
hass,
start,
significant_changes_only=True,
entity_ids=list({state.entity_id for state in states}),
)
assert len(hist[entity_id]) == 2
assert not any(
state.last_updated == states[0].last_updated for state in hist[entity_id]
)
assert any(
state.last_updated == states[1].last_updated for state in hist[entity_id]
)
assert any(
state.last_updated == states[2].last_updated for state in hist[entity_id]
)
hist = history.get_significant_states(
hass,
start,
significant_changes_only=False,
entity_ids=list({state.entity_id for state in states}),
)
assert len(hist[entity_id]) == 3
assert_multiple_states_equal_without_context_and_last_changed(
states, hist[entity_id]
)
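

# Illustrative sketch of the significant_changes_only semantics exercised
# above: attribute-only updates of a plain sensor are dropped when the flag
# is True and kept when it is False, so the filtered result is never larger.
def _example_significant_only(hass, start, entity_id):
    reduced = history.get_significant_states(
        hass, start, significant_changes_only=True, entity_ids=[entity_id]
    )
    full = history.get_significant_states(
        hass, start, significant_changes_only=False, entity_ids=[entity_id]
    )
    assert len(reduced[entity_id]) <= len(full[entity_id])
    return reduced, full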


def record_states(hass) -> tuple[datetime, datetime, dict[str, list[State]]]:
    """Record some test states.

    We inject a bunch of state updates from media player, zone,
    thermostat and script entities.
    """
mp = "media_player.test"
mp2 = "media_player.test2"
mp3 = "media_player.test3"
therm = "thermostat.test"
therm2 = "thermostat.test2"
zone = "zone.home"
script_c = "script.can_cancel_this_one"

    def set_state(entity_id, state, **kwargs):
"""Set the state."""
hass.states.set(entity_id, state, **kwargs)
wait_recording_done(hass)
return hass.states.get(entity_id)

    zero = dt_util.utcnow()
one = zero + timedelta(seconds=1)
two = one + timedelta(seconds=1)
three = two + timedelta(seconds=1)
four = three + timedelta(seconds=1)
states = {therm: [], therm2: [], mp: [], mp2: [], mp3: [], script_c: []}
with patch(
"homeassistant.components.recorder.core.dt_util.utcnow", return_value=one
):
states[mp].append(
set_state(mp, "idle", attributes={"media_title": str(sentinel.mt1)})
)
states[mp2].append(
set_state(mp2, "YouTube", attributes={"media_title": str(sentinel.mt2)})
)
states[mp3].append(
set_state(mp3, "idle", attributes={"media_title": str(sentinel.mt1)})
)
states[therm].append(
set_state(therm, 20, attributes={"current_temperature": 19.5})
)
with patch(
"homeassistant.components.recorder.core.dt_util.utcnow",
return_value=one + timedelta(microseconds=1),
):
states[mp].append(
set_state(mp, "YouTube", attributes={"media_title": str(sentinel.mt2)})
)
with patch(
"homeassistant.components.recorder.core.dt_util.utcnow", return_value=two
):
        # This state will be skipped since it is only different in time
        set_state(mp, "YouTube", attributes={"media_title": str(sentinel.mt3)})
        # This state will be skipped because the domain is excluded
        set_state(zone, "zoning")
states[script_c].append(
set_state(script_c, "off", attributes={"can_cancel": True})
)
states[therm].append(
set_state(therm, 21, attributes={"current_temperature": 19.8})
)
states[therm2].append(
set_state(therm2, 20, attributes={"current_temperature": 19})
)
with patch(
"homeassistant.components.recorder.core.dt_util.utcnow", return_value=three
):
states[mp].append(
set_state(mp, "Netflix", attributes={"media_title": str(sentinel.mt4)})
)
states[mp3].append(
set_state(mp3, "Netflix", attributes={"media_title": str(sentinel.mt3)})
)
# Attributes changed even though state is the same
states[therm].append(
set_state(therm, 21, attributes={"current_temperature": 20})
)
return zero, four, states
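

# Example use of the record_states helper above (illustrative only): the
# returned window bounds and recorded states line up with what
# get_significant_states yields for those entities.
def _example_record_and_fetch(hass):
    zero, four, states = record_states(hass)
    hist = history.get_significant_states(hass, zero, four, entity_ids=list(states))
    assert set(hist) <= set(states)
    return hist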


def test_state_changes_during_period_multiple_entities_single_test(
    hass_recorder: Callable[..., HomeAssistant]
) -> None:
    """Test state changes during a period with multiple entities in the same test.

    This test ensures the sqlalchemy query cache does not
    generate incorrect results.
    """
hass = hass_recorder()
instance = recorder.get_instance(hass)
with patch.object(instance.states_meta_manager, "active", False):
start = dt_util.utcnow()
        test_entities = {f"sensor.{i}": str(i) for i in range(30)}
        for entity_id, value in test_entities.items():
hass.states.set(entity_id, value)
wait_recording_done(hass)
end = dt_util.utcnow()
        for entity_id, value in test_entities.items():
hist = history.state_changes_during_period(hass, start, end, entity_id)
assert len(hist) == 1
assert hist[entity_id][0].state == value


def test_get_significant_states_without_entity_ids_raises(
    hass_recorder: Callable[..., HomeAssistant]
) -> None:
    """Test at least one entity id is required for get_significant_states."""
hass = hass_recorder()
now = dt_util.utcnow()
with pytest.raises(ValueError, match="entity_ids must be provided"):
history.get_significant_states(hass, now, None)


def test_state_changes_during_period_without_entity_ids_raises(
    hass_recorder: Callable[..., HomeAssistant]
) -> None:
    """Test at least one entity id is required for state_changes_during_period."""
hass = hass_recorder()
now = dt_util.utcnow()
with pytest.raises(ValueError, match="entity_id must be provided"):
history.state_changes_during_period(hass, now, None)


def test_get_significant_states_with_filters_raises(
    hass_recorder: Callable[..., HomeAssistant]
) -> None:
    """Test passing filters is no longer supported."""
hass = hass_recorder()
now = dt_util.utcnow()
with pytest.raises(NotImplementedError, match="Filters are no longer supported"):
history.get_significant_states(
hass, now, None, ["media_player.test"], Filters()
)


def test_get_significant_states_with_non_existent_entity_ids_returns_empty(
    hass_recorder: Callable[..., HomeAssistant]
) -> None:
    """Test get_significant_states returns an empty dict when entities not in the db."""
hass = hass_recorder()
now = dt_util.utcnow()
assert history.get_significant_states(hass, now, None, ["nonexistent.entity"]) == {}


def test_state_changes_during_period_with_non_existent_entity_ids_returns_empty(
    hass_recorder: Callable[..., HomeAssistant]
) -> None:
    """Test state_changes_during_period returns an empty dict when entities not in the db."""
hass = hass_recorder()
now = dt_util.utcnow()
assert (
history.state_changes_during_period(hass, now, None, "nonexistent.entity") == {}
)


def test_get_last_state_changes_with_non_existent_entity_ids_returns_empty(
    hass_recorder: Callable[..., HomeAssistant]
) -> None:
    """Test get_last_state_changes returns an empty dict when entities not in the db."""
hass = hass_recorder()
assert history.get_last_state_changes(hass, 1, "nonexistent.entity") == {}
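

# Sketch summarizing the stricter contract pinned down by the tests above
# (illustrative only): omitting entity ids raises, passing Filters raises,
# and unknown entities simply produce an empty result.
def _example_strict_contract(hass, now):
    with pytest.raises(ValueError, match="entity_ids must be provided"):
        history.get_significant_states(hass, now, None)
    with pytest.raises(NotImplementedError, match="Filters are no longer supported"):
        history.get_significant_states(
            hass, now, None, ["media_player.test"], Filters()
        )
    return history.get_significant_states(hass, now, None, ["nonexistent.entity"])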

View file

@@ -2065,7 +2065,11 @@ async def test_purge_entities_keep_days(
await async_recorder_block_till_done(hass)
states = await instance.async_add_executor_job(
get_significant_states, hass, one_month_ago
get_significant_states,
hass,
one_month_ago,
None,
["sensor.keep", "sensor.purge"],
)
assert len(states["sensor.keep"]) == 2
assert len(states["sensor.purge"]) == 3
@@ -2082,7 +2086,11 @@ async def test_purge_entities_keep_days(
await async_wait_purge_done(hass)
states = await instance.async_add_executor_job(
get_significant_states, hass, one_month_ago
get_significant_states,
hass,
one_month_ago,
None,
["sensor.keep", "sensor.purge"],
)
assert len(states["sensor.keep"]) == 2
assert len(states["sensor.purge"]) == 1
@@ -2098,7 +2106,11 @@ async def test_purge_entities_keep_days(
await async_wait_purge_done(hass)
states = await instance.async_add_executor_job(
get_significant_states, hass, one_month_ago
get_significant_states,
hass,
one_month_ago,
None,
["sensor.keep", "sensor.purge"],
)
assert len(states["sensor.keep"]) == 2
assert "sensor.purge" not in states
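
Each hunk in this file makes the same migration; a hedged before/after sketch,
with the entity ids taken from the test itself:

    # before: every recorded entity was fetched implicitly
    states = await instance.async_add_executor_job(
        get_significant_states, hass, one_month_ago
    )

    # after: the end time stays None and the entity ids are passed explicitly
    states = await instance.async_add_executor_job(
        get_significant_states, hass, one_month_ago, None, ["sensor.keep", "sensor.purge"]
    )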

View file

@@ -68,7 +68,7 @@ def test_compile_hourly_statistics(hass_recorder: Callable[..., HomeAssistant])
instance = recorder.get_instance(hass)
setup_component(hass, "sensor", {})
zero, four, states = record_states(hass)
hist = history.get_significant_states(hass, zero, four)
hist = history.get_significant_states(hass, zero, four, list(states))
assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
# Should not fail if there is nothing there yet
@@ -329,7 +329,7 @@ def test_rename_entity(hass_recorder: Callable[..., HomeAssistant]) -> None:
hass.block_till_done()
zero, four, states = record_states(hass)
hist = history.get_significant_states(hass, zero, four)
hist = history.get_significant_states(hass, zero, four, list(states))
assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
for kwargs in ({}, {"statistic_ids": ["sensor.test1"]}):
@@ -418,7 +418,7 @@ def test_rename_entity_collision(
hass.block_till_done()
zero, four, states = record_states(hass)
hist = history.get_significant_states(hass, zero, four)
hist = history.get_significant_states(hass, zero, four, list(states))
assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
for kwargs in ({}, {"statistic_ids": ["sensor.test1"]}):
@@ -485,7 +485,7 @@ def test_statistics_duplicated(
hass = hass_recorder()
setup_component(hass, "sensor", {})
zero, four, states = record_states(hass)
hist = history.get_significant_states(hass, zero, four)
hist = history.get_significant_states(hass, zero, four, list(states))
assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
wait_recording_done(hass)
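
Where a test already holds the states returned by record_states, these hunks
simply pass the recorded entities, as in the updated calls above:

    hist = history.get_significant_states(hass, zero, four, list(states))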

View file

@@ -54,7 +54,9 @@ async def test_exclude_attributes(
await hass.async_block_till_done()
await async_wait_recording_done(hass)
states = await hass.async_add_executor_job(get_significant_states, hass, now)
states = await hass.async_add_executor_job(
get_significant_states, hass, now, None, hass.states.async_entity_ids()
)
assert len(states) >= 1
for entity_states in states.values():
for state in entity_states:
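
The same pattern repeats in each test_exclude_attributes hunk that follows:
get_significant_states performs blocking database I/O, so it runs via the
executor, and hass.states.async_entity_ids() now satisfies the required
entity_ids argument. A minimal sketch, assuming the same get_significant_states
import these tests use:

    async def _fetch_all_history(hass, now):
        # run the blocking history query in the executor, requesting history
        # for every entity currently known to the state machine
        return await hass.async_add_executor_job(
            get_significant_states, hass, now, None, hass.states.async_entity_ids()
        )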

View file

@@ -66,7 +66,9 @@ async def test_exclude_attributes(
await async_wait_recording_done(hass)
assert len(calls) == 1
states = await hass.async_add_executor_job(get_significant_states, hass, now)
states = await hass.async_add_executor_job(
get_significant_states, hass, now, None, hass.states.async_entity_ids()
)
assert len(states) >= 1
for entity_states in states.values():
for state in entity_states:

View file

@@ -27,7 +27,9 @@ async def test_exclude_attributes(recorder_mock: Recorder, hass: HomeAssistant)
await hass.async_block_till_done()
await async_wait_recording_done(hass)
states = await hass.async_add_executor_job(get_significant_states, hass, now)
states = await hass.async_add_executor_job(
get_significant_states, hass, now, None, hass.states.async_entity_ids()
)
assert len(states) >= 1
for entity_states in states.values():
for state in entity_states:

View file

@@ -156,7 +156,9 @@ def test_compile_hourly_statistics(
"unit_of_measurement": state_unit,
}
four, states = record_states(hass, zero, "sensor.test1", attributes)
hist = history.get_significant_states(hass, zero, four)
hist = history.get_significant_states(
hass, zero, four, hass.states.async_entity_ids()
)
assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
do_adhoc_statistics(hass, start=zero)
@@ -274,7 +276,9 @@ def test_compile_hourly_statistics_with_some_same_last_updated(
set_state(entity_id, str(seq[3]), attributes=attributes)
)
hist = history.get_significant_states(hass, zero, four)
hist = history.get_significant_states(
hass, zero, four, hass.states.async_entity_ids()
)
assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
do_adhoc_statistics(hass, start=zero)
@@ -383,7 +387,9 @@ def test_compile_hourly_statistics_with_all_same_last_updated(
set_state(entity_id, str(seq[3]), attributes=attributes)
)
hist = history.get_significant_states(hass, zero, four)
hist = history.get_significant_states(
hass, zero, four, hass.states.async_entity_ids()
)
assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
do_adhoc_statistics(hass, start=zero)
@@ -490,7 +496,9 @@ def test_compile_hourly_statistics_only_state_is_and_end_of_period(
set_state(entity_id, str(seq[3]), attributes=attributes)
)
hist = history.get_significant_states(hass, zero, four)
hist = history.get_significant_states(
hass, zero, four, hass.states.async_entity_ids()
)
assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
do_adhoc_statistics(hass, start=zero)
@@ -552,7 +560,9 @@ def test_compile_hourly_statistics_purged_state_changes(
"unit_of_measurement": state_unit,
}
four, states = record_states(hass, zero, "sensor.test1", attributes)
hist = history.get_significant_states(hass, zero, four)
hist = history.get_significant_states(
hass, zero, four, hass.states.async_entity_ids()
)
assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
mean = min = max = float(hist["sensor.test1"][-1].state)
@@ -564,7 +574,9 @@ def test_compile_hourly_statistics_purged_state_changes(
hass.services.call("recorder", "purge", {"keep_days": 0})
hass.block_till_done()
wait_recording_done(hass)
hist = history.get_significant_states(hass, zero, four)
hist = history.get_significant_states(
hass, zero, four, hass.states.async_entity_ids()
)
assert not hist
do_adhoc_statistics(hass, start=zero)
@@ -637,7 +649,9 @@ def test_compile_hourly_statistics_wrong_unit(
_, _states = record_states(hass, zero, "sensor.test7", attributes_tmp)
states = {**states, **_states}
hist = history.get_significant_states(hass, zero, four)
hist = history.get_significant_states(
hass, zero, four, hass.states.async_entity_ids()
)
assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
do_adhoc_statistics(hass, start=zero)
@@ -836,7 +850,10 @@ async def test_compile_hourly_sum_statistics_amount(
)
await async_wait_recording_done(hass)
hist = history.get_significant_states(
hass, period0 - timedelta.resolution, eight + timedelta.resolution
hass,
period0 - timedelta.resolution,
eight + timedelta.resolution,
hass.states.async_entity_ids(),
)
assert_multiple_states_equal_without_context_and_last_changed(
dict(states)["sensor.test1"], dict(hist)["sensor.test1"]
@@ -1038,6 +1055,7 @@ def test_compile_hourly_sum_statistics_amount_reset_every_state_change(
hass,
zero - timedelta.resolution,
two + timedelta.resolution,
hass.states.async_entity_ids(),
significant_changes_only=False,
)
assert_multiple_states_equal_without_context_and_last_changed(
@@ -1145,6 +1163,7 @@ def test_compile_hourly_sum_statistics_amount_invalid_last_reset(
hass,
zero - timedelta.resolution,
one + timedelta.resolution,
hass.states.async_entity_ids(),
significant_changes_only=False,
)
assert_multiple_states_equal_without_context_and_last_changed(
@@ -1238,6 +1257,7 @@ def test_compile_hourly_sum_statistics_nan_inf_state(
hass,
zero - timedelta.resolution,
one + timedelta.resolution,
hass.states.async_entity_ids(),
significant_changes_only=False,
)
assert_multiple_states_equal_without_context_and_last_changed(
@@ -1379,6 +1399,7 @@ def test_compile_hourly_sum_statistics_negative_state(
hass,
zero - timedelta.resolution,
one + timedelta.resolution,
hass.states.async_entity_ids(),
significant_changes_only=False,
)
assert_multiple_states_equal_without_context_and_last_changed(
@@ -1470,7 +1491,10 @@ def test_compile_hourly_sum_statistics_total_no_reset(
)
wait_recording_done(hass)
hist = history.get_significant_states(
hass, period0 - timedelta.resolution, eight + timedelta.resolution
hass,
period0 - timedelta.resolution,
eight + timedelta.resolution,
hass.states.async_entity_ids(),
)
assert_multiple_states_equal_without_context_and_last_changed(
dict(states)["sensor.test1"], dict(hist)["sensor.test1"]
@@ -1579,7 +1603,10 @@ def test_compile_hourly_sum_statistics_total_increasing(
)
wait_recording_done(hass)
hist = history.get_significant_states(
hass, period0 - timedelta.resolution, eight + timedelta.resolution
hass,
period0 - timedelta.resolution,
eight + timedelta.resolution,
hass.states.async_entity_ids(),
)
assert_multiple_states_equal_without_context_and_last_changed(
dict(states)["sensor.test1"], dict(hist)["sensor.test1"]
@@ -1686,7 +1713,10 @@ def test_compile_hourly_sum_statistics_total_increasing_small_dip(
)
wait_recording_done(hass)
hist = history.get_significant_states(
hass, period0 - timedelta.resolution, eight + timedelta.resolution
hass,
period0 - timedelta.resolution,
eight + timedelta.resolution,
hass.states.async_entity_ids(),
)
assert_multiple_states_equal_without_context_and_last_changed(
dict(states)["sensor.test1"], dict(hist)["sensor.test1"]
@@ -1795,7 +1825,10 @@ def test_compile_hourly_energy_statistics_unsupported(
wait_recording_done(hass)
hist = history.get_significant_states(
hass, period0 - timedelta.resolution, eight + timedelta.resolution
hass,
period0 - timedelta.resolution,
eight + timedelta.resolution,
hass.states.async_entity_ids(),
)
assert_multiple_states_equal_without_context_and_last_changed(
dict(states)["sensor.test1"], dict(hist)["sensor.test1"]
@@ -1889,7 +1922,10 @@ def test_compile_hourly_energy_statistics_multiple(
states = {**states, **_states}
wait_recording_done(hass)
hist = history.get_significant_states(
hass, period0 - timedelta.resolution, eight + timedelta.resolution
hass,
period0 - timedelta.resolution,
eight + timedelta.resolution,
hass.states.async_entity_ids(),
)
assert_multiple_states_equal_without_context_and_last_changed(
dict(states)["sensor.test1"], dict(hist)["sensor.test1"]
@@ -2078,7 +2114,9 @@ def test_compile_hourly_statistics_unchanged(
"unit_of_measurement": state_unit,
}
four, states = record_states(hass, zero, "sensor.test1", attributes)
hist = history.get_significant_states(hass, zero, four)
hist = history.get_significant_states(
hass, zero, four, hass.states.async_entity_ids()
)
assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
do_adhoc_statistics(hass, start=four)
@@ -2112,7 +2150,9 @@ def test_compile_hourly_statistics_partially_unavailable(
four, states = record_states_partially_unavailable(
hass, zero, "sensor.test1", TEMPERATURE_SENSOR_ATTRIBUTES
)
hist = history.get_significant_states(hass, zero, four)
hist = history.get_significant_states(
hass, zero, four, hass.states.async_entity_ids()
)
assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
do_adhoc_statistics(hass, start=zero)
@@ -2185,7 +2225,9 @@ def test_compile_hourly_statistics_unavailable(
)
_, _states = record_states(hass, zero, "sensor.test2", attributes)
states = {**states, **_states}
hist = history.get_significant_states(hass, zero, four)
hist = history.get_significant_states(
hass, zero, four, hass.states.async_entity_ids()
)
assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
do_adhoc_statistics(hass, start=four)
@@ -2407,7 +2449,9 @@ def test_compile_hourly_statistics_changing_units_1(
hass, zero + timedelta(minutes=10), "sensor.test1", attributes
)
states["sensor.test1"] += _states["sensor.test1"]
hist = history.get_significant_states(hass, zero, four)
hist = history.get_significant_states(
hass, zero, four, hass.states.async_entity_ids()
)
assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
do_adhoc_statistics(hass, start=zero)
@@ -2526,7 +2570,9 @@ def test_compile_hourly_statistics_changing_units_2(
hass, zero + timedelta(minutes=5), "sensor.test1", attributes
)
states["sensor.test1"] += _states["sensor.test1"]
hist = history.get_significant_states(hass, zero, four)
hist = history.get_significant_states(
hass, zero, four, hass.states.async_entity_ids()
)
assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
do_adhoc_statistics(hass, start=zero + timedelta(seconds=30 * 5))
@@ -2603,7 +2649,9 @@ def test_compile_hourly_statistics_changing_units_3(
hass, zero + timedelta(minutes=10), "sensor.test1", attributes
)
states["sensor.test1"] += _states["sensor.test1"]
hist = history.get_significant_states(hass, zero, four)
hist = history.get_significant_states(
hass, zero, four, hass.states.async_entity_ids()
)
assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
do_adhoc_statistics(hass, start=zero)
@@ -2751,7 +2799,9 @@ def test_compile_hourly_statistics_convert_units_1(
hass, zero + timedelta(minutes=10), "sensor.test1", attributes
)
states["sensor.test1"] += _states["sensor.test1"]
hist = history.get_significant_states(hass, zero, four)
hist = history.get_significant_states(
hass, zero, four, hass.states.async_entity_ids()
)
assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
do_adhoc_statistics(hass, start=zero + timedelta(minutes=10))
wait_recording_done(hass)
@@ -2853,7 +2903,9 @@ def test_compile_hourly_statistics_equivalent_units_1(
hass, zero + timedelta(minutes=10), "sensor.test1", attributes
)
states["sensor.test1"] += _states["sensor.test1"]
hist = history.get_significant_states(hass, zero, four)
hist = history.get_significant_states(
hass, zero, four, hass.states.async_entity_ids()
)
assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
do_adhoc_statistics(hass, start=zero)
@@ -2967,7 +3019,9 @@ def test_compile_hourly_statistics_equivalent_units_2(
hass, zero + timedelta(minutes=5), "sensor.test1", attributes
)
states["sensor.test1"] += _states["sensor.test1"]
hist = history.get_significant_states(hass, zero, four)
hist = history.get_significant_states(
hass, zero, four, hass.states.async_entity_ids()
)
assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
do_adhoc_statistics(hass, start=zero + timedelta(seconds=30 * 5))
@@ -3093,7 +3147,9 @@ def test_compile_hourly_statistics_changing_device_class_1(
hass, zero + timedelta(minutes=10), "sensor.test1", attributes
)
states["sensor.test1"] += _states["sensor.test1"]
hist = history.get_significant_states(hass, zero, four)
hist = history.get_significant_states(
hass, zero, four, hass.states.async_entity_ids()
)
assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
# Run statistics again, additional statistics is generated
@@ -3148,7 +3204,9 @@ def test_compile_hourly_statistics_changing_device_class_1(
hass, zero + timedelta(minutes=20), "sensor.test1", attributes
)
states["sensor.test1"] += _states["sensor.test1"]
hist = history.get_significant_states(hass, zero, four)
hist = history.get_significant_states(
hass, zero, four, hass.states.async_entity_ids()
)
assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
# Run statistics again, additional statistics is generated
@@ -3293,7 +3351,9 @@ def test_compile_hourly_statistics_changing_device_class_2(
hass, zero + timedelta(minutes=10), "sensor.test1", attributes
)
states["sensor.test1"] += _states["sensor.test1"]
hist = history.get_significant_states(hass, zero, four)
hist = history.get_significant_states(
hass, zero, four, hass.states.async_entity_ids()
)
assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
# Run statistics again, additional statistics is generated
@@ -3418,7 +3478,9 @@ def test_compile_hourly_statistics_changing_state_class(
# Add more states, with changed state class
four, _states = record_states(hass, period1, "sensor.test1", attributes_2)
states["sensor.test1"] += _states["sensor.test1"]
hist = history.get_significant_states(hass, period0, four)
hist = history.get_significant_states(
hass, period0, four, hass.states.async_entity_ids()
)
assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
do_adhoc_statistics(hass, start=period1)
@@ -3605,7 +3667,11 @@ def test_compile_statistics_hourly_daily_monthly_summary(
start += timedelta(minutes=5)
hist = history.get_significant_states(
hass, zero - timedelta.resolution, four, significant_changes_only=False
hass,
zero - timedelta.resolution,
four,
hass.states.async_entity_ids(),
significant_changes_only=False,
)
assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
wait_recording_done(hass)
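
All of the sensor statistics hunks above make the same mechanical change; the
resulting call shape, sketched with the names these tests use (including the
timedelta.resolution window padding), is:

    hist = history.get_significant_states(
        hass,
        zero - timedelta.resolution,
        four,
        hass.states.async_entity_ids(),
        significant_changes_only=False,
    )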

View file

@@ -27,7 +27,9 @@ async def test_exclude_attributes(recorder_mock: Recorder, hass: HomeAssistant)
await hass.async_block_till_done()
await async_wait_recording_done(hass)
states = await hass.async_add_executor_job(get_significant_states, hass, now)
states = await hass.async_add_executor_job(
get_significant_states, hass, now, None, hass.states.async_entity_ids()
)
assert len(states) >= 1
for entity_states in states.values():
for state in entity_states:

View file

@@ -35,7 +35,9 @@ async def test_exclude_attributes(recorder_mock: Recorder, hass: HomeAssistant)
await hass.async_block_till_done()
await async_wait_recording_done(hass)
states = await hass.async_add_executor_job(get_significant_states, hass, now)
states = await hass.async_add_executor_job(
get_significant_states, hass, now, None, hass.states.async_entity_ids()
)
assert len(states) >= 1
for entity_states in states.values():
for state in entity_states:

View file

@@ -25,7 +25,9 @@ async def test_exclude_attributes(recorder_mock: Recorder, hass: HomeAssistant)
await hass.async_block_till_done()
await async_wait_recording_done(hass)
states = await hass.async_add_executor_job(get_significant_states, hass, now)
states = await hass.async_add_executor_job(
get_significant_states, hass, now, None, hass.states.async_entity_ids()
)
assert len(states) >= 1
for entity_states in states.values():
for state in entity_states:

View file

@@ -69,7 +69,9 @@ async def test_exclude_attributes(
assert state.attributes[ATTR_EVENT_SCORE] == 100
await async_wait_recording_done(hass)
states = await hass.async_add_executor_job(get_significant_states, hass, now)
states = await hass.async_add_executor_job(
get_significant_states, hass, now, None, hass.states.async_entity_ids()
)
assert len(states) >= 1
for entity_states in states.values():
for state in entity_states:

View file

@@ -42,7 +42,9 @@ async def test_exclude_attributes(
await hass.async_block_till_done()
await async_wait_recording_done(hass)
states = await hass.async_add_executor_job(get_significant_states, hass, now)
states = await hass.async_add_executor_job(
get_significant_states, hass, now, None, hass.states.async_entity_ids()
)
assert len(states) >= 1
for entity_states in states.values():
for state in entity_states:

View file

@@ -27,7 +27,9 @@ async def test_exclude_attributes(recorder_mock: Recorder, hass: HomeAssistant)
await hass.async_block_till_done()
await async_wait_recording_done(hass)
states = await hass.async_add_executor_job(get_significant_states, hass, now)
states = await hass.async_add_executor_job(
get_significant_states, hass, now, None, hass.states.async_entity_ids()
)
assert len(states) >= 1
for entity_states in states.values():
for state in entity_states:

View file

@@ -31,7 +31,9 @@ async def test_exclude_attributes(recorder_mock: Recorder, hass: HomeAssistant)
await hass.async_block_till_done()
await async_wait_recording_done(hass)
states = await hass.async_add_executor_job(get_significant_states, hass, now)
states = await hass.async_add_executor_job(
get_significant_states, hass, now, None, hass.states.async_entity_ids()
)
assert len(states) >= 1
for entity_states in states.values():
for state in entity_states:

View file

@@ -30,7 +30,9 @@ async def test_exclude_attributes(recorder_mock: Recorder, hass: HomeAssistant)
await hass.async_block_till_done()
await async_wait_recording_done(hass)
states = await hass.async_add_executor_job(get_significant_states, hass, now)
states = await hass.async_add_executor_job(
get_significant_states, hass, now, None, hass.states.async_entity_ids()
)
assert len(states) >= 1
for entity_states in states.values():
for state in entity_states: