Retain history when renaming an entity_id (#89963)
Co-authored-by: Paulus Schoutsen <balloob@gmail.com>
This commit is contained in:
parent affb48d271
commit c94b054d75
9 changed files with 478 additions and 130 deletions
@@ -20,7 +20,7 @@ from homeassistant.helpers.integration_platform import (
from homeassistant.helpers.typing import ConfigType
from homeassistant.loader import bind_hass

from . import statistics, websocket_api
from . import entity_registry, websocket_api
from .const import (  # noqa: F401
    CONF_DB_INTEGRITY_CHECK,
    DATA_INSTANCE,

@@ -163,8 +163,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    instance.async_register()
    instance.start()
    async_register_services(hass, instance)
    statistics.async_setup(hass)
    websocket_api.async_setup(hass)
    entity_registry.async_setup(hass)
    await async_process_integration_platforms(hass, DOMAIN, _process_recorder_platform)

    return await instance.async_db_ready
@@ -109,6 +109,7 @@ from .tasks import (
    StatisticsTask,
    StopTask,
    SynchronizeTask,
    UpdateStatesMetadataTask,
    UpdateStatisticsMetadataTask,
    WaitTask,
)

@@ -548,6 +549,15 @@ class Recorder(threading.Thread):
            )
        )

    @callback
    def async_update_states_metadata(
        self,
        entity_id: str,
        new_entity_id: str,
    ) -> None:
        """Update states metadata for an entity_id."""
        self.queue_task(UpdateStatesMetadataTask(entity_id, new_entity_id))

    @callback
    def async_change_statistics_unit(
        self,

@@ -970,8 +980,26 @@
    def _process_state_changed_event_into_session(self, event: Event) -> None:
        """Process a state_changed event into the session."""
        state_attributes_manager = self.state_attributes_manager
        states_meta_manager = self.states_meta_manager
        entity_removed = not event.data.get("new_state")
        entity_id = event.data["entity_id"]

        dbstate = States.from_event(event)
        if (entity_id := dbstate.entity_id) is None or not (

        states_manager = self.states_manager
        if old_state := states_manager.pop_pending(entity_id):
            dbstate.old_state = old_state
        elif old_state_id := states_manager.pop_committed(entity_id):
            dbstate.old_state_id = old_state_id
        if entity_removed:
            dbstate.state = None
        else:
            states_manager.add_pending(entity_id, dbstate)

        if states_meta_manager.active:
            dbstate.entity_id = None

        if entity_id is None or not (
            shared_attrs_bytes := state_attributes_manager.serialize_from_event(event)
        ):
            return

@@ -979,11 +1007,16 @@
        assert self.event_session is not None
        session = self.event_session
        # Map the entity_id to the StatesMeta table
        states_meta_manager = self.states_meta_manager
        if pending_states_meta := states_meta_manager.get_pending(entity_id):
            dbstate.states_meta_rel = pending_states_meta
        elif metadata_id := states_meta_manager.get(entity_id, session, True):
            dbstate.metadata_id = metadata_id
        elif states_meta_manager.active and entity_removed:
            # If the entity was removed, we don't need to add it to the
            # StatesMeta table or record it in the pending commit
            # if it does not have a metadata_id allocated to it as
            # it either never existed or was just renamed.
            return
        else:
            states_meta = StatesMeta(entity_id=entity_id)
            states_meta_manager.add_pending(states_meta)

@@ -1015,19 +1048,6 @@
            session.add(dbstate_attributes)
        dbstate.state_attributes = dbstate_attributes

        states_manager = self.states_manager
        if old_state := states_manager.pop_pending(entity_id):
            dbstate.old_state = old_state
        elif old_state_id := states_manager.pop_committed(entity_id):
            dbstate.old_state_id = old_state_id
        if event.data.get("new_state"):
            states_manager.add_pending(entity_id, dbstate)
        else:
            dbstate.state = None

        if states_meta_manager.active:
            dbstate.entity_id = None

        session.add(dbstate)

    def _handle_database_error(self, err: Exception) -> bool:
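The old-state lookup in the hunks above splits tracking between states that are still pending in the current database session and states that have already been committed, for which only the row id is kept. The following is a minimal, self-contained sketch of that pattern; it is illustrative only and is not the recorder's actual StatesManager implementation, and the class and method names below are assumptions.

class PendingCommittedStates:
    """Track the latest state per entity_id, split by commit status (illustrative)."""

    def __init__(self) -> None:
        self._pending: dict[str, object] = {}   # entity_id -> uncommitted row object
        self._committed: dict[str, int] = {}    # entity_id -> committed row id

    def add_pending(self, entity_id: str, state_row: object) -> None:
        self._pending[entity_id] = state_row

    def pop_pending(self, entity_id: str) -> object | None:
        return self._pending.pop(entity_id, None)

    def pop_committed(self, entity_id: str) -> int | None:
        return self._committed.pop(entity_id, None)

    def post_commit(self, row_ids: dict[str, int]) -> None:
        # After the session commits, pending objects are replaced by row ids.
        self._committed.update(row_ids)
        self._pending.clear()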
homeassistant/components/recorder/entity_registry.py (new file, 71 additions)
@@ -0,0 +1,71 @@
"""Recorder entity registry helper."""
import logging

from homeassistant.core import Event, HomeAssistant, callback
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.start import async_at_start

from .core import Recorder
from .util import get_instance, session_scope

_LOGGER = logging.getLogger(__name__)


@callback
def async_setup(hass: HomeAssistant) -> None:
    """Set up the entity hooks."""

    @callback
    def _async_entity_id_changed(event: Event) -> None:
        instance = get_instance(hass)
        old_entity_id: str = event.data["old_entity_id"]
        new_entity_id: str = event.data["entity_id"]
        instance.async_update_statistics_metadata(
            old_entity_id, new_statistic_id=new_entity_id
        )
        instance.async_update_states_metadata(
            old_entity_id, new_entity_id=new_entity_id
        )

    @callback
    def entity_registry_changed_filter(event: Event) -> bool:
        """Handle entity_id changed filter."""
        return event.data["action"] == "update" and "old_entity_id" in event.data

    @callback
    def _setup_entity_registry_event_handler(hass: HomeAssistant) -> None:
        """Subscribe to event registry events."""
        hass.bus.async_listen(
            er.EVENT_ENTITY_REGISTRY_UPDATED,
            _async_entity_id_changed,
            event_filter=entity_registry_changed_filter,
            run_immediately=True,
        )

    async_at_start(hass, _setup_entity_registry_event_handler)


def update_states_metadata(
    instance: Recorder,
    entity_id: str,
    new_entity_id: str,
) -> None:
    """Update the states metadata table when an entity is renamed."""
    states_meta_manager = instance.states_meta_manager
    if not states_meta_manager.active:
        _LOGGER.warning(
            "Cannot rename entity_id `%s` to `%s` "
            "because the states meta manager is not yet active",
            entity_id,
            new_entity_id,
        )
        return

    with session_scope(session=instance.get_session()) as session:
        if not states_meta_manager.update_metadata(session, entity_id, new_entity_id):
            _LOGGER.warning(
                "Cannot migrate history for entity_id `%s` to `%s` "
                "because the new entity_id is already in use",
                entity_id,
                new_entity_id,
            )
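The listener in the new module above only fires for registry updates that actually rename an entity, thanks to the event filter. The short sketch below exercises the same predicate against hand-built payloads; it is not part of the commit, and the dictionaries are illustrative assumptions rather than captured Home Assistant events.

def _passes_rename_filter(data: dict) -> bool:
    # Same predicate as entity_registry_changed_filter above.
    return data["action"] == "update" and "old_entity_id" in data

# A rename carries both the new entity_id and the old one.
rename = {
    "action": "update",
    "entity_id": "sensor.outdoor_temperature",
    "old_entity_id": "sensor.test1",
}
# An ordinary option update has no old_entity_id and is ignored.
option_update = {"action": "update", "entity_id": "sensor.test1"}
# Create/remove actions are ignored as well.
created = {"action": "create", "entity_id": "sensor.test1"}

assert _passes_rename_filter(rename)
assert not _passes_rename_filter(option_update)
assert not _passes_rename_filter(created)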
@@ -26,11 +26,9 @@ from sqlalchemy.sql.lambdas import StatementLambdaElement
import voluptuous as vol

from homeassistant.const import ATTR_UNIT_OF_MEASUREMENT
from homeassistant.core import Event, HomeAssistant, callback, valid_entity_id
from homeassistant.core import HomeAssistant, callback, valid_entity_id
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.json import JSONEncoder
from homeassistant.helpers.start import async_at_start
from homeassistant.helpers.storage import STORAGE_DIR
from homeassistant.helpers.typing import UNDEFINED, UndefinedType
from homeassistant.util import dt as dt_util

@@ -326,35 +324,6 @@ class ValidationIssue:
        return dataclasses.asdict(self)


def async_setup(hass: HomeAssistant) -> None:
    """Set up the history hooks."""

    @callback
    def _async_entity_id_changed(event: Event) -> None:
        get_instance(hass).async_update_statistics_metadata(
            event.data["old_entity_id"], new_statistic_id=event.data["entity_id"]
        )

    @callback
    def entity_registry_changed_filter(event: Event) -> bool:
        """Handle entity_id changed filter."""
        if event.data["action"] != "update" or "old_entity_id" not in event.data:
            return False

        return True

    @callback
    def setup_entity_registry_event_handler(hass: HomeAssistant) -> None:
        """Subscribe to event registry events."""
        hass.bus.async_listen(
            er.EVENT_ENTITY_REGISTRY_UPDATED,
            _async_entity_id_changed,
            event_filter=entity_registry_changed_filter,
        )

    async_at_start(hass, setup_entity_registry_event_handler)


def get_start_time() -> datetime:
    """Return start time."""
    now = dt_util.utcnow()
@@ -144,3 +144,20 @@ class StatesMetaManager(BaseLRUTableManager[StatesMeta]):
        """
        for entity_id in entity_ids:
            self._id_map.pop(entity_id, None)

    def update_metadata(
        self,
        session: Session,
        entity_id: str,
        new_entity_id: str,
    ) -> bool:
        """Update states metadata for an entity_id."""
        if self.get(new_entity_id, session, True) is not None:
            # If the new entity id already exists we have
            # a collision and should not update.
            return False
        session.query(StatesMeta).filter(StatesMeta.entity_id == entity_id).update(
            {StatesMeta.entity_id: new_entity_id}
        )
        self._id_map.pop(entity_id, None)
        return True
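update_metadata above boils down to a collision check followed by one UPDATE on the states_meta table; because every States row points at its StatesMeta row by metadata_id, rewriting the entity_id in that single row is what carries the history over to the new name. The standalone sketch below expresses the same logic with SQLAlchemy 2.0-style statements. It is illustrative and not the commit's code: the function name is made up, and the session handling is simplified.

from sqlalchemy import select, update
from sqlalchemy.orm import Session

from homeassistant.components.recorder.db_schema import StatesMeta


def rename_states_meta(session: Session, entity_id: str, new_entity_id: str) -> bool:
    """Point existing history rows at the new entity_id, unless it is taken (sketch)."""
    # Collision check: a row for the new entity_id means history already
    # exists under that name, so the rename is refused.
    if session.execute(
        select(StatesMeta.metadata_id).where(StatesMeta.entity_id == new_entity_id)
    ).first() is not None:
        return False
    # Rewrite the metadata row; all States rows keep their metadata_id and
    # therefore follow the entity to its new name.
    session.execute(
        update(StatesMeta)
        .where(StatesMeta.entity_id == entity_id)
        .values(entity_id=new_entity_id)
    )
    return True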
@@ -13,7 +13,7 @@ from typing import TYPE_CHECKING, Any
from homeassistant.core import Event
from homeassistant.helpers.typing import UndefinedType

from . import purge, statistics
from . import entity_registry, purge, statistics
from .const import DOMAIN, EXCLUDE_ATTRIBUTES
from .db_schema import Statistics, StatisticsShortTerm
from .models import StatisticData, StatisticMetaData

@@ -83,6 +83,22 @@ class UpdateStatisticsMetadataTask(RecorderTask):
        )


@dataclass
class UpdateStatesMetadataTask(RecorderTask):
    """Task to update states metadata."""

    entity_id: str
    new_entity_id: str

    def run(self, instance: Recorder) -> None:
        """Handle the task."""
        entity_registry.update_states_metadata(
            instance,
            self.entity_id,
            self.new_entity_id,
        )


@dataclass
class PurgeTask(RecorderTask):
    """Object to store information about purge task."""
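Like the other recorder tasks, UpdateStatesMetadataTask is a small dataclass that is queued from the event loop and later executed on the recorder thread through its run() method. The toy sketch below shows that generic queue-and-run pattern with made-up names; it is not the recorder's real task machinery.

from dataclasses import dataclass
from queue import SimpleQueue


@dataclass
class RenameHistoryTask:
    """Carry rename parameters from the event loop to the worker (illustrative)."""

    entity_id: str
    new_entity_id: str

    def run(self, worker: "HistoryWorker") -> None:
        worker.apply_rename(self.entity_id, self.new_entity_id)


class HistoryWorker:
    """Toy worker that drains queued tasks one at a time."""

    def __init__(self) -> None:
        self._queue: SimpleQueue = SimpleQueue()
        self.renames: list[tuple[str, str]] = []

    def queue_task(self, task: RenameHistoryTask) -> None:
        self._queue.put(task)

    def apply_rename(self, entity_id: str, new_entity_id: str) -> None:
        self.renames.append((entity_id, new_entity_id))

    def process_one(self) -> None:
        self._queue.get().run(self)


worker = HistoryWorker()
worker.queue_task(RenameHistoryTask("sensor.test1", "sensor.test99"))
worker.process_one()
assert worker.renames == [("sensor.test1", "sensor.test99")]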
@@ -4,21 +4,22 @@ from __future__ import annotations
import asyncio
from collections.abc import Iterable
from dataclasses import dataclass
from datetime import datetime
from datetime import datetime, timedelta
import time
from typing import Any, Literal, cast
from unittest.mock import patch, sentinel

from sqlalchemy import create_engine
from sqlalchemy.orm.session import Session

from homeassistant import core as ha
from homeassistant.components import recorder
from homeassistant.components.recorder import get_instance, statistics
from homeassistant.components.recorder.core import Recorder
from homeassistant.components.recorder import Recorder, get_instance, statistics
from homeassistant.components.recorder.db_schema import RecorderRuns
from homeassistant.components.recorder.tasks import RecorderTask, StatisticsTask
from homeassistant.const import UnitOfTemperature
from homeassistant.core import Event, HomeAssistant, State
from homeassistant.util import dt as dt_util
import homeassistant.util.dt as dt_util

from . import db_schema_0

@@ -38,6 +39,15 @@ class BlockRecorderTask(RecorderTask):
        time.sleep(self.seconds)


@dataclass
class ForceReturnConnectionToPool(RecorderTask):
    """Force return connection to pool."""

    def run(self, instance: Recorder) -> None:
        """Handle the task."""
        instance.event_session.commit()


async def async_block_recorder(hass: HomeAssistant, seconds: float) -> None:
    """Block the recorders event loop for testing.

@@ -223,3 +233,77 @@ def assert_dict_of_states_equal_without_context_and_last_changed(
    assert_multiple_states_equal_without_context_and_last_changed(
        state, others[entity_id]
    )


def record_states(hass):
    """Record some test states.

    We inject a bunch of state updates temperature sensors.
    """
    mp = "media_player.test"
    sns1 = "sensor.test1"
    sns2 = "sensor.test2"
    sns3 = "sensor.test3"
    sns4 = "sensor.test4"
    sns1_attr = {
        "device_class": "temperature",
        "state_class": "measurement",
        "unit_of_measurement": UnitOfTemperature.CELSIUS,
    }
    sns2_attr = {
        "device_class": "humidity",
        "state_class": "measurement",
        "unit_of_measurement": "%",
    }
    sns3_attr = {"device_class": "temperature"}
    sns4_attr = {}

    def set_state(entity_id, state, **kwargs):
        """Set the state."""
        hass.states.set(entity_id, state, **kwargs)
        wait_recording_done(hass)
        return hass.states.get(entity_id)

    zero = dt_util.utcnow()
    one = zero + timedelta(seconds=1 * 5)
    two = one + timedelta(seconds=15 * 5)
    three = two + timedelta(seconds=30 * 5)
    four = three + timedelta(seconds=15 * 5)

    states = {mp: [], sns1: [], sns2: [], sns3: [], sns4: []}
    with patch(
        "homeassistant.components.recorder.core.dt_util.utcnow", return_value=one
    ):
        states[mp].append(
            set_state(mp, "idle", attributes={"media_title": str(sentinel.mt1)})
        )
        states[sns1].append(set_state(sns1, "10", attributes=sns1_attr))
        states[sns2].append(set_state(sns2, "10", attributes=sns2_attr))
        states[sns3].append(set_state(sns3, "10", attributes=sns3_attr))
        states[sns4].append(set_state(sns4, "10", attributes=sns4_attr))

    with patch(
        "homeassistant.components.recorder.core.dt_util.utcnow",
        return_value=one + timedelta(microseconds=1),
    ):
        states[mp].append(
            set_state(mp, "YouTube", attributes={"media_title": str(sentinel.mt2)})
        )

    with patch(
        "homeassistant.components.recorder.core.dt_util.utcnow", return_value=two
    ):
        states[sns1].append(set_state(sns1, "15", attributes=sns1_attr))
        states[sns2].append(set_state(sns2, "15", attributes=sns2_attr))
        states[sns3].append(set_state(sns3, "15", attributes=sns3_attr))
        states[sns4].append(set_state(sns4, "15", attributes=sns4_attr))

    with patch(
        "homeassistant.components.recorder.core.dt_util.utcnow", return_value=three
    ):
        states[sns1].append(set_state(sns1, "20", attributes=sns1_attr))
        states[sns2].append(set_state(sns2, "20", attributes=sns2_attr))
        states[sns3].append(set_state(sns3, "20", attributes=sns3_attr))
        states[sns4].append(set_state(sns4, "20", attributes=sns4_attr))

    return zero, four, states
tests/components/recorder/test_entity_registry.py (new file, 245 additions)
@@ -0,0 +1,245 @@
"""The tests for sensor recorder platform."""
from collections.abc import Callable

import pytest
from sqlalchemy import select
from sqlalchemy.orm import Session

from homeassistant.components.recorder import history
from homeassistant.components.recorder.db_schema import StatesMeta
from homeassistant.components.recorder.util import session_scope
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import entity_registry as er
from homeassistant.setup import setup_component
from homeassistant.util import dt as dt_util

from .common import (
    ForceReturnConnectionToPool,
    assert_dict_of_states_equal_without_context_and_last_changed,
    async_wait_recording_done,
    record_states,
    wait_recording_done,
)

from tests.common import MockEntity, MockEntityPlatform, mock_registry
from tests.typing import RecorderInstanceGenerator


def _count_entity_id_in_states_meta(
    hass: HomeAssistant, session: Session, entity_id: str
) -> int:
    return len(
        list(
            session.execute(
                select(StatesMeta).filter(StatesMeta.entity_id == "sensor.test99")
            )
        )
    )


def test_rename_entity_without_collision(
    hass_recorder: Callable[..., HomeAssistant], caplog: pytest.LogCaptureFixture
) -> None:
    """Test states meta is migrated when entity_id is changed."""
    hass = hass_recorder()
    setup_component(hass, "sensor", {})

    entity_reg = mock_registry(hass)

    @callback
    def add_entry():
        reg_entry = entity_reg.async_get_or_create(
            "sensor",
            "test",
            "unique_0000",
            suggested_object_id="test1",
        )
        assert reg_entry.entity_id == "sensor.test1"

    hass.add_job(add_entry)
    hass.block_till_done()

    zero, four, states = record_states(hass)
    hist = history.get_significant_states(hass, zero, four)

    assert_dict_of_states_equal_without_context_and_last_changed(states, hist)

    @callback
    def rename_entry():
        entity_reg.async_update_entity("sensor.test1", new_entity_id="sensor.test99")

    hass.add_job(rename_entry)
    wait_recording_done(hass)

    hist = history.get_significant_states(hass, zero, four)
    states["sensor.test99"] = states.pop("sensor.test1")
    assert_dict_of_states_equal_without_context_and_last_changed(states, hist)

    hass.states.set("sensor.test99", "post_migrate")
    wait_recording_done(hass)
    new_hist = history.get_significant_states(hass, zero, dt_util.utcnow())
    assert not new_hist.get("sensor.test1")
    assert new_hist["sensor.test99"][-1].state == "post_migrate"

    with session_scope(hass=hass) as session:
        assert _count_entity_id_in_states_meta(hass, session, "sensor.test99") == 1
        assert _count_entity_id_in_states_meta(hass, session, "sensor.test1") == 1

    assert "the new entity_id is already in use" not in caplog.text


async def test_rename_entity_on_mocked_platform(
    async_setup_recorder_instance: RecorderInstanceGenerator,
    hass: HomeAssistant,
    caplog: pytest.LogCaptureFixture,
) -> None:
    """Test states meta is migrated when entity_id is changed when using a mocked platform.

    This test will call async_remove on the entity so we can make
    sure that we do not record the entity as removed in the database
    when we rename it.
    """
    instance = await async_setup_recorder_instance(hass)
    entity_reg = er.async_get(hass)
    start = dt_util.utcnow()

    reg_entry = entity_reg.async_get_or_create(
        "sensor",
        "test",
        "unique_0000",
        suggested_object_id="test1",
    )
    assert reg_entry.entity_id == "sensor.test1"

    entity_platform1 = MockEntityPlatform(
        hass, domain="mock_integration", platform_name="mock_platform", platform=None
    )
    entity1 = MockEntity(entity_id=reg_entry.entity_id)
    await entity_platform1.async_add_entities([entity1])

    await hass.async_block_till_done()

    hass.states.async_set("sensor.test1", "pre_migrate")
    await async_wait_recording_done(hass)

    hist = await instance.async_add_executor_job(
        history.get_significant_states,
        hass,
        start,
        None,
        ["sensor.test1", "sensor.test99"],
    )

    entity_reg.async_update_entity("sensor.test1", new_entity_id="sensor.test99")
    await hass.async_block_till_done()
    # We have to call the remove method ourselves since we are mocking the platform
    hass.states.async_remove("sensor.test1")

    # The remove will trigger a lookup of the non-existing entity_id in the database
    # so we need to force the recorder to return the connection to the pool
    # since our test setup only allows one connection at a time.
    instance.queue_task(ForceReturnConnectionToPool())

    await async_wait_recording_done(hass)

    hist = await instance.async_add_executor_job(
        history.get_significant_states,
        hass,
        start,
        None,
        ["sensor.test1", "sensor.test99"],
    )

    assert "sensor.test1" not in hist
    # Make sure the states manager has not leaked the old entity_id
    assert instance.states_manager.pop_committed("sensor.test1") is None
    assert instance.states_manager.pop_pending("sensor.test1") is None

    hass.states.async_set("sensor.test99", "post_migrate")
    await async_wait_recording_done(hass)

    new_hist = await instance.async_add_executor_job(
        history.get_significant_states,
        hass,
        start,
        None,
        ["sensor.test1", "sensor.test99"],
    )

    assert "sensor.test1" not in new_hist
    assert new_hist["sensor.test99"][-1].state == "post_migrate"

    def _get_states_meta_counts():
        with session_scope(hass=hass) as session:
            return _count_entity_id_in_states_meta(
                hass, session, "sensor.test99"
            ), _count_entity_id_in_states_meta(hass, session, "sensor.test1")

    test99_count, test1_count = await instance.async_add_executor_job(
        _get_states_meta_counts
    )
    assert test99_count == 1
    assert test1_count == 1

    assert "the new entity_id is already in use" not in caplog.text


def test_rename_entity_collision(
    hass_recorder: Callable[..., HomeAssistant], caplog: pytest.LogCaptureFixture
) -> None:
    """Test states meta is not migrated when there is a collision."""
    hass = hass_recorder()
    setup_component(hass, "sensor", {})

    entity_reg = mock_registry(hass)

    @callback
    def add_entry():
        reg_entry = entity_reg.async_get_or_create(
            "sensor",
            "test",
            "unique_0000",
            suggested_object_id="test1",
        )
        assert reg_entry.entity_id == "sensor.test1"

    hass.add_job(add_entry)
    hass.block_till_done()

    zero, four, states = record_states(hass)
    hist = history.get_significant_states(hass, zero, four)
    assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
    assert len(hist["sensor.test1"]) == 3

    hass.states.set("sensor.test99", "collision")
    hass.states.remove("sensor.test99")

    hass.block_till_done()

    # Rename entity sensor.test1 to sensor.test99
    @callback
    def rename_entry():
        entity_reg.async_update_entity("sensor.test1", new_entity_id="sensor.test99")

    hass.add_job(rename_entry)
    wait_recording_done(hass)

    # History is not migrated on collision
    hist = history.get_significant_states(hass, zero, four)
    assert len(hist["sensor.test1"]) == 3
    assert len(hist["sensor.test99"]) == 2

    with session_scope(hass=hass) as session:
        assert _count_entity_id_in_states_meta(hass, session, "sensor.test99") == 1

    hass.states.set("sensor.test99", "post_migrate")
    wait_recording_done(hass)
    new_hist = history.get_significant_states(hass, zero, dt_util.utcnow())
    assert new_hist["sensor.test99"][-1].state == "post_migrate"
    assert len(hist["sensor.test99"]) == 2

    with session_scope(hass=hass) as session:
        assert _count_entity_id_in_states_meta(hass, session, "sensor.test99") == 1
        assert _count_entity_id_in_states_meta(hass, session, "sensor.test1") == 1

    assert "the new entity_id is already in use" in caplog.text
@@ -5,7 +5,7 @@ from collections.abc import Callable
from datetime import datetime, timedelta
import importlib
import sys
from unittest.mock import ANY, DEFAULT, MagicMock, patch, sentinel
from unittest.mock import ANY, DEFAULT, MagicMock, patch

import py
import pytest

@@ -43,7 +43,6 @@ from homeassistant.components.recorder.table_managers.statistics_meta import (
)
from homeassistant.components.recorder.util import session_scope
from homeassistant.components.sensor import UNIT_CONVERTERS
from homeassistant.const import UnitOfTemperature
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import recorder as recorder_helper

@@ -54,6 +53,7 @@ from .common import (
    assert_dict_of_states_equal_without_context_and_last_changed,
    async_wait_recording_done,
    do_adhoc_statistics,
    record_states,
    statistics_during_period,
    wait_recording_done,
)

@@ -1758,80 +1758,6 @@ async def test_validate_db_schema_fix_statistics_datetime_issue(
    modify_columns_mock.assert_called_once_with(ANY, ANY, table, modification)


def record_states(hass):
    """Record some test states.

    We inject a bunch of state updates temperature sensors.
    """
    mp = "media_player.test"
    sns1 = "sensor.test1"
    sns2 = "sensor.test2"
    sns3 = "sensor.test3"
    sns4 = "sensor.test4"
    sns1_attr = {
        "device_class": "temperature",
        "state_class": "measurement",
        "unit_of_measurement": UnitOfTemperature.CELSIUS,
    }
    sns2_attr = {
        "device_class": "humidity",
        "state_class": "measurement",
        "unit_of_measurement": "%",
    }
    sns3_attr = {"device_class": "temperature"}
    sns4_attr = {}

    def set_state(entity_id, state, **kwargs):
        """Set the state."""
        hass.states.set(entity_id, state, **kwargs)
        wait_recording_done(hass)
        return hass.states.get(entity_id)

    zero = dt_util.utcnow()
    one = zero + timedelta(seconds=1 * 5)
    two = one + timedelta(seconds=15 * 5)
    three = two + timedelta(seconds=30 * 5)
    four = three + timedelta(seconds=15 * 5)

    states = {mp: [], sns1: [], sns2: [], sns3: [], sns4: []}
    with patch(
        "homeassistant.components.recorder.core.dt_util.utcnow", return_value=one
    ):
        states[mp].append(
            set_state(mp, "idle", attributes={"media_title": str(sentinel.mt1)})
        )
        states[sns1].append(set_state(sns1, "10", attributes=sns1_attr))
        states[sns2].append(set_state(sns2, "10", attributes=sns2_attr))
        states[sns3].append(set_state(sns3, "10", attributes=sns3_attr))
        states[sns4].append(set_state(sns4, "10", attributes=sns4_attr))

    with patch(
        "homeassistant.components.recorder.core.dt_util.utcnow",
        return_value=one + timedelta(microseconds=1),
    ):
        states[mp].append(
            set_state(mp, "YouTube", attributes={"media_title": str(sentinel.mt2)})
        )

    with patch(
        "homeassistant.components.recorder.core.dt_util.utcnow", return_value=two
    ):
        states[sns1].append(set_state(sns1, "15", attributes=sns1_attr))
        states[sns2].append(set_state(sns2, "15", attributes=sns2_attr))
        states[sns3].append(set_state(sns3, "15", attributes=sns3_attr))
        states[sns4].append(set_state(sns4, "15", attributes=sns4_attr))

    with patch(
        "homeassistant.components.recorder.core.dt_util.utcnow", return_value=three
    ):
        states[sns1].append(set_state(sns1, "20", attributes=sns1_attr))
        states[sns2].append(set_state(sns2, "20", attributes=sns2_attr))
        states[sns3].append(set_state(sns3, "20", attributes=sns3_attr))
        states[sns4].append(set_state(sns4, "20", attributes=sns4_attr))

    return zero, four, states


def test_cache_key_for_generate_statistics_during_period_stmt() -> None:
    """Test cache key for _generate_statistics_during_period_stmt."""
    columns = select(StatisticsShortTerm.metadata_id, StatisticsShortTerm.start_ts)