Add slots to the StateMachine class (#95849)
parent 39dcb5a2b5
commit b2e708834f
5 changed files with 63 additions and 32 deletions
@@ -553,7 +553,7 @@ class Recorder(threading.Thread):
         If the number of entities has increased, increase the size of the LRU
         cache to avoid thrashing.
         """
-        new_size = self.hass.states.async_entity_ids_count() * 2
+        if new_size := self.hass.states.async_entity_ids_count() * 2:
             self.state_attributes_manager.adjust_lru_size(new_size)
             self.states_meta_manager.adjust_lru_size(new_size)
             self.statistics_meta_manager.adjust_lru_size(new_size)

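Note: the walrus form above binds the doubled entity count and resizes the caches only when that count is non-zero. A standalone sketch of the same pattern, using hypothetical stand-ins rather than the Recorder's real managers:

# Hypothetical stand-ins for illustration; not Home Assistant code.
def maybe_resize(entity_count: int, adjust_lru_size) -> None:
    # new_size is bound and tested in one expression; 0 is falsy, so an
    # empty state machine never triggers a resize to zero.
    if new_size := entity_count * 2:
        adjust_lru_size(new_size)

maybe_resize(0, print)      # prints nothing
maybe_resize(4096, print)   # prints 8192
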
@@ -1410,6 +1410,8 @@ class State:
 class StateMachine:
     """Helper class that tracks the state of different entities."""
 
+    __slots__ = ("_states", "_reservations", "_bus", "_loop")
+
     def __init__(self, bus: EventBus, loop: asyncio.events.AbstractEventLoop) -> None:
         """Initialize state machine."""
         self._states: dict[str, State] = {}

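__slots__ replaces the per-instance __dict__ with fixed storage, so every StateMachine instance is smaller and only the four listed attributes can be assigned. It also means unittest.mock.patch.object can no longer attach a replacement attribute to a StateMachine instance, which is what drives the test changes below. A minimal standalone sketch (toy class, not Home Assistant code) of both effects:

from unittest.mock import patch


class Slotted:
    """Toy stand-in for a slotted class such as StateMachine."""

    __slots__ = ("_states",)

    def __init__(self) -> None:
        self._states: dict[str, str] = {}

    def async_all(self) -> list[str]:
        return list(self._states)


obj = Slotted()
assert not hasattr(obj, "__dict__")  # no per-instance dict -> smaller instances

try:
    obj.extra = 1  # not listed in __slots__, so there is nowhere to store it
except AttributeError as err:
    print(err)

try:
    # patch.object(instance, name) uses setattr() on the instance,
    # so it fails the same way once the class is slotted.
    with patch.object(obj, "async_all", return_value=["light.kitchen"]):
        pass
except AttributeError as err:
    print(err)

# Patching the class attribute still works, because the override lives on the type.
with patch.object(Slotted, "async_all", return_value=["light.kitchen"]):
    assert obj.async_all() == ["light.kitchen"]

Class-level patching is shown only as a contrast; the tests in this commit instead switch to driving real states.
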
@@ -56,6 +56,10 @@ from homeassistant.components.recorder.services import (
     SERVICE_PURGE,
     SERVICE_PURGE_ENTITIES,
 )
+from homeassistant.components.recorder.table_managers import (
+    state_attributes as state_attributes_table_manager,
+    states_meta as states_meta_table_manager,
+)
 from homeassistant.components.recorder.util import session_scope
 from homeassistant.const import (
     EVENT_COMPONENT_LOADED,

@@ -93,6 +97,15 @@ from tests.common import (
 from tests.typing import RecorderInstanceGenerator
 
 
+@pytest.fixture
+def small_cache_size() -> None:
+    """Patch the default cache size to 8."""
+    with patch.object(state_attributes_table_manager, "CACHE_SIZE", 8), patch.object(
+        states_meta_table_manager, "CACHE_SIZE", 8
+    ):
+        yield
+
+
 def _default_recorder(hass):
     """Return a recorder with reasonable defaults."""
     return Recorder(

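The fixture patches module-level constants, which is unaffected by the slots change; any test that lists small_cache_size as a parameter runs with the reduced cache size. A generic sketch of that pytest pattern, with made-up names standing in for the recorder's table manager modules:

# Minimal sketch of the fixture pattern; names here are made up.
from types import SimpleNamespace
from unittest.mock import patch

import pytest

# Stand-in for a module that exposes a CACHE_SIZE constant.
fake_table_manager = SimpleNamespace(CACHE_SIZE=1024)


@pytest.fixture
def small_cache_size() -> None:
    """Patch the default cache size to 8 for one test."""
    with patch.object(fake_table_manager, "CACHE_SIZE", 8):
        yield  # the patch stays active while the test body runs


def test_cache_is_small(small_cache_size: None) -> None:
    # Requesting the fixture by name is all a test needs to do.
    assert fake_table_manager.CACHE_SIZE == 8
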
@@ -2022,13 +2035,10 @@ def test_deduplication_event_data_inside_commit_interval(
     assert all(event.data_id == first_data_id for event in events)
 
 
-# Patch CACHE_SIZE since otherwise
-# the CI can fail because the test takes too long to run
-@patch(
-    "homeassistant.components.recorder.table_managers.state_attributes.CACHE_SIZE", 5
-)
 def test_deduplication_state_attributes_inside_commit_interval(
-    hass_recorder: Callable[..., HomeAssistant], caplog: pytest.LogCaptureFixture
+    small_cache_size: None,
+    hass_recorder: Callable[..., HomeAssistant],
+    caplog: pytest.LogCaptureFixture,
 ) -> None:
     """Test deduplication of state attributes inside the commit interval."""
     hass = hass_recorder()

@@ -2306,14 +2316,13 @@ async def test_excluding_attributes_by_integration(
 
 
 async def test_lru_increases_with_many_entities(
-    recorder_mock: Recorder, hass: HomeAssistant
+    small_cache_size: None, recorder_mock: Recorder, hass: HomeAssistant
 ) -> None:
     """Test that the recorder's internal LRU cache increases with many entities."""
-    # We do not actually want to record 4096 entities so we mock the entity count
-    mock_entity_count = 4096
-    with patch.object(
-        hass.states, "async_entity_ids_count", return_value=mock_entity_count
-    ):
+    mock_entity_count = 16
+    for idx in range(mock_entity_count):
+        hass.states.async_set(f"test.entity{idx}", "on")
+
     async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=10))
     await async_wait_recording_done(hass)

@@ -232,17 +232,21 @@ async def test_hass_starting(hass: HomeAssistant) -> None:
     entity.hass = hass
     entity.entity_id = "input_boolean.b1"
 
+    all_states = hass.states.async_all()
+    assert len(all_states) == 0
+    hass.states.async_set("input_boolean.b1", "on")
 
     # Mock that only b1 is present this run
     states = [State("input_boolean.b1", "on")]
     with patch(
         "homeassistant.helpers.restore_state.Store.async_save"
-    ) as mock_write_data, patch.object(hass.states, "async_all", return_value=states):
+    ) as mock_write_data:
         state = await entity.async_get_last_state()
         await hass.async_block_till_done()
 
     assert state is not None
     assert state.entity_id == "input_boolean.b1"
     assert state.state == "on"
+    hass.states.async_remove("input_boolean.b1")
 
     # Assert that no data was written yet, since hass is still starting.
     assert not mock_write_data.called

@@ -293,15 +297,20 @@ async def test_dump_data(hass: HomeAssistant) -> None:
         "input_boolean.b5": StoredState(State("input_boolean.b5", "off"), None, now),
     }
 
+    for state in states:
+        hass.states.async_set(state.entity_id, state.state, state.attributes)
+
     with patch(
         "homeassistant.helpers.restore_state.Store.async_save"
-    ) as mock_write_data, patch.object(hass.states, "async_all", return_value=states):
+    ) as mock_write_data:
         await data.async_dump_states()
 
     assert mock_write_data.called
     args = mock_write_data.mock_calls[0][1]
     written_states = args[0]
 
+    for state in states:
+        hass.states.async_remove(state.entity_id)
     # b0 should not be written, since it didn't extend RestoreEntity
     # b1 should be written, since it is present in the current run
     # b2 should not be written, since it is not registered with the helper

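For reference, mock_calls[0][1] above selects the positional arguments of the first recorded call; each entry in mock_calls unpacks as (name, positional args, keyword args). A standalone sketch with a made-up save callable:

from unittest.mock import MagicMock

async_save = MagicMock()  # stands in for the patched Store.async_save
async_save({"input_boolean.b1": "on"})

name, args, kwargs = async_save.mock_calls[0]
assert args == async_save.mock_calls[0][1]
assert args[0] == {"input_boolean.b1": "on"}
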
@@ -319,9 +328,12 @@ async def test_dump_data(hass: HomeAssistant) -> None:
     # Test that removed entities are not persisted
     await entity.async_remove()
 
+    for state in states:
+        hass.states.async_set(state.entity_id, state.state, state.attributes)
+
     with patch(
         "homeassistant.helpers.restore_state.Store.async_save"
-    ) as mock_write_data, patch.object(hass.states, "async_all", return_value=states):
+    ) as mock_write_data:
         await data.async_dump_states()
 
     assert mock_write_data.called

@@ -355,10 +367,13 @@ async def test_dump_error(hass: HomeAssistant) -> None:
 
     data = async_get(hass)
 
+    for state in states:
+        hass.states.async_set(state.entity_id, state.state, state.attributes)
+
     with patch(
         "homeassistant.helpers.restore_state.Store.async_save",
         side_effect=HomeAssistantError,
-    ) as mock_write_data, patch.object(hass.states, "async_all", return_value=states):
+    ) as mock_write_data:
         await data.async_dump_states()
 
     assert mock_write_data.called

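The test_dump_error hunk keeps the side_effect=HomeAssistantError patch; the patched callable then raises instead of saving. A standalone sketch of that side_effect behaviour, with made-up names:

from unittest.mock import patch


class StoreError(Exception):
    """Stand-in for HomeAssistantError."""


def save(data: dict) -> None:
    """Stand-in for Store.async_save."""
    raise NotImplementedError


with patch(f"{__name__}.save", side_effect=StoreError) as mock_save:
    try:
        save({"input_boolean.b1": "on"})
    except StoreError:
        pass  # the mock raised instead of saving

assert mock_save.called
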
@@ -4533,18 +4533,20 @@ async def test_render_to_info_with_exception(hass: HomeAssistant) -> None:
 async def test_lru_increases_with_many_entities(hass: HomeAssistant) -> None:
     """Test that the template internal LRU cache increases with many entities."""
-    # We do not actually want to record 4096 entities so we mock the entity count
-    mock_entity_count = 4096
+    mock_entity_count = 16
 
     assert template.CACHED_TEMPLATE_LRU.get_size() == template.CACHED_TEMPLATE_STATES
     assert (
         template.CACHED_TEMPLATE_NO_COLLECT_LRU.get_size()
         == template.CACHED_TEMPLATE_STATES
     )
+    template.CACHED_TEMPLATE_LRU.set_size(8)
+    template.CACHED_TEMPLATE_NO_COLLECT_LRU.set_size(8)
 
     template.async_setup(hass)
-    with patch.object(
-        hass.states, "async_entity_ids_count", return_value=mock_entity_count
-    ):
+    for i in range(mock_entity_count):
+        hass.states.async_set(f"sensor.sensor{i}", "on")
 
     async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=10))
     await hass.async_block_till_done()

@@ -4556,7 +4558,10 @@ async def test_lru_increases_with_many_entities(hass: HomeAssistant) -> None:
     )
 
     await hass.async_stop()
-    with patch.object(hass.states, "async_entity_ids_count", return_value=8192):
+    for i in range(mock_entity_count):
+        hass.states.async_set(f"sensor.sensor_add_{i}", "on")
 
     async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=20))
     await hass.async_block_till_done()

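The template test above manipulates the caches through get_size()/set_size(). A minimal sketch of that resizable-LRU behaviour, assuming the third-party lru-dict package (from lru import LRU), which appears to back these caches; the keys are illustrative:

from lru import LRU  # lru-dict package (assumed)

cache = LRU(8)  # start small, mirroring the test's set_size(8)
for i in range(16):
    cache[f"sensor.sensor{i}"] = "on"

assert cache.get_size() == 8
assert len(cache) == 8   # the oldest half was evicted

cache.set_size(32)       # growing keeps the surviving entries
assert cache.get_size() == 32
assert len(cache) == 8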