From 2e945aed54b1dd11fdf5212805383b515a09c59f Mon Sep 17 00:00:00 2001
From: Erik Montnemery
Date: Mon, 6 May 2024 15:25:48 +0200
Subject: [PATCH] Convert recorder auto_repairs tests to use async API
 (#116927)

---
 .../statistics/test_duplicates.py | 204 ++++++++++--
 1 file changed, 118 insertions(+), 86 deletions(-)

diff --git a/tests/components/recorder/auto_repairs/statistics/test_duplicates.py b/tests/components/recorder/auto_repairs/statistics/test_duplicates.py
index 2a1c3c5d209..175cb6ecd1a 100644
--- a/tests/components/recorder/auto_repairs/statistics/test_duplicates.py
+++ b/tests/components/recorder/auto_repairs/statistics/test_duplicates.py
@@ -1,6 +1,5 @@
 """Test removing statistics duplicates."""
 
-from collections.abc import Callable
 import importlib
 from pathlib import Path
 import sys
@@ -11,7 +10,7 @@ from sqlalchemy import create_engine
 from sqlalchemy.orm import Session
 
 from homeassistant.components import recorder
-from homeassistant.components.recorder import statistics
+from homeassistant.components.recorder import Recorder, statistics
 from homeassistant.components.recorder.auto_repairs.statistics.duplicates import (
     delete_statistics_duplicates,
     delete_statistics_meta_duplicates,
@@ -21,20 +20,34 @@ from homeassistant.components.recorder.statistics import async_add_external_stat
 from homeassistant.components.recorder.util import session_scope
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers import recorder as recorder_helper
-from homeassistant.setup import setup_component
+from homeassistant.setup import async_setup_component
 import homeassistant.util.dt as dt_util
 
-from ...common import wait_recording_done
+from ...common import async_wait_recording_done
 
-from tests.common import get_test_home_assistant
+from tests.common import async_test_home_assistant
+from tests.typing import RecorderInstanceGenerator
 
 
-def test_delete_duplicates_no_duplicates(
-    hass_recorder: Callable[..., HomeAssistant], caplog: pytest.LogCaptureFixture
+@pytest.fixture
+async def mock_recorder_before_hass(
+    async_setup_recorder_instance: RecorderInstanceGenerator,
+) -> None:
+    """Set up recorder."""
+
+
+@pytest.fixture
+def setup_recorder(recorder_mock: Recorder) -> None:
+    """Set up recorder."""
+
+
+async def test_delete_duplicates_no_duplicates(
+    hass: HomeAssistant,
+    caplog: pytest.LogCaptureFixture,
+    setup_recorder: None,
 ) -> None:
     """Test removal of duplicated statistics."""
-    hass = hass_recorder()
-    wait_recording_done(hass)
+    await async_wait_recording_done(hass)
     instance = recorder.get_instance(hass)
     with session_scope(hass=hass) as session:
         delete_statistics_duplicates(instance, hass, session)
@@ -43,12 +56,13 @@
     assert "Found duplicated" not in caplog.text
 
 
-def test_duplicate_statistics_handle_integrity_error(
-    hass_recorder: Callable[..., HomeAssistant], caplog: pytest.LogCaptureFixture
+async def test_duplicate_statistics_handle_integrity_error(
+    hass: HomeAssistant,
+    caplog: pytest.LogCaptureFixture,
+    setup_recorder: None,
 ) -> None:
     """Test the recorder does not blow up if statistics is duplicated."""
-    hass = hass_recorder()
-    wait_recording_done(hass)
+    await async_wait_recording_done(hass)
 
     period1 = dt_util.as_utc(dt_util.parse_datetime("2021-09-01 00:00:00"))
     period2 = dt_util.as_utc(dt_util.parse_datetime("2021-09-30 23:00:00"))
@@ -93,7 +107,7 @@ def test_duplicate_statistics_handle_integrity_error(
         async_add_external_statistics(
             hass, external_energy_metadata_1, external_energy_statistics_2
         )
-        wait_recording_done(hass)
+        await async_wait_recording_done(hass)
         assert insert_statistics_mock.call_count == 3
 
     with session_scope(hass=hass) as session:
@@ -126,7 +140,7 @@ def _create_engine_28(*args, **kwargs):
     return engine
 
 
-def test_delete_metadata_duplicates(
+async def test_delete_metadata_duplicates(
     caplog: pytest.LogCaptureFixture, tmp_path: Path
 ) -> None:
     """Test removal of duplicated statistics."""
@@ -164,23 +178,7 @@
         "unit_of_measurement": "%",
     }
 
-    # Create some duplicated statistics_meta with schema version 28
-    with (
-        patch.object(recorder, "db_schema", old_db_schema),
-        patch.object(
-            recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION
-        ),
-        patch(
-            "homeassistant.components.recorder.core.create_engine",
-            new=_create_engine_28,
-        ),
-        get_test_home_assistant() as hass,
-    ):
-        recorder_helper.async_initialize_recorder(hass)
-        setup_component(hass, "recorder", {"recorder": {"db_url": dburl}})
-        wait_recording_done(hass)
-        wait_recording_done(hass)
-
+    def add_statistics_meta(hass: HomeAssistant) -> None:
         with session_scope(hass=hass) as session:
             session.add(
                 recorder.db_schema.StatisticsMeta.from_meta(external_energy_metadata_1)
             )
@@ -192,8 +190,33 @@
             session.add(
                 recorder.db_schema.StatisticsMeta.from_meta(external_co2_metadata)
             )
 
-        with session_scope(hass=hass) as session:
-            tmp = session.query(recorder.db_schema.StatisticsMeta).all()
+    def get_statistics_meta(hass: HomeAssistant) -> list:
+        with session_scope(hass=hass, read_only=True) as session:
+            return list(session.query(recorder.db_schema.StatisticsMeta).all())
+
+    # Create some duplicated statistics_meta with schema version 28
+    with (
+        patch.object(recorder, "db_schema", old_db_schema),
+        patch.object(
+            recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION
+        ),
+        patch(
+            "homeassistant.components.recorder.core.create_engine",
+            new=_create_engine_28,
+        ),
+    ):
+        async with async_test_home_assistant() as hass:
+            recorder_helper.async_initialize_recorder(hass)
+            await async_setup_component(
+                hass, "recorder", {"recorder": {"db_url": dburl}}
+            )
+            await async_wait_recording_done(hass)
+            await async_wait_recording_done(hass)
+
+            instance = recorder.get_instance(hass)
+            await instance.async_add_executor_job(add_statistics_meta, hass)
+
+            tmp = await instance.async_add_executor_job(get_statistics_meta, hass)
             assert len(tmp) == 3
             assert tmp[0].id == 1
             assert tmp[0].statistic_id == "test:total_energy_import_tariff_1"
@@ -202,29 +225,29 @@
             assert tmp[1].id == 2
             assert tmp[1].statistic_id == "test:total_energy_import_tariff_1"
             assert tmp[2].id == 3
             assert tmp[2].statistic_id == "test:fossil_percentage"
 
-        hass.stop()
+            await hass.async_stop()
 
     # Test that the duplicates are removed during migration from schema 28
-    with get_test_home_assistant() as hass:
+    async with async_test_home_assistant() as hass:
         recorder_helper.async_initialize_recorder(hass)
-        setup_component(hass, "recorder", {"recorder": {"db_url": dburl}})
-        hass.start()
-        wait_recording_done(hass)
-        wait_recording_done(hass)
+        await async_setup_component(hass, "recorder", {"recorder": {"db_url": dburl}})
+        await hass.async_start()
+        await async_wait_recording_done(hass)
+        await async_wait_recording_done(hass)
 
         assert "Deleted 1 duplicated statistics_meta rows" in caplog.text
 
-        with session_scope(hass=hass) as session:
-            tmp = session.query(recorder.db_schema.StatisticsMeta).all()
-            assert len(tmp) == 2
-            assert tmp[0].id == 2
-            assert tmp[0].statistic_id == "test:total_energy_import_tariff_1"
-            assert tmp[1].id == 3
-            assert tmp[1].statistic_id == "test:fossil_percentage"
+        instance = recorder.get_instance(hass)
+        tmp = await instance.async_add_executor_job(get_statistics_meta, hass)
+        assert len(tmp) == 2
+        assert tmp[0].id == 2
+        assert tmp[0].statistic_id == "test:total_energy_import_tariff_1"
+        assert tmp[1].id == 3
+        assert tmp[1].statistic_id == "test:fossil_percentage"
 
-    hass.stop()
+        await hass.async_stop()
 
 
-def test_delete_metadata_duplicates_many(
+async def test_delete_metadata_duplicates_many(
     caplog: pytest.LogCaptureFixture, tmp_path: Path
 ) -> None:
     """Test removal of duplicated statistics."""
@@ -262,23 +285,7 @@
         "unit_of_measurement": "%",
     }
 
-    # Create some duplicated statistics with schema version 28
-    with (
-        patch.object(recorder, "db_schema", old_db_schema),
-        patch.object(
-            recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION
-        ),
-        patch(
-            "homeassistant.components.recorder.core.create_engine",
-            new=_create_engine_28,
-        ),
-        get_test_home_assistant() as hass,
-    ):
-        recorder_helper.async_initialize_recorder(hass)
-        setup_component(hass, "recorder", {"recorder": {"db_url": dburl}})
-        wait_recording_done(hass)
-        wait_recording_done(hass)
-
+    def add_statistics_meta(hass: HomeAssistant) -> None:
         with session_scope(hass=hass) as session:
             session.add(
                 recorder.db_schema.StatisticsMeta.from_meta(external_energy_metadata_1)
             )
@@ -302,36 +309,61 @@
                 recorder.db_schema.StatisticsMeta.from_meta(external_co2_metadata)
             )
 
-        hass.stop()
+    def get_statistics_meta(hass: HomeAssistant) -> list:
+        with session_scope(hass=hass, read_only=True) as session:
+            return list(session.query(recorder.db_schema.StatisticsMeta).all())
+
+    # Create some duplicated statistics with schema version 28
+    with (
+        patch.object(recorder, "db_schema", old_db_schema),
+        patch.object(
+            recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION
+        ),
+        patch(
+            "homeassistant.components.recorder.core.create_engine",
+            new=_create_engine_28,
+        ),
+    ):
+        async with async_test_home_assistant() as hass:
+            recorder_helper.async_initialize_recorder(hass)
+            await async_setup_component(
+                hass, "recorder", {"recorder": {"db_url": dburl}}
+            )
+            await async_wait_recording_done(hass)
+            await async_wait_recording_done(hass)
+
+            instance = recorder.get_instance(hass)
+            await instance.async_add_executor_job(add_statistics_meta, hass)
+
+            await hass.async_stop()
 
     # Test that the duplicates are removed during migration from schema 28
-    with get_test_home_assistant() as hass:
+    async with async_test_home_assistant() as hass:
         recorder_helper.async_initialize_recorder(hass)
-        setup_component(hass, "recorder", {"recorder": {"db_url": dburl}})
-        hass.start()
-        wait_recording_done(hass)
-        wait_recording_done(hass)
+        await async_setup_component(hass, "recorder", {"recorder": {"db_url": dburl}})
+        await hass.async_start()
+        await async_wait_recording_done(hass)
+        await async_wait_recording_done(hass)
 
        assert "Deleted 1102 duplicated statistics_meta rows" in caplog.text
 
-        with session_scope(hass=hass) as session:
-            tmp = session.query(recorder.db_schema.StatisticsMeta).all()
-            assert len(tmp) == 3
-            assert tmp[0].id == 1101
-            assert tmp[0].statistic_id == "test:total_energy_import_tariff_1"
-            assert tmp[1].id == 1103
-            assert tmp[1].statistic_id == "test:total_energy_import_tariff_2"
-            assert tmp[2].id == 1105
-            assert tmp[2].statistic_id == "test:fossil_percentage"
+        instance = recorder.get_instance(hass)
+        tmp = await instance.async_add_executor_job(get_statistics_meta, hass)
+        assert len(tmp) == 3
+        assert tmp[0].id == 1101
+        assert tmp[0].statistic_id == "test:total_energy_import_tariff_1"
+        assert tmp[1].id == 1103
+        assert tmp[1].statistic_id == "test:total_energy_import_tariff_2"
+        assert tmp[2].id == 1105
+        assert tmp[2].statistic_id == "test:fossil_percentage"
 
-    hass.stop()
+        await hass.async_stop()
 
 
-def test_delete_metadata_duplicates_no_duplicates(
-    hass_recorder: Callable[..., HomeAssistant], caplog: pytest.LogCaptureFixture
+async def test_delete_metadata_duplicates_no_duplicates(
+    hass: HomeAssistant, caplog: pytest.LogCaptureFixture, setup_recorder: None
 ) -> None:
     """Test removal of duplicated statistics."""
-    hass = hass_recorder()
-    wait_recording_done(hass)
+    await async_wait_recording_done(hass)
     with session_scope(hass=hass) as session:
         instance = recorder.get_instance(hass)
         delete_statistics_meta_duplicates(instance, session)
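
The fixture wiring this patch introduces generalizes to other recorder test modules. Below is a minimal sketch of the pattern, assuming the `recorder_mock` and `async_setup_recorder_instance` fixtures from Home Assistant's test harness; the test name `test_example` is illustrative and not part of the patch:

import pytest

from homeassistant.components import recorder
from homeassistant.components.recorder import Recorder
from homeassistant.core import HomeAssistant

from tests.typing import RecorderInstanceGenerator


@pytest.fixture
async def mock_recorder_before_hass(
    async_setup_recorder_instance: RecorderInstanceGenerator,
) -> None:
    """Ensure the recorder is initialized before hass starts."""


@pytest.fixture
def setup_recorder(recorder_mock: Recorder) -> None:
    """Pull in recorder_mock so tests only need to request setup_recorder."""


async def test_example(hass: HomeAssistant, setup_recorder: None) -> None:
    """Illustrative test body for the converted style."""
    # The old hass_recorder()/wait_recording_done pair becomes the hass
    # fixture plus `await async_wait_recording_done(hass)` (imported from
    # the recorder test package's common module, as in the patch above).
    instance = recorder.get_instance(hass)
    assert instance is not None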
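
The executor-job pattern is the other half of the conversion: session work stays in plain blocking helpers, and the now-async test bodies hand those helpers to the recorder's thread pool instead of calling them directly. A sketch under the same assumptions, with helper names mirroring the ones the patch adds:

from homeassistant.components import recorder
from homeassistant.components.recorder.util import session_scope
from homeassistant.core import HomeAssistant


def get_statistics_meta(hass: HomeAssistant) -> list:
    """Blocking database query; must not run on the event loop."""
    with session_scope(hass=hass, read_only=True) as session:
        return list(session.query(recorder.db_schema.StatisticsMeta).all())


async def fetch_statistics_meta(hass: HomeAssistant) -> list:
    """Illustrative wrapper running the query in the recorder's executor."""
    instance = recorder.get_instance(hass)
    return await instance.async_add_executor_job(get_statistics_meta, hass)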