Update recorder tests to async (#124161)

Author: Erik Montnemery · 2024-08-18 21:14:41 +02:00 · committed by GitHub
parent 135ebaafa0
commit 06d1bbc20f

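Every hunk below applies the same conversion: the synchronous test harness (`get_test_home_assistant()`, `recorder_helper.async_initialize_recorder()`, `setup_component()`, `wait_recording_done()`) is replaced by the async harness (`async_test_home_assistant()`, the `async_test_recorder` fixture, `await async_wait_recording_done()`), and the tests themselves become coroutines. A minimal before/after sketch of the pattern, using the helpers named in the diff (the test name and body are illustrative only, not part of the commit):

    # Before: synchronous test, recorder set up via setup_component
    def test_migration(recorder_db_url: str) -> None:
        with get_test_home_assistant() as hass:
            recorder_helper.async_initialize_recorder(hass)
            setup_component(hass, "recorder", {"recorder": {"db_url": recorder_db_url}})
            hass.start()
            wait_recording_done(hass)
            hass.stop()

    # After: async test, recorder set up via the async_test_recorder fixture
    async def test_migration(async_test_recorder: RecorderInstanceGenerator) -> None:
        async with async_test_home_assistant() as hass, async_test_recorder(hass):
            await hass.async_start()
            await async_wait_recording_done(hass)
            await hass.async_stop()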

@@ -17,18 +17,17 @@ import pytest
 from homeassistant.components import recorder
 from homeassistant.components.recorder import get_instance
 from homeassistant.components.recorder.util import session_scope
-from homeassistant.helpers import recorder as recorder_helper
-from homeassistant.setup import setup_component
 import homeassistant.util.dt as dt_util
 
 from .common import (
     CREATE_ENGINE_TARGET,
+    async_wait_recording_done,
     create_engine_test_for_schema_version_postfix,
     get_schema_module_path,
-    wait_recording_done,
 )
 
-from tests.common import get_test_home_assistant
+from tests.common import async_test_home_assistant
+from tests.typing import RecorderInstanceGenerator
 
 SCHEMA_VERSION_POSTFIX = "23_with_newer_columns"
 SCHEMA_MODULE = get_schema_module_path(SCHEMA_VERSION_POSTFIX)
@@ -37,8 +36,8 @@ SCHEMA_MODULE = get_schema_module_path(SCHEMA_VERSION_POSTFIX)
 @pytest.mark.skip_on_db_engine(["mysql", "postgresql"])
 @pytest.mark.usefixtures("skip_by_db_engine")
 @pytest.mark.parametrize("persistent_database", [True])
-def test_delete_duplicates(
-    recorder_db_url: str, caplog: pytest.LogCaptureFixture
+async def test_delete_duplicates(
+    async_test_recorder: RecorderInstanceGenerator, caplog: pytest.LogCaptureFixture
 ) -> None:
     """Test removal of duplicated statistics.
@@ -176,42 +175,42 @@ def test_delete_duplicates(
                 schema_version_postfix=SCHEMA_VERSION_POSTFIX,
             ),
         ),
-        get_test_home_assistant() as hass,
     ):
-        recorder_helper.async_initialize_recorder(hass)
-        setup_component(hass, "recorder", {"recorder": {"db_url": recorder_db_url}})
-        get_instance(hass).recorder_and_worker_thread_ids.add(threading.get_ident())
-        wait_recording_done(hass)
-        wait_recording_done(hass)
-        with session_scope(hass=hass) as session:
-            session.add(
-                recorder.db_schema.StatisticsMeta.from_meta(external_energy_metadata_1)
-            )
-            session.add(
-                recorder.db_schema.StatisticsMeta.from_meta(external_energy_metadata_2)
-            )
-            session.add(
-                recorder.db_schema.StatisticsMeta.from_meta(external_co2_metadata)
-            )
-        with session_scope(hass=hass) as session:
-            for stat in external_energy_statistics_1:
-                session.add(recorder.db_schema.Statistics.from_stats(1, stat))
-            for stat in external_energy_statistics_2:
-                session.add(recorder.db_schema.Statistics.from_stats(2, stat))
-            for stat in external_co2_statistics:
-                session.add(recorder.db_schema.Statistics.from_stats(3, stat))
-        hass.stop()
+        async with async_test_home_assistant() as hass, async_test_recorder(hass):
+            get_instance(hass).recorder_and_worker_thread_ids.add(threading.get_ident())
+            await async_wait_recording_done(hass)
+            await async_wait_recording_done(hass)
+            with session_scope(hass=hass) as session:
+                session.add(
+                    recorder.db_schema.StatisticsMeta.from_meta(
+                        external_energy_metadata_1
+                    )
+                )
+                session.add(
+                    recorder.db_schema.StatisticsMeta.from_meta(
+                        external_energy_metadata_2
+                    )
+                )
+                session.add(
+                    recorder.db_schema.StatisticsMeta.from_meta(external_co2_metadata)
+                )
+            with session_scope(hass=hass) as session:
+                for stat in external_energy_statistics_1:
+                    session.add(recorder.db_schema.Statistics.from_stats(1, stat))
+                for stat in external_energy_statistics_2:
+                    session.add(recorder.db_schema.Statistics.from_stats(2, stat))
+                for stat in external_co2_statistics:
+                    session.add(recorder.db_schema.Statistics.from_stats(3, stat))
+            await hass.async_stop()
 
     # Test that the duplicates are removed during migration from schema 23
-    with get_test_home_assistant() as hass:
-        recorder_helper.async_initialize_recorder(hass)
-        setup_component(hass, "recorder", {"recorder": {"db_url": recorder_db_url}})
-        hass.start()
-        wait_recording_done(hass)
-        wait_recording_done(hass)
-        hass.stop()
+    async with async_test_home_assistant() as hass, async_test_recorder(hass):
+        await hass.async_start()
+        await async_wait_recording_done(hass)
+        await async_wait_recording_done(hass)
+        await hass.async_stop()
 
     assert "Deleted 2 duplicated statistics rows" in caplog.text
     assert "Found non identical" not in caplog.text
@@ -221,8 +220,8 @@ def test_delete_duplicates(
 @pytest.mark.skip_on_db_engine(["mysql", "postgresql"])
 @pytest.mark.usefixtures("skip_by_db_engine")
 @pytest.mark.parametrize("persistent_database", [True])
-def test_delete_duplicates_many(
-    recorder_db_url: str, caplog: pytest.LogCaptureFixture
+async def test_delete_duplicates_many(
+    async_test_recorder: RecorderInstanceGenerator, caplog: pytest.LogCaptureFixture
 ) -> None:
     """Test removal of duplicated statistics.
@@ -360,48 +359,48 @@ def test_delete_duplicates_many(
                 schema_version_postfix=SCHEMA_VERSION_POSTFIX,
             ),
         ),
-        get_test_home_assistant() as hass,
     ):
-        recorder_helper.async_initialize_recorder(hass)
-        setup_component(hass, "recorder", {"recorder": {"db_url": recorder_db_url}})
-        get_instance(hass).recorder_and_worker_thread_ids.add(threading.get_ident())
-        wait_recording_done(hass)
-        wait_recording_done(hass)
-        with session_scope(hass=hass) as session:
-            session.add(
-                recorder.db_schema.StatisticsMeta.from_meta(external_energy_metadata_1)
-            )
-            session.add(
-                recorder.db_schema.StatisticsMeta.from_meta(external_energy_metadata_2)
-            )
-            session.add(
-                recorder.db_schema.StatisticsMeta.from_meta(external_co2_metadata)
-            )
-        with session_scope(hass=hass) as session:
-            for stat in external_energy_statistics_1:
-                session.add(recorder.db_schema.Statistics.from_stats(1, stat))
-            for _ in range(3000):
-                session.add(
-                    recorder.db_schema.Statistics.from_stats(
-                        1, external_energy_statistics_1[-1]
-                    )
-                )
-            for stat in external_energy_statistics_2:
-                session.add(recorder.db_schema.Statistics.from_stats(2, stat))
-            for stat in external_co2_statistics:
-                session.add(recorder.db_schema.Statistics.from_stats(3, stat))
-        hass.stop()
+        async with async_test_home_assistant() as hass, async_test_recorder(hass):
+            get_instance(hass).recorder_and_worker_thread_ids.add(threading.get_ident())
+            await async_wait_recording_done(hass)
+            await async_wait_recording_done(hass)
+            with session_scope(hass=hass) as session:
+                session.add(
+                    recorder.db_schema.StatisticsMeta.from_meta(
+                        external_energy_metadata_1
+                    )
+                )
+                session.add(
+                    recorder.db_schema.StatisticsMeta.from_meta(
+                        external_energy_metadata_2
+                    )
+                )
+                session.add(
+                    recorder.db_schema.StatisticsMeta.from_meta(external_co2_metadata)
+                )
+            with session_scope(hass=hass) as session:
+                for stat in external_energy_statistics_1:
+                    session.add(recorder.db_schema.Statistics.from_stats(1, stat))
+                for _ in range(3000):
+                    session.add(
+                        recorder.db_schema.Statistics.from_stats(
+                            1, external_energy_statistics_1[-1]
+                        )
+                    )
+                for stat in external_energy_statistics_2:
+                    session.add(recorder.db_schema.Statistics.from_stats(2, stat))
+                for stat in external_co2_statistics:
+                    session.add(recorder.db_schema.Statistics.from_stats(3, stat))
+            await hass.async_stop()
 
     # Test that the duplicates are removed during migration from schema 23
-    with get_test_home_assistant() as hass:
-        recorder_helper.async_initialize_recorder(hass)
-        setup_component(hass, "recorder", {"recorder": {"db_url": recorder_db_url}})
-        hass.start()
-        wait_recording_done(hass)
-        wait_recording_done(hass)
-        hass.stop()
+    async with async_test_home_assistant() as hass, async_test_recorder(hass):
+        await hass.async_start()
+        await async_wait_recording_done(hass)
+        await async_wait_recording_done(hass)
+        await hass.async_stop()
 
     assert "Deleted 3002 duplicated statistics rows" in caplog.text
     assert "Found non identical" not in caplog.text
@@ -412,8 +411,10 @@ def test_delete_duplicates_many(
 @pytest.mark.skip_on_db_engine(["mysql", "postgresql"])
 @pytest.mark.usefixtures("skip_by_db_engine")
 @pytest.mark.parametrize("persistent_database", [True])
-def test_delete_duplicates_non_identical(
-    recorder_db_url: str, caplog: pytest.LogCaptureFixture, tmp_path: Path
+async def test_delete_duplicates_non_identical(
+    async_test_recorder: RecorderInstanceGenerator,
+    caplog: pytest.LogCaptureFixture,
+    tmp_path: Path,
 ) -> None:
     """Test removal of duplicated statistics.
@@ -521,38 +522,40 @@ def test_delete_duplicates_non_identical(
                 schema_version_postfix=SCHEMA_VERSION_POSTFIX,
             ),
         ),
-        get_test_home_assistant() as hass,
     ):
-        recorder_helper.async_initialize_recorder(hass)
-        setup_component(hass, "recorder", {"recorder": {"db_url": recorder_db_url}})
-        get_instance(hass).recorder_and_worker_thread_ids.add(threading.get_ident())
-        wait_recording_done(hass)
-        wait_recording_done(hass)
-        with session_scope(hass=hass) as session:
-            session.add(
-                recorder.db_schema.StatisticsMeta.from_meta(external_energy_metadata_1)
-            )
-            session.add(
-                recorder.db_schema.StatisticsMeta.from_meta(external_energy_metadata_2)
-            )
-        with session_scope(hass=hass) as session:
-            for stat in external_energy_statistics_1:
-                session.add(recorder.db_schema.Statistics.from_stats(1, stat))
-            for stat in external_energy_statistics_2:
-                session.add(recorder.db_schema.Statistics.from_stats(2, stat))
-        hass.stop()
+        async with async_test_home_assistant() as hass, async_test_recorder(hass):
+            get_instance(hass).recorder_and_worker_thread_ids.add(threading.get_ident())
+            await async_wait_recording_done(hass)
+            await async_wait_recording_done(hass)
+            with session_scope(hass=hass) as session:
+                session.add(
+                    recorder.db_schema.StatisticsMeta.from_meta(
+                        external_energy_metadata_1
+                    )
+                )
+                session.add(
+                    recorder.db_schema.StatisticsMeta.from_meta(
+                        external_energy_metadata_2
+                    )
+                )
+            with session_scope(hass=hass) as session:
+                for stat in external_energy_statistics_1:
+                    session.add(recorder.db_schema.Statistics.from_stats(1, stat))
+                for stat in external_energy_statistics_2:
+                    session.add(recorder.db_schema.Statistics.from_stats(2, stat))
+            await hass.async_stop()
 
     # Test that the duplicates are removed during migration from schema 23
-    with get_test_home_assistant() as hass:
-        hass.config.config_dir = tmp_path
-        recorder_helper.async_initialize_recorder(hass)
-        setup_component(hass, "recorder", {"recorder": {"db_url": recorder_db_url}})
-        hass.start()
-        wait_recording_done(hass)
-        wait_recording_done(hass)
-        hass.stop()
+    async with (
+        async_test_home_assistant(config_dir=tmp_path) as hass,
+        async_test_recorder(hass),
+    ):
+        await hass.async_start()
+        await async_wait_recording_done(hass)
+        await async_wait_recording_done(hass)
+        await hass.async_stop()
 
     assert "Deleted 2 duplicated statistics rows" in caplog.text
     assert "Deleted 1 non identical" in caplog.text
@@ -561,8 +564,11 @@ def test_delete_duplicates_non_identical(
     isotime = dt_util.utcnow().isoformat()
     backup_file_name = f".storage/deleted_statistics.{isotime}.json"
 
-    with open(hass.config.path(backup_file_name), encoding="utf8") as backup_file:
-        backup = json.load(backup_file)
+    def read_backup():
+        with open(hass.config.path(backup_file_name), encoding="utf8") as backup_file:
+            return json.load(backup_file)
+
+    backup = await hass.async_add_executor_job(read_backup)
 
     assert backup == [
         {
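Because the test body now runs in the event loop, the blocking read of the backup JSON file in the hunk above is pushed to an executor: the nested `read_backup()` helper keeps the `open()`/`json.load()` calls, and the coroutine awaits `hass.async_add_executor_job(read_backup)` instead of calling them directly. The same idea in isolation (helper and file name match the diff; the surrounding test is assumed):

    def read_backup():
        # Blocking file I/O runs in a worker thread, not in the event loop
        with open(hass.config.path(backup_file_name), encoding="utf8") as backup_file:
            return json.load(backup_file)

    backup = await hass.async_add_executor_job(read_backup)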
@@ -597,8 +603,10 @@ def test_delete_duplicates_non_identical(
 @pytest.mark.parametrize("persistent_database", [True])
 @pytest.mark.skip_on_db_engine(["mysql", "postgresql"])
 @pytest.mark.usefixtures("skip_by_db_engine")
-def test_delete_duplicates_short_term(
-    recorder_db_url: str, caplog: pytest.LogCaptureFixture, tmp_path: Path
+async def test_delete_duplicates_short_term(
+    async_test_recorder: RecorderInstanceGenerator,
+    caplog: pytest.LogCaptureFixture,
+    tmp_path: Path,
 ) -> None:
     """Test removal of duplicated statistics.
@@ -637,37 +645,37 @@ def test_delete_duplicates_short_term(
                 schema_version_postfix=SCHEMA_VERSION_POSTFIX,
             ),
         ),
-        get_test_home_assistant() as hass,
     ):
-        recorder_helper.async_initialize_recorder(hass)
-        setup_component(hass, "recorder", {"recorder": {"db_url": recorder_db_url}})
-        get_instance(hass).recorder_and_worker_thread_ids.add(threading.get_ident())
-        wait_recording_done(hass)
-        wait_recording_done(hass)
-        with session_scope(hass=hass) as session:
-            session.add(
-                recorder.db_schema.StatisticsMeta.from_meta(external_energy_metadata_1)
-            )
-        with session_scope(hass=hass) as session:
-            session.add(
-                recorder.db_schema.StatisticsShortTerm.from_stats(1, statistic_row)
-            )
-            session.add(
-                recorder.db_schema.StatisticsShortTerm.from_stats(1, statistic_row)
-            )
-        hass.stop()
+        async with async_test_home_assistant() as hass, async_test_recorder(hass):
+            get_instance(hass).recorder_and_worker_thread_ids.add(threading.get_ident())
+            await async_wait_recording_done(hass)
+            await async_wait_recording_done(hass)
+            with session_scope(hass=hass) as session:
+                session.add(
+                    recorder.db_schema.StatisticsMeta.from_meta(
+                        external_energy_metadata_1
+                    )
+                )
+            with session_scope(hass=hass) as session:
+                session.add(
+                    recorder.db_schema.StatisticsShortTerm.from_stats(1, statistic_row)
+                )
+                session.add(
+                    recorder.db_schema.StatisticsShortTerm.from_stats(1, statistic_row)
+                )
+            await hass.async_stop()
 
     # Test that the duplicates are removed during migration from schema 23
-    with get_test_home_assistant() as hass:
-        hass.config.config_dir = tmp_path
-        recorder_helper.async_initialize_recorder(hass)
-        setup_component(hass, "recorder", {"recorder": {"db_url": recorder_db_url}})
-        hass.start()
-        wait_recording_done(hass)
-        wait_recording_done(hass)
-        hass.stop()
+    async with (
+        async_test_home_assistant(config_dir=tmp_path) as hass,
+        async_test_recorder(hass),
+    ):
+        await hass.async_start()
+        await async_wait_recording_done(hass)
+        await async_wait_recording_done(hass)
+        await hass.async_stop()
 
     assert "duplicated statistics rows" not in caplog.text
     assert "Found non identical" not in caplog.text