Update recorder tests to async (#124161)
parent 135ebaafa0
commit 06d1bbc20f
1 changed file with 140 additions and 132 deletions
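Every hunk below applies the same conversion: the synchronous test scaffolding (get_test_home_assistant, setup_component, wait_recording_done, hass.start/hass.stop) is replaced with the async context managers async_test_home_assistant and async_test_recorder plus async_wait_recording_done, and the tests are parametrized with persistent_database instead of taking a recorder_db_url argument. A minimal sketch of the converted shape follows; the test name is hypothetical, and the fixtures and markers are assumed to be provided by the recorder test suite's conftest, as in the diff:

    import pytest

    from tests.common import async_test_home_assistant
    from tests.typing import RecorderInstanceGenerator

    from .common import async_wait_recording_done


    @pytest.mark.parametrize("persistent_database", [True])
    async def test_example_recorder_flow(
        async_test_recorder: RecorderInstanceGenerator,
        caplog: pytest.LogCaptureFixture,
    ) -> None:
        """Hypothetical test showing the async recorder test shape."""
        # Create a test Home Assistant instance and attach the recorder to it.
        async with async_test_home_assistant() as hass, async_test_recorder(hass):
            await hass.async_start()
            # Block until the recorder has committed all pending work.
            await async_wait_recording_done(hass)
            await hass.async_stop()

        assert "Found non identical" not in caplog.text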
@@ -17,18 +17,17 @@ import pytest
 from homeassistant.components import recorder
 from homeassistant.components.recorder import get_instance
 from homeassistant.components.recorder.util import session_scope
-from homeassistant.helpers import recorder as recorder_helper
-from homeassistant.setup import setup_component
 import homeassistant.util.dt as dt_util

 from .common import (
     CREATE_ENGINE_TARGET,
+    async_wait_recording_done,
     create_engine_test_for_schema_version_postfix,
     get_schema_module_path,
-    wait_recording_done,
 )

-from tests.common import get_test_home_assistant
+from tests.common import async_test_home_assistant
+from tests.typing import RecorderInstanceGenerator

 SCHEMA_VERSION_POSTFIX = "23_with_newer_columns"
 SCHEMA_MODULE = get_schema_module_path(SCHEMA_VERSION_POSTFIX)
@@ -37,8 +36,8 @@ SCHEMA_MODULE = get_schema_module_path(SCHEMA_VERSION_POSTFIX)
 @pytest.mark.skip_on_db_engine(["mysql", "postgresql"])
 @pytest.mark.usefixtures("skip_by_db_engine")
+@pytest.mark.parametrize("persistent_database", [True])
-def test_delete_duplicates(
-    recorder_db_url: str, caplog: pytest.LogCaptureFixture
+async def test_delete_duplicates(
+    async_test_recorder: RecorderInstanceGenerator, caplog: pytest.LogCaptureFixture
 ) -> None:
     """Test removal of duplicated statistics.

@@ -176,42 +175,42 @@ def test_delete_duplicates(
                 schema_version_postfix=SCHEMA_VERSION_POSTFIX,
             ),
         ),
-        get_test_home_assistant() as hass,
     ):
-        recorder_helper.async_initialize_recorder(hass)
-        setup_component(hass, "recorder", {"recorder": {"db_url": recorder_db_url}})
-        get_instance(hass).recorder_and_worker_thread_ids.add(threading.get_ident())
-        wait_recording_done(hass)
-        wait_recording_done(hass)
+        async with async_test_home_assistant() as hass, async_test_recorder(hass):
+            get_instance(hass).recorder_and_worker_thread_ids.add(threading.get_ident())
+            await async_wait_recording_done(hass)
+            await async_wait_recording_done(hass)

-        with session_scope(hass=hass) as session:
-            session.add(
-                recorder.db_schema.StatisticsMeta.from_meta(external_energy_metadata_1)
-            )
-            session.add(
-                recorder.db_schema.StatisticsMeta.from_meta(external_energy_metadata_2)
-            )
-            session.add(
-                recorder.db_schema.StatisticsMeta.from_meta(external_co2_metadata)
-            )
-        with session_scope(hass=hass) as session:
-            for stat in external_energy_statistics_1:
-                session.add(recorder.db_schema.Statistics.from_stats(1, stat))
-            for stat in external_energy_statistics_2:
-                session.add(recorder.db_schema.Statistics.from_stats(2, stat))
-            for stat in external_co2_statistics:
-                session.add(recorder.db_schema.Statistics.from_stats(3, stat))
+            with session_scope(hass=hass) as session:
+                session.add(
+                    recorder.db_schema.StatisticsMeta.from_meta(
+                        external_energy_metadata_1
+                    )
+                )
+                session.add(
+                    recorder.db_schema.StatisticsMeta.from_meta(
+                        external_energy_metadata_2
+                    )
+                )
+                session.add(
+                    recorder.db_schema.StatisticsMeta.from_meta(external_co2_metadata)
+                )
+            with session_scope(hass=hass) as session:
+                for stat in external_energy_statistics_1:
+                    session.add(recorder.db_schema.Statistics.from_stats(1, stat))
+                for stat in external_energy_statistics_2:
+                    session.add(recorder.db_schema.Statistics.from_stats(2, stat))
+                for stat in external_co2_statistics:
+                    session.add(recorder.db_schema.Statistics.from_stats(3, stat))

-        hass.stop()
+            await hass.async_stop()

     # Test that the duplicates are removed during migration from schema 23
-    with get_test_home_assistant() as hass:
-        recorder_helper.async_initialize_recorder(hass)
-        setup_component(hass, "recorder", {"recorder": {"db_url": recorder_db_url}})
-        hass.start()
-        wait_recording_done(hass)
-        wait_recording_done(hass)
-        hass.stop()
+    async with async_test_home_assistant() as hass, async_test_recorder(hass):
+        await hass.async_start()
+        await async_wait_recording_done(hass)
+        await async_wait_recording_done(hass)
+        await hass.async_stop()

     assert "Deleted 2 duplicated statistics rows" in caplog.text
     assert "Found non identical" not in caplog.text
@@ -221,8 +220,8 @@ def test_delete_duplicates(
 @pytest.mark.skip_on_db_engine(["mysql", "postgresql"])
 @pytest.mark.usefixtures("skip_by_db_engine")
+@pytest.mark.parametrize("persistent_database", [True])
-def test_delete_duplicates_many(
-    recorder_db_url: str, caplog: pytest.LogCaptureFixture
+async def test_delete_duplicates_many(
+    async_test_recorder: RecorderInstanceGenerator, caplog: pytest.LogCaptureFixture
 ) -> None:
     """Test removal of duplicated statistics.

@@ -360,48 +359,48 @@ def test_delete_duplicates_many(
                 schema_version_postfix=SCHEMA_VERSION_POSTFIX,
             ),
         ),
-        get_test_home_assistant() as hass,
     ):
-        recorder_helper.async_initialize_recorder(hass)
-        setup_component(hass, "recorder", {"recorder": {"db_url": recorder_db_url}})
-        get_instance(hass).recorder_and_worker_thread_ids.add(threading.get_ident())
-        wait_recording_done(hass)
-        wait_recording_done(hass)
+        async with async_test_home_assistant() as hass, async_test_recorder(hass):
+            get_instance(hass).recorder_and_worker_thread_ids.add(threading.get_ident())
+            await async_wait_recording_done(hass)
+            await async_wait_recording_done(hass)

-        with session_scope(hass=hass) as session:
-            session.add(
-                recorder.db_schema.StatisticsMeta.from_meta(external_energy_metadata_1)
-            )
-            session.add(
-                recorder.db_schema.StatisticsMeta.from_meta(external_energy_metadata_2)
-            )
-            session.add(
-                recorder.db_schema.StatisticsMeta.from_meta(external_co2_metadata)
-            )
-        with session_scope(hass=hass) as session:
-            for stat in external_energy_statistics_1:
-                session.add(recorder.db_schema.Statistics.from_stats(1, stat))
-            for _ in range(3000):
-                session.add(
-                    recorder.db_schema.Statistics.from_stats(
-                        1, external_energy_statistics_1[-1]
-                    )
-                )
-            for stat in external_energy_statistics_2:
-                session.add(recorder.db_schema.Statistics.from_stats(2, stat))
-            for stat in external_co2_statistics:
-                session.add(recorder.db_schema.Statistics.from_stats(3, stat))
+            with session_scope(hass=hass) as session:
+                session.add(
+                    recorder.db_schema.StatisticsMeta.from_meta(
+                        external_energy_metadata_1
+                    )
+                )
+                session.add(
+                    recorder.db_schema.StatisticsMeta.from_meta(
+                        external_energy_metadata_2
+                    )
+                )
+                session.add(
+                    recorder.db_schema.StatisticsMeta.from_meta(external_co2_metadata)
+                )
+            with session_scope(hass=hass) as session:
+                for stat in external_energy_statistics_1:
+                    session.add(recorder.db_schema.Statistics.from_stats(1, stat))
+                for _ in range(3000):
+                    session.add(
+                        recorder.db_schema.Statistics.from_stats(
+                            1, external_energy_statistics_1[-1]
+                        )
+                    )
+                for stat in external_energy_statistics_2:
+                    session.add(recorder.db_schema.Statistics.from_stats(2, stat))
+                for stat in external_co2_statistics:
+                    session.add(recorder.db_schema.Statistics.from_stats(3, stat))

-        hass.stop()
+            await hass.async_stop()

     # Test that the duplicates are removed during migration from schema 23
-    with get_test_home_assistant() as hass:
-        recorder_helper.async_initialize_recorder(hass)
-        setup_component(hass, "recorder", {"recorder": {"db_url": recorder_db_url}})
-        hass.start()
-        wait_recording_done(hass)
-        wait_recording_done(hass)
-        hass.stop()
+    async with async_test_home_assistant() as hass, async_test_recorder(hass):
+        await hass.async_start()
+        await async_wait_recording_done(hass)
+        await async_wait_recording_done(hass)
+        await hass.async_stop()

     assert "Deleted 3002 duplicated statistics rows" in caplog.text
     assert "Found non identical" not in caplog.text
@@ -412,8 +411,10 @@ def test_delete_duplicates_many(
 @pytest.mark.skip_on_db_engine(["mysql", "postgresql"])
 @pytest.mark.usefixtures("skip_by_db_engine")
+@pytest.mark.parametrize("persistent_database", [True])
-def test_delete_duplicates_non_identical(
-    recorder_db_url: str, caplog: pytest.LogCaptureFixture, tmp_path: Path
+async def test_delete_duplicates_non_identical(
+    async_test_recorder: RecorderInstanceGenerator,
+    caplog: pytest.LogCaptureFixture,
+    tmp_path: Path,
 ) -> None:
     """Test removal of duplicated statistics.

@@ -521,38 +522,40 @@ def test_delete_duplicates_non_identical(
                 schema_version_postfix=SCHEMA_VERSION_POSTFIX,
             ),
         ),
-        get_test_home_assistant() as hass,
     ):
-        recorder_helper.async_initialize_recorder(hass)
-        setup_component(hass, "recorder", {"recorder": {"db_url": recorder_db_url}})
-        get_instance(hass).recorder_and_worker_thread_ids.add(threading.get_ident())
-        wait_recording_done(hass)
-        wait_recording_done(hass)
+        async with async_test_home_assistant() as hass, async_test_recorder(hass):
+            get_instance(hass).recorder_and_worker_thread_ids.add(threading.get_ident())
+            await async_wait_recording_done(hass)
+            await async_wait_recording_done(hass)

-        with session_scope(hass=hass) as session:
-            session.add(
-                recorder.db_schema.StatisticsMeta.from_meta(external_energy_metadata_1)
-            )
-            session.add(
-                recorder.db_schema.StatisticsMeta.from_meta(external_energy_metadata_2)
-            )
-        with session_scope(hass=hass) as session:
-            for stat in external_energy_statistics_1:
-                session.add(recorder.db_schema.Statistics.from_stats(1, stat))
-            for stat in external_energy_statistics_2:
-                session.add(recorder.db_schema.Statistics.from_stats(2, stat))
+            with session_scope(hass=hass) as session:
+                session.add(
+                    recorder.db_schema.StatisticsMeta.from_meta(
+                        external_energy_metadata_1
+                    )
+                )
+                session.add(
+                    recorder.db_schema.StatisticsMeta.from_meta(
+                        external_energy_metadata_2
+                    )
+                )
+            with session_scope(hass=hass) as session:
+                for stat in external_energy_statistics_1:
+                    session.add(recorder.db_schema.Statistics.from_stats(1, stat))
+                for stat in external_energy_statistics_2:
+                    session.add(recorder.db_schema.Statistics.from_stats(2, stat))

-        hass.stop()
+            await hass.async_stop()

     # Test that the duplicates are removed during migration from schema 23
-    with get_test_home_assistant() as hass:
-        hass.config.config_dir = tmp_path
-        recorder_helper.async_initialize_recorder(hass)
-        setup_component(hass, "recorder", {"recorder": {"db_url": recorder_db_url}})
-        hass.start()
-        wait_recording_done(hass)
-        wait_recording_done(hass)
-        hass.stop()
+    async with (
+        async_test_home_assistant(config_dir=tmp_path) as hass,
+        async_test_recorder(hass),
+    ):
+        await hass.async_start()
+        await async_wait_recording_done(hass)
+        await async_wait_recording_done(hass)
+        await hass.async_stop()

     assert "Deleted 2 duplicated statistics rows" in caplog.text
     assert "Deleted 1 non identical" in caplog.text
@@ -561,8 +564,11 @@ def test_delete_duplicates_non_identical(
     isotime = dt_util.utcnow().isoformat()
     backup_file_name = f".storage/deleted_statistics.{isotime}.json"

-    with open(hass.config.path(backup_file_name), encoding="utf8") as backup_file:
-        backup = json.load(backup_file)
+    def read_backup():
+        with open(hass.config.path(backup_file_name), encoding="utf8") as backup_file:
+            return json.load(backup_file)
+
+    backup = await hass.async_add_executor_job(read_backup)

     assert backup == [
         {
@@ -597,8 +603,10 @@ def test_delete_duplicates_non_identical(
+@pytest.mark.parametrize("persistent_database", [True])
 @pytest.mark.skip_on_db_engine(["mysql", "postgresql"])
 @pytest.mark.usefixtures("skip_by_db_engine")
-def test_delete_duplicates_short_term(
-    recorder_db_url: str, caplog: pytest.LogCaptureFixture, tmp_path: Path
+async def test_delete_duplicates_short_term(
+    async_test_recorder: RecorderInstanceGenerator,
+    caplog: pytest.LogCaptureFixture,
+    tmp_path: Path,
 ) -> None:
     """Test removal of duplicated statistics.

@@ -637,37 +645,37 @@ def test_delete_duplicates_short_term(
                 schema_version_postfix=SCHEMA_VERSION_POSTFIX,
             ),
         ),
-        get_test_home_assistant() as hass,
     ):
-        recorder_helper.async_initialize_recorder(hass)
-        setup_component(hass, "recorder", {"recorder": {"db_url": recorder_db_url}})
-        get_instance(hass).recorder_and_worker_thread_ids.add(threading.get_ident())
-        wait_recording_done(hass)
-        wait_recording_done(hass)
+        async with async_test_home_assistant() as hass, async_test_recorder(hass):
+            get_instance(hass).recorder_and_worker_thread_ids.add(threading.get_ident())
+            await async_wait_recording_done(hass)
+            await async_wait_recording_done(hass)

-        with session_scope(hass=hass) as session:
-            session.add(
-                recorder.db_schema.StatisticsMeta.from_meta(external_energy_metadata_1)
-            )
-        with session_scope(hass=hass) as session:
-            session.add(
-                recorder.db_schema.StatisticsShortTerm.from_stats(1, statistic_row)
-            )
-            session.add(
-                recorder.db_schema.StatisticsShortTerm.from_stats(1, statistic_row)
-            )
+            with session_scope(hass=hass) as session:
+                session.add(
+                    recorder.db_schema.StatisticsMeta.from_meta(
+                        external_energy_metadata_1
+                    )
+                )
+            with session_scope(hass=hass) as session:
+                session.add(
+                    recorder.db_schema.StatisticsShortTerm.from_stats(1, statistic_row)
+                )
+                session.add(
+                    recorder.db_schema.StatisticsShortTerm.from_stats(1, statistic_row)
+                )

-        hass.stop()
+            await hass.async_stop()

     # Test that the duplicates are removed during migration from schema 23
-    with get_test_home_assistant() as hass:
-        hass.config.config_dir = tmp_path
-        recorder_helper.async_initialize_recorder(hass)
-        setup_component(hass, "recorder", {"recorder": {"db_url": recorder_db_url}})
-        hass.start()
-        wait_recording_done(hass)
-        wait_recording_done(hass)
-        hass.stop()
+    async with (
+        async_test_home_assistant(config_dir=tmp_path) as hass,
+        async_test_recorder(hass),
+    ):
+        await hass.async_start()
+        await async_wait_recording_done(hass)
+        await async_wait_recording_done(hass)
+        await hass.async_stop()

     assert "duplicated statistics rows" not in caplog.text
     assert "Found non identical" not in caplog.text