Use recorder fixtures and helpers in tests (#70773)
parent 24b090a038 · commit 3016b5fbfc
8 changed files with 190 additions and 303 deletions
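The hunks below all serve one migration: tests stop starting the recorder by hand (the init_recorder_component / async_init_recorder_component helpers plus manual trigger_db_commit and DATA_INSTANCE.block_till_done calls) and instead request the recorder_mock fixture and use the shared helpers async_recorder_block_till_done and async_wait_recording_done from tests.components.recorder.common. The contrast below is condensed from the hunks that follow and is illustrative only, not copied verbatim from any one file.

# Before: each test started the recorder itself and poked its worker thread.
async def test_fetch_period_api(hass, hass_client):
    """Test the fetch period view for history."""
    await hass.async_add_executor_job(init_recorder_component, hass)
    await async_setup_component(hass, "history", {})
    await hass.async_add_executor_job(
        hass.data[recorder.DATA_INSTANCE].block_till_done
    )
    client = await hass_client()

# After: the recorder_mock fixture provides the in-memory recorder, and any
# waiting goes through the shared recorder test helpers where still needed.
async def test_fetch_period_api(hass, hass_client, recorder_mock):
    """Test the fetch period view for history."""
    await async_setup_component(hass, "history", {})
    client = await hass_client()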
@@ -55,7 +55,7 @@ from homeassistant.helpers import (
 )
 from homeassistant.helpers.dispatcher import async_dispatcher_connect
 from homeassistant.helpers.json import JSONEncoder
-from homeassistant.setup import async_setup_component, setup_component
+from homeassistant.setup import setup_component
 from homeassistant.util.async_ import run_callback_threadsafe
 import homeassistant.util.dt as date_util
 from homeassistant.util.unit_system import METRIC_SYSTEM
@@ -917,28 +917,6 @@ def init_recorder_component(hass, add_config=None):
     )


-async def async_init_recorder_component(hass, add_config=None):
-    """Initialize the recorder asynchronously."""
-    config = dict(add_config) if add_config else {}
-    if recorder.CONF_DB_URL not in config:
-        config[recorder.CONF_DB_URL] = "sqlite://"  # In memory DB
-    if recorder.CONF_COMMIT_INTERVAL not in config:
-        config[recorder.CONF_COMMIT_INTERVAL] = 0
-
-    with patch(
-        "homeassistant.components.recorder.ALLOW_IN_MEMORY_DB",
-        True,
-    ), patch("homeassistant.components.recorder.migration.migrate_schema"):
-        assert await async_setup_component(
-            hass, recorder.DOMAIN, {recorder.DOMAIN: config}
-        )
-        assert recorder.DOMAIN in hass.config.components
-    _LOGGER.info(
-        "Test recorder successfully started, database location: %s",
-        config[recorder.CONF_DB_URL],
-    )
-
-
 def mock_restore_cache(hass, states):
     """Mock the DATA_RESTORE_CACHE."""
     key = restore_state.DATA_RESTORE_STATE_TASK
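The deleted async_init_recorder_component is what the recorder_mock fixture now does once per test. The fixture itself lives in the recorder test package and is not part of this diff; a minimal sketch of an equivalent fixture, reusing the configuration and patches from the helper removed above, could look like the following (the fixture body is an assumption for illustration, only the name recorder_mock comes from the diff):

import pytest
from unittest.mock import patch

from homeassistant.components import recorder
from homeassistant.setup import async_setup_component


@pytest.fixture
async def recorder_mock(hass):
    """Illustrative sketch: provide an in-memory recorder for a test."""
    config = {
        recorder.CONF_DB_URL: "sqlite://",  # in-memory DB, as in the deleted helper
        recorder.CONF_COMMIT_INTERVAL: 0,
    }
    with patch(
        "homeassistant.components.recorder.ALLOW_IN_MEMORY_DB",
        True,
    ), patch("homeassistant.components.recorder.migration.migrate_schema"):
        assert await async_setup_component(
            hass, recorder.DOMAIN, {recorder.DOMAIN: config}
        )
    return hass.data[recorder.DATA_INSTANCE]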
@@ -8,13 +8,11 @@ from homeassistant.components.recorder.statistics import async_add_external_stat
 from homeassistant.setup import async_setup_component
 from homeassistant.util import dt as dt_util

-from tests.common import (
-    MockConfigEntry,
-    flush_store,
-    init_recorder_component,
-    mock_platform,
+from tests.common import MockConfigEntry, flush_store, mock_platform
+from tests.components.recorder.common import (
+    async_recorder_block_till_done,
+    async_wait_recording_done,
 )
-from tests.components.recorder.common import async_wait_recording_done


 @pytest.fixture(autouse=True)
@@ -298,14 +296,14 @@ async def test_get_solar_forecast(hass, hass_ws_client, mock_energy_platform) ->


 @pytest.mark.freeze_time("2021-08-01 00:00:00+00:00")
-async def test_fossil_energy_consumption_no_co2(hass, hass_ws_client):
+async def test_fossil_energy_consumption_no_co2(hass, hass_ws_client, recorder_mock):
     """Test fossil_energy_consumption when co2 data is missing."""
     now = dt_util.utcnow()
     later = dt_util.as_utc(dt_util.parse_datetime("2022-09-01 00:00:00"))

-    await hass.async_add_executor_job(init_recorder_component, hass)
     await async_setup_component(hass, "history", {})
     await async_setup_component(hass, "sensor", {})
+    await async_recorder_block_till_done(hass)

     period1 = dt_util.as_utc(dt_util.parse_datetime("2021-09-01 00:00:00"))
     period2 = dt_util.as_utc(dt_util.parse_datetime("2021-09-30 23:00:00"))
@@ -459,14 +457,14 @@ async def test_fossil_energy_consumption_no_co2(hass, hass_ws_client):


 @pytest.mark.freeze_time("2021-08-01 00:00:00+00:00")
-async def test_fossil_energy_consumption_hole(hass, hass_ws_client):
+async def test_fossil_energy_consumption_hole(hass, hass_ws_client, recorder_mock):
     """Test fossil_energy_consumption when some data points lack sum."""
     now = dt_util.utcnow()
     later = dt_util.as_utc(dt_util.parse_datetime("2022-09-01 00:00:00"))

-    await hass.async_add_executor_job(init_recorder_component, hass)
     await async_setup_component(hass, "history", {})
     await async_setup_component(hass, "sensor", {})
+    await async_recorder_block_till_done(hass)

     period1 = dt_util.as_utc(dt_util.parse_datetime("2021-09-01 00:00:00"))
     period2 = dt_util.as_utc(dt_util.parse_datetime("2021-09-30 23:00:00"))
@@ -620,14 +618,14 @@ async def test_fossil_energy_consumption_hole(hass, hass_ws_client):


 @pytest.mark.freeze_time("2021-08-01 00:00:00+00:00")
-async def test_fossil_energy_consumption_no_data(hass, hass_ws_client):
+async def test_fossil_energy_consumption_no_data(hass, hass_ws_client, recorder_mock):
     """Test fossil_energy_consumption when there is no data."""
     now = dt_util.utcnow()
     later = dt_util.as_utc(dt_util.parse_datetime("2022-09-01 00:00:00"))

-    await hass.async_add_executor_job(init_recorder_component, hass)
     await async_setup_component(hass, "history", {})
     await async_setup_component(hass, "sensor", {})
+    await async_recorder_block_till_done(hass)

     period1 = dt_util.as_utc(dt_util.parse_datetime("2021-09-01 00:00:00"))
     period2 = dt_util.as_utc(dt_util.parse_datetime("2021-09-30 23:00:00"))
@@ -768,14 +766,14 @@ async def test_fossil_energy_consumption_no_data(hass, hass_ws_client):


 @pytest.mark.freeze_time("2021-08-01 00:00:00+00:00")
-async def test_fossil_energy_consumption(hass, hass_ws_client):
+async def test_fossil_energy_consumption(hass, hass_ws_client, recorder_mock):
     """Test fossil_energy_consumption with co2 sensor data."""
     now = dt_util.utcnow()
     later = dt_util.as_utc(dt_util.parse_datetime("2022-09-01 00:00:00"))

-    await hass.async_add_executor_job(init_recorder_component, hass)
     await async_setup_component(hass, "history", {})
     await async_setup_component(hass, "sensor", {})
+    await async_recorder_block_till_done(hass)

     period1 = dt_util.as_utc(dt_util.parse_datetime("2021-09-01 00:00:00"))
     period2 = dt_util.as_utc(dt_util.parse_datetime("2021-09-30 23:00:00"))
@@ -17,10 +17,9 @@ from homeassistant.setup import async_setup_component
 import homeassistant.util.dt as dt_util
 from homeassistant.util.unit_system import IMPERIAL_SYSTEM, METRIC_SYSTEM

-from tests.common import init_recorder_component
 from tests.components.recorder.common import (
+    async_recorder_block_till_done,
     async_wait_recording_done,
-    trigger_db_commit,
     wait_recording_done,
 )

@@ -584,23 +583,21 @@ def record_states(hass):
     return zero, four, states


-async def test_fetch_period_api(hass, hass_client):
+async def test_fetch_period_api(hass, hass_client, recorder_mock):
     """Test the fetch period view for history."""
-    await hass.async_add_executor_job(init_recorder_component, hass)
     await async_setup_component(hass, "history", {})
-    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
     client = await hass_client()
     response = await client.get(f"/api/history/period/{dt_util.utcnow().isoformat()}")
     assert response.status == HTTPStatus.OK


-async def test_fetch_period_api_with_use_include_order(hass, hass_client):
+async def test_fetch_period_api_with_use_include_order(
+    hass, hass_client, recorder_mock
+):
     """Test the fetch period view for history with include order."""
-    await hass.async_add_executor_job(init_recorder_component, hass)
     await async_setup_component(
         hass, "history", {history.DOMAIN: {history.CONF_ORDER: True}}
     )
-    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
     client = await hass_client()
     response = await client.get(f"/api/history/period/{dt_util.utcnow().isoformat()}")
     assert response.status == HTTPStatus.OK
@@ -639,19 +636,16 @@ async def test_fetch_period_api_with_minimal_response(hass, recorder_mock, hass_
     assert state_list[2]["state"] == "23"


-async def test_fetch_period_api_with_no_timestamp(hass, hass_client):
+async def test_fetch_period_api_with_no_timestamp(hass, hass_client, recorder_mock):
     """Test the fetch period view for history with no timestamp."""
-    await hass.async_add_executor_job(init_recorder_component, hass)
     await async_setup_component(hass, "history", {})
-    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
     client = await hass_client()
     response = await client.get("/api/history/period")
     assert response.status == HTTPStatus.OK


-async def test_fetch_period_api_with_include_order(hass, hass_client):
+async def test_fetch_period_api_with_include_order(hass, hass_client, recorder_mock):
     """Test the fetch period view for history."""
-    await hass.async_add_executor_job(init_recorder_component, hass)
     await async_setup_component(
         hass,
         "history",
@@ -662,7 +656,6 @@ async def test_fetch_period_api_with_include_order(hass, hass_client):
             }
         },
     )
-    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
     client = await hass_client()
     response = await client.get(
         f"/api/history/period/{dt_util.utcnow().isoformat()}",
@@ -671,9 +664,10 @@ async def test_fetch_period_api_with_include_order(hass, hass_client):
     assert response.status == HTTPStatus.OK


-async def test_fetch_period_api_with_entity_glob_include(hass, hass_client):
+async def test_fetch_period_api_with_entity_glob_include(
+    hass, hass_client, recorder_mock
+):
     """Test the fetch period view for history."""
-    await hass.async_add_executor_job(init_recorder_component, hass)
     await async_setup_component(
         hass,
         "history",
@@ -683,16 +677,11 @@ async def test_fetch_period_api_with_entity_glob_include(hass, hass_client):
             }
         },
     )
-    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
     hass.states.async_set("light.kitchen", "on")
     hass.states.async_set("light.cow", "on")
     hass.states.async_set("light.nomatch", "on")

-    await hass.async_block_till_done()
+    await async_wait_recording_done(hass)

-    await hass.async_add_executor_job(trigger_db_commit, hass)
-    await hass.async_block_till_done()
-    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
-
     client = await hass_client()
     response = await client.get(
@@ -703,9 +692,10 @@ async def test_fetch_period_api_with_entity_glob_include(hass, hass_client):
     assert response_json[0][0]["entity_id"] == "light.kitchen"


-async def test_fetch_period_api_with_entity_glob_exclude(hass, hass_client):
+async def test_fetch_period_api_with_entity_glob_exclude(
+    hass, hass_client, recorder_mock
+):
     """Test the fetch period view for history."""
-    await hass.async_add_executor_job(init_recorder_component, hass)
     await async_setup_component(
         hass,
         "history",
@@ -719,18 +709,13 @@ async def test_fetch_period_api_with_entity_glob_exclude(hass, hass_client):
             }
         },
     )
-    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
     hass.states.async_set("light.kitchen", "on")
     hass.states.async_set("light.cow", "on")
     hass.states.async_set("light.match", "on")
     hass.states.async_set("switch.match", "on")
     hass.states.async_set("media_player.test", "on")

-    await hass.async_block_till_done()
+    await async_wait_recording_done(hass)

-    await hass.async_add_executor_job(trigger_db_commit, hass)
-    await hass.async_block_till_done()
-    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
-
     client = await hass_client()
     response = await client.get(
@@ -743,9 +728,10 @@ async def test_fetch_period_api_with_entity_glob_exclude(hass, hass_client):
     assert response_json[1][0]["entity_id"] == "light.match"


-async def test_fetch_period_api_with_entity_glob_include_and_exclude(hass, hass_client):
+async def test_fetch_period_api_with_entity_glob_include_and_exclude(
+    hass, hass_client, recorder_mock
+):
     """Test the fetch period view for history."""
-    await hass.async_add_executor_job(init_recorder_component, hass)
     await async_setup_component(
         hass,
         "history",
@@ -762,7 +748,6 @@ async def test_fetch_period_api_with_entity_glob_include_and_exclude(hass, hass_
             }
         },
     )
-    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
     hass.states.async_set("light.kitchen", "on")
     hass.states.async_set("light.cow", "on")
     hass.states.async_set("light.match", "on")
@@ -770,11 +755,7 @@ async def test_fetch_period_api_with_entity_glob_include_and_exclude(hass, hass_
     hass.states.async_set("switch.match", "on")
     hass.states.async_set("media_player.test", "on")

-    await hass.async_block_till_done()
+    await async_wait_recording_done(hass)

-    await hass.async_add_executor_job(trigger_db_commit, hass)
-    await hass.async_block_till_done()
-    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
-
     client = await hass_client()
     response = await client.get(
@@ -788,24 +769,18 @@ async def test_fetch_period_api_with_entity_glob_include_and_exclude(hass, hass_
     assert response_json[2][0]["entity_id"] == "switch.match"


-async def test_entity_ids_limit_via_api(hass, hass_client):
+async def test_entity_ids_limit_via_api(hass, hass_client, recorder_mock):
     """Test limiting history to entity_ids."""
-    await hass.async_add_executor_job(init_recorder_component, hass)
     await async_setup_component(
         hass,
         "history",
         {"history": {}},
     )
-    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
     hass.states.async_set("light.kitchen", "on")
     hass.states.async_set("light.cow", "on")
     hass.states.async_set("light.nomatch", "on")

-    await hass.async_block_till_done()
+    await async_wait_recording_done(hass)

-    await hass.async_add_executor_job(trigger_db_commit, hass)
-    await hass.async_block_till_done()
-    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
-
     client = await hass_client()
     response = await client.get(
@@ -818,24 +793,20 @@ async def test_entity_ids_limit_via_api(hass, hass_client):
     assert response_json[1][0]["entity_id"] == "light.cow"


-async def test_entity_ids_limit_via_api_with_skip_initial_state(hass, hass_client):
+async def test_entity_ids_limit_via_api_with_skip_initial_state(
+    hass, hass_client, recorder_mock
+):
     """Test limiting history to entity_ids with skip_initial_state."""
-    await hass.async_add_executor_job(init_recorder_component, hass)
     await async_setup_component(
         hass,
         "history",
         {"history": {}},
     )
-    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
     hass.states.async_set("light.kitchen", "on")
     hass.states.async_set("light.cow", "on")
     hass.states.async_set("light.nomatch", "on")

-    await hass.async_block_till_done()
+    await async_wait_recording_done(hass)

-    await hass.async_add_executor_job(trigger_db_commit, hass)
-    await hass.async_block_till_done()
-    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
-
     client = await hass_client()
     response = await client.get(
@@ -885,24 +856,20 @@ TEMPERATURE_SENSOR_ATTRIBUTES = {
     ],
 )
 async def test_statistics_during_period(
-    hass, hass_ws_client, units, attributes, state, value
+    hass, hass_ws_client, recorder_mock, units, attributes, state, value
 ):
     """Test statistics_during_period."""
     now = dt_util.utcnow()

     hass.config.units = units
-    await hass.async_add_executor_job(init_recorder_component, hass)
     await async_setup_component(hass, "history", {})
     await async_setup_component(hass, "sensor", {})
-    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
+    await async_recorder_block_till_done(hass)
     hass.states.async_set("sensor.test", state, attributes=attributes)
-    await hass.async_block_till_done()
+    await async_wait_recording_done(hass)

-    await hass.async_add_executor_job(trigger_db_commit, hass)
-    await hass.async_block_till_done()
-
     hass.data[recorder.DATA_INSTANCE].do_adhoc_statistics(start=now)
-    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
+    await async_wait_recording_done(hass)

     client = await hass_ws_client()
     await client.send_json(
@@ -947,15 +914,15 @@ async def test_statistics_during_period(
     }


-async def test_statistics_during_period_bad_start_time(hass, hass_ws_client):
+async def test_statistics_during_period_bad_start_time(
+    hass, hass_ws_client, recorder_mock
+):
     """Test statistics_during_period."""
-    await hass.async_add_executor_job(init_recorder_component, hass)
     await async_setup_component(
         hass,
         "history",
         {"history": {}},
     )
-    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)

     client = await hass_ws_client()
     await client.send_json(
@@ -971,17 +938,17 @@ async def test_statistics_during_period_bad_start_time(hass, hass_ws_client):
     assert response["error"]["code"] == "invalid_start_time"


-async def test_statistics_during_period_bad_end_time(hass, hass_ws_client):
+async def test_statistics_during_period_bad_end_time(
+    hass, hass_ws_client, recorder_mock
+):
     """Test statistics_during_period."""
     now = dt_util.utcnow()

-    await hass.async_add_executor_job(init_recorder_component, hass)
     await async_setup_component(
         hass,
         "history",
         {"history": {}},
     )
-    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)

     client = await hass_ws_client()
     await client.send_json(
@@ -1009,15 +976,16 @@ async def test_statistics_during_period_bad_end_time(hass, hass_ws_client):
         (METRIC_SYSTEM, PRESSURE_SENSOR_ATTRIBUTES, "Pa"),
     ],
 )
-async def test_list_statistic_ids(hass, hass_ws_client, units, attributes, unit):
+async def test_list_statistic_ids(
+    hass, hass_ws_client, recorder_mock, units, attributes, unit
+):
     """Test list_statistic_ids."""
     now = dt_util.utcnow()

     hass.config.units = units
-    await hass.async_add_executor_job(init_recorder_component, hass)
     await async_setup_component(hass, "history", {"history": {}})
     await async_setup_component(hass, "sensor", {})
-    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
+    await async_recorder_block_till_done(hass)

     client = await hass_ws_client()
     await client.send_json({"id": 1, "type": "history/list_statistic_ids"})
@@ -1026,10 +994,7 @@ async def test_list_statistic_ids(hass, hass_ws_client, units, attributes, unit)
     assert response["result"] == []

     hass.states.async_set("sensor.test", 10, attributes=attributes)
-    await hass.async_block_till_done()
+    await async_wait_recording_done(hass)

-    await hass.async_add_executor_job(trigger_db_commit, hass)
-    await hass.async_block_till_done()
-
     await client.send_json({"id": 2, "type": "history/list_statistic_ids"})
     response = await client.receive_json()
@@ -1046,7 +1011,7 @@ async def test_list_statistic_ids(hass, hass_ws_client, units, attributes, unit)
     ]

     hass.data[recorder.DATA_INSTANCE].do_adhoc_statistics(start=now)
-    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
+    await async_recorder_block_till_done(hass)
     # Remove the state, statistics will now be fetched from the database
     hass.states.async_remove("sensor.test")
     await hass.async_block_till_done()
@@ -9,7 +9,7 @@ from unittest.mock import Mock, patch
 import pytest
 import voluptuous as vol

-from homeassistant.components import logbook, recorder
+from homeassistant.components import logbook
 from homeassistant.components.alexa.smart_home import EVENT_ALEXA_SMART_HOME
 from homeassistant.components.automation import EVENT_AUTOMATION_TRIGGERED
 from homeassistant.components.recorder.models import process_timestamp_to_utc_isoformat
@@ -40,9 +40,8 @@ import homeassistant.util.dt as dt_util

 from tests.common import async_capture_events, mock_platform
 from tests.components.recorder.common import (
-    async_trigger_db_commit,
+    async_recorder_block_till_done,
     async_wait_recording_done,
-    trigger_db_commit,
 )

 EMPTY_CONFIG = logbook.CONFIG_SCHEMA({logbook.DOMAIN: {}})
@@ -88,11 +87,7 @@ async def test_service_call_create_logbook_entry(hass_):
     # Logbook entry service call results in firing an event.
     # Our service call will unblock when the event listeners have been
     # scheduled. This means that they may not have been processed yet.
-    await hass_.async_add_executor_job(trigger_db_commit, hass_)
-    await hass_.async_block_till_done()
-    await hass_.async_add_executor_job(
-        hass_.data[recorder.DATA_INSTANCE].block_till_done
-    )
+    await async_wait_recording_done(hass_)

     events = list(
         logbook._get_events(
@@ -306,7 +301,7 @@ def create_state_changed_event_from_old_new(
 async def test_logbook_view(hass, hass_client, recorder_mock):
     """Test the logbook view."""
     await async_setup_component(hass, "logbook", {})
-    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
+    await async_recorder_block_till_done(hass)
     client = await hass_client()
     response = await client.get(f"/api/logbook/{dt_util.utcnow().isoformat()}")
     assert response.status == HTTPStatus.OK
@@ -315,7 +310,7 @@ async def test_logbook_view(hass, hass_client, recorder_mock):
 async def test_logbook_view_period_entity(hass, hass_client, recorder_mock, set_utc):
     """Test the logbook view with period and entity."""
     await async_setup_component(hass, "logbook", {})
-    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
+    await async_recorder_block_till_done(hass)

     entity_id_test = "switch.test"
     hass.states.async_set(entity_id_test, STATE_OFF)
@@ -323,9 +318,7 @@ async def test_logbook_view_period_entity(hass, hass_client, recorder_mock, set_
     entity_id_second = "switch.second"
     hass.states.async_set(entity_id_second, STATE_OFF)
     hass.states.async_set(entity_id_second, STATE_ON)
-    await hass.async_add_executor_job(trigger_db_commit, hass)
-    await hass.async_block_till_done()
-    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
+    await async_wait_recording_done(hass)

     client = await hass_client()

@@ -419,12 +412,7 @@ async def test_logbook_describe_event(hass, hass_client, recorder_mock):
         return_value=dt_util.utcnow() - timedelta(seconds=5),
     ):
         hass.bus.async_fire("some_event")
-        await hass.async_block_till_done()
-        await hass.async_add_executor_job(trigger_db_commit, hass)
-        await hass.async_block_till_done()
-        await hass.async_add_executor_job(
-            hass.data[recorder.DATA_INSTANCE].block_till_done
-        )
+        await async_wait_recording_done(hass)

     client = await hass_client()
     response = await client.get("/api/logbook")
@@ -488,12 +476,7 @@ async def test_exclude_described_event(hass, hass_client, recorder_mock):
     hass.bus.async_fire(
         "some_event", {logbook.ATTR_NAME: name, logbook.ATTR_ENTITY_ID: entity_id3}
     )
-    await hass.async_block_till_done()
-    await hass.async_add_executor_job(trigger_db_commit, hass)
-    await hass.async_block_till_done()
-    await hass.async_add_executor_job(
-        hass.data[recorder.DATA_INSTANCE].block_till_done
-    )
+    await async_wait_recording_done(hass)

     client = await hass_client()
     response = await client.get("/api/logbook")
@@ -507,7 +490,7 @@ async def test_exclude_described_event(hass, hass_client, recorder_mock):
 async def test_logbook_view_end_time_entity(hass, hass_client, recorder_mock):
     """Test the logbook view with end_time and entity."""
     await async_setup_component(hass, "logbook", {})
-    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
+    await async_recorder_block_till_done(hass)

     entity_id_test = "switch.test"
     hass.states.async_set(entity_id_test, STATE_OFF)
@@ -515,9 +498,7 @@ async def test_logbook_view_end_time_entity(hass, hass_client, recorder_mock):
     entity_id_second = "switch.second"
     hass.states.async_set(entity_id_second, STATE_OFF)
     hass.states.async_set(entity_id_second, STATE_ON)
-    await hass.async_add_executor_job(trigger_db_commit, hass)
-    await hass.async_block_till_done()
-    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
+    await async_wait_recording_done(hass)

     client = await hass_client()

@@ -567,7 +548,7 @@ async def test_logbook_entity_filter_with_automations(hass, hass_client, recorde
     await async_setup_component(hass, "automation", {})
     await async_setup_component(hass, "script", {})

-    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
+    await async_recorder_block_till_done(hass)

     entity_id_test = "alarm_control_panel.area_001"
     hass.states.async_set(entity_id_test, STATE_OFF)
@@ -586,9 +567,7 @@ async def test_logbook_entity_filter_with_automations(hass, hass_client, recorde
     )
     hass.bus.async_fire(EVENT_HOMEASSISTANT_START)

-    await hass.async_add_executor_job(trigger_db_commit, hass)
-    await hass.async_block_till_done()
-    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
+    await async_wait_recording_done(hass)

     client = await hass_client()

@@ -653,7 +632,6 @@ async def test_logbook_entity_no_longer_in_state_machine(
         entity_id_test, STATE_ON, {ATTR_FRIENDLY_NAME: "Alarm Control Panel"}
     )

-    async_trigger_db_commit(hass)
     await async_wait_recording_done(hass)

     hass.states.async_remove(entity_id_test)
@@ -679,7 +657,7 @@ async def test_filter_continuous_sensor_values(
 ):
     """Test remove continuous sensor events from logbook."""
     await async_setup_component(hass, "logbook", {})
-    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
+    await async_recorder_block_till_done(hass)

     entity_id_test = "switch.test"
     hass.states.async_set(entity_id_test, STATE_OFF)
@@ -691,9 +669,7 @@ async def test_filter_continuous_sensor_values(
     hass.states.async_set(entity_id_third, STATE_OFF, {"unit_of_measurement": "foo"})
     hass.states.async_set(entity_id_third, STATE_ON, {"unit_of_measurement": "foo"})

-    await hass.async_add_executor_job(trigger_db_commit, hass)
-    await hass.async_block_till_done()
-    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
+    await async_wait_recording_done(hass)

     client = await hass_client()

@@ -714,7 +690,7 @@ async def test_filter_continuous_sensor_values(
 async def test_exclude_new_entities(hass, hass_client, recorder_mock, set_utc):
     """Test if events are excluded on first update."""
     await async_setup_component(hass, "logbook", {})
-    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
+    await async_recorder_block_till_done(hass)

     entity_id = "climate.bla"
     entity_id2 = "climate.blu"
@@ -724,9 +700,7 @@ async def test_exclude_new_entities(hass, hass_client, recorder_mock, set_utc):
     hass.states.async_set(entity_id2, STATE_OFF)
     hass.bus.async_fire(EVENT_HOMEASSISTANT_START)

-    await hass.async_add_executor_job(trigger_db_commit, hass)
-    await hass.async_block_till_done()
-    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
+    await async_wait_recording_done(hass)

     client = await hass_client()

@@ -748,7 +722,7 @@ async def test_exclude_new_entities(hass, hass_client, recorder_mock, set_utc):
 async def test_exclude_removed_entities(hass, hass_client, recorder_mock, set_utc):
     """Test if events are excluded on last update."""
     await async_setup_component(hass, "logbook", {})
-    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
+    await async_recorder_block_till_done(hass)

     entity_id = "climate.bla"
     entity_id2 = "climate.blu"
@@ -764,9 +738,7 @@ async def test_exclude_removed_entities(hass, hass_client, recorder_mock, set_ut
     hass.states.async_remove(entity_id)
     hass.states.async_remove(entity_id2)

-    await hass.async_add_executor_job(trigger_db_commit, hass)
-    await hass.async_block_till_done()
-    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
+    await async_wait_recording_done(hass)

     client = await hass_client()

@@ -789,7 +761,7 @@ async def test_exclude_removed_entities(hass, hass_client, recorder_mock, set_ut
 async def test_exclude_attribute_changes(hass, hass_client, recorder_mock, set_utc):
     """Test if events of attribute changes are filtered."""
     await async_setup_component(hass, "logbook", {})
-    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
+    await async_recorder_block_till_done(hass)

     hass.bus.async_fire(EVENT_HOMEASSISTANT_START)

@@ -802,9 +774,7 @@ async def test_exclude_attribute_changes(hass, hass_client, recorder_mock, set_u

     await hass.async_block_till_done()

-    await hass.async_add_executor_job(trigger_db_commit, hass)
-    await hass.async_block_till_done()
-    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
+    await async_wait_recording_done(hass)

     client = await hass_client()

@@ -829,7 +799,7 @@ async def test_logbook_entity_context_id(hass, recorder_mock, hass_client):
     await async_setup_component(hass, "automation", {})
     await async_setup_component(hass, "script", {})

-    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
+    await async_recorder_block_till_done(hass)

     context = ha.Context(
         id="ac5bd62de45711eaaeb351041eec8dd9",
@@ -913,11 +883,7 @@ async def test_logbook_entity_context_id(hass, recorder_mock, hass_client):
     hass.states.async_set(
         "light.switch", STATE_OFF, context=light_turn_off_service_context
     )
-    await hass.async_block_till_done()
+    await async_wait_recording_done(hass)

-    await hass.async_add_executor_job(trigger_db_commit, hass)
-    await hass.async_block_till_done()
-    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
-
     client = await hass_client()

@@ -980,7 +946,7 @@ async def test_logbook_entity_context_parent_id(hass, hass_client, recorder_mock
     await async_setup_component(hass, "automation", {})
     await async_setup_component(hass, "script", {})

-    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
+    await async_recorder_block_till_done(hass)

     context = ha.Context(
         id="ac5bd62de45711eaaeb351041eec8dd9",
@@ -1085,11 +1051,7 @@ async def test_logbook_entity_context_parent_id(hass, hass_client, recorder_mock
         "alarm_control_panel.area_009",
         missing_parent_context,
     )
-    await hass.async_block_till_done()
+    await async_wait_recording_done(hass)

-    await hass.async_add_executor_job(trigger_db_commit, hass)
-    await hass.async_block_till_done()
-    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
-
     client = await hass_client()

@@ -1179,7 +1141,7 @@ async def test_logbook_context_from_template(hass, hass_client, recorder_mock):
             }
         },
     )
-    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
+    await async_recorder_block_till_done(hass)
     await hass.async_block_till_done()
     await hass.async_start()
     await hass.async_block_till_done()
@@ -1199,11 +1161,7 @@ async def test_logbook_context_from_template(hass, hass_client, recorder_mock):
     hass.states.async_set(
         "switch.test_state", STATE_ON, context=switch_turn_off_context
     )
-    await hass.async_block_till_done()
+    await async_wait_recording_done(hass)

-    await hass.async_add_executor_job(trigger_db_commit, hass)
-    await hass.async_block_till_done()
-    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
-
     client = await hass_client()

@@ -1264,7 +1222,7 @@ async def test_logbook_entity_matches_only(hass, hass_client, recorder_mock):
             }
         },
     )
-    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
+    await async_recorder_block_till_done(hass)
     await hass.async_block_till_done()
     await hass.async_start()
     await hass.async_block_till_done()
@@ -1284,11 +1242,7 @@ async def test_logbook_entity_matches_only(hass, hass_client, recorder_mock):
     hass.states.async_set(
         "switch.test_state", STATE_ON, context=switch_turn_off_context
     )
-    await hass.async_block_till_done()
+    await async_wait_recording_done(hass)

-    await hass.async_add_executor_job(trigger_db_commit, hass)
-    await hass.async_block_till_done()
-    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
-
     client = await hass_client()

@@ -1317,7 +1271,7 @@ async def test_custom_log_entry_discoverable_via_entity_matches_only(
 ):
     """Test if a custom log entry is later discoverable via entity_matches_only."""
     await async_setup_component(hass, "logbook", {})
-    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
+    await async_recorder_block_till_done(hass)

     logbook.async_log_entry(
         hass,
@@ -1326,10 +1280,7 @@ async def test_custom_log_entry_discoverable_via_entity_matches_only(
         "switch",
         "switch.test_switch",
     )
-    await hass.async_block_till_done()
-    await hass.async_add_executor_job(trigger_db_commit, hass)
-    await hass.async_block_till_done()
-    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
+    await async_wait_recording_done(hass)

     client = await hass_client()

@@ -1377,7 +1328,7 @@ async def test_logbook_entity_matches_only_multiple(hass, hass_client, recorder_
             }
         },
     )
-    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
+    await async_recorder_block_till_done(hass)
     await hass.async_block_till_done()
     await hass.async_start()
     await hass.async_block_till_done()
@@ -1402,11 +1353,7 @@ async def test_logbook_entity_matches_only_multiple(hass, hass_client, recorder_
         "switch.test_state", STATE_ON, context=switch_turn_off_context
     )
     hass.states.async_set("light.test_state", STATE_ON, context=switch_turn_off_context)
-    await hass.async_block_till_done()
+    await async_wait_recording_done(hass)

-    await hass.async_add_executor_job(trigger_db_commit, hass)
-    await hass.async_block_till_done()
-    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
-
     client = await hass_client()

@@ -1456,7 +1403,7 @@ async def test_logbook_invalid_entity(hass, hass_client, recorder_mock):
 async def test_icon_and_state(hass, hass_client, recorder_mock):
     """Test to ensure state and custom icons are returned."""
     await async_setup_component(hass, "logbook", {})
-    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
+    await async_recorder_block_till_done(hass)

     hass.bus.async_fire(EVENT_HOMEASSISTANT_START)

@@ -1475,7 +1422,7 @@ async def test_icon_and_state(hass, hass_client, recorder_mock):
     )
     hass.states.async_set("light.kitchen", STATE_OFF, {"icon": "mdi:chemical-weapon"})

-    await _async_commit_and_wait(hass)
+    await async_wait_recording_done(hass)

     client = await hass_client()
     response_json = await _async_fetch_logbook(client)
@@ -1502,7 +1449,7 @@ async def test_exclude_events_domain(hass, hass_client, recorder_mock):
         }
     )
     await async_setup_component(hass, "logbook", config)
-    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
+    await async_recorder_block_till_done(hass)

     hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
     hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
@@ -1511,7 +1458,7 @@ async def test_exclude_events_domain(hass, hass_client, recorder_mock):
     hass.states.async_set(entity_id2, None)
     hass.states.async_set(entity_id2, 20)

-    await _async_commit_and_wait(hass)
+    await async_wait_recording_done(hass)

     client = await hass_client()
     entries = await _async_fetch_logbook(client)
@@ -1541,7 +1488,7 @@ async def test_exclude_events_domain_glob(hass, hass_client, recorder_mock):
         }
     )
     await async_setup_component(hass, "logbook", config)
-    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
+    await async_recorder_block_till_done(hass)

     hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
     hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
@@ -1552,7 +1499,7 @@ async def test_exclude_events_domain_glob(hass, hass_client, recorder_mock):
     hass.states.async_set(entity_id3, None)
     hass.states.async_set(entity_id3, 30)

-    await _async_commit_and_wait(hass)
+    await async_wait_recording_done(hass)
     client = await hass_client()
     entries = await _async_fetch_logbook(client)

@@ -1580,7 +1527,7 @@ async def test_include_events_entity(hass, hass_client, recorder_mock):
         }
     )
     await async_setup_component(hass, "logbook", config)
-    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
+    await async_recorder_block_till_done(hass)

     hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
     hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
@ -1589,7 +1536,7 @@ async def test_include_events_entity(hass, hass_client, recorder_mock):
|
||||||
hass.states.async_set(entity_id2, None)
|
hass.states.async_set(entity_id2, None)
|
||||||
hass.states.async_set(entity_id2, 20)
|
hass.states.async_set(entity_id2, 20)
|
||||||
|
|
||||||
await _async_commit_and_wait(hass)
|
await async_wait_recording_done(hass)
|
||||||
client = await hass_client()
|
client = await hass_client()
|
||||||
entries = await _async_fetch_logbook(client)
|
entries = await _async_fetch_logbook(client)
|
||||||
|
|
||||||
|
@ -1612,7 +1559,7 @@ async def test_exclude_events_entity(hass, hass_client, recorder_mock):
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
await async_setup_component(hass, "logbook", config)
|
await async_setup_component(hass, "logbook", config)
|
||||||
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
|
await async_recorder_block_till_done(hass)
|
||||||
|
|
||||||
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
|
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
|
||||||
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
|
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
|
||||||
|
@ -1621,7 +1568,7 @@ async def test_exclude_events_entity(hass, hass_client, recorder_mock):
|
||||||
hass.states.async_set(entity_id2, None)
|
hass.states.async_set(entity_id2, None)
|
||||||
hass.states.async_set(entity_id2, 20)
|
hass.states.async_set(entity_id2, 20)
|
||||||
|
|
||||||
await _async_commit_and_wait(hass)
|
await async_wait_recording_done(hass)
|
||||||
client = await hass_client()
|
client = await hass_client()
|
||||||
entries = await _async_fetch_logbook(client)
|
entries = await _async_fetch_logbook(client)
|
||||||
assert len(entries) == 2
|
assert len(entries) == 2
|
||||||
|
@ -1645,7 +1592,7 @@ async def test_include_events_domain(hass, hass_client, recorder_mock):
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
await async_setup_component(hass, "logbook", config)
|
await async_setup_component(hass, "logbook", config)
|
||||||
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
|
await async_recorder_block_till_done(hass)
|
||||||
|
|
||||||
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
|
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
|
||||||
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
|
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
|
||||||
|
@ -1658,7 +1605,7 @@ async def test_include_events_domain(hass, hass_client, recorder_mock):
|
||||||
hass.states.async_set(entity_id2, None)
|
hass.states.async_set(entity_id2, None)
|
||||||
hass.states.async_set(entity_id2, 20)
|
hass.states.async_set(entity_id2, 20)
|
||||||
|
|
||||||
await _async_commit_and_wait(hass)
|
await async_wait_recording_done(hass)
|
||||||
client = await hass_client()
|
client = await hass_client()
|
||||||
entries = await _async_fetch_logbook(client)
|
entries = await _async_fetch_logbook(client)
|
||||||
|
|
||||||
|
@ -1688,7 +1635,7 @@ async def test_include_events_domain_glob(hass, hass_client, recorder_mock):
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
await async_setup_component(hass, "logbook", config)
|
await async_setup_component(hass, "logbook", config)
|
||||||
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
|
await async_recorder_block_till_done(hass)
|
||||||
|
|
||||||
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
|
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
|
||||||
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
|
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
|
||||||
|
@ -1703,7 +1650,7 @@ async def test_include_events_domain_glob(hass, hass_client, recorder_mock):
|
||||||
hass.states.async_set(entity_id3, None)
|
hass.states.async_set(entity_id3, None)
|
||||||
hass.states.async_set(entity_id3, 30)
|
hass.states.async_set(entity_id3, 30)
|
||||||
|
|
||||||
await _async_commit_and_wait(hass)
|
await async_wait_recording_done(hass)
|
||||||
client = await hass_client()
|
client = await hass_client()
|
||||||
entries = await _async_fetch_logbook(client)
|
entries = await _async_fetch_logbook(client)
|
||||||
|
|
||||||
|
@ -1739,7 +1686,7 @@ async def test_include_exclude_events(hass, hass_client, recorder_mock):
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
await async_setup_component(hass, "logbook", config)
|
await async_setup_component(hass, "logbook", config)
|
||||||
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
|
await async_recorder_block_till_done(hass)
|
||||||
|
|
||||||
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
|
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
|
||||||
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
|
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
|
||||||
|
@ -1754,7 +1701,7 @@ async def test_include_exclude_events(hass, hass_client, recorder_mock):
|
||||||
hass.states.async_set(entity_id4, None)
|
hass.states.async_set(entity_id4, None)
|
||||||
hass.states.async_set(entity_id4, 10)
|
hass.states.async_set(entity_id4, 10)
|
||||||
|
|
||||||
await _async_commit_and_wait(hass)
|
await async_wait_recording_done(hass)
|
||||||
client = await hass_client()
|
client = await hass_client()
|
||||||
entries = await _async_fetch_logbook(client)
|
entries = await _async_fetch_logbook(client)
|
||||||
|
|
||||||
|
@ -1794,7 +1741,7 @@ async def test_include_exclude_events_with_glob_filters(
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
await async_setup_component(hass, "logbook", config)
|
await async_setup_component(hass, "logbook", config)
|
||||||
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
|
await async_recorder_block_till_done(hass)
|
||||||
|
|
||||||
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
|
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
|
||||||
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
|
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
|
||||||
|
@ -1813,7 +1760,7 @@ async def test_include_exclude_events_with_glob_filters(
|
||||||
hass.states.async_set(entity_id6, None)
|
hass.states.async_set(entity_id6, None)
|
||||||
hass.states.async_set(entity_id6, 30)
|
hass.states.async_set(entity_id6, 30)
|
||||||
|
|
||||||
await _async_commit_and_wait(hass)
|
await async_wait_recording_done(hass)
|
||||||
client = await hass_client()
|
client = await hass_client()
|
||||||
entries = await _async_fetch_logbook(client)
|
entries = await _async_fetch_logbook(client)
|
||||||
|
|
||||||
|
@ -1836,14 +1783,14 @@ async def test_empty_config(hass, hass_client, recorder_mock):
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
await async_setup_component(hass, "logbook", config)
|
await async_setup_component(hass, "logbook", config)
|
||||||
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
|
await async_recorder_block_till_done(hass)
|
||||||
|
|
||||||
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
|
hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
|
||||||
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
|
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
|
||||||
hass.states.async_set(entity_id, None)
|
hass.states.async_set(entity_id, None)
|
||||||
hass.states.async_set(entity_id, 10)
|
hass.states.async_set(entity_id, 10)
|
||||||
|
|
||||||
await _async_commit_and_wait(hass)
|
await async_wait_recording_done(hass)
|
||||||
client = await hass_client()
|
client = await hass_client()
|
||||||
entries = await _async_fetch_logbook(client)
|
entries = await _async_fetch_logbook(client)
|
||||||
|
|
||||||
|
@ -1857,7 +1804,7 @@ async def test_empty_config(hass, hass_client, recorder_mock):
|
||||||
async def test_context_filter(hass, hass_client, recorder_mock):
|
async def test_context_filter(hass, hass_client, recorder_mock):
|
||||||
"""Test we can filter by context."""
|
"""Test we can filter by context."""
|
||||||
assert await async_setup_component(hass, "logbook", {})
|
assert await async_setup_component(hass, "logbook", {})
|
||||||
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
|
await async_recorder_block_till_done(hass)
|
||||||
|
|
||||||
entity_id = "switch.blu"
|
entity_id = "switch.blu"
|
||||||
context = ha.Context()
|
context = ha.Context()
|
||||||
|
@ -1869,7 +1816,7 @@ async def test_context_filter(hass, hass_client, recorder_mock):
|
||||||
hass.states.async_set(entity_id, "off")
|
hass.states.async_set(entity_id, "off")
|
||||||
hass.states.async_set(entity_id, "unknown", context=context)
|
hass.states.async_set(entity_id, "unknown", context=context)
|
||||||
|
|
||||||
await _async_commit_and_wait(hass)
|
await async_wait_recording_done(hass)
|
||||||
client = await hass_client()
|
client = await hass_client()
|
||||||
|
|
||||||
# Test results
|
# Test results
|
||||||
|
@ -1903,14 +1850,6 @@ async def _async_fetch_logbook(client, params=None):
|
||||||
return await response.json()
|
return await response.json()
|
||||||
|
|
||||||
|
|
||||||
async def _async_commit_and_wait(hass):
|
|
||||||
await hass.async_block_till_done()
|
|
||||||
await hass.async_add_executor_job(trigger_db_commit, hass)
|
|
||||||
await hass.async_block_till_done()
|
|
||||||
await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
|
|
||||||
await hass.async_block_till_done()
|
|
||||||
|
|
||||||
|
|
||||||
def _assert_entry(
|
def _assert_entry(
|
||||||
entry, when=None, name=None, message=None, domain=None, entity_id=None, state=None
|
entry, when=None, name=None, message=None, domain=None, entity_id=None, state=None
|
||||||
):
|
):
|
||||||
|
|
|
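The hunks above all make the same substitution: the logbook tests drop their private _async_commit_and_wait helper and instead wait on the shared async_wait_recording_done helper from tests.components.recorder.common, with the recorder provided by the recorder_mock fixture. A minimal sketch of how such a test reads after the change; the test name, entity id, and endpoint below are illustrative and not part of the commit:

# Illustrative only: a hypothetical logbook test written against the shared helpers.
from homeassistant.const import EVENT_HOMEASSISTANT_START
from homeassistant.setup import async_setup_component

from tests.components.recorder.common import async_wait_recording_done


async def test_logbook_lists_state_change(hass, hass_client, recorder_mock):
    """Hypothetical test: one helper call replaces the local commit-and-wait."""
    await async_setup_component(hass, "logbook", {})
    hass.bus.async_fire(EVENT_HOMEASSISTANT_START)

    hass.states.async_set("light.kitchen", "on")
    await async_wait_recording_done(hass)  # commit and wait handled by the helper

    client = await hass_client()
    response = await client.get("/api/logbook")
    assert response.status == 200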
@@ -55,7 +55,7 @@ async def async_wait_purge_done(hass: HomeAssistant, max: int = None) -> None:

 @ha.callback
 def async_trigger_db_commit(hass: HomeAssistant) -> None:
-    """Fore the recorder to commit. Async friendly."""
+    """Force the recorder to commit. Async friendly."""
     for _ in range(recorder.DEFAULT_COMMIT_INTERVAL):
         async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=1))

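Beyond the docstring fix, this hunk shows what the helpers are built on: async_trigger_db_commit advances time once per second of the default commit interval so the recorder flushes. A hedged sketch of using the same time-advance trick directly from a test; the test name and assertion are illustrative, not part of the commit:

# Illustrative only: forcing a recorder commit by advancing time, as the helper does.
from datetime import timedelta

import homeassistant.util.dt as dt_util

from tests.common import async_fire_time_changed
from tests.components.recorder.common import async_wait_recording_done


async def test_commit_after_time_jump(hass, recorder_mock):
    """Hypothetical test: fire a time change, then wait for recording to finish."""
    hass.states.async_set("sensor.example", "1")
    async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=1))
    await async_wait_recording_done(hass)
    assert hass.states.get("sensor.example").state == "1"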
@@ -14,9 +14,13 @@ from homeassistant.setup import async_setup_component
 import homeassistant.util.dt as dt_util
 from homeassistant.util.unit_system import METRIC_SYSTEM

-from .common import async_wait_recording_done, create_engine_test, trigger_db_commit
+from .common import (
+    async_recorder_block_till_done,
+    async_wait_recording_done,
+    create_engine_test,
+)

-from tests.common import async_fire_time_changed, init_recorder_component
+from tests.common import async_fire_time_changed

 POWER_SENSOR_ATTRIBUTES = {
     "device_class": "power",
@@ -74,17 +78,14 @@ async def test_clear_statistics(hass, hass_ws_client, recorder_mock):
     hass.config.units = units
     await async_setup_component(hass, "history", {})
     await async_setup_component(hass, "sensor", {})
-    await hass.async_add_executor_job(hass.data[DATA_INSTANCE].block_till_done)
+    await async_recorder_block_till_done(hass)
     hass.states.async_set("sensor.test1", state, attributes=attributes)
     hass.states.async_set("sensor.test2", state * 2, attributes=attributes)
     hass.states.async_set("sensor.test3", state * 3, attributes=attributes)
-    await hass.async_block_till_done()
+    await async_wait_recording_done(hass)

-    await hass.async_add_executor_job(trigger_db_commit, hass)
-    await hass.async_block_till_done()
-
     hass.data[DATA_INSTANCE].do_adhoc_statistics(start=now)
-    await hass.async_add_executor_job(hass.data[DATA_INSTANCE].block_till_done)
+    await async_recorder_block_till_done(hass)

     client = await hass_ws_client()
     await client.send_json(
@@ -149,7 +150,7 @@ async def test_clear_statistics(hass, hass_ws_client, recorder_mock):
     )
     response = await client.receive_json()
     assert response["success"]
-    await hass.async_add_executor_job(hass.data[DATA_INSTANCE].block_till_done)
+    await async_recorder_block_till_done(hass)

     client = await hass_ws_client()
     await client.send_json(
@@ -173,7 +174,7 @@ async def test_clear_statistics(hass, hass_ws_client, recorder_mock):
     )
     response = await client.receive_json()
     assert response["success"]
-    await hass.async_add_executor_job(hass.data[DATA_INSTANCE].block_till_done)
+    await async_recorder_block_till_done(hass)

     client = await hass_ws_client()
     await client.send_json(
@@ -203,15 +204,12 @@ async def test_update_statistics_metadata(
     hass.config.units = units
     await async_setup_component(hass, "history", {})
     await async_setup_component(hass, "sensor", {})
-    await hass.async_add_executor_job(hass.data[DATA_INSTANCE].block_till_done)
+    await async_recorder_block_till_done(hass)
     hass.states.async_set("sensor.test", state, attributes=attributes)
-    await hass.async_block_till_done()
+    await async_wait_recording_done(hass)

-    await hass.async_add_executor_job(trigger_db_commit, hass)
-    await hass.async_block_till_done()
-
     hass.data[DATA_INSTANCE].do_adhoc_statistics(period="hourly", start=now)
-    await hass.async_add_executor_job(hass.data[DATA_INSTANCE].block_till_done)
+    await async_recorder_block_till_done(hass)

     client = await hass_ws_client()

@@ -239,7 +237,7 @@ async def test_update_statistics_metadata(
     )
     response = await client.receive_json()
     assert response["success"]
-    await hass.async_add_executor_job(hass.data[DATA_INSTANCE].block_till_done)
+    await async_recorder_block_till_done(hass)

     await client.send_json({"id": 3, "type": "history/list_statistic_ids"})
     response = await client.receive_json()
@@ -443,10 +441,9 @@ async def test_get_statistics_metadata(
     now = dt_util.utcnow()

     hass.config.units = units
-    await hass.async_add_executor_job(init_recorder_component, hass)
     await async_setup_component(hass, "history", {"history": {}})
     await async_setup_component(hass, "sensor", {})
-    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
+    await async_recorder_block_till_done(hass)

     client = await hass_ws_client()
     await client.send_json({"id": 1, "type": "recorder/get_statistics_metadata"})
@@ -498,16 +495,10 @@ async def test_get_statistics_metadata(
     )

     hass.states.async_set("sensor.test", 10, attributes=attributes)
-    await hass.async_block_till_done()
+    await async_wait_recording_done(hass)

-    await hass.async_add_executor_job(trigger_db_commit, hass)
-    await hass.async_block_till_done()
-
     hass.states.async_set("sensor.test2", 10, attributes=attributes)
-    await hass.async_block_till_done()
+    await async_wait_recording_done(hass)

-    await hass.async_add_executor_job(trigger_db_commit, hass)
-    await hass.async_block_till_done()
-
     await client.send_json(
         {
@@ -530,7 +521,7 @@ async def test_get_statistics_metadata(
     ]

     hass.data[recorder.DATA_INSTANCE].do_adhoc_statistics(start=now)
-    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
+    await async_recorder_block_till_done(hass)
     # Remove the state, statistics will now be fetched from the database
     hass.states.async_remove("sensor.test")
     await hass.async_block_till_done()
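Every replacement in the hunks above collapses the same hand-rolled sequence into one helper call. Roughly, the deleted boilerplate did the following; this is a hedged reconstruction from the removed lines, not the shared helper's actual implementation:

# Hedged reconstruction of the removed boilerplate; the shared helpers in
# tests.components.recorder.common are assumed to cover the same ground.
from homeassistant.components import recorder
from homeassistant.core import HomeAssistant


async def _old_style_wait(hass: HomeAssistant) -> None:
    """Drain the recorder queue the way the deleted test code did."""
    # Hop to an executor thread and block until the recorder queue is empty.
    await hass.async_add_executor_job(hass.data[recorder.DATA_INSTANCE].block_till_done)
    # Then let any callbacks scheduled back on the event loop finish.
    await hass.async_block_till_done()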
@@ -23,12 +23,12 @@ from homeassistant.components.recorder.statistics import (
 )
 from homeassistant.components.recorder.util import session_scope
 from homeassistant.const import STATE_UNAVAILABLE
-from homeassistant.setup import setup_component
+from homeassistant.setup import async_setup_component, setup_component
 import homeassistant.util.dt as dt_util
 from homeassistant.util.unit_system import IMPERIAL_SYSTEM, METRIC_SYSTEM

-from tests.common import async_setup_component, init_recorder_component
 from tests.components.recorder.common import (
+    async_recorder_block_till_done,
     async_wait_recording_done,
     wait_recording_done,
 )
@@ -362,7 +362,7 @@ async def test_compile_hourly_sum_statistics_amount(
     recorder = hass.data[DATA_INSTANCE]
     await async_setup_component(hass, "sensor", {})
     # Wait for the sensor recorder platform to be added
-    await hass.async_add_executor_job(hass.data[DATA_INSTANCE].block_till_done)
+    await async_recorder_block_till_done(hass)
     attributes = {
         "device_class": device_class,
         "state_class": state_class,
@@ -2777,7 +2777,7 @@ def record_states(hass, zero, entity_id, attributes, seq=None):
     ],
 )
 async def test_validate_statistics_supported_device_class(
-    hass, hass_ws_client, units, attributes, unit
+    hass, hass_ws_client, recorder_mock, units, attributes, unit
 ):
     """Test validate_statistics."""
     id = 1
@@ -2798,9 +2798,8 @@ async def test_validate_statistics_supported_device_class(
     now = dt_util.utcnow()

     hass.config.units = units
-    await hass.async_add_executor_job(init_recorder_component, hass)
     await async_setup_component(hass, "sensor", {})
-    await hass.async_add_executor_job(hass.data[DATA_INSTANCE].block_till_done)
+    await async_recorder_block_till_done(hass)
     client = await hass_ws_client()

     # No statistics, no state - empty response
@@ -2810,14 +2809,14 @@ async def test_validate_statistics_supported_device_class(
     hass.states.async_set(
         "sensor.test", 10, attributes={**attributes, **{"unit_of_measurement": unit}}
     )
-    await hass.async_add_executor_job(hass.data[DATA_INSTANCE].block_till_done)
+    await async_recorder_block_till_done(hass)
     await assert_validation_result(client, {})

     # No statistics, invalid state - expect error
     hass.states.async_set(
         "sensor.test", 11, attributes={**attributes, **{"unit_of_measurement": "dogs"}}
     )
-    await hass.async_add_executor_job(hass.data[DATA_INSTANCE].block_till_done)
+    await async_recorder_block_till_done(hass)
     expected = {
         "sensor.test": [
             {
@@ -2833,24 +2832,24 @@ async def test_validate_statistics_supported_device_class(
     await assert_validation_result(client, expected)

     # Statistics has run, invalid state - expect error
-    await hass.async_add_executor_job(hass.data[DATA_INSTANCE].block_till_done)
+    await async_recorder_block_till_done(hass)
     hass.data[DATA_INSTANCE].do_adhoc_statistics(start=now)
     hass.states.async_set(
         "sensor.test", 12, attributes={**attributes, **{"unit_of_measurement": "dogs"}}
     )
-    await hass.async_add_executor_job(hass.data[DATA_INSTANCE].block_till_done)
+    await async_recorder_block_till_done(hass)
     await assert_validation_result(client, expected)

     # Valid state - empty response
     hass.states.async_set(
         "sensor.test", 13, attributes={**attributes, **{"unit_of_measurement": unit}}
     )
-    await hass.async_add_executor_job(hass.data[DATA_INSTANCE].block_till_done)
+    await async_recorder_block_till_done(hass)
     await assert_validation_result(client, {})

     # Valid state, statistic runs again - empty response
     hass.data[DATA_INSTANCE].do_adhoc_statistics(start=now)
-    await hass.async_add_executor_job(hass.data[DATA_INSTANCE].block_till_done)
+    await async_recorder_block_till_done(hass)
     await assert_validation_result(client, {})

     # Remove the state - empty response
@@ -2873,7 +2872,7 @@ async def test_validate_statistics_supported_device_class(
     ],
 )
 async def test_validate_statistics_supported_device_class_2(
-    hass, hass_ws_client, units, attributes, unit
+    hass, hass_ws_client, recorder_mock, units, attributes, unit
 ):
     """Test validate_statistics."""
     id = 1
@@ -2894,9 +2893,8 @@ async def test_validate_statistics_supported_device_class_2(
     now = dt_util.utcnow()

     hass.config.units = units
-    await hass.async_add_executor_job(init_recorder_component, hass)
     await async_setup_component(hass, "sensor", {})
-    await hass.async_add_executor_job(hass.data[DATA_INSTANCE].block_till_done)
+    await async_recorder_block_till_done(hass)
     client = await hass_ws_client()

     # No statistics, no state - empty response
@@ -2910,7 +2908,7 @@ async def test_validate_statistics_supported_device_class_2(

     # Statistics has run, device class set - expect error
     hass.data[DATA_INSTANCE].do_adhoc_statistics(start=now)
-    await hass.async_add_executor_job(hass.data[DATA_INSTANCE].block_till_done)
+    await async_recorder_block_till_done(hass)
     hass.states.async_set("sensor.test", 12, attributes=attributes)
     await hass.async_block_till_done()
     expected = {
@@ -2932,7 +2930,7 @@ async def test_validate_statistics_supported_device_class_2(
     hass.states.async_set(
         "sensor.test", 13, attributes={**attributes, **{"unit_of_measurement": "dogs"}}
     )
-    await hass.async_add_executor_job(hass.data[DATA_INSTANCE].block_till_done)
+    await async_recorder_block_till_done(hass)
     expected = {
         "sensor.test": [
             {
@@ -2964,7 +2962,7 @@ async def test_validate_statistics_supported_device_class_2(
     ],
 )
 async def test_validate_statistics_unsupported_state_class(
-    hass, hass_ws_client, units, attributes, unit
+    hass, hass_ws_client, recorder_mock, units, attributes, unit
 ):
     """Test validate_statistics."""
     id = 1
@@ -2985,9 +2983,8 @@ async def test_validate_statistics_unsupported_state_class(
     now = dt_util.utcnow()

     hass.config.units = units
-    await hass.async_add_executor_job(init_recorder_component, hass)
     await async_setup_component(hass, "sensor", {})
-    await hass.async_add_executor_job(hass.data[DATA_INSTANCE].block_till_done)
+    await async_recorder_block_till_done(hass)
     client = await hass_ws_client()

     # No statistics, no state - empty response
@@ -3000,7 +2997,7 @@ async def test_validate_statistics_unsupported_state_class(

     # Statistics has run, empty response
     hass.data[DATA_INSTANCE].do_adhoc_statistics(start=now)
-    await hass.async_add_executor_job(hass.data[DATA_INSTANCE].block_till_done)
+    await async_recorder_block_till_done(hass)
     await assert_validation_result(client, {})

     # State update with invalid state class, expect error
@@ -3029,7 +3026,7 @@ async def test_validate_statistics_unsupported_state_class(
     ],
 )
 async def test_validate_statistics_sensor_no_longer_recorded(
-    hass, hass_ws_client, units, attributes, unit
+    hass, hass_ws_client, recorder_mock, units, attributes, unit
 ):
     """Test validate_statistics."""
     id = 1
@@ -3050,9 +3047,8 @@ async def test_validate_statistics_sensor_no_longer_recorded(
     now = dt_util.utcnow()

     hass.config.units = units
-    await hass.async_add_executor_job(init_recorder_component, hass)
     await async_setup_component(hass, "sensor", {})
-    await hass.async_add_executor_job(hass.data[DATA_INSTANCE].block_till_done)
+    await async_recorder_block_till_done(hass)
     client = await hass_ws_client()

     # No statistics, no state - empty response
@@ -3065,7 +3061,7 @@ async def test_validate_statistics_sensor_no_longer_recorded(

     # Statistics has run, empty response
     hass.data[DATA_INSTANCE].do_adhoc_statistics(start=now)
-    await hass.async_add_executor_job(hass.data[DATA_INSTANCE].block_till_done)
+    await async_recorder_block_till_done(hass)
     await assert_validation_result(client, {})

     # Sensor no longer recorded, expect error
@@ -3091,7 +3087,7 @@ async def test_validate_statistics_sensor_no_longer_recorded(
     ],
 )
 async def test_validate_statistics_sensor_not_recorded(
-    hass, hass_ws_client, units, attributes, unit
+    hass, hass_ws_client, recorder_mock, units, attributes, unit
 ):
     """Test validate_statistics."""
     id = 1
@@ -3112,9 +3108,8 @@ async def test_validate_statistics_sensor_not_recorded(
     now = dt_util.utcnow()

     hass.config.units = units
-    await hass.async_add_executor_job(init_recorder_component, hass)
     await async_setup_component(hass, "sensor", {})
-    await hass.async_add_executor_job(hass.data[DATA_INSTANCE].block_till_done)
+    await async_recorder_block_till_done(hass)
     client = await hass_ws_client()

     # No statistics, no state - empty response
@@ -3139,7 +3134,7 @@ async def test_validate_statistics_sensor_not_recorded(

     # Statistics has run, expect same error
     hass.data[DATA_INSTANCE].do_adhoc_statistics(start=now)
-    await hass.async_add_executor_job(hass.data[DATA_INSTANCE].block_till_done)
+    await async_recorder_block_till_done(hass)
     await assert_validation_result(client, expected)


@@ -3150,7 +3145,7 @@ async def test_validate_statistics_sensor_not_recorded(
     ],
 )
 async def test_validate_statistics_sensor_removed(
-    hass, hass_ws_client, units, attributes, unit
+    hass, hass_ws_client, recorder_mock, units, attributes, unit
 ):
     """Test validate_statistics."""
     id = 1
@@ -3171,9 +3166,8 @@ async def test_validate_statistics_sensor_removed(
     now = dt_util.utcnow()

     hass.config.units = units
-    await hass.async_add_executor_job(init_recorder_component, hass)
     await async_setup_component(hass, "sensor", {})
-    await hass.async_add_executor_job(hass.data[DATA_INSTANCE].block_till_done)
+    await async_recorder_block_till_done(hass)
     client = await hass_ws_client()

     # No statistics, no state - empty response
@@ -3186,7 +3180,7 @@ async def test_validate_statistics_sensor_removed(

     # Statistics has run, empty response
     hass.data[DATA_INSTANCE].do_adhoc_statistics(start=now)
-    await hass.async_add_executor_job(hass.data[DATA_INSTANCE].block_till_done)
+    await async_recorder_block_till_done(hass)
     await assert_validation_result(client, {})

     # Sensor removed, expect error
@@ -3207,7 +3201,7 @@ async def test_validate_statistics_sensor_removed(
     [BATTERY_SENSOR_ATTRIBUTES, NONE_SENSOR_ATTRIBUTES],
 )
 async def test_validate_statistics_unsupported_device_class(
-    hass, hass_ws_client, attributes
+    hass, recorder_mock, hass_ws_client, attributes
 ):
     """Test validate_statistics."""
     id = 1
@@ -3238,9 +3232,8 @@ async def test_validate_statistics_unsupported_device_class(

     now = dt_util.utcnow()

-    await hass.async_add_executor_job(init_recorder_component, hass)
     await async_setup_component(hass, "sensor", {})
-    await hass.async_add_executor_job(hass.data[DATA_INSTANCE].block_till_done)
+    await async_recorder_block_till_done(hass)
     client = await hass_ws_client()
     rec = hass.data[DATA_INSTANCE]

@@ -3258,9 +3251,9 @@ async def test_validate_statistics_unsupported_device_class(
     await assert_validation_result(client, {})

     # Run statistics, no statistics will be generated because of conflicting units
-    await hass.async_add_executor_job(hass.data[DATA_INSTANCE].block_till_done)
+    await async_recorder_block_till_done(hass)
     rec.do_adhoc_statistics(start=now)
-    await hass.async_add_executor_job(hass.data[DATA_INSTANCE].block_till_done)
+    await async_recorder_block_till_done(hass)
     await assert_statistic_ids([])

     # No statistics, changed unit - empty response
@@ -3270,9 +3263,9 @@ async def test_validate_statistics_unsupported_device_class(
     await assert_validation_result(client, {})

     # Run statistics one hour later, only the "dogs" state will be considered
-    await hass.async_add_executor_job(hass.data[DATA_INSTANCE].block_till_done)
+    await async_recorder_block_till_done(hass)
     rec.do_adhoc_statistics(start=now + timedelta(hours=1))
-    await hass.async_add_executor_job(hass.data[DATA_INSTANCE].block_till_done)
+    await async_recorder_block_till_done(hass)
     await assert_statistic_ids(
         [{"statistic_id": "sensor.test", "unit_of_measurement": "dogs"}]
     )
@@ -3280,7 +3273,7 @@ async def test_validate_statistics_unsupported_device_class(

     # Change back to original unit - expect error
     hass.states.async_set("sensor.test", 13, attributes=attributes)
-    await hass.async_add_executor_job(hass.data[DATA_INSTANCE].block_till_done)
+    await async_recorder_block_till_done(hass)
     expected = {
         "sensor.test": [
             {
@@ -3299,13 +3292,13 @@ async def test_validate_statistics_unsupported_device_class(
     hass.states.async_set(
         "sensor.test", 14, attributes={**attributes, **{"unit_of_measurement": "dogs"}}
     )
-    await hass.async_add_executor_job(hass.data[DATA_INSTANCE].block_till_done)
+    await async_recorder_block_till_done(hass)
     await assert_validation_result(client, {})

     # Valid state, statistic runs again - empty response
-    await hass.async_add_executor_job(hass.data[DATA_INSTANCE].block_till_done)
+    await async_recorder_block_till_done(hass)
     hass.data[DATA_INSTANCE].do_adhoc_statistics(start=now)
-    await hass.async_add_executor_job(hass.data[DATA_INSTANCE].block_till_done)
+    await async_recorder_block_till_done(hass)
     await assert_validation_result(client, {})

     # Remove the state - empty response
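The signature changes above follow one rule: each parametrized validate_statistics test now requests the recorder_mock fixture instead of calling init_recorder_component through an executor job. A sketch of the resulting shape; the parameter values, test name, and body are placeholders, not code from the commit:

# Illustrative only: the post-change shape of a parametrized recorder-backed test.
import pytest

from homeassistant.util.unit_system import METRIC_SYSTEM

from tests.components.recorder.common import (
    async_recorder_block_till_done,
    async_wait_recording_done,
)


@pytest.mark.parametrize("units, attributes, unit", [(METRIC_SYSTEM, {}, "W")])
async def test_example_shape(
    hass, hass_ws_client, recorder_mock, units, attributes, unit
):
    """Hypothetical test: the recorder is already running via the fixture."""
    hass.config.units = units
    await async_recorder_block_till_done(hass)

    hass.states.async_set(
        "sensor.test", 10, attributes={**attributes, "unit_of_measurement": unit}
    )
    await async_wait_recording_done(hass)
    assert hass.states.get("sensor.test").state == "10"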
@@ -46,7 +46,6 @@ from tests.common import ( # noqa: E402, isort:skip
     MockUser,
     SetupRecorderInstanceT,
     async_fire_mqtt_message,
-    async_init_recorder_component,
     async_test_home_assistant,
     get_test_home_assistant,
     init_recorder_component,
@@ -57,6 +56,8 @@ from tests.components.recorder.common import ( # noqa: E402, isort:skip
     async_recorder_block_till_done,
 )

+_LOGGER = logging.getLogger(__name__)
+
 logging.basicConfig(level=logging.DEBUG)
 logging.getLogger("sqlalchemy.engine").setLevel(logging.INFO)

@@ -737,6 +738,28 @@ def hass_recorder(enable_nightly_purge, enable_statistics, hass_storage):
     dt_util.DEFAULT_TIME_ZONE = original_tz


+async def _async_init_recorder_component(hass, add_config=None):
+    """Initialize the recorder asynchronously."""
+    config = dict(add_config) if add_config else {}
+    if recorder.CONF_DB_URL not in config:
+        config[recorder.CONF_DB_URL] = "sqlite://"  # In memory DB
+    if recorder.CONF_COMMIT_INTERVAL not in config:
+        config[recorder.CONF_COMMIT_INTERVAL] = 0
+
+    with patch(
+        "homeassistant.components.recorder.ALLOW_IN_MEMORY_DB",
+        True,
+    ), patch("homeassistant.components.recorder.migration.migrate_schema"):
+        assert await async_setup_component(
+            hass, recorder.DOMAIN, {recorder.DOMAIN: config}
+        )
+        assert recorder.DOMAIN in hass.config.components
+    _LOGGER.info(
+        "Test recorder successfully started, database location: %s",
+        config[recorder.CONF_DB_URL],
+    )
+
+
 @pytest.fixture
 async def async_setup_recorder_instance(
     enable_nightly_purge, enable_statistics
@@ -762,7 +785,7 @@ async def async_setup_recorder_instance(
             side_effect=stats,
             autospec=True,
         ):
-            await async_init_recorder_component(hass, config)
+            await _async_init_recorder_component(hass, config)
             await hass.async_block_till_done()
             instance = hass.data[recorder.DATA_INSTANCE]
             # The recorder's worker is not started until Home Assistant is running
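For tests that need the recorder instance object rather than just a running recorder, the async_setup_recorder_instance fixture above stays the entry point; _async_init_recorder_component is now a private detail of conftest. A hedged usage sketch, with a hypothetical test name and assertion:

# Illustrative only: consuming the conftest fixture that wraps the private helper.
from homeassistant.components import recorder

from tests.common import SetupRecorderInstanceT


async def test_uses_recorder_instance(
    hass, async_setup_recorder_instance: SetupRecorderInstanceT
):
    """Hypothetical test: the fixture boots an in-memory recorder and returns it."""
    instance = await async_setup_recorder_instance(hass)
    assert instance is hass.data[recorder.DATA_INSTANCE]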