Report orphaned statistics in statistic validation (#57324)
parent e2303dc713
commit e3534eec87
4 changed files with 128 additions and 16 deletions
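
In short: get_metadata_with_session and its get_metadata wrapper move their filters to keyword-only arguments and gain an optional statistic_source filter, every caller is updated to the new calling convention, and validate_statistics uses the new filter to report a "no_state" issue for each recorder-owned statistic whose sensor no longer exists. A minimal before/after sketch of the call-site change (the hass and session objects come from the surrounding recorder code):

    # Before: statistic_ids and statistic_type were positional, and there was no source filter.
    metadata = get_metadata_with_session(hass, session, ["sensor.test1"], None)

    # After: filters are keyword-only and optional; an omitted filter means "do not filter".
    metadata = get_metadata_with_session(hass, session, statistic_ids=["sensor.test1"])
    metadatas = statistics.get_metadata(hass, statistic_source=RECORDER_DOMAIN)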
@@ -560,7 +560,7 @@ def _apply_update(instance, session, new_version, old_version):  # noqa: C901

         # Copy last hourly statistic to the newly created 5-minute statistics table
         sum_statistics = get_metadata_with_session(
-            instance.hass, session, None, statistic_type="sum"
+            instance.hass, session, statistic_type="sum"
         )
         for metadata_id, _ in sum_statistics.values():
             last_statistic = (
@@ -204,7 +204,9 @@ def _update_or_add_metadata(
     Updating metadata source is not possible.
     """
     statistic_id = new_metadata["statistic_id"]
-    old_metadata_dict = get_metadata_with_session(hass, session, [statistic_id], None)
+    old_metadata_dict = get_metadata_with_session(
+        hass, session, statistic_ids=[statistic_id]
+    )
     if not old_metadata_dict:
         unit = new_metadata["unit_of_measurement"]
         has_mean = new_metadata["has_mean"]
@@ -417,8 +419,10 @@ def compile_statistics(instance: Recorder, start: datetime) -> bool:
 def get_metadata_with_session(
     hass: HomeAssistant,
     session: scoped_session,
-    statistic_ids: Iterable[str] | None,
-    statistic_type: Literal["mean"] | Literal["sum"] | None,
+    *,
+    statistic_ids: Iterable[str] | None = None,
+    statistic_type: Literal["mean"] | Literal["sum"] | None = None,
+    statistic_source: str | None = None,
 ) -> dict[str, tuple[int, StatisticMetaData]]:
     """Fetch meta data.

@@ -448,11 +452,19 @@ def get_metadata_with_session(
         baked_query += lambda q: q.filter(
             StatisticsMeta.statistic_id.in_(bindparam("statistic_ids"))
         )
+    if statistic_source is not None:
+        baked_query += lambda q: q.filter(
+            StatisticsMeta.source == bindparam("statistic_source")
+        )
     if statistic_type == "mean":
         baked_query += lambda q: q.filter(StatisticsMeta.has_mean == true())
     elif statistic_type == "sum":
         baked_query += lambda q: q.filter(StatisticsMeta.has_sum == true())
-    result = execute(baked_query(session).params(statistic_ids=statistic_ids))
+    result = execute(
+        baked_query(session).params(
+            statistic_ids=statistic_ids, statistic_source=statistic_source
+        )
+    )
     if not result:
         return {}

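
The hunk above threads the new filter through the existing baked query: a StatisticsMeta.source comparison is only appended when statistic_source is set, while .params() always supplies both bind parameters. As a rough, self-contained illustration of the filtering being built up (a hypothetical minimal StatisticsMeta model and an in-memory SQLite database, not the recorder's baked-query code):

    from sqlalchemy import Boolean, Column, Integer, String, create_engine
    from sqlalchemy.orm import Session, declarative_base

    Base = declarative_base()

    class StatisticsMeta(Base):  # simplified stand-in for the recorder model
        __tablename__ = "statistics_meta"
        id = Column(Integer, primary_key=True)
        statistic_id = Column(String)
        source = Column(String)
        has_mean = Column(Boolean)
        has_sum = Column(Boolean)

    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)

    with Session(engine) as session:
        session.add_all(
            [
                StatisticsMeta(statistic_id="sensor.power", source="recorder", has_mean=True, has_sum=False),
                StatisticsMeta(statistic_id="sensor.gas", source="recorder", has_mean=False, has_sum=True),
            ]
        )
        session.commit()

        statistic_ids = None           # no statistic_id filter
        statistic_source = "recorder"  # only rows owned by the recorder integration
        statistic_type = "sum"         # only rows flagged has_sum

        query = session.query(StatisticsMeta)
        if statistic_ids is not None:
            query = query.filter(StatisticsMeta.statistic_id.in_(statistic_ids))
        if statistic_source is not None:
            query = query.filter(StatisticsMeta.source == statistic_source)
        if statistic_type == "mean":
            query = query.filter(StatisticsMeta.has_mean.is_(True))
        elif statistic_type == "sum":
            query = query.filter(StatisticsMeta.has_sum.is_(True))

        print([row.statistic_id for row in query])  # ['sensor.gas']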
@@ -468,11 +480,20 @@ def get_metadata_with_session(

 def get_metadata(
     hass: HomeAssistant,
-    statistic_ids: Iterable[str],
+    *,
+    statistic_ids: Iterable[str] | None = None,
+    statistic_type: Literal["mean"] | Literal["sum"] | None = None,
+    statistic_source: str | None = None,
 ) -> dict[str, tuple[int, StatisticMetaData]]:
     """Return metadata for statistic_ids."""
     with session_scope(hass=hass) as session:
-        return get_metadata_with_session(hass, session, statistic_ids, None)
+        return get_metadata_with_session(
+            hass,
+            session,
+            statistic_ids=statistic_ids,
+            statistic_type=statistic_type,
+            statistic_source=statistic_source,
+        )


 def _configured_unit(unit: str, units: UnitSystem) -> str:
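
For illustration, a few hedged examples of calling the widened get_metadata wrapper (the entity ids are made up; RECORDER_DOMAIN is the "recorder" domain constant imported in the sensor hunk further below):

    all_meta = get_metadata(hass)                                  # no filter: every metadata row
    one_meta = get_metadata(hass, statistic_ids=["sensor.gas"])    # filter by statistic_id
    sum_meta = get_metadata(hass, statistic_type="sum")            # only rows with has_sum set
    owned = get_metadata(hass, statistic_source=RECORDER_DOMAIN)   # only recorder-generated rows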
@@ -521,7 +542,9 @@ def list_statistic_ids(

     # Query the database
     with session_scope(hass=hass) as session:
-        metadata = get_metadata_with_session(hass, session, None, statistic_type)
+        metadata = get_metadata_with_session(
+            hass, session, statistic_type=statistic_type
+        )

         for _, meta in metadata.values():
             if (unit := meta["unit_of_measurement"]) is not None:
@@ -693,7 +716,7 @@ def statistics_during_period(
     metadata = None
     with session_scope(hass=hass) as session:
         # Fetch metadata for the given (or all) statistic_ids
-        metadata = get_metadata_with_session(hass, session, statistic_ids, None)
+        metadata = get_metadata_with_session(hass, session, statistic_ids=statistic_ids)
         if not metadata:
             return {}

@@ -744,7 +767,7 @@ def get_last_statistics(
         statistic_ids = [statistic_id]
     with session_scope(hass=hass) as session:
         # Fetch metadata for the given statistic_id
-        metadata = get_metadata_with_session(hass, session, statistic_ids, None)
+        metadata = get_metadata_with_session(hass, session, statistic_ids=statistic_ids)
         if not metadata:
             return {}

@@ -17,6 +17,7 @@ from homeassistant.components.recorder import (
     statistics,
     util as recorder_util,
 )
+from homeassistant.components.recorder.const import DOMAIN as RECORDER_DOMAIN
 from homeassistant.components.recorder.models import (
     StatisticData,
     StatisticMetaData,
@@ -416,7 +417,7 @@ def _compile_statistics(  # noqa: C901
     sensor_states = _get_sensor_states(hass)
     wanted_statistics = _wanted_statistics(sensor_states)
     old_metadatas = statistics.get_metadata_with_session(
-        hass, session, [i.entity_id for i in sensor_states], None
+        hass, session, statistic_ids=[i.entity_id for i in sensor_states]
     )

     # Get history between start and end
@@ -656,7 +657,9 @@ def validate_statistics(
     validation_result = defaultdict(list)

     sensor_states = hass.states.all(DOMAIN)
-    metadatas = statistics.get_metadata(hass, [i.entity_id for i in sensor_states])
+    metadatas = statistics.get_metadata(hass, statistic_source=RECORDER_DOMAIN)
+    sensor_entity_ids = {i.entity_id for i in sensor_states}
+    sensor_statistic_ids = set(metadatas)

     for state in sensor_states:
         entity_id = state.entity_id
@@ -727,4 +730,15 @@ def validate_statistics(
                 )
             )

+    for statistic_id in sensor_statistic_ids - sensor_entity_ids:
+        # There is no sensor matching the statistics_id
+        validation_result[statistic_id].append(
+            statistics.ValidationIssue(
+                "no_state",
+                {
+                    "statistic_id": statistic_id,
+                },
+            )
+        )
+
     return validation_result
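
The added check is a plain set difference between the recorder-owned statistic ids and the sensor entities that still exist. A tiny standalone sketch of the same logic (the data is made up, and a tuple stands in for statistics.ValidationIssue):

    from collections import defaultdict

    sensor_entity_ids = {"sensor.power", "sensor.gas"}          # sensors that still have a state
    sensor_statistic_ids = {"sensor.power", "sensor.removed"}   # recorder-owned metadata rows

    validation_result = defaultdict(list)
    for statistic_id in sensor_statistic_ids - sensor_entity_ids:
        # No sensor matches this statistic_id, so it is reported as orphaned.
        validation_result[statistic_id].append(("no_state", {"statistic_id": statistic_id}))

    assert dict(validation_result) == {
        "sensor.removed": [("no_state", {"statistic_id": "sensor.removed"})]
    }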
@@ -1945,7 +1945,7 @@ def test_compile_hourly_statistics_changing_statistics(
     assert statistic_ids == [
         {"statistic_id": "sensor.test1", "unit_of_measurement": None}
     ]
-    metadata = get_metadata(hass, ("sensor.test1",))
+    metadata = get_metadata(hass, statistic_ids=("sensor.test1",))
     assert metadata == {
         "sensor.test1": (
             1,
@@ -1970,7 +1970,7 @@ def test_compile_hourly_statistics_changing_statistics(
     assert statistic_ids == [
         {"statistic_id": "sensor.test1", "unit_of_measurement": None}
     ]
-    metadata = get_metadata(hass, ("sensor.test1",))
+    metadata = get_metadata(hass, statistic_ids=("sensor.test1",))
     assert metadata == {
         "sensor.test1": (
             1,
@@ -2521,7 +2521,15 @@ async def test_validate_statistics_supported_device_class(

     # Remove the state - empty response
     hass.states.async_remove("sensor.test")
-    await assert_validation_result(client, {})
+    expected = {
+        "sensor.test": [
+            {
+                "data": {"statistic_id": "sensor.test"},
+                "type": "no_state",
+            }
+        ],
+    }
+    await assert_validation_result(client, expected)


 @pytest.mark.parametrize(
@@ -2742,6 +2750,65 @@ async def test_validate_statistics_sensor_not_recorded(
     await assert_validation_result(client, expected)


+@pytest.mark.parametrize(
+    "units, attributes, unit",
+    [
+        (IMPERIAL_SYSTEM, POWER_SENSOR_ATTRIBUTES, "W"),
+    ],
+)
+async def test_validate_statistics_sensor_removed(
+    hass, hass_ws_client, units, attributes, unit
+):
+    """Test validate_statistics."""
+    id = 1
+
+    def next_id():
+        nonlocal id
+        id += 1
+        return id
+
+    async def assert_validation_result(client, expected_result):
+        await client.send_json(
+            {"id": next_id(), "type": "recorder/validate_statistics"}
+        )
+        response = await client.receive_json()
+        assert response["success"]
+        assert response["result"] == expected_result
+
+    now = dt_util.utcnow()
+
+    hass.config.units = units
+    await hass.async_add_executor_job(init_recorder_component, hass)
+    await async_setup_component(hass, "sensor", {})
+    await hass.async_add_executor_job(hass.data[DATA_INSTANCE].block_till_done)
+    client = await hass_ws_client()
+
+    # No statistics, no state - empty response
+    await assert_validation_result(client, {})
+
+    # No statistics, valid state - empty response
+    hass.states.async_set("sensor.test", 10, attributes=attributes)
+    await hass.async_block_till_done()
+    await assert_validation_result(client, {})
+
+    # Statistics has run, empty response
+    hass.data[DATA_INSTANCE].do_adhoc_statistics(start=now)
+    await hass.async_add_executor_job(hass.data[DATA_INSTANCE].block_till_done)
+    await assert_validation_result(client, {})
+
+    # Sensor removed, expect error
+    hass.states.async_remove("sensor.test")
+    expected = {
+        "sensor.test": [
+            {
+                "data": {"statistic_id": "sensor.test"},
+                "type": "no_state",
+            }
+        ],
+    }
+    await assert_validation_result(client, expected)
+
+
 @pytest.mark.parametrize(
     "attributes",
     [BATTERY_SENSOR_ATTRIBUTES, NONE_SENSOR_ATTRIBUTES],
@@ -2850,7 +2917,15 @@ async def test_validate_statistics_unsupported_device_class(

     # Remove the state - empty response
     hass.states.async_remove("sensor.test")
-    await assert_validation_result(client, {})
+    expected = {
+        "sensor.test": [
+            {
+                "data": {"statistic_id": "sensor.test"},
+                "type": "no_state",
+            }
+        ],
+    }
+    await assert_validation_result(client, expected)


 def record_meter_states(hass, zero, entity_id, _attributes, seq):