Improve statistics error messages when sensor's unit is changing (#55436)

* Improve error messages when sensor's unit is changing

* Improve test coverage
Erik Montnemery 2021-08-30 12:51:46 +02:00 committed by GitHub
parent 7e9f8de7e0
commit 722aa0895e
2 changed files with 95 additions and 3 deletions


@@ -200,11 +200,18 @@ def _normalize_states(
                     hass.data[WARN_UNSTABLE_UNIT] = set()
                 if entity_id not in hass.data[WARN_UNSTABLE_UNIT]:
                     hass.data[WARN_UNSTABLE_UNIT].add(entity_id)
+                    extra = ""
+                    if old_metadata := statistics.get_metadata(hass, entity_id):
+                        extra = (
+                            " and matches the unit of already compiled statistics "
+                            f"({old_metadata['unit_of_measurement']})"
+                        )
                     _LOGGER.warning(
-                        "The unit of %s is changing, got %s, generation of long term "
-                        "statistics will be suppressed unless the unit is stable",
+                        "The unit of %s is changing, got multiple %s, generation of long term "
+                        "statistics will be suppressed unless the unit is stable%s",
                         entity_id,
                         all_units,
+                        extra,
                     )
                 return None, []
             unit = fstates[0][1].attributes.get(ATTR_UNIT_OF_MEASUREMENT)
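For orientation, here is a minimal, self-contained sketch of the message the reworked warning produces; the entity id and units are made up for illustration. When statistics have already been compiled for the entity, the new `extra` string names the previously recorded unit; otherwise the suffix is empty and only the added word "multiple" changes the text:

entity_id = "sensor.test1"  # made-up example values
all_units = {"%", "cats"}
message = (
    "The unit of %s is changing, got multiple %s, generation of long term "
    "statistics will be suppressed unless the unit is stable%s"
)

# No statistics compiled yet for this entity -> empty suffix
print(message % (entity_id, all_units, ""))

# Statistics already compiled with unit "%" -> the suffix names that unit
extra = " and matches the unit of already compiled statistics (%)"
print(message % (entity_id, all_units, extra))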
@@ -320,7 +327,7 @@ def compile_statistics(
                         entity_id,
                         unit,
                         old_metadata["unit_of_measurement"],
-                        unit,
+                        old_metadata["unit_of_measurement"],
                     )
                 continue
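The second hunk is a one-argument fix to a different warning, the one logged when the unit within a period is consistent but does not match the unit of already compiled statistics. The format string sits outside the hunk, so the wording below is only a paraphrase; the point is that the final placeholder, which tells the user what the unit must change back to, should be filled with the previously compiled unit rather than the sensor's current unit. A hedged sketch with made-up values:

import logging

_LOGGER = logging.getLogger(__name__)

entity_id = "sensor.test1"                   # made-up entity
unit = "cats"                                # unit the sensor reports now
old_metadata = {"unit_of_measurement": "%"}  # unit of the compiled statistics

_LOGGER.warning(
    # Paraphrased message; the real format string is outside the hunk above.
    "The unit of %s (%s) does not match the unit of already compiled "
    "statistics (%s), generation of long term statistics will be suppressed "
    "unless the unit changes back to %s",
    entity_id,
    unit,
    old_metadata["unit_of_measurement"],
    old_metadata["unit_of_measurement"],  # was `unit` before this commit
)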


@@ -1028,6 +1028,7 @@ def test_compile_hourly_statistics_changing_units_2(
     recorder.do_adhoc_statistics(period="hourly", start=zero + timedelta(minutes=30))
     wait_recording_done(hass)
     assert "The unit of sensor.test1 is changing" in caplog.text
+    assert "and matches the unit of already compiled statistics" not in caplog.text
     statistic_ids = list_statistic_ids(hass)
     assert statistic_ids == [
         {"statistic_id": "sensor.test1", "unit_of_measurement": "cats"}
@@ -1038,6 +1039,90 @@ def test_compile_hourly_statistics_changing_units_2(
     assert "Error while processing event StatisticsTask" not in caplog.text
 
 
+@pytest.mark.parametrize(
+    "device_class,unit,native_unit,mean,min,max",
+    [
+        (None, None, None, 16.440677, 10, 30),
+        (None, "%", "%", 16.440677, 10, 30),
+        ("battery", "%", "%", 16.440677, 10, 30),
+        ("battery", None, None, 16.440677, 10, 30),
+    ],
+)
+def test_compile_hourly_statistics_changing_units_3(
+    hass_recorder, caplog, device_class, unit, native_unit, mean, min, max
+):
+    """Test compiling hourly statistics where units change from one hour to the next."""
+    zero = dt_util.utcnow()
+    hass = hass_recorder()
+    recorder = hass.data[DATA_INSTANCE]
+    setup_component(hass, "sensor", {})
+    attributes = {
+        "device_class": device_class,
+        "state_class": "measurement",
+        "unit_of_measurement": unit,
+    }
+    four, states = record_states(hass, zero, "sensor.test1", attributes)
+    four, _states = record_states(
+        hass, zero + timedelta(hours=1), "sensor.test1", attributes
+    )
+    states["sensor.test1"] += _states["sensor.test1"]
+    attributes["unit_of_measurement"] = "cats"
+    four, _states = record_states(
+        hass, zero + timedelta(hours=2), "sensor.test1", attributes
+    )
+    states["sensor.test1"] += _states["sensor.test1"]
+    hist = history.get_significant_states(hass, zero, four)
+    assert dict(states) == dict(hist)
+
+    recorder.do_adhoc_statistics(period="hourly", start=zero)
+    wait_recording_done(hass)
+    assert "does not match the unit of already compiled" not in caplog.text
+    statistic_ids = list_statistic_ids(hass)
+    assert statistic_ids == [
+        {"statistic_id": "sensor.test1", "unit_of_measurement": native_unit}
+    ]
+    stats = statistics_during_period(hass, zero)
+    assert stats == {
+        "sensor.test1": [
+            {
+                "statistic_id": "sensor.test1",
+                "start": process_timestamp_to_utc_isoformat(zero),
+                "mean": approx(mean),
+                "min": approx(min),
+                "max": approx(max),
+                "last_reset": None,
+                "state": None,
+                "sum": None,
+            }
+        ]
+    }
+
+    recorder.do_adhoc_statistics(period="hourly", start=zero + timedelta(hours=2))
+    wait_recording_done(hass)
+    assert "The unit of sensor.test1 is changing" in caplog.text
+    assert f"matches the unit of already compiled statistics ({unit})" in caplog.text
+    statistic_ids = list_statistic_ids(hass)
+    assert statistic_ids == [
+        {"statistic_id": "sensor.test1", "unit_of_measurement": native_unit}
+    ]
+    stats = statistics_during_period(hass, zero)
+    assert stats == {
+        "sensor.test1": [
+            {
+                "statistic_id": "sensor.test1",
+                "start": process_timestamp_to_utc_isoformat(zero),
+                "mean": approx(mean),
+                "min": approx(min),
+                "max": approx(max),
+                "last_reset": None,
+                "state": None,
+                "sum": None,
+            }
+        ]
+    }
+    assert "Error while processing event StatisticsTask" not in caplog.text
+
+
 @pytest.mark.parametrize(
     "device_class,unit,native_unit,mean,min,max",
     [
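To run just the new test from a development checkout with the test requirements installed, something along these lines should work; the selection expression simply matches the test added above, and note that for the parametrizations with unit=None the asserted suffix becomes "already compiled statistics (None)":

# Equivalent to `pytest -k test_compile_hourly_statistics_changing_units_3`
# on the command line; assumes it is run from the repository root.
import pytest

pytest.main(["-q", "-k", "test_compile_hourly_statistics_changing_units_3"])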