Allow small dip in total_increasing sensor without detecting a reset (#55153)

This commit is contained in:
Erik Montnemery 2021-08-24 17:23:55 +02:00 committed by GitHub
parent 8877f37da0
commit fa9f91325c
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
4 changed files with 91 additions and 3 deletions

View file

@ -11,6 +11,7 @@ from homeassistant.components.sensor import (
STATE_CLASS_TOTAL_INCREASING,
SensorEntity,
)
from homeassistant.components.sensor.recorder import reset_detected
from homeassistant.const import (
ATTR_UNIT_OF_MEASUREMENT,
ENERGY_KILO_WATT_HOUR,
@ -297,7 +298,7 @@ class EnergyCostSensor(SensorEntity):
)
return
if energy < float(self._last_energy_sensor_state):
if reset_detected(energy, float(self._last_energy_sensor_state)):
# Energy meter was reset, reset cost sensor too
self._reset(0)
# Update with newly incurred cost

View file

@ -226,6 +226,11 @@ def _normalize_states(
return DEVICE_CLASS_UNITS[key], fstates
def reset_detected(state: float, previous_state: float | None) -> bool:
    """Test if a total_increasing sensor has been reset.

    A drop of more than 10% below the previous reading is treated as a meter
    reset; smaller dips are tolerated as sensor noise. With no previous
    reading there is nothing to compare against, so no reset is reported.
    """
    if previous_state is None:
        return False
    return state < 0.9 * previous_state
def compile_statistics(
hass: HomeAssistant, start: datetime.datetime, end: datetime.datetime
) -> dict:
@ -308,7 +313,7 @@ def compile_statistics(
fstate,
)
elif state_class == STATE_CLASS_TOTAL_INCREASING and (
old_state is None or (new_state is not None and fstate < new_state)
old_state is None or reset_detected(fstate, new_state)
):
reset = True
_LOGGER.info(

View file

@ -216,6 +216,16 @@ async def test_cost_sensor_price_entity(
assert cost_sensor_entity_id in statistics
assert statistics[cost_sensor_entity_id]["stat"]["sum"] == 19.0
# Energy sensor has a small dip, no reset should be detected
hass.states.async_set(
usage_sensor_entity_id,
"14",
{ATTR_UNIT_OF_MEASUREMENT: ENERGY_KILO_WATT_HOUR},
)
await hass.async_block_till_done()
state = hass.states.get(cost_sensor_entity_id)
assert state.state == "18.0" # 19 EUR + (14-14.5) kWh * 2 EUR/kWh = 18 EUR
# Energy sensor is reset, with initial state at 4kWh, 0 kWh is used as zero-point
hass.states.async_set(
usage_sensor_entity_id,
@ -240,7 +250,7 @@ async def test_cost_sensor_price_entity(
await async_wait_recording_done_without_instance(hass)
statistics = await hass.loop.run_in_executor(None, _compile_statistics, hass)
assert cost_sensor_entity_id in statistics
assert statistics[cost_sensor_entity_id]["stat"]["sum"] == 39.0
assert statistics[cost_sensor_entity_id]["stat"]["sum"] == 38.0
async def test_cost_sensor_handle_wh(hass, hass_storage) -> None:

View file

@ -318,6 +318,78 @@ def test_compile_hourly_sum_statistics_total_increasing(
assert "Error while processing event StatisticsTask" not in caplog.text
@pytest.mark.parametrize(
    "device_class,unit,native_unit,factor",
    [("energy", "kWh", "kWh", 1)],
)
def test_compile_hourly_sum_statistics_total_increasing_small_dip(
    hass_recorder, caplog, device_class, unit, native_unit, factor
):
    """Test small dips in sensor readings do not trigger a reset."""
    period0 = dt_util.utcnow()
    hass = hass_recorder()
    instance = hass.data[DATA_INSTANCE]
    setup_component(hass, "sensor", {})
    attributes = {
        "device_class": device_class,
        "state_class": "total_increasing",
        "unit_of_measurement": unit,
    }
    # Reading 19 after 20 is a small dip (< 10%) and must not count as a reset.
    seq = [10, 15, 20, 19, 30, 40, 50, 60, 70]
    _, period_end, states = record_meter_states(
        hass, period0, "sensor.test1", attributes, seq
    )
    hist = history.get_significant_states(
        hass, period0 - timedelta.resolution, period_end + timedelta.resolution
    )
    assert dict(states)["sensor.test1"] == dict(hist)["sensor.test1"]

    # Compile statistics for each of the three recorded hours.
    for hour in range(3):
        instance.do_adhoc_statistics(
            period="hourly", start=period0 + timedelta(hours=hour)
        )
        wait_recording_done(hass)

    statistic_ids = list_statistic_ids(hass)
    assert statistic_ids == [
        {"statistic_id": "sensor.test1", "unit_of_measurement": native_unit}
    ]

    stats = statistics_during_period(hass, period0)
    # (hour offset, index of the hour's last state in seq, expected running sum)
    expected_rows = [
        {
            "statistic_id": "sensor.test1",
            "start": process_timestamp_to_utc_isoformat(
                period0 + timedelta(hours=hour)
            ),
            "max": None,
            "mean": None,
            "min": None,
            "state": approx(factor * seq[state_idx]),
            "sum": approx(factor * expected_sum),
        }
        for hour, state_idx, expected_sum in ((0, 2, 10.0), (1, 5, 30.0), (2, 8, 60.0))
    ]
    assert stats == {"sensor.test1": expected_rows}
    assert "Error while processing event StatisticsTask" not in caplog.text
def test_compile_hourly_energy_statistics_unsupported(hass_recorder, caplog):
"""Test compiling hourly statistics."""
zero = dt_util.utcnow()