Weight sensor average statistics by state durations (#51150)
* Weight sensor average statistics by state durations
* Fix test

parent 17b2678aee
commit b339d73109

3 changed files with 53 additions and 18 deletions
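Previously the hourly mean for a sensor was the plain arithmetic mean of its readings (statistics.fmean), so the result depended on how the readings happened to be spaced in time. This change weights each state value by how long it was held, and adjusts the expected values and fixture timings in the accompanying tests. As a rough illustration of the difference: a sensor that reads 10 for the first 50 minutes of an hour and 30 for the last 10 minutes has a time-weighted mean of (10 * 3000 + 30 * 600) / 3600 = 13.33, while the plain mean of the two readings is 20.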
@@ -3,7 +3,6 @@ from __future__ import annotations
 
 import datetime
 import itertools
-from statistics import fmean
 
 from homeassistant.components.recorder import history, statistics
 from homeassistant.components.sensor import (
@@ -16,7 +15,7 @@ from homeassistant.components.sensor import (
     STATE_CLASS_MEASUREMENT,
 )
 from homeassistant.const import ATTR_DEVICE_CLASS
-from homeassistant.core import HomeAssistant
+from homeassistant.core import HomeAssistant, State
 import homeassistant.util.dt as dt_util
 
 from . import DOMAIN
@@ -53,6 +52,44 @@ def _is_number(s: str) -> bool:  # pylint: disable=invalid-name
     return s.replace(".", "", 1).isdigit()
 
 
+def _time_weighted_average(
+    fstates: list[tuple[float, State]], start: datetime.datetime, end: datetime.datetime
+) -> float:
+    """Calculate a time weighted average.
+
+    The average is calculated by weighting the states by duration in seconds between
+    state changes.
+    Note: there's no interpolation of values between state changes.
+    """
+    old_fstate: float | None = None
+    old_start_time: datetime.datetime | None = None
+    accumulated = 0.0
+
+    for fstate, state in fstates:
+        # The recorder will give us the last known state, which may be well
+        # before the requested start time for the statistics
+        start_time = start if state.last_updated < start else state.last_updated
+        if old_start_time is None:
+            # Adjust start time, if there was no last known state
+            start = start_time
+        else:
+            duration = start_time - old_start_time
+            # Accumulate the value, weighted by duration until next state change
+            assert old_fstate is not None
+            accumulated += old_fstate * duration.total_seconds()
+
+        old_fstate = fstate
+        old_start_time = start_time
+
+    if old_fstate is not None:
+        # Accumulate the value, weighted by duration until end of the period
+        assert old_start_time is not None
+        duration = end - old_start_time
+        accumulated += old_fstate * duration.total_seconds()
+
+    return accumulated / (end - start).total_seconds()
+
+
 def compile_statistics(
     hass: HomeAssistant, start: datetime.datetime, end: datetime.datetime
 ) -> dict:
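The helper above consumes the recorder's (float, State) pairs and the statistics period bounds. The following is a minimal standalone sketch of the same duration-weighting logic, using plain (value, timestamp) tuples instead of State objects; the function name and the sample timestamps are invented, but they loosely mirror the updated test fixture further down and reproduce its expected mean:

from datetime import datetime, timedelta

def time_weighted_average(samples, start, end):
    """Sketch: each value in samples is held from its timestamp until the next one."""
    accumulated = 0.0
    prev_value = prev_time = None
    for value, when in samples:
        when = max(when, start)  # clamp a last known state that predates the window
        if prev_time is None:
            start = when  # no state before the window: shrink the window
        else:
            accumulated += prev_value * (when - prev_time).total_seconds()
        prev_value, prev_time = value, when
    if prev_value is not None:
        accumulated += prev_value * (end - prev_time).total_seconds()
    return accumulated / (end - start).total_seconds()

start = datetime(2021, 6, 1)
end = start + timedelta(hours=1)
samples = [
    (10.0, start + timedelta(minutes=1)),
    (15.0, start + timedelta(minutes=11)),
    (30.0, start + timedelta(minutes=51)),
]
print(time_weighted_average(samples, start, end))  # 16.440677966101696

Like the helper above, there is no interpolation between samples: a value simply persists until the next state change or the end of the window.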
@@ -91,10 +128,8 @@ def compile_statistics(
         if "min" in wanted_statistics:
             result[entity_id]["min"] = min(*itertools.islice(zip(*fstates), 1))
 
-        # Note: The average calculation will be incorrect for unevenly spaced readings,
-        # this needs to be improved by weighting with time between measurements
         if "mean" in wanted_statistics:
-            result[entity_id]["mean"] = fmean(*itertools.islice(zip(*fstates), 1))
+            result[entity_id]["mean"] = _time_weighted_average(fstates, start, end)
 
         if "sum" in wanted_statistics:
            last_reset = old_last_reset = None
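Before this change, compile_statistics fed the raw values into fmean and carried a comment conceding that the result was wrong for unevenly spaced readings; it now passes the (value, State) pairs plus the period bounds to _time_weighted_average. A quick comparison of the two behaviours on made-up readings and durations:

from statistics import fmean

values = [10.0, 15.0, 30.0]   # hypothetical readings
durations = [600, 2400, 540]  # seconds each reading was held

print(fmean(values))  # 18.333... (old behaviour: spacing between readings ignored)
print(sum(v * d for v, d in zip(values, durations)) / sum(durations))  # 16.44... (new behaviour)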
@@ -30,7 +30,7 @@ def test_compile_hourly_statistics(hass_recorder):
         {
             "statistic_id": "sensor.test1",
             "start": process_timestamp_to_utc_isoformat(zero),
-            "mean": 15.0,
+            "mean": 14.915254237288135,
             "min": 10.0,
             "max": 20.0,
             "last_reset": None,
@@ -31,9 +31,9 @@ def test_compile_hourly_statistics(hass_recorder):
         {
             "statistic_id": "sensor.test1",
             "start": process_timestamp_to_utc_isoformat(zero),
-            "mean": 15.0,
+            "mean": 16.440677966101696,
             "min": 10.0,
-            "max": 20.0,
+            "max": 30.0,
             "last_reset": None,
             "state": None,
             "sum": None,
@@ -243,9 +243,9 @@ def test_compile_hourly_statistics_unchanged(hass_recorder):
         {
             "statistic_id": "sensor.test1",
             "start": process_timestamp_to_utc_isoformat(four),
-            "mean": 20.0,
-            "min": 20.0,
-            "max": 20.0,
+            "mean": 30.0,
+            "min": 30.0,
+            "max": 30.0,
             "last_reset": None,
             "state": None,
             "sum": None,
@@ -271,7 +271,7 @@ def test_compile_hourly_statistics_partially_unavailable(hass_recorder):
         {
             "statistic_id": "sensor.test1",
             "start": process_timestamp_to_utc_isoformat(zero),
-            "mean": 17.5,
+            "mean": 21.1864406779661,
             "min": 10.0,
             "max": 25.0,
             "last_reset": None,
@@ -318,9 +318,9 @@ def record_states(hass):
 
     zero = dt_util.utcnow()
     one = zero + timedelta(minutes=1)
-    two = one + timedelta(minutes=15)
-    three = two + timedelta(minutes=30)
-    four = three + timedelta(minutes=15)
+    two = one + timedelta(minutes=10)
+    three = two + timedelta(minutes=40)
+    four = three + timedelta(minutes=10)
 
     states = {mp: [], sns1: [], sns2: [], sns3: []}
    with patch("homeassistant.components.recorder.dt_util.utcnow", return_value=one):
@@ -340,9 +340,9 @@ def record_states(hass):
         states[sns3].append(set_state(sns3, "15", attributes=sns3_attr))
 
     with patch("homeassistant.components.recorder.dt_util.utcnow", return_value=three):
-        states[sns1].append(set_state(sns1, "20", attributes=sns1_attr))
-        states[sns2].append(set_state(sns2, "20", attributes=sns2_attr))
-        states[sns3].append(set_state(sns3, "20", attributes=sns3_attr))
+        states[sns1].append(set_state(sns1, "30", attributes=sns1_attr))
+        states[sns2].append(set_state(sns2, "30", attributes=sns2_attr))
+        states[sns3].append(set_state(sns3, "30", attributes=sns3_attr))
 
     return zero, four, states
 
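With the updated fixture the readings land at one, eleven and fifty-one minutes past zero, and the statistics window shrinks to the 59 minutes for which a state is known. Assuming the first reading set at `one` is 10 (which the expected min of 10.0 suggests) and the hourly window ends at zero plus one hour, the updated expected means fall out directly:

    (10 * 600 + 15 * 2400 + 30 * 540) / 3540 = 58200 / 3540 = 16.440677966101696   (tests using this fixture, where the last reading becomes 30)
    (10 * 600 + 15 * 2400 + 20 * 540) / 3540 = 52800 / 3540 = 14.915254237288135   (consistent with the first test hunk above, where the last reading stays 20)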