Add statistics meta data table (#52331)

* Add statistics meta data table

* Tweak meta data generation
Author: Erik Montnemery, 2021-06-30 13:32:17 +02:00 (committed by GitHub)
parent d8337cf98f
commit 0ab999738b
7 changed files with 126 additions and 25 deletions


@@ -30,6 +30,7 @@ from homeassistant.const import (
     PRESSURE_MBAR,
     PRESSURE_PA,
     PRESSURE_PSI,
+    TEMP_CELSIUS,
 )
 from homeassistant.core import HomeAssistant, State
 import homeassistant.util.dt as dt_util
@@ -49,6 +50,13 @@ DEVICE_CLASS_STATISTICS = {
     DEVICE_CLASS_TEMPERATURE: {"mean", "min", "max"},
 }
+DEVICE_CLASS_UNITS = {
+    DEVICE_CLASS_ENERGY: ENERGY_KILO_WATT_HOUR,
+    DEVICE_CLASS_POWER: POWER_WATT,
+    DEVICE_CLASS_PRESSURE: PRESSURE_PA,
+    DEVICE_CLASS_TEMPERATURE: TEMP_CELSIUS,
+}
 UNIT_CONVERSIONS = {
     DEVICE_CLASS_ENERGY: {
         ENERGY_KILO_WATT_HOUR: lambda x: x,
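
Note: the new DEVICE_CLASS_UNITS table records the unit each supported device
class is normalized to, while the existing UNIT_CONVERSIONS table maps a
state's native unit to a callable that converts values into that normalized
unit. Below is a minimal sketch of how the two tables combine; the temperature
constants, the Fahrenheit converter and the normalize() helper are illustrative
assumptions, not code from this commit.

    # Sketch only: per-device-class default unit + per-unit converter callables.
    # All names here are stand-ins for the Home Assistant constants.
    DEVICE_CLASS_TEMPERATURE = "temperature"
    TEMP_CELSIUS = "°C"
    TEMP_FAHRENHEIT = "°F"

    DEVICE_CLASS_UNITS = {DEVICE_CLASS_TEMPERATURE: TEMP_CELSIUS}
    UNIT_CONVERSIONS = {
        DEVICE_CLASS_TEMPERATURE: {
            TEMP_CELSIUS: lambda x: x,
            TEMP_FAHRENHEIT: lambda x: (x - 32) * 5 / 9,
        },
    }

    def normalize(device_class: str, unit: str, value: float) -> tuple[str, float]:
        """Return (normalized unit, converted value) for a supported device class."""
        return DEVICE_CLASS_UNITS[device_class], UNIT_CONVERSIONS[device_class][unit](value)

    # normalize(DEVICE_CLASS_TEMPERATURE, TEMP_FAHRENHEIT, 68.0) -> ("°C", 20.0)
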
@@ -134,12 +142,16 @@ def _time_weighted_average(
 def _normalize_states(
     entity_history: list[State], device_class: str, entity_id: str
-) -> list[tuple[float, State]]:
+) -> tuple[str | None, list[tuple[float, State]]]:
     """Normalize units."""
     if device_class not in UNIT_CONVERSIONS:
         # We're not normalizing this device class, return the state as they are
-        return [(float(el.state), el) for el in entity_history if _is_number(el.state)]
+        fstates = [
+            (float(el.state), el) for el in entity_history if _is_number(el.state)
+        ]
+        unit = fstates[0][1].attributes.get(ATTR_UNIT_OF_MEASUREMENT)
+        return unit, fstates
     fstates = []
@@ -159,7 +171,7 @@ def _normalize_states(
         fstates.append((UNIT_CONVERSIONS[device_class][unit](fstate), state)) # type: ignore
-    return fstates
+    return DEVICE_CLASS_UNITS[device_class], fstates
 def compile_statistics(
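
Note: _normalize_states now returns the unit of measurement together with the
list of (float, State) pairs, so callers unpack a 2-tuple instead of taking the
list directly. A rough sketch of the new calling convention; the State stub and
_normalize_states_sketch below are simplified stand-ins, not the real Home
Assistant objects.

    # Sketch of the new return shape (unit, fstates); stand-ins only.
    from __future__ import annotations
    from dataclasses import dataclass, field

    @dataclass
    class State:
        """Minimal stand-in for homeassistant.core.State."""
        state: str
        attributes: dict = field(default_factory=dict)

    def _normalize_states_sketch(
        entity_history: list[State],
    ) -> tuple[str | None, list[tuple[float, State]]]:
        """Mimic the non-normalized branch: unit taken from the first state's attributes."""
        fstates = [(float(el.state), el) for el in entity_history]
        unit = fstates[0][1].attributes.get("unit_of_measurement")
        return unit, fstates

    history = [State("1.0", {"unit_of_measurement": "ppm"}), State("2.0", {"unit_of_measurement": "ppm"})]
    unit, fstates = _normalize_states_sketch(history)  # callers now unpack both values
    assert unit == "ppm" and [f for f, _ in fstates] == [1.0, 2.0]
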
@@ -185,21 +197,25 @@ def compile_statistics(
             continue
         entity_history = history_list[entity_id]
-        fstates = _normalize_states(entity_history, device_class, entity_id)
+        unit, fstates = _normalize_states(entity_history, device_class, entity_id)
         if not fstates:
             continue
         result[entity_id] = {}
+        # Set meta data
+        result[entity_id]["meta"] = {"unit_of_measurement": unit}
+        # Make calculations
+        stat: dict = {}
         if "max" in wanted_statistics:
-            result[entity_id]["max"] = max(*itertools.islice(zip(*fstates), 1))
+            stat["max"] = max(*itertools.islice(zip(*fstates), 1))
         if "min" in wanted_statistics:
-            result[entity_id]["min"] = min(*itertools.islice(zip(*fstates), 1))
+            stat["min"] = min(*itertools.islice(zip(*fstates), 1))
         if "mean" in wanted_statistics:
-            result[entity_id]["mean"] = _time_weighted_average(fstates, start, end)
+            stat["mean"] = _time_weighted_average(fstates, start, end)
         if "sum" in wanted_statistics:
             last_reset = old_last_reset = None
@@ -233,8 +249,10 @@ def compile_statistics(
             # Update the sum with the last state
             _sum += new_state - old_state
-            result[entity_id]["last_reset"] = dt_util.parse_datetime(last_reset)
-            result[entity_id]["sum"] = _sum
-            result[entity_id]["state"] = new_state
+            stat["last_reset"] = dt_util.parse_datetime(last_reset)
+            stat["sum"] = _sum
+            stat["state"] = new_state
+        result[entity_id]["stat"] = stat
     return result
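
Note: with this commit each entry produced by compile_statistics nests its
output under a "meta" key (the normalized unit of measurement) and a "stat" key
(the computed values), instead of writing max/min/mean/sum directly onto the
entity's entry. A rough sketch of the resulting shape; the entity id and the
numbers are invented for illustration.

    # Illustrative only: shape of one compile_statistics result entry after this change.
    result: dict[str, dict] = {}
    entity_id = "sensor.outdoor_temperature"  # hypothetical entity

    result[entity_id] = {
        "meta": {"unit_of_measurement": "°C"},  # normalized unit from _normalize_states
        "stat": {"max": 21.5, "min": 18.0, "mean": 19.7},  # previously stored flat on the entry
    }

    # Consumers now read values through the nested keys:
    assert result[entity_id]["stat"]["mean"] == 19.7
    assert result[entity_id]["meta"]["unit_of_measurement"] == "°C"
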