Allow selecting display unit when fetching statistics (#78578)

This commit is contained in:
Erik Montnemery 2022-09-20 23:43:57 +02:00 committed by GitHub
parent 3f512e38db
commit dae00c70de
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
8 changed files with 1231 additions and 241 deletions

View file

@ -480,10 +480,16 @@ class Recorder(threading.Thread):
@callback @callback
def async_adjust_statistics( def async_adjust_statistics(
self, statistic_id: str, start_time: datetime, sum_adjustment: float self,
statistic_id: str,
start_time: datetime,
sum_adjustment: float,
display_unit: str,
) -> None: ) -> None:
"""Adjust statistics.""" """Adjust statistics."""
self.queue_task(AdjustStatisticsTask(statistic_id, start_time, sum_adjustment)) self.queue_task(
AdjustStatisticsTask(statistic_id, start_time, sum_adjustment, display_unit)
)
@callback @callback
def async_clear_statistics(self, statistic_ids: list[str]) -> None: def async_clear_statistics(self, statistic_ids: list[str]) -> None:

View file

@ -6,6 +6,7 @@ from collections.abc import Callable, Iterable
import contextlib import contextlib
import dataclasses import dataclasses
from datetime import datetime, timedelta from datetime import datetime, timedelta
from functools import partial
from itertools import chain, groupby from itertools import chain, groupby
import json import json
import logging import logging
@ -25,11 +26,12 @@ import voluptuous as vol
from homeassistant.const import ( from homeassistant.const import (
ENERGY_KILO_WATT_HOUR, ENERGY_KILO_WATT_HOUR,
ENERGY_MEGA_WATT_HOUR,
ENERGY_WATT_HOUR,
POWER_KILO_WATT, POWER_KILO_WATT,
POWER_WATT, POWER_WATT,
PRESSURE_PA, PRESSURE_PA,
TEMP_CELSIUS, TEMP_CELSIUS,
VOLUME_CUBIC_FEET,
VOLUME_CUBIC_METERS, VOLUME_CUBIC_METERS,
) )
from homeassistant.core import Event, HomeAssistant, callback, valid_entity_id from homeassistant.core import Event, HomeAssistant, callback, valid_entity_id
@ -41,7 +43,6 @@ from homeassistant.helpers.typing import UNDEFINED, UndefinedType
import homeassistant.util.dt as dt_util import homeassistant.util.dt as dt_util
import homeassistant.util.pressure as pressure_util import homeassistant.util.pressure as pressure_util
import homeassistant.util.temperature as temperature_util import homeassistant.util.temperature as temperature_util
from homeassistant.util.unit_system import UnitSystem
import homeassistant.util.volume as volume_util import homeassistant.util.volume as volume_util
from .const import DOMAIN, MAX_ROWS_TO_PURGE, SupportedDialect from .const import DOMAIN, MAX_ROWS_TO_PURGE, SupportedDialect
@ -131,65 +132,138 @@ QUERY_STATISTIC_META_ID = [
] ]
def _convert_power(value: float | None, state_unit: str, _: UnitSystem) -> float | None: def _convert_energy_from_kwh(to_unit: str, value: float | None) -> float | None:
"""Convert power in W to to_unit.""" """Convert energy in kWh to to_unit."""
if value is None: if value is None:
return None return None
if state_unit == POWER_KILO_WATT: if to_unit == ENERGY_MEGA_WATT_HOUR:
return value / 1000
if to_unit == ENERGY_WATT_HOUR:
return value * 1000
return value
def _convert_energy_to_kwh(from_unit: str, value: float) -> float:
"""Convert energy in from_unit to kWh."""
if from_unit == ENERGY_MEGA_WATT_HOUR:
return value * 1000
if from_unit == ENERGY_WATT_HOUR:
return value / 1000 return value / 1000
return value return value
def _convert_pressure( def _convert_power_from_w(to_unit: str, value: float | None) -> float | None:
value: float | None, state_unit: str, _: UnitSystem """Convert power in W to to_unit."""
) -> float | None: if value is None:
return None
if to_unit == POWER_KILO_WATT:
return value / 1000
return value
def _convert_pressure_from_pa(to_unit: str, value: float | None) -> float | None:
"""Convert pressure in Pa to to_unit.""" """Convert pressure in Pa to to_unit."""
if value is None: if value is None:
return None return None
return pressure_util.convert(value, PRESSURE_PA, state_unit) return pressure_util.convert(value, PRESSURE_PA, to_unit)
def _convert_temperature( def _convert_temperature_from_c(to_unit: str, value: float | None) -> float | None:
value: float | None, state_unit: str, _: UnitSystem
) -> float | None:
"""Convert temperature in °C to to_unit.""" """Convert temperature in °C to to_unit."""
if value is None: if value is None:
return None return None
return temperature_util.convert(value, TEMP_CELSIUS, state_unit) return temperature_util.convert(value, TEMP_CELSIUS, to_unit)
def _convert_volume(value: float | None, _: str, units: UnitSystem) -> float | None: def _convert_volume_from_m3(to_unit: str, value: float | None) -> float | None:
"""Convert volume in m³ to ft³ or m³.""" """Convert volume in m³ to to_unit."""
if value is None: if value is None:
return None return None
return volume_util.convert(value, VOLUME_CUBIC_METERS, _volume_unit(units)) return volume_util.convert(value, VOLUME_CUBIC_METERS, to_unit)
# Convert power, pressure, temperature and volume statistics from the normalized unit def _convert_volume_to_m3(from_unit: str, value: float) -> float:
# used for statistics to the unit configured by the user """Convert volume in from_unit to m³."""
STATISTIC_UNIT_TO_DISPLAY_UNIT_CONVERSIONS: dict[ return volume_util.convert(value, from_unit, VOLUME_CUBIC_METERS)
str, Callable[[float | None, str, UnitSystem], float | None]
] = {
POWER_WATT: _convert_power, STATISTIC_UNIT_TO_UNIT_CLASS: dict[str | None, str] = {
PRESSURE_PA: _convert_pressure, ENERGY_KILO_WATT_HOUR: "energy",
TEMP_CELSIUS: _convert_temperature, POWER_WATT: "power",
VOLUME_CUBIC_METERS: _convert_volume, PRESSURE_PA: "pressure",
TEMP_CELSIUS: "temperature",
VOLUME_CUBIC_METERS: "volume",
} }
# Convert volume statistics from the display unit configured by the user
# to the normalized unit used for statistics # Convert energy power, pressure, temperature and volume statistics from the
# This is used to support adjusting statistics in the display unit # normalized unit used for statistics to the unit configured by the user
DISPLAY_UNIT_TO_STATISTIC_UNIT_CONVERSIONS: dict[ STATISTIC_UNIT_TO_DISPLAY_UNIT_FUNCTIONS: dict[
str, Callable[[float, UnitSystem], float] str, Callable[[str, float | None], float | None]
] = { ] = {
VOLUME_CUBIC_FEET: lambda x, units: volume_util.convert( ENERGY_KILO_WATT_HOUR: _convert_energy_from_kwh,
x, _volume_unit(units), VOLUME_CUBIC_METERS POWER_WATT: _convert_power_from_w,
), PRESSURE_PA: _convert_pressure_from_pa,
TEMP_CELSIUS: _convert_temperature_from_c,
VOLUME_CUBIC_METERS: _convert_volume_from_m3,
}
# Convert energy and volume statistics from the display unit configured by the user
# to the normalized unit used for statistics.
# This is used to support adjusting statistics in the display unit
DISPLAY_UNIT_TO_STATISTIC_UNIT_FUNCTIONS: dict[str, Callable[[str, float], float]] = {
ENERGY_KILO_WATT_HOUR: _convert_energy_to_kwh,
VOLUME_CUBIC_METERS: _convert_volume_to_m3,
} }
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
def _get_statistic_to_display_unit_converter(
statistic_unit: str | None,
state_unit: str | None,
requested_units: dict[str, str] | None,
) -> Callable[[float | None], float | None]:
"""Prepare a converter from the normalized statistics unit to display unit."""
def no_conversion(val: float | None) -> float | None:
"""Return val."""
return val
if statistic_unit is None:
return no_conversion
if (
convert_fn := STATISTIC_UNIT_TO_DISPLAY_UNIT_FUNCTIONS.get(statistic_unit)
) is None:
return no_conversion
unit_class = STATISTIC_UNIT_TO_UNIT_CLASS[statistic_unit]
display_unit = requested_units.get(unit_class) if requested_units else state_unit
return partial(convert_fn, display_unit)
def _get_display_to_statistic_unit_converter(
display_unit: str | None,
statistic_unit: str | None,
) -> Callable[[float], float]:
"""Prepare a converter from the display unit to the normalized statistics unit."""
def no_conversion(val: float) -> float:
"""Return val."""
return val
if statistic_unit is None:
return no_conversion
if (
convert_fn := DISPLAY_UNIT_TO_STATISTIC_UNIT_FUNCTIONS.get(statistic_unit)
) is None:
return no_conversion
return partial(convert_fn, display_unit)
@dataclasses.dataclass @dataclasses.dataclass
class PlatformCompiledStatistics: class PlatformCompiledStatistics:
"""Compiled Statistics from a platform.""" """Compiled Statistics from a platform."""
@ -802,28 +876,6 @@ def get_metadata(
) )
def _volume_unit(units: UnitSystem) -> str:
"""Return the preferred volume unit according to unit system."""
if units.is_metric:
return VOLUME_CUBIC_METERS
return VOLUME_CUBIC_FEET
def _configured_unit(
unit: str | None, state_unit: str | None, units: UnitSystem
) -> str | None:
"""Return the pressure and temperature units configured by the user.
Energy and volume is normalized for the energy dashboard.
For other units, display in the unit of the source.
"""
if unit == ENERGY_KILO_WATT_HOUR:
return ENERGY_KILO_WATT_HOUR
if unit == VOLUME_CUBIC_METERS:
return _volume_unit(units)
return state_unit
def clear_statistics(instance: Recorder, statistic_ids: list[str]) -> None: def clear_statistics(instance: Recorder, statistic_ids: list[str]) -> None:
"""Clear statistics for a list of statistic_ids.""" """Clear statistics for a list of statistic_ids."""
with session_scope(session=instance.get_session()) as session: with session_scope(session=instance.get_session()) as session:
@ -868,11 +920,6 @@ def list_statistic_ids(
""" """
result = {} result = {}
def _display_unit(
hass: HomeAssistant, statistic_unit: str | None, state_unit: str | None
) -> str | None:
return _configured_unit(statistic_unit, state_unit, hass.config.units)
# Query the database # Query the database
with session_scope(hass=hass) as session: with session_scope(hass=hass) as session:
metadata = get_metadata_with_session( metadata = get_metadata_with_session(
@ -881,12 +928,13 @@ def list_statistic_ids(
result = { result = {
meta["statistic_id"]: { meta["statistic_id"]: {
"display_unit_of_measurement": meta["state_unit_of_measurement"],
"has_mean": meta["has_mean"], "has_mean": meta["has_mean"],
"has_sum": meta["has_sum"], "has_sum": meta["has_sum"],
"name": meta["name"], "name": meta["name"],
"source": meta["source"], "source": meta["source"],
"display_unit_of_measurement": _display_unit( "unit_class": STATISTIC_UNIT_TO_UNIT_CLASS.get(
hass, meta["unit_of_measurement"], meta["state_unit_of_measurement"] meta["unit_of_measurement"]
), ),
"unit_of_measurement": meta["unit_of_measurement"], "unit_of_measurement": meta["unit_of_measurement"],
} }
@ -909,8 +957,9 @@ def list_statistic_ids(
"has_sum": meta["has_sum"], "has_sum": meta["has_sum"],
"name": meta["name"], "name": meta["name"],
"source": meta["source"], "source": meta["source"],
"display_unit_of_measurement": _display_unit( "display_unit_of_measurement": meta["state_unit_of_measurement"],
hass, meta["unit_of_measurement"], meta["state_unit_of_measurement"] "unit_class": STATISTIC_UNIT_TO_UNIT_CLASS.get(
meta["unit_of_measurement"]
), ),
"unit_of_measurement": meta["unit_of_measurement"], "unit_of_measurement": meta["unit_of_measurement"],
} }
@ -925,6 +974,7 @@ def list_statistic_ids(
"source": info["source"], "source": info["source"],
"display_unit_of_measurement": info["display_unit_of_measurement"], "display_unit_of_measurement": info["display_unit_of_measurement"],
"statistics_unit_of_measurement": info["unit_of_measurement"], "statistics_unit_of_measurement": info["unit_of_measurement"],
"unit_class": info["unit_class"],
} }
for _id, info in result.items() for _id, info in result.items()
] ]
@ -1079,6 +1129,7 @@ def statistics_during_period(
statistic_ids: list[str] | None = None, statistic_ids: list[str] | None = None,
period: Literal["5minute", "day", "hour", "month"] = "hour", period: Literal["5minute", "day", "hour", "month"] = "hour",
start_time_as_datetime: bool = False, start_time_as_datetime: bool = False,
units: dict[str, str] | None = None,
) -> dict[str, list[dict[str, Any]]]: ) -> dict[str, list[dict[str, Any]]]:
"""Return statistics during UTC period start_time - end_time for the statistic_ids. """Return statistics during UTC period start_time - end_time for the statistic_ids.
@ -1120,10 +1171,20 @@ def statistics_during_period(
table, table,
start_time, start_time,
start_time_as_datetime, start_time_as_datetime,
units,
) )
result = _sorted_statistics_to_dict( result = _sorted_statistics_to_dict(
hass, session, stats, statistic_ids, metadata, True, table, start_time, True hass,
session,
stats,
statistic_ids,
metadata,
True,
table,
start_time,
True,
units,
) )
if period == "day": if period == "day":
@ -1192,6 +1253,8 @@ def _get_last_statistics(
convert_units, convert_units,
table, table,
None, None,
False,
None,
) )
@ -1276,6 +1339,8 @@ def get_latest_short_term_statistics(
False, False,
StatisticsShortTerm, StatisticsShortTerm,
None, None,
False,
None,
) )
@ -1320,18 +1385,18 @@ def _sorted_statistics_to_dict(
convert_units: bool, convert_units: bool,
table: type[Statistics | StatisticsShortTerm], table: type[Statistics | StatisticsShortTerm],
start_time: datetime | None, start_time: datetime | None,
start_time_as_datetime: bool = False, start_time_as_datetime: bool,
units: dict[str, str] | None,
) -> dict[str, list[dict]]: ) -> dict[str, list[dict]]:
"""Convert SQL results into JSON friendly data structure.""" """Convert SQL results into JSON friendly data structure."""
result: dict = defaultdict(list) result: dict = defaultdict(list)
units = hass.config.units
metadata = dict(_metadata.values()) metadata = dict(_metadata.values())
need_stat_at_start_time: set[int] = set() need_stat_at_start_time: set[int] = set()
stats_at_start_time = {} stats_at_start_time = {}
def no_conversion(val: Any, _unit: str | None, _units: Any) -> float | None: def no_conversion(val: float | None) -> float | None:
"""Return x.""" """Return val."""
return val # type: ignore[no-any-return] return val
# Set all statistic IDs to empty lists in result set to maintain the order # Set all statistic IDs to empty lists in result set to maintain the order
if statistic_ids is not None: if statistic_ids is not None:
@ -1357,11 +1422,8 @@ def _sorted_statistics_to_dict(
unit = metadata[meta_id]["unit_of_measurement"] unit = metadata[meta_id]["unit_of_measurement"]
state_unit = metadata[meta_id]["state_unit_of_measurement"] state_unit = metadata[meta_id]["state_unit_of_measurement"]
statistic_id = metadata[meta_id]["statistic_id"] statistic_id = metadata[meta_id]["statistic_id"]
convert: Callable[[Any, Any, Any], float | None]
if unit is not None and convert_units: if unit is not None and convert_units:
convert = STATISTIC_UNIT_TO_DISPLAY_UNIT_CONVERSIONS.get( convert = _get_statistic_to_display_unit_converter(unit, state_unit, units)
unit, no_conversion
)
else: else:
convert = no_conversion convert = no_conversion
ent_results = result[meta_id] ent_results = result[meta_id]
@ -1373,14 +1435,14 @@ def _sorted_statistics_to_dict(
"statistic_id": statistic_id, "statistic_id": statistic_id,
"start": start if start_time_as_datetime else start.isoformat(), "start": start if start_time_as_datetime else start.isoformat(),
"end": end.isoformat(), "end": end.isoformat(),
"mean": convert(db_state.mean, state_unit, units), "mean": convert(db_state.mean),
"min": convert(db_state.min, state_unit, units), "min": convert(db_state.min),
"max": convert(db_state.max, state_unit, units), "max": convert(db_state.max),
"last_reset": process_timestamp_to_utc_isoformat( "last_reset": process_timestamp_to_utc_isoformat(
db_state.last_reset db_state.last_reset
), ),
"state": convert(db_state.state, state_unit, units), "state": convert(db_state.state),
"sum": convert(db_state.sum, state_unit, units), "sum": convert(db_state.sum),
} }
) )
@ -1556,6 +1618,7 @@ def adjust_statistics(
statistic_id: str, statistic_id: str,
start_time: datetime, start_time: datetime,
sum_adjustment: float, sum_adjustment: float,
display_unit: str,
) -> bool: ) -> bool:
"""Process an add_statistics job.""" """Process an add_statistics job."""
@ -1566,11 +1629,9 @@ def adjust_statistics(
if statistic_id not in metadata: if statistic_id not in metadata:
return True return True
units = instance.hass.config.units
statistic_unit = metadata[statistic_id][1]["unit_of_measurement"] statistic_unit = metadata[statistic_id][1]["unit_of_measurement"]
display_unit = _configured_unit(statistic_unit, None, units) convert = _get_display_to_statistic_unit_converter(display_unit, statistic_unit)
convert = DISPLAY_UNIT_TO_STATISTIC_UNIT_CONVERSIONS.get(display_unit, lambda x, units: x) # type: ignore[arg-type] sum_adjustment = convert(sum_adjustment)
sum_adjustment = convert(sum_adjustment, units)
_adjust_sum_statistics( _adjust_sum_statistics(
session, session,

View file

@ -145,6 +145,7 @@ class AdjustStatisticsTask(RecorderTask):
statistic_id: str statistic_id: str
start_time: datetime start_time: datetime
sum_adjustment: float sum_adjustment: float
display_unit: str
def run(self, instance: Recorder) -> None: def run(self, instance: Recorder) -> None:
"""Run statistics task.""" """Run statistics task."""
@ -153,12 +154,16 @@ class AdjustStatisticsTask(RecorderTask):
self.statistic_id, self.statistic_id,
self.start_time, self.start_time,
self.sum_adjustment, self.sum_adjustment,
self.display_unit,
): ):
return return
# Schedule a new adjust statistics task if this one didn't finish # Schedule a new adjust statistics task if this one didn't finish
instance.queue_task( instance.queue_task(
AdjustStatisticsTask( AdjustStatisticsTask(
self.statistic_id, self.start_time, self.sum_adjustment self.statistic_id,
self.start_time,
self.sum_adjustment,
self.display_unit,
) )
) )

View file

@ -9,10 +9,21 @@ import voluptuous as vol
from homeassistant.components import websocket_api from homeassistant.components import websocket_api
from homeassistant.components.websocket_api import messages from homeassistant.components.websocket_api import messages
from homeassistant.const import (
ENERGY_KILO_WATT_HOUR,
ENERGY_MEGA_WATT_HOUR,
ENERGY_WATT_HOUR,
POWER_KILO_WATT,
POWER_WATT,
VOLUME_CUBIC_FEET,
VOLUME_CUBIC_METERS,
)
from homeassistant.core import HomeAssistant, callback, valid_entity_id from homeassistant.core import HomeAssistant, callback, valid_entity_id
from homeassistant.helpers import config_validation as cv from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.json import JSON_DUMP from homeassistant.helpers.json import JSON_DUMP
from homeassistant.util import dt as dt_util from homeassistant.util import dt as dt_util
import homeassistant.util.pressure as pressure_util
import homeassistant.util.temperature as temperature_util
from .const import MAX_QUEUE_BACKLOG from .const import MAX_QUEUE_BACKLOG
from .statistics import ( from .statistics import (
@ -47,15 +58,18 @@ def _ws_get_statistics_during_period(
hass: HomeAssistant, hass: HomeAssistant,
msg_id: int, msg_id: int,
start_time: dt, start_time: dt,
end_time: dt | None = None, end_time: dt | None,
statistic_ids: list[str] | None = None, statistic_ids: list[str] | None,
period: Literal["5minute", "day", "hour", "month"] = "hour", period: Literal["5minute", "day", "hour", "month"],
units: dict[str, str],
) -> str: ) -> str:
"""Fetch statistics and convert them to json in the executor.""" """Fetch statistics and convert them to json in the executor."""
return JSON_DUMP( return JSON_DUMP(
messages.result_message( messages.result_message(
msg_id, msg_id,
statistics_during_period(hass, start_time, end_time, statistic_ids, period), statistics_during_period(
hass, start_time, end_time, statistic_ids, period, units=units
),
) )
) )
@ -91,6 +105,7 @@ async def ws_handle_get_statistics_during_period(
end_time, end_time,
msg.get("statistic_ids"), msg.get("statistic_ids"),
msg.get("period"), msg.get("period"),
msg.get("units"),
) )
) )
@ -102,6 +117,17 @@ async def ws_handle_get_statistics_during_period(
vol.Optional("end_time"): str, vol.Optional("end_time"): str,
vol.Optional("statistic_ids"): [str], vol.Optional("statistic_ids"): [str],
vol.Required("period"): vol.Any("5minute", "hour", "day", "month"), vol.Required("period"): vol.Any("5minute", "hour", "day", "month"),
vol.Optional("units"): vol.Schema(
{
vol.Optional("energy"): vol.Any(
ENERGY_WATT_HOUR, ENERGY_KILO_WATT_HOUR, ENERGY_MEGA_WATT_HOUR
),
vol.Optional("power"): vol.Any(POWER_WATT, POWER_KILO_WATT),
vol.Optional("pressure"): vol.In(pressure_util.VALID_UNITS),
vol.Optional("temperature"): vol.In(temperature_util.VALID_UNITS),
vol.Optional("volume"): vol.Any(VOLUME_CUBIC_FEET, VOLUME_CUBIC_METERS),
}
),
} }
) )
@websocket_api.async_response @websocket_api.async_response
@ -236,13 +262,18 @@ def ws_update_statistics_metadata(
vol.Required("statistic_id"): str, vol.Required("statistic_id"): str,
vol.Required("start_time"): str, vol.Required("start_time"): str,
vol.Required("adjustment"): vol.Any(float, int), vol.Required("adjustment"): vol.Any(float, int),
vol.Required("display_unit"): vol.Any(str, None),
} }
) )
@callback @websocket_api.async_response
def ws_adjust_sum_statistics( async def ws_adjust_sum_statistics(
hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict
) -> None: ) -> None:
"""Adjust sum statistics.""" """Adjust sum statistics.
If the statistics is stored as kWh, it's allowed to make an adjustment in Wh or MWh
If the statistics is stored as , it's allowed to make an adjustment in ft³
"""
start_time_str = msg["start_time"] start_time_str = msg["start_time"]
if start_time := dt_util.parse_datetime(start_time_str): if start_time := dt_util.parse_datetime(start_time_str):
@ -251,8 +282,38 @@ def ws_adjust_sum_statistics(
connection.send_error(msg["id"], "invalid_start_time", "Invalid start time") connection.send_error(msg["id"], "invalid_start_time", "Invalid start time")
return return
instance = get_instance(hass)
metadatas = await instance.async_add_executor_job(
list_statistic_ids, hass, (msg["statistic_id"],)
)
if not metadatas:
connection.send_error(msg["id"], "unknown_statistic_id", "Unknown statistic ID")
return
metadata = metadatas[0]
def valid_units(statistics_unit: str | None, display_unit: str | None) -> bool:
if statistics_unit == display_unit:
return True
if statistics_unit == ENERGY_KILO_WATT_HOUR and display_unit in (
ENERGY_MEGA_WATT_HOUR,
ENERGY_WATT_HOUR,
):
return True
if statistics_unit == VOLUME_CUBIC_METERS and display_unit == VOLUME_CUBIC_FEET:
return True
return False
stat_unit = metadata["statistics_unit_of_measurement"]
if not valid_units(stat_unit, msg["display_unit"]):
connection.send_error(
msg["id"],
"invalid_units",
f"Can't convert {stat_unit} to {msg['display_unit']}",
)
return
get_instance(hass).async_adjust_statistics( get_instance(hass).async_adjust_statistics(
msg["statistic_id"], start_time, msg["adjustment"] msg["statistic_id"], start_time, msg["adjustment"], msg["display_unit"]
) )
connection.send_result(msg["id"]) connection.send_result(msg["id"])
@ -286,7 +347,7 @@ def ws_adjust_sum_statistics(
def ws_import_statistics( def ws_import_statistics(
hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict
) -> None: ) -> None:
"""Adjust sum statistics.""" """Import statistics."""
metadata = msg["metadata"] metadata = msg["metadata"]
stats = msg["stats"] stats = msg["stats"]
metadata["state_unit_of_measurement"] = metadata["unit_of_measurement"] metadata["state_unit_of_measurement"] = metadata["unit_of_measurement"]

View file

@ -70,6 +70,7 @@ async def test_demo_statistics(hass, recorder_mock):
"source": "demo", "source": "demo",
"statistic_id": "demo:temperature_outdoor", "statistic_id": "demo:temperature_outdoor",
"statistics_unit_of_measurement": "°C", "statistics_unit_of_measurement": "°C",
"unit_class": "temperature",
} in statistic_ids } in statistic_ids
assert { assert {
"display_unit_of_measurement": "kWh", "display_unit_of_measurement": "kWh",
@ -79,6 +80,7 @@ async def test_demo_statistics(hass, recorder_mock):
"source": "demo", "source": "demo",
"statistic_id": "demo:energy_consumption_kwh", "statistic_id": "demo:energy_consumption_kwh",
"statistics_unit_of_measurement": "kWh", "statistics_unit_of_measurement": "kWh",
"unit_class": "energy",
} in statistic_ids } in statistic_ids

View file

@ -532,6 +532,7 @@ async def test_import_statistics(
"name": "Total imported energy", "name": "Total imported energy",
"source": source, "source": source,
"statistics_unit_of_measurement": "kWh", "statistics_unit_of_measurement": "kWh",
"unit_class": "energy",
} }
] ]
metadata = get_metadata(hass, statistic_ids=(statistic_id,)) metadata = get_metadata(hass, statistic_ids=(statistic_id,))
@ -603,7 +604,7 @@ async def test_import_statistics(
] ]
} }
# Update the previously inserted statistics + rename and change unit # Update the previously inserted statistics + rename and change display unit
external_statistics = { external_statistics = {
"start": period1, "start": period1,
"max": 1, "max": 1,
@ -620,13 +621,14 @@ async def test_import_statistics(
statistic_ids = list_statistic_ids(hass) statistic_ids = list_statistic_ids(hass)
assert statistic_ids == [ assert statistic_ids == [
{ {
"display_unit_of_measurement": "kWh", "display_unit_of_measurement": "MWh",
"has_mean": False, "has_mean": False,
"has_sum": True, "has_sum": True,
"statistic_id": statistic_id, "statistic_id": statistic_id,
"name": "Total imported energy renamed", "name": "Total imported energy renamed",
"source": source, "source": source,
"statistics_unit_of_measurement": "kWh", "statistics_unit_of_measurement": "kWh",
"unit_class": "energy",
} }
] ]
metadata = get_metadata(hass, statistic_ids=(statistic_id,)) metadata = get_metadata(hass, statistic_ids=(statistic_id,))
@ -651,12 +653,12 @@ async def test_import_statistics(
"statistic_id": statistic_id, "statistic_id": statistic_id,
"start": period1.isoformat(), "start": period1.isoformat(),
"end": (period1 + timedelta(hours=1)).isoformat(), "end": (period1 + timedelta(hours=1)).isoformat(),
"max": approx(1.0), "max": approx(1.0 / 1000),
"mean": approx(2.0), "mean": approx(2.0 / 1000),
"min": approx(3.0), "min": approx(3.0 / 1000),
"last_reset": last_reset_utc_str, "last_reset": last_reset_utc_str,
"state": approx(4.0), "state": approx(4.0 / 1000),
"sum": approx(5.0), "sum": approx(5.0 / 1000),
}, },
{ {
"statistic_id": statistic_id, "statistic_id": statistic_id,
@ -666,8 +668,8 @@ async def test_import_statistics(
"mean": None, "mean": None,
"min": None, "min": None,
"last_reset": last_reset_utc_str, "last_reset": last_reset_utc_str,
"state": approx(1.0), "state": approx(1.0 / 1000),
"sum": approx(3.0), "sum": approx(3.0 / 1000),
}, },
] ]
} }
@ -680,6 +682,7 @@ async def test_import_statistics(
"statistic_id": statistic_id, "statistic_id": statistic_id,
"start_time": period2.isoformat(), "start_time": period2.isoformat(),
"adjustment": 1000.0, "adjustment": 1000.0,
"display_unit": "MWh",
} }
) )
response = await client.receive_json() response = await client.receive_json()
@ -693,12 +696,12 @@ async def test_import_statistics(
"statistic_id": statistic_id, "statistic_id": statistic_id,
"start": period1.isoformat(), "start": period1.isoformat(),
"end": (period1 + timedelta(hours=1)).isoformat(), "end": (period1 + timedelta(hours=1)).isoformat(),
"max": approx(1.0), "max": approx(1.0 / 1000),
"mean": approx(2.0), "mean": approx(2.0 / 1000),
"min": approx(3.0), "min": approx(3.0 / 1000),
"last_reset": last_reset_utc_str, "last_reset": last_reset_utc_str,
"state": approx(4.0), "state": approx(4.0 / 1000),
"sum": approx(5.0), "sum": approx(5.0 / 1000),
}, },
{ {
"statistic_id": statistic_id, "statistic_id": statistic_id,
@ -708,8 +711,8 @@ async def test_import_statistics(
"mean": None, "mean": None,
"min": None, "min": None,
"last_reset": last_reset_utc_str, "last_reset": last_reset_utc_str,
"state": approx(1.0), "state": approx(1.0 / 1000),
"sum": approx(1003.0), "sum": approx(1000 + 3.0 / 1000),
}, },
] ]
} }

View file

@ -50,12 +50,12 @@ TEMPERATURE_SENSOR_F_ATTRIBUTES = {
"state_class": "measurement", "state_class": "measurement",
"unit_of_measurement": "°F", "unit_of_measurement": "°F",
} }
ENERGY_SENSOR_ATTRIBUTES = { ENERGY_SENSOR_KWH_ATTRIBUTES = {
"device_class": "energy", "device_class": "energy",
"state_class": "total", "state_class": "total",
"unit_of_measurement": "kWh", "unit_of_measurement": "kWh",
} }
GAS_SENSOR_ATTRIBUTES = { GAS_SENSOR_M3_ATTRIBUTES = {
"device_class": "gas", "device_class": "gas",
"state_class": "total", "state_class": "total",
"unit_of_measurement": "", "unit_of_measurement": "",
@ -133,6 +133,241 @@ async def test_statistics_during_period(
} }
@pytest.mark.parametrize(
"attributes, state, value, custom_units, converted_value",
[
(POWER_SENSOR_KW_ATTRIBUTES, 10, 10, {"power": "W"}, 10000),
(POWER_SENSOR_KW_ATTRIBUTES, 10, 10, {"power": "kW"}, 10),
(PRESSURE_SENSOR_HPA_ATTRIBUTES, 10, 10, {"pressure": "Pa"}, 1000),
(PRESSURE_SENSOR_HPA_ATTRIBUTES, 10, 10, {"pressure": "hPa"}, 10),
(PRESSURE_SENSOR_HPA_ATTRIBUTES, 10, 10, {"pressure": "psi"}, 1000 / 6894.757),
(TEMPERATURE_SENSOR_C_ATTRIBUTES, 10, 10, {"temperature": "°C"}, 10),
(TEMPERATURE_SENSOR_C_ATTRIBUTES, 10, 10, {"temperature": "°F"}, 50),
(TEMPERATURE_SENSOR_C_ATTRIBUTES, 10, 10, {"temperature": "K"}, 283.15),
],
)
async def test_statistics_during_period_unit_conversion(
hass,
hass_ws_client,
recorder_mock,
attributes,
state,
value,
custom_units,
converted_value,
):
"""Test statistics_during_period."""
now = dt_util.utcnow()
await async_setup_component(hass, "sensor", {})
await async_recorder_block_till_done(hass)
hass.states.async_set("sensor.test", state, attributes=attributes)
await async_wait_recording_done(hass)
do_adhoc_statistics(hass, start=now)
await async_wait_recording_done(hass)
client = await hass_ws_client()
# Query in state unit
await client.send_json(
{
"id": 1,
"type": "recorder/statistics_during_period",
"start_time": now.isoformat(),
"statistic_ids": ["sensor.test"],
"period": "5minute",
}
)
response = await client.receive_json()
assert response["success"]
assert response["result"] == {
"sensor.test": [
{
"statistic_id": "sensor.test",
"start": now.isoformat(),
"end": (now + timedelta(minutes=5)).isoformat(),
"mean": approx(value),
"min": approx(value),
"max": approx(value),
"last_reset": None,
"state": None,
"sum": None,
}
]
}
# Query in custom unit
await client.send_json(
{
"id": 2,
"type": "recorder/statistics_during_period",
"start_time": now.isoformat(),
"statistic_ids": ["sensor.test"],
"period": "5minute",
"units": custom_units,
}
)
response = await client.receive_json()
assert response["success"]
assert response["result"] == {
"sensor.test": [
{
"statistic_id": "sensor.test",
"start": now.isoformat(),
"end": (now + timedelta(minutes=5)).isoformat(),
"mean": approx(converted_value),
"min": approx(converted_value),
"max": approx(converted_value),
"last_reset": None,
"state": None,
"sum": None,
}
]
}
@pytest.mark.parametrize(
    "attributes, state, value, custom_units, converted_value",
    [
        (ENERGY_SENSOR_KWH_ATTRIBUTES, 10, 10, {"energy": "kWh"}, 10),
        (ENERGY_SENSOR_KWH_ATTRIBUTES, 10, 10, {"energy": "MWh"}, 0.010),
        (ENERGY_SENSOR_KWH_ATTRIBUTES, 10, 10, {"energy": "Wh"}, 10000),
        # "m³" restored for the identity-conversion gas case; the unit string
        # had been lost to an encoding error (the ft³ case below converts
        # from the sensor's native m³).
        (GAS_SENSOR_M3_ATTRIBUTES, 10, 10, {"volume": "m³"}, 10),
        (GAS_SENSOR_M3_ATTRIBUTES, 10, 10, {"volume": "ft³"}, 353.147),
    ],
)
async def test_sum_statistics_during_period_unit_conversion(
    hass,
    hass_ws_client,
    recorder_mock,
    attributes,
    state,
    value,
    custom_units,
    converted_value,
):
    """Test unit conversion when fetching sum statistics.

    State and sum must be returned in the sensor's own unit by default, and
    converted when a custom unit is requested via the ``units`` option.
    """
    now = dt_util.utcnow()
    await async_setup_component(hass, "sensor", {})
    await async_recorder_block_till_done(hass)
    # Two states so the compiled statistics have a meaningful sum.
    hass.states.async_set("sensor.test", 0, attributes=attributes)
    hass.states.async_set("sensor.test", state, attributes=attributes)
    await async_wait_recording_done(hass)
    do_adhoc_statistics(hass, start=now)
    await async_wait_recording_done(hass)
    client = await hass_ws_client()
    # Query in state unit
    await client.send_json(
        {
            "id": 1,
            "type": "recorder/statistics_during_period",
            "start_time": now.isoformat(),
            "statistic_ids": ["sensor.test"],
            "period": "5minute",
        }
    )
    response = await client.receive_json()
    assert response["success"]
    assert response["result"] == {
        "sensor.test": [
            {
                "statistic_id": "sensor.test",
                "start": now.isoformat(),
                "end": (now + timedelta(minutes=5)).isoformat(),
                "mean": None,
                "min": None,
                "max": None,
                "last_reset": None,
                "state": approx(value),
                "sum": approx(value),
            }
        ]
    }
    # Query in custom unit
    await client.send_json(
        {
            "id": 2,
            "type": "recorder/statistics_during_period",
            "start_time": now.isoformat(),
            "statistic_ids": ["sensor.test"],
            "period": "5minute",
            "units": custom_units,
        }
    )
    response = await client.receive_json()
    assert response["success"]
    assert response["result"] == {
        "sensor.test": [
            {
                "statistic_id": "sensor.test",
                "start": now.isoformat(),
                "end": (now + timedelta(minutes=5)).isoformat(),
                "mean": None,
                "min": None,
                "max": None,
                "last_reset": None,
                "state": approx(converted_value),
                "sum": approx(converted_value),
            }
        ]
    }
@pytest.mark.parametrize(
    "custom_units",
    [
        # Each device class is paired with a unit belonging to a different
        # class (energy→power→pressure→temperature→volume→energy rotation),
        # so every request must be rejected as an invalid format.
        {"energy": "W"},
        {"power": "Pa"},
        {"pressure": "K"},
        # "m³" restored here; it had been lost to an encoding error, which
        # broke the unit rotation (an empty string tests nothing specific).
        {"temperature": "m³"},
        {"volume": "kWh"},
    ],
)
async def test_statistics_during_period_invalid_unit_conversion(
    hass, hass_ws_client, recorder_mock, custom_units
):
    """Test statistics_during_period rejects conversion to a foreign unit."""
    now = dt_util.utcnow()
    await async_setup_component(hass, "sensor", {})
    await async_recorder_block_till_done(hass)
    client = await hass_ws_client()
    # Query in state unit
    await client.send_json(
        {
            "id": 1,
            "type": "recorder/statistics_during_period",
            "start_time": now.isoformat(),
            "period": "5minute",
        }
    )
    response = await client.receive_json()
    assert response["success"]
    assert response["result"] == {}
    # Query in custom unit
    await client.send_json(
        {
            "id": 2,
            "type": "recorder/statistics_during_period",
            "start_time": now.isoformat(),
            "period": "5minute",
            "units": custom_units,
        }
    )
    response = await client.receive_json()
    assert not response["success"]
    assert response["error"]["code"] == "invalid_format"
@pytest.mark.parametrize( @pytest.mark.parametrize(
"units, attributes, state, value", "units, attributes, state, value",
[ [
@ -307,16 +542,16 @@ async def test_statistics_during_period_bad_end_time(
@pytest.mark.parametrize( @pytest.mark.parametrize(
"units, attributes, display_unit, statistics_unit", "units, attributes, display_unit, statistics_unit, unit_class",
[ [
(IMPERIAL_SYSTEM, POWER_SENSOR_KW_ATTRIBUTES, "kW", "W"), (IMPERIAL_SYSTEM, POWER_SENSOR_KW_ATTRIBUTES, "kW", "W", "power"),
(METRIC_SYSTEM, POWER_SENSOR_KW_ATTRIBUTES, "kW", "W"), (METRIC_SYSTEM, POWER_SENSOR_KW_ATTRIBUTES, "kW", "W", "power"),
(IMPERIAL_SYSTEM, TEMPERATURE_SENSOR_C_ATTRIBUTES, "°C", "°C"), (IMPERIAL_SYSTEM, TEMPERATURE_SENSOR_C_ATTRIBUTES, "°C", "°C", "temperature"),
(METRIC_SYSTEM, TEMPERATURE_SENSOR_C_ATTRIBUTES, "°C", "°C"), (METRIC_SYSTEM, TEMPERATURE_SENSOR_C_ATTRIBUTES, "°C", "°C", "temperature"),
(IMPERIAL_SYSTEM, TEMPERATURE_SENSOR_F_ATTRIBUTES, "°F", "°C"), (IMPERIAL_SYSTEM, TEMPERATURE_SENSOR_F_ATTRIBUTES, "°F", "°C", "temperature"),
(METRIC_SYSTEM, TEMPERATURE_SENSOR_F_ATTRIBUTES, "°F", "°C"), (METRIC_SYSTEM, TEMPERATURE_SENSOR_F_ATTRIBUTES, "°F", "°C", "temperature"),
(IMPERIAL_SYSTEM, PRESSURE_SENSOR_HPA_ATTRIBUTES, "hPa", "Pa"), (IMPERIAL_SYSTEM, PRESSURE_SENSOR_HPA_ATTRIBUTES, "hPa", "Pa", "pressure"),
(METRIC_SYSTEM, PRESSURE_SENSOR_HPA_ATTRIBUTES, "hPa", "Pa"), (METRIC_SYSTEM, PRESSURE_SENSOR_HPA_ATTRIBUTES, "hPa", "Pa", "pressure"),
], ],
) )
async def test_list_statistic_ids( async def test_list_statistic_ids(
@ -327,6 +562,7 @@ async def test_list_statistic_ids(
attributes, attributes,
display_unit, display_unit,
statistics_unit, statistics_unit,
unit_class,
): ):
"""Test list_statistic_ids.""" """Test list_statistic_ids."""
now = dt_util.utcnow() now = dt_util.utcnow()
@ -356,6 +592,7 @@ async def test_list_statistic_ids(
"source": "recorder", "source": "recorder",
"display_unit_of_measurement": display_unit, "display_unit_of_measurement": display_unit,
"statistics_unit_of_measurement": statistics_unit, "statistics_unit_of_measurement": statistics_unit,
"unit_class": unit_class,
} }
] ]
@ -377,6 +614,7 @@ async def test_list_statistic_ids(
"source": "recorder", "source": "recorder",
"display_unit_of_measurement": display_unit, "display_unit_of_measurement": display_unit,
"statistics_unit_of_measurement": statistics_unit, "statistics_unit_of_measurement": statistics_unit,
"unit_class": unit_class,
} }
] ]
@ -400,6 +638,7 @@ async def test_list_statistic_ids(
"source": "recorder", "source": "recorder",
"display_unit_of_measurement": display_unit, "display_unit_of_measurement": display_unit,
"statistics_unit_of_measurement": statistics_unit, "statistics_unit_of_measurement": statistics_unit,
"unit_class": unit_class,
} }
] ]
@ -590,6 +829,7 @@ async def test_update_statistics_metadata(
"name": None, "name": None,
"source": "recorder", "source": "recorder",
"statistics_unit_of_measurement": "W", "statistics_unit_of_measurement": "W",
"unit_class": "power",
} }
] ]
@ -617,6 +857,7 @@ async def test_update_statistics_metadata(
"name": None, "name": None,
"source": "recorder", "source": "recorder",
"statistics_unit_of_measurement": new_unit, "statistics_unit_of_measurement": new_unit,
"unit_class": None,
} }
] ]
@ -802,14 +1043,14 @@ async def test_backup_end_without_start(
@pytest.mark.parametrize( @pytest.mark.parametrize(
"units, attributes, unit", "units, attributes, unit, unit_class",
[ [
(METRIC_SYSTEM, GAS_SENSOR_ATTRIBUTES, ""), (METRIC_SYSTEM, GAS_SENSOR_M3_ATTRIBUTES, "", "volume"),
(METRIC_SYSTEM, ENERGY_SENSOR_ATTRIBUTES, "kWh"), (METRIC_SYSTEM, ENERGY_SENSOR_KWH_ATTRIBUTES, "kWh", "energy"),
], ],
) )
async def test_get_statistics_metadata( async def test_get_statistics_metadata(
hass, hass_ws_client, recorder_mock, units, attributes, unit hass, hass_ws_client, recorder_mock, units, attributes, unit, unit_class
): ):
"""Test get_statistics_metadata.""" """Test get_statistics_metadata."""
now = dt_util.utcnow() now = dt_util.utcnow()
@ -891,6 +1132,7 @@ async def test_get_statistics_metadata(
"name": None, "name": None,
"source": "recorder", "source": "recorder",
"statistics_unit_of_measurement": unit, "statistics_unit_of_measurement": unit,
"unit_class": unit_class,
} }
] ]
@ -918,6 +1160,7 @@ async def test_get_statistics_metadata(
"name": None, "name": None,
"source": "recorder", "source": "recorder",
"statistics_unit_of_measurement": unit, "statistics_unit_of_measurement": unit,
"unit_class": unit_class,
} }
] ]
@ -1014,6 +1257,7 @@ async def test_import_statistics(
"name": "Total imported energy", "name": "Total imported energy",
"source": source, "source": source,
"statistics_unit_of_measurement": "kWh", "statistics_unit_of_measurement": "kWh",
"unit_class": "energy",
} }
] ]
metadata = get_metadata(hass, statistic_ids=(statistic_id,)) metadata = get_metadata(hass, statistic_ids=(statistic_id,))
@ -1149,6 +1393,119 @@ async def test_import_statistics(
] ]
} }
@pytest.mark.parametrize(
"source, statistic_id",
(
("test", "test:total_energy_import"),
("recorder", "sensor.total_energy_import"),
),
)
async def test_adjust_sum_statistics_energy(
hass, hass_ws_client, recorder_mock, caplog, source, statistic_id
):
"""Test adjusting statistics."""
client = await hass_ws_client()
assert "Compiling statistics for" not in caplog.text
assert "Statistics already compiled" not in caplog.text
zero = dt_util.utcnow()
period1 = zero.replace(minute=0, second=0, microsecond=0) + timedelta(hours=1)
period2 = zero.replace(minute=0, second=0, microsecond=0) + timedelta(hours=2)
external_statistics1 = {
"start": period1.isoformat(),
"last_reset": None,
"state": 0,
"sum": 2,
}
external_statistics2 = {
"start": period2.isoformat(),
"last_reset": None,
"state": 1,
"sum": 3,
}
external_metadata = {
"has_mean": False,
"has_sum": True,
"name": "Total imported energy",
"source": source,
"statistic_id": statistic_id,
"unit_of_measurement": "kWh",
}
await client.send_json(
{
"id": 1,
"type": "recorder/import_statistics",
"metadata": external_metadata,
"stats": [external_statistics1, external_statistics2],
}
)
response = await client.receive_json()
assert response["success"]
assert response["result"] is None
await async_wait_recording_done(hass)
stats = statistics_during_period(hass, zero, period="hour")
assert stats == {
statistic_id: [
{
"statistic_id": statistic_id,
"start": period1.isoformat(),
"end": (period1 + timedelta(hours=1)).isoformat(),
"max": None,
"mean": None,
"min": None,
"last_reset": None,
"state": approx(0.0),
"sum": approx(2.0),
},
{
"statistic_id": statistic_id,
"start": period2.isoformat(),
"end": (period2 + timedelta(hours=1)).isoformat(),
"max": None,
"mean": None,
"min": None,
"last_reset": None,
"state": approx(1.0),
"sum": approx(3.0),
},
]
}
statistic_ids = list_statistic_ids(hass) # TODO
assert statistic_ids == [
{
"display_unit_of_measurement": "kWh",
"has_mean": False,
"has_sum": True,
"statistic_id": statistic_id,
"name": "Total imported energy",
"source": source,
"statistics_unit_of_measurement": "kWh",
"unit_class": "energy",
}
]
metadata = get_metadata(hass, statistic_ids=(statistic_id,))
assert metadata == {
statistic_id: (
1,
{
"has_mean": False,
"has_sum": True,
"name": "Total imported energy",
"source": source,
"state_unit_of_measurement": "kWh",
"statistic_id": statistic_id,
"unit_of_measurement": "kWh",
},
)
}
# Adjust previously inserted statistics in kWh
await client.send_json( await client.send_json(
{ {
"id": 4, "id": 4,
@ -1156,6 +1513,7 @@ async def test_import_statistics(
"statistic_id": statistic_id, "statistic_id": statistic_id,
"start_time": period2.isoformat(), "start_time": period2.isoformat(),
"adjustment": 1000.0, "adjustment": 1000.0,
"display_unit": "kWh",
} }
) )
response = await client.receive_json() response = await client.receive_json()
@ -1169,12 +1527,12 @@ async def test_import_statistics(
"statistic_id": statistic_id, "statistic_id": statistic_id,
"start": period1.isoformat(), "start": period1.isoformat(),
"end": (period1 + timedelta(hours=1)).isoformat(), "end": (period1 + timedelta(hours=1)).isoformat(),
"max": approx(1.0), "max": approx(None),
"mean": approx(2.0), "mean": approx(None),
"min": approx(3.0), "min": approx(None),
"last_reset": None, "last_reset": None,
"state": approx(4.0), "state": approx(0.0),
"sum": approx(5.0), "sum": approx(2.0),
}, },
{ {
"statistic_id": statistic_id, "statistic_id": statistic_id,
@ -1189,3 +1547,432 @@ async def test_import_statistics(
}, },
] ]
} }
# Adjust previously inserted statistics in MWh
await client.send_json(
{
"id": 5,
"type": "recorder/adjust_sum_statistics",
"statistic_id": statistic_id,
"start_time": period2.isoformat(),
"adjustment": 2.0,
"display_unit": "MWh",
}
)
response = await client.receive_json()
assert response["success"]
await async_wait_recording_done(hass)
stats = statistics_during_period(hass, zero, period="hour")
assert stats == {
statistic_id: [
{
"statistic_id": statistic_id,
"start": period1.isoformat(),
"end": (period1 + timedelta(hours=1)).isoformat(),
"max": approx(None),
"mean": approx(None),
"min": approx(None),
"last_reset": None,
"state": approx(0.0),
"sum": approx(2.0),
},
{
"statistic_id": statistic_id,
"start": period2.isoformat(),
"end": (period2 + timedelta(hours=1)).isoformat(),
"max": None,
"mean": None,
"min": None,
"last_reset": None,
"state": approx(1.0),
"sum": approx(3003.0),
},
]
}
@pytest.mark.parametrize(
    "source, statistic_id",
    (
        ("test", "test:total_gas"),
        ("recorder", "sensor.total_gas"),
    ),
)
async def test_adjust_sum_statistics_gas(
    hass, hass_ws_client, recorder_mock, caplog, source, statistic_id
):
    """Test adjusting sum statistics for a gas (volume) statistic.

    Imports two hours of external statistics, then adjusts the second hour's
    sum twice over the WS API: once in the statistics unit and once in ft³.
    The ft³ adjustment must be converted (~35.3147 ft³ == 1 m³) before being
    applied to the stored sum.

    NOTE(review): the empty unit strings ("") in this test look like a
    mangled "m³" (the inline comments still read m³) — verify against the
    original source.
    """
    client = await hass_ws_client()
    # Sanity check: nothing has been compiled before the test starts.
    assert "Compiling statistics for" not in caplog.text
    assert "Statistics already compiled" not in caplog.text
    zero = dt_util.utcnow()
    # Two consecutive hourly periods, aligned to the top of the hour.
    period1 = zero.replace(minute=0, second=0, microsecond=0) + timedelta(hours=1)
    period2 = zero.replace(minute=0, second=0, microsecond=0) + timedelta(hours=2)
    external_statistics1 = {
        "start": period1.isoformat(),
        "last_reset": None,
        "state": 0,
        "sum": 2,
    }
    external_statistics2 = {
        "start": period2.isoformat(),
        "last_reset": None,
        "state": 1,
        "sum": 3,
    }
    external_metadata = {
        "has_mean": False,
        "has_sum": True,
        "name": "Total imported energy",
        "source": source,
        "statistic_id": statistic_id,
        "unit_of_measurement": "",
    }
    # Seed the recorder with the external statistics.
    await client.send_json(
        {
            "id": 1,
            "type": "recorder/import_statistics",
            "metadata": external_metadata,
            "stats": [external_statistics1, external_statistics2],
        }
    )
    response = await client.receive_json()
    assert response["success"]
    assert response["result"] is None
    await async_wait_recording_done(hass)
    # The imported rows are returned unmodified.
    stats = statistics_during_period(hass, zero, period="hour")
    assert stats == {
        statistic_id: [
            {
                "statistic_id": statistic_id,
                "start": period1.isoformat(),
                "end": (period1 + timedelta(hours=1)).isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(0.0),
                "sum": approx(2.0),
            },
            {
                "statistic_id": statistic_id,
                "start": period2.isoformat(),
                "end": (period2 + timedelta(hours=1)).isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(1.0),
                "sum": approx(3.0),
            },
        ]
    }
    statistic_ids = list_statistic_ids(hass)
    assert statistic_ids == [
        {
            "display_unit_of_measurement": "",
            "has_mean": False,
            "has_sum": True,
            "statistic_id": statistic_id,
            "name": "Total imported energy",
            "source": source,
            "statistics_unit_of_measurement": "",
            "unit_class": "volume",
        }
    ]
    metadata = get_metadata(hass, statistic_ids=(statistic_id,))
    assert metadata == {
        statistic_id: (
            1,
            {
                "has_mean": False,
                "has_sum": True,
                "name": "Total imported energy",
                "source": source,
                "state_unit_of_measurement": "",
                "statistic_id": statistic_id,
                "unit_of_measurement": "",
            },
        )
    }
    # Adjust previously inserted statistics in m³
    await client.send_json(
        {
            "id": 4,
            "type": "recorder/adjust_sum_statistics",
            "statistic_id": statistic_id,
            "start_time": period2.isoformat(),
            "adjustment": 1000.0,
            "display_unit": "",
        }
    )
    response = await client.receive_json()
    assert response["success"]
    await async_wait_recording_done(hass)
    # Only the second period's sum is adjusted (3 + 1000 = 1003).
    stats = statistics_during_period(hass, zero, period="hour")
    assert stats == {
        statistic_id: [
            {
                "statistic_id": statistic_id,
                "start": period1.isoformat(),
                "end": (period1 + timedelta(hours=1)).isoformat(),
                "max": approx(None),
                "mean": approx(None),
                "min": approx(None),
                "last_reset": None,
                "state": approx(0.0),
                "sum": approx(2.0),
            },
            {
                "statistic_id": statistic_id,
                "start": period2.isoformat(),
                "end": (period2 + timedelta(hours=1)).isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(1.0),
                "sum": approx(1003.0),
            },
        ]
    }
    # Adjust previously inserted statistics in ft³
    await client.send_json(
        {
            "id": 5,
            "type": "recorder/adjust_sum_statistics",
            "statistic_id": statistic_id,
            "start_time": period2.isoformat(),
            "adjustment": 35.3147,  # ~1 m³
            "display_unit": "ft³",
        }
    )
    response = await client.receive_json()
    assert response["success"]
    await async_wait_recording_done(hass)
    # The ft³ adjustment converts to ~1 m³, so the sum becomes ~1004.
    stats = statistics_during_period(hass, zero, period="hour")
    assert stats == {
        statistic_id: [
            {
                "statistic_id": statistic_id,
                "start": period1.isoformat(),
                "end": (period1 + timedelta(hours=1)).isoformat(),
                "max": approx(None),
                "mean": approx(None),
                "min": approx(None),
                "last_reset": None,
                "state": approx(0.0),
                "sum": approx(2.0),
            },
            {
                "statistic_id": statistic_id,
                "start": period2.isoformat(),
                "end": (period2 + timedelta(hours=1)).isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(1.0),
                "sum": approx(1004),
            },
        ]
    }
@pytest.mark.parametrize(
    "state_unit, statistic_unit, unit_class, factor, valid_units, invalid_units",
    # NOTE(review): the empty unit strings ("") below appear to be a mangled
    # "m³" — verify against the original source.
    (
        ("kWh", "kWh", "energy", 1, ("Wh", "kWh", "MWh"), ("ft³", "", "cats", None)),
        ("MWh", "MWh", None, 1, ("MWh",), ("Wh", "kWh", "ft³", "", "cats", None)),
        ("", "", "volume", 1, ("ft³", ""), ("Wh", "kWh", "MWh", "cats", None)),
        ("ft³", "ft³", None, 1, ("ft³",), ("", "Wh", "kWh", "MWh", "cats", None)),
        ("dogs", "dogs", None, 1, ("dogs",), ("cats", None)),
        (None, None, None, 1, (None,), ("cats",)),
    ),
)
async def test_adjust_sum_statistics_errors(
    hass,
    hass_ws_client,
    recorder_mock,
    caplog,
    state_unit,
    statistic_unit,
    unit_class,
    factor,
    valid_units,
    invalid_units,
):
    """Test error handling when adjusting sum statistics over the WS API.

    After importing two hours of statistics, verifies that adjustments are
    rejected for an unknown statistic_id and for display units incompatible
    with the statistics unit (leaving stored stats untouched), and accepted
    for every compatible display unit (changing the stored sums).
    """
    statistic_id = "sensor.total_energy_import"
    source = "recorder"
    client = await hass_ws_client()
    # Sanity check: nothing has been compiled before the test starts.
    assert "Compiling statistics for" not in caplog.text
    assert "Statistics already compiled" not in caplog.text
    zero = dt_util.utcnow()
    # Two consecutive hourly periods, aligned to the top of the hour.
    period1 = zero.replace(minute=0, second=0, microsecond=0) + timedelta(hours=1)
    period2 = zero.replace(minute=0, second=0, microsecond=0) + timedelta(hours=2)
    external_statistics1 = {
        "start": period1.isoformat(),
        "last_reset": None,
        "state": 0,
        "sum": 2,
    }
    external_statistics2 = {
        "start": period2.isoformat(),
        "last_reset": None,
        "state": 1,
        "sum": 3,
    }
    external_metadata = {
        "has_mean": False,
        "has_sum": True,
        "name": "Total imported energy",
        "source": source,
        "statistic_id": statistic_id,
        "unit_of_measurement": statistic_unit,
    }
    # Seed the recorder with the external statistics.
    await client.send_json(
        {
            "id": 1,
            "type": "recorder/import_statistics",
            "metadata": external_metadata,
            "stats": [external_statistics1, external_statistics2],
        }
    )
    response = await client.receive_json()
    assert response["success"]
    assert response["result"] is None
    await async_wait_recording_done(hass)
    stats = statistics_during_period(hass, zero, period="hour")
    assert stats == {
        statistic_id: [
            {
                "statistic_id": statistic_id,
                "start": period1.isoformat(),
                "end": (period1 + timedelta(hours=1)).isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(0.0 * factor),
                "sum": approx(2.0 * factor),
            },
            {
                "statistic_id": statistic_id,
                "start": period2.isoformat(),
                "end": (period2 + timedelta(hours=1)).isoformat(),
                "max": None,
                "mean": None,
                "min": None,
                "last_reset": None,
                "state": approx(1.0 * factor),
                "sum": approx(3.0 * factor),
            },
        ]
    }
    # Baseline snapshot used to verify failed adjustments change nothing.
    previous_stats = stats
    statistic_ids = list_statistic_ids(hass)
    assert statistic_ids == [
        {
            "display_unit_of_measurement": state_unit,
            "has_mean": False,
            "has_sum": True,
            "statistic_id": statistic_id,
            "name": "Total imported energy",
            "source": source,
            "statistics_unit_of_measurement": statistic_unit,
            "unit_class": unit_class,
        }
    ]
    metadata = get_metadata(hass, statistic_ids=(statistic_id,))
    assert metadata == {
        statistic_id: (
            1,
            {
                "has_mean": False,
                "has_sum": True,
                "name": "Total imported energy",
                "source": source,
                "state_unit_of_measurement": state_unit,
                "statistic_id": statistic_id,
                "unit_of_measurement": statistic_unit,
            },
        )
    }
    # Try to adjust statistics
    msg_id = 2
    # Adjusting an unknown statistic_id must fail and leave stats untouched.
    await client.send_json(
        {
            "id": msg_id,
            "type": "recorder/adjust_sum_statistics",
            "statistic_id": "sensor.does_not_exist",
            "start_time": period2.isoformat(),
            "adjustment": 1000.0,
            "display_unit": statistic_unit,
        }
    )
    response = await client.receive_json()
    assert not response["success"]
    assert response["error"]["code"] == "unknown_statistic_id"
    await async_wait_recording_done(hass)
    stats = statistics_during_period(hass, zero, period="hour")
    assert stats == previous_stats
    # Incompatible display units must be rejected with no effect on stats.
    for unit in invalid_units:
        msg_id += 1
        await client.send_json(
            {
                "id": msg_id,
                "type": "recorder/adjust_sum_statistics",
                "statistic_id": statistic_id,
                "start_time": period2.isoformat(),
                "adjustment": 1000.0,
                "display_unit": unit,
            }
        )
        response = await client.receive_json()
        assert not response["success"]
        assert response["error"]["code"] == "invalid_units"
        await async_wait_recording_done(hass)
        stats = statistics_during_period(hass, zero, period="hour")
        assert stats == previous_stats
    # Compatible display units must be accepted and change the stored sums.
    for unit in valid_units:
        msg_id += 1
        await client.send_json(
            {
                "id": msg_id,
                "type": "recorder/adjust_sum_statistics",
                "statistic_id": statistic_id,
                "start_time": period2.isoformat(),
                "adjustment": 1000.0,
                "display_unit": unit,
            }
        )
        response = await client.receive_json()
        assert response["success"]
        await async_wait_recording_done(hass)
        stats = statistics_during_period(hass, zero, period="hour")
        assert stats != previous_stats
        previous_stats = stats

View file

@ -76,20 +76,20 @@ def set_time_zone():
@pytest.mark.parametrize( @pytest.mark.parametrize(
"device_class,state_unit,display_unit,statistics_unit,mean,min,max", "device_class, state_unit, display_unit, statistics_unit, unit_class, mean, min, max",
[ [
(None, "%", "%", "%", 13.050847, -10, 30), (None, "%", "%", "%", None, 13.050847, -10, 30),
("battery", "%", "%", "%", 13.050847, -10, 30), ("battery", "%", "%", "%", None, 13.050847, -10, 30),
("battery", None, None, None, 13.050847, -10, 30), ("battery", None, None, None, None, 13.050847, -10, 30),
("humidity", "%", "%", "%", 13.050847, -10, 30), ("humidity", "%", "%", "%", None, 13.050847, -10, 30),
("humidity", None, None, None, 13.050847, -10, 30), ("humidity", None, None, None, None, 13.050847, -10, 30),
("pressure", "Pa", "Pa", "Pa", 13.050847, -10, 30), ("pressure", "Pa", "Pa", "Pa", "pressure", 13.050847, -10, 30),
("pressure", "hPa", "hPa", "Pa", 13.050847, -10, 30), ("pressure", "hPa", "hPa", "Pa", "pressure", 13.050847, -10, 30),
("pressure", "mbar", "mbar", "Pa", 13.050847, -10, 30), ("pressure", "mbar", "mbar", "Pa", "pressure", 13.050847, -10, 30),
("pressure", "inHg", "inHg", "Pa", 13.050847, -10, 30), ("pressure", "inHg", "inHg", "Pa", "pressure", 13.050847, -10, 30),
("pressure", "psi", "psi", "Pa", 13.050847, -10, 30), ("pressure", "psi", "psi", "Pa", "pressure", 13.050847, -10, 30),
("temperature", "°C", "°C", "°C", 13.050847, -10, 30), ("temperature", "°C", "°C", "°C", "temperature", 13.050847, -10, 30),
("temperature", "°F", "°F", "°C", 13.050847, -10, 30), ("temperature", "°F", "°F", "°C", "temperature", 13.050847, -10, 30),
], ],
) )
def test_compile_hourly_statistics( def test_compile_hourly_statistics(
@ -99,6 +99,7 @@ def test_compile_hourly_statistics(
state_unit, state_unit,
display_unit, display_unit,
statistics_unit, statistics_unit,
unit_class,
mean, mean,
min, min,
max, max,
@ -129,6 +130,7 @@ def test_compile_hourly_statistics(
"name": None, "name": None,
"source": "recorder", "source": "recorder",
"statistics_unit_of_measurement": statistics_unit, "statistics_unit_of_measurement": statistics_unit,
"unit_class": unit_class,
} }
] ]
stats = statistics_during_period(hass, zero, period="5minute") stats = statistics_during_period(hass, zero, period="5minute")
@ -151,13 +153,19 @@ def test_compile_hourly_statistics(
@pytest.mark.parametrize( @pytest.mark.parametrize(
"device_class,state_unit,display_unit,statistics_unit", "device_class, state_unit, display_unit, statistics_unit, unit_class",
[ [
(None, "%", "%", "%"), (None, "%", "%", "%", None),
], ],
) )
def test_compile_hourly_statistics_purged_state_changes( def test_compile_hourly_statistics_purged_state_changes(
hass_recorder, caplog, device_class, state_unit, display_unit, statistics_unit hass_recorder,
caplog,
device_class,
state_unit,
display_unit,
statistics_unit,
unit_class,
): ):
"""Test compiling hourly statistics.""" """Test compiling hourly statistics."""
zero = dt_util.utcnow() zero = dt_util.utcnow()
@ -197,6 +205,7 @@ def test_compile_hourly_statistics_purged_state_changes(
"name": None, "name": None,
"source": "recorder", "source": "recorder",
"statistics_unit_of_measurement": statistics_unit, "statistics_unit_of_measurement": statistics_unit,
"unit_class": unit_class,
} }
] ]
stats = statistics_during_period(hass, zero, period="5minute") stats = statistics_during_period(hass, zero, period="5minute")
@ -266,6 +275,7 @@ def test_compile_hourly_statistics_unsupported(hass_recorder, caplog, attributes
"name": None, "name": None,
"source": "recorder", "source": "recorder",
"statistics_unit_of_measurement": "°C", "statistics_unit_of_measurement": "°C",
"unit_class": "temperature",
}, },
{ {
"statistic_id": "sensor.test6", "statistic_id": "sensor.test6",
@ -275,6 +285,7 @@ def test_compile_hourly_statistics_unsupported(hass_recorder, caplog, attributes
"name": None, "name": None,
"source": "recorder", "source": "recorder",
"statistics_unit_of_measurement": "°C", "statistics_unit_of_measurement": "°C",
"unit_class": "temperature",
}, },
{ {
"statistic_id": "sensor.test7", "statistic_id": "sensor.test7",
@ -284,6 +295,7 @@ def test_compile_hourly_statistics_unsupported(hass_recorder, caplog, attributes
"name": None, "name": None,
"source": "recorder", "source": "recorder",
"statistics_unit_of_measurement": "°C", "statistics_unit_of_measurement": "°C",
"unit_class": "temperature",
}, },
] ]
stats = statistics_during_period(hass, zero, period="5minute") stats = statistics_during_period(hass, zero, period="5minute")
@ -333,20 +345,20 @@ def test_compile_hourly_statistics_unsupported(hass_recorder, caplog, attributes
@pytest.mark.parametrize("state_class", ["total"]) @pytest.mark.parametrize("state_class", ["total"])
@pytest.mark.parametrize( @pytest.mark.parametrize(
"units,device_class,state_unit,display_unit,statistics_unit,factor", "units, device_class, state_unit, display_unit, statistics_unit, unit_class, factor",
[ [
(IMPERIAL_SYSTEM, "energy", "kWh", "kWh", "kWh", 1), (IMPERIAL_SYSTEM, "energy", "kWh", "kWh", "kWh", "energy", 1),
(IMPERIAL_SYSTEM, "energy", "Wh", "kWh", "kWh", 1 / 1000), (IMPERIAL_SYSTEM, "energy", "Wh", "Wh", "kWh", "energy", 1),
(IMPERIAL_SYSTEM, "monetary", "EUR", "EUR", "EUR", 1), (IMPERIAL_SYSTEM, "monetary", "EUR", "EUR", "EUR", None, 1),
(IMPERIAL_SYSTEM, "monetary", "SEK", "SEK", "SEK", 1), (IMPERIAL_SYSTEM, "monetary", "SEK", "SEK", "SEK", None, 1),
(IMPERIAL_SYSTEM, "gas", "", "ft³", "", 35.314666711), (IMPERIAL_SYSTEM, "gas", "", "", "", "volume", 1),
(IMPERIAL_SYSTEM, "gas", "ft³", "ft³", "", 1), (IMPERIAL_SYSTEM, "gas", "ft³", "ft³", "", "volume", 1),
(METRIC_SYSTEM, "energy", "kWh", "kWh", "kWh", 1), (METRIC_SYSTEM, "energy", "kWh", "kWh", "kWh", "energy", 1),
(METRIC_SYSTEM, "energy", "Wh", "kWh", "kWh", 1 / 1000), (METRIC_SYSTEM, "energy", "Wh", "Wh", "kWh", "energy", 1),
(METRIC_SYSTEM, "monetary", "EUR", "EUR", "EUR", 1), (METRIC_SYSTEM, "monetary", "EUR", "EUR", "EUR", None, 1),
(METRIC_SYSTEM, "monetary", "SEK", "SEK", "SEK", 1), (METRIC_SYSTEM, "monetary", "SEK", "SEK", "SEK", None, 1),
(METRIC_SYSTEM, "gas", "", "", "", 1), (METRIC_SYSTEM, "gas", "", "", "", "volume", 1),
(METRIC_SYSTEM, "gas", "ft³", "", "", 0.0283168466), (METRIC_SYSTEM, "gas", "ft³", "ft³", "", "volume", 1),
], ],
) )
async def test_compile_hourly_sum_statistics_amount( async def test_compile_hourly_sum_statistics_amount(
@ -360,6 +372,7 @@ async def test_compile_hourly_sum_statistics_amount(
state_unit, state_unit,
display_unit, display_unit,
statistics_unit, statistics_unit,
unit_class,
factor, factor,
): ):
"""Test compiling hourly statistics.""" """Test compiling hourly statistics."""
@ -405,6 +418,7 @@ async def test_compile_hourly_sum_statistics_amount(
"name": None, "name": None,
"source": "recorder", "source": "recorder",
"statistics_unit_of_measurement": statistics_unit, "statistics_unit_of_measurement": statistics_unit,
"unit_class": unit_class,
} }
] ]
stats = statistics_during_period(hass, period0, period="5minute") stats = statistics_during_period(hass, period0, period="5minute")
@ -478,6 +492,7 @@ async def test_compile_hourly_sum_statistics_amount(
"statistic_id": "sensor.test1", "statistic_id": "sensor.test1",
"start_time": period1.isoformat(), "start_time": period1.isoformat(),
"adjustment": 100.0, "adjustment": 100.0,
"display_unit": display_unit,
} }
) )
response = await client.receive_json() response = await client.receive_json()
@ -497,6 +512,7 @@ async def test_compile_hourly_sum_statistics_amount(
"statistic_id": "sensor.test1", "statistic_id": "sensor.test1",
"start_time": period2.isoformat(), "start_time": period2.isoformat(),
"adjustment": -400.0, "adjustment": -400.0,
"display_unit": display_unit,
} }
) )
response = await client.receive_json() response = await client.receive_json()
@ -511,14 +527,14 @@ async def test_compile_hourly_sum_statistics_amount(
@pytest.mark.parametrize("state_class", ["total"]) @pytest.mark.parametrize("state_class", ["total"])
@pytest.mark.parametrize( @pytest.mark.parametrize(
"device_class,state_unit,display_unit,statistics_unit,factor", "device_class, state_unit, display_unit, statistics_unit, unit_class, factor",
[ [
("energy", "kWh", "kWh", "kWh", 1), ("energy", "kWh", "kWh", "kWh", "energy", 1),
("energy", "Wh", "kWh", "kWh", 1 / 1000), ("energy", "Wh", "Wh", "kWh", "energy", 1),
("monetary", "EUR", "EUR", "EUR", 1), ("monetary", "EUR", "EUR", "EUR", None, 1),
("monetary", "SEK", "SEK", "SEK", 1), ("monetary", "SEK", "SEK", "SEK", None, 1),
("gas", "", "", "", 1), ("gas", "", "", "", "volume", 1),
("gas", "ft³", "", "", 0.0283168466), ("gas", "ft³", "ft³", "", "volume", 1),
], ],
) )
def test_compile_hourly_sum_statistics_amount_reset_every_state_change( def test_compile_hourly_sum_statistics_amount_reset_every_state_change(
@ -529,6 +545,7 @@ def test_compile_hourly_sum_statistics_amount_reset_every_state_change(
state_unit, state_unit,
display_unit, display_unit,
statistics_unit, statistics_unit,
unit_class,
factor, factor,
): ):
"""Test compiling hourly statistics.""" """Test compiling hourly statistics."""
@ -594,6 +611,7 @@ def test_compile_hourly_sum_statistics_amount_reset_every_state_change(
"name": None, "name": None,
"source": "recorder", "source": "recorder",
"statistics_unit_of_measurement": statistics_unit, "statistics_unit_of_measurement": statistics_unit,
"unit_class": unit_class,
} }
] ]
stats = statistics_during_period(hass, zero, period="5minute") stats = statistics_during_period(hass, zero, period="5minute")
@ -630,9 +648,9 @@ def test_compile_hourly_sum_statistics_amount_reset_every_state_change(
@pytest.mark.parametrize("state_class", ["total"]) @pytest.mark.parametrize("state_class", ["total"])
@pytest.mark.parametrize( @pytest.mark.parametrize(
"device_class,state_unit,display_unit,statistics_unit,factor", "device_class, state_unit, display_unit, statistics_unit, unit_class, factor",
[ [
("energy", "kWh", "kWh", "kWh", 1), ("energy", "kWh", "kWh", "kWh", "energy", 1),
], ],
) )
def test_compile_hourly_sum_statistics_amount_invalid_last_reset( def test_compile_hourly_sum_statistics_amount_invalid_last_reset(
@ -643,6 +661,7 @@ def test_compile_hourly_sum_statistics_amount_invalid_last_reset(
state_unit, state_unit,
display_unit, display_unit,
statistics_unit, statistics_unit,
unit_class,
factor, factor,
): ):
"""Test compiling hourly statistics.""" """Test compiling hourly statistics."""
@ -693,6 +712,7 @@ def test_compile_hourly_sum_statistics_amount_invalid_last_reset(
"name": None, "name": None,
"source": "recorder", "source": "recorder",
"statistics_unit_of_measurement": statistics_unit, "statistics_unit_of_measurement": statistics_unit,
"unit_class": unit_class,
} }
] ]
stats = statistics_during_period(hass, zero, period="5minute") stats = statistics_during_period(hass, zero, period="5minute")
@ -717,9 +737,9 @@ def test_compile_hourly_sum_statistics_amount_invalid_last_reset(
@pytest.mark.parametrize("state_class", ["total"]) @pytest.mark.parametrize("state_class", ["total"])
@pytest.mark.parametrize( @pytest.mark.parametrize(
"device_class,state_unit,display_unit,statistics_unit,factor", "device_class, state_unit, display_unit, statistics_unit, unit_class, factor",
[ [
("energy", "kWh", "kWh", "kWh", 1), ("energy", "kWh", "kWh", "kWh", "energy", 1),
], ],
) )
def test_compile_hourly_sum_statistics_nan_inf_state( def test_compile_hourly_sum_statistics_nan_inf_state(
@ -730,6 +750,7 @@ def test_compile_hourly_sum_statistics_nan_inf_state(
state_unit, state_unit,
display_unit, display_unit,
statistics_unit, statistics_unit,
unit_class,
factor, factor,
): ):
"""Test compiling hourly statistics with nan and inf states.""" """Test compiling hourly statistics with nan and inf states."""
@ -776,6 +797,7 @@ def test_compile_hourly_sum_statistics_nan_inf_state(
"name": None, "name": None,
"source": "recorder", "source": "recorder",
"statistics_unit_of_measurement": statistics_unit, "statistics_unit_of_measurement": statistics_unit,
"unit_class": unit_class,
} }
] ]
stats = statistics_during_period(hass, zero, period="5minute") stats = statistics_during_period(hass, zero, period="5minute")
@ -819,9 +841,9 @@ def test_compile_hourly_sum_statistics_nan_inf_state(
) )
@pytest.mark.parametrize("state_class", ["total_increasing"]) @pytest.mark.parametrize("state_class", ["total_increasing"])
@pytest.mark.parametrize( @pytest.mark.parametrize(
"device_class,state_unit,display_unit,statistics_unit,factor", "device_class, state_unit, display_unit, statistics_unit, unit_class, factor",
[ [
("energy", "kWh", "kWh", "kWh", 1), ("energy", "kWh", "kWh", "kWh", "energy", 1),
], ],
) )
def test_compile_hourly_sum_statistics_negative_state( def test_compile_hourly_sum_statistics_negative_state(
@ -835,6 +857,7 @@ def test_compile_hourly_sum_statistics_negative_state(
state_unit, state_unit,
display_unit, display_unit,
statistics_unit, statistics_unit,
unit_class,
factor, factor,
): ):
"""Test compiling hourly statistics with negative states.""" """Test compiling hourly statistics with negative states."""
@ -889,6 +912,7 @@ def test_compile_hourly_sum_statistics_negative_state(
"source": "recorder", "source": "recorder",
"statistic_id": entity_id, "statistic_id": entity_id,
"statistics_unit_of_measurement": statistics_unit, "statistics_unit_of_measurement": statistics_unit,
"unit_class": unit_class,
} in statistic_ids } in statistic_ids
stats = statistics_during_period(hass, zero, period="5minute") stats = statistics_during_period(hass, zero, period="5minute")
assert stats[entity_id] == [ assert stats[entity_id] == [
@ -916,14 +940,14 @@ def test_compile_hourly_sum_statistics_negative_state(
@pytest.mark.parametrize( @pytest.mark.parametrize(
"device_class,state_unit,display_unit,statistics_unit,factor", "device_class, state_unit, display_unit, statistics_unit, unit_class, factor",
[ [
("energy", "kWh", "kWh", "kWh", 1), ("energy", "kWh", "kWh", "kWh", "energy", 1),
("energy", "Wh", "kWh", "kWh", 1 / 1000), ("energy", "Wh", "Wh", "kWh", "energy", 1),
("monetary", "EUR", "EUR", "EUR", 1), ("monetary", "EUR", "EUR", "EUR", None, 1),
("monetary", "SEK", "SEK", "SEK", 1), ("monetary", "SEK", "SEK", "SEK", None, 1),
("gas", "", "", "", 1), ("gas", "", "", "", "volume", 1),
("gas", "ft³", "", "", 0.0283168466), ("gas", "ft³", "ft³", "", "volume", 1),
], ],
) )
def test_compile_hourly_sum_statistics_total_no_reset( def test_compile_hourly_sum_statistics_total_no_reset(
@ -933,6 +957,7 @@ def test_compile_hourly_sum_statistics_total_no_reset(
state_unit, state_unit,
display_unit, display_unit,
statistics_unit, statistics_unit,
unit_class,
factor, factor,
): ):
"""Test compiling hourly statistics.""" """Test compiling hourly statistics."""
@ -975,6 +1000,7 @@ def test_compile_hourly_sum_statistics_total_no_reset(
"name": None, "name": None,
"source": "recorder", "source": "recorder",
"statistics_unit_of_measurement": statistics_unit, "statistics_unit_of_measurement": statistics_unit,
"unit_class": unit_class,
} }
] ]
stats = statistics_during_period(hass, period0, period="5minute") stats = statistics_during_period(hass, period0, period="5minute")
@ -1019,12 +1045,12 @@ def test_compile_hourly_sum_statistics_total_no_reset(
@pytest.mark.parametrize( @pytest.mark.parametrize(
"device_class,state_unit,display_unit,statistics_unit,factor", "device_class, state_unit, display_unit, statistics_unit, unit_class, factor",
[ [
("energy", "kWh", "kWh", "kWh", 1), ("energy", "kWh", "kWh", "kWh", "energy", 1),
("energy", "Wh", "kWh", "kWh", 1 / 1000), ("energy", "Wh", "Wh", "kWh", "energy", 1),
("gas", "", "", "", 1), ("gas", "", "", "", "volume", 1),
("gas", "ft³", "", "", 0.0283168466), ("gas", "ft³", "ft³", "", "volume", 1),
], ],
) )
def test_compile_hourly_sum_statistics_total_increasing( def test_compile_hourly_sum_statistics_total_increasing(
@ -1034,6 +1060,7 @@ def test_compile_hourly_sum_statistics_total_increasing(
state_unit, state_unit,
display_unit, display_unit,
statistics_unit, statistics_unit,
unit_class,
factor, factor,
): ):
"""Test compiling hourly statistics.""" """Test compiling hourly statistics."""
@ -1076,6 +1103,7 @@ def test_compile_hourly_sum_statistics_total_increasing(
"name": None, "name": None,
"source": "recorder", "source": "recorder",
"statistics_unit_of_measurement": statistics_unit, "statistics_unit_of_measurement": statistics_unit,
"unit_class": unit_class,
} }
] ]
stats = statistics_during_period(hass, period0, period="5minute") stats = statistics_during_period(hass, period0, period="5minute")
@ -1123,8 +1151,8 @@ def test_compile_hourly_sum_statistics_total_increasing(
@pytest.mark.parametrize( @pytest.mark.parametrize(
"device_class,state_unit,display_unit,statistics_unit,factor", "device_class, state_unit, display_unit, statistics_unit, unit_class, factor",
[("energy", "kWh", "kWh", "kWh", 1)], [("energy", "kWh", "kWh", "kWh", "energy", 1)],
) )
def test_compile_hourly_sum_statistics_total_increasing_small_dip( def test_compile_hourly_sum_statistics_total_increasing_small_dip(
hass_recorder, hass_recorder,
@ -1133,6 +1161,7 @@ def test_compile_hourly_sum_statistics_total_increasing_small_dip(
state_unit, state_unit,
display_unit, display_unit,
statistics_unit, statistics_unit,
unit_class,
factor, factor,
): ):
"""Test small dips in sensor readings do not trigger a reset.""" """Test small dips in sensor readings do not trigger a reset."""
@ -1188,6 +1217,7 @@ def test_compile_hourly_sum_statistics_total_increasing_small_dip(
"name": None, "name": None,
"source": "recorder", "source": "recorder",
"statistics_unit_of_measurement": statistics_unit, "statistics_unit_of_measurement": statistics_unit,
"unit_class": unit_class,
} }
] ]
stats = statistics_during_period(hass, period0, period="5minute") stats = statistics_during_period(hass, period0, period="5minute")
@ -1282,6 +1312,7 @@ def test_compile_hourly_energy_statistics_unsupported(hass_recorder, caplog):
"name": None, "name": None,
"source": "recorder", "source": "recorder",
"statistics_unit_of_measurement": "kWh", "statistics_unit_of_measurement": "kWh",
"unit_class": "energy",
} }
] ]
stats = statistics_during_period(hass, period0, period="5minute") stats = statistics_during_period(hass, period0, period="5minute")
@ -1374,6 +1405,7 @@ def test_compile_hourly_energy_statistics_multiple(hass_recorder, caplog):
"name": None, "name": None,
"source": "recorder", "source": "recorder",
"statistics_unit_of_measurement": "kWh", "statistics_unit_of_measurement": "kWh",
"unit_class": "energy",
}, },
{ {
"statistic_id": "sensor.test2", "statistic_id": "sensor.test2",
@ -1383,15 +1415,17 @@ def test_compile_hourly_energy_statistics_multiple(hass_recorder, caplog):
"name": None, "name": None,
"source": "recorder", "source": "recorder",
"statistics_unit_of_measurement": "kWh", "statistics_unit_of_measurement": "kWh",
"unit_class": "energy",
}, },
{ {
"statistic_id": "sensor.test3", "statistic_id": "sensor.test3",
"display_unit_of_measurement": "kWh", "display_unit_of_measurement": "Wh",
"has_mean": False, "has_mean": False,
"has_sum": True, "has_sum": True,
"name": None, "name": None,
"source": "recorder", "source": "recorder",
"statistics_unit_of_measurement": "kWh", "statistics_unit_of_measurement": "kWh",
"unit_class": "energy",
}, },
] ]
stats = statistics_during_period(hass, period0, period="5minute") stats = statistics_during_period(hass, period0, period="5minute")
@ -1475,8 +1509,8 @@ def test_compile_hourly_energy_statistics_multiple(hass_recorder, caplog):
"mean": None, "mean": None,
"min": None, "min": None,
"last_reset": process_timestamp_to_utc_isoformat(period0), "last_reset": process_timestamp_to_utc_isoformat(period0),
"state": approx(5.0 / 1000), "state": approx(5.0),
"sum": approx(5.0 / 1000), "sum": approx(5.0),
}, },
{ {
"statistic_id": "sensor.test3", "statistic_id": "sensor.test3",
@ -1486,8 +1520,8 @@ def test_compile_hourly_energy_statistics_multiple(hass_recorder, caplog):
"mean": None, "mean": None,
"min": None, "min": None,
"last_reset": process_timestamp_to_utc_isoformat(four), "last_reset": process_timestamp_to_utc_isoformat(four),
"state": approx(50.0 / 1000), "state": approx(50.0),
"sum": approx(60.0 / 1000), "sum": approx(60.0),
}, },
{ {
"statistic_id": "sensor.test3", "statistic_id": "sensor.test3",
@ -1497,8 +1531,8 @@ def test_compile_hourly_energy_statistics_multiple(hass_recorder, caplog):
"mean": None, "mean": None,
"min": None, "min": None,
"last_reset": process_timestamp_to_utc_isoformat(four), "last_reset": process_timestamp_to_utc_isoformat(four),
"state": approx(90.0 / 1000), "state": approx(90.0),
"sum": approx(100.0 / 1000), "sum": approx(100.0),
}, },
], ],
} }
@ -1666,31 +1700,31 @@ def test_compile_hourly_statistics_fails(hass_recorder, caplog):
@pytest.mark.parametrize( @pytest.mark.parametrize(
"state_class,device_class,state_unit,display_unit,statistics_unit,statistic_type", "state_class, device_class, state_unit, display_unit, statistics_unit, unit_class, statistic_type",
[ [
("measurement", "battery", "%", "%", "%", "mean"), ("measurement", "battery", "%", "%", "%", None, "mean"),
("measurement", "battery", None, None, None, "mean"), ("measurement", "battery", None, None, None, None, "mean"),
("total", "energy", "Wh", "kWh", "kWh", "sum"), ("total", "energy", "Wh", "Wh", "kWh", "energy", "sum"),
("total", "energy", "kWh", "kWh", "kWh", "sum"), ("total", "energy", "kWh", "kWh", "kWh", "energy", "sum"),
("measurement", "energy", "Wh", "kWh", "kWh", "mean"), ("measurement", "energy", "Wh", "Wh", "kWh", "energy", "mean"),
("measurement", "energy", "kWh", "kWh", "kWh", "mean"), ("measurement", "energy", "kWh", "kWh", "kWh", "energy", "mean"),
("measurement", "humidity", "%", "%", "%", "mean"), ("measurement", "humidity", "%", "%", "%", None, "mean"),
("measurement", "humidity", None, None, None, "mean"), ("measurement", "humidity", None, None, None, None, "mean"),
("total", "monetary", "USD", "USD", "USD", "sum"), ("total", "monetary", "USD", "USD", "USD", None, "sum"),
("total", "monetary", "None", "None", "None", "sum"), ("total", "monetary", "None", "None", "None", None, "sum"),
("total", "gas", "", "", "", "sum"), ("total", "gas", "", "", "", "volume", "sum"),
("total", "gas", "ft³", "", "", "sum"), ("total", "gas", "ft³", "ft³", "", "volume", "sum"),
("measurement", "monetary", "USD", "USD", "USD", "mean"), ("measurement", "monetary", "USD", "USD", "USD", None, "mean"),
("measurement", "monetary", "None", "None", "None", "mean"), ("measurement", "monetary", "None", "None", "None", None, "mean"),
("measurement", "gas", "", "", "", "mean"), ("measurement", "gas", "", "", "", "volume", "mean"),
("measurement", "gas", "ft³", "", "", "mean"), ("measurement", "gas", "ft³", "ft³", "", "volume", "mean"),
("measurement", "pressure", "Pa", "Pa", "Pa", "mean"), ("measurement", "pressure", "Pa", "Pa", "Pa", "pressure", "mean"),
("measurement", "pressure", "hPa", "hPa", "Pa", "mean"), ("measurement", "pressure", "hPa", "hPa", "Pa", "pressure", "mean"),
("measurement", "pressure", "mbar", "mbar", "Pa", "mean"), ("measurement", "pressure", "mbar", "mbar", "Pa", "pressure", "mean"),
("measurement", "pressure", "inHg", "inHg", "Pa", "mean"), ("measurement", "pressure", "inHg", "inHg", "Pa", "pressure", "mean"),
("measurement", "pressure", "psi", "psi", "Pa", "mean"), ("measurement", "pressure", "psi", "psi", "Pa", "pressure", "mean"),
("measurement", "temperature", "°C", "°C", "°C", "mean"), ("measurement", "temperature", "°C", "°C", "°C", "temperature", "mean"),
("measurement", "temperature", "°F", "°F", "°C", "mean"), ("measurement", "temperature", "°F", "°F", "°C", "temperature", "mean"),
], ],
) )
def test_list_statistic_ids( def test_list_statistic_ids(
@ -1701,6 +1735,7 @@ def test_list_statistic_ids(
state_unit, state_unit,
display_unit, display_unit,
statistics_unit, statistics_unit,
unit_class,
statistic_type, statistic_type,
): ):
"""Test listing future statistic ids.""" """Test listing future statistic ids."""
@ -1724,6 +1759,7 @@ def test_list_statistic_ids(
"name": None, "name": None,
"source": "recorder", "source": "recorder",
"statistics_unit_of_measurement": statistics_unit, "statistics_unit_of_measurement": statistics_unit,
"unit_class": unit_class,
}, },
] ]
for stat_type in ["mean", "sum", "dogs"]: for stat_type in ["mean", "sum", "dogs"]:
@ -1738,6 +1774,7 @@ def test_list_statistic_ids(
"name": None, "name": None,
"source": "recorder", "source": "recorder",
"statistics_unit_of_measurement": statistics_unit, "statistics_unit_of_measurement": statistics_unit,
"unit_class": unit_class,
}, },
] ]
else: else:
@ -1772,12 +1809,12 @@ def test_list_statistic_ids_unsupported(hass_recorder, caplog, _attributes):
@pytest.mark.parametrize( @pytest.mark.parametrize(
"device_class,state_unit,display_unit,statistics_unit,mean,min,max", "device_class, state_unit, display_unit, statistics_unit, unit_class, mean, min, max",
[ [
(None, None, None, None, 13.050847, -10, 30), (None, None, None, None, None, 13.050847, -10, 30),
(None, "%", "%", "%", 13.050847, -10, 30), (None, "%", "%", "%", None, 13.050847, -10, 30),
("battery", "%", "%", "%", 13.050847, -10, 30), ("battery", "%", "%", "%", None, 13.050847, -10, 30),
("battery", None, None, None, 13.050847, -10, 30), ("battery", None, None, None, None, 13.050847, -10, 30),
], ],
) )
def test_compile_hourly_statistics_changing_units_1( def test_compile_hourly_statistics_changing_units_1(
@ -1787,6 +1824,7 @@ def test_compile_hourly_statistics_changing_units_1(
state_unit, state_unit,
display_unit, display_unit,
statistics_unit, statistics_unit,
unit_class,
mean, mean,
min, min,
max, max,
@ -1827,6 +1865,7 @@ def test_compile_hourly_statistics_changing_units_1(
"name": None, "name": None,
"source": "recorder", "source": "recorder",
"statistics_unit_of_measurement": statistics_unit, "statistics_unit_of_measurement": statistics_unit,
"unit_class": unit_class,
}, },
] ]
stats = statistics_during_period(hass, zero, period="5minute") stats = statistics_during_period(hass, zero, period="5minute")
@ -1862,6 +1901,7 @@ def test_compile_hourly_statistics_changing_units_1(
"name": None, "name": None,
"source": "recorder", "source": "recorder",
"statistics_unit_of_measurement": statistics_unit, "statistics_unit_of_measurement": statistics_unit,
"unit_class": unit_class,
}, },
] ]
stats = statistics_during_period(hass, zero, period="5minute") stats = statistics_during_period(hass, zero, period="5minute")
@ -1884,12 +1924,12 @@ def test_compile_hourly_statistics_changing_units_1(
@pytest.mark.parametrize( @pytest.mark.parametrize(
"device_class,state_unit,display_unit,statistics_unit,mean,min,max", "device_class, state_unit, display_unit, statistics_unit, unit_class, mean, min, max",
[ [
(None, None, None, None, 13.050847, -10, 30), (None, None, None, None, None, 13.050847, -10, 30),
(None, "%", "%", "%", 13.050847, -10, 30), (None, "%", "%", "%", None, 13.050847, -10, 30),
("battery", "%", "%", "%", 13.050847, -10, 30), ("battery", "%", "%", "%", None, 13.050847, -10, 30),
("battery", None, None, None, 13.050847, -10, 30), ("battery", None, None, None, None, 13.050847, -10, 30),
], ],
) )
def test_compile_hourly_statistics_changing_units_2( def test_compile_hourly_statistics_changing_units_2(
@ -1899,6 +1939,7 @@ def test_compile_hourly_statistics_changing_units_2(
state_unit, state_unit,
display_unit, display_unit,
statistics_unit, statistics_unit,
unit_class,
mean, mean,
min, min,
max, max,
@ -1936,6 +1977,7 @@ def test_compile_hourly_statistics_changing_units_2(
"name": None, "name": None,
"source": "recorder", "source": "recorder",
"statistics_unit_of_measurement": "cats", "statistics_unit_of_measurement": "cats",
"unit_class": unit_class,
}, },
] ]
stats = statistics_during_period(hass, zero, period="5minute") stats = statistics_during_period(hass, zero, period="5minute")
@ -1945,12 +1987,12 @@ def test_compile_hourly_statistics_changing_units_2(
@pytest.mark.parametrize( @pytest.mark.parametrize(
"device_class,state_unit,display_unit,statistics_unit,mean,min,max", "device_class, state_unit, display_unit, statistics_unit, unit_class, mean, min, max",
[ [
(None, None, None, None, 13.050847, -10, 30), (None, None, None, None, None, 13.050847, -10, 30),
(None, "%", "%", "%", 13.050847, -10, 30), (None, "%", "%", "%", None, 13.050847, -10, 30),
("battery", "%", "%", "%", 13.050847, -10, 30), ("battery", "%", "%", "%", None, 13.050847, -10, 30),
("battery", None, None, None, 13.050847, -10, 30), ("battery", None, None, None, None, 13.050847, -10, 30),
], ],
) )
def test_compile_hourly_statistics_changing_units_3( def test_compile_hourly_statistics_changing_units_3(
@ -1960,6 +2002,7 @@ def test_compile_hourly_statistics_changing_units_3(
state_unit, state_unit,
display_unit, display_unit,
statistics_unit, statistics_unit,
unit_class,
mean, mean,
min, min,
max, max,
@ -2000,6 +2043,7 @@ def test_compile_hourly_statistics_changing_units_3(
"name": None, "name": None,
"source": "recorder", "source": "recorder",
"statistics_unit_of_measurement": statistics_unit, "statistics_unit_of_measurement": statistics_unit,
"unit_class": unit_class,
}, },
] ]
stats = statistics_during_period(hass, zero, period="5minute") stats = statistics_during_period(hass, zero, period="5minute")
@ -2035,6 +2079,7 @@ def test_compile_hourly_statistics_changing_units_3(
"name": None, "name": None,
"source": "recorder", "source": "recorder",
"statistics_unit_of_measurement": statistics_unit, "statistics_unit_of_measurement": statistics_unit,
"unit_class": unit_class,
}, },
] ]
stats = statistics_during_period(hass, zero, period="5minute") stats = statistics_during_period(hass, zero, period="5minute")
@ -2057,13 +2102,21 @@ def test_compile_hourly_statistics_changing_units_3(
@pytest.mark.parametrize( @pytest.mark.parametrize(
"device_class,state_unit,statistic_unit,mean,min,max", "device_class, state_unit, statistic_unit, unit_class, mean, min, max",
[ [
("power", "kW", "W", 13.050847, -10, 30), ("power", "kW", "W", None, 13.050847, -10, 30),
], ],
) )
def test_compile_hourly_statistics_changing_device_class_1( def test_compile_hourly_statistics_changing_device_class_1(
hass_recorder, caplog, device_class, state_unit, statistic_unit, mean, min, max hass_recorder,
caplog,
device_class,
state_unit,
statistic_unit,
unit_class,
mean,
min,
max,
): ):
"""Test compiling hourly statistics where device class changes from one hour to the next.""" """Test compiling hourly statistics where device class changes from one hour to the next."""
zero = dt_util.utcnow() zero = dt_util.utcnow()
@ -2091,6 +2144,7 @@ def test_compile_hourly_statistics_changing_device_class_1(
"name": None, "name": None,
"source": "recorder", "source": "recorder",
"statistics_unit_of_measurement": state_unit, "statistics_unit_of_measurement": state_unit,
"unit_class": unit_class,
}, },
] ]
stats = statistics_during_period(hass, zero, period="5minute") stats = statistics_during_period(hass, zero, period="5minute")
@ -2140,6 +2194,7 @@ def test_compile_hourly_statistics_changing_device_class_1(
"name": None, "name": None,
"source": "recorder", "source": "recorder",
"statistics_unit_of_measurement": state_unit, "statistics_unit_of_measurement": state_unit,
"unit_class": unit_class,
}, },
] ]
stats = statistics_during_period(hass, zero, period="5minute") stats = statistics_during_period(hass, zero, period="5minute")
@ -2162,9 +2217,9 @@ def test_compile_hourly_statistics_changing_device_class_1(
@pytest.mark.parametrize( @pytest.mark.parametrize(
"device_class,state_unit,display_unit,statistic_unit,mean,min,max", "device_class, state_unit, display_unit, statistic_unit, unit_class, mean, min, max",
[ [
("power", "kW", "kW", "W", 13.050847, -10, 30), ("power", "kW", "kW", "W", "power", 13.050847, -10, 30),
], ],
) )
def test_compile_hourly_statistics_changing_device_class_2( def test_compile_hourly_statistics_changing_device_class_2(
@ -2174,6 +2229,7 @@ def test_compile_hourly_statistics_changing_device_class_2(
state_unit, state_unit,
display_unit, display_unit,
statistic_unit, statistic_unit,
unit_class,
mean, mean,
min, min,
max, max,
@ -2205,6 +2261,7 @@ def test_compile_hourly_statistics_changing_device_class_2(
"name": None, "name": None,
"source": "recorder", "source": "recorder",
"statistics_unit_of_measurement": statistic_unit, "statistics_unit_of_measurement": statistic_unit,
"unit_class": unit_class,
}, },
] ]
stats = statistics_during_period(hass, zero, period="5minute") stats = statistics_during_period(hass, zero, period="5minute")
@ -2254,6 +2311,7 @@ def test_compile_hourly_statistics_changing_device_class_2(
"name": None, "name": None,
"source": "recorder", "source": "recorder",
"statistics_unit_of_measurement": statistic_unit, "statistics_unit_of_measurement": statistic_unit,
"unit_class": unit_class,
}, },
] ]
stats = statistics_during_period(hass, zero, period="5minute") stats = statistics_during_period(hass, zero, period="5minute")
@ -2276,9 +2334,9 @@ def test_compile_hourly_statistics_changing_device_class_2(
@pytest.mark.parametrize( @pytest.mark.parametrize(
"device_class,state_unit,display_unit,statistics_unit,mean,min,max", "device_class, state_unit, display_unit, statistics_unit, unit_class, mean, min, max",
[ [
(None, None, None, None, 13.050847, -10, 30), (None, None, None, None, None, 13.050847, -10, 30),
], ],
) )
def test_compile_hourly_statistics_changing_statistics( def test_compile_hourly_statistics_changing_statistics(
@ -2288,6 +2346,7 @@ def test_compile_hourly_statistics_changing_statistics(
state_unit, state_unit,
display_unit, display_unit,
statistics_unit, statistics_unit,
unit_class,
mean, mean,
min, min,
max, max,
@ -2322,6 +2381,7 @@ def test_compile_hourly_statistics_changing_statistics(
"name": None, "name": None,
"source": "recorder", "source": "recorder",
"statistics_unit_of_measurement": None, "statistics_unit_of_measurement": None,
"unit_class": None,
}, },
] ]
metadata = get_metadata(hass, statistic_ids=("sensor.test1",)) metadata = get_metadata(hass, statistic_ids=("sensor.test1",))
@ -2358,6 +2418,7 @@ def test_compile_hourly_statistics_changing_statistics(
"name": None, "name": None,
"source": "recorder", "source": "recorder",
"statistics_unit_of_measurement": None, "statistics_unit_of_measurement": None,
"unit_class": None,
}, },
] ]
metadata = get_metadata(hass, statistic_ids=("sensor.test1",)) metadata = get_metadata(hass, statistic_ids=("sensor.test1",))
@ -2552,6 +2613,7 @@ def test_compile_statistics_hourly_daily_monthly_summary(hass_recorder, caplog):
"name": None, "name": None,
"source": "recorder", "source": "recorder",
"statistics_unit_of_measurement": "%", "statistics_unit_of_measurement": "%",
"unit_class": None,
}, },
{ {
"statistic_id": "sensor.test2", "statistic_id": "sensor.test2",
@ -2561,6 +2623,7 @@ def test_compile_statistics_hourly_daily_monthly_summary(hass_recorder, caplog):
"name": None, "name": None,
"source": "recorder", "source": "recorder",
"statistics_unit_of_measurement": "%", "statistics_unit_of_measurement": "%",
"unit_class": None,
}, },
{ {
"statistic_id": "sensor.test3", "statistic_id": "sensor.test3",
@ -2570,6 +2633,7 @@ def test_compile_statistics_hourly_daily_monthly_summary(hass_recorder, caplog):
"name": None, "name": None,
"source": "recorder", "source": "recorder",
"statistics_unit_of_measurement": "%", "statistics_unit_of_measurement": "%",
"unit_class": None,
}, },
{ {
"statistic_id": "sensor.test4", "statistic_id": "sensor.test4",
@ -2579,6 +2643,7 @@ def test_compile_statistics_hourly_daily_monthly_summary(hass_recorder, caplog):
"name": None, "name": None,
"source": "recorder", "source": "recorder",
"statistics_unit_of_measurement": "EUR", "statistics_unit_of_measurement": "EUR",
"unit_class": None,
}, },
] ]
@ -2588,7 +2653,7 @@ def test_compile_statistics_hourly_daily_monthly_summary(hass_recorder, caplog):
for i in range(13, 24): for i in range(13, 24):
expected_sums["sensor.test4"][i] += sum_adjustment expected_sums["sensor.test4"][i] += sum_adjustment
instance.async_adjust_statistics( instance.async_adjust_statistics(
"sensor.test4", sum_adjustement_start, sum_adjustment "sensor.test4", sum_adjustement_start, sum_adjustment, "EUR"
) )
wait_recording_done(hass) wait_recording_done(hass)