Fix handling of imperial units in long term statistics (#55959)
parent 9f1e503784
commit 27764e9985
4 changed files with 40 additions and 23 deletions
@@ -377,11 +377,11 @@ def statistics_during_period(
         )
         if not stats:
             return {}
-        return _sorted_statistics_to_dict(hass, stats, statistic_ids, metadata)
+        return _sorted_statistics_to_dict(hass, stats, statistic_ids, metadata, True)


 def get_last_statistics(
-    hass: HomeAssistant, number_of_stats: int, statistic_id: str
+    hass: HomeAssistant, number_of_stats: int, statistic_id: str, convert_units: bool
 ) -> dict[str, list[dict]]:
     """Return the last number_of_stats statistics for a statistic_id."""
     statistic_ids = [statistic_id]
@@ -411,7 +411,9 @@ def get_last_statistics(
         if not stats:
             return {}

-        return _sorted_statistics_to_dict(hass, stats, statistic_ids, metadata)
+        return _sorted_statistics_to_dict(
+            hass, stats, statistic_ids, metadata, convert_units
+        )


 def _sorted_statistics_to_dict(
@@ -419,11 +421,16 @@ def _sorted_statistics_to_dict(
     stats: list,
     statistic_ids: list[str] | None,
     metadata: dict[str, StatisticMetaData],
+    convert_units: bool,
 ) -> dict[str, list[dict]]:
     """Convert SQL results into JSON friendly data structure."""
     result: dict = defaultdict(list)
     units = hass.config.units

+    def no_conversion(val: Any, _: Any) -> float | None:
+        """Return x."""
+        return val  # type: ignore
+
     # Set all statistic IDs to empty lists in result set to maintain the order
     if statistic_ids is not None:
         for stat_id in statistic_ids:
@@ -436,9 +443,11 @@ def _sorted_statistics_to_dict(
     for meta_id, group in groupby(stats, lambda stat: stat.metadata_id):  # type: ignore
         unit = metadata[meta_id]["unit_of_measurement"]
         statistic_id = metadata[meta_id]["statistic_id"]
-        convert: Callable[[Any, Any], float | None] = UNIT_CONVERSIONS.get(
-            unit, lambda x, units: x  # type: ignore
-        )
+        convert: Callable[[Any, Any], float | None]
+        if convert_units:
+            convert = UNIT_CONVERSIONS.get(unit, lambda x, units: x)  # type: ignore
+        else:
+            convert = no_conversion
         ent_results = result[meta_id]
         ent_results.extend(
             {
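Taken together, the _sorted_statistics_to_dict hunks plus the new convert_units parameter on the two public functions boil down to one pattern: the caller decides whether each row goes through the display-unit conversion or is returned exactly as stored. A minimal, self-contained sketch of that selection (UNIT_CONVERSIONS here is a stand-in for the recorder's real table, and pick_converter is a hypothetical helper, not code from this commit):

from __future__ import annotations

from typing import Any, Callable

# Stand-in for the recorder's UNIT_CONVERSIONS table: maps a stored unit to a
# callable (value, unit_system) -> value in that unit system's display unit.
UNIT_CONVERSIONS: dict[str, Callable[[Any, Any], float | None]] = {
    "m³": lambda val, units: val * 35.314666711 if units == "imperial" else val,
}


def pick_converter(unit: str, convert_units: bool) -> Callable[[Any, Any], float | None]:
    """Pick the conversion applied to one statistics row, mirroring the hunk above."""
    if convert_units:
        # Known units are converted for display; unknown units pass through unchanged.
        return UNIT_CONVERSIONS.get(unit, lambda val, units: val)
    # convert_units=False keeps the stored (normalized) value untouched.
    return lambda val, _: val


# The sensor sum compiler asks for raw values; the statistics API asks for display units.
assert pick_converter("m³", False)(100.0, "imperial") == 100.0
assert abs(pick_converter("m³", True)(100.0, "imperial") - 3531.4666711) < 1e-6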
@@ -393,7 +393,7 @@ def compile_statistics(  # noqa: C901
         last_reset = old_last_reset = None
         new_state = old_state = None
         _sum = 0
-        last_stats = statistics.get_last_statistics(hass, 1, entity_id)
+        last_stats = statistics.get_last_statistics(hass, 1, entity_id, False)
         if entity_id in last_stats:
             # We have compiled history for this sensor before, use that as a starting point
             last_reset = old_last_reset = last_stats[entity_id][0]["last_reset"]
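A hedged reading of why this call site passes False: the recorder persists the running sum in the statistic's normalized unit, so when compile_statistics resumes from the last stored row it needs that raw value; converting it to the user's display unit first, as the previously unconditional conversion did on an imperial system, would mix units in the running total. A toy illustration using the gas factor that appears elsewhere in this commit:

stored_sum_m3 = 10.0                                 # sum as persisted by the recorder
converted_for_display_ft3 = stored_sum_m3 * 35.314666711
new_delta_m3 = 1.0                                   # next reading, still in m³

mixed_units_total = converted_for_display_ft3 + new_delta_m3  # ~354.1, meaningless
consistent_total = stored_sum_m3 + new_delta_m3               # 11.0 m³, as intended
print(mixed_units_total, consistent_total)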
@@ -36,7 +36,7 @@ def test_compile_hourly_statistics(hass_recorder):
     for kwargs in ({}, {"statistic_ids": ["sensor.test1"]}):
         stats = statistics_during_period(hass, zero, **kwargs)
         assert stats == {}
-    stats = get_last_statistics(hass, 0, "sensor.test1")
+    stats = get_last_statistics(hass, 0, "sensor.test1", True)
     assert stats == {}

     recorder.do_adhoc_statistics(period="hourly", start=zero)
@@ -82,19 +82,19 @@ def test_compile_hourly_statistics(hass_recorder):
     assert stats == {}

     # Test get_last_statistics
-    stats = get_last_statistics(hass, 0, "sensor.test1")
+    stats = get_last_statistics(hass, 0, "sensor.test1", True)
     assert stats == {}

-    stats = get_last_statistics(hass, 1, "sensor.test1")
+    stats = get_last_statistics(hass, 1, "sensor.test1", True)
     assert stats == {"sensor.test1": [{**expected_2, "statistic_id": "sensor.test1"}]}

-    stats = get_last_statistics(hass, 2, "sensor.test1")
+    stats = get_last_statistics(hass, 2, "sensor.test1", True)
     assert stats == {"sensor.test1": expected_stats1[::-1]}

-    stats = get_last_statistics(hass, 3, "sensor.test1")
+    stats = get_last_statistics(hass, 3, "sensor.test1", True)
     assert stats == {"sensor.test1": expected_stats1[::-1]}

-    stats = get_last_statistics(hass, 1, "sensor.test3")
+    stats = get_last_statistics(hass, 1, "sensor.test3", True)
     assert stats == {}

@@ -219,7 +219,7 @@ def test_rename_entity(hass_recorder):
     for kwargs in ({}, {"statistic_ids": ["sensor.test1"]}):
         stats = statistics_during_period(hass, zero, **kwargs)
         assert stats == {}
-    stats = get_last_statistics(hass, 0, "sensor.test1")
+    stats = get_last_statistics(hass, 0, "sensor.test1", True)
     assert stats == {}

     recorder.do_adhoc_statistics(period="hourly", start=zero)
@@ -18,6 +18,7 @@ from homeassistant.components.recorder.statistics import (
 from homeassistant.const import STATE_UNAVAILABLE
 from homeassistant.setup import setup_component
 import homeassistant.util.dt as dt_util
+from homeassistant.util.unit_system import IMPERIAL_SYSTEM, METRIC_SYSTEM

 from tests.components.recorder.common import wait_recording_done

@@ -194,22 +195,29 @@ def test_compile_hourly_statistics_unsupported(hass_recorder, caplog, attributes

 @pytest.mark.parametrize("state_class", ["measurement", "total"])
 @pytest.mark.parametrize(
-    "device_class,unit,native_unit,factor",
+    "units,device_class,unit,display_unit,factor",
     [
-        ("energy", "kWh", "kWh", 1),
-        ("energy", "Wh", "kWh", 1 / 1000),
-        ("monetary", "EUR", "EUR", 1),
-        ("monetary", "SEK", "SEK", 1),
-        ("gas", "m³", "m³", 1),
-        ("gas", "ft³", "m³", 0.0283168466),
+        (IMPERIAL_SYSTEM, "energy", "kWh", "kWh", 1),
+        (IMPERIAL_SYSTEM, "energy", "Wh", "kWh", 1 / 1000),
+        (IMPERIAL_SYSTEM, "monetary", "EUR", "EUR", 1),
+        (IMPERIAL_SYSTEM, "monetary", "SEK", "SEK", 1),
+        (IMPERIAL_SYSTEM, "gas", "m³", "ft³", 35.314666711),
+        (IMPERIAL_SYSTEM, "gas", "ft³", "ft³", 1),
+        (METRIC_SYSTEM, "energy", "kWh", "kWh", 1),
+        (METRIC_SYSTEM, "energy", "Wh", "kWh", 1 / 1000),
+        (METRIC_SYSTEM, "monetary", "EUR", "EUR", 1),
+        (METRIC_SYSTEM, "monetary", "SEK", "SEK", 1),
+        (METRIC_SYSTEM, "gas", "m³", "m³", 1),
+        (METRIC_SYSTEM, "gas", "ft³", "m³", 0.0283168466),
     ],
 )
 def test_compile_hourly_sum_statistics_amount(
-    hass_recorder, caplog, state_class, device_class, unit, native_unit, factor
+    hass_recorder, caplog, units, state_class, device_class, unit, display_unit, factor
 ):
     """Test compiling hourly statistics."""
     zero = dt_util.utcnow()
     hass = hass_recorder()
+    hass.config.units = units
     recorder = hass.data[DATA_INSTANCE]
     setup_component(hass, "sensor", {})
     attributes = {
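The two gas factors in the new parametrization are simply the two directions of the same cubic-foot/cubic-metre conversion; a quick standalone sanity check of that relationship (not part of the test suite):

CUBIC_METERS_PER_CUBIC_FOOT = 0.0283168466  # metric display of a ft³ sensor
CUBIC_FEET_PER_CUBIC_METER = 35.314666711   # imperial display of a m³ sensor

# The factors are reciprocals to well within float tolerance.
assert abs(CUBIC_METERS_PER_CUBIC_FOOT * CUBIC_FEET_PER_CUBIC_METER - 1.0) < 1e-8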
@@ -236,7 +244,7 @@ def test_compile_hourly_sum_statistics_amount(
     wait_recording_done(hass)
     statistic_ids = list_statistic_ids(hass)
     assert statistic_ids == [
-        {"statistic_id": "sensor.test1", "unit_of_measurement": native_unit}
+        {"statistic_id": "sensor.test1", "unit_of_measurement": display_unit}
     ]
     stats = statistics_during_period(hass, zero)
     assert stats == {