Switch statistics config to require either or both of 'max_age' and 'sampling_size' ()

* Remove default characteristic

* Remove default sampling_size

* Fix typo

* Fix typo
Thomas Dietrich 2022-11-17 10:31:06 +01:00 committed by GitHub
parent 1b80c66195
commit ad8b882cb6
4 changed files with 65 additions and 79 deletions
homeassistant/components/statistics
tests/components/statistics
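In practical terms, a statistics sensor configuration must now name its state_characteristic and set sampling_size, max_age, or both; leaving both out no longer falls back to the previous sampling_size default of 20 and instead fails config validation. A minimal YAML sketch (entity name and values are illustrative only):

sensor:
  - platform: statistics
    name: cpu_mean
    entity_id: sensor.cpu
    state_characteristic: mean
    sampling_size: 20
    # alternatively, or in addition to sampling_size:
    # max_age:
    #   minutes: 5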

View file

@@ -34,11 +34,10 @@ from homeassistant.core import (
     Event,
     HomeAssistant,
     State,
-    async_get_hass,
     callback,
     split_entity_id,
 )
-from homeassistant.helpers import config_validation as cv, issue_registry
+from homeassistant.helpers import config_validation as cv
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
 from homeassistant.helpers.event import (
     async_track_point_in_utc_time,
@@ -179,7 +178,7 @@ CONF_MAX_AGE = "max_age"
 CONF_PRECISION = "precision"
 CONF_PERCENTILE = "percentile"
-DEFAULT_NAME = "Stats"
+DEFAULT_NAME = "Statistical characteristic"
 DEFAULT_PRECISION = 2
 ICON = "mdi:calculator"
@@ -187,24 +186,6 @@ ICON = "mdi:calculator"
 def valid_state_characteristic_configuration(config: dict[str, Any]) -> dict[str, Any]:
     """Validate that the characteristic selected is valid for the source sensor type, throw if it isn't."""
     is_binary = split_entity_id(config[CONF_ENTITY_ID])[0] == BINARY_SENSOR_DOMAIN
-    if config.get(CONF_STATE_CHARACTERISTIC) is None:
-        config[CONF_STATE_CHARACTERISTIC] = STAT_COUNT if is_binary else STAT_MEAN
-        issue_registry.async_create_issue(
-            hass=async_get_hass(),
-            domain=DOMAIN,
-            issue_id=f"{config[CONF_ENTITY_ID]}_default_characteristic",
-            breaks_in_ha_version="2022.12.0",
-            is_fixable=False,
-            severity=issue_registry.IssueSeverity.WARNING,
-            translation_key="deprecation_warning_characteristic",
-            translation_placeholders={
-                "entity": config[CONF_NAME],
-                "characteristic": config[CONF_STATE_CHARACTERISTIC],
-            },
-            learn_more_url="https://github.com/home-assistant/core/pull/60402",
-        )
     characteristic = cast(str, config[CONF_STATE_CHARACTERISTIC])
     if (is_binary and characteristic not in STATS_BINARY_SUPPORT) or (
         not is_binary and characteristic not in STATS_NUMERIC_SUPPORT
@@ -218,20 +199,14 @@ def valid_state_characteristic_configuration(config: dict[str, Any]) -> dict[str
 def valid_boundary_configuration(config: dict[str, Any]) -> dict[str, Any]:
-    """Validate that sampling_size, max_age, or both are provided."""
-    if config.get(CONF_SAMPLES_MAX_BUFFER_SIZE) is None:
-        config[CONF_SAMPLES_MAX_BUFFER_SIZE] = 20
-        issue_registry.async_create_issue(
-            hass=async_get_hass(),
-            domain=DOMAIN,
-            issue_id=f"{config[CONF_ENTITY_ID]}_invalid_boundary_config",
-            breaks_in_ha_version="2022.12.0",
-            is_fixable=False,
-            severity=issue_registry.IssueSeverity.WARNING,
-            translation_key="deprecation_warning_size",
-            translation_placeholders={"entity": config[CONF_NAME]},
-            learn_more_url="https://github.com/home-assistant/core/pull/69700",
-        )
+    """Validate that max_age, sampling_size, or both are provided."""
+    if (
+        config.get(CONF_SAMPLES_MAX_BUFFER_SIZE) is None
+        and config.get(CONF_MAX_AGE) is None
+    ):
+        raise vol.RequiredFieldInvalid(
+            "The sensor configuration must provide 'max_age' and/or 'sampling_size'"
+        )
     return config
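The new check can be exercised outside Home Assistant. A standalone sketch with plain string keys in place of the integration's CONF_* constants and a deliberately simplified schema (illustrative only, not the platform schema itself):

import voluptuous as vol


def require_max_age_or_sampling_size(config: dict) -> dict:
    """Reject configs that define neither boundary, mirroring the check above."""
    if config.get("sampling_size") is None and config.get("max_age") is None:
        raise vol.RequiredFieldInvalid(
            "The sensor configuration must provide 'max_age' and/or 'sampling_size'"
        )
    return config


SCHEMA = vol.All(
    vol.Schema(
        {
            vol.Required("state_characteristic"): str,
            vol.Optional("sampling_size"): vol.All(vol.Coerce(int), vol.Range(min=1)),
            vol.Optional("max_age"): dict,
        },
        extra=vol.ALLOW_EXTRA,
    ),
    require_max_age_or_sampling_size,
)

print(SCHEMA({"state_characteristic": "mean", "sampling_size": 20}))  # accepted
try:
    SCHEMA({"state_characteristic": "mean"})  # neither boundary set
except vol.Invalid as err:
    print(f"rejected: {err}")  # RequiredFieldInvalid is a subclass of Invalid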
@@ -241,8 +216,10 @@ _PLATFORM_SCHEMA_BASE = PLATFORM_SCHEMA.extend(
         vol.Required(CONF_ENTITY_ID): cv.entity_id,
         vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
         vol.Optional(CONF_UNIQUE_ID): cv.string,
-        vol.Optional(CONF_STATE_CHARACTERISTIC): cv.string,
-        vol.Optional(CONF_SAMPLES_MAX_BUFFER_SIZE): vol.Coerce(int),
+        vol.Required(CONF_STATE_CHARACTERISTIC): cv.string,
+        vol.Optional(CONF_SAMPLES_MAX_BUFFER_SIZE): vol.All(
+            vol.Coerce(int), vol.Range(min=1)
+        ),
         vol.Optional(CONF_MAX_AGE): cv.time_period,
         vol.Optional(CONF_PRECISION, default=DEFAULT_PRECISION): vol.Coerce(int),
         vol.Optional(CONF_PERCENTILE, default=50): vol.All(
@@ -274,7 +251,7 @@ async def async_setup_platform(
                 name=config[CONF_NAME],
                 unique_id=config.get(CONF_UNIQUE_ID),
                 state_characteristic=config[CONF_STATE_CHARACTERISTIC],
-                samples_max_buffer_size=config[CONF_SAMPLES_MAX_BUFFER_SIZE],
+                samples_max_buffer_size=config.get(CONF_SAMPLES_MAX_BUFFER_SIZE),
                 samples_max_age=config.get(CONF_MAX_AGE),
                 precision=config[CONF_PRECISION],
                 percentile=config[CONF_PERCENTILE],
@@ -293,7 +270,7 @@ class StatisticsSensor(SensorEntity):
         name: str,
         unique_id: str | None,
         state_characteristic: str,
-        samples_max_buffer_size: int,
+        samples_max_buffer_size: int | None,
         samples_max_age: timedelta | None,
         precision: int,
         percentile: int,
@@ -308,20 +285,17 @@
             split_entity_id(self._source_entity_id)[0] == BINARY_SENSOR_DOMAIN
         )
         self._state_characteristic: str = state_characteristic
-        self._samples_max_buffer_size: int = samples_max_buffer_size
+        self._samples_max_buffer_size: int | None = samples_max_buffer_size
         self._samples_max_age: timedelta | None = samples_max_age
         self._precision: int = precision
         self._percentile: int = percentile
         self._value: StateType | datetime = None
         self._unit_of_measurement: str | None = None
         self._available: bool = False
         self.states: deque[float | bool] = deque(maxlen=self._samples_max_buffer_size)
         self.ages: deque[datetime] = deque(maxlen=self._samples_max_buffer_size)
-        self.attributes: dict[str, StateType] = {
-            STAT_AGE_COVERAGE_RATIO: None,
-            STAT_BUFFER_USAGE_RATIO: None,
-            STAT_SOURCE_VALUE_VALID: None,
-        }
+        self.attributes: dict[str, StateType] = {}

         self._state_characteristic_fn: Callable[[], StateType | datetime]
         if self.is_binary:
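With sampling_size now optional, samples_max_buffer_size may be None, and deque(maxlen=None) grows without bound, so samples are then discarded only by the max_age purge. A quick illustration of the standard-library behavior this relies on:

from collections import deque

bounded = deque(maxlen=3)       # fixed-size buffer, as with sampling_size: 3
unbounded = deque(maxlen=None)  # buffer when only max_age is configured

for value in range(5):
    bounded.append(value)
    unbounded.append(value)

print(list(bounded))    # [2, 3, 4] -- oldest samples rotated out
print(list(unbounded))  # [0, 1, 2, 3, 4] -- nothing rotated out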
@@ -496,11 +470,8 @@
         self._update_value()

         # If max_age is set, ensure to update again after the defined interval.
-        next_to_purge_timestamp = self._next_to_purge_timestamp()
-        if next_to_purge_timestamp:
-            _LOGGER.debug(
-                "%s: scheduling update at %s", self.entity_id, next_to_purge_timestamp
-            )
+        if timestamp := self._next_to_purge_timestamp():
+            _LOGGER.debug("%s: scheduling update at %s", self.entity_id, timestamp)
             if self._update_listener:
                 self._update_listener()
                 self._update_listener = None
@@ -513,7 +484,7 @@
                 self._update_listener = None

             self._update_listener = async_track_point_in_utc_time(
-                self.hass, _scheduled_update, next_to_purge_timestamp
+                self.hass, _scheduled_update, timestamp
             )

     def _fetch_states_from_database(self) -> list[State]:
@@ -563,18 +534,20 @@
     def _update_attributes(self) -> None:
         """Calculate and update the various attributes."""
-        self.attributes[STAT_BUFFER_USAGE_RATIO] = round(
-            len(self.states) / self._samples_max_buffer_size, 2
-        )
-        if len(self.states) >= 1 and self._samples_max_age is not None:
-            self.attributes[STAT_AGE_COVERAGE_RATIO] = round(
-                (self.ages[-1] - self.ages[0]).total_seconds()
-                / self._samples_max_age.total_seconds(),
-                2,
-            )
-        else:
-            self.attributes[STAT_AGE_COVERAGE_RATIO] = None
+        if self._samples_max_buffer_size is not None:
+            self.attributes[STAT_BUFFER_USAGE_RATIO] = round(
+                len(self.states) / self._samples_max_buffer_size, 2
+            )
+
+        if self._samples_max_age is not None:
+            if len(self.states) >= 1:
+                self.attributes[STAT_AGE_COVERAGE_RATIO] = round(
+                    (self.ages[-1] - self.ages[0]).total_seconds()
+                    / self._samples_max_age.total_seconds(),
+                    2,
+                )
+            else:
+                self.attributes[STAT_AGE_COVERAGE_RATIO] = None

     def _update_value(self) -> None:
         """Front to call the right statistical characteristics functions.

View file

@@ -1,12 +0,0 @@
-{
-  "issues": {
-    "deprecation_warning_characteristic": {
-      "description": "The configuration parameter `state_characteristic` of the statistics integration will become mandatory.\n\nPlease add `state_characteristic: {characteristic}` to the configuration of sensor `{entity}` to keep the current behavior.\n\nRead the documentation of the statistics integration for further details: https://www.home-assistant.io/integrations/statistics/",
-      "title": "Mandatory 'state_characteristic' assumed for a Statistics entity"
-    },
-    "deprecation_warning_size": {
-      "description": "The configuration parameter `sampling_size` of the statistics integration defaulted to the value 20 so far, which will change.\n\nPlease check the configuration for sensor `{entity}` and add suited boundaries, e.g., `sampling_size: 20` to keep the current behavior. The configuration of the statistics integration will become more flexible with version 2022.12.0 and accept either `sampling_size` or `max_age`, or both settings. The request above prepares your configuration for this otherwise breaking change.\n\nRead the documentation of the statistics integration for further details: https://www.home-assistant.io/integrations/statistics/",
-      "title": "Implicit 'sampling_size' assumed for a Statistics entity"
-    }
-  }
-}

View file

@@ -3,3 +3,4 @@ sensor:
     entity_id: sensor.cpu
     name: cputest
     state_characteristic: mean
+    sampling_size: 20

View file

@@ -45,8 +45,10 @@ async def test_unique_id(hass: HomeAssistant):
                 {
                     "platform": "statistics",
                     "name": "test",
-                    "entity_id": "sensor.test_monitored",
                     "unique_id": "uniqueid_sensor_test",
+                    "entity_id": "sensor.test_monitored",
+                    "state_characteristic": "mean",
+                    "sampling_size": 20,
                 },
             ]
         },
@@ -71,6 +73,8 @@ async def test_sensor_defaults_numeric(hass: HomeAssistant):
                     "platform": "statistics",
                     "name": "test",
                     "entity_id": "sensor.test_monitored",
+                    "state_characteristic": "mean",
+                    "sampling_size": 20,
                 },
             ]
         },
@@ -162,6 +166,8 @@ async def test_sensor_defaults_binary(hass: HomeAssistant):
                     "platform": "statistics",
                     "name": "test",
                     "entity_id": "binary_sensor.test_monitored",
+                    "state_characteristic": "count",
+                    "sampling_size": 20,
                 },
             ]
         },
@@ -199,12 +205,14 @@ async def test_sensor_source_with_force_update(hass: HomeAssistant):
                     "name": "test_normal",
                     "entity_id": "sensor.test_monitored_normal",
                     "state_characteristic": "mean",
+                    "sampling_size": 20,
                 },
                 {
                     "platform": "statistics",
                     "name": "test_force",
                     "entity_id": "sensor.test_monitored_force",
                     "state_characteristic": "mean",
+                    "sampling_size": 20,
                 },
             ]
         },
@@ -234,8 +242,8 @@ async def test_sensor_source_with_force_update(hass: HomeAssistant):
     assert state_force.attributes.get("buffer_usage_ratio") == round(9 / 20, 2)


-async def test_sampling_size_non_default(hass: HomeAssistant):
-    """Test rotation."""
+async def test_sampling_size_reduced(hass: HomeAssistant):
+    """Test limited buffer size."""
     assert await async_setup_component(
         hass,
         "sensor",
@@ -287,7 +295,7 @@ async def test_sampling_size_1(hass: HomeAssistant):
     )
     await hass.async_block_till_done()

-    for value in VALUES_NUMERIC[-3:]:  # just the last 3 will do
+    for value in VALUES_NUMERIC:
         hass.states.async_set(
             "sensor.test_monitored",
             str(value),
@@ -303,7 +311,7 @@


 async def test_age_limit_expiry(hass: HomeAssistant):
-    """Test that values are removed after certain age."""
+    """Test that values are removed with given max age."""
     now = dt_util.utcnow()
     mock_data = {
         "return_time": datetime(now.year + 1, 8, 2, 12, 23, tzinfo=dt_util.UTC)
@@ -325,6 +333,7 @@ async def test_age_limit_expiry(hass: HomeAssistant):
                     "name": "test",
                     "entity_id": "sensor.test_monitored",
                     "state_characteristic": "mean",
+                    "sampling_size": 20,
                     "max_age": {"minutes": 4},
                 },
             ]
@@ -402,6 +411,7 @@ async def test_precision(hass: HomeAssistant):
                     "name": "test_precision_0",
                     "entity_id": "sensor.test_monitored",
                     "state_characteristic": "mean",
+                    "sampling_size": 20,
                     "precision": 0,
                 },
                 {
@@ -409,6 +419,7 @@ async def test_precision(hass: HomeAssistant):
                     "name": "test_precision_3",
                     "entity_id": "sensor.test_monitored",
                     "state_characteristic": "mean",
+                    "sampling_size": 20,
                     "precision": 3,
                 },
             ]
@@ -500,6 +511,7 @@ async def test_device_class(hass: HomeAssistant):
                     "name": "test_source_class",
                     "entity_id": "sensor.test_monitored",
                     "state_characteristic": "mean",
+                    "sampling_size": 20,
                 },
                 {
                     # Device class is set to None for characteristics with special meaning
@@ -507,6 +519,7 @@ async def test_device_class(hass: HomeAssistant):
                     "name": "test_none",
                     "entity_id": "sensor.test_monitored",
                     "state_characteristic": "count",
+                    "sampling_size": 20,
                 },
                 {
                     # Device class is set to timestamp for datetime characteristics
@@ -514,6 +527,7 @@ async def test_device_class(hass: HomeAssistant):
                     "name": "test_timestamp",
                     "entity_id": "sensor.test_monitored",
                     "state_characteristic": "datetime_oldest",
+                    "sampling_size": 20,
                 },
             ]
         },
@@ -554,12 +568,14 @@ async def test_state_class(hass: HomeAssistant):
                     "name": "test_normal",
                     "entity_id": "sensor.test_monitored",
                     "state_characteristic": "count",
+                    "sampling_size": 20,
                 },
                 {
                     "platform": "statistics",
                     "name": "test_nan",
                     "entity_id": "sensor.test_monitored",
                     "state_characteristic": "datetime_oldest",
+                    "sampling_size": 20,
                 },
             ]
         },
@@ -594,29 +610,35 @@ async def test_unitless_source_sensor(hass: HomeAssistant):
                     "name": "test_unitless_1",
                     "entity_id": "sensor.test_monitored_unitless",
                     "state_characteristic": "count",
+                    "sampling_size": 20,
                 },
                 {
                     "platform": "statistics",
                     "name": "test_unitless_2",
                     "entity_id": "sensor.test_monitored_unitless",
                     "state_characteristic": "mean",
+                    "sampling_size": 20,
                 },
                 {
                     "platform": "statistics",
                     "name": "test_unitless_3",
                     "entity_id": "sensor.test_monitored_unitless",
                     "state_characteristic": "change_second",
+                    "sampling_size": 20,
                 },
                 {
                     "platform": "statistics",
                     "name": "test_unitless_4",
                     "entity_id": "binary_sensor.test_monitored_unitless",
+                    "state_characteristic": "count",
+                    "sampling_size": 20,
                 },
                 {
                     "platform": "statistics",
                     "name": "test_unitless_5",
                     "entity_id": "binary_sensor.test_monitored_unitless",
                     "state_characteristic": "mean",
+                    "sampling_size": 20,
                 },
             ]
         },
@@ -1087,12 +1109,14 @@ async def test_invalid_state_characteristic(hass: HomeAssistant):
                     "name": "test_numeric",
                     "entity_id": "sensor.test_monitored",
                     "state_characteristic": "invalid",
+                    "sampling_size": 20,
                 },
                 {
                     "platform": "statistics",
                     "name": "test_binary",
                     "entity_id": "binary_sensor.test_monitored",
                     "state_characteristic": "variance",
+                    "sampling_size": 20,
                 },
             ]
         },
@@ -1192,8 +1216,8 @@ async def test_initialize_from_database_with_maxage(recorder_mock, hass: HomeAss
                     "platform": "statistics",
                     "name": "test",
                     "entity_id": "sensor.test_monitored",
-                    "sampling_size": 100,
                     "state_characteristic": "datetime_newest",
+                    "sampling_size": 100,
                     "max_age": {"hours": 3},
                 },
             ]