Sensor: Handle local->UTC conversion and reject timezoneless timestamps (#59971)
parent 7e1b00c491
commit 69b7495324

5 changed files with 115 additions and 33 deletions
@@ -4,7 +4,7 @@ from __future__ import annotations
 from collections.abc import Mapping
 from contextlib import suppress
 from dataclasses import dataclass
-from datetime import date, datetime, timedelta
+from datetime import date, datetime, timedelta, timezone
 import inspect
 import logging
 from typing import Any, Final, cast, final

@@ -308,15 +308,31 @@ class SensorEntity(Entity):
                     f"while it has device class '{device_class}'"
                 ) from error
+
+            if value.tzinfo is not None and value.tzinfo != timezone.utc:
+                value = value.astimezone(timezone.utc)
+
             # Convert the date object to a standardized state string.
             if device_class == DEVICE_CLASS_DATE:
                 return value.date().isoformat()

             return value.isoformat(timespec="seconds")

         # Received a datetime
         if value is not None and device_class == DEVICE_CLASS_TIMESTAMP:
             try:
-                return value.isoformat(timespec="seconds")  # type: ignore
+                # We cast the value, to avoid using isinstance, but satisfy
+                # typechecking. The errors are guarded in this try.
+                value = cast(datetime, value)
+                if value.tzinfo is None:
+                    raise ValueError(
+                        f"Invalid datetime: {self.entity_id} provides state '{value}', "
+                        "which is missing timezone information"
+                    )
+
+                if value.tzinfo != timezone.utc:
+                    value = value.astimezone(timezone.utc)
+
+                return value.isoformat(timespec="seconds")
             except (AttributeError, TypeError) as err:
                 raise ValueError(
                     f"Invalid datetime: {self.entity_id} has a timestamp device class"

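In plain terms, the hunk above makes the sensor base class normalize every timezone-aware datetime to UTC before serializing it, and raise ValueError for naive datetimes instead of emitting them as-is. A minimal standalone sketch of that rule (the helper name normalize_timestamp is illustrative only, not part of the sensor API):

from datetime import datetime, timedelta, timezone

def normalize_timestamp(value: datetime) -> str:
    # Mirror the rule above: reject naive datetimes, convert everything else to UTC.
    if value.tzinfo is None:
        raise ValueError(f"Invalid datetime: '{value}' is missing timezone information")
    if value.tzinfo != timezone.utc:
        value = value.astimezone(timezone.utc)
    return value.isoformat(timespec="seconds")

# A local (UTC+1) timestamp is reported in UTC:
cet = timezone(timedelta(hours=1))
print(normalize_timestamp(datetime(2021, 11, 18, 20, 25, tzinfo=cet)))
# -> 2021-11-18T19:25:00+00:00

# A naive timestamp now raises instead of being serialized as-is:
# normalize_timestamp(datetime(2021, 11, 18, 20, 25))  # ValueError
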
@@ -287,18 +287,13 @@ async def test_update_sensor_no_state(hass, create_registrations, webhook_client
         (DEVICE_CLASS_DATE, "2021-11-18", "2021-11-18"),
-        (
-            DEVICE_CLASS_TIMESTAMP,
-            "2021-11-18T20:25:00",
-            "2021-11-18T20:25:00",
-        ),
         (
             DEVICE_CLASS_TIMESTAMP,
-            "2021-11-18 20:25:00",
-            "2021-11-18T20:25:00",
+            "2021-11-18T20:25:00+00:00",
+            "2021-11-18T20:25:00+00:00",
         ),
         (
             DEVICE_CLASS_TIMESTAMP,
             "2021-11-18 20:25:00+01:00",
-            "2021-11-18T20:25:00+01:00",
+            "2021-11-18T19:25:00+00:00",
         ),
     ],
 )

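The surviving expectations line up with plain stdlib parsing plus UTC normalization; a quick check of the two string formats involved (how the webhook/sensor pipeline actually parses them is not shown in this hunk):

from datetime import datetime, timezone

aware = datetime.fromisoformat("2021-11-18 20:25:00+01:00")
print(aware.astimezone(timezone.utc).isoformat(timespec="seconds"))
# -> 2021-11-18T19:25:00+00:00, the new expected state for the +01:00 case

naive = datetime.fromisoformat("2021-11-18 20:25:00")
print(naive.tzinfo)  # None -> no offset to normalize; those cases were dropped here
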
@@ -1,5 +1,5 @@
 """The tests for Octoptint binary sensor module."""
-from datetime import datetime
+from datetime import datetime, timezone
 from unittest.mock import patch

 from homeassistant.helpers import entity_registry as er

@@ -22,7 +22,8 @@ async def test_sensors(hass):
         "state": "Printing",
     }
     with patch(
-        "homeassistant.util.dt.utcnow", return_value=datetime(2020, 2, 20, 9, 10, 0)
+        "homeassistant.util.dt.utcnow",
+        return_value=datetime(2020, 2, 20, 9, 10, 0, tzinfo=timezone.utc),
     ):
         await init_integration(hass, "sensor", printer=printer, job=job)

@@ -65,14 +66,14 @@ async def test_sensors(hass):

     state = hass.states.get("sensor.octoprint_start_time")
     assert state is not None
-    assert state.state == "2020-02-20T09:00:00"
+    assert state.state == "2020-02-20T09:00:00+00:00"
     assert state.name == "OctoPrint Start Time"
     entry = entity_registry.async_get("sensor.octoprint_start_time")
     assert entry.unique_id == "Start Time-uuid"

     state = hass.states.get("sensor.octoprint_estimated_finish_time")
     assert state is not None
-    assert state.state == "2020-02-20T10:50:00"
+    assert state.state == "2020-02-20T10:50:00+00:00"
     assert state.name == "OctoPrint Estimated Finish Time"
     entry = entity_registry.async_get("sensor.octoprint_estimated_finish_time")
     assert entry.unique_id == "Estimated Finish Time-uuid"

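The mocked utcnow now has to return an aware datetime. Assuming the OctoPrint sensors derive start and finish times by adding or subtracting elapsed seconds from that value (the derivation itself is not part of this diff, and the 10-minute figure below is hypothetical), the arithmetic shows why: datetime math preserves naiveness, and only the aware variant produces the +00:00 states asserted just above.

from datetime import datetime, timedelta, timezone

elapsed = timedelta(minutes=10)  # hypothetical elapsed print time

naive_now = datetime(2020, 2, 20, 9, 10, 0)
print((naive_now - elapsed).tzinfo)  # None -> would now be rejected by the sensor base class

aware_now = datetime(2020, 2, 20, 9, 10, 0, tzinfo=timezone.utc)
print((aware_now - elapsed).isoformat(timespec="seconds"))
# -> 2020-02-20T09:00:00+00:00, matching the updated start-time assertion
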
@@ -210,44 +210,44 @@ class TestPicnicSensor(unittest.IsolatedAsyncioTestCase):
         )
         self._assert_sensor(
             "sensor.picnic_selected_slot_start",
-            "2021-03-03T14:45:00+01:00",
+            "2021-03-03T13:45:00+00:00",
             cls=DEVICE_CLASS_TIMESTAMP,
         )
         self._assert_sensor(
             "sensor.picnic_selected_slot_end",
-            "2021-03-03T15:45:00+01:00",
+            "2021-03-03T14:45:00+00:00",
             cls=DEVICE_CLASS_TIMESTAMP,
         )
         self._assert_sensor(
             "sensor.picnic_selected_slot_max_order_time",
-            "2021-03-02T22:00:00+01:00",
+            "2021-03-02T21:00:00+00:00",
             cls=DEVICE_CLASS_TIMESTAMP,
         )
         self._assert_sensor("sensor.picnic_selected_slot_min_order_value", "35.0")
         self._assert_sensor(
             "sensor.picnic_last_order_slot_start",
-            "2021-02-26T20:15:00+01:00",
+            "2021-02-26T19:15:00+00:00",
             cls=DEVICE_CLASS_TIMESTAMP,
         )
         self._assert_sensor(
             "sensor.picnic_last_order_slot_end",
-            "2021-02-26T21:15:00+01:00",
+            "2021-02-26T20:15:00+00:00",
             cls=DEVICE_CLASS_TIMESTAMP,
         )
         self._assert_sensor("sensor.picnic_last_order_status", "COMPLETED")
         self._assert_sensor(
             "sensor.picnic_last_order_eta_start",
-            "2021-02-26T20:54:00+01:00",
+            "2021-02-26T19:54:00+00:00",
             cls=DEVICE_CLASS_TIMESTAMP,
         )
         self._assert_sensor(
             "sensor.picnic_last_order_eta_end",
-            "2021-02-26T21:14:00+01:00",
+            "2021-02-26T20:14:00+00:00",
             cls=DEVICE_CLASS_TIMESTAMP,
         )
         self._assert_sensor(
             "sensor.picnic_last_order_delivery_time",
-            "2021-02-26T20:54:05+01:00",
+            "2021-02-26T19:54:05+00:00",
             cls=DEVICE_CLASS_TIMESTAMP,
         )
         self._assert_sensor(

@@ -305,10 +305,10 @@ class TestPicnicSensor(unittest.IsolatedAsyncioTestCase):
         # Assert delivery time is not available, but eta is
         self._assert_sensor("sensor.picnic_last_order_delivery_time", STATE_UNAVAILABLE)
         self._assert_sensor(
-            "sensor.picnic_last_order_eta_start", "2021-02-26T20:54:00+01:00"
+            "sensor.picnic_last_order_eta_start", "2021-02-26T19:54:00+00:00"
         )
         self._assert_sensor(
-            "sensor.picnic_last_order_eta_end", "2021-02-26T21:14:00+01:00"
+            "sensor.picnic_last_order_eta_end", "2021-02-26T20:14:00+00:00"
         )

     async def test_sensors_use_detailed_eta_if_available(self):

@@ -322,8 +322,8 @@ class TestPicnicSensor(unittest.IsolatedAsyncioTestCase):
         self.picnic_mock().get_deliveries.return_value = [delivery_response]
         self.picnic_mock().get_delivery_position.return_value = {
             "eta_window": {
-                "start": "2021-03-05T11:19:20.452+01:00",
-                "end": "2021-03-05T11:39:20.452+01:00",
+                "start": "2021-03-05T10:19:20.452+00:00",
+                "end": "2021-03-05T10:39:20.452+00:00",
             }
         }
         await self._coordinator.async_refresh()

@@ -333,10 +333,10 @@ class TestPicnicSensor(unittest.IsolatedAsyncioTestCase):
             delivery_response["delivery_id"]
         )
         self._assert_sensor(
-            "sensor.picnic_last_order_eta_start", "2021-03-05T11:19:20+01:00"
+            "sensor.picnic_last_order_eta_start", "2021-03-05T10:19:20+00:00"
         )
         self._assert_sensor(
-            "sensor.picnic_last_order_eta_end", "2021-03-05T11:39:20+01:00"
+            "sensor.picnic_last_order_eta_end", "2021-03-05T10:39:20+00:00"
         )

     async def test_sensors_no_data(self):

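One detail worth noting: the mocked eta_window carries millisecond precision (".452") while the asserted states do not. Assuming the ETA ends up as a datetime native value and is serialized by the sensor base class, the isoformat(timespec="seconds") call shown in the sensor hunk drops the fractional part. Reproduced with the stdlib (the integration's own parser may differ):

from datetime import datetime, timezone

eta_start = datetime.fromisoformat("2021-03-05T10:19:20.452+00:00")
print(eta_start.astimezone(timezone.utc).isoformat(timespec="seconds"))
# -> 2021-03-05T10:19:20+00:00, the value asserted above
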
@@ -117,6 +117,9 @@ async def test_deprecated_unit_of_measurement(hass, caplog, enable_custom_integr
 async def test_datetime_conversion(hass, caplog, enable_custom_integrations):
     """Test conversion of datetime."""
     test_timestamp = datetime(2017, 12, 19, 18, 29, 42, tzinfo=timezone.utc)
+    test_local_timestamp = test_timestamp.astimezone(
+        dt_util.get_time_zone("Europe/Amsterdam")
+    )
     test_date = date(2017, 12, 19)
     platform = getattr(hass.components, "test.sensor")
     platform.init(empty=True)

@@ -132,6 +135,11 @@ async def test_datetime_conversion(hass, caplog, enable_custom_integrations):
     platform.ENTITIES["3"] = platform.MockSensor(
         name="Test", native_value=None, device_class=DEVICE_CLASS_DATE
     )
+    platform.ENTITIES["4"] = platform.MockSensor(
+        name="Test",
+        native_value=test_local_timestamp,
+        device_class=DEVICE_CLASS_TIMESTAMP,
+    )

     assert await async_setup_component(hass, "sensor", {"sensor": {"platform": "test"}})
     await hass.async_block_till_done()

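The new ENTITIES["4"] sensor is fed the same instant expressed in Europe/Amsterdam local time. Converting an aware datetime to another zone and back does not change the instant, which is why the next hunk can assert plain test_timestamp.isoformat(). A standalone sketch using zoneinfo in place of dt_util.get_time_zone (an assumption made only to keep the example self-contained):

from datetime import datetime, timezone
from zoneinfo import ZoneInfo  # Python 3.9+ stdlib; the test itself uses dt_util.get_time_zone

test_timestamp = datetime(2017, 12, 19, 18, 29, 42, tzinfo=timezone.utc)
test_local_timestamp = test_timestamp.astimezone(ZoneInfo("Europe/Amsterdam"))

print(test_local_timestamp)                    # 2017-12-19 19:29:42+01:00
print(test_local_timestamp == test_timestamp)  # True -> same instant
print(test_local_timestamp.astimezone(timezone.utc).isoformat())
# -> 2017-12-19T18:29:42+00:00
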
@@ -148,16 +156,58 @@ async def test_datetime_conversion(hass, caplog, enable_custom_integrations):
     state = hass.states.get(platform.ENTITIES["3"].entity_id)
     assert state.state == STATE_UNKNOWN

+    state = hass.states.get(platform.ENTITIES["4"].entity_id)
+    assert state.state == test_timestamp.isoformat()
+

 @pytest.mark.parametrize(
-    "device_class,native_value",
+    "device_class,native_value,state_value",
     [
-        (DEVICE_CLASS_DATE, "2021-11-09"),
-        (DEVICE_CLASS_TIMESTAMP, "2021-01-09T12:00:00+00:00"),
+        (DEVICE_CLASS_DATE, "2021-11-09", "2021-11-09"),
+        (
+            DEVICE_CLASS_DATE,
+            "2021-01-09T12:00:00+00:00",
+            "2021-01-09",
+        ),
+        (
+            DEVICE_CLASS_DATE,
+            "2021-01-09T00:00:00+01:00",
+            "2021-01-08",
+        ),
+        (
+            DEVICE_CLASS_TIMESTAMP,
+            "2021-01-09T12:00:00+00:00",
+            "2021-01-09T12:00:00+00:00",
+        ),
+        (
+            DEVICE_CLASS_TIMESTAMP,
+            "2021-01-09 12:00:00+00:00",
+            "2021-01-09T12:00:00+00:00",
+        ),
+        (
+            DEVICE_CLASS_TIMESTAMP,
+            "2021-01-09T12:00:00+04:00",
+            "2021-01-09T08:00:00+00:00",
+        ),
+        (
+            DEVICE_CLASS_TIMESTAMP,
+            "2021-01-09 12:00:00+01:00",
+            "2021-01-09T11:00:00+00:00",
+        ),
+        (
+            DEVICE_CLASS_TIMESTAMP,
+            "2021-01-09 12:00:00",
+            "2021-01-09T12:00:00",
+        ),
+        (
+            DEVICE_CLASS_TIMESTAMP,
+            "2021-01-09T12:00:00",
+            "2021-01-09T12:00:00",
+        ),
     ],
 )
 async def test_deprecated_datetime_str(
-    hass, caplog, enable_custom_integrations, device_class, native_value
+    hass, caplog, enable_custom_integrations, device_class, native_value, state_value
 ):
     """Test warning on deprecated str for a date(time) value."""
     platform = getattr(hass.components, "test.sensor")

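One parametrized case deserves a remark: with DEVICE_CLASS_DATE, the string "2021-01-09T00:00:00+01:00" is expected to yield "2021-01-08". That follows from the order of operations in the sensor hunk above: the parsed value is converted to UTC first (2021-01-08 23:00) and only then reduced to a date. Stdlib equivalent:

from datetime import datetime, timezone

value = datetime.fromisoformat("2021-01-09T00:00:00+01:00")
print(value.astimezone(timezone.utc).date().isoformat())
# -> 2021-01-08
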
@@ -171,9 +221,29 @@ async def test_deprecated_datetime_str(
     await hass.async_block_till_done()

     state = hass.states.get(entity0.entity_id)
-    assert state.state == native_value
+    assert state.state == state_value
     assert (
         "is providing a string for its state, while the device class is "
         f"'{device_class}', this is not valid and will be unsupported "
         "from Home Assistant 2022.2."
     ) in caplog.text
+
+
+async def test_reject_timezoneless_datetime_str(
+    hass, caplog, enable_custom_integrations
+):
+    """Test rejection of timezone-less datetime objects as timestamp."""
+    test_timestamp = datetime(2017, 12, 19, 18, 29, 42, tzinfo=None)
+    platform = getattr(hass.components, "test.sensor")
+    platform.init(empty=True)
+    platform.ENTITIES["0"] = platform.MockSensor(
+        name="Test", native_value=test_timestamp, device_class=DEVICE_CLASS_TIMESTAMP
+    )
+
+    assert await async_setup_component(hass, "sensor", {"sensor": {"platform": "test"}})
+    await hass.async_block_till_done()
+
+    assert (
+        "Invalid datetime: sensor.test provides state '2017-12-19 18:29:42', "
+        "which is missing timezone information"
+    ) in caplog.text