Add schedule helper (#76566)
Co-authored-by: Paulus Schoutsen <balloob@gmail.com>
commit f0827a20c3 · parent ebbff7b60e
15 changed files with 859 additions and 0 deletions
@@ -213,6 +213,7 @@ homeassistant.components.rpi_power.*
 homeassistant.components.rtsp_to_webrtc.*
 homeassistant.components.samsungtv.*
 homeassistant.components.scene.*
+homeassistant.components.schedule.*
 homeassistant.components.select.*
 homeassistant.components.sensibo.*
 homeassistant.components.sensor.*
@@ -921,6 +921,8 @@ build.json @home-assistant/supervisor
 /tests/components/samsungtv/ @chemelli74 @epenet
 /homeassistant/components/scene/ @home-assistant/core
 /tests/components/scene/ @home-assistant/core
+/homeassistant/components/schedule/ @home-assistant/core
+/tests/components/schedule/ @home-assistant/core
 /homeassistant/components/schluter/ @prairieapps
 /homeassistant/components/scrape/ @fabaff
 /tests/components/scrape/ @fabaff
@@ -27,6 +27,7 @@
 "network",
 "person",
 "scene",
+"schedule",
 "script",
 "ssdp",
 "sun",
homeassistant/components/schedule/__init__.py (new file)
@@ -0,0 +1,314 @@
"""Support for schedules in Home Assistant."""
from __future__ import annotations

from collections.abc import Callable
from datetime import datetime, timedelta
import itertools
import logging
from typing import Literal

import voluptuous as vol

from homeassistant.const import (
    ATTR_EDITABLE,
    CONF_ICON,
    CONF_ID,
    CONF_NAME,
    SERVICE_RELOAD,
    STATE_OFF,
    STATE_ON,
)
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.helpers.collection import (
    IDManager,
    StorageCollection,
    StorageCollectionWebsocket,
    YamlCollection,
    sync_entity_lifecycle,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.event import async_track_point_in_utc_time
from homeassistant.helpers.integration_platform import (
    async_process_integration_platform_for_component,
)
from homeassistant.helpers.service import async_register_admin_service
from homeassistant.helpers.storage import Store
from homeassistant.helpers.typing import ConfigType
from homeassistant.util import dt as dt_util

from .const import (
    ATTR_NEXT_EVENT,
    CONF_ALL_DAYS,
    CONF_FROM,
    CONF_TO,
    DOMAIN,
    LOGGER,
    WEEKDAY_TO_CONF,
)

STORAGE_VERSION = 1
STORAGE_VERSION_MINOR = 1


def valid_schedule(schedule: list[dict[str, str]]) -> list[dict[str, str]]:
    """Validate the schedule of time ranges.

    Ensure they have no overlap and the end time is greater than the start time.
    """
    # Empty schedule is valid
    if not schedule:
        return schedule

    # Sort the schedule by start times
    schedule = sorted(schedule, key=lambda time_range: time_range[CONF_FROM])

    # Check if the start time of the next event is before the end time of the previous event
    previous_to = None
    for time_range in schedule:
        if time_range[CONF_FROM] >= time_range[CONF_TO]:
            raise vol.Invalid(
                f"Invalid time range, from {time_range[CONF_FROM]} is after {time_range[CONF_TO]}"
            )

        # Check if the from time of the event is after the to time of the previous event
        if previous_to is not None and previous_to > time_range[CONF_FROM]:  # type: ignore[unreachable]
            raise vol.Invalid("Overlapping times found in schedule")

        previous_to = time_range[CONF_TO]

    return schedule


BASE_SCHEMA = {
    vol.Required(CONF_NAME): vol.All(str, vol.Length(min=1)),
    vol.Optional(CONF_ICON): cv.icon,
}

TIME_RANGE_SCHEMA = {
    vol.Required(CONF_FROM): cv.time,
    vol.Required(CONF_TO): cv.time,
}
STORAGE_TIME_RANGE_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_FROM): vol.All(cv.time, vol.Coerce(str)),
        vol.Required(CONF_TO): vol.All(cv.time, vol.Coerce(str)),
    }
)

SCHEDULE_SCHEMA = {
    vol.Optional(day, default=[]): vol.All(
        cv.ensure_list, [TIME_RANGE_SCHEMA], valid_schedule
    )
    for day in CONF_ALL_DAYS
}
STORAGE_SCHEDULE_SCHEMA = {
    vol.Optional(day, default=[]): vol.All(
        cv.ensure_list, [TIME_RANGE_SCHEMA], valid_schedule, [STORAGE_TIME_RANGE_SCHEMA]
    )
    for day in CONF_ALL_DAYS
}


CONFIG_SCHEMA = vol.Schema(
    {DOMAIN: cv.schema_with_slug_keys(vol.All(BASE_SCHEMA | SCHEDULE_SCHEMA))},
    extra=vol.ALLOW_EXTRA,
)
STORAGE_SCHEMA = vol.Schema(
    {vol.Required(CONF_ID): cv.string} | BASE_SCHEMA | SCHEDULE_SCHEMA
)


async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up the schedule integration."""
    component = EntityComponent(LOGGER, DOMAIN, hass)

    # Process integration platforms right away since
    # we will create entities before firing EVENT_COMPONENT_LOADED
    await async_process_integration_platform_for_component(hass, DOMAIN)

    id_manager = IDManager()

    yaml_collection = YamlCollection(LOGGER, id_manager)
    sync_entity_lifecycle(
        hass, DOMAIN, DOMAIN, component, yaml_collection, Schedule.from_yaml
    )

    storage_collection = ScheduleStorageCollection(
        Store(
            hass,
            key=DOMAIN,
            version=STORAGE_VERSION,
            minor_version=STORAGE_VERSION_MINOR,
        ),
        logging.getLogger(f"{__name__}.storage_collection"),
        id_manager,
    )
    sync_entity_lifecycle(hass, DOMAIN, DOMAIN, component, storage_collection, Schedule)

    await yaml_collection.async_load(
        [{CONF_ID: id_, **cfg} for id_, cfg in config.get(DOMAIN, {}).items()]
    )
    await storage_collection.async_load()

    StorageCollectionWebsocket(
        storage_collection,
        DOMAIN,
        DOMAIN,
        BASE_SCHEMA | STORAGE_SCHEDULE_SCHEMA,
        BASE_SCHEMA | STORAGE_SCHEDULE_SCHEMA,
    ).async_setup(hass)

    async def reload_service_handler(service_call: ServiceCall) -> None:
        """Reload yaml entities."""
        conf = await component.async_prepare_reload(skip_reset=True)
        if conf is None:
            conf = {DOMAIN: {}}
        await yaml_collection.async_load(
            [{CONF_ID: id_, **cfg} for id_, cfg in conf.get(DOMAIN, {}).items()]
        )

    async_register_admin_service(
        hass,
        DOMAIN,
        SERVICE_RELOAD,
        reload_service_handler,
    )

    return True


class ScheduleStorageCollection(StorageCollection):
    """Schedules stored in storage."""

    SCHEMA = vol.Schema(BASE_SCHEMA | STORAGE_SCHEDULE_SCHEMA)

    async def _process_create_data(self, data: dict) -> dict:
        """Validate the config is valid."""
        self.SCHEMA(data)
        return data

    @callback
    def _get_suggested_id(self, info: dict) -> str:
        """Suggest an ID based on the config."""
        name: str = info[CONF_NAME]
        return name

    async def _update_data(self, data: dict, update_data: dict) -> dict:
        """Return a new updated data object."""
        self.SCHEMA(update_data)
        return data | update_data

    async def _async_load_data(self) -> dict | None:
        """Load the data."""
        if data := await super()._async_load_data():
            data["items"] = [STORAGE_SCHEMA(item) for item in data["items"]]
        return data


class Schedule(Entity):
    """Schedule entity."""

    _attr_has_entity_name = True
    _attr_should_poll = False
    _attr_state: Literal["on", "off"]
    _config: ConfigType
    _next: datetime
    _unsub_update: Callable[[], None] | None = None

    def __init__(self, config: ConfigType, editable: bool = True) -> None:
        """Initialize a schedule."""
        self._config = STORAGE_SCHEMA(config)
        self._attr_capability_attributes = {ATTR_EDITABLE: editable}
        self._attr_icon = self._config.get(CONF_ICON)
        self._attr_name = self._config[CONF_NAME]
        self._attr_unique_id = self._config[CONF_ID]

    @classmethod
    def from_yaml(cls, config: ConfigType) -> Schedule:
        """Return entity instance initialized from yaml storage."""
        schedule = cls(config, editable=False)
        schedule.entity_id = f"{DOMAIN}.{config[CONF_ID]}"
        return schedule

    async def async_update_config(self, config: ConfigType) -> None:
        """Handle when the config is updated."""
        self._config = STORAGE_SCHEMA(config)
        self._attr_icon = config.get(CONF_ICON)
        self._attr_name = config[CONF_NAME]
        self._clean_up_listener()
        self._update()

    @callback
    def _clean_up_listener(self) -> None:
        """Remove the update timer."""
        if self._unsub_update is not None:
            self._unsub_update()
            self._unsub_update = None

    async def async_added_to_hass(self) -> None:
        """Run when entity about to be added to hass."""
        self.async_on_remove(self._clean_up_listener)
        self._update()

    @callback
    def _update(self, _: datetime | None = None) -> None:
        """Update the states of the schedule."""
        now = dt_util.now()
        todays_schedule = self._config.get(WEEKDAY_TO_CONF[now.weekday()], [])

        # Determine current schedule state
        self._attr_state = next(
            (
                STATE_ON
                for time_range in todays_schedule
                if time_range[CONF_FROM] <= now.time() <= time_range[CONF_TO]
            ),
            STATE_OFF,
        )

        # Find next event in the schedule, loop over each day (starting with
        # the current day) until the next event has been found.
        next_event = None
        for day in range(7):
            day_schedule = self._config.get(
                WEEKDAY_TO_CONF[(now.weekday() + day) % 7], []
            )
            times = sorted(
                itertools.chain(
                    *[
                        [time_range[CONF_FROM], time_range[CONF_TO]]
                        for time_range in day_schedule
                    ]
                )
            )

            if next_event := next(
                (
                    possible_next_event
                    for time in times
                    if (
                        possible_next_event := (
                            datetime.combine(now.date(), time, tzinfo=now.tzinfo)
                            + timedelta(days=day)
                        )
                    )
                    > now
                ),
                None,
            ):
                # We have found the next event in this day, stop searching.
                break

        self._attr_extra_state_attributes = {
            ATTR_NEXT_EVENT: next_event,
        }
        self.async_write_ha_state()

        if next_event:
            self._unsub_update = async_track_point_in_utc_time(
                self.hass,
                self._update,
                next_event,
            )
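Editor's note: a minimal usage sketch, not part of this commit, assuming the module above is importable; it exercises valid_schedule directly and shows (as a comment) the configuration.yaml shape accepted by CONFIG_SCHEMA, reusing the made-up "party_mode" example from the tests further down.

# Sketch only: call the new validator directly with plain "HH:MM:SS" strings.
import voluptuous as vol

from homeassistant.components.schedule import valid_schedule

# configuration.yaml equivalent accepted by CONFIG_SCHEMA:
#
# schedule:
#   party_mode:
#     name: "Party mode"
#     icon: "mdi:party-popper"
#     monday:
#       - from: "12:00:00"
#         to: "14:00:00"

# Non-overlapping ranges pass through (after being sorted by start time).
valid_schedule(
    [
        {"from": "12:00:00", "to": "14:00:00"},
        {"from": "17:00:00", "to": "23:59:59"},
    ]
)

# An inverted range is rejected.
try:
    valid_schedule([{"from": "06:00:00", "to": "05:00:00"}])
except vol.Invalid as err:
    print(err)  # Invalid time range, from 06:00:00 is after 05:00:00

# Overlapping ranges are rejected.
try:
    valid_schedule(
        [
            {"from": "06:00:00", "to": "07:00:00"},
            {"from": "06:59:00", "to": "08:00:00"},
        ]
    )
except vol.Invalid as err:
    print(err)  # Overlapping times found in schedule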
homeassistant/components/schedule/const.py (new file)
@@ -0,0 +1,37 @@
"""Constants for the schedule integration."""
import logging
from typing import Final

DOMAIN: Final = "schedule"
LOGGER = logging.getLogger(__package__)

CONF_FRIDAY: Final = "friday"
CONF_FROM: Final = "from"
CONF_MONDAY: Final = "monday"
CONF_SATURDAY: Final = "saturday"
CONF_SUNDAY: Final = "sunday"
CONF_THURSDAY: Final = "thursday"
CONF_TO: Final = "to"
CONF_TUESDAY: Final = "tuesday"
CONF_WEDNESDAY: Final = "wednesday"
CONF_ALL_DAYS: Final = {
    CONF_MONDAY,
    CONF_TUESDAY,
    CONF_WEDNESDAY,
    CONF_THURSDAY,
    CONF_FRIDAY,
    CONF_SATURDAY,
    CONF_SUNDAY,
}

ATTR_NEXT_EVENT: Final = "next_event"

WEEKDAY_TO_CONF: Final = {
    0: CONF_MONDAY,
    1: CONF_TUESDAY,
    2: CONF_WEDNESDAY,
    3: CONF_THURSDAY,
    4: CONF_FRIDAY,
    5: CONF_SATURDAY,
    6: CONF_SUNDAY,
}
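Editor's note: a small sketch, not part of this commit, of how WEEKDAY_TO_CONF lines up with Python's datetime.weekday(), which the Schedule entity uses to pick the day's time ranges; the date used here is only illustrative.

# Sketch only: datetime.weekday() returns 0 for Monday through 6 for Sunday,
# exactly the key space of WEEKDAY_TO_CONF above.
from datetime import datetime

from homeassistant.components.schedule.const import WEEKDAY_TO_CONF

assert WEEKDAY_TO_CONF[datetime(2022, 8, 12).weekday()] == "friday"  # 2022-08-12 was a Friday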
homeassistant/components/schedule/manifest.json (new file)
@@ -0,0 +1,8 @@
{
  "domain": "schedule",
  "integration_type": "helper",
  "name": "Schedule",
  "documentation": "https://www.home-assistant.io/integrations/schedule",
  "codeowners": ["@home-assistant/core"],
  "quality_scale": "internal"
}
homeassistant/components/schedule/recorder.py (new file)
@@ -0,0 +1,16 @@
"""Integration platform for recorder."""
from __future__ import annotations

from homeassistant.const import ATTR_EDITABLE
from homeassistant.core import HomeAssistant, callback

from .const import ATTR_NEXT_EVENT


@callback
def exclude_attributes(hass: HomeAssistant) -> set[str]:
    """Exclude configuration to be recorded in the database."""
    return {
        ATTR_EDITABLE,
        ATTR_NEXT_EVENT,
    }
homeassistant/components/schedule/services.yaml (new file)
@@ -0,0 +1,3 @@
reload:
  name: Reload
  description: Reload the schedule configuration
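Editor's note: a minimal sketch, not part of this commit, of invoking the reload service registered above from inside a running Home Assistant instance; the wrapping coroutine and the hass object are assumed, and the service name matches services.yaml and the test below.

# Sketch only: ask the schedule integration to re-read configuration.yaml entries.
from homeassistant.const import SERVICE_RELOAD
from homeassistant.core import HomeAssistant


async def reload_schedules(hass: HomeAssistant) -> None:
    """Call schedule.reload and wait for it to finish."""
    await hass.services.async_call("schedule", SERVICE_RELOAD, blocking=True)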
homeassistant/components/schedule/strings.json (new file)
@@ -0,0 +1,9 @@
{
  "title": "Schedule",
  "state": {
    "_": {
      "off": "[%key:common::state::off%]",
      "on": "[%key:common::state::on%]"
    }
  }
}
homeassistant/components/schedule/translations/en.json (new file)
@@ -0,0 +1,9 @@
{
  "state": {
    "_": {
      "off": "Off",
      "on": "On"
    }
  },
  "title": "Schedule"
}
mypy.ini
@@ -2066,6 +2066,17 @@ no_implicit_optional = true
 warn_return_any = true
 warn_unreachable = true
 
+[mypy-homeassistant.components.schedule.*]
+check_untyped_defs = true
+disallow_incomplete_defs = true
+disallow_subclassing_any = true
+disallow_untyped_calls = true
+disallow_untyped_decorators = true
+disallow_untyped_defs = true
+no_implicit_optional = true
+warn_return_any = true
+warn_unreachable = true
+
 [mypy-homeassistant.components.select.*]
 check_untyped_defs = true
 disallow_incomplete_defs = true
@@ -83,6 +83,7 @@ NO_IOT_CLASS = [
     "raspberry_pi",
     "repairs",
     "safe_mode",
+    "schedule",
     "script",
     "search",
     "system_health",
tests/components/schedule/__init__.py (new file)
@@ -0,0 +1 @@
"""Tests for the schedule integration."""
tests/components/schedule/test_init.py (new file)
@@ -0,0 +1,376 @@
"""Test for the Schedule integration."""
from __future__ import annotations

from collections.abc import Awaitable, Callable, Coroutine
from typing import Any
from unittest.mock import patch

from aiohttp import ClientWebSocketResponse
from freezegun import freeze_time
import pytest

from homeassistant.components.schedule import STORAGE_VERSION, STORAGE_VERSION_MINOR
from homeassistant.components.schedule.const import (
    ATTR_NEXT_EVENT,
    CONF_FRIDAY,
    CONF_FROM,
    CONF_MONDAY,
    CONF_SATURDAY,
    CONF_SUNDAY,
    CONF_THURSDAY,
    CONF_TO,
    CONF_TUESDAY,
    CONF_WEDNESDAY,
    DOMAIN,
)
from homeassistant.const import (
    ATTR_EDITABLE,
    ATTR_FRIENDLY_NAME,
    ATTR_ICON,
    ATTR_NAME,
    CONF_ICON,
    CONF_ID,
    CONF_NAME,
    SERVICE_RELOAD,
    STATE_OFF,
    STATE_ON,
)
from homeassistant.core import Context, HomeAssistant
from homeassistant.helpers import entity_registry as er
from homeassistant.setup import async_setup_component

from tests.common import MockUser, async_fire_time_changed


@pytest.fixture
def schedule_setup(
    hass: HomeAssistant, hass_storage: dict[str, Any]
) -> Callable[..., Coroutine[Any, Any, bool]]:
    """Schedule setup."""

    async def _schedule_setup(
        items: dict[str, Any] | None = None,
        config: dict[str, Any] | None = None,
    ) -> bool:
        if items is None:
            hass_storage[DOMAIN] = {
                "key": DOMAIN,
                "version": STORAGE_VERSION,
                "minor_version": STORAGE_VERSION_MINOR,
                "data": {
                    "items": [
                        {
                            CONF_ID: "from_storage",
                            CONF_NAME: "from storage",
                            CONF_ICON: "mdi:party-popper",
                            CONF_FRIDAY: [
                                {CONF_FROM: "17:00:00", CONF_TO: "23:59:59"},
                            ],
                            CONF_SATURDAY: [
                                {CONF_FROM: "00:00:00", CONF_TO: "23:59:59"},
                            ],
                            CONF_SUNDAY: [
                                {CONF_FROM: "00:00:00", CONF_TO: "23:59:59"},
                            ],
                        }
                    ]
                },
            }
        else:
            hass_storage[DOMAIN] = {
                "key": DOMAIN,
                "version": 1,
                "minor_version": STORAGE_VERSION_MINOR,
                "data": {"items": items},
            }
        if config is None:
            config = {
                DOMAIN: {
                    "from_yaml": {
                        CONF_NAME: "from yaml",
                        CONF_ICON: "mdi:party-pooper",
                        CONF_MONDAY: [{CONF_FROM: "00:00:00", CONF_TO: "23:59:59"}],
                        CONF_TUESDAY: [{CONF_FROM: "00:00:00", CONF_TO: "23:59:59"}],
                        CONF_WEDNESDAY: [{CONF_FROM: "00:00:00", CONF_TO: "23:59:59"}],
                        CONF_THURSDAY: [{CONF_FROM: "00:00:00", CONF_TO: "23:59:59"}],
                        CONF_FRIDAY: [{CONF_FROM: "00:00:00", CONF_TO: "23:59:59"}],
                        CONF_SATURDAY: [{CONF_FROM: "00:00:00", CONF_TO: "23:59:59"}],
                        CONF_SUNDAY: [{CONF_FROM: "00:00:00", CONF_TO: "23:59:59"}],
                    }
                }
            }
        return await async_setup_component(hass, DOMAIN, config)

    return _schedule_setup


async def test_invalid_config(hass: HomeAssistant) -> None:
    """Test invalid configs."""
    invalid_configs = [
        None,
        {},
        {"name with space": None},
    ]

    for cfg in invalid_configs:
        assert not await async_setup_component(hass, DOMAIN, {DOMAIN: cfg})


@pytest.mark.parametrize(
    "schedule,error",
    (
        (
            [
                {CONF_FROM: "00:00:00", CONF_TO: "23:59:59"},
                {CONF_FROM: "07:00:00", CONF_TO: "08:00:00"},
            ],
            "Overlapping times found in schedule",
        ),
        (
            [
                {CONF_FROM: "07:00:00", CONF_TO: "08:00:00"},
                {CONF_FROM: "07:00:00", CONF_TO: "08:00:00"},
            ],
            "Overlapping times found in schedule",
        ),
        (
            [
                {CONF_FROM: "07:59:00", CONF_TO: "09:00:00"},
                {CONF_FROM: "07:00:00", CONF_TO: "08:00:00"},
            ],
            "Overlapping times found in schedule",
        ),
        (
            [
                {CONF_FROM: "06:00:00", CONF_TO: "07:00:00"},
                {CONF_FROM: "06:59:00", CONF_TO: "08:00:00"},
            ],
            "Overlapping times found in schedule",
        ),
        (
            [
                {CONF_FROM: "06:00:00", CONF_TO: "05:00:00"},
            ],
            "Invalid time range, from 06:00:00 is after 05:00:00",
        ),
    ),
)
async def test_invalid_schedules(
    hass: HomeAssistant,
    schedule_setup: Callable[..., Coroutine[Any, Any, bool]],
    caplog: pytest.LogCaptureFixture,
    schedule: list[dict[str, str]],
    error: str,
) -> None:
    """Test overlapping time ranges invalidate."""
    assert not await schedule_setup(
        config={
            DOMAIN: {
                "from_yaml": {
                    CONF_NAME: "from yaml",
                    CONF_ICON: "mdi:party-pooper",
                    CONF_SUNDAY: schedule,
                }
            }
        }
    )
    assert error in caplog.text


async def test_setup_no_config(hass: HomeAssistant, hass_admin_user: MockUser) -> None:
    """Test component setup with no config."""
    count_start = len(hass.states.async_entity_ids())
    assert await async_setup_component(hass, DOMAIN, {})

    with patch(
        "homeassistant.config.load_yaml_config_file", autospec=True, return_value={}
    ):
        await hass.services.async_call(
            DOMAIN,
            SERVICE_RELOAD,
            blocking=True,
            context=Context(user_id=hass_admin_user.id),
        )
        await hass.async_block_till_done()

    assert count_start == len(hass.states.async_entity_ids())


@pytest.mark.freeze_time("2022-08-10 20:10:00-07:00")
async def test_load(
    hass: HomeAssistant,
    schedule_setup: Callable[..., Coroutine[Any, Any, bool]],
) -> None:
    """Test set up from storage and YAML."""
    assert await schedule_setup()

    state = hass.states.get(f"{DOMAIN}.from_storage")
    assert state
    assert state.state == STATE_OFF
    assert state.attributes[ATTR_FRIENDLY_NAME] == "from storage"
    assert state.attributes[ATTR_EDITABLE] is True
    assert state.attributes[ATTR_ICON] == "mdi:party-popper"
    assert state.attributes[ATTR_NEXT_EVENT].isoformat() == "2022-08-12T17:00:00-07:00"

    state = hass.states.get(f"{DOMAIN}.from_yaml")
    assert state
    assert state.state == STATE_ON
    assert state.attributes[ATTR_FRIENDLY_NAME] == "from yaml"
    assert state.attributes[ATTR_EDITABLE] is False
    assert state.attributes[ATTR_ICON] == "mdi:party-pooper"
    assert state.attributes[ATTR_NEXT_EVENT].isoformat() == "2022-08-10T23:59:59-07:00"


async def test_schedule_updates(
    hass: HomeAssistant,
    schedule_setup: Callable[..., Coroutine[Any, Any, bool]],
) -> None:
    """Test the schedule updates when time changes."""
    with freeze_time("2022-08-10 20:10:00-07:00"):
        assert await schedule_setup()

    state = hass.states.get(f"{DOMAIN}.from_storage")
    assert state
    assert state.state == STATE_OFF
    assert state.attributes[ATTR_NEXT_EVENT].isoformat() == "2022-08-12T17:00:00-07:00"

    with freeze_time(state.attributes[ATTR_NEXT_EVENT]):
        async_fire_time_changed(hass, state.attributes[ATTR_NEXT_EVENT])
        await hass.async_block_till_done()

    state = hass.states.get(f"{DOMAIN}.from_storage")
    assert state
    assert state.state == STATE_ON
    assert state.attributes[ATTR_NEXT_EVENT].isoformat() == "2022-08-12T23:59:59-07:00"


async def test_ws_list(
    hass: HomeAssistant,
    hass_ws_client: Callable[[HomeAssistant], Awaitable[ClientWebSocketResponse]],
    schedule_setup: Callable[..., Coroutine[Any, Any, bool]],
) -> None:
    """Test listing via WS."""
    assert await schedule_setup()

    client = await hass_ws_client(hass)

    await client.send_json({"id": 1, "type": f"{DOMAIN}/list"})
    resp = await client.receive_json()
    assert resp["success"]

    result = {item["id"]: item for item in resp["result"]}

    assert len(result) == 1
    assert result["from_storage"][ATTR_NAME] == "from storage"
    assert "from_yaml" not in result


async def test_ws_delete(
    hass: HomeAssistant,
    hass_ws_client: Callable[[HomeAssistant], Awaitable[ClientWebSocketResponse]],
    schedule_setup: Callable[..., Coroutine[Any, Any, bool]],
) -> None:
    """Test WS delete cleans up entity registry."""
    ent_reg = er.async_get(hass)

    assert await schedule_setup()

    state = hass.states.get("schedule.from_storage")
    assert state is not None
    assert ent_reg.async_get_entity_id(DOMAIN, DOMAIN, "from_storage") is not None

    client = await hass_ws_client(hass)
    await client.send_json(
        {"id": 1, "type": f"{DOMAIN}/delete", f"{DOMAIN}_id": "from_storage"}
    )
    resp = await client.receive_json()
    assert resp["success"]

    state = hass.states.get("schedule.from_storage")
    assert state is None
    assert ent_reg.async_get_entity_id(DOMAIN, DOMAIN, "from_storage") is None


@pytest.mark.freeze_time("2022-08-10 20:10:00-07:00")
async def test_update(
    hass: HomeAssistant,
    hass_ws_client: Callable[[HomeAssistant], Awaitable[ClientWebSocketResponse]],
    schedule_setup: Callable[..., Coroutine[Any, Any, bool]],
) -> None:
    """Test updating the schedule."""
    ent_reg = er.async_get(hass)

    assert await schedule_setup()

    state = hass.states.get("schedule.from_storage")
    assert state
    assert state.state == STATE_OFF
    assert state.attributes[ATTR_FRIENDLY_NAME] == "from storage"
    assert state.attributes[ATTR_ICON] == "mdi:party-popper"
    assert state.attributes[ATTR_NEXT_EVENT].isoformat() == "2022-08-12T17:00:00-07:00"
    assert ent_reg.async_get_entity_id(DOMAIN, DOMAIN, "from_storage") is not None

    client = await hass_ws_client(hass)

    await client.send_json(
        {
            "id": 1,
            "type": f"{DOMAIN}/update",
            f"{DOMAIN}_id": "from_storage",
            CONF_NAME: "Party pooper",
            CONF_ICON: "mdi:party-pooper",
            CONF_MONDAY: [],
            CONF_TUESDAY: [],
            CONF_WEDNESDAY: [{CONF_FROM: "17:00:00", CONF_TO: "23:59:59"}],
            CONF_THURSDAY: [],
            CONF_FRIDAY: [],
            CONF_SATURDAY: [],
            CONF_SUNDAY: [],
        }
    )
    resp = await client.receive_json()
    assert resp["success"]

    state = hass.states.get("schedule.from_storage")
    assert state
    assert state.state == STATE_ON
    assert state.attributes[ATTR_FRIENDLY_NAME] == "Party pooper"
    assert state.attributes[ATTR_ICON] == "mdi:party-pooper"
    assert state.attributes[ATTR_NEXT_EVENT].isoformat() == "2022-08-10T23:59:59-07:00"


@pytest.mark.freeze_time("2022-08-11 8:52:00-07:00")
async def test_ws_create(
    hass: HomeAssistant,
    hass_ws_client: Callable[[HomeAssistant], Awaitable[ClientWebSocketResponse]],
    schedule_setup: Callable[..., Coroutine[Any, Any, bool]],
) -> None:
    """Test create WS."""
    ent_reg = er.async_get(hass)

    assert await schedule_setup(items=[])

    state = hass.states.get("schedule.party_mode")
    assert state is None
    assert ent_reg.async_get_entity_id(DOMAIN, DOMAIN, "party_mode") is None

    client = await hass_ws_client(hass)
    await client.send_json(
        {
            "id": 1,
            "type": f"{DOMAIN}/create",
            "name": "Party mode",
            "icon": "mdi:party-popper",
            "monday": [{"from": "12:00:00", "to": "14:00:00"}],
        }
    )
    resp = await client.receive_json()
    assert resp["success"]

    state = hass.states.get("schedule.party_mode")
    assert state
    assert state.state == STATE_OFF
    assert state.attributes[ATTR_FRIENDLY_NAME] == "Party mode"
    assert state.attributes[ATTR_EDITABLE] is True
    assert state.attributes[ATTR_ICON] == "mdi:party-popper"
    assert state.attributes[ATTR_NEXT_EVENT].isoformat() == "2022-08-15T12:00:00-07:00"
tests/components/schedule/test_recorder.py (new file)
@@ -0,0 +1,70 @@
"""The tests for recorder platform."""
from __future__ import annotations

from datetime import timedelta

from homeassistant.components.recorder.db_schema import StateAttributes, States
from homeassistant.components.recorder.util import session_scope
from homeassistant.components.schedule.const import ATTR_NEXT_EVENT, DOMAIN
from homeassistant.const import ATTR_EDITABLE, ATTR_FRIENDLY_NAME, ATTR_ICON
from homeassistant.core import HomeAssistant, State
from homeassistant.setup import async_setup_component
from homeassistant.util import dt as dt_util

from tests.common import async_fire_time_changed
from tests.components.recorder.common import async_wait_recording_done


async def test_exclude_attributes(
    hass: HomeAssistant,
    recorder_mock: None,
    enable_custom_integrations: None,
) -> None:
    """Test attributes to be excluded."""
    assert await async_setup_component(
        hass,
        DOMAIN,
        {
            DOMAIN: {
                "test": {
                    "name": "Party mode",
                    "icon": "mdi:party-popper",
                    "monday": [{"from": "1:00", "to": "2:00"}],
                    "tuesday": [{"from": "2:00", "to": "3:00"}],
                    "wednesday": [{"from": "3:00", "to": "4:00"}],
                    "thursday": [{"from": "5:00", "to": "6:00"}],
                    "friday": [{"from": "7:00", "to": "8:00"}],
                    "saturday": [{"from": "9:00", "to": "10:00"}],
                    "sunday": [{"from": "11:00", "to": "12:00"}],
                }
            }
        },
    )

    state = hass.states.get("schedule.test")
    assert state
    assert state.attributes[ATTR_EDITABLE] is False
    assert state.attributes[ATTR_FRIENDLY_NAME]
    assert state.attributes[ATTR_ICON]
    assert state.attributes[ATTR_NEXT_EVENT]

    await hass.async_block_till_done()
    async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=5))
    await hass.async_block_till_done()
    await async_wait_recording_done(hass)

    def _fetch_states() -> list[State]:
        with session_scope(hass=hass) as session:
            native_states = []
            for db_state, db_state_attributes in session.query(States, StateAttributes):
                state = db_state.to_native()
                state.attributes = db_state_attributes.to_native()
                native_states.append(state)
            return native_states

    states: list[State] = await hass.async_add_executor_job(_fetch_states)
    assert len(states) == 1
    assert ATTR_EDITABLE not in states[0].attributes
    assert ATTR_FRIENDLY_NAME in states[0].attributes
    assert ATTR_ICON in states[0].attributes
    assert ATTR_NEXT_EVENT not in states[0].attributes