Compare commits
4 commits
dev
...
remove_bac
Author | SHA1 | Date | |
---|---|---|---|
|
9a3ebabf88 | ||
|
f99b319048 | ||
|
957ece747d | ||
|
325738829d |
89 changed files with 1232 additions and 1790 deletions
2
.github/workflows/builder.yml
vendored
2
.github/workflows/builder.yml
vendored
|
@ -10,7 +10,7 @@ on:
|
|||
|
||||
env:
|
||||
BUILD_TYPE: core
|
||||
DEFAULT_PYTHON: "3.13"
|
||||
DEFAULT_PYTHON: "3.12"
|
||||
PIP_TIMEOUT: 60
|
||||
UV_HTTP_TIMEOUT: 60
|
||||
UV_SYSTEM_PYTHON: "true"
|
||||
|
|
|
@ -40,8 +40,6 @@ build.json @home-assistant/supervisor
|
|||
# Integrations
|
||||
/homeassistant/components/abode/ @shred86
|
||||
/tests/components/abode/ @shred86
|
||||
/homeassistant/components/acaia/ @zweckj
|
||||
/tests/components/acaia/ @zweckj
|
||||
/homeassistant/components/accuweather/ @bieniu
|
||||
/tests/components/accuweather/ @bieniu
|
||||
/homeassistant/components/acmeda/ @atmurray
|
||||
|
@ -1489,8 +1487,8 @@ build.json @home-assistant/supervisor
|
|||
/tests/components/tedee/ @patrickhilker @zweckj
|
||||
/homeassistant/components/tellduslive/ @fredrike
|
||||
/tests/components/tellduslive/ @fredrike
|
||||
/homeassistant/components/template/ @PhracturedBlue @home-assistant/core
|
||||
/tests/components/template/ @PhracturedBlue @home-assistant/core
|
||||
/homeassistant/components/template/ @PhracturedBlue @tetienne @home-assistant/core
|
||||
/tests/components/template/ @PhracturedBlue @tetienne @home-assistant/core
|
||||
/homeassistant/components/tesla_fleet/ @Bre77
|
||||
/tests/components/tesla_fleet/ @Bre77
|
||||
/homeassistant/components/tesla_wall_connector/ @einarhauks
|
||||
|
|
10
build.yaml
10
build.yaml
|
@ -1,10 +1,10 @@
|
|||
image: ghcr.io/home-assistant/{arch}-homeassistant
|
||||
build_from:
|
||||
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2024.11.0
|
||||
armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2024.11.0
|
||||
armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2024.11.0
|
||||
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2024.11.0
|
||||
i386: ghcr.io/home-assistant/i386-homeassistant-base:2024.11.0
|
||||
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2024.06.1
|
||||
armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2024.06.1
|
||||
armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2024.06.1
|
||||
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2024.06.1
|
||||
i386: ghcr.io/home-assistant/i386-homeassistant-base:2024.06.1
|
||||
codenotary:
|
||||
signer: notary@home-assistant.io
|
||||
base_image: notary@home-assistant.io
|
||||
|
|
|
@ -515,7 +515,7 @@ async def async_from_config_dict(
|
|||
issue_registry.async_create_issue(
|
||||
hass,
|
||||
core.DOMAIN,
|
||||
f"python_version_{required_python_version}",
|
||||
"python_version",
|
||||
is_fixable=False,
|
||||
severity=issue_registry.IssueSeverity.WARNING,
|
||||
breaks_in_ha_version=REQUIRED_NEXT_PYTHON_HA_RELEASE,
|
||||
|
|
|
@ -1,29 +0,0 @@
|
|||
"""Initialize the Acaia component."""
|
||||
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .coordinator import AcaiaConfigEntry, AcaiaCoordinator
|
||||
|
||||
PLATFORMS = [
|
||||
Platform.BUTTON,
|
||||
]
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: AcaiaConfigEntry) -> bool:
|
||||
"""Set up acaia as config entry."""
|
||||
|
||||
coordinator = AcaiaCoordinator(hass, entry)
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
|
||||
entry.runtime_data = coordinator
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: AcaiaConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
|
@ -1,61 +0,0 @@
|
|||
"""Button entities for Acaia scales."""
|
||||
|
||||
from collections.abc import Callable, Coroutine
|
||||
from dataclasses import dataclass
|
||||
from typing import Any
|
||||
|
||||
from aioacaia.acaiascale import AcaiaScale
|
||||
|
||||
from homeassistant.components.button import ButtonEntity, ButtonEntityDescription
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
|
||||
from .coordinator import AcaiaConfigEntry
|
||||
from .entity import AcaiaEntity
|
||||
|
||||
|
||||
@dataclass(kw_only=True, frozen=True)
|
||||
class AcaiaButtonEntityDescription(ButtonEntityDescription):
|
||||
"""Description for acaia button entities."""
|
||||
|
||||
press_fn: Callable[[AcaiaScale], Coroutine[Any, Any, None]]
|
||||
|
||||
|
||||
BUTTONS: tuple[AcaiaButtonEntityDescription, ...] = (
|
||||
AcaiaButtonEntityDescription(
|
||||
key="tare",
|
||||
translation_key="tare",
|
||||
press_fn=lambda scale: scale.tare(),
|
||||
),
|
||||
AcaiaButtonEntityDescription(
|
||||
key="reset_timer",
|
||||
translation_key="reset_timer",
|
||||
press_fn=lambda scale: scale.reset_timer(),
|
||||
),
|
||||
AcaiaButtonEntityDescription(
|
||||
key="start_stop",
|
||||
translation_key="start_stop",
|
||||
press_fn=lambda scale: scale.start_stop_timer(),
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: AcaiaConfigEntry,
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up button entities and services."""
|
||||
|
||||
coordinator = entry.runtime_data
|
||||
async_add_entities(AcaiaButton(coordinator, description) for description in BUTTONS)
|
||||
|
||||
|
||||
class AcaiaButton(AcaiaEntity, ButtonEntity):
|
||||
"""Representation of an Acaia button."""
|
||||
|
||||
entity_description: AcaiaButtonEntityDescription
|
||||
|
||||
async def async_press(self) -> None:
|
||||
"""Handle the button press."""
|
||||
await self.entity_description.press_fn(self._scale)
|
|
@ -1,149 +0,0 @@
|
|||
"""Config flow for Acaia integration."""
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from aioacaia.exceptions import AcaiaDeviceNotFound, AcaiaError, AcaiaUnknownDevice
|
||||
from aioacaia.helpers import is_new_scale
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.bluetooth import (
|
||||
BluetoothServiceInfoBleak,
|
||||
async_discovered_service_info,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_ADDRESS, CONF_NAME
|
||||
from homeassistant.helpers.device_registry import format_mac
|
||||
from homeassistant.helpers.selector import (
|
||||
SelectOptionDict,
|
||||
SelectSelector,
|
||||
SelectSelectorConfig,
|
||||
SelectSelectorMode,
|
||||
)
|
||||
|
||||
from .const import CONF_IS_NEW_STYLE_SCALE, DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class AcaiaConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for acaia."""
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize the config flow."""
|
||||
self._discovered: dict[str, Any] = {}
|
||||
self._discovered_devices: dict[str, str] = {}
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle a flow initialized by the user."""
|
||||
|
||||
errors: dict[str, str] = {}
|
||||
|
||||
if user_input is not None:
|
||||
mac = format_mac(user_input[CONF_ADDRESS])
|
||||
try:
|
||||
is_new_style_scale = await is_new_scale(mac)
|
||||
except AcaiaDeviceNotFound:
|
||||
errors["base"] = "device_not_found"
|
||||
except AcaiaError:
|
||||
_LOGGER.exception("Error occurred while connecting to the scale")
|
||||
errors["base"] = "unknown"
|
||||
except AcaiaUnknownDevice:
|
||||
return self.async_abort(reason="unsupported_device")
|
||||
else:
|
||||
await self.async_set_unique_id(mac)
|
||||
self._abort_if_unique_id_configured()
|
||||
|
||||
if not errors:
|
||||
return self.async_create_entry(
|
||||
title=self._discovered_devices[user_input[CONF_ADDRESS]],
|
||||
data={
|
||||
CONF_ADDRESS: mac,
|
||||
CONF_IS_NEW_STYLE_SCALE: is_new_style_scale,
|
||||
},
|
||||
)
|
||||
|
||||
for device in async_discovered_service_info(self.hass):
|
||||
self._discovered_devices[device.address] = device.name
|
||||
|
||||
if not self._discovered_devices:
|
||||
return self.async_abort(reason="no_devices_found")
|
||||
|
||||
options = [
|
||||
SelectOptionDict(
|
||||
value=device_mac,
|
||||
label=f"{device_name} ({device_mac})",
|
||||
)
|
||||
for device_mac, device_name in self._discovered_devices.items()
|
||||
]
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_ADDRESS): SelectSelector(
|
||||
SelectSelectorConfig(
|
||||
options=options,
|
||||
mode=SelectSelectorMode.DROPDOWN,
|
||||
)
|
||||
)
|
||||
}
|
||||
),
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
async def async_step_bluetooth(
|
||||
self, discovery_info: BluetoothServiceInfoBleak
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle a discovered Bluetooth device."""
|
||||
|
||||
self._discovered[CONF_ADDRESS] = mac = format_mac(discovery_info.address)
|
||||
self._discovered[CONF_NAME] = discovery_info.name
|
||||
|
||||
await self.async_set_unique_id(mac)
|
||||
self._abort_if_unique_id_configured()
|
||||
|
||||
try:
|
||||
self._discovered[CONF_IS_NEW_STYLE_SCALE] = await is_new_scale(
|
||||
discovery_info.address
|
||||
)
|
||||
except AcaiaDeviceNotFound:
|
||||
_LOGGER.debug("Device not found during discovery")
|
||||
return self.async_abort(reason="device_not_found")
|
||||
except AcaiaError:
|
||||
_LOGGER.debug(
|
||||
"Error occurred while connecting to the scale during discovery",
|
||||
exc_info=True,
|
||||
)
|
||||
return self.async_abort(reason="unknown")
|
||||
except AcaiaUnknownDevice:
|
||||
_LOGGER.debug("Unsupported device during discovery")
|
||||
return self.async_abort(reason="unsupported_device")
|
||||
|
||||
return await self.async_step_bluetooth_confirm()
|
||||
|
||||
async def async_step_bluetooth_confirm(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle confirmation of Bluetooth discovery."""
|
||||
|
||||
if user_input is not None:
|
||||
return self.async_create_entry(
|
||||
title=self._discovered[CONF_NAME],
|
||||
data={
|
||||
CONF_ADDRESS: self._discovered[CONF_ADDRESS],
|
||||
CONF_IS_NEW_STYLE_SCALE: self._discovered[CONF_IS_NEW_STYLE_SCALE],
|
||||
},
|
||||
)
|
||||
|
||||
self.context["title_placeholders"] = placeholders = {
|
||||
CONF_NAME: self._discovered[CONF_NAME]
|
||||
}
|
||||
|
||||
self._set_confirm_only()
|
||||
return self.async_show_form(
|
||||
step_id="bluetooth_confirm",
|
||||
description_placeholders=placeholders,
|
||||
)
|
|
@ -1,4 +0,0 @@
|
|||
"""Constants for component."""
|
||||
|
||||
DOMAIN = "acaia"
|
||||
CONF_IS_NEW_STYLE_SCALE = "is_new_style_scale"
|
|
@ -1,86 +0,0 @@
|
|||
"""Coordinator for Acaia integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
from aioacaia.acaiascale import AcaiaScale
|
||||
from aioacaia.exceptions import AcaiaDeviceNotFound, AcaiaError
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_ADDRESS
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
|
||||
|
||||
from .const import CONF_IS_NEW_STYLE_SCALE
|
||||
|
||||
SCAN_INTERVAL = timedelta(seconds=15)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
type AcaiaConfigEntry = ConfigEntry[AcaiaCoordinator]
|
||||
|
||||
|
||||
class AcaiaCoordinator(DataUpdateCoordinator[None]):
|
||||
"""Class to handle fetching data from the scale."""
|
||||
|
||||
config_entry: AcaiaConfigEntry
|
||||
|
||||
def __init__(self, hass: HomeAssistant, entry: AcaiaConfigEntry) -> None:
|
||||
"""Initialize coordinator."""
|
||||
super().__init__(
|
||||
hass,
|
||||
_LOGGER,
|
||||
name="acaia coordinator",
|
||||
update_interval=SCAN_INTERVAL,
|
||||
config_entry=entry,
|
||||
)
|
||||
|
||||
self._scale = AcaiaScale(
|
||||
address_or_ble_device=entry.data[CONF_ADDRESS],
|
||||
name=entry.title,
|
||||
is_new_style_scale=entry.data[CONF_IS_NEW_STYLE_SCALE],
|
||||
notify_callback=self.async_update_listeners,
|
||||
)
|
||||
|
||||
@property
|
||||
def scale(self) -> AcaiaScale:
|
||||
"""Return the scale object."""
|
||||
return self._scale
|
||||
|
||||
async def _async_update_data(self) -> None:
|
||||
"""Fetch data."""
|
||||
|
||||
# scale is already connected, return
|
||||
if self._scale.connected:
|
||||
return
|
||||
|
||||
# scale is not connected, try to connect
|
||||
try:
|
||||
await self._scale.connect(setup_tasks=False)
|
||||
except (AcaiaDeviceNotFound, AcaiaError, TimeoutError) as ex:
|
||||
_LOGGER.debug(
|
||||
"Could not connect to scale: %s, Error: %s",
|
||||
self.config_entry.data[CONF_ADDRESS],
|
||||
ex,
|
||||
)
|
||||
self._scale.device_disconnected_handler(notify=False)
|
||||
return
|
||||
|
||||
# connected, set up background tasks
|
||||
if not self._scale.heartbeat_task or self._scale.heartbeat_task.done():
|
||||
self._scale.heartbeat_task = self.config_entry.async_create_background_task(
|
||||
hass=self.hass,
|
||||
target=self._scale.send_heartbeats(),
|
||||
name="acaia_heartbeat_task",
|
||||
)
|
||||
|
||||
if not self._scale.process_queue_task or self._scale.process_queue_task.done():
|
||||
self._scale.process_queue_task = (
|
||||
self.config_entry.async_create_background_task(
|
||||
hass=self.hass,
|
||||
target=self._scale.process_queue(),
|
||||
name="acaia_process_queue_task",
|
||||
)
|
||||
)
|
|
@ -1,40 +0,0 @@
|
|||
"""Base class for Acaia entities."""
|
||||
|
||||
from dataclasses import dataclass
|
||||
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity import EntityDescription
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import AcaiaCoordinator
|
||||
|
||||
|
||||
@dataclass
|
||||
class AcaiaEntity(CoordinatorEntity[AcaiaCoordinator]):
|
||||
"""Common elements for all entities."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: AcaiaCoordinator,
|
||||
entity_description: EntityDescription,
|
||||
) -> None:
|
||||
"""Initialize the entity."""
|
||||
super().__init__(coordinator)
|
||||
self.entity_description = entity_description
|
||||
self._scale = coordinator.scale
|
||||
self._attr_unique_id = f"{self._scale.mac}_{entity_description.key}"
|
||||
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, self._scale.mac)},
|
||||
manufacturer="Acaia",
|
||||
model=self._scale.model,
|
||||
suggested_area="Kitchen",
|
||||
)
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Returns whether entity is available."""
|
||||
return super().available and self._scale.connected
|
|
@ -1,15 +0,0 @@
|
|||
{
|
||||
"entity": {
|
||||
"button": {
|
||||
"tare": {
|
||||
"default": "mdi:scale-balance"
|
||||
},
|
||||
"reset_timer": {
|
||||
"default": "mdi:timer-refresh"
|
||||
},
|
||||
"start_stop": {
|
||||
"default": "mdi:timer-play"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,29 +0,0 @@
|
|||
{
|
||||
"domain": "acaia",
|
||||
"name": "Acaia",
|
||||
"bluetooth": [
|
||||
{
|
||||
"manufacturer_id": 16962
|
||||
},
|
||||
{
|
||||
"local_name": "ACAIA*"
|
||||
},
|
||||
{
|
||||
"local_name": "PYXIS-*"
|
||||
},
|
||||
{
|
||||
"local_name": "LUNAR-*"
|
||||
},
|
||||
{
|
||||
"local_name": "PROCHBT001"
|
||||
}
|
||||
],
|
||||
"codeowners": ["@zweckj"],
|
||||
"config_flow": true,
|
||||
"dependencies": ["bluetooth_adapters"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/acaia",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["aioacaia"],
|
||||
"requirements": ["aioacaia==0.1.6"]
|
||||
}
|
|
@ -1,38 +0,0 @@
|
|||
{
|
||||
"config": {
|
||||
"flow_title": "{name}",
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
|
||||
"no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]",
|
||||
"unsupported_device": "This device is not supported."
|
||||
},
|
||||
"error": {
|
||||
"device_not_found": "Device could not be found.",
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
},
|
||||
"step": {
|
||||
"bluetooth_confirm": {
|
||||
"description": "[%key:component::bluetooth::config::step::bluetooth_confirm::description%]"
|
||||
},
|
||||
"user": {
|
||||
"description": "[%key:component::bluetooth::config::step::user::description%]",
|
||||
"data": {
|
||||
"address": "[%key:common::config_flow::data::device%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"button": {
|
||||
"tare": {
|
||||
"name": "Tare"
|
||||
},
|
||||
"reset_timer": {
|
||||
"name": "Reset timer"
|
||||
},
|
||||
"start_stop": {
|
||||
"name": "Start/stop timer"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -11,5 +11,5 @@
|
|||
"documentation": "https://www.home-assistant.io/integrations/airzone",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["aioairzone"],
|
||||
"requirements": ["aioairzone==0.9.6"]
|
||||
"requirements": ["aioairzone==0.9.5"]
|
||||
}
|
||||
|
|
|
@ -5,18 +5,25 @@ from homeassistant.helpers import config_validation as cv
|
|||
from homeassistant.helpers.hassio import is_hassio
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .const import DATA_MANAGER, DOMAIN, LOGGER
|
||||
from .agent import BackupAgent, UploadedBackup
|
||||
from .const import DOMAIN, LOGGER
|
||||
from .http import async_register_http_views
|
||||
from .manager import BackupManager
|
||||
from .models import BackupUploadMetadata
|
||||
from .websocket import async_register_websocket_handlers
|
||||
|
||||
__all__ = [
|
||||
"BackupAgent",
|
||||
"BackupUploadMetadata",
|
||||
"UploadedBackup",
|
||||
]
|
||||
|
||||
CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN)
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the Backup integration."""
|
||||
backup_manager = BackupManager(hass)
|
||||
hass.data[DATA_MANAGER] = backup_manager
|
||||
hass.data[DOMAIN] = backup_manager = BackupManager(hass)
|
||||
|
||||
with_hassio = is_hassio(hass)
|
||||
|
||||
|
|
73
homeassistant/components/backup/agent.py
Normal file
73
homeassistant/components/backup/agent.py
Normal file
|
@ -0,0 +1,73 @@
|
|||
"""Backup agents for the Backup integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import abc
|
||||
from dataclasses import dataclass
|
||||
from pathlib import Path
|
||||
from typing import Any, Protocol
|
||||
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .models import BackupUploadMetadata, BaseBackup
|
||||
|
||||
|
||||
@dataclass(slots=True)
|
||||
class UploadedBackup(BaseBackup):
|
||||
"""Uploaded backup class."""
|
||||
|
||||
id: str
|
||||
|
||||
|
||||
class BackupAgent(abc.ABC):
|
||||
"""Define the format that backup agents can have."""
|
||||
|
||||
def __init__(self, name: str) -> None:
|
||||
"""Initialize the backup agent."""
|
||||
self.name = name
|
||||
|
||||
@abc.abstractmethod
|
||||
async def async_download_backup(
|
||||
self,
|
||||
*,
|
||||
id: str,
|
||||
path: Path,
|
||||
**kwargs: Any,
|
||||
) -> None:
|
||||
"""Download a backup file.
|
||||
|
||||
The `id` parameter is the ID of the backup that was returned in async_list_backups.
|
||||
|
||||
The `path` parameter is the full file path to download the backup to.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
async def async_upload_backup(
|
||||
self,
|
||||
*,
|
||||
path: Path,
|
||||
metadata: BackupUploadMetadata,
|
||||
**kwargs: Any,
|
||||
) -> None:
|
||||
"""Upload a backup.
|
||||
|
||||
The `path` parameter is the full file path to the backup that should be uploaded.
|
||||
|
||||
The `metadata` parameter contains metadata about the backup that should be uploaded.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
async def async_list_backups(self, **kwargs: Any) -> list[UploadedBackup]:
|
||||
"""List backups."""
|
||||
|
||||
|
||||
class BackupAgentPlatformProtocol(Protocol):
|
||||
"""Define the format that backup platforms can have."""
|
||||
|
||||
async def async_get_backup_agents(
|
||||
self,
|
||||
*,
|
||||
hass: HomeAssistant,
|
||||
**kwargs: Any,
|
||||
) -> list[BackupAgent]:
|
||||
"""Register the backup agent."""
|
|
@ -8,10 +8,11 @@ from typing import TYPE_CHECKING
|
|||
from homeassistant.util.hass_dict import HassKey
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .manager import BackupManager
|
||||
from .manager import BaseBackupManager
|
||||
from .models import BaseBackup
|
||||
|
||||
DOMAIN = "backup"
|
||||
DATA_MANAGER: HassKey[BackupManager] = HassKey(DOMAIN)
|
||||
DATA_MANAGER: HassKey[BaseBackupManager[BaseBackup]] = HassKey(DOMAIN)
|
||||
LOGGER = getLogger(__package__)
|
||||
|
||||
EXCLUDE_FROM_BACKUP = [
|
||||
|
|
|
@ -15,6 +15,7 @@ from homeassistant.core import HomeAssistant, callback
|
|||
from homeassistant.util import slugify
|
||||
|
||||
from .const import DATA_MANAGER
|
||||
from .manager import BackupManager
|
||||
|
||||
|
||||
@callback
|
||||
|
@ -39,7 +40,7 @@ class DownloadBackupView(HomeAssistantView):
|
|||
if not request["hass_user"].is_admin:
|
||||
return Response(status=HTTPStatus.UNAUTHORIZED)
|
||||
|
||||
manager = request.app[KEY_HASS].data[DATA_MANAGER]
|
||||
manager = cast(BackupManager, request.app[KEY_HASS].data[DATA_MANAGER])
|
||||
backup = await manager.async_get_backup(slug=slug)
|
||||
|
||||
if backup is None or not backup.path.exists():
|
||||
|
|
|
@ -16,10 +16,11 @@ import tarfile
|
|||
from tarfile import TarError
|
||||
from tempfile import TemporaryDirectory
|
||||
import time
|
||||
from typing import Any, Protocol, cast
|
||||
from typing import Any, Generic, Protocol, cast
|
||||
|
||||
import aiohttp
|
||||
from securetar import SecureTarFile, atomic_contents_add
|
||||
from typing_extensions import TypeVar
|
||||
|
||||
from homeassistant.backup_restore import RESTORE_BACKUP_FILE
|
||||
from homeassistant.const import __version__ as HAVERSION
|
||||
|
@ -30,10 +31,14 @@ from homeassistant.helpers.json import json_bytes
|
|||
from homeassistant.util import dt as dt_util
|
||||
from homeassistant.util.json import json_loads_object
|
||||
|
||||
from .agent import BackupAgent, BackupAgentPlatformProtocol
|
||||
from .const import DOMAIN, EXCLUDE_FROM_BACKUP, LOGGER
|
||||
from .models import BackupUploadMetadata, BaseBackup
|
||||
|
||||
BUF_SIZE = 2**20 * 4 # 4MB
|
||||
|
||||
_BackupT = TypeVar("_BackupT", bound=BaseBackup, default=BaseBackup)
|
||||
|
||||
|
||||
@dataclass(slots=True)
|
||||
class NewBackup:
|
||||
|
@ -43,14 +48,10 @@ class NewBackup:
|
|||
|
||||
|
||||
@dataclass(slots=True)
|
||||
class Backup:
|
||||
class Backup(BaseBackup):
|
||||
"""Backup class."""
|
||||
|
||||
slug: str
|
||||
name: str
|
||||
date: str
|
||||
path: Path
|
||||
size: float
|
||||
|
||||
def as_dict(self) -> dict:
|
||||
"""Return a dict representation of this backup."""
|
||||
|
@ -76,19 +77,21 @@ class BackupPlatformProtocol(Protocol):
|
|||
"""Perform operations after a backup finishes."""
|
||||
|
||||
|
||||
class BaseBackupManager(abc.ABC):
|
||||
class BaseBackupManager(abc.ABC, Generic[_BackupT]):
|
||||
"""Define the format that backup managers can have."""
|
||||
|
||||
def __init__(self, hass: HomeAssistant) -> None:
|
||||
"""Initialize the backup manager."""
|
||||
self.hass = hass
|
||||
self.backup_task: asyncio.Task | None = None
|
||||
self.backups: dict[str, Backup] = {}
|
||||
self.backups: dict[str, _BackupT] = {}
|
||||
self.loaded_platforms = False
|
||||
self.platforms: dict[str, BackupPlatformProtocol] = {}
|
||||
self.backup_agents: dict[str, BackupAgent] = {}
|
||||
self.syncing = False
|
||||
|
||||
@callback
|
||||
def _add_platform(
|
||||
def _add_platform_pre_post_handlers(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
integration_domain: str,
|
||||
|
@ -98,13 +101,25 @@ class BaseBackupManager(abc.ABC):
|
|||
if not hasattr(platform, "async_pre_backup") or not hasattr(
|
||||
platform, "async_post_backup"
|
||||
):
|
||||
LOGGER.warning(
|
||||
"%s does not implement required functions for the backup platform",
|
||||
integration_domain,
|
||||
)
|
||||
return
|
||||
|
||||
self.platforms[integration_domain] = platform
|
||||
|
||||
async def _async_add_platform_agents(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
integration_domain: str,
|
||||
platform: BackupAgentPlatformProtocol,
|
||||
) -> None:
|
||||
"""Add a platform to the backup manager."""
|
||||
if not hasattr(platform, "async_get_backup_agents"):
|
||||
return
|
||||
|
||||
agents = await platform.async_get_backup_agents(hass=hass)
|
||||
self.backup_agents.update(
|
||||
{f"{integration_domain}.{agent.name}": agent for agent in agents}
|
||||
)
|
||||
|
||||
async def async_pre_backup_actions(self, **kwargs: Any) -> None:
|
||||
"""Perform pre backup actions."""
|
||||
if not self.loaded_platforms:
|
||||
|
@ -139,10 +154,22 @@ class BaseBackupManager(abc.ABC):
|
|||
|
||||
async def load_platforms(self) -> None:
|
||||
"""Load backup platforms."""
|
||||
if self.loaded_platforms:
|
||||
return
|
||||
await integration_platform.async_process_integration_platforms(
|
||||
self.hass, DOMAIN, self._add_platform, wait_for_platforms=True
|
||||
self.hass,
|
||||
DOMAIN,
|
||||
self._add_platform_pre_post_handlers,
|
||||
wait_for_platforms=True,
|
||||
)
|
||||
await integration_platform.async_process_integration_platforms(
|
||||
self.hass,
|
||||
DOMAIN,
|
||||
self._async_add_platform_agents,
|
||||
wait_for_platforms=True,
|
||||
)
|
||||
LOGGER.debug("Loaded %s platforms", len(self.platforms))
|
||||
LOGGER.debug("Loaded %s agents", len(self.backup_agents))
|
||||
self.loaded_platforms = True
|
||||
|
||||
@abc.abstractmethod
|
||||
|
@ -159,14 +186,14 @@ class BaseBackupManager(abc.ABC):
|
|||
"""Generate a backup."""
|
||||
|
||||
@abc.abstractmethod
|
||||
async def async_get_backups(self, **kwargs: Any) -> dict[str, Backup]:
|
||||
async def async_get_backups(self, **kwargs: Any) -> dict[str, _BackupT]:
|
||||
"""Get backups.
|
||||
|
||||
Return a dictionary of Backup instances keyed by their slug.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
async def async_get_backup(self, *, slug: str, **kwargs: Any) -> Backup | None:
|
||||
async def async_get_backup(self, *, slug: str, **kwargs: Any) -> _BackupT | None:
|
||||
"""Get a backup."""
|
||||
|
||||
@abc.abstractmethod
|
||||
|
@ -182,8 +209,12 @@ class BaseBackupManager(abc.ABC):
|
|||
) -> None:
|
||||
"""Receive and store a backup file from upload."""
|
||||
|
||||
@abc.abstractmethod
|
||||
async def async_upload_backup(self, *, slug: str, **kwargs: Any) -> None:
|
||||
"""Upload a backup."""
|
||||
|
||||
class BackupManager(BaseBackupManager):
|
||||
|
||||
class BackupManager(BaseBackupManager[Backup]):
|
||||
"""Backup manager for the Backup integration."""
|
||||
|
||||
def __init__(self, hass: HomeAssistant) -> None:
|
||||
|
@ -192,10 +223,42 @@ class BackupManager(BaseBackupManager):
|
|||
self.backup_dir = Path(hass.config.path("backups"))
|
||||
self.loaded_backups = False
|
||||
|
||||
async def async_upload_backup(self, *, slug: str, **kwargs: Any) -> None:
|
||||
"""Upload a backup."""
|
||||
await self.load_platforms()
|
||||
|
||||
if not self.backup_agents:
|
||||
return
|
||||
|
||||
if not (backup := await self.async_get_backup(slug=slug)):
|
||||
return
|
||||
|
||||
self.syncing = True
|
||||
sync_backup_results = await asyncio.gather(
|
||||
*(
|
||||
agent.async_upload_backup(
|
||||
path=backup.path,
|
||||
metadata=BackupUploadMetadata(
|
||||
homeassistant=HAVERSION,
|
||||
size=backup.size,
|
||||
date=backup.date,
|
||||
slug=backup.slug,
|
||||
name=backup.name,
|
||||
),
|
||||
)
|
||||
for agent in self.backup_agents.values()
|
||||
),
|
||||
return_exceptions=True,
|
||||
)
|
||||
for result in sync_backup_results:
|
||||
if isinstance(result, Exception):
|
||||
LOGGER.error("Error during backup upload - %s", result)
|
||||
self.syncing = False
|
||||
|
||||
async def load_backups(self) -> None:
|
||||
"""Load data of stored backup files."""
|
||||
backups = await self.hass.async_add_executor_job(self._read_backups)
|
||||
LOGGER.debug("Loaded %s backups", len(backups))
|
||||
LOGGER.debug("Loaded %s local backups", len(backups))
|
||||
self.backups = backups
|
||||
self.loaded_backups = True
|
||||
|
||||
|
|
28
homeassistant/components/backup/models.py
Normal file
28
homeassistant/components/backup/models.py
Normal file
|
@ -0,0 +1,28 @@
|
|||
"""Models for the backup integration."""
|
||||
|
||||
from dataclasses import asdict, dataclass
|
||||
|
||||
|
||||
@dataclass()
|
||||
class BaseBackup:
|
||||
"""Base backup class."""
|
||||
|
||||
date: str
|
||||
slug: str
|
||||
size: float
|
||||
name: str
|
||||
|
||||
def as_dict(self) -> dict:
|
||||
"""Return a dict representation of this backup."""
|
||||
return asdict(self)
|
||||
|
||||
|
||||
@dataclass()
|
||||
class BackupUploadMetadata:
|
||||
"""Backup upload metadata."""
|
||||
|
||||
date: str # The date the backup was created
|
||||
slug: str # The slug of the backup
|
||||
size: float # The size of the backup (in bytes)
|
||||
name: str # The name of the backup
|
||||
homeassistant: str # The version of Home Assistant that created the backup
|
|
@ -1,5 +1,6 @@
|
|||
"""Websocket commands for the Backup integration."""
|
||||
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
import voluptuous as vol
|
||||
|
@ -14,6 +15,10 @@ from .manager import BackupProgress
|
|||
@callback
|
||||
def async_register_websocket_handlers(hass: HomeAssistant, with_hassio: bool) -> None:
|
||||
"""Register websocket commands."""
|
||||
websocket_api.async_register_command(hass, backup_agents_download)
|
||||
websocket_api.async_register_command(hass, backup_agents_info)
|
||||
websocket_api.async_register_command(hass, backup_agents_list_backups)
|
||||
|
||||
if with_hassio:
|
||||
websocket_api.async_register_command(hass, handle_backup_end)
|
||||
websocket_api.async_register_command(hass, handle_backup_start)
|
||||
|
@ -40,7 +45,7 @@ async def handle_info(
|
|||
connection.send_result(
|
||||
msg["id"],
|
||||
{
|
||||
"backups": list(backups.values()),
|
||||
"backups": [b.as_dict() for b in backups.values()],
|
||||
"backing_up": manager.backup_task is not None,
|
||||
},
|
||||
)
|
||||
|
@ -162,3 +167,77 @@ async def handle_backup_end(
|
|||
return
|
||||
|
||||
connection.send_result(msg["id"])
|
||||
|
||||
|
||||
@websocket_api.require_admin
|
||||
@websocket_api.websocket_command({vol.Required("type"): "backup/agents/info"})
|
||||
@websocket_api.async_response
|
||||
async def backup_agents_info(
|
||||
hass: HomeAssistant,
|
||||
connection: websocket_api.ActiveConnection,
|
||||
msg: dict[str, Any],
|
||||
) -> None:
|
||||
"""Return backup agents info."""
|
||||
manager = hass.data[DATA_MANAGER]
|
||||
await manager.load_platforms()
|
||||
connection.send_result(
|
||||
msg["id"],
|
||||
{
|
||||
"agents": [{"id": agent_id} for agent_id in manager.backup_agents],
|
||||
"syncing": manager.syncing,
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
@websocket_api.require_admin
|
||||
@websocket_api.websocket_command({vol.Required("type"): "backup/agents/list_backups"})
|
||||
@websocket_api.async_response
|
||||
async def backup_agents_list_backups(
|
||||
hass: HomeAssistant,
|
||||
connection: websocket_api.ActiveConnection,
|
||||
msg: dict[str, Any],
|
||||
) -> None:
|
||||
"""Return a list of uploaded backups."""
|
||||
manager = hass.data[DATA_MANAGER]
|
||||
backups: list[dict[str, Any]] = []
|
||||
await manager.load_platforms()
|
||||
for agent_id, agent in manager.backup_agents.items():
|
||||
_listed_backups = await agent.async_list_backups()
|
||||
backups.extend({**b.as_dict(), "agent_id": agent_id} for b in _listed_backups)
|
||||
connection.send_result(msg["id"], backups)
|
||||
|
||||
|
||||
@websocket_api.require_admin
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
vol.Required("type"): "backup/agents/download",
|
||||
vol.Required("agent"): str,
|
||||
vol.Required("backup_id"): str,
|
||||
vol.Required("slug"): str,
|
||||
}
|
||||
)
|
||||
@websocket_api.async_response
|
||||
async def backup_agents_download(
|
||||
hass: HomeAssistant,
|
||||
connection: websocket_api.ActiveConnection,
|
||||
msg: dict[str, Any],
|
||||
) -> None:
|
||||
"""Download an uploaded backup."""
|
||||
manager = hass.data[DATA_MANAGER]
|
||||
await manager.load_platforms()
|
||||
|
||||
if not (agent := manager.backup_agents.get(msg["agent"])):
|
||||
connection.send_error(
|
||||
msg["id"], "unknown_agent", f"Agent {msg['agent']} not found"
|
||||
)
|
||||
return
|
||||
try:
|
||||
await agent.async_download_backup(
|
||||
id=msg["backup_id"],
|
||||
path=Path(hass.config.path("backup"), f"{msg['slug']}.tar"),
|
||||
)
|
||||
except Exception as err: # noqa: BLE001
|
||||
connection.send_error(msg["id"], "backup_agents_download", str(err))
|
||||
return
|
||||
|
||||
connection.send_result(msg["id"])
|
||||
|
|
|
@ -7,6 +7,6 @@
|
|||
"integration_type": "device",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["aiostreammagic"],
|
||||
"requirements": ["aiostreammagic==2.8.5"],
|
||||
"requirements": ["aiostreammagic==2.8.4"],
|
||||
"zeroconf": ["_stream-magic._tcp.local.", "_smoip._tcp.local."]
|
||||
}
|
||||
|
|
|
@ -51,13 +51,8 @@ CONTROL_ENTITIES: tuple[CambridgeAudioSelectEntityDescription, ...] = (
|
|||
CambridgeAudioSelectEntityDescription(
|
||||
key="display_brightness",
|
||||
translation_key="display_brightness",
|
||||
options=[
|
||||
DisplayBrightness.BRIGHT.value,
|
||||
DisplayBrightness.DIM.value,
|
||||
DisplayBrightness.OFF.value,
|
||||
],
|
||||
options=[x.value for x in DisplayBrightness],
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
load_fn=lambda client: client.display.brightness != DisplayBrightness.NONE,
|
||||
value_fn=lambda client: client.display.brightness,
|
||||
set_value_fn=lambda client, value: client.set_display_brightness(
|
||||
DisplayBrightness(value)
|
||||
|
|
|
@ -6,7 +6,7 @@ from abc import ABC, abstractmethod
|
|||
import asyncio
|
||||
from collections.abc import Awaitable, Callable, Iterable
|
||||
from dataclasses import asdict, dataclass, field
|
||||
from functools import cache, partial, wraps
|
||||
from functools import cache, partial
|
||||
import logging
|
||||
from typing import TYPE_CHECKING, Any, Protocol
|
||||
|
||||
|
@ -205,49 +205,6 @@ async def _async_refresh_providers(hass: HomeAssistant) -> None:
|
|||
)
|
||||
|
||||
|
||||
type WsCommandWithCamera = Callable[
|
||||
[websocket_api.ActiveConnection, dict[str, Any], Camera],
|
||||
Awaitable[None],
|
||||
]
|
||||
|
||||
|
||||
def require_webrtc_support(
|
||||
error_code: str,
|
||||
) -> Callable[[WsCommandWithCamera], websocket_api.AsyncWebSocketCommandHandler]:
|
||||
"""Validate that the camera supports WebRTC."""
|
||||
|
||||
def decorate(
|
||||
func: WsCommandWithCamera,
|
||||
) -> websocket_api.AsyncWebSocketCommandHandler:
|
||||
"""Decorate func."""
|
||||
|
||||
@wraps(func)
|
||||
async def validate(
|
||||
hass: HomeAssistant,
|
||||
connection: websocket_api.ActiveConnection,
|
||||
msg: dict[str, Any],
|
||||
) -> None:
|
||||
"""Validate that the camera supports WebRTC."""
|
||||
entity_id = msg["entity_id"]
|
||||
camera = get_camera_from_entity_id(hass, entity_id)
|
||||
if camera.frontend_stream_type != StreamType.WEB_RTC:
|
||||
connection.send_error(
|
||||
msg["id"],
|
||||
error_code,
|
||||
(
|
||||
"Camera does not support WebRTC,"
|
||||
f" frontend_stream_type={camera.frontend_stream_type}"
|
||||
),
|
||||
)
|
||||
return
|
||||
|
||||
await func(connection, msg, camera)
|
||||
|
||||
return validate
|
||||
|
||||
return decorate
|
||||
|
||||
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
vol.Required("type"): "camera/webrtc/offer",
|
||||
|
@ -256,9 +213,8 @@ def require_webrtc_support(
|
|||
}
|
||||
)
|
||||
@websocket_api.async_response
|
||||
@require_webrtc_support("webrtc_offer_failed")
|
||||
async def ws_webrtc_offer(
|
||||
connection: websocket_api.ActiveConnection, msg: dict[str, Any], camera: Camera
|
||||
hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any]
|
||||
) -> None:
|
||||
"""Handle the signal path for a WebRTC stream.
|
||||
|
||||
|
@ -270,7 +226,20 @@ async def ws_webrtc_offer(
|
|||
|
||||
Async friendly.
|
||||
"""
|
||||
entity_id = msg["entity_id"]
|
||||
offer = msg["offer"]
|
||||
camera = get_camera_from_entity_id(hass, entity_id)
|
||||
if camera.frontend_stream_type != StreamType.WEB_RTC:
|
||||
connection.send_error(
|
||||
msg["id"],
|
||||
"webrtc_offer_failed",
|
||||
(
|
||||
"Camera does not support WebRTC,"
|
||||
f" frontend_stream_type={camera.frontend_stream_type}"
|
||||
),
|
||||
)
|
||||
return
|
||||
|
||||
session_id = ulid()
|
||||
connection.subscriptions[msg["id"]] = partial(
|
||||
camera.close_webrtc_session, session_id
|
||||
|
@ -309,11 +278,23 @@ async def ws_webrtc_offer(
|
|||
}
|
||||
)
|
||||
@websocket_api.async_response
|
||||
@require_webrtc_support("webrtc_get_client_config_failed")
|
||||
async def ws_get_client_config(
|
||||
connection: websocket_api.ActiveConnection, msg: dict[str, Any], camera: Camera
|
||||
hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any]
|
||||
) -> None:
|
||||
"""Handle get WebRTC client config websocket command."""
|
||||
entity_id = msg["entity_id"]
|
||||
camera = get_camera_from_entity_id(hass, entity_id)
|
||||
if camera.frontend_stream_type != StreamType.WEB_RTC:
|
||||
connection.send_error(
|
||||
msg["id"],
|
||||
"webrtc_get_client_config_failed",
|
||||
(
|
||||
"Camera does not support WebRTC,"
|
||||
f" frontend_stream_type={camera.frontend_stream_type}"
|
||||
),
|
||||
)
|
||||
return
|
||||
|
||||
config = camera.async_get_webrtc_client_configuration().to_frontend_dict()
|
||||
connection.send_result(
|
||||
msg["id"],
|
||||
|
@ -330,11 +311,23 @@ async def ws_get_client_config(
|
|||
}
|
||||
)
|
||||
@websocket_api.async_response
|
||||
@require_webrtc_support("webrtc_candidate_failed")
|
||||
async def ws_candidate(
|
||||
connection: websocket_api.ActiveConnection, msg: dict[str, Any], camera: Camera
|
||||
hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any]
|
||||
) -> None:
|
||||
"""Handle WebRTC candidate websocket command."""
|
||||
entity_id = msg["entity_id"]
|
||||
camera = get_camera_from_entity_id(hass, entity_id)
|
||||
if camera.frontend_stream_type != StreamType.WEB_RTC:
|
||||
connection.send_error(
|
||||
msg["id"],
|
||||
"webrtc_candidate_failed",
|
||||
(
|
||||
"Camera does not support WebRTC,"
|
||||
f" frontend_stream_type={camera.frontend_stream_type}"
|
||||
),
|
||||
)
|
||||
return
|
||||
|
||||
await camera.async_on_webrtc_candidate(
|
||||
msg["session_id"], RTCIceCandidate(msg["candidate"])
|
||||
)
|
||||
|
|
|
@ -16,11 +16,11 @@ from hassil.expression import Expression, ListReference, Sequence
|
|||
from hassil.intents import Intents, SlotList, TextSlotList, WildcardSlotList
|
||||
from hassil.recognize import (
|
||||
MISSING_ENTITY,
|
||||
MatchEntity,
|
||||
RecognizeResult,
|
||||
UnmatchedTextEntity,
|
||||
recognize_all,
|
||||
recognize_best,
|
||||
)
|
||||
from hassil.string_matcher import UnmatchedRangeEntity, UnmatchedTextEntity
|
||||
from hassil.util import merge_dict
|
||||
from home_assistant_intents import ErrorKey, get_intents, get_languages
|
||||
import yaml
|
||||
|
@ -499,7 +499,6 @@ class DefaultAgent(ConversationEntity):
|
|||
maybe_result: RecognizeResult | None = None
|
||||
best_num_matched_entities = 0
|
||||
best_num_unmatched_entities = 0
|
||||
best_num_unmatched_ranges = 0
|
||||
for result in recognize_all(
|
||||
user_input.text,
|
||||
lang_intents.intents,
|
||||
|
@ -518,14 +517,10 @@ class DefaultAgent(ConversationEntity):
|
|||
num_matched_entities += 1
|
||||
|
||||
num_unmatched_entities = 0
|
||||
num_unmatched_ranges = 0
|
||||
for unmatched_entity in result.unmatched_entities_list:
|
||||
if isinstance(unmatched_entity, UnmatchedTextEntity):
|
||||
if unmatched_entity.text != MISSING_ENTITY:
|
||||
num_unmatched_entities += 1
|
||||
elif isinstance(unmatched_entity, UnmatchedRangeEntity):
|
||||
num_unmatched_ranges += 1
|
||||
num_unmatched_entities += 1
|
||||
else:
|
||||
num_unmatched_entities += 1
|
||||
|
||||
|
@ -537,24 +532,15 @@ class DefaultAgent(ConversationEntity):
|
|||
(num_matched_entities == best_num_matched_entities)
|
||||
and (num_unmatched_entities < best_num_unmatched_entities)
|
||||
)
|
||||
or (
|
||||
# Prefer unmatched ranges
|
||||
(num_matched_entities == best_num_matched_entities)
|
||||
and (num_unmatched_entities == best_num_unmatched_entities)
|
||||
and (num_unmatched_ranges > best_num_unmatched_ranges)
|
||||
)
|
||||
or (
|
||||
# More literal text matched
|
||||
(num_matched_entities == best_num_matched_entities)
|
||||
and (num_unmatched_entities == best_num_unmatched_entities)
|
||||
and (num_unmatched_ranges == best_num_unmatched_ranges)
|
||||
and (result.text_chunks_matched > maybe_result.text_chunks_matched)
|
||||
)
|
||||
or (
|
||||
# Prefer match failures with entities
|
||||
(result.text_chunks_matched == maybe_result.text_chunks_matched)
|
||||
and (num_unmatched_entities == best_num_unmatched_entities)
|
||||
and (num_unmatched_ranges == best_num_unmatched_ranges)
|
||||
and (
|
||||
("name" in result.entities)
|
||||
or ("name" in result.unmatched_entities)
|
||||
|
@ -564,7 +550,6 @@ class DefaultAgent(ConversationEntity):
|
|||
maybe_result = result
|
||||
best_num_matched_entities = num_matched_entities
|
||||
best_num_unmatched_entities = num_unmatched_entities
|
||||
best_num_unmatched_ranges = num_unmatched_ranges
|
||||
|
||||
return maybe_result
|
||||
|
||||
|
@ -577,16 +562,77 @@ class DefaultAgent(ConversationEntity):
|
|||
language: str,
|
||||
) -> RecognizeResult | None:
|
||||
"""Search intents for a strict match to user input."""
|
||||
return recognize_best(
|
||||
custom_found = False
|
||||
name_found = False
|
||||
best_results: list[RecognizeResult] = []
|
||||
best_name_quality: int | None = None
|
||||
best_text_chunks_matched: int | None = None
|
||||
for result in recognize_all(
|
||||
user_input.text,
|
||||
lang_intents.intents,
|
||||
slot_lists=slot_lists,
|
||||
intent_context=intent_context,
|
||||
language=language,
|
||||
best_metadata_key=METADATA_CUSTOM_SENTENCE,
|
||||
best_slot_name="name",
|
||||
):
|
||||
# Prioritize user intents
|
||||
is_custom = (
|
||||
result.intent_metadata is not None
|
||||
and result.intent_metadata.get(METADATA_CUSTOM_SENTENCE)
|
||||
)
|
||||
|
||||
if custom_found and not is_custom:
|
||||
continue
|
||||
|
||||
if not custom_found and is_custom:
|
||||
custom_found = True
|
||||
# Clear builtin results
|
||||
name_found = False
|
||||
best_results = []
|
||||
best_name_quality = None
|
||||
best_text_chunks_matched = None
|
||||
|
||||
# Prioritize results with a "name" slot
|
||||
name = result.entities.get("name")
|
||||
is_name = name and not name.is_wildcard
|
||||
|
||||
if name_found and not is_name:
|
||||
continue
|
||||
|
||||
if not name_found and is_name:
|
||||
name_found = True
|
||||
# Clear non-name results
|
||||
best_results = []
|
||||
best_text_chunks_matched = None
|
||||
|
||||
if is_name:
|
||||
# Prioritize results with a better "name" slot
|
||||
name_quality = len(cast(MatchEntity, name).value.split())
|
||||
if (best_name_quality is None) or (name_quality > best_name_quality):
|
||||
best_name_quality = name_quality
|
||||
# Clear worse name results
|
||||
best_results = []
|
||||
best_text_chunks_matched = None
|
||||
elif name_quality < best_name_quality:
|
||||
continue
|
||||
|
||||
# Prioritize results with more literal text
|
||||
# This causes wildcards to match last.
|
||||
if (best_text_chunks_matched is None) or (
|
||||
result.text_chunks_matched > best_text_chunks_matched
|
||||
):
|
||||
best_results = [result]
|
||||
best_text_chunks_matched = result.text_chunks_matched
|
||||
elif result.text_chunks_matched == best_text_chunks_matched:
|
||||
# Accumulate results with the same number of literal text matched.
|
||||
# We will resolve the ambiguity below.
|
||||
best_results.append(result)
|
||||
|
||||
if best_results:
|
||||
# Successful strict match
|
||||
return best_results[0]
|
||||
|
||||
return None
|
||||
|
||||
async def _build_speech(
|
||||
self,
|
||||
language: str,
|
||||
|
|
|
@ -6,8 +6,12 @@ from collections.abc import Iterable
|
|||
from typing import Any
|
||||
|
||||
from aiohttp import web
|
||||
from hassil.recognize import MISSING_ENTITY, RecognizeResult
|
||||
from hassil.string_matcher import UnmatchedRangeEntity, UnmatchedTextEntity
|
||||
from hassil.recognize import (
|
||||
MISSING_ENTITY,
|
||||
RecognizeResult,
|
||||
UnmatchedRangeEntity,
|
||||
UnmatchedTextEntity,
|
||||
)
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import http, websocket_api
|
||||
|
|
|
@ -6,5 +6,5 @@
|
|||
"documentation": "https://www.home-assistant.io/integrations/conversation",
|
||||
"integration_type": "system",
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["hassil==2.0.1", "home-assistant-intents==2024.11.13"]
|
||||
"requirements": ["hassil==1.7.4", "home-assistant-intents==2024.11.6"]
|
||||
}
|
||||
|
|
|
@ -4,8 +4,7 @@ from __future__ import annotations
|
|||
|
||||
from typing import Any
|
||||
|
||||
from hassil.recognize import RecognizeResult
|
||||
from hassil.util import PUNCTUATION_ALL
|
||||
from hassil.recognize import PUNCTUATION, RecognizeResult
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.const import CONF_COMMAND, CONF_PLATFORM
|
||||
|
@ -21,7 +20,7 @@ from .const import DATA_DEFAULT_ENTITY, DOMAIN
|
|||
def has_no_punctuation(value: list[str]) -> list[str]:
|
||||
"""Validate result does not contain punctuation."""
|
||||
for sentence in value:
|
||||
if PUNCTUATION_ALL.search(sentence):
|
||||
if PUNCTUATION.search(sentence):
|
||||
raise vol.Invalid("sentence should not contain punctuation")
|
||||
|
||||
return value
|
||||
|
|
|
@ -21,7 +21,6 @@ from .models import Eq3Config, Eq3ConfigEntryData
|
|||
PLATFORMS = [
|
||||
Platform.BINARY_SENSOR,
|
||||
Platform.CLIMATE,
|
||||
Platform.NUMBER,
|
||||
Platform.SWITCH,
|
||||
]
|
||||
|
||||
|
|
|
@ -24,11 +24,6 @@ ENTITY_KEY_WINDOW = "window"
|
|||
ENTITY_KEY_LOCK = "lock"
|
||||
ENTITY_KEY_BOOST = "boost"
|
||||
ENTITY_KEY_AWAY = "away"
|
||||
ENTITY_KEY_COMFORT = "comfort"
|
||||
ENTITY_KEY_ECO = "eco"
|
||||
ENTITY_KEY_OFFSET = "offset"
|
||||
ENTITY_KEY_WINDOW_OPEN_TEMPERATURE = "window_open_temperature"
|
||||
ENTITY_KEY_WINDOW_OPEN_TIMEOUT = "window_open_timeout"
|
||||
|
||||
GET_DEVICE_TIMEOUT = 5 # seconds
|
||||
|
||||
|
@ -82,5 +77,3 @@ DEFAULT_SCAN_INTERVAL = 10 # seconds
|
|||
|
||||
SIGNAL_THERMOSTAT_DISCONNECTED = f"{DOMAIN}.thermostat_disconnected"
|
||||
SIGNAL_THERMOSTAT_CONNECTED = f"{DOMAIN}.thermostat_connected"
|
||||
|
||||
EQ3BT_STEP = 0.5
|
||||
|
|
|
@ -8,23 +8,6 @@
|
|||
}
|
||||
}
|
||||
},
|
||||
"number": {
|
||||
"comfort": {
|
||||
"default": "mdi:sun-thermometer"
|
||||
},
|
||||
"eco": {
|
||||
"default": "mdi:snowflake-thermometer"
|
||||
},
|
||||
"offset": {
|
||||
"default": "mdi:thermometer-plus"
|
||||
},
|
||||
"window_open_temperature": {
|
||||
"default": "mdi:window-open-variant"
|
||||
},
|
||||
"window_open_timeout": {
|
||||
"default": "mdi:timer-refresh"
|
||||
}
|
||||
},
|
||||
"switch": {
|
||||
"away": {
|
||||
"default": "mdi:home-account",
|
||||
|
|
|
@ -23,5 +23,5 @@
|
|||
"iot_class": "local_polling",
|
||||
"loggers": ["eq3btsmart"],
|
||||
"quality_scale": "silver",
|
||||
"requirements": ["eq3btsmart==1.4.1", "bleak-esphome==1.1.0"]
|
||||
"requirements": ["eq3btsmart==1.2.1", "bleak-esphome==1.1.0"]
|
||||
}
|
||||
|
|
|
@ -2,6 +2,7 @@
|
|||
|
||||
from dataclasses import dataclass
|
||||
|
||||
from eq3btsmart.const import DEFAULT_AWAY_HOURS, DEFAULT_AWAY_TEMP
|
||||
from eq3btsmart.thermostat import Thermostat
|
||||
|
||||
from .const import (
|
||||
|
@ -22,6 +23,8 @@ class Eq3Config:
|
|||
target_temp_selector: TargetTemperatureSelector = DEFAULT_TARGET_TEMP_SELECTOR
|
||||
external_temp_sensor: str = ""
|
||||
scan_interval: int = DEFAULT_SCAN_INTERVAL
|
||||
default_away_hours: float = DEFAULT_AWAY_HOURS
|
||||
default_away_temperature: float = DEFAULT_AWAY_TEMP
|
||||
|
||||
|
||||
@dataclass(slots=True)
|
||||
|
|
|
@ -1,158 +0,0 @@
|
|||
"""Platform for eq3 number entities."""
|
||||
|
||||
from collections.abc import Awaitable, Callable
|
||||
from dataclasses import dataclass
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from eq3btsmart import Thermostat
|
||||
from eq3btsmart.const import (
|
||||
EQ3BT_MAX_OFFSET,
|
||||
EQ3BT_MAX_TEMP,
|
||||
EQ3BT_MIN_OFFSET,
|
||||
EQ3BT_MIN_TEMP,
|
||||
)
|
||||
from eq3btsmart.models import Presets
|
||||
|
||||
from homeassistant.components.number import (
|
||||
NumberDeviceClass,
|
||||
NumberEntity,
|
||||
NumberEntityDescription,
|
||||
NumberMode,
|
||||
)
|
||||
from homeassistant.const import EntityCategory, UnitOfTemperature, UnitOfTime
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
|
||||
from . import Eq3ConfigEntry
|
||||
from .const import (
|
||||
ENTITY_KEY_COMFORT,
|
||||
ENTITY_KEY_ECO,
|
||||
ENTITY_KEY_OFFSET,
|
||||
ENTITY_KEY_WINDOW_OPEN_TEMPERATURE,
|
||||
ENTITY_KEY_WINDOW_OPEN_TIMEOUT,
|
||||
EQ3BT_STEP,
|
||||
)
|
||||
from .entity import Eq3Entity
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class Eq3NumberEntityDescription(NumberEntityDescription):
|
||||
"""Entity description for eq3 number entities."""
|
||||
|
||||
value_func: Callable[[Presets], float]
|
||||
value_set_func: Callable[
|
||||
[Thermostat],
|
||||
Callable[[float], Awaitable[None]],
|
||||
]
|
||||
mode: NumberMode = NumberMode.BOX
|
||||
entity_category: EntityCategory | None = EntityCategory.CONFIG
|
||||
|
||||
|
||||
NUMBER_ENTITY_DESCRIPTIONS = [
|
||||
Eq3NumberEntityDescription(
|
||||
key=ENTITY_KEY_COMFORT,
|
||||
value_func=lambda presets: presets.comfort_temperature.value,
|
||||
value_set_func=lambda thermostat: thermostat.async_configure_comfort_temperature,
|
||||
translation_key=ENTITY_KEY_COMFORT,
|
||||
native_min_value=EQ3BT_MIN_TEMP,
|
||||
native_max_value=EQ3BT_MAX_TEMP,
|
||||
native_step=EQ3BT_STEP,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
device_class=NumberDeviceClass.TEMPERATURE,
|
||||
),
|
||||
Eq3NumberEntityDescription(
|
||||
key=ENTITY_KEY_ECO,
|
||||
value_func=lambda presets: presets.eco_temperature.value,
|
||||
value_set_func=lambda thermostat: thermostat.async_configure_eco_temperature,
|
||||
translation_key=ENTITY_KEY_ECO,
|
||||
native_min_value=EQ3BT_MIN_TEMP,
|
||||
native_max_value=EQ3BT_MAX_TEMP,
|
||||
native_step=EQ3BT_STEP,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
device_class=NumberDeviceClass.TEMPERATURE,
|
||||
),
|
||||
Eq3NumberEntityDescription(
|
||||
key=ENTITY_KEY_WINDOW_OPEN_TEMPERATURE,
|
||||
value_func=lambda presets: presets.window_open_temperature.value,
|
||||
value_set_func=lambda thermostat: thermostat.async_configure_window_open_temperature,
|
||||
translation_key=ENTITY_KEY_WINDOW_OPEN_TEMPERATURE,
|
||||
native_min_value=EQ3BT_MIN_TEMP,
|
||||
native_max_value=EQ3BT_MAX_TEMP,
|
||||
native_step=EQ3BT_STEP,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
device_class=NumberDeviceClass.TEMPERATURE,
|
||||
),
|
||||
Eq3NumberEntityDescription(
|
||||
key=ENTITY_KEY_OFFSET,
|
||||
value_func=lambda presets: presets.offset_temperature.value,
|
||||
value_set_func=lambda thermostat: thermostat.async_configure_temperature_offset,
|
||||
translation_key=ENTITY_KEY_OFFSET,
|
||||
native_min_value=EQ3BT_MIN_OFFSET,
|
||||
native_max_value=EQ3BT_MAX_OFFSET,
|
||||
native_step=EQ3BT_STEP,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
device_class=NumberDeviceClass.TEMPERATURE,
|
||||
),
|
||||
Eq3NumberEntityDescription(
|
||||
key=ENTITY_KEY_WINDOW_OPEN_TIMEOUT,
|
||||
value_set_func=lambda thermostat: thermostat.async_configure_window_open_duration,
|
||||
value_func=lambda presets: presets.window_open_time.value.total_seconds() / 60,
|
||||
translation_key=ENTITY_KEY_WINDOW_OPEN_TIMEOUT,
|
||||
native_min_value=0,
|
||||
native_max_value=60,
|
||||
native_step=5,
|
||||
native_unit_of_measurement=UnitOfTime.MINUTES,
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: Eq3ConfigEntry,
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the entry."""
|
||||
|
||||
async_add_entities(
|
||||
Eq3NumberEntity(entry, entity_description)
|
||||
for entity_description in NUMBER_ENTITY_DESCRIPTIONS
|
||||
)
|
||||
|
||||
|
||||
class Eq3NumberEntity(Eq3Entity, NumberEntity):
|
||||
"""Base class for all eq3 number entities."""
|
||||
|
||||
entity_description: Eq3NumberEntityDescription
|
||||
|
||||
def __init__(
|
||||
self, entry: Eq3ConfigEntry, entity_description: Eq3NumberEntityDescription
|
||||
) -> None:
|
||||
"""Initialize the entity."""
|
||||
|
||||
super().__init__(entry, entity_description.key)
|
||||
self.entity_description = entity_description
|
||||
|
||||
@property
|
||||
def native_value(self) -> float:
|
||||
"""Return the state of the entity."""
|
||||
|
||||
if TYPE_CHECKING:
|
||||
assert self._thermostat.status is not None
|
||||
assert self._thermostat.status.presets is not None
|
||||
|
||||
return self.entity_description.value_func(self._thermostat.status.presets)
|
||||
|
||||
async def async_set_native_value(self, value: float) -> None:
|
||||
"""Set the state of the entity."""
|
||||
|
||||
await self.entity_description.value_set_func(self._thermostat)(value)
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return whether the entity is available."""
|
||||
|
||||
return (
|
||||
self._thermostat.status is not None
|
||||
and self._thermostat.status.presets is not None
|
||||
and self._attr_available
|
||||
)
|
|
@ -25,23 +25,6 @@
|
|||
"name": "Daylight saving time"
|
||||
}
|
||||
},
|
||||
"number": {
|
||||
"comfort": {
|
||||
"name": "Comfort temperature"
|
||||
},
|
||||
"eco": {
|
||||
"name": "Eco temperature"
|
||||
},
|
||||
"offset": {
|
||||
"name": "Offset temperature"
|
||||
},
|
||||
"window_open_temperature": {
|
||||
"name": "Window open temperature"
|
||||
},
|
||||
"window_open_timeout": {
|
||||
"name": "Window open timeout"
|
||||
}
|
||||
},
|
||||
"switch": {
|
||||
"lock": {
|
||||
"name": "Lock"
|
||||
|
|
|
@ -95,7 +95,7 @@ class PowerViewNumber(ShadeEntity, RestoreNumber):
|
|||
self.entity_description = description
|
||||
self._attr_unique_id = f"{self._attr_unique_id}_{description.key}"
|
||||
|
||||
async def async_set_native_value(self, value: float) -> None:
|
||||
def set_native_value(self, value: float) -> None:
|
||||
"""Update the current value."""
|
||||
self._attr_native_value = value
|
||||
self.entity_description.store_value_fn(self.coordinator, self._shade.id, value)
|
||||
|
|
|
@ -3,23 +3,30 @@
from __future__ import annotations

import logging

from huum.exceptions import Forbidden, NotAuthenticated
from huum.huum import Huum
import sys

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.exceptions import ConfigEntryNotReady, HomeAssistantError
from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .const import DOMAIN, PLATFORMS

if sys.version_info < (3, 13):
    from huum.exceptions import Forbidden, NotAuthenticated
    from huum.huum import Huum

_LOGGER = logging.getLogger(__name__)


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Set up Huum from a config entry."""
    if sys.version_info >= (3, 13):
        raise HomeAssistantError(
            "Huum is not supported on Python 3.13. Please use Python 3.12."
        )

    username = entry.data[CONF_USERNAME]
    password = entry.data[CONF_PASSWORD]
@ -3,13 +3,9 @@
from __future__ import annotations

import logging
import sys
from typing import Any

from huum.const import SaunaStatus
from huum.exceptions import SafetyException
from huum.huum import Huum
from huum.schemas import HuumStatusResponse

from homeassistant.components.climate import (
    ClimateEntity,
    ClimateEntityFeature,
@ -24,6 +20,12 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback

from .const import DOMAIN

if sys.version_info < (3, 13):
    from huum.const import SaunaStatus
    from huum.exceptions import SafetyException
    from huum.huum import Huum
    from huum.schemas import HuumStatusResponse

_LOGGER = logging.getLogger(__name__)
@ -3,10 +3,9 @@
from __future__ import annotations

import logging
import sys
from typing import Any

from huum.exceptions import Forbidden, NotAuthenticated
from huum.huum import Huum
import voluptuous as vol

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
@ -15,6 +14,10 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .const import DOMAIN

if sys.version_info < (3, 13):
    from huum.exceptions import Forbidden, NotAuthenticated
    from huum.huum import Huum

_LOGGER = logging.getLogger(__name__)

STEP_USER_DATA_SCHEMA = vol.Schema(
@ -5,5 +5,5 @@
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/huum",
  "iot_class": "cloud_polling",
  "requirements": ["huum==0.7.12"]
  "requirements": ["huum==0.7.11;python_version<'3.13'"]
}
74 homeassistant/components/kitchen_sink/backup.py Normal file

@ -0,0 +1,74 @@
"""Backup platform for the kitchen_sink integration."""

from __future__ import annotations

import logging
from pathlib import Path
from typing import Any
from uuid import uuid4

from homeassistant.components.backup import (
    BackupAgent,
    BackupUploadMetadata,
    UploadedBackup,
)
from homeassistant.core import HomeAssistant

LOGGER = logging.getLogger(__name__)


async def async_get_backup_sync_agents(
    hass: HomeAssistant,
) -> list[BackupAgent]:
    """Register the backup agents."""
    return [KitchenSinkBackupAgent("syncer")]


class KitchenSinkBackupAgent(BackupAgent):
    """Kitchen sink backup agent."""

    def __init__(self, name: str) -> None:
        """Initialize the kitchen sink backup sync agent."""
        super().__init__(name)
        self._uploads = [
            UploadedBackup(
                id="def456",
                name="Kitchen sink syncer",
                slug="abc123",
                size=1234,
                date="1970-01-01T00:00:00Z",
            )
        ]

    async def async_download_backup(
        self,
        *,
        id: str,
        path: Path,
        **kwargs: Any,
    ) -> None:
        """Download a backup file."""
        LOGGER.info("Downloading backup %s to %s", id, path)

    async def async_upload_backup(
        self,
        *,
        path: Path,
        metadata: BackupUploadMetadata,
        **kwargs: Any,
    ) -> None:
        """Upload a backup."""
        LOGGER.info("Uploading backup %s %s", path.name, metadata)
        self._uploads.append(
            UploadedBackup(
                id=uuid4().hex,
                name=metadata.name,
                slug=metadata.slug,
                size=metadata.size,
                date=metadata.date,
            )
        )

    async def async_list_backups(self, **kwargs: Any) -> list[UploadedBackup]:
        """List synced backups."""
        return self._uploads
@ -20,8 +20,7 @@ from homeassistant.const import (
    Platform,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv, device_registry as dr
from homeassistant.helpers.typing import ConfigType
from homeassistant.helpers import device_registry as dr

from .const import (
    ADD_ENTITIES_CALLBACKS,
@ -42,26 +41,15 @@ from .helpers import (
    register_lcn_address_devices,
    register_lcn_host_device,
)
from .services import register_services
from .services import SERVICES
from .websocket import register_panel_and_ws_api

_LOGGER = logging.getLogger(__name__)

CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)


async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up the LCN component."""
    hass.data.setdefault(DOMAIN, {})

    await register_services(hass)
    await register_panel_and_ws_api(hass)

    return True


async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
    """Set up a connection to PCHK host from a config entry."""
    hass.data.setdefault(DOMAIN, {})
    if config_entry.entry_id in hass.data[DOMAIN]:
        return False

@ -121,6 +109,15 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b
    )
    lcn_connection.register_for_inputs(input_received)

    # register service calls
    for service_name, service in SERVICES:
        if not hass.services.has_service(DOMAIN, service_name):
            hass.services.async_register(
                DOMAIN, service_name, service(hass).async_call_service, service.schema
            )

    await register_panel_and_ws_api(hass)

    return True


@ -171,6 +168,11 @@ async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) ->
    host = hass.data[DOMAIN].pop(config_entry.entry_id)
    await host[CONNECTION].async_close()

    # unregister service calls
    if unload_ok and not hass.data[DOMAIN]:  # check if this is the last entry to unload
        for service_name, _ in SERVICES:
            hass.services.async_remove(DOMAIN, service_name)

    return unload_ok
@ -429,11 +429,3 @@ SERVICES = (
    (LcnService.DYN_TEXT, DynText),
    (LcnService.PCK, Pck),
)


async def register_services(hass: HomeAssistant) -> None:
    """Register services for LCN."""
    for service_name, service in SERVICES:
        hass.services.async_register(
            DOMAIN, service_name, service(hass).async_call_service, service.schema
        )
@ -18,5 +18,5 @@
  "documentation": "https://www.home-assistant.io/integrations/reolink",
  "iot_class": "local_push",
  "loggers": ["reolink_aio"],
  "requirements": ["reolink-aio==0.11.1"]
  "requirements": ["reolink-aio==0.11.0"]
}
@ -96,7 +96,7 @@ class RingEvent(RingBaseEntity[RingListenCoordinator, RingDeviceT], EventEntity)

    @callback
    def _handle_coordinator_update(self) -> None:
        if (alert := self._get_coordinator_alert()) and not alert.is_update:
        if alert := self._get_coordinator_alert():
            self._async_handle_event(alert.kind)
        super()._handle_coordinator_update()
@ -2,6 +2,7 @@

from __future__ import annotations

import asyncio
from datetime import timedelta
import logging

@ -106,12 +107,8 @@ class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceProp]):
    async def _async_update_data(self) -> DeviceProp:
        """Update data via library."""
        try:
            # Update device props and standard api information
            await self._update_device_prop()
            # Set the new map id from the updated device props
            await asyncio.gather(*(self._update_device_prop(), self.get_rooms()))
            self._set_current_map()
            # Get the rooms for that map id.
            await self.get_rooms()
        except RoborockException as ex:
            raise UpdateFailed(ex) from ex
        return self.roborock_device_info.props
@ -135,9 +135,6 @@ class RoborockCurrentMapSelectEntity(RoborockCoordinatedEntityV1, SelectEntity):
            RoborockCommand.LOAD_MULTI_MAP,
            [map_id],
        )
        # Update the current map id manually so that nothing gets broken
        # if another service hits the api.
        self.coordinator.current_map = map_id
        # We need to wait after updating the map
        # so that other commands will be executed correctly.
        await asyncio.sleep(MAP_SLEEP)
@ -151,9 +148,6 @@ class RoborockCurrentMapSelectEntity(RoborockCoordinatedEntityV1, SelectEntity):
    @property
    def current_option(self) -> str | None:
        """Get the current status of the select entity from device_status."""
        if (
            (current_map := self.coordinator.current_map) is not None
            and current_map in self.coordinator.maps
        ):  # 63 means it is searching for a map.
        if (current_map := self.coordinator.current_map) is not None:
            return self.coordinator.maps[current_map].name
        return None
@ -7,5 +7,5 @@
  "integration_type": "hub",
  "iot_class": "local_polling",
  "loggers": ["aioruckus"],
  "requirements": ["aioruckus==0.42"]
  "requirements": ["aioruckus==0.41"]
}
@ -28,10 +28,6 @@
    "deprecated_yaml_import_issue_auth_error": {
      "title": "YAML import failed due to an authentication error",
      "description": "Configuring {integration_title} using YAML is being removed but there was an authentication error while importing your existing configuration.\nSetup will not proceed.\n\nVerify that your {integration_title} is operating correctly and restart Home Assistant to attempt the import again.\n\nAlternatively, you may remove the `{domain}` configuration from your configuration.yaml entirely, restart Home Assistant, and add the {integration_title} integration manually."
    },
    "deprecated_yaml_import_issue_cannot_connect": {
      "title": "YAML import failed due to a connection error",
      "description": "Configuring {integration_title} using YAML is being removed but there was a connect error while importing your existing configuration.\nSetup will not proceed.\n\nVerify that your {integration_title} is operating correctly and restart Home Assistant to attempt the import again.\n\nAlternatively, you may remove the `{domain}` configuration from your configuration.yaml entirely, restart Home Assistant, and add the {integration_title} integration manually."
    }
  },
  "entity": {
@ -17,8 +17,7 @@
      "unknown": "[%key:common::config_flow::error::unknown%]"
    },
    "abort": {
      "already_configured": "[%key:common::config_flow::abort::already_configured_service%]",
      "unknown": "Unexpected error"
      "already_configured": "[%key:common::config_flow::abort::already_configured_service%]"
    }
  },
  "entity": {
@ -37,13 +37,13 @@
      "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
      "incorrect_pin": "Incorrect PIN",
      "bad_pin_format": "PIN should be 4 digits",
      "two_factor_request_failed": "Request for 2FA code failed, please try again",
      "bad_validation_code_format": "Validation code should be 6 digits",
      "incorrect_validation_code": "Incorrect validation code"
    },
    "abort": {
      "already_configured": "[%key:common::config_flow::abort::already_configured_account%]",
      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
      "two_factor_request_failed": "Request for 2FA code failed, please try again"
      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]"
    }
  },
  "options": {
@ -2,7 +2,7 @@
  "domain": "template",
  "name": "Template",
  "after_dependencies": ["group"],
  "codeowners": ["@PhracturedBlue", "@home-assistant/core"],
  "codeowners": ["@PhracturedBlue", "@tetienne", "@home-assistant/core"],
  "config_flow": true,
  "dependencies": ["blueprint"],
  "documentation": "https://www.home-assistant.io/integrations/template",
@ -49,7 +49,6 @@ class TeslaSystemImplementation(config_entry_oauth2_flow.LocalOAuth2Implementati
    def extra_authorize_data(self) -> dict[str, Any]:
        """Extra data that needs to be appended to the authorize url."""
        return {
            "prompt": "login",
            "scope": " ".join(SCOPES),
            "code_challenge": self.code_challenge,  # PKCE
        }
@ -84,4 +83,4 @@ class TeslaUserImplementation(AuthImplementation):
    @property
    def extra_authorize_data(self) -> dict[str, Any]:
        """Extra data that needs to be appended to the authorize url."""
        return {"prompt": "login", "scope": " ".join(SCOPES)}
        return {"scope": " ".join(SCOPES)}
@ -22,7 +22,7 @@ from .const import _LOGGER, DOMAIN, LINE_TYPES
from .coordinator import VodafoneStationRouter

NOT_AVAILABLE: list = ["", "N/A", "0.0.0.0"]
UPTIME_DEVIATION = 60
UPTIME_DEVIATION = 45


@dataclass(frozen=True, kw_only=True)
@ -33,7 +33,6 @@ from homeassistant.config_entries import (
from homeassistant.const import CONF_NAME
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.hassio import is_hassio
from homeassistant.helpers.selector import FileSelector, FileSelectorConfig
from homeassistant.util import dt as dt_util

@ -105,10 +104,9 @@ async def list_serial_ports(hass: HomeAssistant) -> list[ListPortInfo]:
        yellow_radio.description = "Yellow Zigbee module"
        yellow_radio.manufacturer = "Nabu Casa"

    if is_hassio(hass):
        # Present the multi-PAN addon as a setup option, if it's available
        multipan_manager = (
            await silabs_multiprotocol_addon.get_multiprotocol_addon_manager(hass)
        multipan_manager = await silabs_multiprotocol_addon.get_multiprotocol_addon_manager(
            hass
        )

        try:
@ -29,9 +29,9 @@ PATCH_VERSION: Final = "0.dev0"
__short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
__version__: Final = f"{__short_version__}.{PATCH_VERSION}"
REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0)
REQUIRED_NEXT_PYTHON_VER: Final[tuple[int, int, int]] = (3, 13, 0)
REQUIRED_NEXT_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0)
# Truthy date string triggers showing related deprecation warning messages.
REQUIRED_NEXT_PYTHON_HA_RELEASE: Final = "2025.2"
REQUIRED_NEXT_PYTHON_HA_RELEASE: Final = ""

# Format for platform files
PLATFORM_FORMAT: Final = "{platform}.{domain}"
@ -8,26 +8,6 @@ from __future__ import annotations
from typing import Final

BLUETOOTH: Final[list[dict[str, bool | str | int | list[int]]]] = [
    {
        "domain": "acaia",
        "manufacturer_id": 16962,
    },
    {
        "domain": "acaia",
        "local_name": "ACAIA*",
    },
    {
        "domain": "acaia",
        "local_name": "PYXIS-*",
    },
    {
        "domain": "acaia",
        "local_name": "LUNAR-*",
    },
    {
        "domain": "acaia",
        "local_name": "PROCHBT001",
    },
    {
        "domain": "airthings_ble",
        "manufacturer_id": 820,
@ -24,7 +24,6 @@ FLOWS = {
    ],
    "integration": [
        "abode",
        "acaia",
        "accuweather",
        "acmeda",
        "adax",
@ -11,12 +11,6 @@
    "config_flow": true,
    "iot_class": "cloud_push"
  },
  "acaia": {
    "name": "Acaia",
    "integration_type": "device",
    "config_flow": true,
    "iot_class": "local_push"
  },
  "accuweather": {
    "name": "AccuWeather",
    "integration_type": "service",
@ -5,7 +5,7 @@ aiodiscover==2.1.0
aiodns==3.2.0
aiohasupervisor==0.2.1
aiohttp-fast-zlib==0.1.1
aiohttp==3.11.0
aiohttp==3.11.0rc2
aiohttp_cors==0.7.0
aiozoneinfo==0.2.1
astral==2.2
@ -32,10 +32,10 @@ go2rtc-client==0.1.1
ha-ffmpeg==3.2.2
habluetooth==3.6.0
hass-nabucasa==0.84.0
hassil==2.0.1
hassil==1.7.4
home-assistant-bluetooth==1.13.0
home-assistant-frontend==20241106.2
home-assistant-intents==2024.11.13
home-assistant-intents==2024.11.6
httpx==0.27.2
ifaddr==0.2.0
Jinja2==3.1.4
@ -181,8 +181,8 @@ chacha20poly1305-reuseable>=0.13.0
# https://github.com/pycountry/pycountry/blob/ea69bab36f00df58624a0e490fdad4ccdc14268b/HISTORY.txt#L39
pycountry>=23.12.11

# scapy==2.6.0 causes CI failures due to a race condition
scapy>=2.6.1
# scapy<2.5.0 will not work with python3.12
scapy>=2.5.0

# tuf isn't updated to deal with breaking changes in securesystemslib==1.0.
# Only tuf>=4 includes a constraint to <1.0.
@ -19,7 +19,6 @@ classifiers = [
  "License :: OSI Approved :: Apache Software License",
  "Operating System :: OS Independent",
  "Programming Language :: Python :: 3.12",
  "Programming Language :: Python :: 3.13",
  "Topic :: Home Automation",
]
requires-python = ">=3.12.0"
@ -29,7 +28,7 @@ dependencies = [
    # change behavior based on presence of supervisor. Deprecated with #127228
    # Lib can be removed with 2025.11
    "aiohasupervisor==0.2.1",
    "aiohttp==3.11.0",
    "aiohttp==3.11.0rc2",
    "aiohttp_cors==0.7.0",
    "aiohttp-fast-zlib==0.1.1",
    "aiozoneinfo==0.2.1",
@ -5,7 +5,7 @@
# Home Assistant Core
aiodns==3.2.0
aiohasupervisor==0.2.1
aiohttp==3.11.0
aiohttp==3.11.0rc2
aiohttp_cors==0.7.0
aiohttp-fast-zlib==0.1.1
aiozoneinfo==0.2.1
@ -172,9 +172,6 @@ aio-geojson-usgs-earthquakes==0.3
# homeassistant.components.gdacs
aio-georss-gdacs==0.10

# homeassistant.components.acaia
aioacaia==0.1.6

# homeassistant.components.airq
aioairq==0.3.2

@ -182,7 +179,7 @@ aioairq==0.3.2
aioairzone-cloud==0.6.10

# homeassistant.components.airzone
aioairzone==0.9.6
aioairzone==0.9.5

# homeassistant.components.ambient_network
# homeassistant.components.ambient_station
@ -357,7 +354,7 @@ aiorecollect==2023.09.0
aioridwell==2024.01.0

# homeassistant.components.ruckus_unleashed
aioruckus==0.42
aioruckus==0.41

# homeassistant.components.russound_rio
aiorussound==4.1.0
@ -384,7 +381,7 @@ aiosolaredge==0.2.0
aiosteamist==1.0.0

# homeassistant.components.cambridge_audio
aiostreammagic==2.8.5
aiostreammagic==2.8.4

# homeassistant.components.switcher_kis
aioswitcher==4.4.0
@ -863,7 +860,7 @@ epion==0.0.3
epson-projector==0.5.1

# homeassistant.components.eq3btsmart
eq3btsmart==1.4.1
eq3btsmart==1.2.1

# homeassistant.components.esphome
esphome-dashboard-api==1.2.3
@ -1096,7 +1093,7 @@ hass-nabucasa==0.84.0
hass-splunk==0.1.1

# homeassistant.components.conversation
hassil==2.0.1
hassil==1.7.4

# homeassistant.components.jewish_calendar
hdate==0.10.9
@ -1133,7 +1130,7 @@ holidays==0.60
home-assistant-frontend==20241106.2

# homeassistant.components.conversation
home-assistant-intents==2024.11.13
home-assistant-intents==2024.11.6

# homeassistant.components.home_connect
homeconnect==0.8.0
@ -1151,7 +1148,7 @@ httplib2==0.20.4
huawei-lte-api==1.10.0

# homeassistant.components.huum
huum==0.7.12
huum==0.7.11;python_version<'3.13'

# homeassistant.components.hyperion
hyperion-py==0.7.5
@ -2556,7 +2553,7 @@ renault-api==0.2.7
renson-endura-delta==1.7.1

# homeassistant.components.reolink
reolink-aio==0.11.1
reolink-aio==0.11.0

# homeassistant.components.idteck_prox
rfk101py==0.0.1
@ -160,9 +160,6 @@ aio-geojson-usgs-earthquakes==0.3
# homeassistant.components.gdacs
aio-georss-gdacs==0.10

# homeassistant.components.acaia
aioacaia==0.1.6

# homeassistant.components.airq
aioairq==0.3.2

@ -170,7 +167,7 @@ aioairq==0.3.2
aioairzone-cloud==0.6.10

# homeassistant.components.airzone
aioairzone==0.9.6
aioairzone==0.9.5

# homeassistant.components.ambient_network
# homeassistant.components.ambient_station
@ -339,7 +336,7 @@ aiorecollect==2023.09.0
aioridwell==2024.01.0

# homeassistant.components.ruckus_unleashed
aioruckus==0.42
aioruckus==0.41

# homeassistant.components.russound_rio
aiorussound==4.1.0
@ -366,7 +363,7 @@ aiosolaredge==0.2.0
aiosteamist==1.0.0

# homeassistant.components.cambridge_audio
aiostreammagic==2.8.5
aiostreammagic==2.8.4

# homeassistant.components.switcher_kis
aioswitcher==4.4.0
@ -732,7 +729,7 @@ epion==0.0.3
epson-projector==0.5.1

# homeassistant.components.eq3btsmart
eq3btsmart==1.4.1
eq3btsmart==1.2.1

# homeassistant.components.esphome
esphome-dashboard-api==1.2.3
@ -931,7 +928,7 @@ habluetooth==3.6.0
hass-nabucasa==0.84.0

# homeassistant.components.conversation
hassil==2.0.1
hassil==1.7.4

# homeassistant.components.jewish_calendar
hdate==0.10.9
@ -959,7 +956,7 @@ holidays==0.60
home-assistant-frontend==20241106.2

# homeassistant.components.conversation
home-assistant-intents==2024.11.13
home-assistant-intents==2024.11.6

# homeassistant.components.home_connect
homeconnect==0.8.0
@ -974,7 +971,7 @@ httplib2==0.20.4
huawei-lte-api==1.10.0

# homeassistant.components.huum
huum==0.7.12
huum==0.7.11;python_version<'3.13'

# homeassistant.components.hyperion
hyperion-py==0.7.5
@ -2047,7 +2044,7 @@ renault-api==0.2.7
renson-endura-delta==1.7.1

# homeassistant.components.reolink
reolink-aio==0.11.1
reolink-aio==0.11.0

# homeassistant.components.rflink
rflink==0.0.66
@ -214,8 +214,8 @@ chacha20poly1305-reuseable>=0.13.0
# https://github.com/pycountry/pycountry/blob/ea69bab36f00df58624a0e490fdad4ccdc14268b/HISTORY.txt#L39
pycountry>=23.12.11

# scapy==2.6.0 causes CI failures due to a race condition
scapy>=2.6.1
# scapy<2.5.0 will not work with python3.12
scapy>=2.5.0

# tuf isn't updated to deal with breaking changes in securesystemslib==1.0.
# Only tuf>=4 includes a constraint to <1.0.
@ -80,7 +80,7 @@ WORKDIR /config
_HASSFEST_TEMPLATE = r"""# Automatically generated by hassfest.
#
# To update, run python3 -m script.hassfest -p docker
FROM python:3.13-alpine
FROM python:3.12-alpine

ENV \
    UV_SYSTEM_PYTHON=true \
@ -161,8 +161,6 @@ def _generate_hassfest_dockerimage(
        packages.update(
            gather_recursive_requirements(platform.value, already_checked_domains)
        )
    # Add go2rtc requirements as this file needs the go2rtc integration
    packages.update(gather_recursive_requirements("go2rtc", already_checked_domains))

    return File(
        _HASSFEST_TEMPLATE.format(
@ -1,7 +1,7 @@
# Automatically generated by hassfest.
#
# To update, run python3 -m script.hassfest -p docker
FROM python:3.13-alpine
FROM python:3.12-alpine

ENV \
    UV_SYSTEM_PYTHON=true \
@ -23,7 +23,7 @@ RUN --mount=from=ghcr.io/astral-sh/uv:0.5.0,source=/uv,target=/bin/uv \
    -c /usr/src/homeassistant/homeassistant/package_constraints.txt \
    -r /usr/src/homeassistant/requirements.txt \
    stdlib-list==0.10.0 pipdeptree==2.23.4 tqdm==4.66.5 ruff==0.7.3 \
    PyTurboJPEG==1.7.5 go2rtc-client==0.1.1 ha-ffmpeg==3.2.2 hassil==2.0.1 home-assistant-intents==2024.11.13 mutagen==1.47.0 pymicro-vad==1.0.1 pyspeex-noise==1.0.2
    PyTurboJPEG==1.7.5 ha-ffmpeg==3.2.2 hassil==1.7.4 home-assistant-intents==2024.11.6 mutagen==1.47.0 pymicro-vad==1.0.1 pyspeex-noise==1.0.2

LABEL "name"="hassfest"
LABEL "maintainer"="Home Assistant <hello@home-assistant.io>"
@ -1,14 +0,0 @@
"""Common test tools for the acaia integration."""

from homeassistant.core import HomeAssistant

from tests.common import MockConfigEntry


async def setup_integration(
    hass: HomeAssistant, mock_config_entry: MockConfigEntry
) -> None:
    """Set up the acaia integration for testing."""
    mock_config_entry.add_to_hass(hass)
    await hass.config_entries.async_setup(mock_config_entry.entry_id)
    await hass.async_block_till_done()
@ -1,80 +0,0 @@
"""Common fixtures for the acaia tests."""

from collections.abc import Generator
from unittest.mock import AsyncMock, MagicMock, patch

from aioacaia.acaiascale import AcaiaDeviceState
from aioacaia.const import UnitMass as AcaiaUnitOfMass
import pytest

from homeassistant.components.acaia.const import CONF_IS_NEW_STYLE_SCALE, DOMAIN
from homeassistant.const import CONF_ADDRESS
from homeassistant.core import HomeAssistant

from . import setup_integration

from tests.common import MockConfigEntry


@pytest.fixture
def mock_setup_entry() -> Generator[AsyncMock]:
    """Override async_setup_entry."""
    with patch(
        "homeassistant.components.acaia.async_setup_entry", return_value=True
    ) as mock_setup_entry:
        yield mock_setup_entry


@pytest.fixture
def mock_verify() -> Generator[AsyncMock]:
    """Override is_new_scale check."""
    with patch(
        "homeassistant.components.acaia.config_flow.is_new_scale", return_value=True
    ) as mock_verify:
        yield mock_verify


@pytest.fixture
def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry:
    """Return the default mocked config entry."""
    return MockConfigEntry(
        title="LUNAR-DDEEFF",
        domain=DOMAIN,
        version=1,
        data={
            CONF_ADDRESS: "aa:bb:cc:dd:ee:ff",
            CONF_IS_NEW_STYLE_SCALE: True,
        },
        unique_id="aa:bb:cc:dd:ee:ff",
    )


@pytest.fixture
async def init_integration(
    hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_scale: MagicMock
) -> None:
    """Set up the acaia integration for testing."""
    await setup_integration(hass, mock_config_entry)


@pytest.fixture
def mock_scale() -> Generator[MagicMock]:
    """Return a mocked acaia scale client."""
    with (
        patch(
            "homeassistant.components.acaia.coordinator.AcaiaScale",
            autospec=True,
        ) as scale_mock,
    ):
        scale = scale_mock.return_value
        scale.connected = True
        scale.mac = "aa:bb:cc:dd:ee:ff"
        scale.model = "Lunar"
        scale.timer_running = True
        scale.heartbeat_task = None
        scale.process_queue_task = None
        scale.device_state = AcaiaDeviceState(
            battery_level=42, units=AcaiaUnitOfMass.GRAMS
        )
        scale.weight = 123.45
        yield scale
@ -1,139 +0,0 @@
|
|||
# serializer version: 1
|
||||
# name: test_buttons[button.lunar_ddeeff_reset_timer-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'config_entry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'button',
|
||||
'entity_category': None,
|
||||
'entity_id': 'button.lunar_ddeeff_reset_timer',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': None,
|
||||
'original_icon': None,
|
||||
'original_name': 'Reset timer',
|
||||
'platform': 'acaia',
|
||||
'previous_unique_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': 'reset_timer',
|
||||
'unique_id': 'aa:bb:cc:dd:ee:ff_reset_timer',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_buttons[button.lunar_ddeeff_reset_timer-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'friendly_name': 'LUNAR-DDEEFF Reset timer',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'button.lunar_ddeeff_reset_timer',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'unknown',
|
||||
})
|
||||
# ---
|
||||
# name: test_buttons[button.lunar_ddeeff_start_stop_timer-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'config_entry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'button',
|
||||
'entity_category': None,
|
||||
'entity_id': 'button.lunar_ddeeff_start_stop_timer',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': None,
|
||||
'original_icon': None,
|
||||
'original_name': 'Start/stop timer',
|
||||
'platform': 'acaia',
|
||||
'previous_unique_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': 'start_stop',
|
||||
'unique_id': 'aa:bb:cc:dd:ee:ff_start_stop',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_buttons[button.lunar_ddeeff_start_stop_timer-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'friendly_name': 'LUNAR-DDEEFF Start/stop timer',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'button.lunar_ddeeff_start_stop_timer',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'unknown',
|
||||
})
|
||||
# ---
|
||||
# name: test_buttons[button.lunar_ddeeff_tare-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'config_entry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'button',
|
||||
'entity_category': None,
|
||||
'entity_id': 'button.lunar_ddeeff_tare',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': None,
|
||||
'original_icon': None,
|
||||
'original_name': 'Tare',
|
||||
'platform': 'acaia',
|
||||
'previous_unique_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': 'tare',
|
||||
'unique_id': 'aa:bb:cc:dd:ee:ff_tare',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_buttons[button.lunar_ddeeff_tare-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'friendly_name': 'LUNAR-DDEEFF Tare',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'button.lunar_ddeeff_tare',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'unknown',
|
||||
})
|
||||
# ---
|
|
@ -1,33 +0,0 @@
|
|||
# serializer version: 1
|
||||
# name: test_device
|
||||
DeviceRegistryEntrySnapshot({
|
||||
'area_id': 'kitchen',
|
||||
'config_entries': <ANY>,
|
||||
'configuration_url': None,
|
||||
'connections': set({
|
||||
}),
|
||||
'disabled_by': None,
|
||||
'entry_type': None,
|
||||
'hw_version': None,
|
||||
'id': <ANY>,
|
||||
'identifiers': set({
|
||||
tuple(
|
||||
'acaia',
|
||||
'aa:bb:cc:dd:ee:ff',
|
||||
),
|
||||
}),
|
||||
'is_new': False,
|
||||
'labels': set({
|
||||
}),
|
||||
'manufacturer': 'Acaia',
|
||||
'model': 'Lunar',
|
||||
'model_id': None,
|
||||
'name': 'LUNAR-DDEEFF',
|
||||
'name_by_user': None,
|
||||
'primary_config_entry': <ANY>,
|
||||
'serial_number': None,
|
||||
'suggested_area': 'Kitchen',
|
||||
'sw_version': None,
|
||||
'via_device_id': None,
|
||||
})
|
||||
# ---
|
|
@ -1,90 +0,0 @@
|
|||
"""Tests for the acaia buttons."""
|
||||
|
||||
from datetime import timedelta
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
from freezegun.api import FrozenDateTimeFactory
|
||||
from syrupy import SnapshotAssertion
|
||||
|
||||
from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS
|
||||
from homeassistant.const import (
|
||||
ATTR_ENTITY_ID,
|
||||
STATE_UNAVAILABLE,
|
||||
STATE_UNKNOWN,
|
||||
Platform,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
|
||||
from . import setup_integration
|
||||
|
||||
from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform
|
||||
|
||||
BUTTONS = (
|
||||
"tare",
|
||||
"reset_timer",
|
||||
"start_stop_timer",
|
||||
)
|
||||
|
||||
|
||||
async def test_buttons(
|
||||
hass: HomeAssistant,
|
||||
entity_registry: er.EntityRegistry,
|
||||
snapshot: SnapshotAssertion,
|
||||
mock_scale: MagicMock,
|
||||
mock_config_entry: MockConfigEntry,
|
||||
) -> None:
|
||||
"""Test the acaia buttons."""
|
||||
|
||||
with patch("homeassistant.components.acaia.PLATFORMS", [Platform.BUTTON]):
|
||||
await setup_integration(hass, mock_config_entry)
|
||||
await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id)
|
||||
|
||||
|
||||
async def test_button_presses(
|
||||
hass: HomeAssistant,
|
||||
mock_scale: MagicMock,
|
||||
mock_config_entry: MockConfigEntry,
|
||||
) -> None:
|
||||
"""Test the acaia button presses."""
|
||||
|
||||
await setup_integration(hass, mock_config_entry)
|
||||
|
||||
for button in BUTTONS:
|
||||
await hass.services.async_call(
|
||||
BUTTON_DOMAIN,
|
||||
SERVICE_PRESS,
|
||||
{
|
||||
ATTR_ENTITY_ID: f"button.lunar_ddeeff_{button}",
|
||||
},
|
||||
blocking=True,
|
||||
)
|
||||
|
||||
function = getattr(mock_scale, button)
|
||||
function.assert_called_once()
|
||||
|
||||
|
||||
async def test_buttons_unavailable_on_disconnected_scale(
|
||||
hass: HomeAssistant,
|
||||
mock_scale: MagicMock,
|
||||
mock_config_entry: MockConfigEntry,
|
||||
freezer: FrozenDateTimeFactory,
|
||||
) -> None:
|
||||
"""Test the acaia buttons are unavailable when the scale is disconnected."""
|
||||
|
||||
await setup_integration(hass, mock_config_entry)
|
||||
|
||||
for button in BUTTONS:
|
||||
state = hass.states.get(f"button.lunar_ddeeff_{button}")
|
||||
assert state
|
||||
assert state.state == STATE_UNKNOWN
|
||||
|
||||
mock_scale.connected = False
|
||||
freezer.tick(timedelta(minutes=10))
|
||||
async_fire_time_changed(hass)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
for button in BUTTONS:
|
||||
state = hass.states.get(f"button.lunar_ddeeff_{button}")
|
||||
assert state
|
||||
assert state.state == STATE_UNAVAILABLE
|
|
@ -1,242 +0,0 @@
|
|||
"""Test the acaia config flow."""
|
||||
|
||||
from collections.abc import Generator
|
||||
from unittest.mock import AsyncMock, patch
|
||||
|
||||
from aioacaia.exceptions import AcaiaDeviceNotFound, AcaiaError, AcaiaUnknownDevice
|
||||
import pytest
|
||||
|
||||
from homeassistant.components.acaia.const import CONF_IS_NEW_STYLE_SCALE, DOMAIN
|
||||
from homeassistant.config_entries import SOURCE_BLUETOOTH, SOURCE_USER
|
||||
from homeassistant.const import CONF_ADDRESS
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.data_entry_flow import FlowResultType
|
||||
from homeassistant.helpers.service_info.bluetooth import BluetoothServiceInfo
|
||||
|
||||
from tests.common import MockConfigEntry
|
||||
|
||||
service_info = BluetoothServiceInfo(
|
||||
name="LUNAR-DDEEFF",
|
||||
address="aa:bb:cc:dd:ee:ff",
|
||||
rssi=-63,
|
||||
manufacturer_data={},
|
||||
service_data={},
|
||||
service_uuids=[],
|
||||
source="local",
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_discovered_service_info() -> Generator[AsyncMock]:
|
||||
"""Override getting Bluetooth service info."""
|
||||
with patch(
|
||||
"homeassistant.components.acaia.config_flow.async_discovered_service_info",
|
||||
return_value=[service_info],
|
||||
) as mock_discovered_service_info:
|
||||
yield mock_discovered_service_info
|
||||
|
||||
|
||||
async def test_form(
|
||||
hass: HomeAssistant,
|
||||
mock_setup_entry: AsyncMock,
|
||||
mock_verify: AsyncMock,
|
||||
mock_discovered_service_info: AsyncMock,
|
||||
) -> None:
|
||||
"""Test we get the form."""
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": SOURCE_USER}
|
||||
)
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
assert result["step_id"] == "user"
|
||||
|
||||
user_input = {
|
||||
CONF_ADDRESS: "aa:bb:cc:dd:ee:ff",
|
||||
}
|
||||
result2 = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"],
|
||||
user_input=user_input,
|
||||
)
|
||||
|
||||
assert result2["type"] is FlowResultType.CREATE_ENTRY
|
||||
assert result2["title"] == "LUNAR-DDEEFF"
|
||||
assert result2["data"] == {
|
||||
**user_input,
|
||||
CONF_IS_NEW_STYLE_SCALE: True,
|
||||
}
|
||||
|
||||
|
||||
async def test_bluetooth_discovery(
|
||||
hass: HomeAssistant,
|
||||
mock_setup_entry: AsyncMock,
|
||||
mock_verify: AsyncMock,
|
||||
) -> None:
|
||||
"""Test we can discover a device."""
|
||||
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": SOURCE_BLUETOOTH}, data=service_info
|
||||
)
|
||||
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
assert result["step_id"] == "bluetooth_confirm"
|
||||
|
||||
result2 = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"],
|
||||
user_input={},
|
||||
)
|
||||
|
||||
assert result2["type"] is FlowResultType.CREATE_ENTRY
|
||||
assert result2["title"] == service_info.name
|
||||
assert result2["data"] == {
|
||||
CONF_ADDRESS: service_info.address,
|
||||
CONF_IS_NEW_STYLE_SCALE: True,
|
||||
}
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("exception", "error"),
|
||||
[
|
||||
(AcaiaDeviceNotFound("Error"), "device_not_found"),
|
||||
(AcaiaError, "unknown"),
|
||||
(AcaiaUnknownDevice, "unsupported_device"),
|
||||
],
|
||||
)
|
||||
async def test_bluetooth_discovery_errors(
|
||||
hass: HomeAssistant,
|
||||
mock_verify: AsyncMock,
|
||||
exception: Exception,
|
||||
error: str,
|
||||
) -> None:
|
||||
"""Test abortions of Bluetooth discovery."""
|
||||
mock_verify.side_effect = exception
|
||||
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": SOURCE_BLUETOOTH}, data=service_info
|
||||
)
|
||||
|
||||
assert result["type"] is FlowResultType.ABORT
|
||||
assert result["reason"] == error
|
||||
|
||||
|
||||
async def test_already_configured(
|
||||
hass: HomeAssistant,
|
||||
mock_config_entry: MockConfigEntry,
|
||||
mock_verify: AsyncMock,
|
||||
mock_discovered_service_info: AsyncMock,
|
||||
) -> None:
|
||||
"""Ensure we can't add the same device twice."""
|
||||
|
||||
mock_config_entry.add_to_hass(hass)
|
||||
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": SOURCE_USER}
|
||||
)
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
|
||||
result2 = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"],
|
||||
{
|
||||
CONF_ADDRESS: "aa:bb:cc:dd:ee:ff",
|
||||
},
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert result2["type"] is FlowResultType.ABORT
|
||||
assert result2["reason"] == "already_configured"
|
||||
|
||||
|
||||
async def test_already_configured_bluetooth_discovery(
|
||||
hass: HomeAssistant,
|
||||
mock_config_entry: MockConfigEntry,
|
||||
) -> None:
|
||||
"""Ensure configure device is not discovered again."""
|
||||
|
||||
mock_config_entry.add_to_hass(hass)
|
||||
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": SOURCE_BLUETOOTH}, data=service_info
|
||||
)
|
||||
|
||||
assert result["type"] is FlowResultType.ABORT
|
||||
assert result["reason"] == "already_configured"
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("exception", "error"),
|
||||
[
|
||||
(AcaiaDeviceNotFound("Error"), "device_not_found"),
|
||||
(AcaiaError, "unknown"),
|
||||
],
|
||||
)
|
||||
async def test_recoverable_config_flow_errors(
|
||||
hass: HomeAssistant,
|
||||
mock_setup_entry: AsyncMock,
|
||||
mock_verify: AsyncMock,
|
||||
mock_discovered_service_info: AsyncMock,
|
||||
exception: Exception,
|
||||
error: str,
|
||||
) -> None:
|
||||
"""Test recoverable errors."""
|
||||
mock_verify.side_effect = exception
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": SOURCE_USER}
|
||||
)
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
|
||||
result2 = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"],
|
||||
{
|
||||
CONF_ADDRESS: "aa:bb:cc:dd:ee:ff",
|
||||
},
|
||||
)
|
||||
|
||||
assert result2["type"] is FlowResultType.FORM
|
||||
assert result2["errors"] == {"base": error}
|
||||
|
||||
# recover
|
||||
mock_verify.side_effect = None
|
||||
result3 = await hass.config_entries.flow.async_configure(
|
||||
result2["flow_id"],
|
||||
{
|
||||
CONF_ADDRESS: "aa:bb:cc:dd:ee:ff",
|
||||
},
|
||||
)
|
||||
assert result3["type"] is FlowResultType.CREATE_ENTRY
|
||||
|
||||
|
||||
async def test_unsupported_device(
|
||||
hass: HomeAssistant,
|
||||
mock_setup_entry: AsyncMock,
|
||||
mock_verify: AsyncMock,
|
||||
mock_discovered_service_info: AsyncMock,
|
||||
) -> None:
|
||||
"""Test flow aborts on unsupported device."""
|
||||
mock_verify.side_effect = AcaiaUnknownDevice
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": SOURCE_USER}
|
||||
)
|
||||
assert result["type"] is FlowResultType.FORM
|
||||
|
||||
result2 = await hass.config_entries.flow.async_configure(
|
||||
result["flow_id"],
|
||||
{
|
||||
CONF_ADDRESS: "aa:bb:cc:dd:ee:ff",
|
||||
},
|
||||
)
|
||||
|
||||
assert result2["type"] is FlowResultType.ABORT
|
||||
assert result2["reason"] == "unsupported_device"
|
||||
|
||||
|
||||
async def test_no_bluetooth_devices(
|
||||
hass: HomeAssistant,
|
||||
mock_setup_entry: AsyncMock,
|
||||
mock_discovered_service_info: AsyncMock,
|
||||
) -> None:
|
||||
"""Test flow aborts on unsupported device."""
|
||||
mock_discovered_service_info.return_value = []
|
||||
|
||||
result = await hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": SOURCE_USER}
|
||||
)
|
||||
assert result["type"] is FlowResultType.ABORT
|
||||
assert result["reason"] == "no_devices_found"
|
|
@ -1,65 +0,0 @@
|
|||
"""Test init of acaia integration."""
|
||||
|
||||
from datetime import timedelta
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
from aioacaia.exceptions import AcaiaDeviceNotFound, AcaiaError
|
||||
from freezegun.api import FrozenDateTimeFactory
|
||||
import pytest
|
||||
from syrupy import SnapshotAssertion
|
||||
|
||||
from homeassistant.components.acaia.const import DOMAIN
|
||||
from homeassistant.config_entries import ConfigEntryState
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
|
||||
from tests.common import MockConfigEntry, async_fire_time_changed
|
||||
|
||||
pytestmark = pytest.mark.usefixtures("init_integration")
|
||||
|
||||
|
||||
async def test_load_unload_config_entry(
|
||||
hass: HomeAssistant,
|
||||
mock_config_entry: MockConfigEntry,
|
||||
) -> None:
|
||||
"""Test loading and unloading the integration."""
|
||||
|
||||
assert mock_config_entry.state is ConfigEntryState.LOADED
|
||||
|
||||
await hass.config_entries.async_unload(mock_config_entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert mock_config_entry.state is ConfigEntryState.NOT_LOADED
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"exception", [AcaiaError, AcaiaDeviceNotFound("Boom"), TimeoutError]
|
||||
)
|
||||
async def test_update_exception_leads_to_active_disconnect(
|
||||
hass: HomeAssistant,
|
||||
mock_scale: MagicMock,
|
||||
freezer: FrozenDateTimeFactory,
|
||||
exception: Exception,
|
||||
) -> None:
|
||||
"""Test scale gets disconnected on exception."""
|
||||
|
||||
mock_scale.connect.side_effect = exception
|
||||
mock_scale.connected = False
|
||||
|
||||
freezer.tick(timedelta(minutes=10))
|
||||
async_fire_time_changed(hass)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
mock_scale.device_disconnected_handler.assert_called_once()
|
||||
|
||||
|
||||
async def test_device(
|
||||
mock_scale: MagicMock,
|
||||
device_registry: dr.DeviceRegistry,
|
||||
snapshot: SnapshotAssertion,
|
||||
) -> None:
|
||||
"""Snapshot the device from registry."""
|
||||
|
||||
device = device_registry.async_get_device({(DOMAIN, mock_scale.mac)})
|
||||
assert device
|
||||
assert device == snapshot
|
|
@ -697,7 +697,7 @@
    'speech': dict({
      'plain': dict({
        'extra_data': None,
        'speech': 'Sorry, I am not aware of any area called Are',
        'speech': 'Sorry, I am not aware of any area called are',
      }),
    }),
  }),
@ -741,7 +741,7 @@
    'speech': dict({
      'plain': dict({
        'extra_data': None,
        'speech': 'Sorry, I am not aware of any area called Are',
        'speech': 'Sorry, I am not aware of any area called are',
      }),
    }),
  }),
@ -3,10 +3,13 @@
from __future__ import annotations

from pathlib import Path
from typing import Any
from unittest.mock import patch

from homeassistant.components.backup import DOMAIN
from homeassistant.components.backup.agent import BackupAgent, UploadedBackup
from homeassistant.components.backup.manager import Backup
from homeassistant.components.backup.models import BackupUploadMetadata
from homeassistant.core import HomeAssistant
from homeassistant.helpers.typing import ConfigType
from homeassistant.setup import async_setup_component
@ -20,6 +23,40 @@ TEST_BACKUP = Backup(
)


class BackupAgentTest(BackupAgent):
    """Test backup agent."""

    async def async_download_backup(
        self,
        *,
        id: str,
        path: Path,
        **kwargs: Any,
    ) -> None:
        """Download a backup file."""

    async def async_upload_backup(
        self,
        *,
        path: Path,
        metadata: BackupUploadMetadata,
        **kwargs: Any,
    ) -> None:
        """Upload a backup."""

    async def async_list_backups(self, **kwargs: Any) -> list[UploadedBackup]:
        """List backups."""
        return [
            UploadedBackup(
                id="abc123",
                name="Test",
                slug="abc123",
                size=13.37,
                date="1970-01-01T00:00:00Z",
            )
        ]


async def setup_backup_integration(
    hass: HomeAssistant,
    with_hassio: bool = False,
@ -1,4 +1,106 @@
|
|||
# serializer version: 1
|
||||
# name: test_agents_download[with_hassio]
|
||||
dict({
|
||||
'id': 1,
|
||||
'result': None,
|
||||
'success': True,
|
||||
'type': 'result',
|
||||
})
|
||||
# ---
|
||||
# name: test_agents_download[without_hassio]
|
||||
dict({
|
||||
'id': 1,
|
||||
'result': None,
|
||||
'success': True,
|
||||
'type': 'result',
|
||||
})
|
||||
# ---
|
||||
# name: test_agents_download_exception
|
||||
dict({
|
||||
'error': dict({
|
||||
'code': 'backup_agents_download',
|
||||
'message': 'Boom',
|
||||
}),
|
||||
'id': 1,
|
||||
'success': False,
|
||||
'type': 'result',
|
||||
})
|
||||
# ---
|
||||
# name: test_agents_download_unknown_agent
|
||||
dict({
|
||||
'error': dict({
|
||||
'code': 'unknown_agent',
|
||||
'message': 'Agent domain.test not found',
|
||||
}),
|
||||
'id': 1,
|
||||
'success': False,
|
||||
'type': 'result',
|
||||
})
|
||||
# ---
|
||||
# name: test_agents_info[with_hassio]
|
||||
dict({
|
||||
'id': 1,
|
||||
'result': dict({
|
||||
'agents': list([
|
||||
dict({
|
||||
'id': 'domain.test',
|
||||
}),
|
||||
]),
|
||||
'syncing': False,
|
||||
}),
|
||||
'success': True,
|
||||
'type': 'result',
|
||||
})
|
||||
# ---
|
||||
# name: test_agents_info[without_hassio]
|
||||
dict({
|
||||
'id': 1,
|
||||
'result': dict({
|
||||
'agents': list([
|
||||
dict({
|
||||
'id': 'domain.test',
|
||||
}),
|
||||
]),
|
||||
'syncing': False,
|
||||
}),
|
||||
'success': True,
|
||||
'type': 'result',
|
||||
})
|
||||
# ---
|
||||
# name: test_agents_list_backups[with_hassio]
|
||||
dict({
|
||||
'id': 1,
|
||||
'result': list([
|
||||
dict({
|
||||
'agent_id': 'domain.test',
|
||||
'date': '1970-01-01T00:00:00Z',
|
||||
'id': 'abc123',
|
||||
'name': 'Test',
|
||||
'size': 13.37,
|
||||
'slug': 'abc123',
|
||||
}),
|
||||
]),
|
||||
'success': True,
|
||||
'type': 'result',
|
||||
})
|
||||
# ---
|
||||
# name: test_agents_list_backups[without_hassio]
|
||||
dict({
|
||||
'id': 1,
|
||||
'result': list([
|
||||
dict({
|
||||
'agent_id': 'domain.test',
|
||||
'date': '1970-01-01T00:00:00Z',
|
||||
'id': 'abc123',
|
||||
'name': 'Test',
|
||||
'size': 13.37,
|
||||
'slug': 'abc123',
|
||||
}),
|
||||
]),
|
||||
'success': True,
|
||||
'type': 'result',
|
||||
})
|
||||
# ---
|
||||
# name: test_backup_end[with_hassio-hass_access_token]
|
||||
dict({
|
||||
'error': dict({
|
||||
|
@ -40,7 +142,7 @@
|
|||
'type': 'result',
|
||||
})
|
||||
# ---
|
||||
# name: test_backup_end_excepion[exception0]
|
||||
# name: test_backup_end_exception[exception0]
|
||||
dict({
|
||||
'error': dict({
|
||||
'code': 'post_backup_actions_failed',
|
||||
|
@ -51,7 +153,7 @@
|
|||
'type': 'result',
|
||||
})
|
||||
# ---
|
||||
# name: test_backup_end_excepion[exception1]
|
||||
# name: test_backup_end_exception[exception1]
|
||||
dict({
|
||||
'error': dict({
|
||||
'code': 'post_backup_actions_failed',
|
||||
|
@ -62,7 +164,7 @@
|
|||
'type': 'result',
|
||||
})
|
||||
# ---
|
||||
# name: test_backup_end_excepion[exception2]
|
||||
# name: test_backup_end_exception[exception2]
|
||||
dict({
|
||||
'error': dict({
|
||||
'code': 'post_backup_actions_failed',
|
||||
|
@ -114,7 +216,7 @@
|
|||
'type': 'result',
|
||||
})
|
||||
# ---
|
||||
# name: test_backup_start_excepion[exception0]
|
||||
# name: test_backup_start_exception[exception0]
|
||||
dict({
|
||||
'error': dict({
|
||||
'code': 'pre_backup_actions_failed',
|
||||
|
@ -125,7 +227,7 @@
|
|||
'type': 'result',
|
||||
})
|
||||
# ---
|
||||
# name: test_backup_start_excepion[exception1]
|
||||
# name: test_backup_start_exception[exception1]
|
||||
dict({
|
||||
'error': dict({
|
||||
'code': 'pre_backup_actions_failed',
|
||||
|
@ -136,7 +238,7 @@
|
|||
'type': 'result',
|
||||
})
|
||||
# ---
|
||||
# name: test_backup_start_excepion[exception2]
|
||||
# name: test_backup_start_exception[exception2]
|
||||
dict({
|
||||
'error': dict({
|
||||
'code': 'pre_backup_actions_failed',
|
||||
|
|
|
@ -3,13 +3,15 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from typing import Any
|
||||
from unittest.mock import AsyncMock, MagicMock, Mock, mock_open, patch
|
||||
|
||||
import aiohttp
|
||||
from multidict import CIMultiDict, CIMultiDictProxy
|
||||
import pytest
|
||||
|
||||
from homeassistant.components.backup import BackupManager
|
||||
from homeassistant.components.backup import BackupManager, BackupUploadMetadata
from homeassistant.components.backup.agent import BackupAgentPlatformProtocol
from homeassistant.components.backup.manager import (
BackupPlatformProtocol,
BackupProgress,

@@ -18,7 +20,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.setup import async_setup_component

from .common import TEST_BACKUP
from .common import TEST_BACKUP, BackupAgentTest

from tests.common import MockPlatform, mock_platform

@@ -39,7 +41,7 @@ async def _mock_backup_generation(
assert manager.backup_task is not None
assert progress == []

await manager.backup_task
backup = await manager.backup_task
assert progress == [BackupProgress(done=True, stage=None, success=True)]

assert mocked_json_bytes.call_count == 1
@@ -48,10 +50,12 @@ async def _mock_backup_generation(
assert backup_json_dict["homeassistant"] == {"version": "2025.1.0"}
assert manager.backup_dir.as_posix() in str(mocked_tarfile.call_args_list[0][0][0])

return backup


async def _setup_mock_domain(
hass: HomeAssistant,
platform: BackupPlatformProtocol | None = None,
platform: BackupPlatformProtocol | BackupAgentPlatformProtocol | None = None,
) -> None:
"""Set up a mock domain."""
mock_platform(hass, "some_domain.backup", platform or MockPlatform())

@@ -174,6 +178,7 @@ async def test_async_create_backup(
assert "Generated new backup with slug " in caplog.text
assert "Creating backup directory" in caplog.text
assert "Loaded 0 platforms" in caplog.text
assert "Loaded 0 agents" in caplog.text


async def test_loading_platforms(
@@ -191,6 +196,7 @@ async def test_loading_platforms(
Mock(
async_pre_backup=AsyncMock(),
async_post_backup=AsyncMock(),
async_get_backup_agents=AsyncMock(),
),
)
await manager.load_platforms()
@@ -202,6 +208,32 @@ async def test_loading_platforms(
assert "Loaded 1 platforms" in caplog.text


async def test_loading_agents(
hass: HomeAssistant,
caplog: pytest.LogCaptureFixture,
) -> None:
"""Test loading backup agents."""
manager = BackupManager(hass)

assert not manager.loaded_platforms
assert not manager.platforms

await _setup_mock_domain(
hass,
Mock(
async_get_backup_agents=AsyncMock(return_value=[BackupAgentTest("test")]),
),
)
await manager.load_platforms()
await hass.async_block_till_done()

assert manager.loaded_platforms
assert len(manager.backup_agents) == 1

assert "Loaded 1 agents" in caplog.text
assert "some_domain.test" in manager.backup_agents


async def test_not_loading_bad_platforms(
hass: HomeAssistant,
caplog: pytest.LogCaptureFixture,
@@ -220,10 +252,151 @@ async def test_not_loading_bad_platforms(
assert len(manager.platforms) == 0

assert "Loaded 0 platforms" in caplog.text
assert (
"some_domain does not implement required functions for the backup platform"
in caplog.text


@pytest.mark.usefixtures("mock_backup_generation")
async def test_syncing_backup(
hass: HomeAssistant,
caplog: pytest.LogCaptureFixture,
mocked_json_bytes: Mock,
mocked_tarfile: Mock,
) -> None:
"""Test syncing a backup."""
manager = BackupManager(hass)

await _setup_mock_domain(
hass,
Mock(
async_pre_backup=AsyncMock(),
async_post_backup=AsyncMock(),
async_get_backup_agents=AsyncMock(
return_value=[
BackupAgentTest("agent1"),
BackupAgentTest("agent2"),
]
),
),
)
await manager.load_platforms()
await hass.async_block_till_done()

backup = await _mock_backup_generation(manager, mocked_json_bytes, mocked_tarfile)

with (
patch(
"homeassistant.components.backup.manager.BackupManager.async_get_backup",
return_value=backup,
),
patch.object(BackupAgentTest, "async_upload_backup") as mocked_upload,
patch(
"homeassistant.components.backup.manager.HAVERSION",
"2025.1.0",
),
):
await manager.async_upload_backup(slug=backup.slug)
assert mocked_upload.call_count == 2
first_call = mocked_upload.call_args_list[0]
assert first_call[1]["path"] == backup.path
assert first_call[1]["metadata"] == BackupUploadMetadata(
date=backup.date,
homeassistant="2025.1.0",
name=backup.name,
size=backup.size,
slug=backup.slug,
)

assert "Error during backup upload" not in caplog.text


@pytest.mark.usefixtures("mock_backup_generation")
async def test_syncing_backup_with_exception(
hass: HomeAssistant,
caplog: pytest.LogCaptureFixture,
mocked_json_bytes: Mock,
mocked_tarfile: Mock,
) -> None:
"""Test syncing a backup with exception."""
manager = BackupManager(hass)

class ModifiedBackupSyncAgentTest(BackupAgentTest):
async def async_upload_backup(self, **kwargs: Any) -> None:
raise HomeAssistantError("Test exception")

await _setup_mock_domain(
hass,
Mock(
async_pre_backup=AsyncMock(),
async_post_backup=AsyncMock(),
async_get_backup_agents=AsyncMock(
return_value=[
ModifiedBackupSyncAgentTest("agent1"),
ModifiedBackupSyncAgentTest("agent2"),
]
),
),
)
await manager.load_platforms()
await hass.async_block_till_done()

backup = await _mock_backup_generation(manager, mocked_json_bytes, mocked_tarfile)

with (
patch(
"homeassistant.components.backup.manager.BackupManager.async_get_backup",
return_value=backup,
),
patch.object(
ModifiedBackupSyncAgentTest,
"async_upload_backup",
) as mocked_upload,
patch(
"homeassistant.components.backup.manager.HAVERSION",
"2025.1.0",
),
):
mocked_upload.side_effect = HomeAssistantError("Test exception")
await manager.async_upload_backup(slug=backup.slug)
assert mocked_upload.call_count == 2
first_call = mocked_upload.call_args_list[0]
assert first_call[1]["path"] == backup.path
assert first_call[1]["metadata"] == BackupUploadMetadata(
date=backup.date,
homeassistant="2025.1.0",
name=backup.name,
size=backup.size,
slug=backup.slug,
)

assert "Error during backup upload - Test exception" in caplog.text


@pytest.mark.usefixtures("mock_backup_generation")
async def test_syncing_backup_no_agents(
hass: HomeAssistant,
caplog: pytest.LogCaptureFixture,
mocked_json_bytes: Mock,
mocked_tarfile: Mock,
) -> None:
"""Test syncing a backup with no agents."""
manager = BackupManager(hass)

await _setup_mock_domain(
hass,
Mock(
async_pre_backup=AsyncMock(),
async_post_backup=AsyncMock(),
async_get_backup_agents=AsyncMock(return_value=[]),
),
)
await manager.load_platforms()
await hass.async_block_till_done()

backup = await _mock_backup_generation(manager, mocked_json_bytes, mocked_tarfile)
with patch(
"homeassistant.components.backup.agent.BackupAgent.async_upload_backup"
) as mocked_async_upload_backup:
await manager.async_upload_backup(slug=backup.slug)
assert mocked_async_upload_backup.call_count == 0


async def test_exception_plaform_pre(
@@ -241,6 +414,7 @@ async def test_exception_plaform_pre(
Mock(
async_pre_backup=_mock_step,
async_post_backup=AsyncMock(),
async_get_backup_agents=AsyncMock(),
),
)

@@ -263,6 +437,7 @@ async def test_exception_plaform_post(
Mock(
async_pre_backup=AsyncMock(),
async_post_backup=_mock_step,
async_get_backup_agents=AsyncMock(),
),
)

@@ -285,6 +460,7 @@ async def test_loading_platforms_when_running_async_pre_backup_actions(
Mock(
async_pre_backup=AsyncMock(),
async_post_backup=AsyncMock(),
async_get_backup_agents=AsyncMock(),
),
)
await manager.async_pre_backup_actions()
@@ -310,6 +486,7 @@ async def test_loading_platforms_when_running_async_post_backup_actions(
Mock(
async_pre_backup=AsyncMock(),
async_post_backup=AsyncMock(),
async_get_backup_agents=AsyncMock(),
),
)
await manager.async_post_backup_actions()

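The BackupAgentTest double imported from .common in the hunks above is not part of this diff (it lives in tests/components/backup/common.py). For orientation only, a minimal sketch of an agent double compatible with the calls exercised above could look like the following; subclassing BackupAgent and the exact keyword signatures are assumptions inferred from the patches and assertions in the tests, not taken from the real helper.

from pathlib import Path
from typing import Any

from homeassistant.components.backup import BackupUploadMetadata
from homeassistant.components.backup.agent import BackupAgent


class BackupAgentTest(BackupAgent):
    """Hypothetical test double: records nothing, uploads nowhere."""

    def __init__(self, name: str) -> None:
        # Registered by the manager as "<platform domain>.<name>", e.g. "some_domain.test".
        self.name = name

    async def async_upload_backup(
        self, *, path: Path, metadata: BackupUploadMetadata, **kwargs: Any
    ) -> None:
        """Upload the backup at `path`; the tests patch this and inspect the kwargs."""

    async def async_download_backup(
        self, *, id: str, path: Path, **kwargs: Any
    ) -> None:
        """Download backup `id` to `path`; patched in the download tests below."""

The manager obtains such agents from a platform's async_get_backup_agents callback, which is why the mock platforms above all gain that attribute.
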
@@ -1,16 +1,18 @@
"""Tests for the Backup integration."""

from unittest.mock import patch
from pathlib import Path
from unittest.mock import AsyncMock, patch

from freezegun.api import FrozenDateTimeFactory
import pytest
from syrupy import SnapshotAssertion

from homeassistant.components.backup.manager import Backup
from homeassistant.components.backup.const import DATA_MANAGER
from homeassistant.components.backup.models import BaseBackup
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError

from .common import TEST_BACKUP, setup_backup_integration
from .common import TEST_BACKUP, BackupAgentTest, setup_backup_integration

from tests.typing import WebSocketGenerator

@@ -43,15 +45,23 @@ async def test_info(
"""Test getting backup info."""
await setup_backup_integration(hass, with_hassio=with_hassio)

hass.data[DATA_MANAGER].backups = {TEST_BACKUP.slug: TEST_BACKUP}

client = await hass_ws_client(hass)
await hass.async_block_till_done()

with patch(
with (
patch(
"homeassistant.components.backup.manager.BackupManager.load_backups",
AsyncMock(),
),
patch(
"homeassistant.components.backup.manager.BackupManager.async_get_backups",
return_value={TEST_BACKUP.slug: TEST_BACKUP},
),
):
await client.send_json_auto_id({"type": "backup/info"})
assert snapshot == await client.receive_json()
assert await client.receive_json() == snapshot


@pytest.mark.parametrize(
@@ -73,7 +83,7 @@ async def test_details(
hass_ws_client: WebSocketGenerator,
snapshot: SnapshotAssertion,
with_hassio: bool,
backup_content: Backup | None,
backup_content: BaseBackup | None,
) -> None:
"""Test getting backup info."""
await setup_backup_integration(hass, with_hassio=with_hassio)

@@ -112,7 +122,7 @@ async def test_remove(
"homeassistant.components.backup.manager.BackupManager.async_remove_backup",
):
await client.send_json_auto_id({"type": "backup/remove", "slug": "abc123"})
assert snapshot == await client.receive_json()
assert await client.receive_json() == snapshot


@pytest.mark.parametrize(
@@ -140,7 +150,7 @@ async def test_generate(

await client.send_json_auto_id({"type": "backup/generate"})
for _ in range(number_of_messages):
assert snapshot == await client.receive_json()
assert await client.receive_json() == snapshot


@pytest.mark.parametrize(
@@ -199,7 +209,7 @@ async def test_backup_end(
"homeassistant.components.backup.manager.BackupManager.async_post_backup_actions",
):
await client.send_json_auto_id({"type": "backup/end"})
assert snapshot == await client.receive_json()
assert await client.receive_json() == snapshot


@pytest.mark.parametrize(
@@ -232,7 +242,7 @@ async def test_backup_start(
"homeassistant.components.backup.manager.BackupManager.async_pre_backup_actions",
):
await client.send_json_auto_id({"type": "backup/start"})
assert snapshot == await client.receive_json()
assert await client.receive_json() == snapshot


@pytest.mark.parametrize(
@@ -243,7 +253,7 @@ async def test_backup_start(
Exception("Boom"),
],
)
async def test_backup_end_excepion(
async def test_backup_end_exception(
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
snapshot: SnapshotAssertion,

@@ -261,7 +271,7 @@ async def test_backup_end_excepion(
side_effect=exception,
):
await client.send_json_auto_id({"type": "backup/end"})
assert snapshot == await client.receive_json()
assert await client.receive_json() == snapshot


@pytest.mark.parametrize(
@@ -272,7 +282,7 @@ async def test_backup_end_excepion(
Exception("Boom"),
],
)
async def test_backup_start_excepion(
async def test_backup_start_exception(
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
snapshot: SnapshotAssertion,

@@ -290,4 +300,135 @@ async def test_backup_start_excepion(
side_effect=exception,
):
await client.send_json_auto_id({"type": "backup/start"})
assert snapshot == await client.receive_json()
assert await client.receive_json() == snapshot


@pytest.mark.parametrize(
"with_hassio",
[
pytest.param(True, id="with_hassio"),
pytest.param(False, id="without_hassio"),
],
)
async def test_agents_info(
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
snapshot: SnapshotAssertion,
with_hassio: bool,
) -> None:
"""Test getting backup agents info."""
await setup_backup_integration(hass, with_hassio=with_hassio)
hass.data[DATA_MANAGER].backup_agents = {"domain.test": BackupAgentTest("test")}

client = await hass_ws_client(hass)
await hass.async_block_till_done()

await client.send_json_auto_id({"type": "backup/agents/info"})
assert await client.receive_json() == snapshot


@pytest.mark.parametrize(
"with_hassio",
[
pytest.param(True, id="with_hassio"),
pytest.param(False, id="without_hassio"),
],
)
async def test_agents_list_backups(
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
snapshot: SnapshotAssertion,
with_hassio: bool,
) -> None:
"""Test backup agents list backups details."""
await setup_backup_integration(hass, with_hassio=with_hassio)
hass.data[DATA_MANAGER].backup_agents = {"domain.test": BackupAgentTest("test")}

client = await hass_ws_client(hass)
await hass.async_block_till_done()

await client.send_json_auto_id({"type": "backup/agents/list_backups"})
assert await client.receive_json() == snapshot


@pytest.mark.parametrize(
"with_hassio",
[
pytest.param(True, id="with_hassio"),
pytest.param(False, id="without_hassio"),
],
)
async def test_agents_download(
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
snapshot: SnapshotAssertion,
with_hassio: bool,
) -> None:
"""Test WS command to start downloading a backup."""
await setup_backup_integration(hass, with_hassio=with_hassio)
hass.data[DATA_MANAGER].backup_agents = {"domain.test": BackupAgentTest("test")}

client = await hass_ws_client(hass)
await hass.async_block_till_done()

await client.send_json_auto_id(
{
"type": "backup/agents/download",
"slug": "abc123",
"agent": "domain.test",
"backup_id": "abc123",
}
)
with patch.object(BackupAgentTest, "async_download_backup") as download_mock:
assert await client.receive_json() == snapshot
assert download_mock.call_args[1] == {
"id": "abc123",
"path": Path(hass.config.path("backup"), "abc123.tar"),
}


async def test_agents_download_exception(
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
snapshot: SnapshotAssertion,
) -> None:
"""Test WS command to start downloading a backup throwing an exception."""
await setup_backup_integration(hass)
hass.data[DATA_MANAGER].backup_agents = {"domain.test": BackupAgentTest("test")}

client = await hass_ws_client(hass)
await hass.async_block_till_done()

await client.send_json_auto_id(
{
"type": "backup/agents/download",
"slug": "abc123",
"agent": "domain.test",
"backup_id": "abc123",
}
)
with patch.object(BackupAgentTest, "async_download_backup") as download_mock:
download_mock.side_effect = Exception("Boom")
assert await client.receive_json() == snapshot


async def test_agents_download_unknown_agent(
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
snapshot: SnapshotAssertion,
) -> None:
"""Test downloading a backup with an unknown agent."""
await setup_backup_integration(hass)

client = await hass_ws_client(hass)
await hass.async_block_till_done()

await client.send_json_auto_id(
{
"type": "backup/agents/download",
"slug": "abc123",
"agent": "domain.test",
"backup_id": "abc123",
}
)
assert await client.receive_json() == snapshot

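For reference, the new backup/agents/* WebSocket commands exercised above follow the same request/response shape as the existing backup commands. A hypothetical end-to-end test (not part of this diff) that registers one agent and drives the two read-only commands might look roughly like this; the real tests snapshot-assert the full payloads, so only the success flag is checked here.

async def test_agents_roundtrip_sketch(
    hass: HomeAssistant, hass_ws_client: WebSocketGenerator
) -> None:
    """Hypothetical sketch: query agent info and agent backups over WS."""
    await setup_backup_integration(hass)
    hass.data[DATA_MANAGER].backup_agents = {"domain.test": BackupAgentTest("test")}

    client = await hass_ws_client(hass)
    await hass.async_block_till_done()

    await client.send_json_auto_id({"type": "backup/agents/info"})
    response = await client.receive_json()
    assert response["success"]

    await client.send_json_auto_id({"type": "backup/agents/list_backups"})
    response = await client.receive_json()
    assert response["success"]
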
@@ -139,46 +139,42 @@ async def init_test_integration(
return test_camera


@pytest.mark.usefixtures("mock_camera", "mock_stream_source")
@pytest.mark.usefixtures("mock_camera", "mock_stream", "mock_stream_source")
async def test_async_register_webrtc_provider(
hass: HomeAssistant,
) -> None:
"""Test registering a WebRTC provider."""
await async_setup_component(hass, "camera", {})

camera = get_camera_from_entity_id(hass, "camera.demo_camera")
assert camera.camera_capabilities.frontend_stream_types == {StreamType.HLS}
assert camera.frontend_stream_type is StreamType.HLS

provider = SomeTestProvider()
unregister = async_register_webrtc_provider(hass, provider)
await hass.async_block_till_done()

assert camera.camera_capabilities.frontend_stream_types == {
StreamType.HLS,
StreamType.WEB_RTC,
}
assert camera.frontend_stream_type is StreamType.WEB_RTC

# Mark stream as unsupported
provider._is_supported = False
# Manually refresh the provider
await camera.async_refresh_providers()

assert camera.camera_capabilities.frontend_stream_types == {StreamType.HLS}
assert camera.frontend_stream_type is StreamType.HLS

# Mark stream as supported
provider._is_supported = True
# Manually refresh the provider
await camera.async_refresh_providers()
assert camera.camera_capabilities.frontend_stream_types == {
StreamType.HLS,
StreamType.WEB_RTC,
}
assert camera.frontend_stream_type is StreamType.WEB_RTC

unregister()
await hass.async_block_till_done()

assert camera.camera_capabilities.frontend_stream_types == {StreamType.HLS}
assert camera.frontend_stream_type is StreamType.HLS


@pytest.mark.usefixtures("mock_camera", "mock_stream_source")
@pytest.mark.usefixtures("mock_camera", "mock_stream", "mock_stream_source")
async def test_async_register_webrtc_provider_twice(
hass: HomeAssistant,
register_test_provider: SomeTestProvider,

@@ -196,11 +192,13 @@ async def test_async_register_webrtc_provider_camera_not_loaded(
async_register_webrtc_provider(hass, SomeTestProvider())


@pytest.mark.usefixtures("mock_test_webrtc_cameras")
@pytest.mark.usefixtures("mock_camera", "mock_stream", "mock_stream_source")
async def test_async_register_ice_server(
hass: HomeAssistant,
) -> None:
"""Test registering an ICE server."""
await async_setup_component(hass, "camera", {})

# Clear any existing ICE servers
hass.data[DATA_ICE_SERVERS].clear()

@@ -218,7 +216,7 @@ async def test_async_register_ice_server(
unregister = async_register_ice_servers(hass, get_ice_servers)
assert not called

camera = get_camera_from_entity_id(hass, "camera.async")
camera = get_camera_from_entity_id(hass, "camera.demo_camera")
config = camera.async_get_webrtc_client_configuration()

assert config.configuration.ice_servers == [

@@ -279,7 +277,7 @@ async def test_async_register_ice_server(
assert config.configuration.ice_servers == []


@pytest.mark.usefixtures("mock_test_webrtc_cameras")
@pytest.mark.usefixtures("mock_camera_webrtc")
async def test_ws_get_client_config(
hass: HomeAssistant, hass_ws_client: WebSocketGenerator
) -> None:

@@ -288,7 +286,7 @@ async def test_ws_get_client_config(

client = await hass_ws_client(hass)
await client.send_json_auto_id(
{"type": "camera/webrtc/get_client_config", "entity_id": "camera.async"}
{"type": "camera/webrtc/get_client_config", "entity_id": "camera.demo_camera"}
)
msg = await client.receive_json()

@@ -322,7 +320,7 @@ async def test_ws_get_client_config(
async_register_ice_servers(hass, get_ice_server)

await client.send_json_auto_id(
{"type": "camera/webrtc/get_client_config", "entity_id": "camera.async"}
{"type": "camera/webrtc/get_client_config", "entity_id": "camera.demo_camera"}
)
msg = await client.receive_json()

@@ -372,7 +370,7 @@ async def test_ws_get_client_config_sync_offer(
}


@pytest.mark.usefixtures("mock_test_webrtc_cameras")
@pytest.mark.usefixtures("mock_camera_webrtc")
async def test_ws_get_client_config_custom_config(
hass: HomeAssistant, hass_ws_client: WebSocketGenerator
) -> None:

@@ -386,7 +384,7 @@ async def test_ws_get_client_config_custom_config(

client = await hass_ws_client(hass)
await client.send_json_auto_id(
{"type": "camera/webrtc/get_client_config", "entity_id": "camera.async"}
{"type": "camera/webrtc/get_client_config", "entity_id": "camera.demo_camera"}
)
msg = await client.receive_json()

@@ -437,7 +435,7 @@ def mock_rtsp_to_webrtc_fixture(hass: HomeAssistant) -> Generator[Mock]:
unsub()


@pytest.mark.usefixtures("mock_test_webrtc_cameras")
@pytest.mark.usefixtures("mock_camera_webrtc")
async def test_websocket_webrtc_offer(
hass: HomeAssistant, hass_ws_client: WebSocketGenerator
) -> None:

@@ -446,7 +444,7 @@ async def test_websocket_webrtc_offer(
await client.send_json_auto_id(
{
"type": "camera/webrtc/offer",
"entity_id": "camera.async",
"entity_id": "camera.demo_camera",
"offer": WEBRTC_OFFER,
}
)

@@ -557,11 +555,11 @@ async def test_websocket_webrtc_offer_webrtc_provider(
mock_async_close_session.assert_called_once_with(session_id)


@pytest.mark.usefixtures("mock_camera_webrtc")
async def test_websocket_webrtc_offer_invalid_entity(
hass: HomeAssistant, hass_ws_client: WebSocketGenerator
) -> None:
"""Test WebRTC with a camera entity that does not exist."""
await async_setup_component(hass, "camera", {})
client = await hass_ws_client(hass)
await client.send_json_auto_id(
{

@@ -580,7 +578,7 @@ async def test_websocket_webrtc_offer_invalid_entity(
}


@pytest.mark.usefixtures("mock_test_webrtc_cameras")
@pytest.mark.usefixtures("mock_camera_webrtc")
async def test_websocket_webrtc_offer_missing_offer(
hass: HomeAssistant, hass_ws_client: WebSocketGenerator
) -> None:

@@ -607,6 +605,7 @@ async def test_websocket_webrtc_offer_missing_offer(
(TimeoutError(), "Timeout handling WebRTC offer"),
],
)
@pytest.mark.usefixtures("mock_camera_webrtc_frontendtype_only")
async def test_websocket_webrtc_offer_failure(
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,

@@ -950,7 +949,7 @@ async def test_rtsp_to_webrtc_offer_not_accepted(
unsub()


@pytest.mark.usefixtures("mock_test_webrtc_cameras")
@pytest.mark.usefixtures("mock_camera_webrtc")
async def test_ws_webrtc_candidate(
hass: HomeAssistant, hass_ws_client: WebSocketGenerator
) -> None:

@@ -958,13 +957,13 @@ async def test_ws_webrtc_candidate(
client = await hass_ws_client(hass)
session_id = "session_id"
candidate = "candidate"
with patch.object(
get_camera_from_entity_id(hass, "camera.async"), "async_on_webrtc_candidate"
with patch(
"homeassistant.components.camera.Camera.async_on_webrtc_candidate"
) as mock_on_webrtc_candidate:
await client.send_json_auto_id(
{
"type": "camera/webrtc/candidate",
"entity_id": "camera.async",
"entity_id": "camera.demo_camera",
"session_id": session_id,
"candidate": candidate,
}

@@ -977,7 +976,7 @@ async def test_ws_webrtc_candidate(
)


@pytest.mark.usefixtures("mock_test_webrtc_cameras")
@pytest.mark.usefixtures("mock_camera_webrtc")
async def test_ws_webrtc_candidate_not_supported(
hass: HomeAssistant, hass_ws_client: WebSocketGenerator
) -> None:

@@ -986,7 +985,7 @@ async def test_ws_webrtc_candidate_not_supported(
await client.send_json_auto_id(
{
"type": "camera/webrtc/candidate",
"entity_id": "camera.sync",
"entity_id": "camera.demo_camera",
"session_id": "session_id",
"candidate": "candidate",
}

@@ -1029,11 +1028,11 @@ async def test_ws_webrtc_candidate_webrtc_provider(
)


@pytest.mark.usefixtures("mock_camera_webrtc")
async def test_ws_webrtc_candidate_invalid_entity(
hass: HomeAssistant, hass_ws_client: WebSocketGenerator
) -> None:
"""Test ws WebRTC candidate command with a camera entity that does not exist."""
await async_setup_component(hass, "camera", {})
client = await hass_ws_client(hass)
await client.send_json_auto_id(
{

@@ -1053,7 +1052,7 @@ async def test_ws_webrtc_candidate_invalid_entity(
}


@pytest.mark.usefixtures("mock_test_webrtc_cameras")
@pytest.mark.usefixtures("mock_camera_webrtc")
async def test_ws_webrtc_canidate_missing_candidate(
hass: HomeAssistant, hass_ws_client: WebSocketGenerator
) -> None:

@@ -1062,7 +1061,7 @@ async def test_ws_webrtc_canidate_missing_candidate(
await client.send_json_auto_id(
{
"type": "camera/webrtc/candidate",
"entity_id": "camera.async",
"entity_id": "camera.demo_camera",
"session_id": "session_id",
}
)

|
|||
)
|
||||
from homeassistant.const import STATE_OFF, STATE_ON
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.data_entry_flow import (
|
||||
FlowContext,
|
||||
FlowHandler,
|
||||
FlowManager,
|
||||
FlowResultType,
|
||||
)
|
||||
from homeassistant.data_entry_flow import FlowHandler, FlowManager, FlowResultType
|
||||
from homeassistant.helpers.translation import async_get_translations
|
||||
|
||||
if TYPE_CHECKING:
|
||||
|
@ -562,12 +557,12 @@ def _validate_translation_placeholders(
|
|||
description_placeholders is None
|
||||
or placeholder not in description_placeholders
|
||||
):
|
||||
ignore_translations[full_key] = (
|
||||
pytest.fail(
|
||||
f"Description not found for placeholder `{placeholder}` in {full_key}"
|
||||
)
|
||||
|
||||
|
||||
async def _validate_translation(
|
||||
async def _ensure_translation_exists(
|
||||
hass: HomeAssistant,
|
||||
ignore_translations: dict[str, StoreInfo],
|
||||
category: str,
|
||||
|
@ -593,7 +588,7 @@ async def _validate_translation(
|
|||
ignore_translations[full_key] = "used"
|
||||
return
|
||||
|
||||
ignore_translations[full_key] = (
|
||||
pytest.fail(
|
||||
f"Translation not found for {component}: `{category}.{key}`. "
|
||||
f"Please add to homeassistant/components/{component}/strings.json"
|
||||
)
|
||||
|
@ -609,20 +604,27 @@ def ignore_translations() -> str | list[str]:
|
|||
return []
|
||||
|
||||
|
||||
async def _check_config_flow_result_translations(
|
||||
manager: FlowManager,
|
||||
flow: FlowHandler,
|
||||
result: FlowResult[FlowContext, str],
|
||||
ignore_translations: dict[str, str],
|
||||
) -> None:
|
||||
if isinstance(manager, ConfigEntriesFlowManager):
|
||||
@pytest.fixture(autouse=True)
|
||||
def check_config_translations(ignore_translations: str | list[str]) -> Generator[None]:
|
||||
"""Ensure config_flow translations are available."""
|
||||
if not isinstance(ignore_translations, list):
|
||||
ignore_translations = [ignore_translations]
|
||||
|
||||
_ignore_translations = {k: "unused" for k in ignore_translations}
|
||||
_original = FlowManager._async_handle_step
|
||||
|
||||
async def _async_handle_step(
|
||||
self: FlowManager, flow: FlowHandler, *args
|
||||
) -> FlowResult:
|
||||
result = await _original(self, flow, *args)
|
||||
if isinstance(self, ConfigEntriesFlowManager):
|
||||
category = "config"
|
||||
integration = flow.handler
|
||||
elif isinstance(manager, OptionsFlowManager):
|
||||
component = flow.handler
|
||||
elif isinstance(self, OptionsFlowManager):
|
||||
category = "options"
|
||||
integration = flow.hass.config_entries.async_get_entry(flow.handler).domain
|
||||
component = flow.hass.config_entries.async_get_entry(flow.handler).domain
|
||||
else:
|
||||
return
|
||||
return result
|
||||
|
||||
# Check if this flow has been seen before
|
||||
# Gets set to False on first run, and to True on subsequent runs
|
||||
|
@ -634,81 +636,52 @@ async def _check_config_flow_result_translations(
|
|||
# - title defaults to integration name
|
||||
# - description is optional
|
||||
for header in ("title", "description"):
|
||||
await _validate_translation(
|
||||
await _ensure_translation_exists(
|
||||
flow.hass,
|
||||
ignore_translations,
|
||||
_ignore_translations,
|
||||
category,
|
||||
integration,
|
||||
component,
|
||||
f"step.{step_id}.{header}",
|
||||
result["description_placeholders"],
|
||||
translation_required=False,
|
||||
)
|
||||
if errors := result.get("errors"):
|
||||
for error in errors.values():
|
||||
await _validate_translation(
|
||||
await _ensure_translation_exists(
|
||||
flow.hass,
|
||||
ignore_translations,
|
||||
_ignore_translations,
|
||||
category,
|
||||
integration,
|
||||
component,
|
||||
f"error.{error}",
|
||||
result["description_placeholders"],
|
||||
)
|
||||
return
|
||||
return result
|
||||
|
||||
if result["type"] is FlowResultType.ABORT:
|
||||
# We don't need translations for a discovery flow which immediately
|
||||
# aborts, since such flows won't be seen by users
|
||||
if not flow.__flow_seen_before and flow.source in DISCOVERY_SOURCES:
|
||||
return
|
||||
await _validate_translation(
|
||||
return result
|
||||
await _ensure_translation_exists(
|
||||
flow.hass,
|
||||
ignore_translations,
|
||||
_ignore_translations,
|
||||
category,
|
||||
integration,
|
||||
component,
|
||||
f"abort.{result["reason"]}",
|
||||
result["description_placeholders"],
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def check_translations(ignore_translations: str | list[str]) -> Generator[None]:
|
||||
"""Check that translation requirements are met.
|
||||
|
||||
Current checks:
|
||||
- data entry flow results (ConfigFlow/OptionsFlow)
|
||||
"""
|
||||
if not isinstance(ignore_translations, list):
|
||||
ignore_translations = [ignore_translations]
|
||||
|
||||
_ignore_translations = {k: "unused" for k in ignore_translations}
|
||||
|
||||
# Keep reference to original functions
|
||||
_original_flow_manager_async_handle_step = FlowManager._async_handle_step
|
||||
|
||||
# Prepare override functions
|
||||
async def _flow_manager_async_handle_step(
|
||||
self: FlowManager, flow: FlowHandler, *args
|
||||
) -> FlowResult:
|
||||
result = await _original_flow_manager_async_handle_step(self, flow, *args)
|
||||
await _check_config_flow_result_translations(
|
||||
self, flow, result, _ignore_translations
|
||||
)
|
||||
return result
|
||||
|
||||
# Use override functions
|
||||
with patch(
|
||||
"homeassistant.data_entry_flow.FlowManager._async_handle_step",
|
||||
_flow_manager_async_handle_step,
|
||||
_async_handle_step,
|
||||
):
|
||||
yield
|
||||
|
||||
# Run final checks
|
||||
unused_ignore = [k for k, v in _ignore_translations.items() if v == "unused"]
|
||||
if unused_ignore:
|
||||
pytest.fail(
|
||||
f"Unused ignore translations: {', '.join(unused_ignore)}. "
|
||||
"Please remove them from the ignore_translations fixture."
|
||||
)
|
||||
for description in _ignore_translations.values():
|
||||
if description not in {"used", "unused"}:
|
||||
pytest.fail(description)
|
||||
|
|
|
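With the autouse translation check in place, a config-flow test whose strings.json entry is still missing has to opt out explicitly by parametrizing the ignore_translations fixture; the srp_energy and subaru hunks further down use exactly this escape hatch. For reference, the pattern looks like this (the translation key shown is a hypothetical placeholder):

@pytest.mark.parametrize(  # Remove when translations fixed
    "ignore_translations",
    ["component.some_domain.config.abort.some_reason"],
)
async def test_some_aborting_flow(hass: HomeAssistant) -> None:
    """Flow test that still aborts with an untranslated reason."""
    ...
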
@@ -639,7 +639,7 @@
'details': dict({
'brightness': dict({
'name': 'brightness',
'text': '100',
'text': '100%',
'value': 100,
}),
'name': dict({

@@ -654,7 +654,7 @@
'match': True,
'sentence_template': '[<numeric_value_set>] <name> brightness [to] <brightness>',
'slots': dict({
'brightness': '100',
'brightness': '100%',
'name': 'test light',
}),
'source': 'builtin',

@@ -770,8 +770,8 @@ async def test_error_no_device_on_floor_exposed(
)

with patch(
"homeassistant.components.conversation.default_agent.recognize_best",
return_value=recognize_result,
"homeassistant.components.conversation.default_agent.recognize_all",
return_value=[recognize_result],
):
result = await conversation.async_converse(
hass, "turn on test light on the ground floor", None, Context(), None

@@ -838,8 +838,8 @@ async def test_error_no_domain(hass: HomeAssistant) -> None:
)

with patch(
"homeassistant.components.conversation.default_agent.recognize_best",
return_value=recognize_result,
"homeassistant.components.conversation.default_agent.recognize_all",
return_value=[recognize_result],
):
result = await conversation.async_converse(
hass, "turn on the fans", None, Context(), None

@@ -873,8 +873,8 @@ async def test_error_no_domain_exposed(hass: HomeAssistant) -> None:
)

with patch(
"homeassistant.components.conversation.default_agent.recognize_best",
return_value=recognize_result,
"homeassistant.components.conversation.default_agent.recognize_all",
return_value=[recognize_result],
):
result = await conversation.async_converse(
hass, "turn on the fans", None, Context(), None

@@ -1047,8 +1047,8 @@ async def test_error_no_device_class(hass: HomeAssistant) -> None:
)

with patch(
"homeassistant.components.conversation.default_agent.recognize_best",
return_value=recognize_result,
"homeassistant.components.conversation.default_agent.recognize_all",
return_value=[recognize_result],
):
result = await conversation.async_converse(
hass, "open the windows", None, Context(), None

@@ -1096,8 +1096,8 @@ async def test_error_no_device_class_exposed(hass: HomeAssistant) -> None:
)

with patch(
"homeassistant.components.conversation.default_agent.recognize_best",
return_value=recognize_result,
"homeassistant.components.conversation.default_agent.recognize_all",
return_value=[recognize_result],
):
result = await conversation.async_converse(
hass, "open all the windows", None, Context(), None

@@ -1207,8 +1207,8 @@ async def test_error_no_device_class_on_floor_exposed(
)

with patch(
"homeassistant.components.conversation.default_agent.recognize_best",
return_value=recognize_result,
"homeassistant.components.conversation.default_agent.recognize_all",
return_value=[recognize_result],
):
result = await conversation.async_converse(
hass, "open ground floor windows", None, Context(), None

@@ -1229,8 +1229,8 @@ async def test_error_no_device_class_on_floor_exposed(
async def test_error_no_intent(hass: HomeAssistant) -> None:
"""Test response with an intent match failure."""
with patch(
"homeassistant.components.conversation.default_agent.recognize_best",
return_value=None,
"homeassistant.components.conversation.default_agent.recognize_all",
return_value=[],
):
result = await conversation.async_converse(
hass, "do something", None, Context(), None

@@ -56,7 +56,7 @@ async def test_converation_trace(
"intent_name": "HassListAddItem",
"slots": {
"name": "Shopping List",
"item": "apples",
"item": "apples ",
},
}

tests/components/dhcp/conftest.py (new file, 21 lines)
@@ -0,0 +1,21 @@
"""Tests for the dhcp integration."""

import os
import pathlib


def pytest_sessionstart(session):
"""Try to avoid flaky FileExistsError in CI.

Called after the Session object has been created and
before performing collection and entering the run test loop.

This is needed due to a race condition in scapy v2.6.0
See https://github.com/secdev/scapy/pull/4558

Can be removed when scapy 2.6.1 is released.
"""
for sub_dir in (".cache", ".config"):
path = pathlib.Path(os.path.join(os.path.expanduser("~"), sub_dir))
if not path.exists():
path.mkdir(mode=0o700, exist_ok=True)

tests/components/huum/conftest.py (new file, 6 lines)
@@ -0,0 +1,6 @@
"""Skip test collection for Python 3.13."""

import sys

if sys.version_info >= (3, 13):
collect_ignore_glob = ["test_*.py"]

@@ -100,6 +100,10 @@ async def test_form_invalid_auth(
assert result["errors"] == {"base": "invalid_auth"}


@pytest.mark.parametrize( # Remove when translations fixed
"ignore_translations",
["component.srp_energy.config.abort.unknown"],
)
async def test_form_unknown_error(
hass: HomeAssistant,
mock_srp_energy_config_flow: MagicMock,

@@ -192,6 +192,10 @@ async def test_two_factor_request_success(
assert len(mock_two_factor_request.mock_calls) == 1


@pytest.mark.parametrize( # Remove when translations fixed
"ignore_translations",
["component.subaru.config.abort.two_factor_request_failed"],
)
async def test_two_factor_request_fail(
hass: HomeAssistant, two_factor_start_form
) -> None:

@@ -21,7 +21,7 @@ import zigpy.types

from homeassistant import config_entries
from homeassistant.components import ssdp, usb, zeroconf
from homeassistant.components.hassio import AddonError, AddonState
from homeassistant.components.hassio import AddonState
from homeassistant.components.ssdp import ATTR_UPNP_MANUFACTURER_URL, ATTR_UPNP_SERIAL
from homeassistant.components.zha import config_flow, radio_manager
from homeassistant.components.zha.const import (

@@ -1878,23 +1878,10 @@ async def test_config_flow_port_yellow_port_name(hass: HomeAssistant) -> None:
)


async def test_config_flow_ports_no_hassio(hass: HomeAssistant) -> None:
"""Test config flow serial port name when this is not a hassio install."""

with (
patch("homeassistant.components.zha.config_flow.is_hassio", return_value=False),
patch("serial.tools.list_ports.comports", MagicMock(return_value=[])),
):
ports = await config_flow.list_serial_ports(hass)

assert ports == []


async def test_config_flow_port_multiprotocol_port_name(hass: HomeAssistant) -> None:
"""Test config flow serial port name for multiprotocol add-on."""

with (
patch("homeassistant.components.zha.config_flow.is_hassio", return_value=True),
patch(
"homeassistant.components.hassio.addon_manager.AddonManager.async_get_addon_info"
) as async_get_addon_info,

@@ -1902,28 +1889,16 @@ async def test_config_flow_port_multiprotocol_port_name(hass: HomeAssistant) ->
):
async_get_addon_info.return_value.state = AddonState.RUNNING
async_get_addon_info.return_value.hostname = "core-silabs-multiprotocol"
ports = await config_flow.list_serial_ports(hass)

assert len(ports) == 1
assert ports[0].description == "Multiprotocol add-on"
assert ports[0].manufacturer == "Nabu Casa"
assert ports[0].device == "socket://core-silabs-multiprotocol:9999"
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={CONF_SOURCE: SOURCE_USER},
)


async def test_config_flow_port_no_multiprotocol(hass: HomeAssistant) -> None:
"""Test config flow serial port listing when addon info fails to load."""

with (
patch("homeassistant.components.zha.config_flow.is_hassio", return_value=True),
patch(
"homeassistant.components.hassio.addon_manager.AddonManager.async_get_addon_info",
side_effect=AddonError,
),
patch("serial.tools.list_ports.comports", MagicMock(return_value=[])),
):
ports = await config_flow.list_serial_ports(hass)

assert ports == []
assert (
result["data_schema"].schema["path"].container[0]
== "socket://core-silabs-multiprotocol:9999 - Multiprotocol add-on - Nabu Casa"
)


@patch("serial.tools.list_ports.comports", MagicMock(return_value=[com_port()]))
