Make local backup a backup agent
This commit is contained in:
parent
e8179f7a73
commit
400f792bff
9 changed files with 372 additions and 216 deletions
|
@ -32,6 +32,7 @@ SERVICE_CREATE_SCHEMA = vol.Schema({vol.Optional(CONF_PASSWORD): str})
|
||||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||||
"""Set up the Backup integration."""
|
"""Set up the Backup integration."""
|
||||||
hass.data[DOMAIN] = backup_manager = BackupManager(hass)
|
hass.data[DOMAIN] = backup_manager = BackupManager(hass)
|
||||||
|
await backup_manager.async_setup()
|
||||||
|
|
||||||
with_hassio = is_hassio(hass)
|
with_hassio = is_hassio(hass)
|
||||||
|
|
||||||
|
@ -49,6 +50,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||||
"""Service handler for creating backups."""
|
"""Service handler for creating backups."""
|
||||||
await backup_manager.async_create_backup(
|
await backup_manager.async_create_backup(
|
||||||
addons_included=None,
|
addons_included=None,
|
||||||
|
# pylint: disable=fixme
|
||||||
|
# TODO: Don't forget to remove this when the implementation is complete
|
||||||
|
agent_ids=[], # TODO: Should we default to local?
|
||||||
database_included=True,
|
database_included=True,
|
||||||
folders_included=None,
|
folders_included=None,
|
||||||
name=None,
|
name=None,
|
||||||
|
|
146
homeassistant/components/backup/backup.py
Normal file
146
homeassistant/components/backup/backup.py
Normal file
|
@ -0,0 +1,146 @@
|
||||||
|
"""Local backup support for Core and Container installations."""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from dataclasses import asdict, dataclass
|
||||||
|
import json
|
||||||
|
from pathlib import Path
|
||||||
|
import tarfile
|
||||||
|
from tarfile import TarError
|
||||||
|
from typing import Any, cast
|
||||||
|
|
||||||
|
from homeassistant.core import HomeAssistant
|
||||||
|
from homeassistant.util.json import json_loads_object
|
||||||
|
|
||||||
|
from .agent import BackupAgent, UploadedBackup
|
||||||
|
from .const import BUF_SIZE, LOGGER
|
||||||
|
from .models import BackupUploadMetadata
|
||||||
|
|
||||||
|
|
||||||
|
async def async_get_backup_agents(
|
||||||
|
hass: HomeAssistant,
|
||||||
|
**kwargs: Any,
|
||||||
|
) -> list[BackupAgent]:
|
||||||
|
"""Register the backup agent."""
|
||||||
|
return [LocalBackupAgent(hass)]
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(slots=True)
|
||||||
|
class LocalBackup(UploadedBackup):
|
||||||
|
"""Local backup class."""
|
||||||
|
|
||||||
|
path: Path
|
||||||
|
|
||||||
|
def as_dict(self) -> dict:
|
||||||
|
"""Return a dict representation of this backup."""
|
||||||
|
return {**asdict(self), "path": self.path.as_posix()}
|
||||||
|
|
||||||
|
|
||||||
|
class LocalBackupAgent(BackupAgent):
|
||||||
|
"""Define the format that backup agents can have."""
|
||||||
|
|
||||||
|
name = "local"
|
||||||
|
|
||||||
|
def __init__(self, hass: HomeAssistant) -> None:
|
||||||
|
"""Initialize the backup agent."""
|
||||||
|
super().__init__()
|
||||||
|
self._hass = hass
|
||||||
|
self.backup_dir = Path(hass.config.path("backups"))
|
||||||
|
self.backups: dict[str, LocalBackup] = {}
|
||||||
|
self.loaded_backups = False
|
||||||
|
|
||||||
|
async def load_backups(self) -> None:
|
||||||
|
"""Load data of stored backup files."""
|
||||||
|
backups = await self._hass.async_add_executor_job(self._read_backups)
|
||||||
|
LOGGER.debug("Loaded %s local backups", len(backups))
|
||||||
|
self.backups = backups
|
||||||
|
self.loaded_backups = True
|
||||||
|
|
||||||
|
def _read_backups(self) -> dict[str, LocalBackup]:
|
||||||
|
"""Read backups from disk."""
|
||||||
|
backups: dict[str, LocalBackup] = {}
|
||||||
|
for backup_path in self.backup_dir.glob("*.tar"):
|
||||||
|
try:
|
||||||
|
with tarfile.open(backup_path, "r:", bufsize=BUF_SIZE) as backup_file:
|
||||||
|
if data_file := backup_file.extractfile("./backup.json"):
|
||||||
|
data = json_loads_object(data_file.read())
|
||||||
|
backup = LocalBackup(
|
||||||
|
id=cast(str, data["slug"]), # Do we need another ID?
|
||||||
|
slug=cast(str, data["slug"]),
|
||||||
|
name=cast(str, data["name"]),
|
||||||
|
date=cast(str, data["date"]),
|
||||||
|
path=backup_path,
|
||||||
|
size=round(backup_path.stat().st_size / 1_048_576, 2),
|
||||||
|
protected=cast(bool, data.get("protected", False)),
|
||||||
|
)
|
||||||
|
backups[backup.slug] = backup
|
||||||
|
except (OSError, TarError, json.JSONDecodeError, KeyError) as err:
|
||||||
|
LOGGER.warning("Unable to read backup %s: %s", backup_path, err)
|
||||||
|
return backups
|
||||||
|
|
||||||
|
async def async_download_backup(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
id: str,
|
||||||
|
path: Path,
|
||||||
|
**kwargs: Any,
|
||||||
|
) -> None:
|
||||||
|
"""Download a backup file."""
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
async def async_upload_backup(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
path: Path,
|
||||||
|
metadata: BackupUploadMetadata,
|
||||||
|
**kwargs: Any,
|
||||||
|
) -> None:
|
||||||
|
"""Upload a backup."""
|
||||||
|
self.backups[metadata.slug] = LocalBackup(
|
||||||
|
id=metadata.slug, # Do we need another ID?
|
||||||
|
slug=metadata.slug,
|
||||||
|
name=metadata.name,
|
||||||
|
date=metadata.date,
|
||||||
|
path=path,
|
||||||
|
size=round(path.stat().st_size / 1_048_576, 2),
|
||||||
|
protected=metadata.protected,
|
||||||
|
)
|
||||||
|
|
||||||
|
async def async_list_backups(self, **kwargs: Any) -> list[UploadedBackup]:
|
||||||
|
"""List backups."""
|
||||||
|
if not self.loaded_backups:
|
||||||
|
await self.load_backups()
|
||||||
|
return list(self.backups.values())
|
||||||
|
|
||||||
|
async def async_get_backup(
|
||||||
|
self, *, slug: str, **kwargs: Any
|
||||||
|
) -> UploadedBackup | None:
|
||||||
|
"""Return a backup."""
|
||||||
|
if not self.loaded_backups:
|
||||||
|
await self.load_backups()
|
||||||
|
|
||||||
|
if not (backup := self.backups.get(slug)):
|
||||||
|
return None
|
||||||
|
|
||||||
|
if not backup.path.exists():
|
||||||
|
LOGGER.debug(
|
||||||
|
(
|
||||||
|
"Removing tracked backup (%s) that does not exists on the expected"
|
||||||
|
" path %s"
|
||||||
|
),
|
||||||
|
backup.slug,
|
||||||
|
backup.path,
|
||||||
|
)
|
||||||
|
self.backups.pop(slug)
|
||||||
|
return None
|
||||||
|
|
||||||
|
return backup
|
||||||
|
|
||||||
|
async def async_remove_backup(self, *, slug: str, **kwargs: Any) -> None:
|
||||||
|
"""Remove a backup."""
|
||||||
|
if (backup := await self.async_get_backup(slug=slug)) is None:
|
||||||
|
return
|
||||||
|
|
||||||
|
await self._hass.async_add_executor_job(backup.path.unlink, True) # type: ignore[attr-defined]
|
||||||
|
LOGGER.debug("Removed backup located at %s", backup.path) # type: ignore[attr-defined]
|
||||||
|
self.backups.pop(slug)
|
|
@ -11,6 +11,7 @@ if TYPE_CHECKING:
|
||||||
from .manager import BaseBackupManager
|
from .manager import BaseBackupManager
|
||||||
from .models import BaseBackup
|
from .models import BaseBackup
|
||||||
|
|
||||||
|
BUF_SIZE = 2**20 * 4 # 4MB
|
||||||
DOMAIN = "backup"
|
DOMAIN = "backup"
|
||||||
DATA_MANAGER: HassKey[BaseBackupManager[BaseBackup]] = HassKey(DOMAIN)
|
DATA_MANAGER: HassKey[BaseBackupManager[BaseBackup]] = HassKey(DOMAIN)
|
||||||
LOGGER = getLogger(__package__)
|
LOGGER = getLogger(__package__)
|
||||||
|
|
|
@ -13,10 +13,9 @@ from pathlib import Path
|
||||||
from queue import SimpleQueue
|
from queue import SimpleQueue
|
||||||
import shutil
|
import shutil
|
||||||
import tarfile
|
import tarfile
|
||||||
from tarfile import TarError
|
|
||||||
from tempfile import TemporaryDirectory
|
from tempfile import TemporaryDirectory
|
||||||
import time
|
import time
|
||||||
from typing import Any, Generic, Protocol, cast
|
from typing import Any, Generic, Protocol
|
||||||
|
|
||||||
import aiohttp
|
import aiohttp
|
||||||
from securetar import SecureTarFile, atomic_contents_add
|
from securetar import SecureTarFile, atomic_contents_add
|
||||||
|
@ -29,13 +28,22 @@ from homeassistant.exceptions import HomeAssistantError
|
||||||
from homeassistant.helpers import integration_platform
|
from homeassistant.helpers import integration_platform
|
||||||
from homeassistant.helpers.json import json_bytes
|
from homeassistant.helpers.json import json_bytes
|
||||||
from homeassistant.util import dt as dt_util
|
from homeassistant.util import dt as dt_util
|
||||||
from homeassistant.util.json import json_loads_object
|
|
||||||
|
|
||||||
from .agent import BackupAgent, BackupAgentPlatformProtocol
|
from .agent import BackupAgent, BackupAgentPlatformProtocol
|
||||||
from .const import DOMAIN, EXCLUDE_DATABASE_FROM_BACKUP, EXCLUDE_FROM_BACKUP, LOGGER
|
from .const import (
|
||||||
|
BUF_SIZE,
|
||||||
|
DOMAIN,
|
||||||
|
EXCLUDE_DATABASE_FROM_BACKUP,
|
||||||
|
EXCLUDE_FROM_BACKUP,
|
||||||
|
LOGGER,
|
||||||
|
)
|
||||||
from .models import BackupUploadMetadata, BaseBackup
|
from .models import BackupUploadMetadata, BaseBackup
|
||||||
|
|
||||||
BUF_SIZE = 2**20 * 4 # 4MB
|
# pylint: disable=fixme
|
||||||
|
# TODO: Don't forget to remove this when the implementation is complete
|
||||||
|
|
||||||
|
|
||||||
|
LOCAL_AGENT_ID = f"{DOMAIN}.local"
|
||||||
|
|
||||||
_BackupT = TypeVar("_BackupT", bound=BaseBackup, default=BaseBackup)
|
_BackupT = TypeVar("_BackupT", bound=BaseBackup, default=BaseBackup)
|
||||||
|
|
||||||
|
@ -51,6 +59,7 @@ class NewBackup:
|
||||||
class Backup(BaseBackup):
|
class Backup(BaseBackup):
|
||||||
"""Backup class."""
|
"""Backup class."""
|
||||||
|
|
||||||
|
agent_ids: list[str]
|
||||||
path: Path
|
path: Path
|
||||||
|
|
||||||
def as_dict(self) -> dict:
|
def as_dict(self) -> dict:
|
||||||
|
@ -84,20 +93,21 @@ class BaseBackupManager(abc.ABC, Generic[_BackupT]):
|
||||||
"""Initialize the backup manager."""
|
"""Initialize the backup manager."""
|
||||||
self.hass = hass
|
self.hass = hass
|
||||||
self.backup_task: asyncio.Task | None = None
|
self.backup_task: asyncio.Task | None = None
|
||||||
self.backups: dict[str, _BackupT] = {}
|
|
||||||
self.loaded_platforms = False
|
|
||||||
self.platforms: dict[str, BackupPlatformProtocol] = {}
|
self.platforms: dict[str, BackupPlatformProtocol] = {}
|
||||||
self.backup_agents: dict[str, BackupAgent] = {}
|
self.backup_agents: dict[str, BackupAgent] = {}
|
||||||
self.syncing = False
|
self.syncing = False
|
||||||
|
|
||||||
|
async def async_setup(self) -> None:
|
||||||
|
"""Set up the backup manager."""
|
||||||
|
await self.load_platforms()
|
||||||
|
|
||||||
@callback
|
@callback
|
||||||
def _add_platform_pre_post_handlers(
|
def _add_platform_pre_post_handler(
|
||||||
self,
|
self,
|
||||||
hass: HomeAssistant,
|
|
||||||
integration_domain: str,
|
integration_domain: str,
|
||||||
platform: BackupPlatformProtocol,
|
platform: BackupPlatformProtocol,
|
||||||
) -> None:
|
) -> None:
|
||||||
"""Add a platform to the backup manager."""
|
"""Add a backup platform."""
|
||||||
if not hasattr(platform, "async_pre_backup") or not hasattr(
|
if not hasattr(platform, "async_pre_backup") or not hasattr(
|
||||||
platform, "async_post_backup"
|
platform, "async_post_backup"
|
||||||
):
|
):
|
||||||
|
@ -107,7 +117,6 @@ class BaseBackupManager(abc.ABC, Generic[_BackupT]):
|
||||||
|
|
||||||
async def _async_add_platform_agents(
|
async def _async_add_platform_agents(
|
||||||
self,
|
self,
|
||||||
hass: HomeAssistant,
|
|
||||||
integration_domain: str,
|
integration_domain: str,
|
||||||
platform: BackupAgentPlatformProtocol,
|
platform: BackupAgentPlatformProtocol,
|
||||||
) -> None:
|
) -> None:
|
||||||
|
@ -115,16 +124,23 @@ class BaseBackupManager(abc.ABC, Generic[_BackupT]):
|
||||||
if not hasattr(platform, "async_get_backup_agents"):
|
if not hasattr(platform, "async_get_backup_agents"):
|
||||||
return
|
return
|
||||||
|
|
||||||
agents = await platform.async_get_backup_agents(hass=hass)
|
agents = await platform.async_get_backup_agents(self.hass)
|
||||||
self.backup_agents.update(
|
self.backup_agents.update(
|
||||||
{f"{integration_domain}.{agent.name}": agent for agent in agents}
|
{f"{integration_domain}.{agent.name}": agent for agent in agents}
|
||||||
)
|
)
|
||||||
|
|
||||||
|
async def _add_platform(
|
||||||
|
self,
|
||||||
|
hass: HomeAssistant,
|
||||||
|
integration_domain: str,
|
||||||
|
platform: Any,
|
||||||
|
) -> None:
|
||||||
|
"""Add a backup platform manager."""
|
||||||
|
self._add_platform_pre_post_handler(integration_domain, platform)
|
||||||
|
await self._async_add_platform_agents(integration_domain, platform)
|
||||||
|
|
||||||
async def async_pre_backup_actions(self, **kwargs: Any) -> None:
|
async def async_pre_backup_actions(self, **kwargs: Any) -> None:
|
||||||
"""Perform pre backup actions."""
|
"""Perform pre backup actions."""
|
||||||
if not self.loaded_platforms:
|
|
||||||
await self.load_platforms()
|
|
||||||
|
|
||||||
pre_backup_results = await asyncio.gather(
|
pre_backup_results = await asyncio.gather(
|
||||||
*(
|
*(
|
||||||
platform.async_pre_backup(self.hass)
|
platform.async_pre_backup(self.hass)
|
||||||
|
@ -138,9 +154,6 @@ class BaseBackupManager(abc.ABC, Generic[_BackupT]):
|
||||||
|
|
||||||
async def async_post_backup_actions(self, **kwargs: Any) -> None:
|
async def async_post_backup_actions(self, **kwargs: Any) -> None:
|
||||||
"""Perform post backup actions."""
|
"""Perform post backup actions."""
|
||||||
if not self.loaded_platforms:
|
|
||||||
await self.load_platforms()
|
|
||||||
|
|
||||||
post_backup_results = await asyncio.gather(
|
post_backup_results = await asyncio.gather(
|
||||||
*(
|
*(
|
||||||
platform.async_post_backup(self.hass)
|
platform.async_post_backup(self.hass)
|
||||||
|
@ -154,23 +167,14 @@ class BaseBackupManager(abc.ABC, Generic[_BackupT]):
|
||||||
|
|
||||||
async def load_platforms(self) -> None:
|
async def load_platforms(self) -> None:
|
||||||
"""Load backup platforms."""
|
"""Load backup platforms."""
|
||||||
if self.loaded_platforms:
|
|
||||||
return
|
|
||||||
await integration_platform.async_process_integration_platforms(
|
await integration_platform.async_process_integration_platforms(
|
||||||
self.hass,
|
self.hass,
|
||||||
DOMAIN,
|
DOMAIN,
|
||||||
self._add_platform_pre_post_handlers,
|
self._add_platform,
|
||||||
wait_for_platforms=True,
|
|
||||||
)
|
|
||||||
await integration_platform.async_process_integration_platforms(
|
|
||||||
self.hass,
|
|
||||||
DOMAIN,
|
|
||||||
self._async_add_platform_agents,
|
|
||||||
wait_for_platforms=True,
|
wait_for_platforms=True,
|
||||||
)
|
)
|
||||||
LOGGER.debug("Loaded %s platforms", len(self.platforms))
|
LOGGER.debug("Loaded %s platforms", len(self.platforms))
|
||||||
LOGGER.debug("Loaded %s agents", len(self.backup_agents))
|
LOGGER.debug("Loaded %s agents", len(self.backup_agents))
|
||||||
self.loaded_platforms = True
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
@abc.abstractmethod
|
||||||
async def async_restore_backup(
|
async def async_restore_backup(
|
||||||
|
@ -187,6 +191,7 @@ class BaseBackupManager(abc.ABC, Generic[_BackupT]):
|
||||||
self,
|
self,
|
||||||
*,
|
*,
|
||||||
addons_included: list[str] | None,
|
addons_included: list[str] | None,
|
||||||
|
agent_ids: list[str],
|
||||||
database_included: bool,
|
database_included: bool,
|
||||||
folders_included: list[str] | None,
|
folders_included: list[str] | None,
|
||||||
name: str | None,
|
name: str | None,
|
||||||
|
@ -236,22 +241,34 @@ class BackupManager(BaseBackupManager[Backup]):
|
||||||
"""Initialize the backup manager."""
|
"""Initialize the backup manager."""
|
||||||
super().__init__(hass=hass)
|
super().__init__(hass=hass)
|
||||||
self.backup_dir = Path(hass.config.path("backups"))
|
self.backup_dir = Path(hass.config.path("backups"))
|
||||||
self.loaded_backups = False
|
self.temp_backup_dir = Path(hass.config.path("tmp_backups"))
|
||||||
|
|
||||||
async def async_upload_backup(self, *, slug: str, **kwargs: Any) -> None:
|
async def async_upload_backup(self, *, slug: str, **kwargs: Any) -> None:
|
||||||
"""Upload a backup."""
|
"""Upload a backup to all agents."""
|
||||||
await self.load_platforms()
|
|
||||||
|
|
||||||
if not self.backup_agents:
|
if not self.backup_agents:
|
||||||
return
|
return
|
||||||
|
|
||||||
if not (backup := await self.async_get_backup(slug=slug)):
|
if not (backup := await self.async_get_backup(slug=slug)):
|
||||||
return
|
return
|
||||||
|
|
||||||
|
await self._async_upload_backup(
|
||||||
|
slug=slug,
|
||||||
|
backup=backup,
|
||||||
|
agent_ids=list(self.backup_agents.keys()),
|
||||||
|
)
|
||||||
|
|
||||||
|
async def _async_upload_backup(
|
||||||
|
self,
|
||||||
|
*,
|
||||||
|
slug: str,
|
||||||
|
backup: Backup,
|
||||||
|
agent_ids: list[str],
|
||||||
|
) -> None:
|
||||||
|
"""Upload a backup to selected agents."""
|
||||||
self.syncing = True
|
self.syncing = True
|
||||||
sync_backup_results = await asyncio.gather(
|
sync_backup_results = await asyncio.gather(
|
||||||
*(
|
*(
|
||||||
agent.async_upload_backup(
|
self.backup_agents[agent_id].async_upload_backup(
|
||||||
path=backup.path,
|
path=backup.path,
|
||||||
metadata=BackupUploadMetadata(
|
metadata=BackupUploadMetadata(
|
||||||
homeassistant=HAVERSION,
|
homeassistant=HAVERSION,
|
||||||
|
@ -262,80 +279,48 @@ class BackupManager(BaseBackupManager[Backup]):
|
||||||
protected=backup.protected,
|
protected=backup.protected,
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
for agent in self.backup_agents.values()
|
for agent_id in agent_ids
|
||||||
),
|
),
|
||||||
return_exceptions=True,
|
return_exceptions=True,
|
||||||
)
|
)
|
||||||
for result in sync_backup_results:
|
for result in sync_backup_results:
|
||||||
if isinstance(result, Exception):
|
if isinstance(result, Exception):
|
||||||
LOGGER.error("Error during backup upload - %s", result)
|
LOGGER.error("Error during backup upload - %s", result)
|
||||||
|
# TODO: Reset self.syncing in a finally block
|
||||||
self.syncing = False
|
self.syncing = False
|
||||||
|
|
||||||
async def load_backups(self) -> None:
|
|
||||||
"""Load data of stored backup files."""
|
|
||||||
backups = await self.hass.async_add_executor_job(self._read_backups)
|
|
||||||
LOGGER.debug("Loaded %s local backups", len(backups))
|
|
||||||
self.backups = backups
|
|
||||||
self.loaded_backups = True
|
|
||||||
|
|
||||||
def _read_backups(self) -> dict[str, Backup]:
|
|
||||||
"""Read backups from disk."""
|
|
||||||
backups: dict[str, Backup] = {}
|
|
||||||
for backup_path in self.backup_dir.glob("*.tar"):
|
|
||||||
try:
|
|
||||||
with tarfile.open(backup_path, "r:", bufsize=BUF_SIZE) as backup_file:
|
|
||||||
if data_file := backup_file.extractfile("./backup.json"):
|
|
||||||
data = json_loads_object(data_file.read())
|
|
||||||
backup = Backup(
|
|
||||||
slug=cast(str, data["slug"]),
|
|
||||||
name=cast(str, data["name"]),
|
|
||||||
date=cast(str, data["date"]),
|
|
||||||
path=backup_path,
|
|
||||||
size=round(backup_path.stat().st_size / 1_048_576, 2),
|
|
||||||
protected=cast(bool, data.get("protected", False)),
|
|
||||||
)
|
|
||||||
backups[backup.slug] = backup
|
|
||||||
except (OSError, TarError, json.JSONDecodeError, KeyError) as err:
|
|
||||||
LOGGER.warning("Unable to read backup %s: %s", backup_path, err)
|
|
||||||
return backups
|
|
||||||
|
|
||||||
async def async_get_backups(self, **kwargs: Any) -> dict[str, Backup]:
|
async def async_get_backups(self, **kwargs: Any) -> dict[str, Backup]:
|
||||||
"""Return backups."""
|
"""Return backups."""
|
||||||
if not self.loaded_backups:
|
backups: dict[str, Backup] = {}
|
||||||
await self.load_backups()
|
for agent_id, agent in self.backup_agents.items():
|
||||||
|
agent_backups = await agent.async_list_backups()
|
||||||
|
for agent_backup in agent_backups:
|
||||||
|
if agent_backup.slug not in backups:
|
||||||
|
backups[agent_backup.slug] = Backup(
|
||||||
|
slug=agent_backup.slug,
|
||||||
|
name=agent_backup.name,
|
||||||
|
date=agent_backup.date,
|
||||||
|
agent_ids=[],
|
||||||
|
# TODO: Do we need to expose the path?
|
||||||
|
path=agent_backup.path, # type: ignore[attr-defined]
|
||||||
|
size=agent_backup.size,
|
||||||
|
protected=agent_backup.protected,
|
||||||
|
)
|
||||||
|
backups[agent_backup.slug].agent_ids.append(agent_id)
|
||||||
|
|
||||||
return self.backups
|
return backups
|
||||||
|
|
||||||
async def async_get_backup(self, *, slug: str, **kwargs: Any) -> Backup | None:
|
async def async_get_backup(self, *, slug: str, **kwargs: Any) -> Backup | None:
|
||||||
"""Return a backup."""
|
"""Return a backup."""
|
||||||
if not self.loaded_backups:
|
# TODO: This is not efficient, but it's fine for draft
|
||||||
await self.load_backups()
|
backups = await self.async_get_backups()
|
||||||
|
return backups.get(slug)
|
||||||
if not (backup := self.backups.get(slug)):
|
|
||||||
return None
|
|
||||||
|
|
||||||
if not backup.path.exists():
|
|
||||||
LOGGER.debug(
|
|
||||||
(
|
|
||||||
"Removing tracked backup (%s) that does not exists on the expected"
|
|
||||||
" path %s"
|
|
||||||
),
|
|
||||||
backup.slug,
|
|
||||||
backup.path,
|
|
||||||
)
|
|
||||||
self.backups.pop(slug)
|
|
||||||
return None
|
|
||||||
|
|
||||||
return backup
|
|
||||||
|
|
||||||
async def async_remove_backup(self, *, slug: str, **kwargs: Any) -> None:
|
async def async_remove_backup(self, *, slug: str, **kwargs: Any) -> None:
|
||||||
"""Remove a backup."""
|
"""Remove a backup."""
|
||||||
if (backup := await self.async_get_backup(slug=slug)) is None:
|
# TODO: We should only remove from the agents that have the backup
|
||||||
return
|
for agent in self.backup_agents.values():
|
||||||
|
await agent.async_remove_backup(slug=slug) # type: ignore[attr-defined]
|
||||||
await self.hass.async_add_executor_job(backup.path.unlink, True)
|
|
||||||
LOGGER.debug("Removed backup located at %s", backup.path)
|
|
||||||
self.backups.pop(slug)
|
|
||||||
|
|
||||||
async def async_receive_backup(
|
async def async_receive_backup(
|
||||||
self,
|
self,
|
||||||
|
@ -392,12 +377,13 @@ class BackupManager(BaseBackupManager[Backup]):
|
||||||
temp_dir_handler.cleanup()
|
temp_dir_handler.cleanup()
|
||||||
|
|
||||||
await self.hass.async_add_executor_job(_move_and_cleanup)
|
await self.hass.async_add_executor_job(_move_and_cleanup)
|
||||||
await self.load_backups()
|
# TODO: What do we need to do instead?
|
||||||
|
|
||||||
async def async_create_backup(
|
async def async_create_backup(
|
||||||
self,
|
self,
|
||||||
*,
|
*,
|
||||||
addons_included: list[str] | None,
|
addons_included: list[str] | None,
|
||||||
|
agent_ids: list[str],
|
||||||
database_included: bool,
|
database_included: bool,
|
||||||
folders_included: list[str] | None,
|
folders_included: list[str] | None,
|
||||||
name: str | None,
|
name: str | None,
|
||||||
|
@ -408,12 +394,17 @@ class BackupManager(BaseBackupManager[Backup]):
|
||||||
"""Initiate generating a backup."""
|
"""Initiate generating a backup."""
|
||||||
if self.backup_task:
|
if self.backup_task:
|
||||||
raise HomeAssistantError("Backup already in progress")
|
raise HomeAssistantError("Backup already in progress")
|
||||||
|
if not agent_ids:
|
||||||
|
raise HomeAssistantError("At least one agent must be selected")
|
||||||
|
if any(agent_id not in self.backup_agents for agent_id in agent_ids):
|
||||||
|
raise HomeAssistantError("Invalid agent selected")
|
||||||
backup_name = name or f"Core {HAVERSION}"
|
backup_name = name or f"Core {HAVERSION}"
|
||||||
date_str = dt_util.now().isoformat()
|
date_str = dt_util.now().isoformat()
|
||||||
slug = _generate_slug(date_str, backup_name)
|
slug = _generate_slug(date_str, backup_name)
|
||||||
self.backup_task = self.hass.async_create_task(
|
self.backup_task = self.hass.async_create_task(
|
||||||
self._async_create_backup(
|
self._async_create_backup(
|
||||||
addons_included=addons_included,
|
addons_included=addons_included,
|
||||||
|
agent_ids=agent_ids,
|
||||||
backup_name=backup_name,
|
backup_name=backup_name,
|
||||||
database_included=database_included,
|
database_included=database_included,
|
||||||
date_str=date_str,
|
date_str=date_str,
|
||||||
|
@ -431,6 +422,7 @@ class BackupManager(BaseBackupManager[Backup]):
|
||||||
self,
|
self,
|
||||||
*,
|
*,
|
||||||
addons_included: list[str] | None,
|
addons_included: list[str] | None,
|
||||||
|
agent_ids: list[str],
|
||||||
database_included: bool,
|
database_included: bool,
|
||||||
backup_name: str,
|
backup_name: str,
|
||||||
date_str: str,
|
date_str: str,
|
||||||
|
@ -441,6 +433,11 @@ class BackupManager(BaseBackupManager[Backup]):
|
||||||
) -> Backup:
|
) -> Backup:
|
||||||
"""Generate a backup."""
|
"""Generate a backup."""
|
||||||
success = False
|
success = False
|
||||||
|
if LOCAL_AGENT_ID in agent_ids:
|
||||||
|
backup_dir = self.backup_dir
|
||||||
|
else:
|
||||||
|
backup_dir = self.temp_backup_dir
|
||||||
|
|
||||||
try:
|
try:
|
||||||
await self.async_pre_backup_actions()
|
await self.async_pre_backup_actions()
|
||||||
|
|
||||||
|
@ -458,9 +455,10 @@ class BackupManager(BaseBackupManager[Backup]):
|
||||||
"protected": password is not None,
|
"protected": password is not None,
|
||||||
}
|
}
|
||||||
|
|
||||||
tar_file_path = Path(self.backup_dir, f"{backup_data['slug']}.tar")
|
tar_file_path = Path(backup_dir, f"{backup_data['slug']}.tar")
|
||||||
size_in_bytes = await self.hass.async_add_executor_job(
|
size_in_bytes = await self.hass.async_add_executor_job(
|
||||||
self._mkdir_and_generate_backup_contents,
|
self._mkdir_and_generate_backup_contents,
|
||||||
|
backup_dir,
|
||||||
tar_file_path,
|
tar_file_path,
|
||||||
backup_data,
|
backup_data,
|
||||||
database_included,
|
database_included,
|
||||||
|
@ -473,10 +471,19 @@ class BackupManager(BaseBackupManager[Backup]):
|
||||||
path=tar_file_path,
|
path=tar_file_path,
|
||||||
size=round(size_in_bytes / 1_048_576, 2),
|
size=round(size_in_bytes / 1_048_576, 2),
|
||||||
protected=password is not None,
|
protected=password is not None,
|
||||||
|
agent_ids=agent_ids, # TODO: This should maybe be set after upload
|
||||||
)
|
)
|
||||||
if self.loaded_backups:
|
# TODO: We should add a cache of the backup metadata
|
||||||
self.backups[slug] = backup
|
LOGGER.debug(
|
||||||
LOGGER.debug("Generated new backup with slug %s", slug)
|
"Generated new backup with slug %s, uploading to agents %s",
|
||||||
|
slug,
|
||||||
|
agent_ids,
|
||||||
|
)
|
||||||
|
await self._async_upload_backup(
|
||||||
|
slug=slug, backup=backup, agent_ids=agent_ids
|
||||||
|
)
|
||||||
|
# TODO: Upload to other agents
|
||||||
|
# TODO: Remove from local store if not uploaded to local agent
|
||||||
success = True
|
success = True
|
||||||
return backup
|
return backup
|
||||||
finally:
|
finally:
|
||||||
|
@ -487,15 +494,16 @@ class BackupManager(BaseBackupManager[Backup]):
|
||||||
|
|
||||||
def _mkdir_and_generate_backup_contents(
|
def _mkdir_and_generate_backup_contents(
|
||||||
self,
|
self,
|
||||||
|
backup_dir: Path,
|
||||||
tar_file_path: Path,
|
tar_file_path: Path,
|
||||||
backup_data: dict[str, Any],
|
backup_data: dict[str, Any],
|
||||||
database_included: bool,
|
database_included: bool,
|
||||||
password: str | None = None,
|
password: str | None = None,
|
||||||
) -> int:
|
) -> int:
|
||||||
"""Generate backup contents and return the size."""
|
"""Generate backup contents and return the size."""
|
||||||
if not self.backup_dir.exists():
|
if not backup_dir.exists():
|
||||||
LOGGER.debug("Creating backup directory")
|
LOGGER.debug("Creating backup directory %s", backup_dir)
|
||||||
self.backup_dir.mkdir()
|
backup_dir.mkdir()
|
||||||
|
|
||||||
excludes = EXCLUDE_FROM_BACKUP
|
excludes = EXCLUDE_FROM_BACKUP
|
||||||
if not database_included:
|
if not database_included:
|
||||||
|
|
|
@ -120,6 +120,7 @@ async def handle_restore(
|
||||||
{
|
{
|
||||||
vol.Required("type"): "backup/generate",
|
vol.Required("type"): "backup/generate",
|
||||||
vol.Optional("addons_included"): [str],
|
vol.Optional("addons_included"): [str],
|
||||||
|
vol.Required("agent_ids"): [str],
|
||||||
vol.Optional("database_included", default=True): bool,
|
vol.Optional("database_included", default=True): bool,
|
||||||
vol.Optional("folders_included"): [str],
|
vol.Optional("folders_included"): [str],
|
||||||
vol.Optional("name"): str,
|
vol.Optional("name"): str,
|
||||||
|
@ -139,6 +140,7 @@ async def handle_create(
|
||||||
|
|
||||||
backup = await hass.data[DATA_MANAGER].async_create_backup(
|
backup = await hass.data[DATA_MANAGER].async_create_backup(
|
||||||
addons_included=msg.get("addons_included"),
|
addons_included=msg.get("addons_included"),
|
||||||
|
agent_ids=msg["agent_ids"],
|
||||||
database_included=msg["database_included"],
|
database_included=msg["database_included"],
|
||||||
folders_included=msg.get("folders_included"),
|
folders_included=msg.get("folders_included"),
|
||||||
name=msg.get("name"),
|
name=msg.get("name"),
|
||||||
|
@ -283,7 +285,7 @@ async def backup_agents_download(
|
||||||
try:
|
try:
|
||||||
await agent.async_download_backup(
|
await agent.async_download_backup(
|
||||||
id=msg["backup_id"],
|
id=msg["backup_id"],
|
||||||
path=Path(hass.config.path("backup"), f"{msg['slug']}.tar"),
|
path=Path(manager.backup_dir, f"{msg['slug']}.tar"), # type: ignore[attr-defined]
|
||||||
)
|
)
|
||||||
except Exception as err: # noqa: BLE001
|
except Exception as err: # noqa: BLE001
|
||||||
connection.send_error(msg["id"], "backup_agents_download", str(err))
|
connection.send_error(msg["id"], "backup_agents_download", str(err))
|
||||||
|
|
|
@ -12,12 +12,14 @@ from homeassistant.components.backup import (
|
||||||
BackupUploadMetadata,
|
BackupUploadMetadata,
|
||||||
UploadedBackup,
|
UploadedBackup,
|
||||||
)
|
)
|
||||||
from homeassistant.components.backup.manager import Backup
|
from homeassistant.components.backup.const import DATA_MANAGER
|
||||||
|
from homeassistant.components.backup.manager import LOCAL_AGENT_ID, Backup
|
||||||
from homeassistant.core import HomeAssistant
|
from homeassistant.core import HomeAssistant
|
||||||
from homeassistant.helpers.typing import ConfigType
|
from homeassistant.helpers.typing import ConfigType
|
||||||
from homeassistant.setup import async_setup_component
|
from homeassistant.setup import async_setup_component
|
||||||
|
|
||||||
TEST_BACKUP = Backup(
|
TEST_BACKUP = Backup(
|
||||||
|
agent_ids=["backup.local"],
|
||||||
slug="abc123",
|
slug="abc123",
|
||||||
name="Test",
|
name="Test",
|
||||||
date="1970-01-01T00:00:00.000Z",
|
date="1970-01-01T00:00:00.000Z",
|
||||||
|
@ -70,7 +72,16 @@ async def setup_backup_integration(
|
||||||
hass: HomeAssistant,
|
hass: HomeAssistant,
|
||||||
with_hassio: bool = False,
|
with_hassio: bool = False,
|
||||||
configuration: ConfigType | None = None,
|
configuration: ConfigType | None = None,
|
||||||
|
backups: list[Backup] | None = None,
|
||||||
) -> bool:
|
) -> bool:
|
||||||
"""Set up the Backup integration."""
|
"""Set up the Backup integration."""
|
||||||
with patch("homeassistant.components.backup.is_hassio", return_value=with_hassio):
|
with patch("homeassistant.components.backup.is_hassio", return_value=with_hassio):
|
||||||
return await async_setup_component(hass, DOMAIN, configuration or {})
|
result = await async_setup_component(hass, DOMAIN, configuration or {})
|
||||||
|
if with_hassio or not backups:
|
||||||
|
return result
|
||||||
|
|
||||||
|
local_agent = hass.data[DATA_MANAGER].backup_agents[LOCAL_AGENT_ID]
|
||||||
|
local_agent.backups = {backups.slug: backups for backups in backups}
|
||||||
|
local_agent.loaded_backups = True
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
|
@ -45,6 +45,9 @@
|
||||||
dict({
|
dict({
|
||||||
'agent_id': 'domain.test',
|
'agent_id': 'domain.test',
|
||||||
}),
|
}),
|
||||||
|
dict({
|
||||||
|
'agent_id': 'backup.local',
|
||||||
|
}),
|
||||||
]),
|
]),
|
||||||
'syncing': False,
|
'syncing': False,
|
||||||
}),
|
}),
|
||||||
|
@ -60,6 +63,9 @@
|
||||||
dict({
|
dict({
|
||||||
'agent_id': 'domain.test',
|
'agent_id': 'domain.test',
|
||||||
}),
|
}),
|
||||||
|
dict({
|
||||||
|
'agent_id': 'backup.local',
|
||||||
|
}),
|
||||||
]),
|
]),
|
||||||
'syncing': False,
|
'syncing': False,
|
||||||
}),
|
}),
|
||||||
|
@ -352,6 +358,9 @@
|
||||||
'id': 1,
|
'id': 1,
|
||||||
'result': dict({
|
'result': dict({
|
||||||
'backup': dict({
|
'backup': dict({
|
||||||
|
'agent_ids': list([
|
||||||
|
'backup.local',
|
||||||
|
]),
|
||||||
'date': '1970-01-01T00:00:00.000Z',
|
'date': '1970-01-01T00:00:00.000Z',
|
||||||
'name': 'Test',
|
'name': 'Test',
|
||||||
'path': 'abc123.tar',
|
'path': 'abc123.tar',
|
||||||
|
@ -508,6 +517,9 @@
|
||||||
'backing_up': False,
|
'backing_up': False,
|
||||||
'backups': list([
|
'backups': list([
|
||||||
dict({
|
dict({
|
||||||
|
'agent_ids': list([
|
||||||
|
'backup.local',
|
||||||
|
]),
|
||||||
'date': '1970-01-01T00:00:00.000Z',
|
'date': '1970-01-01T00:00:00.000Z',
|
||||||
'name': 'Test',
|
'name': 'Test',
|
||||||
'path': 'abc123.tar',
|
'path': 'abc123.tar',
|
||||||
|
|
|
@ -11,12 +11,14 @@ from multidict import CIMultiDict, CIMultiDictProxy
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
from homeassistant.components.backup import (
|
from homeassistant.components.backup import (
|
||||||
|
DOMAIN,
|
||||||
BackupAgentPlatformProtocol,
|
BackupAgentPlatformProtocol,
|
||||||
BackupManager,
|
BackupManager,
|
||||||
BackupPlatformProtocol,
|
BackupPlatformProtocol,
|
||||||
BackupUploadMetadata,
|
BackupUploadMetadata,
|
||||||
|
backup as local_backup_platform,
|
||||||
)
|
)
|
||||||
from homeassistant.components.backup.manager import BackupProgress
|
from homeassistant.components.backup.manager import LOCAL_AGENT_ID, BackupProgress
|
||||||
from homeassistant.core import HomeAssistant
|
from homeassistant.core import HomeAssistant
|
||||||
from homeassistant.exceptions import HomeAssistantError
|
from homeassistant.exceptions import HomeAssistantError
|
||||||
from homeassistant.setup import async_setup_component
|
from homeassistant.setup import async_setup_component
|
||||||
|
@ -52,6 +54,7 @@ async def _mock_backup_generation(
|
||||||
assert manager.backup_task is None
|
assert manager.backup_task is None
|
||||||
await manager.async_create_backup(
|
await manager.async_create_backup(
|
||||||
addons_included=[],
|
addons_included=[],
|
||||||
|
agent_ids=[LOCAL_AGENT_ID],
|
||||||
database_included=database_included,
|
database_included=database_included,
|
||||||
folders_included=[],
|
folders_included=[],
|
||||||
name=name,
|
name=name,
|
||||||
|
@ -92,29 +95,36 @@ async def _mock_backup_generation(
|
||||||
return backup
|
return backup
|
||||||
|
|
||||||
|
|
||||||
async def _setup_mock_domain(
|
async def _setup_backup_platform(
|
||||||
hass: HomeAssistant,
|
hass: HomeAssistant,
|
||||||
|
*,
|
||||||
|
domain: str = "some_domain",
|
||||||
platform: BackupPlatformProtocol | BackupAgentPlatformProtocol | None = None,
|
platform: BackupPlatformProtocol | BackupAgentPlatformProtocol | None = None,
|
||||||
) -> None:
|
) -> None:
|
||||||
"""Set up a mock domain."""
|
"""Set up a mock domain."""
|
||||||
mock_platform(hass, "some_domain.backup", platform or MockPlatform())
|
mock_platform(hass, f"{domain}.backup", platform or MockPlatform())
|
||||||
assert await async_setup_component(hass, "some_domain", {})
|
assert await async_setup_component(hass, domain, {})
|
||||||
|
|
||||||
|
|
||||||
async def test_constructor(hass: HomeAssistant) -> None:
|
async def test_constructor(hass: HomeAssistant) -> None:
|
||||||
"""Test BackupManager constructor."""
|
"""Test BackupManager constructor."""
|
||||||
manager = BackupManager(hass)
|
manager = BackupManager(hass)
|
||||||
assert manager.backup_dir.as_posix() == hass.config.path("backups")
|
assert manager.backup_dir.as_posix() == hass.config.path("backups")
|
||||||
|
assert manager.temp_backup_dir.as_posix() == hass.config.path("tmp_backups")
|
||||||
|
|
||||||
|
|
||||||
async def test_load_backups(hass: HomeAssistant) -> None:
|
async def test_load_backups(hass: HomeAssistant) -> None:
|
||||||
"""Test loading backups."""
|
"""Test loading backups."""
|
||||||
manager = BackupManager(hass)
|
manager = BackupManager(hass)
|
||||||
|
|
||||||
|
await _setup_backup_platform(hass, domain=DOMAIN, platform=local_backup_platform)
|
||||||
|
await manager.load_platforms()
|
||||||
|
|
||||||
with (
|
with (
|
||||||
patch("pathlib.Path.glob", return_value=[TEST_BACKUP.path]),
|
patch("pathlib.Path.glob", return_value=[TEST_BACKUP.path]),
|
||||||
patch("tarfile.open", return_value=MagicMock()),
|
patch("tarfile.open", return_value=MagicMock()),
|
||||||
patch(
|
patch(
|
||||||
"homeassistant.components.backup.manager.json_loads_object",
|
"homeassistant.components.backup.backup.json_loads_object",
|
||||||
return_value={
|
return_value={
|
||||||
"slug": TEST_BACKUP.slug,
|
"slug": TEST_BACKUP.slug,
|
||||||
"name": TEST_BACKUP.name,
|
"name": TEST_BACKUP.name,
|
||||||
|
@ -126,7 +136,7 @@ async def test_load_backups(hass: HomeAssistant) -> None:
|
||||||
return_value=MagicMock(st_size=TEST_BACKUP.size),
|
return_value=MagicMock(st_size=TEST_BACKUP.size),
|
||||||
),
|
),
|
||||||
):
|
):
|
||||||
await manager.load_backups()
|
await manager.backup_agents[LOCAL_AGENT_ID].load_backups()
|
||||||
backups = await manager.async_get_backups()
|
backups = await manager.async_get_backups()
|
||||||
assert backups == {TEST_BACKUP.slug: TEST_BACKUP}
|
assert backups == {TEST_BACKUP.slug: TEST_BACKUP}
|
||||||
|
|
||||||
|
@ -137,11 +147,15 @@ async def test_load_backups_with_exception(
|
||||||
) -> None:
|
) -> None:
|
||||||
"""Test loading backups with exception."""
|
"""Test loading backups with exception."""
|
||||||
manager = BackupManager(hass)
|
manager = BackupManager(hass)
|
||||||
|
|
||||||
|
await _setup_backup_platform(hass, domain=DOMAIN, platform=local_backup_platform)
|
||||||
|
await manager.load_platforms()
|
||||||
|
|
||||||
with (
|
with (
|
||||||
patch("pathlib.Path.glob", return_value=[TEST_BACKUP.path]),
|
patch("pathlib.Path.glob", return_value=[TEST_BACKUP.path]),
|
||||||
patch("tarfile.open", side_effect=OSError("Test exception")),
|
patch("tarfile.open", side_effect=OSError("Test exception")),
|
||||||
):
|
):
|
||||||
await manager.load_backups()
|
await manager.backup_agents[LOCAL_AGENT_ID].load_backups()
|
||||||
backups = await manager.async_get_backups()
|
backups = await manager.async_get_backups()
|
||||||
assert f"Unable to read backup {TEST_BACKUP.path}: Test exception" in caplog.text
|
assert f"Unable to read backup {TEST_BACKUP.path}: Test exception" in caplog.text
|
||||||
assert backups == {}
|
assert backups == {}
|
||||||
|
@ -153,8 +167,13 @@ async def test_removing_backup(
|
||||||
) -> None:
|
) -> None:
|
||||||
"""Test removing backup."""
|
"""Test removing backup."""
|
||||||
manager = BackupManager(hass)
|
manager = BackupManager(hass)
|
||||||
manager.backups = {TEST_BACKUP.slug: TEST_BACKUP}
|
|
||||||
manager.loaded_backups = True
|
await _setup_backup_platform(hass, domain=DOMAIN, platform=local_backup_platform)
|
||||||
|
await manager.load_platforms()
|
||||||
|
|
||||||
|
local_agent = manager.backup_agents[LOCAL_AGENT_ID]
|
||||||
|
local_agent.backups = {TEST_BACKUP.slug: TEST_BACKUP}
|
||||||
|
local_agent.loaded_backups = True
|
||||||
|
|
||||||
with patch("pathlib.Path.exists", return_value=True):
|
with patch("pathlib.Path.exists", return_value=True):
|
||||||
await manager.async_remove_backup(slug=TEST_BACKUP.slug)
|
await manager.async_remove_backup(slug=TEST_BACKUP.slug)
|
||||||
|
@ -168,18 +187,27 @@ async def test_removing_non_existing_backup(
|
||||||
"""Test removing not existing backup."""
|
"""Test removing not existing backup."""
|
||||||
manager = BackupManager(hass)
|
manager = BackupManager(hass)
|
||||||
|
|
||||||
|
await _setup_backup_platform(hass, domain=DOMAIN, platform=local_backup_platform)
|
||||||
|
await manager.load_platforms()
|
||||||
|
|
||||||
await manager.async_remove_backup(slug="non_existing")
|
await manager.async_remove_backup(slug="non_existing")
|
||||||
assert "Removed backup located at" not in caplog.text
|
assert "Removed backup located at" not in caplog.text
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.xfail(reason="Cleanup not implemented in the draft")
|
||||||
async def test_getting_backup_that_does_not_exist(
|
async def test_getting_backup_that_does_not_exist(
|
||||||
hass: HomeAssistant,
|
hass: HomeAssistant,
|
||||||
caplog: pytest.LogCaptureFixture,
|
caplog: pytest.LogCaptureFixture,
|
||||||
) -> None:
|
) -> None:
|
||||||
"""Test getting backup that does not exist."""
|
"""Test getting backup that does not exist."""
|
||||||
manager = BackupManager(hass)
|
manager = BackupManager(hass)
|
||||||
manager.backups = {TEST_BACKUP.slug: TEST_BACKUP}
|
|
||||||
manager.loaded_backups = True
|
await _setup_backup_platform(hass, domain=DOMAIN, platform=local_backup_platform)
|
||||||
|
await manager.load_platforms()
|
||||||
|
|
||||||
|
local_agent = manager.backup_agents[LOCAL_AGENT_ID]
|
||||||
|
local_agent.backups = {TEST_BACKUP.slug: TEST_BACKUP}
|
||||||
|
local_agent.loaded_backups = True
|
||||||
|
|
||||||
with patch("pathlib.Path.exists", return_value=False):
|
with patch("pathlib.Path.exists", return_value=False):
|
||||||
backup = await manager.async_get_backup(slug=TEST_BACKUP.slug)
|
backup = await manager.async_get_backup(slug=TEST_BACKUP.slug)
|
||||||
|
@ -199,6 +227,7 @@ async def test_async_create_backup_when_backing_up(hass: HomeAssistant) -> None:
|
||||||
with pytest.raises(HomeAssistantError, match="Backup already in progress"):
|
with pytest.raises(HomeAssistantError, match="Backup already in progress"):
|
||||||
await manager.async_create_backup(
|
await manager.async_create_backup(
|
||||||
addons_included=[],
|
addons_included=[],
|
||||||
|
agent_ids=[LOCAL_AGENT_ID],
|
||||||
database_included=True,
|
database_included=True,
|
||||||
folders_included=[],
|
folders_included=[],
|
||||||
name=None,
|
name=None,
|
||||||
|
@ -227,7 +256,12 @@ async def test_async_create_backup(
|
||||||
) -> None:
|
) -> None:
|
||||||
"""Test generate backup."""
|
"""Test generate backup."""
|
||||||
manager = BackupManager(hass)
|
manager = BackupManager(hass)
|
||||||
manager.loaded_backups = True
|
|
||||||
|
await _setup_backup_platform(hass, domain=DOMAIN, platform=local_backup_platform)
|
||||||
|
await manager.load_platforms()
|
||||||
|
|
||||||
|
local_agent = manager.backup_agents[LOCAL_AGENT_ID]
|
||||||
|
local_agent.loaded_backups = True
|
||||||
|
|
||||||
await _mock_backup_generation(
|
await _mock_backup_generation(
|
||||||
hass, manager, mocked_json_bytes, mocked_tarfile, **params
|
hass, manager, mocked_json_bytes, mocked_tarfile, **params
|
||||||
|
@ -236,10 +270,10 @@ async def test_async_create_backup(
|
||||||
assert "Generated new backup with slug " in caplog.text
|
assert "Generated new backup with slug " in caplog.text
|
||||||
assert "Creating backup directory" in caplog.text
|
assert "Creating backup directory" in caplog.text
|
||||||
assert "Loaded 0 platforms" in caplog.text
|
assert "Loaded 0 platforms" in caplog.text
|
||||||
assert "Loaded 0 agents" in caplog.text
|
assert "Loaded 1 agents" in caplog.text
|
||||||
|
|
||||||
assert len(manager.backups) == 1
|
assert len(local_agent.backups) == 1
|
||||||
backup = list(manager.backups.values())[0]
|
backup = list(local_agent.backups.values())[0]
|
||||||
assert backup.protected is bool(params.get("password"))
|
assert backup.protected is bool(params.get("password"))
|
||||||
|
|
||||||
|
|
||||||
|
@ -250,12 +284,11 @@ async def test_loading_platforms(
|
||||||
"""Test loading backup platforms."""
|
"""Test loading backup platforms."""
|
||||||
manager = BackupManager(hass)
|
manager = BackupManager(hass)
|
||||||
|
|
||||||
assert not manager.loaded_platforms
|
|
||||||
assert not manager.platforms
|
assert not manager.platforms
|
||||||
|
|
||||||
await _setup_mock_domain(
|
await _setup_backup_platform(
|
||||||
hass,
|
hass,
|
||||||
Mock(
|
platform=Mock(
|
||||||
async_pre_backup=AsyncMock(),
|
async_pre_backup=AsyncMock(),
|
||||||
async_post_backup=AsyncMock(),
|
async_post_backup=AsyncMock(),
|
||||||
async_get_backup_agents=AsyncMock(),
|
async_get_backup_agents=AsyncMock(),
|
||||||
|
@ -264,7 +297,6 @@ async def test_loading_platforms(
|
||||||
await manager.load_platforms()
|
await manager.load_platforms()
|
||||||
await hass.async_block_till_done()
|
await hass.async_block_till_done()
|
||||||
|
|
||||||
assert manager.loaded_platforms
|
|
||||||
assert len(manager.platforms) == 1
|
assert len(manager.platforms) == 1
|
||||||
|
|
||||||
assert "Loaded 1 platforms" in caplog.text
|
assert "Loaded 1 platforms" in caplog.text
|
||||||
|
@ -277,19 +309,17 @@ async def test_loading_agents(
|
||||||
"""Test loading backup agents."""
|
"""Test loading backup agents."""
|
||||||
manager = BackupManager(hass)
|
manager = BackupManager(hass)
|
||||||
|
|
||||||
assert not manager.loaded_platforms
|
|
||||||
assert not manager.platforms
|
assert not manager.platforms
|
||||||
|
|
||||||
await _setup_mock_domain(
|
await _setup_backup_platform(
|
||||||
hass,
|
hass,
|
||||||
Mock(
|
platform=Mock(
|
||||||
async_get_backup_agents=AsyncMock(return_value=[BackupAgentTest("test")]),
|
async_get_backup_agents=AsyncMock(return_value=[BackupAgentTest("test")]),
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
await manager.load_platforms()
|
await manager.load_platforms()
|
||||||
await hass.async_block_till_done()
|
await hass.async_block_till_done()
|
||||||
|
|
||||||
assert manager.loaded_platforms
|
|
||||||
assert len(manager.backup_agents) == 1
|
assert len(manager.backup_agents) == 1
|
||||||
|
|
||||||
assert "Loaded 1 agents" in caplog.text
|
assert "Loaded 1 agents" in caplog.text
|
||||||
|
@ -303,14 +333,12 @@ async def test_not_loading_bad_platforms(
|
||||||
"""Test loading backup platforms."""
|
"""Test loading backup platforms."""
|
||||||
manager = BackupManager(hass)
|
manager = BackupManager(hass)
|
||||||
|
|
||||||
assert not manager.loaded_platforms
|
|
||||||
assert not manager.platforms
|
assert not manager.platforms
|
||||||
|
|
||||||
await _setup_mock_domain(hass)
|
await _setup_backup_platform(hass)
|
||||||
await manager.load_platforms()
|
await manager.load_platforms()
|
||||||
await hass.async_block_till_done()
|
await hass.async_block_till_done()
|
||||||
|
|
||||||
assert manager.loaded_platforms
|
|
||||||
assert len(manager.platforms) == 0
|
assert len(manager.platforms) == 0
|
||||||
|
|
||||||
assert "Loaded 0 platforms" in caplog.text
|
assert "Loaded 0 platforms" in caplog.text
|
||||||
|
@ -326,9 +354,10 @@ async def test_syncing_backup(
|
||||||
"""Test syncing a backup."""
|
"""Test syncing a backup."""
|
||||||
manager = BackupManager(hass)
|
manager = BackupManager(hass)
|
||||||
|
|
||||||
await _setup_mock_domain(
|
await _setup_backup_platform(hass, domain=DOMAIN, platform=local_backup_platform)
|
||||||
|
await _setup_backup_platform(
|
||||||
hass,
|
hass,
|
||||||
Mock(
|
platform=Mock(
|
||||||
async_pre_backup=AsyncMock(),
|
async_pre_backup=AsyncMock(),
|
||||||
async_post_backup=AsyncMock(),
|
async_post_backup=AsyncMock(),
|
||||||
async_get_backup_agents=AsyncMock(
|
async_get_backup_agents=AsyncMock(
|
||||||
|
@ -387,9 +416,10 @@ async def test_syncing_backup_with_exception(
|
||||||
async def async_upload_backup(self, **kwargs: Any) -> None:
|
async def async_upload_backup(self, **kwargs: Any) -> None:
|
||||||
raise HomeAssistantError("Test exception")
|
raise HomeAssistantError("Test exception")
|
||||||
|
|
||||||
await _setup_mock_domain(
|
await _setup_backup_platform(hass, domain=DOMAIN, platform=local_backup_platform)
|
||||||
|
await _setup_backup_platform(
|
||||||
hass,
|
hass,
|
||||||
Mock(
|
platform=Mock(
|
||||||
async_pre_backup=AsyncMock(),
|
async_pre_backup=AsyncMock(),
|
||||||
async_post_backup=AsyncMock(),
|
async_post_backup=AsyncMock(),
|
||||||
async_get_backup_agents=AsyncMock(
|
async_get_backup_agents=AsyncMock(
|
||||||
|
@ -448,9 +478,10 @@ async def test_syncing_backup_no_agents(
|
||||||
"""Test syncing a backup with no agents."""
|
"""Test syncing a backup with no agents."""
|
||||||
manager = BackupManager(hass)
|
manager = BackupManager(hass)
|
||||||
|
|
||||||
await _setup_mock_domain(
|
await _setup_backup_platform(hass, domain=DOMAIN, platform=local_backup_platform)
|
||||||
|
await _setup_backup_platform(
|
||||||
hass,
|
hass,
|
||||||
Mock(
|
platform=Mock(
|
||||||
async_pre_backup=AsyncMock(),
|
async_pre_backup=AsyncMock(),
|
||||||
async_post_backup=AsyncMock(),
|
async_post_backup=AsyncMock(),
|
||||||
async_get_backup_agents=AsyncMock(return_value=[]),
|
async_get_backup_agents=AsyncMock(return_value=[]),
|
||||||
|
@ -479,9 +510,9 @@ async def test_exception_plaform_pre(
|
||||||
async def _mock_step(hass: HomeAssistant) -> None:
|
async def _mock_step(hass: HomeAssistant) -> None:
|
||||||
raise HomeAssistantError("Test exception")
|
raise HomeAssistantError("Test exception")
|
||||||
|
|
||||||
await _setup_mock_domain(
|
await _setup_backup_platform(
|
||||||
hass,
|
hass,
|
||||||
Mock(
|
platform=Mock(
|
||||||
async_pre_backup=_mock_step,
|
async_pre_backup=_mock_step,
|
||||||
async_post_backup=AsyncMock(),
|
async_post_backup=AsyncMock(),
|
||||||
async_get_backup_agents=AsyncMock(),
|
async_get_backup_agents=AsyncMock(),
|
||||||
|
@ -502,9 +533,9 @@ async def test_exception_plaform_post(
|
||||||
async def _mock_step(hass: HomeAssistant) -> None:
|
async def _mock_step(hass: HomeAssistant) -> None:
|
||||||
raise HomeAssistantError("Test exception")
|
raise HomeAssistantError("Test exception")
|
||||||
|
|
||||||
await _setup_mock_domain(
|
await _setup_backup_platform(
|
||||||
hass,
|
hass,
|
||||||
Mock(
|
platform=Mock(
|
||||||
async_pre_backup=AsyncMock(),
|
async_pre_backup=AsyncMock(),
|
||||||
async_post_backup=_mock_step,
|
async_post_backup=_mock_step,
|
||||||
async_get_backup_agents=AsyncMock(),
|
async_get_backup_agents=AsyncMock(),
|
||||||
|
@ -515,58 +546,6 @@ async def test_exception_plaform_post(
|
||||||
await _mock_backup_generation(hass, manager, mocked_json_bytes, mocked_tarfile)
|
await _mock_backup_generation(hass, manager, mocked_json_bytes, mocked_tarfile)
|
||||||
|
|
||||||
|
|
||||||
async def test_loading_platforms_when_running_async_pre_backup_actions(
|
|
||||||
hass: HomeAssistant,
|
|
||||||
caplog: pytest.LogCaptureFixture,
|
|
||||||
) -> None:
|
|
||||||
"""Test loading backup platforms when running post backup actions."""
|
|
||||||
manager = BackupManager(hass)
|
|
||||||
|
|
||||||
assert not manager.loaded_platforms
|
|
||||||
assert not manager.platforms
|
|
||||||
|
|
||||||
await _setup_mock_domain(
|
|
||||||
hass,
|
|
||||||
Mock(
|
|
||||||
async_pre_backup=AsyncMock(),
|
|
||||||
async_post_backup=AsyncMock(),
|
|
||||||
async_get_backup_agents=AsyncMock(),
|
|
||||||
),
|
|
||||||
)
|
|
||||||
await manager.async_pre_backup_actions()
|
|
||||||
|
|
||||||
assert manager.loaded_platforms
|
|
||||||
assert len(manager.platforms) == 1
|
|
||||||
|
|
||||||
assert "Loaded 1 platforms" in caplog.text
|
|
||||||
|
|
||||||
|
|
||||||
async def test_loading_platforms_when_running_async_post_backup_actions(
|
|
||||||
hass: HomeAssistant,
|
|
||||||
caplog: pytest.LogCaptureFixture,
|
|
||||||
) -> None:
|
|
||||||
"""Test loading backup platforms when running post backup actions."""
|
|
||||||
manager = BackupManager(hass)
|
|
||||||
|
|
||||||
assert not manager.loaded_platforms
|
|
||||||
assert not manager.platforms
|
|
||||||
|
|
||||||
await _setup_mock_domain(
|
|
||||||
hass,
|
|
||||||
Mock(
|
|
||||||
async_pre_backup=AsyncMock(),
|
|
||||||
async_post_backup=AsyncMock(),
|
|
||||||
async_get_backup_agents=AsyncMock(),
|
|
||||||
),
|
|
||||||
)
|
|
||||||
await manager.async_post_backup_actions()
|
|
||||||
|
|
||||||
assert manager.loaded_platforms
|
|
||||||
assert len(manager.platforms) == 1
|
|
||||||
|
|
||||||
assert "Loaded 1 platforms" in caplog.text
|
|
||||||
|
|
||||||
|
|
||||||
async def test_async_receive_backup(
|
async def test_async_receive_backup(
|
||||||
hass: HomeAssistant,
|
hass: HomeAssistant,
|
||||||
caplog: pytest.LogCaptureFixture,
|
caplog: pytest.LogCaptureFixture,
|
||||||
|
@ -601,6 +580,7 @@ async def test_async_receive_backup(
|
||||||
assert mover_mock.mock_calls[0].args[1].name == "abc123.tar"
|
assert mover_mock.mock_calls[0].args[1].name == "abc123.tar"
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.xfail(reason="Restore not implemented in the draft")
|
||||||
async def test_async_trigger_restore(
|
async def test_async_trigger_restore(
|
||||||
hass: HomeAssistant,
|
hass: HomeAssistant,
|
||||||
caplog: pytest.LogCaptureFixture,
|
caplog: pytest.LogCaptureFixture,
|
||||||
|
@ -623,6 +603,7 @@ async def test_async_trigger_restore(
|
||||||
assert mocked_service_call.called
|
assert mocked_service_call.called
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.xfail(reason="Restore not implemented in the draft")
|
||||||
async def test_async_trigger_restore_with_password(
|
async def test_async_trigger_restore_with_password(
|
||||||
hass: HomeAssistant,
|
hass: HomeAssistant,
|
||||||
caplog: pytest.LogCaptureFixture,
|
caplog: pytest.LogCaptureFixture,
|
||||||
|
@ -645,6 +626,7 @@ async def test_async_trigger_restore_with_password(
|
||||||
assert mocked_service_call.called
|
assert mocked_service_call.called
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.xfail(reason="Restore not implemented in the draft")
|
||||||
async def test_async_trigger_restore_missing_backup(hass: HomeAssistant) -> None:
|
async def test_async_trigger_restore_missing_backup(hass: HomeAssistant) -> None:
|
||||||
"""Test trigger restore."""
|
"""Test trigger restore."""
|
||||||
manager = BackupManager(hass)
|
manager = BackupManager(hass)
|
||||||
|
|
|
@ -2,7 +2,7 @@
|
||||||
|
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from typing import Any
|
from typing import Any
|
||||||
from unittest.mock import ANY, AsyncMock, patch
|
from unittest.mock import ANY, patch
|
||||||
|
|
||||||
from freezegun.api import FrozenDateTimeFactory
|
from freezegun.api import FrozenDateTimeFactory
|
||||||
import pytest
|
import pytest
|
||||||
|
@ -45,31 +45,19 @@ async def test_info(
|
||||||
with_hassio: bool,
|
with_hassio: bool,
|
||||||
) -> None:
|
) -> None:
|
||||||
"""Test getting backup info."""
|
"""Test getting backup info."""
|
||||||
await setup_backup_integration(hass, with_hassio=with_hassio)
|
await setup_backup_integration(hass, with_hassio=with_hassio, backups=[TEST_BACKUP])
|
||||||
|
|
||||||
hass.data[DATA_MANAGER].backups = {TEST_BACKUP.slug: TEST_BACKUP}
|
|
||||||
|
|
||||||
client = await hass_ws_client(hass)
|
client = await hass_ws_client(hass)
|
||||||
await hass.async_block_till_done()
|
await hass.async_block_till_done()
|
||||||
|
|
||||||
with (
|
await client.send_json_auto_id({"type": "backup/info"})
|
||||||
patch(
|
assert await client.receive_json() == snapshot
|
||||||
"homeassistant.components.backup.manager.BackupManager.load_backups",
|
|
||||||
AsyncMock(),
|
|
||||||
),
|
|
||||||
patch(
|
|
||||||
"homeassistant.components.backup.manager.BackupManager.async_get_backups",
|
|
||||||
return_value={TEST_BACKUP.slug: TEST_BACKUP},
|
|
||||||
),
|
|
||||||
):
|
|
||||||
await client.send_json_auto_id({"type": "backup/info"})
|
|
||||||
assert await client.receive_json() == snapshot
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize(
|
@pytest.mark.parametrize(
|
||||||
"backup_content",
|
"backup_content",
|
||||||
[
|
[
|
||||||
pytest.param(TEST_BACKUP, id="with_backup_content"),
|
pytest.param([TEST_BACKUP], id="with_backup_content"),
|
||||||
pytest.param(None, id="without_backup_content"),
|
pytest.param(None, id="without_backup_content"),
|
||||||
],
|
],
|
||||||
)
|
)
|
||||||
|
@ -88,17 +76,15 @@ async def test_details(
|
||||||
backup_content: BaseBackup | None,
|
backup_content: BaseBackup | None,
|
||||||
) -> None:
|
) -> None:
|
||||||
"""Test getting backup info."""
|
"""Test getting backup info."""
|
||||||
await setup_backup_integration(hass, with_hassio=with_hassio)
|
await setup_backup_integration(
|
||||||
|
hass, with_hassio=with_hassio, backups=backup_content
|
||||||
|
)
|
||||||
|
|
||||||
client = await hass_ws_client(hass)
|
client = await hass_ws_client(hass)
|
||||||
await hass.async_block_till_done()
|
await hass.async_block_till_done()
|
||||||
|
|
||||||
with patch(
|
await client.send_json_auto_id({"type": "backup/details", "slug": "abc123"})
|
||||||
"homeassistant.components.backup.manager.BackupManager.async_get_backup",
|
assert await client.receive_json() == snapshot
|
||||||
return_value=backup_content,
|
|
||||||
):
|
|
||||||
await client.send_json_auto_id({"type": "backup/details", "slug": "abc123"})
|
|
||||||
assert await client.receive_json() == snapshot
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize(
|
@pytest.mark.parametrize(
|
||||||
|
@ -159,7 +145,9 @@ async def test_generate(
|
||||||
freezer.move_to("2024-11-13 12:01:00+01:00")
|
freezer.move_to("2024-11-13 12:01:00+01:00")
|
||||||
await hass.async_block_till_done()
|
await hass.async_block_till_done()
|
||||||
|
|
||||||
await client.send_json_auto_id({"type": "backup/generate", **(data or {})})
|
await client.send_json_auto_id(
|
||||||
|
{"type": "backup/generate", **{"agent_ids": ["backup.local"]} | (data or {})}
|
||||||
|
)
|
||||||
for _ in range(number_of_messages):
|
for _ in range(number_of_messages):
|
||||||
assert await client.receive_json() == snapshot
|
assert await client.receive_json() == snapshot
|
||||||
|
|
||||||
|
@ -168,16 +156,18 @@ async def test_generate(
|
||||||
@pytest.mark.parametrize(
|
@pytest.mark.parametrize(
|
||||||
("params", "expected_extra_call_params"),
|
("params", "expected_extra_call_params"),
|
||||||
[
|
[
|
||||||
({}, {}),
|
({"agent_ids": ["backup.local"]}, {"agent_ids": ["backup.local"]}),
|
||||||
(
|
(
|
||||||
{
|
{
|
||||||
"addons_included": ["ssl"],
|
"addons_included": ["ssl"],
|
||||||
|
"agent_ids": ["backup.local"],
|
||||||
"database_included": False,
|
"database_included": False,
|
||||||
"folders_included": ["media"],
|
"folders_included": ["media"],
|
||||||
"name": "abc123",
|
"name": "abc123",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"addons_included": ["ssl"],
|
"addons_included": ["ssl"],
|
||||||
|
"agent_ids": ["backup.local"],
|
||||||
"database_included": False,
|
"database_included": False,
|
||||||
"folders_included": ["media"],
|
"folders_included": ["media"],
|
||||||
"name": "abc123",
|
"name": "abc123",
|
||||||
|
@ -525,7 +515,7 @@ async def test_agents_download(
|
||||||
assert await client.receive_json() == snapshot
|
assert await client.receive_json() == snapshot
|
||||||
assert download_mock.call_args[1] == {
|
assert download_mock.call_args[1] == {
|
||||||
"id": "abc123",
|
"id": "abc123",
|
||||||
"path": Path(hass.config.path("backup"), "abc123.tar"),
|
"path": Path(hass.config.path("backups"), "abc123.tar"),
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
Loading…
Add table
Reference in a new issue