Store automatic backup flag in backup metadata (#133500)

pull/133511/head
Erik Montnemery 2024-12-18 18:30:46 +01:00 committed by GitHub
parent fc622e398f
commit 51d63ba508
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
14 changed files with 120 additions and 59 deletions

View File

@ -23,7 +23,7 @@ from homeassistant.backup_restore import RESTORE_BACKUP_FILE, password_to_key
from homeassistant.const import __version__ as HAVERSION
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import integration_platform
from homeassistant.helpers import instance_id, integration_platform
from homeassistant.helpers.json import json_bytes
from homeassistant.util import dt as dt_util
@ -200,6 +200,7 @@ class BackupReaderWriter(abc.ABC):
*,
agent_ids: list[str],
backup_name: str,
extra_metadata: dict[str, bool | str],
include_addons: list[str] | None,
include_all_addons: bool,
include_database: bool,
@ -445,16 +446,18 @@ class BackupManager:
if (backup_id := agent_backup.backup_id) not in backups:
if known_backup := self.known_backups.get(backup_id):
failed_agent_ids = known_backup.failed_agent_ids
with_automatic_settings = known_backup.with_automatic_settings
else:
failed_agent_ids = []
with_automatic_settings = None
with_automatic_settings = self.is_our_automatic_backup(
agent_backup, await instance_id.async_get(self.hass)
)
backups[backup_id] = ManagerBackup(
agent_ids=[],
addons=agent_backup.addons,
backup_id=backup_id,
date=agent_backup.date,
database_included=agent_backup.database_included,
extra_metadata=agent_backup.extra_metadata,
failed_agent_ids=failed_agent_ids,
folders=agent_backup.folders,
homeassistant_included=agent_backup.homeassistant_included,
@ -494,16 +497,18 @@ class BackupManager:
if backup is None:
if known_backup := self.known_backups.get(backup_id):
failed_agent_ids = known_backup.failed_agent_ids
with_automatic_settings = known_backup.with_automatic_settings
else:
failed_agent_ids = []
with_automatic_settings = None
with_automatic_settings = self.is_our_automatic_backup(
result, await instance_id.async_get(self.hass)
)
backup = ManagerBackup(
agent_ids=[],
addons=result.addons,
backup_id=result.backup_id,
date=result.date,
database_included=result.database_included,
extra_metadata=result.extra_metadata,
failed_agent_ids=failed_agent_ids,
folders=result.folders,
homeassistant_included=result.homeassistant_included,
@ -517,6 +522,22 @@ class BackupManager:
return (backup, agent_errors)
@staticmethod
def is_our_automatic_backup(
backup: AgentBackup, our_instance_id: str
) -> bool | None:
"""Check if a backup was created by us and return automatic_settings flag.
Returns `None` if the backup was not created by us, or if the
automatic_settings flag is not a boolean.
"""
if backup.extra_metadata.get("instance_id") != our_instance_id:
return None
with_automatic_settings = backup.extra_metadata.get("with_automatic_settings")
if not isinstance(with_automatic_settings, bool):
return None
return with_automatic_settings
async def async_delete_backup(self, backup_id: str) -> dict[str, Exception]:
"""Delete a backup."""
agent_errors: dict[str, Exception] = {}
@ -598,7 +619,7 @@ class BackupManager:
open_stream=written_backup.open_stream,
)
await written_backup.release_stream()
self.known_backups.add(written_backup.backup, agent_errors, False)
self.known_backups.add(written_backup.backup, agent_errors)
async def async_create_backup(
self,
@ -699,6 +720,10 @@ class BackupManager:
new_backup, self._backup_task = await self._reader_writer.async_create_backup(
agent_ids=agent_ids,
backup_name=backup_name,
extra_metadata={
"instance_id": await instance_id.async_get(self.hass),
"with_automatic_settings": with_automatic_settings,
},
include_addons=include_addons,
include_all_addons=include_all_addons,
include_database=include_database,
@ -747,9 +772,7 @@ class BackupManager:
# create backup was successful, update last_completed_automatic_backup
self.config.data.last_completed_automatic_backup = dt_util.now()
self.store.save()
self.known_backups.add(
written_backup.backup, agent_errors, with_automatic_settings
)
self.known_backups.add(written_backup.backup, agent_errors)
# delete old backups more numerous than copies
await delete_backups_exceeding_configured_count(self)
@ -870,7 +893,6 @@ class KnownBackups:
backup["backup_id"]: KnownBackup(
backup_id=backup["backup_id"],
failed_agent_ids=backup["failed_agent_ids"],
with_automatic_settings=backup["with_automatic_settings"],
)
for backup in stored_backups
}
@ -883,13 +905,11 @@ class KnownBackups:
self,
backup: AgentBackup,
agent_errors: dict[str, Exception],
with_automatic_settings: bool,
) -> None:
"""Add a backup."""
self._backups[backup.backup_id] = KnownBackup(
backup_id=backup.backup_id,
failed_agent_ids=list(agent_errors),
with_automatic_settings=with_automatic_settings,
)
self._manager.store.save()
@ -911,14 +931,12 @@ class KnownBackup:
backup_id: str
failed_agent_ids: list[str]
with_automatic_settings: bool
def to_dict(self) -> StoredKnownBackup:
"""Convert known backup to a dict."""
# Shape mirrors the StoredKnownBackup TypedDict used when persisting
# known backups to the store.
return {
"backup_id": self.backup_id,
"failed_agent_ids": self.failed_agent_ids,
"with_automatic_settings": self.with_automatic_settings,
}
@ -927,7 +945,6 @@ class StoredKnownBackup(TypedDict):
backup_id: str
failed_agent_ids: list[str]
with_automatic_settings: bool
class CoreBackupReaderWriter(BackupReaderWriter):
@ -945,6 +962,7 @@ class CoreBackupReaderWriter(BackupReaderWriter):
*,
agent_ids: list[str],
backup_name: str,
extra_metadata: dict[str, bool | str],
include_addons: list[str] | None,
include_all_addons: bool,
include_database: bool,
@ -969,6 +987,7 @@ class CoreBackupReaderWriter(BackupReaderWriter):
agent_ids=agent_ids,
backup_id=backup_id,
backup_name=backup_name,
extra_metadata=extra_metadata,
include_database=include_database,
date_str=date_str,
on_progress=on_progress,
@ -987,6 +1006,7 @@ class CoreBackupReaderWriter(BackupReaderWriter):
backup_id: str,
backup_name: str,
date_str: str,
extra_metadata: dict[str, bool | str],
include_database: bool,
on_progress: Callable[[ManagerStateEvent], None],
password: str | None,
@ -1012,6 +1032,7 @@ class CoreBackupReaderWriter(BackupReaderWriter):
backup_data = {
"compressed": True,
"date": date_str,
"extra": extra_metadata,
"homeassistant": {
"exclude_database": not include_database,
"version": HAVERSION,
@ -1035,6 +1056,7 @@ class CoreBackupReaderWriter(BackupReaderWriter):
backup_id=backup_id,
database_included=include_database,
date=date_str,
extra_metadata=extra_metadata,
folders=[],
homeassistant_included=True,
homeassistant_version=HAVERSION,

View File

@ -33,6 +33,7 @@ class AgentBackup:
backup_id: str
date: str
database_included: bool
extra_metadata: dict[str, bool | str]
folders: list[Folder]
homeassistant_included: bool
homeassistant_version: str | None # None if homeassistant_included is False
@ -44,6 +45,12 @@ class AgentBackup:
"""Return a dict representation of this backup."""
return asdict(self)
def as_frontend_json(self) -> dict:
    """Return a dict representation of this backup for sending to frontend."""
    data = asdict(self)
    # The internal extra_metadata blob is not exposed to the frontend;
    # every other field passes through unchanged.
    data.pop("extra_metadata", None)
    return data
@classmethod
def from_dict(cls, data: dict[str, Any]) -> Self:
"""Create an instance from a JSON serialization."""
@ -52,6 +59,7 @@ class AgentBackup:
backup_id=data["backup_id"],
date=data["date"],
database_included=data["database_included"],
extra_metadata=data["extra_metadata"],
folders=[Folder(folder) for folder in data["folders"]],
homeassistant_included=data["homeassistant_included"],
homeassistant_version=data["homeassistant_version"],

View File

@ -60,6 +60,7 @@ def read_backup(backup_path: Path) -> AgentBackup:
backup_id=cast(str, data["slug"]),
database_included=database_included,
date=cast(str, data["date"]),
extra_metadata=cast(dict[str, bool | str], data.get("metadata", {})),
folders=folders,
homeassistant_included=homeassistant_included,
homeassistant_version=homeassistant_version,

View File

@ -51,7 +51,7 @@ async def handle_info(
"agent_errors": {
agent_id: str(err) for agent_id, err in agent_errors.items()
},
"backups": list(backups.values()),
"backups": [backup.as_frontend_json() for backup in backups.values()],
"last_attempted_automatic_backup": manager.config.data.last_attempted_automatic_backup,
"last_completed_automatic_backup": manager.config.data.last_completed_automatic_backup,
},
@ -81,7 +81,7 @@ async def handle_details(
"agent_errors": {
agent_id: str(err) for agent_id, err in agent_errors.items()
},
"backup": backup,
"backup": backup.as_frontend_json() if backup else None,
},
)

View File

@ -104,6 +104,7 @@ def _backup_details_to_agent_backup(
backup_id=details.slug,
database_included=database_included,
date=details.date.isoformat(),
extra_metadata=details.extra or {},
folders=[Folder(folder) for folder in details.folders],
homeassistant_included=homeassistant_included,
homeassistant_version=details.homeassistant,
@ -202,6 +203,7 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
*,
agent_ids: list[str],
backup_name: str,
extra_metadata: dict[str, bool | str],
include_addons: list[str] | None,
include_all_addons: bool,
include_database: bool,
@ -242,6 +244,7 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
location=locations or LOCATION_CLOUD_BACKUP,
homeassistant_exclude_database=not include_database,
background=True,
extra=extra_metadata,
)
)
backup_task = self._hass.async_create_task(

View File

@ -58,6 +58,7 @@ class KitchenSinkBackupAgent(BackupAgent):
backup_id="abc123",
database_included=False,
date="1970-01-01T00:00:00Z",
extra_metadata={},
folders=[Folder.MEDIA, Folder.SHARE],
homeassistant_included=True,
homeassistant_version="2024.12.0",

View File

@ -5,7 +5,7 @@ from __future__ import annotations
from collections.abc import AsyncIterator, Callable, Coroutine
from pathlib import Path
from typing import Any
from unittest.mock import AsyncMock, Mock, patch
from unittest.mock import ANY, AsyncMock, Mock, patch
from homeassistant.components.backup import (
DOMAIN,
@ -29,6 +29,7 @@ TEST_BACKUP_ABC123 = AgentBackup(
backup_id="abc123",
database_included=True,
date="1970-01-01T00:00:00.000Z",
extra_metadata={"instance_id": ANY, "with_automatic_settings": True},
folders=[Folder.MEDIA, Folder.SHARE],
homeassistant_included=True,
homeassistant_version="2024.12.0",
@ -43,6 +44,7 @@ TEST_BACKUP_DEF456 = AgentBackup(
backup_id="def456",
database_included=False,
date="1980-01-01T00:00:00.000Z",
extra_metadata={"instance_id": "unknown_uuid", "with_automatic_settings": True},
folders=[Folder.MEDIA, Folder.SHARE],
homeassistant_included=True,
homeassistant_version="2024.12.0",
@ -69,6 +71,7 @@ class BackupAgentTest(BackupAgent):
backup_id="abc123",
database_included=True,
date="1970-01-01T00:00:00Z",
extra_metadata={},
folders=[Folder.MEDIA, Folder.SHARE],
homeassistant_included=True,
homeassistant_version="2024.12.0",

View File

@ -78,7 +78,7 @@
'name': 'Test',
'protected': False,
'size': 0,
'with_automatic_settings': None,
'with_automatic_settings': True,
}),
]),
'last_attempted_automatic_backup': None,

View File

@ -1539,7 +1539,7 @@
'name': 'Test',
'protected': False,
'size': 0,
'with_automatic_settings': None,
'with_automatic_settings': True,
}),
]),
'last_attempted_automatic_backup': None,
@ -1607,7 +1607,7 @@
'name': 'Test',
'protected': False,
'size': 0,
'with_automatic_settings': None,
'with_automatic_settings': True,
}),
]),
'last_attempted_automatic_backup': None,
@ -1660,7 +1660,7 @@
'name': 'Test',
'protected': False,
'size': 0,
'with_automatic_settings': None,
'with_automatic_settings': True,
}),
]),
'last_attempted_automatic_backup': None,
@ -1788,7 +1788,7 @@
'name': 'Test',
'protected': False,
'size': 0,
'with_automatic_settings': None,
'with_automatic_settings': True,
}),
]),
'last_attempted_automatic_backup': None,
@ -1841,7 +1841,7 @@
'name': 'Test',
'protected': False,
'size': 0,
'with_automatic_settings': None,
'with_automatic_settings': True,
}),
]),
'last_attempted_automatic_backup': None,
@ -1950,7 +1950,7 @@
'name': 'Test',
'protected': False,
'size': 13,
'with_automatic_settings': False,
'with_automatic_settings': None,
}),
]),
'last_attempted_automatic_backup': None,
@ -2163,7 +2163,7 @@
'name': 'Test',
'protected': False,
'size': 13,
'with_automatic_settings': False,
'with_automatic_settings': None,
}),
]),
'last_attempted_automatic_backup': None,
@ -2216,7 +2216,7 @@
'name': 'Test',
'protected': False,
'size': 0,
'with_automatic_settings': None,
'with_automatic_settings': True,
}),
}),
'success': True,
@ -2254,7 +2254,7 @@
'name': 'Test',
'protected': False,
'size': 0,
'with_automatic_settings': None,
'with_automatic_settings': True,
}),
}),
'success': True,
@ -2305,7 +2305,7 @@
'name': 'Test',
'protected': False,
'size': 0,
'with_automatic_settings': None,
'with_automatic_settings': True,
}),
}),
'success': True,
@ -2344,7 +2344,7 @@
'name': 'Test',
'protected': False,
'size': 0,
'with_automatic_settings': None,
'with_automatic_settings': True,
}),
}),
'success': True,
@ -2607,7 +2607,7 @@
'name': 'Test',
'protected': False,
'size': 0,
'with_automatic_settings': None,
'with_automatic_settings': True,
}),
]),
'last_attempted_automatic_backup': None,
@ -2649,7 +2649,7 @@
'name': 'Test',
'protected': False,
'size': 0,
'with_automatic_settings': None,
'with_automatic_settings': True,
}),
]),
'last_attempted_automatic_backup': None,
@ -2692,7 +2692,7 @@
'name': 'Test',
'protected': False,
'size': 0,
'with_automatic_settings': None,
'with_automatic_settings': True,
}),
]),
'last_attempted_automatic_backup': None,
@ -2756,7 +2756,7 @@
'name': 'Test',
'protected': False,
'size': 0,
'with_automatic_settings': None,
'with_automatic_settings': True,
}),
]),
'last_attempted_automatic_backup': None,
@ -2799,7 +2799,7 @@
'name': 'Test',
'protected': False,
'size': 0,
'with_automatic_settings': None,
'with_automatic_settings': True,
}),
]),
'last_attempted_automatic_backup': None,

View File

@ -121,6 +121,10 @@ async def test_async_create_backup(
assert create_backup.call_args == call(
agent_ids=["backup.local"],
backup_name="Core 2025.1.0",
extra_metadata={
"instance_id": hass.data["core.uuid"],
"with_automatic_settings": False,
},
include_addons=None,
include_all_addons=False,
include_database=True,
@ -325,6 +329,10 @@ async def test_async_initiate_backup(
assert backup_json_dict == {
"compressed": True,
"date": ANY,
"extra": {
"instance_id": hass.data["core.uuid"],
"with_automatic_settings": False,
},
"homeassistant": {
"exclude_database": not include_database,
"version": "2025.1.0",
@ -345,30 +353,30 @@ async def test_async_initiate_backup(
backup_agent_ids = backup_data.pop("agent_ids")
assert backup_agent_ids == agent_ids
assert backup_data == {
"addons": [],
"backup_id": ANY,
"database_included": include_database,
"date": ANY,
"failed_agent_ids": [],
"folders": [],
"homeassistant_included": True,
"homeassistant_version": "2025.1.0",
"name": name,
"protected": bool(password),
"size": ANY,
"with_automatic_settings": False,
}
backup = AgentBackup.from_dict(backup_data)
assert backup == AgentBackup(
addons=[],
backup_id=ANY,
database_included=include_database,
date=ANY,
folders=[],
homeassistant_included=True,
homeassistant_version="2025.1.0",
name=name,
protected=bool(password),
size=ANY,
)
for agent_id in agent_ids:
agent = agents[agent_id]
assert len(agent._backups) == 1
agent_backup = agent._backups[backup.backup_id]
assert agent_backup.backup_id == backup.backup_id
assert agent_backup.date == backup.date
assert agent_backup.name == backup.name
assert agent_backup.protected == backup.protected
assert agent_backup.size == backup.size
agent_backup = agent._backups[backup_data["backup_id"]]
assert agent_backup.backup_id == backup_data["backup_id"]
assert agent_backup.date == backup_data["date"]
assert agent_backup.name == backup_data["name"]
assert agent_backup.protected == backup_data["protected"]
assert agent_backup.size == backup_data["size"]
outer_tar = mocked_tarfile.return_value
core_tar = outer_tar.create_inner_tar.return_value.__enter__.return_value
@ -380,7 +388,7 @@ async def test_async_initiate_backup(
tar_file_path = str(mocked_tarfile.call_args_list[0][0][0])
backup_directory = hass.config.path(backup_directory)
assert tar_file_path == f"{backup_directory}/{backup.backup_id}.tar"
assert tar_file_path == f"{backup_directory}/{backup_data["backup_id"]}.tar"
@pytest.mark.usefixtures("mock_backup_generation")
@ -522,7 +530,6 @@ async def test_async_initiate_backup_with_agent_error(
{
"backup_id": "abc123",
"failed_agent_ids": ["test.remote"],
"with_automatic_settings": False,
}
]

View File

@ -34,6 +34,7 @@ from tests.typing import WebSocketGenerator
BACKUP_CALL = call(
agent_ids=["test.test-agent"],
backup_name="test-name",
extra_metadata={"instance_id": ANY, "with_automatic_settings": True},
include_addons=["test-addon"],
include_all_addons=False,
include_database=True,
@ -276,7 +277,6 @@ async def test_delete(
{
"backup_id": "abc123",
"failed_agent_ids": ["test.remote"],
"with_automatic_settings": False,
}
]
},

View File

@ -108,6 +108,7 @@ def mock_list_files() -> Generator[MagicMock]:
"backup_id": "23e64aec",
"date": "2024-11-22T11:48:48.727189+01:00",
"database_included": True,
"extra_metadata": {},
"folders": [],
"homeassistant_included": True,
"homeassistant_version": "2024.12.0.dev0",
@ -335,6 +336,7 @@ async def test_agents_upload(
backup_id=backup_id,
database_included=True,
date="1970-01-01T00:00:00.000Z",
extra_metadata={},
folders=[Folder.MEDIA, Folder.SHARE],
homeassistant_included=True,
homeassistant_version="2024.12.0",
@ -390,6 +392,7 @@ async def test_agents_upload_fail_put(
backup_id=backup_id,
database_included=True,
date="1970-01-01T00:00:00.000Z",
extra_metadata={},
folders=[Folder.MEDIA, Folder.SHARE],
homeassistant_included=True,
homeassistant_version="2024.12.0",
@ -438,6 +441,7 @@ async def test_agents_upload_fail_cloud(
backup_id=backup_id,
database_included=True,
date="1970-01-01T00:00:00.000Z",
extra_metadata={},
folders=[Folder.MEDIA, Folder.SHARE],
homeassistant_included=True,
homeassistant_version="2024.12.0",
@ -479,6 +483,7 @@ async def test_agents_upload_not_protected(
backup_id=backup_id,
database_included=True,
date="1970-01-01T00:00:00.000Z",
extra_metadata={},
folders=[Folder.MEDIA, Folder.SHARE],
homeassistant_included=True,
homeassistant_version="2024.12.0",

View File

@ -12,7 +12,7 @@ from datetime import datetime
from io import StringIO
import os
from typing import Any
from unittest.mock import AsyncMock, Mock, patch
from unittest.mock import ANY, AsyncMock, Mock, patch
from aiohasupervisor.exceptions import (
SupervisorBadRequestError,
@ -445,6 +445,7 @@ async def test_agent_upload(
backup_id=backup_id,
database_included=True,
date="1970-01-01T00:00:00.000Z",
extra_metadata={},
folders=[Folder.MEDIA, Folder.SHARE],
homeassistant_included=True,
homeassistant_version="2024.12.0",
@ -622,6 +623,10 @@ DEFAULT_BACKUP_OPTIONS = supervisor_backups.PartialBackupOptions(
addons=None,
background=True,
compressed=True,
extra={
"instance_id": ANY,
"with_automatic_settings": False,
},
folders=None,
homeassistant_exclude_database=False,
homeassistant=True,
@ -876,6 +881,7 @@ async def test_agent_receive_remote_backup(
backup_id=backup_id,
database_included=True,
date="1970-01-01T00:00:00.000Z",
extra_metadata={},
folders=[Folder.MEDIA, Folder.SHARE],
homeassistant_included=True,
homeassistant_version="2024.12.0",

View File

@ -14,6 +14,7 @@ from homeassistant.components.backup import (
)
from homeassistant.components.kitchen_sink import DOMAIN
from homeassistant.core import HomeAssistant
from homeassistant.helpers import instance_id
from homeassistant.setup import async_setup_component
from tests.typing import ClientSessionGenerator, WebSocketGenerator
@ -137,6 +138,10 @@ async def test_agents_upload(
backup_id=backup_id,
database_included=True,
date="1970-01-01T00:00:00.000Z",
extra_metadata={
"instance_id": await instance_id.async_get(hass),
"with_automatic_settings": False,
},
folders=[Folder.MEDIA, Folder.SHARE],
homeassistant_included=True,
homeassistant_version="2024.12.0",