Add support for per-backup agent encryption flag to hassio (#136828)

* Add support for per-backup agent encryption flag to hassio

* Improve comment

* Set password to None when supervisor should not encrypt
Erik Montnemery 2025-01-29 18:23:25 +01:00 committed by Bram Kragten
parent 6a8e45c51e
commit 8749210d1b
2 changed files with 350 additions and 21 deletions

homeassistant/components/hassio/backup.py

@@ -97,7 +97,7 @@ def async_register_backup_agents_listener(
def _backup_details_to_agent_backup(
-details: supervisor_backups.BackupComplete,
+details: supervisor_backups.BackupComplete, location: str | None
) -> AgentBackup:
"""Convert a supervisor backup details object to an agent backup."""
homeassistant_included = details.homeassistant is not None
@@ -109,6 +109,7 @@ def _backup_details_to_agent_backup(
AddonInfo(name=addon.name, slug=addon.slug, version=addon.version)
for addon in details.addons
]
location = location or LOCATION_LOCAL
return AgentBackup(
addons=addons,
backup_id=details.slug,
@@ -119,8 +120,8 @@ def _backup_details_to_agent_backup(
homeassistant_included=homeassistant_included,
homeassistant_version=details.homeassistant,
name=details.name,
-protected=details.protected,
-size=details.size_bytes,
+protected=details.location_attributes[location].protected,
+size=details.location_attributes[location].size_bytes,
)
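
The lookup change above is the crux of the patch: a single supervisor backup can now be encrypted in one location and unencrypted in another, so `protected` and `size_bytes` must be read from the per-location attributes rather than from the top-level fields. A minimal sketch of that behavior, using a stand-in for the aiohasupervisor model (the field names match the diff; the location names and the local-location value are illustrative):

from dataclasses import dataclass

@dataclass
class LocationAttrs:
    """Stand-in for supervisor_backups.BackupLocationAttributes."""
    protected: bool
    size_bytes: int

# One backup, two locations: encrypted on the network share, plain locally.
location_attributes = {
    ".local": LocationAttrs(protected=False, size_bytes=1048576),
    "share1": LocationAttrs(protected=True, size_bytes=1048592),
}

def protected_in(location: str | None) -> bool:
    # None denotes the local backup location, mirroring the
    # `location = location or LOCATION_LOCAL` fallback in the diff.
    return location_attributes[location or ".local"].protected

assert protected_in("share1") is True
assert protected_in(None) is False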
@@ -158,8 +159,23 @@ class SupervisorBackupAgent(BackupAgent):
) -> None:
"""Upload a backup.
Not required for supervisor; the SupervisorBackupReaderWriter stores files.
The upload will be skipped if the backup already exists in the agent's location.
"""
if await self.async_get_backup(backup.backup_id):
_LOGGER.debug(
"Backup %s already exists in location %s",
backup.backup_id,
self.location,
)
return
stream = await open_stream()
upload_options = supervisor_backups.UploadBackupOptions(
location={self.location}
)
await self._client.backups.upload_backup(
stream,
upload_options,
)
async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]:
"""List backups."""
@@ -169,7 +185,7 @@ class SupervisorBackupAgent(BackupAgent):
if not backup.locations or self.location not in backup.locations:
continue
details = await self._client.backups.backup_info(backup.slug)
-result.append(_backup_details_to_agent_backup(details))
+result.append(_backup_details_to_agent_backup(details, self.location))
return result
async def async_get_backup(
@@ -181,7 +197,7 @@ class SupervisorBackupAgent(BackupAgent):
details = await self._client.backups.backup_info(backup_id)
if self.location not in details.locations:
return None
-return _backup_details_to_agent_backup(details)
+return _backup_details_to_agent_backup(details, self.location)
async def async_delete_backup(self, backup_id: str, **kwargs: Any) -> None:
"""Remove a backup."""
@@ -246,7 +262,41 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
for agent_id in agent_ids
if manager.backup_agents[agent_id].domain == DOMAIN
]
locations = [agent.location for agent in hassio_agents]
# Supervisor does not support creating backups spread across multiple
# locations when some locations are encrypted and some are not.
# It's inefficient to let core do all the copying, so we want to let
# supervisor handle as much as possible.
# Therefore, we split the locations into two lists: encrypted and decrypted.
# The longer list is sent to supervisor, and the remaining locations are
# handled by async_upload_backup.
# If the lists are the same length, it does not matter which one we send;
# we send the encrypted list to have well-defined behavior.
encrypted_locations: list[str | None] = []
decrypted_locations: list[str | None] = []
agents_settings = manager.config.data.agents
for hassio_agent in hassio_agents:
if password is not None:
if agent_settings := agents_settings.get(hassio_agent.agent_id):
if agent_settings.protected:
encrypted_locations.append(hassio_agent.location)
else:
decrypted_locations.append(hassio_agent.location)
else:
encrypted_locations.append(hassio_agent.location)
else:
decrypted_locations.append(hassio_agent.location)
_LOGGER.debug("Encrypted locations: %s", encrypted_locations)
_LOGGER.debug("Decrypted locations: %s", decrypted_locations)
if hassio_agents:
if len(encrypted_locations) >= len(decrypted_locations):
locations = encrypted_locations
else:
locations = decrypted_locations
password = None
else:
locations = []
locations = locations or [LOCATION_CLOUD_BACKUP]
try:
backup = await self._client.backups.partial_backup(
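
The comment block in the hunk above is the heart of the change. Here is a compact sketch of the partition rule it describes, under the stated assumptions (supervisor applies a single password to an entire backup, and the larger group should be written by supervisor to minimise copying in core). The function name and input shape are illustrative, not the integration's API:

def partition(
    agents: list[tuple[str | None, bool]],  # (location, protected setting)
    password: str | None,
) -> tuple[list[str | None], list[str | None], str | None]:
    """Return (supervisor locations, core locations, password for supervisor)."""
    encrypted: list[str | None] = []
    decrypted: list[str | None] = []
    for location, protected in agents:
        # An agent gets an encrypted copy only if a password was supplied
        # and the agent has not opted out via its `protected` setting.
        (encrypted if password is not None and protected else decrypted).append(location)
    if len(encrypted) >= len(decrypted):
        # Ties go to the encrypted side for well-defined behavior.
        return encrypted, decrypted, password
    return decrypted, encrypted, None

# Password set, one of two agents opts out; the tie goes to the encrypted side.
sup, core, pw = partition([(None, True), ("share1", False)], "hunter2")
assert (sup, core, pw) == ([None], ["share1"], "hunter2")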
@@ -257,7 +307,7 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
name=backup_name,
password=password,
compressed=True,
-location=locations or LOCATION_CLOUD_BACKUP,
+location=locations,
homeassistant_exclude_database=not include_database,
background=True,
extra=extra_metadata,
@@ -267,7 +317,9 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
raise BackupReaderWriterError(f"Error creating backup: {err}") from err
backup_task = self._hass.async_create_task(
self._async_wait_for_backup(
-backup, remove_after_upload=not bool(locations)
+backup,
+locations,
+remove_after_upload=locations == [LOCATION_CLOUD_BACKUP],
),
name="backup_manager_create_backup",
eager_start=False, # To ensure the task is not started before we return
@@ -276,7 +328,11 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
return (NewBackup(backup_job_id=backup.job_id), backup_task)
async def _async_wait_for_backup(
-self, backup: supervisor_backups.NewBackup, *, remove_after_upload: bool
+self,
+backup: supervisor_backups.NewBackup,
+locations: list[str | None],
+*,
+remove_after_upload: bool,
) -> WrittenBackup:
"""Wait for a backup to complete."""
backup_complete = asyncio.Event()
@@ -327,7 +383,7 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
) from err
return WrittenBackup(
-backup=_backup_details_to_agent_backup(details),
+backup=_backup_details_to_agent_backup(details, locations[0]),
open_stream=open_backup,
release_stream=remove_backup,
)
@@ -347,20 +403,19 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
for agent_id in agent_ids
if manager.backup_agents[agent_id].domain == DOMAIN
]
-locations = {agent.location for agent in hassio_agents}
+locations = [agent.location for agent in hassio_agents]
+locations = locations or [LOCATION_CLOUD_BACKUP]
backup_id = await self._client.backups.upload_backup(
stream,
-supervisor_backups.UploadBackupOptions(
-location=locations or {LOCATION_CLOUD_BACKUP}
-),
+supervisor_backups.UploadBackupOptions(location=set(locations)),
)
async def open_backup() -> AsyncIterator[bytes]:
return await self._client.backups.download_backup(backup_id)
async def remove_backup() -> None:
-if locations:
+if locations != [LOCATION_CLOUD_BACKUP]:
return
await self._client.backups.remove_backup(
backup_id,
@@ -372,7 +427,7 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
details = await self._client.backups.backup_info(backup_id)
return WrittenBackup(
-backup=_backup_details_to_agent_backup(details),
+backup=_backup_details_to_agent_backup(details, locations[0]),
open_stream=open_backup,
release_stream=remove_backup,
)
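
Normalising `locations` to `locations or [LOCATION_CLOUD_BACKUP]` up front does two jobs: it makes `locations[0]` safe to pass to `_backup_details_to_agent_backup`, and it turns the cleanup check from a truthiness test into an explicit comparison against the staging location. A sketch of the resulting lifecycle (the constant's value here is illustrative; the real one lives in the integration):

LOCATION_CLOUD_BACKUP = ".cloud_backup"  # illustrative value

def plan_receive(agent_locations: list[str | None]) -> tuple[list[str | None], bool]:
    """Return (locations to upload to, remove once remote agents are done)."""
    locations = agent_locations or [LOCATION_CLOUD_BACKUP]
    # A backup parked only in the hidden staging location exists solely so
    # remote agents can stream it out; it is deleted after they consume it.
    return locations, locations == [LOCATION_CLOUD_BACKUP]

assert plan_receive([]) == ([".cloud_backup"], True)   # remote-only receive
assert plan_receive([None, "share1"])[1] is False      # real locations: keep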

tests/components/hassio/test_backup.py

@@ -245,6 +245,56 @@ TEST_BACKUP_DETAILS_4 = supervisor_backups.BackupComplete(
type=TEST_BACKUP.type,
)
TEST_BACKUP_5 = supervisor_backups.Backup(
compressed=False,
content=supervisor_backups.BackupContent(
addons=["ssl"],
folders=[supervisor_backups.Folder.SHARE],
homeassistant=True,
),
date=datetime.fromisoformat("1970-01-01T00:00:00Z"),
location=LOCATION_CLOUD_BACKUP,
location_attributes={
LOCATION_CLOUD_BACKUP: supervisor_backups.BackupLocationAttributes(
protected=False, size_bytes=1048576
)
},
locations={LOCATION_CLOUD_BACKUP},
name="Test",
protected=False,
size=1.0,
size_bytes=1048576,
slug="abc123",
type=supervisor_backups.BackupType.PARTIAL,
)
TEST_BACKUP_DETAILS_5 = supervisor_backups.BackupComplete(
addons=[
supervisor_backups.BackupAddon(
name="Terminal & SSH",
size=0.0,
slug="core_ssh",
version="9.14.0",
)
],
compressed=TEST_BACKUP_5.compressed,
date=TEST_BACKUP_5.date,
extra=None,
folders=[supervisor_backups.Folder.SHARE],
homeassistant_exclude_database=False,
homeassistant="2024.12.0",
location=TEST_BACKUP_5.location,
location_attributes=TEST_BACKUP_5.location_attributes,
locations=TEST_BACKUP_5.locations,
name=TEST_BACKUP_5.name,
protected=TEST_BACKUP_5.protected,
repositories=[],
size=TEST_BACKUP_5.size,
size_bytes=TEST_BACKUP_5.size_bytes,
slug=TEST_BACKUP_5.slug,
supervisor_version="2024.11.2",
type=TEST_BACKUP_5.type,
)
@pytest.fixture(autouse=True)
def fixture_supervisor_environ() -> Generator[None]:
@@ -821,6 +871,230 @@ async def test_reader_writer_create(
assert response["event"] == {"manager_state": "idle"}
@pytest.mark.usefixtures("hassio_client")
@pytest.mark.parametrize(
(
"commands",
"password",
"agent_ids",
"password_sent_to_supervisor",
"create_locations",
"create_protected",
"upload_locations",
),
[
(
[],
None,
["hassio.local", "hassio.share1", "hassio.share2", "hassio.share3"],
None,
[None, "share1", "share2", "share3"],
False,
[],
),
(
[],
"hunter2",
["hassio.local", "hassio.share1", "hassio.share2", "hassio.share3"],
"hunter2",
[None, "share1", "share2", "share3"],
True,
[],
),
(
[
{
"type": "backup/config/update",
"agents": {
"hassio.local": {"protected": False},
},
}
],
"hunter2",
["hassio.local", "hassio.share1", "hassio.share2", "hassio.share3"],
"hunter2",
["share1", "share2", "share3"],
True,
[None],
),
(
[
{
"type": "backup/config/update",
"agents": {
"hassio.local": {"protected": False},
"hassio.share1": {"protected": False},
},
}
],
"hunter2",
["hassio.local", "hassio.share1", "hassio.share2", "hassio.share3"],
"hunter2",
["share2", "share3"],
True,
[None, "share1"],
),
(
[
{
"type": "backup/config/update",
"agents": {
"hassio.local": {"protected": False},
"hassio.share1": {"protected": False},
"hassio.share2": {"protected": False},
},
}
],
"hunter2",
["hassio.local", "hassio.share1", "hassio.share2", "hassio.share3"],
None,
[None, "share1", "share2"],
True,
["share3"],
),
(
[
{
"type": "backup/config/update",
"agents": {
"hassio.local": {"protected": False},
},
}
],
"hunter2",
["hassio.local"],
None,
[None],
False,
[],
),
],
)
async def test_reader_writer_create_per_agent_encryption(
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
supervisor_client: AsyncMock,
commands: list[dict[str, Any]],
password: str | None,
agent_ids: list[str],
password_sent_to_supervisor: str | None,
create_locations: list[str | None],
create_protected: bool,
upload_locations: list[str | None],
) -> None:
"""Test generating a backup."""
client = await hass_ws_client(hass)
mounts = MountsInfo(
default_backup_mount=None,
mounts=[
supervisor_mounts.CIFSMountResponse(
share=f"share{i}",
name=f"share{i}",
read_only=False,
state=supervisor_mounts.MountState.ACTIVE,
user_path=f"share{i}",
usage=supervisor_mounts.MountUsage.BACKUP,
server=f"share{i}",
type=supervisor_mounts.MountType.CIFS,
)
for i in range(1, 4)
],
)
supervisor_client.backups.partial_backup.return_value.job_id = "abc123"
supervisor_client.backups.backup_info.return_value = replace(
TEST_BACKUP_DETAILS,
locations=create_locations,
location_attributes={
location or LOCATION_LOCAL: supervisor_backups.BackupLocationAttributes(
protected=create_protected,
size_bytes=TEST_BACKUP_DETAILS.size_bytes,
)
for location in create_locations
},
)
supervisor_client.mounts.info.return_value = mounts
assert await async_setup_component(hass, BACKUP_DOMAIN, {BACKUP_DOMAIN: {}})
for command in commands:
await client.send_json_auto_id(command)
result = await client.receive_json()
assert result["success"] is True
await client.send_json_auto_id({"type": "backup/subscribe_events"})
response = await client.receive_json()
assert response["event"] == {"manager_state": "idle"}
response = await client.receive_json()
assert response["success"]
await client.send_json_auto_id(
{
"type": "backup/generate",
"agent_ids": agent_ids,
"name": "Test",
"password": password,
}
)
response = await client.receive_json()
assert response["event"] == {
"manager_state": "create_backup",
"reason": None,
"stage": None,
"state": "in_progress",
}
response = await client.receive_json()
assert response["success"]
assert response["result"] == {"backup_job_id": "abc123"}
supervisor_client.backups.partial_backup.assert_called_once_with(
replace(
DEFAULT_BACKUP_OPTIONS,
password=password_sent_to_supervisor,
location=create_locations,
)
)
await client.send_json_auto_id(
{
"type": "supervisor/event",
"data": {
"event": "job",
"data": {"done": True, "uuid": "abc123", "reference": "test_slug"},
},
}
)
response = await client.receive_json()
assert response["success"]
response = await client.receive_json()
assert response["event"] == {
"manager_state": "create_backup",
"reason": None,
"stage": "upload_to_agents",
"state": "in_progress",
}
response = await client.receive_json()
assert response["event"] == {
"manager_state": "create_backup",
"reason": None,
"stage": None,
"state": "completed",
}
assert len(supervisor_client.backups.upload_backup.mock_calls) == len(
upload_locations
)
for call in supervisor_client.backups.upload_backup.mock_calls:
upload_call_locations: set = call.args[1].location
assert len(upload_call_locations) == 1
assert upload_call_locations.pop() in upload_locations
supervisor_client.backups.remove_backup.assert_not_called()
response = await client.receive_json()
assert response["event"] == {"manager_state": "idle"}
@pytest.mark.usefixtures("hassio_client", "setup_integration")
@pytest.mark.parametrize(
("side_effect", "error_code", "error_message", "expected_reason"),
@@ -969,7 +1243,7 @@ async def test_reader_writer_create_download_remove_error(
"""Test download and remove error when generating a backup."""
client = await hass_ws_client(hass)
supervisor_client.backups.partial_backup.return_value.job_id = "abc123"
-supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS
+supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS_5
method_mock = getattr(supervisor_client.backups, method)
method_mock.side_effect = exception
@@ -1129,7 +1403,7 @@ async def test_reader_writer_create_remote_backup(
"""Test generating a backup which will be uploaded to a remote agent."""
client = await hass_ws_client(hass)
supervisor_client.backups.partial_backup.return_value.job_id = "abc123"
-supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS
+supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS_5
remote_agent = BackupAgentTest("remote")
await _setup_backup_platform(
@@ -1163,7 +1437,7 @@ async def test_reader_writer_create_remote_backup(
assert response["result"] == {"backup_job_id": "abc123"}
supervisor_client.backups.partial_backup.assert_called_once_with(
-replace(DEFAULT_BACKUP_OPTIONS, location=LOCATION_CLOUD_BACKUP),
+replace(DEFAULT_BACKUP_OPTIONS, location=[LOCATION_CLOUD_BACKUP]),
)
await client.send_json_auto_id(
@@ -1280,7 +1554,7 @@ async def test_agent_receive_remote_backup(
"""Test receiving a backup which will be uploaded to a remote agent."""
client = await hass_client()
backup_id = "test-backup"
-supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS
+supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS_5
supervisor_client.backups.upload_backup.return_value = "test_slug"
test_backup = AgentBackup(
addons=[AddonInfo(name="Test", slug="test", version="1.0.0")],