Create repair issue if not all add-ons or folders were backed up (#144999)

* Create repair issue if not all add-ons or folders were backed up

* Fix spelling

* Fix _collect_errors

* Make time patching by freezegun work with mashumaro

* Add test to hassio

* Add fixture

* Fix generating list of folders

* Add issue creation tests

* Include name of failing add-on in message

* Improve code formatting

* Rename AddonError to AddonErrorData
Erik Montnemery 2025-05-20 15:23:52 +02:00 committed by GitHub
parent fc62bc5fc1
commit 8e74f63d47
10 changed files with 660 additions and 73 deletions


@@ -23,6 +23,7 @@ from .const import DATA_MANAGER, DOMAIN
from .coordinator import BackupConfigEntry, BackupDataUpdateCoordinator
from .http import async_register_http_views
from .manager import (
AddonErrorData,
BackupManager,
BackupManagerError,
BackupPlatformEvent,
@@ -48,6 +49,7 @@ from .util import suggested_filename, suggested_filename_from_name_date
from .websocket import async_register_websocket_handlers
__all__ = [
"AddonErrorData",
"AddonInfo",
"AgentBackup",
"BackupAgent",

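AddonErrorData is re-exported here, making it part of the backup integration's public API. A minimal usage sketch (illustrative, not part of the commit; assumes only the import path shown above):

from homeassistant.components.backup import AddonErrorData

# One entry per failing add-on, keyed by slug; errors are (type, message) pairs.
addon_errors = {
    "test_addon": AddonErrorData(name="Test Add-on", errors=[("BackupError", "Boom!")]),
}
# Issue messages join the human-readable names, not the slugs:
assert ", ".join(val.name for val in addon_errors.values()) == "Test Add-on"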

@@ -106,11 +106,21 @@ class ManagerBackup(BaseBackup):
with_automatic_settings: bool | None
@dataclass(frozen=True, kw_only=True, slots=True)
class AddonErrorData:
"""Addon error class."""
name: str
errors: list[tuple[str, str]]
@dataclass(frozen=True, kw_only=True, slots=True)
class WrittenBackup:
"""Written backup class."""
addon_errors: dict[str, AddonErrorData]
backup: AgentBackup
folder_errors: dict[Folder, list[tuple[str, str]]]
open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]]
release_stream: Callable[[], Coroutine[Any, Any, None]]
@@ -1208,7 +1218,9 @@ class BackupManager:
backup_success = True
if with_automatic_settings:
self._update_issue_after_agent_upload(agent_errors, unavailable_agents)
self._update_issue_after_agent_upload(
written_backup, agent_errors, unavailable_agents
)
# delete old backups more numerous than copies
# try this regardless of agent errors above
await delete_backups_exceeding_configured_count(self)
@@ -1354,8 +1366,10 @@ class BackupManager:
for subscription in self._backup_event_subscriptions:
subscription(event)
def _update_issue_backup_failed(self) -> None:
"""Update issue registry when a backup fails."""
def _create_automatic_backup_failed_issue(
self, translation_key: str, translation_placeholders: dict[str, str] | None
) -> None:
"""Create an issue in the issue registry for automatic backup failures."""
ir.async_create_issue(
self.hass,
DOMAIN,
@@ -1364,37 +1378,64 @@
is_persistent=True,
learn_more_url="homeassistant://config/backup",
severity=ir.IssueSeverity.WARNING,
translation_key="automatic_backup_failed_create",
translation_key=translation_key,
translation_placeholders=translation_placeholders,
)
def _update_issue_backup_failed(self) -> None:
"""Update issue registry when a backup fails."""
self._create_automatic_backup_failed_issue(
"automatic_backup_failed_create", None
)
def _update_issue_after_agent_upload(
self, agent_errors: dict[str, Exception], unavailable_agents: list[str]
self,
written_backup: WrittenBackup,
agent_errors: dict[str, Exception],
unavailable_agents: list[str],
) -> None:
"""Update issue registry after a backup is uploaded to agents."""
if not agent_errors and not unavailable_agents:
addon_errors = written_backup.addon_errors
failed_agents = unavailable_agents + [
self.backup_agents[agent_id].name for agent_id in agent_errors
]
folder_errors = written_backup.folder_errors
if not failed_agents and not addon_errors and not folder_errors:
# No issues to report, clear previous error
ir.async_delete_issue(self.hass, DOMAIN, "automatic_backup_failed")
return
ir.async_create_issue(
self.hass,
DOMAIN,
"automatic_backup_failed",
is_fixable=False,
is_persistent=True,
learn_more_url="homeassistant://config/backup",
severity=ir.IssueSeverity.WARNING,
translation_key="automatic_backup_failed_upload_agents",
translation_placeholders={
"failed_agents": ", ".join(
chain(
(
self.backup_agents[agent_id].name
for agent_id in agent_errors
),
unavailable_agents,
)
)
},
)
if (agent_errors or unavailable_agents) and not (addon_errors or folder_errors):
# No issues with add-ons or folders, but issues with agents
self._create_automatic_backup_failed_issue(
"automatic_backup_failed_upload_agents",
{"failed_agents": ", ".join(failed_agents)},
)
elif addon_errors and not (agent_errors or unavailable_agents or folder_errors):
# No issues with agents or folders, but issues with add-ons
self._create_automatic_backup_failed_issue(
"automatic_backup_failed_addons",
{"failed_addons": ", ".join(val.name for val in addon_errors.values())},
)
elif folder_errors and not (agent_errors or unavailable_agents or addon_errors):
# No issues with agents or add-ons, but issues with folders
self._create_automatic_backup_failed_issue(
"automatic_backup_failed_folders",
{"failed_folders": ", ".join(folder for folder in folder_errors)},
)
else:
# Issues with agents, add-ons, and/or folders
self._create_automatic_backup_failed_issue(
"automatic_backup_failed_agents_addons_folders",
{
"failed_agents": ", ".join(failed_agents) or "-",
"failed_addons": (
", ".join(val.name for val in addon_errors.values()) or "-"
),
"failed_folders": ", ".join(f for f in folder_errors) or "-",
},
)
async def async_can_decrypt_on_download(
self,
@@ -1677,7 +1718,11 @@ class CoreBackupReaderWriter(BackupReaderWriter):
raise BackupReaderWriterError(str(err)) from err
return WrittenBackup(
backup=backup, open_stream=open_backup, release_stream=remove_backup
addon_errors={},
backup=backup,
folder_errors={},
open_stream=open_backup,
release_stream=remove_backup,
)
finally:
# Inform integrations the backup is done
@@ -1816,7 +1861,11 @@ class CoreBackupReaderWriter(BackupReaderWriter):
await async_add_executor_job(temp_file.unlink, True)
return WrittenBackup(
backup=backup, open_stream=open_backup, release_stream=remove_backup
addon_errors={},
backup=backup,
folder_errors={},
open_stream=open_backup,
release_stream=remove_backup,
)
async def async_restore_backup(

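The four-way branch in _update_issue_after_agent_upload reduces to a pure function over the three error sources (failed_agents combines unavailable agents with upload errors, as above). The sketch below is an illustrative restatement, not code from the commit; None means the previous issue is deleted:

def pick_translation_key(failed_agents, addon_errors, folder_errors):
    if not (failed_agents or addon_errors or folder_errors):
        return None  # all clear: delete "automatic_backup_failed"
    if failed_agents and not (addon_errors or folder_errors):
        return "automatic_backup_failed_upload_agents"
    if addon_errors and not (failed_agents or folder_errors):
        return "automatic_backup_failed_addons"
    if folder_errors and not (failed_agents or addon_errors):
        return "automatic_backup_failed_folders"
    return "automatic_backup_failed_agents_addons_folders"

assert pick_translation_key([], {}, {}) is None
assert pick_translation_key(["test.remote"], {}, {}) == "automatic_backup_failed_upload_agents"
assert pick_translation_key([], {"test_addon": "e"}, {"media": "e"}) == (
    "automatic_backup_failed_agents_addons_folders"
)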

@@ -11,6 +11,18 @@
"automatic_backup_failed_upload_agents": {
"title": "Automatic backup could not be uploaded to the configured locations",
"description": "The automatic backup could not be uploaded to the configured locations {failed_agents}. Please check the logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
},
"automatic_backup_failed_addons": {
"title": "Not all add-ons could be included in automatic backup",
"description": "Add-ons {failed_addons} could not be included in automatic backup. Please check the logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
},
"automatic_backup_failed_agents_addons_folders": {
"title": "Automatic backup was created with errors",
"description": "The automatic backup was created with errors:\n* Locations which the backup could not be uploaded to: {failed_agents}\n* Add-ons which could not be backed up: {failed_addons}\n* Folders which could not be backed up: {failed_folders}\n\nPlease check the logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
},
"automatic_backup_failed_folders": {
"title": "Not all folders could be included in automatic backup",
"description": "Folders {failed_folders} could not be included in automatic backup. Please check the logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
}
},
"services": {

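The placeholders in these strings are filled from translation_placeholders when the issue is rendered, with "-" standing in for an empty category in the combined issue. A plain str.format illustration (assumes the frontend substitutes placeholders equivalently):

description = (
    "The automatic backup was created with errors:\n"
    "* Locations which the backup could not be uploaded to: {failed_agents}\n"
    "* Add-ons which could not be backed up: {failed_addons}\n"
    "* Folders which could not be backed up: {failed_folders}"
)
print(description.format(failed_agents="-", failed_addons="Test Add-on", failed_folders="media"))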

@@ -19,12 +19,14 @@ from aiohasupervisor.exceptions import (
)
from aiohasupervisor.models import (
backups as supervisor_backups,
jobs as supervisor_jobs,
mounts as supervisor_mounts,
)
from aiohasupervisor.models.backups import LOCATION_CLOUD_BACKUP, LOCATION_LOCAL_STORAGE
from homeassistant.components.backup import (
DATA_MANAGER,
AddonErrorData,
AddonInfo,
AgentBackup,
BackupAgent,
@@ -401,6 +403,25 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
f"Backup failed: {create_errors or 'no backup_id'}"
)
# The backup was created successfully, check for non-critical errors
full_status = await self._client.jobs.get_job(backup.job_id)
_addon_errors = _collect_errors(
full_status, "backup_store_addons", "backup_addon_save"
)
addon_errors: dict[str, AddonErrorData] = {}
for slug, errors in _addon_errors.items():
try:
addon_info = await self._client.addons.addon_info(slug)
addon_errors[slug] = AddonErrorData(name=addon_info.name, errors=errors)
except SupervisorError as err:
_LOGGER.debug("Error getting addon %s: %s", slug, err)
addon_errors[slug] = AddonErrorData(name=slug, errors=errors)
_folder_errors = _collect_errors(
full_status, "backup_store_folders", "backup_folder_save"
)
folder_errors = {Folder(key): val for key, val in _folder_errors.items()}
async def open_backup() -> AsyncIterator[bytes]:
try:
return await self._client.backups.download_backup(backup_id)
@@ -430,7 +451,9 @@
) from err
return WrittenBackup(
addon_errors=addon_errors,
backup=_backup_details_to_agent_backup(details, locations[0]),
folder_errors=folder_errors,
open_stream=open_backup,
release_stream=remove_backup,
)
@@ -474,7 +497,9 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
details = await self._client.backups.backup_info(backup_id)
return WrittenBackup(
addon_errors={},
backup=_backup_details_to_agent_backup(details, locations[0]),
folder_errors={},
open_stream=open_backup,
release_stream=remove_backup,
)
@@ -696,6 +721,27 @@ class SupervisorBackupReaderWriter(BackupReaderWriter):
on_event(job.to_dict())
def _collect_errors(
job: supervisor_jobs.Job, child_job_name: str, grandchild_job_name: str
) -> dict[str, list[tuple[str, str]]]:
"""Collect errors from a job's grandchildren."""
errors: dict[str, list[tuple[str, str]]] = {}
for child_job in job.child_jobs:
if child_job.name != child_job_name:
continue
for grandchild in child_job.child_jobs:
if (
grandchild.name != grandchild_job_name
or not grandchild.errors
or not grandchild.reference
):
continue
errors[grandchild.reference] = [
(error.type, error.message) for error in grandchild.errors
]
return errors
async def _default_agent(client: SupervisorClient) -> str:
"""Return the default agent for creating a backup."""
mounts = await client.mounts.info()

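_collect_errors (defined above) walks exactly two levels below the root job and keeps grandchildren that both errored and carry a reference (the add-on slug or folder name). A toy example, with SimpleNamespace standing in for the aiohasupervisor job model:

from types import SimpleNamespace as NS

job = NS(child_jobs=[
    NS(name="backup_store_addons", child_jobs=[
        NS(
            name="backup_addon_save",
            reference="core_ssh",
            errors=[NS(type="BackupError", message="Can't write tarfile")],
            child_jobs=[],
        ),
    ]),
])
assert _collect_errors(job, "backup_store_addons", "backup_addon_save") == {
    "core_ssh": [("BackupError", "Can't write tarfile")]
}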

@@ -110,8 +110,10 @@ CONFIG_DIR_DIRS = {
def mock_create_backup() -> Generator[AsyncMock]:
"""Mock manager create backup."""
mock_written_backup = MagicMock(spec_set=WrittenBackup)
mock_written_backup.addon_errors = {}
mock_written_backup.backup.backup_id = "abc123"
mock_written_backup.backup.protected = False
mock_written_backup.folder_errors = {}
mock_written_backup.open_stream = AsyncMock()
mock_written_backup.release_stream = AsyncMock()
fut: Future[MagicMock] = Future()


@@ -35,6 +35,7 @@ from homeassistant.components.backup import (
from homeassistant.components.backup.agent import BackupAgentError
from homeassistant.components.backup.const import DATA_MANAGER
from homeassistant.components.backup.manager import (
AddonErrorData,
BackupManagerError,
BackupManagerExceptionGroup,
BackupManagerState,
@@ -123,7 +124,9 @@ async def test_create_backup_service(
new_backup = NewBackup(backup_job_id="time-123")
backup_task = AsyncMock(
return_value=WrittenBackup(
addon_errors={},
backup=TEST_BACKUP_ABC123,
folder_errors={},
open_stream=AsyncMock(),
release_stream=AsyncMock(),
),
@@ -320,7 +323,9 @@ async def test_async_create_backup(
new_backup = NewBackup(backup_job_id="time-123")
backup_task = AsyncMock(
return_value=WrittenBackup(
addon_errors={},
backup=TEST_BACKUP_ABC123,
folder_errors={},
open_stream=AsyncMock(),
release_stream=AsyncMock(),
),
@@ -962,6 +967,8 @@ async def delayed_boom(*args, **kwargs) -> tuple[NewBackup, Any]:
(
"automatic_agents",
"create_backup_command",
"create_backup_addon_errors",
"create_backup_folder_errors",
"create_backup_side_effect",
"upload_side_effect",
"create_backup_result",
@@ -972,6 +979,8 @@ async def delayed_boom(*args, **kwargs) -> tuple[NewBackup, Any]:
(
["test.remote"],
{"type": "backup/generate", "agent_ids": ["test.remote"]},
{},
{},
None,
None,
True,
@@ -980,6 +989,8 @@ async def delayed_boom(*args, **kwargs) -> tuple[NewBackup, Any]:
(
["test.remote"],
{"type": "backup/generate_with_automatic_settings"},
{},
{},
None,
None,
True,
@@ -989,6 +1000,8 @@ async def delayed_boom(*args, **kwargs) -> tuple[NewBackup, Any]:
(
["test.remote", "test.unknown"],
{"type": "backup/generate", "agent_ids": ["test.remote", "test.unknown"]},
{},
{},
None,
None,
True,
@@ -1005,6 +1018,8 @@ async def delayed_boom(*args, **kwargs) -> tuple[NewBackup, Any]:
(
["test.remote", "test.unknown"],
{"type": "backup/generate_with_automatic_settings"},
{},
{},
None,
None,
True,
@@ -1026,6 +1041,8 @@ async def delayed_boom(*args, **kwargs) -> tuple[NewBackup, Any]:
(
["test.remote"],
{"type": "backup/generate", "agent_ids": ["test.remote"]},
{},
{},
Exception("Boom!"),
None,
False,
@@ -1034,6 +1051,8 @@ async def delayed_boom(*args, **kwargs) -> tuple[NewBackup, Any]:
(
["test.remote"],
{"type": "backup/generate_with_automatic_settings"},
{},
{},
Exception("Boom!"),
None,
False,
@@ -1048,6 +1067,8 @@ async def delayed_boom(*args, **kwargs) -> tuple[NewBackup, Any]:
(
["test.remote"],
{"type": "backup/generate", "agent_ids": ["test.remote"]},
{},
{},
delayed_boom,
None,
True,
@@ -1056,6 +1077,8 @@ async def delayed_boom(*args, **kwargs) -> tuple[NewBackup, Any]:
(
["test.remote"],
{"type": "backup/generate_with_automatic_settings"},
{},
{},
delayed_boom,
None,
True,
@@ -1070,6 +1093,8 @@ async def delayed_boom(*args, **kwargs) -> tuple[NewBackup, Any]:
(
["test.remote"],
{"type": "backup/generate", "agent_ids": ["test.remote"]},
{},
{},
None,
Exception("Boom!"),
True,
@@ -1078,6 +1103,8 @@ async def delayed_boom(*args, **kwargs) -> tuple[NewBackup, Any]:
(
["test.remote"],
{"type": "backup/generate_with_automatic_settings"},
{},
{},
None,
Exception("Boom!"),
True,
@@ -1088,6 +1115,157 @@ async def delayed_boom(*args, **kwargs) -> tuple[NewBackup, Any]:
}
},
),
# Add-ons can't be backed up
(
["test.remote"],
{"type": "backup/generate", "agent_ids": ["test.remote"]},
{
"test_addon": AddonErrorData(
name="Test Add-on", errors=[("test_error", "Boom!")]
)
},
{},
None,
None,
True,
{},
),
(
["test.remote"],
{"type": "backup/generate_with_automatic_settings"},
{
"test_addon": AddonErrorData(
name="Test Add-on", errors=[("test_error", "Boom!")]
)
},
{},
None,
None,
True,
{
(DOMAIN, "automatic_backup_failed"): {
"translation_key": "automatic_backup_failed_addons",
"translation_placeholders": {"failed_addons": "Test Add-on"},
}
},
),
# Folders can't be backed up
(
["test.remote"],
{"type": "backup/generate", "agent_ids": ["test.remote"]},
{},
{Folder.MEDIA: [("test_error", "Boom!")]},
None,
None,
True,
{},
),
(
["test.remote"],
{"type": "backup/generate_with_automatic_settings"},
{},
{Folder.MEDIA: [("test_error", "Boom!")]},
None,
None,
True,
{
(DOMAIN, "automatic_backup_failed"): {
"translation_key": "automatic_backup_failed_folders",
"translation_placeholders": {"failed_folders": "media"},
}
},
),
# Add-ons and folders can't be backed up
(
["test.remote"],
{"type": "backup/generate", "agent_ids": ["test.remote"]},
{
"test_addon": AddonErrorData(
name="Test Add-on", errors=[("test_error", "Boom!")]
)
},
{Folder.MEDIA: [("test_error", "Boom!")]},
None,
None,
True,
{},
),
(
["test.remote"],
{"type": "backup/generate_with_automatic_settings"},
{
"test_addon": AddonErrorData(
name="Test Add-on", errors=[("test_error", "Boom!")]
)
},
{Folder.MEDIA: [("test_error", "Boom!")]},
None,
None,
True,
{
(DOMAIN, "automatic_backup_failed"): {
"translation_key": "automatic_backup_failed_agents_addons_folders",
"translation_placeholders": {
"failed_addons": "Test Add-on",
"failed_agents": "-",
"failed_folders": "media",
},
},
},
),
# Add-ons and folders can't be backed up, one agent unavailable
(
["test.remote", "test.unknown"],
{"type": "backup/generate", "agent_ids": ["test.remote"]},
{
"test_addon": AddonErrorData(
name="Test Add-on", errors=[("test_error", "Boom!")]
)
},
{Folder.MEDIA: [("test_error", "Boom!")]},
None,
None,
True,
{
(DOMAIN, "automatic_backup_agents_unavailable_test.unknown"): {
"translation_key": "automatic_backup_agents_unavailable",
"translation_placeholders": {
"agent_id": "test.unknown",
"backup_settings": "/config/backup/settings",
},
},
},
),
(
["test.remote", "test.unknown"],
{"type": "backup/generate_with_automatic_settings"},
{
"test_addon": AddonErrorData(
name="Test Add-on", errors=[("test_error", "Boom!")]
)
},
{Folder.MEDIA: [("test_error", "Boom!")]},
None,
None,
True,
{
(DOMAIN, "automatic_backup_failed"): {
"translation_key": "automatic_backup_failed_agents_addons_folders",
"translation_placeholders": {
"failed_addons": "Test Add-on",
"failed_agents": "test.unknown",
"failed_folders": "media",
},
},
(DOMAIN, "automatic_backup_agents_unavailable_test.unknown"): {
"translation_key": "automatic_backup_agents_unavailable",
"translation_placeholders": {
"agent_id": "test.unknown",
"backup_settings": "/config/backup/settings",
},
},
},
),
],
)
async def test_create_backup_failure_raises_issue(
@@ -1096,16 +1274,20 @@ async def test_create_backup_failure_raises_issue(
create_backup: AsyncMock,
automatic_agents: list[str],
create_backup_command: dict[str, Any],
create_backup_addon_errors: dict[str, AddonErrorData],
create_backup_folder_errors: dict[Folder, list[tuple[str, str]]],
create_backup_side_effect: Exception | None,
upload_side_effect: Exception | None,
create_backup_result: bool,
issues_after_create_backup: dict[tuple[str, str], dict[str, Any]],
) -> None:
"""Test backup issue is cleared after backup is created."""
"""Test issue is created when create backup has error."""
mock_agents = await setup_backup_integration(hass, remote_agents=["test.remote"])
ws_client = await hass_ws_client(hass)
create_backup.return_value[1].result().addon_errors = create_backup_addon_errors
create_backup.return_value[1].result().folder_errors = create_backup_folder_errors
create_backup.side_effect = create_backup_side_effect
await ws_client.send_json_auto_id(
@@ -1857,7 +2039,9 @@ async def test_receive_backup_busy_manager(
# finish the backup
backup_task.set_result(
WrittenBackup(
addon_errors={},
backup=TEST_BACKUP_ABC123,
folder_errors={},
open_stream=AsyncMock(),
release_stream=AsyncMock(),
)


@@ -0,0 +1,162 @@
{
"result": "ok",
"data": {
"name": "backup_manager_partial_backup",
"reference": "14a1ea4b",
"uuid": "400a90112553472a90d84a7e60d5265e",
"progress": 0,
"stage": "finishing_file",
"done": true,
"errors": [],
"created": "2025-05-14T08:56:22.801143+00:00",
"child_jobs": [
{
"name": "backup_store_homeassistant",
"reference": "14a1ea4b",
"uuid": "176318a1a8184b02b7e9ad3ec54ee5ec",
"progress": 0,
"stage": null,
"done": true,
"errors": [],
"created": "2025-05-14T08:56:22.807078+00:00",
"child_jobs": []
},
{
"name": "backup_store_addons",
"reference": "14a1ea4b",
"uuid": "42664cb8fd4e474f8919bd737877125b",
"progress": 0,
"stage": null,
"done": true,
"errors": [
{
"type": "BackupError",
"message": "Can't backup add-on core_ssh: Can't write tarfile: FAKE OS error during add-on backup",
"stage": null
},
{
"type": "BackupError",
"message": "Can't backup add-on core_whisper: Can't write tarfile: FAKE OS error during add-on backup",
"stage": null
}
],
"created": "2025-05-14T08:56:22.843960+00:00",
"child_jobs": [
{
"name": "backup_addon_save",
"reference": "core_ssh",
"uuid": "7cc7feb782e54345bdb5ca653928233f",
"progress": 0,
"stage": null,
"done": true,
"errors": [
{
"type": "BackupError",
"message": "Can't write tarfile: FAKE OS error during add-on backup",
"stage": null
}
],
"created": "2025-05-14T08:56:22.844160+00:00",
"child_jobs": []
},
{
"name": "backup_addon_save",
"reference": "core_whisper",
"uuid": "0cfb1163751740929e63a68df59dc13b",
"progress": 0,
"stage": null,
"done": true,
"errors": [
{
"type": "BackupError",
"message": "Can't write tarfile: FAKE OS error during add-on backup",
"stage": null
}
],
"created": "2025-05-14T08:56:22.850376+00:00",
"child_jobs": []
}
]
},
{
"name": "backup_store_folders",
"reference": "14a1ea4b",
"uuid": "dd4685b4aac9460ab0e1150fe5c968e1",
"progress": 0,
"stage": null,
"done": true,
"errors": [
{
"type": "BackupError",
"message": "Can't backup folder share: Can't write tarfile: FAKE OS error during folder backup",
"stage": null
},
{
"type": "BackupError",
"message": "Can't backup folder ssl: Can't write tarfile: FAKE OS error during folder backup",
"stage": null
},
{
"type": "BackupError",
"message": "Can't backup folder media: Can't write tarfile: FAKE OS error during folder backup",
"stage": null
}
],
"created": "2025-05-14T08:56:22.858227+00:00",
"child_jobs": [
{
"name": "backup_folder_save",
"reference": "share",
"uuid": "8a4dccd988f641a383abb469a478cbdb",
"progress": 0,
"stage": null,
"done": true,
"errors": [
{
"type": "BackupError",
"message": "Can't write tarfile: FAKE OS error during folder backup",
"stage": null
}
],
"created": "2025-05-14T08:56:22.858385+00:00",
"child_jobs": []
},
{
"name": "backup_folder_save",
"reference": "ssl",
"uuid": "f9b437376cc9428090606779eff35b41",
"progress": 0,
"stage": null,
"done": true,
"errors": [
{
"type": "BackupError",
"message": "Can't write tarfile: FAKE OS error during folder backup",
"stage": null
}
],
"created": "2025-05-14T08:56:22.859973+00:00",
"child_jobs": []
},
{
"name": "backup_folder_save",
"reference": "media",
"uuid": "b920835ef079403784fba4ff54437197",
"progress": 0,
"stage": null,
"done": true,
"errors": [
{
"type": "BackupError",
"message": "Can't write tarfile: FAKE OS error during folder backup",
"stage": null
}
],
"created": "2025-05-14T08:56:22.860792+00:00",
"child_jobs": []
}
]
}
]
}
}

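Applied to this fixture, _collect_errors is expected to yield the following (worked out by hand from the payload above; the reader/writer then maps folder keys to Folder members and resolves add-on slugs to display names):

addon_errors = {
    "core_ssh": [("BackupError", "Can't write tarfile: FAKE OS error during add-on backup")],
    "core_whisper": [("BackupError", "Can't write tarfile: FAKE OS error during add-on backup")],
}
folder_errors = {
    "share": [("BackupError", "Can't write tarfile: FAKE OS error during folder backup")],
    "ssl": [("BackupError", "Can't write tarfile: FAKE OS error during folder backup")],
    "media": [("BackupError", "Can't write tarfile: FAKE OS error during folder backup")],
}
# Matches the test expectation below: failed_folders == "share, ssl, media".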

@@ -10,6 +10,7 @@ from collections.abc import (
Iterable,
)
from dataclasses import replace
import datetime as dt
from datetime import datetime
from io import StringIO
import os
@@ -47,12 +48,13 @@ from homeassistant.components.backup import (
from homeassistant.components.hassio import DOMAIN
from homeassistant.components.hassio.backup import RESTORE_JOB_ID_ENV
from homeassistant.core import HomeAssistant
from homeassistant.helpers import issue_registry as ir
from homeassistant.helpers.backup import async_initialize_backup
from homeassistant.setup import async_setup_component
from .test_init import MOCK_ENVIRON
from tests.common import mock_platform
from tests.common import load_json_object_fixture, mock_platform
from tests.typing import ClientSessionGenerator, WebSocketGenerator
TEST_BACKUP = supervisor_backups.Backup(
@@ -986,6 +988,128 @@ async def test_reader_writer_create(
assert response["event"] == {"manager_state": "idle"}
@pytest.mark.usefixtures("addon_info", "hassio_client", "setup_backup_integration")
@pytest.mark.parametrize(
"addon_info_side_effect",
# Getting info fails for one of the addons, should fall back to slug
[[Mock(), SupervisorError("Boom")]],
)
async def test_reader_writer_create_addon_folder_error(
hass: HomeAssistant,
hass_ws_client: WebSocketGenerator,
freezer: FrozenDateTimeFactory,
supervisor_client: AsyncMock,
addon_info_side_effect: list[Any],
) -> None:
"""Test generating a backup."""
addon_info_side_effect[0].name = "Advanced SSH & Web Terminal"
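# Confirm the patched time classes are in effect: freezegun has replaced
# datetime.datetime with HAFakeDatetime (see tests/patch_time.py).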
assert dt.datetime.__name__ == "HAFakeDatetime"
assert dt.HAFakeDatetime.__name__ == "HAFakeDatetime"
client = await hass_ws_client(hass)
freezer.move_to("2025-01-30 13:42:12.345678")
supervisor_client.backups.partial_backup.return_value.job_id = UUID(TEST_JOB_ID)
supervisor_client.backups.backup_info.return_value = TEST_BACKUP_DETAILS
supervisor_client.jobs.get_job.side_effect = [
TEST_JOB_NOT_DONE,
supervisor_jobs.Job.from_dict(
load_json_object_fixture(
"backup_done_with_addon_folder_errors.json", DOMAIN
)["data"]
),
]
issue_registry = ir.async_get(hass)
assert not issue_registry.issues
await client.send_json_auto_id({"type": "backup/subscribe_events"})
response = await client.receive_json()
assert response["event"] == {"manager_state": "idle"}
response = await client.receive_json()
assert response["success"]
await client.send_json_auto_id(
{
"type": "backup/config/update",
"create_backup": {
"agent_ids": ["hassio.local"],
"include_addons": ["core_ssh", "core_whisper"],
"include_all_addons": False,
"include_database": True,
"include_folders": ["media", "share"],
"name": "Test",
},
}
)
response = await client.receive_json()
assert response["success"]
await client.send_json_auto_id({"type": "backup/generate_with_automatic_settings"})
response = await client.receive_json()
assert response["event"] == {
"manager_state": "create_backup",
"reason": None,
"stage": None,
"state": "in_progress",
}
response = await client.receive_json()
assert response["success"]
assert response["result"] == {"backup_job_id": TEST_JOB_ID}
supervisor_client.backups.partial_backup.assert_called_once_with(
replace(
DEFAULT_BACKUP_OPTIONS,
addons={"core_ssh", "core_whisper"},
extra=DEFAULT_BACKUP_OPTIONS.extra | {"with_automatic_settings": True},
folders={Folder.MEDIA, Folder.SHARE, Folder.SSL},
)
)
await client.send_json_auto_id(
{
"type": "supervisor/event",
"data": {
"event": "job",
"data": {"done": True, "uuid": TEST_JOB_ID, "reference": "test_slug"},
},
}
)
response = await client.receive_json()
assert response["success"]
response = await client.receive_json()
assert response["event"] == {
"manager_state": "create_backup",
"reason": None,
"stage": "upload_to_agents",
"state": "in_progress",
}
response = await client.receive_json()
assert response["event"] == {
"manager_state": "create_backup",
"reason": None,
"stage": None,
"state": "completed",
}
supervisor_client.backups.download_backup.assert_not_called()
supervisor_client.backups.remove_backup.assert_not_called()
response = await client.receive_json()
assert response["event"] == {"manager_state": "idle"}
# Check that the expected issue was created
assert list(issue_registry.issues) == [("backup", "automatic_backup_failed")]
issue = issue_registry.issues[("backup", "automatic_backup_failed")]
assert issue.translation_key == "automatic_backup_failed_agents_addons_folders"
assert issue.translation_placeholders == {
"failed_addons": "Advanced SSH & Web Terminal, core_whisper",
"failed_agents": "-",
"failed_folders": "share, ssl, media",
}
@pytest.mark.usefixtures("hassio_client", "setup_backup_integration")
async def test_reader_writer_create_report_progress(
hass: HomeAssistant,


@@ -52,7 +52,7 @@ from homeassistant.exceptions import ServiceNotFound
from . import patch_recorder # isort:skip
# Setup patching of dt_util time functions before any other Home Assistant imports
from . import patch_time # noqa: F401, isort:skip
from . import patch_time # isort:skip
from homeassistant import components, core as ha, loader, runner
from homeassistant.auth.const import GROUP_ID_ADMIN, GROUP_ID_READ_ONLY
@@ -190,14 +190,14 @@ def pytest_runtest_setup() -> None:
pytest_socket.socket_allow_hosts(["127.0.0.1"])
pytest_socket.disable_socket(allow_unix_socket=True)
freezegun.api.datetime_to_fakedatetime = ha_datetime_to_fakedatetime # type: ignore[attr-defined]
freezegun.api.FakeDatetime = HAFakeDatetime # type: ignore[attr-defined]
freezegun.api.datetime_to_fakedatetime = patch_time.ha_datetime_to_fakedatetime # type: ignore[attr-defined]
freezegun.api.FakeDatetime = patch_time.HAFakeDatetime # type: ignore[attr-defined]
def adapt_datetime(val):
return val.isoformat(" ")
# Setup HAFakeDatetime converter for sqlite3
sqlite3.register_adapter(HAFakeDatetime, adapt_datetime)
sqlite3.register_adapter(patch_time.HAFakeDatetime, adapt_datetime)
# Setup HAFakeDatetime converter for pymysql
try:
@@ -206,48 +206,11 @@ def pytest_runtest_setup() -> None:
except ImportError:
pass
else:
MySQLdb_converters.conversions[HAFakeDatetime] = (
MySQLdb_converters.conversions[patch_time.HAFakeDatetime] = (
MySQLdb_converters.DateTime2literal
)
def ha_datetime_to_fakedatetime(datetime) -> freezegun.api.FakeDatetime: # type: ignore[name-defined]
"""Convert datetime to FakeDatetime.
Modified to include https://github.com/spulec/freezegun/pull/424.
"""
return freezegun.api.FakeDatetime( # type: ignore[attr-defined]
datetime.year,
datetime.month,
datetime.day,
datetime.hour,
datetime.minute,
datetime.second,
datetime.microsecond,
datetime.tzinfo,
fold=datetime.fold,
)
class HAFakeDatetime(freezegun.api.FakeDatetime): # type: ignore[name-defined]
"""Modified to include https://github.com/spulec/freezegun/pull/424."""
@classmethod
def now(cls, tz=None):
"""Return frozen now."""
now = cls._time_to_freeze() or freezegun.api.real_datetime.now()
if tz:
result = tz.fromutc(now.replace(tzinfo=tz))
else:
result = now
# Add the _tz_offset only if it's non-zero to preserve fold
if cls._tz_offset():
result += cls._tz_offset()
return ha_datetime_to_fakedatetime(result)
def check_real[**_P, _R](func: Callable[_P, Coroutine[Any, Any, _R]]):
"""Force a function to require a keyword _test_real to be passed in."""


@@ -5,6 +5,49 @@ from __future__ import annotations
import datetime
import time
import freezegun
def ha_datetime_to_fakedatetime(datetime) -> freezegun.api.FakeDatetime: # type: ignore[name-defined]
"""Convert datetime to FakeDatetime.
Modified to include https://github.com/spulec/freezegun/pull/424.
"""
return freezegun.api.FakeDatetime( # type: ignore[attr-defined]
datetime.year,
datetime.month,
datetime.day,
datetime.hour,
datetime.minute,
datetime.second,
datetime.microsecond,
datetime.tzinfo,
fold=datetime.fold,
)
class HAFakeDatetime(freezegun.api.FakeDatetime): # type: ignore[name-defined]
"""Modified to include https://github.com/spulec/freezegun/pull/424."""
@classmethod
def now(cls, tz=None):
"""Return frozen now."""
now = cls._time_to_freeze() or freezegun.api.real_datetime.now()
if tz:
result = tz.fromutc(now.replace(tzinfo=tz))
else:
result = now
# Add the _tz_offset only if it's non-zero to preserve fold
if cls._tz_offset():
result += cls._tz_offset()
return ha_datetime_to_fakedatetime(result)
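# Illustration (not part of this file): why now() adds _tz_offset only when
# it is non-zero. Plain datetime arithmetic resets fold to 0, so
# unconditionally adding a zero offset would drop the fold flag on
# ambiguous local times:
#
#     from datetime import datetime, timedelta
#     ambiguous = datetime(2021, 11, 7, 1, 30, fold=1)
#     assert (ambiguous + timedelta(0)).fold == 0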
# Needed by Mashumaro
datetime.HAFakeDatetime = HAFakeDatetime
# Do not add any Home Assistant import here
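A hypothetical sketch of what the datetime.HAFakeDatetime assignment above enables (the eval'd reference is an assumption for illustration, not mashumaro's actual generated code): deserialization code that refers to the class as an attribute of the datetime module only resolves if the attribute exists.

import datetime

class HAFakeDatetime(datetime.datetime):  # stand-in for the class above
    pass

datetime.HAFakeDatetime = HAFakeDatetime  # mirrors the assignment above

# Generated code with a textual "datetime.HAFakeDatetime" reference resolves:
value = eval("datetime.HAFakeDatetime(2025, 1, 30)", {"datetime": datetime})
assert isinstance(value, HAFakeDatetime)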