Don't pollute config dir with deleted duplicated statistics (#62489)
parent cbcd6d458e
commit cab2a74b5f
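In short, this commit stops the duplicate-statistics cleanup from writing its JSON backup of non-identical duplicate rows straight into the Home Assistant config directory; the backup now goes into a .deleted_statistics/ subdirectory that is created on demand. The snippet below is a minimal, self-contained sketch of that behavior, not the recorder's actual code: the helper name backup_non_identical_duplicates and its config_dir/duplicates parameters are illustrative stand-ins.

# Minimal sketch of the backup behavior introduced by this commit; the helper
# name and its parameters are hypothetical stand-ins for the recorder internals.
import json
import os
from datetime import datetime, timezone


def backup_non_identical_duplicates(config_dir: str, duplicates: list) -> str:
    """Dump duplicate rows into <config_dir>/.deleted_statistics/ and return the file path."""
    isotime = datetime.now(timezone.utc).isoformat()
    backup_path = os.path.join(
        config_dir, ".deleted_statistics", f"deleted_statistics.{isotime}.json"
    )
    # Create the hidden subdirectory so the config dir itself stays uncluttered.
    os.makedirs(os.path.dirname(backup_path), exist_ok=True)
    with open(backup_path, "w", encoding="utf8") as backup_file:
        json.dump(duplicates, backup_file, indent=4, default=str)
    return backup_path

The directory creation with exist_ok=True is what lets the same code path work on the first run (directory missing) and on later runs (directory already present).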
@@ -9,6 +9,7 @@ from datetime import datetime, timedelta
 from itertools import chain, groupby
 import json
 import logging
+import os
 import re
 from statistics import mean
 from typing import TYPE_CHECKING, Any, Literal

@@ -365,8 +366,10 @@ def delete_duplicates(instance: Recorder, session: scoped_session) -> None:
 
     if non_identical_duplicates:
         isotime = dt_util.utcnow().isoformat()
-        backup_file_name = f"deleted_statistics.{isotime}.json"
+        backup_file_name = f".deleted_statistics/deleted_statistics.{isotime}.json"
         backup_path = instance.hass.config.path(backup_file_name)
+
+        os.makedirs(os.path.dirname(backup_path), exist_ok=True)
         with open(backup_path, "w", encoding="utf8") as backup_file:
             json.dump(
                 non_identical_duplicates,

@@ -993,7 +993,7 @@ def test_delete_duplicates_non_identical(caplog, tmpdir):
     assert "Found duplicated" not in caplog.text
 
     isotime = dt_util.utcnow().isoformat()
-    backup_file_name = f"deleted_statistics.{isotime}.json"
+    backup_file_name = f".deleted_statistics/deleted_statistics.{isotime}.json"
 
     with open(hass.config.path(backup_file_name)) as backup_file:
         backup = json.load(backup_file)