Preload storage for integrations we know we are going to setup (#114192)
parent 345fa75562
commit cabc4f797a
@@ -78,6 +78,7 @@ from .helpers import (
     translation,
 )
 from .helpers.dispatcher import async_dispatcher_send
+from .helpers.storage import get_internal_store_manager
 from .helpers.system_info import async_get_system_info
 from .helpers.typing import ConfigType
 from .setup import (
@@ -203,6 +204,27 @@ SETUP_ORDER = (
     ("debugger", DEBUGGER_INTEGRATIONS),
 )

+#
+# Storage keys we are likely to load during startup
+# in order of when we expect to load them.
+#
+# If they do not exist they will not be loaded
+#
+PRELOAD_STORAGE = [
+    "core.network",
+    "http.auth",
+    "image",
+    "lovelace_dashboards",
+    "lovelace_resources",
+    "core.uuid",
+    "lovelace.map",
+    "bluetooth.passive_update_processor",
+    "bluetooth.remote_scanners",
+    "assist_pipeline.pipelines",
+    "core.analytics",
+    "auth_module.totp",
+]
+

 async def async_setup_hass(
     runtime_config: RuntimeConfig,
@@ -346,6 +368,7 @@ async def async_load_base_functionality(hass: core.HomeAssistant) -> None:
     entity.async_setup(hass)
     template.async_setup(hass)
     await asyncio.gather(
+        create_eager_task(get_internal_store_manager(hass).async_initialize()),
         create_eager_task(area_registry.async_load(hass)),
         create_eager_task(category_registry.async_load(hass)),
         create_eager_task(device_registry.async_load(hass)),
@@ -840,6 +863,17 @@ async def _async_resolve_domains_to_setup(
         eager_start=True,
     )

+    # Preload storage for all integrations we are going to set up
+    # so we do not have to wait for it to be loaded when we need it
+    # in the setup process.
+    hass.async_create_background_task(
+        get_internal_store_manager(hass).async_preload(
+            [*PRELOAD_STORAGE, *domains_to_setup]
+        ),
+        "preload storage",
+        eager_start=True,
+    )
+
     return domains_to_setup, integration_cache
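Taken together, the bootstrap changes scan the .storage directory once while base functionality loads, then preload the fixed PRELOAD_STORAGE keys plus one key per domain about to be set up, so later Store loads hit memory instead of disk. A minimal standalone sketch of that idea (hypothetical names, not the code in this diff):

    import asyncio
    import json
    from pathlib import Path

    async def preload(storage_dir: Path, keys: list[str]) -> dict[str, object]:
        """Read likely-needed storage files into memory ahead of time (sketch)."""
        loop = asyncio.get_running_loop()
        # One directory scan tells us which keys exist; missing keys cost nothing.
        files = await loop.run_in_executor(
            None, lambda: {p.name for p in storage_dir.iterdir()}
        )
        cache: dict[str, object] = {}

        def _read(existing: set[str]) -> None:
            for key in existing:
                try:
                    cache[key] = json.loads((storage_dir / key).read_text())
                except (OSError, ValueError):
                    pass  # an unreadable file is simply not preloaded

        # Do the blocking file I/O in the executor, as the diff does.
        await loop.run_in_executor(None, _read, files.intersection(keys))
        return cache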
@@ -392,6 +392,8 @@ class HomeAssistant:
         # pylint: disable-next=import-outside-toplevel
         from . import loader

+        # This is a dictionary that any component can store any data on.
+        self.data: dict[str, Any] = {}
         self.loop = asyncio.get_running_loop()
         self._tasks: set[asyncio.Future[Any]] = set()
         self._background_tasks: set[asyncio.Future[Any]] = set()
@@ -401,8 +403,6 @@
         self.config = Config(self, config_dir)
         self.components = loader.Components(self)
         self.helpers = loader.Helpers(self)
-        # This is a dictionary that any component can store any data on.
-        self.data: dict[str, Any] = {}
         self.state: CoreState = CoreState.not_running
         self.exit_code: int = 0
         # If not None, use to signal end-of-loop
@@ -2590,7 +2590,7 @@ class Config:
         """Initialize a new config object."""
         self.hass = hass

-        self._store = self._ConfigStore(self.hass)
+        self._store = self._ConfigStore(self.hass, config_dir)

         self.latitude: float = 0
         self.longitude: float = 0
@@ -2857,7 +2857,7 @@ class Config:
     class _ConfigStore(Store[dict[str, Any]]):
         """Class to help storing Config data."""

-        def __init__(self, hass: HomeAssistant) -> None:
+        def __init__(self, hass: HomeAssistant, config_dir: str) -> None:
             """Initialize storage class."""
             super().__init__(
                 hass,
@@ -2866,6 +2866,7 @@ class Config:
                 private=True,
                 atomic_writes=True,
                 minor_version=CORE_STORAGE_MINOR_VERSION,
+                config_dir=config_dir,
             )
             self._original_unit_system: str | None = None  # from old store 1.1
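The core.py changes are ordering fixes that the preload cache requires: config_dir is handed to _ConfigStore directly because hass.config is still being constructed when that store is created, and hass.data must exist before Config is built because get_internal_store_manager caches the manager there. A simplified sketch of the constraint (hypothetical classes, not Home Assistant's):

    class Config:
        def __init__(self, hass: "Hass", config_dir: str) -> None:
            # hass.config is not assigned yet, so the store cannot read
            # hass.config.config_dir; the directory is passed in explicitly.
            hass.data.setdefault("storage_manager", {})  # would fail if data were missing

    class Hass:
        def __init__(self, config_dir: str) -> None:
            self.data: dict[str, object] = {}        # must be set up first
            self.config = Config(self, config_dir)   # may touch self.data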
@@ -3,16 +3,21 @@
 from __future__ import annotations

 import asyncio
-from collections.abc import Callable, Mapping, Sequence
+from collections.abc import Callable, Iterable, Mapping, Sequence
 from contextlib import suppress
 from copy import deepcopy
 import inspect
 from json import JSONDecodeError, JSONEncoder
 import logging
+import os
 from pathlib import Path
 from typing import TYPE_CHECKING, Any, Generic, TypeVar

-from homeassistant.const import EVENT_HOMEASSISTANT_FINAL_WRITE
+from homeassistant.const import (
+    EVENT_HOMEASSISTANT_FINAL_WRITE,
+    EVENT_HOMEASSISTANT_STARTED,
+    EVENT_HOMEASSISTANT_STOP,
+)
 from homeassistant.core import (
     CALLBACK_TYPE,
     DOMAIN as HOMEASSISTANT_DOMAIN,
@@ -43,7 +48,9 @@ STORAGE_DIR = ".storage"
 _LOGGER = logging.getLogger(__name__)

 STORAGE_SEMAPHORE = "storage_semaphore"
+STORAGE_MANAGER = "storage_manager"

+MANAGER_CLEANUP_DELAY = 60

 _T = TypeVar("_T", bound=Mapping[str, Any] | Sequence[Any])
@@ -88,6 +95,147 @@ async def async_migrator(
     return config


+def get_internal_store_manager(
+    hass: HomeAssistant, config_dir: str | None = None
+) -> _StoreManager:
+    """Get the store manager.
+
+    This function is not part of the API and should only be
+    used in the Home Assistant core internals. It is not
+    guaranteed to be stable.
+    """
+    if STORAGE_MANAGER not in hass.data:
+        manager = _StoreManager(hass, config_dir or hass.config.config_dir)
+        hass.data[STORAGE_MANAGER] = manager
+    return hass.data[STORAGE_MANAGER]
+
+
+class _StoreManager:
+    """Class to help storing data.
+
+    The store manager is used to cache and manage storage files.
+    """
+
+    def __init__(self, hass: HomeAssistant, config_dir: str) -> None:
+        """Initialize storage manager class."""
+        self._hass = hass
+        self._invalidated: set[str] = set()
+        self._files: set[str] | None = None
+        self._data_preload: dict[str, json_util.JsonValueType] = {}
+        self._storage_path: Path = Path(config_dir).joinpath(STORAGE_DIR)
+        self._cancel_cleanup: asyncio.TimerHandle | None = None
+
+    async def async_initialize(self) -> None:
+        """Initialize the storage manager."""
+        hass = self._hass
+        await hass.async_add_executor_job(self._initialize_files)
+        hass.bus.async_listen_once(
+            EVENT_HOMEASSISTANT_STARTED,
+            self._async_schedule_cleanup,
+            run_immediately=True,
+        )
+
+    @callback
+    def async_invalidate(self, key: str) -> None:
+        """Invalidate cache.
+
+        Store calls this when it's going to save data
+        to ensure that the cache is not used after that.
+        """
+        if "/" not in key:
+            self._invalidated.add(key)
+            self._data_preload.pop(key, None)
+
+    @callback
+    def async_fetch(
+        self, key: str
+    ) -> tuple[bool, json_util.JsonValueType | None] | None:
+        """Fetch data from cache."""
+        #
+        # If the key is invalidated, we don't need to check the cache
+        # If async_initialize has not been called yet, we don't know
+        # if the file exists or not so it's a cache miss
+        #
+        # It is very important that we check if self._files is None
+        # because we do not want to incorrectly report that the file
+        # does not exist before async_initialize has been called, as
+        # that would cause the Store to return None when it should not.
+        #
+        # The "/" in key check is to prevent the cache from being used
+        # for subdirs in case we have a key like "hacs/XXX"
+        #
+        if "/" in key or key in self._invalidated or self._files is None:
+            _LOGGER.debug("%s: Cache miss", key)
+            return None
+
+        # If async_initialize has been called and the key is not in self._files
+        # then the file does not exist
+        if key not in self._files:
+            _LOGGER.debug("%s: Cache hit, does not exist", key)
+            return (False, None)
+
+        # If the key is in the preload cache, return it
+        if data := self._data_preload.pop(key, None):
+            _LOGGER.debug("%s: Cache hit data", key)
+            return (True, data)
+
+        _LOGGER.debug("%s: Cache miss, not preloaded", key)
+        return None
+
+    @callback
+    def _async_schedule_cleanup(self, _event: Event) -> None:
+        """Schedule the cleanup of old files."""
+        self._cancel_cleanup = self._hass.loop.call_later(
+            MANAGER_CLEANUP_DELAY, self._async_cleanup
+        )
+        # Handle the case where we stop in the first 60s
+        self._hass.bus.async_listen_once(
+            EVENT_HOMEASSISTANT_STOP,
+            self._async_cancel_and_cleanup,
+            run_immediately=True,
+        )
+
+    @callback
+    def _async_cancel_and_cleanup(self, _event: Event) -> None:
+        """Cancel the cleanup of old files."""
+        self._async_cleanup()
+        if self._cancel_cleanup:
+            self._cancel_cleanup.cancel()
+            self._cancel_cleanup = None
+
+    @callback
+    def _async_cleanup(self) -> None:
+        """Clean up unused cache.
+
+        If nothing consumes the cache 60s after startup or when we
+        stop Home Assistant, we'll clear the cache.
+        """
+        self._data_preload.clear()
+
+    async def async_preload(self, keys: Iterable[str]) -> None:
+        """Cache the keys."""
+        # If async_initialize has not been called yet, we can't preload
+        if self._files is not None and (existing := self._files.intersection(keys)):
+            await self._hass.async_add_executor_job(self._preload, existing)
+
+    def _preload(self, keys: Iterable[str]) -> None:
+        """Cache the keys."""
+        storage_path = self._storage_path
+        data_preload = self._data_preload
+        for key in keys:
+            storage_file: Path = storage_path.joinpath(key)
+            try:
+                if storage_file.is_file():
+                    data_preload[key] = json_util.load_json(storage_file)
+            except Exception as ex:  # pylint: disable=broad-except
+                _LOGGER.debug("Error loading %s: %s", key, ex)
+
+    def _initialize_files(self) -> None:
+        """Initialize the cache."""
+        if self._storage_path.exists():
+            self._files = set(os.listdir(self._storage_path))
+
+
 @bind_hass
 class Store(Generic[_T]):
     """Class to help storing data."""
@@ -103,6 +251,7 @@ class Store(Generic[_T]):
         encoder: type[JSONEncoder] | None = None,
         minor_version: int = 1,
         read_only: bool = False,
+        config_dir: str | None = None,
     ) -> None:
         """Initialize storage class."""
         self.version = version
@@ -119,6 +268,7 @@ class Store(Generic[_T]):
         self._atomic_writes = atomic_writes
         self._read_only = read_only
         self._next_write_time = 0.0
+        self._manager = get_internal_store_manager(hass, config_dir)

     @cached_property
     def path(self):
@@ -170,6 +320,10 @@ class Store(Generic[_T]):
             # We make a copy because code might assume it's safe to mutate loaded data
             # and we don't want that to mess with what we're trying to store.
             data = deepcopy(data)
+        elif cache := self._manager.async_fetch(self.key):
+            exists, data = cache
+            if not exists:
+                return None
         else:
             try:
                 data = await self.hass.async_add_executor_job(
@@ -366,6 +520,7 @@ class Store(Generic[_T]):
     async def _async_handle_write_data(self, *_args):
         """Handle writing the config."""
         async with self._write_lock:
+            self._manager.async_invalidate(self.key)
             self._async_cleanup_delay_listener()
             self._async_cleanup_final_write_listener()
@@ -409,6 +564,7 @@ class Store(Generic[_T]):

     async def async_remove(self) -> None:
         """Remove all data."""
+        self._manager.async_invalidate(self.key)
         self._async_cleanup_delay_listener()
         self._async_cleanup_final_write_listener()
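The key contract in this file is async_fetch's three-way return: None means a cache miss (the Store falls back to loading from disk in the executor), (False, None) means the file is known not to exist, and (True, data) hands over preloaded data, which is popped so each preloaded file is served at most once. A hedged sketch of how a caller consumes it (simplified from Store._async_load_data above, not the real API):

    def load_with_cache(fetch, load_from_disk, key):
        """Consume the tri-state cache result; a sketch under those assumptions."""
        cache = fetch(key)
        if cache is not None:
            exists, data = cache
            return data if exists else None  # definitive answer, no disk I/O
        return load_from_disk(key)           # cache miss: go to disk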
@@ -222,12 +222,10 @@ class StoreWithoutWriteLoad(storage.Store[_T]):
 async def async_test_home_assistant(
     event_loop: asyncio.AbstractEventLoop | None = None,
     load_registries: bool = True,
-    storage_dir: str | None = None,
+    config_dir: str | None = None,
 ) -> AsyncGenerator[HomeAssistant, None]:
     """Return a Home Assistant object pointing at test config dir."""
-    hass = HomeAssistant(get_test_config_dir())
-    if storage_dir:
-        hass.config.config_dir = storage_dir
+    hass = HomeAssistant(config_dir or get_test_config_dir())
     store = auth_store.AuthStore(hass)
     hass.auth = auth.AuthManager(hass, store, {}, {})
     ensure_auth_manager_loaded(hass.auth)
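In the test harness, the storage_dir parameter becomes config_dir and is applied when HomeAssistant is constructed rather than patched in afterwards, so the store manager scans the right .storage path from the start. Typical use after this change (a sketch based on the tests below; tmpdir and loop come from the test context):

    tmp_storage = await loop.run_in_executor(None, tmpdir.mkdir, "temp_storage")
    async with async_test_home_assistant(config_dir=tmp_storage) as hass:
        ...  # stores created here resolve paths under tmp_storage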
@@ -4,7 +4,6 @@ All containing methods are legacy helpers that should not be used by new
 components. Instead call the service directly.
 """

-
 from homeassistant.components.light import (
     ATTR_BRIGHTNESS,
     ATTR_BRIGHTNESS_PCT,
@@ -13,11 +13,14 @@ import pytest

 from homeassistant.const import (
     EVENT_HOMEASSISTANT_FINAL_WRITE,
+    EVENT_HOMEASSISTANT_START,
+    EVENT_HOMEASSISTANT_STARTED,
     EVENT_HOMEASSISTANT_STOP,
 )
 from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, CoreState, HomeAssistant
 from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers import issue_registry as ir, storage
+from homeassistant.helpers.json import json_bytes
 from homeassistant.util import dt as dt_util
 from homeassistant.util.color import RGBColor
@@ -622,10 +625,9 @@ async def test_changing_delayed_written_data(

 async def test_saving_load_round_trip(tmpdir: py.path.local) -> None:
     """Test saving and loading round trip."""
-    async with async_test_home_assistant() as hass:
-        hass.config.config_dir = await hass.async_add_executor_job(
-            tmpdir.mkdir, "temp_storage"
-        )
+    loop = asyncio.get_running_loop()
+    config_dir = await loop.run_in_executor(None, tmpdir.mkdir, "temp_storage")
+    async with async_test_home_assistant(config_dir=config_dir) as hass:

         class NamedTupleSubclass(NamedTuple):
             """A NamedTuple subclass."""
@@ -669,7 +671,7 @@ async def test_loading_corrupt_core_file(
     loop = asyncio.get_running_loop()
     tmp_storage = await loop.run_in_executor(None, tmpdir.mkdir, "temp_storage")

-    async with async_test_home_assistant(storage_dir=tmp_storage) as hass:
+    async with async_test_home_assistant(config_dir=tmp_storage) as hass:
         storage_key = "core.anything"
         store = storage.Store(
             hass, MOCK_VERSION_2, storage_key, minor_version=MOCK_MINOR_VERSION_1
@@ -728,7 +730,7 @@ async def test_loading_corrupt_file_known_domain(
     loop = asyncio.get_running_loop()
     tmp_storage = await loop.run_in_executor(None, tmpdir.mkdir, "temp_storage")

-    async with async_test_home_assistant(storage_dir=tmp_storage) as hass:
+    async with async_test_home_assistant(config_dir=tmp_storage) as hass:
         hass.config.components.add("testdomain")
         storage_key = "testdomain.testkey"

|
|||
|
||||
async def test_os_error_is_fatal(tmpdir: py.path.local) -> None:
|
||||
"""Test OSError during load is fatal."""
|
||||
async with async_test_home_assistant() as hass:
|
||||
tmp_storage = await hass.async_add_executor_job(tmpdir.mkdir, "temp_storage")
|
||||
hass.config.config_dir = tmp_storage
|
||||
|
||||
loop = asyncio.get_running_loop()
|
||||
tmp_storage = await loop.run_in_executor(None, tmpdir.mkdir, "temp_storage")
|
||||
async with async_test_home_assistant(config_dir=tmp_storage) as hass:
|
||||
store = storage.Store(
|
||||
hass, MOCK_VERSION_2, MOCK_KEY, minor_version=MOCK_MINOR_VERSION_1
|
||||
)
|
||||
|
@@ -814,10 +815,9 @@ async def test_os_error_is_fatal(tmpdir: py.path.local) -> None:

 async def test_json_load_failure(tmpdir: py.path.local) -> None:
     """Test json load raising HomeAssistantError."""
-    async with async_test_home_assistant() as hass:
-        tmp_storage = await hass.async_add_executor_job(tmpdir.mkdir, "temp_storage")
-        hass.config.config_dir = tmp_storage
-
+    loop = asyncio.get_running_loop()
+    tmp_storage = await loop.run_in_executor(None, tmpdir.mkdir, "temp_storage")
+    async with async_test_home_assistant(config_dir=tmp_storage) as hass:
         store = storage.Store(
             hass, MOCK_VERSION_2, MOCK_KEY, minor_version=MOCK_MINOR_VERSION_1
         )
@@ -861,3 +861,301 @@ async def test_read_only_store(
     hass.bus.async_fire(EVENT_HOMEASSISTANT_FINAL_WRITE)
     await hass.async_block_till_done()
     assert read_only_store.key not in hass_storage
+
+
+async def test_store_manager_caching(
+    tmpdir: py.path.local, caplog: pytest.LogCaptureFixture
+) -> None:
+    """Test store manager caching."""
+    loop = asyncio.get_running_loop()
+
+    def _setup_mock_storage():
+        config_dir = tmpdir.mkdir("temp_config")
+        tmp_storage = config_dir.mkdir(".storage")
+        tmp_storage.join("integration1").write_binary(
+            json_bytes({"data": {"integration1": "integration1"}, "version": 1})
+        )
+        tmp_storage.join("integration2").write_binary(
+            json_bytes({"data": {"integration2": "integration2"}, "version": 1})
+        )
+        tmp_storage.join("broken").write_binary(b"invalid")
+        return config_dir
+
+    config_dir = await loop.run_in_executor(None, _setup_mock_storage)
+
+    async with async_test_home_assistant(config_dir=config_dir) as hass:
+        store_manager = storage.get_internal_store_manager(hass)
+        assert (
+            store_manager.async_fetch("integration1") is None
+        )  # has data but not cached
+        assert (
+            store_manager.async_fetch("integration2") is None
+        )  # has data but not cached
+        assert (
+            store_manager.async_fetch("integration3") is None
+        )  # no file, but not yet initialized
+
+        await store_manager.async_initialize()
+        assert (
+            store_manager.async_fetch("integration1") is None
+        )  # has data but not cached
+        assert (
+            store_manager.async_fetch("integration2") is None
+        )  # has data but not cached
+        assert (
+            store_manager.async_fetch("integration3") is not None
+        )  # no file and initialized
+
+        result = store_manager.async_fetch("integration3")
+        assert result is not None
+        exists, data = result
+        assert exists is False
+        assert data is None
+
+        await store_manager.async_preload(["integration3", "integration2", "broken"])
+        assert "Error loading broken" in caplog.text
+
+        assert (
+            store_manager.async_fetch("integration1") is None
+        )  # has data but not cached
+        result = store_manager.async_fetch("integration2")
+        assert result is not None
+        exists, data = result
+        assert exists is True
+        assert data == {"data": {"integration2": "integration2"}, "version": 1}
+
+        assert (
+            store_manager.async_fetch("integration3") is not None
+        )  # no file and initialized
+        result = store_manager.async_fetch("integration3")
+        assert result is not None
+        exists, data = result
+        assert exists is False
+        assert data is None
+
+        integration1 = storage.Store(hass, 1, "integration1")
+        await integration1.async_save({"integration1": "updated"})
+        # Save should invalidate the cache
+        assert store_manager.async_fetch("integration1") is None  # invalidated
+
+        integration2 = storage.Store(hass, 1, "integration2")
+        integration2.async_delay_save(lambda: {"integration2": "updated"})
+        # Delay save should invalidate the cache after it saves
+        assert "integration2" not in store_manager._invalidated
+
+        # Block twice to flush out the delayed save
+        await hass.async_block_till_done()
+        await hass.async_block_till_done()
+        assert store_manager.async_fetch("integration2") is None  # invalidated
+
+        store_manager.async_invalidate("integration3")
+        assert store_manager.async_fetch("integration1") is None  # invalidated by save
+        assert (
+            store_manager.async_fetch("integration2") is None
+        )  # invalidated by delay save
+        assert store_manager.async_fetch("integration3") is None  # invalidated
+
+        await hass.async_stop(force=True)
+
+    async with async_test_home_assistant(config_dir=config_dir) as hass:
+        store_manager = storage.get_internal_store_manager(hass)
+        assert store_manager.async_fetch("integration1") is None
+        assert store_manager.async_fetch("integration2") is None
+        assert store_manager.async_fetch("integration3") is None
+        await store_manager.async_initialize()
+        await store_manager.async_preload(["integration1", "integration2"])
+        result = store_manager.async_fetch("integration1")
+        assert result is not None
+        exists, data = result
+        assert exists is True
+        assert data["data"] == {"integration1": "updated"}
+
+        integration1 = storage.Store(hass, 1, "integration1")
+        assert await integration1.async_load() == {"integration1": "updated"}
+
+        # Load should pop the cache
+        assert store_manager.async_fetch("integration1") is None
+
+        integration2 = storage.Store(hass, 1, "integration2")
+        assert await integration2.async_load() == {"integration2": "updated"}
+
+        # Load should pop the cache
+        assert store_manager.async_fetch("integration2") is None
+
+        integration3 = storage.Store(hass, 1, "integration3")
+        assert await integration3.async_load() is None
+
+        await integration3.async_save({"integration3": "updated"})
+        assert await integration3.async_load() == {"integration3": "updated"}
+
+        await hass.async_stop(force=True)
+
+    # Now make sure everything still works when we do not
+    # manually load the storage manager
+    async with async_test_home_assistant(config_dir=config_dir) as hass:
+        integration1 = storage.Store(hass, 1, "integration1")
+        assert await integration1.async_load() == {"integration1": "updated"}
+        await integration1.async_save({"integration1": "updated2"})
+        assert await integration1.async_load() == {"integration1": "updated2"}
+
+        integration2 = storage.Store(hass, 1, "integration2")
+        assert await integration2.async_load() == {"integration2": "updated"}
+        await integration2.async_save({"integration2": "updated2"})
+        assert await integration2.async_load() == {"integration2": "updated2"}
+
+        await hass.async_stop(force=True)
+
+    # Now remove the stores
+    async with async_test_home_assistant(config_dir=config_dir) as hass:
+        store_manager = storage.get_internal_store_manager(hass)
+        await store_manager.async_initialize()
+        await store_manager.async_preload(["integration1", "integration2"])
+
+        integration1 = storage.Store(hass, 1, "integration1")
+        assert integration1._manager is store_manager
+        assert await integration1.async_load() == {"integration1": "updated2"}
+
+        integration2 = storage.Store(hass, 1, "integration2")
+        assert integration2._manager is store_manager
+        assert await integration2.async_load() == {"integration2": "updated2"}
+
+        await integration1.async_remove()
+        await integration2.async_remove()
+
+        assert store_manager.async_fetch("integration1") is None
+        assert store_manager.async_fetch("integration2") is None
+
+        assert await integration1.async_load() is None
+        assert await integration2.async_load() is None
+
+        await hass.async_stop(force=True)
+
+    # Now make sure the stores are removed and another run works
+    async with async_test_home_assistant(config_dir=config_dir) as hass:
+        store_manager = storage.get_internal_store_manager(hass)
+        await store_manager.async_initialize()
+        await store_manager.async_preload(["integration1"])
+        result = store_manager.async_fetch("integration1")
+        assert result is not None
+        exists, data = result
+        assert exists is False
+        assert data is None
+        await hass.async_stop(force=True)
+
+
+async def test_store_manager_sub_dirs(tmpdir: py.path.local) -> None:
+    """Test store manager ignores subdirs."""
+    loop = asyncio.get_running_loop()
+
+    def _setup_mock_storage():
+        config_dir = tmpdir.mkdir("temp_config")
+        sub_dir_storage = config_dir.mkdir(".storage").mkdir("subdir")
+
+        sub_dir_storage.join("integration1").write_binary(
+            json_bytes({"data": {"integration1": "integration1"}, "version": 1})
+        )
+        return config_dir
+
+    config_dir = await loop.run_in_executor(None, _setup_mock_storage)
+
+    async with async_test_home_assistant(config_dir=config_dir) as hass:
+        store_manager = storage.get_internal_store_manager(hass)
+        await store_manager.async_initialize()
+        assert store_manager.async_fetch("subdir/integration1") is None
+        assert store_manager.async_fetch("subdir/integrationx") is None
+        integration1 = storage.Store(hass, 1, "subdir/integration1")
+        assert await integration1.async_load() == {"integration1": "integration1"}
+        await hass.async_stop(force=True)
+
+
+async def test_store_manager_cleanup_after_started(
+    tmpdir: py.path.local, freezer: FrozenDateTimeFactory
+) -> None:
+    """Test that the cache is cleaned up after startup."""
+    loop = asyncio.get_running_loop()
+
+    def _setup_mock_storage():
+        config_dir = tmpdir.mkdir("temp_config")
+        tmp_storage = config_dir.mkdir(".storage")
+        tmp_storage.join("integration1").write_binary(
+            json_bytes({"data": {"integration1": "integration1"}, "version": 1})
+        )
+        tmp_storage.join("integration2").write_binary(
+            json_bytes({"data": {"integration2": "integration2"}, "version": 1})
+        )
+        return config_dir
+
+    config_dir = await loop.run_in_executor(None, _setup_mock_storage)
+
+    async with async_test_home_assistant(config_dir=config_dir) as hass:
+        hass.set_state(CoreState.not_running)
+        store_manager = storage.get_internal_store_manager(hass)
+        await store_manager.async_initialize()
+        await store_manager.async_preload(["integration1", "integration2"])
+        assert "integration1" in store_manager._data_preload
+        assert "integration2" in store_manager._data_preload
+        hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
+        await hass.async_block_till_done()
+        assert "integration1" in store_manager._data_preload
+        assert "integration2" in store_manager._data_preload
+        hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
+        await hass.async_block_till_done()
+        assert "integration1" in store_manager._data_preload
+        assert "integration2" in store_manager._data_preload
+        freezer.tick(storage.MANAGER_CLEANUP_DELAY)
+        async_fire_time_changed(hass)
+        await hass.async_block_till_done()
+        # The cache should be removed after the cleanup delay
+        # since it means nothing ever loaded it and we want to
+        # recover the memory
+        assert "integration1" not in store_manager._data_preload
+        assert "integration2" not in store_manager._data_preload
+        assert store_manager.async_fetch("integration1") is None
+        assert store_manager.async_fetch("integration2") is None
+        await hass.async_stop(force=True)
+
+
+async def test_store_manager_cleanup_after_stop(
+    tmpdir: py.path.local, freezer: FrozenDateTimeFactory
+) -> None:
+    """Test that the cache is cleaned up after stop event.
+
+    This should only happen if we stop within the cleanup delay.
+    """
+    loop = asyncio.get_running_loop()
+
+    def _setup_mock_storage():
+        config_dir = tmpdir.mkdir("temp_config")
+        tmp_storage = config_dir.mkdir(".storage")
+        tmp_storage.join("integration1").write_binary(
+            json_bytes({"data": {"integration1": "integration1"}, "version": 1})
+        )
+        tmp_storage.join("integration2").write_binary(
+            json_bytes({"data": {"integration2": "integration2"}, "version": 1})
+        )
+        return config_dir
+
+    config_dir = await loop.run_in_executor(None, _setup_mock_storage)
+
+    async with async_test_home_assistant(config_dir=config_dir) as hass:
+        hass.set_state(CoreState.not_running)
+        store_manager = storage.get_internal_store_manager(hass)
+        await store_manager.async_initialize()
+        await store_manager.async_preload(["integration1", "integration2"])
+        assert "integration1" in store_manager._data_preload
+        assert "integration2" in store_manager._data_preload
+        hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
+        await hass.async_block_till_done()
+        assert "integration1" in store_manager._data_preload
+        assert "integration2" in store_manager._data_preload
+        hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
+        await hass.async_block_till_done()
+        assert "integration1" in store_manager._data_preload
+        assert "integration2" in store_manager._data_preload
+        hass.bus.async_fire(EVENT_HOMEASSISTANT_STOP)
+        await hass.async_block_till_done()
+        assert "integration1" not in store_manager._data_preload
+        assert "integration2" not in store_manager._data_preload
+        assert store_manager.async_fetch("integration1") is None
+        assert store_manager.async_fetch("integration2") is None
+        await hass.async_stop(force=True)
@@ -1891,6 +1891,7 @@ async def test_serviceregistry_return_response_optional(
 async def test_config_defaults() -> None:
     """Test config defaults."""
     hass = Mock()
+    hass.data = {}
     config = ha.Config(hass, "/test/ha-config")
     assert config.hass is hass
     assert config.latitude == 0
@@ -1918,20 +1919,25 @@

 async def test_config_path_with_file() -> None:
     """Test get_config_path method."""
-    config = ha.Config(None, "/test/ha-config")
+    hass = Mock()
+    hass.data = {}
+    config = ha.Config(hass, "/test/ha-config")
     assert config.path("test.conf") == "/test/ha-config/test.conf"


 async def test_config_path_with_dir_and_file() -> None:
     """Test get_config_path method."""
-    config = ha.Config(None, "/test/ha-config")
+    hass = Mock()
+    hass.data = {}
+    config = ha.Config(hass, "/test/ha-config")
     assert config.path("dir", "test.conf") == "/test/ha-config/dir/test.conf"


 async def test_config_as_dict() -> None:
     """Test as dict."""
-    config = ha.Config(None, "/test/ha-config")
-    config.hass = MagicMock()
+    hass = Mock()
+    hass.data = {}
+    config = ha.Config(hass, "/test/ha-config")
     type(config.hass.state).value = PropertyMock(return_value="RUNNING")
     expected = {
         "latitude": 0,
@@ -1962,7 +1968,9 @@

 async def test_config_is_allowed_path() -> None:
     """Test is_allowed_path method."""
-    config = ha.Config(None, "/test/ha-config")
+    hass = Mock()
+    hass.data = {}
+    config = ha.Config(hass, "/test/ha-config")
     with TemporaryDirectory() as tmp_dir:
         # The created dir is in /tmp. This is a symlink on OS X
         # causing this test to fail unless we resolve path first.
@@ -1994,7 +2002,9 @@

 async def test_config_is_allowed_external_url() -> None:
     """Test is_allowed_external_url method."""
-    config = ha.Config(None, "/test/ha-config")
+    hass = Mock()
+    hass.data = {}
+    config = ha.Config(hass, "/test/ha-config")
     config.allowlist_external_urls = [
         "http://x.com/",
         "https://y.com/bla/",
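These test_core.py updates follow from the new wiring: constructing ha.Config now builds a _ConfigStore, whose Store.__init__ calls get_internal_store_manager(hass), which caches the manager in hass.data, so passing None or a Mock without a real dict no longer works. A minimal sketch of the new pattern (mirroring the diff above):

    from unittest.mock import Mock

    hass = Mock()
    hass.data = {}  # a real dict; the store manager is cached under a key here
    config = ha.Config(hass, "/test/ha-config")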