Convert test helpers to get hass instance to contextmanagers (#109990)

* Convert get_test_home_assistant helper to contextmanager
* Convert async_test_home_assistant helper to contextmanager
* Move timezone reset to async_test_home_assistant helper

parent 3342e6ddbd
commit 2ef2172b01
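For orientation before the diff: a minimal sketch of the new call patterns, distilled from the test updates below. The test names and bodies here are illustrative placeholders, not code from the commit:

from tests.common import async_test_home_assistant, get_test_home_assistant

# Sync tests: the helper now creates its own event loop, yields a
# running hass, and tears the loop down when the block exits.
def test_something_sync():  # hypothetical test
    with get_test_home_assistant() as hass:
        hass.states.set("light.demo", "on")  # placeholder body
        hass.stop()

# Async tests: the event_loop argument is now optional, and the
# default timezone is restored automatically when the context exits.
async def test_something_async():  # hypothetical test
    async with async_test_home_assistant() as hass:
        hass.bus.async_fire("demo_event")  # placeholder body
        await hass.async_stop()

# unittest-style classes that span setUp/tearDown cannot wrap both in a
# single `with` block, so they enter and exit the context manager
# explicitly, as the TestComponentsCore and TestPicnicSensor hunks below do.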
@@ -3,8 +3,8 @@ from __future__ import annotations
 import asyncio
 from collections import OrderedDict
-from collections.abc import Generator, Mapping, Sequence
-from contextlib import contextmanager
+from collections.abc import AsyncGenerator, Generator, Mapping, Sequence
+from contextlib import asynccontextmanager, contextmanager
 from datetime import UTC, datetime, timedelta
 from enum import Enum
 import functools as ft
@@ -153,15 +153,17 @@ def get_test_config_dir(*add_path):
     return os.path.join(os.path.dirname(__file__), "testing_config", *add_path)


-def get_test_home_assistant():
+@contextmanager
+def get_test_home_assistant() -> Generator[HomeAssistant, None, None]:
     """Return a Home Assistant object pointing at test config directory."""
     loop = asyncio.new_event_loop()
     asyncio.set_event_loop(loop)
-    hass = loop.run_until_complete(async_test_home_assistant(loop))
+    context_manager = async_test_home_assistant(loop)
+    hass = loop.run_until_complete(context_manager.__aenter__())

     loop_stop_event = threading.Event()

-    def run_loop():
+    def run_loop() -> None:
         """Run event loop."""

         loop._thread_ident = threading.get_ident()
@@ -171,25 +173,30 @@ def get_test_home_assistant():
     orig_stop = hass.stop
     hass._stopped = Mock(set=loop.stop)

-    def start_hass(*mocks):
+    def start_hass(*mocks: Any) -> None:
         """Start hass."""
         asyncio.run_coroutine_threadsafe(hass.async_start(), loop).result()

-    def stop_hass():
+    def stop_hass() -> None:
         """Stop hass."""
         orig_stop()
         loop_stop_event.wait()
-        loop.close()

     hass.start = start_hass
     hass.stop = stop_hass

     threading.Thread(name="LoopThread", target=run_loop, daemon=False).start()

-    return hass
+    yield hass
+    loop.run_until_complete(context_manager.__aexit__(None, None, None))
+    loop.close()


-async def async_test_home_assistant(event_loop, load_registries=True):
+@asynccontextmanager
+async def async_test_home_assistant(
+    event_loop: asyncio.AbstractEventLoop | None = None,
+    load_registries: bool = True,
+) -> AsyncGenerator[HomeAssistant, None]:
     """Return a Home Assistant object pointing at test config dir."""
     hass = HomeAssistant(get_test_config_dir())
     store = auth_store.AuthStore(hass)
@@ -200,6 +207,7 @@ async def async_test_home_assistant(event_loop, load_registries=True):
     orig_async_add_job = hass.async_add_job
     orig_async_add_executor_job = hass.async_add_executor_job
     orig_async_create_task = hass.async_create_task
+    orig_tz = dt_util.DEFAULT_TIME_ZONE

     def async_add_job(target, *args):
         """Add job."""
@@ -300,7 +308,10 @@ async def async_test_home_assistant(event_loop, load_registries=True):

     hass.bus.async_listen_once(EVENT_HOMEASSISTANT_CLOSE, clear_instance)

-    return hass
+    yield hass
+
+    # Restore timezone, it is set when creating the hass object
+    dt_util.DEFAULT_TIME_ZONE = orig_tz


 def async_mock_service(

@@ -1,7 +1,6 @@
 """Tests for the Bluetooth integration."""
 from __future__ import annotations

-import asyncio
 from datetime import timedelta
 import logging
 import time
@@ -1673,7 +1672,7 @@ async def test_integration_multiple_entity_platforms_with_reload_and_restart(
     unregister_binary_sensor_processor()
     unregister_sensor_processor()

-    hass = await async_test_home_assistant(asyncio.get_running_loop())
+    async with async_test_home_assistant() as hass:
         await async_setup_component(hass, DOMAIN, {DOMAIN: {}})

         current_entry.set(entry)
@@ -1740,9 +1739,9 @@ async def test_integration_multiple_entity_platforms_with_reload_and_restart(
             key="pressure", device_id=None
         )

-    binary_sensor_entity_one: PassiveBluetoothProcessorEntity = binary_sensor_entities[
-        0
-    ]
+        binary_sensor_entity_one: PassiveBluetoothProcessorEntity = (
+            binary_sensor_entities[0]
+        )
         binary_sensor_entity_one.hass = hass
         assert binary_sensor_entity_one.available is False  # service data not injected
         assert binary_sensor_entity_one.unique_id == "aa:bb:cc:dd:ee:ff-motion"

@@ -75,32 +75,26 @@ class MockFFmpegDev(ffmpeg.FFmpegBase):
         self.called_entities = entity_ids


-class TestFFmpegSetup:
-    """Test class for ffmpeg."""
-
-    def setup_method(self):
-        """Set up things to be run when tests are started."""
-        self.hass = get_test_home_assistant()
-
-    def teardown_method(self):
-        """Stop everything that was started."""
-        self.hass.stop()
-
-    def test_setup_component(self):
+def test_setup_component():
     """Set up ffmpeg component."""
+    with get_test_home_assistant() as hass:
         with assert_setup_component(1):
-            setup_component(self.hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}})
+            setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}})

-        assert self.hass.data[ffmpeg.DATA_FFMPEG].binary == "ffmpeg"
+        assert hass.data[ffmpeg.DATA_FFMPEG].binary == "ffmpeg"
+        hass.stop()

-    def test_setup_component_test_service(self):
+
+def test_setup_component_test_service():
     """Set up ffmpeg component test services."""
+    with get_test_home_assistant() as hass:
         with assert_setup_component(1):
-            setup_component(self.hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}})
+            setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}})

-        assert self.hass.services.has_service(ffmpeg.DOMAIN, "start")
-        assert self.hass.services.has_service(ffmpeg.DOMAIN, "stop")
-        assert self.hass.services.has_service(ffmpeg.DOMAIN, "restart")
+        assert hass.services.has_service(ffmpeg.DOMAIN, "start")
+        assert hass.services.has_service(ffmpeg.DOMAIN, "stop")
+        assert hass.services.has_service(ffmpeg.DOMAIN, "restart")
+        hass.stop()


 async def test_setup_component_test_register(hass: HomeAssistant) -> None:

@@ -119,14 +119,19 @@ class TestComponentsCore(unittest.TestCase):

     def setUp(self):
         """Set up things to be run when tests are started."""
-        self.hass = get_test_home_assistant()
+        self._manager = get_test_home_assistant()
+        self.hass = self._manager.__enter__()
         assert asyncio.run_coroutine_threadsafe(
             async_setup_component(self.hass, "homeassistant", {}), self.hass.loop
         ).result()

         self.hass.states.set("light.Bowl", STATE_ON)
         self.hass.states.set("light.Ceiling", STATE_OFF)
-        self.addCleanup(self.hass.stop)
+
+    def tearDown(self) -> None:
+        """Tear down hass object."""
+        self.hass.stop()
+        self._manager.__exit__(None, None, None)

     def test_is_on(self):
         """Test is_on method."""

@@ -99,7 +99,8 @@ class TestPicnicSensor(unittest.IsolatedAsyncioTestCase):

     async def asyncSetUp(self):
         """Set up things to be run when tests are started."""
-        self.hass = await async_test_home_assistant(None)
+        self._manager = async_test_home_assistant()
+        self.hass = await self._manager.__aenter__()
         self.entity_registry = er.async_get(self.hass)

         # Patch the api client
@@ -122,6 +123,7 @@ class TestPicnicSensor(unittest.IsolatedAsyncioTestCase):
     async def asyncTearDown(self):
         """Tear down the test setup, stop hass/patchers."""
         await self.hass.async_stop(force=True)
+        await self._manager.__aexit__(None, None, None)
         self.picnic_patcher.stop()

     @property

@@ -27,8 +27,6 @@ from ...common import wait_recording_done

 from tests.common import get_test_home_assistant

-ORIG_TZ = dt_util.DEFAULT_TIME_ZONE
-

 def test_delete_duplicates_no_duplicates(
     hass_recorder: Callable[..., HomeAssistant], caplog: pytest.LogCaptureFixture
@@ -169,8 +167,7 @@ def test_delete_metadata_duplicates(
         recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION
     ), patch(
         "homeassistant.components.recorder.core.create_engine", new=_create_engine_28
-    ):
-        hass = get_test_home_assistant()
+    ), get_test_home_assistant() as hass:
         recorder_helper.async_initialize_recorder(hass)
         setup_component(hass, "recorder", {"recorder": {"db_url": dburl}})
         wait_recording_done(hass)
@@ -198,10 +195,9 @@ def test_delete_metadata_duplicates(
            assert tmp[2].statistic_id == "test:fossil_percentage"

         hass.stop()
-    dt_util.DEFAULT_TIME_ZONE = ORIG_TZ

     # Test that the duplicates are removed during migration from schema 28
-    hass = get_test_home_assistant()
+    with get_test_home_assistant() as hass:
         recorder_helper.async_initialize_recorder(hass)
         setup_component(hass, "recorder", {"recorder": {"db_url": dburl}})
         hass.start()
@@ -218,7 +214,6 @@ def test_delete_metadata_duplicates(
            assert tmp[1].statistic_id == "test:fossil_percentage"

         hass.stop()
-    dt_util.DEFAULT_TIME_ZONE = ORIG_TZ


 def test_delete_metadata_duplicates_many(
@@ -264,8 +259,7 @@ def test_delete_metadata_duplicates_many(
         recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION
     ), patch(
         "homeassistant.components.recorder.core.create_engine", new=_create_engine_28
-    ):
-        hass = get_test_home_assistant()
+    ), get_test_home_assistant() as hass:
         recorder_helper.async_initialize_recorder(hass)
         setup_component(hass, "recorder", {"recorder": {"db_url": dburl}})
         wait_recording_done(hass)
@@ -295,10 +289,9 @@ def test_delete_metadata_duplicates_many(
            )

         hass.stop()
-    dt_util.DEFAULT_TIME_ZONE = ORIG_TZ

     # Test that the duplicates are removed during migration from schema 28
-    hass = get_test_home_assistant()
+    with get_test_home_assistant() as hass:
         recorder_helper.async_initialize_recorder(hass)
         setup_component(hass, "recorder", {"recorder": {"db_url": dburl}})
         hass.start()
@@ -317,7 +310,6 @@ def test_delete_metadata_duplicates_many(
            assert tmp[2].statistic_id == "test:fossil_percentage"

         hass.stop()
-    dt_util.DEFAULT_TIME_ZONE = ORIG_TZ


 def test_delete_metadata_duplicates_no_duplicates(

@@ -1301,7 +1301,7 @@ def test_compile_missing_statistics(
     test_db_file = test_dir.joinpath("test_run_info.db")
     dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}"

-    hass = get_test_home_assistant()
+    with get_test_home_assistant() as hass:
         recorder_helper.async_initialize_recorder(hass)
         setup_component(hass, DOMAIN, {DOMAIN: {CONF_DB_URL: dburl}})
         hass.start()
@@ -1332,12 +1332,13 @@ def test_compile_missing_statistics(
         stats_hourly.append(event)

     freezer.tick(timedelta(hours=1))
-    hass = get_test_home_assistant()
+    with get_test_home_assistant() as hass:
         hass.bus.listen(
             EVENT_RECORDER_5MIN_STATISTICS_GENERATED, async_5min_stats_updated_listener
         )
         hass.bus.listen(
-            EVENT_RECORDER_HOURLY_STATISTICS_GENERATED, async_hourly_stats_updated_listener
+            EVENT_RECORDER_HOURLY_STATISTICS_GENERATED,
+            async_hourly_stats_updated_listener,
         )

         recorder_helper.async_initialize_recorder(hass)
@@ -1562,7 +1563,7 @@ def test_service_disable_run_information_recorded(tmp_path: Path) -> None:
     test_db_file = test_dir.joinpath("test_run_info.db")
     dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}"

-    hass = get_test_home_assistant()
+    with get_test_home_assistant() as hass:
         recorder_helper.async_initialize_recorder(hass)
         setup_component(hass, DOMAIN, {DOMAIN: {CONF_DB_URL: dburl}})
         hass.start()
@@ -1584,7 +1585,7 @@ def test_service_disable_run_information_recorded(tmp_path: Path) -> None:
         wait_recording_done(hass)
         hass.stop()

-    hass = get_test_home_assistant()
+    with get_test_home_assistant() as hass:
         recorder_helper.async_initialize_recorder(hass)
         setup_component(hass, DOMAIN, {DOMAIN: {CONF_DB_URL: dburl}})
         hass.start()

@@ -36,8 +36,6 @@ from .common import async_wait_recording_done, create_engine_test

 from tests.common import async_fire_time_changed

-ORIG_TZ = dt_util.DEFAULT_TIME_ZONE
-

 def _get_native_states(hass, entity_id):
     with session_scope(hass=hass, read_only=True) as session:

@@ -44,7 +44,6 @@ from tests.typing import RecorderInstanceGenerator

 CREATE_ENGINE_TARGET = "homeassistant.components.recorder.core.create_engine"
 SCHEMA_MODULE = "tests.components.recorder.db_schema_32"
-ORIG_TZ = dt_util.DEFAULT_TIME_ZONE


 async def _async_wait_migration_done(hass: HomeAssistant) -> None:

@@ -49,8 +49,6 @@ from .common import (
 from tests.common import mock_registry
 from tests.typing import WebSocketGenerator

-ORIG_TZ = dt_util.DEFAULT_TIME_ZONE
-

 def test_converters_align_with_sensor() -> None:
     """Ensure STATISTIC_UNIT_TO_UNIT_CONVERTER is aligned with UNIT_CONVERTERS."""

@@ -28,8 +28,6 @@ from .common import (

 from tests.common import get_test_home_assistant

-ORIG_TZ = dt_util.DEFAULT_TIME_ZONE
-
 SCHEMA_VERSION_POSTFIX = "23_with_newer_columns"
 SCHEMA_MODULE = get_schema_module_path(SCHEMA_VERSION_POSTFIX)

@@ -169,8 +167,7 @@ def test_delete_duplicates(caplog: pytest.LogCaptureFixture, tmp_path: Path) ->
             create_engine_test_for_schema_version_postfix,
             schema_version_postfix=SCHEMA_VERSION_POSTFIX,
         ),
-    ):
-        hass = get_test_home_assistant()
+    ), get_test_home_assistant() as hass:
         recorder_helper.async_initialize_recorder(hass)
         setup_component(hass, "recorder", {"recorder": {"db_url": dburl}})
         wait_recording_done(hass)
@@ -195,17 +192,15 @@ def test_delete_duplicates(caplog: pytest.LogCaptureFixture, tmp_path: Path) ->
                session.add(recorder.db_schema.Statistics.from_stats(3, stat))

         hass.stop()
-    dt_util.DEFAULT_TIME_ZONE = ORIG_TZ

     # Test that the duplicates are removed during migration from schema 23
-    hass = get_test_home_assistant()
+    with get_test_home_assistant() as hass:
         recorder_helper.async_initialize_recorder(hass)
         setup_component(hass, "recorder", {"recorder": {"db_url": dburl}})
         hass.start()
         wait_recording_done(hass)
         wait_recording_done(hass)
         hass.stop()
-    dt_util.DEFAULT_TIME_ZONE = ORIG_TZ

     assert "Deleted 2 duplicated statistics rows" in caplog.text
     assert "Found non identical" not in caplog.text
@@ -349,8 +344,7 @@ def test_delete_duplicates_many(
             create_engine_test_for_schema_version_postfix,
             schema_version_postfix=SCHEMA_VERSION_POSTFIX,
         ),
-    ):
-        hass = get_test_home_assistant()
+    ), get_test_home_assistant() as hass:
         recorder_helper.async_initialize_recorder(hass)
         setup_component(hass, "recorder", {"recorder": {"db_url": dburl}})
         wait_recording_done(hass)
@@ -381,17 +375,15 @@ def test_delete_duplicates_many(
                session.add(recorder.db_schema.Statistics.from_stats(3, stat))

         hass.stop()
-    dt_util.DEFAULT_TIME_ZONE = ORIG_TZ

     # Test that the duplicates are removed during migration from schema 23
-    hass = get_test_home_assistant()
+    with get_test_home_assistant() as hass:
         recorder_helper.async_initialize_recorder(hass)
         setup_component(hass, "recorder", {"recorder": {"db_url": dburl}})
         hass.start()
         wait_recording_done(hass)
         wait_recording_done(hass)
         hass.stop()
-    dt_util.DEFAULT_TIME_ZONE = ORIG_TZ

     assert "Deleted 3002 duplicated statistics rows" in caplog.text
     assert "Found non identical" not in caplog.text
@@ -506,8 +498,7 @@ def test_delete_duplicates_non_identical(
             create_engine_test_for_schema_version_postfix,
             schema_version_postfix=SCHEMA_VERSION_POSTFIX,
         ),
-    ):
-        hass = get_test_home_assistant()
+    ), get_test_home_assistant() as hass:
         recorder_helper.async_initialize_recorder(hass)
         setup_component(hass, "recorder", {"recorder": {"db_url": dburl}})
         wait_recording_done(hass)
@@ -527,10 +518,9 @@ def test_delete_duplicates_non_identical(
                session.add(recorder.db_schema.Statistics.from_stats(2, stat))

         hass.stop()
-    dt_util.DEFAULT_TIME_ZONE = ORIG_TZ

     # Test that the duplicates are removed during migration from schema 23
-    hass = get_test_home_assistant()
+    with get_test_home_assistant() as hass:
         hass.config.config_dir = tmp_path
         recorder_helper.async_initialize_recorder(hass)
         setup_component(hass, "recorder", {"recorder": {"db_url": dburl}})
@@ -538,7 +528,6 @@ def test_delete_duplicates_non_identical(
         wait_recording_done(hass)
         wait_recording_done(hass)
         hass.stop()
-    dt_util.DEFAULT_TIME_ZONE = ORIG_TZ

     assert "Deleted 2 duplicated statistics rows" in caplog.text
     assert "Deleted 1 non identical" in caplog.text
@@ -618,8 +607,7 @@ def test_delete_duplicates_short_term(
             create_engine_test_for_schema_version_postfix,
             schema_version_postfix=SCHEMA_VERSION_POSTFIX,
         ),
-    ):
-        hass = get_test_home_assistant()
+    ), get_test_home_assistant() as hass:
         recorder_helper.async_initialize_recorder(hass)
         setup_component(hass, "recorder", {"recorder": {"db_url": dburl}})
         wait_recording_done(hass)
@@ -638,10 +626,9 @@ def test_delete_duplicates_short_term(
            )

         hass.stop()
-    dt_util.DEFAULT_TIME_ZONE = ORIG_TZ

     # Test that the duplicates are removed during migration from schema 23
-    hass = get_test_home_assistant()
+    with get_test_home_assistant() as hass:
         hass.config.config_dir = tmp_path
         recorder_helper.async_initialize_recorder(hass)
         setup_component(hass, "recorder", {"recorder": {"db_url": dburl}})
@@ -649,7 +636,6 @@ def test_delete_duplicates_short_term(
         wait_recording_done(hass)
         wait_recording_done(hass)
         hass.stop()
-    dt_util.DEFAULT_TIME_ZONE = ORIG_TZ

     assert "duplicated statistics rows" not in caplog.text
     assert "Found non identical" not in caplog.text

@@ -106,8 +106,7 @@ async def test_last_run_was_recently_clean(
         recorder.CONF_DB_URL: "sqlite:///" + str(tmp_path / "pytest.db"),
         recorder.CONF_COMMIT_INTERVAL: 1,
     }
-    hass = await async_test_home_assistant(None)
-
+    async with async_test_home_assistant() as hass:
         return_values = []
         real_last_run_was_recently_clean = util.last_run_was_recently_clean

@@ -129,11 +128,11 @@ async def test_last_run_was_recently_clean(
         await hass.async_block_till_done()
         await hass.async_stop()

+    async with async_test_home_assistant() as hass:
         with patch(
             "homeassistant.components.recorder.util.last_run_was_recently_clean",
             wraps=_last_run_was_recently_clean,
         ) as last_run_was_recently_clean_mock:
-        hass = await async_test_home_assistant(None)
             await async_setup_recorder_instance(hass, config)
             last_run_was_recently_clean_mock.assert_called_once()
             assert return_values[-1] is True
@@ -145,6 +144,7 @@ async def test_last_run_was_recently_clean(

     thirty_min_future_time = dt_util.utcnow() + timedelta(minutes=30)

+    async with async_test_home_assistant() as hass:
         with patch(
             "homeassistant.components.recorder.util.last_run_was_recently_clean",
             wraps=_last_run_was_recently_clean,
@@ -152,7 +152,6 @@ async def test_last_run_was_recently_clean(
             "homeassistant.components.recorder.core.dt_util.utcnow",
             return_value=thirty_min_future_time,
         ):
-        hass = await async_test_home_assistant(None)
             await async_setup_recorder_instance(hass, config)
             last_run_was_recently_clean_mock.assert_called_once()
             assert return_values[-1] is False

@@ -1,5 +1,4 @@
 """The tests for recorder platform migrating data from v30."""
-import asyncio
 from datetime import timedelta
 import importlib
 from pathlib import Path
@@ -23,8 +22,6 @@ from .common import async_wait_recording_done

 from tests.common import async_test_home_assistant

-ORIG_TZ = dt_util.DEFAULT_TIME_ZONE
-
 CREATE_ENGINE_TARGET = "homeassistant.components.recorder.core.create_engine"
 SCHEMA_MODULE = "tests.components.recorder.db_schema_32"

@@ -115,7 +112,7 @@ async def test_migrate_times(caplog: pytest.LogCaptureFixture, tmp_path: Path) -
     ), patch(
         "homeassistant.components.recorder.Recorder._cleanup_legacy_states_event_ids"
     ):
-        hass = await async_test_home_assistant(asyncio.get_running_loop())
+        async with async_test_home_assistant() as hass:
             recorder_helper.async_initialize_recorder(hass)
             assert await async_setup_component(
                 hass, "recorder", {"recorder": {"db_url": dburl}}
@@ -142,12 +139,10 @@ async def test_migrate_times(caplog: pytest.LogCaptureFixture, tmp_path: Path) -
         await hass.async_stop()
         await hass.async_block_till_done()

-    dt_util.DEFAULT_TIME_ZONE = ORIG_TZ
-
     assert "ix_states_event_id" in states_index_names

     # Test that the duplicates are removed during migration from schema 23
-    hass = await async_test_home_assistant(asyncio.get_running_loop())
+    async with async_test_home_assistant() as hass:
         recorder_helper.async_initialize_recorder(hass)
         assert await async_setup_component(
             hass, "recorder", {"recorder": {"db_url": dburl}}
@@ -216,7 +211,6 @@ async def test_migrate_times(caplog: pytest.LogCaptureFixture, tmp_path: Path) -
         assert recorder.get_instance(hass).use_legacy_events_index is False

         await hass.async_stop()
-    dt_util.DEFAULT_TIME_ZONE = ORIG_TZ


 async def test_migrate_can_resume_entity_id_post_migration(
@@ -282,7 +276,7 @@ async def test_migrate_can_resume_entity_id_post_migration(
     ), patch(
         "homeassistant.components.recorder.Recorder._cleanup_legacy_states_event_ids"
     ):
-        hass = await async_test_home_assistant(asyncio.get_running_loop())
+        async with async_test_home_assistant() as hass:
             recorder_helper.async_initialize_recorder(hass)
             assert await async_setup_component(
                 hass, "recorder", {"recorder": {"db_url": dburl}}
@@ -313,7 +307,7 @@ async def test_migrate_can_resume_entity_id_post_migration(
     assert "ix_states_entity_id_last_updated_ts" in states_index_names

     with patch("homeassistant.components.recorder.Recorder._post_migrate_entity_ids"):
-        hass = await async_test_home_assistant(asyncio.get_running_loop())
+        async with async_test_home_assistant() as hass:
             recorder_helper.async_initialize_recorder(hass)
             assert await async_setup_component(
                 hass, "recorder", {"recorder": {"db_url": dburl}}
@@ -335,7 +329,7 @@ async def test_migrate_can_resume_entity_id_post_migration(

     assert "ix_states_entity_id_last_updated_ts" in states_index_names

-    hass = await async_test_home_assistant(asyncio.get_running_loop())
+    async with async_test_home_assistant() as hass:
         recorder_helper.async_initialize_recorder(hass)
         assert await async_setup_component(
             hass, "recorder", {"recorder": {"db_url": dburl}}
@@ -355,4 +349,3 @@ async def test_migrate_can_resume_entity_id_post_migration(
         assert "ix_states_entity_id_last_updated_ts" not in states_index_names

         await hass.async_stop()
-    dt_util.DEFAULT_TIME_ZONE = ORIG_TZ

@@ -12,7 +12,7 @@ from homeassistant.components.recorder.statistics import (
     statistics_during_period,
 )
 from homeassistant.components.recorder.util import session_scope
-from homeassistant.core import CoreState, HomeAssistant
+from homeassistant.core import CoreState
 from homeassistant.helpers import recorder as recorder_helper
 from homeassistant.setup import setup_component
 import homeassistant.util.dt as dt_util
@@ -51,7 +51,7 @@ def test_compile_missing_statistics(
     three_days_ago = datetime(2021, 1, 1, 0, 0, 0, tzinfo=dt_util.UTC)
     start_time = three_days_ago + timedelta(days=3)
     freezer.move_to(three_days_ago)
-    hass: HomeAssistant = get_test_home_assistant()
+    with get_test_home_assistant() as hass:
         hass.set_state(CoreState.not_running)
         recorder_helper.async_initialize_recorder(hass)
         setup_component(hass, "sensor", {})
@@ -84,12 +84,14 @@ def test_compile_missing_statistics(

         wait_recording_done(hass)

-    states = get_significant_states(hass, three_days_ago, past_time, ["sensor.test1"])
+        states = get_significant_states(
+            hass, three_days_ago, past_time, ["sensor.test1"]
+        )
         assert len(states["sensor.test1"]) == 577

         hass.stop()
     freezer.move_to(start_time)
-    hass: HomeAssistant = get_test_home_assistant()
+    with get_test_home_assistant() as hass:
         hass.set_state(CoreState.not_running)
         recorder_helper.async_initialize_recorder(hass)
         setup_component(hass, "sensor", {})

@@ -60,7 +60,7 @@ from homeassistant.helpers import (
 )
 from homeassistant.helpers.typing import ConfigType
 from homeassistant.setup import BASE_PLATFORMS, async_setup_component
-from homeassistant.util import dt as dt_util, location
+from homeassistant.util import location
 from homeassistant.util.json import json_loads

 from .ignore_uncaught_exceptions import IGNORE_UNCAUGHT_EXCEPTIONS
@@ -526,8 +526,6 @@ async def hass(
     loop = asyncio.get_running_loop()
     hass_fixture_setup.append(True)

-    orig_tz = dt_util.DEFAULT_TIME_ZONE
-
     def exc_handle(loop, context):
         """Handle exceptions by rethrowing them, which will fail the test."""
         # Most of these contexts will contain an exception, but not all.
@@ -545,8 +543,7 @@ async def hass(
         orig_exception_handler(loop, context)

     exceptions: list[Exception] = []
-    hass = await async_test_home_assistant(loop, load_registries)
-
+    async with async_test_home_assistant(loop, load_registries) as hass:
         orig_exception_handler = loop.get_exception_handler()
         loop.set_exception_handler(exc_handle)

@@ -563,9 +560,6 @@ async def hass(

         await hass.async_stop(force=True)

-    # Restore timezone, it is set when creating the hass object
-    dt_util.DEFAULT_TIME_ZONE = orig_tz
-
     for ex in exceptions:
         if (
             request.module.__name__,
@@ -1305,11 +1299,13 @@ def hass_recorder(
     # pylint: disable-next=import-outside-toplevel
     from homeassistant.components.recorder import migration

-    original_tz = dt_util.DEFAULT_TIME_ZONE
-
-    hass = get_test_home_assistant()
-    nightly = recorder.Recorder.async_nightly_tasks if enable_nightly_purge else None
-    stats = recorder.Recorder.async_periodic_statistics if enable_statistics else None
+    with get_test_home_assistant() as hass:
+        nightly = (
+            recorder.Recorder.async_nightly_tasks if enable_nightly_purge else None
+        )
+        stats = (
+            recorder.Recorder.async_periodic_statistics if enable_statistics else None
+        )
         compile_missing = (
             recorder.Recorder._schedule_compile_missing_statistics
             if enable_statistics
@@ -1383,9 +1379,6 @@ def hass_recorder(
         yield setup_recorder
         hass.stop()

-    # Restore timezone, it is set when creating the hass object
-    dt_util.DEFAULT_TIME_ZONE = original_tz
-

 async def _async_init_recorder_component(
     hass: HomeAssistant,

@@ -516,9 +516,7 @@ async def test_changing_delayed_written_data(

 async def test_saving_load_round_trip(tmpdir: py.path.local) -> None:
     """Test saving and loading round trip."""
-    loop = asyncio.get_running_loop()
-    hass = await async_test_home_assistant(loop)
-
+    async with async_test_home_assistant() as hass:
         hass.config.config_dir = await hass.async_add_executor_job(
             tmpdir.mkdir, "temp_storage"
         )
@@ -562,9 +560,7 @@ async def test_loading_corrupt_core_file(
     tmpdir: py.path.local, caplog: pytest.LogCaptureFixture
 ) -> None:
     """Test we handle unrecoverable corruption in a core file."""
-    loop = asyncio.get_running_loop()
-    hass = await async_test_home_assistant(loop)
-
+    async with async_test_home_assistant() as hass:
         tmp_storage = await hass.async_add_executor_job(tmpdir.mkdir, "temp_storage")
         hass.config.config_dir = tmp_storage

@@ -622,8 +618,7 @@ async def test_loading_corrupt_file_known_domain(
     tmpdir: py.path.local, caplog: pytest.LogCaptureFixture
 ) -> None:
     """Test we handle unrecoverable corruption for a known domain."""
-    loop = asyncio.get_running_loop()
-    hass = await async_test_home_assistant(loop)
+    async with async_test_home_assistant() as hass:
         hass.config.components.add("testdomain")
         storage_key = "testdomain.testkey"

@@ -681,9 +676,7 @@ async def test_loading_corrupt_file_known_domain(

 async def test_os_error_is_fatal(tmpdir: py.path.local) -> None:
     """Test OSError during load is fatal."""
-    loop = asyncio.get_running_loop()
-    hass = await async_test_home_assistant(loop)
-
+    async with async_test_home_assistant() as hass:
         tmp_storage = await hass.async_add_executor_job(tmpdir.mkdir, "temp_storage")
         hass.config.config_dir = tmp_storage

@@ -1,5 +1,4 @@
 """Tests for the storage helper with minimal mocking."""
-import asyncio
 from datetime import timedelta
 import os
 from unittest.mock import patch
@@ -15,9 +14,7 @@ from tests.common import async_fire_time_changed, async_test_home_assistant
 async def test_removing_while_delay_in_progress(tmpdir: py.path.local) -> None:
     """Test removing while delay in progress."""

-    loop = asyncio.get_event_loop()
-    hass = await async_test_home_assistant(loop)
-
+    async with async_test_home_assistant() as hass:
         test_dir = await hass.async_add_executor_job(tmpdir.mkdir, "storage")

         with patch.object(storage, "STORAGE_DIR", test_dir):
@@ -30,9 +27,13 @@ async def test_removing_while_delay_in_progress(tmpdir: py.path.local) -> None:
             real_store.async_delay_save(lambda: {"delay": "yes"}, 1)

             await real_store.async_remove()
-    assert not await hass.async_add_executor_job(os.path.exists, real_store.path)
+            assert not await hass.async_add_executor_job(
+                os.path.exists, real_store.path
+            )

             async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=1))
             await hass.async_block_till_done()
-    assert not await hass.async_add_executor_job(os.path.exists, real_store.path)
+            assert not await hass.async_add_executor_job(
+                os.path.exists, real_store.path
+            )
+
         await hass.async_stop()