Use async_setup_recorder_instance fixture in recorder v32_migration tests (#121081)

Co-authored-by: J. Nick Koston <nick@koston.org>
pull/121267/head
Erik Montnemery 2024-07-05 00:49:51 +02:00 committed by GitHub
parent d799a4575b
commit e47cbf3cf7
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
1 changed file with 39 additions and 46 deletions

View File

@ -14,13 +14,12 @@ from homeassistant.components.recorder import core, statistics
from homeassistant.components.recorder.queries import select_event_type_ids
from homeassistant.components.recorder.util import session_scope
from homeassistant.core import EVENT_STATE_CHANGED, Event, EventOrigin, State
from homeassistant.helpers import recorder as recorder_helper
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util
from .common import async_wait_recording_done
from tests.common import async_test_home_assistant
from tests.typing import RecorderInstanceGenerator
CREATE_ENGINE_TARGET = "homeassistant.components.recorder.core.create_engine"
SCHEMA_MODULE = "tests.components.recorder.db_schema_32"
@ -48,11 +47,14 @@ def _create_engine_test(*args, **kwargs):
return engine
@pytest.mark.parametrize("enable_migrate_context_ids", [True])
@pytest.mark.parametrize("enable_migrate_event_type_ids", [True])
@pytest.mark.parametrize("enable_migrate_entity_ids", [True])
@pytest.mark.parametrize("persistent_database", [True])
@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage
async def test_migrate_times(
async_test_recorder: RecorderInstanceGenerator,
caplog: pytest.LogCaptureFixture,
recorder_db_url: str,
) -> None:
"""Test we can migrate times."""
importlib.import_module(SCHEMA_MODULE)
@ -119,11 +121,10 @@ async def test_migrate_times(
"homeassistant.components.recorder.Recorder._cleanup_legacy_states_event_ids"
),
):
async with async_test_home_assistant() as hass:
recorder_helper.async_initialize_recorder(hass)
assert await async_setup_component(
hass, "recorder", {"recorder": {"db_url": recorder_db_url}}
)
async with (
async_test_home_assistant() as hass,
async_test_recorder(hass) as instance,
):
await hass.async_block_till_done()
await async_wait_recording_done(hass)
await async_wait_recording_done(hass)
@ -133,15 +134,15 @@ async def test_migrate_times(
session.add(old_db_schema.Events.from_event(custom_event))
session.add(old_db_schema.States.from_event(state_changed_event))
await recorder.get_instance(hass).async_add_executor_job(_add_data)
await instance.async_add_executor_job(_add_data)
await hass.async_block_till_done()
await recorder.get_instance(hass).async_block_till_done()
await instance.async_block_till_done()
states_indexes = await recorder.get_instance(hass).async_add_executor_job(
states_indexes = await instance.async_add_executor_job(
_get_states_index_names
)
states_index_names = {index["name"] for index in states_indexes}
assert recorder.get_instance(hass).use_legacy_events_index is True
assert instance.use_legacy_events_index is True
await hass.async_stop()
await hass.async_block_till_done()
@ -149,17 +150,16 @@ async def test_migrate_times(
assert "ix_states_event_id" in states_index_names
# Test that the duplicates are removed during migration from schema 23
async with async_test_home_assistant() as hass:
recorder_helper.async_initialize_recorder(hass)
assert await async_setup_component(
hass, "recorder", {"recorder": {"db_url": recorder_db_url}}
)
async with (
async_test_home_assistant() as hass,
async_test_recorder(hass) as instance,
):
await hass.async_block_till_done()
# We need to wait for all the migration tasks to complete
# before we can check the database.
for _ in range(number_of_migrations):
await recorder.get_instance(hass).async_block_till_done()
await instance.async_block_till_done()
await async_wait_recording_done(hass)
def _get_test_data_from_db():
@ -183,9 +183,9 @@ async def test_migrate_times(
session.expunge_all()
return events_result, states_result
events_result, states_result = await recorder.get_instance(
hass
).async_add_executor_job(_get_test_data_from_db)
events_result, states_result = await instance.async_add_executor_job(
_get_test_data_from_db
)
assert len(events_result) == 1
assert events_result[0].time_fired_ts == now_timestamp
@ -197,24 +197,20 @@ async def test_migrate_times(
with session_scope(hass=hass) as session:
return inspect(session.connection()).get_indexes("events")
events_indexes = await recorder.get_instance(hass).async_add_executor_job(
_get_events_index_names
)
events_indexes = await instance.async_add_executor_job(_get_events_index_names)
events_index_names = {index["name"] for index in events_indexes}
assert "ix_events_context_id_bin" in events_index_names
assert "ix_events_context_id" not in events_index_names
states_indexes = await recorder.get_instance(hass).async_add_executor_job(
_get_states_index_names
)
states_indexes = await instance.async_add_executor_job(_get_states_index_names)
states_index_names = {index["name"] for index in states_indexes}
# sqlite does not support dropping foreign keys so we had to
# create a new table and copy the data over
assert "ix_states_event_id" not in states_index_names
assert recorder.get_instance(hass).use_legacy_events_index is False
assert instance.use_legacy_events_index is False
await hass.async_stop()
@ -222,6 +218,7 @@ async def test_migrate_times(
@pytest.mark.parametrize("persistent_database", [True])
@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage
async def test_migrate_can_resume_entity_id_post_migration(
async_test_recorder: RecorderInstanceGenerator,
caplog: pytest.LogCaptureFixture,
recorder_db_url: str,
) -> None:
@ -287,11 +284,10 @@ async def test_migrate_can_resume_entity_id_post_migration(
"homeassistant.components.recorder.Recorder._cleanup_legacy_states_event_ids"
),
):
async with async_test_home_assistant() as hass:
recorder_helper.async_initialize_recorder(hass)
assert await async_setup_component(
hass, "recorder", {"recorder": {"db_url": recorder_db_url}}
)
async with (
async_test_home_assistant() as hass,
async_test_recorder(hass) as instance,
):
await hass.async_block_till_done()
await async_wait_recording_done(hass)
await async_wait_recording_done(hass)
@ -301,15 +297,15 @@ async def test_migrate_can_resume_entity_id_post_migration(
session.add(old_db_schema.Events.from_event(custom_event))
session.add(old_db_schema.States.from_event(state_changed_event))
await recorder.get_instance(hass).async_add_executor_job(_add_data)
await instance.async_add_executor_job(_add_data)
await hass.async_block_till_done()
await recorder.get_instance(hass).async_block_till_done()
await instance.async_block_till_done()
states_indexes = await recorder.get_instance(hass).async_add_executor_job(
states_indexes = await instance.async_add_executor_job(
_get_states_index_names
)
states_index_names = {index["name"] for index in states_indexes}
assert recorder.get_instance(hass).use_legacy_events_index is True
assert instance.use_legacy_events_index is True
await hass.async_stop()
await hass.async_block_till_done()
@ -317,22 +313,19 @@ async def test_migrate_can_resume_entity_id_post_migration(
assert "ix_states_event_id" in states_index_names
assert "ix_states_entity_id_last_updated_ts" in states_index_names
async with async_test_home_assistant() as hass:
recorder_helper.async_initialize_recorder(hass)
assert await async_setup_component(
hass, "recorder", {"recorder": {"db_url": recorder_db_url}}
)
async with (
async_test_home_assistant() as hass,
async_test_recorder(hass) as instance,
):
await hass.async_block_till_done()
# We need to wait for all the migration tasks to complete
# before we can check the database.
for _ in range(number_of_migrations):
await recorder.get_instance(hass).async_block_till_done()
await instance.async_block_till_done()
await async_wait_recording_done(hass)
states_indexes = await recorder.get_instance(hass).async_add_executor_job(
_get_states_index_names
)
states_indexes = await instance.async_add_executor_job(_get_states_index_names)
states_index_names = {index["name"] for index in states_indexes}
assert "ix_states_entity_id_last_updated_ts" not in states_index_names