From 0c6a4692189785a47f9f329ad34e2e49d6cf83dd Mon Sep 17 00:00:00 2001
From: "J. Nick Koston"
Date: Fri, 24 Feb 2023 11:41:44 -0600
Subject: [PATCH] Fix migration failing when existing data has duplicates
 (#88712)

---
 .../components/recorder/migration.py          | 29 ++++++++++++++++---
 homeassistant/components/recorder/util.py     |  2 +-
 2 files changed, 26 insertions(+), 5 deletions(-)

diff --git a/homeassistant/components/recorder/migration.py b/homeassistant/components/recorder/migration.py
index a3a609a1b6f..431bc78ba80 100644
--- a/homeassistant/components/recorder/migration.py
+++ b/homeassistant/components/recorder/migration.py
@@ -13,6 +13,7 @@ from sqlalchemy import ForeignKeyConstraint, MetaData, Table, func, text
 from sqlalchemy.engine import CursorResult, Engine
 from sqlalchemy.exc import (
     DatabaseError,
+    IntegrityError,
     InternalError,
     OperationalError,
     ProgrammingError,
@@ -778,9 +779,10 @@ def _apply_update(  # noqa: C901
         # Add name column to StatisticsMeta
         _add_columns(session_maker, "statistics_meta", ["name VARCHAR(255)"])
     elif new_version == 24:
-        _LOGGER.debug("Deleting duplicated statistics entries")
-        with session_scope(session=session_maker()) as session:
-            delete_statistics_duplicates(hass, session)
+        # This used to create the unique indices for start and statistic_id
+        # but we changed the format in schema 34 which will now take care
+        # of removing any duplicate if they still exist.
+        pass
     elif new_version == 25:
         _add_columns(session_maker, "states", [f"attributes_id {big_int}"])
         _create_index(session_maker, "states", "ix_states_attributes_id")
@@ -907,7 +909,26 @@
             "statistics_short_term",
             "ix_statistics_short_term_statistic_id_start_ts",
         )
-        _migrate_statistics_columns_to_timestamp(session_maker, engine)
+        try:
+            _migrate_statistics_columns_to_timestamp(session_maker, engine)
+        except IntegrityError as ex:
+            _LOGGER.error(
+                "Statistics table contains duplicate entries: %s; "
+                "Cleaning up duplicates and trying again; "
+                "This will take a while; "
+                "Please be patient!",
+                ex,
+            )
+            # There may be duplicated statistics entries, delete duplicates
+            # and try again
+            with session_scope(session=session_maker()) as session:
+                delete_statistics_duplicates(hass, session)
+            _migrate_statistics_columns_to_timestamp(session_maker, engine)
+            # Log at error level to ensure the user sees this message in the log
+            # since we logged the error above.
+            _LOGGER.error(
+                "Statistics migration successfully recovered after statistics table duplicate cleanup"
+            )
     elif new_version == 35:
         # Migration is done in two steps to ensure we can start using
         # the new columns before we wipe the old ones.
diff --git a/homeassistant/components/recorder/util.py b/homeassistant/components/recorder/util.py
index 5cda3d283dd..3ff6b62b21e 100644
--- a/homeassistant/components/recorder/util.py
+++ b/homeassistant/components/recorder/util.py
@@ -125,7 +125,7 @@ def session_scope(
             need_rollback = True
             session.commit()
     except Exception as err:  # pylint: disable=broad-except
-        _LOGGER.error("Error executing query: %s", err)
+        _LOGGER.error("Error executing query: %s", err, exc_info=True)
         if need_rollback:
             session.rollback()
         if not exception_filter or not exception_filter(err):