New indexes for states and recording_runs tables (#6688)

* New indexes for states table

* Added recorder_runs indexes

* Created a new function for compound indexes.

A new function was created because it makes it a little cleaner when creating
a single-field index, since one doesn't have to create a list. The main
difference is in creating the name of the index, so with a bit more logic it
would be possible to combine the two into one function. Given how infrequently
migration changes are run, I thought that the code bloat was probably a worthy
trade-off for now.

* Adjusted indexes, POC for ref indexes by name.

* Corrected lint errors

* Fixed pydocstyle error

* Moved create_index function outside apply_update

* Moved to single line (just barely)
pull/6771/head
Tim Soderstrom 2017-03-23 22:48:31 -05:00 committed by Paulus Schoutsen
parent 6c5989895a
commit 5dfdb9e481
3 changed files with 33 additions and 18 deletions

View File

@ -166,6 +166,7 @@ class Recorder(threading.Thread):
migration.migrate_schema(self)
self._setup_run()
connected = True
_LOGGER.debug("Connected to recorder database")
except Exception as err: # pylint: disable=broad-except
_LOGGER.error("Error during connection setup: %s (retrying "
"in %s seconds)", err, CONNECT_RETRY_WAIT)

View File

@ -36,25 +36,34 @@ def migrate_schema(instance):
_LOGGER.info("Upgrade to version %s done", new_version)
def _apply_update(engine, new_version):
"""Perform operations to bring schema up to date."""
def _create_index(engine, table_name, index_name):
    """Create an index for the specified table by index name.

    The index name must match the name given for the index within the
    table definition described in the models module, since the Index
    object is looked up from the declarative metadata rather than
    rebuilt here.
    """
    # Imported lazily so the migration module has no hard import-time
    # dependency on SQLAlchemy or the models module.
    from sqlalchemy import Table
    from . import models
    table = Table(table_name, models.Base.metadata)
    _LOGGER.debug("Looking up index for table %s", table_name)
    # Look up the index object by name from the table in the models.
    # Raises StopIteration if the name does not exist in the metadata,
    # which indicates a mismatch between the migration and the models.
    index = next(idx for idx in table.indexes if idx.name == index_name)
    _LOGGER.debug("Creating %s index", index_name)
    index.create(engine)
    _LOGGER.debug("Finished creating %s", index_name)
create_index("events", "time_fired")
def _apply_update(engine, new_version):
"""Perform operations to bring schema up to date."""
if new_version == 1:
_create_index(engine, "events", "ix_events_time_fired")
elif new_version == 2:
# Create compound start/end index for recorder_runs
_create_index(engine, "recorder_runs", "ix_recorder_runs_start_end")
# Create indexes for states
_create_index(engine, "states", "ix_states_last_updated")
_create_index(engine, "states", "ix_states_entity_id_created")
else:
raise ValueError("No schema migration defined for version {}"
.format(new_version))

View File

@ -16,7 +16,7 @@ from homeassistant.remote import JSONEncoder
# pylint: disable=invalid-name
Base = declarative_base()
SCHEMA_VERSION = 1
SCHEMA_VERSION = 2
_LOGGER = logging.getLogger(__name__)
@ -66,13 +66,16 @@ class States(Base): # type: ignore
attributes = Column(Text)
event_id = Column(Integer, ForeignKey('events.event_id'))
last_changed = Column(DateTime(timezone=True), default=datetime.utcnow)
last_updated = Column(DateTime(timezone=True), default=datetime.utcnow)
last_updated = Column(DateTime(timezone=True), default=datetime.utcnow,
index=True)
created = Column(DateTime(timezone=True), default=datetime.utcnow)
__table_args__ = (Index('states__state_changes',
'last_changed', 'last_updated', 'entity_id'),
Index('states__significant_changes',
'domain', 'last_updated', 'entity_id'), )
'domain', 'last_updated', 'entity_id'),
Index('ix_states_entity_id_created',
'entity_id', 'created'),)
@staticmethod
def from_event(event):
@ -124,6 +127,8 @@ class RecorderRuns(Base): # type: ignore
closed_incorrect = Column(Boolean, default=False)
created = Column(DateTime(timezone=True), default=datetime.utcnow)
__table_args__ = (Index('ix_recorder_runs_start_end', 'start', 'end'),)
def entity_ids(self, point_in_time=None):
"""Return the entity ids that existed in this run.