hass-core/homeassistant/components/recorder/models.py
Tim Soderstrom 5dfdb9e481 New indexes for states and recorder_runs tables (#6688)
* New indexes for states table

* Added recorder_runs indexes

* Created a new function for compound indexes.

A new function was created because it keeps single-field index creation a
little cleaner, since one doesn't have to wrap a single field in a list. The
two functions differ mostly in how the index name is built, so with a bit more
logic they could be combined into one. Given how often migration changes are
run, I thought the code bloat was probably a worthy trade-off for now. (A
hedged sketch of what such an index-creation helper might look like follows
these notes.)

* Adjusted indexes; proof of concept for referencing indexes by name.

* Corrected lint errors

* Fixed pydocstyle error

* Moved create_index function outside apply_update

* Moved to single line (just barely)
2017-03-23 20:48:31 -07:00
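The index-creation helper described above lives in the recorder's migration code rather than in this models file. Purely as a hedged sketch of the idea (look an Index up by name on the declarative metadata defined below, then create it against a live database), something along these lines could work; the helper name _create_index, the standalone usage block, and the SQLite path are illustrative assumptions, not the actual migration implementation.

import logging

from sqlalchemy import create_engine

from homeassistant.components.recorder import models

_LOGGER = logging.getLogger(__name__)


def _create_index(engine, table_name, index_name):
    """Create an index on an existing table, looked up by its name.

    The Index objects are already declared on the models below (either via
    __table_args__ or index=True), so a migration only needs to find the one
    with the matching name and emit CREATE INDEX against the database.
    """
    table = models.Base.metadata.tables[table_name]
    index = next(idx for idx in table.indexes if idx.name == index_name)
    _LOGGER.info("Adding index %s to table %s", index_name, table_name)
    index.create(engine)


if __name__ == '__main__':
    # Illustrative usage: add the new states and recorder_runs indexes to an
    # existing SQLite database (the file path here is an assumption).
    engine = create_engine('sqlite:///home-assistant_v2.db')
    _create_index(engine, 'states', 'ix_states_entity_id_created')
    _create_index(engine, 'recorder_runs', 'ix_recorder_runs_start_end')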


"""Models for SQLAlchemy."""
import json
from datetime import datetime
import logging
from sqlalchemy import (Boolean, Column, DateTime, ForeignKey, Index, Integer,
String, Text, distinct)
from sqlalchemy.ext.declarative import declarative_base
import homeassistant.util.dt as dt_util
from homeassistant.core import Event, EventOrigin, State, split_entity_id
from homeassistant.remote import JSONEncoder
# SQLAlchemy Schema
# pylint: disable=invalid-name
Base = declarative_base()
SCHEMA_VERSION = 2
_LOGGER = logging.getLogger(__name__)
class Events(Base): # type: ignore
"""Event history data."""
__tablename__ = 'events'
event_id = Column(Integer, primary_key=True)
event_type = Column(String(32), index=True)
event_data = Column(Text)
origin = Column(String(32))
time_fired = Column(DateTime(timezone=True), index=True)
created = Column(DateTime(timezone=True), default=datetime.utcnow)
@staticmethod
def from_event(event):
"""Create an event database object from a native event."""
return Events(event_type=event.event_type,
event_data=json.dumps(event.data, cls=JSONEncoder),
origin=str(event.origin),
time_fired=event.time_fired)
def to_native(self):
"""Convert to a natve HA Event."""
try:
return Event(
self.event_type,
json.loads(self.event_data),
EventOrigin(self.origin),
_process_timestamp(self.time_fired)
)
except ValueError:
# When json.loads fails
_LOGGER.exception("Error converting to event: %s", self)
return None
class States(Base): # type: ignore
"""State change history."""
__tablename__ = 'states'
state_id = Column(Integer, primary_key=True)
domain = Column(String(64))
entity_id = Column(String(255))
state = Column(String(255))
attributes = Column(Text)
event_id = Column(Integer, ForeignKey('events.event_id'))
last_changed = Column(DateTime(timezone=True), default=datetime.utcnow)
last_updated = Column(DateTime(timezone=True), default=datetime.utcnow,
index=True)
created = Column(DateTime(timezone=True), default=datetime.utcnow)
__table_args__ = (Index('states__state_changes',
'last_changed', 'last_updated', 'entity_id'),
Index('states__significant_changes',
'domain', 'last_updated', 'entity_id'),
Index('ix_states_entity_id_created',
'entity_id', 'created'),)
@staticmethod
def from_event(event):
"""Create object from a state_changed event."""
entity_id = event.data['entity_id']
state = event.data.get('new_state')
dbstate = States(entity_id=entity_id)
# State got deleted
if state is None:
dbstate.state = ''
dbstate.domain = split_entity_id(entity_id)[0]
dbstate.attributes = '{}'
dbstate.last_changed = event.time_fired
dbstate.last_updated = event.time_fired
else:
dbstate.domain = state.domain
dbstate.state = state.state
dbstate.attributes = json.dumps(dict(state.attributes),
cls=JSONEncoder)
dbstate.last_changed = state.last_changed
dbstate.last_updated = state.last_updated
return dbstate
def to_native(self):
"""Convert to an HA state object."""
try:
return State(
self.entity_id, self.state,
json.loads(self.attributes),
_process_timestamp(self.last_changed),
_process_timestamp(self.last_updated)
)
except ValueError:
# When json.loads fails
_LOGGER.exception("Error converting row to state: %s", self)
return None
class RecorderRuns(Base): # type: ignore
"""Representation of recorder run."""
__tablename__ = 'recorder_runs'
run_id = Column(Integer, primary_key=True)
start = Column(DateTime(timezone=True), default=datetime.utcnow)
end = Column(DateTime(timezone=True))
closed_incorrect = Column(Boolean, default=False)
created = Column(DateTime(timezone=True), default=datetime.utcnow)
__table_args__ = (Index('ix_recorder_runs_start_end', 'start', 'end'),)
def entity_ids(self, point_in_time=None):
"""Return the entity ids that existed in this run.
Specify point_in_time if you want to know which existed at that point
in time inside the run.
"""
from sqlalchemy.orm.session import Session
session = Session.object_session(self)
assert session is not None, 'RecorderRuns need to be persisted'
query = session.query(distinct(States.entity_id)).filter(
States.last_updated >= self.start)
if point_in_time is not None:
query = query.filter(States.last_updated < point_in_time)
elif self.end is not None:
query = query.filter(States.last_updated < self.end)
return [row[0] for row in query]
def to_native(self):
"""Return self, native format is this model."""
return self
class SchemaChanges(Base): # type: ignore
"""Representation of schema version changes."""
__tablename__ = 'schema_changes'
change_id = Column(Integer, primary_key=True)
schema_version = Column(Integer)
changed = Column(DateTime(timezone=True), default=datetime.utcnow)
def _process_timestamp(ts):
"""Process a timestamp into datetime object."""
if ts is None:
return None
elif ts.tzinfo is None:
return dt_util.UTC.localize(ts)
else:
return dt_util.as_utc(ts)