This adds a new function to the history module which returns significant states. For most domains this is the list of state changes. For the thermostat domain it also includes attribute changes, so that changes in current_temperature are exposed to the graphing layer. Closes #881
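A minimal sketch of how the new function is meant to be called (the entity id and time window here are illustrative, not part of this change):

    from datetime import timedelta

    import homeassistant.util.dt as dt_util
    import homeassistant.components.history as history

    # Significant states over the last 24 hours, as a dict keyed by entity_id.
    start_time = dt_util.utcnow() - timedelta(days=1)
    states = history.get_significant_states(
        start_time, entity_id='thermostat.living_room')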
"""
|
|
homeassistant.components.history
|
|
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
|
Provide pre-made queries on top of the recorder component.
|
|
|
|
For more details about this component, please refer to the documentation at
|
|
https://home-assistant.io/components/history/
|
|
"""
|
|
import re
|
|
from datetime import timedelta
|
|
from itertools import groupby
|
|
from collections import defaultdict
|
|
|
|
import homeassistant.util.dt as dt_util
|
|
import homeassistant.components.recorder as recorder
|
|
from homeassistant.const import HTTP_BAD_REQUEST
|
|
|
|
DOMAIN = 'history'
|
|
DEPENDENCIES = ['recorder', 'http']
|
|
|
|
SIGNIFICANT_DOMAINS = ('thermostat',)
|
|
|
|
URL_HISTORY_PERIOD = re.compile(
|
|
r'/api/history/period(?:/(?P<date>\d{4}-\d{1,2}-\d{1,2})|)')
|
|
|
|
|
|
def last_5_states(entity_id):
    """ Return the last 5 states for entity_id. """
    entity_id = entity_id.lower()

    # last_changed == last_updated means the state itself changed; rows
    # written for attribute-only updates are skipped.
    query = """
        SELECT * FROM states WHERE entity_id=? AND
        last_changed=last_updated
        ORDER BY state_id DESC LIMIT 0, 5
    """

    return recorder.query_states(query, (entity_id, ))


def get_significant_states(start_time, end_time=None, entity_id=None):
    """Return state changes during UTC period start_time - end_time.

    Significant states are all states where there is a state change,
    as well as all states from certain domains (for instance
    thermostat so that we get current temperature in our graphs).
    """
    where = """
        (domain IN ({}) OR last_changed=last_updated)
        AND last_updated > ?
    """.format(",".join(["'%s'" % x for x in SIGNIFICANT_DOMAINS]))

    data = [start_time]

    if end_time is not None:
        where += "AND last_updated < ? "
        data.append(end_time)

    if entity_id is not None:
        where += "AND entity_id = ? "
        data.append(entity_id.lower())

    query = ("SELECT * FROM states WHERE {} "
             "ORDER BY entity_id, last_updated ASC").format(where)

    states = recorder.query_states(query, data)

    return states_to_json(states, start_time, entity_id)


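# Rough shape of the result, for a hypothetical thermostat entity: every
# state change, plus attribute-only updates for SIGNIFICANT_DOMAINS,
# grouped per entity:
#
#     get_significant_states(start, end)
#     # => {'thermostat.living_room': [<State>, <State>, ...], ...}

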
def state_changes_during_period(start_time, end_time=None, entity_id=None):
    """ Return state changes during UTC period start_time - end_time. """
    where = "last_changed=last_updated AND last_changed > ? "
    data = [start_time]

    if end_time is not None:
        where += "AND last_changed < ? "
        data.append(end_time)

    if entity_id is not None:
        where += "AND entity_id = ? "
        data.append(entity_id.lower())

    query = ("SELECT * FROM states WHERE {} "
             "ORDER BY entity_id, last_changed ASC").format(where)

    states = recorder.query_states(query, data)

    return states_to_json(states, start_time, entity_id)


def get_states(utc_point_in_time, entity_ids=None, run=None):
    """ Returns the states at a specific point in time. """
    if run is None:
        run = recorder.run_information(utc_point_in_time)

        # History did not run before utc_point_in_time
        if run is None:
            return []

    where = run.where_after_start_run + "AND created < ? "
    where_data = [utc_point_in_time]

    if entity_ids is not None:
        where += "AND entity_id IN ({}) ".format(
            ",".join(['?'] * len(entity_ids)))
        where_data.extend(entity_ids)

    # For each entity, pick the newest row written before the requested
    # point in time (max state_id per entity_id within the run).
    query = """
        SELECT * FROM states
        INNER JOIN (
            SELECT max(state_id) AS max_state_id
            FROM states WHERE {}
            GROUP BY entity_id)
        WHERE state_id = max_state_id
    """.format(where)

    return recorder.query_states(query, where_data)


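# Illustrative call (the timestamp is hypothetical): a snapshot of every
# entity's most recent state as of six hours ago:
#
#     get_states(dt_util.utcnow() - timedelta(hours=6))

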
def states_to_json(states, start_time, entity_id):
    """Converts SQL results into JSON friendly data structure.

    This takes our state list and turns it into a JSON friendly data
    structure {'entity_id': [list of states], 'entity_id2': [list of states]}

    We also need to go back and create a synthetic zero data point for
    each list of states, otherwise our graphs won't start on the Y
    axis correctly.
    """
    result = defaultdict(list)

    entity_ids = [entity_id] if entity_id is not None else None

    # Get the states at the start time
    for state in get_states(start_time, entity_ids):
        state.last_changed = start_time
        state.last_updated = start_time
        result[state.entity_id].append(state)

    # Append all changes to it
    for entity_id, group in groupby(states, lambda state: state.entity_id):
        result[entity_id].extend(group)

    return result


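# Worked example of the synthetic start point (entity hypothetical): if
# light.kitchen last changed at 09:00 and start_time is 12:00, its 09:00
# state is replayed with last_changed/last_updated rewritten to 12:00, so
# the graph has a value at the left edge of the requested window.

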
def get_state(utc_point_in_time, entity_id, run=None):
    """ Return a state at a specific point in time. """
    states = get_states(utc_point_in_time, (entity_id,), run)

    return states[0] if states else None


# pylint: disable=unused-argument
def setup(hass, config):
    """ Setup history hooks. """
    hass.http.register_path(
        'GET',
        re.compile(
            r'/api/history/entity/(?P<entity_id>[a-zA-Z\._0-9]+)/'
            r'recent_states'),
        _api_last_5_states)

    hass.http.register_path('GET', URL_HISTORY_PERIOD, _api_history_period)

    return True


# pylint: disable=unused-argument
# pylint: disable=invalid-name
def _api_last_5_states(handler, path_match, data):
    """ Return the last 5 states for an entity id as JSON. """
    entity_id = path_match.group('entity_id')

    handler.write_json(last_5_states(entity_id))


def _api_history_period(handler, path_match, data):
    """ Return history over a period of time. """
    date_str = path_match.group('date')
    one_day = timedelta(days=1)

    if date_str:
        start_date = dt_util.date_str_to_date(date_str)

        if start_date is None:
            handler.write_json_message("Error parsing date", HTTP_BAD_REQUEST)
            return

        start_time = dt_util.as_utc(dt_util.start_of_local_day(start_date))
    else:
        start_time = dt_util.utcnow() - one_day

    end_time = start_time + one_day

    entity_id = data.get('filter_entity_id')

    handler.write_json(
        get_significant_states(start_time, end_time, entity_id).values())
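

# Illustrative requests for the paths registered in setup() (the entity
# ids are hypothetical):
#
#     GET /api/history/period/2016-02-08?filter_entity_id=thermostat.living_room
#     GET /api/history/entity/light.kitchen/recent_states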