"""Purge old data helper."""
from datetime import timedelta
import logging

from sqlalchemy.exc import SQLAlchemyError

import homeassistant.util.dt as dt_util

from .models import Events, RecorderRuns, States
from .util import execute, session_scope

_LOGGER = logging.getLogger(__name__)


def purge_old_data(instance, purge_days: int, repack: bool) -> bool:
    """Purge events and states older than purge_days ago.

    Cleans up a timeframe of an hour, based on the oldest record.

    Returns False if only a partial purge was made and another call is
    needed to finish; True otherwise.
    """
    purge_before = dt_util.utcnow() - timedelta(days=purge_days)
    _LOGGER.debug("Purging events before %s", purge_before)

    try:
        with session_scope(session=instance.get_session()) as session:
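            # Purge at most a one-hour window per call, bounded by the oldest
            # state and event records, so each pass stays small.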
            query = session.query(States).order_by(States.last_updated.asc()).limit(1)
            states = execute(query, to_native=True, validate_entity_ids=False)

            states_purge_before = purge_before
            if states:
                states_purge_before = min(
                    purge_before, states[0].last_updated + timedelta(hours=1)
                )

            deleted_rows_states = (
                session.query(States)
                .filter(States.last_updated < states_purge_before)
                .delete(synchronize_session=False)
            )
            _LOGGER.debug("Deleted %s states", deleted_rows_states)
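
            # Purge events over the same bounded window, capped at one hour
            # past the oldest event's time_fired.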
            query = session.query(Events).order_by(Events.time_fired.asc()).limit(1)
            events = execute(query, to_native=True)

            events_purge_before = purge_before
            if events:
                events_purge_before = min(
                    purge_before, events[0].time_fired + timedelta(hours=1)
                )

            deleted_rows_events = (
                session.query(Events)
                .filter(Events.time_fired < events_purge_before)
                .delete(synchronize_session=False)
            )
            _LOGGER.debug("Deleted %s events", deleted_rows_events)

            # If states or events purging isn't processing the purge_before yet,
            # return False, as we are not done yet.
            if (states_purge_before and states_purge_before != purge_before) or (
                events_purge_before and events_purge_before != purge_before
            ):
                _LOGGER.debug("Purging hasn't fully completed yet.")
                return False

            # Recorder runs is small, no need to batch run it
            deleted_rows = (
                session.query(RecorderRuns)
                .filter(RecorderRuns.start < purge_before)
                .delete(synchronize_session=False)
            )
            _LOGGER.debug("Deleted %s recorder_runs", deleted_rows)

        if repack:
            # Execute sqlite or postgresql vacuum command to free up space on disk
            if instance.engine.driver in ("pysqlite", "postgresql"):
                _LOGGER.debug("Vacuuming SQL DB to free space")
                instance.engine.execute("VACUUM")
            # Optimize mysql / mariadb tables to free up space on disk
            elif instance.engine.driver in ("mysqldb", "pymysql"):
                _LOGGER.debug("Optimizing SQL DB to free space")
                instance.engine.execute("OPTIMIZE TABLE states, events, recorder_runs")

    except SQLAlchemyError as err:
        _LOGGER.warning("Error purging history: %s.", err)

    return True
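
# Minimal call sketch (assumes a configured recorder `instance`; illustrative
# only, the scheduling loop is not part of this module):
#
#     while not purge_old_data(instance, purge_days=10, repack=False):
#         pass  # each call clears at most a one-hour window; repeat until done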