Speed up sql queries where ORM rows are not needed (#91839)
* Speed up logbook and history queries where ORM rows are not needed. This avoids having SQLAlchemy wrap the Result in a ChunkedIteratorResult, which has additional overhead we do not need for these cases. * Apply the same change in more places. * Anything that uses _sorted_statistics_to_dict does not need ORM rows either.
This commit is contained in:
parent
2663901603
commit
95fcdc5684
10 changed files with 45 additions and 35 deletions
|
@ -1555,7 +1555,9 @@ def _statistics_during_period_with_session(
|
|||
stmt = _generate_statistics_during_period_stmt(
|
||||
start_time, end_time, metadata_ids, table, types
|
||||
)
|
||||
stats = cast(Sequence[Row], execute_stmt_lambda_element(session, stmt))
|
||||
stats = cast(
|
||||
Sequence[Row], execute_stmt_lambda_element(session, stmt, orm_rows=False)
|
||||
)
|
||||
|
||||
if not stats:
|
||||
return {}
|
||||
|
@ -1664,7 +1666,9 @@ def _get_last_statistics(
|
|||
stmt = _get_last_statistics_stmt(metadata_id, number_of_stats)
|
||||
else:
|
||||
stmt = _get_last_statistics_short_term_stmt(metadata_id, number_of_stats)
|
||||
stats = cast(Sequence[Row], execute_stmt_lambda_element(session, stmt))
|
||||
stats = cast(
|
||||
Sequence[Row], execute_stmt_lambda_element(session, stmt, orm_rows=False)
|
||||
)
|
||||
|
||||
if not stats:
|
||||
return {}
|
||||
|
@ -1755,7 +1759,9 @@ def get_latest_short_term_statistics(
|
|||
if statistic_id in metadata
|
||||
]
|
||||
stmt = _latest_short_term_statistics_stmt(metadata_ids)
|
||||
stats = cast(Sequence[Row], execute_stmt_lambda_element(session, stmt))
|
||||
stats = cast(
|
||||
Sequence[Row], execute_stmt_lambda_element(session, stmt, orm_rows=False)
|
||||
)
|
||||
if not stats:
|
||||
return {}
|
||||
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue