limit logs to a maximum number of pages

Jimmy Zelinskie 2015-10-06 13:58:44 -04:00
parent 5ab8ca04b6
commit 9818481b08
2 changed files with 3 additions and 1 deletion

@@ -6,6 +6,7 @@ from cachetools import lru_cache
 from data.database import LogEntry, LogEntryKind, User, db
+# TODO: Find a way to get logs without slowing down pagination significantly.
 def _logs_query(selections, start_time, end_time, performer=None, repository=None, namespace=None):
   joined = (LogEntry
             .select(*selections)
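The TODO above concerns how expensive this query is to run for pagination. As a rough illustration only, the sketch below shows one way a function shaped like _logs_query can apply its optional filters with peewee; the stand-in LogEntry model and its field names (account, performer, repository, datetime) are assumptions for the example, not taken from this commit.

import datetime as dt

from peewee import CharField, DateTimeField, Model, SqliteDatabase

db = SqliteDatabase(':memory:')  # throwaway database so the sketch is runnable

class LogEntry(Model):
  # Stand-in model; the real table has more columns and foreign keys.
  kind = CharField()
  account = CharField()
  performer = CharField(null=True)
  repository = CharField(null=True)
  datetime = DateTimeField()

  class Meta:
    database = db

def _logs_query_sketch(selections, start_time, end_time, performer=None,
                       repository=None, namespace=None):
  # Always bound the query by the requested time window.
  query = (LogEntry
           .select(*selections)
           .where(LogEntry.datetime >= start_time, LogEntry.datetime < end_time))

  # Apply each optional filter only if the caller supplied it; chained
  # .where() calls are ANDed together by peewee.
  if performer is not None:
    query = query.where(LogEntry.performer == performer)
  if repository is not None:
    query = query.where(LogEntry.repository == repository)
  if namespace is not None:
    query = query.where(LogEntry.account == namespace)

  return query

db.create_tables([LogEntry])
week = _logs_query_sketch([LogEntry.kind, LogEntry.datetime],
                          dt.datetime(2015, 9, 29), dt.datetime(2015, 10, 6),
                          namespace='devtable')
print(week.count())  # 0 rows in the empty in-memory database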

@@ -15,6 +15,7 @@ from auth import scopes
 from app import avatar
 LOGS_PER_PAGE = 50
+MAX_PAGES = 20
 def log_view(log, kinds):
   view = {
@@ -80,7 +81,7 @@ def _validate_logs_arguments(start_time, end_time, performer_name):
 def get_logs(start_time, end_time, performer_name=None, repository=None, namespace=None, page=None):
   (start_time, end_time, performer) = _validate_logs_arguments(start_time, end_time, performer_name)
-  page = page if page else 1
+  page = min(MAX_PAGES, page if page else 1)
   kinds = model.log.get_log_entry_kinds()
   logs = model.log.list_logs(start_time, end_time, performer=performer, repository=repository,
                              namespace=namespace, page=page, count=LOGS_PER_PAGE + 1)
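
Together, LOGS_PER_PAGE and MAX_PAGES now cap how deep a client can page through the logs. Below is a minimal sketch of the resulting behavior, assuming (as the count=LOGS_PER_PAGE + 1 argument suggests) that the extra row is fetched only to detect whether a further page exists; clamp_page and has_additional are illustrative names, not part of the change.

LOGS_PER_PAGE = 50
MAX_PAGES = 20

def clamp_page(page=None):
  # Default to the first page, then clamp so a caller can never page past MAX_PAGES.
  return min(MAX_PAGES, page if page else 1)

assert clamp_page(None) == 1
assert clamp_page(7) == 7
assert clamp_page(500) == MAX_PAGES  # deep pagination now stops at page 20

# Requesting one row more than a page displays reveals whether another page exists
# without a separate COUNT query (an assumption about the intent of LOGS_PER_PAGE + 1).
rows = list(range(LOGS_PER_PAGE + 1))       # stand-in for model.log.list_logs(...)
has_additional = len(rows) > LOGS_PER_PAGE  # True -> the client can request a next page
rows = rows[:LOGS_PER_PAGE]                 # return at most one full page of results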