From 9818481b0864f79b69a3f28bcf500fcdbcde9a49 Mon Sep 17 00:00:00 2001
From: Jimmy Zelinskie
Date: Tue, 6 Oct 2015 13:58:44 -0400
Subject: [PATCH] limit logs to a maximum number of pages

---
 data/model/log.py     | 1 +
 endpoints/api/logs.py | 3 ++-
 2 files changed, 3 insertions(+), 1 deletion(-)

diff --git a/data/model/log.py b/data/model/log.py
index ad5713d6d..21bd1bde6 100644
--- a/data/model/log.py
+++ b/data/model/log.py
@@ -6,6 +6,7 @@ from cachetools import lru_cache
 from data.database import LogEntry, LogEntryKind, User, db
 
 
+# TODO: Find a way to get logs without slowing down pagination significantly.
 def _logs_query(selections, start_time, end_time, performer=None, repository=None, namespace=None):
   joined = (LogEntry
             .select(*selections)
diff --git a/endpoints/api/logs.py b/endpoints/api/logs.py
index d76f6d04c..92bbcee93 100644
--- a/endpoints/api/logs.py
+++ b/endpoints/api/logs.py
@@ -15,6 +15,7 @@ from auth import scopes
 from app import avatar
 
 LOGS_PER_PAGE = 50
+MAX_PAGES = 20
 
 def log_view(log, kinds):
   view = {
@@ -80,7 +81,7 @@ def _validate_logs_arguments(start_time, end_time, performer_name):
 def get_logs(start_time, end_time, performer_name=None, repository=None, namespace=None, page=None):
   (start_time, end_time, performer) = _validate_logs_arguments(start_time, end_time, performer_name)
 
-  page = page if page else 1
+  page = min(MAX_PAGES, page if page else 1)
   kinds = model.log.get_log_entry_kinds()
   logs = model.log.list_logs(start_time, end_time, performer=performer, repository=repository,
                              namespace=namespace, page=page, count=LOGS_PER_PAGE + 1)