Temporarily change to storing logs in a new LogEntry2 table

This prevents us from running out of auto-incrementing ID values before we can upgrade to peewee 3 and change the field type to a BigInt.

Fixes https://jira.coreos.com/browse/QUAY-943
Joseph Schorr 2018-05-18 12:54:38 -04:00
parent 66b4e45929
commit a007332d4c
13 changed files with 201 additions and 113 deletions
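
For context on the overflow: on most SQL backends an integer primary key is a 32-bit INT, which caps auto-increment values at 2,147,483,647. A minimal sketch of the eventual fix the message alludes to, assuming peewee 3's BigAutoField (the in-memory SQLite binding and field layout here are illustrative, not the real Quay schema):

# Sketch only, assuming peewee 3. BigAutoField maps the primary key to a
# 64-bit BIGINT, removing the 32-bit auto-increment ceiling that LogEntry
# is approaching. The database binding and fields are illustrative.
from peewee import BigAutoField, DateTimeField, Model, SqliteDatabase

db = SqliteDatabase(':memory:')  # stand-in; production would use MySQL/Postgres

class LogEntry(Model):
  id = BigAutoField()                   # BIGINT AUTO_INCREMENT instead of INT
  datetime = DateTimeField(index=True)

  class Meta:
    database = db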

endpoints/api/logs_models_pre_oci.py

@@ -1,3 +1,5 @@
+import itertools
+
 from data import model, database
 from endpoints.api.logs_models_interface import LogEntryDataInterface, LogEntryPage, LogEntry, AggregatedLogEntry
@@ -46,15 +48,33 @@ class PreOCIModel(LogEntryDataInterface):
     performer = None
     if performer_name:
       performer = model.user.get_user(performer_name)
 
-    logs_query = model.log.get_logs_query(start_time, end_time, performer=performer,
-                                          repository=repo, namespace=namespace_name,
-                                          ignore=ignore)
-
-    logs, next_page_token = model.modelutil.paginate(logs_query, database.LogEntry, descending=True,
-                                                     page_token=page_token, limit=20)
-
-    return LogEntryPage([_create_log(log) for log in logs], next_page_token)
+    # TODO(LogMigrate): Remove the branch once we're back on LogEntry only.
+    def get_logs(m):
+      logs_query = model.log.get_logs_query(start_time, end_time, performer=performer,
+                                            repository=repo, namespace=namespace_name,
+                                            ignore=ignore, model=m)
+
+      logs, next_page_token = model.modelutil.paginate(logs_query, m,
+                                                       descending=True, page_token=page_token,
+                                                       limit=20)
+      return LogEntryPage([_create_log(log) for log in logs], next_page_token)
+
+    # First check the LogEntry2 table for the most recent logs, unless we've been expressly told
+    # to look inside the first table.
+    TOKEN_TABLE_KEY = 'ttk'
+    is_old_table = page_token is not None and page_token.get(TOKEN_TABLE_KEY) == 1
+    if is_old_table:
+      page_result = get_logs(database.LogEntry)
+    else:
+      page_result = get_logs(database.LogEntry2)
+
+    if page_result.next_page_token is None and not is_old_table:
+      page_result = page_result._replace(next_page_token={TOKEN_TABLE_KEY: 1})
+    elif is_old_table and page_result.next_page_token is not None:
+      page_result.next_page_token[TOKEN_TABLE_KEY] = 1
+
+    return page_result
 
   def get_log_entry_kinds(self):
     return model.log.get_log_entry_kinds()
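
The paging handoff above threads a table marker through the page token: clients drain LogEntry2 first, then receive a token carrying 'ttk' that redirects every later request to the old table. A simplified standalone sketch of the same scheme (get_logs_page and paginate_table are hypothetical stand-ins for the method and model.modelutil.paginate above):

TOKEN_TABLE_KEY = 'ttk'

def get_logs_page(paginate_table, page_token=None):
  # A token carrying {'ttk': 1} means LogEntry2 is exhausted and we are
  # now paging through the old LogEntry table.
  is_old_table = page_token is not None and page_token.get(TOKEN_TABLE_KEY) == 1
  table = 'LogEntry' if is_old_table else 'LogEntry2'
  logs, next_token = paginate_table(table, page_token)

  if next_token is None and not is_old_table:
    # New table drained: issue a token that redirects the next request
    # to the old table instead of ending pagination.
    next_token = {TOKEN_TABLE_KEY: 1}
  elif is_old_table and next_token is not None:
    # Preserve the marker on every token while paging the old table.
    next_token[TOKEN_TABLE_KEY] = 1
  return logs, next_token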
@@ -75,10 +95,23 @@ class PreOCIModel(LogEntryDataInterface):
     if performer_name:
       performer = model.user.get_user(performer_name)
 
-    aggregated_logs = model.log.get_aggregated_logs(start_time, end_time, performer=performer,
-                                                    repository=repo, namespace=namespace_name,
-                                                    ignore=ignore)
-
-    return [AggregatedLogEntry(log.count, log.kind_id, log.day) for log in aggregated_logs]
+    # TODO(LogMigrate): Remove the branch once we're back on LogEntry only.
+    aggregated_logs = model.log.get_aggregated_logs(start_time, end_time, performer=performer,
+                                                    repository=repo, namespace=namespace_name,
+                                                    ignore=ignore, model=database.LogEntry)
+    aggregated_logs_2 = model.log.get_aggregated_logs(start_time, end_time, performer=performer,
+                                                      repository=repo, namespace=namespace_name,
+                                                      ignore=ignore, model=database.LogEntry2)
+
+    entries = {}
+    for log in itertools.chain(aggregated_logs, aggregated_logs_2):
+      key = '%s-%s' % (log.kind_id, log.day)
+      if key in entries:
+        entries[key] = AggregatedLogEntry(log.count + entries[key].count, log.kind_id, log.day)
+      else:
+        entries[key] = AggregatedLogEntry(log.count, log.kind_id, log.day)
+
+    return entries.values()
 
 pre_oci_model = PreOCIModel()
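
To make the merge above concrete, a toy run with plain dicts in place of the query rows (the counts are chosen to match the updated test further down):

# Toy walk-through of the merge: one bucket per (kind_id, day), counts summed.
logs_new = [{'kind_id': 4, 'day': '1', 'count': 6}]   # from LogEntry2
logs_old = [{'kind_id': 4, 'day': '1', 'count': 12}]  # from LogEntry

entries = {}
for log in logs_new + logs_old:
  key = '%s-%s' % (log['kind_id'], log['day'])
  previous = entries[key]['count'] if key in entries else 0
  entries[key] = dict(log, count=previous + log['count'])

print(list(entries.values()))  # [{'kind_id': 4, 'day': '1', 'count': 18}]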

endpoints/api/superuser.py

@@ -27,6 +27,7 @@ from endpoints.api.build import get_logs_or_log_url
 from endpoints.api.superuser_models_pre_oci import (pre_oci_model, ServiceKeyDoesNotExist,
                                                     ServiceKeyAlreadyApproved,
                                                     InvalidRepositoryBuildException)
+from endpoints.api.logs_models_pre_oci import pre_oci_model as log_model
 from util.useremails import send_confirmation_email, send_recovery_email
 from util.security.ssl import load_certificate, CertInvalidException
 from util.config.validator import EXTRA_CA_DIRECTORY
@@ -137,10 +138,12 @@ class SuperUserAggregateLogs(ApiResource):
     if SuperUserPermission().can():
       (start_time, end_time) = _validate_logs_arguments(parsed_args['starttime'],
                                                         parsed_args['endtime'])
-      aggregated_logs = pre_oci_model.get_aggregated_logs(start_time, end_time)
+      # TODO(LogMigrate): Change to a unified log lookup util lib once we're back on LogEntry only.
+      aggregated_logs = log_model.get_aggregated_logs(start_time, end_time)
+      kinds = log_model.get_log_entry_kinds()
 
       return {
-        'aggregated': [log.to_dict() for log in aggregated_logs]
+        'aggregated': [log.to_dict(kinds, start_time) for log in aggregated_logs]
       }
 
     raise Unauthorized()
@@ -168,12 +171,14 @@ class SuperUserLogs(ApiResource):
       start_time = parsed_args['starttime']
       end_time = parsed_args['endtime']
 
       (start_time, end_time) = _validate_logs_arguments(start_time, end_time)
-      log_page = pre_oci_model.get_logs_query(start_time, end_time, page_token=page_token)
+      # TODO(LogMigrate): Change to a unified log lookup util lib once we're back on LogEntry only.
+      log_page = log_model.get_logs_query(start_time, end_time, page_token=page_token)
+      kinds = log_model.get_log_entry_kinds()
       return {
         'start_time': format_date(start_time),
         'end_time': format_date(end_time),
-        'logs': [log.to_dict() for log in log_page.logs],
+        'logs': [log.to_dict(kinds, include_namespace=True) for log in log_page.logs],
       }, log_page.next_page_token
 
     raise Unauthorized()

endpoints/api/test/test_logs_models_pre_oci.py

@@ -22,7 +22,7 @@ def test_get_logs_query(monkeypatch):
   monkeypatch.setattr(model.modelutil, 'paginate', paginate_mock)
 
   assert pre_oci_model.get_logs_query('start_time', 'end_time', 'preformer_namne', 'repository_name', 'namespace_name',
-                                      set(), 'page_token') == LogEntryPage([], {})
+                                      set(), None) == LogEntryPage([], {})
 
 
 def test_get_logs_query_returns_list_log_entries(monkeypatch):
def test_get_logs_query_returns_list_log_entries(monkeypatch):
@@ -52,6 +52,7 @@ def test_get_logs_query_returns_list_log_entries(monkeypatch):
                                        False, 'account_username', 'account_email', False, 1)], {'key': 'value'})
 
 
+@pytest.mark.skip('Turned off until we move back to a single LogEntry table')
 def test_get_logs_query_calls_get_repository(monkeypatch):
   repo_mock = Mock()
   performer_mock = Mock()
@@ -109,7 +110,8 @@ def test_does_repo_exist_returns_true(monkeypatch):
 
 def test_get_aggregated_logs(monkeypatch):
   get_aggregated_logs_mock = Mock()
-  get_aggregated_logs_mock.return_value = [AttrDict({'day': '1', 'kind_id': 4, 'count': 12})]
+  get_aggregated_logs_mock.side_effect = [[AttrDict({'day': '1', 'kind_id': 4, 'count': 6})],
+                                          [AttrDict({'day': '1', 'kind_id': 4, 'count': 12})]]
   monkeypatch.setattr(model.log, 'get_aggregated_logs', get_aggregated_logs_mock)
 
   repo_mock = Mock()
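
The test swaps return_value for side_effect because get_aggregated_logs is now called twice, once per table. Giving Mock.side_effect an iterable makes each successive call return the next item, illustrated here with unittest.mock and placeholder values:

from unittest.mock import Mock

# side_effect as an iterable: call N returns item N.
mock = Mock(side_effect=[['first-table rows'], ['second-table rows']])
assert mock() == ['first-table rows']   # consumed by the LogEntry call
assert mock() == ['second-table rows']  # consumed by the LogEntry2 call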
@@ -125,4 +127,4 @@ def test_get_aggregated_logs(monkeypatch):
   actual = pre_oci_model.get_aggregated_logs('start_time', 'end_time', 'performer_name', 'repository_name',
                                              'namespace_name', set())
 
-  assert actual == [AggregatedLogEntry(12, 4, '1')]
+  assert actual == [AggregatedLogEntry(18, 4, '1')]