Add LogEntry3 table without the extra indexes and switch to writing to it

Joseph Schorr 2019-01-03 13:50:43 -05:00
parent b6db002729
commit cdb49dbfd3
12 changed files with 114 additions and 49 deletions


@@ -50,7 +50,7 @@ class PreOCIModel(LogEntryDataInterface):
     if performer_name:
       performer = model.user.get_user(performer_name)
 
-    # TODO(LogMigrate): Remove the branch once we're back on LogEntry only.
+    # TODO(LogMigrate): Remove the branch once we're back on a single table.
     def get_logs(m):
       logs_query = model.log.get_logs_query(start_time, end_time, performer=performer,
                                             repository=repo, namespace=namespace_name,
@@ -62,19 +62,19 @@ class PreOCIModel(LogEntryDataInterface):
                                                 max_page=app.config['ACTION_LOG_MAX_PAGE'])
       return LogEntryPage([create_log(log) for log in logs], next_page_token)
 
-    # First check the LogEntry2 table for the most recent logs, unless we've been expressly told
-    # to look inside the "second" table.
-    TOKEN_TABLE_KEY2 = 'ttk2'
-    is_temp_table = page_token is not None and page_token.get(TOKEN_TABLE_KEY2) == 1
-    if is_temp_table:
-      page_result = get_logs(database.LogEntry)
-    else:
-      page_result = get_logs(database.LogEntry2)
+    # First check the LogEntry3 table for the most recent logs, unless we've been expressly told
+    # to look inside the other tables.
+    TOKEN_TABLE_ID = 'tti'
+    tables = [database.LogEntry3, database.LogEntry2, database.LogEntry]
 
-    if page_result.next_page_token is None and not is_temp_table:
-      page_result = page_result._replace(next_page_token={TOKEN_TABLE_KEY2: 1})
-    elif is_temp_table and page_result.next_page_token is not None:
-      page_result.next_page_token[TOKEN_TABLE_KEY2] = 1
+    table_index = 0
+    table_specified = page_token is not None and page_token.get(TOKEN_TABLE_ID) is not None
+    if table_specified:
+      table_index = page_token.get(TOKEN_TABLE_ID)
+
+    page_result = get_logs(tables[table_index])
+    if page_result.next_page_token is None and table_index < len(tables) - 1:
+      page_result = page_result._replace(next_page_token={TOKEN_TABLE_ID: table_index + 1})
 
     return page_result
 
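The new pagination scheme chains the three tables behind a single page token: the 'tti' field records which table the next page should be read from, and once a table is exhausted the returned token advances the index so the following request falls through to the next older table. Below is a minimal standalone sketch of that technique, assuming a hypothetical query_table(table_name, page_token) helper that returns a page whose next_page_token is None when its table is exhausted; the names here are illustrative, not Quay's actual model API.

from collections import namedtuple

# Stand-in for the page type returned by the real model code.
LogPage = namedtuple('LogPage', ['logs', 'next_page_token'])

TOKEN_TABLE_ID = 'tti'
TABLES = ['LogEntry3', 'LogEntry2', 'LogEntry']  # newest table first

def lookup_page(query_table, page_token=None):
  # Resume from whichever table the previous token recorded (default: newest).
  table_index = page_token.get(TOKEN_TABLE_ID, 0) if page_token else 0
  page = query_table(TABLES[table_index], page_token)

  # When a table runs dry, hand back a token pointing at the next table so the
  # subsequent request transparently continues the scan there.
  if page.next_page_token is None and table_index < len(TABLES) - 1:
    page = page._replace(next_page_token={TOKEN_TABLE_ID: table_index + 1})
  return page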
@@ -97,16 +97,19 @@ class PreOCIModel(LogEntryDataInterface):
     if performer_name:
       performer = model.user.get_user(performer_name)
 
-    # TODO(LogMigrate): Remove the branch once we're back on LogEntry only.
+    # TODO(LogMigrate): Remove the branch once we're back on a single table.
     aggregated_logs = model.log.get_aggregated_logs(start_time, end_time, performer=performer,
                                                     repository=repo, namespace=namespace_name,
                                                     ignore=ignore, model=database.LogEntry)
     aggregated_logs_2 = model.log.get_aggregated_logs(start_time, end_time, performer=performer,
                                                       repository=repo, namespace=namespace_name,
                                                       ignore=ignore, model=database.LogEntry2)
+    aggregated_logs_3 = model.log.get_aggregated_logs(start_time, end_time, performer=performer,
+                                                      repository=repo, namespace=namespace_name,
+                                                      ignore=ignore, model=database.LogEntry3)
 
     entries = {}
-    for log in itertools.chain(aggregated_logs, aggregated_logs_2):
+    for log in itertools.chain(aggregated_logs, aggregated_logs_2, aggregated_logs_3):
       key = '%s-%s' % (log.kind_id, log.day)
       if key in entries:
         entries[key] = AggregatedLogEntry(log.count + entries[key].count, log.kind_id, log.day)
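The aggregation path now fans the same query out to all three tables and merges the rows in memory, summing counts that share a (kind_id, day) key. A self-contained sketch of that merge, using a stand-in AggregatedLogEntry namedtuple and a hypothetical merge_aggregated helper rather than Quay's real types:

import itertools
from collections import namedtuple

# Stand-in for the AggregatedLogEntry type used by the real interface.
AggregatedLogEntry = namedtuple('AggregatedLogEntry', ['count', 'kind_id', 'day'])

def merge_aggregated(*per_table_results):
  # Sum the counts of rows that share the same (kind_id, day) across tables.
  entries = {}
  for log in itertools.chain(*per_table_results):
    key = '%s-%s' % (log.kind_id, log.day)
    if key in entries:
      entries[key] = AggregatedLogEntry(log.count + entries[key].count, log.kind_id, log.day)
    else:
      entries[key] = AggregatedLogEntry(log.count, log.kind_id, log.day)
  return list(entries.values())

# Rows of 6, 12 and 3 for the same kind/day merge into a single entry of 21,
# which is exactly what the updated test below asserts.
assert merge_aggregated([AggregatedLogEntry(6, 4, '1')],
                        [AggregatedLogEntry(12, 4, '1')],
                        [AggregatedLogEntry(3, 4, '1')]) == [AggregatedLogEntry(21, 4, '1')]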


@@ -134,8 +134,6 @@ class SuperUserAggregateLogs(ApiResource):
     if SuperUserPermission().can():
       (start_time, end_time) = _validate_logs_arguments(parsed_args['starttime'],
                                                         parsed_args['endtime'])
-
-      # TODO(LogMigrate): Change to a unified log lookup util lib once we're back on LogEntry only.
       aggregated_logs = log_model.get_aggregated_logs(start_time, end_time)
       kinds = log_model.get_log_entry_kinds()
       return {
@@ -166,9 +164,8 @@ class SuperUserLogs(ApiResource):
     if SuperUserPermission().can():
       start_time = parsed_args['starttime']
       end_time = parsed_args['endtime']
 
-      (start_time, end_time) = _validate_logs_arguments(start_time, end_time)
-      # TODO(LogMigrate): Change to a unified log lookup util lib once we're back on LogEntry only.
+      (start_time, end_time) = _validate_logs_arguments(start_time, end_time)
       log_page = log_model.get_logs_query(start_time, end_time, page_token=page_token)
       kinds = log_model.get_log_entry_kinds()
       return {


@@ -111,7 +111,8 @@ def test_does_repo_exist_returns_true(monkeypatch):
 def test_get_aggregated_logs(monkeypatch):
   get_aggregated_logs_mock = Mock()
   get_aggregated_logs_mock.side_effect = [[AttrDict({'day': '1', 'kind_id': 4, 'count': 6})],
-                                          [AttrDict({'day': '1', 'kind_id': 4, 'count': 12})]]
+                                          [AttrDict({'day': '1', 'kind_id': 4, 'count': 12})],
+                                          [AttrDict({'day': '1', 'kind_id': 4, 'count': 3})]]
   monkeypatch.setattr(model.log, 'get_aggregated_logs', get_aggregated_logs_mock)
 
   repo_mock = Mock()
@@ -127,4 +128,4 @@ def test_get_aggregated_logs(monkeypatch):
   actual = pre_oci_model.get_aggregated_logs('start_time', 'end_time', 'performer_name', 'repository_name',
                                              'namespace_name', set())
 
-  assert actual == [AggregatedLogEntry(18, 4, '1')]
+  assert actual == [AggregatedLogEntry(21, 4, '1')]
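The updated expectation follows directly from the merge: 6 + 12 + 3 = 21. The test drives the three per-table queries through a single mock, relying on Mock.side_effect with a list to return one element per successive call; a standalone illustration of that mechanic, independent of the Quay fixtures:

from mock import Mock  # or, on Python 3: from unittest.mock import Mock

m = Mock()
m.side_effect = [[{'count': 6}], [{'count': 12}], [{'count': 3}]]
assert m() == [{'count': 6}]    # first call: the LogEntry query
assert m() == [{'count': 12}]   # second call: the LogEntry2 query
assert m() == [{'count': 3}]    # third call: the new LogEntry3 query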