import itertools

from app import app
from data import model, database
from endpoints.api.logs_models_interface import LogEntryDataInterface, LogEntryPage, LogEntry, AggregatedLogEntry


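# NOTE: While the action-log migration is in flight, logs are spread across three
# tables (LogEntry3 holds the newest entries, then LogEntry2 and LogEntry); the
# queries below fan out over all of them. See the TODO(LogMigrate) markers for
# the eventual cleanup once everything is back in a single table.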
def create_log(log):
  # log.account / log.performer may be missing on some rows, so default every
  # derived field to None and fall through on AttributeError.
  account_organization = None
  account_username = None
  account_email = None
  account_robot = None
  try:
    account_organization = log.account.organization
    account_username = log.account.username
    account_email = log.account.email
    account_robot = log.account.robot
  except AttributeError:
    pass

  performer_robot = None
  performer_username = None
  performer_email = None

  try:
    performer_robot = log.performer.robot
    performer_username = log.performer.username
    performer_email = log.performer.email
  except AttributeError:
    pass

  return LogEntry(log.metadata_json, log.ip, log.datetime, performer_email, performer_username,
                  performer_robot, account_organization, account_username,
                  account_email, account_robot, log.kind_id)


class PreOCIModel(LogEntryDataInterface):
  """
  PreOCIModel implements the data model for action logs using a database schema
  from before it was changed to support the OCI specification.
  """

  def get_logs_query(self, start_time, end_time, performer_name=None, repository_name=None,
                     namespace_name=None, ignore=None, page_token=None):
    repo = None
    if repository_name and namespace_name:
      repo = model.repository.get_repository(namespace_name, repository_name)

    performer = None
    if performer_name:
      performer = model.user.get_user(performer_name)

    # TODO(LogMigrate): Remove the branch once we're back on a single table.
    def get_logs(m):
      logs_query = model.log.get_logs_query(start_time, end_time, performer=performer,
                                            repository=repo, namespace=namespace_name,
                                            ignore=ignore, model=m)

      logs, next_page_token = model.modelutil.paginate(logs_query, m,
                                                       descending=True, page_token=page_token,
                                                       limit=20,
                                                       max_page=app.config['ACTION_LOG_MAX_PAGE'])
      return LogEntryPage([create_log(log) for log in logs], next_page_token)

    # First check the LogEntry3 table for the most recent logs, unless we've been expressly told
    # to look inside the other tables.
    TOKEN_TABLE_ID = 'tti'
    tables = [database.LogEntry3, database.LogEntry2, database.LogEntry]

    table_index = 0
    table_specified = page_token is not None and page_token.get(TOKEN_TABLE_ID) is not None
    if table_specified:
      table_index = page_token.get(TOKEN_TABLE_ID)

    page_result = get_logs(tables[table_index])
    if page_result.next_page_token is None and table_index < len(tables) - 1:
      page_result = page_result._replace(next_page_token={TOKEN_TABLE_ID: table_index + 1})

    return page_result

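  # Paging sketch for get_logs_query above: when a page drains LogEntry3 with no
  # database-level continuation, the synthetic token {'tti': 1} routes the next
  # request to LogEntry2, and {'tti': 2} in turn routes to the oldest LogEntry
  # table; only after that table is exhausted does next_page_token become None.
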
  def get_log_entry_kinds(self):
    return model.log.get_log_entry_kinds()

  def repo_exists(self, namespace_name, repository_name):
    return model.repository.get_repository(namespace_name, repository_name) is not None

  def get_aggregated_logs(self, start_time, end_time, performer_name=None, repository_name=None,
                          namespace_name=None, ignore=None):
    repo = None
    if repository_name and namespace_name:
      repo = model.repository.get_repository(namespace_name, repository_name)

    performer = None
    if performer_name:
      performer = model.user.get_user(performer_name)

    # TODO(LogMigrate): Remove the branch once we're back on a single table.
    aggregated_logs = model.log.get_aggregated_logs(start_time, end_time, performer=performer,
                                                    repository=repo, namespace=namespace_name,
                                                    ignore=ignore, model=database.LogEntry)
    aggregated_logs_2 = model.log.get_aggregated_logs(start_time, end_time, performer=performer,
                                                      repository=repo, namespace=namespace_name,
                                                      ignore=ignore, model=database.LogEntry2)
    aggregated_logs_3 = model.log.get_aggregated_logs(start_time, end_time, performer=performer,
                                                      repository=repo, namespace=namespace_name,
                                                      ignore=ignore, model=database.LogEntry3)

    # Merge the per-table aggregates by (kind, day) so each pair is counted once.
    # For example, counts of 4 (from LogEntry3) and 1 (from LogEntry2) for the
    # same kind and day merge into a single AggregatedLogEntry with count 5.
    entries = {}
    for log in itertools.chain(aggregated_logs, aggregated_logs_2, aggregated_logs_3):
      key = '%s-%s' % (log.kind_id, log.day)
      if key in entries:
        entries[key] = AggregatedLogEntry(log.count + entries[key].count, log.kind_id, log.day)
      else:
        entries[key] = AggregatedLogEntry(log.count, log.kind_id, log.day)

    return entries.values()


pre_oci_model = PreOCIModel()
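
# Example usage (an illustrative sketch, not part of the module: the namespace
# and repository names below are hypothetical, and the LogEntry/LogEntryPage
# attribute names are assumed from how the namedtuples are constructed above):
#
#   from datetime import datetime, timedelta
#
#   end = datetime.utcnow()
#   start = end - timedelta(days=7)
#
#   page = pre_oci_model.get_logs_query(start, end, namespace_name='devtable',
#                                       repository_name='simple')
#   for entry in page.logs:
#     print(entry.kind_id, entry.datetime)
#
#   # A None next_page_token means all three log tables have been exhausted.
#   if page.next_page_token is not None:
#     page = pre_oci_model.get_logs_query(start, end, namespace_name='devtable',
#                                         repository_name='simple',
#                                         page_token=page.next_page_token)
#
#   for agg in pre_oci_model.get_aggregated_logs(start, end, namespace_name='devtable'):
#     print(agg.kind_id, agg.day, agg.count)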