refactor(endpoints/api/logs*): Refactor to new data model
Moves the logs endpoints away from the peewee data model to the data model interface. [TESTING -> locally with docker compose]

Issue: https://coreosdev.atlassian.net/browse/QUAY-628

- [ ] It works!
- [ ] Comments provide sufficient explanations for the next contributor
- [ ] Tests cover changes and corner cases
- [ ] Follows Quay syntax patterns and format
parent b0aeb97198
commit a7c818543d

5 changed files with 522 additions and 98 deletions
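The data-model interface itself lives in a separate file in this commit (endpoints/api/logs_models_pre_oci.py, imported below as `model`), so only its call sites appear in this diff: plain functions keyed by names rather than peewee rows, returning value objects that know how to render themselves. A minimal sketch of what that interface presumably exposes — class and field names are inferred from usage below, not copied from the committed file:

from collections import namedtuple


class LogEntryPage(namedtuple('LogEntryPage', ['logs', 'next_page_token'])):
  """ One page of Log results plus an opaque token for the following page. """


class Log(namedtuple('Log', ['kind_id', 'metadata_json', 'ip', 'datetime',
                             'performer', 'account'])):
  """ A single usage-log entry, decoupled from the peewee LogEntry row. """

  def to_dict(self, kinds, include_namespace):
    """ Renders the entry for the API response (mirrors the removed log_view). """


class LogsDataInterface(object):
  """ Interface the logs endpoints use instead of data.model / data.database. """

  def get_log_entry_kinds(self):
    """ Returns a map from log-kind id to log-kind name. """

  def repo_exists(self, namespace_name, repository_name):
    """ Returns whether the given repository exists. """

  def get_logs_query(self, start_time, end_time, performer_name, repository_name,
                     namespace_name, ignore, page_token):
    """ Returns a LogEntryPage of logs matching the filters. """

  def get_aggregated_logs(self, start_time, end_time, performer_name=None,
                          repository_name=None, namespace_name=None, ignore=None):
    """ Returns per-day aggregated log counts matching the filters. """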
endpoints/api/logs.py

@@ -1,79 +1,22 @@
 """ Access usage logs for organizations or repositories. """

-import json
-
 from datetime import datetime, timedelta
-from dateutil.relativedelta import relativedelta

 from endpoints.api import (resource, nickname, ApiResource, query_param, parse_args,
                            RepositoryParamResource, require_repo_admin, related_user_resource,
                            format_date, require_user_admin, path_param, require_scope, page_support)
+from endpoints.api.logs_models_pre_oci import pre_oci_model as model
 from endpoints.exception import Unauthorized, NotFound
 from auth.permissions import AdministerOrganizationPermission
 from auth.auth_context import get_authenticated_user
-from data import model, database
 from auth import scopes
 from app import avatar
-from tzlocal import get_localzone

 LOGS_PER_PAGE = 20
 SERVICE_LEVEL_LOG_KINDS = set(['service_key_create', 'service_key_approve', 'service_key_delete',
                                'service_key_modify', 'service_key_extend', 'service_key_rotate'])


-def log_view(log, kinds, include_namespace):
-  view = {
-    'kind': kinds[log.kind_id],
-    'metadata': json.loads(log.metadata_json),
-    'ip': log.ip,
-    'datetime': format_date(log.datetime),
-  }
-
-  if log.performer and log.performer.username:
-    view['performer'] = {
-      'kind': 'user',
-      'name': log.performer.username,
-      'is_robot': log.performer.robot,
-      'avatar': avatar.get_data_for_user(log.performer),
-    }
-
-  if include_namespace:
-    if log.account and log.account.username:
-      if log.account.organization:
-        view['namespace'] = {
-          'kind': 'org',
-          'name': log.account.username,
-          'avatar': avatar.get_data_for_org(log.account),
-        }
-      else:
-        view['namespace'] = {
-          'kind': 'user',
-          'name': log.account.username,
-          'avatar': avatar.get_data_for_user(log.account),
-        }
-
-  return view
-
-
-def aggregated_log_view(log, kinds, start_time):
-  # Because we aggregate based on the day of the month in SQL, we only have that information.
-  # Therefore, create a synthetic date based on the day and the month of the start time.
-  # Logs are allowed for a maximum period of one week, so this calculation should always work.
-  synthetic_date = datetime(start_time.year, start_time.month, int(log.day), tzinfo=get_localzone())
-  if synthetic_date.day < start_time.day:
-    synthetic_date = synthetic_date + relativedelta(months=1)
-
-  view = {
-    'kind': kinds[log.kind_id],
-    'count': log.count,
-    'datetime': format_date(synthetic_date),
-  }
-
-  return view
-
-
-def _validate_logs_arguments(start_time, end_time, performer_name):
-  performer = None
-  if performer_name:
-    performer = model.user.get_user(performer_name)
-
+def _validate_logs_arguments(start_time, end_time):
   if start_time:
     try:
       start_time = datetime.strptime(start_time + ' UTC', '%m/%d/%Y %Z')
@@ -81,7 +24,7 @@ def _validate_logs_arguments(start_time, end_time, performer_name):
       start_time = None

   if not start_time:
-    start_time = datetime.today() - timedelta(7) # One week
+    start_time = datetime.today() - timedelta(7) # One week

   if end_time:
     try:
@@ -93,39 +36,37 @@ def _validate_logs_arguments(start_time, end_time, performer_name):
   if not end_time:
     end_time = datetime.today()

-  return (start_time, end_time, performer)
+  return start_time, end_time


-def get_logs(start_time, end_time, performer_name=None, repository=None, namespace=None,
+def get_logs(start_time, end_time, performer_name=None, repository_name=None, namespace_name=None,
              page_token=None, ignore=None):
-  (start_time, end_time, performer) = _validate_logs_arguments(start_time, end_time, performer_name)
-  kinds = model.log.get_log_entry_kinds()
-  logs_query = model.log.get_logs_query(start_time, end_time, performer=performer,
-                                        repository=repository, namespace=namespace,
-                                        ignore=ignore)
+  (start_time, end_time) = _validate_logs_arguments(start_time, end_time)

-  logs, next_page_token = model.modelutil.paginate(logs_query, database.LogEntry, descending=True,
-                                                   page_token=page_token, limit=LOGS_PER_PAGE)
+  kinds = model.get_log_entry_kinds()
+  log_entry_page = model.get_logs_query(start_time, end_time, performer_name, repository_name,
+                                        namespace_name, ignore, page_token)

-  include_namespace = namespace is None and repository is None
+  include_namespace = namespace_name is None and repository_name is None

   return {
-    'start_time': format_date(start_time),
-    'end_time': format_date(end_time),
-    'logs': [log_view(log, kinds, include_namespace) for log in logs],
-  }, next_page_token
+    'start_time': format_date(start_time),
+    'end_time': format_date(end_time),
+    'logs': [log.to_dict(kinds, include_namespace) for log in log_entry_page.logs],
+  }, log_entry_page.next_page_token


 def get_aggregate_logs(start_time, end_time, performer_name=None, repository=None, namespace=None,
                        ignore=None):
-  (start_time, end_time, performer) = _validate_logs_arguments(start_time, end_time, performer_name)
+  (start_time, end_time) = _validate_logs_arguments(start_time, end_time)

-  kinds = model.log.get_log_entry_kinds()
-  aggregated_logs = model.log.get_aggregated_logs(start_time, end_time, performer=performer,
-                                                  repository=repository, namespace=namespace,
-                                                  ignore=ignore)
+  kinds = model.get_log_entry_kinds()
+  aggregated_logs = model.get_aggregated_logs(start_time, end_time, performer_name=performer_name,
+                                              repository_name=repository, namespace_name=namespace,
+                                              ignore=ignore)

   return {
-    'aggregated': [aggregated_log_view(log, kinds, start_time) for log in aggregated_logs]
+    'aggregated': [log.to_dict(kinds, start_time) for log in aggregated_logs]
   }
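The removed log_view and aggregated_log_view helpers are not deleted outright; per the commit message their rendering logic moves behind the data-model objects. As an illustration, the aggregated view would land in a to_dict() roughly like the sketch below — the class name and constructor are hypothetical, while the synthetic-date logic is carried over from the removed helper:

from datetime import datetime

from dateutil.relativedelta import relativedelta
from tzlocal import get_localzone

from endpoints.api import format_date


class AggregatedLog(object):
  """ Hypothetical value object for one day's worth of aggregated log counts. """

  def __init__(self, kind_id, count, day):
    self.kind_id = kind_id
    self.count = count
    self.day = day

  def to_dict(self, kinds, start_time):
    # The SQL aggregation keeps only the day of the month, so rebuild a full
    # date from the query's start time; a smaller day number means the bucket
    # falls in the following month.
    synthetic_date = datetime(start_time.year, start_time.month, int(self.day),
                              tzinfo=get_localzone())
    if synthetic_date.day < start_time.day:
      synthetic_date = synthetic_date + relativedelta(months=1)

    return {
      'kind': kinds[self.kind_id],
      'count': self.count,
      'datetime': format_date(synthetic_date),
    }

Worked example: for a window starting January 28th, a bucket with day 2 yields a synthetic date of January 2nd; since 2 < 28 it is bumped one month to February 2nd. Because the window is capped at one week, the wrap-around is never ambiguous.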
@@ -133,6 +74,7 @@ def get_aggregate_logs(start_time, end_time, performer_name=None, repository=Non
 @path_param('repository', 'The full path of the repository. e.g. namespace/name')
 class RepositoryLogs(RepositoryParamResource):
   """ Resource for fetching logs for the specific repository. """

   @require_repo_admin
   @nickname('listRepoLogs')
   @parse_args()
@@ -142,19 +84,19 @@ class RepositoryLogs(RepositoryParamResource):
   @page_support()
   def get(self, namespace, repository, page_token, parsed_args):
     """ List the logs for the specified repository. """
-    repo = model.repository.get_repository(namespace, repository)
-    if not repo:
+    if model.repo_exists(namespace, repository) is False:
       raise NotFound()

     start_time = parsed_args['starttime']
     end_time = parsed_args['endtime']
-    return get_logs(start_time, end_time, repository=repo, page_token=page_token,
+    return get_logs(start_time, end_time, repository_name=repository, page_token=page_token, namespace_name=namespace,
                     ignore=SERVICE_LEVEL_LOG_KINDS)


 @resource('/v1/user/logs')
 class UserLogs(ApiResource):
   """ Resource for fetching logs for the current user. """

   @require_user_admin
   @nickname('listUserLogs')
   @parse_args()
@@ -169,7 +111,7 @@ class UserLogs(ApiResource):
     end_time = parsed_args['endtime']

     user = get_authenticated_user()
-    return get_logs(start_time, end_time, performer_name=performer_name, namespace=user.username,
+    return get_logs(start_time, end_time, performer_name=performer_name, namespace_name=user.username,
                     page_token=page_token, ignore=SERVICE_LEVEL_LOG_KINDS)
@@ -178,6 +120,7 @@ class UserLogs(ApiResource):
 @related_user_resource(UserLogs)
 class OrgLogs(ApiResource):
   """ Resource for fetching logs for the entire organization. """

   @nickname('listOrgLogs')
   @parse_args()
   @query_param('starttime', 'Earliest time from which to get logs. (%m/%d/%Y %Z)', type=str)
@@ -194,7 +137,7 @@ class OrgLogs(ApiResource):
       start_time = parsed_args['starttime']
       end_time = parsed_args['endtime']

-      return get_logs(start_time, end_time, namespace=orgname, performer_name=performer_name,
+      return get_logs(start_time, end_time, namespace_name=orgname, performer_name=performer_name,
                       page_token=page_token, ignore=SERVICE_LEVEL_LOG_KINDS)

     raise Unauthorized()
@@ -204,6 +147,7 @@ class OrgLogs(ApiResource):
 @path_param('repository', 'The full path of the repository. e.g. namespace/name')
 class RepositoryAggregateLogs(RepositoryParamResource):
   """ Resource for fetching aggregated logs for the specific repository. """

   @require_repo_admin
   @nickname('getAggregateRepoLogs')
   @parse_args()
@@ -211,19 +155,19 @@ class RepositoryAggregateLogs(RepositoryParamResource):
   @query_param('endtime', 'Latest time to which to get logs (%m/%d/%Y %Z)', type=str)
   def get(self, namespace, repository, parsed_args):
     """ Returns the aggregated logs for the specified repository. """
-    repo = model.repository.get_repository(namespace, repository)
-    if not repo:
+    if model.repo_exists(namespace, repository) is False:
       raise NotFound()

     start_time = parsed_args['starttime']
     end_time = parsed_args['endtime']
-    return get_aggregate_logs(start_time, end_time, repository=repo,
+    return get_aggregate_logs(start_time, end_time, repository=repository, namespace=namespace,
                               ignore=SERVICE_LEVEL_LOG_KINDS)


 @resource('/v1/user/aggregatelogs')
 class UserAggregateLogs(ApiResource):
   """ Resource for fetching aggregated logs for the current user. """

   @require_user_admin
   @nickname('getAggregateUserLogs')
   @parse_args()

@@ -246,6 +190,7 @@ class UserAggregateLogs(ApiResource):
 @related_user_resource(UserLogs)
 class OrgAggregateLogs(ApiResource):
   """ Resource for fetching aggregate logs for the entire organization. """

   @nickname('getAggregateOrgLogs')
   @parse_args()
   @query_param('starttime', 'Earliest time from which to get logs. (%m/%d/%Y %Z)', type=str)
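Finally, a quick way to smoke-test the refactored endpoints against a local deployment, matching the "[TESTING -> locally with docker compose]" note above. The /api/v1 prefix and bearer-token auth are standard Quay conventions; the host, token, and dates below are placeholders:

import requests

QUAY = 'http://localhost:8080'  # placeholder local instance
TOKEN = 'my-oauth-token'        # placeholder OAuth token for the account

# List the current user's logs for an explicit window (format: %m/%d/%Y).
resp = requests.get(QUAY + '/api/v1/user/logs',
                    params={'starttime': '01/01/2018', 'endtime': '01/07/2018'},
                    headers={'Authorization': 'Bearer ' + TOKEN})
resp.raise_for_status()

for log in resp.json()['logs']:
  print('%s %s %s' % (log['datetime'], log['kind'], log['ip']))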