Interface out all action log data model operations
This will allow us to reimplement the logs data model against a non-database system in the near future
parent a156c91962
commit b773a18ed8
26 changed files with 714 additions and 902 deletions
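For context on the refactoring below: the endpoints stop calling the pre-OCI database model directly and instead talk only to the logs_model facade (lookup_logs, get_aggregated_log_counts, queue_logs_export). The following is a minimal sketch of the kind of interface this implies; the class name ActionLogsDataInterface and the exact signatures are assumptions inferred from the call sites visible in this diff, not taken verbatim from the commit.

# Sketch only: an abstract interface that a database-backed or non-database
# logs model could both implement. Names/signatures are inferred, not exact.
from abc import ABCMeta, abstractmethod

from six import add_metaclass


@add_metaclass(ABCMeta)
class ActionLogsDataInterface(object):
  """ Interface for all action log data model operations, so the backing store
      can be swapped for a non-database system without touching the endpoints.
  """

  @abstractmethod
  def lookup_logs(self, start_datetime, end_datetime, performer_name=None,
                  repository_name=None, namespace_name=None, filter_kinds=None,
                  page_token=None, max_page_count=None):
    """ Returns a page of matching log entries and an opaque next-page token. """

  @abstractmethod
  def get_aggregated_log_counts(self, start_datetime, end_datetime, performer_name=None,
                                repository_name=None, namespace_name=None, filter_kinds=None):
    """ Returns aggregated counts of log entries in the given time range. """

  @abstractmethod
  def queue_logs_export(self, start_datetime, end_datetime, export_action_logs_queue,
                        namespace_name, repository_name=None, callback_url=None,
                        callback_email=None):
    """ Queues an export of logs and returns an export id, or None if invalid. """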
@@ -1,81 +1,70 @@
 """ Access usage logs for organizations or repositories. """
-import json
-import uuid
-
 from datetime import datetime, timedelta

 from flask import request

 import features

-from app import export_action_logs_queue
+from app import app, export_action_logs_queue, avatar
+from auth.permissions import AdministerOrganizationPermission
+from auth.auth_context import get_authenticated_user
+from auth import scopes
+from data.logs_model import logs_model
+from data.registry_model import registry_model
 from endpoints.api import (resource, nickname, ApiResource, query_param, parse_args,
                            RepositoryParamResource, require_repo_admin, related_user_resource,
                            format_date, require_user_admin, path_param, require_scope, page_support,
                            validate_json_request, InvalidRequest, show_if)
-from data import model as data_model
-from endpoints.api.logs_models_pre_oci import pre_oci_model as model
 from endpoints.exception import Unauthorized, NotFound
-from auth.permissions import AdministerOrganizationPermission
-from auth.auth_context import get_authenticated_user
-from auth import scopes


 LOGS_PER_PAGE = 20
 SERVICE_LEVEL_LOG_KINDS = set(['service_key_create', 'service_key_approve', 'service_key_delete',
                                'service_key_modify', 'service_key_extend', 'service_key_rotate'])


+def _parse_datetime(dt_string):
+  if not dt_string:
+    return None
+
+  try:
+    return datetime.strptime(dt_string + ' UTC', '%m/%d/%Y %Z')
+  except ValueError:
+    return None
+
+
 def _validate_logs_arguments(start_time, end_time):
-  if start_time:
-    try:
-      start_time = datetime.strptime(start_time + ' UTC', '%m/%d/%Y %Z')
-    except ValueError:
-      start_time = None
-
-  if not start_time:
-    start_time = datetime.today() - timedelta(7) # One week
-
-  if end_time:
-    try:
-      end_time = datetime.strptime(end_time + ' UTC', '%m/%d/%Y %Z')
-      end_time = end_time + timedelta(days=1)
-    except ValueError:
-      end_time = None
-
-  if not end_time:
-    end_time = datetime.today()
-
+  start_time = _parse_datetime(start_time) or (datetime.today() - timedelta(days=1))
+  end_time = _parse_datetime(end_time) or datetime.today()
+  end_time = end_time + timedelta(days=1)
   return start_time, end_time


-def get_logs(start_time, end_time, performer_name=None, repository_name=None, namespace_name=None,
-             page_token=None, ignore=None):
+def _get_logs(start_time, end_time, performer_name=None, repository_name=None, namespace_name=None,
+              page_token=None, filter_kinds=None):
   (start_time, end_time) = _validate_logs_arguments(start_time, end_time)
-
-  kinds = model.get_log_entry_kinds()
-  log_entry_page = model.get_logs_query(start_time, end_time, performer_name, repository_name,
-                                        namespace_name, ignore, page_token)
+  log_entry_page = logs_model.lookup_logs(start_time, end_time, performer_name, repository_name,
                                           namespace_name, filter_kinds, page_token,
                                           app.config['ACTION_LOG_MAX_PAGE'])

   include_namespace = namespace_name is None and repository_name is None
   return {
     'start_time': format_date(start_time),
     'end_time': format_date(end_time),
-    'logs': [log.to_dict(kinds, include_namespace) for log in log_entry_page.logs],
+    'logs': [log.to_dict(avatar, include_namespace) for log in log_entry_page.logs],
   }, log_entry_page.next_page_token


-def get_aggregate_logs(start_time, end_time, performer_name=None, repository=None, namespace=None,
-                       ignore=None):
+def _get_aggregate_logs(start_time, end_time, performer_name=None, repository=None, namespace=None,
+                        filter_kinds=None):
   (start_time, end_time) = _validate_logs_arguments(start_time, end_time)
-
-  kinds = model.get_log_entry_kinds()
-  aggregated_logs = model.get_aggregated_logs(start_time, end_time, performer_name=performer_name,
-                                              repository_name=repository, namespace_name=namespace,
-                                              ignore=ignore)
+  aggregated_logs = logs_model.get_aggregated_log_counts(start_time, end_time,
                                                          performer_name=performer_name,
                                                          repository_name=repository,
                                                          namespace_name=namespace,
                                                          filter_kinds=filter_kinds)

   return {
-    'aggregated': [log.to_dict(kinds, start_time) for log in aggregated_logs]
+    'aggregated': [log.to_dict() for log in aggregated_logs]
   }

@@ -87,18 +76,20 @@ class RepositoryLogs(RepositoryParamResource):
   @require_repo_admin
   @nickname('listRepoLogs')
   @parse_args()
-  @query_param('starttime', 'Earliest time from which to get logs. The time should be formatted "%m/%d/%Y" in UTC.', type=str)
-  @query_param('endtime', 'Latest time to which to get logs. The time should be formatted "%m/%d/%Y" in UTC.', type=str)
+  @query_param('starttime', 'Earliest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
+  @query_param('endtime', 'Latest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
   @page_support()
   def get(self, namespace, repository, page_token, parsed_args):
     """ List the logs for the specified repository. """
-    if model.repo_exists(namespace, repository) is False:
+    if registry_model.lookup_repository(namespace, repository) is None:
       raise NotFound()

     start_time = parsed_args['starttime']
     end_time = parsed_args['endtime']
-    return get_logs(start_time, end_time, repository_name=repository, page_token=page_token,
-                    namespace_name=namespace)
+    return _get_logs(start_time, end_time,
+                     repository_name=repository,
+                     page_token=page_token,
+                     namespace_name=namespace)


 @resource('/v1/user/logs')

@@ -108,8 +99,8 @@ class UserLogs(ApiResource):
   @require_user_admin
   @nickname('listUserLogs')
   @parse_args()
-  @query_param('starttime', 'Earliest time from which to get logs. (%m/%d/%Y %Z)', type=str)
-  @query_param('endtime', 'Latest time to which to get logs. (%m/%d/%Y %Z)', type=str)
+  @query_param('starttime', 'Earliest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
+  @query_param('endtime', 'Latest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
   @query_param('performer', 'Username for which to filter logs.', type=str)
   @page_support()
   def get(self, parsed_args, page_token):

@@ -119,9 +110,11 @@ class UserLogs(ApiResource):
     end_time = parsed_args['endtime']

     user = get_authenticated_user()
-    return get_logs(start_time, end_time, performer_name=performer_name,
-                    namespace_name=user.username, page_token=page_token,
-                    ignore=SERVICE_LEVEL_LOG_KINDS)
+    return _get_logs(start_time, end_time,
+                     performer_name=performer_name,
+                     namespace_name=user.username,
+                     page_token=page_token,
+                     filter_kinds=SERVICE_LEVEL_LOG_KINDS)


 @resource('/v1/organization/<orgname>/logs')

@@ -132,8 +125,8 @@ class OrgLogs(ApiResource):

   @nickname('listOrgLogs')
   @parse_args()
-  @query_param('starttime', 'Earliest time from which to get logs. (%m/%d/%Y %Z)', type=str)
-  @query_param('endtime', 'Latest time to which to get logs. (%m/%d/%Y %Z)', type=str)
+  @query_param('starttime', 'Earliest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
+  @query_param('endtime', 'Latest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
   @query_param('performer', 'Username for which to filter logs.', type=str)
   @page_support()
   @require_scope(scopes.ORG_ADMIN)

@@ -145,8 +138,10 @@ class OrgLogs(ApiResource):
       start_time = parsed_args['starttime']
       end_time = parsed_args['endtime']

-      return get_logs(start_time, end_time, namespace_name=orgname, performer_name=performer_name,
-                      page_token=page_token, ignore=SERVICE_LEVEL_LOG_KINDS)
+      return _get_logs(start_time, end_time,
+                       namespace_name=orgname,
+                       performer_name=performer_name,
+                       page_token=page_token)

     raise Unauthorized()

@@ -160,16 +155,18 @@ class RepositoryAggregateLogs(RepositoryParamResource):
   @require_repo_admin
   @nickname('getAggregateRepoLogs')
   @parse_args()
-  @query_param('starttime', 'Earliest time from which to get logs (%m/%d/%Y %Z)', type=str)
-  @query_param('endtime', 'Latest time to which to get logs (%m/%d/%Y %Z)', type=str)
+  @query_param('starttime', 'Earliest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
+  @query_param('endtime', 'Latest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
   def get(self, namespace, repository, parsed_args):
     """ Returns the aggregated logs for the specified repository. """
-    if model.repo_exists(namespace, repository) is False:
+    if registry_model.lookup_repository(namespace, repository) is None:
       raise NotFound()

     start_time = parsed_args['starttime']
     end_time = parsed_args['endtime']
-    return get_aggregate_logs(start_time, end_time, repository=repository, namespace=namespace)
+    return _get_aggregate_logs(start_time, end_time,
                                repository=repository,
                                namespace=namespace)


 @resource('/v1/user/aggregatelogs')

@@ -180,8 +177,8 @@ class UserAggregateLogs(ApiResource):
   @require_user_admin
   @nickname('getAggregateUserLogs')
   @parse_args()
-  @query_param('starttime', 'Earliest time from which to get logs. (%m/%d/%Y %Z)', type=str)
-  @query_param('endtime', 'Latest time to which to get logs. (%m/%d/%Y %Z)', type=str)
+  @query_param('starttime', 'Earliest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
+  @query_param('endtime', 'Latest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
   @query_param('performer', 'Username for which to filter logs.', type=str)
   def get(self, parsed_args):
     """ Returns the aggregated logs for the current user. """

@@ -190,8 +187,10 @@ class UserAggregateLogs(ApiResource):
     end_time = parsed_args['endtime']

     user = get_authenticated_user()
-    return get_aggregate_logs(start_time, end_time, performer_name=performer_name,
-                              namespace=user.username, ignore=SERVICE_LEVEL_LOG_KINDS)
+    return _get_aggregate_logs(start_time, end_time,
                                performer_name=performer_name,
                                namespace=user.username,
                                filter_kinds=SERVICE_LEVEL_LOG_KINDS)


 @resource('/v1/organization/<orgname>/aggregatelogs')

@@ -203,8 +202,8 @@ class OrgAggregateLogs(ApiResource):

   @nickname('getAggregateOrgLogs')
   @parse_args()
-  @query_param('starttime', 'Earliest time from which to get logs. (%m/%d/%Y %Z)', type=str)
-  @query_param('endtime', 'Latest time to which to get logs. (%m/%d/%Y %Z)', type=str)
+  @query_param('starttime', 'Earliest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
+  @query_param('endtime', 'Latest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
   @query_param('performer', 'Username for which to filter logs.', type=str)
   @require_scope(scopes.ORG_ADMIN)
   def get(self, orgname, parsed_args):

@@ -215,46 +214,13 @@ class OrgAggregateLogs(ApiResource):
       start_time = parsed_args['starttime']
      end_time = parsed_args['endtime']

-      return get_aggregate_logs(start_time, end_time, namespace=orgname,
-                                performer_name=performer_name, ignore=SERVICE_LEVEL_LOG_KINDS)
+      return _get_aggregate_logs(start_time, end_time,
                                  namespace=orgname,
                                  performer_name=performer_name)

     raise Unauthorized()


-def queue_logs_export(start_time, end_time, options, namespace_name, repository_name=None):
-  export_id = str(uuid.uuid4())
-  namespace = data_model.user.get_namespace_user(namespace_name)
-  if namespace is None:
-    raise InvalidRequest('Unknown namespace')
-
-  repository = None
-  if repository_name is not None:
-    repository = data_model.repository.get_repository(namespace_name, repository_name)
-    if repository is None:
-      raise InvalidRequest('Unknown repository')
-
-  callback_url = options.get('callback_url')
-  if callback_url:
-    if not callback_url.startswith('https://') and not callback_url.startswith('http://'):
-      raise InvalidRequest('Invalid callback URL')
-
-  export_action_logs_queue.put([namespace_name], json.dumps({
-    'export_id': export_id,
-    'repository_id': repository.id if repository else None,
-    'namespace_id': namespace.id,
-    'namespace_name': namespace.username,
-    'repository_name': repository.name if repository else None,
-    'start_time': start_time,
-    'end_time': end_time,
-    'callback_url': callback_url,
-    'callback_email': options.get('callback_email'),
-  }), retries_remaining=3)
-
-  return {
-    'export_id': export_id,
-  }
-
-
 EXPORT_LOGS_SCHEMA = {
   'type': 'object',
   'description': 'Configuration for an export logs operation',

@@ -271,6 +237,27 @@ EXPORT_LOGS_SCHEMA = {
 }


+def _queue_logs_export(start_time, end_time, options, namespace_name, repository_name=None):
+  callback_url = options.get('callback_url')
+  if callback_url:
+    if not callback_url.startswith('https://') and not callback_url.startswith('http://'):
+      raise InvalidRequest('Invalid callback URL')
+
+  callback_email = options.get('callback_email')
+  if callback_email:
+    if callback_email.find('@') < 0:
+      raise InvalidRequest('Invalid callback e-mail')
+
+  (start_time, end_time) = _validate_logs_arguments(start_time, end_time)
+  export_id = logs_model.queue_logs_export(start_time, end_time, export_action_logs_queue,
                                            namespace_name, repository_name, callback_url,
                                            callback_email)
+  if export_id is None:
+    raise InvalidRequest('Invalid export request')
+
+  return export_id
+
+
 @resource('/v1/repository/<apirepopath:repository>/exportlogs')
 @path_param('repository', 'The full path of the repository. e.g. namespace/name')
 class ExportRepositoryLogs(RepositoryParamResource):

@@ -282,18 +269,21 @@ class ExportRepositoryLogs(RepositoryParamResource):
   @require_repo_admin
   @nickname('exportRepoLogs')
   @parse_args()
-  @query_param('starttime', 'Earliest time from which to get logs (%m/%d/%Y %Z)', type=str)
-  @query_param('endtime', 'Latest time to which to get logs (%m/%d/%Y %Z)', type=str)
+  @query_param('starttime', 'Earliest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
+  @query_param('endtime', 'Latest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
   @validate_json_request('ExportLogs')
   def post(self, namespace, repository, parsed_args):
     """ Queues an export of the logs for the specified repository. """
-    if model.repo_exists(namespace, repository) is False:
+    if registry_model.lookup_repository(namespace, repository) is None:
       raise NotFound()

     start_time = parsed_args['starttime']
     end_time = parsed_args['endtime']
-    return queue_logs_export(start_time, end_time, request.get_json(), namespace,
-                             repository_name=repository)
+    export_id = _queue_logs_export(start_time, end_time, request.get_json(), namespace,
                                    repository_name=repository)
+    return {
+      'export_id': export_id,
+    }


 @resource('/v1/user/exportlogs')

@@ -306,8 +296,8 @@ class ExportUserLogs(ApiResource):
   @require_user_admin
   @nickname('exportUserLogs')
   @parse_args()
-  @query_param('starttime', 'Earliest time from which to get logs. (%m/%d/%Y %Z)', type=str)
-  @query_param('endtime', 'Latest time to which to get logs. (%m/%d/%Y %Z)', type=str)
+  @query_param('starttime', 'Earliest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
+  @query_param('endtime', 'Latest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
   @validate_json_request('ExportLogs')
   def post(self, parsed_args):
     """ Returns the aggregated logs for the current user. """

@@ -315,7 +305,10 @@ class ExportUserLogs(ApiResource):
     end_time = parsed_args['endtime']

     user = get_authenticated_user()
-    return queue_logs_export(start_time, end_time, request.get_json(), user.username)
+    export_id = _queue_logs_export(start_time, end_time, request.get_json(), user.username)
+    return {
+      'export_id': export_id,
+    }


 @resource('/v1/organization/<orgname>/exportlogs')

@@ -330,8 +323,8 @@ class ExportOrgLogs(ApiResource):

   @nickname('exportOrgLogs')
   @parse_args()
-  @query_param('starttime', 'Earliest time from which to get logs. (%m/%d/%Y %Z)', type=str)
-  @query_param('endtime', 'Latest time to which to get logs. (%m/%d/%Y %Z)', type=str)
+  @query_param('starttime', 'Earliest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
+  @query_param('endtime', 'Latest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
   @require_scope(scopes.ORG_ADMIN)
   @validate_json_request('ExportLogs')
   def post(self, orgname, parsed_args):

@@ -341,6 +334,9 @@ class ExportOrgLogs(ApiResource):
       start_time = parsed_args['starttime']
       end_time = parsed_args['endtime']

-      return queue_logs_export(start_time, end_time, request.get_json(), orgname)
+      export_id = _queue_logs_export(start_time, end_time, request.get_json(), orgname)
+      return {
+        'export_id': export_id,
+      }

     raise Unauthorized()
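To illustrate the stated goal of reimplementing the logs data model against a non-database system, here is a purely hypothetical backend satisfying the same lookup_logs call the endpoints now make on logs_model. Every name in it (InMemoryLogsModel, LogEntry, SimplePage) is invented for the example and is not part of this commit; filter_kinds is treated as kinds to exclude, matching the old ignore argument.

# Example only: a non-database logs backend that answers the same call shape
# the refactored endpoints use. Filters other than time/kind are elided.
from collections import namedtuple
from datetime import datetime

LogEntry = namedtuple('LogEntry', ['kind', 'datetime', 'performer', 'metadata'])
SimplePage = namedtuple('SimplePage', ['logs', 'next_page_token'])


class InMemoryLogsModel(object):
  """ Hypothetical non-database backend: keeps log entries in a plain list. """

  def __init__(self, entries=None):
    self._entries = list(entries or [])

  def lookup_logs(self, start_datetime, end_datetime, performer_name=None,
                  repository_name=None, namespace_name=None, filter_kinds=None,
                  page_token=None, max_page_count=None):
    # Select entries inside the requested window, excluding filtered kinds.
    matching = [entry for entry in self._entries
                if start_datetime <= entry.datetime < end_datetime and
                (not filter_kinds or entry.kind not in filter_kinds)]
    return SimplePage(logs=matching, next_page_token=None)


# Example usage:
model = InMemoryLogsModel([LogEntry('push_repo', datetime(2019, 1, 15), 'alice', {})])
page = model.lookup_logs(datetime(2019, 1, 1), datetime(2019, 2, 1))
assert len(page.logs) == 1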