2015-05-14 21:15:39 +00:00
|
|
|
""" Access usage logs for organizations or repositories. """
|
2014-03-14 20:02:13 +00:00
|
|
|
from datetime import datetime, timedelta
|
|
|
|
|
2018-11-27 16:28:32 +00:00
|
|
|
from flask import request
|
|
|
|
|
2019-01-02 19:17:40 +00:00
|
|
|
import features
|
|
|
|
|
2019-01-08 19:03:28 +00:00
|
|
|
from app import app, export_action_logs_queue, avatar
|
|
|
|
from auth.permissions import AdministerOrganizationPermission
|
|
|
|
from auth.auth_context import get_authenticated_user
|
|
|
|
from auth import scopes
|
|
|
|
from data.logs_model import logs_model
|
|
|
|
from data.registry_model import registry_model
|
2014-03-14 20:02:13 +00:00
|
|
|
from endpoints.api import (resource, nickname, ApiResource, query_param, parse_args,
|
2014-03-17 17:10:12 +00:00
|
|
|
RepositoryParamResource, require_repo_admin, related_user_resource,
|
2018-11-27 16:28:32 +00:00
|
|
|
format_date, require_user_admin, path_param, require_scope, page_support,
|
2019-01-02 19:17:40 +00:00
|
|
|
validate_json_request, InvalidRequest, show_if)
|
2016-04-11 20:20:11 +00:00
|
|
|
from endpoints.exception import Unauthorized, NotFound
|
2019-01-08 19:03:28 +00:00
|
|
|
|
2014-03-14 20:02:13 +00:00
|
|
|
|
2015-12-22 14:05:17 +00:00
|
|
|
# Default number of log entries returned per page.
LOGS_PER_PAGE = 20

# Log-entry kinds that concern service keys; these are passed as
# `filter_kinds` for user-scoped log queries.
SERVICE_LEVEL_LOG_KINDS = {
  'service_key_create',
  'service_key_approve',
  'service_key_delete',
  'service_key_modify',
  'service_key_extend',
  'service_key_rotate',
}
|
2014-03-14 20:02:13 +00:00
|
|
|
|
|
|
|
|
2019-01-08 19:03:28 +00:00
|
|
|
def _parse_datetime(dt_string):
|
|
|
|
if not dt_string:
|
|
|
|
return None
|
2014-03-14 20:02:13 +00:00
|
|
|
|
2019-01-08 19:03:28 +00:00
|
|
|
try:
|
|
|
|
return datetime.strptime(dt_string + ' UTC', '%m/%d/%Y %Z')
|
|
|
|
except ValueError:
|
|
|
|
return None
|
2014-11-24 21:07:38 +00:00
|
|
|
|
2014-03-14 20:02:13 +00:00
|
|
|
|
2019-01-08 19:03:28 +00:00
|
|
|
def _validate_logs_arguments(start_time, end_time):
  """ Normalize a user-supplied (start, end) log range into datetimes.

      An invalid or missing start defaults to one day ago; an invalid or
      missing end defaults to today. A day is added to the end so the final
      calendar date is included in the range.
  """
  parsed_start = _parse_datetime(start_time) or (datetime.today() - timedelta(days=1))
  parsed_end = (_parse_datetime(end_time) or datetime.today()) + timedelta(days=1)
  return parsed_start, parsed_end
|
2015-07-31 17:38:02 +00:00
|
|
|
|
|
|
|
|
2019-01-08 19:03:28 +00:00
|
|
|
def _get_logs(start_time, end_time, performer_name=None, repository_name=None, namespace_name=None,
              page_token=None, filter_kinds=None):
  """ Look up one page of logs and render it as an API response body.

      Returns a (response_dict, next_page_token) tuple suitable for the
      @page_support decorator.
  """
  (start_time, end_time) = _validate_logs_arguments(start_time, end_time)
  log_entry_page = logs_model.lookup_logs(start_time, end_time, performer_name, repository_name,
                                          namespace_name, filter_kinds, page_token,
                                          app.config['ACTION_LOG_MAX_PAGE'])

  # Only embed namespace info when the query wasn't already scoped to a
  # namespace or repository.
  include_namespace = namespace_name is None and repository_name is None
  body = {
    'start_time': format_date(start_time),
    'end_time': format_date(end_time),
    'logs': [entry.to_dict(avatar, include_namespace) for entry in log_entry_page.logs],
  }
  return body, log_entry_page.next_page_token
|
2015-12-22 14:05:17 +00:00
|
|
|
|
2015-07-31 17:38:02 +00:00
|
|
|
|
2019-01-08 19:03:28 +00:00
|
|
|
def _get_aggregate_logs(start_time, end_time, performer_name=None, repository=None, namespace=None,
                        filter_kinds=None):
  """ Return the aggregated per-kind log counts over the given time range. """
  (start_time, end_time) = _validate_logs_arguments(start_time, end_time)
  counts = logs_model.get_aggregated_log_counts(start_time, end_time,
                                                performer_name=performer_name,
                                                repository_name=repository,
                                                namespace_name=namespace,
                                                filter_kinds=filter_kinds)
  return {'aggregated': [count.to_dict() for count in counts]}
|
|
|
|
|
|
|
|
|
2016-01-21 20:40:51 +00:00
|
|
|
@resource('/v1/repository/<apirepopath:repository>/logs')
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
class RepositoryLogs(RepositoryParamResource):
  """ Resource for fetching logs for the specific repository. """

  @require_repo_admin
  @nickname('listRepoLogs')
  @parse_args()
  @query_param('starttime', 'Earliest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
  @query_param('endtime', 'Latest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
  @page_support()
  def get(self, namespace, repository, page_token, parsed_args):
    """ List the logs for the specified repository. """
    # 404 rather than returning an empty page for an unknown repository.
    if registry_model.lookup_repository(namespace, repository) is None:
      raise NotFound()

    return _get_logs(parsed_args['starttime'], parsed_args['endtime'],
                     repository_name=repository,
                     page_token=page_token,
                     namespace_name=namespace)
|
2014-03-14 20:02:13 +00:00
|
|
|
|
|
|
|
|
2014-03-14 21:37:57 +00:00
|
|
|
@resource('/v1/user/logs')
class UserLogs(ApiResource):
  """ Resource for fetching logs for the current user. """

  @require_user_admin
  @nickname('listUserLogs')
  @parse_args()
  @query_param('starttime', 'Earliest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
  @query_param('endtime', 'Latest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
  @query_param('performer', 'Username for which to filter logs.', type=str)
  @page_support()
  def get(self, parsed_args, page_token):
    """ List the logs for the current user. """
    current_user = get_authenticated_user()
    # NOTE(review): SERVICE_LEVEL_LOG_KINDS appears to exclude service-key
    # events from the user-facing listing — confirm filter_kinds semantics
    # against logs_model.lookup_logs.
    return _get_logs(parsed_args['starttime'], parsed_args['endtime'],
                     performer_name=parsed_args['performer'],
                     namespace_name=current_user.username,
                     page_token=page_token,
                     filter_kinds=SERVICE_LEVEL_LOG_KINDS)
|
2014-03-14 21:37:57 +00:00
|
|
|
|
|
|
|
|
2014-03-14 20:02:13 +00:00
|
|
|
@resource('/v1/organization/<orgname>/logs')
@path_param('orgname', 'The name of the organization')
@related_user_resource(UserLogs)
class OrgLogs(ApiResource):
  """ Resource for fetching logs for the entire organization. """

  @nickname('listOrgLogs')
  @parse_args()
  @query_param('starttime', 'Earliest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
  @query_param('endtime', 'Latest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
  @query_param('performer', 'Username for which to filter logs.', type=str)
  @page_support()
  @require_scope(scopes.ORG_ADMIN)
  def get(self, orgname, page_token, parsed_args):
    """ List the logs for the specified organization. """
    # Guard clause: only organization admins may read org-wide logs.
    if not AdministerOrganizationPermission(orgname).can():
      raise Unauthorized()

    return _get_logs(parsed_args['starttime'], parsed_args['endtime'],
                     namespace_name=orgname,
                     performer_name=parsed_args['performer'],
                     page_token=page_token)
|
|
|
|
|
|
|
|
|
2016-01-21 20:40:51 +00:00
|
|
|
@resource('/v1/repository/<apirepopath:repository>/aggregatelogs')
@show_if(features.AGGREGATED_LOG_COUNT_RETRIEVAL)
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
class RepositoryAggregateLogs(RepositoryParamResource):
  """ Resource for fetching aggregated logs for the specific repository. """

  @require_repo_admin
  @nickname('getAggregateRepoLogs')
  @parse_args()
  @query_param('starttime', 'Earliest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
  @query_param('endtime', 'Latest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
  def get(self, namespace, repository, parsed_args):
    """ Returns the aggregated logs for the specified repository. """
    # 404 rather than returning empty counts for an unknown repository.
    if registry_model.lookup_repository(namespace, repository) is None:
      raise NotFound()

    return _get_aggregate_logs(parsed_args['starttime'], parsed_args['endtime'],
                               repository=repository,
                               namespace=namespace)
|
2015-07-31 17:38:02 +00:00
|
|
|
|
|
|
|
|
|
|
|
@resource('/v1/user/aggregatelogs')
@show_if(features.AGGREGATED_LOG_COUNT_RETRIEVAL)
class UserAggregateLogs(ApiResource):
  """ Resource for fetching aggregated logs for the current user. """

  @require_user_admin
  @nickname('getAggregateUserLogs')
  @parse_args()
  @query_param('starttime', 'Earliest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
  @query_param('endtime', 'Latest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
  @query_param('performer', 'Username for which to filter logs.', type=str)
  def get(self, parsed_args):
    """ Returns the aggregated logs for the current user. """
    current_user = get_authenticated_user()
    # NOTE(review): service-key kinds appear to be filtered from user-scoped
    # aggregates, mirroring UserLogs — confirm against logs_model.
    return _get_aggregate_logs(parsed_args['starttime'], parsed_args['endtime'],
                               performer_name=parsed_args['performer'],
                               namespace=current_user.username,
                               filter_kinds=SERVICE_LEVEL_LOG_KINDS)
|
2015-07-31 17:38:02 +00:00
|
|
|
|
|
|
|
|
|
|
|
@resource('/v1/organization/<orgname>/aggregatelogs')
@show_if(features.AGGREGATED_LOG_COUNT_RETRIEVAL)
@path_param('orgname', 'The name of the organization')
@related_user_resource(UserLogs)
class OrgAggregateLogs(ApiResource):
  """ Resource for fetching aggregate logs for the entire organization. """
  # NOTE(review): related_user_resource points at UserLogs rather than
  # UserAggregateLogs — possibly a copy-paste from OrgLogs; verify intent.

  @nickname('getAggregateOrgLogs')
  @parse_args()
  @query_param('starttime', 'Earliest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
  @query_param('endtime', 'Latest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
  @query_param('performer', 'Username for which to filter logs.', type=str)
  @require_scope(scopes.ORG_ADMIN)
  def get(self, orgname, parsed_args):
    """ Gets the aggregated logs for the specified organization. """
    # Guard clause: only organization admins may read org-wide aggregates.
    if not AdministerOrganizationPermission(orgname).can():
      raise Unauthorized()

    return _get_aggregate_logs(parsed_args['starttime'], parsed_args['endtime'],
                               namespace=orgname,
                               performer_name=parsed_args['performer'])
|
2018-11-27 16:28:32 +00:00
|
|
|
|
|
|
|
|
|
|
|
# JSON schema for the request body of the export-logs endpoints. The caller
# supplies a callback URL and/or a callback e-mail to be notified with a
# link to the exported logs.
EXPORT_LOGS_SCHEMA = {
  'type': 'object',
  'description': 'Configuration for an export logs operation',
  'properties': {
    'callback_url': {
      'type': 'string',
      'description': 'The callback URL to invoke with a link to the exported logs',
    },
    'callback_email': {
      'type': 'string',
      'description': 'The e-mail address at which to e-mail a link to the exported logs',
    },
  },
}
|
|
|
|
|
|
|
|
|
2019-01-08 19:03:28 +00:00
|
|
|
def _queue_logs_export(start_time, end_time, options, namespace_name, repository_name=None):
  """ Validate export options and enqueue a log-export job.

      `options` is the parsed request body (see EXPORT_LOGS_SCHEMA). Returns
      the new export's ID; raises InvalidRequest for a malformed callback
      URL/e-mail or when the export could not be queued.
  """
  callback_url = options.get('callback_url')
  if callback_url and not callback_url.startswith(('https://', 'http://')):
    raise InvalidRequest('Invalid callback URL')

  callback_email = options.get('callback_email')
  if callback_email and '@' not in callback_email:
    raise InvalidRequest('Invalid callback e-mail')

  (start_time, end_time) = _validate_logs_arguments(start_time, end_time)
  export_id = logs_model.queue_logs_export(start_time, end_time, export_action_logs_queue,
                                           namespace_name, repository_name, callback_url,
                                           callback_email)
  if export_id is None:
    raise InvalidRequest('Invalid export request')

  return export_id
|
|
|
|
|
|
|
|
|
2018-11-27 16:28:32 +00:00
|
|
|
@resource('/v1/repository/<apirepopath:repository>/exportlogs')
# Consistency fix: gate on the LOG_EXPORT feature like ExportOrgLogs, so
# disabling log export hides every export endpoint, not just the org one.
@show_if(features.LOG_EXPORT)
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
class ExportRepositoryLogs(RepositoryParamResource):
  """ Resource for exporting the logs for the specific repository. """
  schemas = {
    'ExportLogs': EXPORT_LOGS_SCHEMA
  }

  @require_repo_admin
  @nickname('exportRepoLogs')
  @parse_args()
  @query_param('starttime', 'Earliest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
  @query_param('endtime', 'Latest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
  @validate_json_request('ExportLogs')
  def post(self, namespace, repository, parsed_args):
    """ Queues an export of the logs for the specified repository.

        Returns the ID of the queued export; raises NotFound for an unknown
        repository and InvalidRequest for bad callback options.
    """
    if registry_model.lookup_repository(namespace, repository) is None:
      raise NotFound()

    export_id = _queue_logs_export(parsed_args['starttime'], parsed_args['endtime'],
                                   request.get_json(), namespace,
                                   repository_name=repository)
    return {
      'export_id': export_id,
    }
|
2018-11-27 16:28:32 +00:00
|
|
|
|
|
|
|
|
|
|
|
@resource('/v1/user/exportlogs')
# Consistency fix: gate on the LOG_EXPORT feature like ExportOrgLogs, so
# disabling log export hides every export endpoint, not just the org one.
@show_if(features.LOG_EXPORT)
class ExportUserLogs(ApiResource):
  """ Resource for exporting the logs for the current user. """
  schemas = {
    'ExportLogs': EXPORT_LOGS_SCHEMA
  }

  @require_user_admin
  @nickname('exportUserLogs')
  @parse_args()
  @query_param('starttime', 'Earliest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
  @query_param('endtime', 'Latest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
  @validate_json_request('ExportLogs')
  def post(self, parsed_args):
    """ Queues an export of the logs for the current user.

        (Docstring fixed: the previous one was copy-pasted from the
        aggregate-logs endpoint.) Returns the ID of the queued export.
    """
    user = get_authenticated_user()
    export_id = _queue_logs_export(parsed_args['starttime'], parsed_args['endtime'],
                                   request.get_json(), user.username)
    return {
      'export_id': export_id,
    }
|
2018-11-27 16:28:32 +00:00
|
|
|
|
|
|
|
|
|
|
|
@resource('/v1/organization/<orgname>/exportlogs')
@show_if(features.LOG_EXPORT)
@path_param('orgname', 'The name of the organization')
@related_user_resource(ExportUserLogs)
class ExportOrgLogs(ApiResource):
  """ Resource for exporting the logs for an entire organization. """
  schemas = {
    'ExportLogs': EXPORT_LOGS_SCHEMA
  }

  @nickname('exportOrgLogs')
  @parse_args()
  @query_param('starttime', 'Earliest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
  @query_param('endtime', 'Latest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
  @require_scope(scopes.ORG_ADMIN)
  @validate_json_request('ExportLogs')
  def post(self, orgname, parsed_args):
    """ Exports the logs for the specified organization. """
    # Guard clause: only organization admins may export org-wide logs.
    if not AdministerOrganizationPermission(orgname).can():
      raise Unauthorized()

    export_id = _queue_logs_export(parsed_args['starttime'], parsed_args['endtime'],
                                   request.get_json(), orgname)
    return {
      'export_id': export_id,
    }
|