Implement a worker for batch exporting of usage logs

This will allow customers to request their usage logs for a repository or an entire namespace, and we can export the logs in a manner that doesn't absolutely destroy the database, with every step along the way timed.
Joseph Schorr 2018-11-27 18:28:32 +02:00
parent b8d2e1be9c
commit 8a212728a3
18 changed files with 768 additions and 15 deletions
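For context, a minimal sketch of the batching-and-timing approach the description refers to, assuming a worker that pages through one day of logs at a time. Every identifier here (export_usage_logs, fetch_log_batch, write_chunk, BATCH_SIZE) is illustrative, not taken from this commit:

  import json
  import logging
  import time
  from datetime import timedelta

  logger = logging.getLogger(__name__)

  # Hypothetical page size; small batches keep each individual query cheap.
  BATCH_SIZE = 1000

  def export_usage_logs(start_datetime, end_datetime, fetch_log_batch, write_chunk):
    # Walk the requested range one day at a time, paging each day's logs in
    # fixed-size batches so no single query scans the whole table.
    # fetch_log_batch and write_chunk stand in for the real database lookup
    # and storage-upload steps.
    current_day = start_datetime
    while current_day <= end_datetime:
      day_started_at = time.time()
      offset = 0
      while True:
        batch = fetch_log_batch(current_day, offset, BATCH_SIZE)
        if not batch:
          break
        write_chunk(json.dumps(batch))
        offset += BATCH_SIZE
      logger.debug('Exported logs for %s in %.2fs', current_day.date(),
                   time.time() - day_started_at)
      current_day = current_day + timedelta(days=1)

Paging by day and by fixed-size batch bounds the cost of each query, and the per-day timing records where an export spends its time.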

@@ -12,7 +12,7 @@ from flask_login import current_user
import features
from app import (app, billing as stripe, build_logs, avatar, signer, log_archive, config_provider,
                 get_app_url, instance_keys, user_analytics)
                 get_app_url, instance_keys, user_analytics, storage)
from auth import scopes
from auth.auth_context import get_authenticated_user
from auth.basic import has_basic_auth
@@ -372,6 +372,33 @@ def buildlogs(build_uuid):
  return response


@web.route('/exportedlogs/<file_id>', methods=['GET'])
def exportedlogs(file_id):
  # Only enable this endpoint if local storage is available.
  has_local_storage = False
  for storage_type, _ in app.config.get('DISTRIBUTED_STORAGE_CONFIG', {}).values():
    if storage_type == 'LocalStorage':
      has_local_storage = True
      break

  if not has_local_storage:
    abort(404)

  JSON_MIMETYPE = 'application/json'
  exported_logs_storage_path = app.config.get('EXPORT_ACTION_LOGS_STORAGE_PATH',
                                              'exportedactionlogs')
  export_storage_path = os.path.join(exported_logs_storage_path, file_id)
  if not storage.exists(storage.preferred_locations, export_storage_path):
    abort(404)

  try:
    return send_file(storage.stream_read_file(storage.preferred_locations, export_storage_path),
                     mimetype=JSON_MIMETYPE)
  except IOError:
    logger.exception('Could not read exported logs')
    abort(403)


@web.route('/logarchive/<file_id>', methods=['GET'])
@route_show_if(features.BUILD_SUPPORT)
@process_auth_or_cookie
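The new endpoint only serves files when a LocalStorage engine appears in the storage configuration, which the loop above detects by unpacking the [driver name, driver parameters] pair from each value of DISTRIBUTED_STORAGE_CONFIG. A sketch of a configuration that would pass the check; the location name 'local_us' and the storage_path value are illustrative, not from this commit:

  DISTRIBUTED_STORAGE_CONFIG = {
    # location name -> [storage driver, driver parameters]; illustrative values
    'local_us': ['LocalStorage', {'storage_path': '/datastorage/registry'}],
  }

  # Optional; the endpoint falls back to 'exportedactionlogs' when unset.
  EXPORT_ACTION_LOGS_STORAGE_PATH = 'exportedactionlogs'

With that in place, an export written under exportedactionlogs/<file_id> becomes downloadable as JSON at /exportedlogs/<file_id>.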
@@ -401,6 +428,7 @@ def logarchive(file_id):
    logger.exception('Could not read archived logs')
    abort(403)


@web.route('/receipt', methods=['GET'])
@route_show_if(features.BILLING)
@require_session_login