refactor(endpoints/api/logs*): Refactor to new data model

Moving the logs endpoints away from the peewee data model and onto the data model interface.

[TESTING->locally with docker compose]

Issue: https://coreosdev.atlassian.net/browse/QUAY-628

- [ ] It works!
- [ ] Comments provide sufficient explanations for the next contributor
- [ ] Tests cover changes and corner cases
- [ ] Follows Quay syntax patterns and format
Charlton Austin 2017-07-18 11:58:42 -04:00
parent b0aeb97198
commit a7c818543d
5 changed files with 522 additions and 98 deletions
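
For context before the diff: a minimal sketch of how a caller is expected to use the shared log helpers added below. Only get_logs, get_aggregate_logs, and their signatures come from this change; the wrapper functions and their names are illustrative.

def list_all_logs(start_time, end_time, page_token=None):
  # get_logs() parses the MM/DD/YYYY bounds, applies the default one-week window, runs the
  # query through the data model, and paginates; it returns the serialized page plus an
  # opaque token for the next page.
  log_page, next_page_token = get_logs(start_time, end_time, page_token=page_token)
  return log_page, next_page_token

def aggregate_all_logs(start_time, end_time):
  # get_aggregate_logs() returns per-kind, per-day counts over the same window.
  return get_aggregate_logs(start_time, end_time)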


@@ -1,15 +1,17 @@
""" Superuser API. """
import json
import logging
import os
import string
import pathvalidate
from datetime import datetime
from datetime import datetime, timedelta
from random import SystemRandom
from dateutil.relativedelta import relativedelta
from flask import request, make_response, jsonify
from tzlocal import get_localzone
import features
@@ -22,10 +24,9 @@ from data.buildlogs import BuildStatusRetrievalError
from endpoints.api import (ApiResource, nickname, resource, validate_json_request,
                           internal_only, require_scope, show_if, parse_args,
                           query_param, abort, require_fresh_login, path_param, verify_not_prod,
                           page_support, log_action, InvalidRequest)
                           page_support, log_action, InvalidRequest, format_date)
from endpoints.api.build import build_status_view, get_logs_or_log_url
from endpoints.api.logs import get_logs, get_aggregate_logs
from data import model
from data import model, database
from data.database import ServiceKeyApprovalType
from endpoints.exception import NotFound
from util.useremails import send_confirmation_email, send_recovery_email
@@ -34,10 +35,36 @@ from util.security.ssl import load_certificate, CertInvalidException
from util.config.validator import EXTRA_CA_DIRECTORY
from _init import ROOT_DIR
logger = logging.getLogger(__name__)
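# The helper below normalizes the raw query arguments shared by the logs endpoints: dates arrive
# as MM/DD/YYYY strings and are interpreted as UTC, the window defaults to the last week when a
# bound is missing or unparseable, end_time is pushed forward one day so the end date is
# inclusive, and an optional performer name is resolved to a user through the data model.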
def _validate_logs_arguments(start_time, end_time, performer_name):
  performer = None
  if performer_name:
    performer = model.user.get_user(performer_name)

  if start_time:
    try:
      start_time = datetime.strptime(start_time + ' UTC', '%m/%d/%Y %Z')
    except ValueError:
      start_time = None

  if not start_time:
    start_time = datetime.today() - timedelta(7)  # One week

  if end_time:
    try:
      end_time = datetime.strptime(end_time + ' UTC', '%m/%d/%Y %Z')
      end_time = end_time + timedelta(days=1)
    except ValueError:
      end_time = None

  if not end_time:
    end_time = datetime.today()

  return start_time, end_time, performer


def get_immediate_subdirectories(directory):
  return [name for name in os.listdir(directory) if os.path.isdir(os.path.join(directory, name))]
@@ -53,6 +80,7 @@ def get_services():
@show_if(features.SUPER_USERS)
class SuperUserGetLogsForService(ApiResource):
  """ Resource for fetching the kinds of system logs in the system. """
  @require_fresh_login
  @verify_not_prod
  @nickname('getSystemLogs')
@@ -84,6 +112,7 @@ class SuperUserGetLogsForService(ApiResource):
@show_if(features.SUPER_USERS)
class SuperUserSystemLogServices(ApiResource):
  """ Resource for fetching the kinds of system logs in the system. """
  @require_fresh_login
  @verify_not_prod
  @nickname('listSystemLogServices')
@@ -98,10 +127,42 @@ class SuperUserSystemLogServices(ApiResource):
    abort(403)
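# The SQL aggregation only keeps the day of the month for each bucket, so aggregated_log_view()
# below rebuilds a concrete date from that day plus the month and year of the requested start
# time; per the in-function comment, the window is limited to one week, so this is safe.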
def aggregated_log_view(log, kinds, start_time):
  # Because we aggregate based on the day of the month in SQL, we only have that information.
  # Therefore, create a synthetic date based on the day and the month of the start time.
  # Logs are allowed for a maximum period of one week, so this calculation should always work.
  synthetic_date = datetime(start_time.year, start_time.month, int(log.day), tzinfo=get_localzone())
  if synthetic_date.day < start_time.day:
    synthetic_date = synthetic_date + relativedelta(months=1)

  view = {
    'kind': kinds[log.kind_id],
    'count': log.count,
    'datetime': format_date(synthetic_date),
  }

  return view


def get_aggregate_logs(start_time, end_time, performer_name=None, repository=None, namespace=None,
                       ignore=None):
  (start_time, end_time, performer) = _validate_logs_arguments(start_time, end_time, performer_name)
  kinds = model.log.get_log_entry_kinds()
  aggregated_logs = model.log.get_aggregated_logs(start_time, end_time, performer=performer,
                                                  repository=repository, namespace=namespace,
                                                  ignore=ignore)

  return {
    'aggregated': [aggregated_log_view(log, kinds, start_time) for log in aggregated_logs]
  }
@resource('/v1/superuser/aggregatelogs')
@internal_only
class SuperUserAggregateLogs(ApiResource):
""" Resource for fetching aggregated logs for the current user. """
@require_fresh_login
@verify_not_prod
@nickname('listAllAggregateLogs')
@@ -119,11 +180,68 @@ class SuperUserAggregateLogs(ApiResource):
    abort(403)
LOGS_PER_PAGE = 20
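# log_view() serializes a single log entry (kind, parsed metadata, IP, timestamp, plus the
# performer and, when listing across namespaces, the owning user or organization);
# get_logs() pages through the data-model query LOGS_PER_PAGE entries at a time and returns the
# serialized page together with an opaque next_page_token.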
def log_view(log, kinds, include_namespace):
  view = {
    'kind': kinds[log.kind_id],
    'metadata': json.loads(log.metadata_json),
    'ip': log.ip,
    'datetime': format_date(log.datetime),
  }

  if log.performer and log.performer.username:
    view['performer'] = {
      'kind': 'user',
      'name': log.performer.username,
      'is_robot': log.performer.robot,
      'avatar': avatar.get_data_for_user(log.performer),
    }

  if include_namespace:
    if log.account and log.account.username:
      if log.account.organization:
        view['namespace'] = {
          'kind': 'org',
          'name': log.account.username,
          'avatar': avatar.get_data_for_org(log.account),
        }
      else:
        view['namespace'] = {
          'kind': 'user',
          'name': log.account.username,
          'avatar': avatar.get_data_for_user(log.account),
        }

  return view


def get_logs(start_time, end_time, performer_name=None, repository=None, namespace=None,
             page_token=None, ignore=None):
  (start_time, end_time, performer) = _validate_logs_arguments(start_time, end_time, performer_name)
  kinds = model.log.get_log_entry_kinds()
  logs_query = model.log.get_logs_query(start_time, end_time, performer=performer,
                                        repository=repository, namespace=namespace,
                                        ignore=ignore)

  logs, next_page_token = model.modelutil.paginate(logs_query, database.LogEntry, descending=True,
                                                   page_token=page_token, limit=LOGS_PER_PAGE)

  include_namespace = namespace is None and repository is None
  return {
    'start_time': format_date(start_time),
    'end_time': format_date(end_time),
    'logs': [log_view(log, kinds, include_namespace) for log in logs],
  }, next_page_token
@resource('/v1/superuser/logs')
@internal_only
@show_if(features.SUPER_USERS)
class SuperUserLogs(ApiResource):
""" Resource for fetching all logs in the system. """
@require_fresh_login
@verify_not_prod
@nickname('listAllLogs')
@@ -151,6 +269,7 @@ def org_view(org):
    'avatar': avatar.get_data_for_org(org),
  }
def user_view(user, password=None):
  user_data = {
    'kind': 'user',
@@ -168,11 +287,13 @@ def user_view(user, password=None):
  return user_data
@resource('/v1/superuser/changelog/')
@internal_only
@show_if(features.SUPER_USERS)
class ChangeLog(ApiResource):
""" Resource for returning the change log for enterprise customers. """
@require_fresh_login
@verify_not_prod
@nickname('getChangeLog')
@@ -188,12 +309,12 @@ class ChangeLog(ApiResource):
    abort(403)
@resource('/v1/superuser/organizations/')
@internal_only
@show_if(features.SUPER_USERS)
class SuperUserOrganizationList(ApiResource):
""" Resource for listing organizations in the system. """
@require_fresh_login
@verify_not_prod
@nickname('listAllOrganizations')
@@ -247,7 +368,6 @@ class SuperUserList(ApiResource):
    abort(403)

  @require_fresh_login
  @verify_not_prod
  @nickname('createInstallUser')
@@ -293,6 +413,7 @@ class SuperUserList(ApiResource):
@show_if(features.MAILING)
class SuperUserSendRecoveryEmail(ApiResource):
  """ Resource for sending a recovery user on behalf of a user. """
  @require_fresh_login
  @verify_not_prod
  @nickname('sendInstallUserRecoveryEmail')
@@ -439,6 +560,7 @@ class SuperUserManagement(ApiResource):
@show_if(features.SUPER_USERS)
class SuperUserTakeOwnership(ApiResource):
  """ Resource for a superuser to take ownership of a namespace. """
  @require_fresh_login
  @verify_not_prod
  @nickname('takeOwnership')
@@ -745,7 +867,6 @@ class SuperUserServiceKey(ApiResource):
log_action('service_key_extend', None, key_log_metadata)
model.service_keys.set_key_expiration(kid, expiration_date)
if 'name' in body or 'metadata' in body:
  model.service_keys.update_service_key(kid, body.get('name'), body.get('metadata'))
  log_action('service_key_modify', None, key_log_metadata)
@@ -837,6 +958,7 @@ class SuperUserServiceKeyApproval(ApiResource):
@show_if(features.SUPER_USERS)
class SuperUserCustomCertificates(ApiResource):
  """ Resource for managing custom certificates. """
  @nickname('getCustomCertificates')
  @require_fresh_login
  @require_scope(scopes.SUPERUSER)
@@ -885,6 +1007,7 @@ class SuperUserCustomCertificates(ApiResource):
@show_if(features.SUPER_USERS)
class SuperUserCustomCertificate(ApiResource):
  """ Resource for managing a custom certificate. """
  @nickname('uploadCustomCertificate')
  @require_fresh_login
  @require_scope(scopes.SUPERUSER)
@@ -901,7 +1024,7 @@ class SuperUserCustomCertificate(ApiResource):
      abort(400)
    logger.debug('Saving custom certificate %s', certpath)
    cert_full_path = config_provider.get_volume_path(EXTRA_CA_DIRECTORY, certpath)
    cert_full_path = config_provider.get_volume_path(EXTRA_CA_DIRECTORY, certpath)
    config_provider.save_volume_file(cert_full_path, uploaded_file)
    logger.debug('Saved custom certificate %s', certpath)
@@ -1018,6 +1141,7 @@ class SuperUserLicense(ApiResource):
@show_if(features.SUPER_USERS)
class SuperUserRepositoryBuildLogs(ApiResource):
  """ Resource for loading repository build logs for the superuser. """
  @require_fresh_login
  @verify_not_prod
  @nickname('getRepoBuildLogsSuperUser')
@@ -1036,6 +1160,7 @@ class SuperUserRepositoryBuildLogs(ApiResource):
@show_if(features.SUPER_USERS)
class SuperUserRepositoryBuildStatus(ApiResource):
  """ Resource for dealing with repository build status. """
  @require_fresh_login
  @verify_not_prod
  @nickname('getRepoBuildStatusSuperUser')
@@ -1054,6 +1179,7 @@ class SuperUserRepositoryBuildStatus(ApiResource):
@show_if(features.SUPER_USERS)
class SuperUserRepositoryBuildResource(ApiResource):
  """ Resource for dealing with repository builds as a super user. """
  @require_fresh_login
  @verify_not_prod
  @nickname('getRepoBuildSuperUser')