Merge pull request #2800 from charltonaustin/create_data_interface_for_subsystem_api/logs_628

refactor(endpoints/api/logs*): Refactor to new data model
Charlton Austin, 2017-07-19 15:51:57 -04:00, committed by GitHub
commit ae30a40921
5 changed files with 522 additions and 98 deletions

endpoints/api/logs.py

@@ -1,79 +1,22 @@
""" Access usage logs for organizations or repositories. """
import json
from datetime import datetime, timedelta
from dateutil.relativedelta import relativedelta
from endpoints.api import (resource, nickname, ApiResource, query_param, parse_args,
RepositoryParamResource, require_repo_admin, related_user_resource,
format_date, require_user_admin, path_param, require_scope, page_support)
from endpoints.api.logs_models_pre_oci import pre_oci_model as model
from endpoints.exception import Unauthorized, NotFound
from auth.permissions import AdministerOrganizationPermission
from auth.auth_context import get_authenticated_user
from data import model, database
from auth import scopes
from app import avatar
from tzlocal import get_localzone
LOGS_PER_PAGE = 20
SERVICE_LEVEL_LOG_KINDS = set(['service_key_create', 'service_key_approve', 'service_key_delete',
'service_key_modify', 'service_key_extend', 'service_key_rotate'])
def log_view(log, kinds, include_namespace):
view = {
'kind': kinds[log.kind_id],
'metadata': json.loads(log.metadata_json),
'ip': log.ip,
'datetime': format_date(log.datetime),
}
if log.performer and log.performer.username:
view['performer'] = {
'kind': 'user',
'name': log.performer.username,
'is_robot': log.performer.robot,
'avatar': avatar.get_data_for_user(log.performer),
}
if include_namespace:
if log.account and log.account.username:
if log.account.organization:
view['namespace'] = {
'kind': 'org',
'name': log.account.username,
'avatar': avatar.get_data_for_org(log.account),
}
else:
view['namespace'] = {
'kind': 'user',
'name': log.account.username,
'avatar': avatar.get_data_for_user(log.account),
}
return view
def aggregated_log_view(log, kinds, start_time):
# Because we aggregate based on the day of the month in SQL, we only have that information.
# Therefore, create a synthetic date based on the day and the month of the start time.
# Logs are allowed for a maximum period of one week, so this calculation should always work.
synthetic_date = datetime(start_time.year, start_time.month, int(log.day), tzinfo=get_localzone())
if synthetic_date.day < start_time.day:
synthetic_date = synthetic_date + relativedelta(months=1)
view = {
'kind': kinds[log.kind_id],
'count': log.count,
'datetime': format_date(synthetic_date),
}
return view
-def _validate_logs_arguments(start_time, end_time, performer_name):
-performer = None
-if performer_name:
-performer = model.user.get_user(performer_name)
+def _validate_logs_arguments(start_time, end_time):
if start_time:
try:
start_time = datetime.strptime(start_time + ' UTC', '%m/%d/%Y %Z')
@@ -81,7 +24,7 @@ def _validate_logs_arguments(start_time, end_time, performer_name):
start_time = None
if not start_time:
start_time = datetime.today() - timedelta(7) # One week
if end_time:
try:
@@ -93,39 +36,37 @@ def _validate_logs_arguments(start_time, end_time, performer_name):
if not end_time:
end_time = datetime.today()
-return (start_time, end_time, performer)
+return start_time, end_time
-def get_logs(start_time, end_time, performer_name=None, repository=None, namespace=None,
+def get_logs(start_time, end_time, performer_name=None, repository_name=None, namespace_name=None,
page_token=None, ignore=None):
-(start_time, end_time, performer) = _validate_logs_arguments(start_time, end_time, performer_name)
-kinds = model.log.get_log_entry_kinds()
-logs_query = model.log.get_logs_query(start_time, end_time, performer=performer,
-repository=repository, namespace=namespace,
-ignore=ignore)
-logs, next_page_token = model.modelutil.paginate(logs_query, database.LogEntry, descending=True,
-page_token=page_token, limit=LOGS_PER_PAGE)
-include_namespace = namespace is None and repository is None
-return {
-'start_time': format_date(start_time),
-'end_time': format_date(end_time),
-'logs': [log_view(log, kinds, include_namespace) for log in logs],
-}, next_page_token
+(start_time, end_time) = _validate_logs_arguments(start_time, end_time)
+kinds = model.get_log_entry_kinds()
+log_entry_page = model.get_logs_query(start_time, end_time, performer_name, repository_name,
+namespace_name, ignore, page_token)
+include_namespace = namespace_name is None and repository_name is None
+return {
+'start_time': format_date(start_time),
+'end_time': format_date(end_time),
+'logs': [log.to_dict(kinds, include_namespace) for log in log_entry_page.logs],
+}, log_entry_page.next_page_token
def get_aggregate_logs(start_time, end_time, performer_name=None, repository=None, namespace=None,
ignore=None):
-(start_time, end_time, performer) = _validate_logs_arguments(start_time, end_time, performer_name)
-kinds = model.log.get_log_entry_kinds()
-aggregated_logs = model.log.get_aggregated_logs(start_time, end_time, performer=performer,
-repository=repository, namespace=namespace,
-ignore=ignore)
+(start_time, end_time) = _validate_logs_arguments(start_time, end_time)
+kinds = model.get_log_entry_kinds()
+aggregated_logs = model.get_aggregated_logs(start_time, end_time, performer_name=performer_name,
+repository_name=repository, namespace_name=namespace,
+ignore=ignore)
return {
-'aggregated': [aggregated_log_view(log, kinds, start_time) for log in aggregated_logs]
+'aggregated': [log.to_dict(kinds, start_time) for log in aggregated_logs]
}
@@ -133,6 +74,7 @@ def get_aggregate_logs(start_time, end_time, performer_name=None, repository=None,
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
class RepositoryLogs(RepositoryParamResource):
""" Resource for fetching logs for the specific repository. """
@require_repo_admin
@nickname('listRepoLogs')
@parse_args()
@@ -142,19 +84,19 @@ class RepositoryLogs(RepositoryParamResource):
@page_support()
def get(self, namespace, repository, page_token, parsed_args):
""" List the logs for the specified repository. """
-repo = model.repository.get_repository(namespace, repository)
-if not repo:
+if model.repo_exists(namespace, repository) is False:
raise NotFound()
start_time = parsed_args['starttime']
end_time = parsed_args['endtime']
-return get_logs(start_time, end_time, repository=repo, page_token=page_token,
+return get_logs(start_time, end_time, repository_name=repository, page_token=page_token, namespace_name=namespace,
ignore=SERVICE_LEVEL_LOG_KINDS)
@resource('/v1/user/logs')
class UserLogs(ApiResource):
""" Resource for fetching logs for the current user. """
@require_user_admin
@nickname('listUserLogs')
@parse_args()
@@ -169,7 +111,7 @@ class UserLogs(ApiResource):
end_time = parsed_args['endtime']
user = get_authenticated_user()
-return get_logs(start_time, end_time, performer_name=performer_name, namespace=user.username,
+return get_logs(start_time, end_time, performer_name=performer_name, namespace_name=user.username,
page_token=page_token, ignore=SERVICE_LEVEL_LOG_KINDS)
@@ -178,6 +120,7 @@ class UserLogs(ApiResource):
@related_user_resource(UserLogs)
class OrgLogs(ApiResource):
""" Resource for fetching logs for the entire organization. """
@nickname('listOrgLogs')
@parse_args()
@query_param('starttime', 'Earliest time from which to get logs. (%m/%d/%Y %Z)', type=str)
@@ -194,7 +137,7 @@ class OrgLogs(ApiResource):
start_time = parsed_args['starttime']
end_time = parsed_args['endtime']
-return get_logs(start_time, end_time, namespace=orgname, performer_name=performer_name,
+return get_logs(start_time, end_time, namespace_name=orgname, performer_name=performer_name,
page_token=page_token, ignore=SERVICE_LEVEL_LOG_KINDS)
raise Unauthorized()
@@ -204,6 +147,7 @@ class OrgLogs(ApiResource):
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
class RepositoryAggregateLogs(RepositoryParamResource):
""" Resource for fetching aggregated logs for the specific repository. """
@require_repo_admin
@nickname('getAggregateRepoLogs')
@parse_args()
@@ -211,19 +155,19 @@ class RepositoryAggregateLogs(RepositoryParamResource):
@query_param('endtime', 'Latest time to which to get logs (%m/%d/%Y %Z)', type=str)
def get(self, namespace, repository, parsed_args):
""" Returns the aggregated logs for the specified repository. """
-repo = model.repository.get_repository(namespace, repository)
-if not repo:
+if model.repo_exists(namespace, repository) is False:
raise NotFound()
start_time = parsed_args['starttime']
end_time = parsed_args['endtime']
-return get_aggregate_logs(start_time, end_time, repository=repo,
+return get_aggregate_logs(start_time, end_time, repository=repository, namespace=namespace,
ignore=SERVICE_LEVEL_LOG_KINDS)
@resource('/v1/user/aggregatelogs')
class UserAggregateLogs(ApiResource):
""" Resource for fetching aggregated logs for the current user. """
@require_user_admin
@nickname('getAggregateUserLogs')
@parse_args()
@@ -246,6 +190,7 @@ class UserAggregateLogs(ApiResource):
@related_user_resource(UserLogs)
class OrgAggregateLogs(ApiResource):
""" Resource for fetching aggregate logs for the entire organization. """
@nickname('getAggregateOrgLogs')
@parse_args()
@query_param('starttime', 'Earliest time from which to get logs. (%m/%d/%Y %Z)', type=str)

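The subtlest logic in this file is the synthetic-date arithmetic in aggregated_log_view, which this commit moves into AggregatedLogEntry.to_dict in the new models file below. The SQL aggregation keeps only the day of the month, so the month has to be inferred from the start time. A worked example with illustrative dates:

from datetime import datetime
from dateutil.relativedelta import relativedelta

# The one-week window starts near the end of June; aggregation reports day 2.
start_time = datetime(2017, 6, 28)
day = 2

synthetic_date = datetime(start_time.year, start_time.month, day)  # 2017-06-02
if synthetic_date.day < start_time.day:
  # Day 2 cannot precede day 28 inside a one-week window, so the bucket
  # must belong to the following month.
  synthetic_date = synthetic_date + relativedelta(months=1)

assert synthetic_date == datetime(2017, 7, 2)
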
endpoints/api/logs_models_interface.py

@@ -0,0 +1,141 @@
import json
from abc import ABCMeta, abstractmethod
from collections import namedtuple
from datetime import datetime
from dateutil.relativedelta import relativedelta
from six import add_metaclass
from tzlocal import get_localzone
from app import avatar
from endpoints.api import format_date
from util.morecollections import AttrDict
class LogEntry(
namedtuple('LogEntry', [
'metadata_json', 'ip', 'datetime', 'performer_email', 'performer_username', 'performer_robot',
'account_organization', 'account_username', 'account_email', 'account_robot', 'kind_id'
])):
"""
LogEntry represents a single log entry.
:type metadata_json: string
:type ip: string
:type datetime: string
:type performer_email: string
:type performer_username: string
:type performer_robot: boolean
:type account_organization: boolean
:type account_username: string
:type account_email: string
:type account_robot: boolean
:type kind_id: int
"""
def to_dict(self, kinds, include_namespace):
view = {
'kind': kinds[self.kind_id],
'metadata': json.loads(self.metadata_json),
'ip': self.ip,
'datetime': format_date(self.datetime),
}
if self.performer_username:
performer = AttrDict({'username': self.performer_username, 'email': self.performer_email})
performer.robot = None
if self.performer_robot:
performer.robot = self.performer_robot
view['performer'] = {
'kind': 'user',
'name': self.performer_username,
'is_robot': self.performer_robot,
'avatar': avatar.get_data_for_user(performer),
}
if include_namespace:
if self.account_username:
account = AttrDict({'username': self.account_username, 'email': self.account_email})
if self.account_organization:
view['namespace'] = {
'kind': 'org',
'name': self.account_username,
'avatar': avatar.get_data_for_org(account),
}
else:
account.robot = None
if self.account_robot:
account.robot = self.account_robot
view['namespace'] = {
'kind': 'user',
'name': self.account_username,
'avatar': avatar.get_data_for_user(account),
}
return view
class LogEntryPage(
namedtuple('LogEntryPage', ['logs', 'next_page_token'])):
"""
LogEntryPage represents a single page of logs.
:type logs: [LogEntry]
:type next_page_token: {any -> any}
"""
class AggregatedLogEntry(
namedtuple('AggregatedLogEntry', ['count', 'kind_id', 'day'])):
"""
AggregatedLogEntry represents an aggregated view of logs.
:type count: int
:type kind_id: int
:type day: string
"""
def to_dict(self, kinds, start_time):
synthetic_date = datetime(start_time.year, start_time.month, int(self.day), tzinfo=get_localzone())
if synthetic_date.day < start_time.day:
synthetic_date = synthetic_date + relativedelta(months=1)
view = {
'kind': kinds[self.kind_id],
'count': self.count,
'datetime': format_date(synthetic_date),
}
return view
@add_metaclass(ABCMeta)
class LogEntryDataInterface(object):
"""
Interface that represents all data store interactions required by the logs API.
"""
@abstractmethod
def get_logs_query(self, start_time, end_time, performer_name=None, repository_name=None, namespace_name=None,
ignore=None, page_token=None):
"""
Returns a LogEntryPage.
"""
@abstractmethod
def get_log_entry_kinds(self):
"""
Returns a map of LogEntryKind id -> name and name -> id
"""
@abstractmethod
def repo_exists(self, namespace_name, repository_name):
"""
Returns whether or not a repo exists.
"""
@abstractmethod
def get_aggregated_logs(self, start_time, end_time, performer_name=None, repository_name=None, namespace_name=None,
ignore=None):
"""
Returns a list of aggregated logs
"""

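Because the four abstract methods above are everything the endpoints need, any object honoring them can back the logs API. A hypothetical in-memory implementation (not part of this commit) illustrates the seam the interface creates, for example for endpoint tests:

from endpoints.api.logs_models_interface import (LogEntryDataInterface, LogEntryPage,
                                                 AggregatedLogEntry)

class InMemoryLogsModel(LogEntryDataInterface):
  """ Hypothetical stub that serves LogEntry tuples from a plain list. """

  def __init__(self, entries):
    self._entries = entries

  def get_logs_query(self, start_time, end_time, performer_name=None, repository_name=None,
                     namespace_name=None, ignore=None, page_token=None):
    return LogEntryPage(self._entries, None)

  def get_log_entry_kinds(self):
    return {1: 'push_repo', 'push_repo': 1}

  def repo_exists(self, namespace_name, repository_name):
    return True

  def get_aggregated_logs(self, start_time, end_time, performer_name=None, repository_name=None,
                          namespace_name=None, ignore=None):
    return [AggregatedLogEntry(len(self._entries), 1, '1')]
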
endpoints/api/logs_models_pre_oci.py

@@ -0,0 +1,84 @@
from data import model, database
from endpoints.api.logs_models_interface import LogEntryDataInterface, LogEntryPage, LogEntry, AggregatedLogEntry
def _create_log(log):
account_organization = None
account_username = None
account_email = None
account_robot = None
try:
account_organization = log.account.organization
account_username = log.account.username
account_email = log.account.email
account_robot = log.account.robot
except AttributeError:
pass
performer_robot = None
performer_username = None
performer_email = None
try:
performer_robot = log.performer.robot
performer_username = log.performer.username
performer_email = log.performer.email
except AttributeError:
pass
return LogEntry(log.metadata_json, log.ip, log.datetime, performer_email, performer_username,
performer_robot, account_organization, account_username,
account_email, account_robot, log.kind_id)
class PreOCIModel(LogEntryDataInterface):
"""
PreOCIModel implements the data model for usage logs using a database schema
before it was changed to support the OCI specification.
"""
def get_logs_query(self, start_time, end_time, performer_name=None, repository_name=None, namespace_name=None,
ignore=None, page_token=None):
repo = None
if repository_name and namespace_name:
repo = model.repository.get_repository(namespace_name, repository_name)
performer = None
if performer_name:
performer = model.user.get_user(performer_name)
logs_query = model.log.get_logs_query(start_time, end_time, performer=performer,
repository=repo, namespace=namespace_name,
ignore=ignore)
logs, next_page_token = model.modelutil.paginate(logs_query, database.LogEntry, descending=True,
page_token=page_token, limit=20)
return LogEntryPage([_create_log(log) for log in logs], next_page_token)
def get_log_entry_kinds(self):
return model.log.get_log_entry_kinds()
def repo_exists(self, namespace_name, repository_name):
repo = model.repository.get_repository(namespace_name, repository_name)
if repo is None:
return False
return True
def get_aggregated_logs(self, start_time, end_time, performer_name=None, repository_name=None, namespace_name=None,
ignore=None):
repo = None
if repository_name and namespace_name:
repo = model.repository.get_repository(namespace_name, repository_name)
performer = None
if performer_name:
performer = model.user.get_user(performer_name)
aggregated_logs = model.log.get_aggregated_logs(start_time, end_time, performer=performer,
repository=repo, namespace=namespace_name,
ignore=ignore)
return [AggregatedLogEntry(log.count, log.kind_id, log.day) for log in aggregated_logs]
pre_oci_model = PreOCIModel()

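A usage sketch against this implementation, mirroring what the refactored endpoints do (the namespace and repository names are illustrative, and an importable application context is assumed):

from datetime import datetime, timedelta
from endpoints.api.logs_models_pre_oci import pre_oci_model

end_time = datetime.today()
start_time = end_time - timedelta(7)  # the default one-week window

if pre_oci_model.repo_exists('devtable', 'simple'):
  page = pre_oci_model.get_logs_query(start_time, end_time, namespace_name='devtable',
                                      repository_name='simple')
  for entry in page.logs:
    # Plain LogEntry namedtuples -- no Peewee rows escape the model layer.
    print('%s %s' % (entry.kind_id, entry.ip))
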
endpoints/api/superuser.py

@@ -1,15 +1,17 @@
""" Superuser API. """
import json
import logging
import os
import string
import pathvalidate
-from datetime import datetime
+from datetime import datetime, timedelta
from random import SystemRandom
from dateutil.relativedelta import relativedelta
from flask import request, make_response, jsonify
from tzlocal import get_localzone
import features
@@ -22,10 +24,9 @@ from data.buildlogs import BuildStatusRetrievalError
from endpoints.api import (ApiResource, nickname, resource, validate_json_request,
internal_only, require_scope, show_if, parse_args,
query_param, abort, require_fresh_login, path_param, verify_not_prod,
-page_support, log_action, InvalidRequest)
+page_support, log_action, InvalidRequest, format_date)
from endpoints.api.build import build_status_view, get_logs_or_log_url
from endpoints.api.logs import get_logs, get_aggregate_logs
-from data import model
+from data import model, database
from data.database import ServiceKeyApprovalType
from endpoints.exception import NotFound
from util.useremails import send_confirmation_email, send_recovery_email
@@ -34,10 +35,36 @@ from util.security.ssl import load_certificate, CertInvalidException
from util.config.validator import EXTRA_CA_DIRECTORY
from _init import ROOT_DIR
logger = logging.getLogger(__name__)
def _validate_logs_arguments(start_time, end_time, performer_name):
performer = None
if performer_name:
performer = model.user.get_user(performer_name)
if start_time:
try:
start_time = datetime.strptime(start_time + ' UTC', '%m/%d/%Y %Z')
except ValueError:
start_time = None
if not start_time:
start_time = datetime.today() - timedelta(7) # One week
if end_time:
try:
end_time = datetime.strptime(end_time + ' UTC', '%m/%d/%Y %Z')
end_time = end_time + timedelta(days=1)
except ValueError:
end_time = None
if not end_time:
end_time = datetime.today()
return start_time, end_time, performer
def get_immediate_subdirectories(directory):
return [name for name in os.listdir(directory) if os.path.isdir(os.path.join(directory, name))]
@@ -53,6 +80,7 @@ def get_services():
@show_if(features.SUPER_USERS)
class SuperUserGetLogsForService(ApiResource):
""" Resource for fetching the kinds of system logs in the system. """
@require_fresh_login
@verify_not_prod
@nickname('getSystemLogs')
@@ -84,6 +112,7 @@ class SuperUserGetLogsForService(ApiResource):
@show_if(features.SUPER_USERS)
class SuperUserSystemLogServices(ApiResource):
""" Resource for fetching the kinds of system logs in the system. """
@require_fresh_login
@verify_not_prod
@nickname('listSystemLogServices')
@@ -98,10 +127,42 @@ class SuperUserSystemLogServices(ApiResource):
abort(403)
def aggregated_log_view(log, kinds, start_time):
# Because we aggregate based on the day of the month in SQL, we only have that information.
# Therefore, create a synthetic date based on the day and the month of the start time.
# Logs are allowed for a maximum period of one week, so this calculation should always work.
synthetic_date = datetime(start_time.year, start_time.month, int(log.day), tzinfo=get_localzone())
if synthetic_date.day < start_time.day:
synthetic_date = synthetic_date + relativedelta(months=1)
view = {
'kind': kinds[log.kind_id],
'count': log.count,
'datetime': format_date(synthetic_date),
}
return view
def get_aggregate_logs(start_time, end_time, performer_name=None, repository=None, namespace=None,
ignore=None):
(start_time, end_time, performer) = _validate_logs_arguments(start_time, end_time, performer_name)
kinds = model.log.get_log_entry_kinds()
aggregated_logs = model.log.get_aggregated_logs(start_time, end_time, performer=performer,
repository=repository, namespace=namespace,
ignore=ignore)
return {
'aggregated': [aggregated_log_view(log, kinds, start_time) for log in aggregated_logs]
}
@resource('/v1/superuser/aggregatelogs')
@internal_only
class SuperUserAggregateLogs(ApiResource):
""" Resource for fetching aggregated logs for the current user. """
@require_fresh_login
@verify_not_prod
@nickname('listAllAggregateLogs')
@@ -119,11 +180,68 @@ class SuperUserAggregateLogs(ApiResource):
abort(403)
LOGS_PER_PAGE = 20
def log_view(log, kinds, include_namespace):
view = {
'kind': kinds[log.kind_id],
'metadata': json.loads(log.metadata_json),
'ip': log.ip,
'datetime': format_date(log.datetime),
}
if log.performer and log.performer.username:
view['performer'] = {
'kind': 'user',
'name': log.performer.username,
'is_robot': log.performer.robot,
'avatar': avatar.get_data_for_user(log.performer),
}
if include_namespace:
if log.account and log.account.username:
if log.account.organization:
view['namespace'] = {
'kind': 'org',
'name': log.account.username,
'avatar': avatar.get_data_for_org(log.account),
}
else:
view['namespace'] = {
'kind': 'user',
'name': log.account.username,
'avatar': avatar.get_data_for_user(log.account),
}
return view
def get_logs(start_time, end_time, performer_name=None, repository=None, namespace=None,
page_token=None, ignore=None):
(start_time, end_time, performer) = _validate_logs_arguments(start_time, end_time, performer_name)
kinds = model.log.get_log_entry_kinds()
logs_query = model.log.get_logs_query(start_time, end_time, performer=performer,
repository=repository, namespace=namespace,
ignore=ignore)
logs, next_page_token = model.modelutil.paginate(logs_query, database.LogEntry, descending=True,
page_token=page_token, limit=LOGS_PER_PAGE)
include_namespace = namespace is None and repository is None
return {
'start_time': format_date(start_time),
'end_time': format_date(end_time),
'logs': [log_view(log, kinds, include_namespace) for log in logs],
}, next_page_token
@resource('/v1/superuser/logs')
@internal_only
@show_if(features.SUPER_USERS)
class SuperUserLogs(ApiResource):
""" Resource for fetching all logs in the system. """
@require_fresh_login
@verify_not_prod
@nickname('listAllLogs')
@@ -151,6 +269,7 @@ def org_view(org):
'avatar': avatar.get_data_for_org(org),
}
def user_view(user, password=None):
user_data = {
'kind': 'user',
@@ -168,11 +287,13 @@ def user_view(user, password=None):
return user_data
@resource('/v1/superuser/changelog/')
@internal_only
@show_if(features.SUPER_USERS)
class ChangeLog(ApiResource):
""" Resource for returning the change log for enterprise customers. """
@require_fresh_login
@verify_not_prod
@nickname('getChangeLog')
@@ -188,12 +309,12 @@ class ChangeLog(ApiResource):
abort(403)
@resource('/v1/superuser/organizations/')
@internal_only
@show_if(features.SUPER_USERS)
class SuperUserOrganizationList(ApiResource):
""" Resource for listing organizations in the system. """
@require_fresh_login
@verify_not_prod
@nickname('listAllOrganizations')
@@ -247,7 +368,6 @@ class SuperUserList(ApiResource):
abort(403)
@require_fresh_login
@verify_not_prod
@nickname('createInstallUser')
@@ -293,6 +413,7 @@ class SuperUserList(ApiResource):
@show_if(features.MAILING)
class SuperUserSendRecoveryEmail(ApiResource):
""" Resource for sending a recovery user on behalf of a user. """
@require_fresh_login
@verify_not_prod
@nickname('sendInstallUserRecoveryEmail')
@@ -439,6 +560,7 @@ class SuperUserManagement(ApiResource):
@show_if(features.SUPER_USERS)
class SuperUserTakeOwnership(ApiResource):
""" Resource for a superuser to take ownership of a namespace. """
@require_fresh_login
@verify_not_prod
@nickname('takeOwnership')
@@ -745,7 +867,6 @@ class SuperUserServiceKey(ApiResource):
log_action('service_key_extend', None, key_log_metadata)
model.service_keys.set_key_expiration(kid, expiration_date)
if 'name' in body or 'metadata' in body:
model.service_keys.update_service_key(kid, body.get('name'), body.get('metadata'))
log_action('service_key_modify', None, key_log_metadata)
@@ -837,6 +958,7 @@ class SuperUserServiceKeyApproval(ApiResource):
@show_if(features.SUPER_USERS)
class SuperUserCustomCertificates(ApiResource):
""" Resource for managing custom certificates. """
@nickname('getCustomCertificates')
@require_fresh_login
@require_scope(scopes.SUPERUSER)
@@ -885,6 +1007,7 @@ class SuperUserCustomCertificates(ApiResource):
@show_if(features.SUPER_USERS)
class SuperUserCustomCertificate(ApiResource):
""" Resource for managing a custom certificate. """
@nickname('uploadCustomCertificate')
@require_fresh_login
@require_scope(scopes.SUPERUSER)
@@ -901,7 +1024,7 @@ class SuperUserCustomCertificate(ApiResource):
abort(400)
logger.debug('Saving custom certificate %s', certpath)
cert_full_path = config_provider.get_volume_path(EXTRA_CA_DIRECTORY, certpath)
config_provider.save_volume_file(cert_full_path, uploaded_file)
logger.debug('Saved custom certificate %s', certpath)
@@ -1018,6 +1141,7 @@ class SuperUserLicense(ApiResource):
@show_if(features.SUPER_USERS)
class SuperUserRepositoryBuildLogs(ApiResource):
""" Resource for loading repository build logs for the superuser. """
@require_fresh_login
@verify_not_prod
@nickname('getRepoBuildLogsSuperUser')
@@ -1036,6 +1160,7 @@ class SuperUserRepositoryBuildLogs(ApiResource):
@show_if(features.SUPER_USERS)
class SuperUserRepositoryBuildStatus(ApiResource):
""" Resource for dealing with repository build status. """
@require_fresh_login
@verify_not_prod
@nickname('getRepoBuildStatusSuperUser')
@@ -1054,6 +1179,7 @@ class SuperUserRepositoryBuildStatus(ApiResource):
@show_if(features.SUPER_USERS)
class SuperUserRepositoryBuildResource(ApiResource):
""" Resource for dealing with repository builds as a super user. """
@require_fresh_login
@verify_not_prod
@nickname('getRepoBuildSuperUser')

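One detail in the _validate_logs_arguments copy above deserves a note: a user-supplied end date parses to midnight, so timedelta(days=1) is added to make the range cover the entire end day. A minimal illustration:

from datetime import datetime, timedelta

end_time = datetime.strptime('07/19/2017 UTC', '%m/%d/%Y %Z')  # midnight on 07/19
end_time = end_time + timedelta(days=1)

# Logs stamped at any point during 07/19 now fall inside the queried range.
assert end_time == datetime(2017, 7, 20)
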
endpoints/api/test/test_logs_models_pre_oci.py

@@ -0,0 +1,128 @@
import pytest
from mock import Mock
from data import model, database
from endpoints.api.logs_models_interface import LogEntry, LogEntryPage, AggregatedLogEntry
from endpoints.api.logs_models_pre_oci import pre_oci_model
from util.morecollections import AttrDict
def test_get_logs_query(monkeypatch):
get_repository_mock = Mock()
monkeypatch.setattr(model.repository, 'get_repository', get_repository_mock)
get_user_mock = Mock()
monkeypatch.setattr(model.user, 'get_user', get_user_mock)
get_logs_query_mock = Mock()
monkeypatch.setattr(model.log, 'get_logs_query', get_logs_query_mock)
paginate_mock = Mock()
paginate_mock.return_value = ([], {})
monkeypatch.setattr(model.modelutil, 'paginate', paginate_mock)
assert pre_oci_model.get_logs_query('start_time', 'end_time', 'performer_name', 'repository_name', 'namespace_name',
set(), 'page_token') == LogEntryPage([], {})
def test_get_logs_query_returns_list_log_entries(monkeypatch):
get_repository_mock = Mock()
monkeypatch.setattr(model.repository, 'get_repository', get_repository_mock)
get_user_mock = Mock()
monkeypatch.setattr(model.user, 'get_user', get_user_mock)
get_logs_query_mock = Mock()
monkeypatch.setattr(model.log, 'get_logs_query', get_logs_query_mock)
paginate_mock = Mock()
paginate_mock.return_value = ([AttrDict({'kind': 1, 'datetime': 'datetime', 'ip': 'ip', 'metadata_json': '{}',
'account': AttrDict(
{'username': 'account_username', 'email': 'account_email', 'robot': False,
'organization': False}),
'performer': AttrDict(
{'email': 'performer_email', 'username': 'performer_username',
'robot': False}), 'kind_id': 1})], {'key': 'value'})
monkeypatch.setattr(model.modelutil, 'paginate', paginate_mock)
assert pre_oci_model.get_logs_query('start_time', 'end_time', 'performer_username', 'repository_name',
'namespace_name',
set(), {'start_id': 1}) == LogEntryPage([
LogEntry('{}', 'ip', 'datetime', 'performer_email', 'performer_username', False,
False, 'account_username', 'account_email', False, 1)], {'key': 'value'})
def test_get_logs_query_calls_get_repository(monkeypatch):
repo_mock = Mock()
performer_mock = Mock()
query_mock = Mock()
get_repository_mock = Mock()
get_repository_mock.return_value = repo_mock
monkeypatch.setattr(model.repository, 'get_repository', get_repository_mock)
get_user_mock = Mock()
get_user_mock.return_value = performer_mock
monkeypatch.setattr(model.user, 'get_user', get_user_mock)
get_logs_query_mock = Mock()
get_logs_query_mock.return_value = query_mock
monkeypatch.setattr(model.log, 'get_logs_query', get_logs_query_mock)
paginate_mock = Mock()
page_token = {}
paginate_mock.return_value = ([], page_token)
monkeypatch.setattr(model.modelutil, 'paginate', paginate_mock)
ignore = set()
pre_oci_model.get_logs_query('start_time', 'end_time', 'performer_username', 'repository_name', 'namespace_name',
ignore, page_token)
get_repository_mock.assert_called_once_with('namespace_name', 'repository_name')
get_user_mock.assert_called_once_with('performer_username')
get_logs_query_mock.assert_called_once_with('start_time', 'end_time', performer=performer_mock, repository=repo_mock,
namespace='namespace_name', ignore=ignore)
paginate_mock.assert_called_once_with(query_mock, database.LogEntry, descending=True,
page_token=page_token, limit=20)
def test_get_log_entry_kinds(monkeypatch):
get_log_entry_kinds_mock = Mock()
monkeypatch.setattr(model.log, 'get_log_entry_kinds', get_log_entry_kinds_mock)
pre_oci_model.get_log_entry_kinds()
get_log_entry_kinds_mock.assert_called_once_with()
def test_does_repo_exist_returns_false(monkeypatch):
get_repository_mock = Mock()
get_repository_mock.return_value = None
monkeypatch.setattr(model.repository, 'get_repository', get_repository_mock)
assert pre_oci_model.repo_exists('namespace_name', 'repository_name') is False
def test_does_repo_exist_returns_true(monkeypatch):
get_repository_mock = Mock()
get_repository_mock.return_value = True
monkeypatch.setattr(model.repository, 'get_repository', get_repository_mock)
assert pre_oci_model.repo_exists('namespace_name', 'repository_name') is True
def test_get_aggregated_logs(monkeypatch):
get_aggregated_logs_mock = Mock()
get_aggregated_logs_mock.return_value = [AttrDict({'day': '1', 'kind_id': 4, 'count': 12})]
monkeypatch.setattr(model.log, 'get_aggregated_logs', get_aggregated_logs_mock)
repo_mock = Mock()
get_repository_mock = Mock()
get_repository_mock.return_value = repo_mock
monkeypatch.setattr(model.repository, 'get_repository', get_repository_mock)
performer_mock = Mock()
get_user_mock = Mock()
get_user_mock.return_value = performer_mock
monkeypatch.setattr(model.user, 'get_user', get_user_mock)
actual = pre_oci_model.get_aggregated_logs('start_time', 'end_time', 'performer_name', 'repository_name',
'namespace_name', set())
assert actual == [AggregatedLogEntry(12, 4, '1')]
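
Tying the last fixture back to the API payload: the AggregatedLogEntry(12, 4, '1') the test expects would render through to_dict roughly as follows (the kinds map and start time here are hypothetical):

from datetime import datetime
from endpoints.api.logs_models_interface import AggregatedLogEntry

entry = AggregatedLogEntry(count=12, kind_id=4, day='1')
kinds = {4: 'push_repo'}            # hypothetical id -> name mapping
start_time = datetime(2017, 7, 19)

view = entry.to_dict(kinds, start_time)
# Day 1 precedes day 19, so the synthetic date rolls into the next month:
# {'kind': 'push_repo', 'count': 12, 'datetime': <formatted 2017-08-01>}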