initial import for Open Source 🎉
This commit is contained in:
parent
1898c361f3
commit
9c0dd3b722
2048 changed files with 218743 additions and 0 deletions
448
endpoints/api/__init__.py
Normal file
448
endpoints/api/__init__.py
Normal file
|
@ -0,0 +1,448 @@
|
|||
import logging
|
||||
import datetime
|
||||
|
||||
from calendar import timegm
|
||||
from email.utils import formatdate
|
||||
from functools import partial, wraps
|
||||
|
||||
from flask import Blueprint, request, session
|
||||
from flask_restful import Resource, abort, Api, reqparse
|
||||
from flask_restful.utils.cors import crossdomain
|
||||
from jsonschema import validate, ValidationError
|
||||
|
||||
from app import app, metric_queue, authentication
|
||||
from auth.permissions import (ReadRepositoryPermission, ModifyRepositoryPermission,
|
||||
AdministerRepositoryPermission, UserReadPermission,
|
||||
UserAdminPermission)
|
||||
from auth import scopes
|
||||
from auth.auth_context import (get_authenticated_context, get_authenticated_user,
|
||||
get_validated_oauth_token)
|
||||
from auth.decorators import process_oauth
|
||||
from data import model as data_model
|
||||
from data.logs_model import logs_model
|
||||
from data.database import RepositoryState
|
||||
from endpoints.csrf import csrf_protect
|
||||
from endpoints.exception import (Unauthorized, InvalidRequest, InvalidResponse,
|
||||
FreshLoginRequired, NotFound)
|
||||
from endpoints.decorators import check_anon_protection, require_xhr_from_browser, check_readonly
|
||||
from util.metrics.metricqueue import time_decorator
|
||||
from util.names import parse_namespace_repository
|
||||
from util.pagination import encrypt_page_token, decrypt_page_token
|
||||
from util.request import get_request_ip
|
||||
from __init__models_pre_oci import pre_oci_model as model
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)

# Blueprint under which every API endpoint in this package is registered.
api_bp = Blueprint('api', __name__)


# Headers that cross-origin browser clients are allowed to send to the API.
CROSS_DOMAIN_HEADERS = ['Authorization', 'Content-Type', 'X-Requested-With']
|
||||
|
||||
class ApiExceptionHandlingApi(Api):
  """ flask-restful Api subclass whose error responses also carry the
      cross-domain headers, so browsers can read error bodies cross-origin. """

  @crossdomain(origin='*', headers=CROSS_DOMAIN_HEADERS)
  def handle_error(self, error):
    # Delegate to flask-restful's standard error handling; the decorator
    # attaches the CORS headers to whatever response it produces.
    return super(ApiExceptionHandlingApi, self).handle_error(error)
|
||||
|
||||
|
||||
# Wire the exception-handling API onto the blueprint and install the
# decorators applied to every registered resource (CSRF, CORS, OAuth
# processing, request timing metrics, and XHR origin checks).
api = ApiExceptionHandlingApi()
api.init_app(api_bp)
api.decorators = [csrf_protect(),
                  crossdomain(origin='*', headers=CROSS_DOMAIN_HEADERS),
                  process_oauth, time_decorator(api_bp.name, metric_queue),
                  require_xhr_from_browser]
|
||||
|
||||
|
||||
def resource(*urls, **kwargs):
  """ Class decorator that registers an ApiResource under the given URL(s)
      on the shared `api` object.

      A falsy input (e.g. None produced by @hide_if) is passed through
      unregistered; otherwise the class is marked `registered` so that
      show_if/hide_if can detect decorator-ordering mistakes.
  """
  def register(api_resource):
    if not api_resource:
      return None

    # Mark first so a later show_if/hide_if above @resource can complain.
    api_resource.registered = True
    api.add_resource(api_resource, *urls, **kwargs)
    return api_resource
  return register
|
||||
|
||||
|
||||
def show_if(value):
  """ Decorator factory that keeps the decorated endpoint only when `value`
      is truthy; otherwise the endpoint is replaced with None (hidden).

      Must be applied *below* @resource (i.e. before registration); applying
      it to an already-registered endpoint raises.
  """
  def decorator(endpoint):
    if getattr(endpoint, 'registered', False):
      raise Exception(('API endpoint %s is already registered; please switch the '
                       '@show_if to be *below* the @resource decorator') % endpoint)

    return endpoint if value else None
  return decorator
|
||||
|
||||
|
||||
def hide_if(value):
  """ Decorator factory that removes the decorated endpoint (returns None)
      when `value` is truthy; the inverse of show_if.

      Must be applied *below* @resource (i.e. before registration); applying
      it to an already-registered endpoint raises.
  """
  def decorator(endpoint):
    if getattr(endpoint, 'registered', False):
      raise Exception(('API endpoint %s is already registered; please switch the '
                       '@hide_if to be *below* the @resource decorator') % endpoint)

    return None if value else endpoint
  return decorator
|
||||
|
||||
|
||||
def truthy_bool(param):
  """ Interpret a query-string value as a boolean: anything counts as true
      except a small set of well-known falsy spellings. """
  falsy_values = {False, 'false', 'False', '0', 'FALSE', '', 'null'}
  return param not in falsy_values
|
||||
|
||||
|
||||
def format_date(date):
  """ Output an RFC822 date format.

      The datetime is interpreted as UTC; None passes through as None.
  """
  return None if date is None else formatdate(timegm(date.utctimetuple()))
|
||||
|
||||
|
||||
def add_method_metadata(name, value):
  """ Decorator factory that records `value` under key `name` in the
      function's __api_metadata dict, creating the dict on first use.

      A None function is passed through as None so this composes with
      show_if/hide_if, which may have removed the endpoint.
  """
  def decorator(func):
    if func is None:
      return None

    metadata = getattr(func, '__api_metadata', None)
    if metadata is None:
      metadata = {}
      func.__api_metadata = metadata
    metadata[name] = value
    return func
  return decorator
|
||||
|
||||
|
||||
def method_metadata(func, name):
  """ Look up a metadata value previously attached via add_method_metadata.

      Returns None when the function is None, has no metadata, or lacks the
      requested key.
  """
  if func is None:
    return None

  metadata = getattr(func, '__api_metadata', None)
  return None if metadata is None else metadata.get(name, None)
|
||||
|
||||
|
||||
|
||||
# Common metadata decorators built on add_method_metadata:
#   @nickname('...')         — names the endpoint in its metadata.
#   @related_user_resource(R) — links a resource to its user-level twin R.
#   @internal_only           — marks the endpoint as internal (metadata flag;
#                              presumably consumed by API discovery — verify).
nickname = partial(add_method_metadata, 'nickname')
related_user_resource = partial(add_method_metadata, 'related_user_resource')
internal_only = add_method_metadata('internal', True)
|
||||
|
||||
|
||||
def path_param(name, description):
  """ Decorator factory documenting a URL path parameter on the endpoint.

      The spec is stored in the function's __api_path_params dict keyed by
      parameter name. Falsy endpoints (removed by show_if/hide_if) pass
      through unchanged.
  """
  def decorator(func):
    if not func:
      return func

    params = getattr(func, '__api_path_params', None)
    if params is None:
      params = {}
      func.__api_path_params = params
    params[name] = {'name': name, 'description': description}
    return func
  return decorator
|
||||
|
||||
|
||||
def query_param(name, help_str, type=reqparse.text_type, default=None,
                choices=(), required=False):
  """ Decorator that declares a query-string parameter for the endpoint.

      The spec is appended to the function's __api_query_params list; the
      @parse_args decorator later feeds these specs into a reqparse parser.

      Args:
        name: query parameter name.
        help_str: help text shown on parse failure.
        type: callable used to coerce the raw value (defaults to text).
        default: value used when the parameter is absent.
        choices: allowed values, when restricted.
        required: whether the parameter must be present.
  """
  def add_param(func):
    # Specs accumulate on the function so multiple @query_param decorators stack.
    if '__api_query_params' not in dir(func):
      func.__api_query_params = []
    func.__api_query_params.append({
      'name': name,
      'type': type,
      'help': help_str,
      'default': default,
      'choices': choices,
      'required': required,
      'location': ('args')
    })
    return func
  return add_param
|
||||
|
||||
def page_support(page_token_kwarg='page_token', parsed_args_kwarg='parsed_args'):
  def inner(func):
    """ Adds pagination support to an API endpoint. The decorated API will have an
        added query parameter named 'next_page'. Works in tandem with the
        modelutil paginate method.
    """
    @wraps(func)
    @query_param('next_page', 'The page token for the next page', type=str)
    def wrapper(self, *args, **kwargs):
      # Note: if page_token is None, we'll receive the first page of results back.
      page_token = decrypt_page_token(kwargs[parsed_args_kwarg]['next_page'])
      kwargs[page_token_kwarg] = page_token

      # The endpoint returns (result dict, opaque next-page token or None).
      (result, next_page_token) = func(self, *args, **kwargs)
      if next_page_token is not None:
        # Encrypt the token so clients cannot forge arbitrary page offsets.
        result['next_page'] = encrypt_page_token(next_page_token)

      return result
    return wrapper
  return inner
|
||||
|
||||
def parse_args(kwarg_name='parsed_args'):
  """ Parses the query parameters declared via @query_param and injects the
      parsed dict into the endpoint under `kwarg_name`.

      Aborts 500 if the endpoint has no @query_param declarations (a
      programming error, not a client error).
  """
  def inner(func):
    @wraps(func)
    def wrapper(self, *args, **kwargs):
      if '__api_query_params' not in dir(func):
        abort(500)

      # Build a fresh parser from the accumulated @query_param specs.
      parser = reqparse.RequestParser()
      for arg_spec in func.__api_query_params:
        parser.add_argument(**arg_spec)
      kwargs[kwarg_name] = parser.parse_args()

      return func(self, *args, **kwargs)
    return wrapper
  return inner
|
||||
|
||||
def parse_repository_name(func):
  """ Decorator that splits a `repository` path argument into
      (namespace, repository), defaulting the namespace to the configured
      LIBRARY_NAMESPACE when none is present in the path. """
  @wraps(func)
  def wrapper(repository, *args, **kwargs):
    namespace, repo_name = parse_namespace_repository(repository,
                                                     app.config['LIBRARY_NAMESPACE'])
    return func(namespace, repo_name, *args, **kwargs)
  return wrapper
|
||||
|
||||
|
||||
class ApiResource(Resource):
  """ Base class for all API resources. Applies anonymous-access protection
      and registry read-only checks to every handler method. """
  # Set to True by the @resource decorator once registered; used by
  # show_if/hide_if to detect decorator-ordering mistakes.
  registered = False
  method_decorators = [check_anon_protection, check_readonly]

  def options(self):
    """ Respond to HTTP OPTIONS with an empty 200 (e.g. CORS preflight). """
    return None, 200
|
||||
|
||||
|
||||
class RepositoryParamResource(ApiResource):
  """ Base class for resources addressed by a repository path parameter; the
      `repository` argument is split into (namespace, repository) before the
      handler runs. """
  method_decorators = [check_anon_protection, parse_repository_name, check_readonly]
|
||||
|
||||
|
||||
def disallow_for_app_repositories(func):
  """ Aborts with a 501 when the target repository is an application
      repository; regular repositories proceed to the endpoint unchanged. """
  @wraps(func)
  def wrapped(self, namespace_name, repository_name, *args, **kwargs):
    # Application repositories do not support this operation.
    if not model.is_app_repository(namespace_name, repository_name):
      return func(self, namespace_name, repository_name, *args, **kwargs)

    abort(501)

  return wrapped
|
||||
|
||||
|
||||
def disallow_for_non_normal_repositories(func):
  """ Aborts with a 503 when the repository exists but is not in the NORMAL
      state (e.g. read-only or mirror); otherwise runs the endpoint. """
  @wraps(func)
  def wrapped(self, namespace_name, repository_name, *args, **kwargs):
    repo = data_model.repository.get_repository(namespace_name, repository_name)
    if repo:
      if repo.state != RepositoryState.NORMAL:
        abort(503, message='Repository is in read only or mirror mode: %s' % repo.state)

    return func(self, namespace_name, repository_name, *args, **kwargs)
  return wrapped
|
||||
|
||||
|
||||
def require_repo_permission(permission_class, scope, allow_public=False):
  """ Decorator factory enforcing a repository permission on an endpoint.

      Args:
        permission_class: permission type instantiated with (namespace, repository).
        scope: OAuth2 scope recorded in the endpoint metadata.
        allow_public: when True, access is also granted if the repository
          is public.

      Raises:
        Unauthorized: when neither the permission nor the public fallback
          grants access.
  """
  def wrapper(func):
    @add_method_metadata('oauth2_scope', scope)
    @wraps(func)
    def wrapped(self, namespace, repository, *args, **kwargs):
      logger.debug('Checking permission %s for repo: %s/%s', permission_class, namespace,
                   repository)
      permission = permission_class(namespace, repository)
      if (permission.can() or
          (allow_public and
           model.repository_is_public(namespace, repository))):
        return func(self, namespace, repository, *args, **kwargs)
      raise Unauthorized()
    return wrapped
  return wrapper
|
||||
|
||||
|
||||
# Repository permission decorators; read access additionally allows public
# repositories (allow_public=True).
require_repo_read = require_repo_permission(ReadRepositoryPermission, scopes.READ_REPO, True)
require_repo_write = require_repo_permission(ModifyRepositoryPermission, scopes.WRITE_REPO)
require_repo_admin = require_repo_permission(AdministerRepositoryPermission, scopes.ADMIN_REPO)
|
||||
|
||||
|
||||
def require_user_permission(permission_class, scope=None):
  """ Decorator factory enforcing a user-scoped permission on an endpoint.

      Args:
        permission_class: permission type instantiated with the username.
        scope: optional OAuth2 scope recorded in the endpoint metadata.

      Raises:
        Unauthorized: when no user is authenticated or the permission check
          fails.
  """
  def wrapper(func):
    @add_method_metadata('oauth2_scope', scope)
    @wraps(func)
    def wrapped(self, *args, **kwargs):
      user = get_authenticated_user()
      if not user:
        raise Unauthorized()

      logger.debug('Checking permission %s for user %s', permission_class, user.username)
      permission = permission_class(user.username)
      if permission.can():
        return func(self, *args, **kwargs)
      raise Unauthorized()
    return wrapped
  return wrapper
|
||||
|
||||
|
||||
# User-scoped permission decorators for read and admin access.
require_user_read = require_user_permission(UserReadPermission, scopes.READ_USER)
require_user_admin = require_user_permission(UserAdminPermission, scopes.ADMIN_USER)
|
||||
|
||||
|
||||
def verify_not_prod(func):
  """ Marks the endpoint as enterprise-only and refuses to run it on the
      hosted production stack (a hostname containing 'quay.io').

      Raises:
        NotFound: when invoked on the production hostname.
  """
  @add_method_metadata('enterprise_only', True)
  @wraps(func)
  def wrapped(*args, **kwargs):
    # Verify that we are not running on a production (i.e. hosted) stack. If so, we fail.
    # This should never happen (because of the feature-flag on SUPER_USERS), but we want to be
    # absolutely sure.
    # Idiomatic substring test instead of str.find(...) >= 0.
    if 'quay.io' in app.config['SERVER_HOSTNAME']:
      logger.error('!!! Super user method called IN PRODUCTION !!!')
      raise NotFound()

    return func(*args, **kwargs)

  return wrapped
|
||||
|
||||
|
||||
def require_fresh_login(func):
  """ Requires that the session's recorded login happened within the last
      10 minutes before allowing the wrapped endpoint to run.

      Exemptions: OAuth-token requests, users without a password hash, and
      auth engines that do not support fresh login.

      Raises:
        Unauthorized: when no user is authenticated.
        FreshLoginRequired: when a fresh password entry is needed.
  """
  @add_method_metadata('requires_fresh_login', True)
  @wraps(func)
  def wrapped(*args, **kwargs):
    user = get_authenticated_user()
    if not user:
      raise Unauthorized()

    # OAuth-authenticated calls cannot re-prompt for a password; allow them.
    if get_validated_oauth_token():
      return func(*args, **kwargs)

    logger.debug('Checking fresh login for user %s', user.username)

    # Default to datetime.min so a session without a recorded login time
    # compares as stale.
    last_login = session.get('login_time', datetime.datetime.min)
    valid_span = datetime.datetime.now() - datetime.timedelta(minutes=10)

    # NOTE: the condition reads `last_login >= valid_span` for the allow
    # path, i.e. the login must be newer than the 10-minute cutoff.
    if (not user.password_hash or last_login >= valid_span or
        not authentication.supports_fresh_login):
      return func(*args, **kwargs)

    raise FreshLoginRequired()
  return wrapped
|
||||
|
||||
|
||||
def require_scope(scope_object):
  """ Records the OAuth2 scope required by the endpoint in its metadata.

      The wrapper itself performs no check; presumably the OAuth processing
      layer reads the recorded metadata to enforce the scope — verify.
  """
  def wrapper(func):
    @add_method_metadata('oauth2_scope', scope_object)
    @wraps(func)
    def wrapped(*args, **kwargs):
      return func(*args, **kwargs)
    return wrapped
  return wrapper
|
||||
|
||||
|
||||
def max_json_size(max_size):
  """ Rejects JSON request bodies larger than `max_size` bytes by raising
      InvalidRequest before the endpoint runs. """
  def wrapper(func):
    @wraps(func)
    def wrapped(self, *args, **kwargs):
      oversized = request.is_json and len(request.get_data()) > max_size
      if oversized:
        raise InvalidRequest()

      return func(self, *args, **kwargs)
    return wrapped
  return wrapper
|
||||
|
||||
|
||||
def validate_json_request(schema_name, optional=False):
  """ Validates the request's JSON body against the named schema from the
      resource's `schemas` dict before invoking the endpoint.

      Args:
        schema_name: key into self.schemas for the JSON schema to apply.
        optional: when True, a missing body is allowed (and not validated).

      Raises:
        InvalidRequest: when the body is missing (and required) or fails
          schema validation.
  """
  def wrapper(func):
    @add_method_metadata('request_schema', schema_name)
    @wraps(func)
    def wrapped(self, *args, **kwargs):
      schema = self.schemas[schema_name]
      try:
        json_data = request.get_json()
        if json_data is None:
          if not optional:
            raise InvalidRequest('Missing JSON body')
        else:
          validate(json_data, schema)
        return func(self, *args, **kwargs)
      except ValidationError as ex:
        # Surface schema errors as client errors with the validator's text.
        raise InvalidRequest(str(ex))
    return wrapped
  return wrapper
|
||||
|
||||
|
||||
def request_error(exception=None, **kwargs):
  """ Raise an InvalidRequest carrying the given keyword data.

      The message comes from, in priority order: kwargs['message'], the
      exception's text, or a generic default.
  """
  data = dict(kwargs)
  fallback = str(exception) if exception else 'Request error.'
  message = data.pop('message', fallback)
  raise InvalidRequest(message, data)
|
||||
|
||||
|
||||
def log_action(kind, user_or_orgname, metadata=None, repo=None, repo_name=None):
  """ Records an audit-log entry of the given kind under the user/org.

      Args:
        kind: log entry kind name.
        user_or_orgname: namespace the entry is recorded against.
        metadata: optional dict of extra data; OAuth token details are
          added automatically for token-authenticated requests.
        repo: repository object the action relates to, if any.
        repo_name: repository name; when given, it is looked up under
          user_or_orgname and overrides `repo`.
  """
  if not metadata:
    metadata = {}

  # Attribute token-authenticated actions to the OAuth application.
  oauth_token = get_validated_oauth_token()
  if oauth_token:
    metadata['oauth_token_id'] = oauth_token.id
    metadata['oauth_token_application_id'] = oauth_token.application.client_id
    metadata['oauth_token_application'] = oauth_token.application.name

  performer = get_authenticated_user()

  if repo_name is not None:
    repo = data_model.repository.get_repository(user_or_orgname, repo_name)

  logs_model.log_action(kind, user_or_orgname,
                        repository=repo,
                        performer=performer,
                        ip=get_request_ip(),
                        metadata=metadata)
|
||||
|
||||
|
||||
def define_json_response(schema_name):
  """ Declares the response schema for the endpoint; under TESTING config
      the actual response is validated against it.

      Raises:
        InvalidResponse: when validation fails (TESTING only).
  """
  def wrapper(func):
    @add_method_metadata('response_schema', schema_name)
    @wraps(func)
    def wrapped(self, *args, **kwargs):
      schema = self.schemas[schema_name]
      resp = func(self, *args, **kwargs)

      # Validation only runs in tests to avoid production overhead.
      if app.config['TESTING']:
        try:
          validate(resp, schema)
        except ValidationError as ex:
          raise InvalidResponse(str(ex))

      return resp
    return wrapped
  return wrapper
|
||||
|
||||
|
||||
import endpoints.api.appspecifictokens
|
||||
import endpoints.api.billing
|
||||
import endpoints.api.build
|
||||
import endpoints.api.discovery
|
||||
import endpoints.api.error
|
||||
import endpoints.api.globalmessages
|
||||
import endpoints.api.image
|
||||
import endpoints.api.logs
|
||||
import endpoints.api.manifest
|
||||
import endpoints.api.organization
|
||||
import endpoints.api.permission
|
||||
import endpoints.api.prototype
|
||||
import endpoints.api.repository
|
||||
import endpoints.api.repositorynotification
|
||||
import endpoints.api.repoemail
|
||||
import endpoints.api.repotoken
|
||||
import endpoints.api.robot
|
||||
import endpoints.api.search
|
||||
import endpoints.api.suconfig
|
||||
import endpoints.api.superuser
|
||||
import endpoints.api.tag
|
||||
import endpoints.api.team
|
||||
import endpoints.api.trigger
|
||||
import endpoints.api.user
|
||||
import endpoints.api.secscan
|
||||
import endpoints.api.signing
|
||||
import endpoints.api.mirror
|
54
endpoints/api/__init__models_interface.py
Normal file
54
endpoints/api/__init__models_interface.py
Normal file
|
@ -0,0 +1,54 @@
|
|||
from abc import ABCMeta, abstractmethod
|
||||
|
||||
from six import add_metaclass
|
||||
|
||||
|
||||
@add_metaclass(ABCMeta)
class InitDataInterface(object):
  """
  Interface that represents all data store interactions required by __init__.
  """

  @abstractmethod
  def is_app_repository(self, namespace_name, repository_name):
    """
    Returns whether the given repository is an application repository.

    Args:
      namespace_name: namespace or user
      repository_name: repository

    Returns:
      Boolean
    """
    pass

  @abstractmethod
  def repository_is_public(self, namespace_name, repository_name):
    """
    Returns whether the given repository is publicly visible.

    Args:
      namespace_name: namespace or user
      repository_name: repository

    Returns:
      Boolean
    """
    pass

  @abstractmethod
  def log_action(self, kind, namespace_name, repository_name, performer, ip, metadata):
    """
    Logs an action of the given kind against the repository.

    Args:
      kind: type of log
      namespace_name: name of user or organization the action applies to
      repository_name: repository the action is related to
      performer: user doing the action
      ip: originating ip
      metadata: metadata

    Returns:
      None
    """
    pass
|
||||
|
19
endpoints/api/__init__models_pre_oci.py
Normal file
19
endpoints/api/__init__models_pre_oci.py
Normal file
|
@ -0,0 +1,19 @@
|
|||
from __init__models_interface import InitDataInterface
|
||||
|
||||
from data import model
|
||||
from data.logs_model import logs_model
|
||||
|
||||
class PreOCIModel(InitDataInterface):
  """ Pre-OCI implementation of InitDataInterface, backed by the data.model
      package and the logs model. """

  def is_app_repository(self, namespace_name, repository_name):
    # A repository is an application repository when a lookup restricted to
    # kind 'application' finds it.
    return model.repository.get_repository(namespace_name, repository_name,
                                           kind_filter='application') is not None

  def repository_is_public(self, namespace_name, repository_name):
    return model.repository.repository_is_public(namespace_name, repository_name)

  def log_action(self, kind, namespace_name, repository_name, performer, ip, metadata):
    repository = model.repository.get_repository(namespace_name, repository_name)
    logs_model.log_action(kind, namespace_name, performer=performer, ip=ip, metadata=metadata,
                          repository=repository)


# Singleton used by endpoints.api.__init__.
pre_oci_model = PreOCIModel()
|
133
endpoints/api/appspecifictokens.py
Normal file
133
endpoints/api/appspecifictokens.py
Normal file
|
@ -0,0 +1,133 @@
|
|||
""" Manages app specific tokens for the current user. """
|
||||
|
||||
import logging
|
||||
import math
|
||||
|
||||
from datetime import timedelta
|
||||
from flask import request
|
||||
|
||||
import features
|
||||
|
||||
from app import app
|
||||
from auth.auth_context import get_authenticated_user
|
||||
from data import model
|
||||
from endpoints.api import (ApiResource, nickname, resource, validate_json_request,
|
||||
log_action, require_user_admin, require_fresh_login,
|
||||
path_param, NotFound, format_date, show_if, query_param, parse_args,
|
||||
truthy_bool)
|
||||
from util.timedeltastring import convert_to_timedelta
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def token_view(token, include_code=False):
  """ Returns the API representation of an app specific token.

      The secret token code is included only when `include_code` is True
      (creation and single-token lookups); list views omit it.
  """
  view = {
    'uuid': token.uuid,
    'title': token.title,
    'last_accessed': format_date(token.last_accessed),
    'created': format_date(token.created),
    'expiration': format_date(token.expiration),
  }

  if include_code:
    view['token_code'] = model.appspecifictoken.get_full_token_string(token)

  return view
|
||||
|
||||
|
||||
# The default window to use when looking up tokens that will be expiring.
# A timedelta string parsed by util.timedeltastring ('4w' = four weeks).
_DEFAULT_TOKEN_EXPIRATION_WINDOW = '4w'
|
||||
|
||||
|
||||
@resource('/v1/user/apptoken')
@show_if(features.APP_SPECIFIC_TOKENS)
class AppTokens(ApiResource):
  """ Lists all app specific tokens for a user """
  schemas = {
    'NewToken': {
      'type': 'object',
      'required': [
        'title',
      ],
      'properties': {
        'title': {
          'type': 'string',
          'description': 'The user-defined title for the token',
        },
      }
    },
  }

  @require_user_admin
  @nickname('listAppTokens')
  @parse_args()
  @query_param('expiring', 'If true, only returns those tokens expiring soon', type=truthy_bool)
  def get(self, parsed_args):
    """ Lists the app specific tokens for the user. """
    expiring = parsed_args['expiring']
    if expiring:
      # "Expiring soon" means within 10% of the configured token lifetime
      # (or of the default window), with a floor of one second.
      expiration = app.config.get('APP_SPECIFIC_TOKEN_EXPIRATION')
      token_expiration = convert_to_timedelta(expiration or _DEFAULT_TOKEN_EXPIRATION_WINDOW)
      seconds = math.ceil(token_expiration.total_seconds() * 0.1) or 1
      soon = timedelta(seconds=seconds)
      tokens = model.appspecifictoken.get_expiring_tokens(get_authenticated_user(), soon)
    else:
      tokens = model.appspecifictoken.list_tokens(get_authenticated_user())

    return {
      # Token codes are never included in list responses.
      'tokens': [token_view(token, include_code=False) for token in tokens],
      'only_expiring': expiring,
    }

  @require_user_admin
  @require_fresh_login
  @nickname('createAppToken')
  @validate_json_request('NewToken')
  def post(self):
    """ Create a new app specific token for user. """
    title = request.get_json()['title']
    token = model.appspecifictoken.create_token(get_authenticated_user(), title)

    log_action('create_app_specific_token', get_authenticated_user().username,
               {'app_specific_token_title': token.title,
                'app_specific_token': token.uuid})

    return {
      # The full token code is returned at creation time.
      'token': token_view(token, include_code=True),
    }
|
||||
|
||||
|
||||
@resource('/v1/user/apptoken/<token_uuid>')
@show_if(features.APP_SPECIFIC_TOKENS)
@path_param('token_uuid', 'The uuid of the app specific token')
class AppToken(ApiResource):
  """ Provides operations on an app specific token """

  @require_user_admin
  @require_fresh_login
  @nickname('getAppToken')
  def get(self, token_uuid):
    """ Returns a specific app token for the user. """
    # Lookup is scoped to the authenticated user, so tokens cannot be read
    # cross-user.
    token = model.appspecifictoken.get_token_by_uuid(token_uuid, owner=get_authenticated_user())
    if token is None:
      raise NotFound()

    return {
      'token': token_view(token, include_code=True),
    }

  @require_user_admin
  @require_fresh_login
  @nickname('revokeAppToken')
  def delete(self, token_uuid):
    """ Revokes a specific app token for the user. """
    token = model.appspecifictoken.revoke_token_by_uuid(token_uuid, owner=get_authenticated_user())
    if token is None:
      raise NotFound()

    log_action('revoke_app_specific_token', get_authenticated_user().username,
               {'app_specific_token_title': token.title,
                'app_specific_token': token.uuid})

    return '', 204
|
607
endpoints/api/billing.py
Normal file
607
endpoints/api/billing.py
Normal file
|
@ -0,0 +1,607 @@
|
|||
""" Billing information, subscriptions, and plan information. """
|
||||
|
||||
import stripe
|
||||
|
||||
from flask import request
|
||||
from app import billing
|
||||
from endpoints.api import (resource, nickname, ApiResource, validate_json_request, log_action,
|
||||
related_user_resource, internal_only, require_user_admin, show_if,
|
||||
path_param, require_scope, abort)
|
||||
from endpoints.exception import Unauthorized, NotFound
|
||||
from endpoints.api.subscribe import subscribe, subscription_view
|
||||
from auth.permissions import AdministerOrganizationPermission
|
||||
from auth.auth_context import get_authenticated_user
|
||||
from auth import scopes
|
||||
from data import model
|
||||
from data.billing import PLANS, get_plan
|
||||
|
||||
import features
|
||||
import uuid
|
||||
import json
|
||||
|
||||
def get_namespace_plan(namespace):
  """ Returns the plan of the given namespace.

      Returns None when the namespace does not exist, has no Stripe
      customer, or has no active subscription. Aborts 503 when Stripe is
      unreachable.
  """
  namespace_user = model.user.get_namespace_user(namespace)
  if namespace_user is None:
    return None

  if not namespace_user.stripe_id:
    return None

  # Ask Stripe for the subscribed plan.
  # TODO: Can we cache this or make it faster somehow?
  try:
    cus = billing.Customer.retrieve(namespace_user.stripe_id)
  except stripe.error.APIConnectionError:
    abort(503, message='Cannot contact Stripe')

  if not cus.subscription:
    return None

  return get_plan(cus.subscription.plan.id)
|
||||
|
||||
|
||||
def lookup_allowed_private_repos(namespace):
  """ Returns false if the given namespace has used its allotment of private repositories. """
  current_plan = get_namespace_plan(namespace)
  if current_plan is None:
    # No subscribed plan (or unknown namespace): no private repos allowed.
    return False

  # Find the number of private repositories used by the namespace and compare it to the
  # plan subscribed.
  private_repos = model.user.get_private_repo_count(namespace)

  return private_repos < current_plan['privateRepos']
|
||||
|
||||
|
||||
def carderror_response(e):
  """ Build the standard 402 (payment required) payload for a card error. """
  body = {'carderror': str(e)}
  return body, 402
|
||||
|
||||
|
||||
def get_card(user):
  """ Returns {'card': info} describing the user's default Stripe card.

      `info` is {'is_valid': False} when the user has no Stripe customer or
      no usable default card. Aborts 503 when Stripe is unreachable.
  """
  card_info = {
    'is_valid': False
  }

  if user.stripe_id:
    try:
      cus = billing.Customer.retrieve(user.stripe_id)
    except stripe.error.APIConnectionError as e:
      abort(503, message='Cannot contact Stripe')

    if cus and cus.default_card:
      # Find the default card.
      default_card = None
      for card in cus.cards.data:
        if card.id == cus.default_card:
          default_card = card
          break

      if default_card:
        card_info = {
          'owner': default_card.name,
          'type': default_card.type,
          'last4': default_card.last4,
          'exp_month': default_card.exp_month,
          'exp_year': default_card.exp_year
        }

  return {'card': card_info}
|
||||
|
||||
|
||||
def set_card(user, token):
  """ Replaces the user's Stripe card with the one identified by `token` and
      returns the updated card view.

      Card/API errors while saving are returned as a 402 carderror payload;
      an unreachable Stripe during retrieval aborts 503. Users without a
      Stripe customer fall through to get_card unchanged.
  """
  if user.stripe_id:
    try:
      cus = billing.Customer.retrieve(user.stripe_id)
    except stripe.error.APIConnectionError as e:
      abort(503, message='Cannot contact Stripe')

    if cus:
      try:
        cus.card = token
        cus.save()
      except stripe.error.CardError as exc:
        return carderror_response(exc)
      except stripe.error.InvalidRequestError as exc:
        return carderror_response(exc)
      except stripe.error.APIConnectionError as e:
        return carderror_response(e)

  return get_card(user)
|
||||
|
||||
|
||||
def get_invoices(customer_id):
  """ Returns the last 12 invoices for the Stripe customer in API view form.
      Aborts 503 when Stripe is unreachable.
  """
  def invoice_view(i):
    return {
      'id': i.id,
      'date': i.date,
      'period_start': i.period_start,
      'period_end': i.period_end,
      'paid': i.paid,
      'amount_due': i.amount_due,
      'next_payment_attempt': i.next_payment_attempt,
      'attempted': i.attempted,
      'closed': i.closed,
      'total': i.total,
      # Reads the plan off the first line item; assumes single-plan
      # invoices — TODO confirm.
      'plan': i.lines.data[0].plan.id if i.lines.data[0].plan else None
    }

  try:
    invoices = billing.Invoice.list(customer=customer_id, count=12)
  except stripe.error.APIConnectionError as e:
    abort(503, message='Cannot contact Stripe')

  return {
    'invoices': [invoice_view(i) for i in invoices.data]
  }
|
||||
|
||||
|
||||
def get_invoice_fields(user):
  """ Returns (invoice_fields, customer) for the user.

      The custom invoice fields are stored as a JSON list under the
      customer's 'invoice_fields' metadata key; missing/empty metadata
      yields an empty list. Aborts 503 when Stripe is unreachable.
  """
  try:
    cus = billing.Customer.retrieve(user.stripe_id)
  except stripe.error.APIConnectionError:
    abort(503, message='Cannot contact Stripe')

  # Idiomatic membership test (was `not 'metadata' in cus`).
  if 'metadata' not in cus:
    cus.metadata = {}

  return json.loads(cus.metadata.get('invoice_fields') or '[]'), cus
|
||||
|
||||
|
||||
def create_billing_invoice_field(user, title, value):
  """ Adds a custom invoice field (title/value pair with a short random
      uuid) to the user's Stripe customer metadata and returns it. """
  new_field = {
    # Short id: the first group of a uuid4.
    'uuid': str(uuid.uuid4()).split('-')[0],
    'title': title,
    'value': value
  }

  invoice_fields, cus = get_invoice_fields(user)
  invoice_fields.append(new_field)

  # Idiomatic membership test (was `not 'metadata' in cus`).
  if 'metadata' not in cus:
    cus.metadata = {}

  cus.metadata['invoice_fields'] = json.dumps(invoice_fields)
  cus.save()
  return new_field
|
||||
|
||||
|
||||
def delete_billing_invoice_field(user, field_uuid):
  """ Removes the custom invoice field with the given uuid from the user's
      Stripe customer metadata. Always returns True. """
  invoice_fields, cus = get_invoice_fields(user)
  # Direct inequality (was `not field['uuid'] == field_uuid`).
  invoice_fields = [field for field in invoice_fields if field['uuid'] != field_uuid]

  # Idiomatic membership test (was `not 'metadata' in cus`).
  if 'metadata' not in cus:
    cus.metadata = {}

  cus.metadata['invoice_fields'] = json.dumps(invoice_fields)
  cus.save()
  return True
|
||||
|
||||
|
||||
@resource('/v1/plans/')
@show_if(features.BILLING)
class ListPlans(ApiResource):
  """ Resource for listing the available plans. """

  @nickname('listPlans')
  def get(self):
    """ List the available plans. """
    return {
      'plans': PLANS,
    }
|
||||
|
||||
|
||||
@resource('/v1/user/card')
@internal_only
@show_if(features.BILLING)
class UserCard(ApiResource):
  """ Resource for managing a user's credit card. """
  schemas = {
    'UserCard': {
      'id': 'UserCard',
      'type': 'object',
      'description': 'Description of a user card',
      'required': [
        'token',
      ],
      'properties': {
        'token': {
          'type': 'string',
          'description': 'Stripe token that is generated by stripe checkout.js',
        },
      },
    },
  }

  @require_user_admin
  @nickname('getUserCard')
  def get(self):
    """ Get the user's credit card. """
    user = get_authenticated_user()
    return get_card(user)

  @require_user_admin
  @nickname('setUserCard')
  @validate_json_request('UserCard')
  def post(self):
    """ Update the user's credit card. """
    user = get_authenticated_user()
    token = request.get_json()['token']
    response = set_card(user, token)
    # Record the card change in the audit log.
    log_action('account_change_cc', user.username)
    return response
|
||||
|
||||
|
||||
@resource('/v1/organization/<orgname>/card')
@path_param('orgname', 'The name of the organization')
@internal_only
@related_user_resource(UserCard)
@show_if(features.BILLING)
class OrganizationCard(ApiResource):
  """ Resource for managing an organization's credit card. """
  schemas = {
    'OrgCard': {
      'id': 'OrgCard',
      'type': 'object',
      'description': 'Description of a user card',
      'required': [
        'token',
      ],
      'properties': {
        'token': {
          'type': 'string',
          'description': 'Stripe token that is generated by stripe checkout.js',
        },
      },
    },
  }

  @require_scope(scopes.ORG_ADMIN)
  @nickname('getOrgCard')
  def get(self, orgname):
    """ Get the organization's credit card. """
    permission = AdministerOrganizationPermission(orgname)
    if permission.can():
      organization = model.organization.get_organization(orgname)
      return get_card(organization)

    raise Unauthorized()

  @nickname('setOrgCard')
  @validate_json_request('OrgCard')
  def post(self, orgname):
    """ Update the organization's credit card. """
    # NOTE(review): unlike get(), this method has no @require_scope
    # decorator; access is gated only by the permission check below —
    # confirm this asymmetry is intended.
    permission = AdministerOrganizationPermission(orgname)
    if permission.can():
      organization = model.organization.get_organization(orgname)
      token = request.get_json()['token']
      response = set_card(organization, token)
      log_action('account_change_cc', orgname)
      return response

    raise Unauthorized()
|
||||
|
||||
|
||||
@resource('/v1/user/plan')
@internal_only
@show_if(features.BILLING)
class UserPlan(ApiResource):
  """ Resource for managing a user's subscription. """
  schemas = {
    'UserSubscription': {
      'id': 'UserSubscription',
      'type': 'object',
      'description': 'Description of a user card',
      'required': [
        'plan',
      ],
      'properties': {
        'token': {
          'type': 'string',
          'description': 'Stripe token that is generated by stripe checkout.js',
        },
        'plan': {
          'type': 'string',
          'description': 'Plan name to which the user wants to subscribe',
        },
      },
    },
  }

  @require_user_admin
  @nickname('updateUserSubscription')
  @validate_json_request('UserSubscription')
  def put(self):
    """ Create or update the user's subscription. """
    request_data = request.get_json()
    plan = request_data['plan']
    # The card token is optional: existing customers may already have one.
    token = request_data['token'] if 'token' in request_data else None
    user = get_authenticated_user()
    return subscribe(user, plan, token, False)  # Business features not required

  @require_user_admin
  @nickname('getUserSubscription')
  def get(self):
    """ Fetch any existing subscription for the user. """
    cus = None
    user = get_authenticated_user()
    private_repos = model.user.get_private_repo_count(user.username)

    if user.stripe_id:
      try:
        cus = billing.Customer.retrieve(user.stripe_id)
      except stripe.error.APIConnectionError as e:
        abort(503, message='Cannot contact Stripe')

      if cus.subscription:
        return subscription_view(cus.subscription, private_repos)

    # No active Stripe subscription: report the implicit free plan.
    return {
      'hasSubscription': False,
      'isExistingCustomer': cus is not None,
      'plan': 'free',
      'usedPrivateRepos': private_repos,
    }
|
||||
|
||||
|
||||
@resource('/v1/organization/<orgname>/plan')
@path_param('orgname', 'The name of the organization')
@internal_only
@related_user_resource(UserPlan)
@show_if(features.BILLING)
class OrganizationPlan(ApiResource):
  """ Resource for managing a org's subscription. """
  # JSON request schema used by @validate_json_request on PUT.
  schemas = {
    'OrgSubscription': {
      'id': 'OrgSubscription',
      'type': 'object',
      'description': 'Description of a user card',
      'required': [
        'plan',
      ],
      'properties': {
        'token': {
          'type': 'string',
          'description': 'Stripe token that is generated by stripe checkout.js',
        },
        'plan': {
          'type': 'string',
          'description': 'Plan name to which the user wants to subscribe',
        },
      },
    },
  }

  @require_scope(scopes.ORG_ADMIN)
  @nickname('updateOrgSubscription')
  @validate_json_request('OrgSubscription')
  def put(self, orgname):
    """ Create or update the org's subscription. """
    if not AdministerOrganizationPermission(orgname).can():
      raise Unauthorized()

    data = request.get_json()
    organization = model.organization.get_organization(orgname)
    # 'token' is optional; subscribing without it reuses the stored card.
    return subscribe(organization, data['plan'], data.get('token'),
                     True)  # Business plan required

  @require_scope(scopes.ORG_ADMIN)
  @nickname('getOrgSubscription')
  def get(self, orgname):
    """ Fetch any existing subscription for the org. """
    if not AdministerOrganizationPermission(orgname).can():
      raise Unauthorized()

    customer = None
    private_repos = model.user.get_private_repo_count(orgname)
    organization = model.organization.get_organization(orgname)
    if organization.stripe_id:
      try:
        customer = billing.Customer.retrieve(organization.stripe_id)
      except stripe.error.APIConnectionError:
        abort(503, message='Cannot contact Stripe')

      if customer.subscription:
        return subscription_view(customer.subscription, private_repos)

    # No Stripe customer or no active subscription: report the free plan.
    return {
      'hasSubscription': False,
      'isExistingCustomer': customer is not None,
      'plan': 'free',
      'usedPrivateRepos': private_repos,
    }
|
||||
|
||||
|
||||
@resource('/v1/user/invoices')
@internal_only
@show_if(features.BILLING)
class UserInvoiceList(ApiResource):
  """ Resource for listing a user's invoices. """
  @require_user_admin
  @nickname('listUserInvoices')
  def get(self):
    """ List the invoices for the current user. """
    stripe_id = get_authenticated_user().stripe_id
    # A user with no Stripe customer record has no invoices to list.
    if not stripe_id:
      raise NotFound()

    return get_invoices(stripe_id)
|
||||
|
||||
|
||||
@resource('/v1/organization/<orgname>/invoices')
@path_param('orgname', 'The name of the organization')
@related_user_resource(UserInvoiceList)
@show_if(features.BILLING)
class OrganizationInvoiceList(ApiResource):
  """ Resource for listing an organization's invoices. """
  @require_scope(scopes.ORG_ADMIN)
  @nickname('listOrgInvoices')
  def get(self, orgname):
    """ List the invoices for the specified organization. """
    if not AdministerOrganizationPermission(orgname).can():
      raise Unauthorized()

    organization = model.organization.get_organization(orgname)
    # An org with no Stripe customer record has no invoices to list.
    if not organization.stripe_id:
      raise NotFound()

    return get_invoices(organization.stripe_id)
|
||||
|
||||
|
||||
@resource('/v1/user/invoice/fields')
@internal_only
@show_if(features.BILLING)
class UserInvoiceFieldList(ApiResource):
  """ Resource for listing and creating a user's custom invoice fields. """
  # JSON request schema used by @validate_json_request on POST.
  schemas = {
    'InvoiceField': {
      'id': 'InvoiceField',
      'type': 'object',
      'description': 'Description of an invoice field',
      'required': [
        'title', 'value'
      ],
      'properties': {
        'title': {
          'type': 'string',
          'description': 'The title of the field being added',
        },
        'value': {
          'type': 'string',
          'description': 'The value of the field being added',
        },
      },
    },
  }

  @require_user_admin
  @nickname('listUserInvoiceFields')
  def get(self):
    """ List the invoice fields for the current user. """
    user = get_authenticated_user()
    if not user.stripe_id:
      raise NotFound()

    invoice_fields = get_invoice_fields(user)
    return {'fields': invoice_fields[0]}

  @require_user_admin
  @nickname('createUserInvoiceField')
  @validate_json_request('InvoiceField')
  def post(self):
    """ Creates a new invoice field. """
    user = get_authenticated_user()
    if not user.stripe_id:
      raise NotFound()

    body = request.get_json()
    return create_billing_invoice_field(user, body['title'], body['value'])
|
||||
|
||||
|
||||
@resource('/v1/user/invoice/field/<field_uuid>')
@internal_only
@show_if(features.BILLING)
class UserInvoiceField(ApiResource):
  """ Resource for deleting a user's custom invoice fields. """
  @require_user_admin
  @nickname('deleteUserInvoiceField')
  def delete(self, field_uuid):
    """ Deletes the invoice field for the current user. """
    user = get_authenticated_user()
    if not user.stripe_id:
      raise NotFound()

    # 404 when the field does not exist (or does not belong to this user).
    if not delete_billing_invoice_field(user, field_uuid):
      abort(404)

    return 'Okay', 201
|
||||
|
||||
|
||||
@resource('/v1/organization/<orgname>/invoice/fields')
@path_param('orgname', 'The name of the organization')
@related_user_resource(UserInvoiceFieldList)
@internal_only
@show_if(features.BILLING)
class OrganizationInvoiceFieldList(ApiResource):
  """ Resource for listing and creating an organization's custom invoice fields. """
  # JSON request schema used by @validate_json_request on POST.
  schemas = {
    'InvoiceField': {
      'id': 'InvoiceField',
      'type': 'object',
      'description': 'Description of an invoice field',
      'required': [
        'title', 'value'
      ],
      'properties': {
        'title': {
          'type': 'string',
          'description': 'The title of the field being added',
        },
        'value': {
          'type': 'string',
          'description': 'The value of the field being added',
        },
      },
    },
  }

  @require_scope(scopes.ORG_ADMIN)
  @nickname('listOrgInvoiceFields')
  def get(self, orgname):
    """ List the invoice fields for the organization. """
    # NOTE(review): this resource aborts with 403 rather than raising
    # Unauthorized() like sibling org resources — preserved as-is.
    if not AdministerOrganizationPermission(orgname).can():
      abort(403)

    organization = model.organization.get_organization(orgname)
    if not organization.stripe_id:
      raise NotFound()

    invoice_fields = get_invoice_fields(organization)
    return {'fields': invoice_fields[0]}

  @require_scope(scopes.ORG_ADMIN)
  @nickname('createOrgInvoiceField')
  @validate_json_request('InvoiceField')
  def post(self, orgname):
    """ Creates a new invoice field. """
    if not AdministerOrganizationPermission(orgname).can():
      abort(403)

    organization = model.organization.get_organization(orgname)
    if not organization.stripe_id:
      raise NotFound()

    body = request.get_json()
    return create_billing_invoice_field(organization, body['title'], body['value'])
|
||||
|
||||
|
||||
@resource('/v1/organization/<orgname>/invoice/field/<field_uuid>')
@path_param('orgname', 'The name of the organization')
@related_user_resource(UserInvoiceField)
@internal_only
@show_if(features.BILLING)
class OrganizationInvoiceField(ApiResource):
  """ Resource for deleting an organization's custom invoice fields. """
  @require_scope(scopes.ORG_ADMIN)
  @nickname('deleteOrgInvoiceField')
  def delete(self, orgname, field_uuid):
    """ Deletes the invoice field for the current user. """
    if not AdministerOrganizationPermission(orgname).can():
      abort(403)

    organization = model.organization.get_organization(orgname)
    if not organization.stripe_id:
      raise NotFound()

    # 404 when the field does not exist (or does not belong to this org).
    if not delete_billing_invoice_field(organization, field_uuid):
      abort(404)

    return 'Okay', 201
|
485
endpoints/api/build.py
Normal file
485
endpoints/api/build.py
Normal file
|
@ -0,0 +1,485 @@
|
|||
""" Create, list, cancel and get status/logs of repository builds. """
|
||||
import datetime
|
||||
import hashlib
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
|
||||
from flask import request
|
||||
from urlparse import urlparse
|
||||
|
||||
import features
|
||||
|
||||
from app import userfiles as user_files, build_logs, log_archive, dockerfile_build_queue
|
||||
from auth.permissions import (ReadRepositoryPermission, ModifyRepositoryPermission,
|
||||
AdministerRepositoryPermission, AdministerOrganizationPermission,
|
||||
SuperUserPermission)
|
||||
from buildtrigger.basehandler import BuildTriggerHandler
|
||||
from data import database
|
||||
from data import model
|
||||
from data.buildlogs import BuildStatusRetrievalError
|
||||
from endpoints.api import (RepositoryParamResource, parse_args, query_param, nickname, resource,
|
||||
require_repo_read, require_repo_write, validate_json_request,
|
||||
ApiResource, internal_only, format_date, api, path_param,
|
||||
require_repo_admin, abort, disallow_for_app_repositories,
|
||||
disallow_for_non_normal_repositories)
|
||||
from endpoints.building import (start_build, PreparedBuild, MaximumBuildsQueuedException,
|
||||
BuildTriggerDisabledException)
|
||||
from endpoints.exception import Unauthorized, NotFound, InvalidRequest
|
||||
from util.names import parse_robot_username
|
||||
from util.request import get_request_ip
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def get_trigger_config(trigger):
  """ Return the trigger's JSON config as a dict, or {} if missing or invalid.

  The config column may be None or hold malformed JSON; both yield {}.
  """
  try:
    return json.loads(trigger.config)
  except (TypeError, ValueError):
    # Was a bare `except:`, which also swallowed SystemExit/KeyboardInterrupt.
    # json.loads raises TypeError for None and ValueError for bad JSON.
    return {}
||||
|
||||
|
||||
def get_job_config(build_obj):
  """ Return the build's job_config JSON as a dict, or {} if missing or invalid.

  job_config may be None or hold malformed JSON; both yield {}.
  """
  try:
    return json.loads(build_obj.job_config)
  except (TypeError, ValueError):
    # Was a bare `except:`, which also swallowed SystemExit/KeyboardInterrupt.
    # json.loads raises TypeError for None and ValueError for bad JSON.
    return {}
|
||||
|
||||
|
||||
def user_view(user):
  """ Serialize a user (or robot) account into the view used in build payloads. """
  return dict(name=user.username, kind='user', is_robot=user.robot)
|
||||
|
||||
|
||||
def trigger_view(trigger, can_read=False, can_admin=False, for_build=False):
  """ Serialize a build trigger into its API view.

  Returns None for a missing/uninitialized trigger. Source, URL and config
  fields are redacted based on the caller's read/admin permissions.
  """
  if not (trigger and trigger.uuid):
    return None

  handler = BuildTriggerHandler.get_handler(trigger)
  build_source = handler.config.get('build_source')
  repo_url = handler.get_repository_url() if build_source else None

  # Admins implicitly have read access.
  can_read = can_read or can_admin

  view = {
    'id': trigger.uuid,
    'service': trigger.service.name,
    'is_active': handler.is_active(),

    'build_source': build_source if can_read else None,
    'repository_url': repo_url if can_read else None,

    'config': handler.config if can_admin else {},
    'can_invoke': can_admin,
    'enabled': trigger.enabled,
    'disabled_reason': trigger.disabled_reason.name if trigger.disabled_reason else None,
  }

  # The pull robot is only exposed to admins outside of per-build views.
  if not for_build and can_admin and trigger.pull_robot:
    view['pull_robot'] = user_view(trigger.pull_robot)

  return view
|
||||
|
||||
|
||||
def _get_build_status(build_obj):
  """ Returns the updated build phase, status and (if any) error for the build object. """
  phase = build_obj.phase
  status = {}
  error = None

  # If the build is currently running, then load its "real-time" status from Redis.
  if not database.BUILD_PHASE.is_terminal_phase(phase):
    try:
      status = build_logs.get_status(build_obj.uuid)
    except BuildStatusRetrievalError as bsre:
      # Redis could not be reached; report a synthetic 'cannot_load' phase.
      # Only superusers see the underlying exception text.
      phase = 'cannot_load'
      if SuperUserPermission().can():
        error = str(bsre)
      else:
        error = 'Redis may be down. Please contact support.'

    if phase != 'cannot_load':
      # If the status contains a heartbeat, then check to see if has been written in the last few
      # minutes. If not, then the build timed out.
      if status is not None and 'heartbeat' in status and status['heartbeat']:
        heartbeat = datetime.datetime.utcfromtimestamp(status['heartbeat'])
        if datetime.datetime.utcnow() - heartbeat > datetime.timedelta(minutes=1):
          phase = database.BUILD_PHASE.INTERNAL_ERROR

      # If the phase is internal error, return 'expired' instead if the number of retries
      # on the queue item is 0.
      if phase == database.BUILD_PHASE.INTERNAL_ERROR:
        retry = (build_obj.queue_id and
                 dockerfile_build_queue.has_retries_remaining(build_obj.queue_id))
        if not retry:
          phase = 'expired'

  return (phase, status, error)
|
||||
|
||||
|
||||
def build_status_view(build_obj):
  """ Serialize a repository build (plus its live phase/status) into its API view.

  Fields visible in the view are redacted according to the caller's
  read/write/admin permissions on the build's repository.
  """
  phase, status, error = _get_build_status(build_obj)
  repo_namespace = build_obj.repository.namespace_user.username
  repo_name = build_obj.repository.name

  can_read = ReadRepositoryPermission(repo_namespace, repo_name).can()
  can_write = ModifyRepositoryPermission(repo_namespace, repo_name).can()
  can_admin = AdministerRepositoryPermission(repo_namespace, repo_name).can()

  job_config = get_job_config(build_obj)

  resp = {
    'id': build_obj.uuid,
    'phase': phase,
    'started': format_date(build_obj.started),
    'display_name': build_obj.display_name,
    'status': status or {},
    # 'subdirectory' and 'dockerfile_path' both read 'build_subdir' —
    # presumably a legacy alias kept for older clients; TODO confirm.
    'subdirectory': job_config.get('build_subdir', ''),
    'dockerfile_path': job_config.get('build_subdir', ''),
    'context': job_config.get('context', ''),
    'tags': job_config.get('docker_tags', []),
    'manual_user': job_config.get('manual_user', None),
    'is_writer': can_write,
    'trigger': trigger_view(build_obj.trigger, can_read, can_admin, for_build=True),
    'trigger_metadata': job_config.get('trigger_metadata', None) if can_read else None,
    'resource_key': build_obj.resource_key,
    'pull_robot': user_view(build_obj.pull_robot) if build_obj.pull_robot else None,
    'repository': {
      'namespace': repo_namespace,
      'name': repo_name
    },
    'error': error,
  }

  # Archive URLs (for the uploaded build package) are only exposed to writers,
  # unless the READER_BUILD_LOGS feature opens them to readers as well.
  if can_write or features.READER_BUILD_LOGS:
    if build_obj.resource_key is not None:
      resp['archive_url'] = user_files.get_file_url(build_obj.resource_key,
                                                    get_request_ip(), requires_cors=True)
    elif job_config.get('archive_url', None):
      resp['archive_url'] = job_config['archive_url']

  return resp
|
||||
|
||||
|
||||
@resource('/v1/repository/<apirepopath:repository>/build/')
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
class RepositoryBuildList(RepositoryParamResource):
  """ Resource related to creating and listing repository builds. """
  # JSON request schema used by @validate_json_request on POST.
  schemas = {
    'RepositoryBuildRequest': {
      'type': 'object',
      'description': 'Description of a new repository build.',
      'properties': {
        'file_id': {
          'type': 'string',
          'description': 'The file id that was generated when the build spec was uploaded',
        },
        'archive_url': {
          'type': 'string',
          'description': 'The URL of the .tar.gz to build. Must start with "http" or "https".',
        },
        'subdirectory': {
          'type': 'string',
          'description': 'Subdirectory in which the Dockerfile can be found. You can only specify this or dockerfile_path',
        },
        'dockerfile_path': {
          'type': 'string',
          'description': 'Path to a dockerfile. You can only specify this or subdirectory.',
        },
        'context': {
          'type': 'string',
          'description': 'Pass in the context for the dockerfile. This is optional.',
        },
        'pull_robot': {
          'type': 'string',
          'description': 'Username of a Quay robot account to use as pull credentials',
        },
        'docker_tags': {
          'type': 'array',
          'description': 'The tags to which the built images will be pushed. ' +
                         'If none specified, "latest" is used.',
          'items': {
            'type': 'string'
          },
          'minItems': 1,
          'uniqueItems': True
        }
      },
    },
  }

  @require_repo_read
  @parse_args()
  @query_param('limit', 'The maximum number of builds to return', type=int, default=5)
  @query_param('since', 'Returns all builds since the given unix timecode', type=int, default=None)
  @nickname('getRepoBuilds')
  @disallow_for_app_repositories
  def get(self, namespace, repository, parsed_args):
    """ Get the list of repository builds. """
    limit = parsed_args.get('limit', 5)
    since = parsed_args.get('since', None)

    if since is not None:
      since = datetime.datetime.utcfromtimestamp(since)

    builds = model.build.list_repository_builds(namespace, repository, limit, since=since)
    return {
      'builds': [build_status_view(build) for build in builds]
    }

  @require_repo_write
  @nickname('requestRepoBuild')
  @disallow_for_app_repositories
  @disallow_for_non_normal_repositories
  @validate_json_request('RepositoryBuildRequest')
  def post(self, namespace, repository):
    """ Request that a repository be built and pushed from the specified input.

    The build source is either a previously-uploaded file (file_id) or an
    http(s) archive URL; exactly one of the two must be usable.
    """
    logger.debug('User requested repository initialization.')
    request_json = request.get_json()

    dockerfile_id = request_json.get('file_id', None)
    archive_url = request_json.get('archive_url', None)

    if not dockerfile_id and not archive_url:
      raise InvalidRequest('file_id or archive_url required')

    if archive_url:
      archive_match = None
      try:
        archive_match = urlparse(archive_url)
      except ValueError:
        pass

      if not archive_match:
        raise InvalidRequest('Invalid Archive URL: Must be a valid URI')

      scheme = archive_match.scheme
      if scheme != 'http' and scheme != 'https':
        raise InvalidRequest('Invalid Archive URL: Must be http or https')

    context, subdir = self.get_dockerfile_context(request_json)
    tags = request_json.get('docker_tags', ['latest'])
    pull_robot_name = request_json.get('pull_robot', None)

    # Verify the security behind the pull robot.
    if pull_robot_name:
      result = parse_robot_username(pull_robot_name)
      if result:
        try:
          model.user.lookup_robot(pull_robot_name)
        except model.InvalidRobotException:
          raise NotFound()

        # Make sure the user has administer permissions for the robot's namespace.
        (robot_namespace, _) = result
        if not AdministerOrganizationPermission(robot_namespace).can():
          raise Unauthorized()
      else:
        raise Unauthorized()

    # Check if the dockerfile resource has already been used. If so, then it
    # can only be reused if the user has access to the repository in which the
    # dockerfile was previously built.
    if dockerfile_id:
      associated_repository = model.build.get_repository_for_resource(dockerfile_id)
      if associated_repository:
        # BUGFIX: previously `if not ModifyRepositoryPermission(...)` tested the
        # permission *object* (always truthy), so the check could never fire.
        # The .can() call is required to actually evaluate the permission.
        if not ModifyRepositoryPermission(associated_repository.namespace_user.username,
                                          associated_repository.name).can():
          raise Unauthorized()

    # Start the build.
    repo = model.repository.get_repository(namespace, repository)
    if repo is None:
      raise NotFound()

    try:
      # Display name: the uploaded file's checksum, or a short hash of the URL.
      build_name = (user_files.get_file_checksum(dockerfile_id)
                    if dockerfile_id
                    else hashlib.sha224(archive_url).hexdigest()[0:7])
    except IOError:
      raise InvalidRequest('File %s could not be found or is invalid' % dockerfile_id)

    prepared = PreparedBuild()
    prepared.build_name = build_name
    prepared.dockerfile_id = dockerfile_id
    prepared.archive_url = archive_url
    prepared.tags = tags
    prepared.subdirectory = subdir
    prepared.context = context
    prepared.is_manual = True
    prepared.metadata = {}
    try:
      build_request = start_build(repo, prepared, pull_robot_name=pull_robot_name)
    except MaximumBuildsQueuedException:
      abort(429, message='Maximum queued build rate exceeded.')
    except BuildTriggerDisabledException:
      abort(400, message='Build trigger is disabled')

    resp = build_status_view(build_request)
    repo_string = '%s/%s' % (namespace, repository)
    headers = {
      'Location': api.url_for(RepositoryBuildStatus, repository=repo_string,
                              build_uuid=build_request.uuid),
    }
    return resp, 201, headers

  @staticmethod
  def get_dockerfile_context(request_json):
    """ Derive the (context, dockerfile path) pair from the request body.

    Precedence: explicit 'dockerfile_path' wins (context defaults to its
    directory); otherwise legacy 'subdirectory' (context = subdirectory,
    Dockerfile appended); otherwise the Dockerfile at the context root.
    """
    context = request_json['context'] if 'context' in request_json else os.path.sep
    if 'dockerfile_path' in request_json:
      subdir = request_json['dockerfile_path']
      if 'context' not in request_json:
        context = os.path.dirname(subdir)
      return context, subdir

    if 'subdirectory' in request_json:
      subdir = request_json['subdirectory']
      context = subdir
      if not subdir.endswith(os.path.sep):
        subdir += os.path.sep

      subdir += 'Dockerfile'
    else:
      if context.endswith(os.path.sep):
        subdir = context + 'Dockerfile'
      else:
        subdir = context + os.path.sep + 'Dockerfile'

    return context, subdir
|
||||
|
||||
@resource('/v1/repository/<apirepopath:repository>/build/<build_uuid>')
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
@path_param('build_uuid', 'The UUID of the build')
class RepositoryBuildResource(RepositoryParamResource):
  """ Resource for dealing with repository builds. """
  @require_repo_read
  @nickname('getRepoBuild')
  @disallow_for_app_repositories
  def get(self, namespace, repository, build_uuid):
    """ Returns information about a build. """
    try:
      build = model.build.get_repository_build(build_uuid)
    except model.build.InvalidRepositoryBuildException:
      raise NotFound()

    # 404 if the build does not belong to the repository in the URL.
    belongs = (build.repository.name == repository and
               build.repository.namespace_user.username == namespace)
    if not belongs:
      raise NotFound()

    return build_status_view(build)

  @require_repo_admin
  @nickname('cancelRepoBuild')
  @disallow_for_app_repositories
  @disallow_for_non_normal_repositories
  def delete(self, namespace, repository, build_uuid):
    """ Cancels a repository build. """
    try:
      build = model.build.get_repository_build(build_uuid)
    except model.build.InvalidRepositoryBuildException:
      raise NotFound()

    # 404 if the build does not belong to the repository in the URL.
    belongs = (build.repository.name == repository and
               build.repository.namespace_user.username == namespace)
    if not belongs:
      raise NotFound()

    # Cancellation only succeeds while the build is still queued.
    if not model.build.cancel_repository_build(build, dockerfile_build_queue):
      raise InvalidRequest('Build is currently running or has finished')

    return 'Okay', 201
|
||||
|
||||
|
||||
@resource('/v1/repository/<apirepopath:repository>/build/<build_uuid>/status')
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
@path_param('build_uuid', 'The UUID of the build')
class RepositoryBuildStatus(RepositoryParamResource):
  """ Resource for dealing with repository build status. """
  @require_repo_read
  @nickname('getRepoBuildStatus')
  @disallow_for_app_repositories
  def get(self, namespace, repository, build_uuid):
    """ Return the status for the builds specified by the build uuids. """
    build = model.build.get_repository_build(build_uuid)
    # 404 unless the build exists and belongs to the repository in the URL.
    matches = (build is not None and
               build.repository.name == repository and
               build.repository.namespace_user.username == namespace)
    if not matches:
      raise NotFound()

    return build_status_view(build)
|
||||
|
||||
|
||||
def get_logs_or_log_url(build):
  """ Return the build's log entries, or a URL to the archived log file.

  Archived builds yield {'logs_url': ...}; live builds yield the log
  entries starting at the 'start' query parameter (default 0).
  """
  # If the logs have been archived, just return a URL of the completed archive
  if build.logs_archived:
    return {
      'logs_url': log_archive.get_file_url(build.uuid, get_request_ip(), requires_cors=True)
    }
  start = int(request.args.get('start', 0))

  try:
    count, logs = build_logs.get_log_entries(build.uuid, start)
  except BuildStatusRetrievalError:
    count, logs = (0, [])

  # Build the response as a single literal (was: empty dict + .update()),
  # and materialize the possibly-lazy log iterable with list().
  return {
    'start': start,
    'total': count,
    'logs': list(logs),
  }
|
||||
|
||||
|
||||
@resource('/v1/repository/<apirepopath:repository>/build/<build_uuid>/logs')
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
@path_param('build_uuid', 'The UUID of the build')
class RepositoryBuildLogs(RepositoryParamResource):
  """ Resource for loading repository build logs. """
  @require_repo_read
  @nickname('getRepoBuildLogs')
  @disallow_for_app_repositories
  def get(self, namespace, repository, build_uuid):
    """ Return the build logs for the build specified by the build uuid. """
    # Without the READER_BUILD_LOGS feature, only writers may view logs.
    can_write = ModifyRepositoryPermission(namespace, repository).can()
    if not (features.READER_BUILD_LOGS or can_write):
      raise Unauthorized()

    # 404 unless the build exists and belongs to the repository in the URL.
    build = model.build.get_repository_build(build_uuid)
    matches = (build is not None and
               build.repository.name == repository and
               build.repository.namespace_user.username == namespace)
    if not matches:
      raise NotFound()

    return get_logs_or_log_url(build)
|
||||
|
||||
|
||||
@resource('/v1/filedrop/')
@internal_only
class FileDropResource(ApiResource):
  """ Custom verb for setting up a client side file transfer. """
  # JSON request schema used by @validate_json_request on POST.
  schemas = {
    'FileDropRequest': {
      'type': 'object',
      'description': 'Description of the file that the user wishes to upload.',
      'required': [
        'mimeType',
      ],
      'properties': {
        'mimeType': {
          'type': 'string',
          'description': 'Type of the file which is about to be uploaded',
        },
      },
    },
  }

  @nickname('getFiledropUrl')
  @validate_json_request('FileDropRequest')
  def post(self):
    """ Request a URL to which a file may be uploaded. """
    mime_type = request.get_json()['mimeType']
    url, file_id = user_files.prepare_for_drop(mime_type, requires_cors=True)
    return {
      'url': url,
      'file_id': str(file_id),
    }
|
334
endpoints/api/discovery.py
Normal file
334
endpoints/api/discovery.py
Normal file
|
@ -0,0 +1,334 @@
|
|||
# TODO to extract the discovery stuff into a util at the top level and then use it both here and config_app discovery.py
|
||||
""" API discovery information. """
|
||||
|
||||
import re
|
||||
import logging
|
||||
import sys
|
||||
|
||||
from collections import OrderedDict
|
||||
|
||||
from flask_restful import reqparse
|
||||
|
||||
from app import app
|
||||
from auth import scopes
|
||||
from endpoints.api import (ApiResource, resource, method_metadata, nickname, truthy_bool,
|
||||
parse_args, query_param)
|
||||
from endpoints.decorators import anon_allowed
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# Matches Flask URL-rule parameters such as '<apirepopath:repository>' so
# they can be rewritten into Swagger-style '{repository}' placeholders.
PARAM_REGEX = re.compile(r'<([^:>]+:)*([\w]+)>')


# Maps reqparse argument `type` callables to Swagger primitive type names.
TYPE_CONVERTER = {
  truthy_bool: 'boolean',
  str: 'string',
  basestring: 'string',
  reqparse.text_type: 'string',
  int: 'integer',
}

# Scheme and host used when building the discovery document's base URL.
PREFERRED_URL_SCHEME = app.config['PREFERRED_URL_SCHEME']
SERVER_HOSTNAME = app.config['SERVER_HOSTNAME']
|
||||
|
||||
|
||||
def fully_qualified_name(method_view_class):
  """ Return the dotted '<module>.<class name>' path of the given view class. """
  return '{0}.{1}'.format(method_view_class.__module__, method_view_class.__name__)
|
||||
|
||||
|
||||
def swagger_route_data(include_internal=False, compact=False):
  """ Build a Swagger 2.0 document describing every registered API endpoint.

  Walks the Flask URL map and assembles paths, operations, parameters,
  request/response models and tags from the metadata the API decorators
  attached to each view class and method (via method_metadata).

  Args:
    include_internal: include endpoints/operations marked internal, annotating
      them with the 'x-internal' / 'x-requires-fresh-login' extension fields.
    compact: omit descriptions, models, security and the root document
      wrapper, returning only {'paths': ...}.

  Returns:
    dict: a Swagger root document, or just {'paths': ...} when compact.

  Raises:
    Exception: if two operations share the same nickname (operationId).
  """
  def swagger_parameter(name, description, kind='path', param_type='string', required=True,
                        enum=None, schema=None):
    # https://github.com/swagger-api/swagger-spec/blob/master/versions/2.0.md#parameterObject
    parameter_info = {
      'name': name,
      'in': kind,
      'required': required
    }

    if not compact:
      parameter_info['description'] = description or ''

    # A '$ref' schema and an inline 'type' are mutually exclusive in Swagger.
    if schema:
      parameter_info['schema'] = {
        '$ref': '#/definitions/%s' % schema
      }
    else:
      parameter_info['type'] = param_type

    if enum is not None and len(list(enum)) > 0:
      parameter_info['enum'] = list(enum)

    return parameter_info

  paths = {}
  models = {}
  tags = []
  tags_added = set()
  operationIds = set()  # used to detect duplicate nicknames across the whole API

  for rule in app.url_map.iter_rules():
    endpoint_method = app.view_functions[rule.endpoint]

    # Verify that we have a view class for this API method.
    if not 'view_class' in dir(endpoint_method):
      continue

    view_class = endpoint_method.view_class

    # Hide the class if it is internal.
    internal = method_metadata(view_class, 'internal')
    if not include_internal and internal:
      continue

    # Build the tag: the last module-path component (e.g. 'endpoints.api.logs' -> 'logs'),
    # with the module docstring as its description.
    parts = fully_qualified_name(view_class).split('.')
    tag_name = parts[-2]
    if not tag_name in tags_added:
      tags_added.add(tag_name)
      tags.append({
        'name': tag_name,
        'description': (sys.modules[view_class.__module__].__doc__ or '').strip()
      })

    # Build the Swagger data for the path, rewriting Flask '<converter:name>'
    # placeholders into Swagger '{name}' parameters.
    swagger_path = PARAM_REGEX.sub(r'{\2}', rule.rule)
    full_name = fully_qualified_name(view_class)
    path_swagger = {
      'x-name': full_name,
      'x-path': swagger_path,
      'x-tag': tag_name
    }

    if include_internal:
      related_user_res = method_metadata(view_class, 'related_user_resource')
      if related_user_res is not None:
        path_swagger['x-user-related'] = fully_qualified_name(related_user_res)

    paths[swagger_path] = path_swagger

    # Add any global path parameters (declared by @path_param on the class).
    param_data_map = view_class.__api_path_params if '__api_path_params' in dir(view_class) else {}
    if param_data_map:
      path_parameters_swagger = []
      for path_parameter in param_data_map:
        description = param_data_map[path_parameter].get('description')
        path_parameters_swagger.append(swagger_parameter(path_parameter, description))

      path_swagger['parameters'] = path_parameters_swagger

    # Add the individual HTTP operations.
    method_names = list(rule.methods.difference(['HEAD', 'OPTIONS']))
    for method_name in method_names:
      # https://github.com/swagger-api/swagger-spec/blob/master/versions/2.0.md#operation-object
      method = getattr(view_class, method_name.lower(), None)
      if method is None:
        logger.debug('Unable to find method for %s in class %s', method_name, view_class)
        continue

      # Operations without a @nickname are skipped entirely.
      operationId = method_metadata(method, 'nickname')
      operation_swagger = {
        'operationId': operationId,
        'parameters': [],
      }

      if operationId is None:
        continue

      if operationId in operationIds:
        raise Exception('Duplicate operation Id: %s' % operationId)

      operationIds.add(operationId)

      if not compact:
        operation_swagger.update({
          'description': method.__doc__.strip() if method.__doc__ else '',
          'tags': [tag_name]
        })

      # Mark the method as internal. NOTE: this rebinds 'internal' (previously the
      # class-level flag); the rebound value decides attachment to the path below.
      internal = method_metadata(method, 'internal')
      if internal is not None:
        operation_swagger['x-internal'] = True

      if include_internal:
        requires_fresh_login = method_metadata(method, 'requires_fresh_login')
        if requires_fresh_login is not None:
          operation_swagger['x-requires-fresh-login'] = True

      # Add the path parameters.
      if rule.arguments:
        for path_parameter in rule.arguments:
          description = param_data_map.get(path_parameter, {}).get('description')
          operation_swagger['parameters'].append(swagger_parameter(path_parameter, description))

      # Add the query parameters (declared by @query_param on the method).
      if '__api_query_params' in dir(method):
        for query_parameter_info in method.__api_query_params:
          name = query_parameter_info['name']
          description = query_parameter_info['help']
          param_type = TYPE_CONVERTER[query_parameter_info['type']]
          required = query_parameter_info['required']

          operation_swagger['parameters'].append(
            swagger_parameter(name, description, kind='query',
                              param_type=param_type,
                              required=required,
                              enum=query_parameter_info['choices']))

      # Add the OAuth security block.
      # https://github.com/swagger-api/swagger-spec/blob/master/versions/2.0.md#securityRequirementObject
      scope = method_metadata(method, 'oauth2_scope')
      if scope and not compact:
        operation_swagger['security'] = [{'oauth2_implicit': [scope.scope]}]

      # Add the responses block.
      # https://github.com/swagger-api/swagger-spec/blob/master/versions/2.0.md#responsesObject
      response_schema_name = method_metadata(method, 'response_schema')
      if not compact:
        if response_schema_name:
          models[response_schema_name] = view_class.schemas[response_schema_name]

        # Shared model for all error responses below.
        models['ApiError'] = {
          'type': 'object',
          'properties': {
            'status': {
              'type': 'integer',
              'description': 'Status code of the response.'
            },
            'type': {
              'type': 'string',
              'description': 'Reference to the type of the error.'
            },
            'detail': {
              'type': 'string',
              'description': 'Details about the specific instance of the error.'
            },
            'title': {
              'type': 'string',
              'description': 'Unique error code to identify the type of error.'
            },
            'error_message': {
              'type': 'string',
              'description': 'Deprecated; alias for detail'
            },
            'error_type': {
              'type': 'string',
              'description': 'Deprecated; alias for detail'
            }
          },
          'required': [
            'status',
            'type',
            'title',
          ]
        }

        responses = {
          '400': {
            'description': 'Bad Request',
          },

          '401': {
            'description': 'Session required',
          },

          '403': {
            'description': 'Unauthorized access',
          },

          '404': {
            'description': 'Not found',
          },
        }

        for _, body in responses.items():
          body['schema'] = {'$ref': '#/definitions/ApiError'}

        # Success code depends on the HTTP verb; only the 200 case carries a
        # response schema reference.
        if method_name == 'DELETE':
          responses['204'] = {
            'description': 'Deleted'
          }
        elif method_name == 'POST':
          responses['201'] = {
            'description': 'Successful creation'
          }
        else:
          responses['200'] = {
            'description': 'Successful invocation'
          }

          if response_schema_name:
            responses['200']['schema'] = {
              '$ref': '#/definitions/%s' % response_schema_name
            }

        operation_swagger['responses'] = responses

      # Add the request block (declared by @validate_json_request on the method).
      request_schema_name = method_metadata(method, 'request_schema')
      if request_schema_name and not compact:
        models[request_schema_name] = view_class.schemas[request_schema_name]

        operation_swagger['parameters'].append(
          swagger_parameter('body', 'Request body contents.', kind='body',
                            schema=request_schema_name))

      # Add the operation to the parent path.
      if not internal or (internal and include_internal):
        path_swagger[method_name.lower()] = operation_swagger

  tags.sort(key=lambda t: t['name'])
  paths = OrderedDict(sorted(paths.items(), key=lambda p: p[1]['x-tag']))

  if compact:
    return {'paths': paths}

  swagger_data = {
    'swagger': '2.0',
    'host': SERVER_HOSTNAME,
    'basePath': '/',
    'schemes': [
      PREFERRED_URL_SCHEME
    ],
    'info': {
      'version': 'v1',
      'title': 'Quay Frontend',
      'description': ('This API allows you to perform many of the operations required to work '
                      'with Quay repositories, users, and organizations. You can find out more '
                      'at <a href="https://quay.io">Quay</a>.'),
      'termsOfService': 'https://quay.io/tos',
      'contact': {
        'email': 'support@quay.io'
      }
    },
    'securityDefinitions': {
      'oauth2_implicit': {
        "type": "oauth2",
        "flow": "implicit",
        "authorizationUrl": "%s://%s/oauth/authorize" % (PREFERRED_URL_SCHEME, SERVER_HOSTNAME),
        'scopes': {scope.scope:scope.description
                   for scope in scopes.app_scopes(app.config).values()},
      },
    },
    'paths': paths,
    'definitions': models,
    'tags': tags
  }

  return swagger_data
|
||||
|
||||
|
||||
@resource('/v1/discovery')
class DiscoveryResource(ApiResource):
  """Ability to inspect the API for usage information and documentation."""
  @parse_args()
  @query_param('internal', 'Whether to include internal APIs.', type=truthy_bool, default=False)
  @nickname('discovery')
  @anon_allowed
  def get(self, parsed_args):
    """ List all of the API endpoints available in the swagger API format."""
    # Delegate to the shared generator; internal endpoints only when asked for.
    wants_internal = parsed_args['internal']
    return swagger_route_data(wants_internal)
|
61
endpoints/api/error.py
Normal file
61
endpoints/api/error.py
Normal file
|
@ -0,0 +1,61 @@
|
|||
""" Error details API """
|
||||
from flask import url_for
|
||||
|
||||
from endpoints.api import (resource, nickname, ApiResource, path_param,
|
||||
define_json_response)
|
||||
from endpoints.exception import NotFound, ApiErrorType, ERROR_DESCRIPTION
|
||||
|
||||
def error_view(error_type):
  """ Build the serialized description for the given error-type key. """
  type_url = url_for('api.error', error_type=error_type, _external=True)
  return {
    'type': type_url,
    'title': error_type,
    'description': ERROR_DESCRIPTION[error_type],
  }
|
||||
|
||||
|
||||
@resource('/v1/error/<error_type>')
@path_param('error_type', 'The error code identifying the type of error.')
class Error(ApiResource):
  """ Resource for Error Descriptions"""
  schemas = {
    'ApiErrorDescription': {
      'type': 'object',
      'description': 'Description of an error',
      'required': [
        'type',
        'description',
        'title',
      ],
      'properties': {
        'type': {
          'type': 'string',
          'description': 'A reference to the error type resource'
        },
        'title': {
          'type': 'string',
          'description': (
            'The title of the error. Can be used to uniquely identify the kind'
            ' of error.'
          ),
          'enum': list(ApiErrorType.__members__)
        },
        'description': {
          'type': 'string',
          'description': (
            'A more detailed description of the error that may include help for'
            ' fixing the issue.'
          )
        }
      },
    },
  }

  @define_json_response('ApiErrorDescription')
  @nickname('getErrorDescription')
  def get(self, error_type):
    """ Get a detailed description of the error """
    # Membership test directly against the dict; no need to materialize .keys().
    if error_type in ERROR_DESCRIPTION:
      return error_view(error_type)

    raise NotFound()
|
||||
|
128
endpoints/api/globalmessages.py
Normal file
128
endpoints/api/globalmessages.py
Normal file
|
@ -0,0 +1,128 @@
|
|||
""" Messages API. """
|
||||
from flask import abort
|
||||
from flask import make_response
|
||||
from flask import request
|
||||
|
||||
import features
|
||||
from auth import scopes
|
||||
from auth.permissions import SuperUserPermission
|
||||
from endpoints.api import (ApiResource, resource, nickname,
|
||||
require_fresh_login, verify_not_prod, validate_json_request,
|
||||
require_scope, show_if,)
|
||||
from globalmessages_models_pre_oci import pre_oci_model as model
|
||||
|
||||
|
||||
@resource('/v1/messages')
class GlobalUserMessages(ApiResource):
  """ Resource for getting a list of super user messages """
  # NOTE(review): 'itemType' is not a standard JSON-Schema keyword ('items' is);
  # 'GetMessage' looks documentation-only here, since only 'CreateMessage' is used
  # by @validate_json_request below — confirm before changing.
  schemas = {
    'GetMessage': {
      'id': 'GetMessage',
      'type': 'object',
      'description': 'Messages that a super user has saved in the past',
      'properties': {
        'message': {
          'type': 'array',
          'description': 'A list of messages',
          'itemType': {
            'type': 'object',
            'properties': {
              'uuid': {
                'type': 'string',
                'description': 'The message id',
              },
              'content': {
                'type': 'string',
                'description': 'The actual message',
              },
              'media_type': {
                'type': 'string',
                'description': 'The media type of the message',
                'enum': ['text/plain', 'text/markdown'],
              },
              'severity': {
                'type': 'string',
                'description': 'The severity of the message',
                'enum': ['info', 'warning', 'error'],
              },
            },
          },
        },
      },
    },
    'CreateMessage': {
      'id': 'CreateMessage',
      'type': 'object',
      'description': 'Create a new message',
      'properties': {
        'message': {
          'type': 'object',
          'description': 'A single message',
          'required': [
            'content',
            'media_type',
            'severity',
          ],
          'properties': {
            'content': {
              'type': 'string',
              'description': 'The actual message',
            },
            'media_type': {
              'type': 'string',
              'description': 'The media type of the message',
              'enum': ['text/plain', 'text/markdown'],
            },
            'severity': {
              'type': 'string',
              'description': 'The severity of the message',
              'enum': ['info', 'warning', 'error'],
            },
          },
        },
      },
    }
  }

  @nickname('getGlobalMessages')
  def get(self):
    """ Return a super users messages """
    # Listing is unauthenticated here; only creation/deletion is restricted.
    return {
      'messages': [m.to_dict() for m in model.get_all_messages()],
    }

  @require_fresh_login
  @verify_not_prod
  @nickname('createGlobalMessage')
  @validate_json_request('CreateMessage')
  @require_scope(scopes.SUPERUSER)
  def post(self):
    """ Create a message """
    if not features.SUPER_USERS:
      abort(404)

    # Only an active super user may create messages; otherwise 403 below.
    if SuperUserPermission().can():
      message_req = request.get_json()['message']
      message = model.create_message(message_req['severity'], message_req['media_type'], message_req['content'])
      if message is None:
        abort(400)
      return make_response('', 201)

    abort(403)
|
||||
|
||||
|
||||
@resource('/v1/message/<uuid>')
@show_if(features.SUPER_USERS)
class GlobalUserMessage(ApiResource):
  """ Resource for managing individual messages """
  @require_fresh_login
  @verify_not_prod
  @nickname('deleteGlobalMessage')
  @require_scope(scopes.SUPERUSER)
  def delete(self, uuid):
    """ Delete a message """
    # Guard clause: abort() raises, so the happy path follows unindented.
    if not SuperUserPermission().can():
      abort(403)

    model.delete_message(uuid)
    return make_response('', 204)
|
54
endpoints/api/globalmessages_models_interface.py
Normal file
54
endpoints/api/globalmessages_models_interface.py
Normal file
|
@ -0,0 +1,54 @@
|
|||
from abc import ABCMeta, abstractmethod
|
||||
from collections import namedtuple
|
||||
|
||||
from six import add_metaclass
|
||||
|
||||
class GlobalMessage(
    namedtuple('GlobalMessage', [
      'uuid',
      'content',
      'severity',
      'media_type_name',
    ])):
  """ Immutable value object describing one site-wide message. """

  def to_dict(self):
    """ Serialize for the API; note 'media_type_name' is exposed as 'media_type'. """
    return dict(
      uuid=self.uuid,
      content=self.content,
      severity=self.severity,
      media_type=self.media_type_name,
    )
|
||||
|
||||
|
||||
|
||||
@add_metaclass(ABCMeta)
class GlobalMessageDataInterface(object):
  """ Abstract data-access contract backing the global-messages API endpoints. """

  @abstractmethod
  def get_all_messages(self):
    """ Fetch every stored message.

    Returns:
      list(GlobalMessage)
    """

  @abstractmethod
  def create_message(self, severity, media_type_name, content):
    """ Persist a new message with the given attributes.

    Returns:
      GlobalMessage or None
    """

  @abstractmethod
  def delete_message(self, uuid):
    """ Remove the message with the given uuid.

    Returns:
      void
    """
|
||||
|
||||
|
33
endpoints/api/globalmessages_models_pre_oci.py
Normal file
33
endpoints/api/globalmessages_models_pre_oci.py
Normal file
|
@ -0,0 +1,33 @@
|
|||
from globalmessages_models_interface import GlobalMessageDataInterface, GlobalMessage
|
||||
from data import model
|
||||
|
||||
|
||||
class GlobalMessagePreOCI(GlobalMessageDataInterface):
  """ Pre-OCI database-backed implementation of the global-messages interface. """

  def get_all_messages(self):
    return [self._message(db_message) for db_message in model.message.get_messages()]

  def create_message(self, severity, media_type_name, content):
    message_fields = {
      'severity': severity,
      'media_type': media_type_name,
      'content': content
    }
    created = model.message.create([message_fields])
    return self._message(created[0])

  def delete_message(self, uuid):
    # The model helper operates on a list of uuids; we pass a single-element one.
    model.message.delete_message([uuid])

  def _message(self, message_obj):
    """ Convert a database row into a GlobalMessage, or None for None input. """
    if message_obj is None:
      return None

    return GlobalMessage(
      uuid=message_obj.uuid,
      content=message_obj.content,
      severity=message_obj.severity,
      media_type_name=message_obj.media_type.name,
    )


pre_oci_model = GlobalMessagePreOCI()
|
77
endpoints/api/image.py
Normal file
77
endpoints/api/image.py
Normal file
|
@ -0,0 +1,77 @@
|
|||
""" List and lookup repository images. """
|
||||
import json
|
||||
|
||||
from data.registry_model import registry_model
|
||||
from endpoints.api import (resource, nickname, require_repo_read, RepositoryParamResource,
|
||||
path_param, disallow_for_app_repositories, format_date)
|
||||
from endpoints.exception import NotFound
|
||||
|
||||
|
||||
def image_dict(image, with_history=False, with_tags=False):
  """ Serialize a legacy image for the API, optionally with tag names and parent history. """
  command = None
  if image.command:
    try:
      command = json.loads(image.command)
    except (ValueError, TypeError):
      # Surface a parse failure in-band rather than dropping the field.
      command = {'error': 'Could not parse command'}

  serialized = {
    'id': image.docker_image_id,
    'created': format_date(image.created),
    'comment': image.comment,
    'command': command,
    'size': image.image_size,
    'uploading': image.uploading,
    'sort_index': len(image.parents),
  }

  if with_tags:
    serialized['tags'] = [tag.name for tag in image.tags]

  if with_history:
    serialized['history'] = [image_dict(parent) for parent in image.parents]

  # Calculate the ancestors string, with the DBID's replaced with the docker IDs.
  ancestor_ids = [parent_image.docker_image_id for parent_image in image.parents]
  serialized['ancestors'] = '/{0}/'.format('/'.join(ancestor_ids))
  return serialized
|
||||
|
||||
|
||||
@resource('/v1/repository/<apirepopath:repository>/image/')
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
class RepositoryImageList(RepositoryParamResource):
  """ Resource for listing repository images. """

  @require_repo_read
  @nickname('listRepositoryImages')
  @disallow_for_app_repositories
  def get(self, namespace, repository):
    """ List the images for the specified repository. """
    repository_ref = registry_model.lookup_repository(namespace, repository)
    if repository_ref is None:
      raise NotFound()

    legacy_images = registry_model.get_legacy_images(repository_ref)
    return {'images': [image_dict(legacy_image, with_tags=True) for legacy_image in legacy_images]}
|
||||
|
||||
|
||||
@resource('/v1/repository/<apirepopath:repository>/image/<image_id>')
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
@path_param('image_id', 'The Docker image ID')
class RepositoryImage(RepositoryParamResource):
  """ Resource for handling repository images. """

  @require_repo_read
  @nickname('getImage')
  @disallow_for_app_repositories
  def get(self, namespace, repository, image_id):
    """ Get the information available for the specified image. """
    repository_ref = registry_model.lookup_repository(namespace, repository)
    if repository_ref is None:
      raise NotFound()

    legacy_image = registry_model.get_legacy_image(repository_ref, image_id, include_parents=True)
    if legacy_image is None:
      raise NotFound()

    return image_dict(legacy_image, with_history=True)
|
344
endpoints/api/logs.py
Normal file
344
endpoints/api/logs.py
Normal file
|
@ -0,0 +1,344 @@
|
|||
""" Access usage logs for organizations or repositories. """
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from flask import request
|
||||
|
||||
import features
|
||||
|
||||
from app import app, export_action_logs_queue, avatar
|
||||
from auth.permissions import AdministerOrganizationPermission
|
||||
from auth.auth_context import get_authenticated_user
|
||||
from auth import scopes
|
||||
from data.logs_model import logs_model
|
||||
from data.registry_model import registry_model
|
||||
from endpoints.api import (resource, nickname, ApiResource, query_param, parse_args,
|
||||
RepositoryParamResource, require_repo_admin, related_user_resource,
|
||||
format_date, require_user_admin, path_param, require_scope, page_support,
|
||||
validate_json_request, InvalidRequest, show_if)
|
||||
from endpoints.exception import Unauthorized, NotFound
|
||||
|
||||
|
||||
# Page size for log listings. NOTE(review): appears unused within this module's
# visible code — the page size passed to lookup_logs is app.config['ACTION_LOG_MAX_PAGE'].
LOGS_PER_PAGE = 20

# Service-key log kinds passed as 'filter_kinds' for user-scoped queries below;
# presumably these are excluded from user-level listings — confirm against
# logs_model.lookup_logs semantics.
SERVICE_LEVEL_LOG_KINDS = set(['service_key_create', 'service_key_approve', 'service_key_delete',
                               'service_key_modify', 'service_key_extend', 'service_key_rotate'])
|
||||
|
||||
|
||||
def _parse_datetime(dt_string):
|
||||
if not dt_string:
|
||||
return None
|
||||
|
||||
try:
|
||||
return datetime.strptime(dt_string + ' UTC', '%m/%d/%Y %Z')
|
||||
except ValueError:
|
||||
return None
|
||||
|
||||
|
||||
def _validate_logs_arguments(start_time, end_time):
  """ Resolve optional date strings into a concrete datetime window.

  Defaults: start is one day ago, end is today; the end bound is pushed forward
  one day so the final day is included.
  """
  parsed_start = _parse_datetime(start_time)
  parsed_end = _parse_datetime(end_time)

  if parsed_start is None:
    parsed_start = datetime.today() - timedelta(days=1)
  if parsed_end is None:
    parsed_end = datetime.today()

  return parsed_start, parsed_end + timedelta(days=1)
|
||||
|
||||
|
||||
def _get_logs(start_time, end_time, performer_name=None, repository_name=None, namespace_name=None,
              page_token=None, filter_kinds=None):
  """ Look up one page of logs and serialize them with the resolved time window. """
  (start_time, end_time) = _validate_logs_arguments(start_time, end_time)
  page = logs_model.lookup_logs(start_time, end_time, performer_name, repository_name,
                                namespace_name, filter_kinds, page_token,
                                app.config['ACTION_LOG_MAX_PAGE'])

  # Namespace info in each entry is redundant when the query is already scoped.
  include_namespace = namespace_name is None and repository_name is None
  serialized = {
    'start_time': format_date(start_time),
    'end_time': format_date(end_time),
    'logs': [entry.to_dict(avatar, include_namespace) for entry in page.logs],
  }
  return serialized, page.next_page_token
|
||||
|
||||
|
||||
def _get_aggregate_logs(start_time, end_time, performer_name=None, repository=None, namespace=None,
                        filter_kinds=None):
  """ Compute aggregated log counts over the resolved time window. """
  (start_time, end_time) = _validate_logs_arguments(start_time, end_time)
  counts = logs_model.get_aggregated_log_counts(start_time, end_time,
                                                performer_name=performer_name,
                                                repository_name=repository,
                                                namespace_name=namespace,
                                                filter_kinds=filter_kinds)

  return {'aggregated': [count.to_dict() for count in counts]}
|
||||
|
||||
|
||||
@resource('/v1/repository/<apirepopath:repository>/logs')
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
class RepositoryLogs(RepositoryParamResource):
  """ Resource for fetching logs for the specific repository. """

  @require_repo_admin
  @nickname('listRepoLogs')
  @parse_args()
  @query_param('starttime', 'Earliest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
  @query_param('endtime', 'Latest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
  @page_support()
  def get(self, namespace, repository, page_token, parsed_args):
    """ List the logs for the specified repository. """
    # 404 rather than an empty listing for repositories that do not exist.
    if registry_model.lookup_repository(namespace, repository) is None:
      raise NotFound()

    return _get_logs(parsed_args['starttime'], parsed_args['endtime'],
                     repository_name=repository,
                     page_token=page_token,
                     namespace_name=namespace)
|
||||
|
||||
|
||||
@resource('/v1/user/logs')
class UserLogs(ApiResource):
  """ Resource for fetching logs for the current user. """

  @require_user_admin
  @nickname('listUserLogs')
  @parse_args()
  @query_param('starttime', 'Earliest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
  @query_param('endtime', 'Latest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
  @query_param('performer', 'Username for which to filter logs.', type=str)
  @page_support()
  def get(self, parsed_args, page_token):
    """ List the logs for the current user. """
    current_user = get_authenticated_user()
    return _get_logs(parsed_args['starttime'], parsed_args['endtime'],
                     performer_name=parsed_args['performer'],
                     namespace_name=current_user.username,
                     page_token=page_token,
                     filter_kinds=SERVICE_LEVEL_LOG_KINDS)
|
||||
|
||||
|
||||
@resource('/v1/organization/<orgname>/logs')
@path_param('orgname', 'The name of the organization')
@related_user_resource(UserLogs)
class OrgLogs(ApiResource):
  """ Resource for fetching logs for the entire organization. """

  @nickname('listOrgLogs')
  @parse_args()
  @query_param('starttime', 'Earliest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
  @query_param('endtime', 'Latest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
  @query_param('performer', 'Username for which to filter logs.', type=str)
  @page_support()
  @require_scope(scopes.ORG_ADMIN)
  def get(self, orgname, page_token, parsed_args):
    """ List the logs for the specified organization. """
    # Only org admins may see organization-wide logs.
    if not AdministerOrganizationPermission(orgname).can():
      raise Unauthorized()

    return _get_logs(parsed_args['starttime'], parsed_args['endtime'],
                     namespace_name=orgname,
                     performer_name=parsed_args['performer'],
                     page_token=page_token)
|
||||
|
||||
|
||||
@resource('/v1/repository/<apirepopath:repository>/aggregatelogs')
@show_if(features.AGGREGATED_LOG_COUNT_RETRIEVAL)
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
class RepositoryAggregateLogs(RepositoryParamResource):
  """ Resource for fetching aggregated logs for the specific repository. """

  @require_repo_admin
  @nickname('getAggregateRepoLogs')
  @parse_args()
  @query_param('starttime', 'Earliest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
  @query_param('endtime', 'Latest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
  def get(self, namespace, repository, parsed_args):
    """ Returns the aggregated logs for the specified repository. """
    if registry_model.lookup_repository(namespace, repository) is None:
      raise NotFound()

    return _get_aggregate_logs(parsed_args['starttime'], parsed_args['endtime'],
                               repository=repository,
                               namespace=namespace)
|
||||
|
||||
|
||||
@resource('/v1/user/aggregatelogs')
@show_if(features.AGGREGATED_LOG_COUNT_RETRIEVAL)
class UserAggregateLogs(ApiResource):
  """ Resource for fetching aggregated logs for the current user. """

  @require_user_admin
  @nickname('getAggregateUserLogs')
  @parse_args()
  @query_param('starttime', 'Earliest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
  @query_param('endtime', 'Latest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
  @query_param('performer', 'Username for which to filter logs.', type=str)
  def get(self, parsed_args):
    """ Returns the aggregated logs for the current user. """
    current_user = get_authenticated_user()
    return _get_aggregate_logs(parsed_args['starttime'], parsed_args['endtime'],
                               performer_name=parsed_args['performer'],
                               namespace=current_user.username,
                               filter_kinds=SERVICE_LEVEL_LOG_KINDS)
|
||||
|
||||
|
||||
@resource('/v1/organization/<orgname>/aggregatelogs')
@show_if(features.AGGREGATED_LOG_COUNT_RETRIEVAL)
@path_param('orgname', 'The name of the organization')
@related_user_resource(UserLogs)
class OrgAggregateLogs(ApiResource):
  """ Resource for fetching aggregate logs for the entire organization. """

  @nickname('getAggregateOrgLogs')
  @parse_args()
  @query_param('starttime', 'Earliest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
  @query_param('endtime', 'Latest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
  @query_param('performer', 'Username for which to filter logs.', type=str)
  @require_scope(scopes.ORG_ADMIN)
  def get(self, orgname, parsed_args):
    """ Gets the aggregated logs for the specified organization. """
    # Only org admins may see organization-wide aggregates.
    if not AdministerOrganizationPermission(orgname).can():
      raise Unauthorized()

    return _get_aggregate_logs(parsed_args['starttime'], parsed_args['endtime'],
                               namespace=orgname,
                               performer_name=parsed_args['performer'])
|
||||
|
||||
|
||||
# Request-body schema shared by the repository- and user-level export endpoints below.
# Both fields are optional at the schema level; format checks on whichever fields are
# present happen in _queue_logs_export.
EXPORT_LOGS_SCHEMA = {
  'type': 'object',
  'description': 'Configuration for an export logs operation',
  'properties': {
    'callback_url': {
      'type': 'string',
      'description': 'The callback URL to invoke with a link to the exported logs',
    },
    'callback_email': {
      'type': 'string',
      'description': 'The e-mail address at which to e-mail a link to the exported logs',
    },
  },
}
|
||||
|
||||
|
||||
def _queue_logs_export(start_time, end_time, options, namespace_name, repository_name=None):
  """ Validate the export options and enqueue a log-export job.

  Args:
    start_time: earliest date string, as understood by _validate_logs_arguments.
    end_time: latest date string, as understood by _validate_logs_arguments.
    options: request body dict with optional 'callback_url' / 'callback_email'.
    namespace_name: namespace whose logs are exported.
    repository_name: optional repository scope within the namespace.

  Returns:
    The id of the queued export.

  Raises:
    InvalidRequest: for a malformed callback URL/e-mail, or if the export
      could not be queued.
  """
  callback_url = options.get('callback_url')
  # str.startswith accepts a tuple of prefixes; only http(s) callbacks are allowed.
  if callback_url and not callback_url.startswith(('https://', 'http://')):
    raise InvalidRequest('Invalid callback URL')

  callback_email = options.get('callback_email')
  # Minimal sanity check (same behavior as the original .find('@') < 0 test).
  if callback_email and '@' not in callback_email:
    raise InvalidRequest('Invalid callback e-mail')

  (start_time, end_time) = _validate_logs_arguments(start_time, end_time)
  export_id = logs_model.queue_logs_export(start_time, end_time, export_action_logs_queue,
                                           namespace_name, repository_name, callback_url,
                                           callback_email)
  if export_id is None:
    raise InvalidRequest('Invalid export request')

  return export_id
|
||||
|
||||
|
||||
@resource('/v1/repository/<apirepopath:repository>/exportlogs')
@show_if(features.LOG_EXPORT)
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
class ExportRepositoryLogs(RepositoryParamResource):
  """ Resource for exporting the logs for the specific repository. """
  schemas = {'ExportLogs': EXPORT_LOGS_SCHEMA}

  @require_repo_admin
  @nickname('exportRepoLogs')
  @parse_args()
  @query_param('starttime', 'Earliest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
  @query_param('endtime', 'Latest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
  @validate_json_request('ExportLogs')
  def post(self, namespace, repository, parsed_args):
    """ Queues an export of the logs for the specified repository. """
    # Verify the repository exists before queuing any export work.
    if registry_model.lookup_repository(namespace, repository) is None:
      raise NotFound()

    export_id = _queue_logs_export(parsed_args['starttime'], parsed_args['endtime'],
                                   request.get_json(), namespace,
                                   repository_name=repository)
    return {'export_id': export_id}
|
||||
|
||||
|
||||
@resource('/v1/user/exportlogs')
@show_if(features.LOG_EXPORT)
class ExportUserLogs(ApiResource):
  """ Resource for exporting the logs for the current user repository. """
  schemas = {'ExportLogs': EXPORT_LOGS_SCHEMA}

  @require_user_admin
  @nickname('exportUserLogs')
  @parse_args()
  @query_param('starttime', 'Earliest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
  @query_param('endtime', 'Latest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
  @validate_json_request('ExportLogs')
  def post(self, parsed_args):
    """ Returns the aggregated logs for the current user. """
    # The export is scoped to the authenticated user's own namespace.
    current_user = get_authenticated_user()
    export_id = _queue_logs_export(parsed_args['starttime'], parsed_args['endtime'],
                                   request.get_json(), current_user.username)
    return {'export_id': export_id}
|
||||
|
||||
|
||||
@resource('/v1/organization/<orgname>/exportlogs')
@show_if(features.LOG_EXPORT)
@path_param('orgname', 'The name of the organization')
@related_user_resource(ExportUserLogs)
class ExportOrgLogs(ApiResource):
  """ Resource for exporting the logs for an entire organization. """
  schemas = {'ExportLogs': EXPORT_LOGS_SCHEMA}

  @nickname('exportOrgLogs')
  @parse_args()
  @query_param('starttime', 'Earliest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
  @query_param('endtime', 'Latest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
  @require_scope(scopes.ORG_ADMIN)
  @validate_json_request('ExportLogs')
  def post(self, orgname, parsed_args):
    """ Exports the logs for the specified organization. """
    # Only organization administrators may export org-wide logs.
    if not AdministerOrganizationPermission(orgname).can():
      raise Unauthorized()

    export_id = _queue_logs_export(parsed_args['starttime'], parsed_args['endtime'],
                                   request.get_json(), orgname)
    return {'export_id': export_id}
|
273
endpoints/api/manifest.py
Normal file
273
endpoints/api/manifest.py
Normal file
|
@ -0,0 +1,273 @@
|
|||
""" Manage the manifests of a repository. """
|
||||
import json
|
||||
import logging
|
||||
|
||||
from flask import request
|
||||
|
||||
from app import label_validator, storage
|
||||
from data.model import InvalidLabelKeyException, InvalidMediaTypeException
|
||||
from data.registry_model import registry_model
|
||||
from digest import digest_tools
|
||||
from endpoints.api import (resource, nickname, require_repo_read, require_repo_write,
|
||||
RepositoryParamResource, log_action, validate_json_request,
|
||||
path_param, parse_args, query_param, abort, api,
|
||||
disallow_for_app_repositories, format_date,
|
||||
disallow_for_non_normal_repositories)
|
||||
from endpoints.api.image import image_dict
|
||||
from endpoints.exception import NotFound
|
||||
from util.validation import VALID_LABEL_KEY_REGEX
|
||||
|
||||
|
||||
# Route template for manifest endpoints; `{0}` is filled with the digest regex below
# so Flask only matches well-formed digests.
BASE_MANIFEST_ROUTE = '/v1/repository/<apirepopath:repository>/manifest/<regex("{0}"):manifestref>'
MANIFEST_DIGEST_ROUTE = BASE_MANIFEST_ROUTE.format(digest_tools.DIGEST_PATTERN)

# Media types a manifest label may carry when added through this API.
ALLOWED_LABEL_MEDIA_TYPES = ['text/plain', 'application/json']


logger = logging.getLogger(__name__)
|
||||
|
||||
def _label_dict(label):
|
||||
return {
|
||||
'id': label.uuid,
|
||||
'key': label.key,
|
||||
'value': label.value,
|
||||
'source_type': label.source_type_name,
|
||||
'media_type': label.media_type_name,
|
||||
}
|
||||
|
||||
|
||||
def _layer_dict(manifest_layer, index):
  """ Returns the API representation of a single manifest layer at the given index. """
  # The `command` is either a JSON string of an array (schema 1) or a single
  # string (schema 2); normalize both shapes into a list (or None when absent).
  raw_command = manifest_layer.command
  if not raw_command:
    normalized_command = None
  else:
    try:
      normalized_command = json.loads(raw_command)
    except (TypeError, ValueError):
      normalized_command = [raw_command]

  return {
    'index': index,
    'compressed_size': manifest_layer.compressed_size,
    'is_remote': manifest_layer.is_remote,
    'urls': manifest_layer.urls,
    'command': normalized_command,
    'comment': manifest_layer.comment,
    'author': manifest_layer.author,
    'blob_digest': str(manifest_layer.blob_digest),
    'created_datetime': format_date(manifest_layer.created_datetime),
  }
|
||||
|
||||
|
||||
def _manifest_dict(manifest):
  """ Returns the API representation of the given manifest, aborting 404 when its
      layers cannot be loaded. """
  legacy_image = None
  if manifest.legacy_image_if_present is not None:
    legacy_image = image_dict(manifest.legacy_image, with_history=True)

  # Manifest lists have no single layer stack, so layers stay None for them.
  layers = None
  if not manifest.is_manifest_list:
    layers = registry_model.list_manifest_layers(manifest, storage)
    if layers is None:
      logger.debug('Missing layers for manifest `%s`', manifest.digest)
      abort(404)

  layer_dicts = None
  if layers:
    layer_dicts = [_layer_dict(entry.layer_info, position)
                   for position, entry in enumerate(layers)]

  return {
    'digest': manifest.digest,
    'is_manifest_list': manifest.is_manifest_list,
    'manifest_data': manifest.internal_manifest_bytes.as_unicode(),
    'image': legacy_image,
    'layers': layer_dicts,
  }
|
||||
|
||||
|
||||
@resource(MANIFEST_DIGEST_ROUTE)
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
@path_param('manifestref', 'The digest of the manifest')
class RepositoryManifest(RepositoryParamResource):
  """ Resource for retrieving a specific repository manifest. """
  @require_repo_read
  @nickname('getRepoManifest')
  @disallow_for_app_repositories
  def get(self, namespace_name, repository_name, manifestref):
    """ Returns the manifest with the given digest, or 404 if the repository or
        manifest does not exist. """
    repository_ref = registry_model.lookup_repository(namespace_name, repository_name)
    if repository_ref is None:
      raise NotFound()

    found_manifest = registry_model.lookup_manifest_by_digest(repository_ref, manifestref,
                                                              include_legacy_image=True)
    if found_manifest is None:
      raise NotFound()

    return _manifest_dict(found_manifest)
|
||||
|
||||
|
||||
@resource(MANIFEST_DIGEST_ROUTE + '/labels')
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
@path_param('manifestref', 'The digest of the manifest')
class RepositoryManifestLabels(RepositoryParamResource):
  """ Resource for listing the labels on a specific repository manifest. """
  schemas = {
    'AddLabel': {
      'type': 'object',
      'description': 'Adds a label to a manifest',
      'required': [
        'key',
        'value',
        'media_type',
      ],
      'properties': {
        'key': {
          'type': 'string',
          'description': 'The key for the label',
        },
        'value': {
          'type': 'string',
          'description': 'The value for the label',
        },
        'media_type': {
          'type': ['string', 'null'],
          # None (JSON null) lets the backend infer the media type.
          'description': 'The media type for this label',
          'enum': ALLOWED_LABEL_MEDIA_TYPES + [None],
        },
      },
    },
  }

  @require_repo_read
  @nickname('listManifestLabels')
  @disallow_for_app_repositories
  @parse_args()
  @query_param('filter', 'If specified, only labels matching the given prefix will be returned',
               type=str, default=None)
  def get(self, namespace_name, repository_name, manifestref, parsed_args):
    """ Lists the labels on the manifest, optionally filtered by key prefix.

    Raises NotFound if the repository, manifest, or label set cannot be resolved.
    """
    repo_ref = registry_model.lookup_repository(namespace_name, repository_name)
    if repo_ref is None:
      raise NotFound()

    manifest = registry_model.lookup_manifest_by_digest(repo_ref, manifestref)
    if manifest is None:
      raise NotFound()

    labels = registry_model.list_manifest_labels(manifest, parsed_args['filter'])
    if labels is None:
      raise NotFound()

    return {
      'labels': [_label_dict(label) for label in labels]
    }

  @require_repo_write
  @nickname('addManifestLabel')
  @disallow_for_app_repositories
  @disallow_for_non_normal_repositories
  @validate_json_request('AddLabel')
  def post(self, namespace_name, repository_name, manifestref):
    """ Adds a new label into the tag manifest. """
    label_data = request.get_json()

    # Check for any reserved prefixes.
    if label_validator.has_reserved_prefix(label_data['key']):
      abort(400, message='Label has a reserved prefix')

    repo_ref = registry_model.lookup_repository(namespace_name, repository_name)
    if repo_ref is None:
      raise NotFound()

    manifest = registry_model.lookup_manifest_by_digest(repo_ref, manifestref)
    if manifest is None:
      raise NotFound()

    label = None
    try:
      # 'api' records that this label originated from the HTTP API (vs. the image).
      label = registry_model.create_manifest_label(manifest,
                                                   label_data['key'],
                                                   label_data['value'],
                                                   'api',
                                                   label_data['media_type'])
    except InvalidLabelKeyException:
      message = ('Label is of an invalid format or missing please ' +
                 'use %s format for labels' % VALID_LABEL_KEY_REGEX)
      abort(400, message=message)
    except InvalidMediaTypeException:
      message = 'Media type is invalid please use a valid media type: text/plain, application/json'
      abort(400, message=message)

    if label is None:
      raise NotFound()

    # Audit-log metadata describing the newly created label.
    metadata = {
      'id': label.uuid,
      'key': label.key,
      'value': label.value,
      'manifest_digest': manifestref,
      'media_type': label.media_type_name,
      'namespace': namespace_name,
      'repo': repository_name,
    }

    log_action('manifest_label_add', namespace_name, metadata, repo_name=repository_name)

    resp = {'label': _label_dict(label)}
    repo_string = '%s/%s' % (namespace_name, repository_name)
    # 201 Created with a Location header pointing at the new label resource.
    headers = {
      'Location': api.url_for(ManageRepositoryManifestLabel, repository=repo_string,
                              manifestref=manifestref, labelid=label.uuid),
    }
    return resp, 201, headers
|
||||
|
||||
|
||||
@resource(MANIFEST_DIGEST_ROUTE + '/labels/<labelid>')
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
@path_param('manifestref', 'The digest of the manifest')
@path_param('labelid', 'The ID of the label')
class ManageRepositoryManifestLabel(RepositoryParamResource):
  """ Resource for managing the labels on a specific repository manifest. """
  @require_repo_read
  @nickname('getManifestLabel')
  @disallow_for_app_repositories
  def get(self, namespace_name, repository_name, manifestref, labelid):
    """ Retrieves the label with the specific ID under the manifest. """
    repository_ref = registry_model.lookup_repository(namespace_name, repository_name)
    if repository_ref is None:
      raise NotFound()

    found_manifest = registry_model.lookup_manifest_by_digest(repository_ref, manifestref)
    if found_manifest is None:
      raise NotFound()

    found_label = registry_model.get_manifest_label(found_manifest, labelid)
    if found_label is None:
      raise NotFound()

    return _label_dict(found_label)

  @require_repo_write
  @nickname('deleteManifestLabel')
  @disallow_for_app_repositories
  @disallow_for_non_normal_repositories
  def delete(self, namespace_name, repository_name, manifestref, labelid):
    """ Deletes an existing label from a manifest. """
    repository_ref = registry_model.lookup_repository(namespace_name, repository_name)
    if repository_ref is None:
      raise NotFound()

    found_manifest = registry_model.lookup_manifest_by_digest(repository_ref, manifestref)
    if found_manifest is None:
      raise NotFound()

    deleted = registry_model.delete_manifest_label(found_manifest, labelid)
    if deleted is None:
      raise NotFound()

    # Record the deletion in the audit log before returning No Content.
    log_action('manifest_label_delete', namespace_name,
               {
                 'id': labelid,
                 'key': deleted.key,
                 'value': deleted.value,
                 'manifest_digest': manifestref,
                 'namespace': namespace_name,
                 'repo': repository_name,
               },
               repo_name=repository_name)
    return '', 204
|
467
endpoints/api/mirror.py
Normal file
467
endpoints/api/mirror.py
Normal file
|
@ -0,0 +1,467 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
import logging
|
||||
|
||||
from email.utils import parsedate_tz, mktime_tz
|
||||
from datetime import datetime
|
||||
|
||||
from jsonschema import ValidationError
|
||||
from flask import request
|
||||
|
||||
import features
|
||||
|
||||
from auth.auth_context import get_authenticated_user
|
||||
from data import model
|
||||
from endpoints.api import (RepositoryParamResource, nickname, path_param, require_repo_admin,
|
||||
resource, validate_json_request, define_json_response, show_if,
|
||||
format_date)
|
||||
from endpoints.exception import NotFound
|
||||
from util.audit import track_and_log, wrap_repository
|
||||
from util.names import parse_robot_username
|
||||
|
||||
|
||||
# Shared JSON-schema property definitions for the repository-mirroring endpoints
# (create / update / view configurations below reference this dict directly).
common_properties = {
  'is_enabled': {
    'type': 'boolean',
    'description': 'Used to enable or disable synchronizations.',
  },
  'external_reference': {
    'type': 'string',
    'description': 'Location of the external repository.'
  },
  'external_registry_username': {
    'type': ['string', 'null'],
    'description': 'Username used to authenticate with external registry.',
  },
  'external_registry_password': {
    'type': ['string', 'null'],
    'description': 'Password used to authenticate with external registry.',
  },
  'sync_start_date': {
    'type': 'string',
    'description': 'Determines the next time this repository is ready for synchronization.',
  },
  'sync_interval': {
    'type': 'integer',
    'minimum': 0,
    'description': 'Number of seconds after next_start_date to begin synchronizing.'
  },
  'robot_username': {
    'type': 'string',
    'description': 'Username of robot which will be used for image pushes.'
  },
  'root_rule': {
    'type': 'object',
    # NOTE: a duplicate 'description' key ('Tag mirror rule') previously appeared
    # here; it was silently shadowed by the 'description' below and has been removed.
    'required': [
      'rule_type',
      'rule_value'
    ],
    'properties': {
      'rule_type': {
        'type': 'string',
        'description': 'Rule type must be "TAG_GLOB_CSV"'
      },
      'rule_value': {
        'type': 'array',
        'description': 'Array of tag patterns',
        'items': {
          'type': 'string'
        }
      }
    },
    'description': 'A list of glob-patterns used to determine which tags should be synchronized.'
  },
  'external_registry_config': {
    'type': 'object',
    'properties': {
      'verify_tls': {
        'type': 'boolean',
        'description': (
          'Determines whether HTTPs is required and the certificate is verified when '
          'communicating with the external repository.'
        ),
      },
      'proxy': {
        'type': 'object',
        'description': 'Proxy configuration for use during synchronization.',
        'properties': {
          'https_proxy': {
            'type': ['string', 'null'],
            'description': 'Value for HTTPS_PROXY environment variable during sync.'
          },
          'http_proxy': {
            'type': ['string', 'null'],
            'description': 'Value for HTTP_PROXY environment variable during sync.'
          },
          'no_proxy': {
            'type': ['string', 'null'],
            'description': 'Value for NO_PROXY environment variable during sync.'
          }
        }
      }
    }
  }
}
|
||||
|
||||
|
||||
@resource('/v1/repository/<apirepopath:repository>/mirror/sync-now')
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
@show_if(features.REPO_MIRROR)
class RepoMirrorSyncNowResource(RepositoryParamResource):
  """ A resource for managing RepoMirrorConfig.sync_status """

  @require_repo_admin
  @nickname('syncNow')
  def post(self, namespace_name, repository_name):
    """ Update the sync_status for a given Repository's mirroring configuration. """
    repo = model.repository.get_repository(namespace_name, repository_name)
    if not repo:
      raise NotFound()

    mirror = model.repo_mirror.get_mirror(repository=repo)
    if not mirror:
      raise NotFound()

    # `mirror` is guaranteed non-falsy here (checked above), so only the status
    # transition is tested; a failed transition is reported as NotFound.
    if model.repo_mirror.update_sync_status_to_sync_now(mirror):
      track_and_log('repo_mirror_config_changed', wrap_repository(repo), changed="sync_status",
                    to="SYNC_NOW")
      return '', 204

    raise NotFound()
|
||||
|
||||
|
||||
@resource('/v1/repository/<apirepopath:repository>/mirror/sync-cancel')
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
@show_if(features.REPO_MIRROR)
class RepoMirrorSyncCancelResource(RepositoryParamResource):
  """ A resource for managing RepoMirrorConfig.sync_status """

  @require_repo_admin
  @nickname('syncCancel')
  def post(self, namespace_name, repository_name):
    """ Update the sync_status for a given Repository's mirroring configuration. """
    repo = model.repository.get_repository(namespace_name, repository_name)
    if not repo:
      raise NotFound()

    mirror = model.repo_mirror.get_mirror(repository=repo)
    if not mirror:
      raise NotFound()

    # `mirror` is guaranteed non-falsy here (checked above), so only the status
    # transition is tested; a failed transition is reported as NotFound.
    if model.repo_mirror.update_sync_status_to_cancel(mirror):
      track_and_log('repo_mirror_config_changed', wrap_repository(repo), changed="sync_status",
                    to="SYNC_CANCEL")
      return '', 204

    raise NotFound()
|
||||
|
||||
|
||||
@resource('/v1/repository/<apirepopath:repository>/mirror')
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
@show_if(features.REPO_MIRROR)
class RepoMirrorResource(RepositoryParamResource):
  """
  Resource for managing repository mirroring.
  """
  schemas = {
    'CreateMirrorConfig': {
      'description': 'Create the repository mirroring configuration.',
      'type': 'object',
      'required': [
        'external_reference',
        'sync_interval',
        'sync_start_date',
        'root_rule'
      ],
      'properties': common_properties
    },
    'UpdateMirrorConfig': {
      'description': 'Update the repository mirroring configuration.',
      'type': 'object',
      'properties': common_properties
    },
    'ViewMirrorConfig': {
      'description': 'View the repository mirroring configuration.',
      'type': 'object',
      'required': [
        'is_enabled',
        'mirror_type',
        'external_reference',
        'external_registry_username',
        'external_registry_config',
        'sync_interval',
        'sync_start_date',
        'sync_expiration_date',
        'sync_retries_remaining',
        'sync_status',
        'root_rule',
        'robot_username',
      ],
      'properties': common_properties
    }
  }

  @require_repo_admin
  @define_json_response('ViewMirrorConfig')
  @nickname('getRepoMirrorConfig')
  def get(self, namespace_name, repository_name):
    """ Return the Mirror configuration for a given Repository. """
    repo = model.repository.get_repository(namespace_name, repository_name)
    if not repo:
      raise NotFound()

    mirror = model.repo_mirror.get_mirror(repo)
    if not mirror:
      raise NotFound()

    # Transformations: convert DB representations to API-friendly values.
    rules = mirror.root_rule.rule_value
    username = self._decrypt_username(mirror.external_registry_username)
    sync_start_date = self._dt_to_string(mirror.sync_start_date)
    sync_expiration_date = self._dt_to_string(mirror.sync_expiration_date)
    robot = mirror.internal_robot.username if mirror.internal_robot is not None else None

    # NOTE: the password is deliberately never returned by this endpoint.
    return {
      'is_enabled': mirror.is_enabled,
      'mirror_type': mirror.mirror_type.name,
      'external_reference': mirror.external_reference,
      'external_registry_username': username,
      'external_registry_config': mirror.external_registry_config or {},
      'sync_interval': mirror.sync_interval,
      'sync_start_date': sync_start_date,
      'sync_expiration_date': sync_expiration_date,
      'sync_retries_remaining': mirror.sync_retries_remaining,
      'sync_status': mirror.sync_status.name,
      'root_rule': {
        'rule_type': 'TAG_GLOB_CSV',
        'rule_value': rules
      },
      'robot_username': robot,
    }

  @require_repo_admin
  @nickname('createRepoMirrorConfig')
  @validate_json_request('CreateMirrorConfig')
  def post(self, namespace_name, repository_name):
    """ Create a RepoMirrorConfig for a given Repository. """
    # TODO: Tidy up this function
    # TODO: Specify only the data we want to pass on when creating the RepoMirrorConfig. Avoid
    # the possibility of data injection.

    repo = model.repository.get_repository(namespace_name, repository_name)
    if not repo:
      raise NotFound()

    # A repository may have at most one mirror configuration.
    if model.repo_mirror.get_mirror(repo):
      return {'detail': 'Mirror configuration already exits for repository %s/%s' % (
        namespace_name, repository_name)}, 409

    data = request.get_json()

    data['sync_start_date'] = self._string_to_dt(data['sync_start_date'])

    rule = model.repo_mirror.create_rule(repo, data['root_rule']['rule_value'])
    del data['root_rule']

    # Verify the robot is part of the Repository's namespace
    robot = self._setup_robot_for_mirroring(namespace_name, repository_name, data['robot_username'])
    del data['robot_username']

    # Remaining request fields are forwarded verbatim (see the injection TODO above).
    mirror = model.repo_mirror.enable_mirroring_for_repository(repo, root_rule=rule,
                                                               internal_robot=robot, **data)
    if mirror:
      track_and_log('repo_mirror_config_changed', wrap_repository(repo), changed='external_reference', to=data['external_reference'])
      return '', 201
    else:
      # TODO: Determine appropriate Response
      return {'detail': 'RepoMirrorConfig already exists for this repository.'}, 409

  @require_repo_admin
  @validate_json_request('UpdateMirrorConfig')
  @nickname('changeRepoMirrorConfig')
  def put(self, namespace_name, repository_name):
    """ Allow users to modifying the repository's mirroring configuration. """
    values = request.get_json()

    repo = model.repository.get_repository(namespace_name, repository_name)
    if not repo:
      raise NotFound()

    mirror = model.repo_mirror.get_mirror(repo)
    if not mirror:
      raise NotFound()

    # Each recognized field is applied independently; a change is audit-logged
    # only when the corresponding model call reports success.
    if 'is_enabled' in values:
      if values['is_enabled'] == True:
        if model.repo_mirror.enable_mirror(repo):
          track_and_log('repo_mirror_config_changed', wrap_repository(repo), changed='is_enabled', to=True)
      if values['is_enabled'] == False:
        if model.repo_mirror.disable_mirror(repo):
          track_and_log('repo_mirror_config_changed', wrap_repository(repo), changed='is_enabled', to=False)

    if 'external_reference' in values:
      if values['external_reference'] == '':
        return {'detail': 'Empty string is an invalid repository location.'}, 400
      if model.repo_mirror.change_remote(repo, values['external_reference']):
        track_and_log('repo_mirror_config_changed', wrap_repository(repo), changed='external_reference', to=values['external_reference'])

    if 'robot_username' in values:
      robot_username = values['robot_username']
      robot = self._setup_robot_for_mirroring(namespace_name, repository_name, robot_username)
      if model.repo_mirror.set_mirroring_robot(repo, robot):
        track_and_log('repo_mirror_config_changed', wrap_repository(repo), changed='robot_username', to=robot_username)

    if 'sync_start_date' in values:
      try:
        sync_start_date = self._string_to_dt(values['sync_start_date'])
      except ValueError as e:
        return {'detail': 'Incorrect DateTime format for sync_start_date.'}, 400
      if model.repo_mirror.change_sync_start_date(repo, sync_start_date):
        track_and_log('repo_mirror_config_changed', wrap_repository(repo), changed='sync_start_date', to=sync_start_date)

    if 'sync_interval' in values:
      if model.repo_mirror.change_sync_interval(repo, values['sync_interval']):
        track_and_log('repo_mirror_config_changed', wrap_repository(repo), changed='sync_interval', to=values['sync_interval'])

    # Username and password may be updated together; a password can never be set
    # while simultaneously deleting the username.
    if 'external_registry_username' in values and 'external_registry_password' in values:
      username = values['external_registry_username']
      password = values['external_registry_password']
      if username is None and password is not None:
        return {'detail': 'Unable to delete username while setting a password.'}, 400
      if model.repo_mirror.change_credentials(repo, username, password):
        track_and_log('repo_mirror_config_changed', wrap_repository(repo), changed='external_registry_username', to=username)
        if password is None:
          track_and_log('repo_mirror_config_changed', wrap_repository(repo), changed='external_registry_password', to=None)
        else:
          # Never log the actual password value.
          track_and_log('repo_mirror_config_changed', wrap_repository(repo), changed='external_registry_password', to="********")

    elif 'external_registry_username' in values:
      username = values['external_registry_username']
      if model.repo_mirror.change_username(repo, username):
        track_and_log('repo_mirror_config_changed', wrap_repository(repo), changed='external_registry_username', to=username)

    # Do not allow specifying a password without setting a username
    if 'external_registry_password' in values and 'external_registry_username' not in values:
      return {'detail': 'Unable to set a new password without also specifying a username.'}, 400

    if 'external_registry_config' in values:
      external_registry_config = values.get('external_registry_config', {})

      if 'verify_tls' in external_registry_config:
        updates = {'verify_tls': external_registry_config['verify_tls']}
        if model.repo_mirror.change_external_registry_config(repo, updates):
          track_and_log('repo_mirror_config_changed', wrap_repository(repo), changed='verify_tls', to=external_registry_config['verify_tls'])

      if 'proxy' in external_registry_config:
        proxy_values = external_registry_config.get('proxy', {})

        if 'http_proxy' in proxy_values:
          updates = {'proxy': {'http_proxy': proxy_values['http_proxy']}}
          if model.repo_mirror.change_external_registry_config(repo, updates):
            track_and_log('repo_mirror_config_changed', wrap_repository(repo), changed='http_proxy', to=proxy_values['http_proxy'])

        if 'https_proxy' in proxy_values:
          updates = {'proxy': {'https_proxy': proxy_values['https_proxy']}}
          if model.repo_mirror.change_external_registry_config(repo, updates):
            track_and_log('repo_mirror_config_changed', wrap_repository(repo), changed='https_proxy', to=proxy_values['https_proxy'])

        if 'no_proxy' in proxy_values:
          updates = {'proxy': {'no_proxy': proxy_values['no_proxy']}}
          if model.repo_mirror.change_external_registry_config(repo, updates):
            track_and_log('repo_mirror_config_changed', wrap_repository(repo), changed='no_proxy', to=proxy_values['no_proxy'])

    # NOTE(review): 201 for an update looks unusual (200/204 would be expected) —
    # confirm clients do not depend on this status before changing it.
    return '', 201

  def _setup_robot_for_mirroring(self, namespace_name, repo_name, robot_username):
    """ Validate robot exists and give write permissions. """
    robot = model.user.lookup_robot(robot_username)
    # NOTE(review): `assert` is stripped under -O; presumably lookup_robot raises on
    # missing robots — confirm, otherwise this check silently disappears.
    assert robot.robot

    namespace, _ = parse_robot_username(robot_username)
    if namespace != namespace_name:
      raise model.DataModelException('Invalid robot')

    # Ensure the robot specified has access to the repository. If not, grant it.
    permissions = model.permission.get_user_repository_permissions(robot, namespace_name, repo_name)
    if not permissions or permissions[0].role.name == 'read':
      model.permission.set_user_repo_permission(robot.username, namespace_name, repo_name, 'write')

    return robot

  def _string_to_dt(self, string):
    """ Convert String to correct DateTime format. """
    if string is None:
      return None

    """
    # TODO: Use RFC2822. This doesn't work consistently.
    # TODO: Move this to same module as `format_date` once fixed.
    tup = parsedate_tz(string)
    if len(tup) == 8:
      tup = tup + (0,)  # If TimeZone is omitted, assume UTC
    ts = mktime_tz(tup)
    dt = datetime.fromtimestamp(ts, pytz.UTC)
    return dt
    """
    # NOTE(review): `unicode` only exists on Python 2 — this line breaks on Python 3.
    assert isinstance(string, (str, unicode))
    # Expects an ISO-8601 UTC timestamp ending in 'Z', e.g. '2020-01-01T00:00:00Z'.
    dt = datetime.strptime(string, "%Y-%m-%dT%H:%M:%SZ")
    return dt

  def _dt_to_string(self, dt):
    """ Convert DateTime to correctly formatted String."""
    if dt is None:
      return None

    """
    # TODO: Use RFC2822. Need to make it work bi-directionally.
    return format_date(dt)
    """

    assert isinstance(dt, datetime)
    # Mirrors _string_to_dt: naive datetime serialized as ISO-8601 with a 'Z' suffix.
    string = dt.isoformat() + 'Z'
    return string

  def _decrypt_username(self, username):
    # `username` is an encrypted field object when present; None passes through.
    if username is None:
      return None
    return username.decrypt()
|
||||
|
||||
|
||||
@resource('/v1/repository/<apirepopath:repository>/mirror/rules')
@show_if(features.REPO_MIRROR)
class ManageRepoMirrorRule(RepositoryParamResource):
  """
  Operations to manage a single Repository Mirroring Rule.
  TODO: At the moment, we are only dealing with a single rule associated with the mirror.
        This should change to update the rule and address it using its UUID.
  """
  schemas = {
    'MirrorRule': {
      'type': 'object',
      'description': 'A rule used to define how a repository is mirrored.',
      'required': ['root_rule'],
      'properties': {
        'root_rule': common_properties['root_rule']
      }
    }
  }

  @require_repo_admin
  @nickname('changeRepoMirrorRule')
  @validate_json_request('MirrorRule')
  def put(self, namespace_name, repository_name):
    """
    Update an existing RepoMirrorRule
    """
    repo = model.repository.get_repository(namespace_name, repository_name)
    if not repo:
      raise NotFound()

    rule = model.repo_mirror.get_root_rule(repo)
    if not rule:
      return {'detail': 'The rule appears to be missing.'}, 400

    data = request.get_json()
    if model.repo_mirror.change_rule_value(rule, data['root_rule']['rule_value']):
      track_and_log('repo_mirror_config_changed', wrap_repository(repo), changed="mirror_rule", to=data['root_rule']['rule_value'])
      # Was `return 200`, which made Flask-RESTful serialize the integer 200 as the
      # response body; return an empty body with an explicit 200 status instead,
      # matching the (body, status) convention used by the other handlers here.
      return '', 200
    else:
      return {'detail': 'Unable to update rule.'}, 400
|
740
endpoints/api/organization.py
Normal file
740
endpoints/api/organization.py
Normal file
|
@ -0,0 +1,740 @@
|
|||
""" Manage organizations, members and OAuth applications. """
|
||||
|
||||
import logging
|
||||
import recaptcha2
|
||||
|
||||
from flask import request
|
||||
|
||||
import features
|
||||
|
||||
from active_migration import ActiveDataMigration, ERTMigrationFlags
|
||||
from app import (billing as stripe, avatar, all_queues, authentication, namespace_gc_queue,
|
||||
ip_resolver, app)
|
||||
from endpoints.api import (resource, nickname, ApiResource, validate_json_request, request_error,
|
||||
related_user_resource, internal_only, require_user_admin, log_action,
|
||||
show_if, path_param, require_scope, require_fresh_login)
|
||||
from endpoints.exception import Unauthorized, NotFound
|
||||
from endpoints.api.user import User, PrivateRepositories
|
||||
from auth.permissions import (AdministerOrganizationPermission, OrganizationMemberPermission,
|
||||
CreateRepositoryPermission, ViewTeamPermission)
|
||||
from auth.auth_context import get_authenticated_user
|
||||
from auth import scopes
|
||||
from data import model
|
||||
from data.billing import get_plan
|
||||
from util.names import parse_robot_username
|
||||
from util.request import get_request_ip
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def team_view(orgname, team):
  """ Build the API representation of `team` within organization `orgname`. """
  can_view = ViewTeamPermission(orgname, team.name).can()
  return {
    'name': team.name,
    'description': team.description,
    'role': team.role_name,
    'avatar': avatar.get_data_for_team(team),
    'can_view': can_view,

    'repo_count': team.repo_count,
    'member_count': team.member_count,

    'is_synced': team.is_synced,
  }
|
||||
|
||||
|
||||
def org_view(o, teams):
  """ Build the API representation of organization `o`.

  `teams` may be None to omit team data entirely. Billing and contact fields
  are only included when the caller administers the organization.
  """
  is_admin = AdministerOrganizationPermission(o.username).can()
  is_member = OrganizationMemberPermission(o.username).can()

  view = {
    'name': o.username,
    'email': o.email if is_admin else '',
    'avatar': avatar.get_data_for_user(o),
    'is_admin': is_admin,
    'is_member': is_member,
  }

  if teams is not None:
    # Stable ordering by team id; `ordered_teams` preserves it for clients.
    ordered = sorted(teams, key=lambda team: team.id)
    view['teams'] = {t.name: team_view(o.username, t) for t in ordered}
    view['ordered_teams'] = [t.name for t in ordered]

  if is_admin:
    view.update({
      'invoice_email': o.invoice_email,
      'invoice_email_address': o.invoice_email_address,
      'tag_expiration_s': o.removed_tag_expiration_s,
      'is_free_account': o.stripe_id is None,
    })

  return view
|
||||
|
||||
|
||||
@resource('/v1/organization/')
class OrganizationList(ApiResource):
  """ Resource for creating organizations. """
  schemas = {
    'NewOrg': {
      'type': 'object',
      'description': 'Description of a new organization.',
      'required': [
        'name',
      ],
      'properties': {
        'name': {
          'type': 'string',
          'description': 'Organization username',
        },
        'email': {
          'type': 'string',
          'description': 'Organization contact email',
        },
        'recaptcha_response': {
          'type': 'string',
          'description': 'The (may be disabled) recaptcha response code for verification',
        },
      },
    },
  }

  @require_user_admin
  @nickname('createOrganization')
  @validate_json_request('NewOrg')
  def post(self):
    """ Create a new organization.

    Fails with a request error when the name collides with an existing user
    or organization, or when mailing is enabled but no email was supplied;
    returns 400 when recaptcha verification (if enabled) fails.
    """
    user = get_authenticated_user()
    org_data = request.get_json()
    existing = None

    # Users and organizations share one namespace, so check both for a clash.
    try:
      existing = model.organization.get_organization(org_data['name'])
    except model.InvalidOrganizationException:
      pass

    if not existing:
      existing = model.user.get_user(org_data['name'])

    if existing:
      msg = 'A user or organization with this name already exists'
      raise request_error(message=msg)

    if features.MAILING and not org_data.get('email'):
      raise request_error(message='Email address is required')

    # If recaptcha is enabled, then verify the user is a human.
    if features.RECAPTCHA:
      recaptcha_response = org_data.get('recaptcha_response', '')
      result = recaptcha2.verify(app.config['RECAPTCHA_SECRET_KEY'],
                                 recaptcha_response,
                                 get_request_ip())

      if not result['success']:
        return {
          'message': 'Are you a bot? If not, please revalidate the captcha.'
        }, 400

    # Namespaces created from suspicious IPs are flagged for later review.
    is_possible_abuser = ip_resolver.is_ip_possible_threat(get_request_ip())
    try:
      model.organization.create_organization(org_data['name'], org_data.get('email'), user,
                                             email_required=features.MAILING,
                                             is_possible_abuser=is_possible_abuser)
      return 'Created', 201
    except model.DataModelException as ex:
      raise request_error(exception=ex)
|
||||
|
||||
|
||||
@resource('/v1/organization/<orgname>')
@path_param('orgname', 'The name of the organization')
@related_user_resource(User)
class Organization(ApiResource):
  """ Resource for managing organizations. """
  schemas = {
    'UpdateOrg': {
      'type': 'object',
      'description': 'Description of updates for an existing organization',
      'properties': {
        'email': {
          'type': 'string',
          'description': 'Organization contact email',
        },
        'invoice_email': {
          'type': 'boolean',
          'description': 'Whether the organization desires to receive emails for invoices',
        },
        'invoice_email_address': {
          'type': ['string', 'null'],
          'description': 'The email address at which to receive invoices',
        },
        'tag_expiration_s': {
          'type': 'integer',
          'minimum': 0,
          'description': 'The number of seconds for tag expiration',
        },
      },
    },
  }

  @nickname('getOrganization')
  def get(self, orgname):
    """ Get the details for the specified organization """
    try:
      org = model.organization.get_organization(orgname)
    except model.InvalidOrganizationException:
      raise NotFound()

    # Team data is only included for organization members.
    teams = None
    if OrganizationMemberPermission(orgname).can():
      has_syncing = features.TEAM_SYNCING and bool(authentication.federated_service)
      teams = model.team.get_teams_within_org(org, has_syncing)

    return org_view(org, teams)

  @require_scope(scopes.ORG_ADMIN)
  @nickname('changeOrganizationDetails')
  @validate_json_request('UpdateOrg')
  def put(self, orgname):
    """ Change the details for the specified organization.

    Each field in the payload is applied independently; unknown organizations
    yield 404 and non-admin callers get Unauthorized.
    """
    permission = AdministerOrganizationPermission(orgname)
    if permission.can():
      try:
        org = model.organization.get_organization(orgname)
      except model.InvalidOrganizationException:
        raise NotFound()

      org_data = request.get_json()
      if 'invoice_email' in org_data:
        logger.debug('Changing invoice_email for organization: %s', org.username)
        model.user.change_send_invoice_email(org, org_data['invoice_email'])

      # Only write the invoice address when it actually changed.
      if ('invoice_email_address' in org_data and
          org_data['invoice_email_address'] != org.invoice_email_address):
        new_email = org_data['invoice_email_address']
        logger.debug('Changing invoice email address for organization: %s', org.username)
        model.user.change_invoice_email_address(org, new_email)

      if 'email' in org_data and org_data['email'] != org.email:
        new_email = org_data['email']
        # Contact e-mail must be unique across all users and organizations.
        if model.user.find_user_by_email(new_email):
          raise request_error(message='E-mail address already used')

        logger.debug('Changing email address for organization: %s', org.username)
        model.user.update_email(org, new_email)

      if features.CHANGE_TAG_EXPIRATION and 'tag_expiration_s' in org_data:
        logger.debug('Changing organization tag expiration to: %ss', org_data['tag_expiration_s'])
        model.user.change_user_tag_expiration(org, org_data['tag_expiration_s'])

      teams = model.team.get_teams_within_org(org)
      return org_view(org, teams)
    raise Unauthorized()

  @require_scope(scopes.ORG_ADMIN)
  @require_fresh_login
  @nickname('deleteAdminedOrganization')
  def delete(self, orgname):
    """ Deletes the specified organization.

    The namespace is only *marked* for deletion here; actual cleanup is
    performed asynchronously via the namespace GC queue.
    """
    permission = AdministerOrganizationPermission(orgname)
    if permission.can():
      try:
        org = model.organization.get_organization(orgname)
      except model.InvalidOrganizationException:
        raise NotFound()

      model.user.mark_namespace_for_deletion(org, all_queues, namespace_gc_queue)
      return '', 204

    raise Unauthorized()
|
||||
|
||||
|
||||
@resource('/v1/organization/<orgname>/private')
@path_param('orgname', 'The name of the organization')
@internal_only
@related_user_resource(PrivateRepositories)
@show_if(features.BILLING)
class OrgPrivateRepositories(ApiResource):
  """ Custom verb to compute whether additional private repositories are available. """

  @require_scope(scopes.ORG_ADMIN)
  @nickname('getOrganizationPrivateAllowed')
  def get(self, orgname):
    """ Return whether or not this org is allowed to create new private repositories. """
    permission = CreateRepositoryPermission(orgname)
    if permission.can():
      organization = model.organization.get_organization(orgname)
      private_repos = model.user.get_private_repo_count(organization.username)
      # Default: orgs without an active paid subscription get no private repos.
      data = {
        'privateAllowed': False
      }

      if organization.stripe_id:
        # Compare the current private repo count against the subscribed
        # plan's allowance (a live Stripe API call).
        cus = stripe.Customer.retrieve(organization.stripe_id)
        if cus.subscription:
          repos_allowed = 0
          plan = get_plan(cus.subscription.plan.id)
          if plan:
            repos_allowed = plan['privateRepos']

          data['privateAllowed'] = (private_repos < repos_allowed)


      # The raw private-repo count is only exposed to org admins.
      if AdministerOrganizationPermission(orgname).can():
        data['privateCount'] = private_repos

      return data

    raise Unauthorized()
|
||||
|
||||
|
||||
@resource('/v1/organization/<orgname>/collaborators')
@path_param('orgname', 'The name of the organization')
class OrganizationCollaboratorList(ApiResource):
  """ Resource for listing outside collaborators of an organization.

      Collaborators are users that do not belong to any team in the
      organization, but who have direct permissions on one or more
      repositories belonging to the organization.
  """

  @require_scope(scopes.ORG_ADMIN)
  @nickname('getOrganizationCollaborators')
  def get(self, orgname):
    """ List outside collaborators of the specified organization. """
    if not AdministerOrganizationPermission(orgname).can():
      raise Unauthorized()

    try:
      org = model.organization.get_organization(orgname)
    except model.InvalidOrganizationException:
      raise NotFound()

    all_perms = model.permission.list_organization_member_permissions(org)
    membership = model.team.list_organization_members_by_teams(org)

    # Everyone on at least one team is a member, not a collaborator.
    org_members = {m.user.username for m in membership}

    collaborators = {}
    for perm in all_perms:
      username = perm.user.username

      # Only interested in non-member permissions.
      if username in org_members:
        continue

      entry = collaborators.get(username)
      if entry is None:
        entry = collaborators[username] = {
          'kind': 'user',
          'name': username,
          'avatar': avatar.get_data_for_user(perm.user),
          'repositories': [],
        }

      entry['repositories'].append(perm.repository.name)

    return {'collaborators': collaborators.values()}
|
||||
|
||||
|
||||
@resource('/v1/organization/<orgname>/members')
@path_param('orgname', 'The name of the organization')
class OrganizationMemberList(ApiResource):
  """ Resource for listing the members of an organization. """

  @require_scope(scopes.ORG_ADMIN)
  @nickname('getOrganizationMembers')
  def get(self, orgname):
    """ List the human members of the specified organization.

    Robot accounts are excluded. Each member entry carries the teams they
    belong to and the repositories on which they hold direct permissions.
    """
    permission = AdministerOrganizationPermission(orgname)
    if permission.can():
      try:
        org = model.organization.get_organization(orgname)
      except model.InvalidOrganizationException:
        raise NotFound()

      # Loop to create the members dictionary. Note that the members collection
      # will return an entry for *every team* a member is on, so we will have
      # duplicate keys (which is why we pre-build the dictionary).
      members_dict = {}
      members = model.team.list_organization_members_by_teams(org)
      for member in members:
        if member.user.robot:
          continue

        if not member.user.username in members_dict:
          member_data = {
            'name': member.user.username,
            'kind': 'user',
            'avatar': avatar.get_data_for_user(member.user),
            'teams': [],
            'repositories': []
          }

          members_dict[member.user.username] = member_data

        members_dict[member.user.username]['teams'].append({
          'name': member.team.name,
          'avatar': avatar.get_data_for_team(member.team),
        })

      # Loop to add direct repository permissions.
      for permission in model.permission.list_organization_member_permissions(org):
        username = permission.user.username
        if not username in members_dict:
          continue

        members_dict[username]['repositories'].append(permission.repository.name)

      return {'members': members_dict.values()}

    raise Unauthorized()
|
||||
|
||||
|
||||
|
||||
@resource('/v1/organization/<orgname>/members/<membername>')
@path_param('orgname', 'The name of the organization')
@path_param('membername', 'The username of the organization member')
class OrganizationMember(ApiResource):
  """ Resource for managing individual organization members. """

  @require_scope(scopes.ORG_ADMIN)
  @nickname('getOrganizationMember')
  def get(self, orgname, membername):
    """ Retrieves the details of a member (user or robot) of the organization.

    Raises NotFound for unknown users/orgs and for users who are neither on
    a team in the organization nor robots owned by it.
    """
    permission = AdministerOrganizationPermission(orgname)
    if permission.can():
      # Lookup the user.
      member = model.user.get_user(membername)
      if not member:
        raise NotFound()

      organization = model.user.get_user_or_org(orgname)
      if not organization:
        raise NotFound()

      # Lookup the user's information in the organization.
      teams = list(model.team.get_user_teams_within_org(membername, organization))
      if not teams:
        # 404 if the user is not a robot under the organization, as that means the referenced
        # user or robot is not a member of this organization.
        if not member.robot:
          raise NotFound()

        # A robot belongs to the org whose name prefixes its username.
        namespace, _ = parse_robot_username(member.username)
        if namespace != orgname:
          raise NotFound()

      repo_permissions = model.permission.list_organization_member_permissions(organization, member)

      def local_team_view(team):
        # Minimal team representation embedded in the member payload.
        return {
          'name': team.name,
          'avatar': avatar.get_data_for_team(team),
        }

      return {
        'name': member.username,
        'kind': 'robot' if member.robot else 'user',
        'avatar': avatar.get_data_for_user(member),
        'teams': [local_team_view(team) for team in teams],
        'repositories': [permission.repository.name for permission in repo_permissions]
      }

    raise Unauthorized()

  @require_scope(scopes.ORG_ADMIN)
  @nickname('removeOrganizationMember')
  def delete(self, orgname, membername):
    """ Removes a member from an organization, revoking all its repository
        privileges and removing it from all teams in the organization.
    """
    permission = AdministerOrganizationPermission(orgname)
    if permission.can():
      # Lookup the user. Robots cannot be removed via this endpoint.
      user = model.user.get_nonrobot_user(membername)
      if not user:
        raise NotFound()

      try:
        org = model.organization.get_organization(orgname)
      except model.InvalidOrganizationException:
        raise NotFound()

      # Remove the user from the organization.
      model.organization.remove_organization_member(org, user)
      return '', 204

    raise Unauthorized()
|
||||
|
||||
|
||||
@resource('/v1/app/<client_id>')
@path_param('client_id', 'The OAuth client ID')
class ApplicationInformation(ApiResource):
  """ Resource that returns public information about a registered application. """

  @nickname('getApplicationInformation')
  def get(self, client_id):
    """ Get information on the specified application. """
    application = model.oauth.get_application_for_client_id(client_id)
    if application is None:
      raise NotFound()

    # Fall back to the owning organization's email for the avatar.
    avatar_email = application.avatar_email or application.organization.email
    return {
      'name': application.name,
      'description': application.description,
      'uri': application.application_uri,
      'avatar': avatar.get_data(application.name, avatar_email, 'app'),
      'organization': org_view(application.organization, []),
    }
|
||||
|
||||
|
||||
def app_view(application):
  """ Build the API representation of an OAuth `application`.

  Secret-bearing fields (`client_secret`, `redirect_uri`, `avatar_email`)
  are only populated when the caller administers the owning organization.
  """
  is_admin = AdministerOrganizationPermission(application.organization.username).can()
  client_secret = None
  if is_admin:
    # TODO(remove-unenc): Remove legacy lookup.
    client_secret = None
    # Prefer the encrypted secret column when it is populated.
    if application.secure_client_secret is not None:
      client_secret = application.secure_client_secret.decrypt()

    # While the encryption migration still permits reading old fields, fall
    # back to the legacy plaintext column.
    if ActiveDataMigration.has_flag(ERTMigrationFlags.READ_OLD_FIELDS) and client_secret is None:
      client_secret = application.client_secret

  # NOTE(review): this assumes an admin can always resolve *some* secret; if
  # both the encrypted and legacy fields are empty for an admin caller this
  # assertion fires (and is stripped under -O) — confirm the invariant.
  assert (client_secret is not None) == is_admin
  return {
    'name': application.name,
    'description': application.description,
    'application_uri': application.application_uri,
    'client_id': application.client_id,
    'client_secret': client_secret,
    'redirect_uri': application.redirect_uri if is_admin else None,
    'avatar_email': application.avatar_email if is_admin else None,
  }
|
||||
|
||||
|
||||
@resource('/v1/organization/<orgname>/applications')
@path_param('orgname', 'The name of the organization')
class OrganizationApplications(ApiResource):
  """ Resource for managing applications defined by an organization. """
  schemas = {
    'NewApp': {
      'type': 'object',
      'description': 'Description of a new organization application.',
      'required': [
        'name',
      ],
      'properties': {
        'name': {
          'type': 'string',
          'description': 'The name of the application',
        },
        'redirect_uri': {
          'type': 'string',
          'description': 'The URI for the application\'s OAuth redirect',
        },
        'application_uri': {
          'type': 'string',
          'description': 'The URI for the application\'s homepage',
        },
        'description': {
          'type': 'string',
          'description': 'The human-readable description for the application',
        },
        'avatar_email': {
          'type': 'string',
          'description': 'The e-mail address of the avatar to use for the application',
        }
      },
    },
  }

  @require_scope(scopes.ORG_ADMIN)
  @nickname('getOrganizationApplications')
  def get(self, orgname):
    """ List the applications for the specified organization """
    permission = AdministerOrganizationPermission(orgname)
    if permission.can():
      try:
        org = model.organization.get_organization(orgname)
      except model.InvalidOrganizationException:
        raise NotFound()

      applications = model.oauth.list_applications_for_org(org)
      return {'applications': [app_view(application) for application in applications]}

    raise Unauthorized()

  @require_scope(scopes.ORG_ADMIN)
  @nickname('createOrganizationApplication')
  @validate_json_request('NewApp')
  def post(self, orgname):
    """ Creates a new application under this organization.

    Logs a `create_application` audit entry and returns the created
    application's view (including its secret, since the caller is an admin).
    """
    permission = AdministerOrganizationPermission(orgname)
    if permission.can():
      try:
        org = model.organization.get_organization(orgname)
      except model.InvalidOrganizationException:
        raise NotFound()

      app_data = request.get_json()
      application = model.oauth.create_application(org, app_data['name'],
                                                   app_data.get('application_uri', ''),
                                                   app_data.get('redirect_uri', ''),
                                                   description=app_data.get('description', ''),
                                                   avatar_email=app_data.get('avatar_email', None))

      # Enrich the audit-log payload with the generated identifiers.
      app_data.update({
        'application_name': application.name,
        'client_id': application.client_id
      })

      log_action('create_application', orgname, app_data)

      return app_view(application)
    raise Unauthorized()
|
||||
|
||||
|
||||
@resource('/v1/organization/<orgname>/applications/<client_id>')
@path_param('orgname', 'The name of the organization')
@path_param('client_id', 'The OAuth client ID')
class OrganizationApplicationResource(ApiResource):
  """ Resource for managing an application defined by an organizations. """
  schemas = {
    'UpdateApp': {
      'type': 'object',
      'description': 'Description of an updated application.',
      'required': [
        'name',
        'redirect_uri',
        'application_uri'
      ],
      'properties': {
        'name': {
          'type': 'string',
          'description': 'The name of the application',
        },
        'redirect_uri': {
          'type': 'string',
          'description': 'The URI for the application\'s OAuth redirect',
        },
        'application_uri': {
          'type': 'string',
          'description': 'The URI for the application\'s homepage',
        },
        'description': {
          'type': 'string',
          'description': 'The human-readable description for the application',
        },
        'avatar_email': {
          'type': 'string',
          'description': 'The e-mail address of the avatar to use for the application',
        }
      },
    },
  }

  @require_scope(scopes.ORG_ADMIN)
  @nickname('getOrganizationApplication')
  def get(self, orgname, client_id):
    """ Retrieves the application with the specified client_id under the specified organization """
    permission = AdministerOrganizationPermission(orgname)
    if permission.can():
      try:
        org = model.organization.get_organization(orgname)
      except model.InvalidOrganizationException:
        raise NotFound()

      application = model.oauth.lookup_application(org, client_id)
      if not application:
        raise NotFound()

      return app_view(application)

    raise Unauthorized()

  @require_scope(scopes.ORG_ADMIN)
  @nickname('updateOrganizationApplication')
  @validate_json_request('UpdateApp')
  def put(self, orgname, client_id):
    """ Updates an application under this organization.

    Replaces the name/URIs and optional description/avatar_email, then logs
    an `update_application` audit entry.
    """
    permission = AdministerOrganizationPermission(orgname)
    if permission.can():
      try:
        org = model.organization.get_organization(orgname)
      except model.InvalidOrganizationException:
        raise NotFound()

      application = model.oauth.lookup_application(org, client_id)
      if not application:
        raise NotFound()

      app_data = request.get_json()
      application.name = app_data['name']
      application.application_uri = app_data['application_uri']
      application.redirect_uri = app_data['redirect_uri']
      # Optional fields fall back to their empty defaults when omitted.
      application.description = app_data.get('description', '')
      application.avatar_email = app_data.get('avatar_email', None)
      application.save()

      # Enrich the audit-log payload with the application identifiers.
      app_data.update({
        'application_name': application.name,
        'client_id': application.client_id
      })

      log_action('update_application', orgname, app_data)

      return app_view(application)
    raise Unauthorized()

  @require_scope(scopes.ORG_ADMIN)
  @nickname('deleteOrganizationApplication')
  def delete(self, orgname, client_id):
    """ Deletes the application under this organization.

    Logs a `delete_application` audit entry on success; 404 when either the
    organization or the application does not exist.
    """
    permission = AdministerOrganizationPermission(orgname)
    if permission.can():
      try:
        org = model.organization.get_organization(orgname)
      except model.InvalidOrganizationException:
        raise NotFound()

      application = model.oauth.delete_application(org, client_id)
      if not application:
        raise NotFound()

      log_action('delete_application', orgname,
                 {'application_name': application.name, 'client_id': client_id})

      return '', 204
    raise Unauthorized()
|
||||
|
||||
|
||||
@resource('/v1/organization/<orgname>/applications/<client_id>/resetclientsecret')
@path_param('orgname', 'The name of the organization')
@path_param('client_id', 'The OAuth client ID')
@internal_only
class OrganizationApplicationResetClientSecret(ApiResource):
  """ Custom verb for resetting the client secret of an application. """
  @nickname('resetOrganizationApplicationClientSecret')
  def post(self, orgname, client_id):
    """ Resets the client secret of the application. """
    if not AdministerOrganizationPermission(orgname).can():
      raise Unauthorized()

    try:
      org = model.organization.get_organization(orgname)
    except model.InvalidOrganizationException:
      raise NotFound()

    application = model.oauth.lookup_application(org, client_id)
    if not application:
      raise NotFound()

    # Generate a fresh secret and record the action in the audit log.
    application = model.oauth.reset_client_secret(application)
    log_action('reset_application_client_secret', orgname,
               {'application_name': application.name, 'client_id': client_id})

    return app_view(application)
|
209
endpoints/api/permission.py
Normal file
209
endpoints/api/permission.py
Normal file
|
@ -0,0 +1,209 @@
|
|||
""" Manage repository permissions. """
|
||||
|
||||
import logging
|
||||
|
||||
from flask import request
|
||||
|
||||
from endpoints.api import (resource, nickname, require_repo_admin, RepositoryParamResource,
|
||||
log_action, request_error, validate_json_request, path_param)
|
||||
from endpoints.exception import NotFound
|
||||
from permission_models_pre_oci import pre_oci_model as model
|
||||
from permission_models_interface import DeleteException, SaveException
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@resource('/v1/repository/<apirepopath:repository>/permissions/team/')
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
class RepositoryTeamPermissionList(RepositoryParamResource):
  """ Resource for repository team permissions. """
  @require_repo_admin
  @nickname('listRepoTeamPermissions')
  def get(self, namespace_name, repository_name):
    """ List all team permission. """
    team_perms = model.get_repo_permissions_by_team(namespace_name, repository_name)

    permissions = {}
    for team_perm in team_perms:
      permissions[team_perm.team_name] = team_perm.to_dict()

    return {'permissions': permissions}
|
||||
|
||||
|
||||
@resource('/v1/repository/<apirepopath:repository>/permissions/user/')
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
class RepositoryUserPermissionList(RepositoryParamResource):
  """ Resource for repository user permissions. """
  @require_repo_admin
  @nickname('listRepoUserPermissions')
  def get(self, namespace_name, repository_name):
    """ List all user permissions. """
    user_perms = model.get_repo_permissions_by_user(namespace_name, repository_name)
    return {
      'permissions': dict((perm.username, perm.to_dict()) for perm in user_perms)
    }
|
||||
|
||||
|
||||
@resource('/v1/repository/<apirepopath:repository>/permissions/user/<username>/transitive')
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
@path_param('username', 'The username of the user to which the permissions apply')
class RepositoryUserTransitivePermission(RepositoryParamResource):
  """ Resource for retrieving whether a user has access to a repository, either directly
      or via a team. """
  @require_repo_admin
  @nickname('getUserTransitivePermission')
  def get(self, namespace_name, repository_name, username):
    """ Fetch the permissions (direct and team-derived) for the specified user.

    Raises NotFound when the user holds no role on the repository.
    """
    roles = model.get_repo_roles(username, namespace_name, repository_name)

    if not roles:
      # CONSISTENCY FIX: raise an *instance* like every other handler in this
      # module (`raise NotFound` relied on implicit instantiation of the class).
      raise NotFound()

    return {
      'permissions': [r.to_dict() for r in roles]
    }
|
||||
|
||||
|
||||
@resource('/v1/repository/<apirepopath:repository>/permissions/user/<username>')
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
@path_param('username', 'The username of the user to which the permission applies')
class RepositoryUserPermission(RepositoryParamResource):
  """ Resource for managing individual user permissions. """
  schemas = {
    'UserPermission': {
      'type': 'object',
      'description': 'Description of a user permission.',
      'required': [
        'role',
      ],
      'properties': {
        'role': {
          'type': 'string',
          'description': 'Role to use for the user',
          'enum': [
            'read',
            'write',
            'admin',
          ],
        },
      },
    },
  }

  @require_repo_admin
  @nickname('getUserPermissions')
  def get(self, namespace_name, repository_name, username):
    """ Get the permission for the specified user. """
    logger.debug('Get repo: %s/%s permissions for user %s', namespace_name, repository_name, username)
    perm = model.get_repo_permission_for_user(username, namespace_name, repository_name)
    return perm.to_dict()

  @require_repo_admin
  @nickname('changeUserPermissions')
  @validate_json_request('UserPermission')
  def put(self, namespace_name, repository_name, username):  # Also needs to respond to post
    """ Update the permissions for an existing repository.

    Sets (creating or replacing) the named user's role on the repository and
    writes a `change_repo_permission` audit-log entry.
    """
    new_permission = request.get_json()

    logger.debug('Setting permission to: %s for user %s', new_permission['role'], username)

    try:
      perm = model.set_repo_permission_for_user(username, namespace_name, repository_name,
                                                new_permission['role'])
      resp = perm.to_dict()
    except SaveException as ex:
      raise request_error(exception=ex)

    log_action('change_repo_permission', namespace_name,
               {'username': username, 'repo': repository_name,
                'namespace': namespace_name,
                'role': new_permission['role']},
               repo_name=repository_name)

    return resp, 200

  @require_repo_admin
  @nickname('deleteUserPermissions')
  def delete(self, namespace_name, repository_name, username):
    """ Delete the permission for the user.

    Writes a `delete_repo_permission` audit-log entry on success.
    """
    try:
      model.delete_repo_permission_for_user(username, namespace_name, repository_name)
    except DeleteException as ex:
      raise request_error(exception=ex)

    log_action('delete_repo_permission', namespace_name,
               {'username': username, 'repo': repository_name, 'namespace': namespace_name},
               repo_name=repository_name)

    return '', 204
|
||||
|
||||
|
||||
@resource('/v1/repository/<apirepopath:repository>/permissions/team/<teamname>')
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
@path_param('teamname', 'The name of the team to which the permission applies')
class RepositoryTeamPermission(RepositoryParamResource):
  """ Resource for managing individual team permissions. """
  schemas = {
    'TeamPermission': {
      'type': 'object',
      'description': 'Description of a team permission.',
      'required': ['role'],
      'properties': {
        'role': {
          'type': 'string',
          'description': 'Role to use for the team',
          'enum': ['read', 'write', 'admin'],
        },
      },
    },
  }

  @require_repo_admin
  @nickname('getTeamPermissions')
  def get(self, namespace_name, repository_name, teamname):
    """ Fetch the permission for the specified team. """
    logger.debug('Get repo: %s/%s permissions for team %s', namespace_name, repository_name, teamname)
    return model.get_repo_role_for_team(teamname, namespace_name, repository_name).to_dict()

  @require_repo_admin
  @nickname('changeTeamPermissions')
  @validate_json_request('TeamPermission')
  def put(self, namespace_name, repository_name, teamname):
    """ Update the existing team permission. """
    role = request.get_json()['role']
    logger.debug('Setting permission to: %s for team %s', role, teamname)

    try:
      updated = model.set_repo_permission_for_team(teamname, namespace_name, repository_name, role)
      resp = updated.to_dict()
    except SaveException as ex:
      # Surface data-layer failures as an API request error.
      raise request_error(exception=ex)

    log_action('change_repo_permission', namespace_name,
               {'team': teamname, 'repo': repository_name,
                'role': role},
               repo_name=repository_name)
    return resp, 200

  @require_repo_admin
  @nickname('deleteTeamPermissions')
  def delete(self, namespace_name, repository_name, teamname):
    """ Delete the permission for the specified team. """
    try:
      model.delete_repo_permission_for_team(teamname, namespace_name, repository_name)
    except DeleteException as ex:
      raise request_error(exception=ex)

    log_action('delete_repo_permission', namespace_name,
               {'team': teamname, 'repo': repository_name},
               repo_name=repository_name)

    return '', 204
|
208
endpoints/api/permission_models_interface.py
Normal file
208
endpoints/api/permission_models_interface.py
Normal file
|
@ -0,0 +1,208 @@
|
|||
import sys
|
||||
from abc import ABCMeta, abstractmethod
|
||||
from collections import namedtuple
|
||||
|
||||
from six import add_metaclass
|
||||
|
||||
|
||||
class SaveException(Exception):
  """ Raised when a permission change cannot be persisted to the data store. """

  def __init__(self, other):
    # Preserve the exception info active at construction time for debugging.
    super(SaveException, self).__init__(str(other))
    self.traceback = sys.exc_info()
|
||||
|
||||
class DeleteException(Exception):
  """ Raised when a permission cannot be removed from the data store. """

  def __init__(self, other):
    # Preserve the exception info active at construction time for debugging.
    super(DeleteException, self).__init__(str(other))
    self.traceback = sys.exc_info()
|
||||
|
||||
|
||||
class Role(namedtuple('Role', ['role_name'])):
  """ A named role ('read', 'write' or 'admin') held on a repository. """

  def to_dict(self):
    """ Return the API (JSON-serializable) representation of this role. """
    return dict(role=self.role_name)
|
||||
|
||||
class UserPermission(namedtuple('UserPermission', [
    'role_name',
    'username',
    'is_robot',
    'avatar',
    'is_org_member',
    'has_org',
])):
  """ A single user's permission on a repository, as exposed by the API. """

  def to_dict(self):
    """ Return the API representation; org membership only appears for org repos. """
    serialized = dict(
      role=self.role_name,
      name=self.username,
      is_robot=self.is_robot,
      avatar=self.avatar,
    )
    # Org membership is only meaningful when the repo lives under an organization.
    if self.has_org:
      serialized['is_org_member'] = self.is_org_member
    return serialized
|
||||
|
||||
|
||||
class RobotPermission(namedtuple('RobotPermission', [
    'role_name',
    'username',
    'is_robot',
    'is_org_member',
])):
  """ A robot account's permission on a repository, as exposed by the API. """

  def to_dict(self, user=None, team=None, org_members=None):
    """ Return the API representation of this robot permission. """
    # NOTE(review): user/team/org_members are accepted but unused here; kept for
    # call-site compatibility — confirm before removing.
    # is_robot is emitted as a constant True since this tuple only describes robots.
    return dict(
      role=self.role_name,
      name=self.username,
      is_robot=True,
      is_org_member=self.is_org_member,
    )
|
||||
|
||||
|
||||
class TeamPermission(namedtuple('TeamPermission', [
    'role_name',
    'team_name',
    'avatar',
])):
  """ A team's permission on a repository, as exposed by the API. """

  def to_dict(self):
    """ Return the API representation of this team permission. """
    return dict(
      role=self.role_name,
      name=self.team_name,
      avatar=self.avatar,
    )
|
||||
|
||||
@add_metaclass(ABCMeta)
class PermissionDataInterface(object):
  """
  Data interface used by the permissions API. Implementations adapt the underlying
  data store to the UserPermission/TeamPermission/Role tuples above.
  """

  @abstractmethod
  def get_repo_permissions_by_user(self, namespace_name, repository_name):
    """
    List the permission of every user on the given repository.

    Args:
      namespace_name: string
      repository_name: string

    Returns:
      list(UserPermission)
    """

  @abstractmethod
  def get_repo_roles(self, username, namespace_name, repository_name):
    """
    List all roles a user holds on a repository (direct and team-derived).

    Args:
      username: string
      namespace_name: string
      repository_name: string

    Returns:
      list(Role) or None
    """

  @abstractmethod
  def get_repo_permission_for_user(self, username, namespace_name, repository_name):
    """
    Look up a single user's direct permission on a repository.

    Args:
      username: string
      namespace_name: string
      repository_name: string

    Returns:
      UserPermission
    """

  @abstractmethod
  def set_repo_permission_for_user(self, username, namespace_name, repository_name, role_name):
    """
    Grant or change the named role for a user on a repository.

    Args:
      username: string
      namespace_name: string
      repository_name: string
      role_name: string

    Returns:
      UserPermission

    Raises:
      SaveException
    """

  @abstractmethod
  def delete_repo_permission_for_user(self, username, namespace_name, repository_name):
    """
    Remove a user's direct permission on a repository.

    Args:
      username: string
      namespace_name: string
      repository_name: string

    Returns:
      void

    Raises:
      DeleteException
    """

  @abstractmethod
  def get_repo_permissions_by_team(self, namespace_name, repository_name):
    """
    List the permission of every team on the given repository.

    Args:
      namespace_name: string
      repository_name: string

    Returns:
      list(TeamPermission)
    """

  @abstractmethod
  def get_repo_role_for_team(self, team_name, namespace_name, repository_name):
    """
    Look up the role a team holds on a repository.

    Args:
      team_name: string
      namespace_name: string
      repository_name: string

    Returns:
      Role
    """

  @abstractmethod
  def set_repo_permission_for_team(self, team_name, namespace_name, repository_name, permission):
    """
    Grant or change the named role for a team on a repository.

    Args:
      team_name: string
      namespace_name: string
      repository_name: string
      permission: string

    Returns:
      TeamPermission

    Raises:
      SaveException
    """

  @abstractmethod
  def delete_repo_permission_for_team(self, team_name, namespace_name, repository_name):
    """
    Remove a team's permission on a repository.

    Args:
      team_name: string
      namespace_name: string
      repository_name: string

    Returns:
      TeamPermission

    Raises:
      DeleteException
    """
|
115
endpoints/api/permission_models_pre_oci.py
Normal file
115
endpoints/api/permission_models_pre_oci.py
Normal file
|
@ -0,0 +1,115 @@
|
|||
from app import avatar
|
||||
from data import model
|
||||
from permission_models_interface import PermissionDataInterface, UserPermission, TeamPermission, Role, SaveException, DeleteException
|
||||
|
||||
|
||||
class PreOCIModel(PermissionDataInterface):
  """
  PreOCIModel implements the data model for Permission using a database schema
  before it was changed to support the OCI specification.
  """

  def get_repo_permissions_by_user(self, namespace_name, repository_name):
    """ Return a UserPermission for every user with access to the repository. """
    org = None
    try:
      org = model.organization.get_organization(namespace_name)  # Will raise an error if not org
    except model.InvalidOrganizationException:
      # This repository isn't under an org
      pass

    # Load the permissions.
    repo_perms = model.user.get_all_repo_users(namespace_name, repository_name)

    if org:
      # Resolve org membership for all referenced users in one query.
      users_filter = {perm.user for perm in repo_perms}
      org_members = model.organization.get_organization_member_set(org, users_filter=users_filter)

    def is_org_member(user):
      if not org:
        return False

      # Robots under the org namespace always count as members.
      return user.robot or user.username in org_members

    return [self._user_permission(perm, org is not None, is_org_member(perm.user)) for perm in repo_perms]

  def get_repo_roles(self, username, namespace_name, repository_name):
    """ Return the Roles the user holds on the repo, or None if user/repo is missing. """
    user = model.user.get_user(username)
    if not user:
      return None

    repo = model.repository.get_repository(namespace_name, repository_name)
    if not repo:
      return None

    return [self._role(r) for r in model.permission.get_user_repo_permissions(user, repo)]

  def get_repo_permission_for_user(self, username, namespace_name, repository_name):
    """ Return the UserPermission for a single user on the repository. """
    perm = model.permission.get_user_reponame_permission(username, namespace_name, repository_name)
    org = None
    try:
      org = model.organization.get_organization(namespace_name)
      org_members = model.organization.get_organization_member_set(org, users_filter={perm.user})
      is_org_member = perm.user.robot or perm.user.username in org_members
    except model.InvalidOrganizationException:
      # This repository is not part of an organization
      is_org_member = False

    return self._user_permission(perm, org is not None, is_org_member)

  def set_repo_permission_for_user(self, username, namespace_name, repository_name, role_name):
    """ Grant or change the role for a user; raises SaveException on data errors. """
    try:
      perm = model.permission.set_user_repo_permission(username, namespace_name, repository_name, role_name)
      org = None
      try:
        org = model.organization.get_organization(namespace_name)
        org_members = model.organization.get_organization_member_set(org, users_filter={perm.user})
        is_org_member = perm.user.robot or perm.user.username in org_members
      except model.InvalidOrganizationException:
        # This repository is not part of an organization
        is_org_member = False
      return self._user_permission(perm, org is not None, is_org_member)
    except model.DataModelException as ex:
      raise SaveException(ex)

  def delete_repo_permission_for_user(self, username, namespace_name, repository_name):
    """ Remove a user's permission; raises DeleteException on data errors. """
    try:
      model.permission.delete_user_permission(username, namespace_name, repository_name)
    except model.DataModelException as ex:
      raise DeleteException(ex)

  def get_repo_permissions_by_team(self, namespace_name, repository_name):
    """ Return a TeamPermission for every team with access to the repository. """
    repo_perms = model.permission.get_all_repo_teams(namespace_name, repository_name)
    return [self._team_permission(perm, perm.team.name) for perm in repo_perms]

  def get_repo_role_for_team(self, team_name, namespace_name, repository_name):
    """ Return the Role a team holds on the repository. """
    return self._role(model.permission.get_team_reponame_permission(team_name, namespace_name, repository_name))

  def set_repo_permission_for_team(self, team_name, namespace_name, repository_name, role_name):
    """ Grant or change the role for a team; raises SaveException on data errors. """
    try:
      return self._team_permission(model.permission.set_team_repo_permission(team_name, namespace_name, repository_name, role_name), team_name)
    except model.DataModelException as ex:
      raise SaveException(ex)

  def delete_repo_permission_for_team(self, team_name, namespace_name, repository_name):
    """ Remove a team's permission; raises DeleteException on data errors. """
    try:
      model.permission.delete_team_permission(team_name, namespace_name, repository_name)
    except model.DataModelException as ex:
      raise DeleteException(ex)

  def _role(self, permission_obj):
    # Adapt a database permission row to the API-layer Role tuple.
    return Role(role_name=permission_obj.role.name)

  def _user_permission(self, permission_obj, has_org, is_org_member):
    # Adapt a database permission row to the API-layer UserPermission tuple.
    return UserPermission(role_name=permission_obj.role.name,
                          username=permission_obj.user.username,
                          is_robot=permission_obj.user.robot,
                          avatar=avatar.get_data_for_user(permission_obj.user),
                          is_org_member=is_org_member,
                          has_org=has_org)

  def _team_permission(self, permission_obj, team_name):
    # Adapt a database permission row to the API-layer TeamPermission tuple.
    return TeamPermission(role_name=permission_obj.role.name,
                          team_name=permission_obj.team.name,
                          avatar=avatar.get_data_for_team(permission_obj.team))


# Singleton instance used by the permissions API endpoints.
pre_oci_model = PreOCIModel()
|
270
endpoints/api/prototype.py
Normal file
270
endpoints/api/prototype.py
Normal file
|
@ -0,0 +1,270 @@
|
|||
""" Manage default permissions added to repositories. """
|
||||
|
||||
from flask import request
|
||||
|
||||
from endpoints.api import (resource, nickname, ApiResource, validate_json_request, request_error,
|
||||
log_action, path_param, require_scope)
|
||||
from endpoints.exception import Unauthorized, NotFound
|
||||
from auth.permissions import AdministerOrganizationPermission
|
||||
from auth.auth_context import get_authenticated_user
|
||||
from auth import scopes
|
||||
from data import model
|
||||
from app import avatar
|
||||
|
||||
|
||||
def prototype_view(proto, org_members):
  """ Build the API representation of a default-permission prototype.

  org_members is the precomputed set of usernames belonging to the organization;
  it is used to annotate user delegates with membership.
  """
  def _user_view(user):
    return {
      'name': user.username,
      'is_robot': user.robot,
      'kind': 'user',
      'is_org_member': user.robot or user.username in org_members,
      'avatar': avatar.get_data_for_user(user)
    }

  def _team_view(team):
    return {
      'name': team.name,
      'kind': 'team',
      'avatar': avatar.get_data_for_team(team)
    }

  # A prototype delegates to exactly one of: a user or a team.
  delegate_view = (_user_view(proto.delegate_user) if proto.delegate_user
                   else _team_view(proto.delegate_team))

  activating_view = _user_view(proto.activating_user) if proto.activating_user else None

  return {
    'activating_user': activating_view,
    'delegate': delegate_view,
    'role': proto.role.name,
    'id': proto.uuid,
  }
|
||||
|
||||
def log_prototype_action(action_kind, orgname, prototype, **kwargs):
  """ Record an audit log entry for a change to a permission prototype.

  Args:
    action_kind: string log-entry kind (e.g. 'create_prototype_permission').
    orgname: string name of the owning organization.
    prototype: the prototype row being logged.
    **kwargs: extra fields merged into the log parameters.
  """
  username = get_authenticated_user().username
  log_params = {
    'prototypeid': prototype.uuid,
    'username': username,
    'activating_username': (prototype.activating_user.username
                            if prototype.activating_user else None),
    'role': prototype.role.name
  }

  # Idiom: merge caller-supplied extras in one call instead of a manual copy loop.
  log_params.update(kwargs)

  if prototype.delegate_user:
    log_params['delegate_user'] = prototype.delegate_user.username
  elif prototype.delegate_team:
    log_params['delegate_team'] = prototype.delegate_team.name

  log_action(action_kind, orgname, log_params)
|
||||
|
||||
|
||||
@resource('/v1/organization/<orgname>/prototypes')
@path_param('orgname', 'The name of the organization')
class PermissionPrototypeList(ApiResource):
  """ Resource for listing and creating permission prototypes. """
  schemas = {
    'NewPrototype': {
      'type': 'object',
      'description': 'Description of a new prototype',
      'required': [
        'role',
        'delegate',
      ],
      'properties': {
        'role': {
          'type': 'string',
          'description': 'Role that should be applied to the delegate',
          'enum': [
            'read',
            'write',
            'admin',
          ],
        },
        'activating_user': {
          'type': 'object',
          'description': 'Repository creating user to whom the rule should apply',
          'required': [
            'name',
          ],
          'properties': {
            'name': {
              'type': 'string',
              'description': 'The username for the activating_user',
            },
          },
        },
        'delegate': {
          'type': 'object',
          'description': 'Information about the user or team to which the rule grants access',
          'required': [
            'name',
            'kind',
          ],
          'properties': {
            'name': {
              'type': 'string',
              'description': 'The name for the delegate team or user',
            },
            'kind': {
              'type': 'string',
              'description': 'Whether the delegate is a user or a team',
              'enum': [
                'user',
                'team',
              ],
            },
          },
        },
      },
    },
  }

  @require_scope(scopes.ORG_ADMIN)
  @nickname('getOrganizationPrototypePermissions')
  def get(self, orgname):
    """ List the existing prototypes for this organization. """
    permission = AdministerOrganizationPermission(orgname)
    if permission.can():
      try:
        org = model.organization.get_organization(orgname)
      except model.InvalidOrganizationException:
        raise NotFound()

      permissions = model.permission.get_prototype_permissions(org)

      # Collect every user referenced by any prototype so org membership can be
      # resolved with a single query.
      users_filter = ({p.activating_user for p in permissions} |
                      {p.delegate_user for p in permissions})
      org_members = model.organization.get_organization_member_set(org, users_filter=users_filter)
      return {'prototypes': [prototype_view(p, org_members) for p in permissions]}

    raise Unauthorized()

  @require_scope(scopes.ORG_ADMIN)
  @nickname('createOrganizationPrototypePermission')
  @validate_json_request('NewPrototype')
  def post(self, orgname):
    """ Create a new permission prototype. """
    permission = AdministerOrganizationPermission(orgname)
    if permission.can():
      try:
        org = model.organization.get_organization(orgname)
      except model.InvalidOrganizationException:
        raise NotFound()

      details = request.get_json()
      activating_username = None

      # activating_user is optional; only read its name when fully present.
      if ('activating_user' in details and details['activating_user'] and
          'name' in details['activating_user']):
        activating_username = details['activating_user']['name']

      delegate = details['delegate'] if 'delegate' in details else {}
      delegate_kind = delegate.get('kind', None)
      delegate_name = delegate.get('name', None)

      # The delegate is interpreted as a username or team name based on its kind.
      delegate_username = delegate_name if delegate_kind == 'user' else None
      delegate_teamname = delegate_name if delegate_kind == 'team' else None

      activating_user = (model.user.get_user(activating_username) if activating_username else None)
      delegate_user = (model.user.get_user(delegate_username) if delegate_username else None)
      delegate_team = (model.team.get_organization_team(orgname, delegate_teamname)
                       if delegate_teamname else None)

      if activating_username and not activating_user:
        raise request_error(message='Unknown activating user')

      if not delegate_user and not delegate_team:
        raise request_error(message='Missing delegate user or team')

      role_name = details['role']

      prototype = model.permission.add_prototype_permission(org, role_name, activating_user,
                                                            delegate_user, delegate_team)
      log_prototype_action('create_prototype_permission', orgname, prototype)

      users_filter = {prototype.activating_user, prototype.delegate_user}
      org_members = model.organization.get_organization_member_set(org, users_filter=users_filter)
      return prototype_view(prototype, org_members)

    raise Unauthorized()
|
||||
|
||||
|
||||
@resource('/v1/organization/<orgname>/prototypes/<prototypeid>')
@path_param('orgname', 'The name of the organization')
@path_param('prototypeid', 'The ID of the prototype')
class PermissionPrototype(ApiResource):
  """ Resource for managing individual permission prototypes. """
  schemas = {
    'PrototypeUpdate': {
      'type': 'object',
      'description': 'Description of a the new prototype role',
      'required': [
        'role',
      ],
      'properties': {
        'role': {
          'type': 'string',
          'description': 'Role that should be applied to the permission',
          'enum': [
            'read',
            'write',
            'admin',
          ],
        },
      },
    },
  }

  @require_scope(scopes.ORG_ADMIN)
  @nickname('deleteOrganizationPrototypePermission')
  def delete(self, orgname, prototypeid):
    """ Delete an existing permission prototype. """
    permission = AdministerOrganizationPermission(orgname)
    if permission.can():
      try:
        org = model.organization.get_organization(orgname)
      except model.InvalidOrganizationException:
        raise NotFound()

      prototype = model.permission.delete_prototype_permission(org, prototypeid)
      if not prototype:
        raise NotFound()

      log_prototype_action('delete_prototype_permission', orgname, prototype)

      return '', 204

    raise Unauthorized()

  @require_scope(scopes.ORG_ADMIN)
  @nickname('updateOrganizationPrototypePermission')
  @validate_json_request('PrototypeUpdate')
  def put(self, orgname, prototypeid):
    """ Update the role of an existing permission prototype. """
    permission = AdministerOrganizationPermission(orgname)
    if permission.can():
      try:
        org = model.organization.get_organization(orgname)
      except model.InvalidOrganizationException:
        raise NotFound()

      # Look up the existing prototype first so the previous role can be logged.
      existing = model.permission.get_prototype_permission(org, prototypeid)
      if not existing:
        raise NotFound()

      details = request.get_json()
      role_name = details['role']
      prototype = model.permission.update_prototype_permission(org, prototypeid, role_name)
      if not prototype:
        raise NotFound()

      log_prototype_action('modify_prototype_permission', orgname, prototype,
                           original_role=existing.role.name)

      users_filter = {prototype.activating_user, prototype.delegate_user}
      org_members = model.organization.get_organization_member_set(org, users_filter=users_filter)
      return prototype_view(prototype, org_members)

    raise Unauthorized()
|
52
endpoints/api/repoemail.py
Normal file
52
endpoints/api/repoemail.py
Normal file
|
@ -0,0 +1,52 @@
|
|||
""" Authorize repository to send e-mail notifications. """
|
||||
|
||||
import logging
|
||||
|
||||
from flask import request, abort
|
||||
|
||||
from endpoints.api import (resource, nickname, require_repo_admin, RepositoryParamResource,
|
||||
log_action, validate_json_request, internal_only, path_param, show_if)
|
||||
from endpoints.api.repoemail_models_pre_oci import pre_oci_model as model
|
||||
from endpoints.exception import NotFound
|
||||
from app import tf
|
||||
from data.database import db
|
||||
from util.useremails import send_repo_authorization_email
|
||||
|
||||
import features
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@internal_only
@resource('/v1/repository/<apirepopath:repository>/authorizedemail/<email>')
@show_if(features.MAILING)
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
@path_param('email', 'The e-mail address')
class RepositoryAuthorizedEmail(RepositoryParamResource):
  """ Resource for checking and authorizing e-mail addresses to receive repo notifications. """

  @require_repo_admin
  @nickname('checkRepoEmailAuthorized')
  def get(self, namespace, repository, email):
    """ Checks to see if the given e-mail address is authorized on this repository. """
    record = model.get_email_authorized_for_repo(namespace, repository, email)
    if not record:
      abort(404)

    return record.to_dict()

  @require_repo_admin
  @nickname('sendAuthorizeRepoEmail')
  def post(self, namespace, repository, email):
    """ Starts the authorization process for an e-mail address on a repository. """

    # Fetch-or-create the authorization record inside a transaction so two
    # concurrent requests do not create duplicate records.
    with tf(db):
      record = model.get_email_authorized_for_repo(namespace, repository, email)
      if record and record.confirmed:
        # Already authorized: nothing to send.
        return record.to_dict()

      if not record:
        record = model.create_email_authorization_for_repo(namespace, repository, email)

    # NOTE(review): the e-mail send is placed outside the transaction here;
    # confirm against the original file whether it belonged inside the `with` block.
    send_repo_authorization_email(namespace, repository, email, record.code)
    return record.to_dict()
|
50
endpoints/api/repoemail_models_interface.py
Normal file
50
endpoints/api/repoemail_models_interface.py
Normal file
|
@ -0,0 +1,50 @@
|
|||
from abc import ABCMeta, abstractmethod
|
||||
from collections import namedtuple
|
||||
|
||||
from six import add_metaclass
|
||||
|
||||
|
||||
class RepositoryAuthorizedEmail(
    namedtuple('RepositoryAuthorizedEmail', [
      'email',
      'repository_name',
      'namespace_name',
      'confirmed',
      'code',
    ])):
  """
  RepositoryAuthorizedEmail represents an e-mail address authorized (or pending
  authorization) to receive notifications for a repository.
  (Fix: the original docstring was copy-pasted from a Tag class.)
  :type email: string
  :type repository_name: string
  :type namespace_name: string
  :type confirmed: boolean
  :type code: string
  """

  def to_dict(self):
    """ Return the API (JSON-serializable) representation of this record. """
    return {
      'email': self.email,
      'repository': self.repository_name,
      'namespace': self.namespace_name,
      'confirmed': self.confirmed,
      'code': self.code
    }
|
||||
|
||||
|
||||
@add_metaclass(ABCMeta)
class RepoEmailDataInterface(object):
  """
  Interface that represents all data store interactions required by a Repo Email.
  """

  @abstractmethod
  def get_email_authorized_for_repo(self, namespace_name, repository_name, email):
    """
    Returns a RepositoryAuthorizedEmail for the given repo/e-mail pair if one
    exists, else None.
    """

  @abstractmethod
  def create_email_authorization_for_repo(self, namespace_name, repository_name, email):
    """
    Creates a new (unconfirmed) authorization record and returns it as a
    RepositoryAuthorizedEmail.
    """
|
28
endpoints/api/repoemail_models_pre_oci.py
Normal file
28
endpoints/api/repoemail_models_pre_oci.py
Normal file
|
@ -0,0 +1,28 @@
|
|||
from data import model
|
||||
from endpoints.api.repoemail_models_interface import RepoEmailDataInterface, RepositoryAuthorizedEmail
|
||||
|
||||
|
||||
def _return_none_or_data(func, namespace_name, repository_name, email):
|
||||
data = func(namespace_name, repository_name, email)
|
||||
if data is None:
|
||||
return data
|
||||
return RepositoryAuthorizedEmail(email, repository_name, namespace_name, data.confirmed,
|
||||
data.code)
|
||||
|
||||
|
||||
class PreOCIModel(RepoEmailDataInterface):
  """
  PreOCIModel implements the data model for the Repo Email using a database schema
  before it was changed to support the OCI specification.
  """

  def get_email_authorized_for_repo(self, namespace_name, repository_name, email):
    """ Look up the authorization record for the repo/e-mail pair, or None. """
    lookup = model.repository.get_email_authorized_for_repo
    return _return_none_or_data(lookup, namespace_name, repository_name, email)

  def create_email_authorization_for_repo(self, namespace_name, repository_name, email):
    """ Create and return a new authorization record for the repo/e-mail pair. """
    create = model.repository.create_email_authorization_for_repo
    return _return_none_or_data(create, namespace_name, repository_name, email)


# Singleton instance used by the repo e-mail API endpoints.
pre_oci_model = PreOCIModel()
|
404
endpoints/api/repository.py
Normal file
404
endpoints/api/repository.py
Normal file
|
@ -0,0 +1,404 @@
|
|||
""" List, create and manage repositories. """
|
||||
|
||||
import logging
|
||||
import datetime
|
||||
import features
|
||||
|
||||
from collections import defaultdict
|
||||
from datetime import timedelta, datetime
|
||||
|
||||
from flask import request, abort
|
||||
|
||||
from app import dockerfile_build_queue, tuf_metadata_api
|
||||
from data.database import RepositoryState
|
||||
from endpoints.api import (
|
||||
format_date, nickname, log_action, validate_json_request, require_repo_read, require_repo_write,
|
||||
require_repo_admin, RepositoryParamResource, resource, parse_args, ApiResource, request_error,
|
||||
require_scope, path_param, page_support, query_param, truthy_bool, show_if)
|
||||
from endpoints.api.repository_models_pre_oci import pre_oci_model as model
|
||||
from endpoints.exception import (
|
||||
Unauthorized, NotFound, InvalidRequest, ExceedsLicenseException, DownstreamIssue)
|
||||
from endpoints.api.billing import lookup_allowed_private_repos, get_namespace_plan
|
||||
from endpoints.api.subscribe import check_repository_usage
|
||||
|
||||
from auth.permissions import (ModifyRepositoryPermission, AdministerRepositoryPermission,
|
||||
CreateRepositoryPermission, ReadRepositoryPermission)
|
||||
from auth.auth_context import get_authenticated_user
|
||||
from auth import scopes
|
||||
from util.names import REPOSITORY_NAME_REGEX
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
REPOS_PER_PAGE = 100
|
||||
MAX_DAYS_IN_3_MONTHS = 92
|
||||
|
||||
|
||||
def check_allowed_private_repos(namespace):
  """ Checks to see if the given namespace has reached its private repository limit.
      If so, raises an ExceedsLicenseException.
  """
  # The limit only applies when the billing feature is enabled.
  if features.BILLING and not lookup_allowed_private_repos(namespace):
    raise ExceedsLicenseException()
|
||||
|
||||
|
||||
@resource('/v1/repository')
class RepositoryList(ApiResource):
  """Operations for creating and listing repositories."""
  schemas = {
    'NewRepo': {
      'type': 'object',
      'description': 'Description of a new repository',
      'required': [
        'repository',
        'visibility',
        'description',
      ],
      'properties': {
        'repository': {
          'type': 'string',
          'description': 'Repository name',
        },
        'visibility': {
          'type': 'string',
          'description': 'Visibility which the repository will start with',
          'enum': [
            'public',
            'private',
          ],
        },
        'namespace': {
          'type': 'string',
          'description': ('Namespace in which the repository should be created. If omitted, the '
                          'username of the caller is used'),
        },
        'description': {
          'type': 'string',
          'description': 'Markdown encoded description for the repository',
        },
        'repo_kind': {
          'type': ['string', 'null'],
          'description': 'The kind of repository',
          'enum': ['image', 'application', None],
        }
      },
    },
  }

  @require_scope(scopes.CREATE_REPO)
  @nickname('createRepo')
  @validate_json_request('NewRepo')
  def post(self):
    """ Create a new repository.

        Returns a (dict, 201) tuple describing the new repository on success; raises
        InvalidRequest for bad input and Unauthorized when the caller may not create
        repositories in the target namespace.
    """
    owner = get_authenticated_user()
    req = request.get_json()

    # BUG FIX: the original tested `'namespace' not in 'req'` — membership in the string
    # literal 'req', which is always true — so every anonymous request was rejected even
    # when it did supply a namespace. The intended check is against the request body dict.
    if owner is None and 'namespace' not in req:
      raise InvalidRequest('Must provide a namespace or must be logged in.')

    namespace_name = req['namespace'] if 'namespace' in req else owner.username

    permission = CreateRepositoryPermission(namespace_name)
    if permission.can():
      repository_name = req['repository']
      visibility = req['visibility']

      if model.repo_exists(namespace_name, repository_name):
        raise request_error(message='Repository already exists')

      # Private repositories count against the namespace's plan quota.
      if visibility == 'private':
        check_allowed_private_repos(namespace_name)

      # Verify that the repository name is valid.
      if not REPOSITORY_NAME_REGEX.match(repository_name):
        raise InvalidRequest('Invalid repository name')

      # A missing or explicitly-null repo_kind defaults to a normal image repository.
      kind = req.get('repo_kind', 'image') or 'image'
      model.create_repo(namespace_name, repository_name, owner, req['description'],
                        visibility=visibility, repo_kind=kind)

      log_action('create_repo', namespace_name,
                 {'repo': repository_name,
                  'namespace': namespace_name}, repo_name=repository_name)
      return {
        'namespace': namespace_name,
        'name': repository_name,
        'kind': kind,
      }, 201

    raise Unauthorized()

  @require_scope(scopes.READ_REPO)
  @nickname('listRepos')
  @parse_args()
  @query_param('namespace', 'Filters the repositories returned to this namespace', type=str)
  @query_param('starred', 'Filters the repositories returned to those starred by the user',
               type=truthy_bool, default=False)
  @query_param('public', 'Adds any repositories visible to the user by virtue of being public',
               type=truthy_bool, default=False)
  @query_param('last_modified', 'Whether to include when the repository was last modified.',
               type=truthy_bool, default=False)
  @query_param('popularity', 'Whether to include the repository\'s popularity metric.',
               type=truthy_bool, default=False)
  @query_param('repo_kind', 'The kind of repositories to return', type=str, default='image')
  @page_support()
  def get(self, page_token, parsed_args):
    """ Fetch the list of repositories visible to the current user under a variety of situations.
    """
    # Ensure that the user requests either filtered by a namespace, only starred repositories,
    # or public repositories. This ensures that the user is not requesting *all* visible repos,
    # which can cause a surge in DB CPU usage.
    if not parsed_args['namespace'] and not parsed_args['starred'] and not parsed_args['public']:
      raise InvalidRequest('namespace, starred or public are required for this API call')

    user = get_authenticated_user()
    username = user.username if user else None
    last_modified = parsed_args['last_modified']
    popularity = parsed_args['popularity']

    if parsed_args['starred'] and not username:
      # No repositories should be returned, as there is no user.
      abort(400)

    repos, next_page_token = model.get_repo_list(
      parsed_args['starred'], user, parsed_args['repo_kind'], parsed_args['namespace'], username,
      parsed_args['public'], page_token, last_modified, popularity)

    return {'repositories': [repo.to_dict() for repo in repos]}, next_page_token
|
||||
|
||||
|
||||
@resource('/v1/repository/<apirepopath:repository>')
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
class Repository(RepositoryParamResource):
  """Operations for managing a specific repository."""
  schemas = {
    'RepoUpdate': {
      'type': 'object',
      'description': 'Fields which can be updated in a repository.',
      'required': ['description',],
      'properties': {
        'description': {
          'type': 'string',
          'description': 'Markdown encoded description for the repository',
        },
      }
    }
  }

  @parse_args()
  @query_param('includeStats', 'Whether to include action statistics', type=truthy_bool,
               default=False)
  @query_param('includeTags', 'Whether to include repository tags', type=truthy_bool,
               default=True)
  @require_repo_read
  @nickname('getRepo')
  def get(self, namespace, repository, parsed_args):
    """ Fetch the specified repository.

        Returns the serialized repository, with `can_write`/`can_admin` permission flags and,
        when `includeStats` is set on a non-application repo, a per-day `stats` list covering
        roughly the last three months. Raises NotFound when the repository does not exist.
    """
    logger.debug('Get repo: %s/%s' % (namespace, repository))
    include_tags = parsed_args['includeTags']
    max_tags = 500
    repo = model.get_repo(namespace, repository, get_authenticated_user(), include_tags, max_tags)
    if repo is None:
      raise NotFound()

    # Writes are only allowed on repositories in the NORMAL state (e.g. not read-only
    # or mirrored), even when the caller otherwise has modify permission.
    has_write_permission = ModifyRepositoryPermission(namespace, repository).can()
    has_write_permission = has_write_permission and repo.state == RepositoryState.NORMAL

    repo_data = repo.to_dict()
    repo_data['can_write'] = has_write_permission
    repo_data['can_admin'] = AdministerRepositoryPermission(namespace, repository).can()

    # Action stats only exist for image repositories, not application (appr) repos.
    if parsed_args['includeStats'] and repo.repository_base_elements.kind_name != 'application':
      stats = []
      found_dates = {}

      # Record which month/day combinations already have a count row.
      for count in repo.counts:
        stats.append(count.to_dict())
        found_dates['%s/%s' % (count.date.month, count.date.day)] = True

      # Fill in any missing stats with zeros.
      for day in range(1, MAX_DAYS_IN_3_MONTHS):
        day_date = datetime.now() - timedelta(days=day)
        key = '%s/%s' % (day_date.month, day_date.day)
        if key not in found_dates:
          stats.append({
            'date': day_date.date().isoformat(),
            'count': 0,
          })

      repo_data['stats'] = stats
    return repo_data

  @require_repo_write
  @nickname('updateRepo')
  @validate_json_request('RepoUpdate')
  def put(self, namespace, repository):
    """ Update the description in the specified repository. """
    if not model.repo_exists(namespace, repository):
      raise NotFound()

    values = request.get_json()
    model.set_description(namespace, repository, values['description'])

    log_action('set_repo_description', namespace,
               {'repo': repository,
                'namespace': namespace,
                'description': values['description']}, repo_name=repository)
    return {'success': True}

  @require_repo_admin
  @nickname('deleteRepository')
  def delete(self, namespace, repository):
    """ Delete a repository, purge its data, and clean up related billing and build state. """
    username = model.purge_repository(namespace, repository)

    # Deleting a repo may drop the namespace below its plan's repo count; refresh the
    # over/under-usage notification accordingly.
    if features.BILLING:
      plan = get_namespace_plan(namespace)
      model.check_repository_usage(username, plan)

    # Remove any builds from the queue.
    dockerfile_build_queue.delete_namespaced_items(namespace, repository)

    log_action('delete_repo', namespace, {'repo': repository, 'namespace': namespace})
    return '', 204
|
||||
|
||||
|
||||
@resource('/v1/repository/<apirepopath:repository>/changevisibility')
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
class RepositoryVisibility(RepositoryParamResource):
  """ Custom verb for changing the visibility of the repository. """
  schemas = {
    'ChangeVisibility': {
      'type': 'object',
      'description': 'Change the visibility for the repository.',
      'required': ['visibility',],
      'properties': {
        'visibility': {
          'type': 'string',
          'description': 'Visibility which the repository will start with',
          'enum': [
            'public',
            'private',
          ],
        },
      }
    }
  }

  @require_repo_admin
  @nickname('changeRepoVisibility')
  @validate_json_request('ChangeVisibility')
  def post(self, namespace, repository):
    """ Change the visibility of a repository.

        Raises NotFound when the repository does not exist.
    """
    # FIX: the original only acted when the repo existed and otherwise fell off the end,
    # implicitly returning None; sibling endpoints (e.g. changetrust, changestate) raise
    # NotFound in that case, so do the same here.
    if not model.repo_exists(namespace, repository):
      raise NotFound()

    values = request.get_json()
    visibility = values['visibility']

    # Switching to private counts against the namespace's plan quota.
    if visibility == 'private':
      check_allowed_private_repos(namespace)

    model.set_repository_visibility(namespace, repository, visibility)
    log_action('change_repo_visibility', namespace,
               {'repo': repository,
                'namespace': namespace,
                'visibility': values['visibility']}, repo_name=repository)
    return {'success': True}
|
||||
|
||||
|
||||
@resource('/v1/repository/<apirepopath:repository>/changetrust')
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
class RepositoryTrust(RepositoryParamResource):
  """ Custom verb for changing the trust settings of the repository. """
  schemas = {
    'ChangeRepoTrust': {
      'type': 'object',
      'description': 'Change the trust settings for the repository.',
      'required': ['trust_enabled',],
      'properties': {
        'trust_enabled': {
          'type': 'boolean',
          'description': 'Whether or not signing is enabled for the repository.'
        },
      }
    }
  }

  @show_if(features.SIGNING)
  @require_repo_admin
  @nickname('changeRepoTrust')
  @validate_json_request('ChangeRepoTrust')
  def post(self, namespace, repository):
    """ Change the trust settings of a repository.

        (Docstring corrected: the original said "Change the visibility of a repository",
        a copy-paste from the changevisibility endpoint.)
        Raises NotFound if the repository does not exist and DownstreamIssue if existing
        trust metadata cannot be removed from the TUF service.
    """
    if not model.repo_exists(namespace, repository):
      raise NotFound()

    # Any existing signed tags must have their downstream TUF metadata removed before
    # the trust flag can be toggled.
    tags, _ = tuf_metadata_api.get_default_tags_with_expiration(namespace, repository)
    if tags and not tuf_metadata_api.delete_metadata(namespace, repository):
      raise DownstreamIssue('Unable to delete downstream trust metadata')

    values = request.get_json()
    model.set_trust(namespace, repository, values['trust_enabled'])

    log_action(
      'change_repo_trust', namespace,
      {'repo': repository,
       'namespace': namespace,
       'trust_enabled': values['trust_enabled']}, repo_name=repository)

    return {'success': True}
|
||||
|
||||
|
||||
@resource('/v1/repository/<apirepopath:repository>/changestate')
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
@show_if(features.REPO_MIRROR)
class RepositoryStateResource(RepositoryParamResource):
  """ Custom verb for changing the state of the repository. """
  schemas = {
    'ChangeRepoState': {
      'type': 'object',
      'description': 'Change the state of the repository.',
      'required': ['state'],
      'properties': {
        'state': {
          'type': 'string',
          'description': 'Determines whether pushes are allowed.',
          'enum': ['NORMAL', 'READ_ONLY', 'MIRROR'],
        },
      }
    }
  }

  @require_repo_admin
  @nickname('changeRepoState')
  @validate_json_request('ChangeRepoState')
  def put(self, namespace, repository):
    """ Change the state of a repository.

        Returns {'success': True} on success, or a 400 response when the requested
        state name is unknown or MIRROR is requested without the mirroring feature.
        Raises NotFound when the repository does not exist.
    """
    if not model.repo_exists(namespace, repository):
      raise NotFound()

    values = request.get_json()
    state_name = values['state']

    try:
      # Map the client-supplied name onto the RepositoryState enum; unknown names
      # leave `state` as None and are rejected below.
      state = RepositoryState[state_name]
    except KeyError:
      state = None

    # MIRROR is only a valid target state when the repo-mirror feature is enabled.
    if state == RepositoryState.MIRROR and not features.REPO_MIRROR:
      return {'detail': 'Unknown Repository State: %s' % state_name}, 400

    if state is None:
      return {'detail': '%s is not a valid Repository state.' % state_name}, 400

    model.set_repository_state(namespace, repository, state)

    log_action('change_repo_state', namespace,
               {'repo': repository,
                'namespace': namespace,
                'state_changed': state_name}, repo_name=repository)

    return {'success': True}
|
279
endpoints/api/repository_models_interface.py
Normal file
279
endpoints/api/repository_models_interface.py
Normal file
|
@ -0,0 +1,279 @@
|
|||
from abc import ABCMeta, abstractmethod
|
||||
from collections import namedtuple, defaultdict
|
||||
|
||||
from datetime import datetime
|
||||
from six import add_metaclass
|
||||
|
||||
import features
|
||||
from data.database import RepositoryState
|
||||
from endpoints.api import format_date
|
||||
|
||||
|
||||
class RepositoryBaseElement(
    namedtuple('RepositoryBaseElement', [
        'namespace_name', 'repository_name', 'is_starred', 'is_public', 'kind_name', 'description',
        'namespace_user_organization', 'namespace_user_removed_tag_expiration_s', 'last_modified',
        'action_count', 'should_last_modified', 'should_popularity', 'should_is_starred',
        'is_free_account', 'state'
    ])):
  """
  Core, serializable attributes of a single repository.
  :type namespace_name: string
  :type repository_name: string
  :type is_starred: boolean
  :type is_public: boolean
  :type kind_name: string
  :type description: string
  :type namespace_user_organization: boolean
  :type should_last_modified: boolean
  :type should_popularity: boolean
  :type should_is_starred: boolean
  """

  def to_dict(self):
    """ Serialize the base attributes; the optional fields (last_modified, popularity,
        is_starred) are emitted only when the corresponding `should_*` flag is set.
    """
    serialized = {
      'namespace': self.namespace_name,
      'name': self.repository_name,
      'description': self.description,
      'is_public': self.is_public,
      'kind': self.kind_name,
      'state': None if self.state is None else self.state.name,
    }

    if self.should_last_modified:
      serialized['last_modified'] = self.last_modified

    if self.should_popularity:
      # A missing/None action count serializes as 0.0.
      serialized['popularity'] = float(self.action_count or 0)

    if self.should_is_starred:
      serialized['is_starred'] = self.is_starred

    return serialized
|
||||
|
||||
|
||||
class ApplicationRepository(
    namedtuple('ApplicationRepository', ['repository_base_elements', 'channels', 'releases', 'state'])):
  """
  An application (appr) repository, combining its base attributes with channels and releases.
  :type repository_base_elements: RepositoryBaseElement
  :type channels: [Channel]
  :type releases: [Release]
  """

  def to_dict(self):
    """ Serialize the repository along with its channels and releases. """
    base = self.repository_base_elements
    return {
      'namespace': base.namespace_name,
      'name': base.repository_name,
      'kind': base.kind_name,
      'description': base.description,
      'is_public': base.is_public,
      'is_organization': base.namespace_user_organization,
      'is_starred': base.is_starred,
      'channels': [channel.to_dict() for channel in self.channels],
      'releases': [release.to_dict() for release in self.releases],
      'state': None if self.state is None else self.state.name,
      'is_free_account': base.is_free_account,
    }
|
||||
|
||||
|
||||
class ImageRepositoryRepository(
    namedtuple('NonApplicationRepository',
               ['repository_base_elements', 'tags', 'counts', 'badge_token', 'trust_enabled',
                'state'])):
  """
  An image repository, combining its base attributes with tags and action counts.
  :type repository_base_elements: RepositoryBaseElement
  :type tags: [Tag]
  :type counts: [count]
  :type badge_token: string
  :type trust_enabled: boolean
  """

  def to_dict(self):
    """ Serialize the repository; tags are included only when they were loaded. """
    base = self.repository_base_elements
    serialized = {
      'namespace': base.namespace_name,
      'name': base.repository_name,
      'kind': base.kind_name,
      'description': base.description,
      'is_public': base.is_public,
      'is_organization': base.namespace_user_organization,
      'is_starred': base.is_starred,
      # Public repos expose an empty badge token; only private ones need the secret.
      'status_token': '' if base.is_public else self.badge_token,
      'trust_enabled': bool(features.SIGNING) and self.trust_enabled,
      'tag_expiration_s': base.namespace_user_removed_tag_expiration_s,
      'is_free_account': base.is_free_account,
      'state': None if self.state is None else self.state.name
    }

    if self.tags is not None:
      serialized['tags'] = {tag.name: tag.to_dict() for tag in self.tags}

    # When the base element carries a (truthy) state, it overrides self.state.
    if base.state:
      serialized['state'] = base.state.name

    return serialized
|
||||
|
||||
|
||||
class Repository(namedtuple('Repository', [
    'namespace_name',
    'repository_name',
])):
  """
  Lightweight reference to a single repository, identified only by namespace and name
  (e.g. as returned from create_repo).
  :type namespace_name: string
  :type repository_name: string
  """
|
||||
|
||||
|
||||
class Channel(namedtuple('Channel', ['name', 'linked_tag_name', 'linked_tag_lifetime_start'])):
  """
  A release channel of an application repository, pointing at a linked tag.
  :type name: string
  :type linked_tag_name: string
  :type linked_tag_lifetime_start: string
  """

  def to_dict(self):
    """ Serialize the channel; the linked tag's lifetime start is milliseconds since epoch. """
    modified_at = datetime.fromtimestamp(self.linked_tag_lifetime_start / 1000)
    return {
      'name': self.name,
      'release': self.linked_tag_name,
      'last_modified': format_date(modified_at),
    }
|
||||
|
||||
|
||||
class Release(
    # FIX: the namedtuple typename was 'Channel' (copy-paste from the Channel class),
    # which gave Release instances a misleading repr/typename.
    namedtuple('Release', ['name', 'lifetime_start', 'releases_channels_map'])):
  """
  A single release of an application repository.
  :type name: string
  :type lifetime_start: int -- milliseconds since the epoch
  :type releases_channels_map: {string -> [string]} -- release name to channel names
  """

  def to_dict(self):
    """ Serialize the release with its formatted timestamp and associated channels. """
    return {
      'name': self.name,
      'last_modified': format_date(datetime.fromtimestamp(self.lifetime_start / 1000)),
      'channels': self.releases_channels_map[self.name],
    }
|
||||
|
||||
|
||||
class Tag(
    namedtuple('Tag', [
        'name', 'image_docker_image_id', 'image_aggregate_size', 'lifetime_start_ts',
        'tag_manifest_digest', 'lifetime_end_ts',
    ])):
  """
  A single tag within an image repository.
  :type name: string
  :type image_docker_image_id: string
  :type image_aggregate_size: int
  :type lifetime_start_ts: int
  :type lifetime_end_ts: int|None
  :type tag_manifest_digest: string
  """

  def to_dict(self):
    """ Serialize the tag; lifetime timestamps and the manifest digest are optional fields. """
    serialized = {
      'name': self.name,
      'image_id': self.image_docker_image_id,
      'size': self.image_aggregate_size
    }

    if self.lifetime_start_ts > 0:
      serialized['last_modified'] = format_date(datetime.fromtimestamp(self.lifetime_start_ts))

    if self.lifetime_end_ts:
      serialized['expiration'] = format_date(datetime.fromtimestamp(self.lifetime_end_ts))

    if self.tag_manifest_digest is not None:
      serialized['manifest_digest'] = self.tag_manifest_digest

    return serialized
|
||||
|
||||
|
||||
class Count(namedtuple('Count', ['date', 'count'])):
  """
  Repository action count for a single day.
  date: DateTime
  count: int
  """

  def to_dict(self):
    """ Serialize as {'date': ISO-8601 string, 'count': int}. """
    return dict(date=self.date.isoformat(), count=self.count)
|
||||
|
||||
|
||||
@add_metaclass(ABCMeta)
class RepositoryDataInterface(object):
  """
  Interface that represents all data store interactions required by a Repository.
  """

  @abstractmethod
  def get_repo(self, namespace_name, repository_name, user, include_tags=True, max_tags=500):
    """
    Returns the repository, serialized with up to `max_tags` tags when `include_tags`
    is set. Implementations return None when the repository does not exist.
    """

  @abstractmethod
  def repo_exists(self, namespace_name, repository_name):
    """
    Returns true if a repo exists and false if not
    """

  @abstractmethod
  def create_repo(self, namespace, name, creating_user, description, visibility='private',
                  repo_kind='image'):
    """
    Creates a new repository owned by `creating_user` and returns a reference to it.
    """

  @abstractmethod
  def get_repo_list(self, starred, user, repo_kind, namespace, username, public, page_token,
                    last_modified, popularity):
    """
    Returns a (list of RepositoryBaseElement, next page token) pair for the requested
    filter combination.
    """

  @abstractmethod
  def set_repository_visibility(self, namespace_name, repository_name, visibility):
    """
    Sets a repository's visibility if it is found
    """

  @abstractmethod
  def set_trust(self, namespace_name, repository_name, trust):
    """
    Sets a repository's trust_enabled field if it is found
    """

  @abstractmethod
  def set_description(self, namespace_name, repository_name, description):
    """
    Sets a repository's description if it is found.
    """

  @abstractmethod
  def purge_repository(self, namespace_name, repository_name):
    """
    Removes a repository and all of its associated data.
    """

  @abstractmethod
  def check_repository_usage(self, user_name, plan_found):
    """
    Creates a notification for a user if they are over or under on their repository usage
    """

  @abstractmethod
  def set_repository_state(self, namespace_name, repository_name, state):
    """
    Set the State of the Repository.
    """
|
190
endpoints/api/repository_models_pre_oci.py
Normal file
190
endpoints/api/repository_models_pre_oci.py
Normal file
|
@ -0,0 +1,190 @@
|
|||
from collections import defaultdict
|
||||
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from auth.permissions import ReadRepositoryPermission
|
||||
from data.database import Repository as RepositoryTable, RepositoryState
|
||||
from data import model
|
||||
from data.appr_model import channel as channel_model, release as release_model
|
||||
from data.registry_model import registry_model
|
||||
from data.registry_model.datatypes import RepositoryReference
|
||||
from endpoints.appr.models_cnr import model as appr_model
|
||||
from endpoints.api.repository_models_interface import RepositoryDataInterface, RepositoryBaseElement, Repository, \
|
||||
ApplicationRepository, ImageRepositoryRepository, Tag, Channel, Release, Count
|
||||
|
||||
MAX_DAYS_IN_3_MONTHS = 92
|
||||
REPOS_PER_PAGE = 100
|
||||
|
||||
|
||||
def _create_channel(channel, releases_channels_map):
  """ Convert a channel DB object into a Channel tuple.

      Side effect: records the channel name under its linked tag's name in
      `releases_channels_map` (a defaultdict(list)), so callers can later map
      releases to the channels pointing at them.
  """
  releases_channels_map[channel.linked_tag.name].append(channel.name)
  return Channel(channel.name, channel.linked_tag.name, channel.linked_tag.lifetime_start)
|
||||
|
||||
|
||||
class PreOCIModel(RepositoryDataInterface):
  """
  PreOCIModel implements the repository data interface using a database schema
  before it was changed to support the OCI specification.
  """

  def check_repository_usage(self, username, plan_found):
    """ Create (or clear) the over-usage notification for the namespace, comparing its
        private repo count against the plan's allowance (0 when no plan was found).
    """
    private_repos = model.user.get_private_repo_count(username)
    if plan_found is None:
      repos_allowed = 0
    else:
      repos_allowed = plan_found['privateRepos']

    user_or_org = model.user.get_namespace_user(username)
    if private_repos > repos_allowed:
      model.notification.create_unique_notification('over_private_usage', user_or_org,
                                                    {'namespace': username})
    else:
      model.notification.delete_notifications_by_kind(user_or_org, 'over_private_usage')

  def purge_repository(self, namespace_name, repository_name):
    """ Remove the repository and all its data; returns the owning namespace's username. """
    model.gc.purge_repository(namespace_name, repository_name)
    user = model.user.get_namespace_user(namespace_name)
    return user.username

  def set_description(self, namespace_name, repository_name, description):
    """ Set the repository's markdown description. """
    repo = model.repository.get_repository(namespace_name, repository_name)
    model.repository.set_description(repo, description)

  def set_trust(self, namespace_name, repository_name, trust):
    """ Set the repository's trust_enabled flag. """
    repo = model.repository.get_repository(namespace_name, repository_name)
    model.repository.set_trust(repo, trust)

  def set_repository_visibility(self, namespace_name, repository_name, visibility):
    """ Set the repository's visibility ('public' or 'private'). """
    repo = model.repository.get_repository(namespace_name, repository_name)
    model.repository.set_repository_visibility(repo, visibility)

  def set_repository_state(self, namespace_name, repository_name, state):
    """ Set the repository's RepositoryState. """
    repo = model.repository.get_repository(namespace_name, repository_name)
    model.repository.set_repository_state(repo, state)

  def get_repo_list(self, starred, user, repo_kind, namespace, username, public, page_token,
                    last_modified, popularity):
    """ Return (list of RepositoryBaseElement, next page token) for the requested filters.

        Only the unfiltered (neither starred nor namespace) path is paginated.
    """
    next_page_token = None
    # Lookup the requested repositories (either starred or non-starred.)
    if starred:
      # Return the full list of repos starred by the current user that are still visible to them.
      def can_view_repo(repo):
        can_view = ReadRepositoryPermission(repo.namespace_user.username, repo.name).can()
        return can_view or model.repository.is_repository_public(repo)

      unfiltered_repos = model.repository.get_user_starred_repositories(user,
                                                                        kind_filter=repo_kind)
      repos = [repo for repo in unfiltered_repos if can_view_repo(repo)]
    elif namespace:
      # Repositories filtered by namespace do not need pagination (their results are fairly small),
      # so we just do the lookup directly.
      repos = list(
        model.repository.get_visible_repositories(username=username, include_public=public,
                                                  namespace=namespace, kind_filter=repo_kind))
    else:
      # Determine the starting offset for pagination. Note that we don't use the normal
      # model.modelutil.paginate method here, as that does not operate over UNION queries, which
      # get_visible_repositories will return if there is a logged-in user (for performance reasons).
      #
      # Also note the +1 on the limit, as paginate_query uses the extra result to determine whether
      # there is a next page.
      start_id = model.modelutil.pagination_start(page_token)
      repo_query = model.repository.get_visible_repositories(
        username=username, include_public=public, start_id=start_id, limit=REPOS_PER_PAGE + 1,
        kind_filter=repo_kind)

      repos, next_page_token = model.modelutil.paginate_query(repo_query, limit=REPOS_PER_PAGE,
                                                              sort_field_name='rid')

    # Collect the IDs of the repositories found for subsequent lookup of popularity
    # and/or last modified.
    last_modified_map = {}
    action_sum_map = {}
    if last_modified or popularity:
      repository_refs = [RepositoryReference.for_id(repo.rid) for repo in repos]
      repository_ids = [repo.rid for repo in repos]

      if last_modified:
        last_modified_map = registry_model.get_most_recent_tag_lifetime_start(repository_refs)

      if popularity:
        action_sum_map = model.log.get_repositories_action_sums(repository_ids)

    # Collect the IDs of the repositories that are starred for the user, so we can mark them
    # in the returned results.
    star_set = set()
    if username:
      starred_repos = model.repository.get_user_starred_repositories(user)
      star_set = {starred.id for starred in starred_repos}

    return [
      RepositoryBaseElement(repo.namespace_user.username, repo.name, repo.id in star_set,
                            repo.visibility_id == model.repository.get_public_repo_visibility().id,
                            repo_kind, repo.description, repo.namespace_user.organization,
                            repo.namespace_user.removed_tag_expiration_s,
                            last_modified_map.get(repo.rid),
                            action_sum_map.get(repo.rid), last_modified, popularity, username,
                            None, repo.state)
      for repo in repos
    ], next_page_token

  def repo_exists(self, namespace_name, repository_name):
    """ Returns True if the repository exists, False otherwise. """
    # Simplified from an if/else over the lookup result.
    return model.repository.get_repository(namespace_name, repository_name) is not None

  def create_repo(self, namespace_name, repository_name, owner, description, visibility='private',
                  repo_kind='image'):
    """ Create the repository in the database and return a lightweight Repository reference. """
    # The created DB row is not needed here; the API layer only requires a name reference.
    model.repository.create_repository(namespace_name, repository_name, owner, visibility,
                                       repo_kind=repo_kind, description=description)
    return Repository(namespace_name, repository_name)

  def get_repo(self, namespace_name, repository_name, user, include_tags=True, max_tags=500):
    """ Load and serialize a repository, or return None when it does not exist.

        Application (appr) repositories are returned as ApplicationRepository with channels
        and releases; image repositories as ImageRepositoryRepository with (optionally) up to
        `max_tags` active tags and ~3 months of action counts.
    """
    repo = model.repository.get_repository(namespace_name, repository_name)
    if repo is None:
      return None

    is_starred = model.repository.repository_is_starred(user, repo) if user else False
    is_public = model.repository.is_repository_public(repo)
    kind_name = RepositoryTable.kind.get_name(repo.kind_id)
    base = RepositoryBaseElement(
      namespace_name, repository_name, is_starred, is_public, kind_name, repo.description,
      repo.namespace_user.organization, repo.namespace_user.removed_tag_expiration_s, None, None,
      False, False, False, repo.namespace_user.stripe_id is None, repo.state)

    if base.kind_name == 'application':
      channels = channel_model.get_repo_channels(repo, appr_model.models_ref)
      releases = release_model.get_release_objs(repo, appr_model.models_ref)
      # _create_channel fills releases_channels_map as a side effect while converting.
      releases_channels_map = defaultdict(list)
      return ApplicationRepository(
        base, [_create_channel(channel, releases_channels_map) for channel in channels], [
          Release(release.name, release.lifetime_start, releases_channels_map)
          for release in releases
        ], repo.state)

    tags = None
    repo_ref = RepositoryReference.for_repo_obj(repo)
    if include_tags:
      tags, _ = registry_model.list_repository_tag_history(repo_ref, page=1, size=max_tags,
                                                           active_tags_only=True)
      tags = [
        Tag(tag.name,
            tag.legacy_image.docker_image_id if tag.legacy_image_if_present else None,
            tag.legacy_image.aggregate_size if tag.legacy_image_if_present else None,
            tag.lifetime_start_ts,
            tag.manifest_digest,
            tag.lifetime_end_ts) for tag in tags
      ]

    start_date = datetime.now() - timedelta(days=MAX_DAYS_IN_3_MONTHS)
    counts = model.log.get_repository_action_counts(repo, start_date)

    assert repo.state is not None
    return ImageRepositoryRepository(base, tags,
                                     [Count(count.date, count.count) for count in counts],
                                     repo.badge_token, repo.trust_enabled, repo.state)


pre_oci_model = PreOCIModel()
|
164
endpoints/api/repositorynotification.py
Normal file
164
endpoints/api/repositorynotification.py
Normal file
|
@ -0,0 +1,164 @@
|
|||
""" List, create and manage repository events/notifications. """
|
||||
|
||||
import logging
|
||||
from flask import request
|
||||
|
||||
from endpoints.api import (
|
||||
RepositoryParamResource, nickname, resource, require_repo_admin, log_action,
|
||||
validate_json_request, request_error, path_param, disallow_for_app_repositories, InvalidRequest)
|
||||
from endpoints.exception import NotFound
|
||||
from notifications.models_interface import Repository
|
||||
from notifications.notificationevent import NotificationEvent
|
||||
from notifications.notificationmethod import (
|
||||
NotificationMethod, CannotValidateNotificationMethodException)
|
||||
from endpoints.api.repositorynotification_models_pre_oci import pre_oci_model as model
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@resource('/v1/repository/<apirepopath:repository>/notification/')
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
class RepositoryNotificationList(RepositoryParamResource):
  """ Resource for dealing with listing and creating notifications on a repository. """
  schemas = {
    'NotificationCreateRequest': {
      'type': 'object',
      'description': 'Information for creating a notification on a repository',
      'required': [
        'event',
        'method',
        'config',
        'eventConfig',
      ],
      'properties': {
        'event': {
          'type': 'string',
          'description': 'The event on which the notification will respond',
        },
        'method': {
          'type': 'string',
          'description': 'The method of notification (such as email or web callback)',
        },
        'config': {
          'type': 'object',
          'description': 'JSON config information for the specific method of notification'
        },
        'eventConfig': {
          'type': 'object',
          'description': 'JSON config information for the specific event of notification',
        },
        'title': {
          'type': 'string',
          'description': 'The human-readable title of the notification',
        },
      }
    },
  }

  @require_repo_admin
  @nickname('createRepoNotification')
  @disallow_for_app_repositories
  @validate_json_request('NotificationCreateRequest')
  def post(self, namespace_name, repository_name):
    """ Create a notification on the repository for the given event and delivery method. """
    parsed = request.get_json()

    # Validate the method-specific configuration (e.g. a well-formed webhook URL
    # or e-mail address) before anything is persisted.
    method_handler = NotificationMethod.get_method(parsed['method'])
    try:
      method_handler.validate(namespace_name, repository_name, parsed['config'])
    except CannotValidateNotificationMethodException as ex:
      # Fix: use str(ex) rather than ex.message; BaseException.message was
      # deprecated in Python 2.6 and removed entirely in Python 3.
      raise request_error(message=str(ex))

    new_notification = model.create_repo_notification(namespace_name, repository_name,
                                                      parsed['event'], parsed['method'],
                                                      parsed['config'], parsed['eventConfig'],
                                                      parsed.get('title'))

    # Record the creation in the namespace's audit log.
    log_action('add_repo_notification', namespace_name, {
      'repo': repository_name,
      'namespace': namespace_name,
      'notification_id': new_notification.uuid,
      'event': new_notification.event_name,
      'method': new_notification.method_name}, repo_name=repository_name)
    return new_notification.to_dict(), 201

  @require_repo_admin
  @nickname('listRepoNotifications')
  @disallow_for_app_repositories
  def get(self, namespace_name, repository_name):
    """ List the notifications for the specified repository. """
    notifications = model.list_repo_notifications(namespace_name, repository_name)
    return {'notifications': [n.to_dict() for n in notifications]}
|
||||
|
||||
|
||||
@resource('/v1/repository/<apirepopath:repository>/notification/<uuid>')
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
@path_param('uuid', 'The UUID of the notification')
class RepositoryNotification(RepositoryParamResource):
  """ Resource for dealing with specific notifications. """

  @require_repo_admin
  @nickname('getRepoNotification')
  @disallow_for_app_repositories
  def get(self, namespace_name, repository_name, uuid):
    """ Get information for the specified notification. """
    notification = model.get_repo_notification(uuid)
    if notification is None:
      raise NotFound()

    return notification.to_dict()

  @require_repo_admin
  @nickname('deleteRepoNotification')
  @disallow_for_app_repositories
  def delete(self, namespace_name, repository_name, uuid):
    """ Deletes the specified notification. """
    notification = model.delete_repo_notification(namespace_name, repository_name, uuid)
    if notification is None:
      raise InvalidRequest("No repository notification found for: %s, %s, %s" %
                           (namespace_name, repository_name, uuid))

    # Audit-log the deletion under the repository's namespace.
    log_action('delete_repo_notification', namespace_name, {
      'repo': repository_name,
      'namespace': namespace_name,
      'notification_id': uuid,
      'event': notification.event_name,
      'method': notification.method_name}, repo_name=repository_name)

    return 'No Content', 204

  @require_repo_admin
  @nickname('resetRepositoryNotificationFailures')
  @disallow_for_app_repositories
  def post(self, namespace_name, repository_name, uuid):
    """ Resets repository notification to 0 failures. """
    notification = model.reset_notification_number_of_failures(namespace_name, repository_name,
                                                               uuid)
    if notification is None:
      raise InvalidRequest("No repository notification found for: %s, %s, %s" %
                           (namespace_name, repository_name, uuid))

    # Audit-log the reset so admins can trace who re-enabled a failing hook.
    log_action('reset_repo_notification', namespace_name, {
      'repo': repository_name,
      'namespace': namespace_name,
      'notification_id': uuid,
      'event': notification.event_name,
      'method': notification.method_name}, repo_name=repository_name)

    return 'No Content', 204
|
||||
|
||||
|
||||
@resource('/v1/repository/<apirepopath:repository>/notification/<uuid>/test')
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
@path_param('uuid', 'The UUID of the notification')
class TestRepositoryNotification(RepositoryParamResource):
  """ Resource for queuing a test of a notification. """

  @require_repo_admin
  @nickname('testRepoNotification')
  @disallow_for_app_repositories
  def post(self, namespace_name, repository_name, uuid):
    """ Queues a test notification for this repository. """
    queued = model.queue_test_notification(uuid)
    if queued is None:
      raise InvalidRequest("No repository notification found for: %s, %s, %s" %
                           (namespace_name, repository_name, uuid))

    return {}, 200
|
146
endpoints/api/repositorynotification_models_interface.py
Normal file
146
endpoints/api/repositorynotification_models_interface.py
Normal file
|
@ -0,0 +1,146 @@
|
|||
import json
|
||||
|
||||
from abc import ABCMeta, abstractmethod
|
||||
from collections import namedtuple
|
||||
|
||||
from six import add_metaclass
|
||||
|
||||
|
||||
class RepositoryNotification(
  namedtuple('RepositoryNotification', [
    'uuid',
    'title',
    'event_name',
    'method_name',
    'config_json',
    'event_config_json',
    'number_of_failures',
  ])):
  """
  RepositoryNotification represents a notification for a repository.
  :type uuid: string
  :type title: string
  :type event_name: string
  :type method_name: string
  :type config_json: string
  :type event_config_json: string
  :type number_of_failures: int
  """

  @staticmethod
  def _parse_config(serialized):
    # Tolerate malformed JSON by falling back to an empty config; note that a
    # non-string value (e.g. None) is deliberately NOT caught here, matching
    # the ValueError-only handling this serializer has always had.
    try:
      return json.loads(serialized)
    except ValueError:
      return {}

  def to_dict(self):
    """ Serialize this notification into the API response shape. """
    return {
      'uuid': self.uuid,
      'title': self.title,
      'event': self.event_name,
      'method': self.method_name,
      'config': self._parse_config(self.config_json),
      'event_config': self._parse_config(self.event_config_json),
      'number_of_failures': self.number_of_failures,
    }
|
||||
|
||||
|
||||
@add_metaclass(ABCMeta)
class RepoNotificationInterface(object):
  """
  Interface that represents all data store interactions required by the RepositoryNotification API
  """

  @abstractmethod
  def create_repo_notification(self, namespace_name, repository_name, event_name, method_name,
                               method_config, event_config, title=None):
    """
    Creates a new notification on the given repository.

    Args:
      namespace_name: namespace of repository
      repository_name: name of repository
      event_name: name of the triggering event
      method_name: name of the delivery method (e.g. email or web callback)
      method_config: method config, json string
      event_config: event config, json string
      title: optional human-readable title of the notification

    Returns:
      RepositoryNotification object
    """
    pass

  @abstractmethod
  def list_repo_notifications(self, namespace_name, repository_name, event_name=None):
    """
    Lists the notifications on the given repository, optionally filtered by event.

    Args:
      namespace_name: namespace of repository
      repository_name: name of repository
      event_name: if given, only notifications for this event are returned

    Returns:
      list(RepositoryNotification)
    """
    pass

  @abstractmethod
  def get_repo_notification(self, uuid):
    """
    Looks up a single notification by its UUID.

    Args:
      uuid: uuid of notification

    Returns:
      RepositoryNotification or None
    """
    pass

  @abstractmethod
  def delete_repo_notification(self, namespace_name, repository_name, uuid):
    """
    Deletes the notification with the given UUID from the repository.

    Args:
      namespace_name: namespace of repository
      repository_name: name of repository
      uuid: uuid of notification

    Returns:
      RepositoryNotification or None
    """
    pass

  @abstractmethod
  def reset_notification_number_of_failures(self, namespace_name, repository_name, uuid):
    """
    Resets the notification's failure counter back to zero.

    Args:
      namespace_name: namespace of repository
      repository_name: name of repository
      uuid: uuid of notification

    Returns:
      RepositoryNotification
    """
    pass

  @abstractmethod
  def queue_test_notification(self, uuid):
    """
    Enqueues a test delivery of the notification with sample event data.

    Args:
      uuid: uuid of notification

    Returns:
      RepositoryNotification or None
    """
    pass
|
72
endpoints/api/repositorynotification_models_pre_oci.py
Normal file
72
endpoints/api/repositorynotification_models_pre_oci.py
Normal file
|
@ -0,0 +1,72 @@
|
|||
import json
|
||||
|
||||
from app import notification_queue
|
||||
from data import model
|
||||
from data.model import InvalidNotificationException
|
||||
from endpoints.api.repositorynotification_models_interface import (RepoNotificationInterface,
|
||||
RepositoryNotification)
|
||||
from notifications import build_notification_data
|
||||
from notifications.notificationevent import NotificationEvent
|
||||
|
||||
|
||||
class RepoNotificationPreOCIModel(RepoNotificationInterface):
  """ Pre-OCI data-model backed implementation of the notification interface. """

  def create_repo_notification(self, namespace_name, repository_name, event_name, method_name,
                               method_config, event_config, title=None):
    repository = model.repository.get_repository(namespace_name, repository_name)
    created = model.notification.create_repo_notification(repository, event_name, method_name,
                                                          method_config, event_config, title)
    return self._notification(created)

  def list_repo_notifications(self, namespace_name, repository_name, event_name=None):
    rows = model.notification.list_repo_notifications(namespace_name, repository_name, event_name)
    return [self._notification(row) for row in rows]

  def get_repo_notification(self, uuid):
    try:
      row = model.notification.get_repo_notification(uuid)
    except InvalidNotificationException:
      return None

    return self._notification(row)

  def delete_repo_notification(self, namespace_name, repository_name, uuid):
    try:
      row = model.notification.delete_repo_notification(namespace_name, repository_name, uuid)
    except InvalidNotificationException:
      return None

    return self._notification(row)

  def reset_notification_number_of_failures(self, namespace_name, repository_name, uuid):
    row = model.notification.reset_notification_number_of_failures(namespace_name,
                                                                   repository_name, uuid)
    return self._notification(row)

  def queue_test_notification(self, uuid):
    try:
      notification = model.notification.get_repo_notification(uuid)
    except InvalidNotificationException:
      return None

    # Build sample payload data for the notification's event and enqueue it
    # exactly as a real event delivery would be.
    event_config = json.loads(notification.event_config_json or '{}')
    event_info = NotificationEvent.get_event(notification.event.name)
    sample_data = event_info.get_sample_data(notification.repository.namespace_user.username,
                                             notification.repository.name, event_config)
    notification_data = build_notification_data(notification, sample_data)
    notification_queue.put([
      notification.repository.namespace_user.username, notification.uuid, notification.event.name],
      json.dumps(notification_data))
    return self._notification(notification)

  def _notification(self, notification):
    # Convert a database notification row into the API-layer namedtuple.
    if not notification:
      return None

    return RepositoryNotification(
      uuid=notification.uuid, title=notification.title, event_name=notification.event.name,
      method_name=notification.method.name, config_json=notification.config_json,
      event_config_json=notification.event_config_json,
      number_of_failures=notification.number_of_failures)


pre_oci_model = RepoNotificationPreOCIModel()
|
100
endpoints/api/repotoken.py
Normal file
100
endpoints/api/repotoken.py
Normal file
|
@ -0,0 +1,100 @@
|
|||
""" Manage repository access tokens (DEPRECATED). """
|
||||
|
||||
import logging
|
||||
|
||||
from endpoints.api import (resource, nickname, require_repo_admin, RepositoryParamResource,
|
||||
validate_json_request, path_param)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@resource('/v1/repository/<apirepopath:repository>/tokens/')
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
class RepositoryTokenList(RepositoryParamResource):
  """ Resource for creating and listing repository tokens.

  Repository access tokens are deprecated: both endpoints answer 410 Gone.
  """
  schemas = {
    'NewToken': {
      'type': 'object',
      'description': 'Description of a new token.',
      'required': [
        'friendlyName',
      ],
      'properties': {
        'friendlyName': {
          'type': 'string',
          'description': 'Friendly name to help identify the token',
        },
      },
    },
  }

  @require_repo_admin
  @nickname('listRepoTokens')
  def get(self, namespace_name, repo_name):
    """ List the tokens for the specified repository. """
    gone = {'message': 'Handling of access tokens is no longer supported'}
    return gone, 410

  @require_repo_admin
  @nickname('createToken')
  @validate_json_request('NewToken')
  def post(self, namespace_name, repo_name):
    """ Create a new repository token. """
    gone = {'message': 'Creation of access tokens is no longer supported'}
    return gone, 410
|
||||
|
||||
|
||||
@resource('/v1/repository/<apirepopath:repository>/tokens/<code>')
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
@path_param('code', 'The token code')
class RepositoryToken(RepositoryParamResource):
  """ Resource for managing individual tokens.

  Repository access tokens are deprecated: every endpoint answers 410 Gone.
  """
  schemas = {
    'TokenPermission': {
      'type': 'object',
      'description': 'Description of a token permission',
      'required': [
        'role',
      ],
      'properties': {
        'role': {
          'type': 'string',
          'description': 'Role to use for the token',
          'enum': [
            'read',
            'write',
            'admin',
          ],
        },
      },
    },
  }

  @require_repo_admin
  @nickname('getTokens')
  def get(self, namespace_name, repo_name, code):
    """ Fetch the specified repository token information. """
    gone = {'message': 'Handling of access tokens is no longer supported'}
    return gone, 410

  @require_repo_admin
  @nickname('changeToken')
  @validate_json_request('TokenPermission')
  def put(self, namespace_name, repo_name, code):
    """ Update the permissions for the specified repository token. """
    gone = {'message': 'Handling of access tokens is no longer supported'}
    return gone, 410

  @require_repo_admin
  @nickname('deleteToken')
  def delete(self, namespace_name, repo_name, code):
    """ Delete the repository token. """
    gone = {'message': 'Handling of access tokens is no longer supported'}
    return gone, 410
|
274
endpoints/api/robot.py
Normal file
274
endpoints/api/robot.py
Normal file
|
@ -0,0 +1,274 @@
|
|||
""" Manage user and organization robot accounts. """
|
||||
|
||||
from endpoints.api import (resource, nickname, ApiResource, log_action, related_user_resource,
|
||||
require_user_admin, require_scope, path_param, parse_args,
|
||||
truthy_bool, query_param, validate_json_request, max_json_size)
|
||||
from endpoints.api.robot_models_pre_oci import pre_oci_model as model
|
||||
from endpoints.exception import Unauthorized
|
||||
from auth.permissions import AdministerOrganizationPermission, OrganizationMemberPermission
|
||||
from auth.auth_context import get_authenticated_user
|
||||
from auth import scopes
|
||||
from util.names import format_robot_username
|
||||
from flask import abort, request
|
||||
|
||||
|
||||
CREATE_ROBOT_SCHEMA = {
|
||||
'type': 'object',
|
||||
'description': 'Optional data for creating a robot',
|
||||
'properties': {
|
||||
'description': {
|
||||
'type': 'string',
|
||||
'description': 'Optional text description for the robot',
|
||||
'maxLength': 255,
|
||||
},
|
||||
'unstructured_metadata': {
|
||||
'type': 'object',
|
||||
'description': 'Optional unstructured metadata for the robot',
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
ROBOT_MAX_SIZE = 1024 * 1024 # 1 KB.
|
||||
|
||||
|
||||
def robots_list(prefix, include_permissions=False, include_token=False, limit=None):
  """ Return the JSON payload listing the robots under the given entity prefix. """
  found = model.list_entity_robot_permission_teams(
    prefix, limit=limit, include_token=include_token,
    include_permissions=include_permissions)
  serialized = [entry.to_dict(include_token=include_token) for entry in found]
  return {'robots': serialized}
|
||||
|
||||
|
||||
@resource('/v1/user/robots')
class UserRobotList(ApiResource):
  """ Resource for listing user robots. """

  @require_user_admin
  @nickname('getUserRobots')
  @parse_args()
  @query_param('permissions',
               'Whether to include repositories and teams in which the robots have permission.',
               type=truthy_bool, default=False)
  @query_param('token',
               'If false, the robot\'s token is not returned.',
               type=truthy_bool, default=True)
  @query_param('limit',
               'If specified, the number of robots to return.',
               type=int, default=None)
  def get(self, parsed_args):
    """ List the available robots for the user.

    Returns a dict of the form {'robots': [...]}. Robot tokens are included
    by default (the 'token' query parameter defaults to True).
    """
    user = get_authenticated_user()
    return robots_list(user.username, include_token=parsed_args.get('token', True),
                       include_permissions=parsed_args.get('permissions', False),
                       limit=parsed_args.get('limit'))
|
||||
|
||||
|
||||
@resource('/v1/user/robots/<robot_shortname>')
@path_param('robot_shortname',
            'The short name for the robot, without any user or organization prefix')
class UserRobot(ApiResource):
  """ Resource for managing a user's robots. """
  schemas = {
    'CreateRobot': CREATE_ROBOT_SCHEMA,
  }

  @require_user_admin
  @nickname('getUserRobot')
  def get(self, robot_shortname):
    """ Returns the user's robot with the specified name, including its token and metadata. """
    parent = get_authenticated_user()
    robot = model.get_user_robot(robot_shortname, parent)
    return robot.to_dict(include_metadata=True, include_token=True)

  @require_user_admin
  @nickname('createUserRobot')
  @max_json_size(ROBOT_MAX_SIZE)
  @validate_json_request('CreateRobot', optional=True)
  def put(self, robot_shortname):
    """ Create a new user robot with the specified name. Returns 201 with the new robot. """
    parent = get_authenticated_user()
    # The request body is optional; a missing body means no description/metadata.
    create_data = request.get_json() or {}
    robot = model.create_user_robot(robot_shortname, parent, create_data.get('description'),
                                    create_data.get('unstructured_metadata'))
    log_action('create_robot', parent.username, {
      'robot': robot_shortname,
      'description': create_data.get('description'),
      'unstructured_metadata': create_data.get('unstructured_metadata'),
    })
    return robot.to_dict(include_metadata=True, include_token=True), 201

  @require_user_admin
  @nickname('deleteUserRobot')
  def delete(self, robot_shortname):
    """ Delete an existing robot. Returns 204 on success. """
    parent = get_authenticated_user()
    # delete_robot expects the fully-qualified robot username (user+shortname).
    model.delete_robot(format_robot_username(parent.username, robot_shortname))
    log_action('delete_robot', parent.username, {'robot': robot_shortname})
    return '', 204
|
||||
|
||||
|
||||
@resource('/v1/organization/<orgname>/robots')
@path_param('orgname', 'The name of the organization')
@related_user_resource(UserRobotList)
class OrgRobotList(ApiResource):
  """ Resource for listing an organization's robots. """

  @require_scope(scopes.ORG_ADMIN)
  @nickname('getOrgRobots')
  @parse_args()
  @query_param('permissions',
               'Whether to include repositories and teams in which the robots have permission.',
               type=truthy_bool, default=False)
  @query_param('token',
               'If false, the robot\'s token is not returned.',
               type=truthy_bool, default=True)
  @query_param('limit',
               'If specified, the number of robots to return.',
               type=int, default=None)
  def get(self, orgname, parsed_args):
    """ List the organization's robots.

    Any org member may list the robots; tokens and permission details are
    additionally gated on org-admin rights.
    """
    # Fix: corrected the user-visible typo "repostories" -> "repositories" in
    # the 'permissions' query parameter description (now consistent with
    # UserRobotList above).
    permission = OrganizationMemberPermission(orgname)
    if permission.can():
      include_token = (AdministerOrganizationPermission(orgname).can() and
                       parsed_args.get('token', True))
      include_permissions = (AdministerOrganizationPermission(orgname).can() and
                             parsed_args.get('permissions', False))
      return robots_list(orgname, include_permissions=include_permissions,
                         include_token=include_token,
                         limit=parsed_args.get('limit'))

    raise Unauthorized()
|
||||
|
||||
|
||||
@resource('/v1/organization/<orgname>/robots/<robot_shortname>')
@path_param('orgname', 'The name of the organization')
@path_param('robot_shortname',
            'The short name for the robot, without any user or organization prefix')
@related_user_resource(UserRobot)
class OrgRobot(ApiResource):
  """ Resource for managing an organization's robots. """
  schemas = {
    'CreateRobot': CREATE_ROBOT_SCHEMA,
  }

  @require_scope(scopes.ORG_ADMIN)
  @nickname('getOrgRobot')
  def get(self, orgname, robot_shortname):
    """ Returns the organization's robot with the specified name.

    Requires org-admin rights since the response includes the robot's token.
    """
    permission = AdministerOrganizationPermission(orgname)
    if permission.can():
      robot = model.get_org_robot(robot_shortname, orgname)
      return robot.to_dict(include_metadata=True, include_token=True)

    raise Unauthorized()

  @require_scope(scopes.ORG_ADMIN)
  @nickname('createOrgRobot')
  @max_json_size(ROBOT_MAX_SIZE)
  @validate_json_request('CreateRobot', optional=True)
  def put(self, orgname, robot_shortname):
    """ Create a new robot in the organization. Returns 201 with the new robot. """
    permission = AdministerOrganizationPermission(orgname)
    if permission.can():
      # The request body is optional; a missing body means no description/metadata.
      create_data = request.get_json() or {}
      robot = model.create_org_robot(robot_shortname, orgname, create_data.get('description'),
                                     create_data.get('unstructured_metadata'))
      log_action('create_robot', orgname, {
        'robot': robot_shortname,
        'description': create_data.get('description'),
        'unstructured_metadata': create_data.get('unstructured_metadata'),
      })
      return robot.to_dict(include_metadata=True, include_token=True), 201

    raise Unauthorized()

  @require_scope(scopes.ORG_ADMIN)
  @nickname('deleteOrgRobot')
  def delete(self, orgname, robot_shortname):
    """ Delete an existing organization robot. Returns 204 on success. """
    permission = AdministerOrganizationPermission(orgname)
    if permission.can():
      # delete_robot takes the fully-qualified robot username (org+shortname).
      model.delete_robot(format_robot_username(orgname, robot_shortname))
      log_action('delete_robot', orgname, {'robot': robot_shortname})
      return '', 204

    raise Unauthorized()
|
||||
|
||||
|
||||
@resource('/v1/user/robots/<robot_shortname>/permissions')
@path_param('robot_shortname',
            'The short name for the robot, without any user or organization prefix')
class UserRobotPermissions(ApiResource):
  """ Resource for listing the permissions a user's robot has in the system. """

  @require_user_admin
  @nickname('getUserRobotPermissions')
  def get(self, robot_shortname):
    """ Returns the list of repository permissions for the user's robot. """
    parent = get_authenticated_user()
    # Resolve the robot under the authenticated user; permissions are then
    # looked up by the robot's fully-qualified name.
    robot = model.get_user_robot(robot_shortname, parent)
    permissions = model.list_robot_permissions(robot.name)

    return {
      'permissions': [permission.to_dict() for permission in permissions]
    }
|
||||
|
||||
|
||||
@resource('/v1/organization/<orgname>/robots/<robot_shortname>/permissions')
@path_param('orgname', 'The name of the organization')
@path_param('robot_shortname',
            'The short name for the robot, without any user or organization prefix')
@related_user_resource(UserRobotPermissions)
class OrgRobotPermissions(ApiResource):
  """ Resource for listing the permissions an org's robot has in the system. """

  @require_user_admin
  @nickname('getOrgRobotPermissions')
  def get(self, orgname, robot_shortname):
    """ Returns the list of repository permissions for the org's robot. """
    permission = AdministerOrganizationPermission(orgname)
    if permission.can():
      robot = model.get_org_robot(robot_shortname, orgname)
      permissions = model.list_robot_permissions(robot.name)

      return {
        'permissions': [permission.to_dict() for permission in permissions]
      }

    # Consistency fix: every other org-scoped resource in this module raises
    # Unauthorized() instead of calling abort(403) directly, so the error
    # response shares the same JSON shape across endpoints.
    raise Unauthorized()
|
||||
|
||||
|
||||
@resource('/v1/user/robots/<robot_shortname>/regenerate')
@path_param('robot_shortname',
            'The short name for the robot, without any user or organization prefix')
class RegenerateUserRobot(ApiResource):
  """ Resource for regenerating a user's robot's token. """

  @require_user_admin
  @nickname('regenerateUserRobotToken')
  def post(self, robot_shortname):
    """ Regenerates the token for a user's robot. Returns the robot with its new token. """
    parent = get_authenticated_user()
    robot = model.regenerate_user_robot_token(robot_shortname, parent)
    # Token regeneration is security-sensitive; always audit-log it.
    log_action('regenerate_robot_token', parent.username, {'robot': robot_shortname})
    return robot.to_dict(include_token=True)
|
||||
|
||||
|
||||
@resource('/v1/organization/<orgname>/robots/<robot_shortname>/regenerate')
@path_param('orgname', 'The name of the organization')
@path_param('robot_shortname',
            'The short name for the robot, without any user or organization prefix')
@related_user_resource(RegenerateUserRobot)
class RegenerateOrgRobot(ApiResource):
  """ Resource for regenerating an organization's robot's token. """

  @require_scope(scopes.ORG_ADMIN)
  @nickname('regenerateOrgRobotToken')
  def post(self, orgname, robot_shortname):
    """ Regenerates the token for an organization robot. Returns the robot with its new token. """
    permission = AdministerOrganizationPermission(orgname)
    if permission.can():
      robot = model.regenerate_org_robot_token(robot_shortname, orgname)
      # Token regeneration is security-sensitive; always audit-log it.
      log_action('regenerate_robot_token', orgname, {'robot': robot_shortname})
      return robot.to_dict(include_token=True)

    raise Unauthorized()
|
196
endpoints/api/robot_models_interface.py
Normal file
196
endpoints/api/robot_models_interface.py
Normal file
|
@ -0,0 +1,196 @@
|
|||
from abc import ABCMeta, abstractmethod
|
||||
from collections import namedtuple
|
||||
|
||||
from six import add_metaclass
|
||||
|
||||
from endpoints.api import format_date
|
||||
|
||||
|
||||
class Permission(
  namedtuple('Permission', ['repository_name', 'repository_visibility_name', 'role_name'])):
  """
  Describes the relationship between a robot and a repository: the robot's role
  and whether the repository is publicly visible.
  """

  def to_dict(self):
    repository_view = {
      'name': self.repository_name,
      'is_public': self.repository_visibility_name == 'public',
    }
    return {'repository': repository_view, 'role': self.role_name}
|
||||
|
||||
|
||||
class Team(namedtuple('Team', ['name', 'avatar'])):
  """
  Team represents a team entry for a robot list entry.
  :type name: string
  :type avatar: {string -> string}
  """

  def to_dict(self):
    serialized = {'name': self.name, 'avatar': self.avatar}
    return serialized
|
||||
|
||||
|
||||
class RobotWithPermissions(
  namedtuple('RobotWithPermissions', [
    'name',
    'password',
    'created',
    'last_accessed',
    'teams',
    'repository_names',
    'description',
  ])):
  """
  RobotWithPermissions is a list of robot entries.
  :type name: string
  :type password: string
  :type created: datetime|None
  :type last_accessed: datetime|None
  :type teams: [Team]
  :type repository_names: [string]
  :type description: string
  """

  def to_dict(self, include_token=False):
    def _maybe_format(dt):
      # Timestamps are optional; serialize a missing one as None.
      return format_date(dt) if dt is not None else None

    serialized = {
      'name': self.name,
      'created': _maybe_format(self.created),
      'last_accessed': _maybe_format(self.last_accessed),
      'teams': [team.to_dict() for team in self.teams],
      'repositories': self.repository_names,
      'description': self.description,
    }

    # The token is sensitive; only expose it when explicitly requested.
    if include_token:
      serialized['token'] = self.password

    return serialized
|
||||
|
||||
|
||||
class Robot(
  namedtuple('Robot', [
    'name',
    'password',
    'created',
    'last_accessed',
    'description',
    'unstructured_metadata',
  ])):
  """
  Robot represents a robot entity.
  :type name: string
  :type password: string
  :type created: datetime|None
  :type last_accessed: datetime|None
  :type description: string
  :type unstructured_metadata: dict
  """

  def to_dict(self, include_metadata=False, include_token=False):
    def _maybe_format(dt):
      # Timestamps are optional; serialize a missing one as None.
      return format_date(dt) if dt is not None else None

    serialized = {
      'name': self.name,
      'created': _maybe_format(self.created),
      'last_accessed': _maybe_format(self.last_accessed),
      'description': self.description,
    }

    # The token is sensitive; only expose it when explicitly requested.
    if include_token:
      serialized['token'] = self.password

    if include_metadata:
      serialized['unstructured_metadata'] = self.unstructured_metadata

    return serialized
|
||||
|
||||
|
||||
@add_metaclass(ABCMeta)
class RobotInterface(object):
  """
  Interface that represents all data store interactions required by the Robot API
  """

  @abstractmethod
  def get_org_robot(self, robot_shortname, orgname):
    """
    Looks up the named robot under the given organization.

    Returns:
      Robot object
    """

  @abstractmethod
  def get_user_robot(self, robot_shortname, owning_user):
    """
    Looks up the named robot under the given user.

    Returns:
      Robot object
    """

  @abstractmethod
  def create_user_robot(self, robot_shortname, owning_user):
    """
    Creates a robot under the given user.

    NOTE(review): the API handlers in robot.py invoke the model with two extra
    positional arguments (description, unstructured_metadata); confirm the
    implementation's signature and update this abstract declaration to match.

    Returns:
      Robot object
    """

  @abstractmethod
  def create_org_robot(self, robot_shortname, orgname):
    """
    Creates a robot under the given organization.

    NOTE(review): the API handlers in robot.py invoke the model with two extra
    positional arguments (description, unstructured_metadata); confirm the
    implementation's signature and update this abstract declaration to match.

    Returns:
      Robot object
    """

  @abstractmethod
  def delete_robot(self, robot_username):
    """
    Deletes the robot with the given fully-qualified username.

    Returns:
      Robot object
    """

  @abstractmethod
  def regenerate_user_robot_token(self, robot_shortname, owning_user):
    """
    Replaces the token of the named robot under the given user.

    Returns:
      Robot object
    """

  @abstractmethod
  def regenerate_org_robot_token(self, robot_shortname, orgname):
    """
    Replaces the token of the named robot under the given organization.

    Returns:
      Robot object
    """

  @abstractmethod
  def list_entity_robot_permission_teams(self, prefix, include_permissions=False,
                                         include_token=False, limit=None):
    """
    Lists the robots under the given entity prefix, optionally with their
    permissions and tokens, up to the given limit.

    Returns:
      list of RobotWithPermissions objects
    """

  @abstractmethod
  def list_robot_permissions(self, username):
    """
    Lists the repository permissions held by the named robot.

    Returns:
      list of Robot objects
    """
|
123
endpoints/api/robot_models_pre_oci.py
Normal file
123
endpoints/api/robot_models_pre_oci.py
Normal file
|
@ -0,0 +1,123 @@
|
|||
import features
|
||||
|
||||
from app import avatar
|
||||
from data import model
|
||||
from active_migration import ActiveDataMigration, ERTMigrationFlags
|
||||
from data.database import (User, FederatedLogin, RobotAccountToken, Team as TeamTable, Repository,
|
||||
RobotAccountMetadata)
|
||||
from endpoints.api.robot_models_interface import (RobotInterface, Robot, RobotWithPermissions, Team,
|
||||
Permission)
|
||||
|
||||
|
||||
class RobotPreOCIModel(RobotInterface):
  """ Pre-OCI database-backed implementation of the Robot API data model. """

  def list_robot_permissions(self, username):
    # Maps each repository permission row for the robot into a lightweight
    # Permission tuple (repo name, visibility name, role name).
    permissions = model.permission.list_robot_permissions(username)
    return [Permission(permission.repository.name, permission.repository.visibility.name, permission.role.name) for
            permission in permissions]

  def list_entity_robot_permission_teams(self, prefix, include_token=False,
                                         include_permissions=False, limit=None):
    # NOTE(review): keyword order here (include_token, include_permissions) differs
    # from RobotInterface's declaration; callers must use keyword arguments.
    #
    # The query returns one row per (robot, team, repository) combination, so the
    # loop below de-duplicates: robot metadata is captured the first time a robot
    # is seen, and subsequent rows only contribute team/repository entries.
    tuples = model.user.list_entity_robot_permission_teams(prefix, limit=limit,
                                                           include_permissions=include_permissions)
    robots = {}
    robot_teams = set()  # keys "robotname:teamname" already added to a robot's team list

    for robot_tuple in tuples:
      robot_name = robot_tuple.get(User.username)
      if robot_name not in robots:
        token = None
        if include_token:
          # TODO(remove-unenc): Remove branches once migrated.
          # Prefer the encrypted RobotAccountToken; fall back to the legacy
          # FederatedLogin field only while the old-fields migration flag is on.
          if robot_tuple.get(RobotAccountToken.token):
            token = robot_tuple.get(RobotAccountToken.token).decrypt()

          if token is None and ActiveDataMigration.has_flag(ERTMigrationFlags.READ_OLD_FIELDS):
            token = robot_tuple.get(FederatedLogin.service_ident)
            # NOTE(review): `assert` is stripped under `python -O`; this guards
            # against accidentally exposing the internal 'robot:' marker as a token.
            assert not token.startswith('robot:')

        robot_dict = {
          'name': robot_name,
          'token': token,
          'created': robot_tuple.get(User.creation_date),
          'last_accessed': (robot_tuple.get(User.last_accessed)
                            if features.USER_LAST_ACCESSED else None),
          'description': robot_tuple.get(RobotAccountMetadata.description),
          'unstructured_metadata': robot_tuple.get(RobotAccountMetadata.unstructured_json),
        }

        if include_permissions:
          robot_dict.update({
            'teams': [],
            'repositories': [],
          })

      # Every row (re)builds the entry; when permissions are included, the entry is
      # immediately replaced below by a RobotWithPermissions carrying the
      # accumulated team/repository lists.
      robots[robot_name] = Robot(robot_dict['name'], robot_dict['token'], robot_dict['created'],
                                 robot_dict['last_accessed'], robot_dict['description'],
                                 robot_dict['unstructured_metadata'])
      if include_permissions:
        team_name = robot_tuple.get(TeamTable.name)
        repository_name = robot_tuple.get(Repository.name)

        if team_name is not None:
          check_key = robot_name + ':' + team_name
          if check_key not in robot_teams:
            robot_teams.add(check_key)

            robot_dict['teams'].append(Team(
              team_name,
              avatar.get_data(team_name, team_name, 'team')
            ))

        if repository_name is not None:
          if repository_name not in robot_dict['repositories']:
            robot_dict['repositories'].append(repository_name)
        robots[robot_name] = RobotWithPermissions(robot_dict['name'], robot_dict['token'],
                                                  robot_dict['created'],
                                                  (robot_dict['last_accessed']
                                                   if features.USER_LAST_ACCESSED else None),
                                                  robot_dict['teams'],
                                                  robot_dict['repositories'],
                                                  robot_dict['description'])

    return robots.values()

  def regenerate_user_robot_token(self, robot_shortname, owning_user):
    # Returns the robot with its newly generated password/token.
    robot, password, metadata = model.user.regenerate_robot_token(robot_shortname, owning_user)
    return Robot(robot.username, password, robot.creation_date, robot.last_accessed,
                 metadata.description, metadata.unstructured_json)

  def regenerate_org_robot_token(self, robot_shortname, orgname):
    # Organizations are stored as users; resolve the org record first.
    parent = model.organization.get_organization(orgname)
    robot, password, metadata = model.user.regenerate_robot_token(robot_shortname, parent)
    return Robot(robot.username, password, robot.creation_date, robot.last_accessed,
                 metadata.description, metadata.unstructured_json)

  def delete_robot(self, robot_username):
    # Deletes by full username ("namespace+shortname"); returns nothing.
    model.user.delete_robot(robot_username)

  def create_user_robot(self, robot_shortname, owning_user, description, unstructured_metadata):
    # A None description is normalized to the empty string before storage.
    robot, password = model.user.create_robot(robot_shortname, owning_user, description or '',
                                              unstructured_metadata)
    return Robot(robot.username, password, robot.creation_date, robot.last_accessed,
                 description or '', unstructured_metadata)

  def create_org_robot(self, robot_shortname, orgname, description, unstructured_metadata):
    parent = model.organization.get_organization(orgname)
    robot, password = model.user.create_robot(robot_shortname, parent, description or '',
                                              unstructured_metadata)
    return Robot(robot.username, password, robot.creation_date, robot.last_accessed,
                 description or '', unstructured_metadata)

  def get_org_robot(self, robot_shortname, orgname):
    parent = model.organization.get_organization(orgname)
    robot, password, metadata = model.user.get_robot_and_metadata(robot_shortname, parent)
    return Robot(robot.username, password, robot.creation_date, robot.last_accessed,
                 metadata.description, metadata.unstructured_json)

  def get_user_robot(self, robot_shortname, owning_user):
    robot, password, metadata = model.user.get_robot_and_metadata(robot_shortname, owning_user)
    return Robot(robot.username, password, robot.creation_date, robot.last_accessed,
                 metadata.description, metadata.unstructured_json)


# Singleton instance used by the robot API endpoints.
pre_oci_model = RobotPreOCIModel()
|
382
endpoints/api/search.py
Normal file
382
endpoints/api/search.py
Normal file
|
@ -0,0 +1,382 @@
|
|||
""" Conduct searches against all registry context. """
|
||||
|
||||
import features
|
||||
|
||||
from endpoints.api import (ApiResource, parse_args, query_param, truthy_bool, nickname, resource,
|
||||
require_scope, path_param, internal_only, Unauthorized, InvalidRequest,
|
||||
show_if)
|
||||
from data.database import Repository
|
||||
from data import model
|
||||
from data.registry_model import registry_model
|
||||
from auth.permissions import (OrganizationMemberPermission, ReadRepositoryPermission,
|
||||
UserAdminPermission, AdministerOrganizationPermission,
|
||||
ReadRepositoryPermission)
|
||||
from auth.auth_context import get_authenticated_user
|
||||
from auth import scopes
|
||||
from app import app, avatar, authentication
|
||||
from flask import abort
|
||||
from operator import itemgetter
|
||||
from stringscore import liquidmetal
|
||||
from util.names import parse_robot_username
|
||||
|
||||
import anunidecode # Don't listen to pylint's lies. This import is required.
|
||||
import math
|
||||
|
||||
|
||||
# Base relevance scores per result kind: repositories outrank teams, which outrank
# plain entities. Each base score is later scaled by a name-similarity factor.
ENTITY_SEARCH_SCORE = 1
TEAM_SEARCH_SCORE = 2
REPOSITORY_SEARCH_SCORE = 4
|
||||
|
||||
|
||||
@resource('/v1/entities/link/<username>')
@internal_only
class LinkExternalEntity(ApiResource):
  """ Resource for linking external entities to internal users. """
  @nickname('linkExternalUser')
  def post(self, username):
    """ Links the given *external* username to an internal user record. """
    # Linking only makes sense when authentication is delegated to an external service.
    if not authentication.federated_service:
      abort(404)

    # Only allowed if there is a logged in user.
    if not get_authenticated_user():
      raise Unauthorized()

    # Try to link the user with the given *external* username, to an internal record.
    linked_user, err_msg = authentication.link_user(username)
    if linked_user is None:
      raise InvalidRequest(err_msg, payload={'username': username})

    entity = {
      'name': linked_user.username,
      'kind': 'user',
      'is_robot': False,
      'avatar': avatar.get_data_for_user(linked_user)
    }
    return {'entity': entity}
|
||||
|
||||
|
||||
@resource('/v1/entities/<prefix>')
class EntitySearch(ApiResource):
  """ Resource for searching entities (users, robots, teams and orgs) by name prefix. """
  @path_param('prefix', 'The prefix of the entities being looked up')
  @parse_args()
  @query_param('namespace', 'Namespace to use when querying for org entities.', type=str,
               default='')
  @query_param('includeTeams', 'Whether to include team names.', type=truthy_bool, default=False)
  @query_param('includeOrgs', 'Whether to include orgs names.', type=truthy_bool, default=False)
  @nickname('getMatchingEntities')
  def get(self, prefix, parsed_args):
    """ Get a list of entities that match the specified prefix. """

    # Ensure we don't have any unicode characters in the search, as it breaks the search. Nothing
    # being searched can have unicode in it anyway, so this is a safe operation.
    prefix = prefix.encode('unidecode', 'ignore').replace(' ', '').lower()

    teams = []
    org_data = []

    namespace_name = parsed_args['namespace']
    robot_namespace = None
    organization = None

    try:
      organization = model.organization.get_organization(namespace_name)

      # namespace name was an org
      permission = OrganizationMemberPermission(namespace_name)
      if permission.can():
        # Members may see the org's robots and (optionally) its teams.
        robot_namespace = namespace_name

        if parsed_args['includeTeams']:
          teams = model.team.get_matching_teams(prefix, organization)

      # BUGFIX: this condition previously tested the bare permission object
      # (`AdministerOrganizationPermission(namespace_name)`), which is always truthy,
      # making the admin check vacuous. `.can()` evaluates the permission as intended,
      # so the org entry is only returned to organization admins.
      if (parsed_args['includeOrgs'] and AdministerOrganizationPermission(namespace_name).can()
          and namespace_name.startswith(prefix)):
        org_data = [{
          'name': namespace_name,
          'kind': 'org',
          'is_org_member': True,
          'avatar': avatar.get_data_for_org(organization),
        }]

    except model.organization.InvalidOrganizationException:
      # namespace name was a user
      user = get_authenticated_user()
      if user and user.username == namespace_name:
        # Check if there is admin user permissions (login only)
        admin_permission = UserAdminPermission(user.username)
        if admin_permission.can():
          robot_namespace = namespace_name

    # Lookup users in the database for the prefix query.
    users = model.user.get_matching_users(prefix, robot_namespace, organization, limit=10,
                                          exact_matches_only=not features.PARTIAL_USER_AUTOCOMPLETE)

    # Lookup users via the user system for the prefix query. We'll filter out any users that
    # already exist in the database.
    external_users, federated_id, _ = authentication.query_users(prefix, limit=10)
    filtered_external_users = []
    if external_users and federated_id is not None:
      users = list(users)
      user_ids = [user.id for user in users]

      # Filter the users if any are already found via the database. We do so by looking up all
      # the found users in the federated user system.
      federated_query = model.user.get_federated_logins(user_ids, federated_id)
      found = {result.service_ident for result in federated_query}
      filtered_external_users = [user for user in external_users if user.username not in found]

    def entity_team_view(team):
      # Teams are only returned for orgs the caller is a member of, so is_org_member
      # is always True here.
      result = {
        'name': team.name,
        'kind': 'team',
        'is_org_member': True,
        'avatar': avatar.get_data_for_team(team)
      }
      return result

    def user_view(user):
      user_json = {
        'name': user.username,
        'kind': 'user',
        'is_robot': user.robot,
        'avatar': avatar.get_data_for_user(user)
      }

      if organization is not None:
        user_json['is_org_member'] = user.robot or user.is_org_member

      return user_json

    def external_view(user):
      result = {
        'name': user.username,
        'kind': 'external',
        'title': user.email or '',
        'avatar': avatar.get_data_for_external_user(user)
      }
      return result

    team_data = [entity_team_view(team) for team in teams]
    user_data = [user_view(user) for user in users]
    external_data = [external_view(user) for user in filtered_external_users]

    return {
      'results': team_data + user_data + org_data + external_data
    }
|
||||
|
||||
|
||||
def search_entity_view(username, entity, get_short_name=None):
  """ Builds the search-result dict for a user, organization or robot entity.

      `username` is the viewing user's username and only affects the href of robot
      results (robots owned by the viewer link to the user page, others to the org
      page). `get_short_name`, when given, adds a 'short_name' key computed from
      the entity's username.
  """
  # Default to a plain user entity; the org/robot branches below override these.
  kind, title = 'user', 'user'
  avatar_data = avatar.get_data_for_user(entity)
  href = '/user/' + entity.username

  if entity.organization:
    kind, title = 'organization', 'org'
    avatar_data = avatar.get_data_for_org(entity)
    href = '/organization/' + entity.username
  elif entity.robot:
    kind, title = 'robot', 'robot'
    avatar_data = None
    owner = parse_robot_username(entity.username)[0]
    robot_suffix = '?tab=robots&showRobot=' + entity.username
    if owner == username:
      href = '/user/' + username + robot_suffix
    else:
      href = '/organization/' + owner + robot_suffix

  view = {
    'title': title,
    'kind': kind,
    'avatar': avatar_data,
    'name': entity.username,
    'score': ENTITY_SEARCH_SCORE,
    'href': href
  }

  if get_short_name:
    view['short_name'] = get_short_name(entity.username)

  return view
|
||||
|
||||
|
||||
def conduct_team_search(username, query, encountered_teams, results):
  """ Appends the matching teams in which the user is a member to `results`.

      Teams whose ids are already in `encountered_teams` are skipped; newly seen
      ids are added to that set so sibling searches do not produce duplicates.
  """
  for team in model.team.get_matching_user_teams(query, get_authenticated_user(), limit=5):
    if team.id in encountered_teams:
      continue
    encountered_teams.add(team.id)

    results.append({
      'kind': 'team',
      'name': team.name,
      'organization': search_entity_view(username, team.organization),
      'avatar': avatar.get_data_for_team(team),
      'score': TEAM_SEARCH_SCORE,
      'href': '/organization/' + team.organization.username + '/teams/' + team.name
    })
|
||||
|
||||
|
||||
def conduct_admined_team_search(username, query, encountered_teams, results):
  """ Appends matching teams from orgs admined by the user to `results`.

      Shares `encountered_teams` with conduct_team_search so a team found by both
      searches appears only once.
  """
  for team in model.team.get_matching_admined_teams(query, get_authenticated_user(), limit=5):
    if team.id in encountered_teams:
      continue
    encountered_teams.add(team.id)

    results.append({
      'kind': 'team',
      'name': team.name,
      'organization': search_entity_view(username, team.organization),
      'avatar': avatar.get_data_for_team(team),
      'score': TEAM_SEARCH_SCORE,
      'href': '/organization/' + team.organization.username + '/teams/' + team.name
    })
|
||||
|
||||
|
||||
def conduct_repo_search(username, query, results, offset=0, limit=5):
  """ Appends repositories matching the query (and visible to `username`) to `results`. """
  matching_repos = model.repository.get_filtered_matching_repositories(query, username, limit=limit,
                                                                       repo_kind=None,
                                                                       offset=offset)

  # TODO: make sure the repo.kind.name doesn't cause extra queries
  results.extend(repo_result_view(repo, username) for repo in matching_repos)
|
||||
|
||||
|
||||
def conduct_namespace_search(username, query, results):
  """ Appends matching user and organization namespaces to `results`. """
  for namespace in model.user.get_matching_user_namespaces(query, username, limit=5):
    results.append(search_entity_view(username, namespace))
|
||||
|
||||
|
||||
def conduct_robot_search(username, query, results):
  """ Appends matching robot accounts to `results`.

      The short name shown for a robot is the part after the '+' in its full
      "namespace+shortname" username.
  """
  def robot_short_name(name):
    return parse_robot_username(name)[1]

  for robot in model.user.get_matching_robots(query, username, limit=5):
    results.append(search_entity_view(username, robot, robot_short_name))
|
||||
|
||||
|
||||
def repo_result_view(repo, username, last_modified=None, stars=None, popularity=None):
  """ Builds the search-result dict for a repository or application.

      The optional last-modified timestamp, star count and popularity score are
      included only when provided by the caller.
  """
  is_app = Repository.kind.get_name(repo.kind_id) == 'application'
  kind = 'application' if is_app else 'repository'

  view = {
    'kind': kind,
    'title': 'app' if is_app else 'repo',
    'namespace': search_entity_view(username, repo.namespace_user),
    'name': repo.name,
    'description': repo.description,
    'is_public': model.repository.is_repository_public(repo),
    'score': REPOSITORY_SEARCH_SCORE,
    'href': '/' + kind + '/' + repo.namespace_user.username + '/' + repo.name
  }

  # Only attach the optional metrics that were actually supplied.
  for key, value in (('last_modified', last_modified),
                     ('stars', stars),
                     ('popularity', popularity)):
    if value is not None:
      view[key] = value

  return view
|
||||
|
||||
@resource('/v1/find/all')
class ConductSearch(ApiResource):
  """ Resource for finding users, repositories, teams, etc. """
  @parse_args()
  @query_param('query', 'The search query.', type=str, default='')
  @require_scope(scopes.READ_REPO)
  @nickname('conductSearch')
  def get(self, parsed_args):
    """ Get a list of entities and resources that match the specified query. """
    query = parsed_args['query']
    if not query:
      return {'results': []}

    current_user = get_authenticated_user()
    username = current_user.username if current_user else None
    results = []

    # Search for teams (member teams first, then teams in admined orgs), sharing
    # the encountered set so no team appears twice.
    encountered_teams = set()
    conduct_team_search(username, query, encountered_teams, results)
    conduct_admined_team_search(username, query, encountered_teams, results)

    # Search for robot accounts.
    conduct_robot_search(username, query, results)

    # Search for repos.
    conduct_repo_search(username, query, results)

    # Search for users and orgs.
    conduct_namespace_search(username, query, results)

    # Scale each result's base score by how closely the query matches its
    # (short) name; unmatched names get a neutral 0.5 factor.
    for result in results:
      candidate_name = result.get('short_name', result['name'])
      result['score'] *= liquidmetal.score(candidate_name, query) or 0.5

    results.sort(key=itemgetter('score'), reverse=True)
    return {'results': results}
|
||||
|
||||
|
||||
# Pagination knobs for repository search: results per page, and the maximum page
# number a client may request. Both are overridable via app config.
MAX_PER_PAGE = app.config.get('SEARCH_RESULTS_PER_PAGE', 10)
MAX_RESULT_PAGE_COUNT = app.config.get('SEARCH_MAX_RESULT_PAGE_COUNT', 10)
|
||||
|
||||
@resource('/v1/find/repositories')
class ConductRepositorySearch(ApiResource):
  """ Resource for finding repositories. """
  @parse_args()
  @query_param('query', 'The search query.', type=str, default='')
  @query_param('page', 'The page.', type=int, default=1)
  @nickname('conductRepoSearch')
  def get(self, parsed_args):
    """ Get a list of apps and repositories that match the specified query. """
    query = parsed_args['query']
    # Clamp the requested page into [1, MAX_RESULT_PAGE_COUNT].
    page = min(max(1, parsed_args['page']), MAX_RESULT_PAGE_COUNT)
    offset = (page - 1) * MAX_PER_PAGE
    # One extra row past the page is requested so has_additional can be computed.
    # NOTE(review): the limit includes the offset (offset + page size + 1) rather
    # than just MAX_PER_PAGE + 1 — presumably required by how
    # get_filtered_matching_repositories applies limit/offset internally; confirm
    # before changing.
    limit = offset + MAX_PER_PAGE + 1

    username = get_authenticated_user().username if get_authenticated_user() else None

    # Lookup matching repositories.
    matching_repos = list(model.repository.get_filtered_matching_repositories(query, username,
                                                                              repo_kind=None,
                                                                              limit=limit,
                                                                              offset=offset))

    # Load secondary information such as last modified time, star count and action count.
    repository_ids = [repo.id for repo in matching_repos]
    last_modified_map = registry_model.get_most_recent_tag_lifetime_start(matching_repos)
    star_map = model.repository.get_stars(repository_ids)
    action_sum_map = model.log.get_repositories_action_sums(repository_ids)

    # Build the results list.
    results = [repo_result_view(repo, username, last_modified_map.get(repo.id),
                                star_map.get(repo.id, 0),
                                float(action_sum_map.get(repo.id, 0)))
               for repo in matching_repos]

    # Trim the extra row used for the has_additional check before responding.
    return {
      'results': results[0:MAX_PER_PAGE],
      'has_additional': len(results) > MAX_PER_PAGE,
      'page': page,
      'page_size': MAX_PER_PAGE,
      'start_index': offset,
    }
|
108
endpoints/api/secscan.py
Normal file
108
endpoints/api/secscan.py
Normal file
|
@ -0,0 +1,108 @@
|
|||
""" List and manage repository vulnerabilities and other security information. """
|
||||
|
||||
import logging
|
||||
import features
|
||||
|
||||
from app import app, secscan_api
|
||||
from auth.decorators import process_basic_auth_no_pass
|
||||
from data.registry_model import registry_model
|
||||
from data.registry_model.datatypes import SecurityScanStatus
|
||||
from endpoints.api import (require_repo_read, path_param,
|
||||
RepositoryParamResource, resource, nickname, show_if, parse_args,
|
||||
query_param, truthy_bool, disallow_for_app_repositories)
|
||||
from endpoints.exception import NotFound, DownstreamIssue
|
||||
from endpoints.api.manifest import MANIFEST_DIGEST_ROUTE
|
||||
from util.secscan.api import APIRequestFailure
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
def _security_info(manifest_or_legacy_image, include_vulnerabilities=True):
  """ Returns a dict representing the result of a call to the security status API for the given
      manifest or image.

      The returned dict always carries a 'status' key; scan data under 'data' is
      only present when the item has been scanned and the scanner returned layer
      data. Raises NotFound when the item has no security status at all, and
      DownstreamIssue when the security scanner API call fails.
  """
  status = registry_model.get_security_status(manifest_or_legacy_image)
  if status is None:
    raise NotFound()

  # Anything not yet scanned (queued, failed, unsupported, ...) is reported with
  # its raw status and no data.
  if status != SecurityScanStatus.SCANNED:
    return {
      'status': status.value,
    }

  try:
    if include_vulnerabilities:
      data = secscan_api.get_layer_data(manifest_or_legacy_image, include_vulnerabilities=True)
    else:
      data = secscan_api.get_layer_data(manifest_or_legacy_image, include_features=True)
  except APIRequestFailure as arf:
    # NOTE(review): `arf.message` relies on the Python 2 Exception.message
    # attribute (or one defined by APIRequestFailure) — verify before a py3 port.
    raise DownstreamIssue(arf.message)

  if data is None:
    # If no data was found but we reached this point, then it indicates we have incorrect security
    # status for the manifest or legacy image. Mark the manifest or legacy image as unindexed
    # so it automatically gets re-indexed.
    if app.config.get('REGISTRY_STATE', 'normal') == 'normal':
      # Only reset when the registry is writable (not in read-only maintenance mode).
      registry_model.reset_security_status(manifest_or_legacy_image)

    return {
      'status': SecurityScanStatus.QUEUED.value,
    }

  return {
    'status': status.value,
    'data': data,
  }
|
||||
|
||||
|
||||
@resource('/v1/repository/<apirepopath:repository>/image/<imageid>/security')
@show_if(features.SECURITY_SCANNER)
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
@path_param('imageid', 'The image ID')
class RepositoryImageSecurity(RepositoryParamResource):
  """ Operations for managing the vulnerabilities in a repository image. """

  @process_basic_auth_no_pass
  @require_repo_read
  @nickname('getRepoImageSecurity')
  @disallow_for_app_repositories
  @parse_args()
  @query_param('vulnerabilities', 'Include vulnerabilities informations', type=truthy_bool,
               default=False)
  def get(self, namespace, repository, imageid, parsed_args):
    """ Fetches the features and vulnerabilities (if any) for a repository image. """
    repository_ref = registry_model.lookup_repository(namespace, repository)
    if repository_ref is None:
      raise NotFound()

    image = registry_model.get_legacy_image(repository_ref, imageid)
    if image is None:
      raise NotFound()

    return _security_info(image, parsed_args.vulnerabilities)
|
||||
|
||||
|
||||
@resource(MANIFEST_DIGEST_ROUTE + '/security')
@show_if(features.SECURITY_SCANNER)
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
@path_param('manifestref', 'The digest of the manifest')
class RepositoryManifestSecurity(RepositoryParamResource):
  """ Operations for managing the vulnerabilities in a repository manifest. """

  @process_basic_auth_no_pass
  @require_repo_read
  @nickname('getRepoManifestSecurity')
  @disallow_for_app_repositories
  @parse_args()
  @query_param('vulnerabilities', 'Include vulnerabilities informations', type=truthy_bool,
               default=False)
  def get(self, namespace, repository, manifestref, parsed_args):
    """ Fetches the features and vulnerabilities (if any) for a repository manifest. """
    repository_ref = registry_model.lookup_repository(namespace, repository)
    if repository_ref is None:
      raise NotFound()

    # allow_dead=True permits lookup of manifests no longer referenced by a live tag.
    manifest = registry_model.lookup_manifest_by_digest(repository_ref, manifestref,
                                                        allow_dead=True)
    if manifest is None:
      raise NotFound()

    return _security_info(manifest, parsed_args.vulnerabilities)
|
29
endpoints/api/signing.py
Normal file
29
endpoints/api/signing.py
Normal file
|
@ -0,0 +1,29 @@
|
|||
""" List and manage repository signing information """
|
||||
|
||||
import logging
|
||||
import features
|
||||
|
||||
from app import tuf_metadata_api
|
||||
from endpoints.api import (require_repo_read, path_param,
|
||||
RepositoryParamResource, resource, nickname, show_if,
|
||||
disallow_for_app_repositories, NotFound)
|
||||
from endpoints.api.signing_models_pre_oci import pre_oci_model as model
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@resource('/v1/repository/<apirepopath:repository>/signatures')
@show_if(features.SIGNING)
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
class RepositorySignatures(RepositoryParamResource):
  """ Operations for managing the signatures in a repository image. """

  @require_repo_read
  @nickname('getRepoSignatures')
  @disallow_for_app_repositories
  def get(self, namespace, repository):
    """ Fetches the list of signed tags for the repository. """
    # Signing data only exists for repositories with trust enabled.
    if not model.is_trust_enabled(namespace, repository):
      raise NotFound()

    delegations = tuf_metadata_api.get_all_tags_with_expiration(namespace, repository)
    return {'delegations': delegations}
|
14
endpoints/api/signing_models_interface.py
Normal file
14
endpoints/api/signing_models_interface.py
Normal file
|
@ -0,0 +1,14 @@
|
|||
from abc import ABCMeta, abstractmethod
|
||||
from six import add_metaclass
|
||||
|
||||
@add_metaclass(ABCMeta)
class SigningInterface(object):
  """
  Interface that represents all data store interactions required by the signing API endpoint.
  """
  @abstractmethod
  def is_trust_enabled(self, namespace_name, repo_name):
    """
    Returns whether the repository with the given namespace name and repository name exists and
    has trust enabled. A missing repository is reported as not trust-enabled rather than
    raising.
    """
|
18
endpoints/api/signing_models_pre_oci.py
Normal file
18
endpoints/api/signing_models_pre_oci.py
Normal file
|
@ -0,0 +1,18 @@
|
|||
from data import model
|
||||
from endpoints.api.signing_models_interface import SigningInterface
|
||||
|
||||
|
||||
class PreOCIModel(SigningInterface):
  """
  PreOCIModel implements the data model for signing using a database schema
  before it was changed to support the OCI specification.
  """
  def is_trust_enabled(self, namespace_name, repo_name):
    """ Returns the repository's trust flag, or False when the repository does not exist. """
    found = model.repository.get_repository(namespace_name, repo_name)
    # `and` short-circuits to False for a missing repo and otherwise yields the
    # stored trust_enabled value, matching the original None-check.
    return found is not None and found.trust_enabled


pre_oci_model = PreOCIModel()
|
135
endpoints/api/subscribe.py
Normal file
135
endpoints/api/subscribe.py
Normal file
|
@ -0,0 +1,135 @@
|
|||
""" Subscribe to plans. """
|
||||
import logging
|
||||
import stripe
|
||||
import features
|
||||
from app import billing
|
||||
from endpoints.api import request_error, log_action
|
||||
from data.billing import PLANS
|
||||
from endpoints.api.subscribe_models_pre_oci import data_model as model
|
||||
from endpoints.exception import NotFound
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def check_repository_usage(user_or_org, plan_found):
  """ Reconciles the namespace's over-quota notification with its private repo usage.

      Creates the 'over_private_usage' notification when the namespace holds more
      private repositories than its plan allows (no plan means zero allowed), and
      removes any such notification otherwise.
  """
  private_repos = model.get_private_repo_count(user_or_org.username)
  repos_allowed = 0 if plan_found is None else plan_found['privateRepos']

  if private_repos > repos_allowed:
    model.create_unique_notification('over_private_usage', user_or_org.username,
                                     {'namespace': user_or_org.username})
  else:
    model.delete_notifications_by_kind(user_or_org.username, 'over_private_usage')
|
||||
|
||||
|
||||
def carderror_response(exc):
  """ Builds the 402 (payment required) response body for a Stripe card error.

      Uses the exception's `message` attribute when present (Python 2 / older
      Stripe errors carry one) and falls back to `str(exc)` otherwise, since
      `Exception.message` no longer exists on Python 3.
  """
  return {'carderror': getattr(exc, 'message', str(exc))}, 402
|
||||
|
||||
def connection_response(exc):
  """ Builds the 503 response returned when Stripe cannot be contacted.

      The exception is accepted for interface symmetry with carderror_response but
      is not included in the response body.
  """
  message = 'Could not contact Stripe. Please try again.'
  return {'message': message}, 503
|
||||
|
||||
|
||||
def subscription_view(stripe_subscription, used_repos):
  """ Serializes a Stripe subscription (plus the caller-supplied private repo
      usage count) for an API response.
  """
  return {
    'hasSubscription': True,
    'isExistingCustomer': True,
    'currentPeriodStart': stripe_subscription.current_period_start,
    'currentPeriodEnd': stripe_subscription.current_period_end,
    'plan': stripe_subscription.plan.id,
    'usedPrivateRepos': used_repos,
    'trialStart': stripe_subscription.trial_start,
    'trialEnd': stripe_subscription.trial_end
  }
|
||||
|
||||
|
||||
def subscribe(user, plan, token, require_business_plan):
  """ Subscribe `user` to the Stripe plan named `plan`.

  Args:
    user: the user or organization changing plans; `user.stripe_id` decides
      whether a Stripe customer already exists.
    plan: the Stripe plan id to subscribe to.
    token: an optional Stripe card token to attach to the customer.
    require_business_plan: when True, reject personal (non-business) paid plans.

  Returns:
    A (json_body, status_code) tuple, or None when billing is disabled.
    Stripe card/connection failures are returned as error tuples via
    carderror_response / connection_response.

  Raises:
    NotFound: when the plan id is unknown or deprecated.
  """
  # Billing disabled: nothing to do (returns None).
  if not features.BILLING:
    return

  # Resolve the plan id against the static PLANS table.
  plan_found = None
  for plan_obj in PLANS:
    if plan_obj['stripeId'] == plan:
      plan_found = plan_obj

  if not plan_found or plan_found['deprecated']:
    logger.warning('Plan not found or deprecated: %s', plan)
    raise NotFound()

  # Free plans are always allowed, even when a business plan is required.
  if (require_business_plan and not plan_found['bus_features'] and not
      plan_found['price'] == 0):
    logger.warning('Business attempting to subscribe to personal plan: %s',
                   user.username)
    raise request_error(message='No matching plan found')

  private_repos = model.get_private_repo_count(user.username)

  # This is the default response
  response_json = {
    'plan': plan,
    'usedPrivateRepos': private_repos,
  }
  status_code = 200

  if not user.stripe_id:
    # Check if a non-paying user is trying to subscribe to a free plan
    if not plan_found['price'] == 0:
      # They want a real paying plan, create the customer and plan
      # simultaneously
      card = token

      try:
        cus = billing.Customer.create(email=user.email, plan=plan, card=card)
        user.stripe_id = cus.id
        user.save()
        check_repository_usage(user, plan_found)
        log_action('account_change_plan', user.username, {'plan': plan})
      except stripe.error.CardError as e:
        return carderror_response(e)
      except stripe.error.APIConnectionError as e:
        return connection_response(e)

      # New paid subscription: report 201 Created.
      response_json = subscription_view(cus.subscription, private_repos)
      status_code = 201

  else:
    # Change the plan
    try:
      cus = billing.Customer.retrieve(user.stripe_id)
    except stripe.error.APIConnectionError as e:
      return connection_response(e)

    if plan_found['price'] == 0:
      if cus.subscription is not None:
        # We only have to cancel the subscription if they actually have one
        try:
          cus.subscription.delete()
        except stripe.error.APIConnectionError as e:
          return connection_response(e)

        check_repository_usage(user, plan_found)
        log_action('account_change_plan', user.username, {'plan': plan})

    else:
      # User may have been a previous customer who is resubscribing
      if token:
        cus.card = token

      cus.plan = plan

      try:
        cus.save()
      except stripe.error.CardError as e:
        return carderror_response(e)
      except stripe.error.APIConnectionError as e:
        return connection_response(e)

      response_json = subscription_view(cus.subscription, private_repos)
      check_repository_usage(user, plan_found)
      log_action('account_change_plan', user.username, {'plan': plan})

  return response_json, status_code
|
26
endpoints/api/subscribe_models_interface.py
Normal file
26
endpoints/api/subscribe_models_interface.py
Normal file
|
@ -0,0 +1,26 @@
|
|||
from abc import ABCMeta, abstractmethod
|
||||
from six import add_metaclass
|
||||
|
||||
|
||||
@add_metaclass(ABCMeta)
class SubscribeInterface(object):
  """
  Interface that represents all data store interactions required by the subscribe API endpoint.
  """
  @abstractmethod
  def get_private_repo_count(self, username):
    """
    Returns the number of private repositories for a given username or namespace.
    """

  @abstractmethod
  # NOTE(review): mutable default argument; harmless on an abstract signature,
  # but implementations should avoid sharing the dict across calls.
  def create_unique_notification(self, kind_name, target_username, metadata={}):
    """
    Creates a notification using the given parameters.
    """

  @abstractmethod
  def delete_notifications_by_kind(self, target_username, kind_name):
    """
    Remove notifications for a target based on given kind.
    """
|
23
endpoints/api/subscribe_models_pre_oci.py
Normal file
23
endpoints/api/subscribe_models_pre_oci.py
Normal file
|
@ -0,0 +1,23 @@
|
|||
from data.model.notification import create_unique_notification, delete_notifications_by_kind
|
||||
from data.model.user import get_private_repo_count, get_user_or_org
|
||||
from endpoints.api.subscribe_models_interface import SubscribeInterface
|
||||
|
||||
|
||||
class PreOCIModel(SubscribeInterface):
  """
  PreOCIModel implements the data model for build triggers using a database schema
  before it was changed to support the OCI specification.
  """
  def get_private_repo_count(self, username):
    """ Return the number of private repositories owned by the given user or org. """
    return get_private_repo_count(username)

  def create_unique_notification(self, kind_name, target_username, metadata=None):
    """ Create a unique notification of `kind_name` for the named user or org.

    `metadata` defaults to an empty dict. The previous `metadata={}` default
    was a shared mutable default argument; using None here preserves the old
    behavior (an empty dict is passed downstream) without the sharing pitfall.
    """
    target = get_user_or_org(target_username)
    create_unique_notification(kind_name, target, metadata if metadata is not None else {})

  def delete_notifications_by_kind(self, target_username, kind_name):
    """ Delete all notifications of `kind_name` for the named user or org. """
    target = get_user_or_org(target_username)
    delete_notifications_by_kind(target, kind_name)


data_model = PreOCIModel()
|
104
endpoints/api/suconfig.py
Normal file
104
endpoints/api/suconfig.py
Normal file
|
@ -0,0 +1,104 @@
|
|||
""" Superuser Config API. """
|
||||
|
||||
import logging
|
||||
import os
|
||||
import signal
|
||||
import subprocess
|
||||
|
||||
from flask import abort
|
||||
|
||||
from app import app, config_provider
|
||||
from auth.permissions import SuperUserPermission
|
||||
from endpoints.api.suconfig_models_pre_oci import pre_oci_model as model
|
||||
from endpoints.api import (ApiResource, nickname, resource, internal_only, show_if, verify_not_prod)
|
||||
|
||||
import features
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def database_is_valid():
  """ Returns whether the database, as configured, is valid. """
  # Under tests the database is always reported invalid so the setup flow
  # can be exercised.
  return False if app.config['TESTING'] else model.is_valid()
|
||||
|
||||
|
||||
def database_has_users():
  """ Returns whether the database has any users defined. """
  any_users = model.has_users()
  return any_users
|
||||
|
||||
|
||||
@resource('/v1/superuser/registrystatus')
@internal_only
@show_if(features.SUPER_USERS)
class SuperUserRegistryStatus(ApiResource):
  """ Resource for determining the status of the registry, such as if config exists,
      if a database is configured, and if it has any defined users.
  """
  @nickname('scRegistryStatus')
  @verify_not_prod
  def get(self):
    """ Returns the status of the registry. """
    # Until SETUP_COMPLETE is set in the config, report that setup is pending.
    if not app.config.get('SETUP_COMPLETE', False):
      return {
        'status': 'setup-incomplete'
      }

    # SETUP_COMPLETE is set: the registry is ready to serve.
    return {
      'provider_id': config_provider.provider_id,
      'status': 'ready'
    }
|
||||
|
||||
|
||||
class _AlembicLogHandler(logging.Handler):
  """ Logging handler that buffers every emitted record as a small dict
      ({'level', 'message'}), in emission order, for later inspection.
  """
  def __init__(self):
    super(_AlembicLogHandler, self).__init__()
    # Collected record dicts, oldest first.
    self.records = []

  def emit(self, record):
    entry = {
      'level': record.levelname,
      'message': record.getMessage(),
    }
    self.records.append(entry)
|
||||
|
||||
# From: https://stackoverflow.com/a/44712205
def get_process_id(name):
  """Return process ids found by (partial) name or regex.

  >>> get_process_id('kthreadd')
  [2]
  >>> get_process_id('watchdog')
  [10, 11, 16, 21, 26, 31, 36, 41, 46, 51, 56, 61] # ymmv
  >>> get_process_id('non-existent process')
  []
  """
  # Delegate the matching to pgrep; shell=False with an argv list avoids
  # any shell interpretation of `name`.
  pgrep = subprocess.Popen(['pgrep', name], stdout=subprocess.PIPE, shell=False)
  stdout, _stderr = pgrep.communicate()
  return [int(pid) for pid in stdout.split()]
|
||||
|
||||
|
||||
@resource('/v1/superuser/shutdown')
@internal_only
@show_if(features.SUPER_USERS)
class SuperUserShutdown(ApiResource):
  """ Resource for sending a shutdown signal to the container. """

  @verify_not_prod
  @nickname('scShutdownContainer')
  def post(self):
    """ Sends a signal to the phusion init system to shut down the container. """
    # Note: This method is called to set the database configuration before super users exists,
    # so we also allow it to be called if there is no valid registry configuration setup.
    if app.config['TESTING'] or not database_has_users() or SuperUserPermission().can():
      # Note: We skip if debugging locally.
      if app.config.get('DEBUGGING') == True:
        return {}

      # NOTE(review): if no 'my_init' process is found this raises IndexError
      # (empty list) — presumably acceptable since the process always exists
      # inside the container; confirm before running elsewhere.
      os.kill(get_process_id('my_init')[0], signal.SIGINT)
      return {}

    abort(403)
|
39
endpoints/api/suconfig_models_interface.py
Normal file
39
endpoints/api/suconfig_models_interface.py
Normal file
|
@ -0,0 +1,39 @@
|
|||
from abc import ABCMeta, abstractmethod
|
||||
from six import add_metaclass
|
||||
|
||||
|
||||
@add_metaclass(ABCMeta)
class SuperuserConfigDataInterface(object):
  """
  Interface that represents all data store interactions required by the superuser config API.
  """

  @abstractmethod
  def is_valid(self):
    """
    Returns true if the configured database is valid.
    """

  @abstractmethod
  def has_users(self):
    """
    Returns true if there are any users defined.
    """

  @abstractmethod
  def create_superuser(self, username, password, email):
    """
    Creates a new superuser with the given username, password and email. Returns the user's UUID.
    """

  @abstractmethod
  def has_federated_login(self, username, service_name):
    """
    Returns true if the matching user has a federated login under the matching service.
    """

  @abstractmethod
  def attach_federated_login(self, username, service_name, federated_username):
    """
    Attaches a federated login to the matching user, under the given service.
    """
|
33
endpoints/api/suconfig_models_pre_oci.py
Normal file
33
endpoints/api/suconfig_models_pre_oci.py
Normal file
|
@ -0,0 +1,33 @@
|
|||
from data import model
|
||||
from data.database import User
|
||||
from endpoints.api.suconfig_models_interface import SuperuserConfigDataInterface
|
||||
|
||||
class PreOCIModel(SuperuserConfigDataInterface):
  """ Implementation of SuperuserConfigDataInterface backed by the pre-OCI database schema. """

  def is_valid(self):
    """ Return True if a trivial query against the User table succeeds. """
    try:
      list(User.select().limit(1))
      return True
    except Exception:
      # Previously a bare `except:`, which would also swallow SystemExit and
      # KeyboardInterrupt; narrowed so process-control exceptions propagate.
      return False

  def has_users(self):
    """ Return True if at least one user row exists. """
    return bool(list(User.select().limit(1)))

  def create_superuser(self, username, password, email):
    """ Create an auto-verified user and return its UUID. """
    return model.user.create_user(username, password, email, auto_verify=True).uuid

  def has_federated_login(self, username, service_name):
    """ Return True if `username` exists and has a federated login for the service. """
    user = model.user.get_user(username)
    if user is None:
      return False

    return bool(model.user.lookup_federated_login(user, service_name))

  def attach_federated_login(self, username, service_name, federated_username):
    """ Attach a federated login to `username`; returns False if the user is missing. """
    user = model.user.get_user(username)
    if user is None:
      return False

    model.user.attach_federated_login(user, service_name, federated_username)


pre_oci_model = PreOCIModel()
|
856
endpoints/api/superuser.py
Normal file
856
endpoints/api/superuser.py
Normal file
|
@ -0,0 +1,856 @@
|
|||
""" Superuser API. """
|
||||
import logging
|
||||
import os
|
||||
import string
|
||||
import socket
|
||||
|
||||
from datetime import datetime
|
||||
from random import SystemRandom
|
||||
|
||||
from flask import request, make_response, jsonify
|
||||
|
||||
import features
|
||||
|
||||
from app import app, avatar, superusers, authentication, config_provider
|
||||
from auth import scopes
|
||||
from auth.auth_context import get_authenticated_user
|
||||
from auth.permissions import SuperUserPermission
|
||||
from data.database import ServiceKeyApprovalType
|
||||
from data.logs_model import logs_model
|
||||
from endpoints.api import (ApiResource, nickname, resource, validate_json_request,
|
||||
internal_only, require_scope, show_if, parse_args,
|
||||
query_param, require_fresh_login, path_param, verify_not_prod,
|
||||
page_support, log_action, format_date, truthy_bool,
|
||||
InvalidRequest, NotFound, Unauthorized, InvalidResponse)
|
||||
from endpoints.api.build import get_logs_or_log_url
|
||||
from endpoints.api.superuser_models_pre_oci import (pre_oci_model, ServiceKeyDoesNotExist,
|
||||
ServiceKeyAlreadyApproved,
|
||||
InvalidRepositoryBuildException)
|
||||
from endpoints.api.logs import _validate_logs_arguments
|
||||
from util.request import get_request_ip
|
||||
from util.useremails import send_confirmation_email, send_recovery_email
|
||||
from util.validation import validate_service_key_name
|
||||
from _init import ROOT_DIR
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def get_immediate_subdirectories(directory):
  """ Return the names (not full paths) of the immediate child directories
      of `directory`, in os.listdir order.
  """
  children = os.listdir(directory)
  return [entry for entry in children if os.path.isdir(os.path.join(directory, entry))]
|
||||
|
||||
|
||||
def get_services():
  """ Return the set of system service names found on disk, excluding any
      services listed in the configured blacklist.
  """
  discovered = set(get_immediate_subdirectories(app.config['SYSTEM_SERVICES_PATH']))
  blacklisted = set(app.config['SYSTEM_SERVICE_BLACKLIST'])
  return discovered - blacklisted
|
||||
|
||||
|
||||
@resource('/v1/superuser/aggregatelogs')
@internal_only
class SuperUserAggregateLogs(ApiResource):
  """ Resource for fetching aggregated logs for the current user. """

  @require_fresh_login
  @verify_not_prod
  @nickname('listAllAggregateLogs')
  @parse_args()
  @query_param('starttime', 'Earliest time from which to get logs. (%m/%d/%Y %Z)', type=str)
  @query_param('endtime', 'Latest time to which to get logs. (%m/%d/%Y %Z)', type=str)
  def get(self, parsed_args):
    """ Returns the aggregated logs for the current system. """
    # Only superusers may see system-wide aggregated logs.
    if SuperUserPermission().can():
      # Validation normalizes/raises on bad time strings.
      (start_time, end_time) = _validate_logs_arguments(parsed_args['starttime'],
                                                        parsed_args['endtime'])
      aggregated_logs = logs_model.get_aggregated_log_counts(start_time, end_time)
      return {
        'aggregated': [log.to_dict() for log in aggregated_logs]
      }

    raise Unauthorized()
|
||||
|
||||
# Number of log entries per page for paginated log endpoints.
# NOTE(review): not referenced in this file's visible code — confirm callers before removing.
LOGS_PER_PAGE = 20
|
||||
|
||||
@resource('/v1/superuser/logs')
@internal_only
@show_if(features.SUPER_USERS)
class SuperUserLogs(ApiResource):
  """ Resource for fetching all logs in the system. """

  @require_fresh_login
  @verify_not_prod
  @nickname('listAllLogs')
  @parse_args()
  @query_param('starttime', 'Earliest time from which to get logs (%m/%d/%Y %Z)', type=str)
  @query_param('endtime', 'Latest time to which to get logs (%m/%d/%Y %Z)', type=str)
  @query_param('page', 'The page number for the logs', type=int, default=1)
  @page_support()
  @require_scope(scopes.SUPERUSER)
  def get(self, parsed_args, page_token):
    """ List the usage logs for the current system. """
    if SuperUserPermission().can():
      start_time = parsed_args['starttime']
      end_time = parsed_args['endtime']

      # Validation normalizes the raw time strings (raises on bad input).
      (start_time, end_time) = _validate_logs_arguments(start_time, end_time)
      log_entry_page = logs_model.lookup_logs(start_time, end_time, page_token=page_token)
      # Second tuple element is the opaque token for the next page (pagination).
      return {
        'start_time': format_date(start_time),
        'end_time': format_date(end_time),
        'logs': [log.to_dict(avatar, include_namespace=True) for log in log_entry_page.logs],
      }, log_entry_page.next_page_token

    raise Unauthorized()
|
||||
|
||||
|
||||
def org_view(org):
  """ Build the API representation of an organization (name, email, avatar). """
  view = {
    'name': org.username,
    'email': org.email,
    'avatar': avatar.get_data_for_org(org),
  }
  return view
|
||||
|
||||
|
||||
def user_view(user, password=None):
  """ Build the API representation of a user account.

  When `password` is provided, the view additionally carries an
  'encrypted_password' field produced by the authentication system.
  """
  view = {
    'kind': 'user',
    'name': user.username,
    'username': user.username,
    'email': user.email,
    'verified': user.verified,
    'avatar': avatar.get_data_for_user(user),
    'super_user': superusers.is_superuser(user.username),
    'enabled': user.enabled,
  }

  if password is not None:
    view['encrypted_password'] = authentication.encrypt_user_password(password)

  return view
|
||||
|
||||
|
||||
@resource('/v1/superuser/changelog/')
@internal_only
@show_if(features.SUPER_USERS)
class ChangeLog(ApiResource):
  """ Resource for returning the change log for enterprise customers. """

  @require_fresh_login
  @verify_not_prod
  @nickname('getChangeLog')
  @require_scope(scopes.SUPERUSER)
  def get(self):
    """ Returns the change log for this installation. """
    if SuperUserPermission().can():
      # CHANGELOG.md is shipped at the repository root; returned verbatim.
      with open(os.path.join(ROOT_DIR, 'CHANGELOG.md'), 'r') as f:
        return {
          'log': f.read()
        }

    raise Unauthorized()
|
||||
|
||||
|
||||
@resource('/v1/superuser/organizations/')
@internal_only
@show_if(features.SUPER_USERS)
class SuperUserOrganizationList(ApiResource):
  """ Resource for listing organizations in the system. """

  @require_fresh_login
  @verify_not_prod
  @nickname('listAllOrganizations')
  @require_scope(scopes.SUPERUSER)
  def get(self):
    """ Returns a list of all organizations in the system. """
    # Guard clause: non-superusers are rejected immediately.
    if not SuperUserPermission().can():
      raise Unauthorized()

    organizations = pre_oci_model.get_organizations()
    return {
      'organizations': [org.to_dict() for org in organizations]
    }
|
||||
|
||||
|
||||
@resource('/v1/superuser/users/')
@show_if(features.SUPER_USERS)
class SuperUserList(ApiResource):
  """ Resource for listing users in the system. """
  # JSON schema for the POST body (user creation).
  schemas = {
    'CreateInstallUser': {
      'id': 'CreateInstallUser',
      'description': 'Data for creating a user',
      'required': ['username'],
      'properties': {
        'username': {
          'type': 'string',
          'description': 'The username of the user being created'
        },

        'email': {
          'type': 'string',
          'description': 'The email address of the user being created'
        }
      }
    }
  }

  @require_fresh_login
  @verify_not_prod
  @nickname('listAllUsers')
  @parse_args()
  @query_param('disabled', 'If false, only enabled users will be returned.', type=truthy_bool,
               default=True)
  @require_scope(scopes.SUPERUSER)
  def get(self, parsed_args):
    """ Returns a list of all users in the system. """
    if SuperUserPermission().can():
      users = pre_oci_model.get_active_users(disabled=parsed_args['disabled'])
      return {
        'users': [user.to_dict() for user in users]
      }

    raise Unauthorized()

  @require_fresh_login
  @verify_not_prod
  @nickname('createInstallUser')
  @validate_json_request('CreateInstallUser')
  @require_scope(scopes.SUPERUSER)
  def post(self):
    """ Creates a new user with a randomly generated temporary password. """
    # Ensure that we are using database auth.
    if app.config['AUTHENTICATION_TYPE'] != 'Database':
      raise InvalidRequest('Cannot create a user in a non-database auth system')

    user_information = request.get_json()
    if SuperUserPermission().can():
      # Generate a temporary password for the user.
      # SystemRandom is a CSPRNG, suitable for password generation.
      random = SystemRandom()
      password = ''.join([random.choice(string.ascii_uppercase + string.digits) for _ in range(32)])

      # Create the user.
      username = user_information['username']
      email = user_information.get('email')
      install_user, confirmation_code = pre_oci_model.create_install_user(username, password, email)
      if features.MAILING:
        send_confirmation_email(install_user.username, install_user.email, confirmation_code)

      # The plaintext password is returned once so the superuser can hand it off.
      return {
        'username': username,
        'email': email,
        'password': password,
        'encrypted_password': authentication.encrypt_user_password(password),
      }

    raise Unauthorized()
|
||||
|
||||
|
||||
# NOTE(review): the route uses plural '/v1/superusers/' while sibling resources
# use '/v1/superuser/' — confirm with the frontend before normalizing.
@resource('/v1/superusers/users/<username>/sendrecovery')
@internal_only
@show_if(features.SUPER_USERS)
@show_if(features.MAILING)
class SuperUserSendRecoveryEmail(ApiResource):
  """ Resource for sending a recovery user on behalf of a user. """

  @require_fresh_login
  @verify_not_prod
  @nickname('sendInstallUserRecoveryEmail')
  @require_scope(scopes.SUPERUSER)
  def post(self, username):
    """ Sends a password-recovery email to the (non-robot, non-superuser) user. """
    # Ensure that we are using database auth.
    if app.config['AUTHENTICATION_TYPE'] != 'Database':
      raise InvalidRequest('Cannot send a recovery e-mail for non-database auth')

    if SuperUserPermission().can():
      user = pre_oci_model.get_nonrobot_user(username)
      if user is None:
        raise NotFound()

      # Superuser accounts cannot be recovered this way.
      if superusers.is_superuser(username):
        raise InvalidRequest('Cannot send a recovery email for a superuser')

      code = pre_oci_model.create_reset_password_email_code(user.email)
      send_recovery_email(user.email, code)
      return {
        'email': user.email
      }

    raise Unauthorized()
|
||||
|
||||
|
||||
@resource('/v1/superuser/users/<username>')
@path_param('username', 'The username of the user being managed')
@internal_only
@show_if(features.SUPER_USERS)
class SuperUserManagement(ApiResource):
  """ Resource for managing users in the system. """
  # JSON schema for the PUT body (user updates).
  schemas = {
    'UpdateUser': {
      'id': 'UpdateUser',
      'type': 'object',
      'description': 'Description of updates for a user',
      'properties': {
        'password': {
          'type': 'string',
          'description': 'The new password for the user',
        },
        'email': {
          'type': 'string',
          'description': 'The new e-mail address for the user',
        },
        'enabled': {
          'type': 'boolean',
          'description': 'Whether the user is enabled'
        }
      },
    },
  }

  @require_fresh_login
  @verify_not_prod
  @nickname('getInstallUser')
  @require_scope(scopes.SUPERUSER)
  def get(self, username):
    """ Returns information about the specified user. """
    if SuperUserPermission().can():
      user = pre_oci_model.get_nonrobot_user(username)
      if user is None:
        raise NotFound()

      return user.to_dict()

    raise Unauthorized()

  @require_fresh_login
  @verify_not_prod
  @nickname('deleteInstallUser')
  @require_scope(scopes.SUPERUSER)
  def delete(self, username):
    """ Deletes the specified user. """
    if SuperUserPermission().can():
      user = pre_oci_model.get_nonrobot_user(username)
      if user is None:
        raise NotFound()

      # Superusers cannot be deleted through this endpoint.
      if superusers.is_superuser(username):
        raise InvalidRequest('Cannot delete a superuser')

      pre_oci_model.mark_user_for_deletion(username)
      return '', 204

    raise Unauthorized()

  @require_fresh_login
  @verify_not_prod
  @nickname('changeInstallUser')
  @validate_json_request('UpdateUser')
  @require_scope(scopes.SUPERUSER)
  def put(self, username):
    """ Updates information about the specified user.

    Supports password, email and enabled-flag changes, plus toggling the
    user's membership in the SUPER_USERS config list.
    """
    if SuperUserPermission().can():
      user = pre_oci_model.get_nonrobot_user(username)
      if user is None:
        raise NotFound()

      # Existing superusers cannot be modified through this endpoint.
      if superusers.is_superuser(username):
        raise InvalidRequest('Cannot update a superuser')

      user_data = request.get_json()
      if 'password' in user_data:
        # Ensure that we are using database auth.
        if app.config['AUTHENTICATION_TYPE'] != 'Database':
          raise InvalidRequest('Cannot change password in non-database auth')

        pre_oci_model.change_password(username, user_data['password'])

      if 'email' in user_data:
        # Ensure that we are using database auth.
        if app.config['AUTHENTICATION_TYPE'] not in ['Database', 'AppToken']:
          raise InvalidRequest('Cannot change e-mail in non-database auth')

        pre_oci_model.update_email(username, user_data['email'], auto_verify=True)

      if 'enabled' in user_data:
        # Disable/enable the user.
        pre_oci_model.update_enabled(username, bool(user_data['enabled']))

      if 'superuser' in user_data:
        # Superuser status is persisted in the config file, not the database.
        config_object = config_provider.get_config()
        superusers_set = set(config_object['SUPER_USERS'])

        if user_data['superuser']:
          superusers_set.add(username)
        elif username in superusers_set:
          superusers_set.remove(username)

        config_object['SUPER_USERS'] = list(superusers_set)
        config_provider.save_config(config_object)

      # Echo back the updated user, with the encrypted password when changed.
      return_value = user.to_dict()
      if user_data.get('password') is not None:
        password = user_data.get('password')
        return_value['encrypted_password'] = authentication.encrypt_user_password(password)
      if user_data.get('email') is not None:
        return_value['email'] = user_data.get('email')

      return return_value

    raise Unauthorized()
|
||||
|
||||
|
||||
@resource('/v1/superuser/takeownership/<namespace>')
@path_param('namespace', 'The namespace of the user or organization being managed')
@internal_only
@show_if(features.SUPER_USERS)
class SuperUserTakeOwnership(ApiResource):
  """ Resource for a superuser to take ownership of a namespace. """

  @require_fresh_login
  @verify_not_prod
  @nickname('takeOwnership')
  @require_scope(scopes.SUPERUSER)
  def post(self, namespace):
    """ Takes ownership of the specified organization or user. """
    if SuperUserPermission().can():
      # Disallow for superusers.
      if superusers.is_superuser(namespace):
        raise InvalidRequest('Cannot take ownership of a superuser')

      authed_user = get_authenticated_user()
      # `was_user` indicates whether the namespace belonged to a user (vs. an org).
      entity_id, was_user = pre_oci_model.take_ownership(namespace, authed_user)
      if entity_id is None:
        raise NotFound()

      # Log the change.
      log_metadata = {
        'entity_id': entity_id,
        'namespace': namespace,
        'was_user': was_user,
        'superuser': authed_user.username,
      }

      log_action('take_ownership', authed_user.username, log_metadata)

      return jsonify({
        'namespace': namespace
      })

    raise Unauthorized()
|
||||
|
||||
|
||||
@resource('/v1/superuser/organizations/<name>')
@path_param('name', 'The name of the organizaton being managed')
@show_if(features.SUPER_USERS)
class SuperUserOrganizationManagement(ApiResource):
  """ Resource for managing organizations in the system. """
  # JSON schema for the PUT body (organization updates).
  schemas = {
    'UpdateOrg': {
      'id': 'UpdateOrg',
      'type': 'object',
      'description': 'Description of updates for an organization',
      'properties': {
        'name': {
          'type': 'string',
          'description': 'The new name for the organization',
        }
      },
    },
  }

  @require_fresh_login
  @verify_not_prod
  @nickname('deleteOrganization')
  @require_scope(scopes.SUPERUSER)
  def delete(self, name):
    """ Deletes the specified organization. """
    if SuperUserPermission().can():
      pre_oci_model.mark_organization_for_deletion(name)
      return '', 204

    raise Unauthorized()

  @require_fresh_login
  @verify_not_prod
  @nickname('changeOrganization')
  @validate_json_request('UpdateOrg')
  @require_scope(scopes.SUPERUSER)
  def put(self, name):
    """ Updates information about the specified organization.

    Currently the only supported update is renaming: when the body carries
    a 'name' field the organization is renamed to it.
    """
    if SuperUserPermission().can():
      org_data = request.get_json()
      # This value is the *new* name for the organization (the previous local
      # was misleadingly called `old_name`); None leaves the name unchanged.
      new_name = org_data['name'] if 'name' in org_data else None
      org = pre_oci_model.change_organization_name(name, new_name)
      return org.to_dict()

    raise Unauthorized()
|
||||
|
||||
|
||||
def key_view(key):
  """ Build the API representation of a service key, including its approval
      record (rendered via approval_view) when one exists.
  """
  approval = approval_view(key.approval) if key.approval is not None else None
  return {
    'name': key.name,
    'kid': key.kid,
    'service': key.service,
    'jwk': key.jwk,
    'metadata': key.metadata,
    'created_date': key.created_date,
    'expiration_date': key.expiration_date,
    'rotation_duration': key.rotation_duration,
    'approval': approval,
  }
|
||||
|
||||
|
||||
def approval_view(approval):
  """ Build the API representation of a service-key approval record. """
  approver = user_view(approval.approver) if approval.approver else None
  return {
    'approver': approver,
    'approval_type': approval.approval_type,
    'approved_date': approval.approved_date,
    'notes': approval.notes,
  }
|
||||
|
||||
|
||||
@resource('/v1/superuser/keys')
@show_if(features.SUPER_USERS)
class SuperUserServiceKeyManagement(ApiResource):
  """ Resource for managing service keys."""
  # JSON schema for the POST body (service key creation).
  schemas = {
    'CreateServiceKey': {
      'id': 'CreateServiceKey',
      'type': 'object',
      'description': 'Description of creation of a service key',
      'required': ['service', 'expiration'],
      'properties': {
        'service': {
          'type': 'string',
          'description': 'The service authenticating with this key',
        },
        'name': {
          'type': 'string',
          'description': 'The friendly name of a service key',
        },
        'metadata': {
          'type': 'object',
          'description': 'The key/value pairs of this key\'s metadata',
        },
        'notes': {
          'type': 'string',
          'description': 'If specified, the extra notes for the key',
        },
        'expiration': {
          'description': 'The expiration date as a unix timestamp',
          'anyOf': [{'type': 'number'}, {'type': 'null'}],
        },
      },
    },
  }

  @verify_not_prod
  @nickname('listServiceKeys')
  @require_scope(scopes.SUPERUSER)
  def get(self):
    """ Lists all service keys, including unapproved and expired ones. """
    if SuperUserPermission().can():
      keys = pre_oci_model.list_all_service_keys()

      return jsonify({
        'keys': [key.to_dict() for key in keys],
      })

    raise Unauthorized()

  @require_fresh_login
  @verify_not_prod
  @nickname('createServiceKey')
  @require_scope(scopes.SUPERUSER)
  @validate_json_request('CreateServiceKey')
  def post(self):
    """ Creates and auto-approves a new service key; returns the key pair.

    The private key is generated on the fly and never persisted.
    """
    if SuperUserPermission().can():
      body = request.get_json()
      key_name = body.get('name', '')
      if not validate_service_key_name(key_name):
        raise InvalidRequest('Invalid service key friendly name: %s' % key_name)

      # Ensure we have a valid expiration date if specified.
      expiration_date = body.get('expiration', None)
      if expiration_date is not None:
        try:
          expiration_date = datetime.utcfromtimestamp(float(expiration_date))
        except ValueError as ve:
          raise InvalidRequest('Invalid expiration date: %s' % ve)

        # Bug fix: the timestamp above is interpreted as UTC
        # (utcfromtimestamp), so it must be compared against UTC "now".
        # The previous datetime.now() used local time, mis-validating
        # expirations on servers not running in UTC.
        if expiration_date <= datetime.utcnow():
          raise InvalidRequest('Expiration date cannot be in the past')

      # Create the metadata for the key.
      user = get_authenticated_user()
      metadata = body.get('metadata', {})
      metadata.update({
        'created_by': 'Quay Superuser Panel',
        'creator': user.username,
        'ip': get_request_ip(),
      })

      # Generate a key with a private key that we *never save*.
      (private_key, key_id) = pre_oci_model.generate_service_key(body['service'], expiration_date,
                                                                 metadata=metadata,
                                                                 name=key_name)
      # Auto-approve the service key.
      pre_oci_model.approve_service_key(key_id, user, ServiceKeyApprovalType.SUPERUSER,
                                        notes=body.get('notes', ''))

      # Log the creation and auto-approval of the service key.
      key_log_metadata = {
        'kid': key_id,
        'preshared': True,
        'service': body['service'],
        'name': key_name,
        'expiration_date': expiration_date,
        'auto_approved': True,
      }

      log_action('service_key_create', None, key_log_metadata)
      log_action('service_key_approve', None, key_log_metadata)

      return jsonify({
        'kid': key_id,
        'name': key_name,
        'service': body['service'],
        'public_key': private_key.publickey().exportKey('PEM'),
        'private_key': private_key.exportKey('PEM'),
      })

    raise Unauthorized()
|
||||
|
||||
|
||||
@resource('/v1/superuser/keys/<kid>')
@path_param('kid', 'The unique identifier for a service key')
@show_if(features.SUPER_USERS)
class SuperUserServiceKey(ApiResource):
  """ Resource for managing service keys. """

  # JSON schema enforced on PUT bodies by @validate_json_request.
  schemas = {
    'PutServiceKey': {
      'id': 'PutServiceKey',
      'type': 'object',
      'description': 'Description of updates for a service key',
      'properties': {
        'name': {
          'type': 'string',
          'description': 'The friendly name of a service key',
        },
        'metadata': {
          'type': 'object',
          'description': 'The key/value pairs of this key\'s metadata',
        },
        'expiration': {
          'description': 'The expiration date as a unix timestamp',
          'anyOf': [{'type': 'number'}, {'type': 'null'}],
        },
      },
    },
  }

  @verify_not_prod
  @nickname('getServiceKey')
  @require_scope(scopes.SUPERUSER)
  def get(self, kid):
    """ Returns the service key with the given kid.

        Unapproved and expired keys are included so the superuser panel can
        display them. Raises NotFound for a missing key and Unauthorized for
        non-superusers.
    """
    if SuperUserPermission().can():
      try:
        key = pre_oci_model.get_service_key(kid, approved_only=False, alive_only=False)
        return jsonify(key.to_dict())
      except ServiceKeyDoesNotExist:
        raise NotFound()

    raise Unauthorized()

  @require_fresh_login
  @verify_not_prod
  @nickname('updateServiceKey')
  @require_scope(scopes.SUPERUSER)
  @validate_json_request('PutServiceKey')
  def put(self, kid):
    """ Updates the friendly name, metadata and/or expiration of a service key.

        Each present field is applied independently; the refreshed key is
        returned as JSON. Raises NotFound for a missing key, InvalidRequest for
        a bad name or expiration, and Unauthorized for non-superusers.
    """
    if SuperUserPermission().can():
      body = request.get_json()
      try:
        key = pre_oci_model.get_service_key(kid, approved_only=False, alive_only=False)
      except ServiceKeyDoesNotExist:
        raise NotFound()

      key_log_metadata = {
        'kid': key.kid,
        'service': key.service,
        'name': body.get('name', key.name),
        'expiration_date': key.expiration_date,
      }

      if 'expiration' in body:
        expiration_date = body['expiration']
        if expiration_date is not None and expiration_date != '':
          try:
            # Incoming expiration is a unix timestamp (number or numeric string).
            expiration_date = datetime.utcfromtimestamp(float(expiration_date))
          except ValueError as ve:
            raise InvalidRequest('Invalid expiration date: %s' % ve)

          if expiration_date <= datetime.now():
            raise InvalidRequest('Cannot have an expiration date in the past')

        # NOTE(review): a None or '' expiration skips conversion and is handed
        # straight to set_key_expiration — presumably to clear the expiration;
        # confirm the model treats '' the same as None.
        key_log_metadata.update({
          'old_expiration_date': key.expiration_date,
          'expiration_date': expiration_date,
        })

        log_action('service_key_extend', None, key_log_metadata)
        pre_oci_model.set_key_expiration(kid, expiration_date)

      if 'name' in body or 'metadata' in body:
        key_name = body.get('name')
        if not validate_service_key_name(key_name):
          raise InvalidRequest('Invalid service key friendly name: %s' % key_name)

        pre_oci_model.update_service_key(kid, key_name, body.get('metadata'))
        log_action('service_key_modify', None, key_log_metadata)

      # Re-read the key so the response reflects every applied update.
      updated_key = pre_oci_model.get_service_key(kid, approved_only=False, alive_only=False)
      return jsonify(updated_key.to_dict())

    raise Unauthorized()

  @require_fresh_login
  @verify_not_prod
  @nickname('deleteServiceKey')
  @require_scope(scopes.SUPERUSER)
  def delete(self, kid):
    """ Deletes the service key with the given kid and logs the deletion.

        Returns 204 on success; raises NotFound for a missing key and
        Unauthorized for non-superusers.
    """
    if SuperUserPermission().can():
      try:
        key = pre_oci_model.delete_service_key(kid)
      except ServiceKeyDoesNotExist:
        raise NotFound()

      key_log_metadata = {
        'kid': kid,
        'service': key.service,
        'name': key.name,
        'created_date': key.created_date,
        'expiration_date': key.expiration_date,
      }

      log_action('service_key_delete', None, key_log_metadata)
      return make_response('', 204)

    raise Unauthorized()
|
||||
@resource('/v1/superuser/approvedkeys/<kid>')
@path_param('kid', 'The unique identifier for a service key')
@show_if(features.SUPER_USERS)
class SuperUserServiceKeyApproval(ApiResource):
  """ Resource for approving service keys. """

  # JSON schema enforced on POST bodies by @validate_json_request.
  schemas = {
    'ApproveServiceKey': {
      'id': 'ApproveServiceKey',
      'type': 'object',
      'description': 'Information for approving service keys',
      'properties': {
        'notes': {
          'type': 'string',
          'description': 'Optional approval notes',
        },
      },
    },
  }

  @require_fresh_login
  @verify_not_prod
  @nickname('approveServiceKey')
  @require_scope(scopes.SUPERUSER)
  @validate_json_request('ApproveServiceKey')
  def post(self, kid):
    """ Approves the service key with the given kid on behalf of the
        authenticated superuser and logs the approval.

        Returns 201 on success. Approving an already-approved key is treated
        as success (idempotent). Raises NotFound for a missing key and
        Unauthorized for non-superusers.
    """
    if SuperUserPermission().can():
      notes = request.get_json().get('notes', '')
      approver = get_authenticated_user()
      try:
        key = pre_oci_model.approve_service_key(kid, approver, ServiceKeyApprovalType.SUPERUSER,
                                                notes=notes)

        # Log the approval of the service key.
        key_log_metadata = {
          'kid': kid,
          'service': key.service,
          'name': key.name,
          'expiration_date': key.expiration_date,
        }

        log_action('service_key_approve', None, key_log_metadata)
      except ServiceKeyDoesNotExist:
        raise NotFound()
      except ServiceKeyAlreadyApproved:
        # Already approved: deliberately swallowed so the call is idempotent.
        pass

      return make_response('', 201)

    raise Unauthorized()
||||
|
||||
|
||||
@resource('/v1/superuser/<build_uuid>/logs')
@path_param('build_uuid', 'The UUID of the build')
@show_if(features.SUPER_USERS)
class SuperUserRepositoryBuildLogs(ApiResource):
  """ Resource for loading repository build logs for the superuser. """

  @require_fresh_login
  @verify_not_prod
  @nickname('getRepoBuildLogsSuperUser')
  @require_scope(scopes.SUPERUSER)
  def get(self, build_uuid):
    """ Return the build logs for the build specified by the build uuid.

        An unknown build uuid surfaces as an InvalidResponse; non-superusers
        get Unauthorized.
    """
    if SuperUserPermission().can():
      try:
        repo_build = pre_oci_model.get_repository_build(build_uuid)
        return get_logs_or_log_url(repo_build)
      except InvalidRepositoryBuildException as e:
        raise InvalidResponse(str(e))

    raise Unauthorized()
||||
|
||||
@resource('/v1/superuser/<build_uuid>/status')
# NOTE(review): the route has no 'repository' segment; this @path_param looks
# copied from a repository-scoped resource — confirm and remove if unused.
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
@path_param('build_uuid', 'The UUID of the build')
@show_if(features.SUPER_USERS)
class SuperUserRepositoryBuildStatus(ApiResource):
  """ Resource for dealing with repository build status. """

  @require_fresh_login
  @verify_not_prod
  @nickname('getRepoBuildStatusSuperUser')
  @require_scope(scopes.SUPERUSER)
  def get(self, build_uuid):
    """ Return the status for the builds specified by the build uuids.

        An unknown build uuid surfaces as an InvalidResponse; non-superusers
        get Unauthorized.
    """
    if SuperUserPermission().can():
      try:
        build = pre_oci_model.get_repository_build(build_uuid)
      except InvalidRepositoryBuildException as e:
        raise InvalidResponse(str(e))
      return build.to_dict()

    raise Unauthorized()
|
||||
|
||||
@resource('/v1/superuser/<build_uuid>/build')
# NOTE(review): the route has no 'repository' segment; this @path_param looks
# copied from a repository-scoped resource — confirm and remove if unused.
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
@path_param('build_uuid', 'The UUID of the build')
@show_if(features.SUPER_USERS)
class SuperUserRepositoryBuildResource(ApiResource):
  """ Resource for dealing with repository builds as a super user. """

  @require_fresh_login
  @verify_not_prod
  @nickname('getRepoBuildSuperUser')
  @require_scope(scopes.SUPERUSER)
  def get(self, build_uuid):
    """ Returns information about a build.

        Raises NotFound for an unknown build uuid and Unauthorized for
        non-superusers.
    """
    if SuperUserPermission().can():
      try:
        build = pre_oci_model.get_repository_build(build_uuid)
      except InvalidRepositoryBuildException:
        raise NotFound()

      return build.to_dict()

    raise Unauthorized()
335
endpoints/api/superuser_models_interface.py
Normal file
335
endpoints/api/superuser_models_interface.py
Normal file
|
@ -0,0 +1,335 @@
|
|||
import json
|
||||
from abc import ABCMeta, abstractmethod
|
||||
from collections import namedtuple
|
||||
from datetime import datetime
|
||||
|
||||
from dateutil.relativedelta import relativedelta
|
||||
from six import add_metaclass
|
||||
from tzlocal import get_localzone
|
||||
|
||||
from app import avatar, superusers
|
||||
from buildtrigger.basehandler import BuildTriggerHandler
|
||||
from data import model
|
||||
from endpoints.api import format_date
|
||||
from util.morecollections import AttrDict
|
||||
|
||||
|
||||
def user_view(user):
  """ Returns the minimal entity view (name, kind, robot flag) for *user*. """
  return {'name': user.username, 'kind': 'user', 'is_robot': user.robot}
|
||||
|
||||
class BuildTrigger(
  namedtuple('BuildTrigger', ['uuid', 'service_name', 'pull_robot', 'can_read', 'can_admin', 'for_build'])):
  """
  BuildTrigger represents a trigger that is associated with a build.

  :type uuid: string
  :type service_name: string
  :type pull_robot: User
  :type can_read: boolean
  :type can_admin: boolean
  :type for_build: boolean
  """

  def to_dict(self):
    """ Returns the API view of this trigger, or None when the trigger has no uuid. """
    if not self.uuid:
      return None

    handler = BuildTriggerHandler.get_handler(self)
    source = handler.config.get('build_source')
    repository_url = handler.get_repository_url() if source else None

    # Admins can always see what readers can.
    readable = self.can_read or self.can_admin

    view = {
      'id': self.uuid,
      'service': self.service_name,
      'is_active': handler.is_active(),

      # Source/URL are hidden from callers without read access.
      'build_source': source if readable else None,
      'repository_url': repository_url if readable else None,

      # Raw trigger config is admin-only.
      'config': handler.config if self.can_admin else {},
      'can_invoke': self.can_admin,
    }

    if not self.for_build and self.can_admin and self.pull_robot:
      view['pull_robot'] = user_view(self.pull_robot)

    return view
|
||||
|
||||
class RepositoryBuild(namedtuple('RepositoryBuild',
                                 ['uuid', 'logs_archived', 'repository_namespace_user_username', 'repository_name',
                                  'can_write', 'can_read', 'pull_robot', 'resource_key', 'trigger', 'display_name',
                                  'started', 'job_config', 'phase', 'status', 'error', 'archive_url'])):
  """
  RepositoryBuild represents a build associated with a repostiory
  :type uuid: string
  :type logs_archived: boolean
  :type repository_namespace_user_username: string
  :type repository_name: string
  :type can_write: boolean
  :type can_read: boolean
  :type pull_robot: User
  :type resource_key: string
  :type trigger: Trigger
  :type display_name: string
  :type started: datetime
  :type job_config: {Any -> Any}
  :type phase: string
  :type status: string
  :type error: string
  :type archive_url: string
  """

  def to_dict(self):
    """ Returns the API view of this build; sensitive fields are gated on the
        caller's read/write permissions baked into this tuple.
    """
    resp = {
      'id': self.uuid,
      'phase': self.phase,
      'started': format_date(self.started),
      'display_name': self.display_name,
      'status': self.status or {},
      'subdirectory': self.job_config.get('build_subdir', ''),
      'dockerfile_path': self.job_config.get('build_subdir', ''),
      'context': self.job_config.get('context', ''),
      'tags': self.job_config.get('docker_tags', []),
      'manual_user': self.job_config.get('manual_user', None),
      'is_writer': self.can_write,
      'trigger': self.trigger.to_dict(),
      # Trigger metadata may contain source details; readers only.
      'trigger_metadata': self.job_config.get('trigger_metadata', None) if self.can_read else None,
      'resource_key': self.resource_key,
      'pull_robot': user_view(self.pull_robot) if self.pull_robot else None,
      'repository': {
        'namespace': self.repository_namespace_user_username,
        'name': self.repository_name
      },
      'error': self.error,
    }

    # The log-archive URL is only exposed to writers.
    if self.can_write:
      if self.resource_key is not None:
        resp['archive_url'] = self.archive_url
      elif self.job_config.get('archive_url', None):
        resp['archive_url'] = self.job_config['archive_url']

    return resp
|
||||
|
||||
class Approval(namedtuple('Approval', ['approver', 'approval_type', 'approved_date', 'notes'])):
  """
  Approval records whether (and how) a service key was approved.

  :type approver: User
  :type approval_type: string
  :type approved_date: Date
  :type notes: string
  """

  def to_dict(self):
    """ Returns a JSON-serializable view of this approval. """
    approver_view = None
    if self.approver:
      approver_view = self.approver.to_dict()

    return {
      'approver': approver_view,
      'approval_type': self.approval_type,
      'approved_date': self.approved_date,
      'notes': self.notes,
    }
|
||||
|
||||
class ServiceKey(namedtuple('ServiceKey', ['name', 'kid', 'service', 'jwk', 'metadata', 'created_date',
                                           'expiration_date', 'rotation_duration', 'approval'])):
  """
  ServiceKey is an apostille signing key.

  :type name: string
  :type kid: string
  :type service: string
  :type jwk: string
  :type metadata: string
  :type created_date: Date
  :type expiration_date: Date
  :type rotation_duration: Date
  :type approval: Approval
  """

  def to_dict(self):
    """ Returns a JSON-serializable view of this key, with its approval expanded. """
    view = {field: getattr(self, field)
            for field in ('name', 'kid', 'service', 'jwk', 'metadata', 'created_date',
                          'expiration_date', 'rotation_duration')}
    view['approval'] = None if self.approval is None else self.approval.to_dict()
    return view
|
||||
|
||||
class User(namedtuple('User', ['username', 'email', 'verified', 'enabled', 'robot'])):
  """
  User represents a single user.

  :type username: string
  :type email: string
  :type verified: boolean
  :type enabled: boolean
  :type robot: User
  """

  def to_dict(self):
    """ Returns the full API view of this user, including avatar data and the
        superuser flag.
    """
    return {
      'kind': 'user',
      'name': self.username,
      'username': self.username,
      'email': self.email,
      'verified': self.verified,
      'avatar': avatar.get_data_for_user(self),
      'super_user': superusers.is_superuser(self.username),
      'enabled': self.enabled,
    }
|
||||
|
||||
class Organization(namedtuple('Organization', ['username', 'email'])):
  """
  Organization represents a single org.

  :type username: string
  :type email: string
  """

  def to_dict(self):
    """ Returns the API view of this organization, including avatar data. """
    view = {
      'name': self.username,
      'email': self.email,
      'avatar': avatar.get_data_for_org(self),
    }
    return view
|
||||
|
||||
@add_metaclass(ABCMeta)
class SuperuserDataInterface(object):
  """
  Interface that represents all data store interactions required by a superuser api.
  """

  @abstractmethod
  def get_organizations(self):
    """
    Returns a list of Organization
    """

  @abstractmethod
  def get_active_users(self):
    """
    Returns a list of User
    """
    # NOTE(review): the pre-OCI implementation accepts a `disabled=True`
    # keyword here; consider reflecting that in this signature.

  @abstractmethod
  def create_install_user(self, username, password, email):
    """
    Returns the created user and confirmation code for email confirmation
    """

  @abstractmethod
  def get_nonrobot_user(self, username):
    """
    Returns a User
    """

  @abstractmethod
  def create_reset_password_email_code(self, email):
    """
    Returns a recover password code
    """

  @abstractmethod
  def mark_user_for_deletion(self, username):
    """
    Returns None
    """

  @abstractmethod
  def change_password(self, username, password):
    """
    Returns None
    """

  @abstractmethod
  def update_email(self, username, email, auto_verify):
    """
    Returns None
    """

  @abstractmethod
  def update_enabled(self, username, enabled):
    """
    Returns None
    """

  @abstractmethod
  def take_ownership(self, namespace, authed_user):
    """
    Returns id of entity and whether the entity was a user
    """

  @abstractmethod
  def mark_organization_for_deletion(self, name):
    """
    Returns None
    """

  @abstractmethod
  def change_organization_name(self, old_org_name, new_org_name):
    """
    Returns updated Organization
    """

  @abstractmethod
  def list_all_service_keys(self):
    """
    Returns a list of service keys
    """

  @abstractmethod
  def generate_service_key(self, service, expiration_date, kid=None, name='', metadata=None, rotation_duration=None):
    """
    Returns a tuple of private key and public key id
    """

  @abstractmethod
  def approve_service_key(self, kid, approver, approval_type, notes=''):
    """
    Returns the approved Key
    """

  @abstractmethod
  def get_service_key(self, kid, service=None, alive_only=True, approved_only=True):
    """
    Returns ServiceKey
    """

  @abstractmethod
  def set_key_expiration(self, kid, expiration_date):
    """
    Returns None
    """

  @abstractmethod
  def update_service_key(self, kid, name=None, metadata=None):
    """
    Returns None
    """

  @abstractmethod
  def delete_service_key(self, kid):
    """
    Returns deleted ServiceKey
    """

  @abstractmethod
  def get_repository_build(self, uuid):
    """
    Returns RepositoryBuild
    """
182
endpoints/api/superuser_models_pre_oci.py
Normal file
182
endpoints/api/superuser_models_pre_oci.py
Normal file
|
@ -0,0 +1,182 @@
|
|||
import features
|
||||
|
||||
from flask import request
|
||||
|
||||
from app import all_queues, userfiles, namespace_gc_queue
|
||||
from auth.permissions import ReadRepositoryPermission, ModifyRepositoryPermission, AdministerRepositoryPermission
|
||||
from data import model, database
|
||||
from endpoints.api.build import get_job_config, _get_build_status
|
||||
from endpoints.api.superuser_models_interface import BuildTrigger
|
||||
from endpoints.api.superuser_models_interface import SuperuserDataInterface, Organization, User, \
|
||||
ServiceKey, Approval, RepositoryBuild
|
||||
from util.request import get_request_ip
|
||||
|
||||
|
||||
def _create_user(user):
  """ Converts a database user row into a User tuple; passes None through unchanged. """
  return None if user is None else User(user.username, user.email, user.verified, user.enabled,
                                        user.robot)
|
||||
|
||||
def _create_key(key):
  """ Converts a database service-key row into a ServiceKey tuple, expanding its
      Approval when present.
  """
  raw_approval = key.approval
  approval = (Approval(_create_user(raw_approval.approver), raw_approval.approval_type,
                       raw_approval.approved_date, raw_approval.notes)
              if raw_approval is not None else None)

  return ServiceKey(key.name, key.kid, key.service, key.jwk, key.metadata, key.created_date,
                    key.expiration_date, key.rotation_duration, approval)
||||
|
||||
class ServiceKeyDoesNotExist(Exception):
  """ Raised when a service key with the requested kid cannot be found. """
  pass
|
||||
|
||||
class ServiceKeyAlreadyApproved(Exception):
  """ Raised when attempting to approve a service key that is already approved. """
  pass
|
||||
|
||||
class InvalidRepositoryBuildException(Exception):
  """ Raised when a repository build with the requested uuid cannot be found. """
  pass
|
||||
|
||||
class PreOCIModel(SuperuserDataInterface):
  """
  PreOCIModel implements the data model for the SuperUser using a database schema
  before it was changed to support the OCI specification.
  """

  def get_repository_build(self, uuid):
    """ Returns the RepositoryBuild with the given uuid, with fields gated on
        the calling user's repository permissions.

        Raises InvalidRepositoryBuildException if no such build exists.
    """
    try:
      build = model.build.get_repository_build(uuid)
    except model.InvalidRepositoryBuildException as e:
      raise InvalidRepositoryBuildException(str(e))

    repo_namespace = build.repository_namespace_user_username
    repo_name = build.repository_name

    # Compute the caller's permissions once; they gate which fields of the
    # build view get populated.
    can_read = ReadRepositoryPermission(repo_namespace, repo_name).can()
    can_write = ModifyRepositoryPermission(repo_namespace, repo_name).can()
    can_admin = AdministerRepositoryPermission(repo_namespace, repo_name).can()
    job_config = get_job_config(build.job_config)
    phase, status, error = _get_build_status(build)

    # Fix: the archive URL is derived from the *build's* resource key; the
    # model instance (`self`) has no `resource_key` attribute.
    url = userfiles.get_file_url(build.resource_key, get_request_ip(), requires_cors=True)

    trigger = BuildTrigger(build.trigger.uuid, build.trigger.service.name,
                           _create_user(build.trigger.pull_robot), can_read, can_admin, True)

    # Fix: `build.display_name` was previously passed twice, producing 17
    # positional arguments for the 16-field RepositoryBuild namedtuple.
    return RepositoryBuild(build.uuid, build.logs_archived, repo_namespace, repo_name, can_write,
                           can_read, _create_user(build.pull_robot), build.resource_key, trigger,
                           build.display_name, build.started, job_config, phase, status, error,
                           url)

  def delete_service_key(self, kid):
    """ Deletes and returns the service key with the given kid.

        Raises ServiceKeyDoesNotExist if no such key exists.
    """
    try:
      key = model.service_keys.delete_service_key(kid)
    except model.ServiceKeyDoesNotExist:
      raise ServiceKeyDoesNotExist
    return _create_key(key)

  def update_service_key(self, kid, name=None, metadata=None):
    """ Updates the friendly name and/or metadata of the given key. """
    model.service_keys.update_service_key(kid, name, metadata)

  def set_key_expiration(self, kid, expiration_date):
    """ Sets (or clears) the expiration date of the given key. """
    model.service_keys.set_key_expiration(kid, expiration_date)

  def get_service_key(self, kid, service=None, alive_only=True, approved_only=True):
    """ Returns the ServiceKey with the given kid.

        Raises ServiceKeyDoesNotExist if no such key exists (or it is filtered
        out by the alive/approved flags).
    """
    try:
      key = model.service_keys.get_service_key(kid, approved_only=approved_only, alive_only=alive_only)
      return _create_key(key)
    except model.ServiceKeyDoesNotExist:
      raise ServiceKeyDoesNotExist

  def approve_service_key(self, kid, approver, approval_type, notes=''):
    """ Approves the given key on behalf of `approver` and returns it.

        Raises ServiceKeyDoesNotExist for a missing key and
        ServiceKeyAlreadyApproved for a key that is already approved.
    """
    try:
      key = model.service_keys.approve_service_key(kid, approval_type, approver=approver, notes=notes)
      return _create_key(key)
    except model.ServiceKeyDoesNotExist:
      raise ServiceKeyDoesNotExist
    except model.ServiceKeyAlreadyApproved:
      raise ServiceKeyAlreadyApproved

  def generate_service_key(self, service, expiration_date, kid=None, name='', metadata=None, rotation_duration=None):
    """ Generates a new service key and returns (private_key, kid).

        NOTE(review): the `kid` and `rotation_duration` parameters are accepted
        for interface compatibility but are not forwarded to the model call —
        confirm whether they should be.
    """
    (private_key, key) = model.service_keys.generate_service_key(service, expiration_date, metadata=metadata, name=name)

    return private_key, key.kid

  def list_all_service_keys(self):
    """ Returns every service key as a list of ServiceKey tuples. """
    keys = model.service_keys.list_all_keys()
    return [_create_key(key) for key in keys]

  def change_organization_name(self, old_org_name, new_org_name):
    """ Renames an organization (no-op when new_org_name is None) and returns
        the resulting Organization view.
    """
    org = model.organization.get_organization(old_org_name)
    if new_org_name is not None:
      org = model.user.change_username(org.id, new_org_name)

    return Organization(org.username, org.email)

  def mark_organization_for_deletion(self, name):
    """ Queues the named organization's namespace for garbage collection. """
    org = model.organization.get_organization(name)
    model.user.mark_namespace_for_deletion(org, all_queues, namespace_gc_queue, force=True)

  def take_ownership(self, namespace, authed_user):
    """ Gives `authed_user` ownership of the given namespace.

        Returns (entity_id, was_user); (None, False) when the namespace does
        not exist. A user namespace is converted into an organization.
    """
    entity = model.user.get_user_or_org(namespace)
    if entity is None:
      return None, False

    was_user = not entity.organization
    if entity.organization:
      # Add the superuser as an admin to the owners team of the org.
      model.organization.add_user_as_admin(authed_user, entity)
    else:
      # If the entity is a user, convert it to an organization and add the current superuser
      # as the admin.
      model.organization.convert_user_to_organization(entity, authed_user)
    return entity.id, was_user

  def update_enabled(self, username, enabled):
    """ Enables or disables the named (non-robot) user. """
    user = model.user.get_nonrobot_user(username)
    model.user.update_enabled(user, bool(enabled))

  def update_email(self, username, email, auto_verify):
    """ Updates the named user's email, optionally auto-verifying it. """
    user = model.user.get_nonrobot_user(username)
    model.user.update_email(user, email, auto_verify)

  def change_password(self, username, password):
    """ Changes the named user's password. """
    user = model.user.get_nonrobot_user(username)
    model.user.change_password(user, password)

  def mark_user_for_deletion(self, username):
    """ Queues the named user's namespace for garbage collection. """
    user = model.user.get_nonrobot_user(username)
    model.user.mark_namespace_for_deletion(user, all_queues, namespace_gc_queue, force=True)

  def create_reset_password_email_code(self, email):
    """ Returns a password-recovery code for the given email address. """
    code = model.user.create_reset_password_email_code(email)
    return code

  def get_nonrobot_user(self, username):
    """ Returns the named non-robot user as a User tuple, or None. """
    user = model.user.get_nonrobot_user(username)
    if user is None:
      return None
    return _create_user(user)

  def create_install_user(self, username, password, email):
    """ Creates a user and returns (User, confirmation_code).

        The confirmation code is '' when mailing is disabled (the account is
        auto-verified instead).
    """
    prompts = model.user.get_default_user_prompts(features)
    user = model.user.create_user(username, password, email, auto_verify=not features.MAILING,
                                  email_required=features.MAILING, prompts=prompts)

    return_user = _create_user(user)
    # If mailing is turned on, send the user a verification email.
    if features.MAILING:
      confirmation_code = model.user.create_confirm_email_code(user)
      return return_user, confirmation_code

    return return_user, ''

  def get_active_users(self, disabled=True):
    """ Returns active users as User tuples; `disabled=False` filters out
        disabled accounts.
    """
    users = model.user.get_active_users(disabled=disabled)
    return [_create_user(user) for user in users]

  def get_organizations(self):
    """ Returns every organization as an Organization tuple. """
    return [Organization(org.username, org.email) for org in model.organization.get_organizations()]


pre_oci_model = PreOCIModel()
|
336
endpoints/api/tag.py
Normal file
336
endpoints/api/tag.py
Normal file
|
@ -0,0 +1,336 @@
|
|||
""" Manage the tags of a repository. """
|
||||
from datetime import datetime
|
||||
from flask import request, abort
|
||||
|
||||
from app import storage, docker_v2_signing_key
|
||||
from auth.auth_context import get_authenticated_user
|
||||
from data.registry_model import registry_model
|
||||
from endpoints.api import (resource, nickname, require_repo_read, require_repo_write,
|
||||
RepositoryParamResource, log_action, validate_json_request, path_param,
|
||||
parse_args, query_param, truthy_bool, disallow_for_app_repositories,
|
||||
format_date, disallow_for_non_normal_repositories)
|
||||
from endpoints.api.image import image_dict
|
||||
from endpoints.exception import NotFound, InvalidRequest
|
||||
from util.names import TAG_ERROR, TAG_REGEX
|
||||
|
||||
|
||||
def _tag_dict(tag):
  """ Returns the API view of a repository tag, including legacy-image and
      manifest details when present.
  """
  tag_info = {
    'name': tag.name,
    'reversion': tag.reversion,
  }

  if tag.lifetime_start_ts > 0:
    tag_info['start_ts'] = tag.lifetime_start_ts

  if tag.lifetime_end_ts > 0:
    tag_info['end_ts'] = tag.lifetime_end_ts

  # TODO: Remove this once fully on OCI data model.
  if tag.legacy_image_if_present:
    tag_info['docker_image_id'] = tag.legacy_image.docker_image_id
    tag_info['image_id'] = tag.legacy_image.docker_image_id
    tag_info['size'] = tag.legacy_image.aggregate_size

  # TODO: Remove this check once fully on OCI data model.
  if tag.manifest_digest:
    tag_info['manifest_digest'] = tag.manifest_digest

  if tag.manifest:
    tag_info['is_manifest_list'] = tag.manifest.is_manifest_list

  if tag.lifetime_start_ts > 0:
    last_modified = format_date(datetime.utcfromtimestamp(tag.lifetime_start_ts))
    tag_info['last_modified'] = last_modified

  # NOTE(review): this checks `is not None` while end_ts is checked with `> 0`
  # above — presumably end_ts is never None here; confirm, since a None value
  # would make the earlier `> 0` comparison Python-version-sensitive.
  if tag.lifetime_end_ts is not None:
    expiration = format_date(datetime.utcfromtimestamp(tag.lifetime_end_ts))
    tag_info['expiration'] = expiration

  return tag_info
|
||||
|
||||
@resource('/v1/repository/<apirepopath:repository>/tag/')
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
class ListRepositoryTags(RepositoryParamResource):
  """ Resource for listing full repository tag history, alive *and dead*. """

  @require_repo_read
  @disallow_for_app_repositories
  @parse_args()
  @query_param('specificTag', 'Filters the tags to the specific tag.', type=str, default='')
  @query_param('limit', 'Limit to the number of results to return per page. Max 100.', type=int,
               default=50)
  @query_param('page', 'Page index for the results. Default 1.', type=int, default=1)
  @query_param('onlyActiveTags', 'Filter to only active tags.', type=truthy_bool, default=False)
  @nickname('listRepoTags')
  def get(self, namespace, repository, parsed_args):
    """ Returns a page of the repository's tag history.

        Response: {'tags': [...], 'page': n, 'has_additional': bool}. Raises
        NotFound if the repository does not exist.
    """
    specific_tag = parsed_args.get('specificTag') or None
    # Clamp paging parameters: page >= 1, 1 <= limit <= 100.
    page = max(1, parsed_args.get('page', 1))
    limit = min(100, max(1, parsed_args.get('limit', 50)))
    active_tags_only = parsed_args.get('onlyActiveTags')

    repo_ref = registry_model.lookup_repository(namespace, repository)
    if repo_ref is None:
      raise NotFound()

    history, has_more = registry_model.list_repository_tag_history(repo_ref, page=page,
                                                                   size=limit,
                                                                   specific_tag_name=specific_tag,
                                                                   active_tags_only=active_tags_only)
    return {
      'tags': [_tag_dict(tag) for tag in history],
      'page': page,
      'has_additional': has_more,
    }
|
||||
|
||||
@resource('/v1/repository/<apirepopath:repository>/tag/<tag>')
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
@path_param('tag', 'The name of the tag')
class RepositoryTag(RepositoryParamResource):
  """ Resource for managing repository tags. """
  schemas = {
    'ChangeTag': {
      'type': 'object',
      'description': 'Makes changes to a specific tag',
      'properties': {
        'image': {
          'type': ['string', 'null'],
          'description': '(Deprecated: Use `manifest_digest`) Image to which the tag should point.',
        },
        'manifest_digest': {
          'type': ['string', 'null'],
          'description': '(If specified) The manifest digest to which the tag should point',
        },
        'expiration': {
          'type': ['number', 'null'],
          'description': '(If specified) The expiration for the image',
        },
      },
    },
  }

  @require_repo_write
  @disallow_for_app_repositories
  @disallow_for_non_normal_repositories
  @nickname('changeTag')
  @validate_json_request('ChangeTag')
  def put(self, namespace, repository, tag):
    """ Change which image a tag points to or create a new tag.

        The JSON body may carry `expiration` (a unix timestamp or null),
        and/or a retarget request via `manifest_digest` or the deprecated
        `image` field. Expiration changes are applied before retargeting.
    """
    if not TAG_REGEX.match(tag):
      abort(400, TAG_ERROR)

    repo_ref = registry_model.lookup_repository(namespace, repository)
    if repo_ref is None:
      raise NotFound()

    # Parse the request body once rather than re-parsing it on each access.
    request_json = request.get_json()

    if 'expiration' in request_json:
      tag_ref = registry_model.get_repo_tag(repo_ref, tag)
      if tag_ref is None:
        raise NotFound()

      expiration = request_json.get('expiration')
      expiration_date = None
      if expiration is not None:
        try:
          expiration_date = datetime.utcfromtimestamp(float(expiration))
        except ValueError:
          abort(400)

        # Fix: the timestamp above is interpreted as UTC, so compare against
        # UTC "now"; the previous datetime.now() used the server's local
        # clock and mis-rejected/mis-accepted expirations off by the TZ offset.
        if expiration_date <= datetime.utcnow():
          abort(400)

      existing_end_ts, ok = registry_model.change_repository_tag_expiration(tag_ref,
                                                                            expiration_date)
      if ok:
        # Only log when something actually changed (None -> None is a no-op).
        if not (existing_end_ts is None and expiration_date is None):
          log_action('change_tag_expiration', namespace, {
            'username': get_authenticated_user().username,
            'repo': repository,
            'tag': tag,
            'namespace': namespace,
            'expiration_date': expiration_date,
            'old_expiration_date': existing_end_ts
          }, repo_name=repository)
      else:
        raise InvalidRequest('Could not update tag expiration; Tag has probably changed')

    if 'image' in request_json or 'manifest_digest' in request_json:
      existing_tag = registry_model.get_repo_tag(repo_ref, tag, include_legacy_image=True)

      manifest_or_image = None
      image_id = None
      manifest_digest = None

      if 'image' in request_json:
        # Deprecated path: retarget by legacy docker image id.
        image_id = request_json['image']
        manifest_or_image = registry_model.get_legacy_image(repo_ref, image_id)
      else:
        manifest_digest = request_json['manifest_digest']
        manifest_or_image = registry_model.lookup_manifest_by_digest(repo_ref, manifest_digest,
                                                                     require_available=True)

      if manifest_or_image is None:
        raise NotFound()

      # TODO: Remove this check once fully on V22
      existing_manifest_digest = None
      if existing_tag:
        existing_manifest = registry_model.get_manifest_for_tag(existing_tag)
        existing_manifest_digest = existing_manifest.digest if existing_manifest else None

      if not registry_model.retarget_tag(repo_ref, tag, manifest_or_image, storage,
                                         docker_v2_signing_key):
        raise InvalidRequest('Could not move tag')

      username = get_authenticated_user().username

      log_action('move_tag' if existing_tag else 'create_tag', namespace, {
        'username': username,
        'repo': repository,
        'tag': tag,
        'namespace': namespace,
        'image': image_id,
        'manifest_digest': manifest_digest,
        'original_image': (existing_tag.legacy_image.docker_image_id
                           if existing_tag and existing_tag.legacy_image_if_present
                           else None),
        'original_manifest_digest': existing_manifest_digest,
      }, repo_name=repository)

    return 'Updated', 201

  @require_repo_write
  @disallow_for_app_repositories
  @disallow_for_non_normal_repositories
  @nickname('deleteFullTag')
  def delete(self, namespace, repository, tag):
    """ Delete the specified repository tag. """
    repo_ref = registry_model.lookup_repository(namespace, repository)
    if repo_ref is None:
      raise NotFound()

    registry_model.delete_tag(repo_ref, tag)

    username = get_authenticated_user().username
    log_action('delete_tag', namespace,
               {'username': username,
                'repo': repository,
                'namespace': namespace,
                'tag': tag}, repo_name=repository)

    return '', 204
||||
|
||||
@resource('/v1/repository/<apirepopath:repository>/tag/<tag>/images')
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
@path_param('tag', 'The name of the tag')
class RepositoryTagImages(RepositoryParamResource):
  """ Resource for listing the images in a specific repository tag. """

  @require_repo_read
  @nickname('listTagImages')
  @disallow_for_app_repositories
  @parse_args()
  @query_param('owned', 'If specified, only images wholely owned by this tag are returned.',
               type=truthy_bool, default=False)
  def get(self, namespace, repository, tag, parsed_args):
    """ List the images for the specified repository tag. """
    repo_ref = registry_model.lookup_repository(namespace, repository)
    if repo_ref is None:
      raise NotFound()

    tag_ref = registry_model.get_repo_tag(repo_ref, tag, include_legacy_image=True)
    if tag_ref is None:
      raise NotFound()

    # A tag without a backing legacy image has no image listing.
    if tag_ref.legacy_image_if_present is None:
      return {'images': []}

    legacy_image_id = tag_ref.legacy_image.docker_image_id

    if parsed_args['owned']:
      # TODO: Remove the `owned` image concept once we are fully on V2_2.
      images = registry_model.get_legacy_images_owned_by_tag(tag_ref)
    else:
      with_parents = registry_model.get_legacy_image(repo_ref, legacy_image_id,
                                                     include_parents=True)
      if with_parents is None:
        raise NotFound()

      images = [with_parents] + with_parents.parents

    return {
      'images': [image_dict(image) for image in images],
    }
||||
@resource('/v1/repository/<apirepopath:repository>/tag/<tag>/restore')
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
@path_param('tag', 'The name of the tag')
class RestoreTag(RepositoryParamResource):
  """ Resource for restoring a repository tag back to a previous image. """
  schemas = {
    'RestoreTag': {
      'type': 'object',
      'description': 'Restores a tag to a specific image',
      'properties': {
        'image': {
          'type': 'string',
          'description': '(Deprecated: use `manifest_digest`) Image to which the tag should point',
        },
        'manifest_digest': {
          'type': 'string',
          'description': 'If specified, the manifest digest that should be used',
        },
      },
    },
  }

  @require_repo_write
  @disallow_for_app_repositories
  @disallow_for_non_normal_repositories
  @nickname('restoreTag')
  @validate_json_request('RestoreTag')
  def post(self, namespace, repository, tag):
    """ Restores a repository tag back to a previous image in the repository. """
    repo_ref = registry_model.lookup_repository(namespace, repository)
    if repo_ref is None:
      raise NotFound()

    # Restore the tag back to the previous image.
    body = request.get_json()
    image_id = body.get('image', None)
    manifest_digest = body.get('manifest_digest', None)

    if image_id is None and manifest_digest is None:
      raise InvalidRequest('Missing manifest_digest')

    # Data for logging the reversion/restoration.
    username = get_authenticated_user().username
    log_data = {
      'username': username,
      'repo': repository,
      'tag': tag,
      'image': image_id,
      'manifest_digest': manifest_digest,
    }

    manifest_or_legacy_image = None
    if manifest_digest is not None:
      # Reversions may target a manifest that is no longer alive.
      manifest_or_legacy_image = registry_model.lookup_manifest_by_digest(repo_ref,
                                                                          manifest_digest,
                                                                          allow_dead=True,
                                                                          require_available=True)
    elif image_id is not None:
      manifest_or_legacy_image = registry_model.get_legacy_image(repo_ref, image_id)

    if manifest_or_legacy_image is None:
      raise NotFound()

    retargeted = registry_model.retarget_tag(repo_ref, tag, manifest_or_legacy_image, storage,
                                             docker_v2_signing_key, is_reversion=True)
    if not retargeted:
      raise InvalidRequest('Could not restore tag')

    log_action('revert_tag', namespace, log_data, repo_name=repository)

    return {}
534
endpoints/api/team.py
Normal file
534
endpoints/api/team.py
Normal file
|
@ -0,0 +1,534 @@
|
|||
""" Create, list and manage an organization's teams. """
|
||||
|
||||
import json
|
||||
|
||||
from functools import wraps
|
||||
|
||||
from flask import request
|
||||
|
||||
import features
|
||||
|
||||
from app import avatar, authentication
|
||||
from auth.permissions import (AdministerOrganizationPermission, ViewTeamPermission,
|
||||
SuperUserPermission)
|
||||
|
||||
from auth.auth_context import get_authenticated_user
|
||||
from auth import scopes
|
||||
from data import model
|
||||
from data.database import Team
|
||||
from endpoints.api import (resource, nickname, ApiResource, validate_json_request, request_error,
|
||||
log_action, internal_only, require_scope, path_param, query_param,
|
||||
truthy_bool, parse_args, require_user_admin, show_if, format_date,
|
||||
verify_not_prod, require_fresh_login)
|
||||
from endpoints.exception import Unauthorized, NotFound, InvalidRequest
|
||||
from util.useremails import send_org_invite_email
|
||||
from util.names import parse_robot_username
|
||||
|
||||
def permission_view(permission):
  """ Returns the serialized API view of a single repository permission. """
  repo = permission.repository
  is_public = repo.visibility.name == 'public'
  return {
    'repository': {
      'name': repo.name,
      'is_public': is_public,
    },
    'role': permission.role.name,
  }
|
||||
def try_accept_invite(code, user):
  """ Accepts the team invite with the given code on behalf of `user`, clears
      the matching org-invite notifications and logs the acceptance.

      Returns the team the user joined.
  """
  (team, inviter) = model.team.confirm_team_invite(code, user)

  org_name = team.organization.username
  model.notification.delete_matching_notifications(user, 'org_team_invite',
                                                   org=org_name)

  log_action('org_team_member_invite_accepted', org_name, {
    'member': user.username,
    'team': team.name,
    'inviter': inviter.username
  })

  return team
|
||||
def handle_addinvite_team(inviter, team, user=None, email=None):
  """ Adds the given user (or email address) to the team, issuing an invite
      when the deployment requires one.

      Returns the invite, or None when the member was added directly.
  """
  requires_invite = features.MAILING and features.REQUIRE_TEAM_INVITE
  invite = model.team.add_or_invite_to_team(inviter, team, user, email,
                                            requires_invite=requires_invite)
  if not invite:
    # User was added to the team directly.
    return None

  orgname = team.organization.username
  if user:
    # Registered users also get an in-app notification carrying the code.
    model.notification.create_notification('org_team_invite', user, metadata={
      'code': invite.invite_token,
      'inviter': inviter.username,
      'org': orgname,
      'team': team.name
    })

  recipient_name = user.username if user else email
  recipient_email = user.email if user else email
  send_org_invite_email(recipient_name, recipient_email, orgname, team.name,
                        inviter.username, invite.invite_token)
  return invite
|
||||
def team_view(orgname, team, is_new_team=False):
  """ Returns the serialized API view of the given team. """
  can_view = ViewTeamPermission(orgname, team.name).can()
  return {
    'name': team.name,
    'description': team.description,
    'can_view': can_view,
    'role': Team.role.get_name(team.role_id),
    'avatar': avatar.get_data_for_team(team),
    'new_team': is_new_team,
  }
||||
|
||||
def member_view(member, invited=False):
  """ Returns the serialized view of a single team member.

      Robots and regular users both render with kind 'user'; the `is_robot`
      flag distinguishes them.
  """
  view = {
    'name': member.username,
    'kind': 'user',
    'is_robot': member.robot,
    'avatar': avatar.get_data_for_user(member),
  }
  view['invited'] = invited
  return view
||||
|
||||
def invite_view(invite):
  """ Returns the serialized view of a pending team invite: a member view when
      the invite is bound to a user, otherwise an email-only invite entry.
  """
  if invite.user:
    return member_view(invite.user, invited=True)

  return {
    'email': invite.email,
    'kind': 'invite',
    'avatar': avatar.get_data(invite.email, invite.email, 'user'),
    'invited': True
  }
||||
|
||||
def disallow_for_synced_team(except_robots=False):
  """ Disallows the decorated operation for a team that is marked as being synced from an internal
      auth provider such as LDAP. If except_robots is True, then the operation is allowed if the
      member specified on the operation is a robot account.
  """
  def inner(func):
    @wraps(func)
    def wrapper(self, *args, **kwargs):
      # Team syncing can only be enabled if we have a federated service.
      if features.TEAM_SYNCING and authentication.federated_service:
        orgname = kwargs['orgname']
        teamname = kwargs['teamname']
        if model.team.get_team_sync_information(orgname, teamname):
          # Permit the call only when robots are exempt AND the target member
          # is a robot account.
          member_is_robot = bool(parse_robot_username(kwargs.get('membername', '')))
          if not (except_robots and member_is_robot):
            raise InvalidRequest('Cannot call this method on an auth-synced team')

      return func(self, *args, **kwargs)
    return wrapper
  return inner
||||
|
||||
# Decorator aliases for auth-synced teams: the first still permits operations
# on robot-account members; the second blocks the operation entirely.
disallow_nonrobots_for_synced_team = disallow_for_synced_team(except_robots=True)
disallow_all_for_synced_team = disallow_for_synced_team(except_robots=False)
||||
|
||||
|
||||
@resource('/v1/organization/<orgname>/team/<teamname>')
@path_param('orgname', 'The name of the organization')
@path_param('teamname', 'The name of the team')
class OrganizationTeam(ApiResource):
  """ Resource for managing an organization's teams. """
  schemas = {
    'TeamDescription': {
      'type': 'object',
      'description': 'Description of a team',
      'required': [
        'role',
      ],
      'properties': {
        'role': {
          'type': 'string',
          'description': 'Org wide permissions that should apply to the team',
          'enum': [
            'member',
            'creator',
            'admin',
          ],
        },
        'description': {
          'type': 'string',
          'description': 'Markdown description for the team',
        },
      },
    },
  }

  @require_scope(scopes.ORG_ADMIN)
  @nickname('updateOrganizationTeam')
  @validate_json_request('TeamDescription')
  def put(self, orgname, teamname):
    """ Update the org-wide permission for the specified team.

        Creates the team when it does not yet exist; otherwise applies any
        description and/or role changes from the request body.
    """
    edit_permission = AdministerOrganizationPermission(orgname)
    if edit_permission.can():
      team = None

      details = request.get_json()
      is_existing = False
      try:
        team = model.team.get_organization_team(orgname, teamname)
        is_existing = True
      except model.InvalidTeamException:
        # Create the new team.
        description = details.get('description', '')
        role = details.get('role', 'member')

        org = model.organization.get_organization(orgname)
        team = model.team.create_team(teamname, org, role, description)
        log_action('org_create_team', orgname, {'team': teamname})

      if is_existing:
        # Only persist and log the description when it actually changed.
        if ('description' in details and
            team.description != details['description']):
          team.description = details['description']
          team.save()
          log_action('org_set_team_description', orgname,
                     {'team': teamname, 'description': team.description})

        if 'role' in details:
          # Likewise, only update the role when it differs from the current one.
          role = Team.role.get_name(team.role_id)
          if role != details['role']:
            team = model.team.set_team_org_permission(team, details['role'],
                                                      get_authenticated_user().username)
            log_action('org_set_team_role', orgname, {'team': teamname, 'role': details['role']})

      return team_view(orgname, team, is_new_team=not is_existing), 200

    raise Unauthorized()

  @require_scope(scopes.ORG_ADMIN)
  @nickname('deleteOrganizationTeam')
  def delete(self, orgname, teamname):
    """ Delete the specified team. """
    permission = AdministerOrganizationPermission(orgname)
    if permission.can():
      model.team.remove_team(orgname, teamname, get_authenticated_user().username)
      log_action('org_delete_team', orgname, {'team': teamname})
      return '', 204

    raise Unauthorized()
||||
|
||||
def _syncing_setup_allowed(orgname):
  """ Returns whether syncing setup is allowed for the current user over the matching org. """
  setup_permitted = (features.NONSUPERUSER_TEAM_SYNCING_SETUP or
                     SuperUserPermission().can())
  return setup_permitted and AdministerOrganizationPermission(orgname).can()
|
||||
|
||||
@resource('/v1/organization/<orgname>/team/<teamname>/syncing')
@path_param('orgname', 'The name of the organization')
@path_param('teamname', 'The name of the team')
@show_if(features.TEAM_SYNCING)
class OrganizationTeamSyncing(ApiResource):
  """ Resource for managing syncing of a team by a backing group. """

  @require_scope(scopes.ORG_ADMIN)
  @require_scope(scopes.SUPERUSER)
  @nickname('enableOrganizationTeamSync')
  @verify_not_prod
  @require_fresh_login
  def post(self, orgname, teamname):
    """ Enables syncing of the team from a backing auth-provider group. """
    if not _syncing_setup_allowed(orgname):
      raise Unauthorized()

    try:
      team = model.team.get_organization_team(orgname, teamname)
    except model.InvalidTeamException:
      raise NotFound()

    config = request.get_json()

    # Ensure that the specified config points to a valid group.
    status, err = authentication.check_group_lookup_args(config)
    if not status:
      raise InvalidRequest('Could not sync to group: %s' % err)

    # Set the team's syncing config.
    model.team.set_team_syncing(team, authentication.federated_service, config)

    return team_view(orgname, team)

  @require_scope(scopes.ORG_ADMIN)
  @require_scope(scopes.SUPERUSER)
  @nickname('disableOrganizationTeamSync')
  @verify_not_prod
  @require_fresh_login
  def delete(self, orgname, teamname):
    """ Disables group syncing for the team. """
    if not _syncing_setup_allowed(orgname):
      raise Unauthorized()

    try:
      team = model.team.get_organization_team(orgname, teamname)
    except model.InvalidTeamException:
      raise NotFound()

    model.team.remove_team_syncing(orgname, teamname)
    return team_view(orgname, team)
|
||||
@resource('/v1/organization/<orgname>/team/<teamname>/members')
@path_param('orgname', 'The name of the organization')
@path_param('teamname', 'The name of the team')
class TeamMemberList(ApiResource):
  """ Resource for managing the list of members for a team. """

  @require_scope(scopes.ORG_ADMIN)
  @parse_args()
  @query_param('includePending', 'Whether to include pending members', type=truthy_bool,
               default=False)
  @nickname('getOrganizationTeamMembers')
  def get(self, orgname, teamname, parsed_args):
    """ Retrieve the list of members for the specified team. """
    view_permission = ViewTeamPermission(orgname, teamname)
    edit_permission = AdministerOrganizationPermission(orgname)

    if not view_permission.can():
      raise Unauthorized()

    try:
      team = model.team.get_organization_team(orgname, teamname)
    except model.InvalidTeamException:
      raise NotFound()

    members = model.organization.get_organization_team_members(team.id)

    # Pending invites are only visible to org admins.
    invites = []
    if parsed_args['includePending'] and edit_permission.can():
      invites = model.team.get_organization_team_member_invites(team.id)

    member_views = [member_view(m) for m in members]
    invite_views = [invite_view(i) for i in invites]

    data = {
      'name': teamname,
      'members': member_views + invite_views,
      'can_edit': edit_permission.can(),
    }

    if features.TEAM_SYNCING and authentication.federated_service:
      if _syncing_setup_allowed(orgname):
        data['can_sync'] = {
          'service': authentication.federated_service,
        }
        data['can_sync'].update(authentication.service_metadata())

      sync_info = model.team.get_team_sync_information(orgname, teamname)
      if sync_info is not None:
        data['synced'] = {
          'service': sync_info.service.name,
        }

        # Sync config details are superuser-only.
        if SuperUserPermission().can():
          data['synced'].update({
            'last_updated': format_date(sync_info.last_updated),
            'config': json.loads(sync_info.config),
          })

    return data
||||
|
||||
@resource('/v1/organization/<orgname>/team/<teamname>/members/<membername>')
@path_param('orgname', 'The name of the organization')
@path_param('teamname', 'The name of the team')
@path_param('membername', 'The username of the team member')
class TeamMember(ApiResource):
  """ Resource for managing individual members of a team. """

  @require_scope(scopes.ORG_ADMIN)
  @nickname('updateOrganizationTeamMember')
  @disallow_nonrobots_for_synced_team
  def put(self, orgname, teamname, membername):
    """ Adds or invites a member to an existing team. """
    if not AdministerOrganizationPermission(orgname).can():
      raise Unauthorized()

    # Find the team.
    try:
      team = model.team.get_organization_team(orgname, teamname)
    except model.InvalidTeamException:
      raise NotFound()

    # Find the user.
    user = model.user.get_user(membername)
    if not user:
      raise request_error(message='Unknown user')

    # Add or invite the user to the team.
    inviter = get_authenticated_user()
    invite = handle_addinvite_team(inviter, team, user=user)
    if not invite:
      log_action('org_add_team_member', orgname, {'member': membername, 'team': teamname})
      return member_view(user, invited=False)

    # User was invited.
    log_action('org_invite_team_member', orgname, {
      'user': membername,
      'member': membername,
      'team': teamname
    })
    return member_view(user, invited=True)

  @require_scope(scopes.ORG_ADMIN)
  @nickname('deleteOrganizationTeamMember')
  @disallow_nonrobots_for_synced_team
  def delete(self, orgname, teamname, membername):
    """ Delete a member of a team. If the user is merely invited to join
        the team, then the invite is removed instead.
    """
    if not AdministerOrganizationPermission(orgname).can():
      raise Unauthorized()

    # Remove the user from the team.
    invoking_user = get_authenticated_user().username

    # Find the team.
    try:
      team = model.team.get_organization_team(orgname, teamname)
    except model.InvalidTeamException:
      raise NotFound()

    # Find the member.
    member = model.user.get_user(membername)
    if not member:
      raise NotFound()

    # First attempt to delete an invite for the user to this team. If none found,
    # then we try to remove the user directly.
    if model.team.delete_team_user_invite(team, member):
      log_action('org_delete_team_member_invite', orgname, {
        'user': membername,
        'team': teamname,
        'member': membername
      })
      return '', 204

    model.team.remove_user_from_team(orgname, teamname, membername, invoking_user)
    log_action('org_remove_team_member', orgname, {'member': membername, 'team': teamname})
    return '', 204
|
||||
|
||||
@resource('/v1/organization/<orgname>/team/<teamname>/invite/<email>')
@show_if(features.MAILING)
class InviteTeamMember(ApiResource):
  """ Resource for inviting a team member via email address. """

  @require_scope(scopes.ORG_ADMIN)
  @nickname('inviteTeamMemberEmail')
  @disallow_all_for_synced_team
  def put(self, orgname, teamname, email):
    """ Invites an email address to an existing team. """
    if not AdministerOrganizationPermission(orgname).can():
      raise Unauthorized()

    # Find the team.
    try:
      team = model.team.get_organization_team(orgname, teamname)
    except model.InvalidTeamException:
      raise NotFound()

    # Invite the email to the team.
    inviter = get_authenticated_user()
    invite = handle_addinvite_team(inviter, team, email=email)
    log_action('org_invite_team_member', orgname, {
      'email': email,
      'team': teamname,
      'member': email
    })
    return invite_view(invite)

  @require_scope(scopes.ORG_ADMIN)
  @nickname('deleteTeamMemberEmailInvite')
  def delete(self, orgname, teamname, email):
    """ Delete an invite of an email address to join a team. """
    if not AdministerOrganizationPermission(orgname).can():
      raise Unauthorized()

    # Find the team.
    try:
      team = model.team.get_organization_team(orgname, teamname)
    except model.InvalidTeamException:
      raise NotFound()

    # Delete the invite.
    if not model.team.delete_team_email_invite(team, email):
      raise NotFound()

    log_action('org_delete_team_member_invite', orgname, {
      'email': email,
      'team': teamname,
      'member': email
    })
    return '', 204
||||
|
||||
@resource('/v1/organization/<orgname>/team/<teamname>/permissions')
@path_param('orgname', 'The name of the organization')
@path_param('teamname', 'The name of the team')
class TeamPermissions(ApiResource):
  """ Resource for listing the permissions an org's team has in the system. """

  @nickname('getOrganizationTeamPermissions')
  def get(self, orgname, teamname):
    """ Returns the list of repository permissions for the org's team. """
    if not AdministerOrganizationPermission(orgname).can():
      raise Unauthorized()

    try:
      team = model.team.get_organization_team(orgname, teamname)
    except model.InvalidTeamException:
      raise NotFound()

    team_permissions = model.permission.list_team_permissions(team)
    return {
      'permissions': [permission_view(p) for p in team_permissions]
    }
|
||||
|
||||
@resource('/v1/teaminvite/<code>')
@internal_only
@show_if(features.MAILING)
class TeamMemberInvite(ApiResource):
  """ Resource for managing invites to join a team. """

  @require_user_admin
  @nickname('acceptOrganizationTeamInvite')
  def put(self, code):
    """ Accepts an invite to join a team in an organization. """
    # Accept the invite for the current user.
    team = try_accept_invite(code, get_authenticated_user())
    if not team:
      raise NotFound()

    return {
      'org': team.organization.username,
      'team': team.name
    }

  @nickname('declineOrganizationTeamInvite')
  @require_user_admin
  def delete(self, code):
    """ Delete an existing invitation to join a team. """
    current_user = get_authenticated_user()
    (team, inviter) = model.team.delete_team_invite(code, user_obj=current_user)

    model.notification.delete_matching_notifications(current_user, 'org_team_invite',
                                                     code=code)

    log_action('org_team_member_invite_declined', team.organization.username, {
      'member': current_user.username,
      'team': team.name,
      'inviter': inviter.username
    })

    return '', 204
0
endpoints/api/test/__init__.py
Normal file
0
endpoints/api/test/__init__.py
Normal file
11
endpoints/api/test/shared.py
Normal file
11
endpoints/api/test/shared.py
Normal file
|
@ -0,0 +1,11 @@
|
|||
from endpoints.test.shared import conduct_call
|
||||
from endpoints.api import api
|
||||
|
||||
def conduct_api_call(client, resource, method, params, body=None, expected_code=200, headers=None):
  """ Issues an API request against `resource` via `client` and asserts that the
      returned status matches `expected_code`.

      Returns the response.
  """
  response = conduct_call(client, resource, api.url_for, method, params, body,
                          expected_code, headers=headers)
  return response
50
endpoints/api/test/test_appspecifictoken.py
Normal file
50
endpoints/api/test/test_appspecifictoken.py
Normal file
|
@ -0,0 +1,50 @@
|
|||
from datetime import datetime, timedelta
|
||||
|
||||
from data import model
|
||||
from endpoints.api.appspecifictokens import AppTokens, AppToken
|
||||
from endpoints.api.test.shared import conduct_api_call
|
||||
from endpoints.test.shared import client_with_identity
|
||||
from test.fixtures import *
|
||||
|
||||
def test_app_specific_tokens(app, client):
  """ Exercises the full app-specific-token lifecycle: create, list, fetch,
      delete, and verify the token is gone afterwards.
  """
  with client_with_identity('devtable', client) as cl:
    # Add an app specific token.
    token_data = {'title': 'Testing 123'}
    created = conduct_api_call(cl, AppTokens, 'POST', None, token_data, 200).json
    token_uuid = created['token']['uuid']
    assert 'token_code' in created['token']

    # List the tokens and ensure we have the one added.
    listed = conduct_api_call(cl, AppTokens, 'GET', None, None, 200).json
    assert len(listed['tokens'])
    assert token_uuid in {token['uuid'] for token in listed['tokens']}
    # The listing endpoint must never leak token codes.
    assert not {token['token_code'] for token in listed['tokens'] if 'token_code' in token}

    # List the tokens expiring soon and ensure the one added is not present.
    expiring = conduct_api_call(cl, AppTokens, 'GET', {'expiring': True}, None, 200).json
    assert token_uuid not in {token['uuid'] for token in expiring['tokens']}

    # Get the token and ensure we have its code.
    fetched = conduct_api_call(cl, AppToken, 'GET', {'token_uuid': token_uuid}, None, 200).json
    assert fetched['token']['uuid'] == token_uuid
    assert 'token_code' in fetched['token']

    # Delete the token.
    conduct_api_call(cl, AppToken, 'DELETE', {'token_uuid': token_uuid}, None, 204)

    # Ensure the token no longer exists.
    remaining = conduct_api_call(cl, AppTokens, 'GET', None, None, 200).json
    assert len(remaining['tokens'])
    assert token_uuid not in {token['uuid'] for token in remaining['tokens']}

    conduct_api_call(cl, AppToken, 'GET', {'token_uuid': token_uuid}, None, 404)
|
||||
|
||||
def test_delete_expired_app_token(app, client):
  """ Ensures a token that has already expired can still be deleted. """
  user = model.user.get_user('devtable')
  already_expired = datetime.now() - timedelta(seconds=10)
  token = model.appspecifictoken.create_token(user, 'some token', already_expired)

  with client_with_identity('devtable', client) as cl:
    # Delete the token.
    conduct_api_call(cl, AppToken, 'DELETE', {'token_uuid': token.uuid}, None, 204)
20
endpoints/api/test/test_build.py
Normal file
20
endpoints/api/test/test_build.py
Normal file
|
@ -0,0 +1,20 @@
|
|||
import pytest
|
||||
|
||||
from endpoints.api.build import RepositoryBuildList
|
||||
|
||||
|
||||
@pytest.mark.parametrize('request_json,subdir,context', [
  ({}, '/Dockerfile', '/'),
  ({'context': '/some_context'}, '/some_context/Dockerfile', '/some_context'),
  ({'subdirectory': 'some_context'}, 'some_context/Dockerfile', 'some_context'),
  ({'subdirectory': 'some_context/'}, 'some_context/Dockerfile', 'some_context/'),
  ({'dockerfile_path': 'some_context/Dockerfile'}, 'some_context/Dockerfile', 'some_context'),
  ({'dockerfile_path': 'some_context/Dockerfile', 'context': '/'}, 'some_context/Dockerfile', '/'),
  ({'dockerfile_path': 'some_context/Dockerfile',
    'context': '/',
    'subdirectory': 'slime'}, 'some_context/Dockerfile', '/'),
])
def test_extract_dockerfile_args(request_json, subdir, context):
  """ Verifies context/subdirectory extraction for every accepted request shape,
      including the legacy `subdirectory` field and explicit `dockerfile_path`.
  """
  extracted_context, extracted_subdir = RepositoryBuildList.get_dockerfile_context(request_json)
  assert (extracted_context, extracted_subdir) == (context, subdir)
83
endpoints/api/test/test_disallow_for_apps.py
Normal file
83
endpoints/api/test/test_disallow_for_apps.py
Normal file
|
@ -0,0 +1,83 @@
|
|||
import pytest
|
||||
|
||||
from data import model
|
||||
from endpoints.api.repository import Repository
|
||||
from endpoints.api.build import (RepositoryBuildList, RepositoryBuildResource,
|
||||
RepositoryBuildStatus, RepositoryBuildLogs)
|
||||
from endpoints.api.image import RepositoryImageList, RepositoryImage
|
||||
from endpoints.api.manifest import RepositoryManifestLabels, ManageRepositoryManifestLabel
|
||||
from endpoints.api.repositorynotification import (RepositoryNotification,
|
||||
RepositoryNotificationList,
|
||||
TestRepositoryNotification)
|
||||
from endpoints.api.secscan import RepositoryImageSecurity, RepositoryManifestSecurity
|
||||
from endpoints.api.signing import RepositorySignatures
|
||||
from endpoints.api.tag import ListRepositoryTags, RepositoryTag, RepositoryTagImages, RestoreTag
|
||||
from endpoints.api.trigger import (BuildTriggerList, BuildTrigger, BuildTriggerSubdirs,
|
||||
BuildTriggerActivate, BuildTriggerAnalyze, ActivateBuildTrigger,
|
||||
TriggerBuildList, BuildTriggerFieldValues, BuildTriggerSources,
|
||||
BuildTriggerSourceNamespaces)
|
||||
from endpoints.api.test.shared import conduct_api_call
|
||||
from endpoints.test.shared import client_with_identity
|
||||
from test.fixtures import *
|
||||
|
||||
# URL-argument payloads shared by the parametrized cases below. The values are
# arbitrary: every request is expected to be rejected before any lookup occurs.
BUILD_ARGS = {'build_uuid': '1234'}
IMAGE_ARGS = {'imageid': '1234', 'image_id': 1234}
MANIFEST_ARGS = {'manifestref': 'sha256:abcd1234'}
LABEL_ARGS = {'manifestref': 'sha256:abcd1234', 'labelid': '1234'}
NOTIFICATION_ARGS = {'uuid': '1234'}
TAG_ARGS = {'tag': 'foobar'}
TRIGGER_ARGS = {'trigger_uuid': '1234'}
FIELD_ARGS = {'trigger_uuid': '1234', 'field_name': 'foobar'}
|
||||
|
||||
@pytest.mark.parametrize('resource, method, params', [
  (RepositoryBuildList, 'get', None),
  (RepositoryBuildList, 'post', None),
  (RepositoryBuildResource, 'get', BUILD_ARGS),
  (RepositoryBuildResource, 'delete', BUILD_ARGS),
  (RepositoryBuildStatus, 'get', BUILD_ARGS),
  (RepositoryBuildLogs, 'get', BUILD_ARGS),
  (RepositoryImageList, 'get', None),
  (RepositoryImage, 'get', IMAGE_ARGS),
  (RepositoryManifestLabels, 'get', MANIFEST_ARGS),
  (RepositoryManifestLabels, 'post', MANIFEST_ARGS),
  (ManageRepositoryManifestLabel, 'get', LABEL_ARGS),
  (ManageRepositoryManifestLabel, 'delete', LABEL_ARGS),
  (RepositoryNotificationList, 'get', None),
  (RepositoryNotificationList, 'post', None),
  (RepositoryNotification, 'get', NOTIFICATION_ARGS),
  (RepositoryNotification, 'delete', NOTIFICATION_ARGS),
  (RepositoryNotification, 'post', NOTIFICATION_ARGS),
  (TestRepositoryNotification, 'post', NOTIFICATION_ARGS),
  (RepositoryImageSecurity, 'get', IMAGE_ARGS),
  (RepositoryManifestSecurity, 'get', MANIFEST_ARGS),
  (RepositorySignatures, 'get', None),
  (ListRepositoryTags, 'get', None),
  (RepositoryTag, 'put', TAG_ARGS),
  (RepositoryTag, 'delete', TAG_ARGS),
  (RepositoryTagImages, 'get', TAG_ARGS),
  (RestoreTag, 'post', TAG_ARGS),
  (BuildTriggerList, 'get', None),
  (BuildTrigger, 'get', TRIGGER_ARGS),
  (BuildTrigger, 'delete', TRIGGER_ARGS),
  (BuildTriggerSubdirs, 'post', TRIGGER_ARGS),
  (BuildTriggerActivate, 'post', TRIGGER_ARGS),
  (BuildTriggerAnalyze, 'post', TRIGGER_ARGS),
  (ActivateBuildTrigger, 'post', TRIGGER_ARGS),
  (TriggerBuildList, 'get', TRIGGER_ARGS),
  (BuildTriggerFieldValues, 'post', FIELD_ARGS),
  (BuildTriggerSources, 'post', TRIGGER_ARGS),
  (BuildTriggerSourceNamespaces, 'get', TRIGGER_ARGS),
])
def test_disallowed_for_apps(resource, method, params, client):
  """ Image-centric endpoints return 501 (Not Implemented) on application repos. """
  namespace = 'devtable'
  repository = 'someapprepo'

  devtable = model.user.get_user('devtable')
  model.repository.create_repository(namespace, repository, devtable, repo_kind='application')

  # Fixed: copy the params dict before adding 'repository'. The original mutated
  # the shared module-level *_ARGS dicts, polluting them across parametrized runs.
  params = dict(params or {})
  params['repository'] = '%s/%s' % (namespace, repository)

  with client_with_identity('devtable', client) as cl:
    conduct_api_call(cl, resource, method, params, None, 501)
|
||||
|
64
endpoints/api/test/test_disallow_for_nonnormal.py
Normal file
64
endpoints/api/test/test_disallow_for_nonnormal.py
Normal file
|
@ -0,0 +1,64 @@
|
|||
import pytest
|
||||
|
||||
from data import model
|
||||
from data.database import RepositoryState
|
||||
from endpoints.api.build import RepositoryBuildList, RepositoryBuildResource
|
||||
from endpoints.api.manifest import RepositoryManifestLabels, ManageRepositoryManifestLabel
|
||||
from endpoints.api.tag import RepositoryTag, RestoreTag
|
||||
from endpoints.api.trigger import (BuildTrigger, BuildTriggerSubdirs,
|
||||
BuildTriggerActivate, BuildTriggerAnalyze, ActivateBuildTrigger,
|
||||
BuildTriggerFieldValues, BuildTriggerSources)
|
||||
from endpoints.api.test.shared import conduct_api_call
|
||||
from endpoints.test.shared import client_with_identity
|
||||
from test.fixtures import *
|
||||
|
||||
# Placeholder URL arguments for the endpoints under test; the state check is
# expected to reject each request before the referenced objects are resolved.
BUILD_ARGS = {'build_uuid': '1234'}
IMAGE_ARGS = {'imageid': '1234', 'image_id': 1234}
MANIFEST_ARGS = {'manifestref': 'sha256:abcd1234'}
LABEL_ARGS = {'manifestref': 'sha256:abcd1234', 'labelid': '1234'}
NOTIFICATION_ARGS = {'uuid': '1234'}
TAG_ARGS = {'tag': 'foobar'}
TRIGGER_ARGS = {'trigger_uuid': '1234'}
FIELD_ARGS = {'trigger_uuid': '1234', 'field_name': 'foobar'}
|
||||
|
||||
|
||||
@pytest.mark.parametrize('state', [
  RepositoryState.MIRROR,
  RepositoryState.READ_ONLY,
])
@pytest.mark.parametrize('resource, method, params', [
  (RepositoryBuildList, 'post', None),
  (RepositoryBuildResource, 'delete', BUILD_ARGS),

  (RepositoryManifestLabels, 'post', MANIFEST_ARGS),
  (ManageRepositoryManifestLabel, 'delete', LABEL_ARGS),

  (RepositoryTag, 'put', TAG_ARGS),
  (RepositoryTag, 'delete', TAG_ARGS),

  (RestoreTag, 'post', TAG_ARGS),

  (BuildTrigger, 'delete', TRIGGER_ARGS),
  (BuildTriggerSubdirs, 'post', TRIGGER_ARGS),
  (BuildTriggerActivate, 'post', TRIGGER_ARGS),
  (BuildTriggerAnalyze, 'post', TRIGGER_ARGS),
  (ActivateBuildTrigger, 'post', TRIGGER_ARGS),

  (BuildTriggerFieldValues, 'post', FIELD_ARGS),
  (BuildTriggerSources, 'post', TRIGGER_ARGS),
])
def test_disallowed_for_nonnormal(state, resource, method, params, client):
  """ Mutating endpoints return 503 on repos in MIRROR or READ_ONLY state. """
  namespace = 'devtable'
  repository = 'somenewstaterepo'

  devtable = model.user.get_user('devtable')
  repo = model.repository.create_repository(namespace, repository, devtable)
  repo.state = state
  repo.save()

  # Fixed: copy the params dict before adding 'repository'. The original mutated
  # the shared module-level *_ARGS dicts, polluting them across parametrized runs.
  params = dict(params or {})
  params['repository'] = '%s/%s' % (namespace, repository)

  with client_with_identity('devtable', client) as cl:
    conduct_api_call(cl, resource, method, params, None, 503)
|
63
endpoints/api/test/test_endtoend_auth.py
Normal file
63
endpoints/api/test/test_endtoend_auth.py
Normal file
|
@ -0,0 +1,63 @@
|
|||
import pytest
|
||||
|
||||
from mock import patch
|
||||
|
||||
from endpoints.api.search import EntitySearch, LinkExternalEntity
|
||||
from endpoints.api.test.shared import conduct_api_call
|
||||
from endpoints.test.shared import client_with_identity
|
||||
|
||||
from test.test_ldap import mock_ldap
|
||||
from test.test_external_jwt_authn import fake_jwt
|
||||
from test.test_keystone_auth import fake_keystone
|
||||
|
||||
from test.fixtures import *
|
||||
|
||||
|
||||
@pytest.fixture(params=[
  mock_ldap,
  fake_jwt,
  fake_keystone,
])
def auth_engine(request):
  """ Parametrized fixture yielding each external-auth engine factory in turn. """
  return request.param
|
||||
|
||||
|
||||
@pytest.fixture(params=[False, True])
def requires_email(request):
  """ Run each test both with and without the auth engine requiring an email. """
  return request.param
|
||||
|
||||
|
||||
def test_entity_search(auth_engine, requires_email, client):
  """ Entity search against an external auth engine finds known users only. """
  with auth_engine(requires_email=requires_email) as auth:
    with patch('endpoints.api.search.authentication', auth):
      # An unknown prefix yields no results.
      response = conduct_api_call(client, EntitySearch, 'GET', params=dict(prefix='unknown'))
      assert len(response.json['results']) == 0

      # A known prefix surfaces the external user.
      response = conduct_api_call(client, EntitySearch, 'GET', params=dict(prefix='cool'))
      entity = response.json['results'][0]
      assert entity['name'] == 'cool.user'
      assert entity['kind'] == 'external'
|
||||
|
||||
|
||||
def test_link_external_entity(auth_engine, requires_email, client):
  """ Linking an external user creates an internal user with a sanitized name. """
  with auth_engine(requires_email=requires_email) as auth:
    with patch('endpoints.api.search.authentication', auth):
      with client_with_identity('devtable', client) as cl:
        # Unknown external users cannot be linked.
        conduct_api_call(cl, LinkExternalEntity, 'POST', params=dict(username='unknownuser'),
                         expected_code=400)

        # Known user links successfully; dots are normalized to underscores.
        response = conduct_api_call(cl, LinkExternalEntity, 'POST',
                                    params=dict(username='cool.user'))
        entity = response.json['entity']
        assert entity['name'] == 'cool_user'
        assert entity['kind'] == 'user'
|
34
endpoints/api/test/test_logs.py
Normal file
34
endpoints/api/test/test_logs.py
Normal file
|
@ -0,0 +1,34 @@
|
|||
import os
|
||||
import time
|
||||
|
||||
from mock import patch
|
||||
|
||||
from app import export_action_logs_queue
|
||||
from endpoints.api.test.shared import conduct_api_call
|
||||
from endpoints.api.logs import ExportOrgLogs
|
||||
from endpoints.test.shared import client_with_identity
|
||||
|
||||
from test.fixtures import *
|
||||
|
||||
# NOTE(review): `pytest` does not appear in this file's visible imports; it is
# presumably brought in via `from test.fixtures import *` — confirm.
@pytest.mark.skipif('mysql' in os.environ.get('TEST_DATABASE_URI', ''),
                    reason="Queue code is very sensitive to times on MySQL, making this flaky")
def test_export_logs(client):
  """ POSTing an org log-export request places exactly one item on the queue. """
  with client_with_identity('devtable', client) as cl:
    # The queue starts empty.
    assert export_action_logs_queue.get() is None

    # Shift "now" two seconds into the past so the queued item is
    # immediately available to the queue's get() call below.
    timecode = time.time()
    def get_time():
      return timecode - 2

    with patch('time.time', get_time):
      body = {
        'callback_url': 'http://some/url',
        'callback_email': 'a@b.com',
      }

      conduct_api_call(cl, ExportOrgLogs, 'POST', {'orgname': 'buynlarge'},
                       body, expected_code=200)

    # The export request must now be queued.
    assert export_action_logs_queue.get() is not None
|
24
endpoints/api/test/test_manifest.py
Normal file
24
endpoints/api/test/test_manifest.py
Normal file
|
@ -0,0 +1,24 @@
|
|||
from data.registry_model import registry_model
|
||||
from endpoints.api.manifest import RepositoryManifest
|
||||
from endpoints.api.test.shared import conduct_api_call
|
||||
from endpoints.test.shared import client_with_identity
|
||||
|
||||
from test.fixtures import *
|
||||
|
||||
def test_repository_manifest(client):
  """ Each active tag's manifest is retrievable and round-trips its digest. """
  with client_with_identity('devtable', client) as cl:
    repo_ref = registry_model.lookup_repository('devtable', 'simple')
    for tag in registry_model.list_all_active_repository_tags(repo_ref):
      digest = tag.manifest_digest
      if digest is None:
        # Tags without a backing manifest cannot be fetched by manifestref.
        continue

      params = {
        'repository': 'devtable/simple',
        'manifestref': digest,
      }
      result = conduct_api_call(cl, RepositoryManifest, 'GET', params, None, 200).json
      assert result['digest'] == digest
      assert result['manifest_data']
      assert result['image']
|
230
endpoints/api/test/test_mirror.py
Normal file
230
endpoints/api/test/test_mirror.py
Normal file
|
@ -0,0 +1,230 @@
|
|||
from datetime import datetime
|
||||
|
||||
import pytest
|
||||
|
||||
from data import model
|
||||
from endpoints.api.test.shared import conduct_api_call
|
||||
from endpoints.api.mirror import RepoMirrorResource
|
||||
from endpoints.test.shared import client_with_identity
|
||||
|
||||
from test.fixtures import *
|
||||
|
||||
def _setup_mirror():
  """ Enable mirroring on devtable/simple with a fixed config and return the mirror row.

  Used by the GET/PUT tests below, which assert against these exact values.
  """
  repo = model.repository.get_repository('devtable', 'simple')
  assert repo
  robot = model.user.lookup_robot('devtable+dtrobot')
  assert robot
  rule = model.repo_mirror.create_rule(repo, ['latest', '3.3*', 'foo'])
  assert rule
  mirror_kwargs = {
    'is_enabled': True,
    'external_reference': 'quay.io/redhat/quay',
    'sync_interval': 5000,
    # Fixed: the original wrote datetime(2020, 01, 02, ...); leading-zero
    # integer literals are a SyntaxError on Python 3.
    'sync_start_date': datetime(2020, 1, 2, 6, 30, 0),
    'external_registry_username': 'fakeUsername',
    'external_registry_password': 'fakePassword',
    'external_registry_config': {
      'verify_tls': True,
      'proxy': {
        'http_proxy': 'http://insecure.proxy.corp',
        'https_proxy': 'https://secure.proxy.corp',
        'no_proxy': 'mylocalhost'
      }
    }
  }
  mirror = model.repo_mirror.enable_mirroring_for_repository(repo, root_rule=rule,
                                                             internal_robot=robot,
                                                             **mirror_kwargs)
  assert mirror
  return mirror
|
||||
|
||||
|
||||
@pytest.mark.parametrize('existing_robot_permission, expected_permission', [
  (None, 'write'),
  ('read', 'write'),
  ('write', 'write'),
  ('admin', 'admin'),
])
def test_create_mirror_sets_permissions(existing_robot_permission, expected_permission, client):
  """ Creating a mirror grants the robot at least write; admin is preserved. """
  mirror_bot, _ = model.user.create_robot('newmirrorbot', model.user.get_namespace_user('devtable'))

  if existing_robot_permission:
    model.permission.set_user_repo_permission(mirror_bot.username, 'devtable', 'simple',
                                              existing_robot_permission)

  with client_with_identity('devtable', client) as cl:
    params = {'repository': 'devtable/simple'}
    request_body = {
      'external_reference': 'quay.io/foobar/barbaz',
      'sync_interval': 100,
      'sync_start_date': '2019-08-20T17:51:00Z',
      'root_rule': {
        'rule_type': 'TAG_GLOB_CSV',
        'rule_value': ['latest', 'foo', 'bar']
      },
      'robot_username': 'devtable+newmirrorbot',
    }
    conduct_api_call(cl, RepoMirrorResource, 'POST', params, request_body, 201)

    # The robot's resulting permission must match expectations.
    permissions = model.permission.get_user_repository_permissions(mirror_bot, 'devtable',
                                                                   'simple')
    assert permissions[0].role.name == expected_permission

    # The root rule must have been persisted as supplied.
    config = model.repo_mirror.get_mirror(model.repository.get_repository('devtable', 'simple'))
    assert config.root_rule.rule_value == ['latest', 'foo', 'bar']
|
||||
|
||||
|
||||
def test_get_mirror_does_not_exist(client):
  """ GETting mirror config for a repo without one configured returns 404. """
  with client_with_identity('devtable', client) as cl:
    conduct_api_call(cl, RepoMirrorResource, 'GET', {'repository': 'devtable/simple'}, None, 404)
|
||||
|
||||
|
||||
def test_get_repo_does_not_exist(client):
  """ GETting mirror config for a nonexistent repository returns 404. """
  with client_with_identity('devtable', client) as cl:
    conduct_api_call(cl, RepoMirrorResource, 'GET', {'repository': 'devtable/unicorn'}, None, 404)
|
||||
|
||||
|
||||
def test_get_mirror(client):
  """ Verify that performing a `GET` request returns expected and accurate data. """
  _setup_mirror()

  with client_with_identity('devtable', client) as cl:
    params = {'repository': 'devtable/simple'}
    resp = conduct_api_call(cl, RepoMirrorResource, 'GET', params, None, 200).json

    assert resp['is_enabled'] == True
    assert resp['external_reference'] == 'quay.io/redhat/quay'
    assert resp['sync_interval'] == 5000
    assert resp['sync_start_date'] == '2020-01-02T06:30:00Z'
    assert resp['external_registry_username'] == 'fakeUsername'
    # The password must never be echoed back by the API.
    assert 'external_registry_password' not in resp

    assert 'external_registry_config' in resp
    registry_config = resp['external_registry_config']
    assert registry_config['verify_tls'] == True
    assert 'proxy' in registry_config
    proxy = registry_config['proxy']
    assert proxy['http_proxy'] == 'http://insecure.proxy.corp'
    assert proxy['https_proxy'] == 'https://secure.proxy.corp'
    assert proxy['no_proxy'] == 'mylocalhost'
|
||||
|
||||
|
||||
@pytest.mark.parametrize('key, value, expected_status', [

  ('is_enabled', True, 201),
  ('is_enabled', False, 201),
  ('is_enabled', None, 400),
  ('is_enabled', 'foo', 400),

  ('external_reference', 'example.com/foo/bar', 201),
  ('external_reference', 'example.com/foo', 201),
  ('external_reference', 'example.com', 201),

  ('external_registry_username', 'newTestUsername', 201),
  ('external_registry_username', None, 201),
  ('external_registry_username', 123, 400),

  ('external_registry_password', 'newTestPassword', 400),
  ('external_registry_password', None, 400),
  ('external_registry_password', 41, 400),

  ('robot_username', 'devtable+dtrobot', 201),
  ('robot_username', 'devtable+doesntExist', 400),

  ('sync_start_date', '2020-01-01T00:00:00Z', 201),
  ('sync_start_date', 'January 1 2020', 400),
  ('sync_start_date', '2020-01-01T00:00:00.00Z', 400),
  ('sync_start_date', 'Wed, 01 Jan 2020 00:00:00 -0000', 400),
  ('sync_start_date', 'Wed, 02 Oct 2002 08:00:00 EST', 400),

  ('sync_interval', 2000, 201),
  ('sync_interval', -5, 400),

  ('https_proxy', 'https://proxy.corp.example.com', 201),
  ('https_proxy', None, 201),
  ('https_proxy', 'proxy.example.com; rm -rf /', 201),  # Safe; values only set in env, not eval'ed

  ('http_proxy', 'http://proxy.corp.example.com', 201),
  ('http_proxy', None, 201),
  ('http_proxy', 'proxy.example.com; rm -rf /', 201),  # Safe; values only set in env, not eval'ed

  ('no_proxy', 'quay.io', 201),
  ('no_proxy', None, 201),
  ('no_proxy', 'quay.io; rm -rf /', 201),  # Safe because proxy values are not eval'ed

  ('verify_tls', True, 201),
  ('verify_tls', False, 201),
  ('verify_tls', None, 400),
  ('verify_tls', 'abc', 400),

])
def test_change_config(key, value, expected_status, client):
  """ Verify that changing each attribute works as expected. """
  _setup_mirror()

  with client_with_identity('devtable', client) as cl:
    params = {'repository': 'devtable/simple'}
    # Proxy settings and verify_tls are nested inside external_registry_config.
    if key in ('http_proxy', 'https_proxy', 'no_proxy'):
      request_body = {'external_registry_config': {'proxy': {key: value}}}
    elif key == 'verify_tls':
      request_body = {'external_registry_config': {key: value}}
    else:
      request_body = {key: value}
    conduct_api_call(cl, RepoMirrorResource, 'PUT', params, request_body, expected_status)

  with client_with_identity('devtable', client) as cl:
    params = {'repository': 'devtable/simple'}
    resp = conduct_api_call(cl, RepoMirrorResource, 'GET', params, None, 200)

    if key == 'external_registry_password':
      # Regardless of acceptance, the password is never echoed back.
      assert key not in resp.json
    else:
      # Locate the stored value wherever this key lives in the response.
      if key == 'verify_tls':
        stored = resp.json['external_registry_config']['verify_tls']
      elif key in ('http_proxy', 'https_proxy', 'no_proxy'):
        stored = resp.json['external_registry_config']['proxy'][key]
      else:
        stored = resp.json[key]

      if expected_status < 400:
        # Accepted values must have been persisted.
        assert stored == value
      else:
        # Rejected values must not have been persisted.
        assert stored != value
|
||||
|
||||
|
||||
@pytest.mark.parametrize('request_body, expected_status', [

  # Set a new password and username => Success
  ({'external_registry_username': 'newUsername',
    'external_registry_password': 'newPassword'}, 201),

  # Set password and username to None => Success
  ({'external_registry_username': None,
    'external_registry_password': None}, 201),

  # Set username to value but password None => Success
  ({'external_registry_username': 'myUsername',
    'external_registry_password': None}, 201),

  # Set only new Username => Success
  ({'external_registry_username': 'myNewUsername'}, 201),
  ({'external_registry_username': None}, 201),

  # Set only new Password => Failure
  ({'external_registry_password': 'myNewPassword'}, 400),
  ({'external_registry_password': None}, 400),

  # Set username and password to empty string => Success?
  ({'external_registry_username': '',
    'external_registry_password': ''}, 201),

])
def test_change_credentials(request_body, expected_status, client):
  """ Verify credentials can only be modified as a pair. """
  _setup_mirror()

  with client_with_identity('devtable', client) as cl:
    conduct_api_call(cl, RepoMirrorResource, 'PUT', {'repository': 'devtable/simple'},
                     request_body, expected_status)
|
38
endpoints/api/test/test_organization.py
Normal file
38
endpoints/api/test/test_organization.py
Normal file
|
@ -0,0 +1,38 @@
|
|||
import pytest
|
||||
|
||||
from data import model
|
||||
from endpoints.api import api
|
||||
from endpoints.api.test.shared import conduct_api_call
|
||||
from endpoints.api.organization import (Organization,
|
||||
OrganizationCollaboratorList)
|
||||
from endpoints.test.shared import client_with_identity
|
||||
from test.fixtures import *
|
||||
|
||||
|
||||
@pytest.mark.parametrize('expiration, expected_code', [
  (0, 200),
  (100, 400),
  (100000000000000000000, 400),
])
def test_change_tag_expiration(expiration, expected_code, client):
  """ Only valid tag-expiration values are accepted; out-of-range gets 400. """
  with client_with_identity('devtable', client) as cl:
    conduct_api_call(cl, Organization, 'PUT', {'orgname': 'buynlarge'},
                     body={'tag_expiration_s': expiration},
                     expected_code=expected_code)
|
||||
|
||||
|
||||
def test_get_organization_collaborators(client):
  """ Collaborators are outside users with repo access; members are excluded. """
  params = {'orgname': 'buynlarge'}

  with client_with_identity('devtable', client) as cl:
    resp = conduct_api_call(cl, OrganizationCollaboratorList, 'GET', params)

    collaborators = resp.json['collaborators']
    names = [c['name'] for c in collaborators]
    assert 'outsideorg' in names
    assert 'devtable' not in names
    assert 'reader' not in names

    # The collaborator's repository list reflects only repos they can access.
    for collaborator in collaborators:
      if collaborator['name'] == 'outsideorg':
        assert 'orgrepo' in collaborator['repositories']
        assert 'anotherorgrepo' not in collaborator['repositories']
|
23
endpoints/api/test/test_permission.py
Normal file
23
endpoints/api/test/test_permission.py
Normal file
|
@ -0,0 +1,23 @@
|
|||
import pytest
|
||||
|
||||
from endpoints.api.test.shared import conduct_api_call
|
||||
from endpoints.api.permission import RepositoryUserPermission
|
||||
from endpoints.test.shared import client_with_identity
|
||||
from test.fixtures import *
|
||||
|
||||
@pytest.mark.parametrize('repository, username, expected_code', [
  pytest.param('devtable/simple', 'public', 200, id='valid user under user'),
  pytest.param('devtable/simple', 'devtable+dtrobot', 200, id='valid robot under user'),
  pytest.param('devtable/simple', 'buynlarge+coolrobot', 400, id='invalid robot under user'),
  pytest.param('buynlarge/orgrepo', 'devtable', 200, id='valid user under org'),
  pytest.param('buynlarge/orgrepo', 'devtable+dtrobot', 400, id='invalid robot under org'),
  pytest.param('buynlarge/orgrepo', 'buynlarge+coolrobot', 200, id='valid robot under org'),
])
def test_robot_permission(repository, username, expected_code, client):
  """ Robots may only be granted permission on repos within their own namespace. """
  with client_with_identity('devtable', client) as cl:
    conduct_api_call(cl, RepositoryUserPermission, 'PUT',
                     {'repository': repository, 'username': username},
                     body={'role': 'read'},
                     expected_code=expected_code)
|
89
endpoints/api/test/test_repoemail_models_pre_oci.py
Normal file
89
endpoints/api/test/test_repoemail_models_pre_oci.py
Normal file
|
@ -0,0 +1,89 @@
|
|||
import pytest
|
||||
from mock import Mock
|
||||
|
||||
import util
|
||||
from data import model
|
||||
from endpoints.api.repoemail_models_interface import RepositoryAuthorizedEmail
|
||||
from endpoints.api.repoemail_models_pre_oci import pre_oci_model
|
||||
|
||||
|
||||
@pytest.fixture
def get_monkeypatch(monkeypatch):
  """ Alias for pytest's monkeypatch fixture under a file-local name. """
  return monkeypatch
|
||||
|
||||
|
||||
def return_none(name, repo, email):
  """ Stand-in for a data-layer lookup that finds nothing. """
  return None
|
||||
|
||||
|
||||
def get_return_mock(mock):
  """ Build a three-argument stub that always returns the given object. """
  def return_mock(name, repo, email):
    return mock

  return return_mock
|
||||
|
||||
|
||||
def test_get_email_authorized_for_repo(get_monkeypatch):
  """ The pre-OCI model delegates the lookup to the data layer unchanged. """
  data_mock = Mock()
  get_monkeypatch.setattr(model.repository, 'get_email_authorized_for_repo', data_mock)

  pre_oci_model.get_email_authorized_for_repo('namespace_name', 'repository_name', 'email')

  data_mock.assert_called_once_with('namespace_name', 'repository_name', 'email')
|
||||
|
||||
|
||||
def test_get_email_authorized_for_repo_return_none(get_monkeypatch):
  """ A missing data-layer record propagates as None. """
  get_monkeypatch.setattr(model.repository, 'get_email_authorized_for_repo', return_none)

  result = pre_oci_model.get_email_authorized_for_repo('namespace_name', 'repository_name',
                                                       'email')
  assert result is None
|
||||
|
||||
|
||||
def test_get_email_authorized_for_repo_return_repo(get_monkeypatch):
  """ A data-layer record is converted into a RepositoryAuthorizedEmail tuple. """
  record = Mock(confirmed=True, code='code')
  get_monkeypatch.setattr(model.repository, 'get_email_authorized_for_repo',
                          get_return_mock(record))

  actual = pre_oci_model.get_email_authorized_for_repo('namespace_name', 'repository_name',
                                                       'email')

  assert actual == RepositoryAuthorizedEmail('email', 'repository_name', 'namespace_name', True,
                                             'code')
|
||||
|
||||
|
||||
def test_create_email_authorization_for_repo(get_monkeypatch):
  """ Creation delegates to the data layer with the same arguments. """
  data_mock = Mock()
  get_monkeypatch.setattr(model.repository, 'create_email_authorization_for_repo', data_mock)

  pre_oci_model.create_email_authorization_for_repo('namespace_name', 'repository_name', 'email')

  data_mock.assert_called_once_with('namespace_name', 'repository_name', 'email')
|
||||
|
||||
|
||||
def test_create_email_authorization_for_repo_return_none(get_monkeypatch):
  """ A None result from the data layer is passed straight through. """
  get_monkeypatch.setattr(model.repository, 'create_email_authorization_for_repo', return_none)

  result = pre_oci_model.create_email_authorization_for_repo('namespace_name', 'repository_name',
                                                             'email')
  assert result is None
|
||||
|
||||
|
||||
def test_create_email_authorization_for_repo_return_mock(get_monkeypatch):
  """ A non-None data-layer result yields a non-None model result. """
  record = Mock()
  get_monkeypatch.setattr(model.repository, 'create_email_authorization_for_repo',
                          get_return_mock(record))

  result = pre_oci_model.create_email_authorization_for_repo('namespace_name', 'repository_name',
                                                             'email')
  assert result is not None
|
||||
|
||||
|
||||
def test_create_email_authorization_for_repo_return_value(get_monkeypatch):
  """ The created record is converted into a RepositoryAuthorizedEmail tuple. """
  record = Mock(confirmed=False, code='code')
  get_monkeypatch.setattr(model.repository, 'create_email_authorization_for_repo',
                          get_return_mock(record))

  actual = pre_oci_model.create_email_authorization_for_repo('namespace_name', 'repository_name',
                                                             'email')

  assert actual == RepositoryAuthorizedEmail('email', 'repository_name', 'namespace_name', False,
                                             'code')
|
166
endpoints/api/test/test_repository.py
Normal file
166
endpoints/api/test/test_repository.py
Normal file
|
@ -0,0 +1,166 @@
|
|||
import pytest
|
||||
|
||||
from mock import patch, ANY, MagicMock
|
||||
|
||||
from data import model, database
|
||||
from data.appr_model import release, channel, blob
|
||||
from endpoints.appr.models_cnr import model as appr_model
|
||||
from endpoints.api.test.shared import conduct_api_call
|
||||
from endpoints.api.repository import RepositoryTrust, Repository, RepositoryList
|
||||
from endpoints.test.shared import client_with_identity
|
||||
from features import FeatureNameValue
|
||||
|
||||
from test.fixtures import *
|
||||
|
||||
|
||||
@pytest.mark.parametrize('trust_enabled,repo_found,expected_status', [
  (True, True, 200),
  (False, True, 200),
  (False, False, 404),
  ('invalid_req', False, 400),
])
def test_post_changetrust(trust_enabled, repo_found, expected_status, client):
  """ Toggling trust validates the body and 404s on a missing repository. """
  with patch('endpoints.api.repository.tuf_metadata_api') as mock_tuf, \
       patch('endpoints.api.repository_models_pre_oci.model.repository.get_repository') \
           as mock_model:
    mock_model.return_value = MagicMock() if repo_found else None
    mock_tuf.get_default_tags_with_expiration.return_value = ['tags', 'expiration']
    with client_with_identity('devtable', client) as cl:
      conduct_api_call(cl, RepositoryTrust, 'POST', {'repository': 'devtable/repo'},
                       {'trust_enabled': trust_enabled}, expected_status)
|
||||
|
||||
|
||||
def test_signing_disabled(client):
  """ With the SIGNING feature off, repositories report trust_enabled=False. """
  with patch('features.SIGNING', FeatureNameValue('SIGNING', False)):
    with client_with_identity('devtable', client) as cl:
      response = conduct_api_call(cl, Repository, 'GET', {'repository': 'devtable/simple'}).json
      assert not response['trust_enabled']
|
||||
|
||||
|
||||
def test_list_starred_repos(client):
  """ The starred listing tracks stars and hides repos made private. """
  with client_with_identity('devtable', client) as cl:
    params = {
      'starred': 'true',
    }

    def starred_repos():
      # Return the 'namespace/name' strings of the current starred listing.
      listing = conduct_api_call(cl, RepositoryList, 'GET', params).json
      return {r['namespace'] + '/' + r['name'] for r in listing['repositories']}

    repos = starred_repos()
    assert 'devtable/simple' in repos
    assert 'public/publicrepo' not in repos

    # Add a star on publicrepo.
    publicrepo = model.repository.get_repository('public', 'publicrepo')
    model.repository.star_repository(model.user.get_user('devtable'), publicrepo)

    # Ensure publicrepo shows up.
    repos = starred_repos()
    assert 'devtable/simple' in repos
    assert 'public/publicrepo' in repos

    # Make publicrepo private and ensure it disappears.
    model.repository.set_repository_visibility(publicrepo, 'private')

    repos = starred_repos()
    assert 'devtable/simple' in repos
    assert 'public/publicrepo' not in repos
|
||||
|
||||
|
||||
def test_list_repositories_last_modified(client):
  """ last_modified is populated for every repo that has ever been pushed. """
  with client_with_identity('devtable', client) as cl:
    params = {
      'namespace': 'devtable',
      'last_modified': 'true',
    }

    response = conduct_api_call(cl, RepositoryList, 'GET', params).json

    # 'building' has no pushes in the fixture data, so it is exempt.
    for repo in response['repositories']:
      if repo['name'] != 'building':
        assert repo['last_modified'] is not None
|
||||
|
||||
|
||||
@pytest.mark.parametrize('repo_name, expected_status', [
  pytest.param('x' * 255, 201, id='Maximum allowed length'),
  pytest.param('x' * 256, 400, id='Over allowed length'),
  pytest.param('a|b', 400, id='Invalid name'),
])
def test_create_repository(repo_name, expected_status, client):
  """Repository creation enforces the 255-character name limit and name charset."""
  with client_with_identity('devtable', client) as cl:
    body = {
      'namespace': 'devtable',
      'repository': repo_name,
      'visibility': 'public',
      'description': 'foo',
    }

    # Fix: use the identity-bearing client `cl` from the context manager rather
    # than the raw `client` fixture, so the call is unambiguously authenticated
    # and consistent with the rest of this module.
    result = conduct_api_call(cl, RepositoryList, 'post', None, body,
                              expected_code=expected_status).json
    if expected_status == 201:
      assert result['name'] == repo_name
      assert model.repository.get_repository('devtable', repo_name).name == repo_name
|
||||
|
||||
|
||||
@pytest.mark.parametrize('has_tag_manifest', [True, False])
def test_get_repo(has_tag_manifest, client, initialized_db):
  """Fetching a repo reports kind 'image' with or without legacy tag-manifest rows."""
  with client_with_identity('devtable', client) as cl:
    if not has_tag_manifest:
      # Remove all legacy tag-manifest rows; order respects FK references.
      for table in (database.TagManifestLabelMap, database.TagManifestToManifest,
                    database.TagManifestLabel, database.TagManifest):
        table.delete().execute()

    response = conduct_api_call(cl, Repository, 'GET',
                                {'repository': 'devtable/simple'}).json
    assert response['kind'] == 'image'
|
||||
|
||||
|
||||
def test_get_app_repo(client, initialized_db):
  """An application-kind repo exposes its channels and releases via the API."""
  with client_with_identity('devtable', client) as cl:
    devtable = model.user.get_user('devtable')
    repo = model.repository.create_repository('devtable', 'someappr', devtable,
                                              repo_kind='application')

    models_ref = appr_model.models_ref

    # Seed a blob, a release pointing at it, and a channel tracking that release.
    blob.get_or_create_blob('sha256:somedigest', 0, 'application/vnd.cnr.blob.v0.tar+gzip',
                            ['local_us'], models_ref)
    release.create_app_release(repo, 'test',
                               dict(mediaType='application/vnd.cnr.package-manifest.helm.v0.json'),
                               'sha256:somedigest', models_ref, False)
    channel.create_or_update_channel(repo, 'somechannel', 'test', models_ref)

    response = conduct_api_call(cl, Repository, 'GET',
                                {'repository': 'devtable/someappr'}).json
    assert response['kind'] == 'application'
    assert response['channels']
    assert response['releases']
|
||||
|
||||
|
||||
|
||||
@pytest.mark.parametrize('state, can_write', [
  (database.RepositoryState.NORMAL, True),
  (database.RepositoryState.READ_ONLY, False),
  (database.RepositoryState.MIRROR, False),
])
def test_get_repo_state_can_write(state, can_write, client, initialized_db):
  """`can_write` is true only while the repository is in the NORMAL state."""
  params = {'repository': 'devtable/simple'}

  # The repo starts in the NORMAL state, so writes are allowed.
  with client_with_identity('devtable', client) as cl:
    assert conduct_api_call(cl, Repository, 'GET', params).json['can_write']

  repo = model.repository.get_repository('devtable', 'simple')
  repo.state = state
  repo.save()

  # After the state change, can_write must reflect the parametrized expectation.
  with client_with_identity('devtable', client) as cl:
    response = conduct_api_call(cl, Repository, 'GET', params).json
    assert response['can_write'] == can_write
|
90
endpoints/api/test/test_repositorynotification.py
Normal file
90
endpoints/api/test/test_repositorynotification.py
Normal file
|
@ -0,0 +1,90 @@
|
|||
import pytest
|
||||
|
||||
from mock import Mock, MagicMock
|
||||
|
||||
from endpoints.api.test.shared import conduct_api_call
|
||||
from endpoints.api.repositorynotification import RepositoryNotificationList, RepositoryNotification, TestRepositoryNotification
|
||||
from endpoints.test.shared import client_with_identity
|
||||
import endpoints.api.repositorynotification_models_interface as iface
|
||||
from test.fixtures import *
|
||||
|
||||
@pytest.fixture()
def authd_client(client):
  """Yield the test client pre-authenticated as the 'devtable' user."""
  with client_with_identity('devtable', client) as authed:
    yield authed
|
||||
|
||||
def mock_get_notification(uuid):
  """Build a MagicMock model call: 'exists' resolves to a notification, else None."""
  mocked = MagicMock(iface.RepositoryNotification)
  if uuid != 'exists':
    mocked.return_value = None
  else:
    mocked.return_value = iface.RepositoryNotification(
      'exists',
      'title',
      'event_name',
      'method_name',
      'config_json',
      'event_config_json',
      2,
    )
  return mocked
|
||||
|
||||
def _webhook_body(event):
  # Minimal valid webhook-notification creation payload for the given event.
  return dict(config={'url': 'http://example.com'}, event=event,
              method='webhook', eventConfig={}, title='test')


@pytest.mark.parametrize('namespace,repository,body,expected_code', [
  ('devtable', 'simple', _webhook_body('repo_push'), 201),
  ('devtable', 'simple', _webhook_body('repo_mirror_sync_started'), 201),
  ('devtable', 'simple', _webhook_body('repo_mirror_sync_success'), 201),
  ('devtable', 'simple', _webhook_body('repo_mirror_sync_failed'), 201),
])
def test_create_repo_notification(namespace, repository, body, expected_code, authd_client):
  """Creating webhook notifications for push and mirror-sync events succeeds."""
  params = {'repository': '%s/%s' % (namespace, repository)}
  conduct_api_call(authd_client, RepositoryNotificationList, 'POST', params, body,
                   expected_code=expected_code)
|
||||
|
||||
@pytest.mark.parametrize('namespace,repository,expected_code', [
  ('devtable', 'simple', 200),
])
def test_list_repo_notifications(namespace, repository, expected_code, authd_client):
  """Listing notifications on a seeded repo returns a non-empty collection."""
  params = {'repository': '%s/%s' % (namespace, repository)}
  resp = conduct_api_call(authd_client, RepositoryNotificationList, 'GET', params,
                          expected_code=expected_code).json
  assert len(resp['notifications']) > 0
|
||||
|
||||
@pytest.mark.parametrize('namespace,repository,uuid,expected_code', [
  ('devtable', 'simple', 'exists', 200),
  ('devtable', 'simple', 'not found', 404),
])
def test_get_repo_notification(namespace, repository, uuid, expected_code, authd_client,
                               monkeypatch):
  """GET of a notification returns 200 when found and 404 otherwise."""
  monkeypatch.setattr('endpoints.api.repositorynotification.model.get_repo_notification',
                      mock_get_notification(uuid))
  params = {'repository': '%s/%s' % (namespace, repository), 'uuid': uuid}
  conduct_api_call(authd_client, RepositoryNotification, 'GET', params,
                   expected_code=expected_code)
|
||||
|
||||
@pytest.mark.parametrize('namespace,repository,uuid,expected_code', [
  ('devtable', 'simple', 'exists', 204),
  ('devtable', 'simple', 'not found', 400),
])
def test_delete_repo_notification(namespace, repository, uuid, expected_code, authd_client,
                                  monkeypatch):
  """DELETE of a notification returns 204 when found and 400 otherwise."""
  monkeypatch.setattr('endpoints.api.repositorynotification.model.delete_repo_notification',
                      mock_get_notification(uuid))
  params = {'repository': '%s/%s' % (namespace, repository), 'uuid': uuid}
  conduct_api_call(authd_client, RepositoryNotification, 'DELETE', params,
                   expected_code=expected_code)
|
||||
|
||||
|
||||
@pytest.mark.parametrize('namespace,repository,uuid,expected_code', [
  ('devtable', 'simple', 'exists', 204),
  ('devtable', 'simple', 'not found', 400),
])
def test_reset_repo_notification(namespace, repository, uuid, expected_code, authd_client,
                                 monkeypatch):
  """POST (failure-counter reset) returns 204 when found and 400 otherwise.

  Fix: test name corrected from the misspelled 'test_reset_repo_noticiation';
  pytest discovers tests by name, so nothing else references it.
  """
  monkeypatch.setattr(
      'endpoints.api.repositorynotification.model.reset_notification_number_of_failures',
      mock_get_notification(uuid))
  params = {'repository': '%s/%s' % (namespace, repository), 'uuid': uuid}
  conduct_api_call(authd_client, RepositoryNotification, 'POST', params,
                   expected_code=expected_code)
|
||||
|
||||
|
||||
@pytest.mark.parametrize('namespace,repository,uuid,expected_code', [
  ('devtable', 'simple', 'exists', 200),
  ('devtable', 'simple', 'not found', 400),
])
def test_test_repo_notification(namespace, repository, uuid, expected_code, authd_client,
                                monkeypatch):
  """Queuing a test notification returns 200 when found and 400 otherwise."""
  monkeypatch.setattr('endpoints.api.repositorynotification.model.queue_test_notification',
                      mock_get_notification(uuid))
  params = {'repository': '%s/%s' % (namespace, repository), 'uuid': uuid}
  conduct_api_call(authd_client, TestRepositoryNotification, 'POST', params,
                   expected_code=expected_code)
|
104
endpoints/api/test/test_robot.py
Normal file
104
endpoints/api/test/test_robot.py
Normal file
|
@ -0,0 +1,104 @@
|
|||
import pytest
|
||||
import json
|
||||
|
||||
from data import model
|
||||
from endpoints.api import api
|
||||
from endpoints.api.test.shared import conduct_api_call
|
||||
from endpoints.api.robot import UserRobot, OrgRobot, UserRobotList, OrgRobotList
|
||||
from endpoints.test.shared import client_with_identity
|
||||
from util.names import parse_robot_username
|
||||
|
||||
from test.test_ldap import mock_ldap
|
||||
|
||||
from test.fixtures import *
|
||||
|
||||
@pytest.mark.parametrize('endpoint', [UserRobot, OrgRobot])
@pytest.mark.parametrize('body', [
  {},
  {'description': 'this is a description'},
  {'unstructured_metadata': {'foo': 'bar'}},
  {'description': 'this is a description', 'unstructured_metadata': {'foo': 'bar'}},
])
def test_create_robot_with_metadata(endpoint, body, client):
  """Robot creation persists the optional description and unstructured metadata."""
  with client_with_identity('devtable', client) as cl:
    robot_params = {'orgname': 'buynlarge', 'robot_shortname': 'somebot'}

    # Create the robot with the parametrized body.
    conduct_api_call(cl, endpoint, 'PUT', robot_params, body, expected_code=201)

    # Read it back and verify the metadata round-tripped.
    resp = conduct_api_call(cl, endpoint, 'GET', robot_params)

    body = body or {}
    assert resp.json['description'] == (body.get('description') or '')
    assert resp.json['unstructured_metadata'] == (body.get('unstructured_metadata') or {})
|
||||
|
||||
|
||||
@pytest.mark.parametrize('endpoint, params', [
  (UserRobot, {'robot_shortname': 'dtrobot'}),
  (OrgRobot, {'orgname': 'buynlarge', 'robot_shortname': 'coolrobot'}),
])
def test_retrieve_robot(endpoint, params, app, client):
  """Fetching an existing robot returns its token."""
  with client_with_identity('devtable', client) as cl:
    resp = conduct_api_call(cl, endpoint, 'GET', params, None)
    assert resp.json['token'] is not None
|
||||
|
||||
|
||||
@pytest.mark.parametrize('endpoint, params, bot_endpoint', [
  (UserRobotList, {}, UserRobot),
  (OrgRobotList, {'orgname': 'buynlarge'}, OrgRobot),
])
@pytest.mark.parametrize('include_token', [True, False])
@pytest.mark.parametrize('limit', [None, 1, 5])
def test_retrieve_robots(endpoint, params, bot_endpoint, include_token, limit, app, client):
  """Robot listings honor the token flag and limit, and tokens match direct lookups."""
  params['token'] = 'true' if include_token else 'false'
  if limit is not None:
    params['limit'] = limit

  with client_with_identity('devtable', client) as cl:
    listing = conduct_api_call(cl, endpoint, 'GET', params, None)

    if limit is not None:
      assert len(listing.json['robots']) <= limit

    for robot in listing.json['robots']:
      # Tokens appear in the listing iff they were requested.
      assert (robot.get('token') is not None) == include_token
      if include_token:
        # Cross-check the listed token against the single-robot endpoint.
        lookup_params = dict(params)
        lookup_params['robot_shortname'] = parse_robot_username(robot['name'])[1]
        lookup = conduct_api_call(cl, bot_endpoint, 'GET', lookup_params, None)
        assert robot.get('token') == lookup.json['token']
|
||||
|
||||
|
||||
@pytest.mark.parametrize('username, is_admin', [
  ('devtable', True),
  ('reader', False),
])
@pytest.mark.parametrize('with_permissions', [True, False])
def test_retrieve_robots_token_permission(username, is_admin, with_permissions, app, client):
  """Robot tokens and permission details are exposed only to org admins."""
  with client_with_identity(username, client) as cl:
    query = {'orgname': 'buynlarge', 'token': 'true'}
    if with_permissions:
      query['permissions'] = 'true'

    listing = conduct_api_call(cl, OrgRobotList, 'GET', query, None)
    assert listing.json['robots']
    for robot in listing.json['robots']:
      # Only admins may see tokens; repositories require admin AND the flag.
      assert (robot.get('token') is not None) == is_admin
      assert (robot.get('repositories') is not None) == (is_admin and with_permissions)
|
41
endpoints/api/test/test_search.py
Normal file
41
endpoints/api/test/test_search.py
Normal file
|
@ -0,0 +1,41 @@
|
|||
import pytest
|
||||
|
||||
from playhouse.test_utils import assert_query_count
|
||||
|
||||
from data import model, database
|
||||
from endpoints.api.search import ConductRepositorySearch, ConductSearch
|
||||
from endpoints.api.test.shared import conduct_api_call
|
||||
from endpoints.test.shared import client_with_identity
|
||||
from test.fixtures import *
|
||||
|
||||
@pytest.mark.parametrize('query', ['', 'simple', 'public', 'repository'])
def test_repository_search(query, client):
  """Repository search returns page 1 results within a fixed query budget."""
  # Prime the repo-kind caches so the query count below is deterministic.
  database.Repository.kind.get_id('image')
  database.Repository.kind.get_name(1)

  with client_with_identity('devtable', client) as cl:
    with assert_query_count(7):
      result = conduct_api_call(cl, ConductRepositorySearch, 'GET',
                                {'query': query}, None, 200).json
    assert result['start_index'] == 0
    assert result['page'] == 1
    assert len(result['results'])
|
||||
|
||||
|
||||
@pytest.mark.parametrize('query', ['simple', 'public', 'repository'])
def test_search_query_count(query, client):
  """Global search stays within its fixed query budget and returns results."""
  with client_with_identity('devtable', client) as cl:
    with assert_query_count(10):
      result = conduct_api_call(cl, ConductSearch, 'GET',
                                {'query': query}, None, 200).json
    assert len(result['results'])
|
30
endpoints/api/test/test_secscan.py
Normal file
30
endpoints/api/test/test_secscan.py
Normal file
|
@ -0,0 +1,30 @@
|
|||
import base64
|
||||
|
||||
import pytest
|
||||
|
||||
from data.registry_model import registry_model
|
||||
from endpoints.api.test.shared import conduct_api_call
|
||||
from endpoints.api.secscan import RepositoryImageSecurity, RepositoryManifestSecurity
|
||||
|
||||
from test.fixtures import *
|
||||
|
||||
@pytest.mark.parametrize('endpoint', [
  RepositoryImageSecurity,
  RepositoryManifestSecurity,
])
def test_get_security_info_with_pull_secret(endpoint, client):
  """Security endpoints should accept HTTP Basic credentials instead of a session."""
  repository_ref = registry_model.lookup_repository('devtable', 'simple')
  tag = registry_model.get_repo_tag(repository_ref, 'latest', include_legacy_image=True)
  manifest = registry_model.get_manifest_for_tag(tag, backfill_if_necessary=True)

  # Both endpoints share the same params; each uses only the key it needs.
  params = {
    'repository': 'devtable/simple',
    'imageid': tag.legacy_image.docker_image_id,
    'manifestref': manifest.digest,
  }

  # NOTE(review): b64encode on a str is Python 2 semantics; under Python 3 this
  # would need bytes ('devtable:password'.encode()) — confirm target runtime.
  headers = {
    'Authorization': 'Basic %s' % base64.b64encode('devtable:password'),
  }

  conduct_api_call(client, endpoint, 'GET', params, None, headers=headers, expected_code=200)
|
1485
endpoints/api/test/test_security.py
Normal file
1485
endpoints/api/test/test_security.py
Normal file
File diff suppressed because it is too large
Load diff
55
endpoints/api/test/test_signing.py
Normal file
55
endpoints/api/test/test_signing.py
Normal file
|
@ -0,0 +1,55 @@
|
|||
import pytest
|
||||
|
||||
from collections import Counter
|
||||
from mock import patch
|
||||
|
||||
from endpoints.api.test.shared import conduct_api_call
|
||||
from endpoints.api.signing import RepositorySignatures
|
||||
from endpoints.test.shared import client_with_identity
|
||||
|
||||
from test.fixtures import *
|
||||
|
||||
# Sample TUF targets metadata keyed by delegation role, shaped like the return
# of tuf_metadata_api.get_all_tags_with_expiration: each role maps tag names to
# their hashes/length plus an expiration timestamp.
VALID_TARGETS_MAP = {
  "targets/ci": {
    "targets": {
      "latest": {
        "hashes": {
          "sha256": "2Q8GLEgX62VBWeL76axFuDj/Z1dd6Zhx0ZDM6kNwPkQ="
        },
        "length": 2111
      }
    },
    "expiration": "2020-05-22T10:26:46.618176424-04:00"
  },
  "targets": {
    "targets": {
      "latest": {
        "hashes": {
          "sha256": "2Q8GLEgX62VBWeL76axFuDj/Z1dd6Zhx0ZDM6kNwPkQ="
        },
        "length": 2111
      }
    },
    "expiration": "2020-05-22T10:26:01.953414888-04:00"}
}
|
||||
|
||||
|
||||
def tags_equal(expected, actual):
  """Compare two signature payloads, ignoring delegation ordering.

  When both payloads carry a truthy 'delegations' mapping, compare those
  mappings as unordered multisets; otherwise fall back to plain equality.
  """
  expected_delegations = expected.get('delegations')
  actual_delegations = actual.get('delegations')
  if not (expected_delegations and actual_delegations):
    return expected == actual
  return Counter(expected_delegations) == Counter(actual_delegations)
|
||||
|
||||
@pytest.mark.parametrize('targets_map,expected', [
  (VALID_TARGETS_MAP, {'delegations': VALID_TARGETS_MAP}),
  ({'bad': 'tags'}, {'delegations': {'bad': 'tags'}}),
  ({}, {'delegations': {}}),
  (None, {'delegations': None}),  # API returns None on exceptions
])
def test_get_signatures(targets_map, expected, client):
  """The signatures endpoint wraps whatever the TUF API reports under 'delegations'."""
  with patch('endpoints.api.signing.tuf_metadata_api') as mock_tuf:
    mock_tuf.get_all_tags_with_expiration.return_value = targets_map
    with client_with_identity('devtable', client) as cl:
      payload = conduct_api_call(cl, RepositorySignatures, 'GET',
                                 {'repository': 'devtable/trusted'}, None, 200).json
      assert tags_equal(expected, payload)
|
43
endpoints/api/test/test_subscribe_models_pre_oci.py
Normal file
43
endpoints/api/test/test_subscribe_models_pre_oci.py
Normal file
|
@ -0,0 +1,43 @@
|
|||
import pytest
|
||||
from mock import patch
|
||||
|
||||
from endpoints.api.subscribe_models_pre_oci import data_model
|
||||
|
||||
|
||||
@pytest.mark.parametrize('username,repo_count', [
  ('devtable', 3),
])
def test_get_private_repo_count(username, repo_count):
  """The pre-OCI model delegates private-repo counting to the data layer."""
  target = 'endpoints.api.subscribe_models_pre_oci.get_private_repo_count'
  with patch(target) as mocked_count:
    mocked_count.return_value = repo_count
    count = data_model.get_private_repo_count(username)

  mocked_count.assert_called_once_with(username)
  assert count == repo_count
|
||||
|
||||
|
||||
@pytest.mark.parametrize('kind_name,target_username,metadata', [
  ('over_private_usage', 'devtable', {'namespace': 'devtable'}),
])
def test_create_unique_notification(kind_name, target_username, metadata):
  """Creating a unique notification resolves the target, then delegates to the data layer."""
  prefix = 'endpoints.api.subscribe_models_pre_oci.'
  with patch(prefix + 'get_user_or_org') as mock_get_user_or_org:
    mock_get_user_or_org.return_value = {'username': target_username}
    with patch(prefix + 'create_unique_notification') as mock_create_unique_notification:
      data_model.create_unique_notification(kind_name, target_username, metadata)

      mock_get_user_or_org.assert_called_once_with(target_username)
      mock_create_unique_notification.assert_called_once_with(
          kind_name, mock_get_user_or_org.return_value, metadata)
|
||||
|
||||
|
||||
@pytest.mark.parametrize('target_username,kind_name', [
  ('devtable', 'over_private_usage'),
])
def test_delete_notifications_by_kind(target_username, kind_name):
  """Deleting notifications by kind resolves the target, then delegates to the data layer."""
  prefix = 'endpoints.api.subscribe_models_pre_oci.'
  with patch(prefix + 'get_user_or_org') as mock_get_user_or_org:
    mock_get_user_or_org.return_value = {'username': target_username}
    with patch(prefix + 'delete_notifications_by_kind') as mock_delete_notifications_by_kind:
      data_model.delete_notifications_by_kind(target_username, kind_name)

      mock_get_user_or_org.assert_called_once_with(target_username)
      mock_delete_notifications_by_kind.assert_called_once_with(
          mock_get_user_or_org.return_value, kind_name)
|
||||
|
28
endpoints/api/test/test_superuser.py
Normal file
28
endpoints/api/test/test_superuser.py
Normal file
|
@ -0,0 +1,28 @@
|
|||
import pytest
|
||||
|
||||
from endpoints.api.superuser import SuperUserList, SuperUserManagement
|
||||
from endpoints.api.test.shared import conduct_api_call
|
||||
from endpoints.test.shared import client_with_identity
|
||||
from test.fixtures import *
|
||||
|
||||
@pytest.mark.parametrize('disabled', [True, False])
def test_list_all_users(disabled, client):
  """The superuser list is non-empty; without the disabled flag, all users are enabled."""
  with client_with_identity('devtable', client) as cl:
    result = conduct_api_call(cl, SuperUserList, 'GET', {'disabled': disabled},
                              None, 200).json
    users = result['users']
    assert len(users)
    if not disabled:
      for user in users:
        assert user['enabled']
|
||||
|
||||
|
||||
def test_change_install_user(client):
  """A superuser can update another user's email via user management."""
  with client_with_identity('devtable', client) as cl:
    new_email = 'new_email123@test.com'
    result = conduct_api_call(cl, SuperUserManagement, 'PUT',
                              {'username': 'randomuser'},
                              {'email': new_email}, 200).json

    assert result['email'] == new_email
|
116
endpoints/api/test/test_tag.py
Normal file
116
endpoints/api/test/test_tag.py
Normal file
|
@ -0,0 +1,116 @@
|
|||
import pytest
|
||||
|
||||
from playhouse.test_utils import assert_query_count
|
||||
|
||||
from data.registry_model import registry_model
|
||||
from data.database import Manifest
|
||||
|
||||
from endpoints.api.test.shared import conduct_api_call
|
||||
from endpoints.test.shared import client_with_identity
|
||||
from endpoints.api.tag import RepositoryTag, RestoreTag, ListRepositoryTags, RepositoryTagImages
|
||||
|
||||
from test.fixtures import *
|
||||
|
||||
@pytest.mark.parametrize('expiration_time, expected_status', [
  (None, 201),
  ('aksdjhasd', 400),
])
def test_change_tag_expiration_default(expiration_time, expected_status, client, app):
  """Clearing the expiration succeeds; a non-numeric expiration is rejected."""
  with client_with_identity('devtable', client) as cl:
    params = {'repository': 'devtable/simple', 'tag': 'latest'}
    body = {'expiration': expiration_time}
    conduct_api_call(cl, RepositoryTag, 'put', params, body, expected_status)
|
||||
|
||||
|
||||
def test_change_tag_expiration(client, app):
  """Setting an expiration timestamp persists as the tag's lifetime end."""
  with client_with_identity('devtable', client) as cl:
    params = {'repository': 'devtable/simple', 'tag': 'latest'}

    tag = model.tag.get_active_tag('devtable', 'simple', 'latest')
    # One day past the tag's start time.
    updated_expiration = tag.lifetime_start_ts + 60 * 60 * 24

    conduct_api_call(cl, RepositoryTag, 'put', params,
                     {'expiration': updated_expiration}, 201)

    # Re-fetch and confirm the expiration stuck.
    tag = model.tag.get_active_tag('devtable', 'simple', 'latest')
    assert tag.lifetime_end_ts == updated_expiration
|
||||
|
||||
|
||||
@pytest.mark.parametrize('image_exists,test_tag,expected_status', [
  (True, '-INVALID-TAG-NAME', 400),
  (True, '.INVALID-TAG-NAME', 400),
  (True,
   'INVALID-TAG_NAME-BECAUSE-THIS-IS-WAY-WAY-TOO-LOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOONG',
   400),
  (False, 'newtag', 404),
  (True, 'generatemanifestfail', None),
  (True, 'latest', 201),
  (True, 'newtag', 201),
])
def test_move_tag(image_exists, test_tag, expected_status, client, app):
  """Moving a tag validates tag names and image existence; None expects a raise."""
  with client_with_identity('devtable', client) as cl:
    target_image = 'unknown'
    if image_exists:
      # Point the move at the image currently behind 'latest'.
      repo_ref = registry_model.lookup_repository('devtable', 'simple')
      tag_ref = registry_model.get_repo_tag(repo_ref, 'latest', include_legacy_image=True)
      assert tag_ref
      target_image = tag_ref.legacy_image.docker_image_id

    params = {'repository': 'devtable/simple', 'tag': test_tag}
    request_body = {'image': target_image}

    if expected_status is not None:
      conduct_api_call(cl, RepositoryTag, 'put', params, request_body, expected_status)
    else:
      # expected_status None marks cases where the call itself must raise.
      with pytest.raises(Exception):
        conduct_api_call(cl, RepositoryTag, 'put', params, request_body, expected_status)
|
||||
|
||||
|
||||
@pytest.mark.parametrize('repo_namespace, repo_name, query_count', [
  ('devtable', 'simple', 5),
  ('devtable', 'history', 5),
  ('devtable', 'complex', 5),
  ('devtable', 'gargantuan', 5),
  ('buynlarge', 'orgrepo', 7),         # +2 for permissions checks.
  ('buynlarge', 'anotherorgrepo', 7),  # +2 for permissions checks.
])
def test_list_repo_tags(repo_namespace, repo_name, client, query_count, app):
  """Tag listing matches the tag history and stays within its query budget."""
  # Pre-cache media type loads to ensure consistent query count.
  Manifest.media_type.get_name(1)

  params = {'repository': '%s/%s' % (repo_namespace, repo_name)}
  with client_with_identity('devtable', client) as cl:
    with assert_query_count(query_count):
      tags = conduct_api_call(cl, ListRepositoryTags, 'get', params).json['tags']

    repo_ref = registry_model.lookup_repository(repo_namespace, repo_name)
    history, _ = registry_model.list_repository_tag_history(repo_ref)
    assert len(tags) == len(history)
|
||||
|
||||
|
||||
@pytest.mark.parametrize('repository, tag, owned, expect_images', [
  ('devtable/simple', 'prod', False, True),
  ('devtable/simple', 'prod', True, False),
  ('devtable/simple', 'latest', False, True),
  ('devtable/simple', 'latest', True, False),

  ('devtable/complex', 'prod', False, True),
  ('devtable/complex', 'prod', True, True),
])
def test_list_tag_images(repository, tag, owned, expect_images, client, app):
  """The 'owned' filter controls whether a tag's image list comes back non-empty."""
  with client_with_identity('devtable', client) as cl:
    query = {'repository': repository, 'tag': tag, 'owned': owned}
    result = conduct_api_call(cl, RepositoryTagImages, 'get', query, None, 200).json
    assert bool(result['images']) == expect_images
|
90
endpoints/api/test/test_team.py
Normal file
90
endpoints/api/test/test_team.py
Normal file
|
@ -0,0 +1,90 @@
|
|||
import json
|
||||
|
||||
from mock import patch
|
||||
|
||||
from data import model
|
||||
from endpoints.api import api
|
||||
from endpoints.api.test.shared import conduct_api_call
|
||||
from endpoints.api.team import OrganizationTeamSyncing, TeamMemberList
|
||||
from endpoints.api.organization import Organization
|
||||
from endpoints.test.shared import client_with_identity
|
||||
|
||||
from test.test_ldap import mock_ldap
|
||||
|
||||
from test.fixtures import *
|
||||
|
||||
# Lookup params for an LDAP-synced team and an unsynced team in the sellnsmall org.
SYNCED_TEAM_PARAMS = {'orgname': 'sellnsmall', 'teamname': 'synced'}
UNSYNCED_TEAM_PARAMS = {'orgname': 'sellnsmall', 'teamname': 'owners'}
|
||||
|
||||
def test_team_syncing(client):
  """Enabling then disabling LDAP team syncing updates the stored sync config."""
  with mock_ldap() as ldap:
    with patch('endpoints.api.team.authentication', ldap):
      with client_with_identity('devtable', client) as cl:
        config = {
          'group_dn': 'cn=AwesomeFolk',
        }

        def current_sync_info():
          return model.team.get_team_sync_information(UNSYNCED_TEAM_PARAMS['orgname'],
                                                      UNSYNCED_TEAM_PARAMS['teamname'])

        # Enable syncing and verify the config was persisted.
        conduct_api_call(cl, OrganizationTeamSyncing, 'POST', UNSYNCED_TEAM_PARAMS, config)
        sync_info = current_sync_info()
        assert sync_info is not None
        assert json.loads(sync_info.config) == config

        # Disable syncing and verify the config is gone.
        conduct_api_call(cl, OrganizationTeamSyncing, 'DELETE', UNSYNCED_TEAM_PARAMS, None)
        assert current_sync_info() is None
|
||||
|
||||
|
||||
def test_team_member_sync_info(client):
  """Sync metadata in the member list is filtered by the caller's privileges."""
  with mock_ldap() as ldap:
    with patch('endpoints.api.team.authentication', ldap):
      def member_list(username, team_params):
        # Fetch the member list for `team_params` as `username`.
        with client_with_identity(username, client) as cl:
          return conduct_api_call(cl, TeamMemberList, 'GET', team_params).json

      # Unsynced team, superuser: can_sync visible, no synced block.
      payload = member_list('devtable', UNSYNCED_TEAM_PARAMS)
      assert 'can_sync' in payload
      assert payload['can_sync']['service'] == 'ldap'
      assert 'synced' not in payload

      # Unsynced team, non-superuser: neither block is exposed.
      payload = member_list('randomuser', UNSYNCED_TEAM_PARAMS)
      assert 'can_sync' not in payload
      assert 'synced' not in payload

      # Synced team, superuser: full sync details are exposed.
      payload = member_list('devtable', SYNCED_TEAM_PARAMS)
      assert 'can_sync' in payload
      assert payload['can_sync']['service'] == 'ldap'
      assert 'synced' in payload
      assert 'last_updated' in payload['synced']
      assert 'group_dn' in payload['synced']['config']

      # Synced team, non-superuser: synced flag only, no details.
      payload = member_list('randomuser', SYNCED_TEAM_PARAMS)
      assert 'can_sync' not in payload
      assert 'synced' in payload
      assert 'last_updated' not in payload['synced']
      assert 'config' not in payload['synced']
|
||||
|
||||
|
||||
def test_organization_teams_sync_bool(client):
  """The org teams listing marks LDAP-synced teams via is_synced."""
  with mock_ldap() as ldap:
    with patch('endpoints.api.organization.authentication', ldap):
      with client_with_identity('devtable', client) as cl:
        resp = conduct_api_call(cl, Organization, 'GET', {'orgname': 'sellnsmall'})

        teams = resp.json['teams']
        assert not teams['owners']['is_synced']
        assert teams['synced']['is_synced']
|
55
endpoints/api/test/test_trigger.py
Normal file
55
endpoints/api/test/test_trigger.py
Normal file
|
@ -0,0 +1,55 @@
|
|||
import pytest
|
||||
import json
|
||||
|
||||
from data import model
|
||||
from endpoints.api.trigger_analyzer import is_parent
|
||||
from endpoints.api.trigger import BuildTrigger
|
||||
from endpoints.api.test.shared import conduct_api_call
|
||||
from endpoints.test.shared import client_with_identity
|
||||
from test.fixtures import *
|
||||
|
||||
|
||||
@pytest.mark.parametrize('context,dockerfile_path,expected', [
  ("/", "/a/b", True),
  ("/a", "/a/b", True),
  ("/a/b", "/a/b", False),
  ("/a//", "/a/b", True),
  ("/a", "/a//b/c", True),
  ("/a//", "a/b", True),
  ("/a/b", "a/bc/d", False),
  ("/d", "/a/b", False),
  ("/a/b", "/a/b.c", False),
  ("/a/b", "/a/b/b.c", True),
  ("", "/a/b.c", False),
  ("/a/b", "", False),
  ("", "", False),
])
def test_super_user_build_endpoints(context, dockerfile_path, expected):
  """is_parent: a path equal to the context, or outside it, is not a child."""
  result = is_parent(context, dockerfile_path)
  assert result == expected
|
||||
|
||||
|
||||
def test_enabled_disabled_trigger(app, client):
  """A build trigger can be disabled and re-enabled through the API."""
  trigger = model.build.list_build_triggers('devtable', 'building')[0]
  trigger.config = json.dumps({'hook_id': 'someid'})
  trigger.save()

  params = {
    'repository': 'devtable/building',
    'trigger_uuid': trigger.uuid,
  }

  # Toggle off, then back on, verifying the reported state each time.
  for enabled in (False, True):
    with client_with_identity('devtable', client) as cl:
      result = conduct_api_call(cl, BuildTrigger, 'PUT', params,
                                {'enabled': enabled}, 200).json
      assert result['enabled'] == enabled
|
152
endpoints/api/test/test_trigger_analyzer.py
Normal file
152
endpoints/api/test/test_trigger_analyzer.py
Normal file
|
@ -0,0 +1,152 @@
|
|||
import pytest
|
||||
from mock import Mock
|
||||
|
||||
from auth import permissions
|
||||
from data import model
|
||||
from endpoints.api.trigger_analyzer import TriggerAnalyzer
|
||||
from util import dockerfileparse
|
||||
|
||||
# Expected analyzer messages and trigger configs shared by the parametrized
# cases in test_trigger_analyzer below.
BAD_PATH = "\"server_hostname/\" is not a valid Quay repository path"

EMPTY_CONF = {}

GOOD_CONF = {'context': '/', 'dockerfile_path': '/file'}

BAD_CONF = {'context': 'context', 'dockerfile_path': 'dockerfile_path'}

# A single robot entry, as the analyzer reports it in its 'robots' list.
ONE_ROBOT = {'can_read': False, 'is_robot': True, 'kind': 'user', 'name': 'name'}

DOCKERFILE_NOT_CHILD = 'Dockerfile, context, is not a child of the context, dockerfile_path.'

THE_DOCKERFILE_SPECIFIED = 'Could not parse the Dockerfile specified'

DOCKERFILE_PATH_NOT_FOUND = 'Specified Dockerfile path for the trigger was not found on the main branch. This trigger may fail.'

NO_FROM_LINE = 'No FROM line found in the Dockerfile'

REPO_NOT_FOUND = 'Repository "server_hostname/path/file" referenced by the Dockerfile was not found'
|
||||
|
||||
|
||||
@pytest.fixture
def get_monkeypatch(monkeypatch):
  # Thin alias for pytest's built-in monkeypatch fixture, kept so the
  # parametrized test below can request it under this name.
  return monkeypatch
|
||||
|
||||
|
||||
def patch_permissions(monkeypatch, can_read=False):
  """ Replace ReadRepositoryPermission with a stub returning `can_read`.

  NOTE: the replacement is a plain callable, so permission "instances" created
  by the code under test are simply the boolean itself.
  """
  monkeypatch.setattr(permissions, 'ReadRepositoryPermission',
                      lambda base_namespace, base_repository: can_read)
|
||||
|
||||
|
||||
def patch_list_namespace_robots(monkeypatch):
  """ Stub model.user.list_namespace_robots to yield one robot named 'name'.

  Returns the stubbed list so the caller can assert against it.
  """
  robot = Mock()
  robot.configure_mock(username='name')
  robots = [robot]
  monkeypatch.setattr(model.user, 'list_namespace_robots', lambda namespace: robots)
  return robots
|
||||
|
||||
|
||||
def patch_get_all_repo_users_transitive(monkeypatch):
  """ Stub model.user.get_all_repo_users_transitive to yield one user 'name'.

  Returns the stubbed list so the caller can assert against it.
  """
  user = Mock()
  user.configure_mock(username='name')
  users = [user]
  monkeypatch.setattr(model.user, 'get_all_repo_users_transitive',
                      lambda namespace, image_repository: users)
  return users
|
||||
|
||||
|
||||
def patch_parse_dockerfile(monkeypatch, get_base_image):
  """ Stub dockerfileparse.parse_dockerfile.

  When `get_base_image` is provided, parsing yields a Mock whose
  `get_base_image` attribute is that callable; when it is None, parsing
  yields None (simulating an unparseable Dockerfile).
  """
  # The original duplicated the setattr call in both branches; a single
  # replacement function with an internal branch is equivalent.
  def fake_parse_dockerfile(content):
    if get_base_image is None:
      return None
    parsed = Mock()
    parsed.configure_mock(get_base_image=get_base_image)
    return parsed

  monkeypatch.setattr(dockerfileparse, "parse_dockerfile", fake_parse_dockerfile)
|
||||
|
||||
|
||||
def patch_model_repository_get_repository(monkeypatch, get_repository):
  """ Stub model.repository.get_repository.

  When `get_repository` is given, the stub returns a Mock repository whose
  visibility.name equals that value; otherwise the stub returns None
  (repository not found).
  """
  if get_repository is None:
    def mock_get_repository(base_namespace, base_repository):
      return None
  else:
    def mock_get_repository(base_namespace, base_repository):
      visibility = Mock()
      visibility.name = get_repository
      return Mock(visibility=visibility)

  monkeypatch.setattr(model.repository, "get_repository", mock_get_repository)
|
||||
|
||||
|
||||
def return_none():
  # Stand-in for load_dockerfile_contents / get_base_image: nothing found.
  return None


def return_content():
  # Stand-in for load_dockerfile_contents: some (mock) Dockerfile contents.
  return Mock()


def return_server_hostname():
  # Base image equal to the bare registry hostname; per the parametrized
  # cases below the analyzer rejects this as an invalid repository path.
  return "server_hostname/"


def return_non_server_hostname():
  # Base image not hosted on this registry (treated as a public base).
  return "slime"


def return_path():
  # Base image hosted on this registry: namespace 'path', repository 'file'.
  return "server_hostname/path/file"
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
  'handler_fn, config_dict, admin_org_permission, status, message, get_base_image, robots, server_hostname, get_repository, can_read, namespace, name', [
    (return_none, EMPTY_CONF, False, "warning", DOCKERFILE_PATH_NOT_FOUND, None, [], None, None, False, "namespace", None),
    (return_none, EMPTY_CONF, True, "warning", DOCKERFILE_PATH_NOT_FOUND, None, [ONE_ROBOT], None, None, False, "namespace", None),
    (return_content, BAD_CONF, False, "error", THE_DOCKERFILE_SPECIFIED, None, [], None, None, False, "namespace", None),
    (return_none, EMPTY_CONF, False, "warning", DOCKERFILE_PATH_NOT_FOUND, return_none, [], None, None, False, "namespace", None),
    (return_none, EMPTY_CONF, True, "warning", DOCKERFILE_PATH_NOT_FOUND, return_none, [ONE_ROBOT], None, None, False, "namespace", None),
    (return_content, BAD_CONF, False, "error", DOCKERFILE_NOT_CHILD, return_none, [], None, None, False, "namespace", None),
    (return_content, GOOD_CONF, False, "warning", NO_FROM_LINE, return_none, [], None, None, False, "namespace", None),
    (return_content, GOOD_CONF, False, "publicbase", None, return_non_server_hostname, [], "server_hostname", None, False, "namespace", None),
    (return_content, GOOD_CONF, False, "warning", BAD_PATH, return_server_hostname, [], "server_hostname", None, False, "namespace", None),
    (return_content, GOOD_CONF, False, "error", REPO_NOT_FOUND, return_path, [], "server_hostname", None, False, "namespace", None),
    (return_content, GOOD_CONF, False, "error", REPO_NOT_FOUND, return_path, [], "server_hostname", "nonpublic", False, "namespace", None),
    (return_content, GOOD_CONF, False, "requiresrobot", None, return_path, [], "server_hostname", "nonpublic", True, "path", "file"),
    (return_content, GOOD_CONF, False, "publicbase", None, return_path, [], "server_hostname", "public", True, "path", "file"),

  ])
def test_trigger_analyzer(handler_fn, config_dict, admin_org_permission, status, message, get_base_image, robots,
                          server_hostname, get_repository, can_read, namespace, name,
                          get_monkeypatch):
  """ Drive TriggerAnalyzer.analyze_trigger through every analysis outcome
  (warning / error / publicbase / requiresrobot) by stubbing out the data
  model, Dockerfile parsing, and permission checks.
  """
  patch_list_namespace_robots(get_monkeypatch)
  patch_get_all_repo_users_transitive(get_monkeypatch)
  patch_parse_dockerfile(get_monkeypatch, get_base_image)
  patch_model_repository_get_repository(get_monkeypatch, get_repository)
  patch_permissions(get_monkeypatch, can_read)
  # The handler only needs load_dockerfile_contents for these scenarios.
  handler_mock = Mock()
  handler_mock.configure_mock(**{'load_dockerfile_contents': handler_fn})
  trigger_analyzer = TriggerAnalyzer(handler_mock, 'namespace', server_hostname, config_dict, admin_org_permission)
  assert trigger_analyzer.analyze_trigger() == {'namespace': namespace,
                                                'name': name,
                                                'robots': robots,
                                                'status': status,
                                                'message': message,
                                                'is_admin': admin_org_permission}
|
42
endpoints/api/test/test_user.py
Normal file
42
endpoints/api/test/test_user.py
Normal file
|
@ -0,0 +1,42 @@
|
|||
import pytest
|
||||
|
||||
from mock import patch
|
||||
|
||||
from endpoints.api.test.shared import conduct_api_call
|
||||
from endpoints.api.user import User
|
||||
from endpoints.test.shared import client_with_identity
|
||||
from features import FeatureNameValue
|
||||
|
||||
from test.fixtures import *
|
||||
|
||||
|
||||
def test_user_metadata_update(client):
  """ Updating user metadata via PUT sets, nullifies, and leaves-alone fields
  as expected; omitted fields retain their previous values.
  """
  with patch('features.USER_METADATA', FeatureNameValue('USER_METADATA', True)):
    with client_with_identity('devtable', client) as cl:
      metadata = {
        'given_name': 'Quay',
        'family_name': 'User',
        'location': 'NYC',
        'company': 'Red Hat',
      }

      # Update all user metadata fields.
      conduct_api_call(cl, User, 'PUT', None, body=metadata)

      # Test that they were successfully updated.
      user = conduct_api_call(cl, User, 'GET', None).json
      for field in metadata:
        assert user.get(field) == metadata.get(field)

      # Now nullify one of the fields, and remove another.
      metadata['company'] = None
      location = metadata.pop('location')

      conduct_api_call(cl, User, 'PUT', None, body=metadata)

      user = conduct_api_call(cl, User, 'GET', None).json
      for field in metadata:
        assert user.get(field) == metadata.get(field)

      # The location field should be unchanged.
      assert user.get('location') == location
|
539
endpoints/api/trigger.py
Normal file
539
endpoints/api/trigger.py
Normal file
|
@ -0,0 +1,539 @@
|
|||
""" Create, list and manage build triggers. """
|
||||
|
||||
import logging
|
||||
from urlparse import urlunparse
|
||||
|
||||
from flask import request, url_for
|
||||
|
||||
from active_migration import ActiveDataMigration, ERTMigrationFlags
|
||||
from app import app
|
||||
from auth.permissions import (UserAdminPermission, AdministerOrganizationPermission,
|
||||
AdministerRepositoryPermission)
|
||||
from buildtrigger.basehandler import BuildTriggerHandler
|
||||
from buildtrigger.triggerutil import TriggerException, EmptyRepositoryException
|
||||
from data import model
|
||||
from data.fields import DecryptedValue
|
||||
from data.model.build import update_build_trigger
|
||||
from endpoints.api import (RepositoryParamResource, nickname, resource, require_repo_admin,
|
||||
log_action, request_error, query_param, parse_args, internal_only,
|
||||
validate_json_request, api, path_param, abort,
|
||||
disallow_for_app_repositories, disallow_for_non_normal_repositories)
|
||||
from endpoints.api.build import build_status_view, trigger_view, RepositoryBuildStatus
|
||||
from endpoints.api.trigger_analyzer import TriggerAnalyzer
|
||||
from endpoints.building import (start_build, MaximumBuildsQueuedException,
|
||||
BuildTriggerDisabledException)
|
||||
from endpoints.exception import NotFound, Unauthorized, InvalidRequest
|
||||
from util.names import parse_robot_username
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _prepare_webhook_url(scheme, username, password, hostname, path):
|
||||
auth_hostname = '%s:%s@%s' % (username, password, hostname)
|
||||
return urlunparse((scheme, auth_hostname, path, '', '', ''))
|
||||
|
||||
|
||||
def get_trigger(trigger_uuid):
  """ Look up a build trigger by UUID, raising NotFound if it does not exist. """
  try:
    return model.build.get_build_trigger(trigger_uuid)
  except model.InvalidBuildTriggerException:
    raise NotFound()
|
||||
|
||||
@resource('/v1/repository/<apirepopath:repository>/trigger/')
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
class BuildTriggerList(RepositoryParamResource):
  """ Resource for listing repository build triggers. """

  @require_repo_admin
  @disallow_for_app_repositories
  @nickname('listBuildTriggers')
  def get(self, namespace_name, repo_name):
    """ List the triggers for the specified repository. """
    all_triggers = model.build.list_build_triggers(namespace_name, repo_name)
    return {'triggers': [trigger_view(t, can_admin=True) for t in all_triggers]}
|
||||
|
||||
|
||||
@resource('/v1/repository/<apirepopath:repository>/trigger/<trigger_uuid>')
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
@path_param('trigger_uuid', 'The UUID of the build trigger')
class BuildTrigger(RepositoryParamResource):
  """ Resource for managing specific build triggers. """
  schemas = {
    'UpdateTrigger': {
      'type': 'object',
      'description': 'Options for updating a build trigger',
      'required': [
        'enabled',
      ],
      'properties': {
        'enabled': {
          'type': 'boolean',
          'description': 'Whether the build trigger is enabled',
        },
      }
    },
  }

  @require_repo_admin
  @disallow_for_app_repositories
  @nickname('getBuildTrigger')
  def get(self, namespace_name, repo_name, trigger_uuid):
    """ Get information for the specified build trigger. """
    return trigger_view(get_trigger(trigger_uuid), can_admin=True)

  @require_repo_admin
  @disallow_for_app_repositories
  @disallow_for_non_normal_repositories
  @nickname('updateBuildTrigger')
  @validate_json_request('UpdateTrigger')
  def put(self, namespace_name, repo_name, trigger_uuid):
    """ Updates the specified build trigger (currently only its enabled flag). """
    trigger = get_trigger(trigger_uuid)

    # Only an activated trigger has a hook that can be toggled.
    handler = BuildTriggerHandler.get_handler(trigger)
    if not handler.is_active():
      raise InvalidRequest('Cannot update an unactivated trigger')

    enable = request.get_json()['enabled']
    model.build.toggle_build_trigger(trigger, enable)
    log_action('toggle_repo_trigger', namespace_name,
               {'repo': repo_name, 'trigger_id': trigger_uuid,
                'service': trigger.service.name, 'enabled': enable},
               repo=model.repository.get_repository(namespace_name, repo_name))

    return trigger_view(trigger)

  @require_repo_admin
  @disallow_for_app_repositories
  @disallow_for_non_normal_repositories
  @nickname('deleteBuildTrigger')
  def delete(self, namespace_name, repo_name, trigger_uuid):
    """ Delete the specified build trigger, deactivating it on the SCM side first. """
    trigger = get_trigger(trigger_uuid)

    handler = BuildTriggerHandler.get_handler(trigger)
    if handler.is_active():
      try:
        handler.deactivate()
      except TriggerException as ex:
        # We are just going to eat this error; deletion proceeds regardless.
        logger.warning('Trigger deactivation problem: %s', ex)

    log_action('delete_repo_trigger', namespace_name,
               {'repo': repo_name, 'trigger_id': trigger_uuid,
                'service': trigger.service.name},
               repo=model.repository.get_repository(namespace_name, repo_name))

    trigger.delete_instance(recursive=True)

    # Clean up the delegate token created during activation, if any.
    if trigger.write_token is not None:
      trigger.write_token.delete_instance()

    return 'No Content', 204
|
||||
|
||||
|
||||
@resource('/v1/repository/<apirepopath:repository>/trigger/<trigger_uuid>/subdir')
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
@path_param('trigger_uuid', 'The UUID of the build trigger')
@internal_only
class BuildTriggerSubdirs(RepositoryParamResource):
  """ Custom verb for fetching the subdirs which are buildable for a trigger. """
  schemas = {
    'BuildTriggerSubdirRequest': {
      'type': 'object',
      'description': 'Arbitrary json.',
    },
  }

  @require_repo_admin
  @disallow_for_app_repositories
  @disallow_for_non_normal_repositories
  @nickname('listBuildTriggerSubdirs')
  @validate_json_request('BuildTriggerSubdirRequest')
  def post(self, namespace_name, repo_name, trigger_uuid):
    """ List the subdirectories available for the specified build trigger and source. """
    trigger = get_trigger(trigger_uuid)

    # Only the user who connected the trigger may enumerate its buildable subdirs.
    user_permission = UserAdminPermission(trigger.connected_user.username)
    if user_permission.can():
      # The request body acts as an (unvalidated) updated config for the handler.
      new_config_dict = request.get_json()
      handler = BuildTriggerHandler.get_handler(trigger, new_config_dict)

      try:
        subdirs = handler.list_build_subdirs()
        context_map = {}
        for file in subdirs:
          context_map = handler.get_parent_directory_mappings(file, context_map)

        return {
          'dockerfile_paths': ['/' + subdir for subdir in subdirs],
          'contextMap': context_map,
          'status': 'success',
        }
      except EmptyRepositoryException as exc:
        # An empty source repository is not an error; there is simply nothing to build.
        return {
          'status': 'success',
          'contextMap': {},
          'dockerfile_paths': [],
        }
      except TriggerException as exc:
        return {
          'status': 'error',
          'message': exc.message,
        }
    else:
      raise Unauthorized()
|
||||
|
||||
|
||||
@resource('/v1/repository/<apirepopath:repository>/trigger/<trigger_uuid>/activate')
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
@path_param('trigger_uuid', 'The UUID of the build trigger')
class BuildTriggerActivate(RepositoryParamResource):
  """ Custom verb for activating a build trigger once all required information has been collected.
  """
  schemas = {
    'BuildTriggerActivateRequest': {
      'type': 'object',
      'required': [
        'config'
      ],
      'properties': {
        'config': {
          'type': 'object',
          'description': 'Arbitrary json.',
        },
        'pull_robot': {
          'type': 'string',
          'description': 'The name of the robot that will be used to pull images.'
        }
      }
    },
  }

  @require_repo_admin
  @disallow_for_app_repositories
  @disallow_for_non_normal_repositories
  @nickname('activateBuildTrigger')
  @validate_json_request('BuildTriggerActivateRequest')
  def post(self, namespace_name, repo_name, trigger_uuid):
    """ Activate the specified build trigger. """
    trigger = get_trigger(trigger_uuid)
    handler = BuildTriggerHandler.get_handler(trigger)
    # An already-active trigger cannot be activated again.
    # NOTE(review): the error message reads oddly for this condition — confirm intent.
    if handler.is_active():
      raise InvalidRequest('Trigger config is not sufficient for activation.')

    # Only the user who connected the trigger may activate it.
    user_permission = UserAdminPermission(trigger.connected_user.username)
    if user_permission.can():
      # Update the pull robot (if any).
      pull_robot_name = request.get_json().get('pull_robot', None)
      if pull_robot_name:
        try:
          pull_robot = model.user.lookup_robot(pull_robot_name)
        except model.InvalidRobotException:
          raise NotFound()

        # Make sure the user has administer permissions for the robot's namespace.
        (robot_namespace, _) = parse_robot_username(pull_robot_name)
        if not AdministerOrganizationPermission(robot_namespace).can():
          raise Unauthorized()

        # Make sure the namespace matches that of the trigger.
        if robot_namespace != namespace_name:
          raise Unauthorized()

        # Set the pull robot.
        trigger.pull_robot = pull_robot

      # Update the config.
      new_config_dict = request.get_json()['config']

      # Create a write-scoped delegate token for the webhook to push build status.
      write_token_name = 'Build Trigger: %s' % trigger.service.name
      write_token = model.token.create_delegate_token(namespace_name, repo_name, write_token_name,
                                                      'write')

      try:
        path = url_for('webhooks.build_trigger_webhook', trigger_uuid=trigger.uuid)
        authed_url = _prepare_webhook_url(app.config['PREFERRED_URL_SCHEME'],
                                          '$token', write_token.get_code(),
                                          app.config['SERVER_HOSTNAME'], path)

        handler = BuildTriggerHandler.get_handler(trigger, new_config_dict)
        final_config, private_config = handler.activate(authed_url)

        if 'private_key' in private_config:
          trigger.secure_private_key = DecryptedValue(private_config['private_key'])

          # TODO(remove-unenc): Remove legacy field.
          if ActiveDataMigration.has_flag(ERTMigrationFlags.WRITE_OLD_FIELDS):
            trigger.private_key = private_config['private_key']

      except TriggerException as exc:
        # Roll back the token if activation on the SCM side failed.
        write_token.delete_instance()
        raise request_error(message=exc.message)

      # Save the updated config.
      update_build_trigger(trigger, final_config, write_token=write_token)

      # Log the trigger setup.
      repo = model.repository.get_repository(namespace_name, repo_name)
      log_action('setup_repo_trigger', namespace_name,
                 {'repo': repo_name, 'namespace': namespace_name,
                  'trigger_id': trigger.uuid, 'service': trigger.service.name,
                  'pull_robot': trigger.pull_robot.username if trigger.pull_robot else None,
                  'config': final_config},
                 repo=repo)

      return trigger_view(trigger, can_admin=True)
    else:
      raise Unauthorized()
|
||||
|
||||
|
||||
@resource('/v1/repository/<apirepopath:repository>/trigger/<trigger_uuid>/analyze')
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
@path_param('trigger_uuid', 'The UUID of the build trigger')
@internal_only
class BuildTriggerAnalyze(RepositoryParamResource):
  """ Custom verb for analyzing the config for a build trigger and suggesting various changes
      (such as a robot account to use for pulling)
  """
  schemas = {
    'BuildTriggerAnalyzeRequest': {
      'type': 'object',
      'required': [
        'config'
      ],
      'properties': {
        'config': {
          'type': 'object',
          'description': 'Arbitrary json.',
        }
      }
    },
  }

  @require_repo_admin
  @disallow_for_app_repositories
  @disallow_for_non_normal_repositories
  @nickname('analyzeBuildTrigger')
  @validate_json_request('BuildTriggerAnalyzeRequest')
  def post(self, namespace_name, repo_name, trigger_uuid):
    """ Analyze the specified build trigger configuration. """
    trigger = get_trigger(trigger_uuid)

    # Guard against a trigger UUID belonging to a different repository.
    if trigger.repository.namespace_user.username != namespace_name:
      raise NotFound()

    if trigger.repository.name != repo_name:
      raise NotFound()

    new_config_dict = request.get_json()['config']
    handler = BuildTriggerHandler.get_handler(trigger, new_config_dict)
    server_hostname = app.config['SERVER_HOSTNAME']
    try:
      trigger_analyzer = TriggerAnalyzer(handler,
                                         namespace_name,
                                         server_hostname,
                                         new_config_dict,
                                         AdministerOrganizationPermission(namespace_name).can())
      return trigger_analyzer.analyze_trigger()
    except TriggerException as rre:
      return {
        'status': 'error',
        'message': 'Could not analyze the repository: %s' % rre.message,
      }
    except NotImplementedError:
      # Some handlers do not support analysis at all.
      return {
        'status': 'notimplemented',
      }
|
||||
|
||||
|
||||
@resource('/v1/repository/<apirepopath:repository>/trigger/<trigger_uuid>/start')
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
@path_param('trigger_uuid', 'The UUID of the build trigger')
class ActivateBuildTrigger(RepositoryParamResource):
  """ Custom verb to manually activate a build trigger. """
  schemas = {
    'RunParameters': {
      'type': 'object',
      'description': 'Optional run parameters for activating the build trigger',
      'properties': {
        'branch_name': {
          'type': 'string',
          'description': '(SCM only) If specified, the name of the branch to build.'
        },
        'commit_sha': {
          'type': 'string',
          'description': '(Custom Only) If specified, the ref/SHA1 used to checkout a git repository.'
        },
        'refs': {
          'type': ['object', 'null'],
          'description': '(SCM Only) If specified, the ref to build.'
        }
      },
      'additionalProperties': False
    }
  }

  @require_repo_admin
  @disallow_for_app_repositories
  @disallow_for_non_normal_repositories
  @nickname('manuallyStartBuildTrigger')
  @validate_json_request('RunParameters')
  def post(self, namespace_name, repo_name, trigger_uuid):
    """ Manually start a build from the specified trigger. """
    trigger = get_trigger(trigger_uuid)
    if not trigger.enabled:
      raise InvalidRequest('Trigger is not enabled.')

    handler = BuildTriggerHandler.get_handler(trigger)
    if not handler.is_active():
      raise InvalidRequest('Trigger is not active.')

    try:
      repo = model.repository.get_repository(namespace_name, repo_name)
      pull_robot_name = model.build.get_pull_robot_name(trigger)

      run_parameters = request.get_json()
      prepared = handler.manual_start(run_parameters=run_parameters)
      build_request = start_build(repo, prepared, pull_robot_name=pull_robot_name)
    except TriggerException as tse:
      raise InvalidRequest(tse.message)
    except MaximumBuildsQueuedException:
      # Rate-limit response for too many queued builds.
      abort(429, message='Maximum queued build rate exceeded.')
    except BuildTriggerDisabledException:
      abort(400, message='Build trigger is disabled')

    resp = build_status_view(build_request)
    repo_string = '%s/%s' % (namespace_name, repo_name)
    # Point the client at the status endpoint for the newly queued build.
    headers = {
      'Location': api.url_for(RepositoryBuildStatus, repository=repo_string,
                              build_uuid=build_request.uuid),
    }
    return resp, 201, headers
|
||||
|
||||
|
||||
@resource('/v1/repository/<apirepopath:repository>/trigger/<trigger_uuid>/builds')
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
@path_param('trigger_uuid', 'The UUID of the build trigger')
class TriggerBuildList(RepositoryParamResource):
  """ Resource to represent builds that were activated from the specified trigger. """

  @require_repo_admin
  @disallow_for_app_repositories
  @parse_args()
  @query_param('limit', 'The maximum number of builds to return', type=int, default=5)
  @nickname('listTriggerRecentBuilds')
  def get(self, namespace_name, repo_name, trigger_uuid, parsed_args):
    """ List the builds started by the specified trigger. """
    recent_builds = model.build.list_trigger_builds(namespace_name, repo_name, trigger_uuid,
                                                    parsed_args['limit'])
    return {'builds': [build_status_view(build) for build in recent_builds]}
|
||||
|
||||
|
||||
# Maximum number of values returned by listTriggerFieldValues.
FIELD_VALUE_LIMIT = 30


@resource('/v1/repository/<apirepopath:repository>/trigger/<trigger_uuid>/fields/<field_name>')
@internal_only
class BuildTriggerFieldValues(RepositoryParamResource):
  """ Custom verb to fetch a values list for a particular field name. """

  @require_repo_admin
  @disallow_for_app_repositories
  @disallow_for_non_normal_repositories
  @nickname('listTriggerFieldValues')
  def post(self, namespace_name, repo_name, trigger_uuid, field_name):
    """ List the field values for a custom run field. """
    trigger = get_trigger(trigger_uuid)

    # An optional request body acts as an updated config for the handler.
    config = request.get_json() or None
    if AdministerRepositoryPermission(namespace_name, repo_name).can():
      handler = BuildTriggerHandler.get_handler(trigger, config)
      values = handler.list_field_values(field_name, limit=FIELD_VALUE_LIMIT)

      # None indicates the handler does not recognize this field name.
      if values is None:
        raise NotFound()

      return {
        'values': values
      }
    else:
      raise Unauthorized()
|
||||
|
||||
|
||||
@resource('/v1/repository/<apirepopath:repository>/trigger/<trigger_uuid>/sources')
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
@path_param('trigger_uuid', 'The UUID of the build trigger')
@internal_only
class BuildTriggerSources(RepositoryParamResource):
  """ Custom verb to fetch the list of build sources for the trigger config. """
  schemas = {
    'BuildTriggerSourcesRequest': {
      'type': 'object',
      'description': 'Specifies the namespace under which to fetch sources',
      'properties': {
        'namespace': {
          'type': 'string',
          'description': 'The namespace for which to fetch sources'
        },
      },
    }
  }

  @require_repo_admin
  @disallow_for_app_repositories
  @disallow_for_non_normal_repositories
  @nickname('listTriggerBuildSources')
  @validate_json_request('BuildTriggerSourcesRequest')
  def post(self, namespace_name, repo_name, trigger_uuid):
    """ List the build sources for the trigger configuration thus far. """
    namespace = request.get_json()['namespace']

    trigger = get_trigger(trigger_uuid)

    # Only the user who connected the trigger may enumerate its sources.
    user_permission = UserAdminPermission(trigger.connected_user.username)
    if user_permission.can():
      handler = BuildTriggerHandler.get_handler(trigger)

      try:
        return {
          'sources': handler.list_build_sources_for_namespace(namespace)
        }
      except TriggerException as rre:
        raise InvalidRequest(rre.message)
    else:
      raise Unauthorized()
|
||||
|
||||
|
||||
@resource('/v1/repository/<apirepopath:repository>/trigger/<trigger_uuid>/namespaces')
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
@path_param('trigger_uuid', 'The UUID of the build trigger')
@internal_only
class BuildTriggerSourceNamespaces(RepositoryParamResource):
  """ Custom verb to fetch the list of namespaces (orgs, projects, etc) for the trigger config. """

  @require_repo_admin
  @disallow_for_app_repositories
  @nickname('listTriggerBuildSourceNamespaces')
  def get(self, namespace_name, repo_name, trigger_uuid):
    """ List the source namespaces for the trigger configuration thus far. """
    trigger = get_trigger(trigger_uuid)

    # Only the user who connected the trigger may enumerate its namespaces.
    user_permission = UserAdminPermission(trigger.connected_user.username)
    if user_permission.can():
      handler = BuildTriggerHandler.get_handler(trigger)

      try:
        return {
          'namespaces': handler.list_build_source_namespaces()
        }
      except TriggerException as rre:
        raise InvalidRequest(rre.message)
    else:
      raise Unauthorized()
|
||||
|
122
endpoints/api/trigger_analyzer.py
Normal file
122
endpoints/api/trigger_analyzer.py
Normal file
|
@ -0,0 +1,122 @@
|
|||
from os import path
|
||||
|
||||
from auth import permissions
|
||||
from data import model
|
||||
from util import dockerfileparse
|
||||
|
||||
|
||||
def is_parent(context, dockerfile_path):
  """ Returns whether the build context directory is a parent of (contains) the
  directory holding dockerfile_path.

  Both paths are normalized to absolute, trailing-slash form before the prefix
  check, so redundant separators (e.g. '/a//') compare correctly. Empty (or
  missing) arguments are never considered parent/child.
  """
  if not context or not dockerfile_path:
    return False

  # Normalize the context to '/.../' form so the prefix check cannot match a
  # partial path component (e.g. '/a/b' must not be a parent of '/a/bc/d').
  normalized_context = path.normpath(context)
  if not normalized_context.endswith(path.sep):
    normalized_context += path.sep
  if not normalized_context.startswith(path.sep):
    normalized_context = path.sep + normalized_context

  # Normalize the Dockerfile's directory the same way.
  normalized_subdir = path.normpath(path.dirname(dockerfile_path))
  if not normalized_subdir.startswith(path.sep):
    normalized_subdir = path.sep + normalized_subdir
  if not normalized_subdir.endswith(path.sep):
    normalized_subdir += path.sep

  return normalized_subdir.startswith(normalized_context)
|
||||
|
||||
|
||||
class TriggerAnalyzer:
  """ This analyzes triggers and returns the appropriate trigger and robot view to the frontend. """

  def __init__(self, handler, namespace_name, server_hostname, new_config_dict, admin_org_permission):
    # handler: the BuildTriggerHandler used to load the Dockerfile contents.
    self.handler = handler
    # namespace_name: the namespace in which the trigger's repository lives.
    self.namespace_name = namespace_name
    # server_hostname: this registry's hostname, used to detect base images hosted here.
    self.server_hostname = server_hostname
    # new_config_dict: the (proposed) trigger configuration; may contain 'context' and
    # 'dockerfile_path' keys.
    self.new_config_dict = new_config_dict
    # admin_org_permission: whether the caller administers the org (controls robot listing).
    self.admin_org_permission = admin_org_permission

  def analyze_trigger(self):
    """ Analyzes the trigger's Dockerfile and returns a view dict (see analyze_view) describing
        the base image's status: 'warning'/'error' with a message, 'publicbase', or
        'requiresrobot' when the base repository is private and hosted on this registry.
    """
    # Load the contents of the Dockerfile.
    contents = self.handler.load_dockerfile_contents()
    if not contents:
      return self.analyze_view(self.namespace_name, None, 'warning',
                               message='Specified Dockerfile path for the trigger was not found on the main ' +
                               'branch. This trigger may fail.')

    # Parse the contents of the Dockerfile.
    parsed = dockerfileparse.parse_dockerfile(contents)
    if not parsed:
      return self.analyze_view(self.namespace_name, None, 'error', message='Could not parse the Dockerfile specified')

    # Check whether the dockerfile_path is correct
    if self.new_config_dict.get('context') and not is_parent(self.new_config_dict.get('context'),
                                                             self.new_config_dict.get('dockerfile_path')):
      return self.analyze_view(self.namespace_name, None, 'error',
                               message='Dockerfile, %s, is not a child of the context, %s.' %
                               (self.new_config_dict.get('context'),
                                self.new_config_dict.get('dockerfile_path')))

    # Determine the base image (i.e. the FROM) for the Dockerfile.
    base_image = parsed.get_base_image()
    if not base_image:
      return self.analyze_view(self.namespace_name, None, 'warning', message='No FROM line found in the Dockerfile')

    # Check to see if the base image lives in Quay.
    quay_registry_prefix = '%s/' % self.server_hostname
    if not base_image.startswith(quay_registry_prefix):
      return self.analyze_view(self.namespace_name, None, 'publicbase')

    # Lookup the repository in Quay. A valid path has exactly two components:
    # namespace/repository.
    result = str(base_image)[len(quay_registry_prefix):].split('/', 2)
    if len(result) != 2:
      msg = '"%s" is not a valid Quay repository path' % base_image
      return self.analyze_view(self.namespace_name, None, 'warning', message=msg)

    (base_namespace, base_repository) = result
    found_repository = model.repository.get_repository(base_namespace, base_repository)
    if not found_repository:
      return self.analyze_view(self.namespace_name, None, 'error',
                               message='Repository "%s" referenced by the Dockerfile was not found' % base_image)

    # If the repository is private and the user cannot see that repo, then
    # mark it as not found.
    # BUGFIX: a Permission object is always truthy, so the previous `not can_read` check
    # could never fire, leaking the existence of private repositories. We must call
    # .can() to actually evaluate the permission against the current identity.
    can_read = permissions.ReadRepositoryPermission(base_namespace, base_repository)
    if found_repository.visibility.name != 'public' and not can_read.can():
      return self.analyze_view(self.namespace_name, None, 'error',
                               message='Repository "%s" referenced by the Dockerfile was not found' % base_image)

    if found_repository.visibility.name == 'public':
      return self.analyze_view(base_namespace, base_repository, 'publicbase')

    return self.analyze_view(base_namespace, base_repository, 'requiresrobot')

  def analyze_view(self, image_namespace, image_repository, status, message=None):
    """ Builds the analysis result dict sent to the frontend.

        When the caller administers the org, includes the namespace's robot accounts,
        each marked with whether it already has (transitive) read access to the base
        repository. Otherwise `robots` is empty.
    """
    # Retrieve the list of robots and mark whether they have read access already.
    robots = []
    if self.admin_org_permission:
      if image_repository is not None:
        perm_query = model.user.get_all_repo_users_transitive(image_namespace, image_repository)
        user_ids_with_permission = {user.id for user in perm_query}
      else:
        user_ids_with_permission = set()

      def robot_view(robot):
        return {
          'name': robot.username,
          'kind': 'user',
          'is_robot': True,
          'can_read': robot.id in user_ids_with_permission,
        }

      robots = [robot_view(robot) for robot in model.user.list_namespace_robots(image_namespace)]

    return {
      'namespace': image_namespace,
      'name': image_repository,
      'robots': robots,
      'status': status,
      'message': message,
      'is_admin': self.admin_org_permission,
    }
|
1120
endpoints/api/user.py
Normal file
1120
endpoints/api/user.py
Normal file
File diff suppressed because it is too large
Load diff
Reference in a new issue