- {% block content %}{% endblock %}
-
+{% endif %}
+ {% block content %}{% endblock %}
+{% if with_base_template %}
@@ -71,3 +73,4 @@
{% endif %}
+{% endif %}
diff --git a/emails/confirmemail.html b/emails/confirmemail.html
index 55b1db157..11ea31d00 100644
--- a/emails/confirmemail.html
+++ b/emails/confirmemail.html
@@ -7,13 +7,13 @@
- This email address was user to register user {{ username }}. |
+ This email address was used to register user {{ username }}. |
- Once you confirm this email you’ll be able to access your {{ app_title }} account. |
+ Once you confirm this email, you’ll be able to access your {{ app_title }} account. |
diff --git a/emails/logsexported.html b/emails/logsexported.html
new file mode 100644
index 000000000..945ddedcc
--- /dev/null
+++ b/emails/logsexported.html
@@ -0,0 +1,44 @@
+{% extends "base.html" %}
+
+{% block content %}
+
+Usage Logs Export has completed
+Export ID: {{ export_id }}
+
+
+{% if status == 'success' %}
+
+
+ The exported logs information can be found at {{ exported_data_url }} and will remain accessible for {{ exported_data_expiration }} seconds before being deleted. |
+
+
+{% elif status == 'failed' %}
+
+
+ The attempt to export the logs in the specified range has failed. This operation will be retried up to 3 times. Please contact support if this problem persists. |
+
+
+{% elif status == 'timedout' %}
+
+
+ The attempt to export the logs in the specified range has timed out. Please contact support if this problem persists. |
+
+
+{% elif status == 'invalidrequest' %}
+
+
+ The attempt to export the logs failed due to an invalid request. Please contact support if this problem persists. |
+
+
+{% endif %}
+
+
+
+ If you did not initiate this operation, please delete this e-mail. |
+
+
+
+Best Wishes,
+The {{ app_title }} Team
+
+{% endblock %}
diff --git a/endpoints/api/__init__.py b/endpoints/api/__init__.py
index 6a9369d8d..8dcabe6a3 100644
--- a/endpoints/api/__init__.py
+++ b/endpoints/api/__init__.py
@@ -1,56 +1,63 @@
import logging
import datetime
-import json
from calendar import timegm
from email.utils import formatdate
from functools import partial, wraps
-from enum import Enum
-from flask import Blueprint, Response, request, make_response, jsonify, session, url_for
+from flask import Blueprint, request, session
from flask_restful import Resource, abort, Api, reqparse
from flask_restful.utils.cors import crossdomain
from jsonschema import validate, ValidationError
-from app import app, metric_queue
-from data import model
+from app import app, metric_queue, authentication
from auth.permissions import (ReadRepositoryPermission, ModifyRepositoryPermission,
AdministerRepositoryPermission, UserReadPermission,
UserAdminPermission)
from auth import scopes
-from auth.auth_context import get_authenticated_user, get_validated_oauth_token
-from auth.process import process_oauth
+from auth.auth_context import (get_authenticated_context, get_authenticated_user,
+ get_validated_oauth_token)
+from auth.decorators import process_oauth
+from data import model as data_model
+from data.logs_model import logs_model
+from data.database import RepositoryState
from endpoints.csrf import csrf_protect
-from endpoints.exception import (ApiException, Unauthorized, InvalidRequest, InvalidResponse,
- FreshLoginRequired)
-from endpoints.decorators import check_anon_protection
+from endpoints.exception import (Unauthorized, InvalidRequest, InvalidResponse,
+ FreshLoginRequired, NotFound)
+from endpoints.decorators import check_anon_protection, require_xhr_from_browser, check_readonly
from util.metrics.metricqueue import time_decorator
from util.names import parse_namespace_repository
from util.pagination import encrypt_page_token, decrypt_page_token
+from util.request import get_request_ip
+from __init__models_pre_oci import pre_oci_model as model
+
logger = logging.getLogger(__name__)
api_bp = Blueprint('api', __name__)
-api = Api()
+
+
+CROSS_DOMAIN_HEADERS = ['Authorization', 'Content-Type', 'X-Requested-With']
+
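+# Subclass the flask_restful Api so that CORS headers are attached to error responses as
+# well; this replaces the Blueprint-level ApiException handler removed below.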
+class ApiExceptionHandlingApi(Api):
+ @crossdomain(origin='*', headers=CROSS_DOMAIN_HEADERS)
+ def handle_error(self, error):
+ return super(ApiExceptionHandlingApi, self).handle_error(error)
+
+
+api = ApiExceptionHandlingApi()
api.init_app(api_bp)
api.decorators = [csrf_protect(),
- crossdomain(origin='*', headers=['Authorization', 'Content-Type']),
- process_oauth, time_decorator(api_bp.name, metric_queue)]
+ crossdomain(origin='*', headers=CROSS_DOMAIN_HEADERS),
+ process_oauth, time_decorator(api_bp.name, metric_queue),
+ require_xhr_from_browser]
-@api_bp.app_errorhandler(ApiException)
-@crossdomain(origin='*', headers=['Authorization', 'Content-Type'])
-def handle_api_error(error):
- response = Response(json.dumps(error.to_dict()), error.status_code, mimetype='application/json')
- if error.status_code == 401:
- response.headers['WWW-Authenticate'] = ('Bearer error="%s" error_description="%s"' %
- (error.error_type.value, error.error_description))
- return response
-
def resource(*urls, **kwargs):
def wrapper(api_resource):
if not api_resource:
return None
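+    # Mark the resource as registered so show_if/hide_if can detect decorator ordering mistakes.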
+ api_resource.registered = True
api.add_resource(api_resource, *urls, **kwargs)
return api_resource
return wrapper
@@ -58,6 +65,11 @@ def resource(*urls, **kwargs):
def show_if(value):
def f(inner):
+ if hasattr(inner, 'registered') and inner.registered:
+ msg = ('API endpoint %s is already registered; please switch the ' +
+ '@show_if to be *below* the @resource decorator')
+ raise Exception(msg % inner)
+
if not value:
return None
@@ -67,6 +79,11 @@ def show_if(value):
def hide_if(value):
def f(inner):
+ if hasattr(inner, 'registered') and inner.registered:
+ msg = ('API endpoint %s is already registered; please switch the ' +
+ '@hide_if to be *below* the @resource decorator')
+ raise Exception(msg % inner)
+
if value:
return None
@@ -190,14 +207,39 @@ def parse_repository_name(func):
class ApiResource(Resource):
- method_decorators = [check_anon_protection]
+ registered = False
+ method_decorators = [check_anon_protection, check_readonly]
def options(self):
return None, 200
class RepositoryParamResource(ApiResource):
- method_decorators = [check_anon_protection, parse_repository_name]
+ method_decorators = [check_anon_protection, parse_repository_name, check_readonly]
+
+
+def disallow_for_app_repositories(func):
+ @wraps(func)
+ def wrapped(self, namespace_name, repository_name, *args, **kwargs):
+ # Lookup the repository with the given namespace and name and ensure it is not an application
+ # repository.
+ if model.is_app_repository(namespace_name, repository_name):
+ abort(501)
+
+ return func(self, namespace_name, repository_name, *args, **kwargs)
+
+ return wrapped
+
+
+def disallow_for_non_normal_repositories(func):
+ @wraps(func)
+ def wrapped(self, namespace_name, repository_name, *args, **kwargs):
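+    # Disallow the operation for repositories in a non-NORMAL (read-only or mirror) state.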
+ repo = data_model.repository.get_repository(namespace_name, repository_name)
+ if repo and repo.state != RepositoryState.NORMAL:
+ abort(503, message='Repository is in read only or mirror mode: %s' % repo.state)
+
+ return func(self, namespace_name, repository_name, *args, **kwargs)
+ return wrapped
def require_repo_permission(permission_class, scope, allow_public=False):
@@ -210,7 +252,7 @@ def require_repo_permission(permission_class, scope, allow_public=False):
permission = permission_class(namespace, repository)
if (permission.can() or
(allow_public and
- model.repository.repository_is_public(namespace, repository))):
+ model.repository_is_public(namespace, repository))):
return func(self, namespace, repository, *args, **kwargs)
raise Unauthorized()
return wrapped
@@ -268,8 +310,7 @@ def require_fresh_login(func):
if not user:
raise Unauthorized()
- oauth_token = get_validated_oauth_token()
- if oauth_token:
+ if get_validated_oauth_token():
return func(*args, **kwargs)
logger.debug('Checking fresh login for user %s', user.username)
@@ -277,7 +318,8 @@ def require_fresh_login(func):
last_login = session.get('login_time', datetime.datetime.min)
valid_span = datetime.datetime.now() - datetime.timedelta(minutes=10)
- if not user.password_hash or last_login >= valid_span:
+ if (not user.password_hash or last_login >= valid_span or
+ not authentication.supports_fresh_login):
return func(*args, **kwargs)
raise FreshLoginRequired()
@@ -294,7 +336,19 @@ def require_scope(scope_object):
return wrapper
-def validate_json_request(schema_name):
+def max_json_size(max_size):
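+  """ Decorator that rejects JSON requests whose raw body exceeds max_size bytes. """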
+ def wrapper(func):
+ @wraps(func)
+ def wrapped(self, *args, **kwargs):
+ if request.is_json and len(request.get_data()) > max_size:
+ raise InvalidRequest()
+
+ return func(self, *args, **kwargs)
+ return wrapped
+ return wrapper
+
+
+def validate_json_request(schema_name, optional=False):
def wrapper(func):
@add_method_metadata('request_schema', schema_name)
@wraps(func)
@@ -303,12 +357,13 @@ def validate_json_request(schema_name):
try:
json_data = request.get_json()
if json_data is None:
- raise InvalidRequest('Missing JSON body')
-
- validate(json_data, schema)
+ if not optional:
+ raise InvalidRequest('Missing JSON body')
+ else:
+ validate(json_data, schema)
return func(self, *args, **kwargs)
except ValidationError as ex:
- raise InvalidRequest(ex.message)
+ raise InvalidRequest(str(ex))
return wrapped
return wrapper
@@ -317,12 +372,13 @@ def request_error(exception=None, **kwargs):
data = kwargs.copy()
message = 'Request error.'
if exception:
- message = exception.message
+ message = str(exception)
+
message = data.pop('message', message)
raise InvalidRequest(message, data)
-def log_action(kind, user_or_orgname, metadata=None, repo=None):
+def log_action(kind, user_or_orgname, metadata=None, repo=None, repo_name=None):
if not metadata:
metadata = {}
@@ -333,8 +389,15 @@ def log_action(kind, user_or_orgname, metadata=None, repo=None):
metadata['oauth_token_application'] = oauth_token.application.name
performer = get_authenticated_user()
- model.log.log_action(kind, user_or_orgname, performer=performer, ip=request.remote_addr,
- metadata=metadata, repository=repo)
+
+ if repo_name is not None:
+ repo = data_model.repository.get_repository(user_or_orgname, repo_name)
+
+ logs_model.log_action(kind, user_or_orgname,
+ repository=repo,
+ performer=performer,
+ ip=get_request_ip(),
+ metadata=metadata)
def define_json_response(schema_name):
@@ -349,13 +412,14 @@ def define_json_response(schema_name):
try:
validate(resp, schema)
except ValidationError as ex:
- raise InvalidResponse(ex.message)
+ raise InvalidResponse(str(ex))
return resp
return wrapped
return wrapper
+import endpoints.api.appspecifictokens
import endpoints.api.billing
import endpoints.api.build
import endpoints.api.discovery
@@ -380,4 +444,5 @@ import endpoints.api.team
import endpoints.api.trigger
import endpoints.api.user
import endpoints.api.secscan
-
+import endpoints.api.signing
+import endpoints.api.mirror
diff --git a/endpoints/api/__init__models_interface.py b/endpoints/api/__init__models_interface.py
new file mode 100644
index 000000000..974d9e0e1
--- /dev/null
+++ b/endpoints/api/__init__models_interface.py
@@ -0,0 +1,54 @@
+from abc import ABCMeta, abstractmethod
+
+from six import add_metaclass
+
+
+@add_metaclass(ABCMeta)
+class InitDataInterface(object):
+ """
+ Interface that represents all data store interactions required by __init__.
+ """
+
+ @abstractmethod
+ def is_app_repository(self, namespace_name, repository_name):
+ """
+
+ Args:
+ namespace_name: namespace or user
+ repository_name: repository
+
+ Returns:
+ Boolean
+ """
+ pass
+
+ @abstractmethod
+ def repository_is_public(self, namespace_name, repository_name):
+ """
+
+ Args:
+ namespace_name: namespace or user
+ repository_name: repository
+
+ Returns:
+ Boolean
+ """
+ pass
+
+ @abstractmethod
+ def log_action(self, kind, namespace_name, repository_name, performer, ip, metadata):
+ """
+
+ Args:
+ kind: type of log
+      namespace_name: name of the user or organization
+      repository_name: name of the repository the action is related to
+      performer: user doing the action
+      ip: originating ip
+      metadata: metadata
+
+ Returns:
+ None
+ """
+ pass
+
diff --git a/endpoints/api/__init__models_pre_oci.py b/endpoints/api/__init__models_pre_oci.py
new file mode 100644
index 000000000..f14e7267c
--- /dev/null
+++ b/endpoints/api/__init__models_pre_oci.py
@@ -0,0 +1,19 @@
+from __init__models_interface import InitDataInterface
+
+from data import model
+from data.logs_model import logs_model
+
+class PreOCIModel(InitDataInterface):
+ def is_app_repository(self, namespace_name, repository_name):
+ return model.repository.get_repository(namespace_name, repository_name,
+ kind_filter='application') is not None
+
+ def repository_is_public(self, namespace_name, repository_name):
+ return model.repository.repository_is_public(namespace_name, repository_name)
+
+ def log_action(self, kind, namespace_name, repository_name, performer, ip, metadata):
+ repository = model.repository.get_repository(namespace_name, repository_name)
+ logs_model.log_action(kind, namespace_name, performer=performer, ip=ip, metadata=metadata,
+ repository=repository)
+
+pre_oci_model = PreOCIModel()
diff --git a/endpoints/api/appspecifictokens.py b/endpoints/api/appspecifictokens.py
new file mode 100644
index 000000000..1e886c385
--- /dev/null
+++ b/endpoints/api/appspecifictokens.py
@@ -0,0 +1,133 @@
+""" Manages app specific tokens for the current user. """
+
+import logging
+import math
+
+from datetime import timedelta
+from flask import request
+
+import features
+
+from app import app
+from auth.auth_context import get_authenticated_user
+from data import model
+from endpoints.api import (ApiResource, nickname, resource, validate_json_request,
+ log_action, require_user_admin, require_fresh_login,
+ path_param, NotFound, format_date, show_if, query_param, parse_args,
+ truthy_bool)
+from util.timedeltastring import convert_to_timedelta
+
+logger = logging.getLogger(__name__)
+
+
+def token_view(token, include_code=False):
+ data = {
+ 'uuid': token.uuid,
+ 'title': token.title,
+ 'last_accessed': format_date(token.last_accessed),
+ 'created': format_date(token.created),
+ 'expiration': format_date(token.expiration),
+ }
+
+ if include_code:
+ data.update({
+ 'token_code': model.appspecifictoken.get_full_token_string(token),
+ })
+
+ return data
+
+
+# The default window to use when looking up tokens that will be expiring.
+_DEFAULT_TOKEN_EXPIRATION_WINDOW = '4w'
+
+
+@resource('/v1/user/apptoken')
+@show_if(features.APP_SPECIFIC_TOKENS)
+class AppTokens(ApiResource):
+ """ Lists all app specific tokens for a user """
+ schemas = {
+ 'NewToken': {
+ 'type': 'object',
+ 'required': [
+ 'title',
+ ],
+ 'properties': {
+ 'title': {
+ 'type': 'string',
+ 'description': 'The user-defined title for the token',
+ },
+ }
+ },
+ }
+
+ @require_user_admin
+ @nickname('listAppTokens')
+ @parse_args()
+ @query_param('expiring', 'If true, only returns those tokens expiring soon', type=truthy_bool)
+ def get(self, parsed_args):
+ """ Lists the app specific tokens for the user. """
+ expiring = parsed_args['expiring']
+ if expiring:
+ expiration = app.config.get('APP_SPECIFIC_TOKEN_EXPIRATION')
+ token_expiration = convert_to_timedelta(expiration or _DEFAULT_TOKEN_EXPIRATION_WINDOW)
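+      # Treat tokens as "expiring soon" when within 10% of the configured expiration window.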
+ seconds = math.ceil(token_expiration.total_seconds() * 0.1) or 1
+ soon = timedelta(seconds=seconds)
+ tokens = model.appspecifictoken.get_expiring_tokens(get_authenticated_user(), soon)
+ else:
+ tokens = model.appspecifictoken.list_tokens(get_authenticated_user())
+
+ return {
+ 'tokens': [token_view(token, include_code=False) for token in tokens],
+ 'only_expiring': expiring,
+ }
+
+ @require_user_admin
+ @require_fresh_login
+ @nickname('createAppToken')
+ @validate_json_request('NewToken')
+ def post(self):
+ """ Create a new app specific token for user. """
+ title = request.get_json()['title']
+ token = model.appspecifictoken.create_token(get_authenticated_user(), title)
+
+ log_action('create_app_specific_token', get_authenticated_user().username,
+ {'app_specific_token_title': token.title,
+ 'app_specific_token': token.uuid})
+
+ return {
+ 'token': token_view(token, include_code=True),
+ }
+
+
+@resource('/v1/user/apptoken/')
+@show_if(features.APP_SPECIFIC_TOKENS)
+@path_param('token_uuid', 'The uuid of the app specific token')
+class AppToken(ApiResource):
+ """ Provides operations on an app specific token """
+ @require_user_admin
+ @require_fresh_login
+ @nickname('getAppToken')
+ def get(self, token_uuid):
+ """ Returns a specific app token for the user. """
+ token = model.appspecifictoken.get_token_by_uuid(token_uuid, owner=get_authenticated_user())
+ if token is None:
+ raise NotFound()
+
+ return {
+ 'token': token_view(token, include_code=True),
+ }
+
+ @require_user_admin
+ @require_fresh_login
+ @nickname('revokeAppToken')
+ def delete(self, token_uuid):
+ """ Revokes a specific app token for the user. """
+ token = model.appspecifictoken.revoke_token_by_uuid(token_uuid, owner=get_authenticated_user())
+ if token is None:
+ raise NotFound()
+
+ log_action('revoke_app_specific_token', get_authenticated_user().username,
+ {'app_specific_token_title': token.title,
+ 'app_specific_token': token.uuid})
+
+ return '', 204
diff --git a/endpoints/api/billing.py b/endpoints/api/billing.py
index 5e12b8f6b..db7158d12 100644
--- a/endpoints/api/billing.py
+++ b/endpoints/api/billing.py
@@ -32,7 +32,7 @@ def get_namespace_plan(namespace):
# TODO: Can we cache this or make it faster somehow?
try:
cus = billing.Customer.retrieve(namespace_user.stripe_id)
- except stripe.APIConnectionError:
+ except stripe.error.APIConnectionError:
abort(503, message='Cannot contact Stripe')
if not cus.subscription:
@@ -55,7 +55,7 @@ def lookup_allowed_private_repos(namespace):
def carderror_response(e):
- return {'carderror': e.message}, 402
+ return {'carderror': str(e)}, 402
def get_card(user):
@@ -66,7 +66,7 @@ def get_card(user):
if user.stripe_id:
try:
cus = billing.Customer.retrieve(user.stripe_id)
- except stripe.APIConnectionError as e:
+ except stripe.error.APIConnectionError as e:
abort(503, message='Cannot contact Stripe')
if cus and cus.default_card:
@@ -93,18 +93,18 @@ def set_card(user, token):
if user.stripe_id:
try:
cus = billing.Customer.retrieve(user.stripe_id)
- except stripe.APIConnectionError as e:
+ except stripe.error.APIConnectionError as e:
abort(503, message='Cannot contact Stripe')
if cus:
try:
cus.card = token
cus.save()
- except stripe.CardError as exc:
+ except stripe.error.CardError as exc:
return carderror_response(exc)
- except stripe.InvalidRequestError as exc:
+ except stripe.error.InvalidRequestError as exc:
return carderror_response(exc)
- except stripe.APIConnectionError as e:
+ except stripe.error.APIConnectionError as e:
return carderror_response(e)
return get_card(user)
@@ -127,8 +127,8 @@ def get_invoices(customer_id):
}
try:
- invoices = billing.Invoice.all(customer=customer_id, count=12)
- except stripe.APIConnectionError as e:
+ invoices = billing.Invoice.list(customer=customer_id, count=12)
+ except stripe.error.APIConnectionError as e:
abort(503, message='Cannot contact Stripe')
return {
@@ -139,7 +139,7 @@ def get_invoices(customer_id):
def get_invoice_fields(user):
try:
cus = billing.Customer.retrieve(user.stripe_id)
- except stripe.APIConnectionError:
+ except stripe.error.APIConnectionError:
abort(503, message='Cannot contact Stripe')
if not 'metadata' in cus:
@@ -329,7 +329,7 @@ class UserPlan(ApiResource):
if user.stripe_id:
try:
cus = billing.Customer.retrieve(user.stripe_id)
- except stripe.APIConnectionError as e:
+ except stripe.error.APIConnectionError as e:
abort(503, message='Cannot contact Stripe')
if cus.subscription:
@@ -398,7 +398,7 @@ class OrganizationPlan(ApiResource):
if organization.stripe_id:
try:
cus = billing.Customer.retrieve(organization.stripe_id)
- except stripe.APIConnectionError as e:
+ except stripe.error.APIConnectionError as e:
abort(503, message='Cannot contact Stripe')
if cus.subscription:
diff --git a/endpoints/api/build.py b/endpoints/api/build.py
index ae97571eb..d7fb55ae1 100644
--- a/endpoints/api/build.py
+++ b/endpoints/api/build.py
@@ -1,31 +1,33 @@
""" Create, list, cancel and get status/logs of repository builds. """
-
-from urlparse import urlparse
-
-import logging
-import json
import datetime
import hashlib
+import json
+import logging
+import os
from flask import request
+from urlparse import urlparse
+
+import features
from app import userfiles as user_files, build_logs, log_archive, dockerfile_build_queue
-from buildtrigger.basehandler import BuildTriggerHandler
-from endpoints.api import (RepositoryParamResource, parse_args, query_param, nickname, resource,
- require_repo_read, require_repo_write, validate_json_request,
- ApiResource, internal_only, format_date, api, path_param,
- require_repo_admin, abort)
-from endpoints.exception import Unauthorized, NotFound, InvalidRequest
-from endpoints.building import start_build, PreparedBuild, MaximumBuildsQueuedException
-from data import database
-from data import model
from auth.permissions import (ReadRepositoryPermission, ModifyRepositoryPermission,
AdministerRepositoryPermission, AdministerOrganizationPermission,
SuperUserPermission)
-
+from buildtrigger.basehandler import BuildTriggerHandler
+from data import database
+from data import model
from data.buildlogs import BuildStatusRetrievalError
+from endpoints.api import (RepositoryParamResource, parse_args, query_param, nickname, resource,
+ require_repo_read, require_repo_write, validate_json_request,
+ ApiResource, internal_only, format_date, api, path_param,
+ require_repo_admin, abort, disallow_for_app_repositories,
+ disallow_for_non_normal_repositories)
+from endpoints.building import (start_build, PreparedBuild, MaximumBuildsQueuedException,
+ BuildTriggerDisabledException)
+from endpoints.exception import Unauthorized, NotFound, InvalidRequest
from util.names import parse_robot_username
-
+from util.request import get_request_ip
logger = logging.getLogger(__name__)
@@ -51,6 +53,7 @@ def user_view(user):
'is_robot': user.robot,
}
+
def trigger_view(trigger, can_read=False, can_admin=False, for_build=False):
if trigger and trigger.uuid:
build_trigger = BuildTriggerHandler.get_handler(trigger)
@@ -69,6 +72,8 @@ def trigger_view(trigger, can_read=False, can_admin=False, for_build=False):
'config': build_trigger.config if can_admin else {},
'can_invoke': can_admin,
+ 'enabled': trigger.enabled,
+ 'disabled_reason': trigger.disabled_reason.name if trigger.disabled_reason else None,
}
if not for_build and can_admin and trigger.pull_robot:
@@ -133,6 +138,8 @@ def build_status_view(build_obj):
'display_name': build_obj.display_name,
'status': status or {},
'subdirectory': job_config.get('build_subdir', ''),
+ 'dockerfile_path': job_config.get('build_subdir', ''),
+ 'context': job_config.get('context', ''),
'tags': job_config.get('docker_tags', []),
'manual_user': job_config.get('manual_user', None),
'is_writer': can_write,
@@ -147,9 +154,10 @@ def build_status_view(build_obj):
'error': error,
}
- if can_write:
+ if can_write or features.READER_BUILD_LOGS:
if build_obj.resource_key is not None:
- resp['archive_url'] = user_files.get_file_url(build_obj.resource_key, requires_cors=True)
+ resp['archive_url'] = user_files.get_file_url(build_obj.resource_key,
+ get_request_ip(), requires_cors=True)
elif job_config.get('archive_url', None):
resp['archive_url'] = job_config['archive_url']
@@ -175,7 +183,15 @@ class RepositoryBuildList(RepositoryParamResource):
},
'subdirectory': {
'type': 'string',
- 'description': 'Subdirectory in which the Dockerfile can be found',
+        'description': 'Subdirectory in which the Dockerfile can be found. You can only specify this or dockerfile_path.',
+ },
+ 'dockerfile_path': {
+ 'type': 'string',
+        'description': 'Path to the Dockerfile. You can only specify this or subdirectory.',
+ },
+ 'context': {
+ 'type': 'string',
+        'description': 'The context for the Dockerfile build. This is optional.',
},
'pull_robot': {
'type': 'string',
@@ -200,6 +216,7 @@ class RepositoryBuildList(RepositoryParamResource):
@query_param('limit', 'The maximum number of builds to return', type=int, default=5)
@query_param('since', 'Returns all builds since the given unix timecode', type=int, default=None)
@nickname('getRepoBuilds')
+ @disallow_for_app_repositories
def get(self, namespace, repository, parsed_args):
""" Get the list of repository builds. """
limit = parsed_args.get('limit', 5)
@@ -215,6 +232,8 @@ class RepositoryBuildList(RepositoryParamResource):
@require_repo_write
@nickname('requestRepoBuild')
+ @disallow_for_app_repositories
+ @disallow_for_non_normal_repositories
@validate_json_request('RepositoryBuildRequest')
def post(self, namespace, repository):
""" Request that a repository be built and pushed from the specified input. """
@@ -241,7 +260,7 @@ class RepositoryBuildList(RepositoryParamResource):
if scheme != 'http' and scheme != 'https':
raise InvalidRequest('Invalid Archive URL: Must be http or https')
- subdir = request_json['subdirectory'] if 'subdirectory' in request_json else ''
+ context, subdir = self.get_dockerfile_context(request_json)
tags = request_json.get('docker_tags', ['latest'])
pull_robot_name = request_json.get('pull_robot', None)
@@ -276,9 +295,12 @@ class RepositoryBuildList(RepositoryParamResource):
if repo is None:
raise NotFound()
- build_name = (user_files.get_file_checksum(dockerfile_id)
- if dockerfile_id
- else hashlib.sha224(archive_url).hexdigest()[0:7])
+ try:
+ build_name = (user_files.get_file_checksum(dockerfile_id)
+ if dockerfile_id
+ else hashlib.sha224(archive_url).hexdigest()[0:7])
+ except IOError:
+ raise InvalidRequest('File %s could not be found or is invalid' % dockerfile_id)
prepared = PreparedBuild()
prepared.build_name = build_name
@@ -286,13 +308,15 @@ class RepositoryBuildList(RepositoryParamResource):
prepared.archive_url = archive_url
prepared.tags = tags
prepared.subdirectory = subdir
+ prepared.context = context
prepared.is_manual = True
prepared.metadata = {}
-
try:
build_request = start_build(repo, prepared, pull_robot_name=pull_robot_name)
except MaximumBuildsQueuedException:
abort(429, message='Maximum queued build rate exceeded.')
+ except BuildTriggerDisabledException:
+ abort(400, message='Build trigger is disabled')
resp = build_status_view(build_request)
repo_string = '%s/%s' % (namespace, repository)
@@ -302,8 +326,29 @@ class RepositoryBuildList(RepositoryParamResource):
}
return resp, 201, headers
+ @staticmethod
+ def get_dockerfile_context(request_json):
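+    """ Returns a (context, dockerfile path) pair from the request JSON: 'dockerfile_path'
+        takes precedence, then the legacy 'subdirectory' field; otherwise the Dockerfile is
+        assumed to live at the root of the (optional) 'context'. """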
+ context = request_json['context'] if 'context' in request_json else os.path.sep
+ if 'dockerfile_path' in request_json:
+ subdir = request_json['dockerfile_path']
+ if 'context' not in request_json:
+ context = os.path.dirname(subdir)
+ return context, subdir
+ if 'subdirectory' in request_json:
+ subdir = request_json['subdirectory']
+ context = subdir
+ if not subdir.endswith(os.path.sep):
+ subdir += os.path.sep
+ subdir += 'Dockerfile'
+ else:
+ if context.endswith(os.path.sep):
+ subdir = context + 'Dockerfile'
+ else:
+ subdir = context + os.path.sep + 'Dockerfile'
+
+ return context, subdir
@resource('/v1/repository//build/')
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
@@ -312,6 +357,7 @@ class RepositoryBuildResource(RepositoryParamResource):
""" Resource for dealing with repository builds. """
@require_repo_read
@nickname('getRepoBuild')
+ @disallow_for_app_repositories
def get(self, namespace, repository, build_uuid):
""" Returns information about a build. """
try:
@@ -326,6 +372,8 @@ class RepositoryBuildResource(RepositoryParamResource):
@require_repo_admin
@nickname('cancelRepoBuild')
+ @disallow_for_app_repositories
+ @disallow_for_non_normal_repositories
def delete(self, namespace, repository, build_uuid):
""" Cancels a repository build. """
try:
@@ -349,6 +397,7 @@ class RepositoryBuildStatus(RepositoryParamResource):
""" Resource for dealing with repository build status. """
@require_repo_read
@nickname('getRepoBuildStatus')
+ @disallow_for_app_repositories
def get(self, namespace, repository, build_uuid):
""" Return the status for the builds specified by the build uuids. """
build = model.build.get_repository_build(build_uuid)
@@ -363,7 +412,7 @@ def get_logs_or_log_url(build):
# If the logs have been archived, just return a URL of the completed archive
if build.logs_archived:
return {
- 'logs_url': log_archive.get_file_url(build.uuid, requires_cors=True)
+ 'logs_url': log_archive.get_file_url(build.uuid, get_request_ip(), requires_cors=True)
}
start = int(request.args.get('start', 0))
@@ -387,10 +436,15 @@ def get_logs_or_log_url(build):
@path_param('build_uuid', 'The UUID of the build')
class RepositoryBuildLogs(RepositoryParamResource):
""" Resource for loading repository build logs. """
- @require_repo_write
+ @require_repo_read
@nickname('getRepoBuildLogs')
+ @disallow_for_app_repositories
def get(self, namespace, repository, build_uuid):
""" Return the build logs for the build specified by the build uuid. """
+ can_write = ModifyRepositoryPermission(namespace, repository).can()
+ if not features.READER_BUILD_LOGS and not can_write:
+ raise Unauthorized()
+
build = model.build.get_repository_build(build_uuid)
if (not build or build.repository.name != repository or
build.repository.namespace_user.username != namespace):
diff --git a/endpoints/api/discovery.py b/endpoints/api/discovery.py
index c688feb4e..66e7c74a3 100644
--- a/endpoints/api/discovery.py
+++ b/endpoints/api/discovery.py
@@ -1,3 +1,4 @@
+# TODO: extract the discovery logic into a top-level util and use it both here and in config_app's discovery.py
""" API discovery information. """
import re
@@ -12,12 +13,13 @@ from app import app
from auth import scopes
from endpoints.api import (ApiResource, resource, method_metadata, nickname, truthy_bool,
parse_args, query_param)
+from endpoints.decorators import anon_allowed
logger = logging.getLogger(__name__)
-PARAM_REGEX = re.compile(r'<([\w]+:)?([\w]+)>')
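+# Extract the parameter name from Flask URL rule placeholders, tolerating converter
+# specifications that contain more than plain word characters.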
+PARAM_REGEX = re.compile(r'<([^:>]+:)*([\w]+)>')
TYPE_CONVERTER = {
@@ -326,6 +328,7 @@ class DiscoveryResource(ApiResource):
@parse_args()
@query_param('internal', 'Whether to include internal APIs.', type=truthy_bool, default=False)
@nickname('discovery')
+ @anon_allowed
def get(self, parsed_args):
""" List all of the API endpoints available in the swagger API format."""
return swagger_route_data(parsed_args['internal'])
diff --git a/endpoints/api/error.py b/endpoints/api/error.py
index 23d95ed32..bfa80efe2 100644
--- a/endpoints/api/error.py
+++ b/endpoints/api/error.py
@@ -7,7 +7,7 @@ from endpoints.exception import NotFound, ApiErrorType, ERROR_DESCRIPTION
def error_view(error_type):
return {
- 'type': url_for('error', error_type=error_type, _external=True),
+ 'type': url_for('api.error', error_type=error_type, _external=True),
'title': error_type,
'description': ERROR_DESCRIPTION[error_type]
}
diff --git a/endpoints/api/globalmessages.py b/endpoints/api/globalmessages.py
index b27683a17..43ea58083 100644
--- a/endpoints/api/globalmessages.py
+++ b/endpoints/api/globalmessages.py
@@ -6,10 +6,10 @@ from flask import request
import features
from auth import scopes
from auth.permissions import SuperUserPermission
-from data import model
from endpoints.api import (ApiResource, resource, nickname,
require_fresh_login, verify_not_prod, validate_json_request,
require_scope, show_if,)
+from globalmessages_models_pre_oci import pre_oci_model as model
@resource('/v1/messages')
@@ -58,6 +58,11 @@ class GlobalUserMessages(ApiResource):
'message': {
'type': 'object',
'description': 'A single message',
+ 'required': [
+ 'content',
+ 'media_type',
+ 'severity',
+ ],
'properties': {
'content': {
'type': 'string',
@@ -83,7 +88,7 @@ class GlobalUserMessages(ApiResource):
def get(self):
""" Return a super users messages """
return {
- 'messages': [message_view(m) for m in model.message.get_messages()],
+ 'messages': [m.to_dict() for m in model.get_all_messages()],
}
@require_fresh_login
@@ -97,7 +102,10 @@ class GlobalUserMessages(ApiResource):
abort(404)
if SuperUserPermission().can():
- model.message.create([request.get_json()['message']])
+ message_req = request.get_json()['message']
+ message = model.create_message(message_req['severity'], message_req['media_type'], message_req['content'])
+ if message is None:
+ abort(400)
return make_response('', 201)
abort(403)
@@ -114,16 +122,7 @@ class GlobalUserMessage(ApiResource):
def delete(self, uuid):
""" Delete a message """
if SuperUserPermission().can():
- model.message.delete_message([uuid])
+ model.delete_message(uuid)
return make_response('', 204)
abort(403)
-
-
-def message_view(message):
- return {
- 'uuid': message.uuid,
- 'content': message.content,
- 'severity': message.severity,
- 'media_type': message.media_type.name,
- }
diff --git a/endpoints/api/globalmessages_models_interface.py b/endpoints/api/globalmessages_models_interface.py
new file mode 100644
index 000000000..679462c1d
--- /dev/null
+++ b/endpoints/api/globalmessages_models_interface.py
@@ -0,0 +1,54 @@
+from abc import ABCMeta, abstractmethod
+from collections import namedtuple
+
+from six import add_metaclass
+
+class GlobalMessage(
+ namedtuple('GlobalMessage', [
+ 'uuid',
+ 'content',
+ 'severity',
+ 'media_type_name',
+ ])):
+
+ def to_dict(self):
+ return {
+ 'uuid': self.uuid,
+ 'content': self.content,
+ 'severity': self.severity,
+ 'media_type': self.media_type_name,
+ }
+
+
+
+@add_metaclass(ABCMeta)
+class GlobalMessageDataInterface(object):
+ """
+ Data interface for globalmessages API
+ """
+
+ @abstractmethod
+ def get_all_messages(self):
+ """
+
+ Returns:
+ list(GlobalMessage)
+ """
+
+ @abstractmethod
+ def create_message(self, severity, media_type_name, content):
+ """
+
+ Returns:
+ GlobalMessage or None
+ """
+
+ @abstractmethod
+ def delete_message(self, uuid):
+ """
+
+ Returns:
+ void
+ """
+
+
\ No newline at end of file
diff --git a/endpoints/api/globalmessages_models_pre_oci.py b/endpoints/api/globalmessages_models_pre_oci.py
new file mode 100644
index 000000000..d9a623f1b
--- /dev/null
+++ b/endpoints/api/globalmessages_models_pre_oci.py
@@ -0,0 +1,33 @@
+from globalmessages_models_interface import GlobalMessageDataInterface, GlobalMessage
+from data import model
+
+
+class GlobalMessagePreOCI(GlobalMessageDataInterface):
+
+ def get_all_messages(self):
+ messages = model.message.get_messages()
+ return [self._message(m) for m in messages]
+
+ def create_message(self, severity, media_type_name, content):
+ message = {
+ 'severity': severity,
+ 'media_type': media_type_name,
+ 'content': content
+ }
+ messages = model.message.create([message])
+ return self._message(messages[0])
+
+ def delete_message(self, uuid):
+ model.message.delete_message([uuid])
+
+ def _message(self, message_obj):
+ if message_obj is None:
+ return None
+ return GlobalMessage(
+ uuid=message_obj.uuid,
+ content=message_obj.content,
+ severity=message_obj.severity,
+ media_type_name=message_obj.media_type.name,
+ )
+
+pre_oci_model = GlobalMessagePreOCI()
\ No newline at end of file
diff --git a/endpoints/api/image.py b/endpoints/api/image.py
index 0d6e59425..3a9dcd82c 100644
--- a/endpoints/api/image.py
+++ b/endpoints/api/image.py
@@ -1,85 +1,58 @@
""" List and lookup repository images. """
-
import json
-from collections import defaultdict
+from data.registry_model import registry_model
from endpoints.api import (resource, nickname, require_repo_read, RepositoryParamResource,
- format_date, path_param)
+ path_param, disallow_for_app_repositories, format_date)
from endpoints.exception import NotFound
-from data import model
-def image_view(image, image_map, include_ancestors=True):
- command = image.command
-
- def docker_id(aid):
- if aid not in image_map:
- return ''
-
- return image_map[aid].docker_image_id
+def image_dict(image, with_history=False, with_tags=False):
+ parsed_command = None
+ if image.command:
+ try:
+ parsed_command = json.loads(image.command)
+ except (ValueError, TypeError):
+ parsed_command = {'error': 'Could not parse command'}
image_data = {
'id': image.docker_image_id,
'created': format_date(image.created),
'comment': image.comment,
- 'command': json.loads(command) if command else None,
- 'size': image.storage.image_size,
- 'uploading': image.storage.uploading,
- 'sort_index': len(image.ancestors),
+ 'command': parsed_command,
+ 'size': image.image_size,
+ 'uploading': image.uploading,
+ 'sort_index': len(image.parents),
}
- if include_ancestors:
- # Calculate the ancestors string, with the DBID's replaced with the docker IDs.
- ancestors = [docker_id(a) for a in image.ancestor_id_list()]
- image_data['ancestors'] = '/{0}/'.format('/'.join(ancestors))
+ if with_tags:
+ image_data['tags'] = [tag.name for tag in image.tags]
+ if with_history:
+ image_data['history'] = [image_dict(parent) for parent in image.parents]
+
+  # Build the ancestors string from the docker IDs of the parent images.
+ parent_docker_ids = [parent_image.docker_image_id for parent_image in image.parents]
+ image_data['ancestors'] = '/{0}/'.format('/'.join(parent_docker_ids))
return image_data
-def historical_image_view(image, image_map):
- ancestors = [image_map[a] for a in image.ancestor_id_list()]
- normal_view = image_view(image, image_map)
- normal_view['history'] = [image_view(parent, image_map, False) for parent in ancestors]
- return normal_view
-
-
@resource('/v1/repository//image/')
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
class RepositoryImageList(RepositoryParamResource):
""" Resource for listing repository images. """
+
@require_repo_read
@nickname('listRepositoryImages')
+ @disallow_for_app_repositories
def get(self, namespace, repository):
""" List the images for the specified repository. """
- repo = model.repository.get_repository(namespace, repository)
- if not repo:
+ repo_ref = registry_model.lookup_repository(namespace, repository)
+ if repo_ref is None:
raise NotFound()
- all_images = model.image.get_repository_images_without_placements(repo)
- all_tags = model.tag.list_repository_tags(namespace, repository)
-
- tags_by_docker_id = defaultdict(list)
- found_image_ids = set()
-
- for tag in all_tags:
- tags_by_docker_id[tag.image.docker_image_id].append(tag.name)
- found_image_ids.add(tag.image.id)
- found_image_ids.update(tag.image.ancestor_id_list())
-
- image_map = {}
- filtered_images = []
- for image in all_images:
- if image.id in found_image_ids:
- image_map[image.id] = image
- filtered_images.append(image)
-
- def add_tags(image_json):
- image_json['tags'] = tags_by_docker_id[image_json['id']]
- return image_json
-
- return {
- 'images': [add_tags(image_view(image, image_map)) for image in filtered_images]
- }
+ images = registry_model.get_legacy_images(repo_ref)
+ return {'images': [image_dict(image, with_tags=True) for image in images]}
@resource('/v1/repository//image/')
@@ -87,18 +60,18 @@ class RepositoryImageList(RepositoryParamResource):
@path_param('image_id', 'The Docker image ID')
class RepositoryImage(RepositoryParamResource):
""" Resource for handling repository images. """
+
@require_repo_read
@nickname('getImage')
+ @disallow_for_app_repositories
def get(self, namespace, repository, image_id):
""" Get the information available for the specified image. """
- image = model.image.get_repo_image_extended(namespace, repository, image_id)
- if not image:
+ repo_ref = registry_model.lookup_repository(namespace, repository)
+ if repo_ref is None:
raise NotFound()
- # Lookup all the ancestor images for the image.
- image_map = {}
- for current_image in model.image.get_parent_images(namespace, repository, image):
- image_map[current_image.id] = current_image
-
- return historical_image_view(image, image_map)
+ image = registry_model.get_legacy_image(repo_ref, image_id, include_parents=True)
+ if image is None:
+ raise NotFound()
+ return image_dict(image, with_history=True)
diff --git a/endpoints/api/logs.py b/endpoints/api/logs.py
index 05d340d20..1760a2e9b 100644
--- a/endpoints/api/logs.py
+++ b/endpoints/api/logs.py
@@ -1,115 +1,70 @@
""" Access usage logs for organizations or repositories. """
-
-import json
-
from datetime import datetime, timedelta
-from dateutil.relativedelta import relativedelta
+from flask import request
+
+import features
+
+from app import app, export_action_logs_queue, avatar
+from auth.permissions import AdministerOrganizationPermission
+from auth.auth_context import get_authenticated_user
+from auth import scopes
+from data.logs_model import logs_model
+from data.registry_model import registry_model
from endpoints.api import (resource, nickname, ApiResource, query_param, parse_args,
RepositoryParamResource, require_repo_admin, related_user_resource,
- format_date, require_user_admin, path_param, require_scope, page_support)
+ format_date, require_user_admin, path_param, require_scope, page_support,
+ validate_json_request, InvalidRequest, show_if)
from endpoints.exception import Unauthorized, NotFound
-from auth.permissions import AdministerOrganizationPermission, AdministerOrganizationPermission
-from auth.auth_context import get_authenticated_user
-from data import model, database
-from auth import scopes
-from app import avatar
-from tzlocal import get_localzone
+
LOGS_PER_PAGE = 20
SERVICE_LEVEL_LOG_KINDS = set(['service_key_create', 'service_key_approve', 'service_key_delete',
'service_key_modify', 'service_key_extend', 'service_key_rotate'])
-def log_view(log, kinds):
- view = {
- 'kind': kinds[log.kind_id],
- 'metadata': json.loads(log.metadata_json),
- 'ip': log.ip,
- 'datetime': format_date(log.datetime),
- }
- if log.performer and log.performer.username:
- view['performer'] = {
- 'kind': 'user',
- 'name': log.performer.username,
- 'is_robot': log.performer.robot,
- 'avatar': avatar.get_data_for_user(log.performer)
- }
+def _parse_datetime(dt_string):
+ if not dt_string:
+ return None
- return view
-
-def aggregated_log_view(log, kinds, start_time):
- # Because we aggregate based on the day of the month in SQL, we only have that information.
- # Therefore, create a synthetic date based on the day and the month of the start time.
- # Logs are allowed for a maximum period of one week, so this calculation should always work.
- synthetic_date = datetime(start_time.year, start_time.month, int(log.day), tzinfo=get_localzone())
- if synthetic_date.day < start_time.day:
- synthetic_date = synthetic_date + relativedelta(months=1)
-
- view = {
- 'kind': kinds[log.kind_id],
- 'count': log.count,
- 'datetime': format_date(synthetic_date),
- }
-
- return view
-
-def _validate_logs_arguments(start_time, end_time, performer_name):
- performer = None
- if performer_name:
- performer = model.user.get_user(performer_name)
-
- if start_time:
- try:
- start_time = datetime.strptime(start_time + ' UTC', '%m/%d/%Y %Z')
- except ValueError:
- start_time = None
-
- if not start_time:
- start_time = datetime.today() - timedelta(7) # One week
-
- if end_time:
- try:
- end_time = datetime.strptime(end_time + ' UTC', '%m/%d/%Y %Z')
- end_time = end_time + timedelta(days=1)
- except ValueError:
- end_time = None
-
- if not end_time:
- end_time = datetime.today()
-
- return (start_time, end_time, performer)
+ try:
+ return datetime.strptime(dt_string + ' UTC', '%m/%d/%Y %Z')
+ except ValueError:
+ return None
-def get_logs(start_time, end_time, performer_name=None, repository=None, namespace=None,
- page_token=None, ignore=None):
- (start_time, end_time, performer) = _validate_logs_arguments(start_time, end_time, performer_name)
- kinds = model.log.get_log_entry_kinds()
- logs_query = model.log.get_logs_query(start_time, end_time, performer=performer,
- repository=repository, namespace=namespace,
- ignore=ignore)
+def _validate_logs_arguments(start_time, end_time):
+ start_time = _parse_datetime(start_time) or (datetime.today() - timedelta(days=1))
+ end_time = _parse_datetime(end_time) or datetime.today()
+ end_time = end_time + timedelta(days=1)
+ return start_time, end_time
- logs, next_page_token = model.modelutil.paginate(logs_query, database.LogEntry, descending=True,
- page_token=page_token, limit=LOGS_PER_PAGE)
+def _get_logs(start_time, end_time, performer_name=None, repository_name=None, namespace_name=None,
+ page_token=None, filter_kinds=None):
+ (start_time, end_time) = _validate_logs_arguments(start_time, end_time)
+ log_entry_page = logs_model.lookup_logs(start_time, end_time, performer_name, repository_name,
+ namespace_name, filter_kinds, page_token,
+ app.config['ACTION_LOG_MAX_PAGE'])
+ include_namespace = namespace_name is None and repository_name is None
return {
'start_time': format_date(start_time),
'end_time': format_date(end_time),
- 'logs': [log_view(log, kinds) for log in logs],
- }, next_page_token
+ 'logs': [log.to_dict(avatar, include_namespace) for log in log_entry_page.logs],
+ }, log_entry_page.next_page_token
-def get_aggregate_logs(start_time, end_time, performer_name=None, repository=None, namespace=None,
- ignore=None):
- (start_time, end_time, performer) = _validate_logs_arguments(start_time, end_time, performer_name)
-
- kinds = model.log.get_log_entry_kinds()
- aggregated_logs = model.log.get_aggregated_logs(start_time, end_time, performer=performer,
- repository=repository, namespace=namespace,
- ignore=ignore)
+def _get_aggregate_logs(start_time, end_time, performer_name=None, repository=None, namespace=None,
+ filter_kinds=None):
+ (start_time, end_time) = _validate_logs_arguments(start_time, end_time)
+ aggregated_logs = logs_model.get_aggregated_log_counts(start_time, end_time,
+ performer_name=performer_name,
+ repository_name=repository,
+ namespace_name=namespace,
+ filter_kinds=filter_kinds)
return {
- 'aggregated': [aggregated_log_view(log, kinds, start_time) for log in aggregated_logs]
+ 'aggregated': [log.to_dict() for log in aggregated_logs]
}
@@ -117,33 +72,35 @@ def get_aggregate_logs(start_time, end_time, performer_name=None, repository=None,
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
class RepositoryLogs(RepositoryParamResource):
""" Resource for fetching logs for the specific repository. """
+
@require_repo_admin
@nickname('listRepoLogs')
@parse_args()
- @query_param('starttime', 'Earliest time from which to get logs (%m/%d/%Y %Z)', type=str)
- @query_param('endtime', 'Latest time to which to get logs (%m/%d/%Y %Z)', type=str)
- @query_param('page', 'The page number for the logs', type=int, default=1)
+ @query_param('starttime', 'Earliest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
+ @query_param('endtime', 'Latest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
@page_support()
def get(self, namespace, repository, page_token, parsed_args):
""" List the logs for the specified repository. """
- repo = model.repository.get_repository(namespace, repository)
- if not repo:
+ if registry_model.lookup_repository(namespace, repository) is None:
raise NotFound()
start_time = parsed_args['starttime']
end_time = parsed_args['endtime']
- return get_logs(start_time, end_time, repository=repo, page_token=page_token,
- ignore=SERVICE_LEVEL_LOG_KINDS)
+ return _get_logs(start_time, end_time,
+ repository_name=repository,
+ page_token=page_token,
+ namespace_name=namespace)
@resource('/v1/user/logs')
class UserLogs(ApiResource):
""" Resource for fetching logs for the current user. """
+
@require_user_admin
@nickname('listUserLogs')
@parse_args()
- @query_param('starttime', 'Earliest time from which to get logs. (%m/%d/%Y %Z)', type=str)
- @query_param('endtime', 'Latest time to which to get logs. (%m/%d/%Y %Z)', type=str)
+ @query_param('starttime', 'Earliest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
+ @query_param('endtime', 'Latest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
@query_param('performer', 'Username for which to filter logs.', type=str)
@page_support()
def get(self, parsed_args, page_token):
@@ -153,8 +110,11 @@ class UserLogs(ApiResource):
end_time = parsed_args['endtime']
user = get_authenticated_user()
- return get_logs(start_time, end_time, performer_name=performer_name, namespace=user.username,
- page_token=page_token, ignore=SERVICE_LEVEL_LOG_KINDS)
+ return _get_logs(start_time, end_time,
+ performer_name=performer_name,
+ namespace_name=user.username,
+ page_token=page_token,
+ filter_kinds=SERVICE_LEVEL_LOG_KINDS)
@resource('/v1/organization//logs')
@@ -162,12 +122,12 @@ class UserLogs(ApiResource):
@related_user_resource(UserLogs)
class OrgLogs(ApiResource):
""" Resource for fetching logs for the entire organization. """
+
@nickname('listOrgLogs')
@parse_args()
- @query_param('starttime', 'Earliest time from which to get logs. (%m/%d/%Y %Z)', type=str)
- @query_param('endtime', 'Latest time to which to get logs. (%m/%d/%Y %Z)', type=str)
+ @query_param('starttime', 'Earliest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
+ @query_param('endtime', 'Latest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
@query_param('performer', 'Username for which to filter logs.', type=str)
- @query_param('page', 'The page number for the logs', type=int, default=1)
@page_support()
@require_scope(scopes.ORG_ADMIN)
def get(self, orgname, page_token, parsed_args):
@@ -178,41 +138,47 @@ class OrgLogs(ApiResource):
start_time = parsed_args['starttime']
end_time = parsed_args['endtime']
- return get_logs(start_time, end_time, namespace=orgname, performer_name=performer_name,
- page_token=page_token, ignore=SERVICE_LEVEL_LOG_KINDS)
+ return _get_logs(start_time, end_time,
+ namespace_name=orgname,
+ performer_name=performer_name,
+ page_token=page_token)
raise Unauthorized()
@resource('/v1/repository//aggregatelogs')
+@show_if(features.AGGREGATED_LOG_COUNT_RETRIEVAL)
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
class RepositoryAggregateLogs(RepositoryParamResource):
""" Resource for fetching aggregated logs for the specific repository. """
+
@require_repo_admin
@nickname('getAggregateRepoLogs')
@parse_args()
- @query_param('starttime', 'Earliest time from which to get logs (%m/%d/%Y %Z)', type=str)
- @query_param('endtime', 'Latest time to which to get logs (%m/%d/%Y %Z)', type=str)
+ @query_param('starttime', 'Earliest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
+ @query_param('endtime', 'Latest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
def get(self, namespace, repository, parsed_args):
""" Returns the aggregated logs for the specified repository. """
- repo = model.repository.get_repository(namespace, repository)
- if not repo:
+ if registry_model.lookup_repository(namespace, repository) is None:
raise NotFound()
start_time = parsed_args['starttime']
end_time = parsed_args['endtime']
- return get_aggregate_logs(start_time, end_time, repository=repo,
- ignore=SERVICE_LEVEL_LOG_KINDS)
+ return _get_aggregate_logs(start_time, end_time,
+ repository=repository,
+ namespace=namespace)
@resource('/v1/user/aggregatelogs')
+@show_if(features.AGGREGATED_LOG_COUNT_RETRIEVAL)
class UserAggregateLogs(ApiResource):
""" Resource for fetching aggregated logs for the current user. """
+
@require_user_admin
@nickname('getAggregateUserLogs')
@parse_args()
- @query_param('starttime', 'Earliest time from which to get logs. (%m/%d/%Y %Z)', type=str)
- @query_param('endtime', 'Latest time to which to get logs. (%m/%d/%Y %Z)', type=str)
+ @query_param('starttime', 'Earliest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
+ @query_param('endtime', 'Latest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
@query_param('performer', 'Username for which to filter logs.', type=str)
def get(self, parsed_args):
""" Returns the aggregated logs for the current user. """
@@ -221,19 +187,23 @@ class UserAggregateLogs(ApiResource):
end_time = parsed_args['endtime']
user = get_authenticated_user()
- return get_aggregate_logs(start_time, end_time, performer_name=performer_name,
- namespace=user.username, ignore=SERVICE_LEVEL_LOG_KINDS)
+ return _get_aggregate_logs(start_time, end_time,
+ performer_name=performer_name,
+ namespace=user.username,
+ filter_kinds=SERVICE_LEVEL_LOG_KINDS)
@resource('/v1/organization//aggregatelogs')
+@show_if(features.AGGREGATED_LOG_COUNT_RETRIEVAL)
@path_param('orgname', 'The name of the organization')
@related_user_resource(UserLogs)
class OrgAggregateLogs(ApiResource):
""" Resource for fetching aggregate logs for the entire organization. """
+
@nickname('getAggregateOrgLogs')
@parse_args()
- @query_param('starttime', 'Earliest time from which to get logs. (%m/%d/%Y %Z)', type=str)
- @query_param('endtime', 'Latest time to which to get logs. (%m/%d/%Y %Z)', type=str)
+ @query_param('starttime', 'Earliest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
+ @query_param('endtime', 'Latest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
@query_param('performer', 'Username for which to filter logs.', type=str)
@require_scope(scopes.ORG_ADMIN)
def get(self, orgname, parsed_args):
@@ -244,7 +214,131 @@ class OrgAggregateLogs(ApiResource):
start_time = parsed_args['starttime']
end_time = parsed_args['endtime']
- return get_aggregate_logs(start_time, end_time, namespace=orgname,
- performer_name=performer_name, ignore=SERVICE_LEVEL_LOG_KINDS)
+ return _get_aggregate_logs(start_time, end_time,
+ namespace=orgname,
+ performer_name=performer_name)
+
+ raise Unauthorized()
+
+
+EXPORT_LOGS_SCHEMA = {
+ 'type': 'object',
+ 'description': 'Configuration for an export logs operation',
+ 'properties': {
+ 'callback_url': {
+ 'type': 'string',
+ 'description': 'The callback URL to invoke with a link to the exported logs',
+ },
+ 'callback_email': {
+ 'type': 'string',
+ 'description': 'The e-mail address at which to e-mail a link to the exported logs',
+ },
+ },
+}
+
+
+def _queue_logs_export(start_time, end_time, options, namespace_name, repository_name=None):
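+  """ Validates the callback options and queues a logs export job, returning its export id. """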
+ callback_url = options.get('callback_url')
+ if callback_url:
+ if not callback_url.startswith('https://') and not callback_url.startswith('http://'):
+ raise InvalidRequest('Invalid callback URL')
+
+ callback_email = options.get('callback_email')
+ if callback_email:
+ if callback_email.find('@') < 0:
+ raise InvalidRequest('Invalid callback e-mail')
+
+ (start_time, end_time) = _validate_logs_arguments(start_time, end_time)
+ export_id = logs_model.queue_logs_export(start_time, end_time, export_action_logs_queue,
+ namespace_name, repository_name, callback_url,
+ callback_email)
+ if export_id is None:
+ raise InvalidRequest('Invalid export request')
+
+ return export_id
+
+
+@resource('/v1/repository//exportlogs')
+@show_if(features.LOG_EXPORT)
+@path_param('repository', 'The full path of the repository. e.g. namespace/name')
+class ExportRepositoryLogs(RepositoryParamResource):
+ """ Resource for exporting the logs for the specific repository. """
+ schemas = {
+ 'ExportLogs': EXPORT_LOGS_SCHEMA
+ }
+
+ @require_repo_admin
+ @nickname('exportRepoLogs')
+ @parse_args()
+ @query_param('starttime', 'Earliest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
+ @query_param('endtime', 'Latest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
+ @validate_json_request('ExportLogs')
+ def post(self, namespace, repository, parsed_args):
+ """ Queues an export of the logs for the specified repository. """
+ if registry_model.lookup_repository(namespace, repository) is None:
+ raise NotFound()
+
+ start_time = parsed_args['starttime']
+ end_time = parsed_args['endtime']
+ export_id = _queue_logs_export(start_time, end_time, request.get_json(), namespace,
+ repository_name=repository)
+ return {
+ 'export_id': export_id,
+ }
+
+
+@resource('/v1/user/exportlogs')
+@show_if(features.LOG_EXPORT)
+class ExportUserLogs(ApiResource):
+ """ Resource for exporting the logs for the current user repository. """
+ schemas = {
+ 'ExportLogs': EXPORT_LOGS_SCHEMA
+ }
+
+ @require_user_admin
+ @nickname('exportUserLogs')
+ @parse_args()
+ @query_param('starttime', 'Earliest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
+ @query_param('endtime', 'Latest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
+ @validate_json_request('ExportLogs')
+ def post(self, parsed_args):
+ """ Returns the aggregated logs for the current user. """
+ start_time = parsed_args['starttime']
+ end_time = parsed_args['endtime']
+
+ user = get_authenticated_user()
+ export_id = _queue_logs_export(start_time, end_time, request.get_json(), user.username)
+ return {
+ 'export_id': export_id,
+ }
+
+
+@resource('/v1/organization//exportlogs')
+@show_if(features.LOG_EXPORT)
+@path_param('orgname', 'The name of the organization')
+@related_user_resource(ExportUserLogs)
+class ExportOrgLogs(ApiResource):
+ """ Resource for exporting the logs for an entire organization. """
+ schemas = {
+ 'ExportLogs': EXPORT_LOGS_SCHEMA
+ }
+
+ @nickname('exportOrgLogs')
+ @parse_args()
+ @query_param('starttime', 'Earliest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
+ @query_param('endtime', 'Latest time for logs. Format: "%m/%d/%Y" in UTC.', type=str)
+ @require_scope(scopes.ORG_ADMIN)
+ @validate_json_request('ExportLogs')
+ def post(self, orgname, parsed_args):
+ """ Exports the logs for the specified organization. """
+ permission = AdministerOrganizationPermission(orgname)
+ if permission.can():
+ start_time = parsed_args['starttime']
+ end_time = parsed_args['endtime']
+
+ export_id = _queue_logs_export(start_time, end_time, request.get_json(), orgname)
+ return {
+ 'export_id': export_id,
+ }
raise Unauthorized()
diff --git a/endpoints/api/manifest.py b/endpoints/api/manifest.py
index 0307ecaba..1370fa743 100644
--- a/endpoints/api/manifest.py
+++ b/endpoints/api/manifest.py
@@ -1,29 +1,105 @@
""" Manage the manifests of a repository. """
+import json
+import logging
-from app import label_validator
from flask import request
+
+from app import label_validator, storage
+from data.model import InvalidLabelKeyException, InvalidMediaTypeException
+from data.registry_model import registry_model
+from digest import digest_tools
from endpoints.api import (resource, nickname, require_repo_read, require_repo_write,
RepositoryParamResource, log_action, validate_json_request,
- path_param, parse_args, query_param, truthy_bool, abort, api)
+ path_param, parse_args, query_param, abort, api,
+ disallow_for_app_repositories, format_date,
+ disallow_for_non_normal_repositories)
+from endpoints.api.image import image_dict
from endpoints.exception import NotFound
-from data import model
+from util.validation import VALID_LABEL_KEY_REGEX
-from digest import digest_tools
BASE_MANIFEST_ROUTE = '/v1/repository/<apirepopath:repository>/manifest/<regex("{0}"):manifestref>'
MANIFEST_DIGEST_ROUTE = BASE_MANIFEST_ROUTE.format(digest_tools.DIGEST_PATTERN)
ALLOWED_LABEL_MEDIA_TYPES = ['text/plain', 'application/json']
-def label_view(label):
- view = {
+
+logger = logging.getLogger(__name__)
+
+def _label_dict(label):
+ return {
'id': label.uuid,
'key': label.key,
'value': label.value,
- 'source_type': label.source_type.name,
- 'media_type': label.media_type.name,
+ 'source_type': label.source_type_name,
+ 'media_type': label.media_type_name,
}
- return view
+
+def _layer_dict(manifest_layer, index):
+ # NOTE: The `command` in the layer is either a JSON string of an array (schema 1) or
+ # a single string (schema 2). The block below normalizes it to have the same format.
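+  # For example (illustrative values): schema 1 may store '["/bin/sh", "-c", "echo hi"]'
+  # while schema 2 stores the plain string '/bin/sh -c echo hi'; both become lists below.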
+ command = None
+ if manifest_layer.command:
+ try:
+ command = json.loads(manifest_layer.command)
+ except (TypeError, ValueError):
+ command = [manifest_layer.command]
+
+ return {
+ 'index': index,
+ 'compressed_size': manifest_layer.compressed_size,
+ 'is_remote': manifest_layer.is_remote,
+ 'urls': manifest_layer.urls,
+ 'command': command,
+ 'comment': manifest_layer.comment,
+ 'author': manifest_layer.author,
+ 'blob_digest': str(manifest_layer.blob_digest),
+ 'created_datetime': format_date(manifest_layer.created_datetime),
+ }
+
+
+def _manifest_dict(manifest):
+ image = None
+ if manifest.legacy_image_if_present is not None:
+ image = image_dict(manifest.legacy_image, with_history=True)
+
+ layers = None
+ if not manifest.is_manifest_list:
+ layers = registry_model.list_manifest_layers(manifest, storage)
+ if layers is None:
+ logger.debug('Missing layers for manifest `%s`', manifest.digest)
+ abort(404)
+
+ return {
+ 'digest': manifest.digest,
+ 'is_manifest_list': manifest.is_manifest_list,
+ 'manifest_data': manifest.internal_manifest_bytes.as_unicode(),
+ 'image': image,
+ 'layers': ([_layer_dict(lyr.layer_info, idx) for idx, lyr in enumerate(layers)]
+ if layers else None),
+ }
+
+
+@resource(MANIFEST_DIGEST_ROUTE)
+@path_param('repository', 'The full path of the repository. e.g. namespace/name')
+@path_param('manifestref', 'The digest of the manifest')
+class RepositoryManifest(RepositoryParamResource):
+ """ Resource for retrieving a specific repository manifest. """
+ @require_repo_read
+ @nickname('getRepoManifest')
+ @disallow_for_app_repositories
+ def get(self, namespace_name, repository_name, manifestref):
+ repo_ref = registry_model.lookup_repository(namespace_name, repository_name)
+ if repo_ref is None:
+ raise NotFound()
+
+ manifest = registry_model.lookup_manifest_by_digest(repo_ref, manifestref,
+ include_legacy_image=True)
+ if manifest is None:
+ raise NotFound()
+
+ return _manifest_dict(manifest)
+
@resource(MANIFEST_DIGEST_ROUTE + '/labels')
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
@@ -49,9 +125,9 @@ class RepositoryManifestLabels(RepositoryParamResource):
'description': 'The value for the label',
},
'media_type': {
- 'type': ['string'],
+ 'type': ['string', 'null'],
'description': 'The media type for this label',
- 'enum': ALLOWED_LABEL_MEDIA_TYPES,
+ 'enum': ALLOWED_LABEL_MEDIA_TYPES + [None],
},
},
},
@@ -59,51 +135,80 @@ class RepositoryManifestLabels(RepositoryParamResource):
@require_repo_read
@nickname('listManifestLabels')
+ @disallow_for_app_repositories
@parse_args()
@query_param('filter', 'If specified, only labels matching the given prefix will be returned',
type=str, default=None)
- def get(self, namespace, repository, manifestref, parsed_args):
- try:
- tag_manifest = model.tag.load_manifest_by_digest(namespace, repository, manifestref)
- except model.DataModelException:
+ def get(self, namespace_name, repository_name, manifestref, parsed_args):
+ repo_ref = registry_model.lookup_repository(namespace_name, repository_name)
+ if repo_ref is None:
+ raise NotFound()
+
+ manifest = registry_model.lookup_manifest_by_digest(repo_ref, manifestref)
+ if manifest is None:
+ raise NotFound()
+
+ labels = registry_model.list_manifest_labels(manifest, parsed_args['filter'])
+ if labels is None:
raise NotFound()
- labels = model.label.list_manifest_labels(tag_manifest, prefix_filter=parsed_args['filter'])
return {
- 'labels': [label_view(label) for label in labels]
+ 'labels': [_label_dict(label) for label in labels]
}
@require_repo_write
@nickname('addManifestLabel')
+ @disallow_for_app_repositories
+ @disallow_for_non_normal_repositories
@validate_json_request('AddLabel')
- def post(self, namespace, repository, manifestref):
+ def post(self, namespace_name, repository_name, manifestref):
""" Adds a new label into the tag manifest. """
- try:
- tag_manifest = model.tag.load_manifest_by_digest(namespace, repository, manifestref)
- except model.DataModelException:
- raise NotFound()
-
label_data = request.get_json()
# Check for any reserved prefixes.
if label_validator.has_reserved_prefix(label_data['key']):
abort(400, message='Label has a reserved prefix')
- label = model.label.create_manifest_label(tag_manifest, label_data['key'],
- label_data['value'], 'api',
- media_type_name=label_data['media_type'])
+ repo_ref = registry_model.lookup_repository(namespace_name, repository_name)
+ if repo_ref is None:
+ raise NotFound()
+
+ manifest = registry_model.lookup_manifest_by_digest(repo_ref, manifestref)
+ if manifest is None:
+ raise NotFound()
+
+ label = None
+ try:
+ label = registry_model.create_manifest_label(manifest,
+ label_data['key'],
+ label_data['value'],
+ 'api',
+ label_data['media_type'])
+ except InvalidLabelKeyException:
+      message = ('Label is of an invalid format or missing; please ' +
+                 'use the %s format for labels' % VALID_LABEL_KEY_REGEX)
+ abort(400, message=message)
+ except InvalidMediaTypeException:
+      message = 'Media type is invalid; please use a valid media type: text/plain, application/json'
+ abort(400, message=message)
+
+ if label is None:
+ raise NotFound()
+
metadata = {
'id': label.uuid,
- 'key': label_data['key'],
- 'value': label_data['value'],
+ 'key': label.key,
+ 'value': label.value,
'manifest_digest': manifestref,
- 'media_type': label_data['media_type'],
+ 'media_type': label.media_type_name,
+ 'namespace': namespace_name,
+ 'repo': repository_name,
}
- log_action('manifest_label_add', namespace, metadata, repo=tag_manifest.tag.repository)
+ log_action('manifest_label_add', namespace_name, metadata, repo_name=repository_name)
- resp = {'label': label_view(label)}
- repo_string = '%s/%s' % (namespace, repository)
+ resp = {'label': _label_dict(label)}
+ repo_string = '%s/%s' % (namespace_name, repository_name)
headers = {
'Location': api.url_for(ManageRepositoryManifestLabel, repository=repo_string,
manifestref=manifestref, labelid=label.uuid),
@@ -119,30 +224,39 @@ class ManageRepositoryManifestLabel(RepositoryParamResource):
""" Resource for managing the labels on a specific repository manifest. """
@require_repo_read
@nickname('getManifestLabel')
- def get(self, namespace, repository, manifestref, labelid):
+ @disallow_for_app_repositories
+ def get(self, namespace_name, repository_name, manifestref, labelid):
""" Retrieves the label with the specific ID under the manifest. """
- try:
- tag_manifest = model.tag.load_manifest_by_digest(namespace, repository, manifestref)
- except model.DataModelException:
+ repo_ref = registry_model.lookup_repository(namespace_name, repository_name)
+ if repo_ref is None:
raise NotFound()
- label = model.label.get_manifest_label(labelid, tag_manifest)
+ manifest = registry_model.lookup_manifest_by_digest(repo_ref, manifestref)
+ if manifest is None:
+ raise NotFound()
+
+ label = registry_model.get_manifest_label(manifest, labelid)
if label is None:
raise NotFound()
- return label_view(label)
+ return _label_dict(label)
@require_repo_write
@nickname('deleteManifestLabel')
- def delete(self, namespace, repository, manifestref, labelid):
+ @disallow_for_app_repositories
+ @disallow_for_non_normal_repositories
+ def delete(self, namespace_name, repository_name, manifestref, labelid):
""" Deletes an existing label from a manifest. """
- try:
- tag_manifest = model.tag.load_manifest_by_digest(namespace, repository, manifestref)
- except model.DataModelException:
+ repo_ref = registry_model.lookup_repository(namespace_name, repository_name)
+ if repo_ref is None:
raise NotFound()
- deleted = model.label.delete_manifest_label(labelid, tag_manifest)
+ manifest = registry_model.lookup_manifest_by_digest(repo_ref, manifestref)
+ if manifest is None:
+ raise NotFound()
+
+ deleted = registry_model.delete_manifest_label(manifest, labelid)
if deleted is None:
raise NotFound()
@@ -150,9 +264,10 @@ class ManageRepositoryManifestLabel(RepositoryParamResource):
'id': labelid,
'key': deleted.key,
'value': deleted.value,
- 'manifest_digest': manifestref
+ 'manifest_digest': manifestref,
+ 'namespace': namespace_name,
+ 'repo': repository_name,
}
- log_action('manifest_label_delete', namespace, metadata, repo=tag_manifest.tag.repository)
+ log_action('manifest_label_delete', namespace_name, metadata, repo_name=repository_name)
return '', 204
-
diff --git a/endpoints/api/mirror.py b/endpoints/api/mirror.py
new file mode 100644
index 000000000..9c898c7f5
--- /dev/null
+++ b/endpoints/api/mirror.py
@@ -0,0 +1,435 @@
+# -*- coding: utf-8 -*-
+import logging
+
+from email.utils import parsedate_tz, mktime_tz
+from datetime import datetime
+
+from jsonschema import ValidationError
+from flask import request
+
+import features
+
+from auth.auth_context import get_authenticated_user
+from data import model
+from data.database import RepoMirrorRuleType
+from endpoints.api import (RepositoryParamResource, nickname, path_param, require_repo_admin,
+ resource, validate_json_request, define_json_response, show_if,
+ format_date)
+from endpoints.exception import NotFound
+from util.audit import track_and_log, wrap_repository
+from util.names import parse_robot_username
+
+
+common_properties = {
+ 'is_enabled': {
+ 'type': 'boolean',
+ 'description': 'Used to enable or disable synchronizations.',
+ },
+ 'external_reference': {
+ 'type': 'string',
+ 'description': 'Location of the external repository.'
+ },
+ 'external_registry_username': {
+ 'type': ['string', 'null'],
+ 'description': 'Username used to authenticate with external registry.',
+ },
+ 'external_registry_password': {
+ 'type': ['string', 'null'],
+ 'description': 'Password used to authenticate with external registry.',
+ },
+ 'sync_start_date': {
+ 'type': 'string',
+ 'description': 'Determines the next time this repository is ready for synchronization.',
+ },
+ 'sync_interval': {
+ 'type': 'integer',
+ 'minimum': 0,
+    'description': 'Number of seconds after sync_start_date to begin synchronizing.'
+ },
+ 'robot_username': {
+ 'type': 'string',
+ 'description': 'Username of robot which will be used for image pushes.'
+ },
+ 'root_rule': {
+ 'type': 'object',
+    'description': 'A list of glob-patterns used to determine which tags should be synchronized.',
+ 'required': [
+ 'rule_kind',
+ 'rule_value'
+ ],
+ 'properties': {
+ 'rule_kind': {
+ 'type': 'string',
+ 'description': 'The kind of rule type',
+ 'enum': ['tag_glob_csv'],
+ },
+ 'rule_value': {
+ 'type': 'array',
+ 'description': 'Array of tag patterns',
+ 'items': {
+ 'type': 'string'
+ }
+ }
+ },
+ },
+ 'external_registry_config': {
+ 'type': 'object',
+ 'properties': {
+ 'verify_tls': {
+ 'type': 'boolean',
+ 'description': (
+          'Determines whether HTTPS is required and the certificate is verified when '
+ 'communicating with the external repository.'
+ ),
+ },
+ 'proxy': {
+ 'type': 'object',
+ 'description': 'Proxy configuration for use during synchronization.',
+ 'properties': {
+ 'https_proxy': {
+ 'type': ['string', 'null'],
+ 'description': 'Value for HTTPS_PROXY environment variable during sync.'
+ },
+ 'http_proxy': {
+ 'type': ['string', 'null'],
+ 'description': 'Value for HTTP_PROXY environment variable during sync.'
+ },
+ 'no_proxy': {
+ 'type': ['string', 'null'],
+ 'description': 'Value for NO_PROXY environment variable during sync.'
+ }
+ }
+ }
+ }
+ }
+}
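+
+# An illustrative 'CreateMirrorConfig' payload built from the properties above
+# (all values are hypothetical):
+#
+#   {
+#     "external_reference": "registry.example.com/namespace/repository",
+#     "sync_start_date": "2019-06-01T00:00:00Z",
+#     "sync_interval": 86400,
+#     "robot_username": "namespace+mirror_robot",
+#     "root_rule": {"rule_kind": "tag_glob_csv", "rule_value": ["latest", "v1.*"]}
+#   }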
+
+
+@resource('/v1/repository/<apirepopath:repository>/mirror/sync-now')
+@path_param('repository', 'The full path of the repository. e.g. namespace/name')
+@show_if(features.REPO_MIRROR)
+class RepoMirrorSyncNowResource(RepositoryParamResource):
+ """ A resource for managing RepoMirrorConfig.sync_status """
+
+ @require_repo_admin
+ @nickname('syncNow')
+ def post(self, namespace_name, repository_name):
+ """ Update the sync_status for a given Repository's mirroring configuration. """
+ repo = model.repository.get_repository(namespace_name, repository_name)
+ if not repo:
+ raise NotFound()
+
+ mirror = model.repo_mirror.get_mirror(repository=repo)
+ if not mirror:
+ raise NotFound()
+
+    if model.repo_mirror.update_sync_status_to_sync_now(mirror):
+ track_and_log('repo_mirror_config_changed', wrap_repository(repo), changed="sync_status", to="SYNC_NOW")
+ return '', 204
+
+ raise NotFound()
+
+
+@resource('/v1/repository/<apirepopath:repository>/mirror/sync-cancel')
+@path_param('repository', 'The full path of the repository. e.g. namespace/name')
+@show_if(features.REPO_MIRROR)
+class RepoMirrorSyncCancelResource(RepositoryParamResource):
+ """ A resource for managing RepoMirrorConfig.sync_status """
+
+ @require_repo_admin
+ @nickname('syncCancel')
+ def post(self, namespace_name, repository_name):
+ """ Update the sync_status for a given Repository's mirroring configuration. """
+ repo = model.repository.get_repository(namespace_name, repository_name)
+ if not repo:
+ raise NotFound()
+
+ mirror = model.repo_mirror.get_mirror(repository=repo)
+ if not mirror:
+ raise NotFound()
+
+    if model.repo_mirror.update_sync_status_to_cancel(mirror):
+ track_and_log('repo_mirror_config_changed', wrap_repository(repo), changed="sync_status", to="SYNC_CANCEL")
+ return '', 204
+
+ raise NotFound()
+
+
+@resource('/v1/repository/<apirepopath:repository>/mirror')
+@path_param('repository', 'The full path of the repository. e.g. namespace/name')
+@show_if(features.REPO_MIRROR)
+class RepoMirrorResource(RepositoryParamResource):
+ """
+ Resource for managing repository mirroring.
+ """
+ schemas = {
+ 'CreateMirrorConfig': {
+ 'description': 'Create the repository mirroring configuration.',
+ 'type': 'object',
+ 'required': [
+ 'external_reference',
+ 'sync_interval',
+ 'sync_start_date',
+ 'root_rule'
+ ],
+ 'properties': common_properties
+ },
+ 'UpdateMirrorConfig': {
+ 'description': 'Update the repository mirroring configuration.',
+ 'type': 'object',
+ 'properties': common_properties
+ },
+ 'ViewMirrorConfig': {
+ 'description': 'View the repository mirroring configuration.',
+ 'type': 'object',
+ 'required': [
+ 'is_enabled',
+ 'mirror_type',
+ 'external_reference',
+ 'external_registry_username',
+ 'external_registry_config',
+ 'sync_interval',
+ 'sync_start_date',
+ 'sync_expiration_date',
+ 'sync_retries_remaining',
+ 'sync_status',
+ 'root_rule',
+ 'robot_username',
+ ],
+ 'properties': common_properties
+ }
+ }
+
+ @require_repo_admin
+ @define_json_response('ViewMirrorConfig')
+ @nickname('getRepoMirrorConfig')
+ def get(self, namespace_name, repository_name):
+ """ Return the Mirror configuration for a given Repository. """
+ repo = model.repository.get_repository(namespace_name, repository_name)
+ if not repo:
+ raise NotFound()
+
+ mirror = model.repo_mirror.get_mirror(repo)
+ if not mirror:
+ raise NotFound()
+
+ # Transformations
+ rules = mirror.root_rule.rule_value
+ username = self._decrypt_username(mirror.external_registry_username)
+ sync_start_date = self._dt_to_string(mirror.sync_start_date)
+ sync_expiration_date = self._dt_to_string(mirror.sync_expiration_date)
+ robot = mirror.internal_robot.username if mirror.internal_robot is not None else None
+
+ return {
+ 'is_enabled': mirror.is_enabled,
+ 'mirror_type': mirror.mirror_type.name,
+ 'external_reference': mirror.external_reference,
+ 'external_registry_username': username,
+ 'external_registry_config': mirror.external_registry_config or {},
+ 'sync_interval': mirror.sync_interval,
+ 'sync_start_date': sync_start_date,
+ 'sync_expiration_date': sync_expiration_date,
+ 'sync_retries_remaining': mirror.sync_retries_remaining,
+ 'sync_status': mirror.sync_status.name,
+ 'root_rule': {
+ 'rule_kind': 'tag_glob_csv',
+ 'rule_value': rules
+ },
+ 'robot_username': robot,
+ }
+
+ @require_repo_admin
+ @nickname('createRepoMirrorConfig')
+ @validate_json_request('CreateMirrorConfig')
+ def post(self, namespace_name, repository_name):
+ """ Create a RepoMirrorConfig for a given Repository. """
+ # TODO: Tidy up this function
+ # TODO: Specify only the data we want to pass on when creating the RepoMirrorConfig. Avoid
+ # the possibility of data injection.
+
+ repo = model.repository.get_repository(namespace_name, repository_name)
+ if not repo:
+ raise NotFound()
+
+ if model.repo_mirror.get_mirror(repo):
+      return {'detail': 'Mirror configuration already exists for repository %s/%s' % (
+ namespace_name, repository_name)}, 409
+
+ data = request.get_json()
+
+ data['sync_start_date'] = self._string_to_dt(data['sync_start_date'])
+
+ rule = model.repo_mirror.create_rule(repo, data['root_rule']['rule_value'])
+ del data['root_rule']
+
+ # Verify the robot is part of the Repository's namespace
+ robot = self._setup_robot_for_mirroring(namespace_name, repository_name, data['robot_username'])
+ del data['robot_username']
+
+ mirror = model.repo_mirror.enable_mirroring_for_repository(repo, root_rule=rule,
+ internal_robot=robot, **data)
+ if mirror:
+ track_and_log('repo_mirror_config_changed', wrap_repository(repo), changed='external_reference', to=data['external_reference'])
+ return '', 201
+ else:
+ # TODO: Determine appropriate Response
+ return {'detail': 'RepoMirrorConfig already exists for this repository.'}, 409
+
+ @require_repo_admin
+ @validate_json_request('UpdateMirrorConfig')
+ @nickname('changeRepoMirrorConfig')
+ def put(self, namespace_name, repository_name):
+ """ Allow users to modifying the repository's mirroring configuration. """
+ values = request.get_json()
+
+ repo = model.repository.get_repository(namespace_name, repository_name)
+ if not repo:
+ raise NotFound()
+
+ mirror = model.repo_mirror.get_mirror(repo)
+ if not mirror:
+ raise NotFound()
+
+    if 'is_enabled' in values:
+      if values['is_enabled'] is True:
+        if model.repo_mirror.enable_mirror(repo):
+          track_and_log('repo_mirror_config_changed', wrap_repository(repo), changed='is_enabled', to=True)
+      elif values['is_enabled'] is False:
+        if model.repo_mirror.disable_mirror(repo):
+          track_and_log('repo_mirror_config_changed', wrap_repository(repo), changed='is_enabled', to=False)
+
+ if 'external_reference' in values:
+ if values['external_reference'] == '':
+ return {'detail': 'Empty string is an invalid repository location.'}, 400
+ if model.repo_mirror.change_remote(repo, values['external_reference']):
+ track_and_log('repo_mirror_config_changed', wrap_repository(repo), changed='external_reference', to=values['external_reference'])
+
+ if 'robot_username' in values:
+ robot_username = values['robot_username']
+ robot = self._setup_robot_for_mirroring(namespace_name, repository_name, robot_username)
+ if model.repo_mirror.set_mirroring_robot(repo, robot):
+ track_and_log('repo_mirror_config_changed', wrap_repository(repo), changed='robot_username', to=robot_username)
+
+ if 'sync_start_date' in values:
+ try:
+ sync_start_date = self._string_to_dt(values['sync_start_date'])
+      except ValueError:
+ return {'detail': 'Incorrect DateTime format for sync_start_date.'}, 400
+ if model.repo_mirror.change_sync_start_date(repo, sync_start_date):
+ track_and_log('repo_mirror_config_changed', wrap_repository(repo), changed='sync_start_date', to=sync_start_date)
+
+ if 'sync_interval' in values:
+ if model.repo_mirror.change_sync_interval(repo, values['sync_interval']):
+ track_and_log('repo_mirror_config_changed', wrap_repository(repo), changed='sync_interval', to=values['sync_interval'])
+
+ if 'external_registry_username' in values and 'external_registry_password' in values:
+ username = values['external_registry_username']
+ password = values['external_registry_password']
+ if username is None and password is not None:
+ return {'detail': 'Unable to delete username while setting a password.'}, 400
+ if model.repo_mirror.change_credentials(repo, username, password):
+ track_and_log('repo_mirror_config_changed', wrap_repository(repo), changed='external_registry_username', to=username)
+ if password is None:
+ track_and_log('repo_mirror_config_changed', wrap_repository(repo), changed='external_registry_password', to=None)
+ else:
+ track_and_log('repo_mirror_config_changed', wrap_repository(repo), changed='external_registry_password', to="********")
+
+ elif 'external_registry_username' in values:
+ username = values['external_registry_username']
+ if model.repo_mirror.change_username(repo, username):
+ track_and_log('repo_mirror_config_changed', wrap_repository(repo), changed='external_registry_username', to=username)
+
+ # Do not allow specifying a password without setting a username
+ if 'external_registry_password' in values and 'external_registry_username' not in values:
+ return {'detail': 'Unable to set a new password without also specifying a username.'}, 400
+
+ if 'external_registry_config' in values:
+ external_registry_config = values.get('external_registry_config', {})
+
+ if 'verify_tls' in external_registry_config:
+ updates = {'verify_tls': external_registry_config['verify_tls']}
+ if model.repo_mirror.change_external_registry_config(repo, updates):
+ track_and_log('repo_mirror_config_changed', wrap_repository(repo), changed='verify_tls', to=external_registry_config['verify_tls'])
+
+ if 'proxy' in external_registry_config:
+ proxy_values = external_registry_config.get('proxy', {})
+
+ if 'http_proxy' in proxy_values:
+ updates = {'proxy': {'http_proxy': proxy_values['http_proxy']}}
+ if model.repo_mirror.change_external_registry_config(repo, updates):
+ track_and_log('repo_mirror_config_changed', wrap_repository(repo), changed='http_proxy', to=proxy_values['http_proxy'])
+
+ if 'https_proxy' in proxy_values:
+ updates = {'proxy': {'https_proxy': proxy_values['https_proxy']}}
+ if model.repo_mirror.change_external_registry_config(repo, updates):
+ track_and_log('repo_mirror_config_changed', wrap_repository(repo), changed='https_proxy', to=proxy_values['https_proxy'])
+
+ if 'no_proxy' in proxy_values:
+ updates = {'proxy': {'no_proxy': proxy_values['no_proxy']}}
+ if model.repo_mirror.change_external_registry_config(repo, updates):
+ track_and_log('repo_mirror_config_changed', wrap_repository(repo), changed='no_proxy', to=proxy_values['no_proxy'])
+
+ if 'root_rule' in values:
+
+ if values['root_rule']['rule_kind'] != "tag_glob_csv":
+ raise ValidationError('validation failed: rule_kind must be "tag_glob_csv"')
+
+ if model.repo_mirror.change_rule(repo, RepoMirrorRuleType.TAG_GLOB_CSV, values['root_rule']['rule_value']):
+ track_and_log('repo_mirror_config_changed', wrap_repository(repo), changed="mirror_rule", to=values['root_rule']['rule_value'])
+
+ return '', 201
+
+ def _setup_robot_for_mirroring(self, namespace_name, repo_name, robot_username):
+ """ Validate robot exists and give write permissions. """
+ robot = model.user.lookup_robot(robot_username)
+ assert robot.robot
+
+ namespace, _ = parse_robot_username(robot_username)
+ if namespace != namespace_name:
+ raise model.DataModelException('Invalid robot')
+
+ # Ensure the robot specified has access to the repository. If not, grant it.
+ permissions = model.permission.get_user_repository_permissions(robot, namespace_name, repo_name)
+ if not permissions or permissions[0].role.name == 'read':
+ model.permission.set_user_repo_permission(robot.username, namespace_name, repo_name, 'write')
+
+ return robot
+
+ def _string_to_dt(self, string):
+ """ Convert String to correct DateTime format. """
+ if string is None:
+ return None
+
+ """
+ # TODO: Use RFC2822. This doesn't work consistently.
+ # TODO: Move this to same module as `format_date` once fixed.
+ tup = parsedate_tz(string)
+ if len(tup) == 8:
+ tup = tup + (0,) # If TimeZone is omitted, assume UTC
+ ts = mktime_tz(tup)
+ dt = datetime.fromtimestamp(ts, pytz.UTC)
+ return dt
+ """
+ assert isinstance(string, (str, unicode))
+ dt = datetime.strptime(string, "%Y-%m-%dT%H:%M:%SZ")
+ return dt
+
+ def _dt_to_string(self, dt):
+ """ Convert DateTime to correctly formatted String."""
+ if dt is None:
+ return None
+
+ """
+ # TODO: Use RFC2822. Need to make it work bi-directionally.
+ return format_date(dt)
+ """
+
+ assert isinstance(dt, datetime)
+ string = dt.isoformat() + 'Z'
+ return string
+
+ def _decrypt_username(self, username):
+ if username is None:
+ return None
+ return username.decrypt()
diff --git a/endpoints/api/organization.py b/endpoints/api/organization.py
index 854451454..e53bba6b9 100644
--- a/endpoints/api/organization.py
+++ b/endpoints/api/organization.py
@@ -1,12 +1,15 @@
""" Manage organizations, members and OAuth applications. """
import logging
+import recaptcha2
from flask import request
import features
-from app import billing as stripe, avatar, all_queues
+from active_migration import ActiveDataMigration, ERTMigrationFlags
+from app import (billing as stripe, avatar, all_queues, authentication, namespace_gc_queue,
+ ip_resolver, app)
from endpoints.api import (resource, nickname, ApiResource, validate_json_request, request_error,
related_user_resource, internal_only, require_user_admin, log_action,
show_if, path_param, require_scope, require_fresh_login)
@@ -18,6 +21,8 @@ from auth.auth_context import get_authenticated_user
from auth import scopes
from data import model
from data.billing import get_plan
+from util.names import parse_robot_username
+from util.request import get_request_ip
logger = logging.getLogger(__name__)
@@ -33,6 +38,8 @@ def team_view(orgname, team):
'repo_count': team.repo_count,
'member_count': team.member_count,
+
+ 'is_synced': team.is_synced,
}
@@ -56,6 +63,8 @@ def org_view(o, teams):
if is_admin:
view['invoice_email'] = o.invoice_email
view['invoice_email_address'] = o.invoice_email_address
+ view['tag_expiration_s'] = o.removed_tag_expiration_s
+ view['is_free_account'] = o.stripe_id is None
return view
@@ -79,6 +88,10 @@ class OrganizationList(ApiResource):
'type': 'string',
'description': 'Organization contact email',
},
+ 'recaptcha_response': {
+ 'type': 'string',
+        'description': 'The (possibly disabled) recaptcha response code, used for verification',
+ },
},
},
}
@@ -107,9 +120,23 @@ class OrganizationList(ApiResource):
if features.MAILING and not org_data.get('email'):
raise request_error(message='Email address is required')
+ # If recaptcha is enabled, then verify the user is a human.
+ if features.RECAPTCHA:
+ recaptcha_response = org_data.get('recaptcha_response', '')
+ result = recaptcha2.verify(app.config['RECAPTCHA_SECRET_KEY'],
+ recaptcha_response,
+ get_request_ip())
+
+ if not result['success']:
+ return {
+ 'message': 'Are you a bot? If not, please revalidate the captcha.'
+ }, 400
+
+ is_possible_abuser = ip_resolver.is_ip_possible_threat(get_request_ip())
try:
model.organization.create_organization(org_data['name'], org_data.get('email'), user,
- email_required=features.MAILING)
+ email_required=features.MAILING,
+ is_possible_abuser=is_possible_abuser)
return 'Created', 201
except model.DataModelException as ex:
raise request_error(exception=ex)
@@ -137,16 +164,15 @@ class Organization(ApiResource):
'type': ['string', 'null'],
'description': 'The email address at which to receive invoices',
},
- 'tag_expiration': {
+ 'tag_expiration_s': {
'type': 'integer',
- 'maximum': 2592000,
'minimum': 0,
+ 'description': 'The number of seconds for tag expiration',
},
},
},
}
- @require_scope(scopes.ORG_ADMIN)
@nickname('getOrganization')
def get(self, orgname):
""" Get the details for the specified organization """
@@ -157,7 +183,8 @@ class Organization(ApiResource):
teams = None
if OrganizationMemberPermission(orgname).can():
- teams = model.team.get_teams_within_org(org)
+ has_syncing = features.TEAM_SYNCING and bool(authentication.federated_service)
+ teams = model.team.get_teams_within_org(org, has_syncing)
return org_view(org, teams)
@@ -193,9 +220,9 @@ class Organization(ApiResource):
logger.debug('Changing email address for organization: %s', org.username)
model.user.update_email(org, new_email)
- if 'tag_expiration' in org_data:
- logger.debug('Changing organization tag expiration to: %ss', org_data['tag_expiration'])
- model.user.change_user_tag_expiration(org, org_data['tag_expiration'])
+ if features.CHANGE_TAG_EXPIRATION and 'tag_expiration_s' in org_data:
+ logger.debug('Changing organization tag expiration to: %ss', org_data['tag_expiration_s'])
+ model.user.change_user_tag_expiration(org, org_data['tag_expiration_s'])
teams = model.team.get_teams_within_org(org)
return org_view(org, teams)
@@ -214,7 +241,7 @@ class Organization(ApiResource):
except model.InvalidOrganizationException:
raise NotFound()
- model.user.delete_user(org, all_queues)
+ model.user.mark_namespace_for_deletion(org, all_queues, namespace_gc_queue)
return '', 204
raise Unauthorized()
@@ -259,6 +286,55 @@ class OrgPrivateRepositories(ApiResource):
raise Unauthorized()
+@resource('/v1/organization/<orgname>/collaborators')
+@path_param('orgname', 'The name of the organization')
+class OrganizationCollaboratorList(ApiResource):
+ """ Resource for listing outside collaborators of an organization.
+
+ Collaborators are users that do not belong to any team in the
+      organization, but who have direct permissions on one or more
+ repositories belonging to the organization.
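+
+      An illustrative entry in the returned 'collaborators' list (all values
+      hypothetical):
+        {'kind': 'user', 'name': 'someuser', 'avatar': {...}, 'repositories': ['somerepo']}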
+ """
+
+ @require_scope(scopes.ORG_ADMIN)
+ @nickname('getOrganizationCollaborators')
+ def get(self, orgname):
+ """ List outside collaborators of the specified organization. """
+ permission = AdministerOrganizationPermission(orgname)
+ if not permission.can():
+ raise Unauthorized()
+
+ try:
+ org = model.organization.get_organization(orgname)
+ except model.InvalidOrganizationException:
+ raise NotFound()
+
+ all_perms = model.permission.list_organization_member_permissions(org)
+ membership = model.team.list_organization_members_by_teams(org)
+
+ org_members = set(m.user.username for m in membership)
+
+ collaborators = {}
+ for perm in all_perms:
+ username = perm.user.username
+
+ # Only interested in non-member permissions.
+ if username in org_members:
+ continue
+
+ if username not in collaborators:
+ collaborators[username] = {
+ 'kind': 'user',
+ 'name': username,
+ 'avatar': avatar.get_data_for_user(perm.user),
+ 'repositories': [],
+ }
+
+ collaborators[username]['repositories'].append(perm.repository.name)
+
+ return {'collaborators': collaborators.values()}
+
+
@resource('/v1/organization/<orgname>/members')
@path_param('orgname', 'The name of the organization')
class OrganizationMemberList(ApiResource):
@@ -339,7 +415,14 @@ class OrganizationMember(ApiResource):
# Lookup the user's information in the organization.
teams = list(model.team.get_user_teams_within_org(membername, organization))
if not teams:
- raise NotFound()
+ # 404 if the user is not a robot under the organization, as that means the referenced
+ # user or robot is not a member of this organization.
+ if not member.robot:
+ raise NotFound()
+
+ namespace, _ = parse_robot_username(member.username)
+ if namespace != orgname:
+ raise NotFound()
repo_permissions = model.permission.list_organization_member_permissions(organization, member)
@@ -411,14 +494,23 @@ class ApplicationInformation(ApiResource):
def app_view(application):
is_admin = AdministerOrganizationPermission(application.organization.username).can()
+ client_secret = None
+ if is_admin:
+ # TODO(remove-unenc): Remove legacy lookup.
+ if application.secure_client_secret is not None:
+ client_secret = application.secure_client_secret.decrypt()
+ if ActiveDataMigration.has_flag(ERTMigrationFlags.READ_OLD_FIELDS) and client_secret is None:
+ client_secret = application.client_secret
+
+ assert (client_secret is not None) == is_admin
return {
'name': application.name,
'description': application.description,
'application_uri': application.application_uri,
-
'client_id': application.client_id,
- 'client_secret': application.client_secret if is_admin else None,
+ 'client_secret': client_secret,
'redirect_uri': application.redirect_uri if is_admin else None,
'avatar_email': application.avatar_email if is_admin else None,
}
diff --git a/endpoints/api/permission.py b/endpoints/api/permission.py
index f07d87b2f..e85c6480e 100644
--- a/endpoints/api/permission.py
+++ b/endpoints/api/permission.py
@@ -4,55 +4,27 @@ import logging
from flask import request
-from app import avatar
from endpoints.api import (resource, nickname, require_repo_admin, RepositoryParamResource,
log_action, request_error, validate_json_request, path_param)
from endpoints.exception import NotFound
-from data import model
-
+from permission_models_pre_oci import pre_oci_model as model
+from permission_models_interface import DeleteException, SaveException
logger = logging.getLogger(__name__)
-def role_view(repo_perm_obj):
- return {
- 'role': repo_perm_obj.role.name,
- }
-
-def wrap_role_view_user(role_json, user):
- role_json['name'] = user.username
- role_json['is_robot'] = user.robot
- if not user.robot:
- role_json['avatar'] = avatar.get_data_for_user(user)
- return role_json
-
-
-def wrap_role_view_org(role_json, user, org_members):
- role_json['is_org_member'] = user.robot or user.username in org_members
- return role_json
-
-
-def wrap_role_view_team(role_json, team):
- role_json['name'] = team.name
- role_json['avatar'] = avatar.get_data_for_team(team)
- return role_json
-
-
@resource('/v1/repository/<apirepopath:repository>/permissions/team/')
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
class RepositoryTeamPermissionList(RepositoryParamResource):
""" Resource for repository team permissions. """
@require_repo_admin
@nickname('listRepoTeamPermissions')
- def get(self, namespace, repository):
+ def get(self, namespace_name, repository_name):
""" List all team permission. """
- repo_perms = model.permission.get_all_repo_teams(namespace, repository)
-
- def wrapped_role_view(repo_perm):
- return wrap_role_view_team(role_view(repo_perm), repo_perm.team)
+ repo_perms = model.get_repo_permissions_by_team(namespace_name, repository_name)
return {
- 'permissions': {repo_perm.team.name: wrapped_role_view(repo_perm)
+ 'permissions': {repo_perm.team_name: repo_perm.to_dict()
for repo_perm in repo_perms}
}
@@ -63,38 +35,10 @@ class RepositoryUserPermissionList(RepositoryParamResource):
""" Resource for repository user permissions. """
@require_repo_admin
@nickname('listRepoUserPermissions')
- def get(self, namespace, repository):
+ def get(self, namespace_name, repository_name):
""" List all user permissions. """
- # Lookup the organization (if any).
- org = None
- try:
- org = model.organization.get_organization(namespace) # Will raise an error if not org
- except model.InvalidOrganizationException:
- # This repository isn't under an org
- pass
-
- # Determine how to wrap the role(s).
- def wrapped_role_view(repo_perm):
- return wrap_role_view_user(role_view(repo_perm), repo_perm.user)
-
- role_view_func = wrapped_role_view
-
- if org:
- org_members = model.organization.get_organization_member_set(namespace)
- current_func = role_view_func
-
- def wrapped_role_org_view(repo_perm):
- return wrap_role_view_org(current_func(repo_perm), repo_perm.user,
- org_members)
-
- role_view_func = wrapped_role_org_view
-
- # Load and return the permissions.
- repo_perms = model.user.get_all_repo_users(namespace, repository)
- return {
- 'permissions': {perm.user.username: role_view_func(perm)
- for perm in repo_perms}
- }
+ perms = model.get_repo_permissions_by_user(namespace_name, repository_name)
+ return {'permissions': {p.username: p.to_dict() for p in perms}}
@resource('/v1/repository/<apirepopath:repository>/permissions/user/<username>/transitive')
@@ -105,19 +49,16 @@ class RepositoryUserTransitivePermission(RepositoryParamResource):
or via a team. """
@require_repo_admin
@nickname('getUserTransitivePermission')
- def get(self, namespace, repository, username):
+ def get(self, namespace_name, repository_name, username):
""" Get the fetch the permission for the specified user. """
- user = model.user.get_user(username)
- if not user:
+
+ roles = model.get_repo_roles(username, namespace_name, repository_name)
+
+ if not roles:
raise NotFound
-
- repo = model.repository.get_repository(namespace, repository)
- if not repo:
- raise NotFound
-
- permissions = list(model.permission.get_user_repo_permissions(user, repo))
+
return {
- 'permissions': [role_view(permission) for permission in permissions]
+ 'permissions': [r.to_dict() for r in roles]
}
@@ -149,68 +90,48 @@ class RepositoryUserPermission(RepositoryParamResource):
@require_repo_admin
@nickname('getUserPermissions')
- def get(self, namespace, repository, username):
- """ Get the Fetch the permission for the specified user. """
- logger.debug('Get repo: %s/%s permissions for user %s', namespace, repository, username)
- perm = model.permission.get_user_reponame_permission(username, namespace, repository)
- perm_view = wrap_role_view_user(role_view(perm), perm.user)
-
- try:
- model.organization.get_organization(namespace)
- org_members = model.organization.get_organization_member_set(namespace)
- perm_view = wrap_role_view_org(perm_view, perm.user, org_members)
- except model.InvalidOrganizationException:
- # This repository is not part of an organization
- pass
-
- return perm_view
+ def get(self, namespace_name, repository_name, username):
+ """ Get the permission for the specified user. """
+ logger.debug('Get repo: %s/%s permissions for user %s', namespace_name, repository_name, username)
+ perm = model.get_repo_permission_for_user(username, namespace_name, repository_name)
+ return perm.to_dict()
@require_repo_admin
@nickname('changeUserPermissions')
@validate_json_request('UserPermission')
- def put(self, namespace, repository, username): # Also needs to respond to post
+ def put(self, namespace_name, repository_name, username): # Also needs to respond to post
""" Update the perimssions for an existing repository. """
new_permission = request.get_json()
logger.debug('Setting permission to: %s for user %s', new_permission['role'], username)
try:
- perm = model.permission.set_user_repo_permission(username, namespace, repository,
- new_permission['role'])
- except model.DataModelException as ex:
+ perm = model.set_repo_permission_for_user(username, namespace_name, repository_name,
+ new_permission['role'])
+ resp = perm.to_dict()
+ except SaveException as ex:
raise request_error(exception=ex)
- perm_view = wrap_role_view_user(role_view(perm), perm.user)
-
- try:
- model.organization.get_organization(namespace)
- org_members = model.organization.get_organization_member_set(namespace)
- perm_view = wrap_role_view_org(perm_view, perm.user, org_members)
- except model.InvalidOrganizationException:
- # This repository is not part of an organization
- pass
- except model.DataModelException as ex:
- raise request_error(exception=ex)
-
- log_action('change_repo_permission', namespace,
- {'username': username, 'repo': repository,
+ log_action('change_repo_permission', namespace_name,
+ {'username': username, 'repo': repository_name,
+ 'namespace': namespace_name,
'role': new_permission['role']},
- repo=model.repository.get_repository(namespace, repository))
+ repo_name=repository_name)
- return perm_view, 200
+ return resp, 200
@require_repo_admin
@nickname('deleteUserPermissions')
- def delete(self, namespace, repository, username):
+ def delete(self, namespace_name, repository_name, username):
""" Delete the permission for the user. """
try:
- model.permission.delete_user_permission(username, namespace, repository)
- except model.DataModelException as ex:
+ model.delete_repo_permission_for_user(username, namespace_name, repository_name)
+ except DeleteException as ex:
raise request_error(exception=ex)
- log_action('delete_repo_permission', namespace,
- {'username': username, 'repo': repository},
- repo=model.repository.get_repository(namespace, repository))
+ log_action('delete_repo_permission', namespace_name,
+ {'username': username, 'repo': repository_name, 'namespace': namespace_name},
+ repo_name=repository_name)
return '', 204
@@ -243,39 +164,46 @@ class RepositoryTeamPermission(RepositoryParamResource):
@require_repo_admin
@nickname('getTeamPermissions')
- def get(self, namespace, repository, teamname):
+ def get(self, namespace_name, repository_name, teamname):
""" Fetch the permission for the specified team. """
- logger.debug('Get repo: %s/%s permissions for team %s', namespace, repository, teamname)
- perm = model.permission.get_team_reponame_permission(teamname, namespace, repository)
- return role_view(perm)
+ logger.debug('Get repo: %s/%s permissions for team %s', namespace_name, repository_name, teamname)
+ role = model.get_repo_role_for_team(teamname, namespace_name, repository_name)
+ return role.to_dict()
@require_repo_admin
@nickname('changeTeamPermissions')
@validate_json_request('TeamPermission')
- def put(self, namespace, repository, teamname):
+ def put(self, namespace_name, repository_name, teamname):
""" Update the existing team permission. """
new_permission = request.get_json()
logger.debug('Setting permission to: %s for team %s', new_permission['role'], teamname)
- perm = model.permission.set_team_repo_permission(teamname, namespace, repository,
- new_permission['role'])
+ try:
+ perm = model.set_repo_permission_for_team(teamname, namespace_name, repository_name,
+ new_permission['role'])
+ resp = perm.to_dict()
+ except SaveException as ex:
+ raise request_error(exception=ex)
+
- log_action('change_repo_permission', namespace,
- {'team': teamname, 'repo': repository,
+ log_action('change_repo_permission', namespace_name,
+ {'team': teamname, 'repo': repository_name,
'role': new_permission['role']},
- repo=model.repository.get_repository(namespace, repository))
-
- return wrap_role_view_team(role_view(perm), perm.team), 200
+ repo_name=repository_name)
+ return resp, 200
@require_repo_admin
@nickname('deleteTeamPermissions')
- def delete(self, namespace, repository, teamname):
+ def delete(self, namespace_name, repository_name, teamname):
""" Delete the permission for the specified team. """
- model.permission.delete_team_permission(teamname, namespace, repository)
-
- log_action('delete_repo_permission', namespace,
- {'team': teamname, 'repo': repository},
- repo=model.repository.get_repository(namespace, repository))
+ try:
+ model.delete_repo_permission_for_team(teamname, namespace_name, repository_name)
+ except DeleteException as ex:
+ raise request_error(exception=ex)
+
+ log_action('delete_repo_permission', namespace_name,
+ {'team': teamname, 'repo': repository_name},
+ repo_name=repository_name)
return '', 204
diff --git a/endpoints/api/permission_models_interface.py b/endpoints/api/permission_models_interface.py
new file mode 100644
index 000000000..49c24744c
--- /dev/null
+++ b/endpoints/api/permission_models_interface.py
@@ -0,0 +1,208 @@
+import sys
+from abc import ABCMeta, abstractmethod
+from collections import namedtuple
+
+from six import add_metaclass
+
+
+class SaveException(Exception):
+ def __init__(self, other):
+ self.traceback = sys.exc_info()
+ super(SaveException, self).__init__(str(other))
+
+class DeleteException(Exception):
+ def __init__(self, other):
+ self.traceback = sys.exc_info()
+ super(DeleteException, self).__init__(str(other))
+
+
+class Role(namedtuple('Role', ['role_name'])):
+ def to_dict(self):
+ return {
+ 'role': self.role_name,
+ }
+
+class UserPermission(namedtuple('UserPermission', [
+ 'role_name',
+ 'username',
+ 'is_robot',
+ 'avatar',
+ 'is_org_member',
+ 'has_org',
+ ])):
+
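+  # An illustrative to_dict() result (hypothetical values):
+  #   {'role': 'admin', 'name': 'someuser', 'is_robot': False,
+  #    'avatar': {...}, 'is_org_member': True}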
+ def to_dict(self):
+ perm_dict = {
+ 'role': self.role_name,
+ 'name': self.username,
+ 'is_robot': self.is_robot,
+ 'avatar': self.avatar,
+ }
+ if self.has_org:
+ perm_dict['is_org_member'] = self.is_org_member
+ return perm_dict
+
+
+class RobotPermission(namedtuple('RobotPermission', [
+ 'role_name',
+ 'username',
+ 'is_robot',
+ 'is_org_member',
+])):
+
+ def to_dict(self, user=None, team=None, org_members=None):
+ return {
+ 'role': self.role_name,
+ 'name': self.username,
+ 'is_robot': True,
+ 'is_org_member': self.is_org_member,
+ }
+
+
+class TeamPermission(namedtuple('TeamPermission', [
+ 'role_name',
+ 'team_name',
+ 'avatar',
+])):
+
+ def to_dict(self):
+ return {
+ 'role': self.role_name,
+ 'name': self.team_name,
+ 'avatar': self.avatar,
+ }
+
+@add_metaclass(ABCMeta)
+class PermissionDataInterface(object):
+ """
+ Data interface used by permissions API
+ """
+
+ @abstractmethod
+ def get_repo_permissions_by_user(self, namespace_name, repository_name):
+ """
+
+ Args:
+ namespace_name: string
+ repository_name: string
+
+ Returns:
+ list(UserPermission)
+ """
+
+ @abstractmethod
+ def get_repo_roles(self, username, namespace_name, repository_name):
+ """
+
+ Args:
+ username: string
+ namespace_name: string
+ repository_name: string
+
+ Returns:
+ list(Role) or None
+ """
+
+ @abstractmethod
+ def get_repo_permission_for_user(self, username, namespace_name, repository_name):
+ """
+
+ Args:
+ username: string
+ namespace_name: string
+ repository_name: string
+
+ Returns:
+ UserPermission
+ """
+
+ @abstractmethod
+ def set_repo_permission_for_user(self, username, namespace_name, repository_name, role_name):
+ """
+
+ Args:
+ username: string
+ namespace_name: string
+ repository_name: string
+ role_name: string
+
+ Returns:
+ UserPermission
+
+ Raises:
+ SaveException
+ """
+
+ @abstractmethod
+ def delete_repo_permission_for_user(self, username, namespace_name, repository_name):
+ """
+
+ Args:
+ username: string
+ namespace_name: string
+ repository_name: string
+
+ Returns:
+ void
+
+ Raises:
+ DeleteException
+ """
+
+ @abstractmethod
+ def get_repo_permissions_by_team(self, namespace_name, repository_name):
+ """
+
+ Args:
+ namespace_name: string
+ repository_name: string
+
+ Returns:
+ list(TeamPermission)
+ """
+
+ @abstractmethod
+ def get_repo_role_for_team(self, team_name, namespace_name, repository_name):
+ """
+
+ Args:
+ team_name: string
+ namespace_name: string
+ repository_name: string
+
+ Returns:
+ Role
+ """
+
+ @abstractmethod
+ def set_repo_permission_for_team(self, team_name, namespace_name, repository_name, permission):
+ """
+
+ Args:
+ team_name: string
+ namespace_name: string
+ repository_name: string
+ permission: string
+
+ Returns:
+ TeamPermission
+
+ Raises:
+ SaveException
+ """
+
+ @abstractmethod
+ def delete_repo_permission_for_team(self, team_name, namespace_name, repository_name):
+ """
+
+ Args:
+ team_name: string
+ namespace_name: string
+ repository_name: string
+
+ Returns:
+      void
+
+ Raises:
+ DeleteException
+ """
\ No newline at end of file
diff --git a/endpoints/api/permission_models_pre_oci.py b/endpoints/api/permission_models_pre_oci.py
new file mode 100644
index 000000000..1f19cad10
--- /dev/null
+++ b/endpoints/api/permission_models_pre_oci.py
@@ -0,0 +1,115 @@
+from app import avatar
+from data import model
+from permission_models_interface import PermissionDataInterface, UserPermission, TeamPermission, Role, SaveException, DeleteException
+
+
+class PreOCIModel(PermissionDataInterface):
+ """
+ PreOCIModel implements the data model for Permission using a database schema
+ before it was changed to support the OCI specification.
+ """
+
+ def get_repo_permissions_by_user(self, namespace_name, repository_name):
+ org = None
+ try:
+ org = model.organization.get_organization(namespace_name) # Will raise an error if not org
+ except model.InvalidOrganizationException:
+ # This repository isn't under an org
+ pass
+
+ # Load the permissions.
+ repo_perms = model.user.get_all_repo_users(namespace_name, repository_name)
+
+ if org:
+ users_filter = {perm.user for perm in repo_perms}
+ org_members = model.organization.get_organization_member_set(org, users_filter=users_filter)
+
+ def is_org_member(user):
+ if not org:
+ return False
+
+ return user.robot or user.username in org_members
+
+ return [self._user_permission(perm, org is not None, is_org_member(perm.user)) for perm in repo_perms]
+
+ def get_repo_roles(self, username, namespace_name, repository_name):
+ user = model.user.get_user(username)
+ if not user:
+ return None
+
+ repo = model.repository.get_repository(namespace_name, repository_name)
+ if not repo:
+ return None
+
+ return [self._role(r) for r in model.permission.get_user_repo_permissions(user, repo)]
+
+ def get_repo_permission_for_user(self, username, namespace_name, repository_name):
+ perm = model.permission.get_user_reponame_permission(username, namespace_name, repository_name)
+ org = None
+ try:
+ org = model.organization.get_organization(namespace_name)
+ org_members = model.organization.get_organization_member_set(org, users_filter={perm.user})
+ is_org_member = perm.user.robot or perm.user.username in org_members
+ except model.InvalidOrganizationException:
+ # This repository is not part of an organization
+ is_org_member = False
+
+ return self._user_permission(perm, org is not None, is_org_member)
+
+ def set_repo_permission_for_user(self, username, namespace_name, repository_name, role_name):
+ try:
+ perm = model.permission.set_user_repo_permission(username, namespace_name, repository_name, role_name)
+ org = None
+ try:
+ org = model.organization.get_organization(namespace_name)
+ org_members = model.organization.get_organization_member_set(org, users_filter={perm.user})
+ is_org_member = perm.user.robot or perm.user.username in org_members
+ except model.InvalidOrganizationException:
+ # This repository is not part of an organization
+ is_org_member = False
+ return self._user_permission(perm, org is not None, is_org_member)
+ except model.DataModelException as ex:
+ raise SaveException(ex)
+
+ def delete_repo_permission_for_user(self, username, namespace_name, repository_name):
+ try:
+ model.permission.delete_user_permission(username, namespace_name, repository_name)
+ except model.DataModelException as ex:
+ raise DeleteException(ex)
+
+ def get_repo_permissions_by_team(self, namespace_name, repository_name):
+ repo_perms = model.permission.get_all_repo_teams(namespace_name, repository_name)
+ return [self._team_permission(perm, perm.team.name) for perm in repo_perms]
+
+ def get_repo_role_for_team(self, team_name, namespace_name, repository_name):
+ return self._role(model.permission.get_team_reponame_permission(team_name, namespace_name, repository_name))
+
+ def set_repo_permission_for_team(self, team_name, namespace_name, repository_name, role_name):
+ try:
+ return self._team_permission(model.permission.set_team_repo_permission(team_name, namespace_name, repository_name, role_name), team_name)
+ except model.DataModelException as ex:
+ raise SaveException(ex)
+
+ def delete_repo_permission_for_team(self, team_name, namespace_name, repository_name):
+ try:
+ model.permission.delete_team_permission(team_name, namespace_name, repository_name)
+ except model.DataModelException as ex:
+ raise DeleteException(ex)
+
+ def _role(self, permission_obj):
+ return Role(role_name=permission_obj.role.name)
+
+ def _user_permission(self, permission_obj, has_org, is_org_member):
+ return UserPermission(role_name=permission_obj.role.name,
+ username=permission_obj.user.username,
+ is_robot=permission_obj.user.robot,
+ avatar=avatar.get_data_for_user(permission_obj.user),
+ is_org_member=is_org_member,
+ has_org=has_org)
+
+ def _team_permission(self, permission_obj, team_name):
+ return TeamPermission(role_name=permission_obj.role.name,
+ team_name=permission_obj.team.name,
+ avatar=avatar.get_data_for_team(permission_obj.team))
+
+pre_oci_model = PreOCIModel()
diff --git a/endpoints/api/prototype.py b/endpoints/api/prototype.py
index f14458594..2944aab60 100644
--- a/endpoints/api/prototype.py
+++ b/endpoints/api/prototype.py
@@ -133,7 +133,10 @@ class PermissionPrototypeList(ApiResource):
raise NotFound()
permissions = model.permission.get_prototype_permissions(org)
- org_members = model.organization.get_organization_member_set(orgname)
+
+ users_filter = ({p.activating_user for p in permissions} |
+ {p.delegate_user for p in permissions})
+ org_members = model.organization.get_organization_member_set(org, users_filter=users_filter)
return {'prototypes': [prototype_view(p, org_members) for p in permissions]}
raise Unauthorized()
@@ -180,7 +183,9 @@ class PermissionPrototypeList(ApiResource):
prototype = model.permission.add_prototype_permission(org, role_name, activating_user,
delegate_user, delegate_team)
log_prototype_action('create_prototype_permission', orgname, prototype)
- org_members = model.organization.get_organization_member_set(orgname)
+
+ users_filter = {prototype.activating_user, prototype.delegate_user}
+ org_members = model.organization.get_organization_member_set(org, users_filter=users_filter)
return prototype_view(prototype, org_members)
raise Unauthorized()
@@ -257,7 +262,9 @@ class PermissionPrototype(ApiResource):
log_prototype_action('modify_prototype_permission', orgname, prototype,
original_role=existing.role.name)
- org_members = model.organization.get_organization_member_set(orgname)
+
+ users_filter = {prototype.activating_user, prototype.delegate_user}
+ org_members = model.organization.get_organization_member_set(org, users_filter=users_filter)
return prototype_view(prototype, org_members)
raise Unauthorized()
diff --git a/endpoints/api/repoemail.py b/endpoints/api/repoemail.py
index b3c98bc36..3edccb4cc 100644
--- a/endpoints/api/repoemail.py
+++ b/endpoints/api/repoemail.py
@@ -5,46 +5,35 @@ import logging
from flask import request, abort
from endpoints.api import (resource, nickname, require_repo_admin, RepositoryParamResource,
- log_action, validate_json_request, internal_only,
- path_param, show_if)
+ log_action, validate_json_request, internal_only, path_param, show_if)
+from endpoints.api.repoemail_models_pre_oci import pre_oci_model as model
from endpoints.exception import NotFound
from app import tf
-from data import model
from data.database import db
from util.useremails import send_repo_authorization_email
import features
-
logger = logging.getLogger(__name__)
-def record_view(record):
- return {
- 'email': record.email,
- 'repository': record.repository.name,
- 'namespace': record.repository.namespace_user.username,
- 'confirmed': record.confirmed
- }
-
-
@internal_only
-@show_if(features.MAILING)
+@resource('/v1/repository/<apirepopath:repository>/authorizedemail/<email>')
+@show_if(features.MAILING)
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
@path_param('email', 'The e-mail address')
class RepositoryAuthorizedEmail(RepositoryParamResource):
""" Resource for checking and authorizing e-mail addresses to receive repo notifications. """
+
@require_repo_admin
@nickname('checkRepoEmailAuthorized')
def get(self, namespace, repository, email):
""" Checks to see if the given e-mail address is authorized on this repository. """
- record = model.repository.get_email_authorized_for_repo(namespace, repository, email)
+ record = model.get_email_authorized_for_repo(namespace, repository, email)
if not record:
abort(404)
- return record_view(record)
-
+ return record.to_dict()
@require_repo_admin
@nickname('sendAuthorizeRepoEmail')
@@ -52,12 +41,12 @@ class RepositoryAuthorizedEmail(RepositoryParamResource):
""" Starts the authorization process for an e-mail address on a repository. """
with tf(db):
- record = model.repository.get_email_authorized_for_repo(namespace, repository, email)
+ record = model.get_email_authorized_for_repo(namespace, repository, email)
if record and record.confirmed:
- return record_view(record)
+ return record.to_dict()
if not record:
- record = model.repository.create_email_authorization_for_repo(namespace, repository, email)
+ record = model.create_email_authorization_for_repo(namespace, repository, email)
send_repo_authorization_email(namespace, repository, email, record.code)
- return record_view(record)
+ return record.to_dict()
diff --git a/endpoints/api/repoemail_models_interface.py b/endpoints/api/repoemail_models_interface.py
new file mode 100644
index 000000000..2aae7ab9c
--- /dev/null
+++ b/endpoints/api/repoemail_models_interface.py
@@ -0,0 +1,50 @@
+from abc import ABCMeta, abstractmethod
+from collections import namedtuple
+
+from six import add_metaclass
+
+
+class RepositoryAuthorizedEmail(
+ namedtuple('RepositoryAuthorizedEmail', [
+ 'email',
+ 'repository_name',
+ 'namespace_name',
+ 'confirmed',
+ 'code',
+ ])):
+ """
+  RepositoryAuthorizedEmail represents an e-mail address authorized to receive notifications for a repository.
+ :type email: string
+ :type repository_name: string
+ :type namespace_name: string
+ :type confirmed: boolean
+ :type code: string
+ """
+
+ def to_dict(self):
+ return {
+ 'email': self.email,
+ 'repository': self.repository_name,
+ 'namespace': self.namespace_name,
+ 'confirmed': self.confirmed,
+ 'code': self.code
+ }
+
+
+@add_metaclass(ABCMeta)
+class RepoEmailDataInterface(object):
+ """
+  Interface that represents all data store interactions required by the repo email API.
+ """
+
+ @abstractmethod
+ def get_email_authorized_for_repo(self, namespace_name, repository_name, email):
+ """
+    Returns a RepositoryAuthorizedEmail if available, else None.
+ """
+
+ @abstractmethod
+ def create_email_authorization_for_repo(self, namespace_name, repository_name, email):
+ """
+ Returns the newly created repository authorized email.
+ """
diff --git a/endpoints/api/repoemail_models_pre_oci.py b/endpoints/api/repoemail_models_pre_oci.py
new file mode 100644
index 000000000..80a65c995
--- /dev/null
+++ b/endpoints/api/repoemail_models_pre_oci.py
@@ -0,0 +1,28 @@
+from data import model
+from endpoints.api.repoemail_models_interface import RepoEmailDataInterface, RepositoryAuthorizedEmail
+
+
+def _return_none_or_data(func, namespace_name, repository_name, email):
+ data = func(namespace_name, repository_name, email)
+ if data is None:
+ return data
+ return RepositoryAuthorizedEmail(email, repository_name, namespace_name, data.confirmed,
+ data.code)
+
+
+class PreOCIModel(RepoEmailDataInterface):
+ """
+ PreOCIModel implements the data model for the Repo Email using a database schema
+ before it was changed to support the OCI specification.
+ """
+
+ def get_email_authorized_for_repo(self, namespace_name, repository_name, email):
+ return _return_none_or_data(model.repository.get_email_authorized_for_repo, namespace_name,
+ repository_name, email)
+
+ def create_email_authorization_for_repo(self, namespace_name, repository_name, email):
+ return _return_none_or_data(model.repository.create_email_authorization_for_repo,
+ namespace_name, repository_name, email)
+
+
+pre_oci_model = PreOCIModel()
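
The model file above hands plain namedtuples to the API layer and serializes them with to_dict(), so the endpoint code never touches ORM rows directly. A minimal sketch of that pattern with hypothetical fields:

    from collections import namedtuple


    class AuthorizedEmail(namedtuple('AuthorizedEmail', ['email', 'confirmed'])):
      def to_dict(self):
        # The API view carries only plain values, never database objects.
        return {'email': self.email, 'confirmed': self.confirmed}


    record = AuthorizedEmail(email='dev@example.com', confirmed=False)
    assert record.to_dict() == {'email': 'dev@example.com', 'confirmed': False}
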
diff --git a/endpoints/api/repository.py b/endpoints/api/repository.py
index 1ae77b6a2..d117f238d 100644
--- a/endpoints/api/repository.py
+++ b/endpoints/api/repository.py
@@ -4,18 +4,20 @@ import logging
import datetime
import features
+from collections import defaultdict
from datetime import timedelta, datetime
from flask import request, abort
-from app import dockerfile_build_queue
-from data import model
-from endpoints.api import (truthy_bool, format_date, nickname, log_action, validate_json_request,
- require_repo_read, require_repo_write, require_repo_admin,
- RepositoryParamResource, resource, query_param, parse_args, ApiResource,
- request_error, require_scope, path_param, page_support, parse_args,
- query_param, truthy_bool)
-from endpoints.exception import Unauthorized, NotFound, InvalidRequest, ExceedsLicenseException
+from app import dockerfile_build_queue, tuf_metadata_api
+from data.database import RepositoryState
+from endpoints.api import (
+ format_date, nickname, log_action, validate_json_request, require_repo_read, require_repo_write,
+ require_repo_admin, RepositoryParamResource, resource, parse_args, ApiResource, request_error,
+ require_scope, path_param, page_support, query_param, truthy_bool, show_if)
+from endpoints.api.repository_models_pre_oci import pre_oci_model as model
+from endpoints.exception import (
+ Unauthorized, NotFound, InvalidRequest, ExceedsLicenseException, DownstreamIssue)
from endpoints.api.billing import lookup_allowed_private_repos, get_namespace_plan
from endpoints.api.subscribe import check_repository_usage
@@ -25,12 +27,12 @@ from auth.auth_context import get_authenticated_user
from auth import scopes
from util.names import REPOSITORY_NAME_REGEX
-
logger = logging.getLogger(__name__)
REPOS_PER_PAGE = 100
MAX_DAYS_IN_3_MONTHS = 92
+
def check_allowed_private_repos(namespace):
""" Checks to see if the given namespace has reached its private repository limit. If so,
raises a ExceedsLicenseException.
@@ -69,7 +71,8 @@ class RepositoryList(ApiResource):
],
},
'namespace': {
- 'type': 'string',
+ 'type':
+ 'string',
'description': ('Namespace in which the repository should be created. If omitted, the '
'username of the caller is used'),
},
@@ -77,6 +80,11 @@ class RepositoryList(ApiResource):
'type': 'string',
'description': 'Markdown encoded description for the repository',
},
+ 'repo_kind': {
+ 'type': ['string', 'null'],
+ 'description': 'The kind of repository',
+ 'enum': ['image', 'application', None],
+ }
},
},
}
@@ -99,8 +107,7 @@ class RepositoryList(ApiResource):
repository_name = req['repository']
visibility = req['visibility']
- existing = model.repository.get_repository(namespace_name, repository_name)
- if existing:
+ if model.repo_exists(namespace_name, repository_name):
raise request_error(message='Repository already exists')
visibility = req['visibility']
@@ -111,20 +118,21 @@ class RepositoryList(ApiResource):
if not REPOSITORY_NAME_REGEX.match(repository_name):
raise InvalidRequest('Invalid repository name')
- repo = model.repository.create_repository(namespace_name, repository_name, owner, visibility)
- repo.description = req['description']
- repo.save()
+ kind = req.get('repo_kind', 'image') or 'image'
+ model.create_repo(namespace_name, repository_name, owner, req['description'],
+ visibility=visibility, repo_kind=kind)
- log_action('create_repo', namespace_name, {'repo': repository_name,
- 'namespace': namespace_name}, repo=repo)
+ log_action('create_repo', namespace_name,
+ {'repo': repository_name,
+ 'namespace': namespace_name}, repo_name=repository_name)
return {
'namespace': namespace_name,
- 'name': repository_name
+ 'name': repository_name,
+ 'kind': kind,
}, 201
raise Unauthorized()
-
@require_scope(scopes.READ_REPO)
@nickname('listRepos')
@parse_args()
@@ -137,6 +145,7 @@ class RepositoryList(ApiResource):
type=truthy_bool, default=False)
@query_param('popularity', 'Whether to include the repository\'s popularity metric.',
type=truthy_bool, default=False)
+ @query_param('repo_kind', 'The kind of repositories to return', type=str, default='image')
@page_support()
def get(self, page_token, parsed_args):
""" Fetch the list of repositories visible to the current user under a variety of situations.
@@ -149,85 +158,18 @@ class RepositoryList(ApiResource):
user = get_authenticated_user()
username = user.username if user else None
- next_page_token = None
- repos = None
+ last_modified = parsed_args['last_modified']
+ popularity = parsed_args['popularity']
- # Lookup the requested repositories (either starred or non-starred.)
- if parsed_args['starred']:
- if not username:
- # No repositories should be returned, as there is no user.
- abort(400)
+ if parsed_args['starred'] and not username:
+ # No repositories should be returned, as there is no user.
+ abort(400)
- # Return the full list of repos starred by the current user that are still visible to them.
- def can_view_repo(repo):
- return ReadRepositoryPermission(repo.namespace_user.username, repo.name).can()
+ repos, next_page_token = model.get_repo_list(
+ parsed_args['starred'], user, parsed_args['repo_kind'], parsed_args['namespace'], username,
+ parsed_args['public'], page_token, last_modified, popularity)
- unfiltered_repos = model.repository.get_user_starred_repositories(user)
- repos = [repo for repo in unfiltered_repos if can_view_repo(repo)]
- elif parsed_args['namespace']:
- # Repositories filtered by namespace do not need pagination (their results are fairly small),
- # so we just do the lookup directly.
- repos = list(model.repository.get_visible_repositories(username=username,
- include_public=parsed_args['public'],
- namespace=parsed_args['namespace']))
- else:
- # Determine the starting offset for pagination. Note that we don't use the normal
- # model.modelutil.paginate method here, as that does not operate over UNION queries, which
- # get_visible_repositories will return if there is a logged-in user (for performance reasons).
- #
- # Also note the +1 on the limit, as paginate_query uses the extra result to determine whether
- # there is a next page.
- start_id = model.modelutil.pagination_start(page_token)
- repo_query = model.repository.get_visible_repositories(username=username,
- include_public=parsed_args['public'],
- start_id=start_id,
- limit=REPOS_PER_PAGE+1)
-
- repos, next_page_token = model.modelutil.paginate_query(repo_query, limit=REPOS_PER_PAGE,
- id_alias='rid')
-
- # Collect the IDs of the repositories found for subequent lookup of popularity
- # and/or last modified.
- if parsed_args['last_modified'] or parsed_args['popularity']:
- repository_ids = [repo.rid for repo in repos]
-
- if parsed_args['last_modified']:
- last_modified_map = model.repository.get_when_last_modified(repository_ids)
-
- if parsed_args['popularity']:
- action_sum_map = model.log.get_repositories_action_sums(repository_ids)
-
- # Collect the IDs of the repositories that are starred for the user, so we can mark them
- # in the returned results.
- star_set = set()
- if username:
- starred_repos = model.repository.get_user_starred_repositories(user)
- star_set = {starred.id for starred in starred_repos}
-
- def repo_view(repo_obj):
- repo = {
- 'namespace': repo_obj.namespace_user.username,
- 'name': repo_obj.name,
- 'description': repo_obj.description,
- 'is_public': repo_obj.visibility_id == model.repository.get_public_repo_visibility().id,
- }
-
- repo_id = repo_obj.rid
-
- if parsed_args['last_modified']:
- repo['last_modified'] = last_modified_map.get(repo_id)
-
- if parsed_args['popularity']:
- repo['popularity'] = float(action_sum_map.get(repo_id, 0))
-
- if username:
- repo['is_starred'] = repo_id in star_set
-
- return repo
-
- return {
- 'repositories': [repo_view(repo) for repo in repos]
- }, next_page_token
+ return {'repositories': [repo.to_dict() for repo in repos]}, next_page_token
@resource('/v1/repository/<apirepopath:repository>')
@@ -238,9 +180,7 @@ class Repository(RepositoryParamResource):
'RepoUpdate': {
'type': 'object',
'description': 'Fields which can be updated in a repository.',
- 'required': [
- 'description',
- ],
+ 'required': ['description',],
'properties': {
'description': {
'type': 'string',
@@ -253,116 +193,78 @@ class Repository(RepositoryParamResource):
@parse_args()
@query_param('includeStats', 'Whether to include action statistics', type=truthy_bool,
default=False)
+ @query_param('includeTags', 'Whether to include repository tags', type=truthy_bool,
+ default=True)
@require_repo_read
@nickname('getRepo')
def get(self, namespace, repository, parsed_args):
"""Fetch the specified repository."""
logger.debug('Get repo: %s/%s' % (namespace, repository))
+ include_tags = parsed_args['includeTags']
+ max_tags = 500
+ repo = model.get_repo(namespace, repository, get_authenticated_user(), include_tags, max_tags)
+ if repo is None:
+ raise NotFound()
- def tag_view(tag):
- tag_info = {
- 'name': tag.name,
- 'image_id': tag.image.docker_image_id,
- 'size': tag.image.aggregate_size
- }
+ has_write_permission = ModifyRepositoryPermission(namespace, repository).can()
+ has_write_permission = has_write_permission and repo.state == RepositoryState.NORMAL
- if tag.lifetime_start_ts > 0:
- last_modified = format_date(datetime.fromtimestamp(tag.lifetime_start_ts))
- tag_info['last_modified'] = last_modified
+ repo_data = repo.to_dict()
+ repo_data['can_write'] = has_write_permission
+ repo_data['can_admin'] = AdministerRepositoryPermission(namespace, repository).can()
- return tag_info
+ if parsed_args['includeStats'] and repo.repository_base_elements.kind_name != 'application':
+ stats = []
+ found_dates = {}
- repo = model.repository.get_repository(namespace, repository)
- stats = None
- if repo:
- tags = model.tag.list_repository_tags(namespace, repository, include_storage=True)
- tag_dict = {tag.name: tag_view(tag) for tag in tags}
- can_write = ModifyRepositoryPermission(namespace, repository).can()
- can_admin = AdministerRepositoryPermission(namespace, repository).can()
+ for count in repo.counts:
+ stats.append(count.to_dict())
+ found_dates['%s/%s' % (count.date.month, count.date.day)] = True
- is_starred = (model.repository.repository_is_starred(get_authenticated_user(), repo)
- if get_authenticated_user() else False)
- is_public = model.repository.is_repository_public(repo)
-
- if parsed_args['includeStats']:
- stats = []
- found_dates = {}
-
- start_date = datetime.now() - timedelta(days=MAX_DAYS_IN_3_MONTHS)
- counts = model.log.get_repository_action_counts(repo, start_date)
- for count in counts:
+ # Fill in any missing stats with zeros.
+ for day in range(1, MAX_DAYS_IN_3_MONTHS):
+ day_date = datetime.now() - timedelta(days=day)
+ key = '%s/%s' % (day_date.month, day_date.day)
+ if key not in found_dates:
stats.append({
- 'date': count.date.isoformat(),
- 'count': count.count,
+ 'date': day_date.date().isoformat(),
+ 'count': 0,
})
- found_dates['%s/%s' % (count.date.month, count.date.day)] = True
-
- # Fill in any missing stats with zeros.
- for day in range(1, MAX_DAYS_IN_3_MONTHS):
- day_date = datetime.now() - timedelta(days=day)
- key = '%s/%s' % (day_date.month, day_date.day)
- if not key in found_dates:
- stats.append({
- 'date': day_date.date().isoformat(),
- 'count': 0,
- })
-
- repo_data = {
- 'namespace': namespace,
- 'name': repository,
- 'description': repo.description,
- 'tags': tag_dict,
- 'can_write': can_write,
- 'can_admin': can_admin,
- 'is_public': is_public,
- 'is_organization': repo.namespace_user.organization,
- 'is_starred': is_starred,
- 'status_token': repo.badge_token if not is_public else '',
- }
-
- if stats is not None:
- repo_data['stats'] = stats
-
- return repo_data
-
- raise NotFound()
+ repo_data['stats'] = stats
+ return repo_data
@require_repo_write
@nickname('updateRepo')
@validate_json_request('RepoUpdate')
def put(self, namespace, repository):
""" Update the description in the specified repository. """
- repo = model.repository.get_repository(namespace, repository)
- if repo:
- values = request.get_json()
- repo.description = values['description']
- repo.save()
+ if not model.repo_exists(namespace, repository):
+ raise NotFound()
- log_action('set_repo_description', namespace,
- {'repo': repository, 'description': values['description']},
- repo=repo)
- return {
- 'success': True
- }
- raise NotFound()
+ values = request.get_json()
+ model.set_description(namespace, repository, values['description'])
+
+ log_action('set_repo_description', namespace,
+ {'repo': repository,
+ 'namespace': namespace,
+ 'description': values['description']}, repo_name=repository)
+ return {'success': True}
@require_repo_admin
@nickname('deleteRepository')
def delete(self, namespace, repository):
""" Delete a repository. """
- model.repository.purge_repository(namespace, repository)
- user = model.user.get_namespace_user(namespace)
+ username = model.purge_repository(namespace, repository)
if features.BILLING:
plan = get_namespace_plan(namespace)
- check_repository_usage(user, plan)
+ model.check_repository_usage(username, plan)
# Remove any builds from the queue.
dockerfile_build_queue.delete_namespaced_items(namespace, repository)
- log_action('delete_repo', namespace,
- {'repo': repository, 'namespace': namespace})
+ log_action('delete_repo', namespace, {'repo': repository, 'namespace': namespace})
return '', 204
@@ -374,9 +276,7 @@ class RepositoryVisibility(RepositoryParamResource):
'ChangeVisibility': {
'type': 'object',
'description': 'Change the visibility for the repository.',
- 'required': [
- 'visibility',
- ],
+ 'required': ['visibility',],
'properties': {
'visibility': {
'type': 'string',
@@ -395,15 +295,110 @@ class RepositoryVisibility(RepositoryParamResource):
@validate_json_request('ChangeVisibility')
def post(self, namespace, repository):
""" Change the visibility of a repository. """
- repo = model.repository.get_repository(namespace, repository)
- if repo:
+ if model.repo_exists(namespace, repository):
values = request.get_json()
visibility = values['visibility']
if visibility == 'private':
check_allowed_private_repos(namespace)
- model.repository.set_repository_visibility(repo, visibility)
+ model.set_repository_visibility(namespace, repository, visibility)
log_action('change_repo_visibility', namespace,
- {'repo': repository, 'visibility': values['visibility']},
- repo=repo)
+ {'repo': repository,
+ 'namespace': namespace,
+ 'visibility': values['visibility']}, repo_name=repository)
return {'success': True}
+
+
+@resource('/v1/repository/<apirepopath:repository>/changetrust')
+@path_param('repository', 'The full path of the repository. e.g. namespace/name')
+class RepositoryTrust(RepositoryParamResource):
+ """ Custom verb for changing the trust settings of the repository. """
+ schemas = {
+ 'ChangeRepoTrust': {
+ 'type': 'object',
+ 'description': 'Change the trust settings for the repository.',
+ 'required': ['trust_enabled',],
+ 'properties': {
+ 'trust_enabled': {
+ 'type': 'boolean',
+ 'description': 'Whether or not signing is enabled for the repository.'
+ },
+ }
+ }
+ }
+
+ @show_if(features.SIGNING)
+ @require_repo_admin
+ @nickname('changeRepoTrust')
+ @validate_json_request('ChangeRepoTrust')
+ def post(self, namespace, repository):
+ """ Change the visibility of a repository. """
+ if not model.repo_exists(namespace, repository):
+ raise NotFound()
+
+ tags, _ = tuf_metadata_api.get_default_tags_with_expiration(namespace, repository)
+ if tags and not tuf_metadata_api.delete_metadata(namespace, repository):
+ raise DownstreamIssue('Unable to delete downstream trust metadata')
+
+ values = request.get_json()
+ model.set_trust(namespace, repository, values['trust_enabled'])
+
+ log_action(
+ 'change_repo_trust', namespace,
+ {'repo': repository,
+ 'namespace': namespace,
+ 'trust_enabled': values['trust_enabled']}, repo_name=repository)
+
+ return {'success': True}
+
+
+@resource('/v1/repository/<apirepopath:repository>/changestate')
+@path_param('repository', 'The full path of the repository. e.g. namespace/name')
+@show_if(features.REPO_MIRROR)
+class RepositoryStateResource(RepositoryParamResource):
+ """ Custom verb for changing the state of the repository. """
+ schemas = {
+ 'ChangeRepoState': {
+ 'type': 'object',
+ 'description': 'Change the state of the repository.',
+ 'required': ['state'],
+ 'properties': {
+ 'state': {
+ 'type': 'string',
+ 'description': 'Determines whether pushes are allowed.',
+ 'enum': ['NORMAL', 'READ_ONLY', 'MIRROR'],
+ },
+ }
+ }
+ }
+
+ @require_repo_admin
+ @nickname('changeRepoState')
+ @validate_json_request('ChangeRepoState')
+ def put(self, namespace, repository):
+ """ Change the state of a repository. """
+ if not model.repo_exists(namespace, repository):
+ raise NotFound()
+
+ values = request.get_json()
+ state_name = values['state']
+
+ try:
+ state = RepositoryState[state_name]
+ except KeyError:
+ state = None
+
+ if state == RepositoryState.MIRROR and not features.REPO_MIRROR:
+ return {'detail': 'Unknown Repository State: %s' % state_name}, 400
+
+ if state is None:
+ return {'detail': '%s is not a valid Repository state.' % state_name}, 400
+
+ model.set_repository_state(namespace, repository, state)
+
+ log_action('change_repo_state', namespace,
+ {'repo': repository,
+ 'namespace': namespace,
+ 'state_changed': state_name}, repo_name=repository)
+
+ return {'success': True}
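
changeRepoState above parses the requested state with an Enum name lookup, mapping KeyError to an explicit 400 rather than a traceback. A standalone sketch of that lookup (the enum members mirror the values named in the schema; the function name is hypothetical):

    from enum import Enum


    class RepositoryState(Enum):  # stand-in for data.database.RepositoryState
      NORMAL = 0
      READ_ONLY = 1
      MIRROR = 2


    def parse_state(state_name):
      # Enum[name] looks a member up by name and raises KeyError when absent.
      try:
        return RepositoryState[state_name]
      except KeyError:
        return None


    assert parse_state('READ_ONLY') is RepositoryState.READ_ONLY
    assert parse_state('BOGUS') is None
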
diff --git a/endpoints/api/repository_models_interface.py b/endpoints/api/repository_models_interface.py
new file mode 100644
index 000000000..3b5e06a2f
--- /dev/null
+++ b/endpoints/api/repository_models_interface.py
@@ -0,0 +1,279 @@
+from abc import ABCMeta, abstractmethod
+from collections import namedtuple, defaultdict
+
+from datetime import datetime
+from six import add_metaclass
+
+import features
+from data.database import RepositoryState
+from endpoints.api import format_date
+
+
+class RepositoryBaseElement(
+ namedtuple('RepositoryBaseElement', [
+ 'namespace_name', 'repository_name', 'is_starred', 'is_public', 'kind_name', 'description',
+ 'namespace_user_organization', 'namespace_user_removed_tag_expiration_s', 'last_modified',
+ 'action_count', 'should_last_modified', 'should_popularity', 'should_is_starred',
+ 'is_free_account', 'state'
+ ])):
+ """
+  RepositoryBaseElement represents the base elements of a single Quay repository.
+ :type namespace_name: string
+ :type repository_name: string
+ :type is_starred: boolean
+ :type is_public: boolean
+ :type kind_name: string
+ :type description: string
+ :type namespace_user_organization: boolean
+ :type should_last_modified: boolean
+ :type should_popularity: boolean
+ :type should_is_starred: boolean
+ """
+
+ def to_dict(self):
+ repo = {
+ 'namespace': self.namespace_name,
+ 'name': self.repository_name,
+ 'description': self.description,
+ 'is_public': self.is_public,
+ 'kind': self.kind_name,
+ 'state': self.state.name if self.state is not None else None,
+ }
+
+ if self.should_last_modified:
+ repo['last_modified'] = self.last_modified
+
+ if self.should_popularity:
+ repo['popularity'] = float(self.action_count if self.action_count else 0)
+
+ if self.should_is_starred:
+ repo['is_starred'] = self.is_starred
+
+ return repo
+
+
+class ApplicationRepository(
+ namedtuple('ApplicationRepository', ['repository_base_elements', 'channels', 'releases', 'state'])):
+ """
+  ApplicationRepository represents a single Quay application repository.
+ :type repository_base_elements: RepositoryBaseElement
+ :type channels: [Channel]
+ :type releases: [Release]
+ """
+
+ def to_dict(self):
+ repo_data = {
+ 'namespace': self.repository_base_elements.namespace_name,
+ 'name': self.repository_base_elements.repository_name,
+ 'kind': self.repository_base_elements.kind_name,
+ 'description': self.repository_base_elements.description,
+ 'is_public': self.repository_base_elements.is_public,
+ 'is_organization': self.repository_base_elements.namespace_user_organization,
+ 'is_starred': self.repository_base_elements.is_starred,
+ 'channels': [chan.to_dict() for chan in self.channels],
+ 'releases': [release.to_dict() for release in self.releases],
+ 'state': self.state.name if self.state is not None else None,
+ 'is_free_account': self.repository_base_elements.is_free_account,
+ }
+
+ return repo_data
+
+
+class ImageRepositoryRepository(
+ namedtuple('NonApplicationRepository',
+ ['repository_base_elements', 'tags', 'counts', 'badge_token', 'trust_enabled',
+ 'state'])):
+ """
+  ImageRepositoryRepository represents a single Quay image repository.
+ :type repository_base_elements: RepositoryBaseElement
+ :type tags: [Tag]
+ :type counts: [count]
+ :type badge_token: string
+ :type trust_enabled: boolean
+ """
+
+ def to_dict(self):
+ img_repo = {
+ 'namespace': self.repository_base_elements.namespace_name,
+ 'name': self.repository_base_elements.repository_name,
+ 'kind': self.repository_base_elements.kind_name,
+ 'description': self.repository_base_elements.description,
+ 'is_public': self.repository_base_elements.is_public,
+ 'is_organization': self.repository_base_elements.namespace_user_organization,
+ 'is_starred': self.repository_base_elements.is_starred,
+ 'status_token': self.badge_token if not self.repository_base_elements.is_public else '',
+ 'trust_enabled': bool(features.SIGNING) and self.trust_enabled,
+ 'tag_expiration_s': self.repository_base_elements.namespace_user_removed_tag_expiration_s,
+ 'is_free_account': self.repository_base_elements.is_free_account,
+ 'state': self.state.name if self.state is not None else None
+ }
+
+ if self.tags is not None:
+ img_repo['tags'] = {tag.name: tag.to_dict() for tag in self.tags}
+
+ if self.repository_base_elements.state:
+ img_repo['state'] = self.repository_base_elements.state.name
+
+ return img_repo
+
+
+class Repository(namedtuple('Repository', [
+ 'namespace_name',
+ 'repository_name',
+])):
+ """
+  Repository represents a single Quay repository.
+ :type namespace_name: string
+ :type repository_name: string
+ """
+
+
+class Channel(namedtuple('Channel', ['name', 'linked_tag_name', 'linked_tag_lifetime_start'])):
+ """
+  Channel represents a single named release channel in an application repository.
+  :type name: string
+  :type linked_tag_name: string
+  :type linked_tag_lifetime_start: int
+ """
+
+ def to_dict(self):
+ return {
+ 'name': self.name,
+ 'release': self.linked_tag_name,
+ 'last_modified': format_date(datetime.fromtimestamp(self.linked_tag_lifetime_start / 1000)),
+ }
+
+
+class Release(
+    namedtuple('Release', ['name', 'lifetime_start', 'releases_channels_map'])):
+ """
+  Release represents a single application release.
+  :type name: string
+  :type lifetime_start: int
+  :type releases_channels_map: {string -> [string]}
+ """
+
+ def to_dict(self):
+ return {
+ 'name': self.name,
+ 'last_modified': format_date(datetime.fromtimestamp(self.lifetime_start / 1000)),
+ 'channels': self.releases_channels_map[self.name],
+ }
+
+
+class Tag(
+ namedtuple('Tag', [
+ 'name', 'image_docker_image_id', 'image_aggregate_size', 'lifetime_start_ts',
+ 'tag_manifest_digest', 'lifetime_end_ts',
+ ])):
+ """
+ :type name: string
+ :type image_docker_image_id: string
+ :type image_aggregate_size: int
+ :type lifetime_start_ts: int
+ :type lifetime_end_ts: int|None
+ :type tag_manifest_digest: string
+
+ """
+
+ def to_dict(self):
+ tag_info = {
+ 'name': self.name,
+ 'image_id': self.image_docker_image_id,
+ 'size': self.image_aggregate_size
+ }
+
+ if self.lifetime_start_ts > 0:
+ last_modified = format_date(datetime.fromtimestamp(self.lifetime_start_ts))
+ tag_info['last_modified'] = last_modified
+
+ if self.lifetime_end_ts:
+ expiration = format_date(datetime.fromtimestamp(self.lifetime_end_ts))
+ tag_info['expiration'] = expiration
+
+ if self.tag_manifest_digest is not None:
+ tag_info['manifest_digest'] = self.tag_manifest_digest
+
+ return tag_info
+
+
+class Count(namedtuple('Count', ['date', 'count'])):
+ """
+ date: DateTime
+ count: int
+ """
+
+ def to_dict(self):
+ return {
+ 'date': self.date.isoformat(),
+ 'count': self.count,
+ }
+
+
+@add_metaclass(ABCMeta)
+class RepositoryDataInterface(object):
+ """
+ Interface that represents all data store interactions required by a Repository.
+ """
+
+ @abstractmethod
+ def get_repo(self, namespace_name, repository_name, user, include_tags=True, max_tags=500):
+ """
+    Returns a repository, or None if it does not exist.
+ """
+
+ @abstractmethod
+ def repo_exists(self, namespace_name, repository_name):
+ """
+    Returns True if the repo exists, False otherwise.
+ """
+
+ @abstractmethod
+ def create_repo(self, namespace, name, creating_user, description, visibility='private',
+ repo_kind='image'):
+ """
+    Creates a new repository and returns it.
+ """
+
+ @abstractmethod
+ def get_repo_list(self, starred, user, repo_kind, namespace, username, public, page_token,
+ last_modified, popularity):
+ """
+    Returns a list of RepositoryBaseElements and a token for the next page, if any.
+ """
+
+ @abstractmethod
+ def set_repository_visibility(self, namespace_name, repository_name, visibility):
+ """
+ Sets a repository's visibility if it is found
+ """
+
+ @abstractmethod
+ def set_trust(self, namespace_name, repository_name, trust):
+ """
+ Sets a repository's trust_enabled field if it is found
+ """
+
+ @abstractmethod
+ def set_description(self, namespace_name, repository_name, description):
+ """
+ Sets a repository's description if it is found.
+ """
+
+ @abstractmethod
+ def purge_repository(self, namespace_name, repository_name):
+ """
+ Removes a repository
+ """
+
+ @abstractmethod
+ def check_repository_usage(self, user_name, plan_found):
+ """
+    Creates a notification if the user is over their allowed private repository usage, and clears it otherwise.
+ """
+
+ @abstractmethod
+ def set_repository_state(self, namespace_name, repository_name, state):
+ """
+    Sets the state of the repository.
+ """
diff --git a/endpoints/api/repository_models_pre_oci.py b/endpoints/api/repository_models_pre_oci.py
new file mode 100644
index 000000000..328c5443e
--- /dev/null
+++ b/endpoints/api/repository_models_pre_oci.py
@@ -0,0 +1,190 @@
+from collections import defaultdict
+
+from datetime import datetime, timedelta
+
+from auth.permissions import ReadRepositoryPermission
+from data.database import Repository as RepositoryTable, RepositoryState
+from data import model
+from data.appr_model import channel as channel_model, release as release_model
+from data.registry_model import registry_model
+from data.registry_model.datatypes import RepositoryReference
+from endpoints.appr.models_cnr import model as appr_model
+from endpoints.api.repository_models_interface import RepositoryDataInterface, RepositoryBaseElement, Repository, \
+ ApplicationRepository, ImageRepositoryRepository, Tag, Channel, Release, Count
+
+MAX_DAYS_IN_3_MONTHS = 92
+REPOS_PER_PAGE = 100
+
+
+def _create_channel(channel, releases_channels_map):
+ releases_channels_map[channel.linked_tag.name].append(channel.name)
+ return Channel(channel.name, channel.linked_tag.name, channel.linked_tag.lifetime_start)
+
+
+class PreOCIModel(RepositoryDataInterface):
+ """
+  PreOCIModel implements the data model for repositories using a database schema
+ before it was changed to support the OCI specification.
+ """
+
+ def check_repository_usage(self, username, plan_found):
+ private_repos = model.user.get_private_repo_count(username)
+ if plan_found is None:
+ repos_allowed = 0
+ else:
+ repos_allowed = plan_found['privateRepos']
+
+ user_or_org = model.user.get_namespace_user(username)
+ if private_repos > repos_allowed:
+ model.notification.create_unique_notification('over_private_usage', user_or_org,
+ {'namespace': username})
+ else:
+ model.notification.delete_notifications_by_kind(user_or_org, 'over_private_usage')
+
+ def purge_repository(self, namespace_name, repository_name):
+ model.gc.purge_repository(namespace_name, repository_name)
+ user = model.user.get_namespace_user(namespace_name)
+ return user.username
+
+ def set_description(self, namespace_name, repository_name, description):
+ repo = model.repository.get_repository(namespace_name, repository_name)
+ model.repository.set_description(repo, description)
+
+ def set_trust(self, namespace_name, repository_name, trust):
+ repo = model.repository.get_repository(namespace_name, repository_name)
+ model.repository.set_trust(repo, trust)
+
+ def set_repository_visibility(self, namespace_name, repository_name, visibility):
+ repo = model.repository.get_repository(namespace_name, repository_name)
+ model.repository.set_repository_visibility(repo, visibility)
+
+ def set_repository_state(self, namespace_name, repository_name, state):
+ repo = model.repository.get_repository(namespace_name, repository_name)
+ model.repository.set_repository_state(repo, state)
+
+ def get_repo_list(self, starred, user, repo_kind, namespace, username, public, page_token,
+ last_modified, popularity):
+ next_page_token = None
+ # Lookup the requested repositories (either starred or non-starred.)
+ if starred:
+ # Return the full list of repos starred by the current user that are still visible to them.
+ def can_view_repo(repo):
+ can_view = ReadRepositoryPermission(repo.namespace_user.username, repo.name).can()
+ return can_view or model.repository.is_repository_public(repo)
+
+ unfiltered_repos = model.repository.get_user_starred_repositories(user,
+ kind_filter=repo_kind)
+ repos = [repo for repo in unfiltered_repos if can_view_repo(repo)]
+ elif namespace:
+ # Repositories filtered by namespace do not need pagination (their results are fairly small),
+ # so we just do the lookup directly.
+ repos = list(
+ model.repository.get_visible_repositories(username=username, include_public=public,
+ namespace=namespace, kind_filter=repo_kind))
+ else:
+ # Determine the starting offset for pagination. Note that we don't use the normal
+ # model.modelutil.paginate method here, as that does not operate over UNION queries, which
+ # get_visible_repositories will return if there is a logged-in user (for performance reasons).
+ #
+ # Also note the +1 on the limit, as paginate_query uses the extra result to determine whether
+ # there is a next page.
+ start_id = model.modelutil.pagination_start(page_token)
+ repo_query = model.repository.get_visible_repositories(
+ username=username, include_public=public, start_id=start_id, limit=REPOS_PER_PAGE + 1,
+ kind_filter=repo_kind)
+
+ repos, next_page_token = model.modelutil.paginate_query(repo_query, limit=REPOS_PER_PAGE,
+ sort_field_name='rid')
+
+    # Collect the IDs of the repositories found for subsequent lookup of popularity
+ # and/or last modified.
+ last_modified_map = {}
+ action_sum_map = {}
+ if last_modified or popularity:
+ repository_refs = [RepositoryReference.for_id(repo.rid) for repo in repos]
+ repository_ids = [repo.rid for repo in repos]
+
+ if last_modified:
+ last_modified_map = registry_model.get_most_recent_tag_lifetime_start(repository_refs)
+
+ if popularity:
+ action_sum_map = model.log.get_repositories_action_sums(repository_ids)
+
+ # Collect the IDs of the repositories that are starred for the user, so we can mark them
+ # in the returned results.
+ star_set = set()
+ if username:
+ starred_repos = model.repository.get_user_starred_repositories(user)
+ star_set = {starred.id for starred in starred_repos}
+
+ return [
+ RepositoryBaseElement(repo.namespace_user.username, repo.name, repo.id in star_set,
+ repo.visibility_id == model.repository.get_public_repo_visibility().id,
+ repo_kind, repo.description, repo.namespace_user.organization,
+ repo.namespace_user.removed_tag_expiration_s,
+ last_modified_map.get(repo.rid),
+ action_sum_map.get(repo.rid), last_modified, popularity, username,
+ None, repo.state)
+ for repo in repos
+ ], next_page_token
+
+ def repo_exists(self, namespace_name, repository_name):
+ repo = model.repository.get_repository(namespace_name, repository_name)
+ if repo is None:
+ return False
+
+ return True
+
+ def create_repo(self, namespace_name, repository_name, owner, description, visibility='private',
+ repo_kind='image'):
+ repo = model.repository.create_repository(namespace_name, repository_name, owner, visibility,
+ repo_kind=repo_kind, description=description)
+ return Repository(namespace_name, repository_name)
+
+ def get_repo(self, namespace_name, repository_name, user, include_tags=True, max_tags=500):
+ repo = model.repository.get_repository(namespace_name, repository_name)
+ if repo is None:
+ return None
+
+ is_starred = model.repository.repository_is_starred(user, repo) if user else False
+ is_public = model.repository.is_repository_public(repo)
+ kind_name = RepositoryTable.kind.get_name(repo.kind_id)
+ base = RepositoryBaseElement(
+ namespace_name, repository_name, is_starred, is_public, kind_name, repo.description,
+ repo.namespace_user.organization, repo.namespace_user.removed_tag_expiration_s, None, None,
+ False, False, False, repo.namespace_user.stripe_id is None, repo.state)
+
+ if base.kind_name == 'application':
+ channels = channel_model.get_repo_channels(repo, appr_model.models_ref)
+ releases = release_model.get_release_objs(repo, appr_model.models_ref)
+ releases_channels_map = defaultdict(list)
+ return ApplicationRepository(
+ base, [_create_channel(channel, releases_channels_map) for channel in channels], [
+ Release(release.name, release.lifetime_start, releases_channels_map)
+ for release in releases
+ ], repo.state)
+
+ tags = None
+ repo_ref = RepositoryReference.for_repo_obj(repo)
+ if include_tags:
+ tags, _ = registry_model.list_repository_tag_history(repo_ref, page=1, size=max_tags,
+ active_tags_only=True)
+ tags = [
+ Tag(tag.name,
+ tag.legacy_image.docker_image_id if tag.legacy_image_if_present else None,
+ tag.legacy_image.aggregate_size if tag.legacy_image_if_present else None,
+ tag.lifetime_start_ts,
+ tag.manifest_digest,
+ tag.lifetime_end_ts) for tag in tags
+ ]
+
+ start_date = datetime.now() - timedelta(days=MAX_DAYS_IN_3_MONTHS)
+ counts = model.log.get_repository_action_counts(repo, start_date)
+
+ assert repo.state is not None
+ return ImageRepositoryRepository(base, tags,
+ [Count(count.date, count.count) for count in counts],
+ repo.badge_token, repo.trust_enabled, repo.state)
+
+
+pre_oci_model = PreOCIModel()
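
get_repo_list above queries with limit=REPOS_PER_PAGE + 1; the extra row is never returned, it only signals whether a next-page token should be issued. A simplified sketch of that idea over a plain list (paginate and the token shape are hypothetical, not model.modelutil's actual API):

    REPOS_PER_PAGE = 100


    def paginate(rows, page_size=REPOS_PER_PAGE):
      # rows is assumed to hold up to page_size + 1 query results.
      page = rows[:page_size]
      has_more = len(rows) > page_size
      next_token = {'start_id': page[-1]['id'] + 1} if has_more else None
      return page, next_token


    rows = [{'id': i} for i in range(REPOS_PER_PAGE + 1)]  # queried with limit + 1
    page, token = paginate(rows)
    assert len(page) == REPOS_PER_PAGE and token == {'start_id': REPOS_PER_PAGE}
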
diff --git a/endpoints/api/repositorynotification.py b/endpoints/api/repositorynotification.py
index a9828d518..c34cbc553 100644
--- a/endpoints/api/repositorynotification.py
+++ b/endpoints/api/repositorynotification.py
@@ -1,42 +1,19 @@
""" List, create and manage repository events/notifications. """
-import json
-
+import logging
from flask import request
-from app import notification_queue
-from endpoints.api import (RepositoryParamResource, nickname, resource, require_repo_admin,
- log_action, validate_json_request, request_error,
- path_param)
+from endpoints.api import (
+ RepositoryParamResource, nickname, resource, require_repo_admin, log_action,
+ validate_json_request, request_error, path_param, disallow_for_app_repositories, InvalidRequest)
from endpoints.exception import NotFound
-from endpoints.notificationevent import NotificationEvent
-from endpoints.notificationmethod import (NotificationMethod,
- CannotValidateNotificationMethodException)
-from endpoints.notificationhelper import build_notification_data
-from data import model
+from notifications.models_interface import Repository
+from notifications.notificationevent import NotificationEvent
+from notifications.notificationmethod import (
+ NotificationMethod, CannotValidateNotificationMethodException)
+from endpoints.api.repositorynotification_models_pre_oci import pre_oci_model as model
-
-def notification_view(note):
- config = {}
- try:
- config = json.loads(note.config_json)
- except:
- config = {}
-
- event_config = {}
- try:
- event_config = json.loads(note.event_config_json)
- except:
- event_config = {}
-
- return {
- 'uuid': note.uuid,
- 'event': note.event.name,
- 'method': note.method.name,
- 'config': config,
- 'title': note.title,
- 'event_config': event_config,
- }
+logger = logging.getLogger(__name__)
@resource('/v1/repository/<apirepopath:repository>/notification/')
@@ -80,41 +57,37 @@ class RepositoryNotificationList(RepositoryParamResource):
@require_repo_admin
@nickname('createRepoNotification')
+ @disallow_for_app_repositories
@validate_json_request('NotificationCreateRequest')
- def post(self, namespace, repository):
- """ Create a new notification for the specified repository. """
- repo = model.repository.get_repository(namespace, repository)
+ def post(self, namespace_name, repository_name):
parsed = request.get_json()
method_handler = NotificationMethod.get_method(parsed['method'])
- if not method_handler:
- raise request_error(message='Unknown method')
-
try:
- method_handler.validate(repo, parsed['config'])
+ method_handler.validate(namespace_name, repository_name, parsed['config'])
except CannotValidateNotificationMethodException as ex:
raise request_error(message=ex.message)
- new_notification = model.notification.create_repo_notification(repo, parsed['event'],
- parsed['method'], parsed['config'],
- parsed['eventConfig'],
- parsed.get('title', None))
+ new_notification = model.create_repo_notification(namespace_name, repository_name,
+ parsed['event'], parsed['method'],
+ parsed['config'], parsed['eventConfig'],
+ parsed.get('title'))
- resp = notification_view(new_notification)
- log_action('add_repo_notification', namespace,
- {'repo': repository, 'notification_id': new_notification.uuid,
- 'event': parsed['event'], 'method': parsed['method']},
- repo=repo)
- return resp, 201
+ log_action('add_repo_notification', namespace_name, {
+ 'repo': repository_name,
+ 'namespace': namespace_name,
+ 'notification_id': new_notification.uuid,
+ 'event': new_notification.event_name,
+ 'method': new_notification.method_name}, repo_name=repository_name)
+ return new_notification.to_dict(), 201
@require_repo_admin
@nickname('listRepoNotifications')
- def get(self, namespace, repository):
+ @disallow_for_app_repositories
+ def get(self, namespace_name, repository_name):
""" List the notifications for the specified repository. """
- notifications = model.notification.list_repo_notifications(namespace, repository)
- return {
- 'notifications': [notification_view(n) for n in notifications]
- }
+ notifications = model.list_repo_notifications(namespace_name, repository_name)
+ return {'notifications': [n.to_dict() for n in notifications]}
@resource('/v1/repository/<apirepopath:repository>/notification/<uuid>')
@@ -122,30 +95,52 @@ class RepositoryNotificationList(RepositoryParamResource):
@path_param('uuid', 'The UUID of the notification')
class RepositoryNotification(RepositoryParamResource):
""" Resource for dealing with specific notifications. """
+
@require_repo_admin
@nickname('getRepoNotification')
- def get(self, namespace, repository, uuid):
+ @disallow_for_app_repositories
+ def get(self, namespace_name, repository_name, uuid):
""" Get information for the specified notification. """
- try:
- found = model.notification.get_repo_notification(uuid)
- except model.InvalidNotificationException:
+ found = model.get_repo_notification(uuid)
+ if not found:
raise NotFound()
-
- if (found.repository.namespace_user.username != namespace or
- found.repository.name != repository):
- raise NotFound()
-
- return notification_view(found)
+ return found.to_dict()
@require_repo_admin
@nickname('deleteRepoNotification')
- def delete(self, namespace, repository, uuid):
+ @disallow_for_app_repositories
+ def delete(self, namespace_name, repository_name, uuid):
""" Deletes the specified notification. """
- deleted = model.notification.delete_repo_notification(namespace, repository, uuid)
- log_action('delete_repo_notification', namespace,
- {'repo': repository, 'notification_id': uuid,
- 'event': deleted.event.name, 'method': deleted.method.name},
- repo=model.repository.get_repository(namespace, repository))
+ deleted = model.delete_repo_notification(namespace_name, repository_name, uuid)
+ if not deleted:
+ raise InvalidRequest("No repository notification found for: %s, %s, %s" %
+ (namespace_name, repository_name, uuid))
+
+ log_action('delete_repo_notification', namespace_name, {
+ 'repo': repository_name,
+ 'namespace': namespace_name,
+ 'notification_id': uuid,
+ 'event': deleted.event_name,
+ 'method': deleted.method_name}, repo_name=repository_name)
+
+ return 'No Content', 204
+
+ @require_repo_admin
+ @nickname('resetRepositoryNotificationFailures')
+ @disallow_for_app_repositories
+ def post(self, namespace_name, repository_name, uuid):
+ """ Resets repository notification to 0 failures. """
+ reset = model.reset_notification_number_of_failures(namespace_name, repository_name, uuid)
+ if not reset:
+ raise InvalidRequest("No repository notification found for: %s, %s, %s" %
+ (namespace_name, repository_name, uuid))
+
+ log_action('reset_repo_notification', namespace_name, {
+ 'repo': repository_name,
+ 'namespace': namespace_name,
+ 'notification_id': uuid,
+ 'event': reset.event_name,
+ 'method': reset.method_name}, repo_name=repository_name)
return 'No Content', 204
@@ -155,23 +150,15 @@ class RepositoryNotification(RepositoryParamResource):
@path_param('uuid', 'The UUID of the notification')
class TestRepositoryNotification(RepositoryParamResource):
""" Resource for queuing a test of a notification. """
+
@require_repo_admin
@nickname('testRepoNotification')
- def post(self, namespace, repository, uuid):
+ @disallow_for_app_repositories
+ def post(self, namespace_name, repository_name, uuid):
""" Queues a test notification for this repository. """
- try:
- test_note = model.notification.get_repo_notification(uuid)
- except model.InvalidNotificationException:
- raise NotFound()
+ test_note = model.queue_test_notification(uuid)
+ if not test_note:
+ raise InvalidRequest("No repository notification found for: %s, %s, %s" %
+ (namespace_name, repository_name, uuid))
- if (test_note.repository.namespace_user.username != namespace or
- test_note.repository.name != repository):
- raise NotFound()
-
- event_info = NotificationEvent.get_event(test_note.event.name)
- sample_data = event_info.get_sample_data(test_note)
- notification_data = build_notification_data(test_note, sample_data)
- notification_queue.put([test_note.repository.namespace_user.username, repository,
- test_note.event.name], json.dumps(notification_data))
-
- return {}
+ return {}, 200
diff --git a/endpoints/api/repositorynotification_models_interface.py b/endpoints/api/repositorynotification_models_interface.py
new file mode 100644
index 000000000..ed0ebd2f7
--- /dev/null
+++ b/endpoints/api/repositorynotification_models_interface.py
@@ -0,0 +1,146 @@
+import json
+
+from abc import ABCMeta, abstractmethod
+from collections import namedtuple
+
+from six import add_metaclass
+
+
+class RepositoryNotification(
+ namedtuple('RepositoryNotification', [
+ 'uuid',
+ 'title',
+ 'event_name',
+ 'method_name',
+ 'config_json',
+ 'event_config_json',
+ 'number_of_failures',
+ ])):
+ """
+ RepositoryNotification represents a notification for a repository.
+ :type uuid: string
+ :type event: string
+ :type method: string
+ :type config: string
+ :type title: string
+ :type event_config: string
+ :type number_of_failures: int
+ """
+
+ def to_dict(self):
+ try:
+ config = json.loads(self.config_json)
+ except ValueError:
+ config = {}
+
+ try:
+ event_config = json.loads(self.event_config_json)
+ except ValueError:
+ event_config = {}
+
+ return {
+ 'uuid': self.uuid,
+ 'title': self.title,
+ 'event': self.event_name,
+ 'method': self.method_name,
+ 'config': config,
+ 'event_config': event_config,
+ 'number_of_failures': self.number_of_failures,
+ }
+
+
+@add_metaclass(ABCMeta)
+class RepoNotificationInterface(object):
+ """
+ Interface that represents all data store interactions required by the RepositoryNotification API
+ """
+
+ @abstractmethod
+ def create_repo_notification(self, namespace_name, repository_name, event_name, method_name,
+ method_config, event_config, title=None):
+ """
+
+ Args:
+ namespace_name: namespace of repository
+ repository_name: name of repository
+ event_name: name of event
+ method_name: name of method
+ method_config: method config, json string
+ event_config: event config, json string
+ title: title of the notification
+
+ Returns:
+ RepositoryNotification object
+
+ """
+ pass
+
+ @abstractmethod
+ def list_repo_notifications(self, namespace_name, repository_name, event_name=None):
+ """
+
+ Args:
+ namespace_name: namespace of repository
+ repository_name: name of repository
+ event_name: name of event
+
+ Returns:
+ list(RepositoryNotification)
+ """
+ pass
+
+ @abstractmethod
+ def get_repo_notification(self, uuid):
+ """
+
+ Args:
+ uuid: uuid of notification
+
+ Returns:
+ RepositoryNotification or None
+
+ """
+ pass
+
+ @abstractmethod
+ def delete_repo_notification(self, namespace_name, repository_name, uuid):
+ """
+
+ Args:
+ namespace_name: namespace of repository
+ repository_name: name of repository
+ uuid: uuid of notification
+
+ Returns:
+ RepositoryNotification or None
+
+ """
+ pass
+
+ @abstractmethod
+ def reset_notification_number_of_failures(self, namespace_name, repository_name, uuid):
+ """
+
+ Args:
+ namespace_name: namespace of repository
+ repository_name: name of repository
+ uuid: uuid of notification
+
+ Returns:
+ RepositoryNotification
+
+ """
+ pass
+
+ @abstractmethod
+ def queue_test_notification(self, uuid):
+ """
+
+ Args:
+ uuid: uuid of notification
+
+ Returns:
+ RepositoryNotification or None
+
+ """
+ pass
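
RepositoryNotification.to_dict above decodes the stored config blobs defensively, treating malformed JSON as an empty config instead of failing the whole response. The same fallback in isolation (the function name is hypothetical):

    import json


    def load_config(config_json):
      # Stored blobs may be empty or malformed; degrade to {} rather than raise.
      try:
        return json.loads(config_json)
      except ValueError:
        return {}


    assert load_config('{"url": "http://example.com"}') == {'url': 'http://example.com'}
    assert load_config('') == {}
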
diff --git a/endpoints/api/repositorynotification_models_pre_oci.py b/endpoints/api/repositorynotification_models_pre_oci.py
new file mode 100644
index 000000000..b3edf43ae
--- /dev/null
+++ b/endpoints/api/repositorynotification_models_pre_oci.py
@@ -0,0 +1,72 @@
+import json
+
+from app import notification_queue
+from data import model
+from data.model import InvalidNotificationException
+from endpoints.api.repositorynotification_models_interface import (RepoNotificationInterface,
+ RepositoryNotification)
+from notifications import build_notification_data
+from notifications.notificationevent import NotificationEvent
+
+
+class RepoNotificationPreOCIModel(RepoNotificationInterface):
+ def create_repo_notification(self, namespace_name, repository_name, event_name, method_name,
+ method_config, event_config, title=None):
+ repository = model.repository.get_repository(namespace_name, repository_name)
+ return self._notification(
+ model.notification.create_repo_notification(repository, event_name, method_name,
+ method_config, event_config, title))
+
+ def list_repo_notifications(self, namespace_name, repository_name, event_name=None):
+ return [
+ self._notification(n)
+ for n in model.notification.list_repo_notifications(namespace_name, repository_name,
+ event_name)]
+
+ def get_repo_notification(self, uuid):
+ try:
+ found = model.notification.get_repo_notification(uuid)
+ except InvalidNotificationException:
+ return None
+ return self._notification(found)
+
+ def delete_repo_notification(self, namespace_name, repository_name, uuid):
+ try:
+ found = model.notification.delete_repo_notification(namespace_name, repository_name, uuid)
+ except InvalidNotificationException:
+ return None
+ return self._notification(found)
+
+ def reset_notification_number_of_failures(self, namespace_name, repository_name, uuid):
+ return self._notification(
+ model.notification.reset_notification_number_of_failures(namespace_name, repository_name,
+ uuid))
+
+ def queue_test_notification(self, uuid):
+ try:
+ notification = model.notification.get_repo_notification(uuid)
+ except InvalidNotificationException:
+ return None
+
+ event_config = json.loads(notification.event_config_json or '{}')
+ event_info = NotificationEvent.get_event(notification.event.name)
+ sample_data = event_info.get_sample_data(notification.repository.namespace_user.username,
+ notification.repository.name, event_config)
+ notification_data = build_notification_data(notification, sample_data)
+ notification_queue.put([
+ notification.repository.namespace_user.username, notification.uuid, notification.event.name],
+ json.dumps(notification_data))
+ return self._notification(notification)
+
+ def _notification(self, notification):
+ if not notification:
+ return None
+
+ return RepositoryNotification(
+ uuid=notification.uuid, title=notification.title, event_name=notification.event.name,
+ method_name=notification.method.name, config_json=notification.config_json,
+ event_config_json=notification.event_config_json,
+ number_of_failures=notification.number_of_failures)
+
+
+pre_oci_model = RepoNotificationPreOCIModel()
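
A recurring move in this file: the data layer raises InvalidNotificationException for unknown UUIDs, and the pre-OCI model converts that into None so the resource layer can choose the error response. A minimal sketch of that boundary (all names other than the exception are hypothetical):

    class InvalidNotificationException(Exception):
      """ Stand-in for the exception exported by data.model. """


    def lookup_or_none(uuid, lookup):
      # Translate the data layer's exception into an Optional-style return value.
      try:
        return lookup(uuid)
      except InvalidNotificationException:
        return None


    def fake_lookup(uuid):
      if uuid != 'known':
        raise InvalidNotificationException(uuid)
      return {'uuid': uuid}


    assert lookup_or_none('known', fake_lookup) == {'uuid': 'known'}
    assert lookup_or_none('bogus', fake_lookup) is None
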
diff --git a/endpoints/api/repotoken.py b/endpoints/api/repotoken.py
index 5ef427bf9..efa25a2fb 100644
--- a/endpoints/api/repotoken.py
+++ b/endpoints/api/repotoken.py
@@ -2,25 +2,11 @@
import logging
-from flask import request
-
from endpoints.api import (resource, nickname, require_repo_admin, RepositoryParamResource,
- log_action, validate_json_request, path_param)
-from endpoints.exception import NotFound
-from data import model
-
+ validate_json_request, path_param)
logger = logging.getLogger(__name__)
-
-def token_view(token_obj):
- return {
- 'friendlyName': token_obj.friendly_name,
- 'code': token_obj.code,
- 'role': token_obj.role.name,
- }
-
-
@resource('/v1/repository/<apirepopath:repository>/tokens/')
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
class RepositoryTokenList(RepositoryParamResource):
@@ -43,28 +29,21 @@ class RepositoryTokenList(RepositoryParamResource):
@require_repo_admin
@nickname('listRepoTokens')
- def get(self, namespace, repository):
+ def get(self, namespace_name, repo_name):
""" List the tokens for the specified repository. """
- tokens = model.token.get_repository_delegate_tokens(namespace, repository)
-
return {
- 'tokens': {token.code: token_view(token) for token in tokens}
- }
+ 'message': 'Handling of access tokens is no longer supported',
+ }, 410
+
@require_repo_admin
@nickname('createToken')
@validate_json_request('NewToken')
- def post(self, namespace, repository):
+ def post(self, namespace_name, repo_name):
""" Create a new repository token. """
- token_params = request.get_json()
-
- token = model.token.create_delegate_token(namespace, repository, token_params['friendlyName'])
-
- log_action('add_repo_accesstoken', namespace,
- {'repo': repository, 'token': token_params['friendlyName']},
- repo=model.repository.get_repository(namespace, repository))
-
- return token_view(token), 201
+ return {
+ 'message': 'Creation of access tokens is no longer supported',
+ }, 410
@resource('/v1/repository/<apirepopath:repository>/tokens/<code>')
@@ -92,46 +71,30 @@ class RepositoryToken(RepositoryParamResource):
},
},
}
+
@require_repo_admin
@nickname('getTokens')
- def get(self, namespace, repository, code):
+ def get(self, namespace_name, repo_name, code):
""" Fetch the specified repository token information. """
- try:
- perm = model.token.get_repo_delegate_token(namespace, repository, code)
- except model.InvalidTokenException:
- raise NotFound()
+ return {
+ 'message': 'Handling of access tokens is no longer supported',
+ }, 410
- return token_view(perm)
@require_repo_admin
@nickname('changeToken')
@validate_json_request('TokenPermission')
- def put(self, namespace, repository, code):
+ def put(self, namespace_name, repo_name, code):
""" Update the permissions for the specified repository token. """
- new_permission = request.get_json()
+ return {
+ 'message': 'Handling of access tokens is no longer supported',
+ }, 410
- logger.debug('Setting permission to: %s for code %s' %
- (new_permission['role'], code))
-
- token = model.token.set_repo_delegate_token_role(namespace, repository, code,
- new_permission['role'])
-
- log_action('change_repo_permission', namespace,
- {'repo': repository, 'token': token.friendly_name, 'code': code,
- 'role': new_permission['role']},
- repo=model.repository.get_repository(namespace, repository))
-
- return token_view(token)
@require_repo_admin
@nickname('deleteToken')
- def delete(self, namespace, repository, code):
+ def delete(self, namespace_name, repo_name, code):
""" Delete the repository token. """
- token = model.token.delete_delegate_token(namespace, repository, code)
-
- log_action('delete_repo_accesstoken', namespace,
- {'repo': repository, 'token': token.friendly_name,
- 'code': code},
- repo=model.repository.get_repository(namespace, repository))
-
- return '', 204
+ return {
+ 'message': 'Handling of access tokens is no longer supported',
+ }, 410
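
The token endpoints above now answer every verb with 410 Gone, telling old clients the feature is permanently removed rather than temporarily missing. A tiny standalone Flask sketch of the same behavior (the route and names here are illustrative, not Quay's actual resource wiring):

    from flask import Flask, jsonify

    app = Flask(__name__)

    GONE = {'message': 'Handling of access tokens is no longer supported'}


    @app.route('/v1/repository/<path:repository>/tokens/', methods=['GET', 'POST'])
    def repo_tokens(repository):
      # 410 marks the resource as permanently gone, unlike a retryable 404.
      return jsonify(GONE), 410
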
diff --git a/endpoints/api/robot.py b/endpoints/api/robot.py
index 8f1cbde73..867329323 100644
--- a/endpoints/api/robot.py
+++ b/endpoints/api/robot.py
@@ -2,88 +2,64 @@
from endpoints.api import (resource, nickname, ApiResource, log_action, related_user_resource,
require_user_admin, require_scope, path_param, parse_args,
- truthy_bool, query_param)
+ truthy_bool, query_param, validate_json_request, max_json_size)
+from endpoints.api.robot_models_pre_oci import pre_oci_model as model
from endpoints.exception import Unauthorized
from auth.permissions import AdministerOrganizationPermission, OrganizationMemberPermission
from auth.auth_context import get_authenticated_user
from auth import scopes
-from data import model
-from data.database import User, Team, Repository, FederatedLogin
from util.names import format_robot_username
-from flask import abort
-from app import avatar
-
-def robot_view(name, token):
- return {
- 'name': name,
- 'token': token
- }
+from flask import abort, request
-def permission_view(permission):
- return {
- 'repository': {
- 'name': permission.repository.name,
- 'is_public': permission.repository.visibility.name == 'public'
+CREATE_ROBOT_SCHEMA = {
+ 'type': 'object',
+ 'description': 'Optional data for creating a robot',
+ 'properties': {
+ 'description': {
+ 'type': 'string',
+ 'description': 'Optional text description for the robot',
+ 'maxLength': 255,
},
- 'role': permission.role.name
- }
+ 'unstructured_metadata': {
+ 'type': 'object',
+ 'description': 'Optional unstructured metadata for the robot',
+ },
+ },
+}
+
+ROBOT_MAX_SIZE = 1024 * 1024  # 1 MB.
-def robots_list(prefix, include_permissions=False):
- tuples = model.user.list_entity_robot_permission_teams(prefix,
- include_permissions=include_permissions)
+def robots_list(prefix, include_permissions=False, include_token=False, limit=None):
+ robots = model.list_entity_robot_permission_teams(prefix, limit=limit,
+ include_token=include_token,
+ include_permissions=include_permissions)
+ return {'robots': [robot.to_dict(include_token=include_token) for robot in robots]}
- robots = {}
- robot_teams = set()
-
- for robot_tuple in tuples:
- robot_name = robot_tuple.get(User.username)
- if not robot_name in robots:
- robots[robot_name] = {
- 'name': robot_name,
- 'token': robot_tuple.get(FederatedLogin.service_ident)
- }
-
- if include_permissions:
- robots[robot_name].update({
- 'teams': [],
- 'repositories': []
- })
-
- if include_permissions:
- team_name = robot_tuple.get(Team.name)
- repository_name = robot_tuple.get(Repository.name)
-
- if team_name is not None:
- check_key = robot_name + ':' + team_name
- if not check_key in robot_teams:
- robot_teams.add(check_key)
-
- robots[robot_name]['teams'].append({
- 'name': team_name,
- 'avatar': avatar.get_data(team_name, team_name, 'team')
- })
-
- if repository_name is not None:
- if not repository_name in robots[robot_name]['repositories']:
- robots[robot_name]['repositories'].append(repository_name)
-
- return {'robots': robots.values()}
@resource('/v1/user/robots')
class UserRobotList(ApiResource):
""" Resource for listing user robots. """
+
@require_user_admin
@nickname('getUserRobots')
@parse_args()
@query_param('permissions',
- 'Whether to include repostories and teams in which the robots have permission.',
+ 'Whether to include repositories and teams in which the robots have permission.',
type=truthy_bool, default=False)
+ @query_param('token',
+ 'If false, the robot\'s token is not returned.',
+ type=truthy_bool, default=True)
+ @query_param('limit',
+ 'If specified, the number of robots to return.',
+ type=int, default=None)
def get(self, parsed_args):
""" List the available robots for the user. """
user = get_authenticated_user()
- return robots_list(user.username, include_permissions=parsed_args.get('permissions', False))
+ return robots_list(user.username, include_token=parsed_args.get('token', True),
+ include_permissions=parsed_args.get('permissions', False),
+ limit=parsed_args.get('limit'))
@resource('/v1/user/robots/<robot_shortname>')
@@ -91,29 +67,41 @@ class UserRobotList(ApiResource):
'The short name for the robot, without any user or organization prefix')
class UserRobot(ApiResource):
""" Resource for managing a user's robots. """
+ schemas = {
+ 'CreateRobot': CREATE_ROBOT_SCHEMA,
+ }
+
@require_user_admin
@nickname('getUserRobot')
def get(self, robot_shortname):
""" Returns the user's robot with the specified name. """
parent = get_authenticated_user()
- robot, password = model.user.get_robot(robot_shortname, parent)
- return robot_view(robot.username, password)
+ robot = model.get_user_robot(robot_shortname, parent)
+ return robot.to_dict(include_metadata=True, include_token=True)
@require_user_admin
@nickname('createUserRobot')
+ @max_json_size(ROBOT_MAX_SIZE)
+ @validate_json_request('CreateRobot', optional=True)
def put(self, robot_shortname):
""" Create a new user robot with the specified name. """
parent = get_authenticated_user()
- robot, password = model.user.create_robot(robot_shortname, parent)
- log_action('create_robot', parent.username, {'robot': robot_shortname})
- return robot_view(robot.username, password), 201
+ create_data = request.get_json() or {}
+ robot = model.create_user_robot(robot_shortname, parent, create_data.get('description'),
+ create_data.get('unstructured_metadata'))
+ log_action('create_robot', parent.username, {
+ 'robot': robot_shortname,
+ 'description': create_data.get('description'),
+ 'unstructured_metadata': create_data.get('unstructured_metadata'),
+ })
+ return robot.to_dict(include_metadata=True, include_token=True), 201
@require_user_admin
@nickname('deleteUserRobot')
def delete(self, robot_shortname):
""" Delete an existing robot. """
parent = get_authenticated_user()
- model.user.delete_robot(format_robot_username(parent.username, robot_shortname))
+ model.delete_robot(format_robot_username(parent.username, robot_shortname))
log_action('delete_robot', parent.username, {'robot': robot_shortname})
return '', 204
@@ -123,17 +111,30 @@ class UserRobot(ApiResource):
@related_user_resource(UserRobotList)
class OrgRobotList(ApiResource):
""" Resource for listing an organization's robots. """
+
@require_scope(scopes.ORG_ADMIN)
@nickname('getOrgRobots')
@parse_args()
@query_param('permissions',
'Whether to include repositories and teams in which the robots have permission.',
type=truthy_bool, default=False)
+ @query_param('token',
+ 'If false, the robot\'s token is not returned.',
+ type=truthy_bool, default=True)
+ @query_param('limit',
+ 'If specified, the number of robots to return.',
+ type=int, default=None)
def get(self, orgname, parsed_args):
""" List the organization's robots. """
permission = OrganizationMemberPermission(orgname)
if permission.can():
- return robots_list(orgname, include_permissions=parsed_args.get('permissions', False))
+ include_token = (AdministerOrganizationPermission(orgname).can() and
+ parsed_args.get('token', True))
+ include_permissions = (AdministerOrganizationPermission(orgname).can() and
+ parsed_args.get('permissions', False))
+ return robots_list(orgname, include_permissions=include_permissions,
+ include_token=include_token,
+ limit=parsed_args.get('limit'))
raise Unauthorized()
@@ -145,28 +146,38 @@ class OrgRobotList(ApiResource):
@related_user_resource(UserRobot)
class OrgRobot(ApiResource):
""" Resource for managing an organization's robots. """
+ schemas = {
+ 'CreateRobot': CREATE_ROBOT_SCHEMA,
+ }
+
@require_scope(scopes.ORG_ADMIN)
@nickname('getOrgRobot')
def get(self, orgname, robot_shortname):
""" Returns the organization's robot with the specified name. """
permission = AdministerOrganizationPermission(orgname)
if permission.can():
- parent = model.organization.get_organization(orgname)
- robot, password = model.user.get_robot(robot_shortname, parent)
- return robot_view(robot.username, password)
+ robot = model.get_org_robot(robot_shortname, orgname)
+ return robot.to_dict(include_metadata=True, include_token=True)
raise Unauthorized()
@require_scope(scopes.ORG_ADMIN)
@nickname('createOrgRobot')
+ @max_json_size(ROBOT_MAX_SIZE)
+ @validate_json_request('CreateRobot', optional=True)
def put(self, orgname, robot_shortname):
""" Create a new robot in the organization. """
permission = AdministerOrganizationPermission(orgname)
if permission.can():
- parent = model.organization.get_organization(orgname)
- robot, password = model.user.create_robot(robot_shortname, parent)
- log_action('create_robot', orgname, {'robot': robot_shortname})
- return robot_view(robot.username, password), 201
+ create_data = request.get_json() or {}
+ robot = model.create_org_robot(robot_shortname, orgname, create_data.get('description'),
+ create_data.get('unstructured_metadata'))
+ log_action('create_robot', orgname, {
+ 'robot': robot_shortname,
+ 'description': create_data.get('description'),
+ 'unstructured_metadata': create_data.get('unstructured_metadata'),
+ })
+ return robot.to_dict(include_metadata=True, include_token=True), 201
raise Unauthorized()
@@ -176,7 +187,7 @@ class OrgRobot(ApiResource):
""" Delete an existing organization robot. """
permission = AdministerOrganizationPermission(orgname)
if permission.can():
- model.user.delete_robot(format_robot_username(orgname, robot_shortname))
+ model.delete_robot(format_robot_username(orgname, robot_shortname))
log_action('delete_robot', orgname, {'robot': robot_shortname})
return '', 204
@@ -188,16 +199,17 @@ class OrgRobot(ApiResource):
'The short name for the robot, without any user or organization prefix')
class UserRobotPermissions(ApiResource):
""" Resource for listing the permissions a user's robot has in the system. """
+
@require_user_admin
@nickname('getUserRobotPermissions')
def get(self, robot_shortname):
""" Returns the list of repository permissions for the user's robot. """
parent = get_authenticated_user()
- robot, _ = model.user.get_robot(robot_shortname, parent)
- permissions = model.permission.list_robot_permissions(robot.username)
+ robot = model.get_user_robot(robot_shortname, parent)
+ permissions = model.list_robot_permissions(robot.name)
return {
- 'permissions': [permission_view(permission) for permission in permissions]
+ 'permissions': [permission.to_dict() for permission in permissions]
}
@@ -208,18 +220,18 @@ class UserRobotPermissions(ApiResource):
@related_user_resource(UserRobotPermissions)
class OrgRobotPermissions(ApiResource):
""" Resource for listing the permissions an org's robot has in the system. """
+
@require_user_admin
@nickname('getOrgRobotPermissions')
def get(self, orgname, robot_shortname):
""" Returns the list of repository permissions for the org's robot. """
permission = AdministerOrganizationPermission(orgname)
if permission.can():
- parent = model.organization.get_organization(orgname)
- robot, _ = model.user.get_robot(robot_shortname, parent)
- permissions = model.permission.list_robot_permissions(robot.username)
+ robot = model.get_org_robot(robot_shortname, orgname)
+ permissions = model.list_robot_permissions(robot.name)
return {
- 'permissions': [permission_view(permission) for permission in permissions]
+ 'permissions': [permission.to_dict() for permission in permissions]
}
abort(403)
@@ -230,14 +242,15 @@ class OrgRobotPermissions(ApiResource):
'The short name for the robot, without any user or organization prefix')
class RegenerateUserRobot(ApiResource):
""" Resource for regenerate an organization's robot's token. """
+
@require_user_admin
@nickname('regenerateUserRobotToken')
def post(self, robot_shortname):
""" Regenerates the token for a user's robot. """
parent = get_authenticated_user()
- robot, password = model.user.regenerate_robot_token(robot_shortname, parent)
+ robot = model.regenerate_user_robot_token(robot_shortname, parent)
log_action('regenerate_robot_token', parent.username, {'robot': robot_shortname})
- return robot_view(robot.username, password)
+ return robot.to_dict(include_token=True)
@resource('/v1/organization/<orgname>/robots/<robot_shortname>/regenerate')
@@ -247,15 +260,15 @@ class RegenerateUserRobot(ApiResource):
@related_user_resource(RegenerateUserRobot)
class RegenerateOrgRobot(ApiResource):
""" Resource for regenerate an organization's robot's token. """
+
@require_scope(scopes.ORG_ADMIN)
@nickname('regenerateOrgRobotToken')
def post(self, orgname, robot_shortname):
""" Regenerates the token for an organization robot. """
permission = AdministerOrganizationPermission(orgname)
if permission.can():
- parent = model.organization.get_organization(orgname)
- robot, password = model.user.regenerate_robot_token(robot_shortname, parent)
+ robot = model.regenerate_org_robot_token(robot_shortname, orgname)
log_action('regenerate_robot_token', orgname, {'robot': robot_shortname})
- return robot_view(robot.username, password)
+ return robot.to_dict(include_token=True)
raise Unauthorized()
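The robot creation endpoints now accept an optional JSON body (validated against CREATE_ROBOT_SCHEMA and capped at ROBOT_MAX_SIZE) and return the richer to_dict() view. A sketch of creating a robot with a description and metadata, again with a hypothetical host and placeholder token:

# Sketch only: hypothetical host and credentials.
import requests

resp = requests.put(
    'https://quay.example.com/api/v1/user/robots/deploybot',
    json={
        'description': 'CI deploy robot',
        'unstructured_metadata': {'team': 'infra'},
    },
    headers={'Authorization': 'Bearer <oauth-token>'})  # placeholder token
robot = resp.json()
print(robot['name'], robot['token'])  # token present: the view passes include_token=True
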
diff --git a/endpoints/api/robot_models_interface.py b/endpoints/api/robot_models_interface.py
new file mode 100644
index 000000000..c4a07d304
--- /dev/null
+++ b/endpoints/api/robot_models_interface.py
@@ -0,0 +1,196 @@
+from abc import ABCMeta, abstractmethod
+from collections import namedtuple
+
+from six import add_metaclass
+
+from endpoints.api import format_date
+
+
+class Permission(namedtuple('Permission', ['repository_name', 'repository_visibility_name', 'role_name'])):
+ """
+  Permission represents the relationship between a robot and a repository, and whether that robot can see the repo.
+ """
+
+ def to_dict(self):
+ return {
+ 'repository': {
+ 'name': self.repository_name,
+ 'is_public': self.repository_visibility_name == 'public'
+ },
+ 'role': self.role_name
+ }
+
+
+class Team(namedtuple('Team', ['name', 'avatar'])):
+ """
+ Team represents a team entry for a robot list entry.
+ :type name: string
+ :type avatar: {string -> string}
+ """
+ def to_dict(self):
+ return {
+ 'name': self.name,
+ 'avatar': self.avatar,
+ }
+
+
+class RobotWithPermissions(
+ namedtuple('RobotWithPermissions', [
+ 'name',
+ 'password',
+ 'created',
+ 'last_accessed',
+ 'teams',
+ 'repository_names',
+ 'description',
+ ])):
+ """
+  RobotWithPermissions represents a robot entry together with the teams and repositories it can access.
+ :type name: string
+ :type password: string
+ :type created: datetime|None
+ :type last_accessed: datetime|None
+ :type teams: [Team]
+ :type repository_names: [string]
+ :type description: string
+ """
+
+ def to_dict(self, include_token=False):
+ data = {
+ 'name': self.name,
+ 'created': format_date(self.created) if self.created is not None else None,
+ 'last_accessed': format_date(self.last_accessed) if self.last_accessed is not None else None,
+ 'teams': [team.to_dict() for team in self.teams],
+ 'repositories': self.repository_names,
+ 'description': self.description,
+ }
+
+ if include_token:
+ data['token'] = self.password
+
+ return data
+
+
+class Robot(
+ namedtuple('Robot', [
+ 'name',
+ 'password',
+ 'created',
+ 'last_accessed',
+ 'description',
+ 'unstructured_metadata',
+ ])):
+ """
+ Robot represents a robot entity.
+ :type name: string
+ :type password: string
+ :type created: datetime|None
+ :type last_accessed: datetime|None
+ :type description: string
+ :type unstructured_metadata: dict
+ """
+
+ def to_dict(self, include_metadata=False, include_token=False):
+ data = {
+ 'name': self.name,
+ 'created': format_date(self.created) if self.created is not None else None,
+ 'last_accessed': format_date(self.last_accessed) if self.last_accessed is not None else None,
+ 'description': self.description,
+ }
+
+ if include_token:
+ data['token'] = self.password
+
+ if include_metadata:
+ data['unstructured_metadata'] = self.unstructured_metadata
+
+ return data
+
+
+@add_metaclass(ABCMeta)
+class RobotInterface(object):
+ """
+ Interface that represents all data store interactions required by the Robot API
+ """
+
+ @abstractmethod
+ def get_org_robot(self, robot_shortname, orgname):
+ """
+
+ Returns:
+ Robot object
+
+ """
+
+ @abstractmethod
+ def get_user_robot(self, robot_shortname, owning_user):
+ """
+
+ Returns:
+ Robot object
+
+ """
+
+ @abstractmethod
+  def create_user_robot(self, robot_shortname, owning_user, description, unstructured_metadata):
+ """
+
+ Returns:
+ Robot object
+
+ """
+
+ @abstractmethod
+  def create_org_robot(self, robot_shortname, orgname, description, unstructured_metadata):
+ """
+
+ Returns:
+ Robot object
+
+ """
+
+ @abstractmethod
+ def delete_robot(self, robot_username):
+ """
+
+ Returns:
+      None
+
+ """
+
+ @abstractmethod
+ def regenerate_user_robot_token(self, robot_shortname, owning_user):
+ """
+
+ Returns:
+ Robot object
+
+ """
+
+ @abstractmethod
+ def regenerate_org_robot_token(self, robot_shortname, orgname):
+ """
+
+ Returns:
+ Robot object
+
+ """
+
+ @abstractmethod
+ def list_entity_robot_permission_teams(self, prefix, include_permissions=False,
+ include_token=False, limit=None):
+ """
+
+ Returns:
+ list of RobotWithPermissions objects
+
+ """
+
+ @abstractmethod
+ def list_robot_permissions(self, username):
+ """
+
+ Returns:
+      list of Permission objects
+
+ """
diff --git a/endpoints/api/robot_models_pre_oci.py b/endpoints/api/robot_models_pre_oci.py
new file mode 100644
index 000000000..ad83decdf
--- /dev/null
+++ b/endpoints/api/robot_models_pre_oci.py
@@ -0,0 +1,123 @@
+import features
+
+from app import avatar
+from data import model
+from active_migration import ActiveDataMigration, ERTMigrationFlags
+from data.database import (User, FederatedLogin, RobotAccountToken, Team as TeamTable, Repository,
+ RobotAccountMetadata)
+from endpoints.api.robot_models_interface import (RobotInterface, Robot, RobotWithPermissions, Team,
+ Permission)
+
+
+class RobotPreOCIModel(RobotInterface):
+ def list_robot_permissions(self, username):
+ permissions = model.permission.list_robot_permissions(username)
+ return [Permission(permission.repository.name, permission.repository.visibility.name, permission.role.name) for
+ permission in permissions]
+
+ def list_entity_robot_permission_teams(self, prefix, include_token=False,
+ include_permissions=False, limit=None):
+ tuples = model.user.list_entity_robot_permission_teams(prefix, limit=limit,
+ include_permissions=include_permissions)
+ robots = {}
+ robot_teams = set()
+
+ for robot_tuple in tuples:
+ robot_name = robot_tuple.get(User.username)
+ if robot_name not in robots:
+ token = None
+ if include_token:
+ # TODO(remove-unenc): Remove branches once migrated.
+ if robot_tuple.get(RobotAccountToken.token):
+ token = robot_tuple.get(RobotAccountToken.token).decrypt()
+
+ if token is None and ActiveDataMigration.has_flag(ERTMigrationFlags.READ_OLD_FIELDS):
+ token = robot_tuple.get(FederatedLogin.service_ident)
+ assert not token.startswith('robot:')
+
+ robot_dict = {
+ 'name': robot_name,
+ 'token': token,
+ 'created': robot_tuple.get(User.creation_date),
+ 'last_accessed': (robot_tuple.get(User.last_accessed)
+ if features.USER_LAST_ACCESSED else None),
+ 'description': robot_tuple.get(RobotAccountMetadata.description),
+ 'unstructured_metadata': robot_tuple.get(RobotAccountMetadata.unstructured_json),
+ }
+
+ if include_permissions:
+ robot_dict.update({
+ 'teams': [],
+ 'repositories': [],
+ })
+
+ robots[robot_name] = Robot(robot_dict['name'], robot_dict['token'], robot_dict['created'],
+ robot_dict['last_accessed'], robot_dict['description'],
+ robot_dict['unstructured_metadata'])
+ if include_permissions:
+ team_name = robot_tuple.get(TeamTable.name)
+ repository_name = robot_tuple.get(Repository.name)
+
+ if team_name is not None:
+ check_key = robot_name + ':' + team_name
+ if check_key not in robot_teams:
+ robot_teams.add(check_key)
+
+ robot_dict['teams'].append(Team(
+ team_name,
+ avatar.get_data(team_name, team_name, 'team')
+ ))
+
+ if repository_name is not None:
+ if repository_name not in robot_dict['repositories']:
+ robot_dict['repositories'].append(repository_name)
+ robots[robot_name] = RobotWithPermissions(robot_dict['name'], robot_dict['token'],
+ robot_dict['created'],
+ (robot_dict['last_accessed']
+ if features.USER_LAST_ACCESSED else None),
+ robot_dict['teams'],
+ robot_dict['repositories'],
+ robot_dict['description'])
+
+ return robots.values()
+
+ def regenerate_user_robot_token(self, robot_shortname, owning_user):
+ robot, password, metadata = model.user.regenerate_robot_token(robot_shortname, owning_user)
+ return Robot(robot.username, password, robot.creation_date, robot.last_accessed,
+ metadata.description, metadata.unstructured_json)
+
+ def regenerate_org_robot_token(self, robot_shortname, orgname):
+ parent = model.organization.get_organization(orgname)
+ robot, password, metadata = model.user.regenerate_robot_token(robot_shortname, parent)
+ return Robot(robot.username, password, robot.creation_date, robot.last_accessed,
+ metadata.description, metadata.unstructured_json)
+
+ def delete_robot(self, robot_username):
+ model.user.delete_robot(robot_username)
+
+ def create_user_robot(self, robot_shortname, owning_user, description, unstructured_metadata):
+ robot, password = model.user.create_robot(robot_shortname, owning_user, description or '',
+ unstructured_metadata)
+ return Robot(robot.username, password, robot.creation_date, robot.last_accessed,
+ description or '', unstructured_metadata)
+
+ def create_org_robot(self, robot_shortname, orgname, description, unstructured_metadata):
+ parent = model.organization.get_organization(orgname)
+ robot, password = model.user.create_robot(robot_shortname, parent, description or '',
+ unstructured_metadata)
+ return Robot(robot.username, password, robot.creation_date, robot.last_accessed,
+ description or '', unstructured_metadata)
+
+ def get_org_robot(self, robot_shortname, orgname):
+ parent = model.organization.get_organization(orgname)
+ robot, password, metadata = model.user.get_robot_and_metadata(robot_shortname, parent)
+ return Robot(robot.username, password, robot.creation_date, robot.last_accessed,
+ metadata.description, metadata.unstructured_json)
+
+ def get_user_robot(self, robot_shortname, owning_user):
+ robot, password, metadata = model.user.get_robot_and_metadata(robot_shortname, owning_user)
+ return Robot(robot.username, password, robot.creation_date, robot.last_accessed,
+ metadata.description, metadata.unstructured_json)
+
+
+pre_oci_model = RobotPreOCIModel()
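Because robot.py imports pre_oci_model under the name model, tests can swap in a duck-typed fake without touching the data layer. A minimal pytest-style sketch (FakeModel and the test body are hypothetical):

import endpoints.api.robot as robot_api
from endpoints.api.robot_models_interface import Robot

class FakeModel(object):
    def get_user_robot(self, robot_shortname, parent):
        return Robot('test+' + robot_shortname, 'token123', None, None, '', {})

def test_get_user_robot(monkeypatch):
    monkeypatch.setattr(robot_api, 'model', FakeModel())
    # ... drive UserRobot.get through the Flask test client and assert on the dict ...
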
diff --git a/endpoints/api/search.py b/endpoints/api/search.py
index 018ab713c..0ddbbc3fa 100644
--- a/endpoints/api/search.py
+++ b/endpoints/api/search.py
@@ -1,15 +1,19 @@
""" Conduct searches against all registry context. """
+import features
+
from endpoints.api import (ApiResource, parse_args, query_param, truthy_bool, nickname, resource,
require_scope, path_param, internal_only, Unauthorized, InvalidRequest,
show_if)
+from data.database import Repository
from data import model
+from data.registry_model import registry_model
from auth.permissions import (OrganizationMemberPermission, ReadRepositoryPermission,
UserAdminPermission, AdministerOrganizationPermission,
ReadRepositoryPermission)
from auth.auth_context import get_authenticated_user
from auth import scopes
-from app import avatar, authentication
+from app import app, avatar, authentication
from flask import abort
from operator import itemgetter
from stringscore import liquidmetal
@@ -18,6 +22,12 @@ from util.names import parse_robot_username
import anunidecode # Don't listen to pylint's lies. This import is required.
import math
+
+ENTITY_SEARCH_SCORE = 1
+TEAM_SEARCH_SCORE = 2
+REPOSITORY_SEARCH_SCORE = 4
+
+
@resource('/v1/entities/link/<username>')
@internal_only
class LinkExternalEntity(ApiResource):
@@ -100,7 +110,8 @@ class EntitySearch(ApiResource):
robot_namespace = namespace_name
# Lookup users in the database for the prefix query.
- users = model.user.get_matching_users(prefix, robot_namespace, organization, limit=10)
+ users = model.user.get_matching_users(prefix, robot_namespace, organization, limit=10,
+ exact_matches_only=not features.PARTIAL_USER_AUTOCOMPLETE)
# Lookup users via the user system for the prefix query. We'll filter out any users that
# already exist in the database.
@@ -158,11 +169,13 @@ class EntitySearch(ApiResource):
def search_entity_view(username, entity, get_short_name=None):
kind = 'user'
+ title = 'user'
avatar_data = avatar.get_data_for_user(entity)
href = '/user/' + entity.username
if entity.organization:
kind = 'organization'
+ title = 'org'
avatar_data = avatar.get_data_for_org(entity)
href = '/organization/' + entity.username
elif entity.robot:
@@ -173,13 +186,15 @@ def search_entity_view(username, entity, get_short_name=None):
href = '/organization/' + parts[0] + '?tab=robots&showRobot=' + entity.username
kind = 'robot'
+ title = 'robot'
avatar_data = None
data = {
+ 'title': title,
'kind': kind,
'avatar': avatar_data,
'name': entity.username,
- 'score': 1,
+ 'score': ENTITY_SEARCH_SCORE,
'href': href
}
@@ -203,7 +218,7 @@ def conduct_team_search(username, query, encountered_teams, results):
'name': team.name,
'organization': search_entity_view(username, team.organization),
'avatar': avatar.get_data_for_team(team),
- 'score': 2,
+ 'score': TEAM_SEARCH_SCORE,
'href': '/organization/' + team.organization.username + '/teams/' + team.name
})
@@ -222,40 +237,20 @@ def conduct_admined_team_search(username, query, encountered_teams, results):
'name': team.name,
'organization': search_entity_view(username, team.organization),
'avatar': avatar.get_data_for_team(team),
- 'score': 2,
+ 'score': TEAM_SEARCH_SCORE,
'href': '/organization/' + team.organization.username + '/teams/' + team.name
})
-def conduct_repo_search(username, query, results):
+def conduct_repo_search(username, query, results, offset=0, limit=5):
""" Finds matching repositories. """
- def can_read(repo):
- if repo.is_public:
- return True
-
- return ReadRepositoryPermission(repo.namespace_user.username, repo.name).can()
-
- only_public = username is None
- matching_repos = model.repository.get_sorted_matching_repositories(query, only_public, can_read,
- limit=5)
+ matching_repos = model.repository.get_filtered_matching_repositories(query, username, limit=limit,
+ repo_kind=None,
+ offset=offset)
for repo in matching_repos:
- repo_score = math.log(repo.count or 1, 10) or 1
-
- # If the repository is under the user's namespace, give it 20% more weight.
- namespace = repo.namespace_user.username
- if OrganizationMemberPermission(namespace).can() or namespace == username:
- repo_score = repo_score * 1.2
-
- results.append({
- 'kind': 'repository',
- 'namespace': search_entity_view(username, repo.namespace_user),
- 'name': repo.name,
- 'description': repo.description,
- 'is_public': repo.is_public,
- 'score': repo_score,
- 'href': '/repository/' + repo.namespace_user.username + '/' + repo.name
- })
+ # TODO: make sure the repo.kind.name doesn't cause extra queries
+ results.append(repo_result_view(repo, username))
def conduct_namespace_search(username, query, results):
@@ -275,6 +270,30 @@ def conduct_robot_search(username, query, results):
results.append(search_entity_view(username, robot, get_short_name))
+def repo_result_view(repo, username, last_modified=None, stars=None, popularity=None):
+ kind = 'application' if Repository.kind.get_name(repo.kind_id) == 'application' else 'repository'
+ view = {
+ 'kind': kind,
+ 'title': 'app' if kind == 'application' else 'repo',
+ 'namespace': search_entity_view(username, repo.namespace_user),
+ 'name': repo.name,
+ 'description': repo.description,
+ 'is_public': model.repository.is_repository_public(repo),
+ 'score': REPOSITORY_SEARCH_SCORE,
+ 'href': '/' + kind + '/' + repo.namespace_user.username + '/' + repo.name
+ }
+
+ if last_modified is not None:
+ view['last_modified'] = last_modified
+
+ if stars is not None:
+ view['stars'] = stars
+
+ if popularity is not None:
+ view['popularity'] = popularity
+
+ return view
+
@resource('/v1/find/all')
class ConductSearch(ApiResource):
""" Resource for finding users, repositories, teams, etc. """
@@ -315,3 +334,49 @@ class ConductSearch(ApiResource):
result['score'] = result['score'] * lm_score
return {'results': sorted(results, key=itemgetter('score'), reverse=True)}
+
+
+MAX_PER_PAGE = app.config.get('SEARCH_RESULTS_PER_PAGE', 10)
+MAX_RESULT_PAGE_COUNT = app.config.get('SEARCH_MAX_RESULT_PAGE_COUNT', 10)
+
+@resource('/v1/find/repositories')
+class ConductRepositorySearch(ApiResource):
+ """ Resource for finding repositories. """
+ @parse_args()
+ @query_param('query', 'The search query.', type=str, default='')
+ @query_param('page', 'The page.', type=int, default=1)
+ @nickname('conductRepoSearch')
+ def get(self, parsed_args):
+ """ Get a list of apps and repositories that match the specified query. """
+ query = parsed_args['query']
+ page = min(max(1, parsed_args['page']), MAX_RESULT_PAGE_COUNT)
+ offset = (page - 1) * MAX_PER_PAGE
+ limit = offset + MAX_PER_PAGE + 1
+
+ username = get_authenticated_user().username if get_authenticated_user() else None
+
+ # Lookup matching repositories.
+ matching_repos = list(model.repository.get_filtered_matching_repositories(query, username,
+ repo_kind=None,
+ limit=limit,
+ offset=offset))
+
+ # Load secondary information such as last modified time, star count and action count.
+ repository_ids = [repo.id for repo in matching_repos]
+ last_modified_map = registry_model.get_most_recent_tag_lifetime_start(matching_repos)
+ star_map = model.repository.get_stars(repository_ids)
+ action_sum_map = model.log.get_repositories_action_sums(repository_ids)
+
+ # Build the results list.
+ results = [repo_result_view(repo, username, last_modified_map.get(repo.id),
+ star_map.get(repo.id, 0),
+ float(action_sum_map.get(repo.id, 0)))
+ for repo in matching_repos]
+
+ return {
+ 'results': results[0:MAX_PER_PAGE],
+ 'has_additional': len(results) > MAX_PER_PAGE,
+ 'page': page,
+ 'page_size': MAX_PER_PAGE,
+ 'start_index': offset,
+ }
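conductRepoSearch pages by asking the model for one row beyond the page size; the extra row is only a "has more" sentinel and is sliced off before returning. The arithmetic in isolation, as a self-contained sketch:

# Sketch: the limit-plus-one pagination used above.
MAX_PER_PAGE = 10

def page_window(page, max_page_count=10):
    page = min(max(1, page), max_page_count)
    offset = (page - 1) * MAX_PER_PAGE
    limit = offset + MAX_PER_PAGE + 1  # one extra row signals another page
    return offset, limit

rows = list(range(25))                 # stand-in for matching repositories
offset, limit = page_window(2)
results = rows[offset:limit]
print(results[0:MAX_PER_PAGE])         # the page actually returned (10 rows)
print(len(results) > MAX_PER_PAGE)     # has_additional -> True
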
diff --git a/endpoints/api/secscan.py b/endpoints/api/secscan.py
index f8727140c..71422184f 100644
--- a/endpoints/api/secscan.py
+++ b/endpoints/api/secscan.py
@@ -3,69 +3,106 @@
import logging
import features
-from app import secscan_api
-from data import model
+from app import app, secscan_api
+from auth.decorators import process_basic_auth_no_pass
+from data.registry_model import registry_model
+from data.registry_model.datatypes import SecurityScanStatus
from endpoints.api import (require_repo_read, path_param,
RepositoryParamResource, resource, nickname, show_if, parse_args,
- query_param, truthy_bool)
+ query_param, truthy_bool, disallow_for_app_repositories)
from endpoints.exception import NotFound, DownstreamIssue
+from endpoints.api.manifest import MANIFEST_DIGEST_ROUTE
from util.secscan.api import APIRequestFailure
logger = logging.getLogger(__name__)
+def _security_info(manifest_or_legacy_image, include_vulnerabilities=True):
+ """ Returns a dict representing the result of a call to the security status API for the given
+ manifest or image.
+ """
+ status = registry_model.get_security_status(manifest_or_legacy_image)
+ if status is None:
+ raise NotFound()
-class SCAN_STATUS(object):
- """ Security scan status enum """
- SCANNED = 'scanned'
- FAILED = 'failed'
- QUEUED = 'queued'
+ if status != SecurityScanStatus.SCANNED:
+ return {
+ 'status': status.value,
+ }
+
+ try:
+ if include_vulnerabilities:
+ data = secscan_api.get_layer_data(manifest_or_legacy_image, include_vulnerabilities=True)
+ else:
+ data = secscan_api.get_layer_data(manifest_or_legacy_image, include_features=True)
+ except APIRequestFailure as arf:
+ raise DownstreamIssue(arf.message)
+
+ if data is None:
+ # If no data was found but we reached this point, then it indicates we have incorrect security
+ # status for the manifest or legacy image. Mark the manifest or legacy image as unindexed
+ # so it automatically gets re-indexed.
+ if app.config.get('REGISTRY_STATE', 'normal') == 'normal':
+ registry_model.reset_security_status(manifest_or_legacy_image)
+
+ return {
+ 'status': SecurityScanStatus.QUEUED.value,
+ }
+
+ return {
+ 'status': status.value,
+ 'data': data,
+ }
-def _get_status(repo_image):
- if repo_image.security_indexed_engine is not None and repo_image.security_indexed_engine >= 0:
- return SCAN_STATUS.SCANNED if repo_image.security_indexed else SCAN_STATUS.FAILED
-
- return SCAN_STATUS.QUEUED
-
-
-@show_if(features.SECURITY_SCANNER)
@resource('/v1/repository/<apirepopath:repository>/image/<imageid>/security')
+@show_if(features.SECURITY_SCANNER)
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
@path_param('imageid', 'The image ID')
class RepositoryImageSecurity(RepositoryParamResource):
""" Operations for managing the vulnerabilities in a repository image. """
+ @process_basic_auth_no_pass
@require_repo_read
@nickname('getRepoImageSecurity')
+ @disallow_for_app_repositories
@parse_args()
  @query_param('vulnerabilities', 'Include vulnerability information', type=truthy_bool,
default=False)
def get(self, namespace, repository, imageid, parsed_args):
""" Fetches the features and vulnerabilities (if any) for a repository image. """
- repo_image = model.image.get_repo_image(namespace, repository, imageid)
- if repo_image is None:
+ repo_ref = registry_model.lookup_repository(namespace, repository)
+ if repo_ref is None:
raise NotFound()
- if not repo_image.security_indexed:
- logger.debug('Image %s under repository %s/%s not security indexed',
- repo_image.docker_image_id, namespace, repository)
- return {
- 'status': _get_status(repo_image),
- }
-
- try:
- if parsed_args.vulnerabilities:
- data = secscan_api.get_layer_data(repo_image, include_vulnerabilities=True)
- else:
- data = secscan_api.get_layer_data(repo_image, include_features=True)
- except APIRequestFailure as arf:
- raise DownstreamIssue({'message': arf.message})
-
- if data is None:
+ legacy_image = registry_model.get_legacy_image(repo_ref, imageid)
+ if legacy_image is None:
raise NotFound()
- return {
- 'status': _get_status(repo_image),
- 'data': data,
- }
+ return _security_info(legacy_image, parsed_args.vulnerabilities)
+
+
+@resource(MANIFEST_DIGEST_ROUTE + '/security')
+@show_if(features.SECURITY_SCANNER)
+@path_param('repository', 'The full path of the repository. e.g. namespace/name')
+@path_param('manifestref', 'The digest of the manifest')
+class RepositoryManifestSecurity(RepositoryParamResource):
+ """ Operations for managing the vulnerabilities in a repository manifest. """
+
+ @process_basic_auth_no_pass
+ @require_repo_read
+ @nickname('getRepoManifestSecurity')
+ @disallow_for_app_repositories
+ @parse_args()
+  @query_param('vulnerabilities', 'Include vulnerability information', type=truthy_bool,
+ default=False)
+ def get(self, namespace, repository, manifestref, parsed_args):
+ repo_ref = registry_model.lookup_repository(namespace, repository)
+ if repo_ref is None:
+ raise NotFound()
+
+ manifest = registry_model.lookup_manifest_by_digest(repo_ref, manifestref, allow_dead=True)
+ if manifest is None:
+ raise NotFound()
+
+ return _security_info(manifest, parsed_args.vulnerabilities)
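Every status other than SCANNED short-circuits to a bare status payload; only scanned manifests or legacy images trigger a call out to the scanner, and a scanned row with no scanner data is reset to queued for re-indexing. The response shapes a client can expect, with illustrative values:

# Sketch: illustrative response payloads from the security endpoints.
queued = {'status': 'queued'}
failed = {'status': 'failed'}
scanned = {
    'status': 'scanned',
    'data': {'Layer': {'Features': []}},  # payload shape is set by the scanner (Clair)
}
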
diff --git a/endpoints/api/signing.py b/endpoints/api/signing.py
new file mode 100644
index 000000000..eb2e942ec
--- /dev/null
+++ b/endpoints/api/signing.py
@@ -0,0 +1,29 @@
+""" List and manage repository signing information """
+
+import logging
+import features
+
+from app import tuf_metadata_api
+from endpoints.api import (require_repo_read, path_param,
+ RepositoryParamResource, resource, nickname, show_if,
+ disallow_for_app_repositories, NotFound)
+from endpoints.api.signing_models_pre_oci import pre_oci_model as model
+
+logger = logging.getLogger(__name__)
+
+
+@resource('/v1/repository/<apirepopath:repository>/signatures')
+@show_if(features.SIGNING)
+@path_param('repository', 'The full path of the repository. e.g. namespace/name')
+class RepositorySignatures(RepositoryParamResource):
+ """ Operations for managing the signatures in a repository image. """
+
+ @require_repo_read
+ @nickname('getRepoSignatures')
+ @disallow_for_app_repositories
+ def get(self, namespace, repository):
+ """ Fetches the list of signed tags for the repository. """
+ if not model.is_trust_enabled(namespace, repository):
+ raise NotFound()
+
+ return {'delegations': tuf_metadata_api.get_all_tags_with_expiration(namespace, repository)}
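When trust is disabled (or the repository is missing) the endpoint 404s; otherwise it returns the TUF delegations keyed by tag. A client-side sketch with a hypothetical host and placeholder token:

# Sketch only: hypothetical host and credentials.
import requests

resp = requests.get(
    'https://quay.example.com/api/v1/repository/myorg/myrepo/signatures',
    headers={'Authorization': 'Bearer <oauth-token>'})  # placeholder token
if resp.status_code == 404:
    print('repository missing or trust not enabled')
else:
    print(resp.json()['delegations'])
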
diff --git a/endpoints/api/signing_models_interface.py b/endpoints/api/signing_models_interface.py
new file mode 100644
index 000000000..6e5ce4ca4
--- /dev/null
+++ b/endpoints/api/signing_models_interface.py
@@ -0,0 +1,14 @@
+from abc import ABCMeta, abstractmethod
+from six import add_metaclass
+
+@add_metaclass(ABCMeta)
+class SigningInterface(object):
+ """
+ Interface that represents all data store interactions required by the signing API endpoint.
+ """
+ @abstractmethod
+ def is_trust_enabled(self, namespace_name, repo_name):
+ """
+ Returns whether the repository with the given namespace name and repository name exists and
+ has trust enabled.
+ """
diff --git a/endpoints/api/signing_models_pre_oci.py b/endpoints/api/signing_models_pre_oci.py
new file mode 100644
index 000000000..03afb1104
--- /dev/null
+++ b/endpoints/api/signing_models_pre_oci.py
@@ -0,0 +1,18 @@
+from data import model
+from endpoints.api.signing_models_interface import SigningInterface
+
+
+class PreOCIModel(SigningInterface):
+ """
+ PreOCIModel implements the data model for signing using a database schema
+ before it was changed to support the OCI specification.
+ """
+ def is_trust_enabled(self, namespace_name, repo_name):
+ repo = model.repository.get_repository(namespace_name, repo_name)
+ if repo is None:
+ return False
+
+ return repo.trust_enabled
+
+
+pre_oci_model = PreOCIModel()
diff --git a/endpoints/api/subscribe.py b/endpoints/api/subscribe.py
index a0be987eb..b526e25d2 100644
--- a/endpoints/api/subscribe.py
+++ b/endpoints/api/subscribe.py
@@ -1,32 +1,28 @@
""" Subscribe to plans. """
-
import logging
import stripe
-
+import features
from app import billing
from endpoints.api import request_error, log_action
-from endpoints.exception import NotFound
-from data import model
from data.billing import PLANS
-
-import features
+from endpoints.api.subscribe_models_pre_oci import data_model as model
+from endpoints.exception import NotFound
logger = logging.getLogger(__name__)
def check_repository_usage(user_or_org, plan_found):
- private_repos = model.user.get_private_repo_count(user_or_org.username)
+ private_repos = model.get_private_repo_count(user_or_org.username)
if plan_found is None:
repos_allowed = 0
else:
repos_allowed = plan_found['privateRepos']
if private_repos > repos_allowed:
- model.notification.create_unique_notification('over_private_usage', user_or_org,
- {'namespace': user_or_org.username})
+ model.create_unique_notification('over_private_usage', user_or_org.username, {'namespace': user_or_org.username})
else:
- model.notification.delete_notifications_by_kind(user_or_org, 'over_private_usage')
+ model.delete_notifications_by_kind(user_or_org.username, 'over_private_usage')
def carderror_response(exc):
@@ -70,7 +66,7 @@ def subscribe(user, plan, token, require_business_plan):
user.username)
raise request_error(message='No matching plan found')
- private_repos = model.user.get_private_repo_count(user.username)
+ private_repos = model.get_private_repo_count(user.username)
# This is the default response
response_json = {
@@ -92,9 +88,9 @@ def subscribe(user, plan, token, require_business_plan):
user.save()
check_repository_usage(user, plan_found)
log_action('account_change_plan', user.username, {'plan': plan})
- except stripe.CardError as e:
+ except stripe.error.CardError as e:
return carderror_response(e)
- except stripe.APIConnectionError as e:
+ except stripe.error.APIConnectionError as e:
return connection_response(e)
response_json = subscription_view(cus.subscription, private_repos)
@@ -104,19 +100,17 @@ def subscribe(user, plan, token, require_business_plan):
# Change the plan
try:
cus = billing.Customer.retrieve(user.stripe_id)
- except stripe.APIConnectionError as e:
+ except stripe.error.APIConnectionError as e:
return connection_response(e)
if plan_found['price'] == 0:
if cus.subscription is not None:
# We only have to cancel the subscription if they actually have one
try:
- cus.cancel_subscription()
- cus.save()
- except stripe.APIConnectionError as e:
+ cus.subscription.delete()
+ except stripe.error.APIConnectionError as e:
return connection_response(e)
-
check_repository_usage(user, plan_found)
log_action('account_change_plan', user.username, {'plan': plan})
@@ -129,9 +123,9 @@ def subscribe(user, plan, token, require_business_plan):
try:
cus.save()
- except stripe.CardError as e:
+ except stripe.error.CardError as e:
return carderror_response(e)
- except stripe.APIConnectionError as e:
+ except stripe.error.APIConnectionError as e:
return connection_response(e)
response_json = subscription_view(cus.subscription, private_repos)
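The exception classes moved under the stripe.error namespace in newer stripe-python releases, and cancellation is now a delete() on the subscription object itself. The updated pattern as a standalone sketch, mirroring the calls above:

# Sketch: error handling against the stripe.error namespace.
import stripe

def cancel_if_subscribed(stripe_customer_id):
    try:
        cus = stripe.Customer.retrieve(stripe_customer_id)
        if cus.subscription is not None:
            cus.subscription.delete()  # replaces cancel_subscription() + save()
    except stripe.error.CardError as e:
        return {'carderror': str(e)}
    except stripe.error.APIConnectionError:
        return {'reason': 'stripeconnect'}
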
diff --git a/endpoints/api/subscribe_models_interface.py b/endpoints/api/subscribe_models_interface.py
new file mode 100644
index 000000000..fbc7a8a70
--- /dev/null
+++ b/endpoints/api/subscribe_models_interface.py
@@ -0,0 +1,26 @@
+from abc import ABCMeta, abstractmethod
+from six import add_metaclass
+
+
+@add_metaclass(ABCMeta)
+class SubscribeInterface(object):
+ """
+ Interface that represents all data store interactions required by the subscribe API endpoint.
+ """
+ @abstractmethod
+ def get_private_repo_count(self, username):
+ """
+ Returns the number of private repositories for a given username or namespace.
+ """
+
+ @abstractmethod
+ def create_unique_notification(self, kind_name, target_username, metadata={}):
+ """
+ Creates a notification using the given parameters.
+ """
+
+ @abstractmethod
+ def delete_notifications_by_kind(self, target_username, kind_name):
+ """
+ Remove notifications for a target based on given kind.
+ """
diff --git a/endpoints/api/subscribe_models_pre_oci.py b/endpoints/api/subscribe_models_pre_oci.py
new file mode 100644
index 000000000..a5ca83149
--- /dev/null
+++ b/endpoints/api/subscribe_models_pre_oci.py
@@ -0,0 +1,23 @@
+from data.model.notification import create_unique_notification, delete_notifications_by_kind
+from data.model.user import get_private_repo_count, get_user_or_org
+from endpoints.api.subscribe_models_interface import SubscribeInterface
+
+
+class PreOCIModel(SubscribeInterface):
+ """
+  PreOCIModel implements the data model for plan subscriptions using a database schema
+ before it was changed to support the OCI specification.
+ """
+ def get_private_repo_count(self, username):
+ return get_private_repo_count(username)
+
+ def create_unique_notification(self, kind_name, target_username, metadata={}):
+ target = get_user_or_org(target_username)
+ create_unique_notification(kind_name, target, metadata)
+
+ def delete_notifications_by_kind(self, target_username, kind_name):
+ target = get_user_or_org(target_username)
+ delete_notifications_by_kind(target, kind_name)
+
+
+data_model = PreOCIModel()
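The subscribe endpoint now deals only in usernames; resolving them back to user or org rows happens inside the model. A sketch of the call pattern (requires a configured database; the namespace and plan allowance are hypothetical):

from endpoints.api.subscribe_models_pre_oci import data_model as model

private_repos = model.get_private_repo_count('myorg')
repos_allowed = 0  # hypothetical plan allowance
if private_repos > repos_allowed:
    model.create_unique_notification('over_private_usage', 'myorg',
                                     {'namespace': 'myorg'})
else:
    model.delete_notifications_by_kind('myorg', 'over_private_usage')
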
diff --git a/endpoints/api/suconfig.py b/endpoints/api/suconfig.py
index db5050489..a96a7356b 100644
--- a/endpoints/api/suconfig.py
+++ b/endpoints/api/suconfig.py
@@ -3,25 +3,14 @@
import logging
import os
import signal
+import subprocess
from flask import abort
-from endpoints.api import (ApiResource, nickname, resource, internal_only, show_if,
- require_fresh_login, request, validate_json_request, verify_not_prod,
- InvalidRequest)
-from endpoints.common import common_login
-from app import app, config_provider, superusers, OVERRIDE_CONFIG_DIRECTORY
-from data import model
-from data.database import configure
+from app import app, config_provider
from auth.permissions import SuperUserPermission
-from auth.auth_context import get_authenticated_user
-from data.database import User
-from util.config.configutil import add_enterprise_config_defaults
-from util.config.database import sync_database_with_config
-from util.config.validator import validate_service_for_config, CONFIG_FILENAMES
-from util.license import decode_license, LicenseDecodeError
-from data.runmigration import run_alembic_migration
-from data.users import get_federated_service_name, get_users_handler
+from endpoints.api.suconfig_models_pre_oci import pre_oci_model as model
+from endpoints.api import (ApiResource, nickname, resource, internal_only, show_if, verify_not_prod)
import features
@@ -34,16 +23,12 @@ def database_is_valid():
if app.config['TESTING']:
return False
- try:
- list(User.select().limit(1))
- return True
- except:
- return False
+ return model.is_valid()
def database_has_users():
""" Returns whether the database has any users defined. """
- return bool(list(User.select().limit(1)))
+ return model.has_users()
@resource('/v1/superuser/registrystatus')
@@ -57,41 +42,15 @@ class SuperUserRegistryStatus(ApiResource):
@verify_not_prod
def get(self):
""" Returns the status of the registry. """
-
# If we have SETUP_COMPLETE, then we're ready to go!
if app.config.get('SETUP_COMPLETE', False):
return {
'provider_id': config_provider.provider_id,
- 'requires_restart': config_provider.requires_restart(app.config),
'status': 'ready'
}
- # If there is no conf/stack volume, then report that status.
- if not config_provider.volume_exists():
- return {
- 'status': 'missing-config-dir'
- }
-
- # If there is no license file, we need to ask the user to upload it.
- if not config_provider.has_license_file():
- return {
- 'status': 'upload-license'
- }
-
- # If there is no config file, we need to setup the database.
- if not config_provider.config_exists():
- return {
- 'status': 'config-db'
- }
-
- # If the database isn't yet valid, then we need to set it up.
- if not database_is_valid():
- return {
- 'status': 'setup-db'
- }
-
return {
- 'status': 'create-superuser' if not database_has_users() else 'config'
+ 'status': 'setup-incomplete'
}
@@ -106,40 +65,20 @@ class _AlembicLogHandler(logging.Handler):
'message': record.getMessage()
})
-@resource('/v1/superuser/setupdb')
-@internal_only
-@show_if(features.SUPER_USERS)
-class SuperUserSetupDatabase(ApiResource):
- """ Resource for invoking alembic to setup the database. """
- @verify_not_prod
- @nickname('scSetupDatabase')
- def get(self):
- """ Invokes the alembic upgrade process. """
- # Note: This method is called after the database configured is saved, but before the
- # database has any tables. Therefore, we only allow it to be run in that unique case.
- if config_provider.config_exists() and not database_is_valid():
- # Note: We need to reconfigure the database here as the config has changed.
- combined = dict(**app.config)
- combined.update(config_provider.get_config())
-
- configure(combined)
- app.config['DB_URI'] = combined['DB_URI']
-
- log_handler = _AlembicLogHandler()
-
- try:
- run_alembic_migration(log_handler)
- except Exception as ex:
- return {
- 'error': str(ex)
- }
-
- return {
- 'logs': log_handler.records
- }
-
- abort(403)
+# From: https://stackoverflow.com/a/44712205
+def get_process_id(name):
+ """Return process ids found by (partial) name or regex.
+ >>> get_process_id('kthreadd')
+ [2]
+ >>> get_process_id('watchdog')
+ [10, 11, 16, 21, 26, 31, 36, 41, 46, 51, 56, 61] # ymmv
+ >>> get_process_id('non-existent process')
+ []
+ """
+ child = subprocess.Popen(['pgrep', name], stdout=subprocess.PIPE, shell=False)
+ response = child.communicate()[0]
+ return [int(pid) for pid in response.split()]
@resource('/v1/superuser/shutdown')
@@ -159,292 +98,7 @@ class SuperUserShutdown(ApiResource):
if app.config.get('DEBUGGING') == True:
return {}
- os.kill(1, signal.SIGINT)
+ os.kill(get_process_id('my_init')[0], signal.SIGINT)
return {}
abort(403)
-
-
-@resource('/v1/superuser/config')
-@internal_only
-@show_if(features.SUPER_USERS)
-class SuperUserConfig(ApiResource):
- """ Resource for fetching and updating the current configuration, if any. """
- schemas = {
- 'UpdateConfig': {
- 'type': 'object',
- 'description': 'Updates the YAML config file',
- 'required': [
- 'config',
- 'hostname'
- ],
- 'properties': {
- 'config': {
- 'type': 'object'
- },
- 'hostname': {
- 'type': 'string'
- },
- 'password': {
- 'type': 'string'
- },
- },
- },
- }
-
- @require_fresh_login
- @verify_not_prod
- @nickname('scGetConfig')
- def get(self):
- """ Returns the currently defined configuration, if any. """
- if SuperUserPermission().can():
- config_object = config_provider.get_config()
- return {
- 'config': config_object
- }
-
- abort(403)
-
- @nickname('scUpdateConfig')
- @verify_not_prod
- @validate_json_request('UpdateConfig')
- def put(self):
- """ Updates the config override file. """
- # Note: This method is called to set the database configuration before super users exists,
- # so we also allow it to be called if there is no valid registry configuration setup.
- if not config_provider.config_exists() or SuperUserPermission().can():
- config_object = request.get_json()['config']
- hostname = request.get_json()['hostname']
-
- # Add any enterprise defaults missing from the config.
- add_enterprise_config_defaults(config_object, app.config['SECRET_KEY'], hostname)
-
- # Write the configuration changes to the config override file.
- config_provider.save_config(config_object)
-
- # If the authentication system is not the database, link the superuser account to the
- # the authentication system chosen.
- if config_object.get('AUTHENTICATION_TYPE', 'Database') != 'Database':
- current_user = get_authenticated_user()
- if current_user is None:
- abort(401)
-
- service_name = get_federated_service_name(config_object['AUTHENTICATION_TYPE'])
- if not model.user.lookup_federated_login(current_user, service_name):
- # Verify the user's credentials and retrieve the user's external username+email.
- handler = get_users_handler(config_object, config_provider, OVERRIDE_CONFIG_DIRECTORY)
- (result, err_msg) = handler.verify_credentials(current_user.username,
- request.get_json().get('password', ''))
- if not result:
- logger.error('Could not save configuration due to external auth failure: %s', err_msg)
- abort(400)
-
- # Link the existing user to the external user.
- model.user.attach_federated_login(current_user, service_name, result.username)
-
- # Ensure database is up-to-date with config
- sync_database_with_config(config_object)
-
- return {
- 'exists': True,
- 'config': config_object
- }
-
- abort(403)
-
-
-@resource('/v1/superuser/config/license')
-@internal_only
-@show_if(features.SUPER_USERS)
-class SuperUserSetAndValidateLicense(ApiResource):
- """ Resource for setting and validating a license. """
- schemas = {
- 'ValidateLicense': {
- 'type': 'object',
- 'description': 'Validates and sets a license',
- 'required': [
- 'license',
- ],
- 'properties': {
- 'license': {
- 'type': 'string'
- },
- },
- },
- }
-
- @nickname('suSetAndValidateLicense')
- @verify_not_prod
- @validate_json_request('ValidateLicense')
- def post(self):
- """ Validates the given license contents and then saves it to the config volume. """
- if config_provider.has_license_file():
- abort(403)
-
- license_contents = request.get_json()['license']
- try:
- decoded_license = decode_license(license_contents)
- except LicenseDecodeError as le:
- raise InvalidRequest(le.message)
-
- statuses = decoded_license.validate({})
- all_met = all(status.is_met() for status in statuses)
- if all_met:
- config_provider.save_license(license_contents)
-
- return {
- 'status': [status.as_dict(for_private=True) for status in statuses],
- 'success': all_met,
- }
-
-
-@resource('/v1/superuser/config/file/<filename>')
-@internal_only
-@show_if(features.SUPER_USERS)
-class SuperUserConfigFile(ApiResource):
- """ Resource for fetching the status of config files and overriding them. """
- @nickname('scConfigFileExists')
- @verify_not_prod
- def get(self, filename):
- """ Returns whether the configuration file with the given name exists. """
- if not filename in CONFIG_FILENAMES:
- abort(404)
-
- if SuperUserPermission().can():
- return {
- 'exists': config_provider.volume_file_exists(filename)
- }
-
- abort(403)
-
- @nickname('scUpdateConfigFile')
- @verify_not_prod
- def post(self, filename):
- """ Updates the configuration file with the given name. """
- if not filename in CONFIG_FILENAMES:
- abort(404)
-
- # Note: This method can be called before the configuration exists
- # to upload the database SSL cert.
- if not config_provider.config_exists() or SuperUserPermission().can():
- uploaded_file = request.files['file']
- if not uploaded_file:
- abort(400)
-
- config_provider.save_volume_file(filename, uploaded_file)
- return {
- 'status': True
- }
-
- abort(403)
-
-
-@resource('/v1/superuser/config/createsuperuser')
-@internal_only
-@show_if(features.SUPER_USERS)
-class SuperUserCreateInitialSuperUser(ApiResource):
- """ Resource for creating the initial super user. """
- schemas = {
- 'CreateSuperUser': {
- 'type': 'object',
- 'description': 'Information for creating the initial super user',
- 'required': [
- 'username',
- 'password',
- 'email'
- ],
- 'properties': {
- 'username': {
- 'type': 'string',
- 'description': 'The username for the superuser'
- },
- 'password': {
- 'type': 'string',
- 'description': 'The password for the superuser'
- },
- 'email': {
- 'type': 'string',
- 'description': 'The e-mail address for the superuser'
- },
- },
- },
- }
-
- @nickname('scCreateInitialSuperuser')
- @verify_not_prod
- @validate_json_request('CreateSuperUser')
- def post(self):
- """ Creates the initial super user, updates the underlying configuration and
- sets the current session to have that super user. """
-
- # Special security check: This method is only accessible when:
- # - There is a valid config YAML file.
- # - There are currently no users in the database (clean install)
- #
- # We do this special security check because at the point this method is called, the database
- # is clean but does not (yet) have any super users for our permissions code to check against.
- if config_provider.config_exists() and not database_has_users():
- data = request.get_json()
- username = data['username']
- password = data['password']
- email = data['email']
-
- # Create the user in the database.
- superuser = model.user.create_user(username, password, email, auto_verify=True)
-
- # Add the user to the config.
- config_object = config_provider.get_config()
- config_object['SUPER_USERS'] = [username]
- config_provider.save_config(config_object)
-
- # Update the in-memory config for the new superuser.
- superusers.register_superuser(username)
-
- # Conduct login with that user.
- common_login(superuser)
-
- return {
- 'status': True
- }
-
-
- abort(403)
-
-
-@resource('/v1/superuser/config/validate/<service>')
-@internal_only
-@show_if(features.SUPER_USERS)
-class SuperUserConfigValidate(ApiResource):
- """ Resource for validating a block of configuration against an external service. """
- schemas = {
- 'ValidateConfig': {
- 'type': 'object',
- 'description': 'Validates configuration',
- 'required': [
- 'config'
- ],
- 'properties': {
- 'config': {
- 'type': 'object'
- },
- 'password': {
- 'type': 'string',
- 'description': 'The users password, used for auth validation'
- }
- },
- },
- }
-
- @nickname('scValidateConfig')
- @verify_not_prod
- @validate_json_request('ValidateConfig')
- def post(self, service):
- """ Validates the given config for the given service. """
- # Note: This method is called to validate the database configuration before super users exists,
- # so we also allow it to be called if there is no valid registry configuration setup. Note that
- # this is also safe since this method does not access any information not given in the request.
- if not config_provider.config_exists() or SuperUserPermission().can():
- config = request.get_json()['config']
- return validate_service_for_config(service, config, request.get_json().get('password', ''))
-
- abort(403)
diff --git a/endpoints/api/suconfig_models_interface.py b/endpoints/api/suconfig_models_interface.py
new file mode 100644
index 000000000..9f8cbd0cb
--- /dev/null
+++ b/endpoints/api/suconfig_models_interface.py
@@ -0,0 +1,39 @@
+from abc import ABCMeta, abstractmethod
+from six import add_metaclass
+
+
+@add_metaclass(ABCMeta)
+class SuperuserConfigDataInterface(object):
+ """
+ Interface that represents all data store interactions required by the superuser config API.
+ """
+
+ @abstractmethod
+ def is_valid(self):
+ """
+ Returns true if the configured database is valid.
+ """
+
+ @abstractmethod
+ def has_users(self):
+ """
+ Returns true if there are any users defined.
+ """
+
+ @abstractmethod
+ def create_superuser(self, username, password, email):
+ """
+ Creates a new superuser with the given username, password and email. Returns the user's UUID.
+ """
+
+ @abstractmethod
+ def has_federated_login(self, username, service_name):
+ """
+ Returns true if the matching user has a federated login under the matching service.
+ """
+
+ @abstractmethod
+ def attach_federated_login(self, username, service_name, federated_username):
+ """
+    Attaches a federated login to the matching user, under the given service.
+ """
diff --git a/endpoints/api/suconfig_models_pre_oci.py b/endpoints/api/suconfig_models_pre_oci.py
new file mode 100644
index 000000000..9bcb40acd
--- /dev/null
+++ b/endpoints/api/suconfig_models_pre_oci.py
@@ -0,0 +1,33 @@
+from data import model
+from data.database import User
+from endpoints.api.suconfig_models_interface import SuperuserConfigDataInterface
+
+class PreOCIModel(SuperuserConfigDataInterface):
+ def is_valid(self):
+ try:
+ list(User.select().limit(1))
+ return True
+    except Exception:
+ return False
+
+ def has_users(self):
+ return bool(list(User.select().limit(1)))
+
+ def create_superuser(self, username, password, email):
+ return model.user.create_user(username, password, email, auto_verify=True).uuid
+
+ def has_federated_login(self, username, service_name):
+ user = model.user.get_user(username)
+ if user is None:
+ return False
+
+ return bool(model.user.lookup_federated_login(user, service_name))
+
+ def attach_federated_login(self, username, service_name, federated_username):
+ user = model.user.get_user(username)
+ if user is None:
+ return False
+
+ model.user.attach_federated_login(user, service_name, federated_username)
+
+pre_oci_model = PreOCIModel()
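For context, a sketch of how the config flow consumes this model during first-boot setup; the control flow here is illustrative (the real endpoint lives in suconfig.py), but the three calls are exactly the interface above:

from endpoints.api.suconfig_models_pre_oci import pre_oci_model

def can_bootstrap_superuser():
  # Superuser creation is only offered while the database is reachable
  # and no users exist yet.
  return pre_oci_model.is_valid() and not pre_oci_model.has_users()

if can_bootstrap_superuser():
  user_uuid = pre_oci_model.create_superuser('admin', 's3cr3t', 'admin@example.com')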
diff --git a/endpoints/api/superuser.py b/endpoints/api/superuser.py
index 9a1da3dea..ec1a4992f 100644
--- a/endpoints/api/superuser.py
+++ b/endpoints/api/superuser.py
@@ -1,10 +1,8 @@
""" Superuser API. """
-
import logging
import os
import string
-
-import pathvalidate
+import socket
from datetime import datetime
from random import SystemRandom
@@ -13,26 +11,26 @@ from flask import request, make_response, jsonify
import features
-from app import (app, avatar, superusers, authentication, config_provider, license_validator,
- all_queues, log_archive, build_logs)
+from app import app, avatar, superusers, authentication, config_provider
from auth import scopes
from auth.auth_context import get_authenticated_user
from auth.permissions import SuperUserPermission
-from data.buildlogs import BuildStatusRetrievalError
+from data.database import ServiceKeyApprovalType
+from data.logs_model import logs_model
from endpoints.api import (ApiResource, nickname, resource, validate_json_request,
internal_only, require_scope, show_if, parse_args,
- query_param, abort, require_fresh_login, path_param, verify_not_prod,
- page_support, log_action, InvalidRequest)
-from endpoints.api.build import build_status_view, get_logs_or_log_url
-from endpoints.api.logs import get_logs, get_aggregate_logs
-from data import model
-from data.database import ServiceKeyApprovalType
-from endpoints.exception import NotFound
+ query_param, require_fresh_login, path_param, verify_not_prod,
+ page_support, log_action, format_date, truthy_bool,
+ InvalidRequest, NotFound, Unauthorized, InvalidResponse)
+from endpoints.api.build import get_logs_or_log_url
+from endpoints.api.superuser_models_pre_oci import (pre_oci_model, ServiceKeyDoesNotExist,
+ ServiceKeyAlreadyApproved,
+ InvalidRepositoryBuildException)
+from endpoints.api.logs import _validate_logs_arguments
+from util.request import get_request_ip
from util.useremails import send_confirmation_email, send_recovery_email
-from util.license import decode_license, LicenseDecodeError
-from util.security.ssl import load_certificate, CertInvalidException
-from util.config.validator import EXTRA_CA_DIRECTORY
-
+from util.validation import validate_service_key_name
+from _init import ROOT_DIR
logger = logging.getLogger(__name__)
@@ -47,60 +45,11 @@ def get_services():
return services
-@resource('/v1/superuser/systemlogs/<service>')
-@internal_only
-@show_if(features.SUPER_USERS)
-class SuperUserGetLogsForService(ApiResource):
- """ Resource for fetching the kinds of system logs in the system. """
- @require_fresh_login
- @verify_not_prod
- @nickname('getSystemLogs')
- @require_scope(scopes.SUPERUSER)
- def get(self, service):
- """ Returns the logs for the specific service. """
- if SuperUserPermission().can():
- if not service in get_services():
- abort(404)
-
- logs = []
- try:
- with open(app.config['SYSTEM_LOGS_FILE'], 'r') as f:
- logs = [line for line in f if line.find(service + '[') >= 0]
-
- except Exception:
- logger.exception('Cannot read logs')
- abort(400)
-
- return {
- 'logs': '\n'.join(logs)
- }
-
- abort(403)
-
-
-@resource('/v1/superuser/systemlogs/')
-@internal_only
-@show_if(features.SUPER_USERS)
-class SuperUserSystemLogServices(ApiResource):
- """ Resource for fetching the kinds of system logs in the system. """
- @require_fresh_login
- @verify_not_prod
- @nickname('listSystemLogServices')
- @require_scope(scopes.SUPERUSER)
- def get(self):
- """ List the system logs for the current system. """
- if SuperUserPermission().can():
- return {
- 'services': list(get_services())
- }
-
- abort(403)
-
-
@resource('/v1/superuser/aggregatelogs')
@internal_only
class SuperUserAggregateLogs(ApiResource):
""" Resource for fetching aggregated logs for the current user. """
+
@require_fresh_login
@verify_not_prod
@nickname('listAllAggregateLogs')
@@ -110,19 +59,23 @@ class SuperUserAggregateLogs(ApiResource):
def get(self, parsed_args):
""" Returns the aggregated logs for the current system. """
if SuperUserPermission().can():
- start_time = parsed_args['starttime']
- end_time = parsed_args['endtime']
+ (start_time, end_time) = _validate_logs_arguments(parsed_args['starttime'],
+ parsed_args['endtime'])
+ aggregated_logs = logs_model.get_aggregated_log_counts(start_time, end_time)
+ return {
+ 'aggregated': [log.to_dict() for log in aggregated_logs]
+ }
- return get_aggregate_logs(start_time, end_time)
-
- abort(403)
+ raise Unauthorized()
+LOGS_PER_PAGE = 20
@resource('/v1/superuser/logs')
@internal_only
@show_if(features.SUPER_USERS)
class SuperUserLogs(ApiResource):
""" Resource for fetching all logs in the system. """
+
@require_fresh_login
@verify_not_prod
@nickname('listAllLogs')
@@ -138,9 +91,15 @@ class SuperUserLogs(ApiResource):
start_time = parsed_args['starttime']
end_time = parsed_args['endtime']
- return get_logs(start_time, end_time, page_token=page_token)
+ (start_time, end_time) = _validate_logs_arguments(start_time, end_time)
+ log_entry_page = logs_model.lookup_logs(start_time, end_time, page_token=page_token)
+ return {
+ 'start_time': format_date(start_time),
+ 'end_time': format_date(end_time),
+ 'logs': [log.to_dict(avatar, include_namespace=True) for log in log_entry_page.logs],
+ }, log_entry_page.next_page_token
- abort(403)
+ raise Unauthorized()
def org_view(org):
@@ -150,6 +109,7 @@ def org_view(org):
'avatar': avatar.get_data_for_org(org),
}
+
def user_view(user, password=None):
user_data = {
'kind': 'user',
@@ -167,11 +127,13 @@ def user_view(user, password=None):
return user_data
+
@resource('/v1/superuser/changelog/')
@internal_only
@show_if(features.SUPER_USERS)
class ChangeLog(ApiResource):
""" Resource for returning the change log for enterprise customers. """
+
@require_fresh_login
@verify_not_prod
@nickname('getChangeLog')
@@ -179,13 +141,12 @@ class ChangeLog(ApiResource):
def get(self):
""" Returns the change log for this installation. """
if SuperUserPermission().can():
- with open('CHANGELOG.md', 'r') as f:
+ with open(os.path.join(ROOT_DIR, 'CHANGELOG.md'), 'r') as f:
return {
'log': f.read()
}
- abort(403)
-
+ raise Unauthorized()
@resource('/v1/superuser/organizations/')
@@ -193,6 +154,7 @@ class ChangeLog(ApiResource):
@show_if(features.SUPER_USERS)
class SuperUserOrganizationList(ApiResource):
""" Resource for listing organizations in the system. """
+
@require_fresh_login
@verify_not_prod
@nickname('listAllOrganizations')
@@ -200,16 +162,14 @@ class SuperUserOrganizationList(ApiResource):
def get(self):
""" Returns a list of all organizations in the system. """
if SuperUserPermission().can():
- orgs = model.organization.get_organizations()
return {
- 'organizations': [org_view(org) for org in orgs]
+ 'organizations': [org.to_dict() for org in pre_oci_model.get_organizations()]
}
- abort(403)
+ raise Unauthorized()
@resource('/v1/superuser/users/')
-@internal_only
@show_if(features.SUPER_USERS)
class SuperUserList(ApiResource):
""" Resource for listing users in the system. """
@@ -235,17 +195,19 @@ class SuperUserList(ApiResource):
@require_fresh_login
@verify_not_prod
@nickname('listAllUsers')
+ @parse_args()
+ @query_param('disabled', 'If false, only enabled users will be returned.', type=truthy_bool,
+ default=True)
@require_scope(scopes.SUPERUSER)
- def get(self):
+ def get(self, parsed_args):
""" Returns a list of all users in the system. """
if SuperUserPermission().can():
- users = model.user.get_active_users()
+ users = pre_oci_model.get_active_users(disabled=parsed_args['disabled'])
return {
- 'users': [user_view(user) for user in users]
+ 'users': [user.to_dict() for user in users]
}
- abort(403)
-
+ raise Unauthorized()
@require_fresh_login
@verify_not_prod
@@ -256,7 +218,7 @@ class SuperUserList(ApiResource):
""" Creates a new user. """
# Ensure that we are using database auth.
if app.config['AUTHENTICATION_TYPE'] != 'Database':
- abort(400)
+ raise InvalidRequest('Cannot create a user in a non-database auth system')
user_information = request.get_json()
if SuperUserPermission().can():
@@ -267,14 +229,9 @@ class SuperUserList(ApiResource):
# Create the user.
username = user_information['username']
email = user_information.get('email')
- prompts = model.user.get_default_user_prompts(features)
- user = model.user.create_user(username, password, email, auto_verify=not features.MAILING,
- email_required=features.MAILING, prompts=prompts)
-
- # If mailing is turned on, send the user a verification email.
+ install_user, confirmation_code = pre_oci_model.create_install_user(username, password, email)
if features.MAILING:
- confirmation = model.user.create_confirm_email_code(user)
- send_confirmation_email(user.username, user.email, confirmation.code)
+ send_confirmation_email(install_user.username, install_user.email, confirmation_code)
return {
'username': username,
@@ -283,7 +240,7 @@ class SuperUserList(ApiResource):
'encrypted_password': authentication.encrypt_user_password(password),
}
- abort(403)
+ raise Unauthorized()
@resource('/v1/superusers/users/<username>/sendrecovery')
@@ -292,6 +249,7 @@ class SuperUserList(ApiResource):
@show_if(features.MAILING)
class SuperUserSendRecoveryEmail(ApiResource):
""" Resource for sending a recovery user on behalf of a user. """
+
@require_fresh_login
@verify_not_prod
@nickname('sendInstallUserRecoveryEmail')
@@ -299,23 +257,23 @@ class SuperUserSendRecoveryEmail(ApiResource):
def post(self, username):
# Ensure that we are using database auth.
if app.config['AUTHENTICATION_TYPE'] != 'Database':
- abort(400)
+ raise InvalidRequest('Cannot send a recovery e-mail for non-database auth')
if SuperUserPermission().can():
- user = model.user.get_nonrobot_user(username)
- if not user:
- abort(404)
+ user = pre_oci_model.get_nonrobot_user(username)
+ if user is None:
+ raise NotFound()
if superusers.is_superuser(username):
- abort(403)
+ raise InvalidRequest('Cannot send a recovery email for a superuser')
- code = model.user.create_reset_password_email_code(user.email)
- send_recovery_email(user.email, code.code)
+ code = pre_oci_model.create_reset_password_email_code(user.email)
+ send_recovery_email(user.email, code)
return {
'email': user.email
}
- abort(403)
+ raise Unauthorized()
@resource('/v1/superuser/users/<username>')
@@ -353,13 +311,13 @@ class SuperUserManagement(ApiResource):
def get(self, username):
""" Returns information about the specified user. """
if SuperUserPermission().can():
- user = model.user.get_nonrobot_user(username)
- if not user:
- abort(404)
+ user = pre_oci_model.get_nonrobot_user(username)
+ if user is None:
+ raise NotFound()
- return user_view(user)
+ return user.to_dict()
- abort(403)
+ raise Unauthorized()
@require_fresh_login
@verify_not_prod
@@ -368,17 +326,17 @@ class SuperUserManagement(ApiResource):
def delete(self, username):
""" Deletes the specified user. """
if SuperUserPermission().can():
- user = model.user.get_nonrobot_user(username)
- if not user:
- abort(404)
+ user = pre_oci_model.get_nonrobot_user(username)
+ if user is None:
+ raise NotFound()
if superusers.is_superuser(username):
- abort(403)
+ raise InvalidRequest('Cannot delete a superuser')
- model.user.delete_user(user, all_queues, force=True)
+ pre_oci_model.mark_user_for_deletion(username)
return '', 204
- abort(403)
+ raise Unauthorized()
@require_fresh_login
@verify_not_prod
@@ -388,32 +346,31 @@ class SuperUserManagement(ApiResource):
def put(self, username):
""" Updates information about the specified user. """
if SuperUserPermission().can():
- user = model.user.get_nonrobot_user(username)
- if not user:
- abort(404)
+ user = pre_oci_model.get_nonrobot_user(username)
+ if user is None:
+ raise NotFound()
if superusers.is_superuser(username):
- abort(403)
+ raise InvalidRequest('Cannot update a superuser')
user_data = request.get_json()
if 'password' in user_data:
# Ensure that we are using database auth.
if app.config['AUTHENTICATION_TYPE'] != 'Database':
- abort(400)
+ raise InvalidRequest('Cannot change password in non-database auth')
- model.user.change_password(user, user_data['password'])
+ pre_oci_model.change_password(username, user_data['password'])
if 'email' in user_data:
# Ensure that we are using database auth.
- if app.config['AUTHENTICATION_TYPE'] != 'Database':
- abort(400)
+ if app.config['AUTHENTICATION_TYPE'] not in ['Database', 'AppToken']:
+ raise InvalidRequest('Cannot change e-mail in non-database auth')
- model.user.update_email(user, user_data['email'], auto_verify=True)
+ pre_oci_model.update_email(username, user_data['email'], auto_verify=True)
if 'enabled' in user_data:
# Disable/enable the user.
- user.enabled = bool(user_data['enabled'])
- user.save()
+ pre_oci_model.update_enabled(username, bool(user_data['enabled']))
if 'superuser' in user_data:
config_object = config_provider.get_config()
@@ -427,9 +384,16 @@ class SuperUserManagement(ApiResource):
config_object['SUPER_USERS'] = list(superusers_set)
config_provider.save_config(config_object)
- return user_view(user, password=user_data.get('password'))
+ return_value = user.to_dict()
+ if user_data.get('password') is not None:
+ password = user_data.get('password')
+ return_value['encrypted_password'] = authentication.encrypt_user_password(password)
+ if user_data.get('email') is not None:
+ return_value['email'] = user_data.get('email')
- abort(403)
+ return return_value
+
+ raise Unauthorized()
@resource('/v1/superuser/takeownership/<namespace>')
@@ -438,6 +402,7 @@ class SuperUserManagement(ApiResource):
@show_if(features.SUPER_USERS)
class SuperUserTakeOwnership(ApiResource):
""" Resource for a superuser to take ownership of a namespace. """
+
@require_fresh_login
@verify_not_prod
@nickname('takeOwnership')
@@ -447,25 +412,16 @@ class SuperUserTakeOwnership(ApiResource):
if SuperUserPermission().can():
# Disallow for superusers.
if superusers.is_superuser(namespace):
- abort(400)
-
- entity = model.user.get_user_or_org(namespace)
- if entity is None:
- abort(404)
+ raise InvalidRequest('Cannot take ownership of a superuser')
authed_user = get_authenticated_user()
- was_user = not entity.organization
- if entity.organization:
- # Add the superuser as an admin to the owners team of the org.
- model.organization.add_user_as_admin(authed_user, entity)
- else:
- # If the entity is a user, convert it to an organization and add the current superuser
- # as the admin.
- model.organization.convert_user_to_organization(entity, get_authenticated_user())
+ entity_id, was_user = pre_oci_model.take_ownership(namespace, authed_user)
+ if entity_id is None:
+ raise NotFound()
# Log the change.
log_metadata = {
- 'entity_id': entity.id,
+ 'entity_id': entity_id,
'namespace': namespace,
'was_user': was_user,
'superuser': authed_user.username,
@@ -477,12 +433,11 @@ class SuperUserTakeOwnership(ApiResource):
'namespace': namespace
})
- abort(403)
+ raise Unauthorized()
@resource('/v1/superuser/organizations/<name>')
@path_param('name', 'The name of the organization being managed')
-@internal_only
@show_if(features.SUPER_USERS)
class SuperUserOrganizationManagement(ApiResource):
""" Resource for managing organizations in the system. """
@@ -507,12 +462,10 @@ class SuperUserOrganizationManagement(ApiResource):
def delete(self, name):
""" Deletes the specified organization. """
if SuperUserPermission().can():
- org = model.organization.get_organization(name)
-
- model.user.delete_user(org, all_queues)
+ pre_oci_model.mark_organization_for_deletion(name)
return '', 204
- abort(403)
+ raise Unauthorized()
@require_fresh_login
@verify_not_prod
@@ -522,15 +475,12 @@ class SuperUserOrganizationManagement(ApiResource):
def put(self, name):
""" Updates information about the specified user. """
if SuperUserPermission().can():
- org = model.organization.get_organization(name)
org_data = request.get_json()
+ new_name = org_data['name'] if 'name' in org_data else None
+ org = pre_oci_model.change_organization_name(name, new_name)
+ return org.to_dict()
- if 'name' in org_data:
- org = model.user.change_username(org.id, org_data['name'])
-
- return org_view(org)
-
- abort(403)
+ raise Unauthorized()
def key_view(key):
@@ -596,13 +546,13 @@ class SuperUserServiceKeyManagement(ApiResource):
@require_scope(scopes.SUPERUSER)
def get(self):
if SuperUserPermission().can():
- keys = model.service_keys.list_all_keys()
+ keys = pre_oci_model.list_all_service_keys()
return jsonify({
- 'keys': [key_view(key) for key in keys],
+ 'keys': [key.to_dict() for key in keys],
})
- abort(403)
+ raise Unauthorized()
@require_fresh_login
@verify_not_prod
@@ -612,17 +562,20 @@ class SuperUserServiceKeyManagement(ApiResource):
def post(self):
if SuperUserPermission().can():
body = request.get_json()
+ key_name = body.get('name', '')
+ if not validate_service_key_name(key_name):
+ raise InvalidRequest('Invalid service key friendly name: %s' % key_name)
# Ensure we have a valid expiration date if specified.
expiration_date = body.get('expiration', None)
if expiration_date is not None:
try:
expiration_date = datetime.utcfromtimestamp(float(expiration_date))
- except ValueError:
- abort(400)
+ except ValueError as ve:
+ raise InvalidRequest('Invalid expiration date: %s' % ve)
if expiration_date <= datetime.now():
- abort(400)
+ raise InvalidRequest('Expiration date cannot be in the past')
# Create the metadata for the key.
user = get_authenticated_user()
@@ -630,23 +583,23 @@ class SuperUserServiceKeyManagement(ApiResource):
metadata.update({
'created_by': 'Quay Superuser Panel',
'creator': user.username,
- 'ip': request.remote_addr,
+ 'ip': get_request_ip(),
})
# Generate a key with a private key that we *never save*.
- (private_key, key) = model.service_keys.generate_service_key(body['service'], expiration_date,
- metadata=metadata,
- name=body.get('name', ''))
+ (private_key, key_id) = pre_oci_model.generate_service_key(body['service'], expiration_date,
+ metadata=metadata,
+ name=key_name)
# Auto-approve the service key.
- model.service_keys.approve_service_key(key.kid, user, ServiceKeyApprovalType.SUPERUSER,
- notes=body.get('notes', ''))
+ pre_oci_model.approve_service_key(key_id, user, ServiceKeyApprovalType.SUPERUSER,
+ notes=body.get('notes', ''))
# Log the creation and auto-approval of the service key.
key_log_metadata = {
- 'kid': key.kid,
+ 'kid': key_id,
'preshared': True,
'service': body['service'],
- 'name': body.get('name', ''),
+ 'name': key_name,
'expiration_date': expiration_date,
'auto_approved': True,
}
@@ -655,14 +608,14 @@ class SuperUserServiceKeyManagement(ApiResource):
log_action('service_key_approve', None, key_log_metadata)
return jsonify({
- 'kid': key.kid,
- 'name': body.get('name', ''),
+ 'kid': key_id,
+ 'name': key_name,
'service': body['service'],
'public_key': private_key.publickey().exportKey('PEM'),
'private_key': private_key.exportKey('PEM'),
})
- abort(403)
+ raise Unauthorized()
@resource('/v1/superuser/keys/<kid>')
@@ -698,12 +651,12 @@ class SuperUserServiceKey(ApiResource):
def get(self, kid):
if SuperUserPermission().can():
try:
- key = model.service_keys.get_service_key(kid, approved_only=False, alive_only=False)
- return jsonify(key_view(key))
- except model.service_keys.ServiceKeyDoesNotExist:
- abort(404)
+ key = pre_oci_model.get_service_key(kid, approved_only=False, alive_only=False)
+ return jsonify(key.to_dict())
+ except ServiceKeyDoesNotExist:
+ raise NotFound()
- abort(403)
+ raise Unauthorized()
@require_fresh_login
@verify_not_prod
@@ -714,9 +667,9 @@ class SuperUserServiceKey(ApiResource):
if SuperUserPermission().can():
body = request.get_json()
try:
- key = model.service_keys.get_service_key(kid, approved_only=False, alive_only=False)
- except model.service_keys.ServiceKeyDoesNotExist:
- abort(404)
+ key = pre_oci_model.get_service_key(kid, approved_only=False, alive_only=False)
+ except ServiceKeyDoesNotExist:
+ raise NotFound()
key_log_metadata = {
'kid': key.kid,
@@ -730,11 +683,11 @@ class SuperUserServiceKey(ApiResource):
if expiration_date is not None and expiration_date != '':
try:
expiration_date = datetime.utcfromtimestamp(float(expiration_date))
- except ValueError:
- abort(400)
+ except ValueError as ve:
+ raise InvalidRequest('Invalid expiration date: %s' % ve)
if expiration_date <= datetime.now():
- abort(400)
+ raise InvalidRequest('Cannot have an expiration date in the past')
key_log_metadata.update({
'old_expiration_date': key.expiration_date,
@@ -742,17 +695,20 @@ class SuperUserServiceKey(ApiResource):
})
log_action('service_key_extend', None, key_log_metadata)
- model.service_keys.set_key_expiration(kid, expiration_date)
-
+ pre_oci_model.set_key_expiration(kid, expiration_date)
if 'name' in body or 'metadata' in body:
- model.service_keys.update_service_key(kid, body.get('name'), body.get('metadata'))
+ key_name = body.get('name')
+ if not validate_service_key_name(key_name):
+ raise InvalidRequest('Invalid service key friendly name: %s' % key_name)
+
+ pre_oci_model.update_service_key(kid, key_name, body.get('metadata'))
log_action('service_key_modify', None, key_log_metadata)
- updated_key = model.service_keys.get_service_key(kid, approved_only=False, alive_only=False)
- return jsonify(key_view(updated_key))
+ updated_key = pre_oci_model.get_service_key(kid, approved_only=False, alive_only=False)
+ return jsonify(updated_key.to_dict())
- abort(403)
+ raise Unauthorized()
@require_fresh_login
@verify_not_prod
@@ -761,9 +717,9 @@ class SuperUserServiceKey(ApiResource):
def delete(self, kid):
if SuperUserPermission().can():
try:
- key = model.service_keys.delete_service_key(kid)
- except model.service_keys.ServiceKeyDoesNotExist:
- abort(404)
+ key = pre_oci_model.delete_service_key(kid)
+ except ServiceKeyDoesNotExist:
+ raise NotFound()
key_log_metadata = {
'kid': kid,
@@ -776,7 +732,7 @@ class SuperUserServiceKey(ApiResource):
log_action('service_key_delete', None, key_log_metadata)
return make_response('', 204)
- abort(403)
+ raise Unauthorized()
@resource('/v1/superuser/approvedkeys/<kid>')
@@ -809,8 +765,8 @@ class SuperUserServiceKeyApproval(ApiResource):
notes = request.get_json().get('notes', '')
approver = get_authenticated_user()
try:
- key = model.service_keys.approve_service_key(kid, approver, ServiceKeyApprovalType.SUPERUSER,
- notes=notes)
+ key = pre_oci_model.approve_service_key(kid, approver, ServiceKeyApprovalType.SUPERUSER,
+ notes=notes)
# Log the approval of the service key.
key_log_metadata = {
@@ -821,168 +777,14 @@ class SuperUserServiceKeyApproval(ApiResource):
}
log_action('service_key_approve', None, key_log_metadata)
- except model.ServiceKeyDoesNotExist:
- abort(404)
- except model.ServiceKeyAlreadyApproved:
+ except ServiceKeyDoesNotExist:
+ raise NotFound()
+ except ServiceKeyAlreadyApproved:
pass
return make_response('', 201)
- abort(403)
-
-
-@resource('/v1/superuser/customcerts')
-@internal_only
-@show_if(features.SUPER_USERS)
-class SuperUserCustomCertificates(ApiResource):
- """ Resource for managing custom certificates. """
- @nickname('getCustomCertificates')
- @require_fresh_login
- @require_scope(scopes.SUPERUSER)
- @verify_not_prod
- def get(self):
- if SuperUserPermission().can():
- has_extra_certs_path = config_provider.volume_file_exists(EXTRA_CA_DIRECTORY)
- extra_certs_found = config_provider.list_volume_directory(EXTRA_CA_DIRECTORY)
- if extra_certs_found is None:
- return {
- 'status': 'file' if has_extra_certs_path else 'none',
- }
-
- cert_views = []
- for extra_cert_path in extra_certs_found:
- try:
- cert_full_path = os.path.join(EXTRA_CA_DIRECTORY, extra_cert_path)
- with config_provider.get_volume_file(cert_full_path) as f:
- certificate = load_certificate(f.read())
- cert_views.append({
- 'path': extra_cert_path,
- 'names': list(certificate.names),
- 'expired': certificate.expired,
- })
- except CertInvalidException as cie:
- cert_views.append({
- 'path': extra_cert_path,
- 'error': cie.message,
- })
- except IOError as ioe:
- cert_views.append({
- 'path': extra_cert_path,
- 'error': ioe.message,
- })
-
- return {
- 'status': 'directory',
- 'certs': cert_views,
- }
-
- abort(403)
-
-
-@resource('/v1/superuser/customcerts/<certpath>')
-@internal_only
-@show_if(features.SUPER_USERS)
-class SuperUserCustomCertificate(ApiResource):
- """ Resource for managing a custom certificate. """
- @nickname('uploadCustomCertificate')
- @require_fresh_login
- @require_scope(scopes.SUPERUSER)
- @verify_not_prod
- def post(self, certpath):
- if SuperUserPermission().can():
- uploaded_file = request.files['file']
- if not uploaded_file:
- abort(400)
-
- certpath = pathvalidate.sanitize_filename(certpath)
- cert_full_path = os.path.join(EXTRA_CA_DIRECTORY, certpath)
- config_provider.save_volume_file(cert_full_path, uploaded_file)
- return '', 204
-
- abort(403)
-
- @nickname('deleteCustomCertificate')
- @require_fresh_login
- @require_scope(scopes.SUPERUSER)
- @verify_not_prod
- def delete(self, certpath):
- if SuperUserPermission().can():
- cert_full_path = os.path.join(EXTRA_CA_DIRECTORY, certpath)
- config_provider.remove_volume_file(cert_full_path)
- return '', 204
-
- abort(403)
-
-
-@resource('/v1/superuser/license')
-@internal_only
-@show_if(features.SUPER_USERS)
-class SuperUserLicense(ApiResource):
- """ Resource for getting and setting a license. """
- schemas = {
- 'UpdateLicense': {
- 'type': 'object',
- 'description': 'Updates a license',
- 'required': [
- 'license',
- ],
- 'properties': {
- 'license': {
- 'type': 'string'
- },
- },
- },
- }
-
- @nickname('getLicense')
- @require_fresh_login
- @require_scope(scopes.SUPERUSER)
- @verify_not_prod
- def get(self):
- """ Returns the current decoded license. """
- if SuperUserPermission().can():
- try:
- decoded_license = config_provider.get_license()
- except LicenseDecodeError as le:
- raise InvalidRequest(le.message)
-
- statuses = decoded_license.validate(app.config)
- all_met = all(status.is_met() for status in statuses)
-
- return {
- 'status': [status.as_dict(for_private=True) for status in statuses],
- 'success': all_met,
- }
-
- abort(403)
-
- @nickname('updateLicense')
- @require_fresh_login
- @require_scope(scopes.SUPERUSER)
- @verify_not_prod
- @validate_json_request('UpdateLicense')
- def put(self):
- """ Validates the given license contents and then saves it to the config volume. """
- if SuperUserPermission().can():
- license_contents = request.get_json()['license']
- try:
- decoded_license = decode_license(license_contents)
- except LicenseDecodeError as le:
- raise InvalidRequest(le.message)
-
- statuses = decoded_license.validate(app.config)
- all_met = all(status.is_met() for status in statuses)
- if all_met:
- # Save the license and update the license check thread.
- config_provider.save_license(license_contents)
- license_validator.compute_license_sufficiency()
-
- return {
- 'status': [status.as_dict(for_private=True) for status in statuses],
- 'success': all_met,
- }
-
- abort(403)
+ raise Unauthorized()
@resource('/v1/superuser/<build_uuid>/logs')
@@ -990,16 +792,21 @@ class SuperUserLicense(ApiResource):
@show_if(features.SUPER_USERS)
class SuperUserRepositoryBuildLogs(ApiResource):
""" Resource for loading repository build logs for the superuser. """
+
@require_fresh_login
@verify_not_prod
@nickname('getRepoBuildLogsSuperUser')
@require_scope(scopes.SUPERUSER)
def get(self, build_uuid):
""" Return the build logs for the build specified by the build uuid. """
- if not SuperUserPermission().can():
- abort(403)
+ if SuperUserPermission().can():
+ try:
+ repo_build = pre_oci_model.get_repository_build(build_uuid)
+ return get_logs_or_log_url(repo_build)
+ except InvalidRepositoryBuildException as e:
+ raise InvalidResponse(str(e))
- return get_logs_or_log_url(model.build.get_repository_build(build_uuid))
+ raise Unauthorized()
@resource('/v1/superuser/<build_uuid>/status')
@@ -1008,16 +815,21 @@ class SuperUserRepositoryBuildLogs(ApiResource):
@show_if(features.SUPER_USERS)
class SuperUserRepositoryBuildStatus(ApiResource):
""" Resource for dealing with repository build status. """
+
@require_fresh_login
@verify_not_prod
@nickname('getRepoBuildStatusSuperUser')
@require_scope(scopes.SUPERUSER)
def get(self, build_uuid):
""" Return the status for the builds specified by the build uuids. """
- if not SuperUserPermission().can():
- abort(403)
- build = model.build.get_repository_build(build_uuid)
- return build_status_view(build)
+ if SuperUserPermission().can():
+ try:
+ build = pre_oci_model.get_repository_build(build_uuid)
+ except InvalidRepositoryBuildException as e:
+ raise InvalidResponse(str(e))
+ return build.to_dict()
+
+ raise Unauthorized()
@resource('/v1/superuser/<build_uuid>/build')
@@ -1026,18 +838,19 @@ class SuperUserRepositoryBuildStatus(ApiResource):
@show_if(features.SUPER_USERS)
class SuperUserRepositoryBuildResource(ApiResource):
""" Resource for dealing with repository builds as a super user. """
+
@require_fresh_login
@verify_not_prod
@nickname('getRepoBuildSuperUser')
@require_scope(scopes.SUPERUSER)
def get(self, build_uuid):
""" Returns information about a build. """
- if not SuperUserPermission().can():
- abort(403)
+ if SuperUserPermission().can():
+ try:
+ build = pre_oci_model.get_repository_build(build_uuid)
+ except InvalidRepositoryBuildException:
+ raise NotFound()
- try:
- build = model.build.get_repository_build(build_uuid)
- except model.build.InvalidRepositoryBuildException:
- raise NotFound()
+ return build.to_dict()
- return build_status_view(build)
+ raise Unauthorized()
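A recurring change in the hunks above is replacing bare `abort(403)`/`abort(400)`/`abort(404)` with `raise Unauthorized()`, `raise InvalidRequest(...)`, and `raise NotFound()`. A simplified sketch of why that helps (this is not Quay's actual endpoints.exception module, just the pattern it follows): each exception type owns its status code and a structured body, so call sites stop hard-coding HTTP details:

class ApiException(Exception):
  status_code = 500

class Unauthorized(ApiException):
  status_code = 403

class NotFound(ApiException):
  status_code = 404

class InvalidRequest(ApiException):
  status_code = 400

def render_api_error(exc):
  # A single error handler turns any ApiException into a JSON response.
  return {'error_message': str(exc) or exc.__class__.__name__}, exc.status_code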
diff --git a/endpoints/api/superuser_models_interface.py b/endpoints/api/superuser_models_interface.py
new file mode 100644
index 000000000..e03d98e8c
--- /dev/null
+++ b/endpoints/api/superuser_models_interface.py
@@ -0,0 +1,335 @@
+import json
+from abc import ABCMeta, abstractmethod
+from collections import namedtuple
+from datetime import datetime
+
+from dateutil.relativedelta import relativedelta
+from six import add_metaclass
+from tzlocal import get_localzone
+
+from app import avatar, superusers
+from buildtrigger.basehandler import BuildTriggerHandler
+from data import model
+from endpoints.api import format_date
+from util.morecollections import AttrDict
+
+
+def user_view(user):
+ return {
+ 'name': user.username,
+ 'kind': 'user',
+ 'is_robot': user.robot,
+ }
+
+
+class BuildTrigger(
+ namedtuple('BuildTrigger', ['uuid', 'service_name', 'pull_robot', 'can_read', 'can_admin', 'for_build'])):
+ """
+ BuildTrigger represents a trigger that is associated with a build
+ :type uuid: string
+ :type service_name: string
+ :type pull_robot: User
+ :type can_read: boolean
+ :type can_admin: boolean
+ :type for_build: boolean
+ """
+
+ def to_dict(self):
+ if not self.uuid:
+ return None
+
+ build_trigger = BuildTriggerHandler.get_handler(self)
+ build_source = build_trigger.config.get('build_source')
+
+ repo_url = build_trigger.get_repository_url() if build_source else None
+ can_read = self.can_read or self.can_admin
+
+ trigger_data = {
+ 'id': self.uuid,
+ 'service': self.service_name,
+ 'is_active': build_trigger.is_active(),
+
+ 'build_source': build_source if can_read else None,
+ 'repository_url': repo_url if can_read else None,
+
+ 'config': build_trigger.config if self.can_admin else {},
+ 'can_invoke': self.can_admin,
+ }
+
+ if not self.for_build and self.can_admin and self.pull_robot:
+ trigger_data['pull_robot'] = user_view(self.pull_robot)
+
+ return trigger_data
+
+
+class RepositoryBuild(namedtuple('RepositoryBuild',
+ ['uuid', 'logs_archived', 'repository_namespace_user_username', 'repository_name',
+ 'can_write', 'can_read', 'pull_robot', 'resource_key', 'trigger', 'display_name',
+ 'started', 'job_config', 'phase', 'status', 'error', 'archive_url'])):
+ """
+ RepositoryBuild represents a build associated with a repository
+ :type uuid: string
+ :type logs_archived: boolean
+ :type repository_namespace_user_username: string
+ :type repository_name: string
+ :type can_write: boolean
+ :type can_read: boolean
+ :type pull_robot: User
+ :type resource_key: string
+ :type trigger: Trigger
+ :type display_name: string
+ :type started: Date
+ :type job_config: {Any -> Any}
+ :type phase: string
+ :type status: string
+ :type error: string
+ :type archive_url: string
+ """
+
+ def to_dict(self):
+
+ resp = {
+ 'id': self.uuid,
+ 'phase': self.phase,
+ 'started': format_date(self.started),
+ 'display_name': self.display_name,
+ 'status': self.status or {},
+ 'subdirectory': self.job_config.get('build_subdir', ''),
+ 'dockerfile_path': self.job_config.get('build_subdir', ''),
+ 'context': self.job_config.get('context', ''),
+ 'tags': self.job_config.get('docker_tags', []),
+ 'manual_user': self.job_config.get('manual_user', None),
+ 'is_writer': self.can_write,
+ 'trigger': self.trigger.to_dict(),
+ 'trigger_metadata': self.job_config.get('trigger_metadata', None) if self.can_read else None,
+ 'resource_key': self.resource_key,
+ 'pull_robot': user_view(self.pull_robot) if self.pull_robot else None,
+ 'repository': {
+ 'namespace': self.repository_namespace_user_username,
+ 'name': self.repository_name
+ },
+ 'error': self.error,
+ }
+
+ if self.can_write:
+ if self.resource_key is not None:
+ resp['archive_url'] = self.archive_url
+ elif self.job_config.get('archive_url', None):
+ resp['archive_url'] = self.job_config['archive_url']
+
+ return resp
+
+
+class Approval(namedtuple('Approval', ['approver', 'approval_type', 'approved_date', 'notes'])):
+ """
+ Approval represents whether a key has been approved or not
+ :type approver: User
+ :type approval_type: string
+ :type approved_date: Date
+ :type notes: string
+ """
+
+ def to_dict(self):
+ return {
+ 'approver': self.approver.to_dict() if self.approver else None,
+ 'approval_type': self.approval_type,
+ 'approved_date': self.approved_date,
+ 'notes': self.notes,
+ }
+
+
+class ServiceKey(namedtuple('ServiceKey', ['name', 'kid', 'service', 'jwk', 'metadata', 'created_date',
+ 'expiration_date', 'rotation_duration', 'approval'])):
+ """
+ ServiceKey is an apostille signing key
+ :type name: string
+ :type kid: string
+ :type service: string
+ :type jwk: string
+ :type metadata: string
+ :type created_date: Date
+ :type expiration_date: Date
+ :type rotation_duration: Date
+ :type approval: Approval
+
+ """
+
+ def to_dict(self):
+ return {
+ 'name': self.name,
+ 'kid': self.kid,
+ 'service': self.service,
+ 'jwk': self.jwk,
+ 'metadata': self.metadata,
+ 'created_date': self.created_date,
+ 'expiration_date': self.expiration_date,
+ 'rotation_duration': self.rotation_duration,
+ 'approval': self.approval.to_dict() if self.approval is not None else None,
+ }
+
+
+class User(namedtuple('User', ['username', 'email', 'verified', 'enabled', 'robot'])):
+ """
+ User represents a single user.
+ :type username: string
+ :type email: string
+ :type verified: boolean
+ :type enabled: boolean
+ :type robot: boolean
+ """
+
+ def to_dict(self):
+ user_data = {
+ 'kind': 'user',
+ 'name': self.username,
+ 'username': self.username,
+ 'email': self.email,
+ 'verified': self.verified,
+ 'avatar': avatar.get_data_for_user(self),
+ 'super_user': superusers.is_superuser(self.username),
+ 'enabled': self.enabled,
+ }
+
+ return user_data
+
+
+class Organization(namedtuple('Organization', ['username', 'email'])):
+ """
+ Organization represents a single org.
+ :type username: string
+ :type email: string
+ """
+
+ def to_dict(self):
+ return {
+ 'name': self.username,
+ 'email': self.email,
+ 'avatar': avatar.get_data_for_org(self),
+ }
+
+
+@add_metaclass(ABCMeta)
+class SuperuserDataInterface(object):
+ """
+ Interface that represents all data store interactions required by the superuser API.
+ """
+
+ @abstractmethod
+ def get_organizations(self):
+ """
+ Returns a list of Organization
+ """
+
+ @abstractmethod
+ def get_active_users(self):
+ """
+ Returns a list of User
+ """
+
+ @abstractmethod
+ def create_install_user(self, username, password, email):
+ """
+ Returns the created user and confirmation code for email confirmation
+ """
+
+ @abstractmethod
+ def get_nonrobot_user(self, username):
+ """
+ Returns a User
+ """
+
+ @abstractmethod
+ def create_reset_password_email_code(self, email):
+ """
+ Returns a recover password code
+ """
+
+ @abstractmethod
+ def mark_user_for_deletion(self, username):
+ """
+ Returns None
+ """
+
+ @abstractmethod
+ def change_password(self, username, password):
+ """
+ Returns None
+ """
+
+ @abstractmethod
+ def update_email(self, username, email, auto_verify):
+ """
+ Returns None
+ """
+
+ @abstractmethod
+ def update_enabled(self, username, enabled):
+ """
+ Returns None
+ """
+
+ @abstractmethod
+ def take_ownership(self, namespace, authed_user):
+ """
+ Returns id of entity and whether the entity was a user
+ """
+
+ @abstractmethod
+ def mark_organization_for_deletion(self, name):
+ """
+ Returns None
+ """
+
+ @abstractmethod
+ def change_organization_name(self, old_org_name, new_org_name):
+ """
+ Returns updated Organization
+ """
+
+ @abstractmethod
+ def list_all_service_keys(self):
+ """
+ Returns a list of service keys
+ """
+
+ @abstractmethod
+ def generate_service_key(self, service, expiration_date, kid=None, name='', metadata=None, rotation_duration=None):
+ """
+ Returns a tuple of private key and public key id
+ """
+
+ @abstractmethod
+ def approve_service_key(self, kid, approver, approval_type, notes=''):
+ """
+ Returns the approved Key
+ """
+
+ @abstractmethod
+ def get_service_key(self, kid, service=None, alive_only=True, approved_only=True):
+ """
+ Returns ServiceKey
+ """
+
+ @abstractmethod
+ def set_key_expiration(self, kid, expiration_date):
+ """
+ Returns None
+ """
+
+ @abstractmethod
+ def update_service_key(self, kid, name=None, metadata=None):
+ """
+ Returns None
+ """
+
+ @abstractmethod
+ def delete_service_key(self, kid):
+ """
+ Returns deleted ServiceKey
+ """
+
+ @abstractmethod
+ def get_repository_build(self, uuid):
+ """
+ Returns RepositoryBuild
+ """
diff --git a/endpoints/api/superuser_models_pre_oci.py b/endpoints/api/superuser_models_pre_oci.py
new file mode 100644
index 000000000..0458f9226
--- /dev/null
+++ b/endpoints/api/superuser_models_pre_oci.py
@@ -0,0 +1,182 @@
+import features
+
+from flask import request
+
+from app import all_queues, userfiles, namespace_gc_queue
+from auth.permissions import ReadRepositoryPermission, ModifyRepositoryPermission, AdministerRepositoryPermission
+from data import model, database
+from endpoints.api.build import get_job_config, _get_build_status
+from endpoints.api.superuser_models_interface import BuildTrigger
+from endpoints.api.superuser_models_interface import SuperuserDataInterface, Organization, User, \
+ ServiceKey, Approval, RepositoryBuild
+from util.request import get_request_ip
+
+
+def _create_user(user):
+ if user is None:
+ return None
+ return User(user.username, user.email, user.verified, user.enabled, user.robot)
+
+
+def _create_key(key):
+ approval = None
+ if key.approval is not None:
+ approval = Approval(_create_user(key.approval.approver), key.approval.approval_type, key.approval.approved_date,
+ key.approval.notes)
+
+ return ServiceKey(key.name, key.kid, key.service, key.jwk, key.metadata, key.created_date, key.expiration_date,
+ key.rotation_duration, approval)
+
+
+class ServiceKeyDoesNotExist(Exception):
+ pass
+
+
+class ServiceKeyAlreadyApproved(Exception):
+ pass
+
+
+class InvalidRepositoryBuildException(Exception):
+ pass
+
+
+class PreOCIModel(SuperuserDataInterface):
+ """
+ PreOCIModel implements the data model for the SuperUser using a database schema
+ before it was changed to support the OCI specification.
+ """
+
+ def get_repository_build(self, uuid):
+ try:
+ build = model.build.get_repository_build(uuid)
+ except model.InvalidRepositoryBuildException as e:
+ raise InvalidRepositoryBuildException(str(e))
+
+ repo_namespace = build.repository_namespace_user_username
+ repo_name = build.repository_name
+
+ can_read = ReadRepositoryPermission(repo_namespace, repo_name).can()
+ can_write = ModifyRepositoryPermission(repo_namespace, repo_name).can()
+ can_admin = AdministerRepositoryPermission(repo_namespace, repo_name).can()
+ job_config = get_job_config(build.job_config)
+ phase, status, error = _get_build_status(build)
+ url = userfiles.get_file_url(build.resource_key, get_request_ip(), requires_cors=True)
+
+ return RepositoryBuild(build.uuid, build.logs_archived, repo_namespace, repo_name, can_write, can_read,
+ _create_user(build.pull_robot), build.resource_key,
+ BuildTrigger(build.trigger.uuid, build.trigger.service.name,
+ _create_user(build.trigger.pull_robot), can_read, can_admin, True),
+ build.display_name, build.started, job_config, phase, status, error, url)
+
+ def delete_service_key(self, kid):
+ try:
+ key = model.service_keys.delete_service_key(kid)
+ except model.ServiceKeyDoesNotExist:
+ raise ServiceKeyDoesNotExist
+ return _create_key(key)
+
+ def update_service_key(self, kid, name=None, metadata=None):
+ model.service_keys.update_service_key(kid, name, metadata)
+
+ def set_key_expiration(self, kid, expiration_date):
+ model.service_keys.set_key_expiration(kid, expiration_date)
+
+ def get_service_key(self, kid, service=None, alive_only=True, approved_only=True):
+ try:
+ key = model.service_keys.get_service_key(kid, approved_only=approved_only, alive_only=alive_only)
+ return _create_key(key)
+ except model.ServiceKeyDoesNotExist:
+ raise ServiceKeyDoesNotExist
+
+ def approve_service_key(self, kid, approver, approval_type, notes=''):
+ try:
+ key = model.service_keys.approve_service_key(kid, approval_type, approver=approver, notes=notes)
+ return _create_key(key)
+ except model.ServiceKeyDoesNotExist:
+ raise ServiceKeyDoesNotExist
+ except model.ServiceKeyAlreadyApproved:
+ raise ServiceKeyAlreadyApproved
+
+ def generate_service_key(self, service, expiration_date, kid=None, name='', metadata=None, rotation_duration=None):
+ (private_key, key) = model.service_keys.generate_service_key(service, expiration_date, metadata=metadata, name=name)
+
+ return private_key, key.kid
+
+ def list_all_service_keys(self):
+ keys = model.service_keys.list_all_keys()
+ return [_create_key(key) for key in keys]
+
+ def change_organization_name(self, old_org_name, new_org_name):
+ org = model.organization.get_organization(old_org_name)
+ if new_org_name is not None:
+ org = model.user.change_username(org.id, new_org_name)
+
+ return Organization(org.username, org.email)
+
+ def mark_organization_for_deletion(self, name):
+ org = model.organization.get_organization(name)
+ model.user.mark_namespace_for_deletion(org, all_queues, namespace_gc_queue, force=True)
+
+ def take_ownership(self, namespace, authed_user):
+ entity = model.user.get_user_or_org(namespace)
+ if entity is None:
+ return None, False
+
+ was_user = not entity.organization
+ if entity.organization:
+ # Add the superuser as an admin to the owners team of the org.
+ model.organization.add_user_as_admin(authed_user, entity)
+ else:
+ # If the entity is a user, convert it to an organization and add the current superuser
+ # as the admin.
+ model.organization.convert_user_to_organization(entity, authed_user)
+ return entity.id, was_user
+
+ def update_enabled(self, username, enabled):
+ user = model.user.get_nonrobot_user(username)
+ model.user.update_enabled(user, bool(enabled))
+
+ def update_email(self, username, email, auto_verify):
+ user = model.user.get_nonrobot_user(username)
+ model.user.update_email(user, email, auto_verify)
+
+ def change_password(self, username, password):
+ user = model.user.get_nonrobot_user(username)
+ model.user.change_password(user, password)
+
+ def mark_user_for_deletion(self, username):
+ user = model.user.get_nonrobot_user(username)
+ model.user.mark_namespace_for_deletion(user, all_queues, namespace_gc_queue, force=True)
+
+ def create_reset_password_email_code(self, email):
+ code = model.user.create_reset_password_email_code(email)
+ return code.code
+
+ def get_nonrobot_user(self, username):
+ user = model.user.get_nonrobot_user(username)
+ if user is None:
+ return None
+ return _create_user(user)
+
+ def create_install_user(self, username, password, email):
+ prompts = model.user.get_default_user_prompts(features)
+ user = model.user.create_user(username, password, email, auto_verify=not features.MAILING,
+ email_required=features.MAILING, prompts=prompts)
+
+ return_user = _create_user(user)
+ # If mailing is turned on, send the user a verification email.
+ if features.MAILING:
+ confirmation = model.user.create_confirm_email_code(user)
+ return return_user, confirmation.code
+
+ return return_user, ''
+
+ def get_active_users(self, disabled=True):
+ users = model.user.get_active_users(disabled=disabled)
+ return [_create_user(user) for user in users]
+
+ def get_organizations(self):
+ return [Organization(org.username, org.email) for org in model.organization.get_organizations()]
+
+
+pre_oci_model = PreOCIModel()
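Note the translation pattern used throughout PreOCIModel: data-layer exceptions are caught at the model boundary and re-raised as the API-level types defined at the top of the file, so endpoints never import data.model directly. An isolated, runnable sketch of the same pattern (the names here are stand-ins, not Quay's real modules):

class DataLayerKeyMissing(Exception):
  """ Plays the role of model.ServiceKeyDoesNotExist. """

class ServiceKeyDoesNotExist(Exception):
  """ Plays the role of the API-level exception. """

_KEYS = {'kid-1': 'fake-jwk'}

def _data_layer_get(kid):
  if kid not in _KEYS:
    raise DataLayerKeyMissing()
  return _KEYS[kid]

def get_service_key(kid):
  try:
    return _data_layer_get(kid)
  except DataLayerKeyMissing:
    raise ServiceKeyDoesNotExist()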
diff --git a/endpoints/api/tag.py b/endpoints/api/tag.py
index 51ace9d9c..573f0fc97 100644
--- a/endpoints/api/tag.py
+++ b/endpoints/api/tag.py
@@ -1,59 +1,87 @@
""" Manage the tags of a repository. """
-
+from datetime import datetime
from flask import request, abort
-from endpoints.api import (resource, nickname, require_repo_read, require_repo_write,
- RepositoryParamResource, log_action, validate_json_request,
- path_param, parse_args, query_param, truthy_bool)
-from endpoints.exception import NotFound
-from endpoints.api.image import image_view
-from data import model
+from app import storage, docker_v2_signing_key
from auth.auth_context import get_authenticated_user
+from data.registry_model import registry_model
+from endpoints.api import (resource, nickname, require_repo_read, require_repo_write,
+ RepositoryParamResource, log_action, validate_json_request, path_param,
+ parse_args, query_param, truthy_bool, disallow_for_app_repositories,
+ format_date, disallow_for_non_normal_repositories)
+from endpoints.api.image import image_dict
+from endpoints.exception import NotFound, InvalidRequest
from util.names import TAG_ERROR, TAG_REGEX
+def _tag_dict(tag):
+ tag_info = {
+ 'name': tag.name,
+ 'reversion': tag.reversion,
+ }
+
+ if tag.lifetime_start_ts > 0:
+ tag_info['start_ts'] = tag.lifetime_start_ts
+
+ if tag.lifetime_end_ts > 0:
+ tag_info['end_ts'] = tag.lifetime_end_ts
+
+ # TODO: Remove this once fully on OCI data model.
+ if tag.legacy_image_if_present:
+ tag_info['docker_image_id'] = tag.legacy_image.docker_image_id
+ tag_info['image_id'] = tag.legacy_image.docker_image_id
+ tag_info['size'] = tag.legacy_image.aggregate_size
+
+ # TODO: Remove this check once fully on OCI data model.
+ if tag.manifest_digest:
+ tag_info['manifest_digest'] = tag.manifest_digest
+
+ if tag.manifest:
+ tag_info['is_manifest_list'] = tag.manifest.is_manifest_list
+
+ if tag.lifetime_start_ts > 0:
+ last_modified = format_date(datetime.utcfromtimestamp(tag.lifetime_start_ts))
+ tag_info['last_modified'] = last_modified
+
+ if tag.lifetime_end_ts is not None:
+ expiration = format_date(datetime.utcfromtimestamp(tag.lifetime_end_ts))
+ tag_info['expiration'] = expiration
+
+ return tag_info
+
+
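For reference, `_tag_dict` above serializes a tag roughly as follows (all values are made up; the `docker_image_id`/`image_id`/`size` keys appear only while the legacy pre-OCI image row is still present):

example_tag = {
  'name': 'latest',
  'reversion': False,
  'start_ts': 1539108440,
  'manifest_digest': 'sha256:e3b0c44298fc1c149afbf4c8996fb924...',
  'is_manifest_list': False,
  'last_modified': 'Tue, 09 Oct 2018 18:47:20 -0000',
  'docker_image_id': '0123abcd...',
  'image_id': '0123abcd...',
  'size': 12345,
}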
@resource('/v1/repository/<apirepopath:repository>/tag/')
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
class ListRepositoryTags(RepositoryParamResource):
""" Resource for listing full repository tag history, alive *and dead*. """
@require_repo_read
+ @disallow_for_app_repositories
@parse_args()
@query_param('specificTag', 'Filters the tags to the specific tag.', type=str, default='')
- @query_param('limit', 'Limit to the number of results to return per page. Max 100.', type=int, default=50)
+ @query_param('limit', 'Limit to the number of results to return per page. Max 100.', type=int,
+ default=50)
@query_param('page', 'Page index for the results. Default 1.', type=int, default=1)
+ @query_param('onlyActiveTags', 'Filter to only active tags.', type=truthy_bool, default=False)
@nickname('listRepoTags')
def get(self, namespace, repository, parsed_args):
- repo = model.repository.get_repository(namespace, repository)
- if not repo:
- raise NotFound()
-
- def tag_view(tag):
- tag_info = {
- 'name': tag.name,
- 'docker_image_id': tag.image.docker_image_id,
- 'reversion': tag.reversion,
- }
-
- if tag.lifetime_start_ts > 0:
- tag_info['start_ts'] = tag.lifetime_start_ts
-
- if tag.lifetime_end_ts > 0:
- tag_info['end_ts'] = tag.lifetime_end_ts
-
- return tag_info
-
specific_tag = parsed_args.get('specificTag') or None
-
page = max(1, parsed_args.get('page', 1))
limit = min(100, max(1, parsed_args.get('limit', 50)))
- tags, has_additional = model.tag.list_repository_tag_history(repo, page=page, size=limit,
- specific_tag=specific_tag)
+ active_tags_only = parsed_args.get('onlyActiveTags')
+ repo_ref = registry_model.lookup_repository(namespace, repository)
+ if repo_ref is None:
+ raise NotFound()
+
+ history, has_more = registry_model.list_repository_tag_history(repo_ref, page=page,
+ size=limit,
+ specific_tag_name=specific_tag,
+ active_tags_only=active_tags_only)
return {
- 'tags': [tag_view(tag) for tag in tags],
+ 'tags': [_tag_dict(tag) for tag in history],
'page': page,
- 'has_additional': has_additional,
+ 'has_additional': has_more,
}
@@ -63,64 +91,134 @@ class ListRepositoryTags(RepositoryParamResource):
class RepositoryTag(RepositoryParamResource):
""" Resource for managing repository tags. """
schemas = {
- 'MoveTag': {
+ 'ChangeTag': {
'type': 'object',
- 'description': 'Description of to which image a new or existing tag should point',
- 'required': [
- 'image',
- ],
+ 'description': 'Makes changes to a specific tag',
'properties': {
'image': {
- 'type': 'string',
- 'description': 'Image identifier to which the tag should point',
+ 'type': ['string', 'null'],
+ 'description': '(Deprecated: Use `manifest_digest`) Image to which the tag should point.',
+ },
+ 'manifest_digest': {
+ 'type': ['string', 'null'],
+ 'description': '(If specified) The manifest digest to which the tag should point',
+ },
+ 'expiration': {
+ 'type': ['number', 'null'],
+ 'description': '(If specified) The expiration for the image',
},
},
},
}
@require_repo_write
- @nickname('changeTagImage')
- @validate_json_request('MoveTag')
+ @disallow_for_app_repositories
+ @disallow_for_non_normal_repositories
+ @nickname('changeTag')
+ @validate_json_request('ChangeTag')
def put(self, namespace, repository, tag):
""" Change which image a tag points to or create a new tag."""
-
if not TAG_REGEX.match(tag):
abort(400, TAG_ERROR)
- image_id = request.get_json()['image']
- image = model.image.get_repo_image(namespace, repository, image_id)
- if not image:
+ repo_ref = registry_model.lookup_repository(namespace, repository)
+ if repo_ref is None:
raise NotFound()
- original_image_id = None
- try:
- original_tag_image = model.tag.get_repo_tag_image(image.repository, tag)
- if original_tag_image:
- original_image_id = original_tag_image.docker_image_id
- except model.DataModelException:
- # This is a new tag.
- pass
+ if 'expiration' in request.get_json():
+ tag_ref = registry_model.get_repo_tag(repo_ref, tag)
+ if tag_ref is None:
+ raise NotFound()
- model.tag.create_or_update_tag(namespace, repository, tag, image_id)
+ expiration = request.get_json().get('expiration')
+ expiration_date = None
+ if expiration is not None:
+ try:
+ expiration_date = datetime.utcfromtimestamp(float(expiration))
+ except ValueError:
+ abort(400)
- username = get_authenticated_user().username
- log_action('move_tag' if original_image_id else 'create_tag', namespace,
- {'username': username, 'repo': repository, 'tag': tag,
- 'image': image_id, 'original_image': original_image_id},
- repo=model.repository.get_repository(namespace, repository))
+ if expiration_date <= datetime.utcnow():
+ abort(400)
+
+ existing_end_ts, ok = registry_model.change_repository_tag_expiration(tag_ref,
+ expiration_date)
+ if ok:
+ if not (existing_end_ts is None and expiration_date is None):
+ log_action('change_tag_expiration', namespace, {
+ 'username': get_authenticated_user().username,
+ 'repo': repository,
+ 'tag': tag,
+ 'namespace': namespace,
+ 'expiration_date': expiration_date,
+ 'old_expiration_date': existing_end_ts
+ }, repo_name=repository)
+ else:
+ raise InvalidRequest('Could not update tag expiration; Tag has probably changed')
+
+ if 'image' in request.get_json() or 'manifest_digest' in request.get_json():
+ existing_tag = registry_model.get_repo_tag(repo_ref, tag, include_legacy_image=True)
+
+ manifest_or_image = None
+ image_id = None
+ manifest_digest = None
+
+ if 'image' in request.get_json():
+ image_id = request.get_json()['image']
+ manifest_or_image = registry_model.get_legacy_image(repo_ref, image_id)
+ else:
+ manifest_digest = request.get_json()['manifest_digest']
+ manifest_or_image = registry_model.lookup_manifest_by_digest(repo_ref, manifest_digest,
+ require_available=True)
+
+ if manifest_or_image is None:
+ raise NotFound()
+
+ # TODO: Remove this check once fully on V22
+ existing_manifest_digest = None
+ if existing_tag:
+ existing_manifest = registry_model.get_manifest_for_tag(existing_tag)
+ existing_manifest_digest = existing_manifest.digest if existing_manifest else None
+
+ if not registry_model.retarget_tag(repo_ref, tag, manifest_or_image, storage,
+ docker_v2_signing_key):
+ raise InvalidRequest('Could not move tag')
+
+ username = get_authenticated_user().username
+
+ log_action('move_tag' if existing_tag else 'create_tag', namespace, {
+ 'username': username,
+ 'repo': repository,
+ 'tag': tag,
+ 'namespace': namespace,
+ 'image': image_id,
+ 'manifest_digest': manifest_digest,
+ 'original_image': (existing_tag.legacy_image.docker_image_id
+ if existing_tag and existing_tag.legacy_image_if_present
+ else None),
+ 'original_manifest_digest': existing_manifest_digest,
+ }, repo_name=repository)
return 'Updated', 201
@require_repo_write
+ @disallow_for_app_repositories
+ @disallow_for_non_normal_repositories
@nickname('deleteFullTag')
def delete(self, namespace, repository, tag):
""" Delete the specified repository tag. """
- model.tag.delete_tag(namespace, repository, tag)
+ repo_ref = registry_model.lookup_repository(namespace, repository)
+ if repo_ref is None:
+ raise NotFound()
+
+ registry_model.delete_tag(repo_ref, tag)
username = get_authenticated_user().username
log_action('delete_tag', namespace,
- {'username': username, 'repo': repository, 'tag': tag},
- repo=model.repository.get_repository(namespace, repository))
+ {'username': username,
+ 'repo': repository,
+ 'namespace': namespace,
+ 'tag': tag}, repo_name=repository)
return '', 204
@@ -130,95 +228,109 @@ class RepositoryTag(RepositoryParamResource):
@path_param('tag', 'The name of the tag')
class RepositoryTagImages(RepositoryParamResource):
""" Resource for listing the images in a specific repository tag. """
+
@require_repo_read
@nickname('listTagImages')
+ @disallow_for_app_repositories
@parse_args()
@query_param('owned', 'If specified, only images wholly owned by this tag are returned.',
type=truthy_bool, default=False)
def get(self, namespace, repository, tag, parsed_args):
""" List the images for the specified repository tag. """
- try:
- tag_image = model.tag.get_tag_image(namespace, repository, tag)
- except model.DataModelException:
+ repo_ref = registry_model.lookup_repository(namespace, repository)
+ if repo_ref is None:
raise NotFound()
- parent_images = model.image.get_parent_images(namespace, repository, tag_image)
- image_map = {}
+ tag_ref = registry_model.get_repo_tag(repo_ref, tag, include_legacy_image=True)
+ if tag_ref is None:
+ raise NotFound()
- image_map[str(tag_image.id)] = tag_image
+ if tag_ref.legacy_image_if_present is None:
+ return {'images': []}
- for image in parent_images:
- image_map[str(image.id)] = image
+ image_id = tag_ref.legacy_image.docker_image_id
- image_map_all = dict(image_map)
- all_images = [tag_image] + list(parent_images)
-
- # Filter the images returned to those not found in the ancestry of any of the other tags in
- # the repository.
+ all_images = None
if parsed_args['owned']:
- all_tags = model.tag.list_repository_tags(namespace, repository)
- for current_tag in all_tags:
- if current_tag.name == tag:
- continue
+ # TODO: Remove the `owned` image concept once we are fully on V2_2.
+ all_images = registry_model.get_legacy_images_owned_by_tag(tag_ref)
+ else:
+ image_with_parents = registry_model.get_legacy_image(repo_ref, image_id, include_parents=True)
+ if image_with_parents is None:
+ raise NotFound()
- # Remove the tag's image ID.
- tag_image_id = str(current_tag.image_id)
- image_map.pop(tag_image_id, None)
-
- # Remove any ancestors:
- for ancestor_id in current_tag.image.ancestors.split('/'):
- image_map.pop(ancestor_id, None)
+ all_images = [image_with_parents] + image_with_parents.parents
return {
- 'images': [image_view(image, image_map_all) for image in all_images
- if not parsed_args['owned'] or (str(image.id) in image_map)]
+ 'images': [image_dict(image) for image in all_images],
}
-
-@resource('/v1/repository/<apirepopath:repository>/tag/<tag>/revert')
+@resource('/v1/repository/<apirepopath:repository>/tag/<tag>/restore')
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
@path_param('tag', 'The name of the tag')
-class RevertTag(RepositoryParamResource):
- """ Resource for reverting a repository tag back to a previous image. """
+class RestoreTag(RepositoryParamResource):
+ """ Resource for restoring a repository tag back to a previous image. """
schemas = {
- 'RevertTag': {
+ 'RestoreTag': {
'type': 'object',
- 'description': 'Reverts a tag to a specific image',
- 'required': [
- 'image',
- ],
+ 'description': 'Restores a tag to a specific image',
'properties': {
'image': {
'type': 'string',
- 'description': 'Image identifier to which the tag should point',
+ 'description': '(Deprecated: use `manifest_digest`) Image to which the tag should point',
+ },
+ 'manifest_digest': {
+ 'type': 'string',
+ 'description': 'If specified, the manifest digest that should be used',
},
},
},
}
@require_repo_write
- @nickname('revertTag')
- @validate_json_request('RevertTag')
+ @disallow_for_app_repositories
+ @disallow_for_non_normal_repositories
+ @nickname('restoreTag')
+ @validate_json_request('RestoreTag')
def post(self, namespace, repository, tag):
- """ Reverts a repository tag back to a previous image in the repository. """
- try:
- tag_image = model.tag.get_tag_image(namespace, repository, tag)
- except model.DataModelException:
+ """ Restores a repository tag back to a previous image in the repository. """
+ repo_ref = registry_model.lookup_repository(namespace, repository)
+ if repo_ref is None:
raise NotFound()
- # Revert the tag back to the previous image.
- image_id = request.get_json()['image']
- model.tag.revert_tag(tag_image.repository, tag, image_id)
+ # Restore the tag back to the previous image.
+ image_id = request.get_json().get('image', None)
+ manifest_digest = request.get_json().get('manifest_digest', None)
- # Log the reversion.
+ if image_id is None and manifest_digest is None:
+ raise InvalidRequest('Missing manifest_digest')
+
+ # Data for logging the reversion/restoration.
username = get_authenticated_user().username
- log_action('revert_tag', namespace,
- {'username': username, 'repo': repository, 'tag': tag,
- 'image': image_id, 'original_image': tag_image.docker_image_id},
- repo=model.repository.get_repository(namespace, repository))
-
- return {
- 'image_id': image_id,
- 'original_image_id': tag_image.docker_image_id
+ log_data = {
+ 'username': username,
+ 'repo': repository,
+ 'tag': tag,
+ 'image': image_id,
+ 'manifest_digest': manifest_digest,
}
+
+ manifest_or_legacy_image = None
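+ # The manifest digest takes precedence over the deprecated `image` field when both are given.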
+ if manifest_digest is not None:
+ manifest_or_legacy_image = registry_model.lookup_manifest_by_digest(repo_ref, manifest_digest,
+ allow_dead=True,
+ require_available=True)
+ elif image_id is not None:
+ manifest_or_legacy_image = registry_model.get_legacy_image(repo_ref, image_id)
+
+ if manifest_or_legacy_image is None:
+ raise NotFound()
+
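+ # is_reversion=True records this retarget as a restore rather than an ordinary tag move.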
+ if not registry_model.retarget_tag(repo_ref, tag, manifest_or_legacy_image, storage,
+ docker_v2_signing_key, is_reversion=True):
+ raise InvalidRequest('Could not restore tag')
+
+ log_action('revert_tag', namespace, log_data, repo_name=repository)
+
+ return {}
diff --git a/endpoints/api/team.py b/endpoints/api/team.py
index a427c472a..b00a14393 100644
--- a/endpoints/api/team.py
+++ b/endpoints/api/team.py
@@ -1,19 +1,28 @@
""" Create, list and manage an organization's teams. """
+import json
+
+from functools import wraps
+
from flask import request
import features
-from endpoints.api import (resource, nickname, ApiResource, validate_json_request, request_error,
- log_action, internal_only, require_scope, path_param, query_param,
- truthy_bool, parse_args, require_user_admin, show_if)
-from endpoints.exception import Unauthorized, NotFound
-from auth.permissions import AdministerOrganizationPermission, ViewTeamPermission
+from app import avatar, authentication
+from auth.permissions import (AdministerOrganizationPermission, ViewTeamPermission,
+ SuperUserPermission)
+
from auth.auth_context import get_authenticated_user
from auth import scopes
from data import model
+from data.database import Team
+from endpoints.api import (resource, nickname, ApiResource, validate_json_request, request_error,
+ log_action, internal_only, require_scope, path_param, query_param,
+ truthy_bool, parse_args, require_user_admin, show_if, format_date,
+ verify_not_prod, require_fresh_login)
+from endpoints.exception import Unauthorized, NotFound, InvalidRequest
from util.useremails import send_org_invite_email
-from app import avatar
+from util.names import parse_robot_username
def permission_view(permission):
return {
@@ -24,7 +33,6 @@ def permission_view(permission):
'role': permission.role.name
}
-
def try_accept_invite(code, user):
(team, inviter) = model.team.confirm_team_invite(code, user)
@@ -40,7 +48,6 @@ def try_accept_invite(code, user):
return team
-
def handle_addinvite_team(inviter, team, user=None, email=None):
requires_invite = features.MAILING and features.REQUIRE_TEAM_INVITE
invite = model.team.add_or_invite_to_team(inviter, team, user, email,
@@ -62,15 +69,15 @@ def handle_addinvite_team(inviter, team, user=None, email=None):
orgname, team.name, inviter.username, invite.invite_token)
return invite
-def team_view(orgname, team):
+def team_view(orgname, team, is_new_team=False):
view_permission = ViewTeamPermission(orgname, team.name)
- role = model.team.get_team_org_role(team).name
return {
'name': team.name,
'description': team.description,
'can_view': view_permission.can(),
- 'role': role,
- 'avatar': avatar.get_data_for_team(team)
+ 'role': Team.role.get_name(team.role_id),
+ 'avatar': avatar.get_data_for_team(team),
+ 'new_team': is_new_team,
}
def member_view(member, invited=False):
@@ -82,7 +89,6 @@ def member_view(member, invited=False):
'invited': invited,
}
-
def invite_view(invite):
if invite.user:
return member_view(invite.user, invited=True)
@@ -94,6 +100,30 @@ def invite_view(invite):
'invited': True
}
+def disallow_for_synced_team(except_robots=False):
+ """ Disallows the decorated operation for a team that is marked as being synced from an internal
+ auth provider such as LDAP. If except_robots is True, then the operation is allowed if the
+ member specified on the operation is a robot account.
+ """
+ def inner(func):
+ @wraps(func)
+ def wrapper(self, *args, **kwargs):
+ # Team syncing can only be enabled if we have a federated service.
+ if features.TEAM_SYNCING and authentication.federated_service:
+ orgname = kwargs['orgname']
+ teamname = kwargs['teamname']
+ if model.team.get_team_sync_information(orgname, teamname):
+ if not except_robots or not parse_robot_username(kwargs.get('membername', '')):
+ raise InvalidRequest('Cannot call this method on an auth-synced team')
+
+ return func(self, *args, **kwargs)
+ return wrapper
+ return inner
+
+
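+# Convenience aliases: a synced team's membership is managed by the backing group,
+# so at most robot membership may be edited by hand.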
+disallow_nonrobots_for_synced_team = disallow_for_synced_team(except_robots=True)
+disallow_all_for_synced_team = disallow_for_synced_team(except_robots=False)
+
@resource('/v1/organization/<orgname>/team/<teamname>')
@path_param('orgname', 'The name of the organization')
@@ -157,13 +187,13 @@ class OrganizationTeam(ApiResource):
{'team': teamname, 'description': team.description})
if 'role' in details:
- role = model.team.get_team_org_role(team).name
+ role = Team.role.get_name(team.role_id)
if role != details['role']:
team = model.team.set_team_org_permission(team, details['role'],
get_authenticated_user().username)
log_action('org_set_team_role', orgname, {'team': teamname, 'role': details['role']})
- return team_view(orgname, team), 200
+ return team_view(orgname, team, is_new_team=not is_existing), 200
raise Unauthorized()
@@ -180,6 +210,64 @@ class OrganizationTeam(ApiResource):
raise Unauthorized()
+def _syncing_setup_allowed(orgname):
+ """ Returns whether syncing setup is allowed for the current user over the matching org. """
+ if not features.NONSUPERUSER_TEAM_SYNCING_SETUP and not SuperUserPermission().can():
+ return False
+
+ return AdministerOrganizationPermission(orgname).can()
+
+
+@resource('/v1/organization/<orgname>/team/<teamname>/syncing')
+@path_param('orgname', 'The name of the organization')
+@path_param('teamname', 'The name of the team')
+@show_if(features.TEAM_SYNCING)
+class OrganizationTeamSyncing(ApiResource):
+ """ Resource for managing syncing of a team by a backing group. """
+ @require_scope(scopes.ORG_ADMIN)
+ @require_scope(scopes.SUPERUSER)
+ @nickname('enableOrganizationTeamSync')
+ @verify_not_prod
+ @require_fresh_login
+ def post(self, orgname, teamname):
+ if _syncing_setup_allowed(orgname):
+ try:
+ team = model.team.get_organization_team(orgname, teamname)
+ except model.InvalidTeamException:
+ raise NotFound()
+
+ config = request.get_json()
+
+ # Ensure that the specified config points to a valid group.
+ status, err = authentication.check_group_lookup_args(config)
+ if not status:
+ raise InvalidRequest('Could not sync to group: %s' % err)
+
+ # Set the team's syncing config.
+ model.team.set_team_syncing(team, authentication.federated_service, config)
+
+ return team_view(orgname, team)
+
+ raise Unauthorized()
+
+ @require_scope(scopes.ORG_ADMIN)
+ @require_scope(scopes.SUPERUSER)
+ @nickname('disableOrganizationTeamSync')
+ @verify_not_prod
+ @require_fresh_login
+ def delete(self, orgname, teamname):
+ if _syncing_setup_allowed(orgname):
+ try:
+ team = model.team.get_organization_team(orgname, teamname)
+ except model.InvalidTeamException:
+ raise NotFound()
+
+ model.team.remove_team_syncing(orgname, teamname)
+ return team_view(orgname, team)
+
+ raise Unauthorized()
+
+
@resource('/v1/organization/<orgname>/team/<teamname>/members')
@path_param('orgname', 'The name of the organization')
@path_param('teamname', 'The name of the team')
@@ -211,9 +299,29 @@ class TeamMemberList(ApiResource):
data = {
'name': teamname,
'members': [member_view(m) for m in members] + [invite_view(i) for i in invites],
- 'can_edit': edit_permission.can()
+ 'can_edit': edit_permission.can(),
}
+ if features.TEAM_SYNCING and authentication.federated_service:
+ if _syncing_setup_allowed(orgname):
+ data['can_sync'] = {
+ 'service': authentication.federated_service,
+ }
+
+ data['can_sync'].update(authentication.service_metadata())
+
+ sync_info = model.team.get_team_sync_information(orgname, teamname)
+ if sync_info is not None:
+ data['synced'] = {
+ 'service': sync_info.service.name,
+ }
+
+ if SuperUserPermission().can():
+ data['synced'].update({
+ 'last_updated': format_date(sync_info.last_updated),
+ 'config': json.loads(sync_info.config),
+ })
+
return data
raise Unauthorized()
@@ -228,6 +336,7 @@ class TeamMember(ApiResource):
@require_scope(scopes.ORG_ADMIN)
@nickname('updateOrganizationTeamMember')
+ @disallow_nonrobots_for_synced_team
def put(self, orgname, teamname, membername):
""" Adds or invites a member to an existing team. """
permission = AdministerOrganizationPermission(orgname)
@@ -265,6 +374,7 @@ class TeamMember(ApiResource):
@require_scope(scopes.ORG_ADMIN)
@nickname('deleteOrganizationTeamMember')
+ @disallow_nonrobots_for_synced_team
def delete(self, orgname, teamname, membername):
""" Delete a member of a team. If the user is merely invited to join
the team, then the invite is removed instead.
@@ -308,6 +418,7 @@ class InviteTeamMember(ApiResource):
""" Resource for inviting a team member via email address. """
@require_scope(scopes.ORG_ADMIN)
@nickname('inviteTeamMemberEmail')
+ @disallow_all_for_synced_team
def put(self, orgname, teamname, email):
""" Invites an email address to an existing team. """
permission = AdministerOrganizationPermission(orgname)
@@ -347,7 +458,9 @@ class InviteTeamMember(ApiResource):
raise NotFound()
# Delete the invite.
- model.team.delete_team_email_invite(team, email)
+ if not model.team.delete_team_email_invite(team, email):
+ raise NotFound()
+
log_action('org_delete_team_member_invite', orgname, {
'email': email,
'team': teamname,
@@ -405,7 +518,7 @@ class TeamMemberInvite(ApiResource):
@nickname('declineOrganizationTeamInvite')
@require_user_admin
def delete(self, code):
- """ Delete an existing member of a team. """
+ """ Delete an existing invitation to join a team. """
(team, inviter) = model.team.delete_team_invite(code, user_obj=get_authenticated_user())
model.notification.delete_matching_notifications(get_authenticated_user(), 'org_team_invite',
diff --git a/endpoints/api/test/__init__.py b/endpoints/api/test/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/endpoints/api/test/shared.py b/endpoints/api/test/shared.py
new file mode 100644
index 000000000..c5a553f09
--- /dev/null
+++ b/endpoints/api/test/shared.py
@@ -0,0 +1,11 @@
+from endpoints.test.shared import conduct_call
+from endpoints.api import api
+
+def conduct_api_call(client, resource, method, params, body=None, expected_code=200, headers=None):
+ """ Conducts an API call to the given resource via the given client, and ensures its returned
+ status matches the code given.
+
+ Returns the response.
+ """
+ return conduct_call(client, resource, api.url_for, method, params, body, expected_code,
+ headers=headers)
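+
+# Example (hypothetical values; see the tests in this package for real usage):
+#   conduct_api_call(cl, SomeResource, 'GET', {'param': 'value'}, None, expected_code=200)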
diff --git a/endpoints/api/test/test_appspecifictoken.py b/endpoints/api/test/test_appspecifictoken.py
new file mode 100644
index 000000000..28e2bcd00
--- /dev/null
+++ b/endpoints/api/test/test_appspecifictoken.py
@@ -0,0 +1,50 @@
+from datetime import datetime, timedelta
+
+from data import model
+from endpoints.api.appspecifictokens import AppTokens, AppToken
+from endpoints.api.test.shared import conduct_api_call
+from endpoints.test.shared import client_with_identity
+from test.fixtures import *
+
+def test_app_specific_tokens(app, client):
+ with client_with_identity('devtable', client) as cl:
+ # Add an app specific token.
+ token_data = {'title': 'Testing 123'}
+ resp = conduct_api_call(cl, AppTokens, 'POST', None, token_data, 200).json
+ token_uuid = resp['token']['uuid']
+ assert 'token_code' in resp['token']
+
+ # List the tokens and ensure we have the one added.
+ resp = conduct_api_call(cl, AppTokens, 'GET', None, None, 200).json
+ assert len(resp['tokens'])
+ assert token_uuid in set([token['uuid'] for token in resp['tokens']])
+ assert not set([token['token_code'] for token in resp['tokens'] if 'token_code' in token])
+
+ # List the tokens expiring soon and ensure the one added is not present.
+ resp = conduct_api_call(cl, AppTokens, 'GET', {'expiring': True}, None, 200).json
+ assert token_uuid not in set([token['uuid'] for token in resp['tokens']])
+
+ # Get the token and ensure we have its code.
+ resp = conduct_api_call(cl, AppToken, 'GET', {'token_uuid': token_uuid}, None, 200).json
+ assert resp['token']['uuid'] == token_uuid
+ assert 'token_code' in resp['token']
+
+ # Delete the token.
+ conduct_api_call(cl, AppToken, 'DELETE', {'token_uuid': token_uuid}, None, 204)
+
+ # Ensure the token no longer exists.
+ resp = conduct_api_call(cl, AppTokens, 'GET', None, None, 200).json
+ assert len(resp['tokens'])
+ assert token_uuid not in set([token['uuid'] for token in resp['tokens']])
+
+ conduct_api_call(cl, AppToken, 'GET', {'token_uuid': token_uuid}, None, 404)
+
+
+def test_delete_expired_app_token(app, client):
+ user = model.user.get_user('devtable')
+ expiration = datetime.now() - timedelta(seconds=10)
+ token = model.appspecifictoken.create_token(user, 'some token', expiration)
+
+ with client_with_identity('devtable', client) as cl:
+ # Delete the token.
+ conduct_api_call(cl, AppToken, 'DELETE', {'token_uuid': token.uuid}, None, 204)
diff --git a/endpoints/api/test/test_build.py b/endpoints/api/test/test_build.py
new file mode 100644
index 000000000..bf98ad4eb
--- /dev/null
+++ b/endpoints/api/test/test_build.py
@@ -0,0 +1,20 @@
+import pytest
+
+from endpoints.api.build import RepositoryBuildList
+
+
+@pytest.mark.parametrize('request_json,subdir,context', [
+ ({}, '/Dockerfile', '/'),
+ ({'context': '/some_context'}, '/some_context/Dockerfile', '/some_context'),
+ ({'subdirectory': 'some_context'}, 'some_context/Dockerfile', 'some_context'),
+ ({'subdirectory': 'some_context/'}, 'some_context/Dockerfile', 'some_context/'),
+ ({'dockerfile_path': 'some_context/Dockerfile'}, 'some_context/Dockerfile', 'some_context'),
+ ({'dockerfile_path': 'some_context/Dockerfile', 'context': '/'}, 'some_context/Dockerfile', '/'),
+ ({'dockerfile_path': 'some_context/Dockerfile',
+ 'context': '/',
+ 'subdirectory': 'slime'}, 'some_context/Dockerfile', '/'),
+])
+def test_extract_dockerfile_args(request_json, subdir, context):
+ actual_context, actual_subdir = RepositoryBuildList.get_dockerfile_context(request_json)
+ assert subdir == actual_subdir
+ assert context == actual_context
diff --git a/endpoints/api/test/test_disallow_for_apps.py b/endpoints/api/test/test_disallow_for_apps.py
new file mode 100644
index 000000000..b9112c291
--- /dev/null
+++ b/endpoints/api/test/test_disallow_for_apps.py
@@ -0,0 +1,83 @@
+import pytest
+
+from data import model
+from endpoints.api.repository import Repository
+from endpoints.api.build import (RepositoryBuildList, RepositoryBuildResource,
+ RepositoryBuildStatus, RepositoryBuildLogs)
+from endpoints.api.image import RepositoryImageList, RepositoryImage
+from endpoints.api.manifest import RepositoryManifestLabels, ManageRepositoryManifestLabel
+from endpoints.api.repositorynotification import (RepositoryNotification,
+ RepositoryNotificationList,
+ TestRepositoryNotification)
+from endpoints.api.secscan import RepositoryImageSecurity, RepositoryManifestSecurity
+from endpoints.api.signing import RepositorySignatures
+from endpoints.api.tag import ListRepositoryTags, RepositoryTag, RepositoryTagImages, RestoreTag
+from endpoints.api.trigger import (BuildTriggerList, BuildTrigger, BuildTriggerSubdirs,
+ BuildTriggerActivate, BuildTriggerAnalyze, ActivateBuildTrigger,
+ TriggerBuildList, BuildTriggerFieldValues, BuildTriggerSources,
+ BuildTriggerSourceNamespaces)
+from endpoints.api.test.shared import conduct_api_call
+from endpoints.test.shared import client_with_identity
+from test.fixtures import *
+
+BUILD_ARGS = {'build_uuid': '1234'}
+IMAGE_ARGS = {'imageid': '1234', 'image_id': 1234}
+MANIFEST_ARGS = {'manifestref': 'sha256:abcd1234'}
+LABEL_ARGS = {'manifestref': 'sha256:abcd1234', 'labelid': '1234'}
+NOTIFICATION_ARGS = {'uuid': '1234'}
+TAG_ARGS = {'tag': 'foobar'}
+TRIGGER_ARGS = {'trigger_uuid': '1234'}
+FIELD_ARGS = {'trigger_uuid': '1234', 'field_name': 'foobar'}
+
+@pytest.mark.parametrize('resource, method, params', [
+ (RepositoryBuildList, 'get', None),
+ (RepositoryBuildList, 'post', None),
+ (RepositoryBuildResource, 'get', BUILD_ARGS),
+ (RepositoryBuildResource, 'delete', BUILD_ARGS),
+ (RepositoryBuildStatus, 'get', BUILD_ARGS),
+ (RepositoryBuildLogs, 'get', BUILD_ARGS),
+ (RepositoryImageList, 'get', None),
+ (RepositoryImage, 'get', IMAGE_ARGS),
+ (RepositoryManifestLabels, 'get', MANIFEST_ARGS),
+ (RepositoryManifestLabels, 'post', MANIFEST_ARGS),
+ (ManageRepositoryManifestLabel, 'get', LABEL_ARGS),
+ (ManageRepositoryManifestLabel, 'delete', LABEL_ARGS),
+ (RepositoryNotificationList, 'get', None),
+ (RepositoryNotificationList, 'post', None),
+ (RepositoryNotification, 'get', NOTIFICATION_ARGS),
+ (RepositoryNotification, 'delete', NOTIFICATION_ARGS),
+ (RepositoryNotification, 'post', NOTIFICATION_ARGS),
+ (TestRepositoryNotification, 'post', NOTIFICATION_ARGS),
+ (RepositoryImageSecurity, 'get', IMAGE_ARGS),
+ (RepositoryManifestSecurity, 'get', MANIFEST_ARGS),
+ (RepositorySignatures, 'get', None),
+ (ListRepositoryTags, 'get', None),
+ (RepositoryTag, 'put', TAG_ARGS),
+ (RepositoryTag, 'delete', TAG_ARGS),
+ (RepositoryTagImages, 'get', TAG_ARGS),
+ (RestoreTag, 'post', TAG_ARGS),
+ (BuildTriggerList, 'get', None),
+ (BuildTrigger, 'get', TRIGGER_ARGS),
+ (BuildTrigger, 'delete', TRIGGER_ARGS),
+ (BuildTriggerSubdirs, 'post', TRIGGER_ARGS),
+ (BuildTriggerActivate, 'post', TRIGGER_ARGS),
+ (BuildTriggerAnalyze, 'post', TRIGGER_ARGS),
+ (ActivateBuildTrigger, 'post', TRIGGER_ARGS),
+ (TriggerBuildList, 'get', TRIGGER_ARGS),
+ (BuildTriggerFieldValues, 'post', FIELD_ARGS),
+ (BuildTriggerSources, 'post', TRIGGER_ARGS),
+ (BuildTriggerSourceNamespaces, 'get', TRIGGER_ARGS),
+])
+def test_disallowed_for_apps(resource, method, params, client):
+ namespace = 'devtable'
+ repository = 'someapprepo'
+
+ devtable = model.user.get_user('devtable')
+ model.repository.create_repository(namespace, repository, devtable, repo_kind='application')
+
+ params = params or {}
+ params['repository'] = '%s/%s' % (namespace, repository)
+
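+ # Each endpoint above should reply 501 for an application (app registry) repository.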
+ with client_with_identity('devtable', client) as cl:
+ conduct_api_call(cl, resource, method, params, None, 501)
+
diff --git a/endpoints/api/test/test_disallow_for_nonnormal.py b/endpoints/api/test/test_disallow_for_nonnormal.py
new file mode 100644
index 000000000..7d8ace845
--- /dev/null
+++ b/endpoints/api/test/test_disallow_for_nonnormal.py
@@ -0,0 +1,64 @@
+import pytest
+
+from data import model
+from data.database import RepositoryState
+from endpoints.api.build import RepositoryBuildList, RepositoryBuildResource
+from endpoints.api.manifest import RepositoryManifestLabels, ManageRepositoryManifestLabel
+from endpoints.api.tag import RepositoryTag, RestoreTag
+from endpoints.api.trigger import (BuildTrigger, BuildTriggerSubdirs,
+ BuildTriggerActivate, BuildTriggerAnalyze, ActivateBuildTrigger,
+ BuildTriggerFieldValues, BuildTriggerSources)
+from endpoints.api.test.shared import conduct_api_call
+from endpoints.test.shared import client_with_identity
+from test.fixtures import *
+
+BUILD_ARGS = {'build_uuid': '1234'}
+IMAGE_ARGS = {'imageid': '1234', 'image_id': 1234}
+MANIFEST_ARGS = {'manifestref': 'sha256:abcd1234'}
+LABEL_ARGS = {'manifestref': 'sha256:abcd1234', 'labelid': '1234'}
+NOTIFICATION_ARGS = {'uuid': '1234'}
+TAG_ARGS = {'tag': 'foobar'}
+TRIGGER_ARGS = {'trigger_uuid': '1234'}
+FIELD_ARGS = {'trigger_uuid': '1234', 'field_name': 'foobar'}
+
+
+@pytest.mark.parametrize('state', [
+ RepositoryState.MIRROR,
+ RepositoryState.READ_ONLY,
+])
+@pytest.mark.parametrize('resource, method, params', [
+ (RepositoryBuildList, 'post', None),
+ (RepositoryBuildResource, 'delete', BUILD_ARGS),
+
+ (RepositoryManifestLabels, 'post', MANIFEST_ARGS),
+ (ManageRepositoryManifestLabel, 'delete', LABEL_ARGS),
+
+ (RepositoryTag, 'put', TAG_ARGS),
+ (RepositoryTag, 'delete', TAG_ARGS),
+
+ (RestoreTag, 'post', TAG_ARGS),
+
+ (BuildTrigger, 'delete', TRIGGER_ARGS),
+ (BuildTriggerSubdirs, 'post', TRIGGER_ARGS),
+ (BuildTriggerActivate, 'post', TRIGGER_ARGS),
+ (BuildTriggerAnalyze, 'post', TRIGGER_ARGS),
+ (ActivateBuildTrigger, 'post', TRIGGER_ARGS),
+
+ (BuildTriggerFieldValues, 'post', FIELD_ARGS),
+ (BuildTriggerSources, 'post', TRIGGER_ARGS),
+
+])
+def test_disallowed_for_nonnormal(state, resource, method, params, client):
+ namespace = 'devtable'
+ repository = 'somenewstaterepo'
+
+ devtable = model.user.get_user('devtable')
+ repo = model.repository.create_repository(namespace, repository, devtable)
+ repo.state = state
+ repo.save()
+
+ params = params or {}
+ params['repository'] = '%s/%s' % (namespace, repository)
+
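+ # Write operations should reply 503 while the repository is read-only or mirrored.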
+ with client_with_identity('devtable', client) as cl:
+ conduct_api_call(cl, resource, method, params, None, 503)
diff --git a/endpoints/api/test/test_endtoend_auth.py b/endpoints/api/test/test_endtoend_auth.py
new file mode 100644
index 000000000..0bcf9c7e4
--- /dev/null
+++ b/endpoints/api/test/test_endtoend_auth.py
@@ -0,0 +1,63 @@
+import pytest
+
+from mock import patch
+
+from endpoints.api.search import EntitySearch, LinkExternalEntity
+from endpoints.api.test.shared import conduct_api_call
+from endpoints.test.shared import client_with_identity
+
+from test.test_ldap import mock_ldap
+from test.test_external_jwt_authn import fake_jwt
+from test.test_keystone_auth import fake_keystone
+
+from test.fixtures import *
+
+
+@pytest.fixture(params=[
+ mock_ldap,
+ fake_jwt,
+ fake_keystone,
+])
+def auth_engine(request):
+ return request.param
+
+
+@pytest.fixture(params=[
+ False,
+ True,
+])
+def requires_email(request):
+ return request.param
+
+
+def test_entity_search(auth_engine, requires_email, client):
+ with auth_engine(requires_email=requires_email) as auth:
+ with patch('endpoints.api.search.authentication', auth):
+ # Try an unknown prefix.
+ response = conduct_api_call(client, EntitySearch, 'GET', params=dict(prefix='unknown'))
+ results = response.json['results']
+ assert len(results) == 0
+
+ # Try a known prefix.
+ response = conduct_api_call(client, EntitySearch, 'GET', params=dict(prefix='cool'))
+ results = response.json['results']
+ entity = results[0]
+ assert entity['name'] == 'cool.user'
+ assert entity['kind'] == 'external'
+
+
+def test_link_external_entity(auth_engine, requires_email, client):
+ with auth_engine(requires_email=requires_email) as auth:
+ with patch('endpoints.api.search.authentication', auth):
+ with client_with_identity('devtable', client) as cl:
+ # Try an unknown user.
+ conduct_api_call(cl, LinkExternalEntity, 'POST', params=dict(username='unknownuser'),
+ expected_code=400)
+
+ # Try a known user.
+ response = conduct_api_call(cl, LinkExternalEntity, 'POST',
+ params=dict(username='cool.user'))
+
+ entity = response.json['entity']
+ assert entity['name'] == 'cool_user'
+ assert entity['kind'] == 'user'
diff --git a/endpoints/api/test/test_logs.py b/endpoints/api/test/test_logs.py
new file mode 100644
index 000000000..a73561bfa
--- /dev/null
+++ b/endpoints/api/test/test_logs.py
@@ -0,0 +1,34 @@
+import os
+import time
+
+import pytest
+
+from mock import patch
+
+from app import export_action_logs_queue
+from endpoints.api.test.shared import conduct_api_call
+from endpoints.api.logs import ExportOrgLogs
+from endpoints.test.shared import client_with_identity
+
+from test.fixtures import *
+
+@pytest.mark.skipif(os.environ.get('TEST_DATABASE_URI', '').find('mysql') >= 0,
+ reason="Queue code is very sensitive to times on MySQL, making this flaky")
+def test_export_logs(client):
+ with client_with_identity('devtable', client) as cl:
+ assert export_action_logs_queue.get() is None
+
+ timecode = time.time()
+ def get_time():
+ return timecode - 2
+
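+ # Shift time.time() back two seconds so the queued export is already available to get() below.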
+ with patch('time.time', get_time):
+ # Call to export logs.
+ body = {
+ 'callback_url': 'http://some/url',
+ 'callback_email': 'a@b.com',
+ }
+
+ conduct_api_call(cl, ExportOrgLogs, 'POST', {'orgname': 'buynlarge'},
+ body, expected_code=200)
+
+ # Ensure the request was queued.
+ assert export_action_logs_queue.get() is not None
diff --git a/endpoints/api/test/test_manifest.py b/endpoints/api/test/test_manifest.py
new file mode 100644
index 000000000..164c26061
--- /dev/null
+++ b/endpoints/api/test/test_manifest.py
@@ -0,0 +1,24 @@
+from data.registry_model import registry_model
+from endpoints.api.manifest import RepositoryManifest
+from endpoints.api.test.shared import conduct_api_call
+from endpoints.test.shared import client_with_identity
+
+from test.fixtures import *
+
+def test_repository_manifest(client):
+ with client_with_identity('devtable', client) as cl:
+ repo_ref = registry_model.lookup_repository('devtable', 'simple')
+ tags = registry_model.list_all_active_repository_tags(repo_ref)
+ for tag in tags:
+ manifest_digest = tag.manifest_digest
+ if manifest_digest is None:
+ continue
+
+ params = {
+ 'repository': 'devtable/simple',
+ 'manifestref': manifest_digest,
+ }
+ result = conduct_api_call(cl, RepositoryManifest, 'GET', params, None, 200).json
+ assert result['digest'] == manifest_digest
+ assert result['manifest_data']
+ assert result['image']
diff --git a/endpoints/api/test/test_mirror.py b/endpoints/api/test/test_mirror.py
new file mode 100644
index 000000000..8fcd9ef4a
--- /dev/null
+++ b/endpoints/api/test/test_mirror.py
@@ -0,0 +1,235 @@
+from datetime import datetime
+
+import pytest
+
+from data import model
+from endpoints.api.test.shared import conduct_api_call
+from endpoints.api.mirror import RepoMirrorResource
+from endpoints.test.shared import client_with_identity
+
+from test.fixtures import *
+
+def _setup_mirror():
+ repo = model.repository.get_repository('devtable', 'simple')
+ assert repo
+ robot = model.user.lookup_robot('devtable+dtrobot')
+ assert robot
+ rule = model.repo_mirror.create_rule(repo, ['latest', '3.3*', 'foo'])
+ assert rule
+ mirror_kwargs = {
+ 'is_enabled': True,
+ 'external_reference': 'quay.io/redhat/quay',
+ 'sync_interval': 5000,
+ 'sync_start_date': datetime(2020, 1, 2, 6, 30, 0),
+ 'external_registry_username': 'fakeUsername',
+ 'external_registry_password': 'fakePassword',
+ 'external_registry_config': {
+ 'verify_tls': True,
+ 'proxy': {
+ 'http_proxy': 'http://insecure.proxy.corp',
+ 'https_proxy': 'https://secure.proxy.corp',
+ 'no_proxy': 'mylocalhost'
+ }
+ }
+ }
+ mirror = model.repo_mirror.enable_mirroring_for_repository(repo, root_rule=rule,
+ internal_robot=robot, **mirror_kwargs)
+ assert mirror
+ return mirror
+
+
+@pytest.mark.parametrize('existing_robot_permission, expected_permission', [
+ (None, 'write'),
+ ('read', 'write'),
+ ('write', 'write'),
+ ('admin', 'admin'),
+])
+def test_create_mirror_sets_permissions(existing_robot_permission, expected_permission, client):
+ mirror_bot, _ = model.user.create_robot('newmirrorbot', model.user.get_namespace_user('devtable'))
+
+ if existing_robot_permission:
+ model.permission.set_user_repo_permission(mirror_bot.username, 'devtable', 'simple',
+ existing_robot_permission)
+
+ with client_with_identity('devtable', client) as cl:
+ params = {'repository': 'devtable/simple'}
+ request_body = {
+ 'external_reference': 'quay.io/foobar/barbaz',
+ 'sync_interval': 100,
+ 'sync_start_date': '2019-08-20T17:51:00Z',
+ 'root_rule': {
+ 'rule_kind': 'tag_glob_csv',
+ 'rule_value': ['latest','foo', 'bar']
+ },
+ 'robot_username': 'devtable+newmirrorbot',
+ }
+ conduct_api_call(cl, RepoMirrorResource, 'POST', params, request_body, 201)
+
+ # Check the status of the robot.
+ permissions = model.permission.get_user_repository_permissions(mirror_bot, 'devtable', 'simple')
+ assert permissions[0].role.name == expected_permission
+
+ config = model.repo_mirror.get_mirror(model.repository.get_repository('devtable', 'simple'))
+ assert config.root_rule.rule_value == ['latest', 'foo', 'bar']
+
+
+def test_get_mirror_does_not_exist(client):
+ with client_with_identity('devtable', client) as cl:
+ params = {'repository': 'devtable/simple'}
+ resp = conduct_api_call(cl, RepoMirrorResource, 'GET', params, None, 404)
+
+
+def test_get_repo_does_not_exist(client):
+ with client_with_identity('devtable', client) as cl:
+ params = {'repository': 'devtable/unicorn'}
+ resp = conduct_api_call(cl, RepoMirrorResource, 'GET', params, None, 404)
+
+
+def test_get_mirror(client):
+ """ Verify that performing a `GET` request returns expected and accurate data. """
+ mirror = _setup_mirror()
+
+ with client_with_identity('devtable', client) as cl:
+ params = {'repository': 'devtable/simple'}
+ resp = conduct_api_call(cl, RepoMirrorResource, 'GET', params, None, 200).json
+
+ assert resp['is_enabled'] == True
+ assert resp['external_reference'] == 'quay.io/redhat/quay'
+ assert resp['sync_interval'] == 5000
+ assert resp['sync_start_date'] == '2020-01-02T06:30:00Z'
+ assert resp['external_registry_username'] == 'fakeUsername'
+ assert 'external_registry_password' not in resp
+ assert 'external_registry_config' in resp
+ assert resp['external_registry_config']['verify_tls'] == True
+ assert 'proxy' in resp['external_registry_config']
+ assert resp['external_registry_config']['proxy']['http_proxy'] == 'http://insecure.proxy.corp'
+ assert resp['external_registry_config']['proxy']['https_proxy'] == 'https://secure.proxy.corp'
+ assert resp['external_registry_config']['proxy']['no_proxy'] == 'mylocalhost'
+
+
+@pytest.mark.parametrize('key, value, expected_status', [
+
+ ('is_enabled', True, 201),
+ ('is_enabled', False, 201),
+ ('is_enabled', None, 400),
+ ('is_enabled', 'foo', 400),
+
+ ('external_reference', 'example.com/foo/bar', 201),
+ ('external_reference', 'example.com/foo', 201),
+ ('external_reference', 'example.com', 201),
+
+ ('external_registry_username', 'newTestUsername', 201),
+ ('external_registry_username', None, 201),
+ ('external_registry_username', 123, 400),
+
+ ('external_registry_password', 'newTestPassword', 400),
+ ('external_registry_password', None, 400),
+ ('external_registry_password', 41, 400),
+
+ ('robot_username', 'devtable+dtrobot', 201),
+ ('robot_username', 'devtable+doesntExist', 400),
+
+ ('sync_start_date', '2020-01-01T00:00:00Z', 201),
+ ('sync_start_date', 'January 1 2020', 400),
+ ('sync_start_date', '2020-01-01T00:00:00.00Z', 400),
+ ('sync_start_date', 'Wed, 01 Jan 2020 00:00:00 -0000', 400),
+ ('sync_start_date', 'Wed, 02 Oct 2002 08:00:00 EST', 400),
+
+ ('sync_interval', 2000, 201),
+ ('sync_interval', -5, 400),
+
+ ('https_proxy', 'https://proxy.corp.example.com', 201),
+ ('https_proxy', None, 201),
+ ('https_proxy', 'proxy.example.com; rm -rf /', 201), # Safe; values only set in env, not eval'ed
+
+ ('http_proxy', 'http://proxy.corp.example.com', 201),
+ ('http_proxy', None, 201),
+ ('http_proxy', 'proxy.example.com; rm -rf /', 201), # Safe; values only set in env, not eval'ed
+
+ ('no_proxy', 'quay.io', 201),
+ ('no_proxy', None, 201),
+ ('no_proxy', 'quay.io; rm -rf /', 201), # Safe because proxy values are not eval'ed
+
+ ('verify_tls', True, 201),
+ ('verify_tls', False, 201),
+ ('verify_tls', None, 400),
+ ('verify_tls', 'abc', 400),
+
+ ('root_rule', {'rule_kind': 'tag_glob_csv', 'rule_value': ['3.1', '3.1*']}, 201),
+ ('root_rule', {'rule_kind': 'tag_glob_csv'}, 400),
+ ('root_rule', {'rule_kind': 'tag_glob_csv', 'rule_value': []}, 400),
+ ('root_rule', {'rule_kind': 'incorrect', 'rule_value': ['3.1', '3.1*']}, 400),
+
+])
+def test_change_config(key, value, expected_status, client):
+ """ Verify that changing each attribute works as expected. """
+ mirror = _setup_mirror()
+
+ with client_with_identity('devtable', client) as cl:
+ params = {'repository': 'devtable/simple'}
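+ # Proxy settings and verify_tls live nested under external_registry_config.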
+ if key in ('http_proxy', 'https_proxy', 'no_proxy'):
+ request_body = {'external_registry_config': {'proxy': {key: value}}}
+ elif key == 'verify_tls':
+ request_body = {'external_registry_config': {key: value}}
+ else:
+ request_body = {key: value}
+ conduct_api_call(cl, RepoMirrorResource, 'PUT', params, request_body, expected_status)
+
+ with client_with_identity('devtable', client) as cl:
+ params = {'repository': 'devtable/simple'}
+ resp = conduct_api_call(cl, RepoMirrorResource, 'GET', params, None, 200)
+
+ if expected_status < 400:
+ if key == 'external_registry_password':
+ assert key not in resp.json
+ elif key == 'verify_tls':
+ assert resp.json['external_registry_config']['verify_tls'] == value
+ elif key in ('http_proxy', 'https_proxy', 'no_proxy'):
+ assert resp.json['external_registry_config']['proxy'][key] == value
+ else:
+ assert resp.json[key] == value
+ else:
+ if key == 'external_registry_password':
+ assert key not in resp.json
+ elif key == 'verify_tls':
+ assert resp.json['external_registry_config'][key] != value
+ elif key in ('http_proxy', 'https_proxy', 'no_proxy'):
+ assert resp.json['external_registry_config']['proxy'][key] != value
+ else:
+ assert resp.json[key] != value
+
+
+@pytest.mark.parametrize('request_body, expected_status', [
+
+ # Set a new password and username => Success
+ ({ 'external_registry_username': 'newUsername',
+ 'external_registry_password': 'newPassword'}, 201 ),
+
+ # Set password and username to None => Success
+ ({ 'external_registry_username': None,
+ 'external_registry_password': None}, 201 ),
+
+  # Set username to value but password None => Success
+ ({ 'external_registry_username': 'myUsername',
+ 'external_registry_password': None}, 201 ),
+
+ # Set only new Username => Success
+ ({'external_registry_username': 'myNewUsername'}, 201),
+ ({'external_registry_username': None}, 201),
+
+ # Set only new Password => Failure
+ ({'external_registry_password': 'myNewPassword'}, 400),
+ ({'external_registry_password': None}, 400),
+
+ # Set username and password to empty string => Success?
+ ({'external_registry_username': '',
+ 'external_registry_password': ''}, 201),
+
+])
+def test_change_credentials(request_body, expected_status, client):
+ """ Verify credentials can only be modified as a pair. """
+ mirror = _setup_mirror()
+
+ with client_with_identity('devtable', client) as cl:
+ params = {'repository': 'devtable/simple'}
+ conduct_api_call(cl, RepoMirrorResource, 'PUT', params, request_body, expected_status)
diff --git a/endpoints/api/test/test_organization.py b/endpoints/api/test/test_organization.py
new file mode 100644
index 000000000..4341e1125
--- /dev/null
+++ b/endpoints/api/test/test_organization.py
@@ -0,0 +1,38 @@
+import pytest
+
+from data import model
+from endpoints.api import api
+from endpoints.api.test.shared import conduct_api_call
+from endpoints.api.organization import (Organization,
+ OrganizationCollaboratorList)
+from endpoints.test.shared import client_with_identity
+from test.fixtures import *
+
+
+@pytest.mark.parametrize('expiration, expected_code', [
+ (0, 200),
+ (100, 400),
+ (100000000000000000000, 400),
+])
+def test_change_tag_expiration(expiration, expected_code, client):
+ with client_with_identity('devtable', client) as cl:
+ conduct_api_call(cl, Organization, 'PUT', {'orgname': 'buynlarge'},
+ body={'tag_expiration_s': expiration},
+ expected_code=expected_code)
+
+
+def test_get_organization_collaborators(client):
+ params = {'orgname': 'buynlarge'}
+
+ with client_with_identity('devtable', client) as cl:
+ resp = conduct_api_call(cl, OrganizationCollaboratorList, 'GET', params)
+
+ collaborator_names = [c['name'] for c in resp.json['collaborators']]
+ assert 'outsideorg' in collaborator_names
+ assert 'devtable' not in collaborator_names
+ assert 'reader' not in collaborator_names
+
+ for collaborator in resp.json['collaborators']:
+ if collaborator['name'] == 'outsideorg':
+ assert 'orgrepo' in collaborator['repositories']
+ assert 'anotherorgrepo' not in collaborator['repositories']
diff --git a/endpoints/api/test/test_permission.py b/endpoints/api/test/test_permission.py
new file mode 100644
index 000000000..1182f1071
--- /dev/null
+++ b/endpoints/api/test/test_permission.py
@@ -0,0 +1,23 @@
+import pytest
+
+from endpoints.api.test.shared import conduct_api_call
+from endpoints.api.permission import RepositoryUserPermission
+from endpoints.test.shared import client_with_identity
+from test.fixtures import *
+
+@pytest.mark.parametrize('repository, username, expected_code', [
+ pytest.param('devtable/simple', 'public', 200, id='valid user under user'),
+ pytest.param('devtable/simple', 'devtable+dtrobot', 200, id='valid robot under user'),
+ pytest.param('devtable/simple', 'buynlarge+coolrobot', 400, id='invalid robot under user'),
+ pytest.param('buynlarge/orgrepo', 'devtable', 200, id='valid user under org'),
+ pytest.param('buynlarge/orgrepo', 'devtable+dtrobot', 400, id='invalid robot under org'),
+ pytest.param('buynlarge/orgrepo', 'buynlarge+coolrobot', 200, id='valid robot under org'),
+])
+def test_robot_permission(repository, username, expected_code, client):
+ with client_with_identity('devtable', client) as cl:
+ conduct_api_call(cl, RepositoryUserPermission, 'PUT',
+ {'repository': repository, 'username': username},
+ body={
+ 'role': 'read',
+ },
+ expected_code=expected_code)
diff --git a/endpoints/api/test/test_repoemail_models_pre_oci.py b/endpoints/api/test/test_repoemail_models_pre_oci.py
new file mode 100644
index 000000000..7c8de8226
--- /dev/null
+++ b/endpoints/api/test/test_repoemail_models_pre_oci.py
@@ -0,0 +1,89 @@
+import pytest
+from mock import Mock
+
+import util
+from data import model
+from endpoints.api.repoemail_models_interface import RepositoryAuthorizedEmail
+from endpoints.api.repoemail_models_pre_oci import pre_oci_model
+
+
+@pytest.fixture
+def get_monkeypatch(monkeypatch):
+ return monkeypatch
+
+
+def return_none(name, repo, email):
+ return None
+
+
+def get_return_mock(mock):
+ def return_mock(name, repo, email):
+ return mock
+
+ return return_mock
+
+
+def test_get_email_authorized_for_repo(get_monkeypatch):
+ mock = Mock()
+
+ get_monkeypatch.setattr(model.repository, 'get_email_authorized_for_repo', mock)
+
+ pre_oci_model.get_email_authorized_for_repo('namespace_name', 'repository_name', 'email')
+
+ mock.assert_called_once_with('namespace_name', 'repository_name', 'email')
+
+
+def test_get_email_authorized_for_repo_return_none(get_monkeypatch):
+ get_monkeypatch.setattr(model.repository, 'get_email_authorized_for_repo', return_none)
+
+ repo = pre_oci_model.get_email_authorized_for_repo('namespace_name', 'repository_name', 'email')
+
+ assert repo is None
+
+
+def test_get_email_authorized_for_repo_return_repo(get_monkeypatch):
+ mock = Mock(confirmed=True, code='code')
+ get_monkeypatch.setattr(model.repository, 'get_email_authorized_for_repo', get_return_mock(mock))
+
+ actual = pre_oci_model.get_email_authorized_for_repo('namespace_name', 'repository_name',
+ 'email')
+
+ assert actual == RepositoryAuthorizedEmail('email', 'repository_name', 'namespace_name', True,
+ 'code')
+
+
+def test_create_email_authorization_for_repo(get_monkeypatch):
+ mock = Mock()
+ get_monkeypatch.setattr(model.repository, 'create_email_authorization_for_repo', mock)
+
+ pre_oci_model.create_email_authorization_for_repo('namespace_name', 'repository_name', 'email')
+
+ mock.assert_called_once_with('namespace_name', 'repository_name', 'email')
+
+
+def test_create_email_authorization_for_repo_return_none(get_monkeypatch):
+ get_monkeypatch.setattr(model.repository, 'create_email_authorization_for_repo', return_none)
+
+ assert pre_oci_model.create_email_authorization_for_repo('namespace_name', 'repository_name',
+ 'email') is None
+
+
+def test_create_email_authorization_for_repo_return_mock(get_monkeypatch):
+ mock = Mock()
+ get_monkeypatch.setattr(model.repository, 'create_email_authorization_for_repo',
+ get_return_mock(mock))
+
+ assert pre_oci_model.create_email_authorization_for_repo('namespace_name', 'repository_name',
+ 'email') is not None
+
+
+def test_create_email_authorization_for_repo_return_value(get_monkeypatch):
+ mock = Mock(confirmed=False, code='code')
+
+ get_monkeypatch.setattr(model.repository, 'create_email_authorization_for_repo',
+ get_return_mock(mock))
+
+ actual = pre_oci_model.create_email_authorization_for_repo('namespace_name', 'repository_name',
+ 'email')
+ assert actual == RepositoryAuthorizedEmail('email', 'repository_name', 'namespace_name', False,
+ 'code')
diff --git a/endpoints/api/test/test_repository.py b/endpoints/api/test/test_repository.py
new file mode 100644
index 000000000..4edca0e35
--- /dev/null
+++ b/endpoints/api/test/test_repository.py
@@ -0,0 +1,166 @@
+import pytest
+
+from mock import patch, ANY, MagicMock
+
+from data import model, database
+from data.appr_model import release, channel, blob
+from endpoints.appr.models_cnr import model as appr_model
+from endpoints.api.test.shared import conduct_api_call
+from endpoints.api.repository import RepositoryTrust, Repository, RepositoryList
+from endpoints.test.shared import client_with_identity
+from features import FeatureNameValue
+
+from test.fixtures import *
+
+
+@pytest.mark.parametrize('trust_enabled,repo_found,expected_status', [
+ (True, True, 200),
+ (False, True, 200),
+ (False, False, 404),
+ ('invalid_req', False, 400),
+])
+def test_post_changetrust(trust_enabled, repo_found, expected_status, client):
+ with patch('endpoints.api.repository.tuf_metadata_api') as mock_tuf:
+ with patch(
+ 'endpoints.api.repository_models_pre_oci.model.repository.get_repository') as mock_model:
+ mock_model.return_value = MagicMock() if repo_found else None
+ mock_tuf.get_default_tags_with_expiration.return_value = ['tags', 'expiration']
+ with client_with_identity('devtable', client) as cl:
+ params = {'repository': 'devtable/repo'}
+ request_body = {'trust_enabled': trust_enabled}
+ conduct_api_call(cl, RepositoryTrust, 'POST', params, request_body, expected_status)
+
+
+def test_signing_disabled(client):
+ with patch('features.SIGNING', FeatureNameValue('SIGNING', False)):
+ with client_with_identity('devtable', client) as cl:
+ params = {'repository': 'devtable/simple'}
+ response = conduct_api_call(cl, Repository, 'GET', params).json
+ assert not response['trust_enabled']
+
+
+def test_list_starred_repos(client):
+ with client_with_identity('devtable', client) as cl:
+ params = {
+ 'starred': 'true',
+ }
+
+ response = conduct_api_call(cl, RepositoryList, 'GET', params).json
+ repos = {r['namespace'] + '/' + r['name'] for r in response['repositories']}
+ assert 'devtable/simple' in repos
+ assert 'public/publicrepo' not in repos
+
+ # Add a star on publicrepo.
+ publicrepo = model.repository.get_repository('public', 'publicrepo')
+ model.repository.star_repository(model.user.get_user('devtable'), publicrepo)
+
+ # Ensure publicrepo shows up.
+ response = conduct_api_call(cl, RepositoryList, 'GET', params).json
+ repos = {r['namespace'] + '/' + r['name'] for r in response['repositories']}
+ assert 'devtable/simple' in repos
+ assert 'public/publicrepo' in repos
+
+ # Make publicrepo private and ensure it disappears.
+ model.repository.set_repository_visibility(publicrepo, 'private')
+
+ response = conduct_api_call(cl, RepositoryList, 'GET', params).json
+ repos = {r['namespace'] + '/' + r['name'] for r in response['repositories']}
+ assert 'devtable/simple' in repos
+ assert 'public/publicrepo' not in repos
+
+
+def test_list_repositories_last_modified(client):
+ with client_with_identity('devtable', client) as cl:
+ params = {
+ 'namespace': 'devtable',
+ 'last_modified': 'true',
+ }
+
+ response = conduct_api_call(cl, RepositoryList, 'GET', params).json
+
+ for repo in response['repositories']:
+ if repo['name'] != 'building':
+ assert repo['last_modified'] is not None
+
+
+@pytest.mark.parametrize('repo_name, expected_status', [
+ pytest.param('x' * 255, 201, id='Maximum allowed length'),
+ pytest.param('x' * 256, 400, id='Over allowed length'),
+ pytest.param('a|b', 400, id='Invalid name'),
+])
+def test_create_repository(repo_name, expected_status, client):
+ with client_with_identity('devtable', client) as cl:
+ body = {
+ 'namespace': 'devtable',
+ 'repository': repo_name,
+ 'visibility': 'public',
+ 'description': 'foo',
+ }
+
+ result = conduct_api_call(cl, RepositoryList, 'post', None, body,
+ expected_code=expected_status).json
+ if expected_status == 201:
+ assert result['name'] == repo_name
+ assert model.repository.get_repository('devtable', repo_name).name == repo_name
+
+
+@pytest.mark.parametrize('has_tag_manifest', [
+ True,
+ False,
+])
+def test_get_repo(has_tag_manifest, client, initialized_db):
+ with client_with_identity('devtable', client) as cl:
+ if not has_tag_manifest:
+ database.TagManifestLabelMap.delete().execute()
+ database.TagManifestToManifest.delete().execute()
+ database.TagManifestLabel.delete().execute()
+ database.TagManifest.delete().execute()
+
+ params = {'repository': 'devtable/simple'}
+ response = conduct_api_call(cl, Repository, 'GET', params).json
+ assert response['kind'] == 'image'
+
+
+def test_get_app_repo(client, initialized_db):
+ with client_with_identity('devtable', client) as cl:
+ devtable = model.user.get_user('devtable')
+ repo = model.repository.create_repository('devtable', 'someappr', devtable,
+ repo_kind='application')
+
+ models_ref = appr_model.models_ref
+ blob.get_or_create_blob('sha256:somedigest', 0, 'application/vnd.cnr.blob.v0.tar+gzip',
+ ['local_us'], models_ref)
+
+ release.create_app_release(repo, 'test',
+ dict(mediaType='application/vnd.cnr.package-manifest.helm.v0.json'),
+ 'sha256:somedigest', models_ref, False)
+
+ channel.create_or_update_channel(repo, 'somechannel', 'test', models_ref)
+
+ params = {'repository': 'devtable/someappr'}
+ response = conduct_api_call(cl, Repository, 'GET', params).json
+ assert response['kind'] == 'application'
+ assert response['channels']
+ assert response['releases']
+
+
+
+@pytest.mark.parametrize('state, can_write', [
+ (database.RepositoryState.NORMAL, True),
+ (database.RepositoryState.READ_ONLY, False),
+ (database.RepositoryState.MIRROR, False),
+])
+def test_get_repo_state_can_write(state, can_write, client, initialized_db):
+ with client_with_identity('devtable', client) as cl:
+ params = {'repository': 'devtable/simple'}
+ response = conduct_api_call(cl, Repository, 'GET', params).json
+ assert response['can_write']
+
+ repo = model.repository.get_repository('devtable', 'simple')
+ repo.state = state
+ repo.save()
+
+ with client_with_identity('devtable', client) as cl:
+ params = {'repository': 'devtable/simple'}
+ response = conduct_api_call(cl, Repository, 'GET', params).json
+ assert response['can_write'] == can_write
diff --git a/endpoints/api/test/test_repositorynotification.py b/endpoints/api/test/test_repositorynotification.py
new file mode 100644
index 000000000..06d65e2f0
--- /dev/null
+++ b/endpoints/api/test/test_repositorynotification.py
@@ -0,0 +1,90 @@
+import pytest
+
+from mock import Mock, MagicMock
+
+from endpoints.api.test.shared import conduct_api_call
+from endpoints.api.repositorynotification import RepositoryNotificationList, RepositoryNotification, TestRepositoryNotification
+from endpoints.test.shared import client_with_identity
+import endpoints.api.repositorynotification_models_interface as iface
+from test.fixtures import *
+
+@pytest.fixture()
+def authd_client(client):
+ with client_with_identity('devtable', client) as cl:
+ yield cl
+
+def mock_get_notification(uuid):
+ mock_notification = MagicMock(iface.RepositoryNotification)
+ if uuid == 'exists':
+ mock_notification.return_value = iface.RepositoryNotification(
+ 'exists',
+ 'title',
+ 'event_name',
+ 'method_name',
+ 'config_json',
+ 'event_config_json',
+ 2,
+ )
+ else:
+ mock_notification.return_value = None
+ return mock_notification
+
+@pytest.mark.parametrize('namespace,repository,body,expected_code',[
+ ('devtable', 'simple', dict(config={'url': 'http://example.com'}, event='repo_push',
+ method='webhook', eventConfig={}, title='test'), 201),
+ ('devtable', 'simple', dict(config={'url': 'http://example.com'}, event='repo_mirror_sync_started',
+ method='webhook', eventConfig={}, title='test'), 201),
+ ('devtable', 'simple', dict(config={'url': 'http://example.com'}, event='repo_mirror_sync_success',
+ method='webhook', eventConfig={}, title='test'), 201),
+ ('devtable', 'simple', dict(config={'url': 'http://example.com'}, event='repo_mirror_sync_failed',
+ method='webhook', eventConfig={}, title='test'), 201)
+])
+def test_create_repo_notification(namespace, repository, body, expected_code, authd_client):
+ params = {'repository': namespace + '/' + repository}
+ conduct_api_call(authd_client, RepositoryNotificationList, 'POST', params, body, expected_code=expected_code)
+
+@pytest.mark.parametrize('namespace,repository,expected_code',[
+ ('devtable', 'simple', 200)
+])
+def test_list_repo_notifications(namespace, repository, expected_code, authd_client):
+ params = {'repository': namespace + '/' + repository}
+ resp = conduct_api_call(authd_client, RepositoryNotificationList, 'GET', params, expected_code=expected_code).json
+ assert len(resp['notifications']) > 0
+
+@pytest.mark.parametrize('namespace,repository,uuid,expected_code',[
+ ('devtable', 'simple', 'exists', 200),
+ ('devtable', 'simple', 'not found', 404),
+])
+def test_get_repo_notification(namespace, repository, uuid, expected_code, authd_client, monkeypatch):
+ monkeypatch.setattr('endpoints.api.repositorynotification.model.get_repo_notification', mock_get_notification(uuid))
+ params = {'repository': namespace + '/' + repository, 'uuid': uuid}
+ conduct_api_call(authd_client, RepositoryNotification, 'GET', params, expected_code=expected_code)
+
+@pytest.mark.parametrize('namespace,repository,uuid,expected_code',[
+ ('devtable', 'simple', 'exists', 204),
+ ('devtable', 'simple', 'not found', 400),
+])
+def test_delete_repo_notification(namespace, repository, uuid, expected_code, authd_client, monkeypatch):
+ monkeypatch.setattr('endpoints.api.repositorynotification.model.delete_repo_notification', mock_get_notification(uuid))
+ params = {'repository': namespace + '/' + repository, 'uuid': uuid}
+ conduct_api_call(authd_client, RepositoryNotification, 'DELETE', params, expected_code=expected_code)
+
+
+@pytest.mark.parametrize('namespace,repository,uuid,expected_code',[
+ ('devtable', 'simple', 'exists', 204),
+ ('devtable', 'simple', 'not found', 400),
+])
+def test_reset_repo_notification(namespace, repository, uuid, expected_code, authd_client, monkeypatch):
+ monkeypatch.setattr('endpoints.api.repositorynotification.model.reset_notification_number_of_failures', mock_get_notification(uuid))
+ params = {'repository': namespace + '/' + repository, 'uuid': uuid}
+ conduct_api_call(authd_client, RepositoryNotification, 'POST', params, expected_code=expected_code)
+
+
+@pytest.mark.parametrize('namespace,repository,uuid,expected_code',[
+ ('devtable', 'simple', 'exists', 200),
+ ('devtable', 'simple', 'not found', 400),
+])
+def test_test_repo_notification(namespace, repository, uuid, expected_code, authd_client, monkeypatch):
+ monkeypatch.setattr('endpoints.api.repositorynotification.model.queue_test_notification', mock_get_notification(uuid))
+ params = {'repository': namespace + '/' + repository, 'uuid': uuid}
+ conduct_api_call(authd_client, TestRepositoryNotification, 'POST', params, expected_code=expected_code)
diff --git a/endpoints/api/test/test_robot.py b/endpoints/api/test/test_robot.py
new file mode 100644
index 000000000..7c5349549
--- /dev/null
+++ b/endpoints/api/test/test_robot.py
@@ -0,0 +1,104 @@
+import pytest
+import json
+
+from data import model
+from endpoints.api import api
+from endpoints.api.test.shared import conduct_api_call
+from endpoints.api.robot import UserRobot, OrgRobot, UserRobotList, OrgRobotList
+from endpoints.test.shared import client_with_identity
+from util.names import parse_robot_username
+
+from test.test_ldap import mock_ldap
+
+from test.fixtures import *
+
+@pytest.mark.parametrize('endpoint', [
+ UserRobot,
+ OrgRobot,
+])
+@pytest.mark.parametrize('body', [
+ {},
+ {'description': 'this is a description'},
+ {'unstructured_metadata': {'foo': 'bar'}},
+ {'description': 'this is a description', 'unstructured_metadata': {'foo': 'bar'}},
+])
+def test_create_robot_with_metadata(endpoint, body, client):
+ with client_with_identity('devtable', client) as cl:
+ # Create the robot with the specified body.
+ conduct_api_call(cl, endpoint, 'PUT', {'orgname': 'buynlarge', 'robot_shortname': 'somebot'},
+ body, expected_code=201)
+
+ # Ensure the create succeeded.
+ resp = conduct_api_call(cl, endpoint, 'GET', {
+ 'orgname': 'buynlarge',
+ 'robot_shortname': 'somebot',
+ })
+
+ body = body or {}
+ assert resp.json['description'] == (body.get('description') or '')
+ assert resp.json['unstructured_metadata'] == (body.get('unstructured_metadata') or {})
+
+
+@pytest.mark.parametrize('endpoint, params', [
+ (UserRobot, {'robot_shortname': 'dtrobot'}),
+ (OrgRobot, {'orgname': 'buynlarge', 'robot_shortname': 'coolrobot'}),
+])
+def test_retrieve_robot(endpoint, params, app, client):
+ with client_with_identity('devtable', client) as cl:
+ result = conduct_api_call(cl, endpoint, 'GET', params, None)
+ assert result.json['token'] is not None
+
+
+@pytest.mark.parametrize('endpoint, params, bot_endpoint', [
+ (UserRobotList, {}, UserRobot),
+ (OrgRobotList, {'orgname': 'buynlarge'}, OrgRobot),
+])
+@pytest.mark.parametrize('include_token', [
+ True,
+ False,
+])
+@pytest.mark.parametrize('limit', [
+ None,
+ 1,
+ 5,
+])
+def test_retrieve_robots(endpoint, params, bot_endpoint, include_token, limit, app, client):
+ params['token'] = 'true' if include_token else 'false'
+
+ if limit is not None:
+ params['limit'] = limit
+
+ with client_with_identity('devtable', client) as cl:
+ result = conduct_api_call(cl, endpoint, 'GET', params, None)
+
+ if limit is not None:
+ assert len(result.json['robots']) <= limit
+
+ for robot in result.json['robots']:
+ assert (robot.get('token') is not None) == include_token
+ if include_token:
+ bot_params = dict(params)
+ bot_params['robot_shortname'] = parse_robot_username(robot['name'])[1]
+ bot_result = conduct_api_call(cl, bot_endpoint, 'GET', bot_params, None)
+ assert robot.get('token') == bot_result.json['token']
+
+
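+# Robot tokens (and repository permissions, when requested) should be visible to
+# org admins only; non-admins listing the org's robots must not receive them.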
+@pytest.mark.parametrize('username, is_admin', [
+ ('devtable', True),
+ ('reader', False),
+])
+@pytest.mark.parametrize('with_permissions', [
+ True,
+ False,
+])
+def test_retrieve_robots_token_permission(username, is_admin, with_permissions, app, client):
+ with client_with_identity(username, client) as cl:
+ params = {'orgname': 'buynlarge', 'token': 'true'}
+ if with_permissions:
+ params['permissions'] = 'true'
+
+ result = conduct_api_call(cl, OrgRobotList, 'GET', params, None)
+ assert result.json['robots']
+ for robot in result.json['robots']:
+ assert (robot.get('token') is not None) == is_admin
+ assert (robot.get('repositories') is not None) == (is_admin and with_permissions)
diff --git a/endpoints/api/test/test_search.py b/endpoints/api/test/test_search.py
new file mode 100644
index 000000000..5e034934c
--- /dev/null
+++ b/endpoints/api/test/test_search.py
@@ -0,0 +1,44 @@
+import pytest
+
+from playhouse.test_utils import assert_query_count
+
+from data import model, database
+from endpoints.api.search import ConductRepositorySearch, ConductSearch
+from endpoints.api.test.shared import conduct_api_call
+from endpoints.test.shared import client_with_identity
+from test.fixtures import *
+
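+# Repository search should answer within a fixed query budget; the repository-kind
+# caches are primed first so cache misses don't count against it.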
+@pytest.mark.parametrize('query', [
+ '',
+ 'simple',
+ 'public',
+ 'repository',
+])
+def test_repository_search(query, client):
+ # Prime the caches.
+ database.Repository.kind.get_id('image')
+ database.Repository.kind.get_name(1)
+
+ with client_with_identity('devtable', client) as cl:
+ params = {'query': query}
+ with assert_query_count(7):
+ result = conduct_api_call(cl, ConductRepositorySearch, 'GET', params, None, 200).json
+ assert result['start_index'] == 0
+ assert result['page'] == 1
+ assert len(result['results'])
+
+
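+# Entity search should likewise stay within a fixed query budget.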
+@pytest.mark.parametrize('query', [
+ 'simple',
+ 'public',
+ 'repository',
+])
+def test_search_query_count(query, client):
+ with client_with_identity('devtable', client) as cl:
+ params = {'query': query}
+ with assert_query_count(10):
+ result = conduct_api_call(cl, ConductSearch, 'GET', params, None, 200).json
+ assert len(result['results'])
diff --git a/endpoints/api/test/test_secscan.py b/endpoints/api/test/test_secscan.py
new file mode 100644
index 000000000..40afa6ac3
--- /dev/null
+++ b/endpoints/api/test/test_secscan.py
@@ -0,0 +1,32 @@
+import base64
+
+import pytest
+
+from data.registry_model import registry_model
+from endpoints.api.test.shared import conduct_api_call
+from endpoints.api.secscan import RepositoryImageSecurity, RepositoryManifestSecurity
+
+from test.fixtures import *
+
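+# Both security-scan endpoints should accept HTTP basic auth (e.g. a pull secret)
+# in place of a browser session when returning image and manifest scan data.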
+@pytest.mark.parametrize('endpoint', [
+ RepositoryImageSecurity,
+ RepositoryManifestSecurity,
+])
+def test_get_security_info_with_pull_secret(endpoint, client):
+ repository_ref = registry_model.lookup_repository('devtable', 'simple')
+ tag = registry_model.get_repo_tag(repository_ref, 'latest', include_legacy_image=True)
+ manifest = registry_model.get_manifest_for_tag(tag, backfill_if_necessary=True)
+
+ params = {
+ 'repository': 'devtable/simple',
+ 'imageid': tag.legacy_image.docker_image_id,
+ 'manifestref': manifest.digest,
+ }
+
+ headers = {
+ 'Authorization': 'Basic %s' % base64.b64encode('devtable:password'),
+ }
+
+ conduct_api_call(client, endpoint, 'GET', params, None, headers=headers, expected_code=200)
diff --git a/endpoints/api/test/test_security.py b/endpoints/api/test/test_security.py
new file mode 100644
index 000000000..a2b18ef4c
--- /dev/null
+++ b/endpoints/api/test/test_security.py
@@ -0,0 +1,1485 @@
+from mock import patch
+
+import pytest
+from flask_principal import AnonymousIdentity
+
+from endpoints.api import api
+from endpoints.api.test.shared import conduct_api_call
+from endpoints.test.shared import client_with_identity, toggle_feature
+
+from endpoints.api.appspecifictokens import *
+from endpoints.api.billing import *
+from endpoints.api.build import *
+from endpoints.api.discovery import *
+from endpoints.api.globalmessages import *
+from endpoints.api.image import *
+from endpoints.api.logs import *
+from endpoints.api.manifest import *
+from endpoints.api.organization import *
+from endpoints.api.permission import *
+from endpoints.api.prototype import *
+from endpoints.api.repoemail import *
+from endpoints.api.repository import *
+from endpoints.api.repositorynotification import *
+from endpoints.api.repotoken import *
+from endpoints.api.robot import *
+from endpoints.api.search import *
+from endpoints.api.secscan import *
+from endpoints.api.signing import *
+from endpoints.api.subscribe import *
+from endpoints.api.suconfig import *
+from endpoints.api.superuser import *
+from endpoints.api.tag import *
+from endpoints.api.team import *
+from endpoints.api.trigger import *
+from endpoints.api.user import *
+from endpoints.api.mirror import *
+
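+# Import Repository explicitly so the name is not left to whichever star import wins.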
+from endpoints.api.repository import Repository
+
+from test.fixtures import *
+
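+# Canned parameter sets shared by the access-control matrix below.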
+ORG_PARAMS = {'orgname': 'buynlarge'}
+TEAM_PARAMS = {'orgname': 'buynlarge', 'teamname': 'owners'}
+BUILD_PARAMS = {'build_uuid': 'test-1234'}
+REPO_PARAMS = {'repository': 'devtable/someapp'}
+SEARCH_PARAMS = {'query': ''}
+NOTIFICATION_PARAMS = {'namespace': 'devtable', 'repository': 'devtable/simple', 'uuid': 'some uuid'}
+TOKEN_PARAMS = {'token_uuid': 'someuuid'}
+TRIGGER_PARAMS = {'repository': 'devtable/simple', 'trigger_uuid': 'someuuid'}
+MANIFEST_PARAMS = {'repository': 'devtable/simple', 'manifestref': 'sha256:deadbeef'}
+EXPORTLOGS_PARAMS = {'callback_url': 'http://foo'}
+
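+# Each entry is (resource, method, params, request body, identity, expected status);
+# together they pin down the expected access-control behavior of every API resource
+# for anonymous callers and for the 'freshuser', 'reader' and 'devtable' identities.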
+SECURITY_TESTS = [
+ (AppTokens, 'GET', {}, {}, None, 401),
+ (AppTokens, 'GET', {}, {}, 'freshuser', 200),
+ (AppTokens, 'GET', {}, {}, 'reader', 200),
+ (AppTokens, 'GET', {}, {}, 'devtable', 200),
+
+ (AppTokens, 'POST', {}, {}, None, 401),
+ (AppTokens, 'POST', {}, {}, 'freshuser', 400),
+ (AppTokens, 'POST', {}, {}, 'reader', 400),
+ (AppTokens, 'POST', {}, {}, 'devtable', 400),
+
+ (AppToken, 'GET', TOKEN_PARAMS, {}, None, 401),
+ (AppToken, 'GET', TOKEN_PARAMS, {}, 'freshuser', 404),
+ (AppToken, 'GET', TOKEN_PARAMS, {}, 'reader', 404),
+ (AppToken, 'GET', TOKEN_PARAMS, {}, 'devtable', 404),
+
+ (AppToken, 'DELETE', TOKEN_PARAMS, {}, None, 401),
+ (AppToken, 'DELETE', TOKEN_PARAMS, {}, 'freshuser', 404),
+ (AppToken, 'DELETE', TOKEN_PARAMS, {}, 'reader', 404),
+ (AppToken, 'DELETE', TOKEN_PARAMS, {}, 'devtable', 404),
+
+ (RepositoryManifest, 'GET', MANIFEST_PARAMS, {}, None, 401),
+ (RepositoryManifest, 'GET', MANIFEST_PARAMS, {}, 'freshuser', 403),
+ (RepositoryManifest, 'GET', MANIFEST_PARAMS, {}, 'reader', 403),
+ (RepositoryManifest, 'GET', MANIFEST_PARAMS, {}, 'devtable', 404),
+
+ (OrganizationCollaboratorList, 'GET', ORG_PARAMS, None, None, 401),
+ (OrganizationCollaboratorList, 'GET', ORG_PARAMS, None, 'freshuser', 403),
+ (OrganizationCollaboratorList, 'GET', ORG_PARAMS, None, 'reader', 403),
+ (OrganizationCollaboratorList, 'GET', ORG_PARAMS, None, 'devtable', 200),
+
+ (OrganizationTeamSyncing, 'POST', TEAM_PARAMS, {}, None, 401),
+ (OrganizationTeamSyncing, 'POST', TEAM_PARAMS, {}, 'freshuser', 403),
+ (OrganizationTeamSyncing, 'POST', TEAM_PARAMS, {}, 'reader', 403),
+ (OrganizationTeamSyncing, 'POST', TEAM_PARAMS, {}, 'devtable', 400),
+
+ (OrganizationTeamSyncing, 'DELETE', TEAM_PARAMS, {}, None, 401),
+ (OrganizationTeamSyncing, 'DELETE', TEAM_PARAMS, {}, 'freshuser', 403),
+ (OrganizationTeamSyncing, 'DELETE', TEAM_PARAMS, {}, 'reader', 403),
+ (OrganizationTeamSyncing, 'DELETE', TEAM_PARAMS, {}, 'devtable', 200),
+
+ (ConductRepositorySearch, 'GET', SEARCH_PARAMS, None, None, 200),
+ (ConductRepositorySearch, 'GET', SEARCH_PARAMS, None, 'freshuser', 200),
+ (ConductRepositorySearch, 'GET', SEARCH_PARAMS, None, 'reader', 200),
+ (ConductRepositorySearch, 'GET', SEARCH_PARAMS, None, 'devtable', 200),
+
+ (SuperUserRepositoryBuildLogs, 'GET', BUILD_PARAMS, None, None, 401),
+ (SuperUserRepositoryBuildLogs, 'GET', BUILD_PARAMS, None, 'freshuser', 403),
+ (SuperUserRepositoryBuildLogs, 'GET', BUILD_PARAMS, None, 'reader', 403),
+ (SuperUserRepositoryBuildLogs, 'GET', BUILD_PARAMS, None, 'devtable', 400),
+
+ (SuperUserRepositoryBuildStatus, 'GET', BUILD_PARAMS, None, None, 401),
+ (SuperUserRepositoryBuildStatus, 'GET', BUILD_PARAMS, None, 'freshuser', 403),
+ (SuperUserRepositoryBuildStatus, 'GET', BUILD_PARAMS, None, 'reader', 403),
+ (SuperUserRepositoryBuildStatus, 'GET', BUILD_PARAMS, None, 'devtable', 400),
+
+ (SuperUserRepositoryBuildResource, 'GET', BUILD_PARAMS, None, None, 401),
+ (SuperUserRepositoryBuildResource, 'GET', BUILD_PARAMS, None, 'freshuser', 403),
+ (SuperUserRepositoryBuildResource, 'GET', BUILD_PARAMS, None, 'reader', 403),
+ (SuperUserRepositoryBuildResource, 'GET', BUILD_PARAMS, None, 'devtable', 404),
+
+ (RepositorySignatures, 'GET', REPO_PARAMS, {}, 'freshuser', 403),
+ (RepositorySignatures, 'GET', REPO_PARAMS, {}, 'reader', 403),
+ (RepositorySignatures, 'GET', REPO_PARAMS, {}, 'devtable', 404),
+
+ (RepositoryNotification, 'POST', NOTIFICATION_PARAMS, {}, None, 401),
+ (RepositoryNotification, 'POST', NOTIFICATION_PARAMS, {}, 'freshuser', 403),
+ (RepositoryNotification, 'POST', NOTIFICATION_PARAMS, {}, 'reader', 403),
+ (RepositoryNotification, 'POST', NOTIFICATION_PARAMS, {}, 'devtable', 400),
+
+ (RepositoryTrust, 'POST', REPO_PARAMS, {'trust_enabled': True}, None, 401),
+ (RepositoryTrust, 'POST', REPO_PARAMS, {'trust_enabled': True}, 'freshuser', 403),
+ (RepositoryTrust, 'POST', REPO_PARAMS, {'trust_enabled': True}, 'reader', 403),
+ (RepositoryTrust, 'POST', REPO_PARAMS, {'trust_enabled': True}, 'devtable', 404),
+
+ (BuildTrigger, 'GET', TRIGGER_PARAMS, {}, None, 401),
+ (BuildTrigger, 'GET', TRIGGER_PARAMS, {}, 'freshuser', 403),
+ (BuildTrigger, 'GET', TRIGGER_PARAMS, {}, 'reader', 403),
+ (BuildTrigger, 'GET', TRIGGER_PARAMS, {}, 'devtable', 404),
+
+ (BuildTrigger, 'DELETE', TRIGGER_PARAMS, {}, None, 401),
+ (BuildTrigger, 'DELETE', TRIGGER_PARAMS, {}, 'freshuser', 403),
+ (BuildTrigger, 'DELETE', TRIGGER_PARAMS, {}, 'reader', 403),
+ (BuildTrigger, 'DELETE', TRIGGER_PARAMS, {}, 'devtable', 404),
+
+ (BuildTrigger, 'PUT', TRIGGER_PARAMS, {}, None, 401),
+ (BuildTrigger, 'PUT', TRIGGER_PARAMS, {}, 'freshuser', 403),
+ (BuildTrigger, 'PUT', TRIGGER_PARAMS, {}, 'reader', 403),
+ (BuildTrigger, 'PUT', TRIGGER_PARAMS, {}, 'devtable', 400),
+
+ (RepositoryUserTransitivePermission, 'GET', {'username': 'A2O9','repository': 'public/publicrepo'}, None, None, 401),
+ (RepositoryUserTransitivePermission, 'GET', {'username': 'A2O9','repository': 'public/publicrepo'}, None, 'freshuser', 403),
+ (RepositoryUserTransitivePermission, 'GET', {'username': 'A2O9','repository': 'public/publicrepo'}, None, 'reader', 403),
+ (RepositoryUserTransitivePermission, 'GET', {'username': 'A2O9','repository': 'public/publicrepo'}, None, 'devtable', 403),
+ (RepositoryUserTransitivePermission, 'GET', {'username': 'A2O9','repository': 'devtable/shared'}, None, None, 401),
+ (RepositoryUserTransitivePermission, 'GET', {'username': 'A2O9','repository': 'devtable/shared'}, None, 'freshuser', 403),
+ (RepositoryUserTransitivePermission, 'GET', {'username': 'A2O9','repository': 'devtable/shared'}, None, 'reader', 403),
+ (RepositoryUserTransitivePermission, 'GET', {'username': 'A2O9','repository': 'devtable/shared'}, None, 'devtable', 404),
+ (RepositoryUserTransitivePermission, 'GET', {'username': 'A2O9','repository': 'buynlarge/orgrepo'}, None, None, 401),
+ (RepositoryUserTransitivePermission, 'GET', {'username': 'A2O9','repository': 'buynlarge/orgrepo'}, None, 'freshuser', 403),
+ (RepositoryUserTransitivePermission, 'GET', {'username': 'A2O9','repository': 'buynlarge/orgrepo'}, None, 'reader', 403),
+ (RepositoryUserTransitivePermission, 'GET', {'username': 'A2O9','repository': 'buynlarge/orgrepo'}, None, 'devtable', 404),
+ (RepositoryUserTransitivePermission, 'GET', {'username': 'devtable','repository': 'devtable/shared'}, None, 'devtable', 200),
+ (RepositoryUserTransitivePermission, 'GET', {'username': 'devtable','repository': 'devtable/nope'}, None, 'devtable', 404),
+
+ (StarredRepositoryList, 'GET', None, None, None, 401),
+ (StarredRepositoryList, 'GET', None, None, 'devtable', 200),
+ (StarredRepositoryList, 'GET', None, None, 'freshuser', 200),
+ (StarredRepositoryList, 'GET', None, None, 'reader', 200),
+ (StarredRepositoryList, 'POST', None, {u'namespace': 'public', u'repository': 'publicrepo'}, None, 401),
+ (StarredRepositoryList, 'POST', None, {u'namespace': 'public', u'repository': 'publicrepo'}, 'devtable', 201),
+ (StarredRepositoryList, 'POST', None, {u'namespace': 'public', u'repository': 'publicrepo'}, 'freshuser', 201),
+ (StarredRepositoryList, 'POST', None, {u'namespace': 'public', u'repository': 'publicrepo'}, 'reader', 201),
+
+ (StarredRepository, 'DELETE', {'repository': 'public/publicrepo'}, None, None, 401),
+ (StarredRepository, 'DELETE', {'repository': 'public/publicrepo'}, None, 'devtable', 204),
+ (StarredRepository, 'DELETE', {'repository': 'public/publicrepo'}, None, 'freshuser', 204),
+ (StarredRepository, 'DELETE', {'repository': 'public/publicrepo'}, None, 'reader', 204),
+
+ (UserNotification, 'GET', {'uuid': 'someuuid'}, None, None, 401),
+ (UserNotification, 'GET', {'uuid': 'someuuid'}, None, 'devtable', 404),
+ (UserNotification, 'GET', {'uuid': 'someuuid'}, None, 'freshuser', 404),
+ (UserNotification, 'GET', {'uuid': 'someuuid'}, None, 'reader', 404),
+ (UserNotification, 'PUT', {'uuid': 'someuuid'}, {}, None, 401),
+ (UserNotification, 'PUT', {'uuid': 'someuuid'}, {}, 'devtable', 404),
+ (UserNotification, 'PUT', {'uuid': 'someuuid'}, {}, 'freshuser', 404),
+ (UserNotification, 'PUT', {'uuid': 'someuuid'}, {}, 'reader', 404),
+
+ (UserInvoiceList, 'GET', None, None, None, 401),
+ (UserInvoiceList, 'GET', None, None, 'devtable', 200),
+ (UserInvoiceList, 'GET', None, None, 'freshuser', 404),
+ (UserInvoiceList, 'GET', None, None, 'reader', 404),
+
+ (PrivateRepositories, 'GET', None, None, None, 401),
+ (PrivateRepositories, 'GET', None, None, 'devtable', 200),
+ (PrivateRepositories, 'GET', None, None, 'freshuser', 200),
+ (PrivateRepositories, 'GET', None, None, 'reader', 200),
+
+ (ConvertToOrganization, 'POST', None, {u'adminPassword': 'IQTM', u'plan': '1RB4', u'adminUser': '44E8'}, None, 401),
+ (ConvertToOrganization, 'POST', None, {u'adminPassword': 'IQTM', u'plan': '1RB4', u'adminUser': '44E8'}, 'devtable', 400),
+ (ConvertToOrganization, 'POST', None, {u'adminPassword': 'IQTM', u'plan': '1RB4', u'adminUser': '44E8'}, 'freshuser', 400),
+ (ConvertToOrganization, 'POST', None, {u'adminPassword': 'IQTM', u'plan': '1RB4', u'adminUser': '44E8'}, 'reader', 400),
+
+ (UserRobotList, 'GET', None, None, None, 401),
+ (UserRobotList, 'GET', None, None, 'devtable', 200),
+ (UserRobotList, 'GET', None, None, 'freshuser', 200),
+ (UserRobotList, 'GET', None, None, 'reader', 200),
+
+ (UserCard, 'GET', None, None, None, 401),
+ (UserCard, 'GET', None, None, 'devtable', 200),
+ (UserCard, 'GET', None, None, 'freshuser', 200),
+ (UserCard, 'GET', None, None, 'reader', 200),
+ (UserCard, 'POST', None, {u'token': 'ORH4'}, None, 401),
+
+ (UserPlan, 'GET', None, None, None, 401),
+ (UserPlan, 'GET', None, None, 'devtable', 200),
+ (UserPlan, 'GET', None, None, 'freshuser', 200),
+ (UserPlan, 'GET', None, None, 'reader', 200),
+ (UserPlan, 'PUT', None, {u'plan': '1QIK'}, None, 401),
+
+ (UserLogs, 'GET', None, None, None, 401),
+ (UserLogs, 'GET', None, None, 'devtable', 200),
+ (UserLogs, 'GET', None, None, 'freshuser', 200),
+ (UserLogs, 'GET', None, None, 'reader', 200),
+
+ (OrganizationList, 'POST', None, {u'name': 'KSIS', u'email': 'DHVZ'}, None, 401),
+ (OrganizationList, 'POST', None, {u'name': 'KSIS', u'email': 'DHVZ'}, 'devtable', 400),
+ (OrganizationList, 'POST', None, {u'name': 'KSIS', u'email': 'DHVZ'}, 'freshuser', 400),
+ (OrganizationList, 'POST', None, {u'name': 'KSIS', u'email': 'DHVZ'}, 'reader', 400),
+
+ (Repository, 'GET', {'repository': 'public/publicrepo'}, None, None, 200),
+ (Repository, 'GET', {'repository': 'public/publicrepo'}, None, 'devtable', 200),
+ (Repository, 'GET', {'repository': 'public/publicrepo'}, None, 'freshuser', 200),
+ (Repository, 'GET', {'repository': 'public/publicrepo'}, None, 'reader', 200),
+
+ (RepositoryList, 'GET', None, None, None, 400),
+ (RepositoryList, 'GET', None, None, 'devtable', 400),
+ (RepositoryList, 'GET', None, None, 'freshuser', 400),
+ (RepositoryList, 'GET', None, None, 'reader', 400),
+ (RepositoryList, 'POST', None, {u'repository': 'XZGB', u'visibility': u'public', u'description': '0O8U'}, None, 400),
+ (RepositoryList, 'POST', None, {u'repository': 'XZGB', u'visibility': u'public', u'description': '0O8U'}, 'devtable', 201),
+ (RepositoryList, 'POST', None, {u'repository': 'XZGB', u'visibility': u'public', u'description': '0O8U'}, 'freshuser', 201),
+ (RepositoryList, 'POST', None, {u'repository': 'XZGB', u'visibility': u'public', u'description': '0O8U'}, 'reader', 201),
+
+ (DiscoveryResource, 'GET', None, None, None, 200),
+ (DiscoveryResource, 'GET', None, None, 'devtable', 200),
+ (DiscoveryResource, 'GET', None, None, 'freshuser', 200),
+ (DiscoveryResource, 'GET', None, None, 'reader', 200),
+
+ (FileDropResource, 'POST', None, {u'mimeType': 'TKBX'}, None, 200),
+ (FileDropResource, 'POST', None, {u'mimeType': 'TKBX'}, 'devtable', 200),
+ (FileDropResource, 'POST', None, {u'mimeType': 'TKBX'}, 'freshuser', 200),
+ (FileDropResource, 'POST', None, {u'mimeType': 'TKBX'}, 'reader', 200),
+
+ (Recovery, 'POST', None, {u'email': '826S'}, None, 200),
+ (Recovery, 'POST', None, {u'email': '826S'}, 'devtable', 200),
+ (Recovery, 'POST', None, {u'email': '826S'}, 'freshuser', 200),
+ (Recovery, 'POST', None, {u'email': '826S'}, 'reader', 200),
+
+ (Signout, 'POST', None, None, None, 200),
+ (Signout, 'POST', None, None, 'devtable', 200),
+ (Signout, 'POST', None, None, 'freshuser', 200),
+ (Signout, 'POST', None, None, 'reader', 200),
+
+ (Signin, 'POST', None, {u'username': 'E9RY', u'password': 'LQ0N'}, None, 403),
+ (Signin, 'POST', None, {u'username': 'E9RY', u'password': 'LQ0N'}, 'devtable', 403),
+ (Signin, 'POST', None, {u'username': 'E9RY', u'password': 'LQ0N'}, 'freshuser', 403),
+ (Signin, 'POST', None, {u'username': 'E9RY', u'password': 'LQ0N'}, 'reader', 403),
+
+ (ExternalLoginInformation, 'POST', {'service_id': 'someservice'}, {}, None, 400),
+ (ExternalLoginInformation, 'POST', {'service_id': 'someservice'}, {}, 'devtable', 400),
+ (ExternalLoginInformation, 'POST', {'service_id': 'someservice'}, {}, 'freshuser', 400),
+ (ExternalLoginInformation, 'POST', {'service_id': 'someservice'}, {}, 'reader', 400),
+
+ (DetachExternal, 'POST', {'service_id': 'someservice'}, {}, None, 401),
+ (DetachExternal, 'POST', {'service_id': 'someservice'}, {}, 'devtable', 200),
+ (DetachExternal, 'POST', {'service_id': 'someservice'}, {}, 'freshuser', 200),
+ (DetachExternal, 'POST', {'service_id': 'someservice'}, {}, 'reader', 200),
+
+ (VerifyUser, 'POST', None, {u'password': 'LQ0N'}, None, 401),
+ (VerifyUser, 'POST', None, {u'password': 'password'}, 'devtable', 200),
+ (VerifyUser, 'POST', None, {u'password': 'LQ0N'}, 'freshuser', 403),
+ (VerifyUser, 'POST', None, {u'password': 'LQ0N'}, 'reader', 403),
+
+ (ClientKey, 'POST', None, {u'password': 'LQ0N'}, None, 401),
+ (ClientKey, 'POST', None, {u'password': 'password'}, 'devtable', 200),
+ (ClientKey, 'POST', None, {u'password': 'LQ0N'}, 'freshuser', 400),
+ (ClientKey, 'POST', None, {u'password': 'password'}, 'reader', 200),
+
+ (ListPlans, 'GET', None, None, None, 200),
+ (ListPlans, 'GET', None, None, 'devtable', 200),
+ (ListPlans, 'GET', None, None, 'freshuser', 200),
+ (ListPlans, 'GET', None, None, 'reader', 200),
+
+ (User, 'GET', None, None, None, 401),
+ (User, 'GET', None, None, 'devtable', 200),
+ (User, 'GET', None, None, 'freshuser', 200),
+ (User, 'GET', None, None, 'reader', 200),
+ (User, 'POST', None, {u'username': 'T946', u'password': '0SG4', u'email': 'MENT'}, None, 400),
+ (User, 'POST', None, {u'username': 'T946', u'password': '0SG4', u'email': 'MENT'}, 'devtable', 400),
+ (User, 'POST', None, {u'username': 'T946', u'password': '0SG4', u'email': 'MENT'}, 'freshuser', 400),
+ (User, 'POST', None, {u'username': 'T946', u'password': '0SG4', u'email': 'MENT'}, 'reader', 400),
+ (User, 'PUT', None, {}, None, 401),
+ (User, 'PUT', None, {}, 'devtable', 200),
+ (User, 'PUT', None, {}, 'freshuser', 200),
+ (User, 'PUT', None, {}, 'reader', 200),
+ (User, 'DELETE', None, {}, None, 401),
+ (User, 'DELETE', None, {}, 'devtable', 400),
+ (User, 'DELETE', None, {}, 'freshuser', 204),
+ (User, 'DELETE', None, {}, 'reader', 204),
+
+ (TeamMember, 'DELETE', {'orgname': 'buynlarge', 'membername': 'devtable', 'teamname': 'readers'}, None, None, 401),
+ (TeamMember, 'DELETE', {'orgname': 'buynlarge', 'membername': 'devtable', 'teamname': 'readers'}, None, 'devtable', 400),
+ (TeamMember, 'DELETE', {'orgname': 'buynlarge', 'membername': 'devtable', 'teamname': 'readers'}, None, 'freshuser', 403),
+ (TeamMember, 'DELETE', {'orgname': 'buynlarge', 'membername': 'devtable', 'teamname': 'readers'}, None, 'reader', 403),
+ (TeamMember, 'PUT', {'orgname': 'buynlarge', 'membername': 'devtable', 'teamname': 'readers'}, None, None, 401),
+ (TeamMember, 'PUT', {'orgname': 'buynlarge', 'membername': 'devtable', 'teamname': 'readers'}, None, 'devtable', 200),
+ (TeamMember, 'PUT', {'orgname': 'buynlarge', 'membername': 'devtable', 'teamname': 'readers'}, None, 'freshuser', 403),
+ (TeamMember, 'PUT', {'orgname': 'buynlarge', 'membername': 'devtable', 'teamname': 'readers'}, None, 'reader', 403),
+ (TeamMember, 'DELETE', {'orgname': 'buynlarge', 'membername': 'devtable', 'teamname': 'owners'}, None, None, 401),
+ (TeamMember, 'DELETE', {'orgname': 'buynlarge', 'membername': 'devtable', 'teamname': 'owners'}, None, 'devtable', 400),
+ (TeamMember, 'DELETE', {'orgname': 'buynlarge', 'membername': 'devtable', 'teamname': 'owners'}, None, 'freshuser', 403),
+ (TeamMember, 'DELETE', {'orgname': 'buynlarge', 'membername': 'devtable', 'teamname': 'owners'}, None, 'reader', 403),
+ (TeamMember, 'PUT', {'orgname': 'buynlarge', 'membername': 'devtable', 'teamname': 'owners'}, None, None, 401),
+ (TeamMember, 'PUT', {'orgname': 'buynlarge', 'membername': 'devtable', 'teamname': 'owners'}, None, 'devtable', 400),
+ (TeamMember, 'PUT', {'orgname': 'buynlarge', 'membername': 'devtable', 'teamname': 'owners'}, None, 'freshuser', 403),
+ (TeamMember, 'PUT', {'orgname': 'buynlarge', 'membername': 'devtable', 'teamname': 'owners'}, None, 'reader', 403),
+
+ (TeamPermissions, 'GET', {'orgname': 'buynlarge', 'teamname': 'readers'}, None, None, 401),
+ (TeamPermissions, 'GET', {'orgname': 'buynlarge', 'teamname': 'readers'}, None, 'devtable', 200),
+ (TeamPermissions, 'GET', {'orgname': 'buynlarge', 'teamname': 'readers'}, None, 'freshuser', 403),
+ (TeamPermissions, 'GET', {'orgname': 'buynlarge', 'teamname': 'readers'}, None, 'reader', 403),
+
+ (TeamMemberList, 'GET', {'orgname': 'buynlarge', 'teamname': 'readers'}, None, None, 401),
+ (TeamMemberList, 'GET', {'orgname': 'buynlarge', 'teamname': 'readers'}, None, 'devtable', 200),
+ (TeamMemberList, 'GET', {'orgname': 'buynlarge', 'teamname': 'readers'}, None, 'freshuser', 403),
+ (TeamMemberList, 'GET', {'orgname': 'buynlarge', 'teamname': 'readers'}, None, 'reader', 200),
+ (TeamMemberList, 'GET', {'orgname': 'buynlarge', 'teamname': 'owners'}, None, None, 401),
+ (TeamMemberList, 'GET', {'orgname': 'buynlarge', 'teamname': 'owners'}, None, 'devtable', 200),
+ (TeamMemberList, 'GET', {'orgname': 'buynlarge', 'teamname': 'owners'}, None, 'freshuser', 403),
+ (TeamMemberList, 'GET', {'orgname': 'buynlarge', 'teamname': 'owners'}, None, 'reader', 403),
+
+ (RepositoryUserPermission, 'DELETE', {'username': 'A2O9', 'repository': 'public/publicrepo'}, None, None, 401),
+ (RepositoryUserPermission, 'DELETE', {'username': 'A2O9', 'repository': 'public/publicrepo'}, None, 'devtable', 403),
+ (RepositoryUserPermission, 'DELETE', {'username': 'A2O9', 'repository': 'public/publicrepo'}, None, 'freshuser', 403),
+ (RepositoryUserPermission, 'DELETE', {'username': 'A2O9', 'repository': 'public/publicrepo'}, None, 'reader', 403),
+ (RepositoryUserPermission, 'GET', {'username': 'A2O9', 'repository': 'public/publicrepo'}, None, None, 401),
+ (RepositoryUserPermission, 'GET', {'username': 'A2O9', 'repository': 'public/publicrepo'}, None, 'devtable', 403),
+ (RepositoryUserPermission, 'GET', {'username': 'A2O9', 'repository': 'public/publicrepo'}, None, 'freshuser', 403),
+ (RepositoryUserPermission, 'GET', {'username': 'A2O9', 'repository': 'public/publicrepo'}, None, 'reader', 403),
+ (RepositoryUserPermission, 'PUT', {'username': 'A2O9', 'repository': 'public/publicrepo'}, {u'role': u'read'}, None, 401),
+ (RepositoryUserPermission, 'PUT', {'username': 'A2O9', 'repository': 'public/publicrepo'}, {u'role': u'read'}, 'devtable', 403),
+ (RepositoryUserPermission, 'PUT', {'username': 'A2O9', 'repository': 'public/publicrepo'}, {u'role': u'read'}, 'freshuser', 403),
+ (RepositoryUserPermission, 'PUT', {'username': 'A2O9', 'repository': 'public/publicrepo'}, {u'role': u'read'}, 'reader', 403),
+ (RepositoryUserPermission, 'DELETE', {'username': 'A2O9', 'repository': 'devtable/shared'}, None, None, 401),
+ (RepositoryUserPermission, 'DELETE', {'username': 'A2O9', 'repository': 'devtable/shared'}, None, 'devtable', 400),
+ (RepositoryUserPermission, 'DELETE', {'username': 'A2O9', 'repository': 'devtable/shared'}, None, 'freshuser', 403),
+ (RepositoryUserPermission, 'DELETE', {'username': 'A2O9', 'repository': 'devtable/shared'}, None, 'reader', 403),
+ (RepositoryUserPermission, 'GET', {'username': 'A2O9', 'repository': 'devtable/shared'}, None, None, 401),
+ (RepositoryUserPermission, 'GET', {'username': 'A2O9', 'repository': 'devtable/shared'}, None, 'devtable', 400),
+ (RepositoryUserPermission, 'GET', {'username': 'A2O9', 'repository': 'devtable/shared'}, None, 'freshuser', 403),
+ (RepositoryUserPermission, 'GET', {'username': 'A2O9', 'repository': 'devtable/shared'}, None, 'reader', 403),
+ (RepositoryUserPermission, 'PUT', {'username': 'A2O9', 'repository': 'devtable/shared'}, {u'role': u'read'}, None, 401),
+ (RepositoryUserPermission, 'PUT', {'username': 'A2O9', 'repository': 'devtable/shared'}, {u'role': u'read'}, 'devtable', 400),
+ (RepositoryUserPermission, 'PUT', {'username': 'A2O9', 'repository': 'devtable/shared'}, {u'role': u'read'}, 'freshuser', 403),
+ (RepositoryUserPermission, 'PUT', {'username': 'A2O9', 'repository': 'devtable/shared'}, {u'role': u'read'}, 'reader', 403),
+ (RepositoryUserPermission, 'DELETE', {'username': 'A2O9', 'repository': 'buynlarge/orgrepo'}, None, None, 401),
+ (RepositoryUserPermission, 'DELETE', {'username': 'A2O9', 'repository': 'buynlarge/orgrepo'}, None, 'devtable', 400),
+ (RepositoryUserPermission, 'DELETE', {'username': 'A2O9', 'repository': 'buynlarge/orgrepo'}, None, 'freshuser', 403),
+ (RepositoryUserPermission, 'DELETE', {'username': 'A2O9', 'repository': 'buynlarge/orgrepo'}, None, 'reader', 403),
+ (RepositoryUserPermission, 'GET', {'username': 'A2O9', 'repository': 'buynlarge/orgrepo'}, None, None, 401),
+ (RepositoryUserPermission, 'GET', {'username': 'A2O9', 'repository': 'buynlarge/orgrepo'}, None, 'devtable', 400),
+ (RepositoryUserPermission, 'GET', {'username': 'A2O9', 'repository': 'buynlarge/orgrepo'}, None, 'freshuser', 403),
+ (RepositoryUserPermission, 'GET', {'username': 'A2O9', 'repository': 'buynlarge/orgrepo'}, None, 'reader', 403),
+ (RepositoryUserPermission, 'PUT', {'username': 'A2O9', 'repository': 'buynlarge/orgrepo'}, {u'role': u'read'}, None, 401),
+ (RepositoryUserPermission, 'PUT', {'username': 'A2O9', 'repository': 'buynlarge/orgrepo'}, {u'role': u'read'}, 'devtable', 400),
+ (RepositoryUserPermission, 'PUT', {'username': 'A2O9', 'repository': 'buynlarge/orgrepo'}, {u'role': u'read'}, 'freshuser', 403),
+ (RepositoryUserPermission, 'PUT', {'username': 'A2O9', 'repository': 'buynlarge/orgrepo'}, {u'role': u'read'}, 'reader', 403),
+
+ (RepositoryTeamPermission, 'DELETE', {'repository': 'public/publicrepo', 'teamname': 'readers'}, None, None, 401),
+ (RepositoryTeamPermission, 'DELETE', {'repository': 'public/publicrepo', 'teamname': 'readers'}, None, 'devtable', 403),
+ (RepositoryTeamPermission, 'DELETE', {'repository': 'public/publicrepo', 'teamname': 'readers'}, None, 'freshuser', 403),
+ (RepositoryTeamPermission, 'DELETE', {'repository': 'public/publicrepo', 'teamname': 'readers'}, None, 'reader', 403),
+ (RepositoryTeamPermission, 'GET', {'repository': 'public/publicrepo', 'teamname': 'readers'}, None, None, 401),
+ (RepositoryTeamPermission, 'GET', {'repository': 'public/publicrepo', 'teamname': 'readers'}, None, 'devtable', 403),
+ (RepositoryTeamPermission, 'GET', {'repository': 'public/publicrepo', 'teamname': 'readers'}, None, 'freshuser', 403),
+ (RepositoryTeamPermission, 'GET', {'repository': 'public/publicrepo', 'teamname': 'readers'}, None, 'reader', 403),
+ (RepositoryTeamPermission, 'PUT', {'repository': 'public/publicrepo', 'teamname': 'readers'}, {u'role': u'read'}, None, 401),
+ (RepositoryTeamPermission, 'PUT', {'repository': 'public/publicrepo', 'teamname': 'readers'}, {u'role': u'read'}, 'devtable', 403),
+ (RepositoryTeamPermission, 'PUT', {'repository': 'public/publicrepo', 'teamname': 'readers'}, {u'role': u'read'}, 'freshuser', 403),
+ (RepositoryTeamPermission, 'PUT', {'repository': 'public/publicrepo', 'teamname': 'readers'}, {u'role': u'read'}, 'reader', 403),
+ (RepositoryTeamPermission, 'DELETE', {'repository': 'devtable/shared', 'teamname': 'readers'}, None, None, 401),
+ (RepositoryTeamPermission, 'DELETE', {'repository': 'devtable/shared', 'teamname': 'readers'}, None, 'devtable', 400),
+ (RepositoryTeamPermission, 'DELETE', {'repository': 'devtable/shared', 'teamname': 'readers'}, None, 'freshuser', 403),
+ (RepositoryTeamPermission, 'DELETE', {'repository': 'devtable/shared', 'teamname': 'readers'}, None, 'reader', 403),
+ (RepositoryTeamPermission, 'GET', {'repository': 'devtable/shared', 'teamname': 'readers'}, None, None, 401),
+ (RepositoryTeamPermission, 'GET', {'repository': 'devtable/shared', 'teamname': 'readers'}, None, 'devtable', 400),
+ (RepositoryTeamPermission, 'GET', {'repository': 'devtable/shared', 'teamname': 'readers'}, None, 'freshuser', 403),
+ (RepositoryTeamPermission, 'GET', {'repository': 'devtable/shared', 'teamname': 'readers'}, None, 'reader', 403),
+ (RepositoryTeamPermission, 'PUT', {'repository': 'devtable/shared', 'teamname': 'readers'}, {u'role': u'read'}, None, 401),
+ (RepositoryTeamPermission, 'PUT', {'repository': 'devtable/shared', 'teamname': 'readers'}, {u'role': u'read'}, 'devtable', 400),
+ (RepositoryTeamPermission, 'PUT', {'repository': 'devtable/shared', 'teamname': 'readers'}, {u'role': u'read'}, 'freshuser', 403),
+ (RepositoryTeamPermission, 'PUT', {'repository': 'devtable/shared', 'teamname': 'readers'}, {u'role': u'read'}, 'reader', 403),
+ (RepositoryTeamPermission, 'DELETE', {'repository': 'buynlarge/orgrepo', 'teamname': 'readers'}, None, None, 401),
+ (RepositoryTeamPermission, 'DELETE', {'repository': 'buynlarge/orgrepo', 'teamname': 'readers'}, None, 'devtable', 204),
+ (RepositoryTeamPermission, 'DELETE', {'repository': 'buynlarge/orgrepo', 'teamname': 'readers'}, None, 'freshuser', 403),
+ (RepositoryTeamPermission, 'DELETE', {'repository': 'buynlarge/orgrepo', 'teamname': 'readers'}, None, 'reader', 403),
+ (RepositoryTeamPermission, 'GET', {'repository': 'buynlarge/orgrepo', 'teamname': 'readers'}, None, None, 401),
+ (RepositoryTeamPermission, 'GET', {'repository': 'buynlarge/orgrepo', 'teamname': 'readers'}, None, 'devtable', 200),
+ (RepositoryTeamPermission, 'GET', {'repository': 'buynlarge/orgrepo', 'teamname': 'readers'}, None, 'freshuser', 403),
+ (RepositoryTeamPermission, 'GET', {'repository': 'buynlarge/orgrepo', 'teamname': 'readers'}, None, 'reader', 403),
+ (RepositoryTeamPermission, 'PUT', {'repository': 'buynlarge/orgrepo', 'teamname': 'readers'}, {u'role': u'read'}, None, 401),
+ (RepositoryTeamPermission, 'PUT', {'repository': 'buynlarge/orgrepo', 'teamname': 'readers'}, {u'role': u'read'}, 'devtable', 200),
+ (RepositoryTeamPermission, 'PUT', {'repository': 'buynlarge/orgrepo', 'teamname': 'readers'}, {u'role': u'read'}, 'freshuser', 403),
+ (RepositoryTeamPermission, 'PUT', {'repository': 'buynlarge/orgrepo', 'teamname': 'readers'}, {u'role': u'read'}, 'reader', 403),
+ (RepositoryTeamPermission, 'DELETE', {'repository': 'public/publicrepo', 'teamname': 'owners'}, None, None, 401),
+ (RepositoryTeamPermission, 'DELETE', {'repository': 'public/publicrepo', 'teamname': 'owners'}, None, 'devtable', 403),
+ (RepositoryTeamPermission, 'DELETE', {'repository': 'public/publicrepo', 'teamname': 'owners'}, None, 'freshuser', 403),
+ (RepositoryTeamPermission, 'DELETE', {'repository': 'public/publicrepo', 'teamname': 'owners'}, None, 'reader', 403),
+ (RepositoryTeamPermission, 'GET', {'repository': 'public/publicrepo', 'teamname': 'owners'}, None, None, 401),
+ (RepositoryTeamPermission, 'GET', {'repository': 'public/publicrepo', 'teamname': 'owners'}, None, 'devtable', 403),
+ (RepositoryTeamPermission, 'GET', {'repository': 'public/publicrepo', 'teamname': 'owners'}, None, 'freshuser', 403),
+ (RepositoryTeamPermission, 'GET', {'repository': 'public/publicrepo', 'teamname': 'owners'}, None, 'reader', 403),
+ (RepositoryTeamPermission, 'PUT', {'repository': 'public/publicrepo', 'teamname': 'owners'}, {u'role': u'read'}, None, 401),
+ (RepositoryTeamPermission, 'PUT', {'repository': 'public/publicrepo', 'teamname': 'owners'}, {u'role': u'read'}, 'devtable', 403),
+ (RepositoryTeamPermission, 'PUT', {'repository': 'public/publicrepo', 'teamname': 'owners'}, {u'role': u'read'}, 'freshuser', 403),
+ (RepositoryTeamPermission, 'PUT', {'repository': 'public/publicrepo', 'teamname': 'owners'}, {u'role': u'read'}, 'reader', 403),
+ (RepositoryTeamPermission, 'DELETE', {'repository': 'devtable/shared', 'teamname': 'owners'}, None, None, 401),
+ (RepositoryTeamPermission, 'DELETE', {'repository': 'devtable/shared', 'teamname': 'owners'}, None, 'devtable', 400),
+ (RepositoryTeamPermission, 'DELETE', {'repository': 'devtable/shared', 'teamname': 'owners'}, None, 'freshuser', 403),
+ (RepositoryTeamPermission, 'DELETE', {'repository': 'devtable/shared', 'teamname': 'owners'}, None, 'reader', 403),
+ (RepositoryTeamPermission, 'GET', {'repository': 'devtable/shared', 'teamname': 'owners'}, None, None, 401),
+ (RepositoryTeamPermission, 'GET', {'repository': 'devtable/shared', 'teamname': 'owners'}, None, 'devtable', 400),
+ (RepositoryTeamPermission, 'GET', {'repository': 'devtable/shared', 'teamname': 'owners'}, None, 'freshuser', 403),
+ (RepositoryTeamPermission, 'GET', {'repository': 'devtable/shared', 'teamname': 'owners'}, None, 'reader', 403),
+ (RepositoryTeamPermission, 'PUT', {'repository': 'devtable/shared', 'teamname': 'owners'}, {u'role': u'read'}, None, 401),
+ (RepositoryTeamPermission, 'PUT', {'repository': 'devtable/shared', 'teamname': 'owners'}, {u'role': u'read'}, 'devtable', 400),
+ (RepositoryTeamPermission, 'PUT', {'repository': 'devtable/shared', 'teamname': 'owners'}, {u'role': u'read'}, 'freshuser', 403),
+ (RepositoryTeamPermission, 'PUT', {'repository': 'devtable/shared', 'teamname': 'owners'}, {u'role': u'read'}, 'reader', 403),
+ (RepositoryTeamPermission, 'DELETE', {'repository': 'buynlarge/orgrepo', 'teamname': 'owners'}, None, None, 401),
+ (RepositoryTeamPermission, 'DELETE', {'repository': 'buynlarge/orgrepo', 'teamname': 'owners'}, None, 'devtable', 400),
+ (RepositoryTeamPermission, 'DELETE', {'repository': 'buynlarge/orgrepo', 'teamname': 'owners'}, None, 'freshuser', 403),
+ (RepositoryTeamPermission, 'DELETE', {'repository': 'buynlarge/orgrepo', 'teamname': 'owners'}, None, 'reader', 403),
+ (RepositoryTeamPermission, 'GET', {'repository': 'buynlarge/orgrepo', 'teamname': 'owners'}, None, None, 401),
+ (RepositoryTeamPermission, 'GET', {'repository': 'buynlarge/orgrepo', 'teamname': 'owners'}, None, 'devtable', 400),
+ (RepositoryTeamPermission, 'GET', {'repository': 'buynlarge/orgrepo', 'teamname': 'owners'}, None, 'freshuser', 403),
+ (RepositoryTeamPermission, 'GET', {'repository': 'buynlarge/orgrepo', 'teamname': 'owners'}, None, 'reader', 403),
+ (RepositoryTeamPermission, 'PUT', {'repository': 'buynlarge/orgrepo', 'teamname': 'owners'}, {u'role': u'read'}, None, 401),
+ (RepositoryTeamPermission, 'PUT', {'repository': 'buynlarge/orgrepo', 'teamname': 'owners'}, {u'role': u'read'}, 'devtable', 200),
+ (RepositoryTeamPermission, 'PUT', {'repository': 'buynlarge/orgrepo', 'teamname': 'owners'}, {u'role': u'read'}, 'freshuser', 403),
+ (RepositoryTeamPermission, 'PUT', {'repository': 'buynlarge/orgrepo', 'teamname': 'owners'}, {u'role': u'read'}, 'reader', 403),
+
+ (BuildTriggerActivate, 'POST', {'repository': 'public/publicrepo', 'trigger_uuid': 'SWO1'}, {}, None, 401),
+ (BuildTriggerActivate, 'POST', {'repository': 'public/publicrepo', 'trigger_uuid': 'SWO1'}, {}, 'devtable', 403),
+ (BuildTriggerActivate, 'POST', {'repository': 'public/publicrepo', 'trigger_uuid': 'SWO1'}, {}, 'freshuser', 403),
+ (BuildTriggerActivate, 'POST', {'repository': 'public/publicrepo', 'trigger_uuid': 'SWO1'}, {}, 'reader', 403),
+ (BuildTriggerActivate, 'POST', {'repository': 'devtable/shared', 'trigger_uuid': 'SWO1'}, {}, None, 401),
+ (BuildTriggerActivate, 'POST', {'repository': 'devtable/shared', 'trigger_uuid': 'SWO1'}, {'config': {}}, 'devtable', 404),
+ (BuildTriggerActivate, 'POST', {'repository': 'devtable/shared', 'trigger_uuid': 'SWO1'}, {}, 'freshuser', 403),
+ (BuildTriggerActivate, 'POST', {'repository': 'devtable/shared', 'trigger_uuid': 'SWO1'}, {}, 'reader', 403),
+ (BuildTriggerActivate, 'POST', {'repository': 'buynlarge/orgrepo', 'trigger_uuid': 'SWO1'}, {}, None, 401),
+ (BuildTriggerActivate, 'POST', {'repository': 'buynlarge/orgrepo', 'trigger_uuid': 'SWO1'}, {'config': {}}, 'devtable', 404),
+ (BuildTriggerActivate, 'POST', {'repository': 'buynlarge/orgrepo', 'trigger_uuid': 'SWO1'}, {}, 'freshuser', 403),
+ (BuildTriggerActivate, 'POST', {'repository': 'buynlarge/orgrepo', 'trigger_uuid': 'SWO1'}, {}, 'reader', 403),
+
+ (BuildTriggerFieldValues, 'POST', {'field_name': 'test_field', 'repository': 'public/publicrepo', 'trigger_uuid': 'SWO1'}, {}, None, 401),
+ (BuildTriggerFieldValues, 'POST', {'field_name': 'test_field', 'repository': 'public/publicrepo', 'trigger_uuid': 'SWO1'}, {}, 'devtable', 403),
+ (BuildTriggerFieldValues, 'POST', {'field_name': 'test_field', 'repository': 'public/publicrepo', 'trigger_uuid': 'SWO1'}, {}, 'freshuser', 403),
+ (BuildTriggerFieldValues, 'POST', {'field_name': 'test_field', 'repository': 'public/publicrepo', 'trigger_uuid': 'SWO1'}, {}, 'reader', 403),
+ (BuildTriggerFieldValues, 'POST', {'field_name': 'test_field', 'repository': 'devtable/shared', 'trigger_uuid': 'SWO1'}, {}, None, 401),
+ (BuildTriggerFieldValues, 'POST', {'field_name': 'test_field', 'repository': 'devtable/shared', 'trigger_uuid': 'SWO1'}, {'config': {}}, 'devtable', 404),
+ (BuildTriggerFieldValues, 'POST', {'field_name': 'test_field', 'repository': 'devtable/shared', 'trigger_uuid': 'SWO1'}, {}, 'freshuser', 403),
+ (BuildTriggerFieldValues, 'POST', {'field_name': 'test_field', 'repository': 'devtable/shared', 'trigger_uuid': 'SWO1'}, {}, 'reader', 403),
+ (BuildTriggerFieldValues, 'POST', {'field_name': 'test_field', 'repository': 'buynlarge/orgrepo', 'trigger_uuid': 'SWO1'}, {}, None, 401),
+ (BuildTriggerFieldValues, 'POST', {'field_name': 'test_field', 'repository': 'buynlarge/orgrepo', 'trigger_uuid': 'SWO1'}, {'config': {}}, 'devtable', 404),
+ (BuildTriggerFieldValues, 'POST', {'field_name': 'test_field', 'repository': 'buynlarge/orgrepo', 'trigger_uuid': 'SWO1'}, {}, 'freshuser', 403),
+ (BuildTriggerFieldValues, 'POST', {'field_name': 'test_field', 'repository': 'buynlarge/orgrepo', 'trigger_uuid': 'SWO1'}, {}, 'reader', 403),
+
+ (BuildTriggerSources, 'POST', {'repository': 'public/publicrepo', 'trigger_uuid': '831C'}, None, None, 401),
+ (BuildTriggerSources, 'POST', {'repository': 'public/publicrepo', 'trigger_uuid': '831C'}, {'namespace': 'foo'}, 'devtable', 403),
+ (BuildTriggerSources, 'POST', {'repository': 'public/publicrepo', 'trigger_uuid': '831C'}, None, 'freshuser', 403),
+ (BuildTriggerSources, 'POST', {'repository': 'public/publicrepo', 'trigger_uuid': '831C'}, None, 'reader', 403),
+ (BuildTriggerSources, 'POST', {'repository': 'devtable/shared', 'trigger_uuid': '831C'}, None, None, 401),
+ (BuildTriggerSources, 'POST', {'repository': 'devtable/shared', 'trigger_uuid': '831C'}, {'namespace': 'foo'}, 'devtable', 404),
+ (BuildTriggerSources, 'POST', {'repository': 'devtable/shared', 'trigger_uuid': '831C'}, None, 'freshuser', 403),
+ (BuildTriggerSources, 'POST', {'repository': 'devtable/shared', 'trigger_uuid': '831C'}, None, 'reader', 403),
+ (BuildTriggerSources, 'POST', {'repository': 'buynlarge/orgrepo', 'trigger_uuid': '831C'}, None, None, 401),
+ (BuildTriggerSources, 'POST', {'repository': 'buynlarge/orgrepo', 'trigger_uuid': '831C'}, {'namespace': 'foo'}, 'devtable', 404),
+ (BuildTriggerSources, 'POST', {'repository': 'buynlarge/orgrepo', 'trigger_uuid': '831C'}, None, 'freshuser', 403),
+ (BuildTriggerSources, 'POST', {'repository': 'buynlarge/orgrepo', 'trigger_uuid': '831C'}, None, 'reader', 403),
+
+ (BuildTriggerSubdirs, 'POST', {'repository': 'public/publicrepo', 'trigger_uuid': '4I2Y'}, {}, None, 401),
+ (BuildTriggerSubdirs, 'POST', {'repository': 'public/publicrepo', 'trigger_uuid': '4I2Y'}, {}, 'devtable', 403),
+ (BuildTriggerSubdirs, 'POST', {'repository': 'public/publicrepo', 'trigger_uuid': '4I2Y'}, {}, 'freshuser', 403),
+ (BuildTriggerSubdirs, 'POST', {'repository': 'public/publicrepo', 'trigger_uuid': '4I2Y'}, {}, 'reader', 403),
+ (BuildTriggerSubdirs, 'POST', {'repository': 'devtable/shared', 'trigger_uuid': '4I2Y'}, {}, None, 401),
+ (BuildTriggerSubdirs, 'POST', {'repository': 'devtable/shared', 'trigger_uuid': '4I2Y'}, {}, 'devtable', 404),
+ (BuildTriggerSubdirs, 'POST', {'repository': 'devtable/shared', 'trigger_uuid': '4I2Y'}, {}, 'freshuser', 403),
+ (BuildTriggerSubdirs, 'POST', {'repository': 'devtable/shared', 'trigger_uuid': '4I2Y'}, {}, 'reader', 403),
+ (BuildTriggerSubdirs, 'POST', {'repository': 'buynlarge/orgrepo', 'trigger_uuid': '4I2Y'}, {}, None, 401),
+ (BuildTriggerSubdirs, 'POST', {'repository': 'buynlarge/orgrepo', 'trigger_uuid': '4I2Y'}, {}, 'devtable', 404),
+ (BuildTriggerSubdirs, 'POST', {'repository': 'buynlarge/orgrepo', 'trigger_uuid': '4I2Y'}, {}, 'freshuser', 403),
+ (BuildTriggerSubdirs, 'POST', {'repository': 'buynlarge/orgrepo', 'trigger_uuid': '4I2Y'}, {}, 'reader', 403),
+
+ (TriggerBuildList, 'GET', {'repository': 'public/publicrepo', 'trigger_uuid': 'ZM1W'}, None, None, 401),
+ (TriggerBuildList, 'GET', {'repository': 'public/publicrepo', 'trigger_uuid': 'ZM1W'}, None, 'devtable', 403),
+ (TriggerBuildList, 'GET', {'repository': 'public/publicrepo', 'trigger_uuid': 'ZM1W'}, None, 'freshuser', 403),
+ (TriggerBuildList, 'GET', {'repository': 'public/publicrepo', 'trigger_uuid': 'ZM1W'}, None, 'reader', 403),
+ (TriggerBuildList, 'GET', {'repository': 'devtable/shared', 'trigger_uuid': 'ZM1W'}, None, None, 401),
+ (TriggerBuildList, 'GET', {'repository': 'devtable/shared', 'trigger_uuid': 'ZM1W'}, None, 'devtable', 200),
+ (TriggerBuildList, 'GET', {'repository': 'devtable/shared', 'trigger_uuid': 'ZM1W'}, None, 'freshuser', 403),
+ (TriggerBuildList, 'GET', {'repository': 'devtable/shared', 'trigger_uuid': 'ZM1W'}, None, 'reader', 403),
+ (TriggerBuildList, 'GET', {'repository': 'buynlarge/orgrepo', 'trigger_uuid': 'ZM1W'}, None, None, 401),
+ (TriggerBuildList, 'GET', {'repository': 'buynlarge/orgrepo', 'trigger_uuid': 'ZM1W'}, None, 'devtable', 200),
+ (TriggerBuildList, 'GET', {'repository': 'buynlarge/orgrepo', 'trigger_uuid': 'ZM1W'}, None, 'freshuser', 403),
+ (TriggerBuildList, 'GET', {'repository': 'buynlarge/orgrepo', 'trigger_uuid': 'ZM1W'}, None, 'reader', 403),
+
+ (ActivateBuildTrigger, 'POST', {'repository': 'public/publicrepo', 'trigger_uuid': '0BYE'}, None, None, 401),
+ (ActivateBuildTrigger, 'POST', {'repository': 'public/publicrepo', 'trigger_uuid': '0BYE'}, None, 'devtable', 403),
+ (ActivateBuildTrigger, 'POST', {'repository': 'public/publicrepo', 'trigger_uuid': '0BYE'}, None, 'freshuser', 403),
+ (ActivateBuildTrigger, 'POST', {'repository': 'public/publicrepo', 'trigger_uuid': '0BYE'}, None, 'reader', 403),
+ (ActivateBuildTrigger, 'POST', {'repository': 'devtable/shared', 'trigger_uuid': '0BYE'}, None, None, 401),
+ (ActivateBuildTrigger, 'POST', {'repository': 'devtable/shared', 'trigger_uuid': '0BYE'}, {}, 'devtable', 404),
+ (ActivateBuildTrigger, 'POST', {'repository': 'devtable/shared', 'trigger_uuid': '0BYE'}, None, 'freshuser', 403),
+ (ActivateBuildTrigger, 'POST', {'repository': 'devtable/shared', 'trigger_uuid': '0BYE'}, None, 'reader', 403),
+ (ActivateBuildTrigger, 'POST', {'repository': 'buynlarge/orgrepo', 'trigger_uuid': '0BYE'}, None, None, 401),
+ (ActivateBuildTrigger, 'POST', {'repository': 'buynlarge/orgrepo', 'trigger_uuid': '0BYE'}, {}, 'devtable', 404),
+ (ActivateBuildTrigger, 'POST', {'repository': 'buynlarge/orgrepo', 'trigger_uuid': '0BYE'}, None, 'freshuser', 403),
+ (ActivateBuildTrigger, 'POST', {'repository': 'buynlarge/orgrepo', 'trigger_uuid': '0BYE'}, None, 'reader', 403),
+
+ (BuildTriggerAnalyze, 'POST', {'repository': 'public/publicrepo', 'trigger_uuid': '0BYE'}, None, None, 401),
+ (BuildTriggerAnalyze, 'POST', {'repository': 'public/publicrepo', 'trigger_uuid': '0BYE'}, {'config': {}}, 'devtable', 403),
+ (BuildTriggerAnalyze, 'POST', {'repository': 'public/publicrepo', 'trigger_uuid': '0BYE'}, None, 'freshuser', 403),
+ (BuildTriggerAnalyze, 'POST', {'repository': 'public/publicrepo', 'trigger_uuid': '0BYE'}, None, 'reader', 403),
+ (BuildTriggerAnalyze, 'POST', {'repository': 'devtable/shared', 'trigger_uuid': '0BYE'}, None, None, 401),
+ (BuildTriggerAnalyze, 'POST', {'repository': 'devtable/shared', 'trigger_uuid': '0BYE'}, {'config': {}}, 'devtable', 404),
+ (BuildTriggerAnalyze, 'POST', {'repository': 'devtable/shared', 'trigger_uuid': '0BYE'}, None, 'freshuser', 403),
+ (BuildTriggerAnalyze, 'POST', {'repository': 'devtable/shared', 'trigger_uuid': '0BYE'}, None, 'reader', 403),
+ (BuildTriggerAnalyze, 'POST', {'repository': 'buynlarge/orgrepo', 'trigger_uuid': '0BYE'}, None, None, 401),
+ (BuildTriggerAnalyze, 'POST', {'repository': 'buynlarge/orgrepo', 'trigger_uuid': '0BYE'}, {'config': {}}, 'devtable', 404),
+ (BuildTriggerAnalyze, 'POST', {'repository': 'buynlarge/orgrepo', 'trigger_uuid': '0BYE'}, None, 'freshuser', 403),
+ (BuildTriggerAnalyze, 'POST', {'repository': 'buynlarge/orgrepo', 'trigger_uuid': '0BYE'}, None, 'reader', 403),
+
+ (RepositoryBuildStatus, 'GET', {'build_uuid': 'FG86', 'repository': 'public/publicrepo'}, None, None, 400),
+ (RepositoryBuildStatus, 'GET', {'build_uuid': 'FG86', 'repository': 'public/publicrepo'}, None, 'devtable', 400),
+ (RepositoryBuildStatus, 'GET', {'build_uuid': 'FG86', 'repository': 'public/publicrepo'}, None, 'freshuser', 400),
+ (RepositoryBuildStatus, 'GET', {'build_uuid': 'FG86', 'repository': 'public/publicrepo'}, None, 'reader', 400),
+ (RepositoryBuildStatus, 'GET', {'build_uuid': 'FG86', 'repository': 'devtable/shared'}, None, None, 401),
+ (RepositoryBuildStatus, 'GET', {'build_uuid': 'FG86', 'repository': 'devtable/shared'}, None, 'devtable', 400),
+ (RepositoryBuildStatus, 'GET', {'build_uuid': 'FG86', 'repository': 'devtable/shared'}, None, 'freshuser', 403),
+ (RepositoryBuildStatus, 'GET', {'build_uuid': 'FG86', 'repository': 'devtable/shared'}, None, 'reader', 400),
+ (RepositoryBuildStatus, 'GET', {'build_uuid': 'FG86', 'repository': 'buynlarge/orgrepo'}, None, None, 401),
+ (RepositoryBuildStatus, 'GET', {'build_uuid': 'FG86', 'repository': 'buynlarge/orgrepo'}, None, 'devtable', 400),
+ (RepositoryBuildStatus, 'GET', {'build_uuid': 'FG86', 'repository': 'buynlarge/orgrepo'}, None, 'freshuser', 403),
+ (RepositoryBuildStatus, 'GET', {'build_uuid': 'FG86', 'repository': 'buynlarge/orgrepo'}, None, 'reader', 400),
+
+ (RepositoryBuildResource, 'GET', {'build_uuid': 'FG86', 'repository': 'public/publicrepo'}, None, None, 404),
+ (RepositoryBuildResource, 'GET', {'build_uuid': 'FG86', 'repository': 'public/publicrepo'}, None, 'devtable', 404),
+ (RepositoryBuildResource, 'GET', {'build_uuid': 'FG86', 'repository': 'public/publicrepo'}, None, 'freshuser', 404),
+ (RepositoryBuildResource, 'GET', {'build_uuid': 'FG86', 'repository': 'public/publicrepo'}, None, 'reader', 404),
+ (RepositoryBuildResource, 'GET', {'build_uuid': 'FG86', 'repository': 'devtable/shared'}, None, None, 401),
+ (RepositoryBuildResource, 'GET', {'build_uuid': 'FG86', 'repository': 'devtable/shared'}, None, 'devtable', 404),
+ (RepositoryBuildResource, 'GET', {'build_uuid': 'FG86', 'repository': 'devtable/shared'}, None, 'freshuser', 403),
+ (RepositoryBuildResource, 'GET', {'build_uuid': 'FG86', 'repository': 'devtable/shared'}, None, 'reader', 404),
+ (RepositoryBuildResource, 'GET', {'build_uuid': 'FG86', 'repository': 'buynlarge/orgrepo'}, None, None, 401),
+ (RepositoryBuildResource, 'GET', {'build_uuid': 'FG86', 'repository': 'buynlarge/orgrepo'}, None, 'devtable', 404),
+ (RepositoryBuildResource, 'GET', {'build_uuid': 'FG86', 'repository': 'buynlarge/orgrepo'}, None, 'freshuser', 403),
+ (RepositoryBuildResource, 'GET', {'build_uuid': 'FG86', 'repository': 'buynlarge/orgrepo'}, None, 'reader', 404),
+
+ (RepositoryBuildResource, 'DELETE', {'build_uuid': 'FG86', 'repository': 'public/publicrepo'}, None, None, 401),
+ (RepositoryBuildResource, 'DELETE', {'build_uuid': 'FG86', 'repository': 'public/publicrepo'}, None, 'devtable', 403),
+ (RepositoryBuildResource, 'DELETE', {'build_uuid': 'FG86', 'repository': 'public/publicrepo'}, None, 'freshuser', 403),
+ (RepositoryBuildResource, 'DELETE', {'build_uuid': 'FG86', 'repository': 'public/publicrepo'}, None, 'reader', 403),
+ (RepositoryBuildResource, 'DELETE', {'build_uuid': 'FG86', 'repository': 'devtable/shared'}, None, None, 401),
+ (RepositoryBuildResource, 'DELETE', {'build_uuid': 'FG86', 'repository': 'devtable/shared'}, None, 'devtable', 404),
+ (RepositoryBuildResource, 'DELETE', {'build_uuid': 'FG86', 'repository': 'devtable/shared'}, None, 'freshuser', 403),
+ (RepositoryBuildResource, 'DELETE', {'build_uuid': 'FG86', 'repository': 'devtable/shared'}, None, 'reader', 403),
+ (RepositoryBuildResource, 'DELETE', {'build_uuid': 'FG86', 'repository': 'buynlarge/orgrepo'}, None, None, 401),
+ (RepositoryBuildResource, 'DELETE', {'build_uuid': 'FG86', 'repository': 'buynlarge/orgrepo'}, None, 'devtable', 404),
+ (RepositoryBuildResource, 'DELETE', {'build_uuid': 'FG86', 'repository': 'buynlarge/orgrepo'}, None, 'freshuser', 403),
+ (RepositoryBuildResource, 'DELETE', {'build_uuid': 'FG86', 'repository': 'buynlarge/orgrepo'}, None, 'reader', 403),
+
+ (RepositoryBuildLogs, 'GET', {'build_uuid': 'S5J8', 'repository': 'public/publicrepo'}, None, None, 401),
+ (RepositoryBuildLogs, 'GET', {'build_uuid': 'S5J8', 'repository': 'public/publicrepo'}, None, 'devtable', 403),
+ (RepositoryBuildLogs, 'GET', {'build_uuid': 'S5J8', 'repository': 'public/publicrepo'}, None, 'freshuser', 403),
+ (RepositoryBuildLogs, 'GET', {'build_uuid': 'S5J8', 'repository': 'public/publicrepo'}, None, 'reader', 403),
+ (RepositoryBuildLogs, 'GET', {'build_uuid': 'S5J8', 'repository': 'devtable/shared'}, None, None, 401),
+ (RepositoryBuildLogs, 'GET', {'build_uuid': 'S5J8', 'repository': 'devtable/shared'}, None, 'devtable', 400),
+ (RepositoryBuildLogs, 'GET', {'build_uuid': 'S5J8', 'repository': 'devtable/shared'}, None, 'freshuser', 403),
+ (RepositoryBuildLogs, 'GET', {'build_uuid': 'S5J8', 'repository': 'devtable/shared'}, None, 'reader', 403),
+ (RepositoryBuildLogs, 'GET', {'build_uuid': 'S5J8', 'repository': 'buynlarge/orgrepo'}, None, None, 401),
+ (RepositoryBuildLogs, 'GET', {'build_uuid': 'S5J8', 'repository': 'buynlarge/orgrepo'}, None, 'devtable', 400),
+ (RepositoryBuildLogs, 'GET', {'build_uuid': 'S5J8', 'repository': 'buynlarge/orgrepo'}, None, 'freshuser', 403),
+ (RepositoryBuildLogs, 'GET', {'build_uuid': 'S5J8', 'repository': 'buynlarge/orgrepo'}, None, 'reader', 403),
+
+ (ListRepositoryTags, 'GET', {'tag': 'TN96', 'repository': 'public/publicrepo'}, None, None, 200),
+ (ListRepositoryTags, 'GET', {'tag': 'TN96', 'repository': 'public/publicrepo'}, None, 'devtable', 200),
+ (ListRepositoryTags, 'GET', {'tag': 'TN96', 'repository': 'public/publicrepo'}, None, 'freshuser', 200),
+ (ListRepositoryTags, 'GET', {'tag': 'TN96', 'repository': 'public/publicrepo'}, None, 'reader', 200),
+ (ListRepositoryTags, 'GET', {'tag': 'TN96', 'repository': 'devtable/shared'}, None, None, 401),
+ (ListRepositoryTags, 'GET', {'tag': 'TN96', 'repository': 'devtable/shared'}, None, 'devtable', 200),
+ (ListRepositoryTags, 'GET', {'tag': 'TN96', 'repository': 'devtable/shared'}, None, 'freshuser', 403),
+ (ListRepositoryTags, 'GET', {'tag': 'TN96', 'repository': 'devtable/shared'}, None, 'reader', 200),
+ (ListRepositoryTags, 'GET', {'tag': 'TN96', 'repository': 'buynlarge/orgrepo'}, None, None, 401),
+ (ListRepositoryTags, 'GET', {'tag': 'TN96', 'repository': 'buynlarge/orgrepo'}, None, 'devtable', 200),
+ (ListRepositoryTags, 'GET', {'tag': 'TN96', 'repository': 'buynlarge/orgrepo'}, None, 'freshuser', 403),
+ (ListRepositoryTags, 'GET', {'tag': 'TN96', 'repository': 'buynlarge/orgrepo'}, None, 'reader', 200),
+
+ (RepositoryTagImages, 'GET', {'tag': 'TN96', 'repository': 'public/publicrepo'}, None, None, 404),
+ (RepositoryTagImages, 'GET', {'tag': 'TN96', 'repository': 'public/publicrepo'}, None, 'devtable', 404),
+ (RepositoryTagImages, 'GET', {'tag': 'TN96', 'repository': 'public/publicrepo'}, None, 'freshuser', 404),
+ (RepositoryTagImages, 'GET', {'tag': 'TN96', 'repository': 'public/publicrepo'}, None, 'reader', 404),
+ (RepositoryTagImages, 'GET', {'tag': 'TN96', 'repository': 'devtable/shared'}, None, None, 401),
+ (RepositoryTagImages, 'GET', {'tag': 'TN96', 'repository': 'devtable/shared'}, None, 'devtable', 404),
+ (RepositoryTagImages, 'GET', {'tag': 'TN96', 'repository': 'devtable/shared'}, None, 'freshuser', 403),
+ (RepositoryTagImages, 'GET', {'tag': 'TN96', 'repository': 'devtable/shared'}, None, 'reader', 404),
+ (RepositoryTagImages, 'GET', {'tag': 'TN96', 'repository': 'buynlarge/orgrepo'}, None, None, 401),
+ (RepositoryTagImages, 'GET', {'tag': 'TN96', 'repository': 'buynlarge/orgrepo'}, None, 'devtable', 404),
+ (RepositoryTagImages, 'GET', {'tag': 'TN96', 'repository': 'buynlarge/orgrepo'}, None, 'freshuser', 403),
+ (RepositoryTagImages, 'GET', {'tag': 'TN96', 'repository': 'buynlarge/orgrepo'}, None, 'reader', 404),
+
+ (PermissionPrototype, 'DELETE', {'orgname': 'buynlarge', 'prototypeid': 'L24B'}, None, None, 401),
+ (PermissionPrototype, 'DELETE', {'orgname': 'buynlarge', 'prototypeid': 'L24B'}, None, 'devtable', 404),
+ (PermissionPrototype, 'DELETE', {'orgname': 'buynlarge', 'prototypeid': 'L24B'}, None, 'freshuser', 403),
+ (PermissionPrototype, 'DELETE', {'orgname': 'buynlarge', 'prototypeid': 'L24B'}, None, 'reader', 403),
+ (PermissionPrototype, 'PUT', {'orgname': 'buynlarge', 'prototypeid': 'L24B'}, {u'role': u'read'}, None, 401),
+ (PermissionPrototype, 'PUT', {'orgname': 'buynlarge', 'prototypeid': 'L24B'}, {u'role': u'read'}, 'devtable', 404),
+ (PermissionPrototype, 'PUT', {'orgname': 'buynlarge', 'prototypeid': 'L24B'}, {u'role': u'read'}, 'freshuser', 403),
+ (PermissionPrototype, 'PUT', {'orgname': 'buynlarge', 'prototypeid': 'L24B'}, {u'role': u'read'}, 'reader', 403),
+
+ (OrganizationMember, 'DELETE', {'orgname': 'buynlarge', 'membername': 'someuser'}, None, None, 401),
+ (OrganizationMember, 'DELETE', {'orgname': 'buynlarge', 'membername': 'someuser'}, None, 'devtable', 404),
+ (OrganizationMember, 'DELETE', {'orgname': 'buynlarge', 'membername': 'someuser'}, None, 'freshuser', 403),
+ (OrganizationMember, 'DELETE', {'orgname': 'buynlarge', 'membername': 'someuser'}, None, 'reader', 403),
+ (OrganizationMember, 'GET', {'orgname': 'buynlarge', 'membername': 'someuser'}, None, None, 401),
+ (OrganizationMember, 'GET', {'orgname': 'buynlarge', 'membername': 'someuser'}, None, 'devtable', 404),
+ (OrganizationMember, 'GET', {'orgname': 'buynlarge', 'membername': 'someuser'}, None, 'freshuser', 403),
+ (OrganizationMember, 'GET', {'orgname': 'buynlarge', 'membername': 'someuser'}, None, 'reader', 403),
+
+ (OrgRobot, 'DELETE', {'orgname': 'buynlarge', 'robot_shortname': 'Z7PD'}, None, None, 401),
+ (OrgRobot, 'DELETE', {'orgname': 'buynlarge', 'robot_shortname': 'Z7PD'}, None, 'devtable', 400),
+ (OrgRobot, 'DELETE', {'orgname': 'buynlarge', 'robot_shortname': 'Z7PD'}, None, 'freshuser', 403),
+ (OrgRobot, 'DELETE', {'orgname': 'buynlarge', 'robot_shortname': 'Z7PD'}, None, 'reader', 403),
+ (OrgRobot, 'GET', {'orgname': 'buynlarge', 'robot_shortname': 'Z7PD'}, None, None, 401),
+ (OrgRobot, 'GET', {'orgname': 'buynlarge', 'robot_shortname': 'Z7PD'}, None, 'devtable', 400),
+ (OrgRobot, 'GET', {'orgname': 'buynlarge', 'robot_shortname': 'Z7PD'}, None, 'freshuser', 403),
+ (OrgRobot, 'GET', {'orgname': 'buynlarge', 'robot_shortname': 'Z7PD'}, None, 'reader', 403),
+ (OrgRobot, 'PUT', {'orgname': 'buynlarge', 'robot_shortname': 'Z7PD'}, {}, None, 401),
+ (OrgRobot, 'PUT', {'orgname': 'buynlarge', 'robot_shortname': 'Z7PD'}, {}, 'devtable', 400),
+ (OrgRobot, 'PUT', {'orgname': 'buynlarge', 'robot_shortname': 'Z7PD'}, {}, 'freshuser', 403),
+ (OrgRobot, 'PUT', {'orgname': 'buynlarge', 'robot_shortname': 'Z7PD'}, {}, 'reader', 403),
+
+ (OrganizationTeam, 'DELETE', {'orgname': 'buynlarge', 'teamname': 'readers'}, None, None, 401),
+ (OrganizationTeam, 'DELETE', {'orgname': 'buynlarge', 'teamname': 'readers'}, None, 'devtable', 204),
+ (OrganizationTeam, 'DELETE', {'orgname': 'buynlarge', 'teamname': 'readers'}, None, 'freshuser', 403),
+ (OrganizationTeam, 'DELETE', {'orgname': 'buynlarge', 'teamname': 'readers'}, None, 'reader', 403),
+ (OrganizationTeam, 'PUT', {'orgname': 'buynlarge', 'teamname': 'readers'}, {u'role': u'member'}, None, 401),
+ (OrganizationTeam, 'PUT', {'orgname': 'buynlarge', 'teamname': 'readers'}, {u'role': u'member'}, 'devtable', 200),
+ (OrganizationTeam, 'PUT', {'orgname': 'buynlarge', 'teamname': 'readers'}, {u'role': u'member'}, 'freshuser', 403),
+ (OrganizationTeam, 'PUT', {'orgname': 'buynlarge', 'teamname': 'readers'}, {u'role': u'member'}, 'reader', 403),
+ (OrganizationTeam, 'DELETE', {'orgname': 'buynlarge', 'teamname': 'owners'}, None, None, 401),
+ (OrganizationTeam, 'DELETE', {'orgname': 'buynlarge', 'teamname': 'owners'}, None, 'devtable', 400),
+ (OrganizationTeam, 'DELETE', {'orgname': 'buynlarge', 'teamname': 'owners'}, None, 'freshuser', 403),
+ (OrganizationTeam, 'DELETE', {'orgname': 'buynlarge', 'teamname': 'owners'}, None, 'reader', 403),
+ (OrganizationTeam, 'PUT', {'orgname': 'buynlarge', 'teamname': 'owners'}, {u'role': u'member'}, None, 401),
+ (OrganizationTeam, 'PUT', {'orgname': 'buynlarge', 'teamname': 'owners'}, {u'role': u'member'}, 'devtable', 400),
+ (OrganizationTeam, 'PUT', {'orgname': 'buynlarge', 'teamname': 'owners'}, {u'role': u'member'}, 'freshuser', 403),
+ (OrganizationTeam, 'PUT', {'orgname': 'buynlarge', 'teamname': 'owners'}, {u'role': u'member'}, 'reader', 403),
+
+ (RepositoryTeamPermissionList, 'GET', {'repository': 'public/publicrepo'}, None, None, 401),
+ (RepositoryTeamPermissionList, 'GET', {'repository': 'public/publicrepo'}, None, 'devtable', 403),
+ (RepositoryTeamPermissionList, 'GET', {'repository': 'public/publicrepo'}, None, 'freshuser', 403),
+ (RepositoryTeamPermissionList, 'GET', {'repository': 'public/publicrepo'}, None, 'reader', 403),
+ (RepositoryTeamPermissionList, 'GET', {'repository': 'devtable/shared'}, None, None, 401),
+ (RepositoryTeamPermissionList, 'GET', {'repository': 'devtable/shared'}, None, 'devtable', 200),
+ (RepositoryTeamPermissionList, 'GET', {'repository': 'devtable/shared'}, None, 'freshuser', 403),
+ (RepositoryTeamPermissionList, 'GET', {'repository': 'devtable/shared'}, None, 'reader', 403),
+ (RepositoryTeamPermissionList, 'GET', {'repository': 'buynlarge/orgrepo'}, None, None, 401),
+ (RepositoryTeamPermissionList, 'GET', {'repository': 'buynlarge/orgrepo'}, None, 'devtable', 200),
+ (RepositoryTeamPermissionList, 'GET', {'repository': 'buynlarge/orgrepo'}, None, 'freshuser', 403),
+ (RepositoryTeamPermissionList, 'GET', {'repository': 'buynlarge/orgrepo'}, None, 'reader', 403),
+
+ (RepositoryUserPermissionList, 'GET', {'repository': 'public/publicrepo'}, None, None, 401),
+ (RepositoryUserPermissionList, 'GET', {'repository': 'public/publicrepo'}, None, 'devtable', 403),
+ (RepositoryUserPermissionList, 'GET', {'repository': 'public/publicrepo'}, None, 'freshuser', 403),
+ (RepositoryUserPermissionList, 'GET', {'repository': 'public/publicrepo'}, None, 'reader', 403),
+ (RepositoryUserPermissionList, 'GET', {'repository': 'devtable/shared'}, None, None, 401),
+ (RepositoryUserPermissionList, 'GET', {'repository': 'devtable/shared'}, None, 'devtable', 200),
+ (RepositoryUserPermissionList, 'GET', {'repository': 'devtable/shared'}, None, 'freshuser', 403),
+ (RepositoryUserPermissionList, 'GET', {'repository': 'devtable/shared'}, None, 'reader', 403),
+ (RepositoryUserPermissionList, 'GET', {'repository': 'buynlarge/orgrepo'}, None, None, 401),
+ (RepositoryUserPermissionList, 'GET', {'repository': 'buynlarge/orgrepo'}, None, 'devtable', 200),
+ (RepositoryUserPermissionList, 'GET', {'repository': 'buynlarge/orgrepo'}, None, 'freshuser', 403),
+ (RepositoryUserPermissionList, 'GET', {'repository': 'buynlarge/orgrepo'}, None, 'reader', 403),
+
+ (BuildTrigger, 'DELETE', {'repository': 'public/publicrepo', 'trigger_uuid': 'D6TI'}, None, None, 401),
+ (BuildTrigger, 'DELETE', {'repository': 'public/publicrepo', 'trigger_uuid': 'D6TI'}, None, 'devtable', 403),
+ (BuildTrigger, 'DELETE', {'repository': 'public/publicrepo', 'trigger_uuid': 'D6TI'}, None, 'freshuser', 403),
+ (BuildTrigger, 'DELETE', {'repository': 'public/publicrepo', 'trigger_uuid': 'D6TI'}, None, 'reader', 403),
+ (BuildTrigger, 'GET', {'repository': 'public/publicrepo', 'trigger_uuid': 'D6TI'}, None, None, 401),
+ (BuildTrigger, 'GET', {'repository': 'public/publicrepo', 'trigger_uuid': 'D6TI'}, None, 'devtable', 403),
+ (BuildTrigger, 'GET', {'repository': 'public/publicrepo', 'trigger_uuid': 'D6TI'}, None, 'freshuser', 403),
+ (BuildTrigger, 'GET', {'repository': 'public/publicrepo', 'trigger_uuid': 'D6TI'}, None, 'reader', 403),
+ (BuildTrigger, 'DELETE', {'repository': 'devtable/shared', 'trigger_uuid': 'D6TI'}, None, None, 401),
+ (BuildTrigger, 'DELETE', {'repository': 'devtable/shared', 'trigger_uuid': 'D6TI'}, None, 'devtable', 404),
+ (BuildTrigger, 'DELETE', {'repository': 'devtable/shared', 'trigger_uuid': 'D6TI'}, None, 'freshuser', 403),
+ (BuildTrigger, 'DELETE', {'repository': 'devtable/shared', 'trigger_uuid': 'D6TI'}, None, 'reader', 403),
+ (BuildTrigger, 'GET', {'repository': 'devtable/shared', 'trigger_uuid': 'D6TI'}, None, None, 401),
+ (BuildTrigger, 'GET', {'repository': 'devtable/shared', 'trigger_uuid': 'D6TI'}, None, 'devtable', 404),
+ (BuildTrigger, 'GET', {'repository': 'devtable/shared', 'trigger_uuid': 'D6TI'}, None, 'freshuser', 403),
+ (BuildTrigger, 'GET', {'repository': 'devtable/shared', 'trigger_uuid': 'D6TI'}, None, 'reader', 403),
+ (BuildTrigger, 'DELETE', {'repository': 'buynlarge/orgrepo', 'trigger_uuid': 'D6TI'}, None, None, 401),
+ (BuildTrigger, 'DELETE', {'repository': 'buynlarge/orgrepo', 'trigger_uuid': 'D6TI'}, None, 'devtable', 404),
+ (BuildTrigger, 'DELETE', {'repository': 'buynlarge/orgrepo', 'trigger_uuid': 'D6TI'}, None, 'freshuser', 403),
+ (BuildTrigger, 'DELETE', {'repository': 'buynlarge/orgrepo', 'trigger_uuid': 'D6TI'}, None, 'reader', 403),
+ (BuildTrigger, 'GET', {'repository': 'buynlarge/orgrepo', 'trigger_uuid': 'D6TI'}, None, None, 401),
+ (BuildTrigger, 'GET', {'repository': 'buynlarge/orgrepo', 'trigger_uuid': 'D6TI'}, None, 'devtable', 404),
+ (BuildTrigger, 'GET', {'repository': 'buynlarge/orgrepo', 'trigger_uuid': 'D6TI'}, None, 'freshuser', 403),
+ (BuildTrigger, 'GET', {'repository': 'buynlarge/orgrepo', 'trigger_uuid': 'D6TI'}, None, 'reader', 403),
+
+ (RepositoryNotification, 'DELETE', {'uuid': 'QFAT', 'repository': 'public/publicrepo'}, None, None, 401),
+ (RepositoryNotification, 'DELETE', {'uuid': 'QFAT', 'repository': 'public/publicrepo'}, None, 'devtable', 403),
+ (RepositoryNotification, 'DELETE', {'uuid': 'QFAT', 'repository': 'public/publicrepo'}, None, 'freshuser', 403),
+ (RepositoryNotification, 'DELETE', {'uuid': 'QFAT', 'repository': 'public/publicrepo'}, None, 'reader', 403),
+ (RepositoryNotification, 'GET', {'uuid': 'QFAT', 'repository': 'public/publicrepo'}, None, None, 401),
+ (RepositoryNotification, 'GET', {'uuid': 'QFAT', 'repository': 'public/publicrepo'}, None, 'devtable', 403),
+ (RepositoryNotification, 'GET', {'uuid': 'QFAT', 'repository': 'public/publicrepo'}, None, 'freshuser', 403),
+ (RepositoryNotification, 'GET', {'uuid': 'QFAT', 'repository': 'public/publicrepo'}, None, 'reader', 403),
+ (RepositoryNotification, 'DELETE', {'uuid': 'QFAT', 'repository': 'devtable/shared'}, None, None, 401),
+ (RepositoryNotification, 'DELETE', {'uuid': 'QFAT', 'repository': 'devtable/shared'}, None, 'devtable', 400),
+ (RepositoryNotification, 'DELETE', {'uuid': 'QFAT', 'repository': 'devtable/shared'}, None, 'freshuser', 403),
+ (RepositoryNotification, 'DELETE', {'uuid': 'QFAT', 'repository': 'devtable/shared'}, None, 'reader', 403),
+ (RepositoryNotification, 'GET', {'uuid': 'QFAT', 'repository': 'devtable/shared'}, None, None, 401),
+ (RepositoryNotification, 'GET', {'uuid': 'QFAT', 'repository': 'devtable/shared'}, None, 'devtable', 404),
+ (RepositoryNotification, 'GET', {'uuid': 'QFAT', 'repository': 'devtable/shared'}, None, 'freshuser', 403),
+ (RepositoryNotification, 'GET', {'uuid': 'QFAT', 'repository': 'devtable/shared'}, None, 'reader', 403),
+ (RepositoryNotification, 'DELETE', {'uuid': 'QFAT', 'repository': 'buynlarge/orgrepo'}, None, None, 401),
+ (RepositoryNotification, 'DELETE', {'uuid': 'QFAT', 'repository': 'buynlarge/orgrepo'}, None, 'devtable', 400),
+ (RepositoryNotification, 'DELETE', {'uuid': 'QFAT', 'repository': 'buynlarge/orgrepo'}, None, 'freshuser', 403),
+ (RepositoryNotification, 'DELETE', {'uuid': 'QFAT', 'repository': 'buynlarge/orgrepo'}, None, 'reader', 403),
+ (RepositoryNotification, 'GET', {'uuid': 'QFAT', 'repository': 'buynlarge/orgrepo'}, None, None, 401),
+ (RepositoryNotification, 'GET', {'uuid': 'QFAT', 'repository': 'buynlarge/orgrepo'}, None, 'devtable', 404),
+ (RepositoryNotification, 'GET', {'uuid': 'QFAT', 'repository': 'buynlarge/orgrepo'}, None, 'freshuser', 403),
+ (RepositoryNotification, 'GET', {'uuid': 'QFAT', 'repository': 'buynlarge/orgrepo'}, None, 'reader', 403),
+
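+ # Repository token endpoints answer 410 Gone once the permission check passes.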
+ (RepositoryToken, 'DELETE', {'code': 'UJQB', 'repository': 'public/publicrepo'}, None, None, 401),
+ (RepositoryToken, 'DELETE', {'code': 'UJQB', 'repository': 'public/publicrepo'}, None, 'devtable', 403),
+ (RepositoryToken, 'DELETE', {'code': 'UJQB', 'repository': 'public/publicrepo'}, None, 'freshuser', 403),
+ (RepositoryToken, 'DELETE', {'code': 'UJQB', 'repository': 'public/publicrepo'}, None, 'reader', 403),
+ (RepositoryToken, 'GET', {'code': 'UJQB', 'repository': 'public/publicrepo'}, None, None, 401),
+ (RepositoryToken, 'GET', {'code': 'UJQB', 'repository': 'public/publicrepo'}, None, 'devtable', 403),
+ (RepositoryToken, 'GET', {'code': 'UJQB', 'repository': 'public/publicrepo'}, None, 'freshuser', 403),
+ (RepositoryToken, 'GET', {'code': 'UJQB', 'repository': 'public/publicrepo'}, None, 'reader', 403),
+ (RepositoryToken, 'PUT', {'code': 'UJQB', 'repository': 'public/publicrepo'}, {u'role': u'read'}, None, 401),
+ (RepositoryToken, 'PUT', {'code': 'UJQB', 'repository': 'public/publicrepo'}, {u'role': u'read'}, 'devtable', 403),
+ (RepositoryToken, 'PUT', {'code': 'UJQB', 'repository': 'public/publicrepo'}, {u'role': u'read'}, 'freshuser', 403),
+ (RepositoryToken, 'PUT', {'code': 'UJQB', 'repository': 'public/publicrepo'}, {u'role': u'read'}, 'reader', 403),
+ (RepositoryToken, 'DELETE', {'code': 'UJQB', 'repository': 'devtable/shared'}, None, None, 401),
+ (RepositoryToken, 'DELETE', {'code': 'UJQB', 'repository': 'devtable/shared'}, None, 'devtable', 410),
+ (RepositoryToken, 'DELETE', {'code': 'UJQB', 'repository': 'devtable/shared'}, None, 'freshuser', 403),
+ (RepositoryToken, 'DELETE', {'code': 'UJQB', 'repository': 'devtable/shared'}, None, 'reader', 403),
+ (RepositoryToken, 'GET', {'code': 'UJQB', 'repository': 'devtable/shared'}, None, None, 401),
+ (RepositoryToken, 'GET', {'code': 'UJQB', 'repository': 'devtable/shared'}, None, 'devtable', 410),
+ (RepositoryToken, 'GET', {'code': 'UJQB', 'repository': 'devtable/shared'}, None, 'freshuser', 403),
+ (RepositoryToken, 'GET', {'code': 'UJQB', 'repository': 'devtable/shared'}, None, 'reader', 403),
+ (RepositoryToken, 'PUT', {'code': 'UJQB', 'repository': 'devtable/shared'}, {u'role': u'read'}, None, 401),
+ (RepositoryToken, 'PUT', {'code': 'UJQB', 'repository': 'devtable/shared'}, {u'role': u'read'}, 'devtable', 410),
+ (RepositoryToken, 'PUT', {'code': 'UJQB', 'repository': 'devtable/shared'}, {u'role': u'read'}, 'freshuser', 403),
+ (RepositoryToken, 'PUT', {'code': 'UJQB', 'repository': 'devtable/shared'}, {u'role': u'read'}, 'reader', 403),
+ (RepositoryToken, 'DELETE', {'code': 'UJQB', 'repository': 'buynlarge/orgrepo'}, None, None, 401),
+ (RepositoryToken, 'DELETE', {'code': 'UJQB', 'repository': 'buynlarge/orgrepo'}, None, 'devtable', 410),
+ (RepositoryToken, 'DELETE', {'code': 'UJQB', 'repository': 'buynlarge/orgrepo'}, None, 'freshuser', 403),
+ (RepositoryToken, 'DELETE', {'code': 'UJQB', 'repository': 'buynlarge/orgrepo'}, None, 'reader', 403),
+ (RepositoryToken, 'GET', {'code': 'UJQB', 'repository': 'buynlarge/orgrepo'}, None, None, 401),
+ (RepositoryToken, 'GET', {'code': 'UJQB', 'repository': 'buynlarge/orgrepo'}, None, 'devtable', 410),
+ (RepositoryToken, 'GET', {'code': 'UJQB', 'repository': 'buynlarge/orgrepo'}, None, 'freshuser', 403),
+ (RepositoryToken, 'GET', {'code': 'UJQB', 'repository': 'buynlarge/orgrepo'}, None, 'reader', 403),
+ (RepositoryToken, 'PUT', {'code': 'UJQB', 'repository': 'buynlarge/orgrepo'}, {u'role': u'read'}, None, 401),
+ (RepositoryToken, 'PUT', {'code': 'UJQB', 'repository': 'buynlarge/orgrepo'}, {u'role': u'read'}, 'devtable', 410),
+ (RepositoryToken, 'PUT', {'code': 'UJQB', 'repository': 'buynlarge/orgrepo'}, {u'role': u'read'}, 'freshuser', 403),
+ (RepositoryToken, 'PUT', {'code': 'UJQB', 'repository': 'buynlarge/orgrepo'}, {u'role': u'read'}, 'reader', 403),
+
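+ # Image id '5AVQ' does not exist, so authorized reads fall through to 404.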
+ (RepositoryImage, 'GET', {'image_id': '5AVQ', 'repository': 'public/publicrepo'}, None, None, 404),
+ (RepositoryImage, 'GET', {'image_id': '5AVQ', 'repository': 'public/publicrepo'}, None, 'devtable', 404),
+ (RepositoryImage, 'GET', {'image_id': '5AVQ', 'repository': 'public/publicrepo'}, None, 'freshuser', 404),
+ (RepositoryImage, 'GET', {'image_id': '5AVQ', 'repository': 'public/publicrepo'}, None, 'reader', 404),
+ (RepositoryImage, 'GET', {'image_id': '5AVQ', 'repository': 'devtable/shared'}, None, None, 401),
+ (RepositoryImage, 'GET', {'image_id': '5AVQ', 'repository': 'devtable/shared'}, None, 'devtable', 404),
+ (RepositoryImage, 'GET', {'image_id': '5AVQ', 'repository': 'devtable/shared'}, None, 'freshuser', 403),
+ (RepositoryImage, 'GET', {'image_id': '5AVQ', 'repository': 'devtable/shared'}, None, 'reader', 404),
+ (RepositoryImage, 'GET', {'image_id': '5AVQ', 'repository': 'buynlarge/orgrepo'}, None, None, 401),
+ (RepositoryImage, 'GET', {'image_id': '5AVQ', 'repository': 'buynlarge/orgrepo'}, None, 'devtable', 404),
+ (RepositoryImage, 'GET', {'image_id': '5AVQ', 'repository': 'buynlarge/orgrepo'}, None, 'freshuser', 403),
+ (RepositoryImage, 'GET', {'image_id': '5AVQ', 'repository': 'buynlarge/orgrepo'}, None, 'reader', 404),
+
+ (RestoreTag, 'POST', {'tag': 'HP8R', 'repository': 'public/publicrepo'}, {u'image': 'WXNG'}, None, 401),
+ (RestoreTag, 'POST', {'tag': 'HP8R', 'repository': 'public/publicrepo'}, {u'image': 'WXNG'}, 'devtable', 403),
+ (RestoreTag, 'POST', {'tag': 'HP8R', 'repository': 'public/publicrepo'}, {u'image': 'WXNG'}, 'freshuser', 403),
+ (RestoreTag, 'POST', {'tag': 'HP8R', 'repository': 'public/publicrepo'}, {u'image': 'WXNG'}, 'reader', 403),
+ (RestoreTag, 'POST', {'tag': 'HP8R', 'repository': 'devtable/shared'}, {u'image': 'WXNG'}, None, 401),
+ (RestoreTag, 'POST', {'tag': 'HP8R', 'repository': 'devtable/shared'}, {u'image': 'WXNG'}, 'devtable', 404),
+ (RestoreTag, 'POST', {'tag': 'HP8R', 'repository': 'devtable/shared'}, {u'image': 'WXNG'}, 'freshuser', 403),
+ (RestoreTag, 'POST', {'tag': 'HP8R', 'repository': 'devtable/shared'}, {u'image': 'WXNG'}, 'reader', 403),
+ (RestoreTag, 'POST', {'tag': 'HP8R', 'repository': 'buynlarge/orgrepo'}, {u'image': 'WXNG'}, None, 401),
+ (RestoreTag, 'POST', {'tag': 'HP8R', 'repository': 'buynlarge/orgrepo'}, {u'image': 'WXNG'}, 'devtable', 404),
+ (RestoreTag, 'POST', {'tag': 'HP8R', 'repository': 'buynlarge/orgrepo'}, {u'image': 'WXNG'}, 'freshuser', 403),
+ (RestoreTag, 'POST', {'tag': 'HP8R', 'repository': 'buynlarge/orgrepo'}, {u'image': 'WXNG'}, 'reader', 403),
+
+ (RepositoryTag, 'DELETE', {'tag': 'HP8R', 'repository': 'public/publicrepo'}, None, None, 401),
+ (RepositoryTag, 'DELETE', {'tag': 'HP8R', 'repository': 'public/publicrepo'}, None, 'devtable', 403),
+ (RepositoryTag, 'DELETE', {'tag': 'HP8R', 'repository': 'public/publicrepo'}, None, 'freshuser', 403),
+ (RepositoryTag, 'DELETE', {'tag': 'HP8R', 'repository': 'public/publicrepo'}, None, 'reader', 403),
+ (RepositoryTag, 'PUT', {'tag': 'HP8R', 'repository': 'public/publicrepo'}, {u'image': 'WXNG'}, None, 401),
+ (RepositoryTag, 'PUT', {'tag': 'HP8R', 'repository': 'public/publicrepo'}, {u'image': 'WXNG'}, 'devtable', 403),
+ (RepositoryTag, 'PUT', {'tag': 'HP8R', 'repository': 'public/publicrepo'}, {u'image': 'WXNG'}, 'freshuser', 403),
+ (RepositoryTag, 'PUT', {'tag': 'HP8R', 'repository': 'public/publicrepo'}, {u'image': 'WXNG'}, 'reader', 403),
+ (RepositoryTag, 'DELETE', {'tag': 'HP8R', 'repository': 'devtable/shared'}, None, None, 401),
+ (RepositoryTag, 'DELETE', {'tag': 'HP8R', 'repository': 'devtable/shared'}, None, 'devtable', 400),
+ (RepositoryTag, 'DELETE', {'tag': 'HP8R', 'repository': 'devtable/shared'}, None, 'freshuser', 403),
+ (RepositoryTag, 'DELETE', {'tag': 'HP8R', 'repository': 'devtable/shared'}, None, 'reader', 403),
+ (RepositoryTag, 'PUT', {'tag': 'HP8R', 'repository': 'devtable/shared'}, {u'image': 'WXNG'}, None, 401),
+ (RepositoryTag, 'PUT', {'tag': 'HP8R', 'repository': 'devtable/shared'}, {u'image': 'WXNG'}, 'devtable', 404),
+ (RepositoryTag, 'PUT', {'tag': 'HP8R', 'repository': 'devtable/shared'}, {u'image': 'WXNG'}, 'freshuser', 403),
+ (RepositoryTag, 'PUT', {'tag': 'HP8R', 'repository': 'devtable/shared'}, {u'image': 'WXNG'}, 'reader', 403),
+ (RepositoryTag, 'DELETE', {'tag': 'HP8R', 'repository': 'buynlarge/orgrepo'}, None, None, 401),
+ (RepositoryTag, 'DELETE', {'tag': 'HP8R', 'repository': 'buynlarge/orgrepo'}, None, 'devtable', 400),
+ (RepositoryTag, 'DELETE', {'tag': 'HP8R', 'repository': 'buynlarge/orgrepo'}, None, 'freshuser', 403),
+ (RepositoryTag, 'DELETE', {'tag': 'HP8R', 'repository': 'buynlarge/orgrepo'}, None, 'reader', 403),
+ (RepositoryTag, 'PUT', {'tag': 'HP8R', 'repository': 'buynlarge/orgrepo'}, {u'image': 'WXNG'}, None, 401),
+ (RepositoryTag, 'PUT', {'tag': 'HP8R', 'repository': 'buynlarge/orgrepo'}, {u'image': 'WXNG'}, 'devtable', 404),
+ (RepositoryTag, 'PUT', {'tag': 'HP8R', 'repository': 'buynlarge/orgrepo'}, {u'image': 'WXNG'}, 'freshuser', 403),
+ (RepositoryTag, 'PUT', {'tag': 'HP8R', 'repository': 'buynlarge/orgrepo'}, {u'image': 'WXNG'}, 'reader', 403),
+
+ (PermissionPrototypeList, 'GET', {'orgname': 'buynlarge'}, None, None, 401),
+ (PermissionPrototypeList, 'GET', {'orgname': 'buynlarge'}, None, 'devtable', 200),
+ (PermissionPrototypeList, 'GET', {'orgname': 'buynlarge'}, None, 'freshuser', 403),
+ (PermissionPrototypeList, 'GET', {'orgname': 'buynlarge'}, None, 'reader', 403),
+ (PermissionPrototypeList, 'POST', {'orgname': 'buynlarge'}, {u'role': u'read', u'delegate': {u'kind': u'user', u'name': '7DGP'}}, None, 401),
+ (PermissionPrototypeList, 'POST', {'orgname': 'buynlarge'}, {u'role': u'read', u'delegate': {u'kind': u'user', u'name': '7DGP'}}, 'devtable', 400),
+ (PermissionPrototypeList, 'POST', {'orgname': 'buynlarge'}, {u'role': u'read', u'delegate': {u'kind': u'user', u'name': '7DGP'}}, 'freshuser', 403),
+ (PermissionPrototypeList, 'POST', {'orgname': 'buynlarge'}, {u'role': u'read', u'delegate': {u'kind': u'user', u'name': '7DGP'}}, 'reader', 403),
+
+ (OrganizationInvoiceList, 'GET', {'orgname': 'buynlarge'}, None, None, 401),
+ (OrganizationInvoiceList, 'GET', {'orgname': 'buynlarge'}, None, 'devtable', 200),
+ (OrganizationInvoiceList, 'GET', {'orgname': 'buynlarge'}, None, 'freshuser', 403),
+ (OrganizationInvoiceList, 'GET', {'orgname': 'buynlarge'}, None, 'reader', 403),
+
+ (OrgPrivateRepositories, 'GET', {'orgname': 'buynlarge'}, None, None, 401),
+ (OrgPrivateRepositories, 'GET', {'orgname': 'buynlarge'}, None, 'devtable', 200),
+ (OrgPrivateRepositories, 'GET', {'orgname': 'buynlarge'}, None, 'freshuser', 403),
+ (OrgPrivateRepositories, 'GET', {'orgname': 'buynlarge'}, None, 'reader', 403),
+
+ (OrganizationMemberList, 'GET', {'orgname': 'buynlarge'}, None, None, 401),
+ (OrganizationMemberList, 'GET', {'orgname': 'buynlarge'}, None, 'devtable', 200),
+ (OrganizationMemberList, 'GET', {'orgname': 'buynlarge'}, None, 'freshuser', 403),
+ (OrganizationMemberList, 'GET', {'orgname': 'buynlarge'}, None, 'reader', 403),
+
+ (OrgRobotList, 'GET', {'orgname': 'buynlarge'}, None, None, 401),
+ (OrgRobotList, 'GET', {'orgname': 'buynlarge'}, None, 'devtable', 200),
+ (OrgRobotList, 'GET', {'orgname': 'buynlarge'}, None, 'freshuser', 403),
+ (OrgRobotList, 'GET', {'orgname': 'buynlarge'}, None, 'reader', 200),
+
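+ # The 'devtable' POST/PUT cases for card and plan are omitted below,
+ # presumably to avoid exercising the billing backend.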
+ (OrganizationCard, 'GET', {'orgname': 'buynlarge'}, None, None, 401),
+ (OrganizationCard, 'GET', {'orgname': 'buynlarge'}, None, 'devtable', 200),
+ (OrganizationCard, 'GET', {'orgname': 'buynlarge'}, None, 'freshuser', 403),
+ (OrganizationCard, 'GET', {'orgname': 'buynlarge'}, None, 'reader', 403),
+ (OrganizationCard, 'POST', {'orgname': 'buynlarge'}, {u'token': '4VFR'}, None, 401),
+ (OrganizationCard, 'POST', {'orgname': 'buynlarge'}, {u'token': '4VFR'}, 'freshuser', 403),
+ (OrganizationCard, 'POST', {'orgname': 'buynlarge'}, {u'token': '4VFR'}, 'reader', 403),
+
+ (OrganizationPlan, 'GET', {'orgname': 'buynlarge'}, None, None, 401),
+ (OrganizationPlan, 'GET', {'orgname': 'buynlarge'}, None, 'devtable', 200),
+ (OrganizationPlan, 'GET', {'orgname': 'buynlarge'}, None, 'freshuser', 403),
+ (OrganizationPlan, 'GET', {'orgname': 'buynlarge'}, None, 'reader', 403),
+ (OrganizationPlan, 'PUT', {'orgname': 'buynlarge'}, {u'plan': 'WWEI'}, None, 401),
+ (OrganizationPlan, 'PUT', {'orgname': 'buynlarge'}, {u'plan': 'WWEI'}, 'freshuser', 403),
+ (OrganizationPlan, 'PUT', {'orgname': 'buynlarge'}, {u'plan': 'WWEI'}, 'reader', 403),
+
+ (OrgLogs, 'GET', {'orgname': 'buynlarge'}, None, None, 401),
+ (OrgLogs, 'GET', {'orgname': 'buynlarge'}, None, 'devtable', 200),
+ (OrgLogs, 'GET', {'orgname': 'buynlarge'}, None, 'freshuser', 403),
+ (OrgLogs, 'GET', {'orgname': 'buynlarge'}, None, 'reader', 403),
+
+ (RepositoryVisibility, 'POST', {'repository': 'public/publicrepo'}, {u'visibility': u'public'}, None, 401),
+ (RepositoryVisibility, 'POST', {'repository': 'public/publicrepo'}, {u'visibility': u'public'}, 'devtable', 403),
+ (RepositoryVisibility, 'POST', {'repository': 'public/publicrepo'}, {u'visibility': u'public'}, 'freshuser', 403),
+ (RepositoryVisibility, 'POST', {'repository': 'public/publicrepo'}, {u'visibility': u'public'}, 'reader', 403),
+ (RepositoryVisibility, 'POST', {'repository': 'devtable/shared'}, {u'visibility': u'public'}, None, 401),
+ (RepositoryVisibility, 'POST', {'repository': 'devtable/shared'}, {u'visibility': u'public'}, 'devtable', 200),
+ (RepositoryVisibility, 'POST', {'repository': 'devtable/shared'}, {u'visibility': u'public'}, 'freshuser', 403),
+ (RepositoryVisibility, 'POST', {'repository': 'devtable/shared'}, {u'visibility': u'public'}, 'reader', 403),
+ (RepositoryVisibility, 'POST', {'repository': 'buynlarge/orgrepo'}, {u'visibility': u'public'}, None, 401),
+ (RepositoryVisibility, 'POST', {'repository': 'buynlarge/orgrepo'}, {u'visibility': u'public'}, 'devtable', 200),
+ (RepositoryVisibility, 'POST', {'repository': 'buynlarge/orgrepo'}, {u'visibility': u'public'}, 'freshuser', 403),
+ (RepositoryVisibility, 'POST', {'repository': 'buynlarge/orgrepo'}, {u'visibility': u'public'}, 'reader', 403),
+
+ (BuildTriggerList, 'GET', {'repository': 'public/publicrepo'}, None, None, 401),
+ (BuildTriggerList, 'GET', {'repository': 'public/publicrepo'}, None, 'devtable', 403),
+ (BuildTriggerList, 'GET', {'repository': 'public/publicrepo'}, None, 'freshuser', 403),
+ (BuildTriggerList, 'GET', {'repository': 'public/publicrepo'}, None, 'reader', 403),
+ (BuildTriggerList, 'GET', {'repository': 'devtable/shared'}, None, None, 401),
+ (BuildTriggerList, 'GET', {'repository': 'devtable/shared'}, None, 'devtable', 200),
+ (BuildTriggerList, 'GET', {'repository': 'devtable/shared'}, None, 'freshuser', 403),
+ (BuildTriggerList, 'GET', {'repository': 'devtable/shared'}, None, 'reader', 403),
+ (BuildTriggerList, 'GET', {'repository': 'buynlarge/orgrepo'}, None, None, 401),
+ (BuildTriggerList, 'GET', {'repository': 'buynlarge/orgrepo'}, None, 'devtable', 200),
+ (BuildTriggerList, 'GET', {'repository': 'buynlarge/orgrepo'}, None, 'freshuser', 403),
+ (BuildTriggerList, 'GET', {'repository': 'buynlarge/orgrepo'}, None, 'reader', 403),
+
+ (RepositoryNotificationList, 'GET', {'repository': 'public/publicrepo'}, None, None, 401),
+ (RepositoryNotificationList, 'GET', {'repository': 'public/publicrepo'}, None, 'devtable', 403),
+ (RepositoryNotificationList, 'GET', {'repository': 'public/publicrepo'}, None, 'freshuser', 403),
+ (RepositoryNotificationList, 'GET', {'repository': 'public/publicrepo'}, None, 'reader', 403),
+ (RepositoryNotificationList, 'POST', {'repository': 'public/publicrepo'}, {}, None, 401),
+ (RepositoryNotificationList, 'POST', {'repository': 'public/publicrepo'}, {}, 'devtable', 403),
+ (RepositoryNotificationList, 'POST', {'repository': 'public/publicrepo'}, {}, 'freshuser', 403),
+ (RepositoryNotificationList, 'POST', {'repository': 'public/publicrepo'}, {}, 'reader', 403),
+ (RepositoryNotificationList, 'GET', {'repository': 'devtable/shared'}, None, None, 401),
+ (RepositoryNotificationList, 'GET', {'repository': 'devtable/shared'}, None, 'devtable', 200),
+ (RepositoryNotificationList, 'GET', {'repository': 'devtable/shared'}, None, 'freshuser', 403),
+ (RepositoryNotificationList, 'GET', {'repository': 'devtable/shared'}, None, 'reader', 403),
+ (RepositoryNotificationList, 'POST', {'repository': 'devtable/shared'}, {}, None, 401),
+ (RepositoryNotificationList, 'POST', {'repository': 'devtable/shared'}, {'config': {'email': 'a@b.com'}, 'event': 'repo_push', 'method': 'email'}, 'devtable', 400),
+ (RepositoryNotificationList, 'POST', {'repository': 'devtable/shared'}, {}, 'freshuser', 403),
+ (RepositoryNotificationList, 'POST', {'repository': 'devtable/shared'}, {}, 'reader', 403),
+ (RepositoryNotificationList, 'GET', {'repository': 'buynlarge/orgrepo'}, None, None, 401),
+ (RepositoryNotificationList, 'GET', {'repository': 'buynlarge/orgrepo'}, None, 'devtable', 200),
+ (RepositoryNotificationList, 'GET', {'repository': 'buynlarge/orgrepo'}, None, 'freshuser', 403),
+ (RepositoryNotificationList, 'GET', {'repository': 'buynlarge/orgrepo'}, None, 'reader', 403),
+ (RepositoryNotificationList, 'POST', {'repository': 'buynlarge/orgrepo'}, {}, None, 401),
+ (RepositoryNotificationList, 'POST', {'repository': 'buynlarge/orgrepo'}, {'config': {'email': 'a@b.com'}, 'event': 'repo_push', 'method': 'email'}, 'devtable', 400),
+ (RepositoryNotificationList, 'POST', {'repository': 'buynlarge/orgrepo'}, {}, 'freshuser', 403),
+ (RepositoryNotificationList, 'POST', {'repository': 'buynlarge/orgrepo'}, {}, 'reader', 403),
+
+ (RepositoryAuthorizedEmail, 'GET', {'email': 'jschorr@devtable.com', 'repository': 'public/publicrepo'}, None, None, 401),
+ (RepositoryAuthorizedEmail, 'GET', {'email': 'jschorr@devtable.com', 'repository': 'public/publicrepo'}, None, 'devtable', 403),
+ (RepositoryAuthorizedEmail, 'GET', {'email': 'jschorr@devtable.com', 'repository': 'public/publicrepo'}, None, 'freshuser', 403),
+ (RepositoryAuthorizedEmail, 'GET', {'email': 'jschorr@devtable.com', 'repository': 'public/publicrepo'}, None, 'reader', 403),
+ (RepositoryAuthorizedEmail, 'POST', {'email': 'jschorr@devtable.com', 'repository': 'public/publicrepo'}, {}, None, 401),
+ (RepositoryAuthorizedEmail, 'POST', {'email': 'jschorr@devtable.com', 'repository': 'public/publicrepo'}, {}, 'devtable', 403),
+ (RepositoryAuthorizedEmail, 'POST', {'email': 'jschorr@devtable.com', 'repository': 'public/publicrepo'}, {}, 'freshuser', 403),
+ (RepositoryAuthorizedEmail, 'POST', {'email': 'jschorr@devtable.com', 'repository': 'public/publicrepo'}, {}, 'reader', 403),
+ (RepositoryAuthorizedEmail, 'GET', {'email': 'jschorr@devtable.com', 'repository': 'devtable/shared'}, None, None, 401),
+ (RepositoryAuthorizedEmail, 'GET', {'email': 'jschorr@devtable.com', 'repository': 'devtable/shared'}, None, 'devtable', 404),
+ (RepositoryAuthorizedEmail, 'GET', {'email': 'jschorr@devtable.com', 'repository': 'devtable/shared'}, None, 'freshuser', 403),
+ (RepositoryAuthorizedEmail, 'GET', {'email': 'jschorr@devtable.com', 'repository': 'devtable/shared'}, None, 'reader', 403),
+ (RepositoryAuthorizedEmail, 'POST', {'email': 'jschorr@devtable.com', 'repository': 'devtable/shared'}, {}, None, 401),
+ (RepositoryAuthorizedEmail, 'POST', {'email': 'jschorr@devtable.com', 'repository': 'devtable/shared'}, {}, 'devtable', 200),
+ (RepositoryAuthorizedEmail, 'POST', {'email': 'jschorr@devtable.com', 'repository': 'devtable/shared'}, {}, 'freshuser', 403),
+ (RepositoryAuthorizedEmail, 'POST', {'email': 'jschorr@devtable.com', 'repository': 'devtable/shared'}, {}, 'reader', 403),
+ (RepositoryAuthorizedEmail, 'GET', {'email': 'jschorr@devtable.com', 'repository': 'buynlarge/orgrepo'}, None, None, 401),
+ (RepositoryAuthorizedEmail, 'GET', {'email': 'jschorr@devtable.com', 'repository': 'buynlarge/orgrepo'}, None, 'devtable', 404),
+ (RepositoryAuthorizedEmail, 'GET', {'email': 'jschorr@devtable.com', 'repository': 'buynlarge/orgrepo'}, None, 'freshuser', 403),
+ (RepositoryAuthorizedEmail, 'GET', {'email': 'jschorr@devtable.com', 'repository': 'buynlarge/orgrepo'}, None, 'reader', 403),
+ (RepositoryAuthorizedEmail, 'POST', {'email': 'jschorr@devtable.com', 'repository': 'buynlarge/orgrepo'}, {}, None, 401),
+ (RepositoryAuthorizedEmail, 'POST', {'email': 'jschorr@devtable.com', 'repository': 'buynlarge/orgrepo'}, {}, 'devtable', 200),
+ (RepositoryAuthorizedEmail, 'POST', {'email': 'jschorr@devtable.com', 'repository': 'buynlarge/orgrepo'}, {}, 'freshuser', 403),
+ (RepositoryAuthorizedEmail, 'POST', {'email': 'jschorr@devtable.com', 'repository': 'buynlarge/orgrepo'}, {}, 'reader', 403),
+
+ (RepositoryTokenList, 'GET', {'repository': 'public/publicrepo'}, None, None, 401),
+ (RepositoryTokenList, 'GET', {'repository': 'public/publicrepo'}, None, 'devtable', 403),
+ (RepositoryTokenList, 'GET', {'repository': 'public/publicrepo'}, None, 'freshuser', 403),
+ (RepositoryTokenList, 'GET', {'repository': 'public/publicrepo'}, None, 'reader', 403),
+ (RepositoryTokenList, 'POST', {'repository': 'public/publicrepo'}, {u'friendlyName': 'R1CN'}, None, 401),
+ (RepositoryTokenList, 'POST', {'repository': 'public/publicrepo'}, {u'friendlyName': 'R1CN'}, 'devtable', 403),
+ (RepositoryTokenList, 'POST', {'repository': 'public/publicrepo'}, {u'friendlyName': 'R1CN'}, 'freshuser', 403),
+ (RepositoryTokenList, 'POST', {'repository': 'public/publicrepo'}, {u'friendlyName': 'R1CN'}, 'reader', 403),
+ (RepositoryTokenList, 'GET', {'repository': 'devtable/shared'}, None, None, 401),
+ (RepositoryTokenList, 'GET', {'repository': 'devtable/shared'}, None, 'devtable', 410),
+ (RepositoryTokenList, 'GET', {'repository': 'devtable/shared'}, None, 'freshuser', 403),
+ (RepositoryTokenList, 'GET', {'repository': 'devtable/shared'}, None, 'reader', 403),
+ (RepositoryTokenList, 'POST', {'repository': 'devtable/shared'}, {u'friendlyName': 'R1CN'}, None, 401),
+ (RepositoryTokenList, 'POST', {'repository': 'devtable/shared'}, {u'friendlyName': 'R1CN'}, 'devtable', 410),
+ (RepositoryTokenList, 'POST', {'repository': 'devtable/shared'}, {u'friendlyName': 'R1CN'}, 'freshuser', 403),
+ (RepositoryTokenList, 'POST', {'repository': 'devtable/shared'}, {u'friendlyName': 'R1CN'}, 'reader', 403),
+ (RepositoryTokenList, 'GET', {'repository': 'buynlarge/orgrepo'}, None, None, 401),
+ (RepositoryTokenList, 'GET', {'repository': 'buynlarge/orgrepo'}, None, 'devtable', 410),
+ (RepositoryTokenList, 'GET', {'repository': 'buynlarge/orgrepo'}, None, 'freshuser', 403),
+ (RepositoryTokenList, 'GET', {'repository': 'buynlarge/orgrepo'}, None, 'reader', 403),
+ (RepositoryTokenList, 'POST', {'repository': 'buynlarge/orgrepo'}, {u'friendlyName': 'R1CN'}, None, 401),
+ (RepositoryTokenList, 'POST', {'repository': 'buynlarge/orgrepo'}, {u'friendlyName': 'R1CN'}, 'devtable', 410),
+ (RepositoryTokenList, 'POST', {'repository': 'buynlarge/orgrepo'}, {u'friendlyName': 'R1CN'}, 'freshuser', 403),
+ (RepositoryTokenList, 'POST', {'repository': 'buynlarge/orgrepo'}, {u'friendlyName': 'R1CN'}, 'reader', 403),
+
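+ # Builds: anyone may list builds on a public repo (200), but queueing a build
+ # always requires write access.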
+ (RepositoryBuildList, 'GET', {'repository': 'public/publicrepo'}, None, None, 200),
+ (RepositoryBuildList, 'GET', {'repository': 'public/publicrepo'}, None, 'devtable', 200),
+ (RepositoryBuildList, 'GET', {'repository': 'public/publicrepo'}, None, 'freshuser', 200),
+ (RepositoryBuildList, 'GET', {'repository': 'public/publicrepo'}, None, 'reader', 200),
+ (RepositoryBuildList, 'POST', {'repository': 'public/publicrepo'}, {u'file_id': 'UX7K'}, None, 401),
+ (RepositoryBuildList, 'POST', {'repository': 'public/publicrepo'}, {u'file_id': 'UX7K'}, 'devtable', 403),
+ (RepositoryBuildList, 'POST', {'repository': 'public/publicrepo'}, {u'file_id': 'UX7K'}, 'freshuser', 403),
+ (RepositoryBuildList, 'POST', {'repository': 'public/publicrepo'}, {u'file_id': 'UX7K'}, 'reader', 403),
+ (RepositoryBuildList, 'GET', {'repository': 'devtable/shared'}, None, None, 401),
+ (RepositoryBuildList, 'GET', {'repository': 'devtable/shared'}, None, 'devtable', 200),
+ (RepositoryBuildList, 'GET', {'repository': 'devtable/shared'}, None, 'freshuser', 403),
+ (RepositoryBuildList, 'GET', {'repository': 'devtable/shared'}, None, 'reader', 200),
+ (RepositoryBuildList, 'POST', {'repository': 'devtable/shared'}, {u'file_id': 'UX7K'}, None, 401),
+ (RepositoryBuildList, 'POST', {'repository': 'devtable/shared'}, {u'file_id': 'UX7K'}, 'devtable', 201),
+ (RepositoryBuildList, 'POST', {'repository': 'devtable/shared'}, {u'file_id': 'UX7K'}, 'freshuser', 403),
+ (RepositoryBuildList, 'POST', {'repository': 'devtable/shared'}, {u'file_id': 'UX7K'}, 'reader', 403),
+ (RepositoryBuildList, 'GET', {'repository': 'buynlarge/orgrepo'}, None, None, 401),
+ (RepositoryBuildList, 'GET', {'repository': 'buynlarge/orgrepo'}, None, 'devtable', 200),
+ (RepositoryBuildList, 'GET', {'repository': 'buynlarge/orgrepo'}, None, 'freshuser', 403),
+ (RepositoryBuildList, 'GET', {'repository': 'buynlarge/orgrepo'}, None, 'reader', 200),
+ (RepositoryBuildList, 'POST', {'repository': 'buynlarge/orgrepo'}, {u'file_id': 'UX7K'}, None, 401),
+ (RepositoryBuildList, 'POST', {'repository': 'buynlarge/orgrepo'}, {u'file_id': 'UX7K'}, 'devtable', 201),
+ (RepositoryBuildList, 'POST', {'repository': 'buynlarge/orgrepo'}, {u'file_id': 'UX7K'}, 'freshuser', 403),
+ (RepositoryBuildList, 'POST', {'repository': 'buynlarge/orgrepo'}, {u'file_id': 'UX7K'}, 'reader', 403),
+
+ (RepositoryImageList, 'GET', {'repository': 'public/publicrepo'}, None, None, 200),
+ (RepositoryImageList, 'GET', {'repository': 'public/publicrepo'}, None, 'devtable', 200),
+ (RepositoryImageList, 'GET', {'repository': 'public/publicrepo'}, None, 'freshuser', 200),
+ (RepositoryImageList, 'GET', {'repository': 'public/publicrepo'}, None, 'reader', 200),
+ (RepositoryImageList, 'GET', {'repository': 'devtable/shared'}, None, None, 401),
+ (RepositoryImageList, 'GET', {'repository': 'devtable/shared'}, None, 'devtable', 200),
+ (RepositoryImageList, 'GET', {'repository': 'devtable/shared'}, None, 'freshuser', 403),
+ (RepositoryImageList, 'GET', {'repository': 'devtable/shared'}, None, 'reader', 200),
+ (RepositoryImageList, 'GET', {'repository': 'buynlarge/orgrepo'}, None, None, 401),
+ (RepositoryImageList, 'GET', {'repository': 'buynlarge/orgrepo'}, None, 'devtable', 200),
+ (RepositoryImageList, 'GET', {'repository': 'buynlarge/orgrepo'}, None, 'freshuser', 403),
+ (RepositoryImageList, 'GET', {'repository': 'buynlarge/orgrepo'}, None, 'reader', 200),
+
+ (RepositoryLogs, 'GET', {'repository': 'public/publicrepo'}, None, None, 401),
+ (RepositoryLogs, 'GET', {'repository': 'public/publicrepo'}, None, 'devtable', 403),
+ (RepositoryLogs, 'GET', {'repository': 'public/publicrepo'}, None, 'freshuser', 403),
+ (RepositoryLogs, 'GET', {'repository': 'public/publicrepo'}, None, 'reader', 403),
+ (RepositoryLogs, 'GET', {'repository': 'devtable/shared'}, None, None, 401),
+ (RepositoryLogs, 'GET', {'repository': 'devtable/shared'}, None, 'devtable', 200),
+ (RepositoryLogs, 'GET', {'repository': 'devtable/shared'}, None, 'freshuser', 403),
+ (RepositoryLogs, 'GET', {'repository': 'devtable/shared'}, None, 'reader', 403),
+ (RepositoryLogs, 'GET', {'repository': 'buynlarge/orgrepo'}, None, None, 401),
+ (RepositoryLogs, 'GET', {'repository': 'buynlarge/orgrepo'}, None, 'devtable', 200),
+ (RepositoryLogs, 'GET', {'repository': 'buynlarge/orgrepo'}, None, 'freshuser', 403),
+ (RepositoryLogs, 'GET', {'repository': 'buynlarge/orgrepo'}, None, 'reader', 403),
+
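+ # Robot endpoints: GET/DELETE on the missing robot 'robotname' return 400,
+ # while PUT creates it (201).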
+ (UserRobot, 'DELETE', {'robot_shortname': 'robotname'}, None, None, 401),
+ (UserRobot, 'DELETE', {'robot_shortname': 'robotname'}, None, 'devtable', 400),
+ (UserRobot, 'DELETE', {'robot_shortname': 'robotname'}, None, 'freshuser', 400),
+ (UserRobot, 'DELETE', {'robot_shortname': 'robotname'}, None, 'reader', 400),
+ (UserRobot, 'GET', {'robot_shortname': 'robotname'}, None, None, 401),
+ (UserRobot, 'GET', {'robot_shortname': 'robotname'}, None, 'devtable', 400),
+ (UserRobot, 'GET', {'robot_shortname': 'robotname'}, None, 'freshuser', 400),
+ (UserRobot, 'GET', {'robot_shortname': 'robotname'}, None, 'reader', 400),
+ (UserRobot, 'PUT', {'robot_shortname': 'robotname'}, {}, None, 401),
+ (UserRobot, 'PUT', {'robot_shortname': 'robotname'}, {}, 'devtable', 201),
+ (UserRobot, 'PUT', {'robot_shortname': 'robotname'}, {}, 'freshuser', 201),
+ (UserRobot, 'PUT', {'robot_shortname': 'robotname'}, {}, 'reader', 201),
+
+ (RegenerateUserRobot, 'POST', {'robot_shortname': 'robotname'}, None, None, 401),
+ (RegenerateUserRobot, 'POST', {'robot_shortname': 'robotname'}, None, 'devtable', 400),
+ (RegenerateUserRobot, 'POST', {'robot_shortname': 'robotname'}, None, 'freshuser', 400),
+ (RegenerateUserRobot, 'POST', {'robot_shortname': 'robotname'}, None, 'reader', 400),
+
+ (RegenerateOrgRobot, 'POST', {'orgname': 'buynlarge', 'robot_shortname': 'robotname'}, None, None, 401),
+ (RegenerateOrgRobot, 'POST', {'orgname': 'buynlarge', 'robot_shortname': 'robotname'}, None, 'devtable', 400),
+ (RegenerateOrgRobot, 'POST', {'orgname': 'buynlarge', 'robot_shortname': 'robotname'}, None, 'freshuser', 403),
+ (RegenerateOrgRobot, 'POST', {'orgname': 'buynlarge', 'robot_shortname': 'robotname'}, None, 'reader', 403),
+
+ (UserRobotPermissions, 'GET', {'robot_shortname': 'robotname'}, None, None, 401),
+ (UserRobotPermissions, 'GET', {'robot_shortname': 'robotname'}, None, 'devtable', 400),
+ (UserRobotPermissions, 'GET', {'robot_shortname': 'robotname'}, None, 'freshuser', 400),
+ (UserRobotPermissions, 'GET', {'robot_shortname': 'robotname'}, None, 'reader', 400),
+
+ (OrgRobotPermissions, 'GET', {'orgname': 'buynlarge', 'robot_shortname': 'robotname'}, None, None, 401),
+ (OrgRobotPermissions, 'GET', {'orgname': 'buynlarge', 'robot_shortname': 'robotname'}, None, 'devtable', 400),
+ (OrgRobotPermissions, 'GET', {'orgname': 'buynlarge', 'robot_shortname': 'robotname'}, None, 'freshuser', 403),
+ (OrgRobotPermissions, 'GET', {'orgname': 'buynlarge', 'robot_shortname': 'robotname'}, None, 'reader', 403),
+
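+ # Organization reads are public; DELETE/PUT require an org admin.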
+ (Organization, 'DELETE', {'orgname': 'buynlarge'}, {}, None, 401),
+ (Organization, 'DELETE', {'orgname': 'buynlarge'}, {}, 'devtable', 204),
+ (Organization, 'DELETE', {'orgname': 'buynlarge'}, {}, 'freshuser', 403),
+ (Organization, 'DELETE', {'orgname': 'buynlarge'}, {}, 'reader', 403),
+ (Organization, 'GET', {'orgname': 'buynlarge'}, None, None, 200),
+ (Organization, 'GET', {'orgname': 'buynlarge'}, None, 'devtable', 200),
+ (Organization, 'GET', {'orgname': 'buynlarge'}, None, 'freshuser', 200),
+ (Organization, 'GET', {'orgname': 'buynlarge'}, None, 'reader', 200),
+ (Organization, 'PUT', {'orgname': 'buynlarge'}, {}, None, 401),
+ (Organization, 'PUT', {'orgname': 'buynlarge'}, {}, 'devtable', 200),
+ (Organization, 'PUT', {'orgname': 'buynlarge'}, {}, 'freshuser', 403),
+ (Organization, 'PUT', {'orgname': 'buynlarge'}, {}, 'reader', 403),
+
+ (Repository, 'DELETE', {'repository': 'public/publicrepo'}, None, None, 401),
+ (Repository, 'DELETE', {'repository': 'public/publicrepo'}, None, 'devtable', 403),
+ (Repository, 'DELETE', {'repository': 'public/publicrepo'}, None, 'freshuser', 403),
+ (Repository, 'DELETE', {'repository': 'public/publicrepo'}, None, 'reader', 403),
+ (Repository, 'GET', {'repository': 'public/publicrepo'}, None, None, 200),
+ (Repository, 'GET', {'repository': 'public/publicrepo'}, None, 'devtable', 200),
+ (Repository, 'GET', {'repository': 'public/publicrepo'}, None, 'freshuser', 200),
+ (Repository, 'GET', {'repository': 'public/publicrepo'}, None, 'reader', 200),
+ (Repository, 'PUT', {'repository': 'public/publicrepo'}, {u'description': 'WXNG'}, None, 401),
+ (Repository, 'PUT', {'repository': 'public/publicrepo'}, {u'description': 'WXNG'}, 'devtable', 403),
+ (Repository, 'PUT', {'repository': 'public/publicrepo'}, {u'description': 'WXNG'}, 'freshuser', 403),
+ (Repository, 'PUT', {'repository': 'public/publicrepo'}, {u'description': 'WXNG'}, 'reader', 403),
+ (Repository, 'DELETE', {'repository': 'devtable/shared'}, None, None, 401),
+ (Repository, 'DELETE', {'repository': 'devtable/shared'}, None, 'devtable', 204),
+ (Repository, 'DELETE', {'repository': 'devtable/shared'}, None, 'freshuser', 403),
+ (Repository, 'DELETE', {'repository': 'devtable/shared'}, None, 'reader', 403),
+ (Repository, 'GET', {'repository': 'devtable/shared'}, None, None, 401),
+ (Repository, 'GET', {'repository': 'devtable/shared'}, None, 'devtable', 200),
+ (Repository, 'GET', {'repository': 'devtable/shared'}, None, 'freshuser', 403),
+ (Repository, 'GET', {'repository': 'devtable/shared'}, None, 'reader', 200),
+ (Repository, 'PUT', {'repository': 'devtable/shared'}, {u'description': 'WXNG'}, None, 401),
+ (Repository, 'PUT', {'repository': 'devtable/shared'}, {u'description': 'WXNG'}, 'devtable', 200),
+ (Repository, 'PUT', {'repository': 'devtable/shared'}, {u'description': 'WXNG'}, 'freshuser', 403),
+ (Repository, 'PUT', {'repository': 'devtable/shared'}, {u'description': 'WXNG'}, 'reader', 403),
+ (Repository, 'DELETE', {'repository': 'buynlarge/orgrepo'}, None, None, 401),
+ (Repository, 'DELETE', {'repository': 'buynlarge/orgrepo'}, None, 'devtable', 204),
+ (Repository, 'DELETE', {'repository': 'buynlarge/orgrepo'}, None, 'freshuser', 403),
+ (Repository, 'DELETE', {'repository': 'buynlarge/orgrepo'}, None, 'reader', 403),
+ (Repository, 'GET', {'repository': 'buynlarge/orgrepo'}, None, None, 401),
+ (Repository, 'GET', {'repository': 'buynlarge/orgrepo'}, None, 'devtable', 200),
+ (Repository, 'GET', {'repository': 'buynlarge/orgrepo'}, None, 'freshuser', 403),
+ (Repository, 'GET', {'repository': 'buynlarge/orgrepo'}, None, 'reader', 200),
+ (Repository, 'PUT', {'repository': 'buynlarge/orgrepo'}, {u'description': 'WXNG'}, None, 401),
+ (Repository, 'PUT', {'repository': 'buynlarge/orgrepo'}, {u'description': 'WXNG'}, 'devtable', 200),
+ (Repository, 'PUT', {'repository': 'buynlarge/orgrepo'}, {u'description': 'WXNG'}, 'freshuser', 403),
+ (Repository, 'PUT', {'repository': 'buynlarge/orgrepo'}, {u'description': 'WXNG'}, 'reader', 403),
+
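+ # Entity search is open to all callers, authenticated or not.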
+ (EntitySearch, 'GET', {'prefix': 'R9NZ'}, None, None, 200),
+ (EntitySearch, 'GET', {'prefix': 'R9NZ'}, None, 'devtable', 200),
+ (EntitySearch, 'GET', {'prefix': 'R9NZ'}, None, 'freshuser', 200),
+ (EntitySearch, 'GET', {'prefix': 'R9NZ'}, None, 'reader', 200),
+
+ (ApplicationInformation, 'GET', {'client_id': '3LGI'}, None, None, 404),
+ (ApplicationInformation, 'GET', {'client_id': '3LGI'}, None, 'devtable', 404),
+ (ApplicationInformation, 'GET', {'client_id': '3LGI'}, None, 'freshuser', 404),
+ (ApplicationInformation, 'GET', {'client_id': '3LGI'}, None, 'reader', 404),
+
+ (OrganizationApplications, 'GET', {'orgname': 'buynlarge'}, None, None, 401),
+ (OrganizationApplications, 'GET', {'orgname': 'buynlarge'}, None, 'devtable', 200),
+ (OrganizationApplications, 'GET', {'orgname': 'buynlarge'}, None, 'freshuser', 403),
+ (OrganizationApplications, 'GET', {'orgname': 'buynlarge'}, None, 'reader', 403),
+ (OrganizationApplications, 'POST', {'orgname': 'buynlarge'}, {u'name': 'foo'}, None, 401),
+ (OrganizationApplications, 'POST', {'orgname': 'buynlarge'}, {u'name': 'foo'}, 'devtable', 200),
+ (OrganizationApplications, 'POST', {'orgname': 'buynlarge'}, {u'name': 'foo'}, 'freshuser', 403),
+ (OrganizationApplications, 'POST', {'orgname': 'buynlarge'}, {u'name': 'foo'}, 'reader', 403),
+
+ (OrganizationApplicationResource, 'DELETE', {'orgname': 'buynlarge', 'client_id': 'deadbeef'}, None, None, 401),
+ (OrganizationApplicationResource, 'DELETE', {'orgname': 'buynlarge', 'client_id': 'deadbeef'}, None, 'devtable', 204),
+ (OrganizationApplicationResource, 'DELETE', {'orgname': 'buynlarge', 'client_id': 'deadbeef'}, None, 'freshuser', 403),
+ (OrganizationApplicationResource, 'DELETE', {'orgname': 'buynlarge', 'client_id': 'deadbeef'}, None, 'reader', 403),
+ (OrganizationApplicationResource, 'GET', {'orgname': 'buynlarge', 'client_id': 'deadbeef'}, None, None, 401),
+ (OrganizationApplicationResource, 'GET', {'orgname': 'buynlarge', 'client_id': 'deadbeef'}, None, 'devtable', 200),
+ (OrganizationApplicationResource, 'GET', {'orgname': 'buynlarge', 'client_id': 'deadbeef'}, None, 'freshuser', 403),
+ (OrganizationApplicationResource, 'GET', {'orgname': 'buynlarge', 'client_id': 'deadbeef'}, None, 'reader', 403),
+ (OrganizationApplicationResource, 'PUT', {'orgname': 'buynlarge', 'client_id': 'deadbeef'}, {u'redirect_uri': 'foo', u'name': 'foo', u'application_uri': 'foo'}, None, 401),
+ (OrganizationApplicationResource, 'PUT', {'orgname': 'buynlarge', 'client_id': 'deadbeef'}, {u'redirect_uri': 'foo', u'name': 'foo', u'application_uri': 'foo'}, 'devtable', 200),
+ (OrganizationApplicationResource, 'PUT', {'orgname': 'buynlarge', 'client_id': 'deadbeef'}, {u'redirect_uri': 'foo', u'name': 'foo', u'application_uri': 'foo'}, 'freshuser', 403),
+ (OrganizationApplicationResource, 'PUT', {'orgname': 'buynlarge', 'client_id': 'deadbeef'}, {u'redirect_uri': 'foo', u'name': 'foo', u'application_uri': 'foo'}, 'reader', 403),
+
+ (OrganizationApplicationResetClientSecret, 'POST', {'orgname': 'buynlarge', 'client_id': 'deadbeef'}, None, None, 401),
+ (OrganizationApplicationResetClientSecret, 'POST', {'orgname': 'buynlarge', 'client_id': 'deadbeef'}, None, 'devtable', 200),
+ (OrganizationApplicationResetClientSecret, 'POST', {'orgname': 'buynlarge', 'client_id': 'deadbeef'}, None, 'freshuser', 403),
+ (OrganizationApplicationResetClientSecret, 'POST', {'orgname': 'buynlarge', 'client_id': 'deadbeef'}, None, 'reader', 403),
+
+ (Users, 'GET', {'username': 'devtable'}, None, None, 200),
+
+ (UserNotificationList, 'GET', None, None, None, 401),
+ (UserNotificationList, 'GET', None, None, 'devtable', 200),
+ (UserNotificationList, 'GET', None, None, 'freshuser', 200),
+ (UserNotificationList, 'GET', None, None, 'reader', 200),
+
+ (UserAuthorizationList, 'GET', None, None, None, 401),
+ (UserAuthorizationList, 'GET', None, None, 'devtable', 200),
+ (UserAuthorizationList, 'GET', None, None, 'freshuser', 200),
+ (UserAuthorizationList, 'GET', None, None, 'reader', 200),
+
+ (UserAuthorization, 'DELETE', {'access_token_uuid': 'fake'}, None, None, 401),
+ (UserAuthorization, 'DELETE', {'access_token_uuid': 'fake'}, None, 'devtable', 404),
+ (UserAuthorization, 'DELETE', {'access_token_uuid': 'fake'}, None, 'freshuser', 404),
+ (UserAuthorization, 'DELETE', {'access_token_uuid': 'fake'}, None, 'reader', 404),
+ (UserAuthorization, 'GET', {'access_token_uuid': 'fake'}, None, None, 401),
+ (UserAuthorization, 'GET', {'access_token_uuid': 'fake'}, None, 'devtable', 404),
+ (UserAuthorization, 'GET', {'access_token_uuid': 'fake'}, None, 'freshuser', 404),
+ (UserAuthorization, 'GET', {'access_token_uuid': 'fake'}, None, 'reader', 404),
+
+ (UserAggregateLogs, 'GET', None, None, None, 401),
+ (UserAggregateLogs, 'GET', None, None, 'devtable', 200),
+ (UserAggregateLogs, 'GET', None, None, 'freshuser', 200),
+ (UserAggregateLogs, 'GET', None, None, 'reader', 200),
+
+ (OrgAggregateLogs, 'GET', {'orgname': 'buynlarge'}, None, None, 401),
+ (OrgAggregateLogs, 'GET', {'orgname': 'buynlarge'}, None, 'devtable', 200),
+ (OrgAggregateLogs, 'GET', {'orgname': 'buynlarge'}, None, 'freshuser', 403),
+ (OrgAggregateLogs, 'GET', {'orgname': 'buynlarge'}, None, 'reader', 403),
+
+ (RepositoryAggregateLogs, 'GET', {'repository': 'devtable/simple'}, None, None, 401),
+ (RepositoryAggregateLogs, 'GET', {'repository': 'devtable/simple'}, None, 'devtable', 200),
+ (RepositoryAggregateLogs, 'GET', {'repository': 'devtable/simple'}, None, 'freshuser', 403),
+ (RepositoryAggregateLogs, 'GET', {'repository': 'devtable/simple'}, None, 'reader', 403),
+
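+ # The export endpoints reuse the EXPORTLOGS_PARAMS body defined earlier in
+ # this module.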
+ (ExportUserLogs, 'POST', None, EXPORTLOGS_PARAMS, None, 401),
+ (ExportUserLogs, 'POST', None, EXPORTLOGS_PARAMS, 'devtable', 200),
+ (ExportUserLogs, 'POST', None, EXPORTLOGS_PARAMS, 'freshuser', 200),
+ (ExportUserLogs, 'POST', None, EXPORTLOGS_PARAMS, 'reader', 200),
+
+ (ExportOrgLogs, 'POST', {'orgname': 'buynlarge'}, EXPORTLOGS_PARAMS, None, 401),
+ (ExportOrgLogs, 'POST', {'orgname': 'buynlarge'}, EXPORTLOGS_PARAMS, 'devtable', 200),
+ (ExportOrgLogs, 'POST', {'orgname': 'buynlarge'}, EXPORTLOGS_PARAMS, 'freshuser', 403),
+ (ExportOrgLogs, 'POST', {'orgname': 'buynlarge'}, EXPORTLOGS_PARAMS, 'reader', 403),
+
+ (ExportRepositoryLogs, 'POST', {'repository': 'devtable/simple'}, EXPORTLOGS_PARAMS, None, 401),
+ (ExportRepositoryLogs, 'POST', {'repository': 'devtable/simple'}, EXPORTLOGS_PARAMS, 'devtable', 200),
+ (ExportRepositoryLogs, 'POST', {'repository': 'devtable/simple'}, EXPORTLOGS_PARAMS, 'freshuser', 403),
+ (ExportRepositoryLogs, 'POST', {'repository': 'devtable/simple'}, EXPORTLOGS_PARAMS, 'reader', 403),
+
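+ # Super user endpoints: only 'devtable' (the configured superuser) gets past
+ # authorization; the rest receive 403.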
+ (SuperUserAggregateLogs, 'GET', None, None, None, 401),
+ (SuperUserAggregateLogs, 'GET', None, None, 'devtable', 200),
+ (SuperUserAggregateLogs, 'GET', None, None, 'freshuser', 403),
+ (SuperUserAggregateLogs, 'GET', None, None, 'reader', 403),
+
+ (SuperUserLogs, 'GET', None, None, None, 401),
+ (SuperUserLogs, 'GET', None, None, 'devtable', 200),
+ (SuperUserLogs, 'GET', None, None, 'freshuser', 403),
+ (SuperUserLogs, 'GET', None, None, 'reader', 403),
+
+ (SuperUserSendRecoveryEmail, 'POST', {'username': 'someuser'}, None, None, 401),
+ (SuperUserSendRecoveryEmail, 'POST', {'username': 'someuser'}, None, 'devtable', 404),
+ (SuperUserSendRecoveryEmail, 'POST', {'username': 'someuser'}, None, 'freshuser', 403),
+ (SuperUserSendRecoveryEmail, 'POST', {'username': 'someuser'}, None, 'reader', 403),
+
+ (SuperUserTakeOwnership, 'POST', {'namespace': 'invalidnamespace'}, {}, None, 401),
+ (SuperUserTakeOwnership, 'POST', {'namespace': 'invalidnamespace'}, {}, 'devtable', 404),
+ (SuperUserTakeOwnership, 'POST', {'namespace': 'invalidnamespace'}, {}, 'freshuser', 403),
+ (SuperUserTakeOwnership, 'POST', {'namespace': 'invalidnamespace'}, {}, 'reader', 403),
+
+ (SuperUserServiceKeyApproval, 'POST', {'kid': 1234}, {}, None, 401),
+ (SuperUserServiceKeyApproval, 'POST', {'kid': 1234}, {}, 'devtable', 404),
+ (SuperUserServiceKeyApproval, 'POST', {'kid': 1234}, {}, 'freshuser', 403),
+ (SuperUserServiceKeyApproval, 'POST', {'kid': 1234}, {}, 'reader', 403),
+
+ (SuperUserServiceKeyManagement, 'GET', None, None, None, 401),
+ (SuperUserServiceKeyManagement, 'GET', None, None, 'devtable', 200),
+ (SuperUserServiceKeyManagement, 'GET', None, None, 'freshuser', 403),
+ (SuperUserServiceKeyManagement, 'GET', None, None, 'reader', 403),
+ (SuperUserServiceKeyManagement, 'POST', None, {'expiration': None, 'service': 'someservice'}, None, 401),
+ (SuperUserServiceKeyManagement, 'POST', None, {'expiration': None, 'service': 'someservice'}, 'devtable', 200),
+ (SuperUserServiceKeyManagement, 'POST', None, {'expiration': None, 'service': 'someservice'}, 'freshuser', 403),
+ (SuperUserServiceKeyManagement, 'POST', None, {'expiration': None, 'service': 'someservice'}, 'reader', 403),
+
+ (SuperUserServiceKey, 'DELETE', {'kid': 1234}, None, None, 401),
+ (SuperUserServiceKey, 'DELETE', {'kid': 1234}, None, 'devtable', 404),
+ (SuperUserServiceKey, 'DELETE', {'kid': 1234}, None, 'freshuser', 403),
+ (SuperUserServiceKey, 'DELETE', {'kid': 1234}, None, 'reader', 403),
+ (SuperUserServiceKey, 'GET', {'kid': 1234}, None, None, 401),
+ (SuperUserServiceKey, 'GET', {'kid': 1234}, None, 'devtable', 404),
+ (SuperUserServiceKey, 'GET', {'kid': 1234}, None, 'freshuser', 403),
+ (SuperUserServiceKey, 'GET', {'kid': 1234}, None, 'reader', 403),
+ (SuperUserServiceKey, 'PUT', {'kid': 1234}, {}, None, 401),
+ (SuperUserServiceKey, 'PUT', {'kid': 1234}, {}, 'devtable', 404),
+ (SuperUserServiceKey, 'PUT', {'kid': 1234}, {}, 'freshuser', 403),
+ (SuperUserServiceKey, 'PUT', {'kid': 1234}, {}, 'reader', 403),
+
+ (TeamMemberInvite, 'DELETE', {'code': 'foobarbaz'}, None, None, 401),
+ (TeamMemberInvite, 'DELETE', {'code': 'foobarbaz'}, None, 'devtable', 400),
+ (TeamMemberInvite, 'DELETE', {'code': 'foobarbaz'}, None, 'freshuser', 400),
+ (TeamMemberInvite, 'DELETE', {'code': 'foobarbaz'}, None, 'reader', 400),
+ (TeamMemberInvite, 'PUT', {'code': 'foobarbaz'}, None, None, 401),
+ (TeamMemberInvite, 'PUT', {'code': 'foobarbaz'}, None, 'devtable', 400),
+ (TeamMemberInvite, 'PUT', {'code': 'foobarbaz'}, None, 'freshuser', 400),
+ (TeamMemberInvite, 'PUT', {'code': 'foobarbaz'}, None, 'reader', 400),
+
+ (ConductSearch, 'GET', None, None, None, 200),
+ (ConductSearch, 'GET', None, None, 'devtable', 200),
+
+ (ChangeLog, 'GET', None, None, None, 401),
+ (ChangeLog, 'GET', None, None, 'devtable', 200),
+ (ChangeLog, 'GET', None, None, 'freshuser', 403),
+ (ChangeLog, 'GET', None, None, 'reader', 403),
+
+ (SuperUserOrganizationList, 'GET', None, None, None, 401),
+ (SuperUserOrganizationList, 'GET', None, None, 'devtable', 200),
+ (SuperUserOrganizationList, 'GET', None, None, 'freshuser', 403),
+ (SuperUserOrganizationList, 'GET', None, None, 'reader', 403),
+
+ (SuperUserOrganizationManagement, 'DELETE', {'name': 'buynlarge'}, None, None, 401),
+ (SuperUserOrganizationManagement, 'DELETE', {'name': 'buynlarge'}, None, 'devtable', 204),
+ (SuperUserOrganizationManagement, 'DELETE', {'name': 'buynlarge'}, None, 'freshuser', 403),
+ (SuperUserOrganizationManagement, 'DELETE', {'name': 'buynlarge'}, None, 'reader', 403),
+ (SuperUserOrganizationManagement, 'PUT', {'name': 'buynlarge'}, {}, None, 401),
+ (SuperUserOrganizationManagement, 'PUT', {'name': 'buynlarge'}, {}, 'devtable', 200),
+ (SuperUserOrganizationManagement, 'PUT', {'name': 'buynlarge'}, {}, 'freshuser', 403),
+ (SuperUserOrganizationManagement, 'PUT', {'name': 'buynlarge'}, {}, 'reader', 403),
+
+ (SuperUserList, 'GET', None, None, None, 401),
+ (SuperUserList, 'GET', None, None, 'devtable', 200),
+ (SuperUserList, 'GET', None, None, 'freshuser', 403),
+ (SuperUserList, 'GET', None, None, 'reader', 403),
+
+ (SuperUserList, 'POST', None, {'username': 'foo'}, None, 401),
+ (SuperUserList, 'POST', None, {'username': 'foo'}, 'devtable', 400),
+ (SuperUserList, 'POST', None, {'username': 'foo'}, 'freshuser', 403),
+ (SuperUserList, 'POST', None, {'username': 'foo'}, 'reader', 403),
+
+ (SuperUserManagement, 'DELETE', {'username': 'freshuser'}, None, None, 401),
+ (SuperUserManagement, 'DELETE', {'username': 'freshuser'}, None, 'devtable', 204),
+ (SuperUserManagement, 'DELETE', {'username': 'freshuser'}, None, 'freshuser', 403),
+ (SuperUserManagement, 'DELETE', {'username': 'freshuser'}, None, 'reader', 403),
+ (SuperUserManagement, 'GET', {'username': 'freshuser'}, None, None, 401),
+ (SuperUserManagement, 'GET', {'username': 'freshuser'}, None, 'devtable', 200),
+ (SuperUserManagement, 'GET', {'username': 'freshuser'}, None, 'freshuser', 403),
+ (SuperUserManagement, 'GET', {'username': 'freshuser'}, None, 'reader', 403),
+ (SuperUserManagement, 'PUT', {'username': 'freshuser'}, {}, None, 401),
+ (SuperUserManagement, 'PUT', {'username': 'freshuser'}, {}, 'devtable', 200),
+ (SuperUserManagement, 'PUT', {'username': 'freshuser'}, {}, 'freshuser', 403),
+ (SuperUserManagement, 'PUT', {'username': 'freshuser'}, {}, 'reader', 403),
+
+ (GlobalUserMessages, 'GET', None, None, None, 200),
+
+ (GlobalUserMessages, 'POST', None, None, None, 401),
+ (GlobalUserMessages, 'POST', None, {'message': {'content': 'msg', 'media_type': 'text/plain', 'severity': 'info'}}, 'devtable', 201),
+ (GlobalUserMessages, 'POST', None, {'message': {'content': 'msg', 'media_type': 'text/plain', 'severity': 'info'}}, 'freshuser', 403),
+ (GlobalUserMessages, 'POST', None, {'message': {'content': 'msg', 'media_type': 'text/plain', 'severity': 'info'}}, 'reader', 403),
+
+ (GlobalUserMessage, 'DELETE', {'uuid': '1234'}, None, None, 401),
+ (GlobalUserMessage, 'DELETE', {'uuid': '1234'}, None, 'devtable', 204),
+ (GlobalUserMessage, 'DELETE', {'uuid': '1234'}, None, 'freshuser', 403),
+ (GlobalUserMessage, 'DELETE', {'uuid': '1234'}, None, 'reader', 403),
+
+ (UserInvoiceFieldList, 'GET', None, None, None, 401),
+ (UserInvoiceFieldList, 'GET', None, None, 'devtable', 200),
+ (UserInvoiceFieldList, 'GET', None, None, 'freshuser', 404),
+ (UserInvoiceFieldList, 'GET', None, None, 'reader', 404),
+ (UserInvoiceFieldList, 'POST', None, None, None, 401),
+ (UserInvoiceFieldList, 'POST', None, {'value': 'bar', 'title': 'foo'}, 'devtable', 200),
+ (UserInvoiceFieldList, 'POST', None, {'value': 'bar', 'title': 'foo'}, 'freshuser', 404),
+ (UserInvoiceFieldList, 'POST', None, {'value': 'bar', 'title': 'foo'}, 'reader', 404),
+
+ (UserInvoiceField, 'DELETE', {'field_uuid': '1234'}, None, None, 401),
+ (UserInvoiceField, 'DELETE', {'field_uuid': '1234'}, None, 'devtable', 201),
+ (UserInvoiceField, 'DELETE', {'field_uuid': '1234'}, None, 'freshuser', 404),
+ (UserInvoiceField, 'DELETE', {'field_uuid': '1234'}, None, 'reader', 404),
+
+ (OrganizationInvoiceFieldList, 'GET', {'orgname': 'buynlarge'}, None, None, 403),
+ (OrganizationInvoiceFieldList, 'GET', {'orgname': 'buynlarge'}, None, 'devtable', 200),
+ (OrganizationInvoiceFieldList, 'GET', {'orgname': 'buynlarge'}, None, 'freshuser', 403),
+ (OrganizationInvoiceFieldList, 'GET', {'orgname': 'buynlarge'}, None, 'reader', 403),
+ (OrganizationInvoiceFieldList, 'POST', {'orgname': 'buynlarge'}, {'value': 'bar', 'title': 'foo'}, None, 403),
+ (OrganizationInvoiceFieldList, 'POST', {'orgname': 'buynlarge'}, {'value': 'bar', 'title': 'foo'}, 'devtable', 200),
+ (OrganizationInvoiceFieldList, 'POST', {'orgname': 'buynlarge'}, {'value': 'bar', 'title': 'foo'}, 'freshuser', 403),
+ (OrganizationInvoiceFieldList, 'POST', {'orgname': 'buynlarge'}, {'value': 'bar', 'title': 'foo'}, 'reader', 403),
+
+ (OrganizationInvoiceField, 'DELETE', {'orgname': 'buynlarge', 'field_uuid': '1234'}, None, None, 403),
+ (OrganizationInvoiceField, 'DELETE', {'orgname': 'buynlarge', 'field_uuid': '1234'}, None, 'devtable', 201),
+ (OrganizationInvoiceField, 'DELETE', {'orgname': 'buynlarge', 'field_uuid': '1234'}, None, 'freshuser', 403),
+ (OrganizationInvoiceField, 'DELETE', {'orgname': 'buynlarge', 'field_uuid': '1234'}, None, 'reader', 403),
+
+ (RepositoryImageSecurity, 'GET', {'repository': 'devtable/simple', 'imageid': 'fake'}, None, None, 401),
+ (RepositoryImageSecurity, 'GET', {'repository': 'devtable/simple', 'imageid': 'fake'}, None, 'devtable', 404),
+ (RepositoryImageSecurity, 'GET', {'repository': 'devtable/simple', 'imageid': 'fake'}, None, 'freshuser', 403),
+ (RepositoryImageSecurity, 'GET', {'repository': 'devtable/simple', 'imageid': 'fake'}, None, 'reader', 403),
+
+ (RepositoryManifestSecurity, 'GET', {'manifestref': 'sha256:abcd', 'repository': 'devtable/simple'}, None, None, 401),
+ (RepositoryManifestSecurity, 'GET', {'manifestref': 'sha256:abcd', 'repository': 'devtable/simple'}, None, 'devtable', 404),
+ (RepositoryManifestSecurity, 'GET', {'manifestref': 'sha256:abcd', 'repository': 'devtable/simple'}, None, 'freshuser', 403),
+ (RepositoryManifestSecurity, 'GET', {'manifestref': 'sha256:abcd', 'repository': 'devtable/simple'}, None, 'reader', 403),
+
+ (RepositoryManifestLabels, 'GET', {'manifestref': 'sha256:abcd', 'repository': 'devtable/simple'}, None, None, 401),
+ (RepositoryManifestLabels, 'GET', {'manifestref': 'sha256:abcd', 'repository': 'devtable/simple'}, None, 'devtable', 404),
+ (RepositoryManifestLabels, 'GET', {'manifestref': 'sha256:abcd', 'repository': 'devtable/simple'}, None, 'freshuser', 403),
+ (RepositoryManifestLabels, 'GET', {'manifestref': 'sha256:abcd', 'repository': 'devtable/simple'}, None, 'reader', 403),
+ (RepositoryManifestLabels, 'POST', {'manifestref': 'sha256:abcd', 'repository': 'devtable/simple'}, {'media_type': 'text/plain', 'value': 'bar', 'key': 'foo'}, None, 401),
+ (RepositoryManifestLabels, 'POST', {'manifestref': 'sha256:abcd', 'repository': 'devtable/simple'}, {'media_type': 'text/plain', 'value': 'bar', 'key': 'foo'}, 'devtable', 404),
+ (RepositoryManifestLabels, 'POST', {'manifestref': 'sha256:abcd', 'repository': 'devtable/simple'}, {'media_type': 'text/plain', 'value': 'bar', 'key': 'foo'}, 'freshuser', 403),
+ (RepositoryManifestLabels, 'POST', {'manifestref': 'sha256:abcd', 'repository': 'devtable/simple'}, {'media_type': 'text/plain', 'value': 'bar', 'key': 'foo'}, 'reader', 403),
+
+ (ManageRepositoryManifestLabel, 'GET', {'labelid': 'someid', 'manifestref': 'sha256:abcd', 'repository': 'devtable/simple'}, None, None, 401),
+ (ManageRepositoryManifestLabel, 'GET', {'labelid': 'someid', 'manifestref': 'sha256:abcd', 'repository': 'devtable/simple'}, None, 'devtable', 404),
+ (ManageRepositoryManifestLabel, 'GET', {'labelid': 'someid', 'manifestref': 'sha256:abcd', 'repository': 'devtable/simple'}, None, 'freshuser', 403),
+ (ManageRepositoryManifestLabel, 'GET', {'labelid': 'someid', 'manifestref': 'sha256:abcd', 'repository': 'devtable/simple'}, None, 'reader', 403),
+
+ (ManageRepositoryManifestLabel, 'DELETE', {'labelid': 'someid', 'manifestref': 'sha256:abcd', 'repository': 'devtable/simple'}, None, None, 401),
+ (ManageRepositoryManifestLabel, 'DELETE', {'labelid': 'someid', 'manifestref': 'sha256:abcd', 'repository': 'devtable/simple'}, None, 'devtable', 404),
+ (ManageRepositoryManifestLabel, 'DELETE', {'labelid': 'someid', 'manifestref': 'sha256:abcd', 'repository': 'devtable/simple'}, None, 'freshuser', 403),
+ (ManageRepositoryManifestLabel, 'DELETE', {'labelid': 'someid', 'manifestref': 'sha256:abcd', 'repository': 'devtable/simple'}, None, 'reader', 403),
+
+ (InviteTeamMember, 'PUT', {'orgname': 'buynlarge', 'teamname': 'owners', 'email': 'a@example.com'}, None, None, 401),
+ (InviteTeamMember, 'PUT', {'orgname': 'buynlarge', 'teamname': 'owners', 'email': 'a@example.com'}, None, 'devtable', 200),
+ (InviteTeamMember, 'PUT', {'orgname': 'buynlarge', 'teamname': 'owners', 'email': 'a@example.com'}, None, 'freshuser', 403),
+ (InviteTeamMember, 'PUT', {'orgname': 'buynlarge', 'teamname': 'owners', 'email': 'a@example.com'}, None, 'reader', 403),
+
+ (InviteTeamMember, 'DELETE', {'orgname': 'buynlarge', 'teamname': 'owners', 'email': 'a@example.com'}, None, None, 401),
+ (InviteTeamMember, 'DELETE', {'orgname': 'buynlarge', 'teamname': 'owners', 'email': 'a@example.com'}, None, 'devtable', 404),
+ (InviteTeamMember, 'DELETE', {'orgname': 'buynlarge', 'teamname': 'owners', 'email': 'a@example.com'}, None, 'freshuser', 403),
+ (InviteTeamMember, 'DELETE', {'orgname': 'buynlarge', 'teamname': 'owners', 'email': 'a@example.com'}, None, 'reader', 403),
+
+ (TestRepositoryNotification, 'POST', {'repository': 'buynlarge/orgrepo', 'uuid': 'foo'}, None, None, 401),
+ (TestRepositoryNotification, 'POST', {'repository': 'buynlarge/orgrepo', 'uuid': 'foo'}, None, 'devtable', 400),
+ (TestRepositoryNotification, 'POST', {'repository': 'buynlarge/orgrepo', 'uuid': 'foo'}, None, 'freshuser', 403),
+ (TestRepositoryNotification, 'POST', {'repository': 'buynlarge/orgrepo', 'uuid': 'foo'}, None, 'reader', 403),
+
+ (LinkExternalEntity, 'POST', {'username': 'foo'}, None, None, 404),
+
+ (BuildTriggerSourceNamespaces, 'GET', {'repository': 'devtable/simple', 'trigger_uuid': 'foo'}, None, None, 401),
+ (BuildTriggerSourceNamespaces, 'GET', {'repository': 'devtable/simple', 'trigger_uuid': 'foo'}, None, 'devtable', 404),
+ (BuildTriggerSourceNamespaces, 'GET', {'repository': 'devtable/simple', 'trigger_uuid': 'foo'}, None, 'freshuser', 403),
+ (BuildTriggerSourceNamespaces, 'GET', {'repository': 'devtable/simple', 'trigger_uuid': 'foo'}, None, 'reader', 403),
+
+ (RepoMirrorResource, 'GET', {'repository': 'devtable/simple'}, None, None, 401),
+ (RepoMirrorResource, 'GET', {'repository': 'devtable/simple'}, None, 'devtable', 404),
+ (RepoMirrorResource, 'GET', {'repository': 'devtable/simple'}, None, 'freshuser', 403),
+ (RepoMirrorResource, 'GET', {'repository': 'devtable/simple'}, None, 'reader', 403),
+
+ (RepoMirrorResource, 'POST', {'repository': 'devtable/simple'}, None, None, 401),
+ (RepoMirrorResource, 'POST', {'repository': 'devtable/simple'}, None, 'devtable', 400),
+ (RepoMirrorResource, 'POST', {'repository': 'devtable/simple'}, None, 'freshuser', 403),
+ (RepoMirrorResource, 'POST', {'repository': 'devtable/simple'}, None, 'reader', 403),
+
+ (RepoMirrorResource, 'PUT', {'repository': 'devtable/simple'}, None, None, 401),
+ (RepoMirrorResource, 'PUT', {'repository': 'devtable/simple'}, None, 'devtable', 400),
+ (RepoMirrorResource, 'PUT', {'repository': 'devtable/simple'}, None, 'freshuser', 403),
+ (RepoMirrorResource, 'PUT', {'repository': 'devtable/simple'}, None, 'reader', 403),
+
+ (RepoMirrorSyncNowResource, 'POST', {'repository': 'devtable/simple'}, None, None, 401),
+ (RepoMirrorSyncNowResource, 'POST', {'repository': 'devtable/simple'}, None, 'devtable', 404),
+ (RepoMirrorSyncNowResource, 'POST', {'repository': 'devtable/simple'}, None, 'freshuser', 403),
+ (RepoMirrorSyncNowResource, 'POST', {'repository': 'devtable/simple'}, None, 'reader', 403),
+
+ (RepoMirrorSyncCancelResource, 'POST', {'repository': 'devtable/simple'}, None, None, 401),
+ (RepoMirrorSyncCancelResource, 'POST', {'repository': 'devtable/simple'}, None, 'devtable', 404),
+ (RepoMirrorSyncCancelResource, 'POST', {'repository': 'devtable/simple'}, None, 'freshuser', 403),
+ (RepoMirrorSyncCancelResource, 'POST', {'repository': 'devtable/simple'}, None, 'reader', 403),
+
+ (RepositoryStateResource, 'PUT', {'repository': 'devtable/simple'}, None, None, 401),
+ (RepositoryStateResource, 'PUT', {'repository': 'devtable/simple'}, None, 'devtable', 400),
+ (RepositoryStateResource, 'PUT', {'repository': 'devtable/simple'}, None, 'freshuser', 403),
+ (RepositoryStateResource, 'PUT', {'repository': 'devtable/simple'}, None, 'reader', 403),
+]
+
+@pytest.mark.parametrize('resource,method,params,body,identity,expected', SECURITY_TESTS)
+def test_api_security(resource, method, params, body, identity, expected, client):
+ with client_with_identity(identity, client) as cl:
+ conduct_api_call(cl, resource, method, params, body, expected)
+
+
+ALLOWED_MISSING_MODULES = {'endpoints.api.suconfig', 'endpoints.api.error', 'data.userfiles'}
+
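+# Ensure every API view/method pair registered on the app's URL map has a matching
+# entry in SECURITY_TESTS above; modules listed in ALLOWED_MISSING_MODULES are exempt.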
+def test_all_apis_tested(app):
+ required_tests = set()
+
+ for rule in app.url_map.iter_rules():
+ endpoint_method = app.view_functions[rule.endpoint]
+
+ # Verify that we have a view class for this API method.
+    if 'view_class' not in dir(endpoint_method):
+ continue
+
+ view_class = endpoint_method.view_class
+ if view_class.__module__ in ALLOWED_MISSING_MODULES:
+ continue
+
+ method_names = list(rule.methods.difference(['HEAD', 'OPTIONS']))
+ full_name = '%s.%s' % (view_class.__module__, view_class.__name__)
+ for method_name in method_names:
+ required_tests.add('%s::%s' % (full_name, method_name.upper()))
+
+ assert required_tests
+
+ for test in SECURITY_TESTS:
+ view_class = test[0]
+ required_tests.discard('%s.%s::%s' % (view_class.__module__, view_class.__name__,
+ test[1].upper()))
+
+ assert not required_tests, "API security tests missing for: %s" % required_tests
+
+
+@pytest.mark.parametrize('is_superuser', [True, False])
+@pytest.mark.parametrize('allow_nonsuperuser', [True, False])
+@pytest.mark.parametrize('method, expected', [
+ ('POST', 400),
+ ('DELETE', 200),
+])
+def test_team_sync_security(is_superuser, allow_nonsuperuser, method, expected, client):
+ def is_superuser_method(_):
+ return is_superuser
+
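+  # Sync setup/removal is permitted for superusers, or for admins when the
+  # NONSUPERUSER_TEAM_SYNCING_SETUP feature is enabled; everyone else gets a 403.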
+ with patch('auth.permissions.superusers.is_superuser', is_superuser_method):
+ with toggle_feature('NONSUPERUSER_TEAM_SYNCING_SETUP', allow_nonsuperuser):
+ with client_with_identity('devtable', client) as cl:
+ expect_success = is_superuser or allow_nonsuperuser
+ expected_status = expected if expect_success else 403
+ conduct_api_call(cl, OrganizationTeamSyncing, method, TEAM_PARAMS, {}, expected_status)
diff --git a/endpoints/api/test/test_signing.py b/endpoints/api/test/test_signing.py
new file mode 100644
index 000000000..e941cee56
--- /dev/null
+++ b/endpoints/api/test/test_signing.py
@@ -0,0 +1,55 @@
+import pytest
+
+from collections import Counter
+from mock import patch
+
+from endpoints.api.test.shared import conduct_api_call
+from endpoints.api.signing import RepositorySignatures
+from endpoints.test.shared import client_with_identity
+
+from test.fixtures import *
+
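+# Sample TUF targets metadata keyed by delegation role, shaped like the return value
+# of tuf_metadata_api.get_all_tags_with_expiration (mocked in the test below).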
+VALID_TARGETS_MAP = {
+ "targets/ci": {
+ "targets": {
+ "latest": {
+ "hashes": {
+ "sha256": "2Q8GLEgX62VBWeL76axFuDj/Z1dd6Zhx0ZDM6kNwPkQ="
+ },
+ "length": 2111
+ }
+ },
+ "expiration": "2020-05-22T10:26:46.618176424-04:00"
+ },
+ "targets": {
+ "targets": {
+ "latest": {
+ "hashes": {
+ "sha256": "2Q8GLEgX62VBWeL76axFuDj/Z1dd6Zhx0ZDM6kNwPkQ="
+ },
+ "length": 2111
+ }
+ },
+ "expiration": "2020-05-22T10:26:01.953414888-04:00"}
+ }
+
+
+def tags_equal(expected, actual):
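+  # Compare the 'delegations' payloads when both responses carry them;
+  # otherwise fall back to strict equality on the full response.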
+ expected_tags = expected.get('delegations')
+ actual_tags = actual.get('delegations')
+ if expected_tags and actual_tags:
+ return Counter(expected_tags) == Counter(actual_tags)
+ return expected == actual
+
+@pytest.mark.parametrize('targets_map,expected', [
+ (VALID_TARGETS_MAP, {'delegations': VALID_TARGETS_MAP}),
+ ({'bad': 'tags'}, {'delegations': {'bad': 'tags'}}),
+ ({}, {'delegations': {}}),
+ (None, {'delegations': None}), # API returns None on exceptions
+])
+def test_get_signatures(targets_map, expected, client):
+ with patch('endpoints.api.signing.tuf_metadata_api') as mock_tuf:
+ mock_tuf.get_all_tags_with_expiration.return_value = targets_map
+ with client_with_identity('devtable', client) as cl:
+ params = {'repository': 'devtable/trusted'}
+      result = conduct_api_call(cl, RepositorySignatures, 'GET', params, None, 200).json
+      assert tags_equal(expected, result)
diff --git a/endpoints/api/test/test_subscribe_models_pre_oci.py b/endpoints/api/test/test_subscribe_models_pre_oci.py
new file mode 100644
index 000000000..8810e36f5
--- /dev/null
+++ b/endpoints/api/test/test_subscribe_models_pre_oci.py
@@ -0,0 +1,43 @@
+import pytest
+from mock import patch
+
+from endpoints.api.subscribe_models_pre_oci import data_model
+
+
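+# These tests verify that the pre-OCI model layer delegates to the underlying
+# data functions with the expected arguments.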
+@pytest.mark.parametrize('username,repo_count', [
+ ('devtable', 3)
+])
+def test_get_private_repo_count(username, repo_count):
+  with patch('endpoints.api.subscribe_models_pre_oci.get_private_repo_count') as mock_get_private_repo_count:
+    mock_get_private_repo_count.return_value = repo_count
+    count = data_model.get_private_repo_count(username)
+
+  mock_get_private_repo_count.assert_called_once_with(username)
+ assert count == repo_count
+
+
+@pytest.mark.parametrize('kind_name,target_username,metadata', [
+ ('over_private_usage', 'devtable', {'namespace': 'devtable'})
+])
+def test_create_unique_notification(kind_name, target_username, metadata):
+ with patch('endpoints.api.subscribe_models_pre_oci.get_user_or_org') as mock_get_user_or_org:
+ mock_get_user_or_org.return_value = {'username': target_username}
+ with patch('endpoints.api.subscribe_models_pre_oci.create_unique_notification') as mock_create_unique_notification:
+ data_model.create_unique_notification(kind_name, target_username, metadata)
+
+ mock_get_user_or_org.assert_called_once_with(target_username)
+ mock_create_unique_notification.assert_called_once_with(kind_name, mock_get_user_or_org.return_value, metadata)
+
+
+@pytest.mark.parametrize('target_username,kind_name', [
+ ('devtable', 'over_private_usage')
+])
+def test_delete_notifications_by_kind(target_username, kind_name):
+ with patch('endpoints.api.subscribe_models_pre_oci.get_user_or_org') as mock_get_user_or_org:
+ mock_get_user_or_org.return_value = {'username': target_username}
+ with patch('endpoints.api.subscribe_models_pre_oci.delete_notifications_by_kind') as mock_delete_notifications_by_kind:
+ data_model.delete_notifications_by_kind(target_username, kind_name)
+
+ mock_get_user_or_org.assert_called_once_with(target_username)
+ mock_delete_notifications_by_kind.assert_called_once_with(mock_get_user_or_org.return_value, kind_name)
+
diff --git a/endpoints/api/test/test_superuser.py b/endpoints/api/test/test_superuser.py
new file mode 100644
index 000000000..46e4bacf3
--- /dev/null
+++ b/endpoints/api/test/test_superuser.py
@@ -0,0 +1,28 @@
+import pytest
+
+from endpoints.api.superuser import SuperUserList, SuperUserManagement
+from endpoints.api.test.shared import conduct_api_call
+from endpoints.test.shared import client_with_identity
+from test.fixtures import *
+
+@pytest.mark.parametrize('disabled', [True, False])
+def test_list_all_users(disabled, client):
+ with client_with_identity('devtable', client) as cl:
+ params = {'disabled': disabled}
+ result = conduct_api_call(cl, SuperUserList, 'GET', params, None, 200).json
+ assert len(result['users'])
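+    # When disabled users are excluded from the listing, every returned user must be enabled.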
+ for user in result['users']:
+ if not disabled:
+ assert user['enabled']
+
+
+def test_change_install_user(client):
+ with client_with_identity('devtable', client) as cl:
+ params = {'username': 'randomuser'}
+ body = {'email': 'new_email123@test.com'}
+ result = conduct_api_call(cl, SuperUserManagement, 'PUT', params, body, 200).json
+
+ assert result['email'] == body['email']
diff --git a/endpoints/api/test/test_tag.py b/endpoints/api/test/test_tag.py
new file mode 100644
index 000000000..54f6df599
--- /dev/null
+++ b/endpoints/api/test/test_tag.py
@@ -0,0 +1,116 @@
+import pytest
+
+from playhouse.test_utils import assert_query_count
+
+from data.registry_model import registry_model
+from data.database import Manifest
+from data import model
+
+from endpoints.api.test.shared import conduct_api_call
+from endpoints.test.shared import client_with_identity
+from endpoints.api.tag import RepositoryTag, RestoreTag, ListRepositoryTags, RepositoryTagImages
+
+from test.fixtures import *
+
+@pytest.mark.parametrize('expiration_time, expected_status', [
+ (None, 201),
+ ('aksdjhasd', 400),
+])
+def test_change_tag_expiration_default(expiration_time, expected_status, client, app):
+ with client_with_identity('devtable', client) as cl:
+ params = {
+ 'repository': 'devtable/simple',
+ 'tag': 'latest',
+ }
+
+ request_body = {
+ 'expiration': expiration_time,
+ }
+
+ conduct_api_call(cl, RepositoryTag, 'put', params, request_body, expected_status)
+
+
+def test_change_tag_expiration(client, app):
+ with client_with_identity('devtable', client) as cl:
+ params = {
+ 'repository': 'devtable/simple',
+ 'tag': 'latest',
+ }
+
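+    # Move the expiration to one day past the tag's start time, then verify it was persisted.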
+ tag = model.tag.get_active_tag('devtable', 'simple', 'latest')
+ updated_expiration = tag.lifetime_start_ts + 60*60*24
+
+ request_body = {
+ 'expiration': updated_expiration,
+ }
+
+ conduct_api_call(cl, RepositoryTag, 'put', params, request_body, 201)
+ tag = model.tag.get_active_tag('devtable', 'simple', 'latest')
+ assert tag.lifetime_end_ts == updated_expiration
+
+
+@pytest.mark.parametrize('image_exists,test_tag,expected_status', [
+ (True, '-INVALID-TAG-NAME', 400),
+ (True, '.INVALID-TAG-NAME', 400),
+ (True,
+ 'INVALID-TAG_NAME-BECAUSE-THIS-IS-WAY-WAY-TOO-LOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOONG',
+ 400),
+ (False, 'newtag', 404),
+ (True, 'generatemanifestfail', None),
+ (True, 'latest', 201),
+ (True, 'newtag', 201),
+])
+def test_move_tag(image_exists, test_tag, expected_status, client, app):
+ with client_with_identity('devtable', client) as cl:
+ test_image = 'unknown'
+ if image_exists:
+ repo_ref = registry_model.lookup_repository('devtable', 'simple')
+ tag_ref = registry_model.get_repo_tag(repo_ref, 'latest', include_legacy_image=True)
+ assert tag_ref
+
+ test_image = tag_ref.legacy_image.docker_image_id
+
+ params = {'repository': 'devtable/simple', 'tag': test_tag}
+ request_body = {'image': test_image}
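+    # A None expected status (the 'generatemanifestfail' tag) means the call itself is expected to raise.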
+ if expected_status is None:
+ with pytest.raises(Exception):
+ conduct_api_call(cl, RepositoryTag, 'put', params, request_body, expected_status)
+ else:
+ conduct_api_call(cl, RepositoryTag, 'put', params, request_body, expected_status)
+
+
+@pytest.mark.parametrize('repo_namespace, repo_name, query_count', [
+ ('devtable', 'simple', 5),
+ ('devtable', 'history', 5),
+ ('devtable', 'complex', 5),
+ ('devtable', 'gargantuan', 5),
+ ('buynlarge', 'orgrepo', 7), # +2 for permissions checks.
+ ('buynlarge', 'anotherorgrepo', 7), # +2 for permissions checks.
+])
+def test_list_repo_tags(repo_namespace, repo_name, client, query_count, app):
+ # Pre-cache media type loads to ensure consistent query count.
+ Manifest.media_type.get_name(1)
+
+ params = {'repository': repo_namespace + '/' + repo_name}
+ with client_with_identity('devtable', client) as cl:
+ with assert_query_count(query_count):
+ tags = conduct_api_call(cl, ListRepositoryTags, 'get', params).json['tags']
+
+ repo_ref = registry_model.lookup_repository(repo_namespace, repo_name)
+ history, _ = registry_model.list_repository_tag_history(repo_ref)
+ assert len(tags) == len(history)
+
+
+@pytest.mark.parametrize('repository, tag, owned, expect_images', [
+ ('devtable/simple', 'prod', False, True),
+ ('devtable/simple', 'prod', True, False),
+ ('devtable/simple', 'latest', False, True),
+ ('devtable/simple', 'latest', True, False),
+
+ ('devtable/complex', 'prod', False, True),
+ ('devtable/complex', 'prod', True, True),
+])
+def test_list_tag_images(repository, tag, owned, expect_images, client, app):
+ with client_with_identity('devtable', client) as cl:
+ params = {'repository': repository, 'tag': tag, 'owned': owned}
+ result = conduct_api_call(cl, RepositoryTagImages, 'get', params, None, 200).json
+ assert bool(result['images']) == expect_images
diff --git a/endpoints/api/test/test_team.py b/endpoints/api/test/test_team.py
new file mode 100644
index 000000000..9a17a36e4
--- /dev/null
+++ b/endpoints/api/test/test_team.py
@@ -0,0 +1,90 @@
+import json
+
+from mock import patch
+
+from data import model
+from endpoints.api import api
+from endpoints.api.test.shared import conduct_api_call
+from endpoints.api.team import OrganizationTeamSyncing, TeamMemberList
+from endpoints.api.organization import Organization
+from endpoints.test.shared import client_with_identity
+
+from test.test_ldap import mock_ldap
+
+from test.fixtures import *
+
+SYNCED_TEAM_PARAMS = {'orgname': 'sellnsmall', 'teamname': 'synced'}
+UNSYNCED_TEAM_PARAMS = {'orgname': 'sellnsmall', 'teamname': 'owners'}
+
+def test_team_syncing(client):
+ with mock_ldap() as ldap:
+ with patch('endpoints.api.team.authentication', ldap):
+ with client_with_identity('devtable', client) as cl:
+ config = {
+ 'group_dn': 'cn=AwesomeFolk',
+ }
+
+ conduct_api_call(cl, OrganizationTeamSyncing, 'POST', UNSYNCED_TEAM_PARAMS, config)
+
+ # Ensure the team is now synced.
+ sync_info = model.team.get_team_sync_information(UNSYNCED_TEAM_PARAMS['orgname'],
+ UNSYNCED_TEAM_PARAMS['teamname'])
+ assert sync_info is not None
+ assert json.loads(sync_info.config) == config
+
+ # Remove the syncing.
+ conduct_api_call(cl, OrganizationTeamSyncing, 'DELETE', UNSYNCED_TEAM_PARAMS, None)
+
+ # Ensure the team is no longer synced.
+ sync_info = model.team.get_team_sync_information(UNSYNCED_TEAM_PARAMS['orgname'],
+ UNSYNCED_TEAM_PARAMS['teamname'])
+ assert sync_info is None
+
+
+def test_team_member_sync_info(client):
+ with mock_ldap() as ldap:
+ with patch('endpoints.api.team.authentication', ldap):
+ # Check for an unsynced team, with superuser.
+ with client_with_identity('devtable', client) as cl:
+ resp = conduct_api_call(cl, TeamMemberList, 'GET', UNSYNCED_TEAM_PARAMS)
+ assert 'can_sync' in resp.json
+ assert resp.json['can_sync']['service'] == 'ldap'
+
+ assert 'synced' not in resp.json
+
+ # Check for an unsynced team, with non-superuser.
+ with client_with_identity('randomuser', client) as cl:
+ resp = conduct_api_call(cl, TeamMemberList, 'GET', UNSYNCED_TEAM_PARAMS)
+ assert 'can_sync' not in resp.json
+ assert 'synced' not in resp.json
+
+ # Check for a synced team, with superuser.
+ with client_with_identity('devtable', client) as cl:
+ resp = conduct_api_call(cl, TeamMemberList, 'GET', SYNCED_TEAM_PARAMS)
+ assert 'can_sync' in resp.json
+ assert resp.json['can_sync']['service'] == 'ldap'
+
+ assert 'synced' in resp.json
+ assert 'last_updated' in resp.json['synced']
+ assert 'group_dn' in resp.json['synced']['config']
+
+ # Check for a synced team, with non-superuser.
+ with client_with_identity('randomuser', client) as cl:
+ resp = conduct_api_call(cl, TeamMemberList, 'GET', SYNCED_TEAM_PARAMS)
+ assert 'can_sync' not in resp.json
+
+ assert 'synced' in resp.json
+ assert 'last_updated' not in resp.json['synced']
+ assert 'config' not in resp.json['synced']
+
+
+def test_organization_teams_sync_bool(client):
+ with mock_ldap() as ldap:
+ with patch('endpoints.api.organization.authentication', ldap):
+ # Ensure synced teams are marked as such in the organization teams list.
+ with client_with_identity('devtable', client) as cl:
+ resp = conduct_api_call(cl, Organization, 'GET', {'orgname': 'sellnsmall'})
+
+ assert not resp.json['teams']['owners']['is_synced']
+
+ assert resp.json['teams']['synced']['is_synced']
diff --git a/endpoints/api/test/test_trigger.py b/endpoints/api/test/test_trigger.py
new file mode 100644
index 000000000..946b34431
--- /dev/null
+++ b/endpoints/api/test/test_trigger.py
@@ -0,0 +1,55 @@
+import pytest
+import json
+
+from data import model
+from endpoints.api.trigger_analyzer import is_parent
+from endpoints.api.trigger import BuildTrigger
+from endpoints.api.test.shared import conduct_api_call
+from endpoints.test.shared import client_with_identity
+from test.fixtures import *
+
+
+@pytest.mark.parametrize('context,dockerfile_path,expected', [
+ ("/", "/a/b", True),
+ ("/a", "/a/b", True),
+ ("/a/b", "/a/b", False),
+ ("/a//", "/a/b", True),
+ ("/a", "/a//b/c", True),
+ ("/a//", "a/b", True),
+ ("/a/b", "a/bc/d", False),
+ ("/d", "/a/b", False),
+ ("/a/b", "/a/b.c", False),
+ ("/a/b", "/a/b/b.c", True),
+ ("", "/a/b.c", False),
+ ("/a/b", "", False),
+ ("", "", False),
+])
+def test_is_parent(context, dockerfile_path, expected):
+ assert is_parent(context, dockerfile_path) == expected
+
+
+def test_enabled_disabled_trigger(app, client):
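+  # Seed the trigger config with a hook_id so the handler reports it as activated;
+  # PUT rejects updates to unactivated triggers.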
+ trigger = model.build.list_build_triggers('devtable', 'building')[0]
+ trigger.config = json.dumps({'hook_id': 'someid'})
+ trigger.save()
+
+ params = {
+ 'repository': 'devtable/building',
+ 'trigger_uuid': trigger.uuid,
+ }
+
+ body = {
+ 'enabled': False,
+ }
+
+ with client_with_identity('devtable', client) as cl:
+ result = conduct_api_call(cl, BuildTrigger, 'PUT', params, body, 200).json
+ assert not result['enabled']
+
+ body = {
+ 'enabled': True,
+ }
+
+ with client_with_identity('devtable', client) as cl:
+ result = conduct_api_call(cl, BuildTrigger, 'PUT', params, body, 200).json
+ assert result['enabled']
diff --git a/endpoints/api/test/test_trigger_analyzer.py b/endpoints/api/test/test_trigger_analyzer.py
new file mode 100644
index 000000000..881bad8a3
--- /dev/null
+++ b/endpoints/api/test/test_trigger_analyzer.py
@@ -0,0 +1,152 @@
+import pytest
+from mock import Mock
+
+from auth import permissions
+from data import model
+from endpoints.api.trigger_analyzer import TriggerAnalyzer
+from util import dockerfileparse
+
+BAD_PATH = "\"server_hostname/\" is not a valid Quay repository path"
+
+EMPTY_CONF = {}
+
+GOOD_CONF = {'context': '/', 'dockerfile_path': '/file'}
+
+BAD_CONF = {'context': 'context', 'dockerfile_path': 'dockerfile_path'}
+
+ONE_ROBOT = {'can_read': False, 'is_robot': True, 'kind': 'user', 'name': 'name'}
+
+DOCKERFILE_NOT_CHILD = 'Dockerfile, dockerfile_path, is not a child of the context, context.'
+
+THE_DOCKERFILE_SPECIFIED = 'Could not parse the Dockerfile specified'
+
+DOCKERFILE_PATH_NOT_FOUND = 'Specified Dockerfile path for the trigger was not found on the main branch. This trigger may fail.'
+
+NO_FROM_LINE = 'No FROM line found in the Dockerfile'
+
+REPO_NOT_FOUND = 'Repository "server_hostname/path/file" referenced by the Dockerfile was not found'
+
+
+@pytest.fixture
+def get_monkeypatch(monkeypatch):
+ return monkeypatch
+
+
+def patch_permissions(monkeypatch, can_read=False):
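+  # Replace the permission class with a stub; "instantiating" it simply returns
+  # the desired can-read boolean.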
+ def can_read_fn(base_namespace, base_repository):
+ return can_read
+
+ monkeypatch.setattr(permissions, 'ReadRepositoryPermission', can_read_fn)
+
+
+def patch_list_namespace_robots(monkeypatch):
+ my_mock = Mock()
+ my_mock.configure_mock(**{'username': 'name'})
+ return_value = [my_mock]
+
+  def return_list_mocks(namespace):
+ return return_value
+
+ monkeypatch.setattr(model.user, 'list_namespace_robots', return_list_mocks)
+ return return_value
+
+
+def patch_get_all_repo_users_transitive(monkeypatch):
+ my_mock = Mock()
+ my_mock.configure_mock(**{'username': 'name'})
+ return_value = [my_mock]
+
+  def return_get_mocks(namespace, image_repository):
+ return return_value
+
+ monkeypatch.setattr(model.user, 'get_all_repo_users_transitive', return_get_mocks)
+ return return_value
+
+
+def patch_parse_dockerfile(monkeypatch, get_base_image):
+ if get_base_image is not None:
+ def return_return_value(content):
+ parse_mock = Mock()
+ parse_mock.configure_mock(**{'get_base_image': get_base_image})
+ return parse_mock
+
+ monkeypatch.setattr(dockerfileparse, "parse_dockerfile", return_return_value)
+ else:
+ def return_return_value(content):
+ return get_base_image
+
+ monkeypatch.setattr(dockerfileparse, "parse_dockerfile", return_return_value)
+
+
+def patch_model_repository_get_repository(monkeypatch, get_repository):
+ if get_repository is not None:
+
+ def mock_get_repository(base_namespace, base_repository):
+ vis_mock = Mock()
+ vis_mock.name = get_repository
+      get_repo_mock = Mock(visibility=vis_mock)
+      return get_repo_mock
+
+ else:
+ def mock_get_repository(base_namespace, base_repository):
+ return None
+
+ monkeypatch.setattr(model.repository, "get_repository", mock_get_repository)
+
+
+def return_none():
+ return None
+
+
+def return_content():
+ return Mock()
+
+
+def return_server_hostname():
+ return "server_hostname/"
+
+
+def return_non_server_hostname():
+ return "slime"
+
+
+def return_path():
+ return "server_hostname/path/file"
+
+
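+# Each case drives TriggerAnalyzer end-to-end. Columns: Dockerfile loader, trigger config,
+# org-admin flag, expected status/message, FROM-line resolver, expected robots, registry
+# hostname, base repo visibility, read permission, and expected namespace/name in the view.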
+@pytest.mark.parametrize(
+ 'handler_fn, config_dict, admin_org_permission, status, message, get_base_image, robots, server_hostname, get_repository, can_read, namespace, name', [
+ (return_none, EMPTY_CONF, False, "warning", DOCKERFILE_PATH_NOT_FOUND, None, [], None, None, False, "namespace", None),
+ (return_none, EMPTY_CONF, True, "warning", DOCKERFILE_PATH_NOT_FOUND, None, [ONE_ROBOT], None, None, False, "namespace", None),
+ (return_content, BAD_CONF, False, "error", THE_DOCKERFILE_SPECIFIED, None, [], None, None, False, "namespace", None),
+ (return_none, EMPTY_CONF, False, "warning", DOCKERFILE_PATH_NOT_FOUND, return_none, [], None, None, False, "namespace", None),
+ (return_none, EMPTY_CONF, True, "warning", DOCKERFILE_PATH_NOT_FOUND, return_none, [ONE_ROBOT], None, None, False, "namespace", None),
+ (return_content, BAD_CONF, False, "error", DOCKERFILE_NOT_CHILD, return_none, [], None, None, False, "namespace", None),
+ (return_content, GOOD_CONF, False, "warning", NO_FROM_LINE, return_none, [], None, None, False, "namespace", None),
+ (return_content, GOOD_CONF, False, "publicbase", None, return_non_server_hostname, [], "server_hostname", None, False, "namespace", None),
+ (return_content, GOOD_CONF, False, "warning", BAD_PATH, return_server_hostname, [], "server_hostname", None, False, "namespace", None),
+ (return_content, GOOD_CONF, False, "error", REPO_NOT_FOUND, return_path, [], "server_hostname", None, False, "namespace", None),
+ (return_content, GOOD_CONF, False, "error", REPO_NOT_FOUND, return_path, [], "server_hostname", "nonpublic", False, "namespace", None),
+ (return_content, GOOD_CONF, False, "requiresrobot", None, return_path, [], "server_hostname", "nonpublic", True, "path", "file"),
+ (return_content, GOOD_CONF, False, "publicbase", None, return_path, [], "server_hostname", "public", True, "path", "file"),
+
+ ])
+def test_trigger_analyzer(handler_fn, config_dict, admin_org_permission, status, message, get_base_image, robots,
+ server_hostname, get_repository, can_read, namespace, name,
+ get_monkeypatch):
+ patch_list_namespace_robots(get_monkeypatch)
+ patch_get_all_repo_users_transitive(get_monkeypatch)
+ patch_parse_dockerfile(get_monkeypatch, get_base_image)
+ patch_model_repository_get_repository(get_monkeypatch, get_repository)
+ patch_permissions(get_monkeypatch, can_read)
+ handler_mock = Mock()
+ handler_mock.configure_mock(**{'load_dockerfile_contents': handler_fn})
+ trigger_analyzer = TriggerAnalyzer(handler_mock, 'namespace', server_hostname, config_dict, admin_org_permission)
+ assert trigger_analyzer.analyze_trigger() == {'namespace': namespace,
+ 'name': name,
+ 'robots': robots,
+ 'status': status,
+ 'message': message,
+ 'is_admin': admin_org_permission}
diff --git a/endpoints/api/test/test_user.py b/endpoints/api/test/test_user.py
new file mode 100644
index 000000000..bf31b0b6d
--- /dev/null
+++ b/endpoints/api/test/test_user.py
@@ -0,0 +1,42 @@
+import pytest
+
+from mock import patch
+
+from endpoints.api.test.shared import conduct_api_call
+from endpoints.api.user import User
+from endpoints.test.shared import client_with_identity
+from features import FeatureNameValue
+
+from test.fixtures import *
+
+
+def test_user_metadata_update(client):
+ with patch('features.USER_METADATA', FeatureNameValue('USER_METADATA', True)):
+ with client_with_identity('devtable', client) as cl:
+ metadata = {
+ 'given_name': 'Quay',
+ 'family_name': 'User',
+ 'location': 'NYC',
+ 'company': 'Red Hat',
+ }
+
+ # Update all user metadata fields.
+ conduct_api_call(cl, User, 'PUT', None, body=metadata)
+
+ # Test that they were successfully updated.
+ user = conduct_api_call(cl, User, 'GET', None).json
+ for field in metadata:
+ assert user.get(field) == metadata.get(field)
+
+ # Now nullify one of the fields, and remove another.
+ metadata['company'] = None
+ location = metadata.pop('location')
+
+ conduct_api_call(cl, User, 'PUT', None, body=metadata)
+
+ user = conduct_api_call(cl, User, 'GET', None).json
+ for field in metadata:
+ assert user.get(field) == metadata.get(field)
+
+ # The location field should be unchanged.
+ assert user.get('location') == location
diff --git a/endpoints/api/trigger.py b/endpoints/api/trigger.py
index c03e9fbd9..fb9f72a48 100644
--- a/endpoints/api/trigger.py
+++ b/endpoints/api/trigger.py
@@ -1,45 +1,52 @@
""" Create, list and manage build triggers. """
-import json
import logging
-
-from urllib import quote
from urlparse import urlunparse
from flask import request, url_for
+from active_migration import ActiveDataMigration, ERTMigrationFlags
from app import app
+from auth.permissions import (UserAdminPermission, AdministerOrganizationPermission,
+ AdministerRepositoryPermission)
from buildtrigger.basehandler import BuildTriggerHandler
-from buildtrigger.triggerutil import (TriggerDeactivationException,
- TriggerActivationException, EmptyRepositoryException,
- RepositoryReadException, TriggerStartException)
+from buildtrigger.triggerutil import TriggerException, EmptyRepositoryException
+from data import model
+from data.fields import DecryptedValue
+from data.model.build import update_build_trigger
from endpoints.api import (RepositoryParamResource, nickname, resource, require_repo_admin,
log_action, request_error, query_param, parse_args, internal_only,
- validate_json_request, api, path_param, abort)
-from endpoints.exception import NotFound, Unauthorized, InvalidRequest
+ validate_json_request, api, path_param, abort,
+ disallow_for_app_repositories, disallow_for_non_normal_repositories)
from endpoints.api.build import build_status_view, trigger_view, RepositoryBuildStatus
-from endpoints.building import start_build, MaximumBuildsQueuedException
-from data import model
-from auth.permissions import (UserAdminPermission, AdministerOrganizationPermission,
- ReadRepositoryPermission, AdministerRepositoryPermission)
+from endpoints.api.trigger_analyzer import TriggerAnalyzer
+from endpoints.building import (start_build, MaximumBuildsQueuedException,
+ BuildTriggerDisabledException)
+from endpoints.exception import NotFound, Unauthorized, InvalidRequest
from util.names import parse_robot_username
-from util.dockerfileparse import parse_dockerfile
-
logger = logging.getLogger(__name__)
def _prepare_webhook_url(scheme, username, password, hostname, path):
- auth_hostname = '%s:%s@%s' % (quote(username), quote(password), hostname)
+ auth_hostname = '%s:%s@%s' % (username, password, hostname)
return urlunparse((scheme, auth_hostname, path, '', '', ''))
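+# Shared helper for the trigger resources below: look up a build trigger by UUID,
+# raising NotFound (404) if it does not exist.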
+def get_trigger(trigger_uuid):
+ try:
+ trigger = model.build.get_build_trigger(trigger_uuid)
+ except model.InvalidBuildTriggerException:
+ raise NotFound()
+ return trigger
+
@resource('/v1/repository//trigger/')
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
class BuildTriggerList(RepositoryParamResource):
""" Resource for listing repository build triggers. """
@require_repo_admin
+ @disallow_for_app_repositories
@nickname('listBuildTriggers')
def get(self, namespace_name, repo_name):
""" List the triggers for the specified repository. """
@@ -54,32 +61,64 @@ class BuildTriggerList(RepositoryParamResource):
@path_param('trigger_uuid', 'The UUID of the build trigger')
class BuildTrigger(RepositoryParamResource):
""" Resource for managing specific build triggers. """
+ schemas = {
+ 'UpdateTrigger': {
+ 'type': 'object',
+ 'description': 'Options for updating a build trigger',
+ 'required': [
+ 'enabled',
+ ],
+ 'properties': {
+ 'enabled': {
+ 'type': 'boolean',
+ 'description': 'Whether the build trigger is enabled',
+ },
+ }
+ },
+ }
@require_repo_admin
+ @disallow_for_app_repositories
@nickname('getBuildTrigger')
def get(self, namespace_name, repo_name, trigger_uuid):
""" Get information for the specified build trigger. """
- try:
- trigger = model.build.get_build_trigger(trigger_uuid)
- except model.InvalidBuildTriggerException:
- raise NotFound()
-
- return trigger_view(trigger, can_admin=True)
+ return trigger_view(get_trigger(trigger_uuid), can_admin=True)
@require_repo_admin
+ @disallow_for_app_repositories
+ @disallow_for_non_normal_repositories
+ @nickname('updateBuildTrigger')
+ @validate_json_request('UpdateTrigger')
+ def put(self, namespace_name, repo_name, trigger_uuid):
+ """ Updates the specified build trigger. """
+ trigger = get_trigger(trigger_uuid)
+
+ handler = BuildTriggerHandler.get_handler(trigger)
+ if not handler.is_active():
+ raise InvalidRequest('Cannot update an unactivated trigger')
+
+ enable = request.get_json()['enabled']
+ model.build.toggle_build_trigger(trigger, enable)
+ log_action('toggle_repo_trigger', namespace_name,
+ {'repo': repo_name, 'trigger_id': trigger_uuid,
+ 'service': trigger.service.name, 'enabled': enable},
+ repo=model.repository.get_repository(namespace_name, repo_name))
+
+ return trigger_view(trigger)
+
+ @require_repo_admin
+ @disallow_for_app_repositories
+ @disallow_for_non_normal_repositories
@nickname('deleteBuildTrigger')
def delete(self, namespace_name, repo_name, trigger_uuid):
""" Delete the specified build trigger. """
- try:
- trigger = model.build.get_build_trigger(trigger_uuid)
- except model.InvalidBuildTriggerException:
- raise NotFound()
+ trigger = get_trigger(trigger_uuid)
handler = BuildTriggerHandler.get_handler(trigger)
if handler.is_active():
try:
handler.deactivate()
- except TriggerDeactivationException as ex:
+ except TriggerException as ex:
# We are just going to eat this error
logger.warning('Trigger deactivation problem: %s', ex)
@@ -110,14 +149,13 @@ class BuildTriggerSubdirs(RepositoryParamResource):
}
@require_repo_admin
+ @disallow_for_app_repositories
+ @disallow_for_non_normal_repositories
@nickname('listBuildTriggerSubdirs')
@validate_json_request('BuildTriggerSubdirRequest')
def post(self, namespace_name, repo_name, trigger_uuid):
""" List the subdirectories available for the specified build trigger and source. """
- try:
- trigger = model.build.get_build_trigger(trigger_uuid)
- except model.InvalidBuildTriggerException:
- raise NotFound()
+ trigger = get_trigger(trigger_uuid)
user_permission = UserAdminPermission(trigger.connected_user.username)
if user_permission.can():
@@ -126,19 +164,25 @@ class BuildTriggerSubdirs(RepositoryParamResource):
try:
subdirs = handler.list_build_subdirs()
+ context_map = {}
+      for subdir in subdirs:
+        context_map = handler.get_parent_directory_mappings(subdir, context_map)
+
return {
- 'subdir': subdirs,
- 'status': 'success'
+ 'dockerfile_paths': ['/' + subdir for subdir in subdirs],
+ 'contextMap': context_map,
+ 'status': 'success',
}
except EmptyRepositoryException as exc:
return {
'status': 'success',
- 'subdir': []
+ 'contextMap': {},
+ 'dockerfile_paths': [],
}
- except RepositoryReadException as exc:
+ except TriggerException as exc:
return {
'status': 'error',
- 'message': exc.message
+ 'message': exc.message,
}
else:
raise Unauthorized()
@@ -170,15 +214,13 @@ class BuildTriggerActivate(RepositoryParamResource):
}
@require_repo_admin
+ @disallow_for_app_repositories
+ @disallow_for_non_normal_repositories
@nickname('activateBuildTrigger')
@validate_json_request('BuildTriggerActivateRequest')
def post(self, namespace_name, repo_name, trigger_uuid):
""" Activate the specified build trigger. """
- try:
- trigger = model.build.get_build_trigger(trigger_uuid)
- except model.InvalidBuildTriggerException:
- raise NotFound()
-
+ trigger = get_trigger(trigger_uuid)
handler = BuildTriggerHandler.get_handler(trigger)
if handler.is_active():
raise InvalidRequest('Trigger config is not sufficient for activation.')
@@ -215,23 +257,25 @@ class BuildTriggerActivate(RepositoryParamResource):
try:
path = url_for('webhooks.build_trigger_webhook', trigger_uuid=trigger.uuid)
authed_url = _prepare_webhook_url(app.config['PREFERRED_URL_SCHEME'],
- '$token', write_token.code,
+ '$token', write_token.get_code(),
app.config['SERVER_HOSTNAME'], path)
handler = BuildTriggerHandler.get_handler(trigger, new_config_dict)
final_config, private_config = handler.activate(authed_url)
if 'private_key' in private_config:
- trigger.private_key = private_config['private_key']
+ trigger.secure_private_key = DecryptedValue(private_config['private_key'])
- except TriggerActivationException as exc:
+ # TODO(remove-unenc): Remove legacy field.
+ if ActiveDataMigration.has_flag(ERTMigrationFlags.WRITE_OLD_FIELDS):
+ trigger.private_key = private_config['private_key']
+
+ except TriggerException as exc:
write_token.delete_instance()
raise request_error(message=exc.message)
# Save the updated config.
- trigger.config = json.dumps(final_config)
- trigger.write_token = write_token
- trigger.save()
+ update_build_trigger(trigger, final_config, write_token=write_token)
# Log the trigger setup.
repo = model.repository.get_repository(namespace_name, repo_name)
@@ -271,120 +315,40 @@ class BuildTriggerAnalyze(RepositoryParamResource):
}
@require_repo_admin
+ @disallow_for_app_repositories
+ @disallow_for_non_normal_repositories
@nickname('analyzeBuildTrigger')
@validate_json_request('BuildTriggerAnalyzeRequest')
def post(self, namespace_name, repo_name, trigger_uuid):
""" Analyze the specified build trigger configuration. """
- try:
- trigger = model.build.get_build_trigger(trigger_uuid)
- except model.InvalidBuildTriggerException:
+ trigger = get_trigger(trigger_uuid)
+
+ if trigger.repository.namespace_user.username != namespace_name:
+ raise NotFound()
+
+ if trigger.repository.name != repo_name:
raise NotFound()
new_config_dict = request.get_json()['config']
handler = BuildTriggerHandler.get_handler(trigger, new_config_dict)
-
+ server_hostname = app.config['SERVER_HOSTNAME']
try:
- # Load the contents of the Dockerfile.
- contents = handler.load_dockerfile_contents()
- if not contents:
- return {
- 'status': 'error',
- 'message': 'Could not read the Dockerfile for the trigger'
- }
-
- # Parse the contents of the Dockerfile.
- parsed = parse_dockerfile(contents)
- if not parsed:
- return {
- 'status': 'error',
- 'message': 'Could not parse the Dockerfile specified'
- }
-
- # Determine the base image (i.e. the FROM) for the Dockerfile.
- base_image = parsed.get_base_image()
- if not base_image:
- return {
- 'status': 'warning',
- 'message': 'No FROM line found in the Dockerfile'
- }
-
- # Check to see if the base image lives in Quay.
- quay_registry_prefix = '%s/' % (app.config['SERVER_HOSTNAME'])
-
- if not base_image.startswith(quay_registry_prefix):
- return {
- 'status': 'publicbase'
- }
-
- # Lookup the repository in Quay.
- result = base_image[len(quay_registry_prefix):].split('/', 2)
- if len(result) != 2:
- return {
- 'status': 'warning',
- 'message': '"%s" is not a valid Quay repository path' % (base_image)
- }
-
- (base_namespace, base_repository) = result
- found_repository = model.repository.get_repository(base_namespace, base_repository)
- if not found_repository:
- return {
- 'status': 'error',
- 'message': 'Repository "%s" referenced by the Dockerfile was not found' % (base_image)
- }
-
- # If the repository is private and the user cannot see that repo, then
- # mark it as not found.
- can_read = ReadRepositoryPermission(base_namespace, base_repository)
- if found_repository.visibility.name != 'public' and not can_read:
- return {
- 'status': 'error',
- 'message': 'Repository "%s" referenced by the Dockerfile was not found' % (base_image)
- }
-
- # Check to see if the repository is public. If not, we suggest the
- # usage of a robot account to conduct the pull.
- read_robots = []
-
- if AdministerOrganizationPermission(base_namespace).can():
- def robot_view(robot):
- return {
- 'name': robot.username,
- 'kind': 'user',
- 'is_robot': True
- }
-
- def is_valid_robot(user):
- # Make sure the user is a robot.
- if not user.robot:
- return False
-
- # Make sure the current user can see/administer the robot.
- (robot_namespace, shortname) = parse_robot_username(user.username)
- return AdministerOrganizationPermission(robot_namespace).can()
-
- repo_users = list(model.user.get_all_repo_users_transitive(base_namespace, base_repository))
- read_robots = [robot_view(user) for user in repo_users if is_valid_robot(user)]
-
- return {
- 'namespace': base_namespace,
- 'name': base_repository,
- 'is_public': found_repository.visibility.name == 'public',
- 'robots': read_robots,
- 'status': 'analyzed'
- }
-
- except RepositoryReadException as rre:
+ trigger_analyzer = TriggerAnalyzer(handler,
+ namespace_name,
+ server_hostname,
+ new_config_dict,
+ AdministerOrganizationPermission(namespace_name).can())
+ return trigger_analyzer.analyze_trigger()
+ except TriggerException as rre:
return {
'status': 'error',
- 'message': rre.message
+ 'message': 'Could not analyze the repository: %s' % rre.message,
}
except NotImplementedError:
return {
'status': 'notimplemented',
}
- raise NotFound()
-
@resource('/v1/repository//trigger//start')
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
@@ -405,7 +369,7 @@ class ActivateBuildTrigger(RepositoryParamResource):
'description': '(Custom Only) If specified, the ref/SHA1 used to checkout a git repository.'
},
'refs': {
- 'type': 'object',
+ 'type': ['object', 'null'],
'description': '(SCM Only) If specified, the ref to build.'
}
},
@@ -414,14 +378,15 @@ class ActivateBuildTrigger(RepositoryParamResource):
}
@require_repo_admin
+ @disallow_for_app_repositories
+ @disallow_for_non_normal_repositories
@nickname('manuallyStartBuildTrigger')
@validate_json_request('RunParameters')
def post(self, namespace_name, repo_name, trigger_uuid):
""" Manually start a build from the specified trigger. """
- try:
- trigger = model.build.get_build_trigger(trigger_uuid)
- except model.InvalidBuildTriggerException:
- raise NotFound()
+ trigger = get_trigger(trigger_uuid)
+ if not trigger.enabled:
+ raise InvalidRequest('Trigger is not enabled.')
handler = BuildTriggerHandler.get_handler(trigger)
if not handler.is_active():
@@ -434,10 +399,12 @@ class ActivateBuildTrigger(RepositoryParamResource):
run_parameters = request.get_json()
prepared = handler.manual_start(run_parameters=run_parameters)
build_request = start_build(repo, prepared, pull_robot_name=pull_robot_name)
- except TriggerStartException as tse:
+ except TriggerException as tse:
raise InvalidRequest(tse.message)
except MaximumBuildsQueuedException:
abort(429, message='Maximum queued build rate exceeded.')
+ except BuildTriggerDisabledException:
+ abort(400, message='Build trigger is disabled')
resp = build_status_view(build_request)
repo_string = '%s/%s' % (namespace_name, repo_name)
@@ -453,7 +420,9 @@ class ActivateBuildTrigger(RepositoryParamResource):
@path_param('trigger_uuid', 'The UUID of the build trigger')
class TriggerBuildList(RepositoryParamResource):
""" Resource to represent builds that were activated from the specified trigger. """
+
@require_repo_admin
+ @disallow_for_app_repositories
@parse_args()
@query_param('limit', 'The maximum number of builds to return', type=int, default=5)
@nickname('listTriggerRecentBuilds')
@@ -468,18 +437,19 @@ class TriggerBuildList(RepositoryParamResource):
FIELD_VALUE_LIMIT = 30
+
@resource('/v1/repository//trigger//fields/')
@internal_only
class BuildTriggerFieldValues(RepositoryParamResource):
""" Custom verb to fetch a values list for a particular field name. """
+
@require_repo_admin
+ @disallow_for_app_repositories
+ @disallow_for_non_normal_repositories
@nickname('listTriggerFieldValues')
def post(self, namespace_name, repo_name, trigger_uuid, field_name):
""" List the field values for a custom run field. """
- try:
- trigger = model.build.get_build_trigger(trigger_uuid)
- except model.InvalidBuildTriggerException:
- raise NotFound()
+ trigger = get_trigger(trigger_uuid)
config = request.get_json() or None
if AdministerRepositoryPermission(namespace_name, repo_name).can():
@@ -502,14 +472,29 @@ class BuildTriggerFieldValues(RepositoryParamResource):
@internal_only
class BuildTriggerSources(RepositoryParamResource):
""" Custom verb to fetch the list of build sources for the trigger config. """
+ schemas = {
+ 'BuildTriggerSourcesRequest': {
+ 'type': 'object',
+ 'description': 'Specifies the namespace under which to fetch sources',
+ 'properties': {
+ 'namespace': {
+ 'type': 'string',
+ 'description': 'The namespace for which to fetch sources'
+ },
+ },
+ }
+ }
+
@require_repo_admin
+ @disallow_for_app_repositories
+ @disallow_for_non_normal_repositories
@nickname('listTriggerBuildSources')
- def get(self, namespace_name, repo_name, trigger_uuid):
+ @validate_json_request('BuildTriggerSourcesRequest')
+ def post(self, namespace_name, repo_name, trigger_uuid):
""" List the build sources for the trigger configuration thus far. """
- try:
- trigger = model.build.get_build_trigger(trigger_uuid)
- except model.InvalidBuildTriggerException:
- raise NotFound()
+ namespace = request.get_json()['namespace']
+
+ trigger = get_trigger(trigger_uuid)
user_permission = UserAdminPermission(trigger.connected_user.username)
if user_permission.can():
@@ -517,9 +502,38 @@ class BuildTriggerSources(RepositoryParamResource):
try:
return {
- 'sources': handler.list_build_sources()
+ 'sources': handler.list_build_sources_for_namespace(namespace)
}
- except RepositoryReadException as rre:
+ except TriggerException as rre:
raise InvalidRequest(rre.message)
else:
raise Unauthorized()
+
+
+@resource('/v1/repository//trigger//namespaces')
+@path_param('repository', 'The full path of the repository. e.g. namespace/name')
+@path_param('trigger_uuid', 'The UUID of the build trigger')
+@internal_only
+class BuildTriggerSourceNamespaces(RepositoryParamResource):
+ """ Custom verb to fetch the list of namespaces (orgs, projects, etc) for the trigger config. """
+
+ @require_repo_admin
+ @disallow_for_app_repositories
+ @nickname('listTriggerBuildSourceNamespaces')
+ def get(self, namespace_name, repo_name, trigger_uuid):
+ """ List the build sources for the trigger configuration thus far. """
+ trigger = get_trigger(trigger_uuid)
+
+ user_permission = UserAdminPermission(trigger.connected_user.username)
+ if user_permission.can():
+ handler = BuildTriggerHandler.get_handler(trigger)
+
+ try:
+ return {
+ 'namespaces': handler.list_build_source_namespaces()
+ }
+ except TriggerException as rre:
+ raise InvalidRequest(rre.message)
+ else:
+ raise Unauthorized()
+
diff --git a/endpoints/api/trigger_analyzer.py b/endpoints/api/trigger_analyzer.py
new file mode 100644
index 000000000..2a29e502e
--- /dev/null
+++ b/endpoints/api/trigger_analyzer.py
@@ -0,0 +1,122 @@
+from os import path
+
+from auth import permissions
+from data import model
+from util import dockerfileparse
+
+
+def is_parent(context, dockerfile_path):
+ """ This checks whether the context is a parent of the dockerfile_path"""
+ if context == "" or dockerfile_path == "":
+ return False
+
+ normalized_context = path.normpath(context)
+  if normalized_context[-1] != path.sep:
+ normalized_context += path.sep
+
+ if normalized_context[0] != path.sep:
+ normalized_context = path.sep + normalized_context
+
+ normalized_subdir = path.normpath(path.dirname(dockerfile_path))
+ if normalized_subdir[0] != path.sep:
+ normalized_subdir = path.sep + normalized_subdir
+
+  if normalized_subdir[-1] != path.sep:
+ normalized_subdir += path.sep
+
+ return normalized_subdir.startswith(normalized_context)
+
+
+class TriggerAnalyzer:
+ """ This analyzes triggers and returns the appropriate trigger and robot view to the frontend. """
+
+ def __init__(self, handler, namespace_name, server_hostname, new_config_dict, admin_org_permission):
+ self.handler = handler
+ self.namespace_name = namespace_name
+ self.server_hostname = server_hostname
+ self.new_config_dict = new_config_dict
+ self.admin_org_permission = admin_org_permission
+
+ def analyze_trigger(self):
+ # Load the contents of the Dockerfile.
+ contents = self.handler.load_dockerfile_contents()
+ if not contents:
+ return self.analyze_view(self.namespace_name, None, 'warning',
+ message='Specified Dockerfile path for the trigger was not found on the main ' +
+ 'branch. This trigger may fail.')
+
+ # Parse the contents of the Dockerfile.
+ parsed = dockerfileparse.parse_dockerfile(contents)
+ if not parsed:
+ return self.analyze_view(self.namespace_name, None, 'error', message='Could not parse the Dockerfile specified')
+
+ # Check whether the dockerfile_path is correct
+ if self.new_config_dict.get('context') and not is_parent(self.new_config_dict.get('context'),
+ self.new_config_dict.get('dockerfile_path')):
+      return self.analyze_view(self.namespace_name, None, 'error',
+                               message='Dockerfile, %s, is not a child of the context, %s.' %
+                               (self.new_config_dict.get('dockerfile_path'),
+                                self.new_config_dict.get('context')))
+
+ # Determine the base image (i.e. the FROM) for the Dockerfile.
+ base_image = parsed.get_base_image()
+ if not base_image:
+ return self.analyze_view(self.namespace_name, None, 'warning', message='No FROM line found in the Dockerfile')
+
+ # Check to see if the base image lives in Quay.
+ quay_registry_prefix = '%s/' % self.server_hostname
+ if not base_image.startswith(quay_registry_prefix):
+ return self.analyze_view(self.namespace_name, None, 'publicbase')
+
+ # Lookup the repository in Quay.
+ result = str(base_image)[len(quay_registry_prefix):].split('/', 2)
+ if len(result) != 2:
+ msg = '"%s" is not a valid Quay repository path' % base_image
+ return self.analyze_view(self.namespace_name, None, 'warning', message=msg)
+
+ (base_namespace, base_repository) = result
+ found_repository = model.repository.get_repository(base_namespace, base_repository)
+ if not found_repository:
+ return self.analyze_view(self.namespace_name, None, 'error',
+ message='Repository "%s" referenced by the Dockerfile was not found' % base_image)
+
+ # If the repository is private and the user cannot see that repo, then
+ # mark it as not found.
+ can_read = permissions.ReadRepositoryPermission(base_namespace, base_repository)
+ if found_repository.visibility.name != 'public' and not can_read.can():
+ return self.analyze_view(self.namespace_name, None, 'error',
+ message='Repository "%s" referenced by the Dockerfile was not found' % base_image)
+
+ if found_repository.visibility.name == 'public':
+ return self.analyze_view(base_namespace, base_repository, 'publicbase')
+
+ return self.analyze_view(base_namespace, base_repository, 'requiresrobot')
+
+ def analyze_view(self, image_namespace, image_repository, status, message=None):
+ # Retrieve the list of robots and mark whether they have read access already.
+ robots = []
+ if self.admin_org_permission:
+ if image_repository is not None:
+ perm_query = model.user.get_all_repo_users_transitive(image_namespace, image_repository)
+ user_ids_with_permission = {user.id for user in perm_query}
+ else:
+ user_ids_with_permission = set()
+
+ def robot_view(robot):
+ return {
+ 'name': robot.username,
+ 'kind': 'user',
+ 'is_robot': True,
+ 'can_read': robot.id in user_ids_with_permission,
+ }
+
+ robots = [robot_view(robot) for robot in model.user.list_namespace_robots(image_namespace)]
+
+ return {
+ 'namespace': image_namespace,
+ 'name': image_repository,
+ 'robots': robots,
+ 'status': status,
+ 'message': message,
+ 'is_admin': self.admin_org_permission,
+ }
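+
+ # Example result (illustrative values):
+ # {'namespace': 'base-ns', 'name': 'base-repo', 'status': 'requiresrobot',
+ # 'message': None, 'is_admin': True,
+ # 'robots': [{'name': 'base-ns+builder', 'kind': 'user', 'is_robot': True, 'can_read': False}]}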
diff --git a/endpoints/api/user.py b/endpoints/api/user.py
index c969926dd..4eabe1088 100644
--- a/endpoints/api/user.py
+++ b/endpoints/api/user.py
@@ -11,27 +11,33 @@ from peewee import IntegrityError
import features
-from app import app, billing as stripe, authentication, avatar, user_analytics, all_queues
+from app import (app, billing as stripe, authentication, avatar, user_analytics, all_queues,
+ oauth_login, namespace_gc_queue, ip_resolver, url_scheme_and_hostname)
+
from auth import scopes
from auth.auth_context import get_authenticated_user
from auth.permissions import (AdministerOrganizationPermission, CreateRepositoryPermission,
UserAdminPermission, UserReadPermission, SuperUserPermission)
from data import model
from data.billing import get_plan
-from data.database import Repository as RepositoryTable, UserPromptTypes
+from data.database import Repository as RepositoryTable
+from data.users.shared import can_create_user
from endpoints.api import (ApiResource, nickname, resource, validate_json_request, request_error,
log_action, internal_only, require_user_admin, parse_args,
query_param, require_scope, format_date, show_if,
require_fresh_login, path_param, define_json_response,
RepositoryParamResource, page_support)
-from endpoints.exception import NotFound, InvalidToken
+from endpoints.exception import NotFound, InvalidToken, InvalidRequest, DownstreamIssue
from endpoints.api.subscribe import subscribe
from endpoints.common import common_login
from endpoints.csrf import generate_csrf_token, OAUTH_CSRF_TOKEN_NAME
-from endpoints.decorators import anon_allowed
+from endpoints.decorators import anon_allowed, readonly_call_allowed
+from oauth.oidc import DiscoveryFailureException
from util.useremails import (send_confirmation_email, send_recovery_email, send_change_email,
send_password_changed, send_org_recovery_email)
from util.names import parse_single_urn
+from util.saas.useranalytics import build_error_callback
+from util.request import get_request_ip
REPOS_PER_PAGE = 100
@@ -75,6 +81,7 @@ def user_view(user, previous_username=None):
'name': o.username,
'avatar': avatar.get_data_for_org(o),
'can_create_repo': CreateRepositoryPermission(o.username).can(),
+ 'public': o.username in app.config.get('PUBLIC_NAMESPACES', []),
}
if user_admin:
@@ -85,7 +92,13 @@ def user_view(user, previous_username=None):
return org_response
- organizations = model.organization.get_user_organizations(user.username)
+ # Retrieve the organizations for the user.
+ organizations = {o.username: o for o in model.organization.get_user_organizations(user.username)}
+
+ # Add any public namespaces.
+ public_namespaces = app.config.get('PUBLIC_NAMESPACES', [])
+ if public_namespaces:
+ organizations.update({ns: model.user.get_namespace_user(ns) for ns in public_namespaces})
def login_view(login):
try:
@@ -118,8 +131,14 @@ def user_view(user, previous_username=None):
'invoice_email': user.invoice_email,
'invoice_email_address': user.invoice_email_address,
'preferred_namespace': not (user.stripe_id is None),
- 'tag_expiration': user.removed_tag_expiration_s,
+ 'tag_expiration_s': user.removed_tag_expiration_s,
'prompts': model.user.get_user_prompts(user),
+ 'company': user.company,
+ 'family_name': user.family_name,
+ 'given_name': user.given_name,
+ 'location': user.location,
+ 'is_free_account': user.stripe_id is None,
+ 'has_password_set': authentication.has_password_set(user.username),
})
analytics_metadata = user_analytics.get_user_analytics_metadata(user)
@@ -133,7 +152,7 @@ def user_view(user, previous_username=None):
user_view_perm = UserReadPermission(user.username)
if user_view_perm.can():
user_response.update({
- 'organizations': [org_view(o, user_admin=user_admin.can()) for o in organizations],
+ 'organizations': [org_view(o, user_admin=user_admin.can()) for o in organizations.values()],
})
@@ -206,10 +225,10 @@ class User(ApiResource):
'type': 'string',
'description': 'The user\'s email address',
},
- 'tag_expiration': {
+ 'tag_expiration_s': {
'type': 'integer',
- 'maximum': 2592000,
'minimum': 0,
+ 'description': 'The number of seconds for tag expiration',
},
'username': {
'type': 'string',
@@ -220,17 +239,21 @@ class User(ApiResource):
'description': 'Custom email address for receiving invoices',
},
'given_name': {
- 'type': 'string',
+ 'type': ['string', 'null'],
'description': 'The optional entered given name for the user',
},
'family_name': {
- 'type': 'string',
+ 'type': ['string', 'null'],
'description': 'The optional entered family name for the user',
},
'company': {
- 'type': 'string',
+ 'type': ['string', 'null'],
'description': 'The optional entered company for the user',
},
+ 'location': {
+ 'type': ['string', 'null'],
+ 'description': 'The optional entered location for the user',
+ },
},
},
'UserView': {
@@ -297,12 +320,12 @@ class User(ApiResource):
@nickname('changeUserDetails')
@internal_only
@validate_json_request('UpdateUser')
- @define_json_response('UserView')
def put(self):
""" Update a users details such as password or email. """
user = get_authenticated_user()
user_data = request.get_json()
previous_username = None
+ headers = None
try:
if 'password' in user_data:
@@ -313,7 +336,9 @@ class User(ApiResource):
model.user.change_password(user, user_data['password'])
# Login again to reset their session cookie.
- common_login(user)
+ success, headers = common_login(user.uuid)
+ if not success:
+ raise request_error(message='Could not perform login action')
if features.MAILING:
send_password_changed(user.username, user.email)
@@ -322,9 +347,9 @@ class User(ApiResource):
logger.debug('Changing invoice_email for user: %s', user.username)
model.user.change_send_invoice_email(user, user_data['invoice_email'])
- if 'tag_expiration' in user_data:
- logger.debug('Changing user tag expiration to: %ss', user_data['tag_expiration'])
- model.user.change_user_tag_expiration(user, user_data['tag_expiration'])
+ if features.CHANGE_TAG_EXPIRATION and 'tag_expiration_s' in user_data:
+ logger.debug('Changing user tag expiration to: %ss', user_data['tag_expiration_s'])
+ model.user.change_user_tag_expiration(user, user_data['tag_expiration_s'])
if ('invoice_email_address' in user_data and
user_data['invoice_email_address'] != user.invoice_email_address):
@@ -339,17 +364,25 @@ class User(ApiResource):
if features.MAILING:
logger.debug('Sending email to change email address for user: %s',
user.username)
- code = model.user.create_confirm_email_code(user, new_email=new_email)
- send_change_email(user.username, user_data['email'], code.code)
+ confirmation_code = model.user.create_confirm_email_code(user, new_email=new_email)
+ send_change_email(user.username, user_data['email'], confirmation_code)
else:
- user_analytics.change_email(user.email, new_email)
+ ua_future = user_analytics.change_email(user.email, new_email)
+ ua_future.add_done_callback(build_error_callback('Change email failed'))
model.user.update_email(user, new_email, auto_verify=not features.MAILING)
- if 'given_name' in user_data or 'family_name' in user_data or 'company' in user_data:
- model.user.update_user_metadata(user, user_data.get('given_name'),
- user_data.get('family_name'), user_data.get('company'))
- user_analytics.change_metadata(user.email, user_data.get('given_name'),
- user_data.get('family_name'), user_data.get('company'))
+ if features.USER_METADATA:
+ metadata = {}
+
+ for field in ('given_name', 'family_name', 'company', 'location'):
+ if field in user_data:
+ metadata[field] = user_data.get(field)
+
+ if metadata:
+ model.user.update_user_metadata(user, metadata)
+
+ ua_mdata_future = user_analytics.change_metadata(user.email, **metadata)
+ ua_mdata_future.add_done_callback(build_error_callback('Change metadata failed'))
# Check for username rename. A username can be renamed if the feature is enabled OR the user
# currently has a confirm_username prompt.
@@ -358,7 +391,8 @@ class User(ApiResource):
new_username = user_data.get('username')
previous_username = user.username
- rename_allowed = features.USER_RENAME or confirm_username
+ rename_allowed = (features.USER_RENAME or
+ (confirm_username and features.USERNAME_CONFIRMATION))
username_changing = new_username and new_username != previous_username
if rename_allowed and username_changing:
@@ -367,14 +401,16 @@ class User(ApiResource):
raise request_error(message='Username is already in use')
user = model.user.change_username(user.id, new_username)
- user_analytics.change_username(user.email, new_username)
+ username_future = user_analytics.change_username(user.email, new_username)
+ username_future.add_done_callback(build_error_callback('Change username failed'))
+
elif confirm_username:
model.user.remove_user_prompt(user, 'confirm_username')
except model.user.InvalidPasswordException, ex:
raise request_error(exception=ex)
- return user_view(user, previous_username=previous_username)
+ return user_view(user, previous_username=previous_username), 200, headers
@show_if(features.USER_CREATION)
@show_if(features.DIRECT_LOGIN)
@@ -388,44 +424,63 @@ class User(ApiResource):
user_data = request.get_json()
+ invite_code = user_data.get('invite_code', '')
+ existing_user = model.user.get_nonrobot_user(user_data['username'])
+ if existing_user:
+ raise request_error(message='The username already exists')
+
+ # Ensure an e-mail address was specified if required.
+ if features.MAILING and not user_data.get('email'):
+ raise request_error(message='Email address is required')
+
+ # If invite-only user creation is turned on and no invite code was sent, return an error.
+ # Technically, this is handled by the can_create_user call below as well, but it makes
+ # a nicer error.
+ if features.INVITE_ONLY_USER_CREATION and not invite_code:
+ raise request_error(message='Cannot create non-invited user')
+
+ # Ensure that this user can be created.
+ blacklisted_domains = app.config.get('BLACKLISTED_EMAIL_DOMAINS', [])
+ if not can_create_user(user_data.get('email'), blacklisted_domains=blacklisted_domains):
+ raise request_error(message='Creation of a user account for this e-mail is disabled; please contact an administrator')
+
# If recaptcha is enabled, then verify the user is a human.
if features.RECAPTCHA:
recaptcha_response = user_data.get('recaptcha_response', '')
result = recaptcha2.verify(app.config['RECAPTCHA_SECRET_KEY'],
recaptcha_response,
- request.remote_addr)
+ get_request_ip())
if not result['success']:
return {
'message': 'Are you a bot? If not, please revalidate the captcha.'
}, 400
- invite_code = user_data.get('invite_code', '')
- existing_user = model.user.get_nonrobot_user(user_data['username'])
- if existing_user:
- raise request_error(message='The username already exists')
-
- if features.MAILING and not user_data.get('email'):
- raise request_error(message='Email address is required')
-
+ is_possible_abuser = ip_resolver.is_ip_possible_threat(get_request_ip())
try:
prompts = model.user.get_default_user_prompts(features)
new_user = model.user.create_user(user_data['username'], user_data['password'],
user_data.get('email'),
auto_verify=not features.MAILING,
email_required=features.MAILING,
+ is_possible_abuser=is_possible_abuser,
prompts=prompts)
email_address_confirmed = handle_invite_code(invite_code, new_user)
if features.MAILING and not email_address_confirmed:
- code = model.user.create_confirm_email_code(new_user)
- send_confirmation_email(new_user.username, new_user.email, code.code)
+ confirmation_code = model.user.create_confirm_email_code(new_user)
+ send_confirmation_email(new_user.username, new_user.email, confirmation_code)
return {
'awaiting_verification': True
}
else:
- common_login(new_user)
- return user_view(new_user)
+ success, headers = common_login(new_user.uuid)
+ if not success:
+ return {
+ 'message': 'Could not log in. Is your account inactive?'
+ }, 403
+
+ return user_view(new_user), 200, headers
except model.user.DataModelException as ex:
raise request_error(exception=ex)
@@ -438,7 +493,7 @@ class User(ApiResource):
if app.config['AUTHENTICATION_TYPE'] != 'Database':
abort(404)
- model.user.delete_user(get_authenticated_user(), all_queues)
+ model.user.mark_namespace_for_deletion(get_authenticated_user(), all_queues, namespace_gc_queue)
return '', 204
@@ -494,6 +549,9 @@ class ClientKey(ApiResource):
@validate_json_request('GenerateClientKey')
def post(self):
""" Return's the user's private client key. """
+ if not authentication.supports_encrypted_credentials:
+ raise NotFound()
+
username = get_authenticated_user().username
password = request.get_json()['password']
(result, error_message) = authentication.confirm_existing_user(username, password)
@@ -516,8 +574,9 @@ def conduct_signin(username_or_email, password, invite_code=None):
if invite_code:
handle_invite_code(invite_code, found_user)
- if common_login(found_user):
- return {'success': True}
+ success, headers = common_login(found_user.uuid)
+ if success:
+ return {'success': True}, 200, headers
else:
needs_email_verification = True
@@ -528,7 +587,7 @@ def conduct_signin(username_or_email, password, invite_code=None):
'needsEmailVerification': needs_email_verification,
'invalidCredentials': invalid_credentials,
'message': error_message
- }, 403
+ }, 403, None
@resource('/v1/user/convert')
@@ -627,6 +686,7 @@ class Signin(ApiResource):
@nickname('signinUser')
@validate_json_request('SigninUser')
@anon_allowed
+ @readonly_call_allowed
def post(self):
""" Sign in the user with the specified credentials. """
signin_data = request.get_json()
@@ -663,6 +723,7 @@ class VerifyUser(ApiResource):
@require_user_admin
@nickname('verifyUser')
@validate_json_request('VerifyUser')
+ @readonly_call_allowed
def post(self):
""" Verifies the signed in the user with the specified credentials. """
signin_data = request.get_json()
@@ -676,8 +737,13 @@ class VerifyUser(ApiResource):
'invalidCredentials': True,
}, 403
- common_login(result)
- return {'success': True}
+ success, headers = common_login(result.uuid)
+ if not success:
+ return {
+ 'message': 'Could not verify user.',
+ }, 403
+
+ return {'success': True}, 200, headers
@resource('/v1/signout')
@@ -687,31 +753,75 @@ class Signout(ApiResource):
@nickname('logout')
def post(self):
""" Request that the current user be signed out. """
- logout_user()
+ # Invalidate all sessions for the user.
+ model.user.invalidate_all_sessions(get_authenticated_user())
+
+ # Clear out the user's identity.
identity_changed.send(app, identity=AnonymousIdentity())
+
+ # Remove the user's session cookie.
+ logout_user()
+
return {'success': True}
-@resource('/v1/externaltoken')
+@resource('/v1/externallogin/')
@internal_only
-class GenerateExternalToken(ApiResource):
- """ Resource for generating a token for external login. """
- @nickname('generateExternalLoginToken')
- def post(self):
- """ Generates a CSRF token explicitly for OIDC/OAuth-associated login. """
- return {'token': generate_csrf_token(OAUTH_CSRF_TOKEN_NAME)}
+class ExternalLoginInformation(ApiResource):
+ """ Resource for both setting a token for external login and returning its authorization
+ url.
+ """
+ schemas = {
+ 'GetLogin': {
+ 'type': 'object',
+ 'description': 'Information required to retrieve an external login URL.',
+ 'required': [
+ 'kind',
+ ],
+ 'properties': {
+ 'kind': {
+ 'type': 'string',
+ 'description': 'The kind of URL',
+ 'enum': ['login', 'attach', 'cli'],
+ },
+ },
+ },
+ }
-@resource('/v1/detachexternal/')
+ @nickname('retrieveExternalLoginAuthorizationUrl')
+ @anon_allowed
+ @readonly_call_allowed
+ @validate_json_request('GetLogin')
+ def post(self, service_id):
+ """ Generates the auth URL and CSRF token explicitly for OIDC/OAuth-associated login. """
+ login_service = oauth_login.get_service(service_id)
+ if login_service is None:
+ raise InvalidRequest()
+
+ csrf_token = generate_csrf_token(OAUTH_CSRF_TOKEN_NAME)
+ kind = request.get_json()['kind']
+ redirect_suffix = '' if kind == 'login' else '/' + kind
+
+ try:
+ login_scopes = login_service.get_login_scopes()
+ auth_url = login_service.get_auth_url(url_scheme_and_hostname, redirect_suffix, csrf_token, login_scopes)
+ return {'auth_url': auth_url}
+ except DiscoveryFailureException as dfe:
+ logger.exception('Could not discover OAuth endpoint information')
+ raise DownstreamIssue(dfe.message)
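+
+ # Illustrative flow (assuming a configured 'github' login service):
+ # POST /v1/externallogin/github with body {"kind": "attach"} returns
+ # {"auth_url": "..."}, an authorization URL carrying the generated CSRF token.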
+
+
+@resource('/v1/detachexternal/')
@show_if(features.DIRECT_LOGIN)
@internal_only
class DetachExternal(ApiResource):
""" Resource for detaching an external login. """
@require_user_admin
@nickname('detachExternalLogin')
- def post(self, servicename):
+ def post(self, service_id):
""" Request that the current user be detached from the external login service. """
- model.user.detach_external_login(get_authenticated_user(), servicename)
+ model.user.detach_external_login(get_authenticated_user(), service_id)
return {'success': True}
@@ -732,6 +842,10 @@ class Recovery(ApiResource):
'type': 'string',
'description': 'The user\'s email address',
},
+ 'recaptcha_response': {
+ 'type': 'string',
+ 'description': 'The (possibly disabled) recaptcha response code used for verification',
+ },
},
},
}
@@ -752,10 +866,26 @@ class Recovery(ApiResource):
return v
- email = request.get_json()['email']
+ recovery_data = request.get_json()
+
+ # If recaptcha is enabled, then verify the user is a human.
+ if features.RECAPTCHA:
+ recaptcha_response = recovery_data.get('recaptcha_response', '')
+ result = recaptcha2.verify(app.config['RECAPTCHA_SECRET_KEY'],
+ recaptcha_response,
+ get_request_ip())
+
+ if not result['success']:
+ return {
+ 'message': 'Are you a bot? If not, please revalidate the captcha.'
+ }, 400
+
+ email = recovery_data['email']
user = model.user.find_user_by_email(email)
if not user:
- raise model.InvalidEmailAddressException('Email address was not found.')
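+ # Respond as if the email was sent, so the endpoint does not reveal
+ # whether an e-mail address is registered.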
+ return {
+ 'status': 'sent',
+ }
if user.organization:
send_org_recovery_email(user, model.organization.get_admin_users(user))
@@ -765,8 +895,8 @@ class Recovery(ApiResource):
'orgname': redact(user.username),
}
- code = model.user.create_reset_password_email_code(email)
- send_recovery_email(email, code.code)
+ confirmation_code = model.user.create_reset_password_email_code(email)
+ send_recovery_email(email, confirmation_code)
return {
'status': 'sent',
}
@@ -988,4 +1118,3 @@ class Users(ApiResource):
abort(404)
return user_view(user)
-
diff --git a/endpoints/appr/__init__.py b/endpoints/appr/__init__.py
new file mode 100644
index 000000000..c998d8a95
--- /dev/null
+++ b/endpoints/appr/__init__.py
@@ -0,0 +1,43 @@
+import logging
+
+from cnr.exception import Forbidden
+from flask import Blueprint
+
+from app import metric_queue
+from auth.permissions import (AdministerRepositoryPermission, ReadRepositoryPermission,
+ ModifyRepositoryPermission)
+from endpoints.appr.decorators import require_repo_permission
+from util.metrics.metricqueue import time_blueprint
+
+
+appr_bp = Blueprint('appr', __name__)
+time_blueprint(appr_bp, metric_queue)
+logger = logging.getLogger(__name__)
+
+
+def _raise_method(repository, scopes):
+ raise Forbidden("Unauthorized access for: %s" % repository,
+ {"package": repository, "scopes": scopes})
+
+
+def _get_reponame_kwargs(*args, **kwargs):
+ return [kwargs['namespace'], kwargs['package_name']]
+
+
+require_app_repo_read = require_repo_permission(ReadRepositoryPermission,
+ scopes=['pull'],
+ allow_public=True,
+ raise_method=_raise_method,
+ get_reponame_method=_get_reponame_kwargs)
+
+require_app_repo_write = require_repo_permission(ModifyRepositoryPermission,
+ scopes=['pull', 'push'],
+ raise_method=_raise_method,
+ get_reponame_method=_get_reponame_kwargs)
+
+require_app_repo_admin = require_repo_permission(AdministerRepositoryPermission,
+ scopes=['pull', 'push'],
+ raise_method=_raise_method,
+ get_reponame_method=_get_reponame_kwargs)
diff --git a/endpoints/appr/cnr_backend.py b/endpoints/appr/cnr_backend.py
new file mode 100644
index 000000000..a9e1b2539
--- /dev/null
+++ b/endpoints/appr/cnr_backend.py
@@ -0,0 +1,177 @@
+import base64
+
+from cnr.exception import raise_package_not_found
+from cnr.models.blob_base import BlobBase
+from cnr.models.channel_base import ChannelBase
+from cnr.models.db_base import CnrDB
+from cnr.models.package_base import PackageBase, manifest_media_type
+
+from flask import request
+from app import storage
+from endpoints.appr.models_cnr import model
+from util.request import get_request_ip
+
+
+class Blob(BlobBase):
+ @classmethod
+ def upload_url(cls, digest):
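+ # Blob paths are sharded by the first two hex characters of the digest,
+ # e.g. a digest 'abc123...' maps to 'cnr/blobs/sha256/ab/abc123...'.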
+ return "cnr/blobs/sha256/%s/%s" % (digest[0:2], digest)
+
+ def save(self, content_media_type):
+ model.store_blob(self, content_media_type)
+
+ @classmethod
+ def delete(cls, package_name, digest):
+ pass
+
+ @classmethod
+ def _fetch_b64blob(cls, package_name, digest):
+ blobpath = cls.upload_url(digest)
+ locations = model.get_blob_locations(digest)
+ if not locations:
+ raise_package_not_found(package_name, digest)
+ return base64.b64encode(storage.get_content(locations, blobpath))
+
+ @classmethod
+ def download_url(cls, package_name, digest):
+ blobpath = cls.upload_url(digest)
+ locations = model.get_blob_locations(digest)
+ if not locations:
+ raise_package_not_found(package_name, digest)
+ return storage.get_direct_download_url(locations, blobpath, get_request_ip())
+
+
+class Channel(ChannelBase):
+ """ CNR Channel model implemented against the Quay data model. """
+
+ def __init__(self, name, package, current=None):
+ super(Channel, self).__init__(name, package, current=current)
+ self._channel_data = None
+
+ def _exists(self):
+ """ Check if the channel is saved already """
+ return model.channel_exists(self.package, self.name)
+
+ @classmethod
+ def get(cls, name, package):
+ chanview = model.fetch_channel(package, name, with_releases=False)
+ return cls(name, package, chanview.current)
+
+ def save(self):
+ model.update_channel(self.package, self.name, self.current)
+
+ def delete(self):
+ model.delete_channel(self.package, self.name)
+
+ @classmethod
+ def all(cls, package_name):
+ return [
+ Channel(c.name, package_name, c.current) for c in model.list_channels(package_name)
+ ]
+
+ @property
+ def _channel(self):
+ if self._channel_data is None:
+ self._channel_data = model.fetch_channel(self.package, self.name)
+ return self._channel_data
+
+ def releases(self):
+ """ Returns the list of versions """
+ return self._channel.releases
+
+ def _add_release(self, release):
+ return model.update_channel(self.package, self.name, release)._asdict()
+
+ def _remove_release(self, release):
+ model.delete_channel(self.package, self.name)
+
+
+class User(object):
+ """ User in CNR models """
+
+ @classmethod
+ def get_user(cls, username, password):
+ """ Returns True if user creds is valid """
+ return model.get_user(username, password)
+
+
+class Package(PackageBase):
+ """ CNR Package model implemented against the Quay data model. """
+
+ @classmethod
+ def _apptuple_to_dict(cls, apptuple):
+ return {
+ 'release': apptuple.release,
+ 'created_at': apptuple.created_at,
+ 'digest': apptuple.manifest.digest,
+ 'mediaType': apptuple.manifest.mediaType,
+ 'package': apptuple.name,
+ 'content': apptuple.manifest.content._asdict()
+ }
+
+ @classmethod
+ def create_repository(cls, package_name, visibility, owner):
+ model.create_application(package_name, visibility, owner)
+
+ @classmethod
+ def exists(cls, package_name):
+ return model.application_exists(package_name)
+
+ @classmethod
+ def all(cls, organization=None, media_type=None, search=None, username=None, **kwargs):
+ return [
+ dict(x._asdict())
+ for x in model.list_applications(namespace=organization, media_type=media_type,
+ search=search, username=username)
+ ]
+
+ @classmethod
+ def _fetch(cls, package_name, release, media_type):
+ data = model.fetch_release(package_name, release, manifest_media_type(media_type))
+ return cls._apptuple_to_dict(data)
+
+ @classmethod
+ def all_releases(cls, package_name, media_type=None):
+ return model.list_releases(package_name, media_type)
+
+ @classmethod
+ def search(cls, query, username=None):
+ return model.basic_search(query, username=username)
+
+ def _save(self, force=False, **kwargs):
+ user = kwargs['user']
+ visibility = kwargs['visibility']
+ model.create_release(self, user, visibility, force)
+
+ @classmethod
+ def _delete(cls, package_name, release, media_type):
+ model.delete_release(package_name, release, manifest_media_type(media_type))
+
+ @classmethod
+ def isdeleted_release(cls, package, release):
+ return model.release_exists(package, release)
+
+ def channels(self, channel_class, iscurrent=True):
+ return [
+ c.name
+ for c in model.list_release_channels(self.package, self.release, active=iscurrent)
+ ]
+
+ @classmethod
+ def manifests(cls, package, release=None):
+ return model.list_manifests(package, release)
+
+ @classmethod
+ def dump_all(cls, blob_cls):
+ raise NotImplementedError
+
+
+class QuayDB(CnrDB):
+ """ Wrapper Class to embed all CNR Models """
+ Channel = Channel
+ Package = Package
+ Blob = Blob
+
+ @classmethod
+ def reset_db(cls, force=False):
+ pass
diff --git a/endpoints/appr/decorators.py b/endpoints/appr/decorators.py
new file mode 100644
index 000000000..8df6a46a9
--- /dev/null
+++ b/endpoints/appr/decorators.py
@@ -0,0 +1,52 @@
+import logging
+
+from functools import wraps
+
+from data import model
+from util.http import abort
+
+
+logger = logging.getLogger(__name__)
+
+
+def _raise_unauthorized(repository, scopes):
+ raise StandardError("Unauthorized access to %s" % repository)
+
+
+def _get_reponame_kwargs(*args, **kwargs):
+ return [kwargs['namespace'], kwargs['package_name']]
+
+
+def disallow_for_image_repository(get_reponame_method=_get_reponame_kwargs):
+ def wrapper(func):
+ @wraps(func)
+ def wrapped(*args, **kwargs):
+ namespace_name, repo_name = get_reponame_method(*args, **kwargs)
+ image_repo = model.repository.get_repository(namespace_name, repo_name, kind_filter='image')
+ if image_repo is not None:
+ logger.debug('Tried to invoke a CNR method on an image repository')
+ abort(405, message='Cannot push an application to an image repository with the same name')
+ return func(*args, **kwargs)
+ return wrapped
+ return wrapper
+
+
+def require_repo_permission(permission_class, scopes=None, allow_public=False,
+ raise_method=_raise_unauthorized,
+ get_reponame_method=_get_reponame_kwargs):
+ def wrapper(func):
+ @wraps(func)
+ @disallow_for_image_repository(get_reponame_method=get_reponame_method)
+ def wrapped(*args, **kwargs):
+ namespace_name, repo_name = get_reponame_method(*args, **kwargs)
+ logger.debug('Checking permission %s for repo: %s/%s', permission_class,
+ namespace_name, repo_name)
+ permission = permission_class(namespace_name, repo_name)
+ if (permission.can() or
+ (allow_public and
+ model.repository.repository_is_public(namespace_name, repo_name))):
+ return func(*args, **kwargs)
+ repository = namespace_name + '/' + repo_name
+ raise_method(repository, scopes)
+ return wrapped
+ return wrapper
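+
+
+# Usage sketch (mirroring endpoints/appr/__init__.py): a read decorator that also
+# admits public repositories can be built as
+# require_app_repo_read = require_repo_permission(ReadRepositoryPermission,
+# scopes=['pull'], allow_public=True)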
diff --git a/endpoints/appr/models_cnr.py b/endpoints/appr/models_cnr.py
new file mode 100644
index 000000000..89216127c
--- /dev/null
+++ b/endpoints/appr/models_cnr.py
@@ -0,0 +1,316 @@
+from datetime import datetime
+
+import cnr.semver
+
+from cnr.exception import raise_package_not_found, raise_channel_not_found, CnrException
+
+import features
+import data.model
+
+from app import storage
+from data import appr_model
+from data.database import Repository, MediaType
+from data.appr_model.models import NEW_MODELS
+from endpoints.appr.models_interface import (
+ ApplicationManifest, ApplicationRelease, ApplicationSummaryView, AppRegistryDataInterface,
+ BlobDescriptor, ChannelView, ChannelReleasesView)
+from util.audit import track_and_log
+from util.morecollections import AttrDict
+
+
+class ReadOnlyException(CnrException):
+ status_code = 405
+ errorcode = "read-only"
+
+
+def _strip_sha256_header(digest):
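+ # e.g. 'sha256:abc123...' -> 'abc123...'; digests without the prefix pass through unchanged.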
+ if digest.startswith('sha256:'):
+ return digest.split('sha256:')[1]
+ return digest
+
+
+def _split_package_name(package):
+ """ Returns the namespace and package-name """
+ return package.split("/")
+
+
+def _join_package_name(ns, name):
+ """ Returns a app-name in the 'namespace/name' format """
+ return "%s/%s" % (ns, name)
+
+
+def _timestamp_to_iso(timestamp, in_ms=True):
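+ # Note: datetime.fromtimestamp() uses the local timezone, so the resulting
+ # ISO string is timezone-dependent.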
+ if in_ms:
+ timestamp = timestamp / 1000
+ return datetime.fromtimestamp(timestamp).isoformat()
+
+
+def _application(package):
+ ns, name = _split_package_name(package)
+ repo = data.model.repository.get_app_repository(ns, name)
+ if repo is None:
+ raise_package_not_found(package)
+ return repo
+
+
+class CNRAppModel(AppRegistryDataInterface):
+ def __init__(self, models_ref, is_readonly):
+ self.models_ref = models_ref
+ self.is_readonly = is_readonly
+
+ def log_action(self, event_name, namespace_name, repo_name=None, analytics_name=None,
+ analytics_sample=1, metadata=None):
+ metadata = {} if metadata is None else metadata
+
+ repo = None
+ if repo_name is not None:
+ db_repo = data.model.repository.get_repository(namespace_name, repo_name,
+ kind_filter='application')
+ repo = AttrDict({
+ 'id': db_repo.id,
+ 'name': db_repo.name,
+ 'namespace_name': db_repo.namespace_user.username,
+ 'is_free_namespace': db_repo.namespace_user.stripe_id is None,
+ })
+ track_and_log(event_name, repo, analytics_name=analytics_name,
+ analytics_sample=analytics_sample, **metadata)
+
+ def list_applications(self, namespace=None, media_type=None, search=None, username=None,
+ with_channels=False):
+ """ Lists all repositories that contain applications, with optional filtering to a specific
+ namespace and/or to those visible to a specific user.
+ """
+
+ views = []
+ for repo in appr_model.package.list_packages_query(self.models_ref, namespace, media_type,
+ search, username=username):
+ tag_set_prefetch = getattr(repo, self.models_ref.tag_set_prefetch_name)
+ releases = [t.name for t in tag_set_prefetch]
+ if not releases:
+ continue
+ available_releases = [
+ str(x) for x in sorted(cnr.semver.versions(releases, False), reverse=True)]
+ channels = None
+ if with_channels:
+ channels = [
+ ChannelView(name=chan.name, current=chan.linked_tag.name)
+ for chan in appr_model.channel.get_repo_channels(repo, self.models_ref)]
+
+ app_name = _join_package_name(repo.namespace_user.username, repo.name)
+ manifests = self.list_manifests(app_name, available_releases[0])
+ view = ApplicationSummaryView(
+ namespace=repo.namespace_user.username,
+ name=app_name,
+ visibility=repo.visibility.name,
+ default=available_releases[0],
+ channels=channels,
+ manifests=manifests,
+ releases=available_releases,
+ updated_at=_timestamp_to_iso(tag_set_prefetch[-1].lifetime_start),
+ created_at=_timestamp_to_iso(tag_set_prefetch[0].lifetime_start),)
+ views.append(view)
+ return views
+
+ def application_is_public(self, package_name):
+ """
+ Returns:
+ * True if the repository is public
+ """
+ namespace, name = _split_package_name(package_name)
+ return data.model.repository.repository_is_public(namespace, name)
+
+ def create_application(self, package_name, visibility, owner):
+ """ Create a new app repository, owner is the user who creates it """
+ if self.is_readonly:
+ raise ReadOnlyException('Currently in read-only mode')
+
+ ns, name = _split_package_name(package_name)
+ data.model.repository.create_repository(ns, name, owner, visibility, 'application')
+
+ def application_exists(self, package_name):
+ """ Create a new app repository, owner is the user who creates it """
+ ns, name = _split_package_name(package_name)
+ return data.model.repository.get_repository(ns, name, kind_filter='application') is not None
+
+ def basic_search(self, query, username=None):
+ """ Returns an array of matching AppRepositories in the format: 'namespace/name'
+ Note:
+ * Only 'public' repositories are returned
+
+ Todo:
+ * Filter results to repositories readable by the user (including visibility)
+ """
+ return [
+ _join_package_name(r.namespace_user.username, r.name)
+ for r in data.model.repository.get_app_search(lookup=query, username=username, limit=50)]
+
+ def list_releases(self, package_name, media_type=None):
+ """ Return the list of all releases of an Application
+ Example:
+ >>> get_app_releases('ant31/rocketchat')
+ ['1.7.1', '1.7.0', '1.7.2']
+
+ Todo:
+ * Paginate
+ """
+ return appr_model.release.get_releases(_application(package_name), self.models_ref, media_type)
+
+ def list_manifests(self, package_name, release=None):
+ """ Returns the list of all manifests of an Application.
+
+ Todo:
+ * Paginate
+ """
+ try:
+ repo = _application(package_name)
+ return list(appr_model.manifest.get_manifest_types(repo, self.models_ref, release))
+ except (Repository.DoesNotExist, self.models_ref.Tag.DoesNotExist):
+ raise_package_not_found(package_name, release)
+
+ def fetch_release(self, package_name, release, media_type):
+ """
+ Retrieves an AppRelease from its repository name and release name
+ """
+ repo = _application(package_name)
+ try:
+ tag, manifest, blob = appr_model.release.get_app_release(repo, release, media_type,
+ self.models_ref)
+ created_at = _timestamp_to_iso(tag.lifetime_start)
+
+ blob_descriptor = BlobDescriptor(digest=_strip_sha256_header(blob.digest),
+ mediaType=blob.media_type.name, size=blob.size, urls=[])
+
+ app_manifest = ApplicationManifest(
+ digest=manifest.digest, mediaType=manifest.media_type.name, content=blob_descriptor)
+
+ app_release = ApplicationRelease(release=tag.name, created_at=created_at, name=package_name,
+ manifest=app_manifest)
+ return app_release
+ except (self.models_ref.Tag.DoesNotExist,
+ self.models_ref.Manifest.DoesNotExist,
+ self.models_ref.Blob.DoesNotExist,
+ Repository.DoesNotExist,
+ MediaType.DoesNotExist):
+ raise_package_not_found(package_name, release, media_type)
+
+ def store_blob(self, cnrblob, content_media_type):
+ if self.is_readonly:
+ raise ReadOnlyException('Currently in read-only mode')
+
+ fp = cnrblob.packager.io_file
+ path = cnrblob.upload_url(cnrblob.digest)
+ locations = storage.preferred_locations
+ storage.stream_write(locations, path, fp, 'application/x-gzip')
+ db_blob = appr_model.blob.get_or_create_blob(cnrblob.digest, cnrblob.size, content_media_type,
+ locations, self.models_ref)
+ return BlobDescriptor(mediaType=content_media_type,
+ digest=_strip_sha256_header(db_blob.digest), size=db_blob.size, urls=[])
+
+ def create_release(self, package, user, visibility, force=False):
+ """ Add an app-release to a repository
+ package is an instance of data.cnr.package.Package
+ """
+ if self.is_readonly:
+ raise ReadOnlyException('Currently in read-only mode')
+
+ manifest = package.manifest()
+ ns, name = package.namespace, package.name
+ repo = data.model.repository.get_or_create_repository(ns, name, user, visibility=visibility,
+ repo_kind='application')
+ tag_name = package.release
+ appr_model.release.create_app_release(repo, tag_name, package.manifest(),
+ manifest['content']['digest'], self.models_ref, force)
+
+ def delete_release(self, package_name, release, media_type):
+ """ Remove/Delete an app-release from an app-repository.
+ It does not delete the entire app-repository, only a single release
+ """
+ if self.is_readonly:
+ raise ReadOnlyException('Currently in read-only mode')
+
+ repo = _application(package_name)
+ try:
+ appr_model.release.delete_app_release(repo, release, media_type, self.models_ref)
+ except (self.models_ref.Channel.DoesNotExist,
+ self.models_ref.Tag.DoesNotExist,
+ MediaType.DoesNotExist):
+ raise_package_not_found(package_name, release, media_type)
+
+ def release_exists(self, package, release):
+ """ Return true if a release with that name already exist or
+ have existed (include deleted ones) """
+ # TODO: Figure out why this isn't implemented.
+
+ def channel_exists(self, package_name, channel_name):
+ """ Returns true if channel exists """
+ repo = _application(package_name)
+ return appr_model.tag.tag_exists(repo, channel_name, self.models_ref, "channel")
+
+ def delete_channel(self, package_name, channel_name):
+ """ Delete an AppChannel
+ Note:
+ It doesn't delete the AppReleases
+ """
+ if self.is_readonly:
+ raise ReadOnlyException('Currently in read-only mode')
+
+ repo = _application(package_name)
+ try:
+ appr_model.channel.delete_channel(repo, channel_name, self.models_ref)
+ except (self.models_ref.Channel.DoesNotExist, self.models_ref.Tag.DoesNotExist):
+ raise_channel_not_found(package_name, channel_name)
+
+ def list_channels(self, package_name):
+ """ Returns all AppChannel for a package """
+ repo = _application(package_name)
+ channels = appr_model.channel.get_repo_channels(repo, self.models_ref)
+ return [ChannelView(name=chan.name, current=chan.linked_tag.name) for chan in channels]
+
+ def fetch_channel(self, package_name, channel_name, with_releases=True):
+ """ Returns an AppChannel """
+ repo = _application(package_name)
+
+ try:
+ channel = appr_model.channel.get_channel(repo, channel_name, self.models_ref)
+ except (self.models_ref.Channel.DoesNotExist, self.models_ref.Tag.DoesNotExist):
+ raise_channel_not_found(package_name, channel_name)
+
+ if with_releases:
+ releases = appr_model.channel.get_channel_releases(repo, channel, self.models_ref)
+ chanview = ChannelReleasesView(
+ current=channel.linked_tag.name, name=channel.name,
+ releases=[channel.linked_tag.name] + [c.name for c in releases])
+ else:
+ chanview = ChannelView(current=channel.linked_tag.name, name=channel.name)
+
+ return chanview
+
+ def list_release_channels(self, package_name, release, active=True):
+ repo = _application(package_name)
+ try:
+ channels = appr_model.channel.get_tag_channels(repo, release, self.models_ref, active=active)
+ return [ChannelView(name=c.name, current=c.linked_tag.name) for c in channels]
+ except (self.models_ref.Channel.DoesNotExist, self.models_ref.Tag.DoesNotExist):
+ raise_package_not_found(package_name, release)
+
+ def update_channel(self, package_name, channel_name, release):
+ """ Append a new release to the AppChannel
+ Returns:
+ A new AppChannel with the release
+ """
+ if self.is_readonly:
+ raise ReadOnlyException('Currently in read-only mode')
+
+ repo = _application(package_name)
+ channel = appr_model.channel.create_or_update_channel(repo, channel_name, release,
+ self.models_ref)
+ return ChannelView(current=channel.linked_tag.name, name=channel.name)
+
+ def get_blob_locations(self, digest):
+ return appr_model.blob.get_blob_locations(digest, self.models_ref)
+
+
+# Phase 3: Read and write from new tables.
+model = CNRAppModel(NEW_MODELS, features.READONLY_APP_REGISTRY)
diff --git a/endpoints/appr/models_interface.py b/endpoints/appr/models_interface.py
new file mode 100644
index 000000000..6ebf949ac
--- /dev/null
+++ b/endpoints/appr/models_interface.py
@@ -0,0 +1,191 @@
+from abc import ABCMeta, abstractmethod
+from collections import namedtuple
+
+from six import add_metaclass
+
+
+class BlobDescriptor(namedtuple('Blob', ['mediaType', 'size', 'digest', 'urls'])):
+ """ BlobDescriptor describes a blob with its mediatype, size and digest.
+ A BlobDescriptor is used to retrieve the actual blob.
+ """
+
+
+class ChannelReleasesView(namedtuple('ChannelReleasesView', ['name', 'current', 'releases'])):
+ """ A channel is a pointer to a Release (current).
+ Releases are the previous tags pointed to by the channel (history).
+ """
+
+
+class ChannelView(namedtuple('ChannelView', ['name', 'current'])):
+ """ A channel is a pointer to a Release (current).
+ """
+
+
+class ApplicationSummaryView(
+ namedtuple('ApplicationSummaryView', [
+ 'name', 'namespace', 'visibility', 'default', 'manifests', 'channels', 'releases',
+ 'updated_at', 'created_at'
+ ])):
+ """ ApplicationSummaryView is an aggregated view of an application repository.
+ """
+
+
+class ApplicationManifest(namedtuple('ApplicationManifest', ['mediaType', 'digest', 'content'])):
+ """ ApplicationManifest embed the BlobDescriptor and some metadata around it.
+ An ApplicationManifest is content-addressable.
+ """
+
+
+class ApplicationRelease(
+ namedtuple('ApplicationRelease', ['release', 'name', 'created_at', 'manifest'])):
+ """ The ApplicationRelease associates an ApplicationManifest to a repository and release.
+ """
+
+
+@add_metaclass(ABCMeta)
+class AppRegistryDataInterface(object):
+ """ Interface that represents all data store interactions required by a App Registry.
+ """
+
+ @abstractmethod
+ def list_applications(self, namespace=None, media_type=None, search=None, username=None,
+ with_channels=False):
+ """ Lists all repositories that contain applications, with optional filtering to a specific
+ namespace and/or to those visible to a specific user.
+
+ Returns: list of ApplicationSummaryView
+ """
+ pass
+
+ @abstractmethod
+ def application_is_public(self, package_name):
+ """
+ Returns true if the application is public
+ """
+ pass
+
+ @abstractmethod
+ def create_application(self, package_name, visibility, owner):
+ """ Create a new app repository, owner is the user who creates it """
+ pass
+
+ @abstractmethod
+ def application_exists(self, package_name):
+ """ Returns true if the application exists """
+ pass
+
+ @abstractmethod
+ def basic_search(self, query, username=None):
+ """ Returns an array of matching application in the format: 'namespace/name'
+ Note:
+ * Only 'public' repositories are returned
+ """
+ pass
+
+ # @TODO: Paginate
+ @abstractmethod
+ def list_releases(self, package_name, media_type=None):
+ """ Returns the list of all releases(names) of an AppRepository
+ Example:
+ >>> get_app_releases('ant31/rocketchat')
+ ['1.7.1', '1.7.0', '1.7.2']
+ """
+ pass
+
+ # @TODO: Paginate
+ @abstractmethod
+ def list_manifests(self, package_name, release=None):
+ """ Returns the list of all available manifests type of an Application across all releases or
+ for a specific one.
+
+ Example:
+ >>> get_app_releases('ant31/rocketchat')
+ ['1.7.1', '1.7.0', '1.7.2']
+ """
+ pass
+
+ @abstractmethod
+ def fetch_release(self, package_name, release, media_type):
+ """
+ Returns an ApplicationRelease
+ """
+ pass
+
+ @abstractmethod
+ def store_blob(self, cnrblob, content_media_type):
+ """
+ Upload the blob content to a storage location and creates a Blob entry in the DB.
+
+ Returns a BlobDescriptor
+ """
+ pass
+
+ @abstractmethod
+ def create_release(self, package, user, visibility, force=False):
+ """ Creates and returns an ApplicationRelease
+ - package is a data.model.Package object
+ - user is the owner of the package
+ - visibility is a string: 'public' or 'private'
+ """
+ pass
+
+ @abstractmethod
+ def release_exists(self, package, release):
+ """ Return true if a release with that name already exist or
+ has existed (including deleted ones)
+ """
+ pass
+
+ @abstractmethod
+ def delete_release(self, package_name, release, media_type):
+ """ Remove/Delete an app-release from an app-repository.
+ It does not delete the entire app-repository, only a single release
+ """
+ pass
+
+ @abstractmethod
+ def list_release_channels(self, package_name, release, active=True):
+ """ Returns a list of Channel that are/was pointing to a release.
+ If active is True, returns only active Channel (lifetime_end not null)
+ """
+ pass
+
+ @abstractmethod
+ def channel_exists(self, package_name, channel_name):
+ """ Returns true if the channel with the given name exists under the matching package """
+ pass
+
+ @abstractmethod
+ def update_channel(self, package_name, channel_name, release):
+ """ Append a new release to the Channel
+ Returns a new Channel with the release as current
+ """
+ pass
+
+ @abstractmethod
+ def delete_channel(self, package_name, channel_name):
+ """ Delete a Channel, it doesn't delete/touch the ApplicationRelease pointed by the channel """
+
+ # @TODO: Paginate
+ @abstractmethod
+ def list_channels(self, package_name):
+ """ Returns all AppChannel for a package """
+ pass
+
+ @abstractmethod
+ def fetch_channel(self, package_name, channel_name, with_releases=True):
+ """ Returns an Channel
+ Raises: ChannelNotFound, PackageNotFound
+ """
+ pass
+
+ @abstractmethod
+ def log_action(self, event_name, namespace_name, repo_name=None, analytics_name=None,
+ analytics_sample=1, **kwargs):
+ """ Logs an action to the audit log. """
+ pass
+
+ @abstractmethod
+ def get_blob_locations(self, digest):
+ """ Returns a list of strings for the locations in which a Blob is present. """
+ pass
diff --git a/endpoints/appr/registry.py b/endpoints/appr/registry.py
new file mode 100644
index 000000000..0b470f878
--- /dev/null
+++ b/endpoints/appr/registry.py
@@ -0,0 +1,318 @@
+import logging
+from base64 import b64encode
+
+import cnr
+from cnr.api.impl import registry as cnr_registry
+from cnr.api.registry import _pull, repo_name
+from cnr.exception import (
+ ChannelNotFound, CnrException, Forbidden, InvalidParams, InvalidRelease, InvalidUsage,
+ PackageAlreadyExists, PackageNotFound, PackageReleaseNotFound, UnableToLockResource,
+ UnauthorizedAccess, Unsupported)
+from flask import jsonify, request
+
+from auth.auth_context import get_authenticated_user
+from auth.credentials import validate_credentials
+from auth.decorators import process_auth
+from auth.permissions import CreateRepositoryPermission, ModifyRepositoryPermission
+from data.logs_model import logs_model
+from endpoints.appr import appr_bp, require_app_repo_read, require_app_repo_write
+from endpoints.appr.cnr_backend import Blob, Channel, Package, User
+from endpoints.appr.decorators import disallow_for_image_repository
+from endpoints.appr.models_cnr import model
+from endpoints.decorators import anon_allowed, anon_protect, check_region_blacklisted
+from util.names import REPOSITORY_NAME_REGEX, TAG_REGEX
+
+logger = logging.getLogger(__name__)
+
+
+@appr_bp.errorhandler(Unsupported)
+@appr_bp.errorhandler(PackageAlreadyExists)
+@appr_bp.errorhandler(InvalidRelease)
+@appr_bp.errorhandler(Forbidden)
+@appr_bp.errorhandler(UnableToLockResource)
+@appr_bp.errorhandler(UnauthorizedAccess)
+@appr_bp.errorhandler(PackageNotFound)
+@appr_bp.errorhandler(PackageReleaseNotFound)
+@appr_bp.errorhandler(CnrException)
+@appr_bp.errorhandler(InvalidUsage)
+@appr_bp.errorhandler(InvalidParams)
+@appr_bp.errorhandler(ChannelNotFound)
+def render_error(error):
+ response = jsonify({"error": error.to_dict()})
+ response.status_code = error.status_code
+ return response
+
+
+@appr_bp.route("/version")
+@anon_allowed
+def version():
+ return jsonify({"cnr-api": cnr.__version__})
+
+
+@appr_bp.route("/api/v1/users/login", methods=['POST'])
+@anon_allowed
+def login():
+ values = request.get_json(force=True, silent=True) or {}
+ username = values.get('user', {}).get('username')
+ password = values.get('user', {}).get('password')
+ if not username or not password:
+ raise InvalidUsage('Missing username or password')
+
+ result, _ = validate_credentials(username, password)
+ if not result.auth_valid:
+ raise UnauthorizedAccess(result.error_message)
+
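+ # The token is HTTP Basic credential material: e.g. credentials 'alice:secret'
+ # (illustrative) yield "basic YWxpY2U6c2VjcmV0".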
+ return jsonify({'token': "basic " + b64encode("%s:%s" % (username, password))})
+
+
+# @TODO: Redirect to S3 url
+@appr_bp.route(
+ "/api/v1/packages///blobs/sha256/",
+ methods=['GET'],
+ strict_slashes=False,)
+@process_auth
+@require_app_repo_read
+@check_region_blacklisted(namespace_name_kwarg='namespace')
+@anon_protect
+def blobs(namespace, package_name, digest):
+ reponame = repo_name(namespace, package_name)
+ data = cnr_registry.pull_blob(reponame, digest, blob_class=Blob)
+ json_format = request.args.get('format', None) == 'json'
+ return _pull(data, json_format=json_format)
+
+
+@appr_bp.route("/api/v1/packages", methods=['GET'], strict_slashes=False)
+@process_auth
+@anon_protect
+def list_packages():
+ namespace = request.args.get('namespace', None)
+ media_type = request.args.get('media_type', None)
+ query = request.args.get('query', None)
+ user = get_authenticated_user()
+ username = None
+ if user:
+ username = user.username
+ result_data = cnr_registry.list_packages(namespace, package_class=Package, search=query,
+ media_type=media_type, username=username)
+ return jsonify(result_data)
+
+
+@appr_bp.route(
+ "/api/v1/packages////",
+ methods=['DELETE'], strict_slashes=False)
+@process_auth
+@require_app_repo_write
+@anon_protect
+def delete_package(namespace, package_name, release, media_type):
+ reponame = repo_name(namespace, package_name)
+ result = cnr_registry.delete_package(reponame, release, media_type, package_class=Package)
+ logs_model.log_action('delete_tag', namespace, repository_name=package_name,
+ metadata={'release': release, 'mediatype': media_type})
+ return jsonify(result)
+
+
+@appr_bp.route(
+ "/api/v1/packages////",
+ methods=['GET'], strict_slashes=False)
+@process_auth
+@require_app_repo_read
+@check_region_blacklisted(namespace_name_kwarg='namespace')
+@anon_protect
+def show_package(namespace, package_name, release, media_type):
+ reponame = repo_name(namespace, package_name)
+ result = cnr_registry.show_package(reponame, release, media_type, channel_class=Channel,
+ package_class=Package)
+ return jsonify(result)
+
+
+@appr_bp.route("/api/v1/packages//", methods=['GET'],
+ strict_slashes=False)
+@process_auth
+@require_app_repo_read
+@anon_protect
+def show_package_releases(namespace, package_name):
+ reponame = repo_name(namespace, package_name)
+ media_type = request.args.get('media_type', None)
+ result = cnr_registry.show_package_releases(reponame, media_type=media_type,
+ package_class=Package)
+ return jsonify(result)
+
+
+@appr_bp.route("/api/v1/packages///",
+ methods=['GET'], strict_slashes=False)
+@process_auth
+@require_app_repo_read
+@anon_protect
+def show_package_release_manifests(namespace, package_name, release):
+ reponame = repo_name(namespace, package_name)
+ result = cnr_registry.show_package_manifests(reponame, release, package_class=Package)
+ return jsonify(result)
+
+
+@appr_bp.route(
+ "/api/v1/packages/////pull",
+ methods=['GET'],
+ strict_slashes=False,)
+@process_auth
+@require_app_repo_read
+@check_region_blacklisted(namespace_name_kwarg='namespace')
+@anon_protect
+def pull(namespace, package_name, release, media_type):
+ logger.debug('Pull of release %s of app repository %s/%s', release, namespace, package_name)
+ reponame = repo_name(namespace, package_name)
+ data = cnr_registry.pull(reponame, release, media_type, Package, blob_class=Blob)
+ logs_model.log_action('pull_repo', namespace, repository_name=package_name,
+ metadata={'release': release, 'mediatype': media_type})
+ json_format = request.args.get('format', None) == 'json'
+ return _pull(data, json_format)
+
+
+@appr_bp.route("/api/v1/packages//", methods=['POST'],
+ strict_slashes=False)
+@disallow_for_image_repository()
+@process_auth
+@anon_protect
+def push(namespace, package_name):
+ reponame = repo_name(namespace, package_name)
+
+ if not REPOSITORY_NAME_REGEX.match(package_name):
+ logger.debug('Found invalid repository name in CNR push: %s', reponame)
+ raise InvalidUsage('invalid repository name: %s' % reponame)
+
+ values = request.get_json(force=True, silent=True) or {}
+ private = values.get('visibility', 'private')
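+
+ # Illustrative push payload (field names per the checks below):
+ # {"release": "1.0.0", "media_type": "helm",
+ # "blob": "<base64-encoded gzipped package>", "visibility": "public"}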
+
+ owner = get_authenticated_user()
+ if not Package.exists(reponame):
+ if not CreateRepositoryPermission(namespace).can():
+ raise Forbidden("Unauthorized access for: %s" % reponame,
+ {"package": reponame,
+ "scopes": ['create']})
+ Package.create_repository(reponame, private, owner)
+ logs_model.log_action('create_repo', namespace, repository_name=package_name)
+
+ if not ModifyRepositoryPermission(namespace, package_name).can():
+ raise Forbidden("Unauthorized access for: %s" % reponame,
+ {"package": reponame,
+ "scopes": ['push']})
+
+ if 'release' not in values:
+ raise InvalidUsage('Missing release')
+
+ if 'media_type' not in values:
+ raise InvalidUsage('Missing media_type')
+
+ if 'blob' not in values:
+ raise InvalidUsage('Missing blob')
+
+ release_version = str(values['release'])
+ media_type = values['media_type']
+ force = request.args.get('force', 'false') == 'true'
+
+ blob = Blob(reponame, values['blob'])
+ app_release = cnr_registry.push(reponame, release_version, media_type, blob, force,
+ package_class=Package, user=owner, visibility=private)
+ logs_model.log_action('push_repo', namespace, repository_name=package_name,
+ metadata={'release': release_version})
+ return jsonify(app_release)
+
+
+@appr_bp.route("/api/v1/packages/search", methods=['GET'], strict_slashes=False)
+@process_auth
+@anon_protect
+def search_packages():
+ query = request.args.get("q")
+ user = get_authenticated_user()
+ username = None
+ if user:
+ username = user.username
+
+ search_results = cnr_registry.search(query, Package, username=username)
+ return jsonify(search_results)
+
+
+# CHANNELS
+@appr_bp.route("/api/v1/packages///channels",
+ methods=['GET'], strict_slashes=False)
+@process_auth
+@require_app_repo_read
+@anon_protect
+def list_channels(namespace, package_name):
+ reponame = repo_name(namespace, package_name)
+ return jsonify(cnr_registry.list_channels(reponame, channel_class=Channel))
+
+
+@appr_bp.route(
+ "/api/v1/packages///channels/",
+ methods=['GET'], strict_slashes=False)
+@process_auth
+@require_app_repo_read
+@anon_protect
+def show_channel(namespace, package_name, channel_name):
+ reponame = repo_name(namespace, package_name)
+ channel = cnr_registry.show_channel(reponame, channel_name, channel_class=Channel)
+ return jsonify(channel)
+
+
+@appr_bp.route(
+ "/api/v1/packages///channels//",
+ methods=['POST'],
+ strict_slashes=False,)
+@process_auth
+@require_app_repo_write
+@anon_protect
+def add_channel_release(namespace, package_name, channel_name, release):
+ _check_channel_name(channel_name, release)
+ reponame = repo_name(namespace, package_name)
+ result = cnr_registry.add_channel_release(reponame, channel_name, release, channel_class=Channel,
+ package_class=Package)
+ logs_model.log_action('create_tag', namespace, repository_name=package_name,
+ metadata={'channel': channel_name, 'release': release})
+ return jsonify(result)
+
+
+def _check_channel_name(channel_name, release=None):
+ if not TAG_REGEX.match(channel_name):
+ logger.debug('Found invalid channel name in CNR add channel release: %s', channel_name)
+ raise InvalidUsage("Found invalid channel name %s" % channel_name,
+ {'name': channel_name,
+ 'release': release})
+
+ if release is not None and not TAG_REGEX.match(release):
+ logger.debug('Found invalid release name in CNR add channel release: %s', release)
+ raise InvalidUsage('Found invalid channel release name %s' % release,
+ {'name': channel_name,
+ 'release': release})
+
+
+@appr_bp.route(
+ "/api/v1/packages///channels//",
+ methods=['DELETE'],
+ strict_slashes=False,)
+@process_auth
+@require_app_repo_write
+@anon_protect
+def delete_channel_release(namespace, package_name, channel_name, release):
+ _check_channel_name(channel_name, release)
+ reponame = repo_name(namespace, package_name)
+ result = cnr_registry.delete_channel_release(reponame, channel_name, release,
+ channel_class=Channel, package_class=Package)
+ logs_model.log_action('delete_tag', namespace, repository_name=package_name,
+ metadata={'channel': channel_name, 'release': release})
+ return jsonify(result)
+
+
+@appr_bp.route(
+  "/api/v1/packages/<string:namespace>/<string:package_name>/channels/<string:channel_name>",
+  methods=['DELETE'],
+  strict_slashes=False)
+@process_auth
+@require_app_repo_write
+@anon_protect
+def delete_channel(namespace, package_name, channel_name):
+ _check_channel_name(channel_name)
+ reponame = repo_name(namespace, package_name)
+ result = cnr_registry.delete_channel(reponame, channel_name, channel_class=Channel)
+ logs_model.log_action('delete_tag', namespace, repository_name=package_name,
+ metadata={'channel': channel_name})
+ return jsonify(result)
diff --git a/endpoints/appr/test/test_api.py b/endpoints/appr/test/test_api.py
new file mode 100644
index 000000000..99af88c2c
--- /dev/null
+++ b/endpoints/appr/test/test_api.py
@@ -0,0 +1,163 @@
+import uuid
+
+import pytest
+
+from cnr.tests.conftest import *
+from cnr.tests.test_apiserver import BaseTestServer
+from cnr.tests.test_models import CnrTestModels
+
+import data.appr_model.blob as appr_blob
+
+from data.database import User
+from data.model import organization, user
+from endpoints.appr import registry # Needed to register the endpoint
+from endpoints.appr.cnr_backend import Channel, Package, QuayDB
+from endpoints.appr.models_cnr import model as appr_app_model
+
+from test.fixtures import *
+
+
+def create_org(namespace, owner):
+ try:
+ User.get(username=namespace)
+ except User.DoesNotExist:
+ organization.create_organization(namespace, "%s@test.com" % str(uuid.uuid1()), owner)
+
+
+class ChannelTest(Channel):
+ @classmethod
+ def dump_all(cls, package_class=None):
+ result = []
+ for repo in appr_app_model.list_applications(with_channels=True):
+ for chan in repo.channels:
+ result.append({'name': chan.name, 'current': chan.current, 'package': repo.name})
+ return result
+
+
+class PackageTest(Package):
+ def _save(self, force, **kwargs):
+ owner = user.get_user('devtable')
+ create_org(self.namespace, owner)
+ super(PackageTest, self)._save(force, user=owner, visibility="public")
+
+ @classmethod
+ def create_repository(cls, package_name, visibility, owner):
+ ns, _ = package_name.split("/")
+ owner = user.get_user('devtable')
+ visibility = "public"
+ create_org(ns, owner)
+ return super(PackageTest, cls).create_repository(package_name, visibility, owner)
+
+ @classmethod
+ def dump_all(cls, blob_cls):
+ result = []
+ for repo in appr_app_model.list_applications(with_channels=True):
+ package_name = repo.name
+ for release in repo.releases:
+ for mtype in cls.manifests(package_name, release):
+ package = appr_app_model.fetch_release(package_name, release, mtype)
+ blob = blob_cls.get(package_name, package.manifest.content.digest)
+ app_data = cls._apptuple_to_dict(package)
+ app_data.pop('digest')
+ app_data['channels'] = [
+ x.name
+ for x in appr_app_model.list_release_channels(package_name, package.release, False)
+ ]
+ app_data['blob'] = blob.b64blob
+ result.append(app_data)
+ return result
+
+
+@pytest.fixture(autouse=True)
+def quaydb(monkeypatch, app):
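+  """ Autouse fixture that points the CNR backend, registry and models at the
+      test Package/Channel classes. """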
+ monkeypatch.setattr('endpoints.appr.cnr_backend.QuayDB.Package', PackageTest)
+ monkeypatch.setattr('endpoints.appr.cnr_backend.Package', PackageTest)
+ monkeypatch.setattr('endpoints.appr.registry.Package', PackageTest)
+ monkeypatch.setattr('cnr.models.Package', PackageTest)
+
+ monkeypatch.setattr('endpoints.appr.cnr_backend.QuayDB.Channel', ChannelTest)
+ monkeypatch.setattr('endpoints.appr.registry.Channel', ChannelTest)
+ monkeypatch.setattr('cnr.models.Channel', ChannelTest)
+
+
+class TestServerQuayDB(BaseTestServer):
+ DB_CLASS = QuayDB
+
+ @property
+ def token(self):
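+    # Pre-encoded HTTP basic auth for the test user ('devtable:password').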
+ return "basic ZGV2dGFibGU6cGFzc3dvcmQ="
+
+ def test_search_package_match(self, db_with_data1, client):
+ """ TODO: search cross namespace and package name """
+ BaseTestServer.test_search_package_match(self, db_with_data1, client)
+
+ def test_list_search_package_match(self, db_with_data1, client):
+ url = self._url_for("api/v1/packages")
+ res = self.Client(client, self.headers()).get(url, params={'query': 'rocketchat'})
+ assert res.status_code == 200
+ assert len(self.json(res)) == 1
+
+ def test_list_search_package_no_match(self, db_with_data1, client):
+ url = self._url_for("api/v1/packages")
+ res = self.Client(client, self.headers()).get(url, params={'query': 'toto'})
+ assert res.status_code == 200
+ assert len(self.json(res)) == 0
+
+ @pytest.mark.xfail
+ def test_push_package_already_exists_force(self, db_with_data1, package_b64blob, client):
+ """ No force push implemented """
+ BaseTestServer.test_push_package_already_exists_force(self, db_with_data1, package_b64blob,
+ client)
+
+ @pytest.mark.xfail
+ def test_delete_channel_release_absent_release(self, db_with_data1, client):
+ BaseTestServer.test_delete_channel_release_absent_release(self, db_with_data1, client)
+
+ @pytest.mark.xfail
+ def test_get_absent_blob(self, newdb, client):
+ pass
+
+
+class TestQuayModels(CnrTestModels):
+ DB_CLASS = QuayDB
+
+ @pytest.mark.xfail
+ def test_channel_delete_releases(self, db_with_data1):
+ """ Can't remove a release from the channel, only delete the channel entirely """
+ CnrTestModels.test_channel_delete_releases(self, db_with_data1)
+
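+  # NOTE: the name below (typo included) must match the upstream CnrTestModels test
+  # for this override to take effect.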
+ @pytest.mark.xfail
+ def test_forbiddeb_db_reset(self, db_class):
+ pass
+
+ @pytest.mark.xfail
+ def test_db_restore(self, newdb, dbdata1):
+ # This will fail as long as CNR tests use a mediatype with v1.
+ pass
+
+ def test_push_same_blob(self, db_with_data1):
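+    """ Pushing identical blob content under a second package should reuse the stored
+        blob row rather than creating a duplicate. """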
+ p = db_with_data1.Package.get("titi/rocketchat", ">1.2", 'kpm')
+ assert p.package == "titi/rocketchat"
+ assert p.release == "2.0.1"
+ assert p.digest == "d3b54b7912fe770a61b59ab612a442eac52a8a5d8d05dbe92bf8f212d68aaa80"
+ blob = db_with_data1.Blob.get("titi/rocketchat", p.digest)
+ bdb = appr_blob.get_blob(p.digest, appr_app_model.models_ref)
+ newblob = db_with_data1.Blob("titi/app2", blob.b64blob)
+ p2 = db_with_data1.Package("titi/app2", "1.0.0", "helm", newblob)
+ p2.save()
+ b2db = appr_blob.get_blob(p2.digest, appr_app_model.models_ref)
+ assert b2db.id == bdb.id
+
+ def test_force_push_different_blob(self, db_with_data1):
+ p = db_with_data1.Package.get("titi/rocketchat", "2.0.1", 'kpm')
+ assert p.package == "titi/rocketchat"
+ assert p.release == "2.0.1"
+ assert p.digest == "d3b54b7912fe770a61b59ab612a442eac52a8a5d8d05dbe92bf8f212d68aaa80"
+ blob = db_with_data1.Blob.get(
+ "titi/rocketchat", "72ed15c9a65961ecd034cca098ec18eb99002cd402824aae8a674a8ae41bd0ef")
+ p2 = db_with_data1.Package("titi/rocketchat", "2.0.1", "kpm", blob)
+ p2.save(force=True)
+ pnew = db_with_data1.Package.get("titi/rocketchat", "2.0.1", 'kpm')
+ assert pnew.package == "titi/rocketchat"
+ assert pnew.release == "2.0.1"
+ assert pnew.digest == "72ed15c9a65961ecd034cca098ec18eb99002cd402824aae8a674a8ae41bd0ef"
diff --git a/endpoints/appr/test/test_api_security.py b/endpoints/appr/test/test_api_security.py
new file mode 100644
index 000000000..c3e52b30c
--- /dev/null
+++ b/endpoints/appr/test/test_api_security.py
@@ -0,0 +1,97 @@
+import base64
+import pytest
+
+from flask import url_for
+
+from data import model
+from endpoints.appr.registry import appr_bp, blobs
+from endpoints.test.shared import client_with_identity
+from test.fixtures import *
+
+BLOB_ARGS = {'digest': 'abcd1235'}
+PACKAGE_ARGS = {'release': 'r', 'media_type': 'foo'}
+RELEASE_ARGS = {'release': 'r'}
+CHANNEL_ARGS = {'channel_name': 'c'}
+CHANNEL_RELEASE_ARGS = {'channel_name': 'c', 'release': 'r'}
+
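+# Each row below: (endpoint name, HTTP method, extra URL params, repo owner,
+# whether the repo is public, acting identity, expected HTTP status).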
+@pytest.mark.parametrize('resource,method,params,owned_by,is_public,identity,expected', [
+ ('appr.blobs', 'GET', BLOB_ARGS, 'devtable', False, 'public', 403),
+ ('appr.blobs', 'GET', BLOB_ARGS, 'devtable', False, 'devtable', 404),
+ ('appr.blobs', 'GET', BLOB_ARGS, 'devtable', True, 'public', 404),
+ ('appr.blobs', 'GET', BLOB_ARGS, 'devtable', True, 'devtable', 404),
+
+ ('appr.delete_package', 'DELETE', PACKAGE_ARGS, 'devtable', False, 'public', 403),
+ ('appr.delete_package', 'DELETE', PACKAGE_ARGS, 'devtable', False, 'devtable', 404),
+ ('appr.delete_package', 'DELETE', PACKAGE_ARGS, 'devtable', True, 'public', 403),
+ ('appr.delete_package', 'DELETE', PACKAGE_ARGS, 'devtable', True, 'devtable', 404),
+
+ ('appr.show_package', 'GET', PACKAGE_ARGS, 'devtable', False, 'public', 403),
+ ('appr.show_package', 'GET', PACKAGE_ARGS, 'devtable', False, 'devtable', 404),
+ ('appr.show_package', 'GET', PACKAGE_ARGS, 'devtable', True, 'public', 404),
+ ('appr.show_package', 'GET', PACKAGE_ARGS, 'devtable', True, 'devtable', 404),
+
+ ('appr.show_package_releases', 'GET', {}, 'devtable', False, 'public', 403),
+ ('appr.show_package_releases', 'GET', {}, 'devtable', False, 'devtable', 200),
+ ('appr.show_package_releases', 'GET', {}, 'devtable', True, 'public', 200),
+ ('appr.show_package_releases', 'GET', {}, 'devtable', True, 'devtable', 200),
+
+ ('appr.show_package_release_manifests', 'GET', RELEASE_ARGS, 'devtable', False, 'public', 403),
+ ('appr.show_package_release_manifests', 'GET', RELEASE_ARGS, 'devtable', False, 'devtable', 200),
+ ('appr.show_package_release_manifests', 'GET', RELEASE_ARGS, 'devtable', True, 'public', 200),
+ ('appr.show_package_release_manifests', 'GET', RELEASE_ARGS, 'devtable', True, 'devtable', 200),
+
+ ('appr.pull', 'GET', PACKAGE_ARGS, 'devtable', False, 'public', 403),
+ ('appr.pull', 'GET', PACKAGE_ARGS, 'devtable', False, 'devtable', 404),
+ ('appr.pull', 'GET', PACKAGE_ARGS, 'devtable', True, 'public', 404),
+ ('appr.pull', 'GET', PACKAGE_ARGS, 'devtable', True, 'devtable', 404),
+
+ ('appr.push', 'POST', {}, 'devtable', False, 'public', 403),
+ ('appr.push', 'POST', {}, 'devtable', False, 'devtable', 400),
+ ('appr.push', 'POST', {}, 'devtable', True, 'public', 403),
+ ('appr.push', 'POST', {}, 'devtable', True, 'devtable', 400),
+
+ ('appr.list_channels', 'GET', {}, 'devtable', False, 'public', 403),
+ ('appr.list_channels', 'GET', {}, 'devtable', False, 'devtable', 200),
+ ('appr.list_channels', 'GET', {}, 'devtable', True, 'public', 200),
+ ('appr.list_channels', 'GET', {}, 'devtable', True, 'devtable', 200),
+
+ ('appr.show_channel', 'GET', CHANNEL_ARGS, 'devtable', False, 'public', 403),
+ ('appr.show_channel', 'GET', CHANNEL_ARGS, 'devtable', False, 'devtable', 404),
+ ('appr.show_channel', 'GET', CHANNEL_ARGS, 'devtable', True, 'public', 404),
+ ('appr.show_channel', 'GET', CHANNEL_ARGS, 'devtable', True, 'devtable', 404),
+
+ ('appr.delete_channel', 'DELETE', CHANNEL_ARGS, 'devtable', False, 'public', 403),
+ ('appr.delete_channel', 'DELETE', CHANNEL_ARGS, 'devtable', False, 'devtable', 404),
+ ('appr.delete_channel', 'DELETE', CHANNEL_ARGS, 'devtable', True, 'public', 403),
+ ('appr.delete_channel', 'DELETE', CHANNEL_ARGS, 'devtable', True, 'devtable', 404),
+
+ ('appr.add_channel_release', 'POST', CHANNEL_RELEASE_ARGS, 'devtable', False, 'public', 403),
+ ('appr.add_channel_release', 'POST', CHANNEL_RELEASE_ARGS, 'devtable', False, 'devtable', 404),
+ ('appr.add_channel_release', 'POST', CHANNEL_RELEASE_ARGS, 'devtable', True, 'public', 403),
+ ('appr.add_channel_release', 'POST', CHANNEL_RELEASE_ARGS, 'devtable', True, 'devtable', 404),
+
+ ('appr.delete_channel_release', 'DELETE', CHANNEL_RELEASE_ARGS, 'devtable', False, 'public', 403),
+ ('appr.delete_channel_release', 'DELETE', CHANNEL_RELEASE_ARGS, 'devtable', False, 'devtable', 404),
+ ('appr.delete_channel_release', 'DELETE', CHANNEL_RELEASE_ARGS, 'devtable', True, 'public', 403),
+ ('appr.delete_channel_release', 'DELETE', CHANNEL_RELEASE_ARGS, 'devtable', True, 'devtable', 404),
+])
+def test_api_security(resource, method, params, owned_by, is_public, identity, expected, app, client):
+ app.register_blueprint(appr_bp, url_prefix='/cnr')
+
+ with client_with_identity(identity, client) as cl:
+ owner = model.user.get_user(owned_by)
+ visibility = 'public' if is_public else 'private'
+ model.repository.create_repository(owned_by, 'someapprepo', owner, visibility=visibility,
+ repo_kind='application')
+
+ params['namespace'] = owned_by
+ params['package_name'] = 'someapprepo'
+ params['_csrf_token'] = '123csrfforme'
+
+ url = url_for(resource, **params)
+ headers = {}
+ if identity is not None:
+ headers['authorization'] = 'basic ' + base64.b64encode('%s:password' % identity)
+
+ rv = cl.open(url, headers=headers, method=method)
+ assert rv.status_code == expected
diff --git a/endpoints/appr/test/test_appr_decorators.py b/endpoints/appr/test/test_appr_decorators.py
new file mode 100644
index 000000000..77519d6bd
--- /dev/null
+++ b/endpoints/appr/test/test_appr_decorators.py
@@ -0,0 +1,20 @@
+import pytest
+
+from werkzeug.exceptions import HTTPException
+
+from data import model
+from endpoints.appr import require_app_repo_read
+
+from test.fixtures import *
+
+def test_require_app_repo_read(app):
+ called = [False]
+
+ # Ensure that trying to read an *image* repository fails.
+ @require_app_repo_read
+ def empty(**kwargs):
+ called[0] = True
+
+ with pytest.raises(HTTPException):
+ empty(namespace='devtable', package_name='simple')
+ assert not called[0]
diff --git a/endpoints/appr/test/test_digest_prefix.py b/endpoints/appr/test/test_digest_prefix.py
new file mode 100644
index 000000000..089becd43
--- /dev/null
+++ b/endpoints/appr/test/test_digest_prefix.py
@@ -0,0 +1,11 @@
+import pytest
+from endpoints.appr.models_cnr import _strip_sha256_header
+
+
+@pytest.mark.parametrize('digest,expected', [
+ ('sha256:251b6897608fb18b8a91ac9abac686e2e95245d5a041f2d1e78fe7a815e6480a',
+ '251b6897608fb18b8a91ac9abac686e2e95245d5a041f2d1e78fe7a815e6480a'),
+ ('251b6897608fb18b8a91ac9abac686e2e95245d5a041f2d1e78fe7a815e6480a',
+ '251b6897608fb18b8a91ac9abac686e2e95245d5a041f2d1e78fe7a815e6480a'),])
+def test_strip_sha256(digest, expected):
+ assert _strip_sha256_header(digest) == expected
diff --git a/endpoints/appr/test/test_registry.py b/endpoints/appr/test/test_registry.py
new file mode 100644
index 000000000..bd6602675
--- /dev/null
+++ b/endpoints/appr/test/test_registry.py
@@ -0,0 +1,92 @@
+import base64
+import json
+
+from mock import patch
+
+import pytest
+
+from flask import url_for
+
+from data import model
+from endpoints.appr.registry import appr_bp
+
+from test.fixtures import *
+
+
+@pytest.mark.parametrize('login_data, expected_code', [
+ ({
+ "username": "devtable",
+ "password": "password"
+ }, 200),
+ ({
+ "username": "devtable",
+ "password": "badpass"
+ }, 401),
+ ({
+ "username": "devtable+dtrobot",
+ "password": "badpass"
+ }, 401),
+ ({
+ "username": "devtable+dtrobot2",
+ "password": "badpass"
+ }, 401),
+])
+def test_login(login_data, expected_code, app, client):
+ if "+" in login_data['username'] and login_data['password'] is None:
+ username, robotname = login_data['username'].split("+")
+ _, login_data['password'] = model.user.create_robot(robotname, model.user.get_user(username))
+
+ url = url_for('appr.login')
+ headers = {'Content-Type': 'application/json'}
+ data = {'user': login_data}
+
+ rv = client.open(url, method='POST', data=json.dumps(data), headers=headers)
+ assert rv.status_code == expected_code
+
+
+@pytest.mark.parametrize('release_name', [
+ '1.0',
+ '1',
+ 1,
+])
+def test_invalid_release_name(release_name, app, client):
+ params = {
+ 'namespace': 'devtable',
+ 'package_name': 'someapprepo',
+ }
+
+ url = url_for('appr.push', **params)
+ auth = base64.b64encode('devtable:password')
+ headers = {'Content-Type': 'application/json', 'Authorization': 'Basic ' + auth}
+ data = {
+ 'release': release_name,
+ 'media_type': 'application/vnd.cnr.manifest.v1+json',
+ 'blob': 'H4sIAFQwWVoAA+3PMQrCQBAF0Bxlb+Bk143nETGIIEoSC29vMMFOu3TvNb/5DH/Ot8f02jWbiohDremT3ZKR90uuUlty7nKJNmqKtkQuTarbzlo8x+k4zFOu4+lyH4afvbnW93/urH98EwAAAAAAAAAAADb0BsdwExIAKAAA',
+ }
+
+ rv = client.open(url, method='POST', data=json.dumps(data), headers=headers)
+ assert rv.status_code == 422
+
+
+@pytest.mark.parametrize('readonly, expected_status', [
+ (True, 405),
+ (False, 422),
+])
+def test_readonly(readonly, expected_status, app, client):
+ params = {
+ 'namespace': 'devtable',
+ 'package_name': 'someapprepo',
+ }
+
+ url = url_for('appr.push', **params)
+ auth = base64.b64encode('devtable:password')
+ headers = {'Content-Type': 'application/json', 'Authorization': 'Basic ' + auth}
+ data = {
+ 'release': '1.0',
+ 'media_type': 'application/vnd.cnr.manifest.v0+json',
+ 'blob': 'H4sIAFQwWVoAA+3PMQrCQBAF0Bxlb+Bk143nETGIIEoSC29vMMFOu3TvNb/5DH/Ot8f02jWbiohDremT3ZKR90uuUlty7nKJNmqKtkQuTarbzlo8x+k4zFOu4+lyH4afvbnW93/urH98EwAAAAAAAAAAADb0BsdwExIAKAAA',
+ }
+
+ with patch('endpoints.appr.models_cnr.model.is_readonly', readonly):
+ rv = client.open(url, method='POST', data=json.dumps(data), headers=headers)
+ assert rv.status_code == expected_status
diff --git a/endpoints/bitbuckettrigger.py b/endpoints/bitbuckettrigger.py
index 8c8052235..7e521c10d 100644
--- a/endpoints/bitbuckettrigger.py
+++ b/endpoints/bitbuckettrigger.py
@@ -4,11 +4,11 @@ from flask import request, redirect, url_for, Blueprint
from flask_login import current_user
from app import app
-from auth.process import require_session_login
+from auth.decorators import require_session_login
from buildtrigger.basehandler import BuildTriggerHandler
from buildtrigger.bitbuckethandler import BitbucketBuildTrigger
from data import model
-from endpoints.common import route_show_if
+from endpoints.decorators import route_show_if
from util.http import abort
import features
@@ -40,8 +40,7 @@ def attach_bitbucket_build_trigger(trigger_uuid):
repository = trigger.repository.name
repo_path = '%s/%s' % (namespace, repository)
- full_url = '%s%s%s' % (url_for('web.repository', path=repo_path), '?tab=builds&newtrigger=',
- trigger.uuid)
+ full_url = url_for('web.buildtrigger', path=repo_path, trigger=trigger.uuid)
logger.debug('Redirecting to full url: %s', full_url)
return redirect(full_url)
diff --git a/endpoints/building.py b/endpoints/building.py
index 9ad61f8a1..247d0a932 100644
--- a/endpoints/building.py
+++ b/endpoints/building.py
@@ -5,20 +5,19 @@ from datetime import datetime, timedelta
from flask import request
-from app import app, dockerfile_build_queue
+from app import app, dockerfile_build_queue, metric_queue
from data import model
-from data.database import db
+from data.logs_model import logs_model
+from data.database import db, RepositoryState
from auth.auth_context import get_authenticated_user
-from endpoints.notificationhelper import spawn_notification
+from notifications import spawn_notification
from util.names import escape_tag
from util.morecollections import AttrDict
+from util.request import get_request_ip
logger = logging.getLogger(__name__)
-MAX_BUILD_QUEUE_RATE_ITEMS = app.config.get('MAX_BUILD_QUEUE_RATE_ITEMS', -1)
-MAX_BUILD_QUEUE_RATE_SECS = app.config.get('MAX_BUILD_QUEUE_RATE_SECS', -1)
-
class MaximumBuildsQueuedException(Exception):
"""
@@ -28,16 +27,35 @@ class MaximumBuildsQueuedException(Exception):
pass
+class BuildTriggerDisabledException(Exception):
+ """
+ This exception is raised when a build is required, but the build trigger has been disabled.
+ """
+ pass
+
+
def start_build(repository, prepared_build, pull_robot_name=None):
- if MAX_BUILD_QUEUE_RATE_ITEMS > 0 and MAX_BUILD_QUEUE_RATE_SECS > 0:
- queue_item_canonical_name = [repository.namespace_user.username, repository.name]
- now = datetime.utcnow()
- available_min = now - timedelta(seconds=MAX_BUILD_QUEUE_RATE_SECS)
- available_builds = dockerfile_build_queue.num_available_jobs_between(available_min,
- now,
- queue_item_canonical_name)
- if available_builds >= MAX_BUILD_QUEUE_RATE_ITEMS:
- raise MaximumBuildsQueuedException()
+ # Ensure that builds are only run in image repositories.
+ if repository.kind.name != 'image':
+ raise Exception('Attempt to start a build for application repository %s' % repository.id)
+
+ # Ensure the repository isn't in mirror or read-only mode.
+ if repository.state != RepositoryState.NORMAL:
+ raise Exception(('Attempt to start a build for a non-normal repository: %s %s' %
+ (repository.id, repository.state)))
+
+ # Ensure that disabled triggers are not run.
+ if prepared_build.trigger is not None and not prepared_build.trigger.enabled:
+ raise BuildTriggerDisabledException
+
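+  # Enforce the namespace's maximum queued-builds quota against currently alive queue items.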
+ if repository.namespace_user.maximum_queued_builds_count is not None:
+ queue_item_canonical_name = [repository.namespace_user.username]
+ alive_builds = dockerfile_build_queue.num_alive_jobs(queue_item_canonical_name)
+ if alive_builds >= repository.namespace_user.maximum_queued_builds_count:
+ logger.debug('Prevented queueing of build under namespace %s due to reaching max: %s',
+ repository.namespace_user.username,
+ repository.namespace_user.maximum_queued_builds_count)
+ raise MaximumBuildsQueuedException()
host = app.config['SERVER_HOSTNAME']
repo_path = '%s/%s/%s' % (host, repository.namespace_user.username, repository.name)
@@ -51,6 +69,7 @@ def start_build(repository, prepared_build, pull_robot_name=None):
'docker_tags': prepared_build.tags,
'registry': host,
'build_subdir': prepared_build.subdirectory,
+ 'context': prepared_build.context,
'trigger_metadata': prepared_build.metadata or {},
'is_manual': prepared_build.is_manual,
'manual_user': get_authenticated_user().username if get_authenticated_user() else None,
@@ -78,6 +97,10 @@ def start_build(repository, prepared_build, pull_robot_name=None):
build_request.queue_id = queue_id
build_request.save()
+ # Add the queueing of the build to the metrics queue.
+ metric_queue.repository_build_queued.Inc(labelvalues=[repository.namespace_user.username,
+ repository.name])
+
# Add the build to the repo's log and spawn the build_queued notification.
event_log_metadata = {
'build_id': build_request.uuid,
@@ -93,10 +116,10 @@ def start_build(repository, prepared_build, pull_robot_name=None):
event_log_metadata['trigger_kind'] = prepared_build.trigger.service.name
event_log_metadata['trigger_metadata'] = prepared_build.metadata or {}
- model.log.log_action('build_dockerfile', repository.namespace_user.username,
- ip=request.remote_addr, metadata=event_log_metadata, repository=repository)
+ logs_model.log_action('build_dockerfile', repository.namespace_user.username,
+ ip=get_request_ip(), metadata=event_log_metadata, repository=repository)
- # TODO(jzelinskie): remove when more endpoints have been converted to using interfaces
+ # TODO: remove when more endpoints have been converted to using interfaces
repo = AttrDict({
'namespace_name': repository.namespace_user.username,
'name': repository.name,
@@ -119,6 +142,7 @@ class PreparedBuild(object):
self._tags = None
self._build_name = None
self._subdirectory = None
+ self._context = None
self._metadata = None
self._trigger = trigger
self._is_manual = None
@@ -221,6 +245,20 @@ class PreparedBuild(object):
self._subdirectory = value
+ @property
+ def context(self):
+ if self._context is None:
+ raise Exception('Missing property context')
+
+ return self._context
+
+ @context.setter
+ def context(self, value):
+ if self._context:
+ raise Exception('Property context already set')
+
+ self._context = value
+
@property
def metadata(self):
if self._metadata is None:
diff --git a/endpoints/common.py b/endpoints/common.py
index 6fd7ff348..6ce1e745d 100644
--- a/endpoints/common.py
+++ b/endpoints/common.py
@@ -1,118 +1,45 @@
import logging
-import json
-import string
import datetime
import os
-import re
-from random import SystemRandom
-from functools import wraps
-
-from cachetools import lru_cache
-from flask import make_response, render_template, request, abort, session
+from flask import make_response, render_template, request, session
from flask_login import login_user
from flask_principal import identity_changed
-import endpoints.decorated # Register the various exceptions via decorators.
+import endpoints.decorated # Register the various exceptions via decorators.
import features
-from app import app, oauth_apps, LoginWrappedDBUser, user_analytics, license_validator
+from app import app, oauth_apps, oauth_login, LoginWrappedDBUser, user_analytics, IS_KUBERNETES
from auth import scopes
from auth.permissions import QuayDeferredPermissionUser
from config import frontend_visible_config
from external_libraries import get_external_javascript, get_external_css
-from util.names import parse_namespace_repository
+from endpoints.common_models_pre_oci import pre_oci_model as model
+from endpoints.csrf import generate_csrf_token, QUAY_CSRF_UPDATED_HEADER_NAME
+from util.config.provider.k8sprovider import QE_NAMESPACE
from util.secscan import PRIORITY_LEVELS
+from util.saas.useranalytics import build_error_callback
from util.timedeltastring import convert_to_timedelta
+from _init import __version__
logger = logging.getLogger(__name__)
-route_data = None
-CACHE_BUSTERS_JSON = 'static/dist/cachebusters.json'
-CACHE_BUSTERS = None
+JS_BUNDLE_NAME = 'bundle'
-def get_cache_busters():
- """ Retrieves the cache busters hashes. """
- global CACHE_BUSTERS
- if CACHE_BUSTERS is not None:
- return CACHE_BUSTERS
+def common_login(user_uuid, permanent_session=True):
+ """ Performs login of the given user, with optional non-permanence on the session.
+ Returns a tuple with (success, headers to set on success).
+ """
+ user = model.get_user(user_uuid)
+ if user is None:
+ return (False, None)
- if not os.path.exists(CACHE_BUSTERS_JSON):
- return {}
-
- with open(CACHE_BUSTERS_JSON, 'r') as f:
- CACHE_BUSTERS = json.loads(f.read())
- return CACHE_BUSTERS
-
-
-def parse_repository_name(include_tag=False,
- ns_kwarg_name='namespace_name',
- repo_kwarg_name='repo_name',
- tag_kwarg_name='tag_name',
- incoming_repo_kwarg='repository'):
- def inner(func):
- @wraps(func)
- def wrapper(*args, **kwargs):
- repo_name_components = parse_namespace_repository(kwargs[incoming_repo_kwarg],
- app.config['LIBRARY_NAMESPACE'],
- include_tag=include_tag)
- del kwargs[incoming_repo_kwarg]
- kwargs[ns_kwarg_name] = repo_name_components[0]
- kwargs[repo_kwarg_name] = repo_name_components[1]
- if include_tag:
- kwargs[tag_kwarg_name] = repo_name_components[2]
- return func(*args, **kwargs)
- return wrapper
- return inner
-
-
-def route_show_if(value):
- def decorator(f):
- @wraps(f)
- def decorated_function(*args, **kwargs):
- if not value:
- abort(404)
-
- return f(*args, **kwargs)
- return decorated_function
- return decorator
-
-
-def route_hide_if(value):
- def decorator(f):
- @wraps(f)
- def decorated_function(*args, **kwargs):
- if value:
- abort(404)
-
- return f(*args, **kwargs)
- return decorated_function
- return decorator
-
-
-def truthy_param(param):
- return param not in {False, 'false', 'False', '0', 'FALSE', '', 'null'}
-
-
-def param_required(param_name, allow_body=False):
- def wrapper(wrapped):
- @wraps(wrapped)
- def decorated(*args, **kwargs):
- if param_name not in request.args:
- if not allow_body or param_name not in request.values:
- abort(make_response('Required param: %s' % param_name, 400))
- return wrapped(*args, **kwargs)
- return decorated
- return wrapper
-
-
-def common_login(db_user, permanent_session=True):
- if login_user(LoginWrappedDBUser(db_user.uuid, db_user)):
- logger.debug('Successfully signed in as: %s (%s)' % (db_user.username, db_user.uuid))
- new_identity = QuayDeferredPermissionUser.for_user(db_user)
+ if login_user(LoginWrappedDBUser(user_uuid)):
+ logger.debug('Successfully signed in as user %s with uuid %s', user.username, user_uuid)
+ new_identity = QuayDeferredPermissionUser.for_id(user_uuid)
identity_changed.send(app, identity=new_identity)
session['login_time'] = datetime.datetime.now()
@@ -122,72 +49,68 @@ def common_login(db_user, permanent_session=True):
session.permanent_session_lifetime = convert_to_timedelta(session_timeout_str)
# Inform our user analytics that we have a new "lead"
- user_analytics.create_lead(db_user.email, db_user.username, db_user.given_name,
- db_user.family_name, db_user.company)
- return True
- else:
- logger.debug('User could not be logged in, inactive?.')
- return False
+ create_lead_future = user_analytics.create_lead(
+ user.email,
+ user.username,
+ user.given_name,
+ user.family_name,
+ user.company,
+ user.location,
+ )
-def random_string():
- random = SystemRandom()
- return ''.join([random.choice(string.ascii_uppercase + string.digits) for _ in range(8)])
+ create_lead_future.add_done_callback(build_error_callback('Create lead failed'))
-def list_files(path, extension):
- import os
+ # Force a new CSRF token.
+ headers = {}
+ headers[QUAY_CSRF_UPDATED_HEADER_NAME] = generate_csrf_token(force=True)
+ return (True, headers)
+
+  logger.debug('User could not be logged in; the account may be inactive')
+ return (False, None)
+
+
+def _list_files(path, extension, contains=""):
+ """ Returns a list of all the files with the given extension found under the given path. """
def matches(f):
- return os.path.splitext(f)[1] == '.' + extension and f.split(os.path.extsep)[1] != 'spec'
+ return os.path.splitext(f)[1] == '.' + extension and contains in os.path.splitext(f)[0]
def join_path(dp, f):
# Remove the static/ prefix. It is added in the template.
return os.path.join(dp, f)[len('static/'):]
- filepath = 'static/' + path
- return [join_path(dp, f) for dp, dn, files in os.walk(filepath) for f in files if matches(f)]
+ filepath = os.path.join('static/', path)
+ return [join_path(dp, f) for dp, _, files in os.walk(filepath) for f in files if matches(f)]
+
+
+FONT_AWESOME_5 = 'use.fontawesome.com/releases/v5.0.4/css/all.css'
-@lru_cache(maxsize=1)
-def _get_version_number():
- try:
- with open('CHANGELOG.md') as f:
- return re.search('(v[0-9]+\.[0-9]+\.[0-9]+)', f.readline()).group(0)
- except IOError:
- return ''
def render_page_template(name, route_data=None, **kwargs):
- debugging = app.config.get('DEBUGGING', False)
- if debugging:
- # If DEBUGGING is enabled, then we load the full set of individual JS and CSS files
- # from the file system.
- library_styles = list_files('lib', 'css')
- main_styles = list_files('css', 'css')
- library_scripts = list_files('lib', 'js')
- main_scripts = list_files('js', 'js')
-
- file_lists = [library_styles, main_styles, library_scripts, main_scripts]
- for file_list in file_lists:
- file_list.sort()
- else:
- library_styles = []
- main_styles = ['dist/quay-frontend.css']
- library_scripts = []
- main_scripts = ['dist/quay-frontend.min.js']
+ """ Renders the page template with the given name as the response and returns its contents. """
+ main_scripts = _list_files('build', 'js', JS_BUNDLE_NAME)
use_cdn = app.config.get('USE_CDN', True)
if request.args.get('use_cdn') is not None:
use_cdn = request.args.get('use_cdn') == 'true'
- external_styles = get_external_css(local=not use_cdn)
+ external_styles = get_external_css(local=not use_cdn, exclude=FONT_AWESOME_5)
external_scripts = get_external_javascript(local=not use_cdn)
# Add Stripe checkout if billing is enabled.
if features.BILLING:
external_scripts.append('//checkout.stripe.com/checkout.js')
- def add_cachebusters(filenames):
- cachebusters = get_cache_busters()
- for filename in filenames:
- cache_buster = cachebusters.get(filename, random_string()) if not debugging else 'debugging'
- yield (filename, cache_buster)
+ def get_external_login_config():
+ login_config = []
+ for login_service in oauth_login.services:
+ login_config.append({
+ 'id': login_service.service_id(),
+ 'title': login_service.service_name(),
+ 'config': login_service.get_public_config(),
+ 'icon': login_service.get_icon(),
+ })
+
+ return login_config
def get_oauth_config():
oauth_config = {}
@@ -196,46 +119,48 @@ def render_page_template(name, route_data=None, **kwargs):
return oauth_config
+ has_contact = len(app.config.get('CONTACT_INFO', [])) > 0
contact_href = None
if len(app.config.get('CONTACT_INFO', [])) == 1:
contact_href = app.config['CONTACT_INFO'][0]
version_number = ''
if not features.BILLING:
- version_number = ' - ' + _get_version_number()
+ version_number = 'Quay %s' % __version__
- resp = make_response(render_template(name,
- route_data=route_data,
- external_styles=external_styles,
- external_scripts=external_scripts,
- main_styles=add_cachebusters(main_styles),
- library_styles=add_cachebusters(library_styles),
- main_scripts=add_cachebusters(main_scripts),
- library_scripts=add_cachebusters(library_scripts),
- feature_set=features.get_features(),
- config_set=frontend_visible_config(app.config),
- oauth_set=get_oauth_config(),
- scope_set=scopes.app_scopes(app.config),
- vuln_priority_set=PRIORITY_LEVELS,
- enterprise_logo=app.config.get('ENTERPRISE_LOGO_URL', ''),
- mixpanel_key=app.config.get('MIXPANEL_KEY', ''),
- munchkin_key=app.config.get('MARKETO_MUNCHKIN_ID', ''),
- recaptcha_key=app.config.get('RECAPTCHA_SITE_KEY', ''),
- google_tagmanager_key=app.config.get('GOOGLE_TAGMANAGER_KEY', ''),
- google_anaytics_key=app.config.get('GOOGLE_ANALYTICS_KEY', ''),
- sentry_public_dsn=app.config.get('SENTRY_PUBLIC_DSN', ''),
- is_debug=str(app.config.get('DEBUGGING', False)).lower(),
- show_chat=features.SUPPORT_CHAT,
- aci_conversion=features.ACI_CONVERSION,
- has_billing=features.BILLING,
- contact_href=contact_href,
- hostname=app.config['SERVER_HOSTNAME'],
- preferred_scheme=app.config['PREFERRED_URL_SCHEME'],
- version_number=version_number,
- license_insufficient=license_validator.insufficient,
- license_expiring=license_validator.expiring_soon,
- **kwargs))
+ scopes_set = {scope.scope: scope._asdict() for scope in scopes.app_scopes(app.config).values()}
+ contents = render_template(name,
+ registry_state=app.config.get('REGISTRY_STATE', 'normal'),
+ route_data=route_data,
+ external_styles=external_styles,
+ external_scripts=external_scripts,
+ main_scripts=main_scripts,
+ feature_set=features.get_features(),
+ config_set=frontend_visible_config(app.config),
+ oauth_set=get_oauth_config(),
+ external_login_set=get_external_login_config(),
+ scope_set=scopes_set,
+ vuln_priority_set=PRIORITY_LEVELS,
+ mixpanel_key=app.config.get('MIXPANEL_KEY', ''),
+ munchkin_key=app.config.get('MARKETO_MUNCHKIN_ID', ''),
+ recaptcha_key=app.config.get('RECAPTCHA_SITE_KEY', ''),
+ google_tagmanager_key=app.config.get('GOOGLE_TAGMANAGER_KEY', ''),
+ google_anaytics_key=app.config.get('GOOGLE_ANALYTICS_KEY', ''),
+ sentry_public_dsn=app.config.get('SENTRY_PUBLIC_DSN', ''),
+ is_debug=str(app.config.get('DEBUGGING', False)).lower(),
+ aci_conversion=features.ACI_CONVERSION,
+ has_billing=features.BILLING,
+ onprem=not app.config.get('FEATURE_BILLING', False),
+ contact_href=contact_href,
+ has_contact=has_contact,
+ hostname=app.config['SERVER_HOSTNAME'],
+ preferred_scheme=app.config['PREFERRED_URL_SCHEME'],
+ version_number=version_number,
+ current_year=datetime.datetime.now().year,
+ kubernetes_namespace=IS_KUBERNETES and QE_NAMESPACE,
+ **kwargs)
+
+ resp = make_response(contents)
resp.headers['X-FRAME-OPTIONS'] = 'DENY'
return resp
-
diff --git a/endpoints/common_models_interface.py b/endpoints/common_models_interface.py
new file mode 100644
index 000000000..95ccf5685
--- /dev/null
+++ b/endpoints/common_models_interface.py
@@ -0,0 +1,33 @@
+from abc import ABCMeta, abstractmethod
+from collections import namedtuple
+
+from six import add_metaclass
+
+
+USER_FIELDS = ['uuid', 'username', 'email', 'given_name',
+ 'family_name', 'company', 'location']
+
+
+class User(namedtuple('User', USER_FIELDS)):
+ """
+  User represents a user account, exposing only the fields needed by the common endpoints lib.
+ """
+
+
+@add_metaclass(ABCMeta)
+class EndpointsCommonDataInterface(object):
+ """
+ Interface that represents all data store interactions required by the common endpoints lib.
+ """
+
+ @abstractmethod
+ def get_user(self, user_uuid):
+ """
+    Returns the User matching the given uuid, or None if no such user exists.
+ """
+
+ @abstractmethod
+ def get_namespace_uuid(self, namespace_name):
+ """
+    Returns the uuid of the Namespace with the given name, or None if it does not exist.
+ """
diff --git a/endpoints/common_models_pre_oci.py b/endpoints/common_models_pre_oci.py
new file mode 100644
index 000000000..1f5e01052
--- /dev/null
+++ b/endpoints/common_models_pre_oci.py
@@ -0,0 +1,22 @@
+from data import model
+from endpoints.common_models_interface import User, EndpointsCommonDataInterface
+
+
+class EndpointsCommonDataPreOCIModel(EndpointsCommonDataInterface):
+ def get_user(self, user_uuid):
+ user = model.user.get_user_by_uuid(user_uuid)
+ if user is None:
+ return None
+
+ return User(uuid=user.uuid, username=user.username, email=user.email,
+ given_name=user.given_name, family_name=user.family_name,
+ company=user.company, location=user.location)
+
+ def get_namespace_uuid(self, namespace_name):
+ user = model.user.get_namespace_user(namespace_name)
+ if user is None:
+ return None
+
+ return user.uuid
+
+pre_oci_model = EndpointsCommonDataPreOCIModel()
diff --git a/endpoints/csrf.py b/endpoints/csrf.py
index b2dbfcff1..11c225924 100644
--- a/endpoints/csrf.py
+++ b/endpoints/csrf.py
@@ -4,7 +4,9 @@ import base64
import hmac
from functools import wraps
-from flask import session, request
+from flask import session, request, Response
+
+import features
from app import app
from auth.auth_context import get_validated_oauth_token
@@ -15,24 +17,31 @@ logger = logging.getLogger(__name__)
OAUTH_CSRF_TOKEN_NAME = '_oauth_csrf_token'
_QUAY_CSRF_TOKEN_NAME = '_csrf_token'
+_QUAY_CSRF_HEADER_NAME = 'X-CSRF-Token'
-def generate_csrf_token(session_token_name=_QUAY_CSRF_TOKEN_NAME):
+QUAY_CSRF_UPDATED_HEADER_NAME = 'X-Next-CSRF-Token'
+
+
+def generate_csrf_token(session_token_name=_QUAY_CSRF_TOKEN_NAME, force=False):
""" If not present in the session, generates a new CSRF token with the given name
and places it into the session. Returns the generated token.
"""
- if session_token_name not in session:
+ if session_token_name not in session or force:
session[session_token_name] = base64.b64encode(os.urandom(48))
return session[session_token_name]
def verify_csrf(session_token_name=_QUAY_CSRF_TOKEN_NAME,
- request_token_name=_QUAY_CSRF_TOKEN_NAME):
+ request_token_name=_QUAY_CSRF_TOKEN_NAME,
+ check_header=True):
""" Verifies that the CSRF token with the given name is found in the session and
that the matching token is found in the request args or values.
"""
token = str(session.get(session_token_name, ''))
found_token = str(request.values.get(request_token_name, ''))
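+  # Fall back to the CSRF header when no token is present in the request args or form values.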
+ if check_header and not found_token:
+ found_token = str(request.headers.get(_QUAY_CSRF_HEADER_NAME, ''))
if not token or not found_token or not hmac.compare_digest(token, found_token):
msg = 'CSRF Failure. Session token (%s) was %s and request token (%s) was %s'
@@ -42,16 +51,19 @@ def verify_csrf(session_token_name=_QUAY_CSRF_TOKEN_NAME,
def csrf_protect(session_token_name=_QUAY_CSRF_TOKEN_NAME,
request_token_name=_QUAY_CSRF_TOKEN_NAME,
- all_methods=False):
+ all_methods=False,
+ check_header=True):
def inner(func):
@wraps(func)
def wrapper(*args, **kwargs):
- oauth_token = get_validated_oauth_token()
- if oauth_token is None:
+ # Verify the CSRF token.
+ if get_validated_oauth_token() is None:
if all_methods or (request.method != "GET" and request.method != "HEAD"):
- verify_csrf(session_token_name, request_token_name)
+ verify_csrf(session_token_name, request_token_name, check_header)
- return func(*args, **kwargs)
+ # Invoke the handler.
+ resp = func(*args, **kwargs)
+ return resp
return wrapper
return inner
diff --git a/endpoints/decorated.py b/endpoints/decorated.py
index 1e19996fe..88216216b 100644
--- a/endpoints/decorated.py
+++ b/endpoints/decorated.py
@@ -5,6 +5,7 @@ from flask_restful.utils.cors import crossdomain
from app import app
from data import model
+from data.readreplica import ReadOnlyModeException
from util.config.provider.baseprovider import CannotWriteConfigException
from util.useremails import CannotSendEmailException
@@ -15,7 +16,7 @@ logger = logging.getLogger(__name__)
@app.errorhandler(model.DataModelException)
def handle_dme(ex):
logger.exception(ex)
- response = jsonify({'message': ex.message})
+ response = jsonify({'message': str(ex)})
response.status_code = 400
return response
@@ -43,3 +44,11 @@ def handle_too_many_login_attempts(error):
response.headers['Retry-After'] = int(error.retry_after)
return response
+@app.errorhandler(ReadOnlyModeException)
+def handle_readonly(ex):
+ logger.exception(ex)
+ response = jsonify({'message': 'System is currently read-only. Pulls will succeed but all ' +
+ 'write operations are currently suspended.',
+ 'is_readonly': True})
+ response.status_code = 503
+ return response
diff --git a/endpoints/decorators.py b/endpoints/decorators.py
index b032b624a..ecafb1cdb 100644
--- a/endpoints/decorators.py
+++ b/endpoints/decorators.py
@@ -1,10 +1,78 @@
""" Various decorators for endpoint and API handlers. """
-import features
-from flask import abort
-from auth.auth_context import (get_validated_oauth_token, get_authenticated_user,
- get_validated_token, get_grant_context)
+import os
+import logging
+
from functools import wraps
+from flask import abort, request, make_response
+
+import features
+
+from app import app, ip_resolver, model_cache
+from auth.auth_context import get_authenticated_context, get_authenticated_user
+from data.database import RepositoryState
+from data.model.repository import get_repository, get_repository_state
+from data.model.repo_mirror import get_mirroring_robot, get_mirror
+from data.registry_model import registry_model
+from data.readreplica import ReadOnlyModeException
+from util.names import parse_namespace_repository, ImplicitLibraryNamespaceNotAllowed
+from util.http import abort
+from util.request import get_request_ip
+
+logger = logging.getLogger(__name__)
+
+
+def parse_repository_name(include_tag=False,
+ ns_kwarg_name='namespace_name',
+ repo_kwarg_name='repo_name',
+ tag_kwarg_name='tag_name',
+ incoming_repo_kwarg='repository'):
+ """ Decorator which parses the repository name found in the incoming_repo_kwarg argument,
+ and applies its pieces to the decorated function.
+ """
+ def inner(func):
+ @wraps(func)
+ def wrapper(*args, **kwargs):
+ try:
+ repo_name_components = parse_namespace_repository(kwargs[incoming_repo_kwarg],
+ app.config['LIBRARY_NAMESPACE'],
+ include_tag=include_tag,
+ allow_library=features.LIBRARY_SUPPORT)
+ except ImplicitLibraryNamespaceNotAllowed:
+ abort(400, message='A namespace must be specified explicitly')
+
+ del kwargs[incoming_repo_kwarg]
+ kwargs[ns_kwarg_name] = repo_name_components[0]
+ kwargs[repo_kwarg_name] = repo_name_components[1]
+ if include_tag:
+ kwargs[tag_kwarg_name] = repo_name_components[2]
+ return func(*args, **kwargs)
+ return wrapper
+ return inner
+
+
+def param_required(param_name, allow_body=False):
+ """ Marks a route as requiring a parameter with the given name to exist in the request's arguments
+ or (if allow_body=True) in its body values. If the parameter is not present, the request will
+ fail with a 400.
+ """
+ def wrapper(wrapped):
+ @wraps(wrapped)
+ def decorated(*args, **kwargs):
+ if param_name not in request.args:
+ if not allow_body or param_name not in request.values:
+ abort(400, message='Required param: %s' % param_name)
+ return wrapped(*args, **kwargs)
+ return decorated
+ return wrapper
+
+
+def readonly_call_allowed(func):
+ """ Marks a method as allowing for invocation when the registry is in a read only state.
+ Only necessary on non-GET methods.
+ """
+ func.__readonly_call_allowed = True
+ return func
def anon_allowed(func):
@@ -21,6 +89,7 @@ def anon_protect(func):
def check_anon_protection(func):
""" Validates a method as requiring some form of valid user auth before it can be executed. """
+
@wraps(func)
def wrapper(*args, **kwargs):
# Skip if anonymous access is allowed.
@@ -28,9 +97,152 @@ def check_anon_protection(func):
return func(*args, **kwargs)
# Check for validated context. If none exists, fail with a 401.
- if (get_authenticated_user() or get_validated_oauth_token() or get_validated_token() or
- get_grant_context()):
+ if get_authenticated_context() and not get_authenticated_context().is_anonymous:
return func(*args, **kwargs)
- abort(401)
+ abort(401, message='Anonymous access is not allowed')
+
+ return wrapper
+
+
+def check_readonly(func):
+ """ Validates that a non-GET method is not invoked when the registry is in read-only mode,
+ unless explicitly marked as being allowed.
+ """
+
+ @wraps(func)
+ def wrapper(*args, **kwargs):
+ # Skip if a GET method.
+ if request.method == 'GET':
+ return func(*args, **kwargs)
+
+ # Skip if not in read only mode.
+ if app.config.get('REGISTRY_STATE', 'normal') != 'readonly':
+ return func(*args, **kwargs)
+
+ # Skip if readonly access is allowed.
+ if hasattr(func, '__readonly_call_allowed'):
+ return func(*args, **kwargs)
+
+ raise ReadOnlyModeException()
+ return wrapper
+
+
+def route_show_if(value):
+ """ Adds/shows the decorated route if the given value is True. """
+
+ def decorator(f):
+ @wraps(f)
+ def decorated_function(*args, **kwargs):
+ if not value:
+ abort(404)
+
+ return f(*args, **kwargs)
+ return decorated_function
+ return decorator
+
+
+def require_xhr_from_browser(func):
+ """ Requires that API GET calls made from browsers are made via XHR, in order to prevent
+ reflected text attacks.
+ """
+
+ @wraps(func)
+ def wrapper(*args, **kwargs):
+ if app.config.get('BROWSER_API_CALLS_XHR_ONLY', False):
+ if request.method == 'GET' and request.user_agent.browser:
+ has_xhr_header = request.headers.get('X-Requested-With') == 'XMLHttpRequest'
+        if not has_xhr_header and app.config.get('DEBUGGING') is not True:
+ logger.warning('Disallowed possible RTA to URL %s with user agent %s',
+ request.path, request.user_agent)
+ abort(400, message='API calls must be invoked with an X-Requested-With header ' +
+ 'if called from a browser')
+
+ return func(*args, **kwargs)
+ return wrapper
+
+
+def check_region_blacklisted(error_class=None, namespace_name_kwarg=None):
+ """ Decorator which checks if the incoming request is from a region geo IP blocked
+ for the current namespace. The first argument to the wrapped function must be
+ the namespace name.
+ """
+ def wrapper(wrapped):
+ @wraps(wrapped)
+ def decorated(*args, **kwargs):
+ if namespace_name_kwarg:
+ namespace_name = kwargs[namespace_name_kwarg]
+ else:
+ namespace_name = args[0]
+
+ region_blacklist = registry_model.get_cached_namespace_region_blacklist(model_cache,
+ namespace_name)
+ if region_blacklist:
+ # Resolve the IP information and block if on the namespace's blacklist.
+ remote_ip = get_request_ip()
+ resolved_ip_info = ip_resolver.resolve_ip(remote_ip)
+ logger.debug('Resolved IP information for IP %s: %s', remote_ip, resolved_ip_info)
+
+ if (resolved_ip_info and
+ resolved_ip_info.country_iso_code and
+ resolved_ip_info.country_iso_code in region_blacklist):
+ if error_class:
+ raise error_class()
+
+ abort(403, 'Pulls of this data have been restricted geographically')
+
+ return wrapped(*args, **kwargs)
+ return decorated
+ return wrapper
+
+
+def check_repository_state(f):
+ @wraps(f)
+ def wrapper(namespace_name, repo_name, *args, **kwargs):
+ """
+ Conditionally allow changes depending on the Repository's state.
+ NORMAL -> Pass
+ READ_ONLY -> Block all POST/PUT/DELETE
+ MIRROR -> Same as READ_ONLY, except treat the Mirroring Robot User as Normal
+ """
+ user = get_authenticated_user()
+ if user is None:
+ # NOTE: Remaining auth checks will be handled by subsequent decorators.
+ return f(namespace_name, repo_name, *args, **kwargs)
+
+ repository = get_repository(namespace_name, repo_name)
+ if not repository:
+ return f(namespace_name, repo_name, *args, **kwargs)
+
+ if repository.state == RepositoryState.READ_ONLY:
+ abort(405, '%s/%s is in read-only mode.' % (namespace_name, repo_name))
+
+ if repository.state == RepositoryState.MIRROR:
+ mirror = get_mirror(repository)
+ robot = mirror.internal_robot if mirror is not None else None
+
+ if mirror is None:
+ abort(500, 'Repository %s/%s is set as a mirror but the Mirror configuration is missing.' % (
+ namespace_name, repo_name))
+
+ elif robot is None:
+ abort(400, 'Repository %s/%s is configured for mirroring but no robot is assigned.' % (
+ namespace_name, repo_name))
+
+ elif user.id != robot.id:
+ abort(405,
+ 'Repository %s/%s is a mirror. Mirrored repositories cannot be modified directly.' % (
+ namespace_name, repo_name))
+
+ elif user.id == robot.id:
+ pass # User is designated robot for this mirror repo.
+
+ else:
+ msg = (
+ 'An internal error has occurred while verifying repository %s/%s state. Please report '
+ 'this to an administrator.'
+ ) % (namespace_name, repo_name)
+ raise Exception(msg)
+
+ return f(namespace_name, repo_name, *args, **kwargs)
return wrapper
diff --git a/endpoints/exception.py b/endpoints/exception.py
index d11266153..abc32cc54 100644
--- a/endpoints/exception.py
+++ b/endpoints/exception.py
@@ -1,6 +1,7 @@
from enum import Enum
from flask import url_for
+from werkzeug.exceptions import HTTPException
from auth.auth_context import get_authenticated_user
@@ -32,7 +33,7 @@ ERROR_DESCRIPTION = {
}
-class ApiException(Exception):
+class ApiException(HTTPException):
"""
Represents an error in the application/problem+json format.
@@ -58,9 +59,12 @@ class ApiException(Exception):
def __init__(self, error_type, status_code, error_description, payload=None):
Exception.__init__(self)
self.error_description = error_description
- self.status_code = status_code
+ self.code = status_code
self.payload = payload
self.error_type = error_type
+ self.data = self.to_dict()
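+    # 'code' and 'data' are the attributes werkzeug and flask-restful consult when
+    # serializing an HTTPException into a response.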
+
+ super(ApiException, self).__init__(error_description, None)
def to_dict(self):
rv = dict(self.payload or ())
@@ -71,13 +75,13 @@ class ApiException(Exception):
rv['error_type'] = self.error_type.value # TODO: deprecate
rv['title'] = self.error_type.value
- rv['type'] = url_for('error', error_type=self.error_type.value, _external=True)
- rv['status'] = self.status_code
+ rv['type'] = url_for('api.error', error_type=self.error_type.value, _external=True)
+ rv['status'] = self.code
return rv
-class ExternalServiceTimeout(ApiException):
+class ExternalServiceError(ApiException):
def __init__(self, error_description, payload=None):
ApiException.__init__(self, ApiErrorType.external_service_timeout, 520, error_description, payload)
@@ -125,5 +129,5 @@ class NotFound(ApiException):
class DownstreamIssue(ApiException):
- def __init__(self, payload=None):
- ApiException.__init__(self, ApiErrorType.downstream_issue, 520, 'Downstream Issue', payload)
+ def __init__(self, error_description, payload=None):
+ ApiException.__init__(self, ApiErrorType.downstream_issue, 520, error_description, payload)
diff --git a/endpoints/githubtrigger.py b/endpoints/githubtrigger.py
index 1f0d9ca90..3b4b21f0a 100644
--- a/endpoints/githubtrigger.py
+++ b/endpoints/githubtrigger.py
@@ -6,10 +6,10 @@ from flask_login import current_user
import features
from app import app, github_trigger
+from auth.decorators import require_session_login
from auth.permissions import AdministerRepositoryPermission
-from auth.process import require_session_login
from data import model
-from endpoints.common import route_show_if, parse_repository_name
+from endpoints.decorators import route_show_if, parse_repository_name
from util.http import abort
@@ -31,11 +31,12 @@ def attach_github_build_trigger(namespace_name, repo_name):
if not repo:
msg = 'Invalid repository: %s/%s' % (namespace_name, repo_name)
abort(404, message=msg)
+ elif repo.kind.name != 'image':
+ abort(501)
trigger = model.build.create_build_trigger(repo, 'github', token, current_user.db_user())
repo_path = '%s/%s' % (namespace_name, repo_name)
- full_url = '%s%s%s' % (url_for('web.repository', path=repo_path), '?tab=builds&newtrigger=',
- trigger.uuid)
+ full_url = url_for('web.buildtrigger', path=repo_path, trigger=trigger.uuid)
logger.debug('Redirecting to full url: %s', full_url)
return redirect(full_url)
diff --git a/endpoints/gitlabtrigger.py b/endpoints/gitlabtrigger.py
index 4f51a2bdc..4d97caffe 100644
--- a/endpoints/gitlabtrigger.py
+++ b/endpoints/gitlabtrigger.py
@@ -6,10 +6,10 @@ from flask_login import current_user
import features
from app import app, gitlab_trigger
+from auth.decorators import require_session_login
from auth.permissions import AdministerRepositoryPermission
-from auth.process import require_session_login
from data import model
-from endpoints.common import route_show_if
+from endpoints.decorators import route_show_if
from util.http import abort
@@ -44,11 +44,12 @@ def attach_gitlab_build_trigger():
if not repo:
msg = 'Invalid repository: %s/%s' % (namespace, repository)
abort(404, message=msg)
+ elif repo.kind.name != 'image':
+ abort(501)
trigger = model.build.create_build_trigger(repo, 'gitlab', token, current_user.db_user())
repo_path = '%s/%s' % (namespace, repository)
- full_url = '%s%s%s' % (url_for('web.repository', path=repo_path), '?tab=builds&newtrigger=',
- trigger.uuid)
+ full_url = url_for('web.buildtrigger', path=repo_path, trigger=trigger.uuid)
logger.debug('Redirecting to full url: %s', full_url)
return redirect(full_url)
diff --git a/endpoints/keyserver/__init__.py b/endpoints/keyserver/__init__.py
index 13248fcc7..f24b30421 100644
--- a/endpoints/keyserver/__init__.py
+++ b/endpoints/keyserver/__init__.py
@@ -1,18 +1,22 @@
import logging
+
from datetime import datetime, timedelta
from flask import Blueprint, jsonify, abort, request, make_response
from jwt import get_unverified_header
from app import app
-from data.interfaces.key_server import pre_oci_model as model, ServiceKeyDoesNotExist
-from data.model.log import log_action
+from data.logs_model import logs_model
+from endpoints.keyserver.models_interface import ServiceKeyDoesNotExist
+from endpoints.keyserver.models_pre_oci import pre_oci_model as model
from util.security import jwtutil
+from util.request import get_request_ip
logger = logging.getLogger(__name__)
key_server = Blueprint('key_server', __name__)
+
JWT_HEADER_NAME = 'Authorization'
JWT_AUDIENCE = app.config['PREFERRED_URL_SCHEME'] + '://' + app.config['SERVER_HOSTNAME']
@@ -91,7 +95,7 @@ def get_service_key(service, kid):
@key_server.route('/services/<service>/keys/<kid>', methods=['PUT'])
def put_service_key(service, kid):
- metadata = {'ip': request.remote_addr}
+ metadata = {'ip': get_request_ip()}
rotation_duration = request.args.get('rotation', None)
expiration_date = request.args.get('expiration', None)
@@ -125,17 +129,16 @@ def put_service_key(service, kid):
model.create_service_key('', kid, service, jwk, metadata, expiration_date,
rotation_duration=rotation_duration)
- key_log_metadata = {
+ logs_model.log_action('service_key_create', ip=get_request_ip(), metadata={
'kid': kid,
'preshared': False,
'service': service,
'name': '',
'expiration_date': expiration_date,
'user_agent': request.headers.get('User-Agent'),
- 'ip': request.remote_addr,
- }
+ 'ip': get_request_ip(),
+ })
- log_action('service_key_create', None, metadata=key_log_metadata, ip=request.remote_addr)
return make_response('', 202)
# Key is going to be rotated.
@@ -150,17 +153,16 @@ def put_service_key(service, kid):
except ServiceKeyDoesNotExist:
abort(404)
- key_log_metadata = {
+ logs_model.log_action('service_key_rotate', ip=get_request_ip(), metadata={
'kid': kid,
'signer_kid': signer_key.kid,
'service': service,
'name': signer_key.name,
'expiration_date': expiration_date,
'user_agent': request.headers.get('User-Agent'),
- 'ip': request.remote_addr,
- }
+ 'ip': get_request_ip(),
+ })
- log_action('service_key_rotate', None, metadata=key_log_metadata, ip=request.remote_addr)
return make_response('', 200)
@@ -187,16 +189,15 @@ def delete_service_key(service, kid):
except ServiceKeyDoesNotExist:
abort(404)
- key_log_metadata = {
+ logs_model.log_action('service_key_delete', ip=get_request_ip(), metadata={
'kid': kid,
'signer_kid': signer_key.kid,
'service': service,
'name': signer_key.name,
'user_agent': request.headers.get('User-Agent'),
- 'ip': request.remote_addr,
- }
+ 'ip': get_request_ip(),
+ })
- log_action('service_key_delete', None, metadata=key_log_metadata, ip=request.remote_addr)
return make_response('', 204)
abort(403)
diff --git a/endpoints/keyserver/models_interface.py b/endpoints/keyserver/models_interface.py
new file mode 100644
index 000000000..977c2f6b4
--- /dev/null
+++ b/endpoints/keyserver/models_interface.py
@@ -0,0 +1,65 @@
+from abc import ABCMeta, abstractmethod
+from collections import namedtuple
+
+from six import add_metaclass
+
+
+class ServiceKey(namedtuple('ServiceKey', ['name', 'kid', 'service', 'jwk', 'metadata',
+ 'created_date', 'expiration_date', 'rotation_duration',
+ 'approval'])):
+ """
+ Service Key represents a public key (JWK) being used by an instance of a particular service to
+ authenticate with other services.
+ """
+ pass
+
+
+class ServiceKeyException(Exception):
+ pass
+
+
+class ServiceKeyDoesNotExist(ServiceKeyException):
+ pass
+
+
+@add_metaclass(ABCMeta)
+class KeyServerDataInterface(object):
+ """
+ Interface that represents all data store interactions required by a JWT key service.
+ """
+
+ @abstractmethod
+ def list_service_keys(self, service):
+ """
+ Returns a list of service keys or an empty list if the service does not exist.
+ """
+ pass
+
+ @abstractmethod
+ def get_service_key(self, signer_kid, service=None, alive_only=None, approved_only=None):
+ """
+ Returns the service key with the given kid or raises ServiceKeyDoesNotExist.
+ """
+ pass
+
+ @abstractmethod
+ def create_service_key(self, name, kid, service, jwk, metadata, expiration_date,
+ rotation_duration=None):
+ """
+ Stores a service key.
+ """
+ pass
+
+ @abstractmethod
+ def replace_service_key(self, old_kid, kid, jwk, metadata, expiration_date):
+ """
+ Replaces a service key with a new key or raises ServiceKeyDoesNotExist.
+ """
+ pass
+
+ @abstractmethod
+ def delete_service_key(self, kid):
+ """
+ Deletes and returns a service key with the given kid or raises ServiceKeyDoesNotExist.
+ """
+ pass
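+
+# Implementations of this interface (e.g. PreOCIModel below) raise
+# ServiceKeyDoesNotExist rather than returning None when a key is missing.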
diff --git a/data/interfaces/key_server.py b/endpoints/keyserver/models_pre_oci.py
similarity index 51%
rename from data/interfaces/key_server.py
rename to endpoints/keyserver/models_pre_oci.py
index b9b6d324b..8dfb119b9 100644
--- a/data/interfaces/key_server.py
+++ b/endpoints/keyserver/models_pre_oci.py
@@ -1,71 +1,7 @@
-from abc import ABCMeta, abstractmethod
-from collections import namedtuple
-
-from six import add_metaclass
-
import data.model
-
-class ServiceKey(namedtuple('ServiceKey', ['name', 'kid', 'service', 'jwk', 'metadata',
- 'created_date', 'expiration_date', 'rotation_duration',
- 'approval'])):
- """
- Service Key represents a public key (JWK) being used by an instance of a particular service to
- authenticate with other services.
- """
- pass
-
-
-class ServiceKeyException(Exception):
- pass
-
-
-class ServiceKeyDoesNotExist(ServiceKeyException):
- pass
-
-
-# TODO(jzelinskie): maybe make this interface support superuser API
-@add_metaclass(ABCMeta)
-class KeyServerDataInterface(object):
- """
- Interface that represents all data store interactions required by a JWT key service.
- """
-
- @abstractmethod
- def list_service_keys(self, service):
- """
- Returns a list of service keys or an empty list if the service does not exist.
- """
- pass
-
- @abstractmethod
- def get_service_key(self, signer_kid, service=None, alive_only=None, approved_only=None):
- """
- Returns a service kid with the given kid or raises ServiceKeyNotFound.
- """
- pass
-
- @abstractmethod
- def create_service_key(self, name, kid, service, jwk, metadata, expiration_date,
- rotation_duration=None):
- """
- Stores a service key.
- """
- pass
-
- @abstractmethod
- def replace_service_key(self, old_kid, kid, jwk, metadata, expiration_date):
- """
- Replaces a service with a new key or raises ServiceKeyNotFound.
- """
- pass
-
- @abstractmethod
- def delete_service_key(self, kid):
- """
- Deletes and returns a service key with the given kid or raises ServiceKeyNotFound.
- """
- pass
+from endpoints.keyserver.models_interface import (KeyServerDataInterface, ServiceKey,
+ ServiceKeyDoesNotExist)
class PreOCIModel(KeyServerDataInterface):
@@ -121,4 +57,3 @@ def _db_key_to_servicekey(key):
rotation_duration=key.rotation_duration,
approval=key.approval,
)
-
diff --git a/endpoints/oauth/__init__.py b/endpoints/oauth/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/endpoints/oauth/login.py b/endpoints/oauth/login.py
new file mode 100644
index 000000000..89c81f9c6
--- /dev/null
+++ b/endpoints/oauth/login.py
@@ -0,0 +1,302 @@
+import logging
+import time
+import recaptcha2
+
+from collections import namedtuple
+from flask import request, redirect, url_for, Blueprint, abort, session
+from peewee import IntegrityError
+
+import features
+
+from app import app, analytics, get_app_url, oauth_login, authentication, url_scheme_and_hostname
+from auth.auth_context import get_authenticated_user
+from auth.decorators import require_session_login
+from data import model
+from data.users.shared import can_create_user
+from endpoints.common import common_login
+from endpoints.web import index, render_page_template_with_routedata
+from endpoints.csrf import csrf_protect, OAUTH_CSRF_TOKEN_NAME, generate_csrf_token
+from oauth.login import OAuthLoginException
+from util.validation import generate_valid_usernames
+from util.request import get_request_ip
+
+logger = logging.getLogger(__name__)
+client = app.config['HTTPCLIENT']
+oauthlogin = Blueprint('oauthlogin', __name__)
+
+oauthlogin_csrf_protect = csrf_protect(OAUTH_CSRF_TOKEN_NAME, 'state', all_methods=True,
+ check_header=False)
+
+
+OAuthResult = namedtuple('oauthresult', ['user_obj', 'service_name', 'error_message',
+ 'register_redirect', 'requires_verification'])
+
+def _oauthresult(user_obj=None, service_name=None, error_message=None, register_redirect=False,
+ requires_verification=False):
+ return OAuthResult(user_obj, service_name, error_message, register_redirect,
+ requires_verification)
+
+def _get_response(result):
+ if result.error_message is not None:
+ return _render_ologin_error(result.service_name, result.error_message, result.register_redirect)
+
+ return _perform_login(result.user_obj, result.service_name)
+
+def _conduct_oauth_login(auth_system, login_service, lid, lusername, lemail, metadata=None,
+ captcha_verified=False):
+ """ Conducts login from the result of an OAuth service's login flow and returns
+ the status of the login, as well as the follow-up step. """
+ service_id = login_service.service_id()
+ service_name = login_service.service_name()
+
+ # Check for an existing account *bound to this service*. If found, conduct login of that account
+ # and redirect.
+ user_obj = model.user.verify_federated_login(service_id, lid)
+ if user_obj is not None:
+ return _oauthresult(user_obj=user_obj, service_name=service_name)
+
+ # If the login service has a bound field name, and we have a defined internal auth type that is
+ # not the database, then search for an existing account with that matching field. This allows
+ # users to set up SSO while also being backed by something like LDAP.
+ bound_field_name = login_service.login_binding_field()
+ if auth_system.federated_service is not None and bound_field_name is not None:
+ # Perform lookup.
+ logger.debug('Got oauth bind field name of "%s"', bound_field_name)
+ lookup_value = None
+ if bound_field_name == 'sub':
+ lookup_value = lid
+ elif bound_field_name == 'username':
+ lookup_value = lusername
+ elif bound_field_name == 'email':
+ lookup_value = lemail
+
+ if lookup_value is None:
+ logger.error('Missing lookup value for OAuth login')
+ return _oauthresult(service_name=service_name,
+ error_message='Configuration error in this provider')
+
+ (user_obj, err) = auth_system.link_user(lookup_value)
+ if err is not None:
+ logger.debug('%s %s not found: %s', bound_field_name, lookup_value, err)
+ msg = '%s %s not found in backing auth system' % (bound_field_name, lookup_value)
+ return _oauthresult(service_name=service_name, error_message=msg)
+
+ # Found an existing user. Bind their internal auth account to this service as well.
+ result = _attach_service(login_service, user_obj, lid, lusername)
+ if result.error_message is not None:
+ return result
+
+ return _oauthresult(user_obj=user_obj, service_name=service_name)
+
+ # Otherwise, we need to create a new user account.
+ blacklisted_domains = app.config.get('BLACKLISTED_EMAIL_DOMAINS', [])
+ if not can_create_user(lemail, blacklisted_domains=blacklisted_domains):
+ error_message = 'User creation is disabled. Please contact your administrator'
+ return _oauthresult(service_name=service_name, error_message=error_message)
+
+ if features.RECAPTCHA and not captcha_verified:
+ return _oauthresult(service_name=service_name, requires_verification=True)
+
+ # Try to create the user
+ try:
+ # Generate a valid username.
+ new_username = None
+ for valid in generate_valid_usernames(lusername):
+ if model.user.get_user_or_org(valid):
+ continue
+
+ new_username = valid
+ break
+
+ requires_password = auth_system.requires_distinct_cli_password
+ prompts = model.user.get_default_user_prompts(features)
+ user_obj = model.user.create_federated_user(new_username, lemail, service_id, lid,
+ set_password_notification=requires_password,
+ metadata=metadata or {},
+ confirm_username=features.USERNAME_CONFIRMATION,
+ prompts=prompts,
+ email_required=features.MAILING)
+
+ # Success, tell analytics
+ analytics.track(user_obj.username, 'register', {'service': service_name.lower()})
+ return _oauthresult(user_obj=user_obj, service_name=service_name)
+
+ except model.InvalidEmailAddressException:
+ message = ("The e-mail address {0} is already associated "
+ "with an existing {1} account. \n"
+ "Please log in with your username and password and "
+ "associate your {2} account to use it in the future.")
+ message = message.format(lemail, app.config['REGISTRY_TITLE_SHORT'], service_name)
+ return _oauthresult(service_name=service_name, error_message=message,
+ register_redirect=True)
+
+ except model.DataModelException as ex:
+ return _oauthresult(service_name=service_name, error_message=str(ex))
+
+def _render_ologin_error(service_name, error_message=None, register_redirect=False):
+ """ Returns a Flask response indicating an OAuth error. """
+
+ user_creation = bool(features.USER_CREATION and features.DIRECT_LOGIN and
+ not features.INVITE_ONLY_USER_CREATION)
+ error_info = {
+ 'reason': 'ologinerror',
+ 'service_name': service_name,
+ 'error_message': error_message or 'Could not load user data. The token may have expired',
+ 'service_url': get_app_url(),
+ 'user_creation': user_creation,
+ 'register_redirect': register_redirect,
+ }
+
+ resp = index('', error_info=error_info)
+ resp.status_code = 400
+ return resp
+
+def _perform_login(user_obj, service_name):
+ """ Attempts to login the given user, returning the Flask result of whether the login succeeded.
+ """
+ success, _ = common_login(user_obj.uuid)
+ if success:
+ if model.user.has_user_prompts(user_obj):
+ return redirect(url_for('web.updateuser'))
+ else:
+ return redirect(url_for('web.index'))
+ else:
+ return _render_ologin_error(service_name, 'Could not log in. Account may be disabled')
+
+def _attach_service(login_service, user_obj, lid, lusername):
+ """ Attaches the given user account to the given service, with the given service user ID and
+ service username.
+ """
+ metadata = {
+ 'service_username': lusername,
+ }
+
+ try:
+ model.user.attach_federated_login(user_obj, login_service.service_id(), lid,
+ metadata=metadata)
+ return _oauthresult(user_obj=user_obj)
+ except IntegrityError:
+ err = '%s account %s is already attached to a %s account' % (
+ login_service.service_name(), lusername, app.config['REGISTRY_TITLE_SHORT'])
+ return _oauthresult(service_name=login_service.service_name(), error_message=err)
+
+def _register_service(login_service):
+ """ Registers the given login service, adding its callback and attach routes to the blueprint. """
+
+ @oauthlogin_csrf_protect
+ def callback_func():
+ # Check for a callback error.
+ error = request.values.get('error', None)
+ if error:
+ return _render_ologin_error(login_service.service_name(), error)
+
+ # Exchange the OAuth code for login information.
+ code = request.values.get('code')
+ try:
+ lid, lusername, lemail = login_service.exchange_code_for_login(app.config, client, code, '')
+ except OAuthLoginException as ole:
+ logger.exception('Got login exception')
+ return _render_ologin_error(login_service.service_name(), str(ole))
+
+ # Conduct login.
+ metadata = {
+ 'service_username': lusername,
+ }
+
+ # Conduct OAuth login.
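+ # A prior captcha verification is honored for at most 10 minutes (600s) and is consumed here.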
+ captcha_verified = (int(time.time()) - session.get('captcha_verified', 0)) <= 600
+ session['captcha_verified'] = 0
+
+ result = _conduct_oauth_login(authentication, login_service, lid, lusername, lemail,
+ metadata=metadata, captcha_verified=captcha_verified)
+ if result.requires_verification:
+ return render_page_template_with_routedata('oauthcaptcha.html',
+ recaptcha_site_key=app.config['RECAPTCHA_SITE_KEY'],
+ callback_url=request.base_url)
+
+ return _get_response(result)
+
+
+ @require_session_login
+ @oauthlogin_csrf_protect
+ def attach_func():
+ # Check for a callback error.
+ error = request.values.get('error', None)
+ if error:
+ return _render_ologin_error(login_service.service_name(), error)
+
+ # Exchange the OAuth code for login information.
+ code = request.values.get('code')
+ try:
+ lid, lusername, _ = login_service.exchange_code_for_login(app.config, client, code, '/attach')
+ except OAuthLoginException as ole:
+ return _render_ologin_error(login_service.service_name(), str(ole))
+
+ # Conduct attach.
+ user_obj = get_authenticated_user()
+ result = _attach_service(login_service, user_obj, lid, lusername)
+ if result.error_message is not None:
+ return _get_response(result)
+
+ return redirect(url_for('web.user_view', path=user_obj.username, tab='external'))
+
+ def captcha_func():
+ recaptcha_response = request.values.get('recaptcha_response', '')
+ result = recaptcha2.verify(app.config['RECAPTCHA_SECRET_KEY'],
+ recaptcha_response,
+ get_request_ip())
+
+ if not result['success']:
+ abort(400)
+
+ # Save that the captcha was verified.
+ session['captcha_verified'] = int(time.time())
+
+ # Redirect to the normal OAuth flow again, so that the user can now create an account.
+ csrf_token = generate_csrf_token(OAUTH_CSRF_TOKEN_NAME)
+ login_scopes = login_service.get_login_scopes()
+ auth_url = login_service.get_auth_url(url_scheme_and_hostname, '', csrf_token, login_scopes)
+ return redirect(auth_url)
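+ # On the next callback, the captcha_verified timestamp set above is checked (within
+ # the 600s window) by the _conduct_oauth_login call in callback_func.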
+
+ @require_session_login
+ @oauthlogin_csrf_protect
+ def cli_token_func():
+ # Check for a callback error.
+ error = request.values.get('error', None)
+ if error:
+ return _render_ologin_error(login_service.service_name(), error)
+
+ # Exchange the OAuth code for the ID token.
+ code = request.values.get('code')
+ try:
+ idtoken, _ = login_service.exchange_code_for_tokens(app.config, client, code, '/cli')
+ except OAuthLoginException as ole:
+ return _render_ologin_error(login_service.service_name(), str(ole))
+
+ user_obj = get_authenticated_user()
+ return redirect(url_for('web.user_view', path=user_obj.username, tab='settings',
+ idtoken=idtoken))
+
+ oauthlogin.add_url_rule('/%s/callback/captcha' % login_service.service_id(),
+ '%s_oauth_captcha' % login_service.service_id(),
+ captcha_func,
+ methods=['POST'])
+
+ oauthlogin.add_url_rule('/%s/callback' % login_service.service_id(),
+ '%s_oauth_callback' % login_service.service_id(),
+ callback_func,
+ methods=['GET', 'POST'])
+
+ oauthlogin.add_url_rule('/%s/callback/attach' % login_service.service_id(),
+ '%s_oauth_attach' % login_service.service_id(),
+ attach_func,
+ methods=['GET', 'POST'])
+
+ oauthlogin.add_url_rule('/%s/callback/cli' % login_service.service_id(),
+ '%s_oauth_cli' % login_service.service_id(),
+ cli_token_func,
+ methods=['GET', 'POST'])
+
+# Register the routes for each of the login services.
+for current_service in oauth_login.services:
+ _register_service(current_service)
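+
+# For a service with id 'github', for example, the rules above register
+# /github/callback, /github/callback/attach, /github/callback/cli and
+# /github/callback/captcha.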
diff --git a/endpoints/oauth/test/test_login.py b/endpoints/oauth/test/test_login.py
new file mode 100644
index 000000000..12a26e7ee
--- /dev/null
+++ b/endpoints/oauth/test/test_login.py
@@ -0,0 +1,222 @@
+import pytest
+
+from mock import patch
+
+from data import model, database
+from data.users import get_users_handler, DatabaseUsers
+from endpoints.oauth.login import _conduct_oauth_login
+from oauth.services.github import GithubOAuthService
+from test.test_ldap import mock_ldap
+
+from test.fixtures import *
+
+@pytest.fixture(params=[None, 'username', 'email'])
+def login_service(request, app):
+ config = {'GITHUB': {}}
+ if request.param is not None:
+ config['GITHUB']['LOGIN_BINDING_FIELD'] = request.param
+
+ return GithubOAuthService(config, 'GITHUB')
+
+
+@pytest.fixture(params=['Database', 'LDAP'])
+def auth_system(request):
+ return _get_users_handler(request.param)
+
+def _get_users_handler(auth_type):
+ config = {}
+ config['AUTHENTICATION_TYPE'] = auth_type
+ config['LDAP_BASE_DN'] = ['dc=quay', 'dc=io']
+ config['LDAP_ADMIN_DN'] = 'uid=testy,ou=employees,dc=quay,dc=io'
+ config['LDAP_ADMIN_PASSWD'] = 'password'
+ config['LDAP_USER_RDN'] = ['ou=employees']
+
+ return get_users_handler(config, None, None)
+
+def test_existing_account(auth_system, login_service):
+ login_service_lid = 'someexternaluser'
+
+ # Create an existing bound federated user.
+ created_user = model.user.create_federated_user('someuser', 'example@example.com',
+ login_service.service_id(),
+ login_service_lid, False)
+ existing_user_count = database.User.select().count()
+
+ with mock_ldap():
+ result = _conduct_oauth_login(auth_system, login_service,
+ login_service_lid, login_service_lid,
+ 'example@example.com')
+
+ assert result.user_obj == created_user
+
+ # Ensure that no additional users were created.
+ current_user_count = database.User.select().count()
+ assert current_user_count == existing_user_count
+
+
+def test_new_account_via_database(login_service):
+ existing_user_count = database.User.select().count()
+ login_service_lid = 'someexternaluser'
+ internal_auth = DatabaseUsers()
+
+ # Conduct login. Since the external user doesn't (yet) bind to a user in the database,
+ # a new user should be created and bound to the external service.
+ result = _conduct_oauth_login(internal_auth, login_service, login_service_lid, login_service_lid,
+ 'example@example.com')
+ assert result.user_obj is not None
+
+ current_user_count = database.User.select().count()
+ assert current_user_count == existing_user_count + 1
+
+ # Find the user and ensure it is bound.
+ new_user = model.user.get_user(login_service_lid)
+ federated_login = model.user.lookup_federated_login(new_user, login_service.service_id())
+ assert federated_login is not None
+
+ # Ensure that a notification was created.
+ assert list(model.notification.list_notifications(result.user_obj,
+ kind_name='password_required'))
+
+@pytest.mark.parametrize('open_creation, invite_only, has_invite, expect_success', [
+ # Open creation -> Success!
+ (True, False, False, True),
+
+ # Open creation + invite only + no invite -> Failure!
+ (True, True, False, False),
+
+ # Open creation + invite only + invite -> Success!
+ (True, True, True, True),
+
+ # Closed creation -> Failure!
+ (False, False, False, False),
+])
+def test_flagged_user_creation(open_creation, invite_only, has_invite, expect_success, login_service):
+ login_service_lid = 'someexternaluser'
+ email = 'some@example.com'
+
+ if has_invite:
+ inviter = model.user.get_user('devtable')
+ team = model.team.get_organization_team('buynlarge', 'owners')
+ model.team.add_or_invite_to_team(inviter, team, email=email)
+
+ internal_auth = DatabaseUsers()
+
+ with patch('features.USER_CREATION', open_creation):
+ with patch('features.INVITE_ONLY_USER_CREATION', invite_only):
+ # Conduct login.
+ result = _conduct_oauth_login(internal_auth, login_service, login_service_lid, login_service_lid,
+ email)
+ assert (result.user_obj is not None) == expect_success
+ assert (result.error_message is None) == expect_success
+
+@pytest.mark.parametrize('binding_field, lid, lusername, lemail, expected_error', [
+ # No binding field + unknown username -> New unlinked user.
+ (None, 'someid', 'someunknownuser', 'someemail@example.com', None),
+
+ # sub binding field + unknown sub -> Error.
+ ('sub', 'someid', 'someuser', 'foo@bar.com',
+ 'sub someid not found in backing auth system'),
+
+ # username binding field + unknown username -> Error.
+ ('username', 'someid', 'someunknownuser', 'foo@bar.com',
+ 'username someunknownuser not found in backing auth system'),
+
+ # email binding field + unknown email address -> Error.
+ ('email', 'someid', 'someuser', 'someemail@example.com',
+ 'email someemail@example.com not found in backing auth system'),
+
+ # No binding field + username that exists in LDAP -> New user, not linked to LDAP.
+ (None, 'someid', 'someuser', 'foo@bar.com', None),
+
+ # username binding field + valid username -> fully bound user.
+ ('username', 'someid', 'someuser', 'foo@bar.com', None),
+
+ # sub binding field + valid sub -> fully bound user.
+ ('sub', 'someuser', 'someusername', 'foo@bar.com', None),
+
+ # email binding field + valid email -> fully bound user.
+ ('email', 'someid', 'someuser', 'foo@bar.com', None),
+
+ # username binding field + valid username + invalid email -> fully bound user.
+ ('username', 'someid', 'someuser', 'another@email.com', None),
+
+ # email binding field + valid email + invalid username -> fully bound user.
+ ('email', 'someid', 'someotherusername', 'foo@bar.com', None),
+])
+def test_new_account_via_ldap(binding_field, lid, lusername, lemail, expected_error, app):
+ existing_user_count = database.User.select().count()
+
+ config = {'GITHUB': {}}
+ if binding_field is not None:
+ config['GITHUB']['LOGIN_BINDING_FIELD'] = binding_field
+
+ external_auth = GithubOAuthService(config, 'GITHUB')
+ internal_auth = _get_users_handler('LDAP')
+
+ with mock_ldap():
+ # Conduct OAuth login.
+ result = _conduct_oauth_login(internal_auth, external_auth, lid, lusername, lemail)
+ assert result.error_message == expected_error
+
+ current_user_count = database.User.select().count()
+ if expected_error is None:
+ # Ensure that the new user was created and that it is bound to both the
+ # external login service and to LDAP (if a binding_field was given).
+ assert current_user_count == existing_user_count + 1
+ assert result.user_obj is not None
+
+ # Check the service bindings.
+ external_login = model.user.lookup_federated_login(result.user_obj,
+ external_auth.service_id())
+ assert external_login is not None
+
+ internal_login = model.user.lookup_federated_login(result.user_obj,
+ internal_auth.federated_service)
+ if binding_field is not None:
+ assert internal_login is not None
+ else:
+ assert internal_login is None
+
+ # Ensure that no notification was created.
+ assert not list(model.notification.list_notifications(result.user_obj,
+ kind_name='password_required'))
+ else:
+ # Ensure that no additional users were created.
+ assert current_user_count == existing_user_count
+
+
+def test_existing_account_in_ldap(app):
+ config = {'GITHUB': {'LOGIN_BINDING_FIELD': 'username'}}
+
+ external_auth = GithubOAuthService(config, 'GITHUB')
+ internal_auth = _get_users_handler('LDAP')
+
+ # Add an existing federated user bound to the LDAP account associated with `someuser`.
+ bound_user = model.user.create_federated_user('someuser', 'foo@bar.com',
+ internal_auth.federated_service, 'someuser', False)
+
+ existing_user_count = database.User.select().count()
+
+ with mock_ldap():
+ # Conduct OAuth login with the same lid and bound field. This should find the existing LDAP
+ # user (via the `username` binding), and then bind GitHub to it as well.
+ result = _conduct_oauth_login(internal_auth, external_auth, bound_user.username,
+ bound_user.username, bound_user.email)
+ assert result.error_message is None
+
+ # Ensure that the same user was returned, and that it is now bound to the GitHub account
+ # as well.
+ assert result.user_obj.id == bound_user.id
+
+ # Ensure that no additional users were created.
+ current_user_count = database.User.select().count()
+ assert current_user_count == existing_user_count
+
+ # Check the service bindings.
+ external_login = model.user.lookup_federated_login(result.user_obj,
+ external_auth.service_id())
+ assert external_login is not None
+
+ internal_login = model.user.lookup_federated_login(result.user_obj,
+ internal_auth.federated_service)
+ assert internal_login is not None
diff --git a/endpoints/oauthlogin.py b/endpoints/oauthlogin.py
deleted file mode 100644
index 17cb6da20..000000000
--- a/endpoints/oauthlogin.py
+++ /dev/null
@@ -1,371 +0,0 @@
-import logging
-import requests
-
-from flask import request, redirect, url_for, Blueprint
-from flask_login import current_user
-from peewee import IntegrityError
-
-import features
-
-from app import app, analytics, get_app_url, github_login, google_login, dex_login
-from auth.process import require_session_login
-from data import model
-from endpoints.common import common_login, route_show_if
-from endpoints.web import index
-from endpoints.csrf import csrf_protect, OAUTH_CSRF_TOKEN_NAME
-from util.security.jwtutil import decode, InvalidTokenError
-from util.validation import generate_valid_usernames
-
-logger = logging.getLogger(__name__)
-client = app.config['HTTPCLIENT']
-oauthlogin = Blueprint('oauthlogin', __name__)
-
-oauthlogin_csrf_protect = csrf_protect(OAUTH_CSRF_TOKEN_NAME, 'state', all_methods=True)
-
-def render_ologin_error(service_name, error_message=None, register_redirect=False):
- user_creation = bool(features.USER_CREATION and features.DIRECT_LOGIN)
- error_info = {
- 'reason': 'ologinerror',
- 'service_name': service_name,
- 'error_message': error_message or 'Could not load user data. The token may have expired',
- 'service_url': get_app_url(),
- 'user_creation': user_creation,
- 'register_redirect': register_redirect,
- }
-
- resp = index('', error_info=error_info)
- resp.status_code = 400
- return resp
-
-
-def get_user(service, token):
- token_param = {
- 'access_token': token,
- 'alt': 'json',
- }
- got_user = client.get(service.user_endpoint(), params=token_param)
- if got_user.status_code != requests.codes.ok:
- return {}
-
- return got_user.json()
-
-
-def conduct_oauth_login(service, user_id, username, email, metadata=None):
- service_name = service.service_name()
- to_login = model.user.verify_federated_login(service_name.lower(), user_id)
- if not to_login:
- # See if we can create a new user.
- if not features.USER_CREATION:
- error_message = 'User creation is disabled. Please contact your administrator'
- return render_ologin_error(service_name, error_message)
-
- # Try to create the user
- try:
- new_username = None
- for valid in generate_valid_usernames(username):
- if model.user.get_user_or_org(valid):
- continue
-
- new_username = valid
- break
-
- prompts = model.user.get_default_user_prompts(features)
- to_login = model.user.create_federated_user(new_username, email, service_name.lower(),
- user_id, set_password_notification=True,
- metadata=metadata or {},
- prompts=prompts)
-
- # Success, tell analytics
- analytics.track(to_login.username, 'register', {'service': service_name.lower()})
-
- except model.InvalidEmailAddressException:
- message = "The e-mail address %s is already associated " % (email, )
- message = message + "with an existing %s account." % (app.config['REGISTRY_TITLE_SHORT'], )
- message = message + "\nPlease log in with your username and password and "
- message = message + "associate your %s account to use it in the future." % (service_name, )
-
- return render_ologin_error(service_name, message, register_redirect=True)
-
- except model.DataModelException as ex:
- return render_ologin_error(service_name, ex.message)
-
- if common_login(to_login):
- if model.user.has_user_prompts(to_login):
- return redirect(url_for('web.updateuser'))
- else:
- return redirect(url_for('web.index'))
-
- return render_ologin_error(service_name)
-
-
-def get_email_username(user_data):
- username = user_data['email']
- at = username.find('@')
- if at > 0:
- username = username[0:at]
-
- return username
-
-
-@oauthlogin.route('/google/callback', methods=['GET'])
-@route_show_if(features.GOOGLE_LOGIN)
-@oauthlogin_csrf_protect
-def google_oauth_callback():
- error = request.args.get('error', None)
- if error:
- return render_ologin_error('Google', error)
-
- code = request.args.get('code')
- token = google_login.exchange_code_for_token(app.config, client, code, form_encode=True)
- if token is None:
- return render_ologin_error('Google')
-
- user_data = get_user(google_login, token)
- if not user_data or not user_data.get('id', None) or not user_data.get('email', None):
- return render_ologin_error('Google')
-
- if not user_data.get('verified_email', False):
- return render_ologin_error(
- 'Google',
- 'A verified e-mail address is required for login. Please verify your ' +
- 'e-mail address in Google and try again.',
- )
-
- username = get_email_username(user_data)
- metadata = {
- 'service_username': user_data['email']
- }
-
- return conduct_oauth_login(google_login, user_data['id'], username, user_data['email'],
- metadata=metadata)
-
-
-@oauthlogin.route('/github/callback', methods=['GET'])
-@route_show_if(features.GITHUB_LOGIN)
-@oauthlogin_csrf_protect
-def github_oauth_callback():
- error = request.args.get('error', None)
- if error:
- return render_ologin_error('GitHub', error)
-
- # Exchange the OAuth code.
- code = request.args.get('code')
- token = github_login.exchange_code_for_token(app.config, client, code)
- if token is None:
- return render_ologin_error('GitHub')
-
- # Retrieve the user's information.
- user_data = get_user(github_login, token)
- if not user_data or 'login' not in user_data:
- return render_ologin_error('GitHub')
-
- username = user_data['login']
- github_id = user_data['id']
-
- v3_media_type = {
- 'Accept': 'application/vnd.github.v3'
- }
-
- token_param = {
- 'access_token': token,
- }
-
- # Retrieve the user's orgnizations (if organization filtering is turned on)
- if github_login.allowed_organizations() is not None:
- get_orgs = client.get(github_login.orgs_endpoint(), params=token_param,
- headers={'Accept': 'application/vnd.github.moondragon+json'})
-
- organizations = set([org.get('login').lower() for org in get_orgs.json()])
- matching_organizations = organizations & set(github_login.allowed_organizations())
- if not matching_organizations:
- err = """You are not a member of an allowed GitHub organization.
- Please contact your system administrator if you believe this is in error."""
- return render_ologin_error('GitHub', err)
-
- # Find the e-mail address for the user: we will accept any email, but we prefer the primary
- get_email = client.get(github_login.email_endpoint(), params=token_param,
- headers=v3_media_type)
- if get_email.status_code / 100 != 2:
- return render_ologin_error('GitHub')
-
- found_email = None
- for user_email in get_email.json():
- if not github_login.is_enterprise() and not user_email['verified']:
- continue
-
- found_email = user_email['email']
- if user_email['primary']:
- break
-
- if found_email is None:
- err = 'There is no verified e-mail address attached to the GitHub account.'
- return render_ologin_error('GitHub', err)
-
- metadata = {
- 'service_username': username
- }
-
- return conduct_oauth_login(github_login, github_id, username, found_email, metadata=metadata)
-
-
-@oauthlogin.route('/google/callback/attach', methods=['GET'])
-@route_show_if(features.GOOGLE_LOGIN)
-@require_session_login
-@oauthlogin_csrf_protect
-def google_oauth_attach():
- code = request.args.get('code')
- token = google_login.exchange_code_for_token(app.config, client, code,
- redirect_suffix='/attach', form_encode=True)
- if token is None:
- return render_ologin_error('Google')
-
- user_data = get_user(google_login, token)
- if not user_data or not user_data.get('id', None):
- return render_ologin_error('Google')
-
- if not user_data.get('verified_email', False):
- return render_ologin_error(
- 'Google',
- 'A verified e-mail address is required for login. Please verify your ' +
- 'e-mail address in Google and try again.',
- )
-
- google_id = user_data['id']
- user_obj = current_user.db_user()
-
- username = get_email_username(user_data)
- metadata = {
- 'service_username': user_data['email']
- }
-
- try:
- model.user.attach_federated_login(user_obj, 'google', google_id, metadata=metadata)
- except IntegrityError:
- err = 'Google account %s is already attached to a %s account' % (
- username, app.config['REGISTRY_TITLE_SHORT'])
- return render_ologin_error('Google', err)
-
- return redirect(url_for('web.user_view', path=user_obj.username, tab='external'))
-
-
-@oauthlogin.route('/github/callback/attach', methods=['GET'])
-@route_show_if(features.GITHUB_LOGIN)
-@require_session_login
-@oauthlogin_csrf_protect
-def github_oauth_attach():
- code = request.args.get('code')
- token = github_login.exchange_code_for_token(app.config, client, code)
- if token is None:
- return render_ologin_error('GitHub')
-
- user_data = get_user(github_login, token)
- if not user_data:
- return render_ologin_error('GitHub')
-
- github_id = user_data['id']
- user_obj = current_user.db_user()
-
- username = user_data['login']
- metadata = {
- 'service_username': username
- }
-
- try:
- model.user.attach_federated_login(user_obj, 'github', github_id, metadata=metadata)
- except IntegrityError:
- err = 'Github account %s is already attached to a %s account' % (
- username, app.config['REGISTRY_TITLE_SHORT'])
-
- return render_ologin_error('GitHub', err)
-
- return redirect(url_for('web.user_view', path=user_obj.username, tab='external'))
-
-
-def decode_user_jwt(token, oidc_provider):
- try:
- return decode(token, oidc_provider.get_public_key(), algorithms=['RS256'],
- audience=oidc_provider.client_id(),
- issuer=oidc_provider.issuer)
- except InvalidTokenError:
- # Public key may have expired. Try to retrieve an updated public key and use it to decode.
- return decode(token, oidc_provider.get_public_key(force_refresh=True), algorithms=['RS256'],
- audience=oidc_provider.client_id(),
- issuer=oidc_provider.issuer)
-
-
-@oauthlogin.route('/dex/callback', methods=['GET', 'POST'])
-@route_show_if(features.DEX_LOGIN)
-@oauthlogin_csrf_protect
-def dex_oauth_callback():
- error = request.values.get('error', None)
- if error:
- return render_ologin_error(dex_login.public_title, error)
-
- code = request.values.get('code')
- if not code:
- return render_ologin_error(dex_login.public_title, 'Missing OAuth code')
-
- token = dex_login.exchange_code_for_token(app.config, client, code, client_auth=True,
- form_encode=True)
- if token is None:
- return render_ologin_error(dex_login.public_title)
-
- try:
- payload = decode_user_jwt(token, dex_login)
- except InvalidTokenError:
- logger.exception('Exception when decoding returned JWT')
- return render_ologin_error(
- dex_login.public_title,
- 'Could not decode response. Please contact your system administrator about this error.',
- )
-
- username = get_email_username(payload)
- metadata = {}
-
- dex_id = payload['sub']
- email_address = payload['email']
-
- if not payload.get('email_verified', False):
- return render_ologin_error(
- dex_login.public_title,
- 'A verified e-mail address is required for login. Please verify your ' +
- 'e-mail address in %s and try again.' % dex_login.public_title,
- )
-
-
- return conduct_oauth_login(dex_login, dex_id, username, email_address,
- metadata=metadata)
-
-
-@oauthlogin.route('/dex/callback/attach', methods=['GET', 'POST'])
-@route_show_if(features.DEX_LOGIN)
-@require_session_login
-@oauthlogin_csrf_protect
-def dex_oauth_attach():
- code = request.args.get('code')
- token = dex_login.exchange_code_for_token(app.config, client, code, redirect_suffix='/attach',
- client_auth=True, form_encode=True)
- if token is None:
- return render_ologin_error(dex_login.public_title)
-
- try:
- payload = decode_user_jwt(token, dex_login)
- except InvalidTokenError:
- logger.exception('Exception when decoding returned JWT')
- return render_ologin_error(
- dex_login.public_title,
- 'Could not decode response. Please contact your system administrator about this error.',
- )
-
- user_obj = current_user.db_user()
- dex_id = payload['sub']
- metadata = {}
-
- try:
- model.user.attach_federated_login(user_obj, 'dex', dex_id, metadata=metadata)
- except IntegrityError:
- err = '%s account is already attached to a %s account' % (dex_login.public_title,
- app.config['REGISTRY_TITLE_SHORT'])
- return render_ologin_error(dex_login.public_title, err)
-
- return redirect(url_for('web.user_view', path=user_obj.username, tab='external'))
diff --git a/endpoints/realtime.py b/endpoints/realtime.py
index cc113da0c..9112b8146 100644
--- a/endpoints/realtime.py
+++ b/endpoints/realtime.py
@@ -5,7 +5,7 @@ from flask import request, Blueprint, abort, Response
from flask_login import current_user
from app import userevents
-from auth.process import require_session_login
+from auth.decorators import require_session_login
from data.userevent import CannotReadUserEventsException
@@ -53,12 +53,17 @@ def user_test():
@require_session_login
def user_subscribe():
def wrapper(listener):
- yield 'data: %s\n\n' % json.dumps({})
+ logger.debug('Beginning streaming of user events')
+ try:
+ yield 'data: %s\n\n' % json.dumps({})
- for event_id, data in listener.event_stream():
- message = {'event': event_id, 'data': data}
- json_string = json.dumps(message)
- yield 'data: %s\n\n' % json_string
+ for event_id, data in listener.event_stream():
+ message = {'event': event_id, 'data': data}
+ json_string = json.dumps(message)
+ yield 'data: %s\n\n' % json_string
+ finally:
+ logger.debug('Closing listener due to stream termination')
+ listener.stop()
events = request.args.get('events', '').split(',')
if not events:
@@ -69,4 +74,10 @@ def user_subscribe():
except CannotReadUserEventsException:
abort(504)
- return Response(wrapper(listener), mimetype="text/event-stream")
+ def on_close():
+ logger.debug('Closing listener due to response close')
+ listener.stop()
+
+ r = Response(wrapper(listener), mimetype="text/event-stream")
+ r.call_on_close(on_close)
+ return r
diff --git a/endpoints/secscan.py b/endpoints/secscan.py
index 6ce803e1e..1f49bf23c 100644
--- a/endpoints/secscan.py
+++ b/endpoints/secscan.py
@@ -5,7 +5,7 @@ import features
from app import secscan_notification_queue
from flask import request, make_response, Blueprint, abort
-from endpoints.common import route_show_if
+from endpoints.decorators import route_show_if, anon_allowed
logger = logging.getLogger(__name__)
secscan = Blueprint('secscan', __name__)
@@ -25,3 +25,9 @@ def secscan_notification():
secscan_notification_queue.put(name, json.dumps(notification))
return make_response('Okay')
+
+
+@secscan.route('/_internal_ping')
+@anon_allowed
+def internal_ping():
+ return make_response('true', 200)
diff --git a/endpoints/test/__init__.py b/endpoints/test/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/endpoints/test/shared.py b/endpoints/test/shared.py
new file mode 100644
index 000000000..fa6430445
--- /dev/null
+++ b/endpoints/test/shared.py
@@ -0,0 +1,80 @@
+import datetime
+import json
+import base64
+
+from contextlib import contextmanager
+from data import model
+
+from flask import g
+from flask_principal import Identity
+
+CSRF_TOKEN_KEY = '_csrf_token'
+
+@contextmanager
+def client_with_identity(auth_username, client):
+ with client.session_transaction() as sess:
+ if auth_username is not None:
+ loaded = model.user.get_user(auth_username)
+ sess['user_id'] = loaded.uuid
+ sess['login_time'] = datetime.datetime.now()
+ else:
+ sess['user_id'] = 'anonymous'
+
+ yield client
+
+ with client.session_transaction() as sess:
+ sess['user_id'] = None
+ sess['login_time'] = None
+ sess[CSRF_TOKEN_KEY] = None
+
+
+@contextmanager
+def toggle_feature(name, enabled):
+ """ Context manager which temporarily toggles a feature. """
+ import features
+ previous_value = getattr(features, name)
+ setattr(features, name, enabled)
+ try:
+   yield
+ finally:
+   setattr(features, name, previous_value)
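+
+# Usage (sketch):
+#   with toggle_feature('USER_CREATION', False):
+#     ... exercise the feature-gated code path ...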
+
+
+def add_csrf_param(client, params):
+ """ Returns a params dict with the CSRF parameter added. """
+ params = params or {}
+
+ with client.session_transaction() as sess:
+ params[CSRF_TOKEN_KEY] = 'sometoken'
+ sess[CSRF_TOKEN_KEY] = 'sometoken'
+
+ return params
+
+
+def gen_basic_auth(username, password):
+ """ Generates a basic auth header. """
+ return 'Basic ' + base64.b64encode("%s:%s" % (username, password))
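+
+# For example, with the devtable test credentials used elsewhere in these tests:
+#   gen_basic_auth('devtable', 'password') == 'Basic ZGV2dGFibGU6cGFzc3dvcmQ='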
+
+
+def conduct_call(client, resource, url_for, method, params, body=None, expected_code=200,
+ headers=None, raw_body=None):
+ """ Conducts a call to a Flask endpoint. """
+ params = add_csrf_param(client, params)
+
+ final_url = url_for(resource, **params)
+
+ headers = headers or {}
+ headers.update({"Content-Type": "application/json"})
+
+ if body is not None:
+ body = json.dumps(body)
+
+ if raw_body is not None:
+ body = raw_body
+
+ # Required so that anonymous calls do not raise an exception.
+ g.identity = Identity(None, 'none')
+
+ rv = client.open(final_url, method=method, data=body, headers=headers)
+ msg = '%s %s: got %s expected: %s | %s' % (method, final_url, rv.status_code, expected_code,
+ rv.data)
+ assert rv.status_code == expected_code, msg
+ return rv
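+
+# Typical call from a test (mirrors test_webhooks.py below):
+#   conduct_call(client, 'webhooks.build_trigger_webhook', url_for, 'POST', params,
+#                None, 400, headers=headers)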
diff --git a/endpoints/test/test_anon_checked.py b/endpoints/test/test_anon_checked.py
new file mode 100644
index 000000000..94cc9b9aa
--- /dev/null
+++ b/endpoints/test/test_anon_checked.py
@@ -0,0 +1,27 @@
+import pytest
+
+from app import app
+from endpoints.v1 import v1_bp
+from endpoints.v2 import v2_bp
+from endpoints.verbs import verbs
+
+@pytest.mark.parametrize('blueprint', [
+ v2_bp,
+ v1_bp,
+ verbs,
+])
+def test_verify_blueprint(blueprint):
+ class Checker(object):
+ def __init__(self):
+ self.first_registration = True
+ self.app = app
+
+ def add_url_rule(self, rule, endpoint, view_function, methods=None):
+ result = ('__anon_protected' in dir(view_function) or
+ '__anon_allowed' in dir(view_function))
+ error_message = ('Missing anonymous access protection decorator on function ' +
+ '%s under blueprint %s' % (endpoint, blueprint.name))
+ assert result, error_message
+
+ for deferred_function in blueprint.deferred_functions:
+ deferred_function(Checker())
diff --git a/endpoints/test/test_building.py b/endpoints/test/test_building.py
new file mode 100644
index 000000000..222149785
--- /dev/null
+++ b/endpoints/test/test_building.py
@@ -0,0 +1,97 @@
+import pytest
+
+from data import model
+from buildtrigger.triggerutil import raise_if_skipped_build, SkipRequestException
+from endpoints.building import (start_build, PreparedBuild, MaximumBuildsQueuedException,
+ BuildTriggerDisabledException)
+
+from test.fixtures import *
+
+def test_maximum_builds(app):
+ # Change the maximum number of builds to 1.
+ user = model.user.create_user('foobar', 'password', 'foo@example.com')
+ user.maximum_queued_builds_count = 1
+ user.save()
+
+ repo = model.repository.create_repository('foobar', 'somerepo', user)
+
+ # Try to queue a build; should succeed.
+ prepared_build = PreparedBuild()
+ prepared_build.build_name = 'foo'
+ prepared_build.is_manual = True
+ prepared_build.dockerfile_id = 'foobar'
+ prepared_build.archive_url = 'someurl'
+ prepared_build.tags = ['latest']
+ prepared_build.subdirectory = '/'
+ prepared_build.context = '/'
+ prepared_build.metadata = {}
+
+ start_build(repo, prepared_build)
+
+ # Try to queue a second build; should fail.
+ with pytest.raises(MaximumBuildsQueuedException):
+ start_build(repo, prepared_build)
+
+
+def test_start_build_disabled_trigger(app):
+ trigger = model.build.list_build_triggers('devtable', 'building')[0]
+ trigger.enabled = False
+ trigger.save()
+
+ build = PreparedBuild(trigger=trigger)
+
+ with pytest.raises(BuildTriggerDisabledException):
+ start_build(trigger.repository, build)
+
+
+@pytest.mark.parametrize('ref, expected_tags', [
+ ('ref/heads/somebranch', ['somebranch']),
+ ('ref/heads/master', ['master', 'latest']),
+
+ ('ref/tags/somebranch', ['somebranch']),
+ ('ref/tags/master', ['master', 'latest']),
+
+ ('ref/heads/slash/branch', ['slash_branch']),
+ ('ref/tags/slash/tag', ['slash_tag']),
+
+ ('ref/heads/foobar#2', ['foobar_2']),
+])
+def test_tags_for_ref(ref, expected_tags):
+ prepared = PreparedBuild()
+ prepared.tags_from_ref(ref, default_branch='master')
+ assert set(prepared._tags) == set(expected_tags)
+
+
+@pytest.mark.parametrize('metadata, config', [
+ ({}, {}),
+ pytest.param({'ref': 'ref/heads/master'}, {'branchtag_regex': 'nothing'}, id='branchtag regex'),
+ pytest.param({
+ 'ref': 'ref/heads/master',
+ 'commit_info': {
+ 'message': '[skip build]',
+ },
+ }, {}, id='commit message'),
+])
+def test_skip(metadata, config):
+ prepared = PreparedBuild()
+ prepared.metadata = metadata
+
+ with pytest.raises(SkipRequestException):
+ raise_if_skipped_build(prepared, config)
+
+
+def test_does_not_skip():
+ prepared = PreparedBuild()
+ prepared.metadata = {
+ 'ref': 'ref/heads/master',
+ 'commit_info': {
+ 'message': 'some cool message',
+ },
+ }
+
+ config = {
+ 'branchtag_regex': '(master)|(heads/master)',
+ }
+
+ raise_if_skipped_build(prepared, config)
diff --git a/endpoints/test/test_common.py b/endpoints/test/test_common.py
new file mode 100644
index 000000000..d99033cde
--- /dev/null
+++ b/endpoints/test/test_common.py
@@ -0,0 +1,29 @@
+import pytest
+
+from endpoints.common import common_login
+from endpoints.csrf import QUAY_CSRF_UPDATED_HEADER_NAME
+
+from test.fixtures import *
+from endpoints.common_models_pre_oci import pre_oci_model as model
+
+@pytest.mark.parametrize('username, expect_success', [
+ # Valid users.
+ ('devtable', True),
+ ('public', True),
+
+ # Org.
+ ('buynlarge', False),
+
+ # Robot.
+ ('devtable+dtrobot', False),
+
+ # Unverified user.
+ ('unverified', False),
+])
+def test_common_login(username, expect_success, app):
+ uuid = model.get_namespace_uuid(username)
+ with app.app_context():
+ success, headers = common_login(uuid)
+ assert success == expect_success
+ if success:
+ assert QUAY_CSRF_UPDATED_HEADER_NAME in headers
diff --git a/endpoints/test/test_decorators.py b/endpoints/test/test_decorators.py
new file mode 100644
index 000000000..e7866e25d
--- /dev/null
+++ b/endpoints/test/test_decorators.py
@@ -0,0 +1,35 @@
+from data import model
+from endpoints.api import api
+from endpoints.api.repository import Repository
+from endpoints.test.shared import conduct_call
+from test.fixtures import *
+
+
+@pytest.mark.parametrize('user_agent, include_header, expected_code', [
+ ('curl/whatever', True, 200),
+ ('curl/whatever', False, 200),
+
+ ('Mozilla/whatever', True, 200),
+ ('Mozilla/5.0', True, 200),
+ ('Mozilla/5.0 (Windows NT 5.1; Win64; x64)', False, 400),
+])
+def test_require_xhr_from_browser(user_agent, include_header, expected_code, app, client):
+ # Create a public repo with a dot in its name.
+ user = model.user.get_user('devtable')
+ model.repository.create_repository('devtable', 'somerepo.bat', user, 'public')
+
+ # Retrieve the repository and ensure we either allow it through or fail, depending on the
+ # user agent and header.
+ params = {
+ 'repository': 'devtable/somerepo.bat'
+ }
+
+ headers = {
+ 'User-Agent': user_agent,
+ }
+
+ if include_header:
+ headers['X-Requested-With'] = 'XMLHttpRequest'
+
+ conduct_call(client, Repository, api.url_for, 'GET', params, headers=headers,
+ expected_code=expected_code)
diff --git a/endpoints/test/test_webhooks.py b/endpoints/test/test_webhooks.py
new file mode 100644
index 000000000..1061f106b
--- /dev/null
+++ b/endpoints/test/test_webhooks.py
@@ -0,0 +1,24 @@
+import base64
+import pytest
+
+from flask import url_for
+
+from data import model
+from endpoints.test.shared import conduct_call
+from test.fixtures import *
+
+def test_start_build_disabled_trigger(app, client):
+ trigger = model.build.list_build_triggers('devtable', 'building')[0]
+ trigger.enabled = False
+ trigger.save()
+
+ params = {
+ 'trigger_uuid': trigger.uuid,
+ }
+
+ headers = {
+ 'Authorization': 'Basic ' + base64.b64encode('devtable:password'),
+ }
+
+ conduct_call(client, 'webhooks.build_trigger_webhook', url_for, 'POST', params, None, 400,
+ headers=headers)
diff --git a/endpoints/trackhelper.py b/endpoints/trackhelper.py
deleted file mode 100644
index 0aa66cefa..000000000
--- a/endpoints/trackhelper.py
+++ /dev/null
@@ -1,92 +0,0 @@
-import logging
-import random
-
-from urlparse import urlparse
-
-from flask import request
-
-from app import analytics, userevents
-from data import model
-from auth.registry_jwt_auth import get_granted_entity
-from auth.auth_context import (get_authenticated_user, get_validated_token,
- get_validated_oauth_token)
-
-logger = logging.getLogger(__name__)
-
-def track_and_log(event_name, repo_obj, analytics_name=None, analytics_sample=1, **kwargs):
- repo_name = repo_obj.name
- namespace_name = repo_obj.namespace_name,
- metadata = {
- 'repo': repo_name,
- 'namespace': namespace_name,
- }
- metadata.update(kwargs)
-
- analytics_id = 'anonymous'
-
- authenticated_oauth_token = get_validated_oauth_token()
- authenticated_user = get_authenticated_user()
- authenticated_token = get_validated_token() if not authenticated_user else None
-
- if not authenticated_user and not authenticated_token and not authenticated_oauth_token:
- entity = get_granted_entity()
- if entity:
- authenticated_user = entity.user
- authenticated_token = entity.token
- authenticated_oauth_token = entity.oauth
-
- logger.debug('Logging the %s to Mixpanel and the log system', event_name)
- if authenticated_oauth_token:
- metadata['oauth_token_id'] = authenticated_oauth_token.id
- metadata['oauth_token_application_id'] = authenticated_oauth_token.application.client_id
- metadata['oauth_token_application'] = authenticated_oauth_token.application.name
- analytics_id = 'oauth:{0}'.format(authenticated_oauth_token.id)
- elif authenticated_user:
- metadata['username'] = authenticated_user.username
- analytics_id = authenticated_user.username
- elif authenticated_token:
- metadata['token'] = authenticated_token.friendly_name
- metadata['token_code'] = authenticated_token.code
-
- if authenticated_token.kind:
- metadata['token_type'] = authenticated_token.kind.name
-
- analytics_id = 'token:{0}'.format(authenticated_token.code)
- else:
- metadata['public'] = True
- analytics_id = 'anonymous'
-
- # Publish the user event (if applicable)
- logger.debug('Checking publishing %s to the user events system', event_name)
- if authenticated_user and not authenticated_user.robot:
- logger.debug('Publishing %s to the user events system', event_name)
- user_event_data = {
- 'action': event_name,
- 'repository': repo_name,
- 'namespace': namespace_name,
- }
-
- event = userevents.get_event(authenticated_user.username)
- event.publish_event_data('docker-cli', user_event_data)
-
- # Save the action to mixpanel.
- if random.random() < analytics_sample:
- if analytics_name is None:
- analytics_name = event_name
-
- logger.debug('Logging the %s to Mixpanel', analytics_name)
-
- request_parsed = urlparse(request.url_root)
- extra_params = {
- 'repository': '%s/%s' % (namespace_name, repo_name),
- 'user-agent': request.user_agent.string,
- 'hostname': request_parsed.hostname,
- }
-
- analytics.track(analytics_id, analytics_name, extra_params)
-
- # Log the action to the database.
- logger.debug('Logging the %s to logs system', event_name)
- model.log.log_action(event_name, namespace_name, performer=authenticated_user,
- ip=request.remote_addr, metadata=metadata, repository=repo_obj)
- logger.debug('Track and log of %s complete', event_name)
diff --git a/endpoints/v1/__init__.py b/endpoints/v1/__init__.py
index 18ef430c4..2248222d2 100644
--- a/endpoints/v1/__init__.py
+++ b/endpoints/v1/__init__.py
@@ -1,14 +1,21 @@
-from flask import Blueprint, make_response
+import logging
-from app import metric_queue, license_validator
+from functools import wraps
+
+from flask import Blueprint, make_response, jsonify
+
+import features
+
+from app import metric_queue, app
+from data.readreplica import ReadOnlyModeException
from endpoints.decorators import anon_protect, anon_allowed
from util.metrics.metricqueue import time_blueprint
-
+from util.http import abort
v1_bp = Blueprint('v1', __name__)
-license_validator.enforce_license_before_request(v1_bp)
time_blueprint(v1_bp, metric_queue)
+logger = logging.getLogger(__name__)
# Note: This is *not* part of the Docker index spec. This is here for our own health check,
# since we have nginx handle the _ping below.
@@ -28,6 +35,42 @@ def ping():
return response
-from endpoints.v1 import index
-from endpoints.v1 import registry
-from endpoints.v1 import tag
+@v1_bp.app_errorhandler(ReadOnlyModeException)
+def handle_readonly(ex):
+ response = jsonify({'message': 'System is currently read-only. Pulls will succeed but all ' +
+ 'write operations are suspended.',
+ 'is_readonly': True})
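+ # 503 with an 'is_readonly' flag lets clients distinguish read-only mode from an outage.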
+ response.status_code = 503
+ return response
+
+
+def check_v1_push_enabled(namespace_name_kwarg='namespace_name'):
+ """ Decorator which checks if V1 push is enabled for the current namespace. The first argument
+ to the wrapped function must be the namespace name, or there must be a kwarg with the
+ configured name (`namespace_name` by default).
+ """
+ def wrapper(wrapped):
+ @wraps(wrapped)
+ def decorated(*args, **kwargs):
+ if namespace_name_kwarg in kwargs:
+ namespace_name = kwargs[namespace_name_kwarg]
+ else:
+ namespace_name = args[0]
+
+ if features.RESTRICTED_V1_PUSH:
+ whitelist = app.config.get('V1_PUSH_WHITELIST') or []
+ logger.debug('V1 push is restricted to whitelist: %s', whitelist)
+ if namespace_name not in whitelist:
+ abort(405,
+ message=('V1 push support has been deprecated. To enable for this ' +
+ 'namespace, please contact support.'))
+
+ return wrapped(*args, **kwargs)
+ return decorated
+ return wrapper
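+
+# Stacked below with the other push-path guards, e.g.:
+#   @parse_repository_name()
+#   @check_v1_push_enabled()
+#   @ensure_namespace_enabled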
+
+
+from endpoints.v1 import (
+ index,
+ registry,
+ tag,
+)
diff --git a/endpoints/v1/index.py b/endpoints/v1/index.py
index c4f815f98..3030b20e8 100644
--- a/endpoints/v1/index.py
+++ b/endpoints/v1/index.py
@@ -6,21 +6,24 @@ from functools import wraps
from flask import request, make_response, jsonify, session
-from data.interfaces.v1 import pre_oci_model as model
-from app import authentication, userevents, metric_queue
-from auth.auth_context import get_authenticated_user, get_validated_token, get_validated_oauth_token
-from auth.permissions import (ModifyRepositoryPermission, UserAdminPermission,
- ReadRepositoryPermission, CreateRepositoryPermission,
- repository_read_grant, repository_write_grant)
-from auth.process import process_auth, generate_signed_token
+from app import userevents, metric_queue, storage, docker_v2_signing_key
+from auth.auth_context import get_authenticated_context, get_authenticated_user
+from auth.credentials import validate_credentials, CredentialKind
+from auth.decorators import process_auth
+from auth.permissions import (
+ ModifyRepositoryPermission, UserAdminPermission, ReadRepositoryPermission,
+ CreateRepositoryPermission, repository_read_grant, repository_write_grant)
+from auth.signedgrant import generate_signed_token
+from data import model
+from data.registry_model import registry_model
+from data.registry_model.manifestbuilder import create_manifest_builder, lookup_manifest_builder
+from endpoints.decorators import (anon_protect, anon_allowed, parse_repository_name,
+ check_repository_state, check_readonly)
+from endpoints.v1 import v1_bp, check_v1_push_enabled
+from notifications import spawn_notification
+from util.audit import track_and_log
from util.http import abort
from util.names import REPOSITORY_NAME_REGEX
-from endpoints.common import parse_repository_name
-from endpoints.v1 import v1_bp
-from endpoints.trackhelper import track_and_log
-from endpoints.notificationhelper import spawn_notification
-from endpoints.decorators import anon_protect, anon_allowed
-
logger = logging.getLogger(__name__)
@@ -30,6 +33,18 @@ class GrantType(object):
WRITE_REPOSITORY = 'write'
+def ensure_namespace_enabled(f):
+ @wraps(f)
+ def wrapper(namespace_name, repo_name, *args, **kwargs):
+ namespace = model.user.get_namespace_user(namespace_name)
+ is_namespace_enabled = namespace is not None and namespace.enabled
+ if not is_namespace_enabled:
+ abort(400, message='Namespace is disabled. Please contact your system administrator.')
+
+ return f(namespace_name, repo_name, *args, **kwargs)
+ return wrapper
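+
+# Note: assumes the wrapped view receives (namespace_name, repo_name) as its first
+# two positional arguments, as the v1 routes below do.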
+
+
def generate_headers(scope=GrantType.READ_REPOSITORY, add_grant_for_status=None):
def decorator_method(f):
@wraps(f)
@@ -65,13 +80,16 @@ def generate_headers(scope=GrantType.READ_REPOSITORY, add_grant_for_status=None)
response.headers['X-Docker-Token'] = signature
return response
+
return wrapper
+
return decorator_method
@v1_bp.route('/users', methods=['POST'])
@v1_bp.route('/users/', methods=['POST'])
@anon_allowed
+@check_readonly
def create_user():
user_data = request.get_json()
if not user_data or not 'username' in user_data:
@@ -83,34 +101,32 @@ def create_user():
# UGH! we have to use this response when the login actually worked, in order
# to get the CLI to try again with a get, and then tell us login succeeded.
success = make_response('"Username or email already exists"', 400)
-  if username == '$token':
-    if model.load_token(password):
-      return success
-    abort(400, 'Invalid access token.', issue='invalid-access-token')
-  elif username == '$oauthtoken':
-    if model.validate_oauth_token(password):
-      return success
-    abort(400, 'Invalid oauth access token.', issue='invalid-oauth-access-token')
-  elif '+' in username:
-    if model.verify_robot(username, password):
-      return success
-    abort(400, 'Invalid robot account or password.', issue='robot-login-failure')
-  (verified, error_message) = authentication.verify_and_link_user(username, password,
-                                                                  basic_auth=True)
-  if verified:
+  result, kind = validate_credentials(username, password)
+  if not result.auth_valid:
+    if kind == CredentialKind.token:
+      abort(400, 'Invalid access token.', issue='invalid-access-token')
+    if kind == CredentialKind.robot:
+      abort(400, 'Invalid robot account or password.', issue='robot-login-failure')
+    if kind == CredentialKind.oauth_token:
+      abort(400, 'Invalid oauth access token.', issue='invalid-oauth-access-token')
+    if kind == CredentialKind.user:
+      # Mark that the login failed.
+      event = userevents.get_event(username)
+      event.publish_event_data('docker-cli', {'action': 'loginfailure'})
+      abort(400, result.error_message, issue='login-failure')
+    # Default case: Just fail.
+    abort(400, result.error_message, issue='login-failure')
+
+  if result.has_nonrobot_user:
# Mark that the user was logged in.
event = userevents.get_event(username)
event.publish_event_data('docker-cli', {'action': 'login'})
- return success
- else:
- # Mark that the login failed.
- event = userevents.get_event(username)
- event.publish_event_data('docker-cli', {'action': 'loginfailure'})
- abort(400, error_message, issue='login-failure')
+
+ return success
@v1_bp.route('/users', methods=['GET'])
@@ -118,27 +134,20 @@ def create_user():
@process_auth
@anon_allowed
def get_user():
- if get_validated_oauth_token():
- return jsonify({
- 'username': '$oauthtoken',
- 'email': None,
- })
- elif get_authenticated_user():
- return jsonify({
- 'username': get_authenticated_user().username,
- 'email': get_authenticated_user().email,
- })
- elif get_validated_token():
- return jsonify({
- 'username': '$token',
- 'email': None,
- })
- abort(404)
+ context = get_authenticated_context()
+ if not context or context.is_anonymous:
+ abort(404)
+
+ return jsonify({
+ 'username': context.credential_username,
+ 'email': None,
+ })
@v1_bp.route('/users/<username>/', methods=['PUT'])
@process_auth
@anon_allowed
+@check_readonly
def update_user(username):
permission = UserAdminPermission(username)
if permission.can():
@@ -146,47 +155,52 @@ def update_user(username):
if 'password' in update_request:
logger.debug('Updating user password')
- model.change_user_password(get_authenticated_user(), update_request['password'])
+ model.user.change_password(get_authenticated_user(), update_request['password'])
return jsonify({
'username': get_authenticated_user().username,
- 'email': get_authenticated_user().email
+ 'email': get_authenticated_user().email,
})
+
abort(403)
@v1_bp.route('/repositories/<repopath:repository>/', methods=['PUT'])
@process_auth
@parse_repository_name()
+@check_v1_push_enabled()
+@ensure_namespace_enabled
+@check_repository_state
@generate_headers(scope=GrantType.WRITE_REPOSITORY, add_grant_for_status=201)
@anon_allowed
+@check_readonly
def create_repository(namespace_name, repo_name):
# Verify that the repository name is valid.
if not REPOSITORY_NAME_REGEX.match(repo_name):
abort(400, message='Invalid repository name. Repository names cannot contain slashes.')
logger.debug('Looking up repository %s/%s', namespace_name, repo_name)
- repo = model.get_repository(namespace_name, repo_name)
-
- logger.debug('Found repository %s/%s', namespace_name, repo_name)
- if not repo and get_authenticated_user() is None:
+ repository_ref = registry_model.lookup_repository(namespace_name, repo_name)
+ if repository_ref is None and get_authenticated_user() is None:
logger.debug('Attempt to create repository %s/%s without user auth', namespace_name, repo_name)
abort(401,
message='Cannot create a repository as a guest. Please login via "docker login" first.',
issue='no-login')
-
- elif repo:
+ elif repository_ref:
modify_perm = ModifyRepositoryPermission(namespace_name, repo_name)
if not modify_perm.can():
abort(403,
message='You do not have permission to modify repository %(namespace)s/%(repository)s',
- issue='no-repo-write-permission',
- namespace=namespace_name, repository=repo_name)
+ issue='no-repo-write-permission', namespace=namespace_name, repository=repo_name)
+ elif repository_ref.kind != 'image':
+ msg = ('This repository is for managing %s resources and not container images.' %
+ repository_ref.kind)
+ abort(405, message=msg, namespace=namespace_name)
else:
create_perm = CreateRepositoryPermission(namespace_name)
if not create_perm.can():
- logger.info('Attempt to create a new repo %s/%s with insufficient perms', namespace_name,
- repo_name)
+ logger.warning('Attempt to create a new repo %s/%s with insufficient perms', namespace_name,
+ repo_name)
msg = 'You do not have permission to create repositories in namespace "%(namespace)s"'
abort(403, message=msg, issue='no-create-permission', namespace=namespace_name)
@@ -194,7 +208,8 @@ def create_repository(namespace_name, repo_name):
logger.debug('Creating repository %s/%s with owner: %s', namespace_name, repo_name,
get_authenticated_user().username)
- model.create_repository(namespace_name, repo_name, get_authenticated_user())
+ repository_ref = model.repository.create_repository(namespace_name, repo_name,
+ get_authenticated_user())
if get_authenticated_user():
user_event_data = {
@@ -206,34 +221,52 @@ def create_repository(namespace_name, repo_name):
event = userevents.get_event(get_authenticated_user().username)
event.publish_event_data('docker-cli', user_event_data)
+ # Start a new builder for the repository and save its ID in the session.
+ assert repository_ref
+ builder = create_manifest_builder(repository_ref, storage, docker_v2_signing_key)
+ logger.debug('Started repo push with manifest builder %s', builder)
+ if builder is None:
+ abort(404, message='Unknown repository', issue='unknown-repo')
+
+ session['manifest_builder'] = builder.builder_id
return make_response('Created', 201)
@v1_bp.route('/repositories/<repopath:repository>/images', methods=['PUT'])
@process_auth
@parse_repository_name()
+@check_v1_push_enabled()
+@ensure_namespace_enabled
+@check_repository_state
@generate_headers(scope=GrantType.WRITE_REPOSITORY)
@anon_allowed
+@check_readonly
def update_images(namespace_name, repo_name):
permission = ModifyRepositoryPermission(namespace_name, repo_name)
if permission.can():
logger.debug('Looking up repository')
- repo = model.get_repository(namespace_name, repo_name)
- if not repo:
+ repository_ref = registry_model.lookup_repository(namespace_name, repo_name,
+ kind_filter='image')
+ if repository_ref is None:
# Make sure the repo actually exists.
abort(404, message='Unknown repository', issue='unknown-repo')
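+    # The manifest builder ID was stored in the session when the push began; if it is missing or
+    # expired, the tags for this push cannot be committed.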
+ builder = lookup_manifest_builder(repository_ref, session.get('manifest_builder'), storage,
+ docker_v2_signing_key)
+ if builder is None:
+ abort(400)
+
# Generate a job for each notification that has been added to this repo
logger.debug('Adding notifications for repository')
-
- updated_tags = session.get('pushed_tags', {})
event_data = {
- 'updated_tags': updated_tags,
+ 'updated_tags': [tag.name for tag in builder.committed_tags],
}
- track_and_log('push_repo', repo)
- spawn_notification(repo, 'repo_push', event_data)
+ builder.done()
+
+ track_and_log('push_repo', repository_ref)
+ spawn_notification(repository_ref, 'repo_push', event_data)
metric_queue.repository_push.Inc(labelvalues=[namespace_name, repo_name, 'v1', True])
return make_response('Updated', 204)
@@ -243,24 +276,26 @@ def update_images(namespace_name, repo_name):
@v1_bp.route('/repositories/<repopath:repository>/images', methods=['GET'])
@process_auth
@parse_repository_name()
+@ensure_namespace_enabled
@generate_headers(scope=GrantType.READ_REPOSITORY)
@anon_protect
def get_repository_images(namespace_name, repo_name):
-  permission = ReadRepositoryPermission(namespace_name, repo_name)
-  # TODO invalidate token?
-  if permission.can() or model.repository_is_public(namespace_name, repo_name):
+  repository_ref = registry_model.lookup_repository(namespace_name, repo_name,
+                                                    kind_filter='image')
+  permission = ReadRepositoryPermission(namespace_name, repo_name)
+  if permission.can() or (repository_ref and repository_ref.is_public):
# We can't rely on permissions to tell us if a repo exists anymore
- logger.debug('Looking up repository')
- repo = model.get_repository(namespace_name, repo_name)
- if not repo:
+ if repository_ref is None:
abort(404, message='Unknown repository', issue='unknown-repo')
logger.debug('Building repository image response')
resp = make_response(json.dumps([]), 200)
resp.mimetype = 'application/json'
- track_and_log('pull_repo', repo, analytics_name='pull_repo_100x', analytics_sample=0.01)
+ track_and_log('pull_repo', repository_ref,
+ analytics_name='pull_repo_100x',
+ analytics_sample=0.01)
metric_queue.repository_pull.Inc(labelvalues=[namespace_name, repo_name, 'v1', True])
return resp
@@ -270,15 +305,23 @@ def get_repository_images(namespace_name, repo_name):
@v1_bp.route('/repositories/<repopath:repository>/images', methods=['DELETE'])
@process_auth
@parse_repository_name()
+@check_v1_push_enabled()
+@ensure_namespace_enabled
+@check_repository_state
@generate_headers(scope=GrantType.WRITE_REPOSITORY)
@anon_allowed
+@check_readonly
def delete_repository_images(namespace_name, repo_name):
abort(501, 'Not Implemented', issue='not-implemented')
@v1_bp.route('/repositories/<repopath:repository>/auth', methods=['PUT'])
@parse_repository_name()
+@check_v1_push_enabled()
+@ensure_namespace_enabled
+@check_repository_state
@anon_allowed
+@check_readonly
def put_repository_auth(namespace_name, repo_name):
abort(501, 'Not Implemented', issue='not-implemented')
@@ -287,43 +330,59 @@ def put_repository_auth(namespace_name, repo_name):
@process_auth
@anon_protect
def get_search():
- query = request.args.get('q')
+ query = request.args.get('q') or ''
+
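+  # Parse the page size from the `n` parameter, clamping it to the range [1, 100]; default to 25.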
+ try:
+ limit = min(100, max(1, int(request.args.get('n', 25))))
+ except ValueError:
+ limit = 25
+
+ try:
+    page = max(1, int(request.args.get('page', 1)))
+ except ValueError:
+ page = 1
username = None
user = get_authenticated_user()
if user is not None:
username = user.username
- results = []
- if query:
- _conduct_repo_search(username, query, results)
-
- data = {
- "query": query,
- "num_results": len(results),
- "results" : results
- }
-
+ data = _conduct_repo_search(username, query, limit, page)
resp = make_response(json.dumps(data), 200)
resp.mimetype = 'application/json'
return resp
-def _conduct_repo_search(username, query, results):
+def _conduct_repo_search(username, query, limit=25, page=1):
""" Finds matching repositories. """
-  def can_read(repo):
-    if repo.is_public:
-      return True
-    return ReadRepositoryPermission(repo.namespace_user.username, repo.name).can()
+  # Note that we cap this at a maximum of five pages, because this API should only ever be
+  # used by the Docker CLI, which does not paginate.
+  page = min(page, 5)
+  offset = (page - 1) * limit
-  only_public = username is None
-  matching_repos = model.get_sorted_matching_repositories(query, only_public, can_read, limit=5)
+  if query:
+    matching_repos = model.repository.get_filtered_matching_repositories(query,
+                                                                         filter_username=username,
+                                                                         offset=offset,
+                                                                         limit=limit + 1)
+  else:
+    matching_repos = []
-
- for repo in matching_repos:
+ results = []
+ for repo in matching_repos[0:limit]:
results.append({
- 'name': repo.namespace_name + '/' + repo.name,
+ 'name': repo.namespace_user.username + '/' + repo.name,
'description': repo.description,
- 'is_public': repo.is_public,
- 'href': '/repository/' + repo.namespace_name + '/' + repo.name
+ 'is_public': model.repository.is_repository_public(repo),
+ 'href': '/repository/' + repo.namespace_user.username + '/' + repo.name
})
+
+ # Defined: https://docs.docker.com/v1.6/reference/api/registry_api/
+ return {
+ 'query': query,
+ 'num_results': len(results),
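+    # One extra result was requested above, so its presence signals a further page of results.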
+ 'num_pages': page + 1 if len(matching_repos) > limit else page,
+ 'page': page,
+ 'page_size': limit,
+ 'results': results,
+ }
diff --git a/endpoints/v1/registry.py b/endpoints/v1/registry.py
index f0bcc11b8..14376cb19 100644
--- a/endpoints/v1/registry.py
+++ b/endpoints/v1/registry.py
@@ -7,49 +7,45 @@ from time import time
from flask import make_response, request, session, Response, redirect, abort as flask_abort
-from app import storage as store, app, metric_queue
+from app import storage as store, app, docker_v2_signing_key, metric_queue
from auth.auth_context import get_authenticated_user
-from auth.permissions import (ReadRepositoryPermission,
- ModifyRepositoryPermission)
-from auth.process import process_auth, extract_namespace_repo_from_session
-from auth.registry_jwt_auth import get_granted_username
-from data import model, database
-from data.interfaces.v1 import pre_oci_model as model
+from auth.decorators import extract_namespace_repo_from_session, process_auth
+from auth.permissions import (ReadRepositoryPermission, ModifyRepositoryPermission)
+from data import database
+from data.registry_model import registry_model
+from data.registry_model.blobuploader import upload_blob, BlobUploadSettings, BlobUploadException
+from data.registry_model.manifestbuilder import lookup_manifest_builder
from digest import checksums
-from endpoints.v1 import v1_bp
-from endpoints.decorators import anon_protect
+from endpoints.v1 import v1_bp, check_v1_push_enabled
+from endpoints.v1.index import ensure_namespace_enabled
+from endpoints.decorators import (anon_protect, check_region_blacklisted, check_repository_state,
+ check_readonly)
from util.http import abort, exact_abort
-from util.registry.filelike import SocketReader
-from util.registry import gzipstream
from util.registry.replication import queue_storage_replication
-from util.registry.torrent import PieceHasher
-
+from util.request import get_request_ip
logger = logging.getLogger(__name__)
-def _finish_image(namespace, repository, image_id):
- # Checksum is ok, we remove the marker
- blob_ref = model.update_image_uploading(namespace, repository, image_id, False)
-
- # Send a job to the work queue to replicate the image layer.
- queue_storage_replication(namespace, blob_ref)
-
-
def require_completion(f):
- """This make sure that the image push correctly finished."""
+ """ This make sure that the image push correctly finished. """
@wraps(f)
def wrapper(namespace, repository, *args, **kwargs):
image_id = kwargs['image_id']
- if model.is_image_uploading(namespace, repository, image_id):
- abort(400, 'Image %(image_id)s is being uploaded, retry later',
- issue='upload-in-progress', image_id=image_id)
+ repository_ref = registry_model.lookup_repository(namespace, repository)
+ if repository_ref is not None:
+ legacy_image = registry_model.get_legacy_image(repository_ref, image_id)
+ if legacy_image is not None and legacy_image.uploading:
+ abort(400, 'Image %(image_id)s is being uploaded, retry later', issue='upload-in-progress',
+ image_id=image_id)
+
return f(namespace, repository, *args, **kwargs)
return wrapper
def set_cache_headers(f):
"""Returns HTTP headers suitable for caching."""
+
@wraps(f)
def wrapper(*args, **kwargs):
# Set TTL to 1 year by default
@@ -59,8 +55,7 @@ def set_cache_headers(f):
headers = {
'Cache-Control': 'public, max-age={0}'.format(ttl),
'Expires': expires,
- 'Last-Modified': 'Thu, 01 Jan 1970 00:00:00 GMT',
- }
+ 'Last-Modified': 'Thu, 01 Jan 1970 00:00:00 GMT',}
if 'If-Modified-Since' in request.headers:
response = make_response('Not modified', 304)
response.headers.extend(headers)
@@ -69,31 +64,36 @@ def set_cache_headers(f):
    # Prevent the cookie from being sent when the object is cacheable
session.modified = False
return f(*args, **kwargs)
+
return wrapper
@v1_bp.route('/images/<image_id>/layer', methods=['HEAD'])
@process_auth
@extract_namespace_repo_from_session
+@ensure_namespace_enabled
@require_completion
@set_cache_headers
@anon_protect
def head_image_layer(namespace, repository, image_id, headers):
permission = ReadRepositoryPermission(namespace, repository)
+ repository_ref = registry_model.lookup_repository(namespace, repository, kind_filter='image')
logger.debug('Checking repo permissions')
- if permission.can() or model.repository_is_public(namespace, repository):
+ if permission.can() or (repository_ref is not None and repository_ref.is_public):
+ if repository_ref is None:
+ abort(404)
+
logger.debug('Looking up placement locations')
- locations, _ = model.placement_locations_and_path_docker_v1(namespace, repository, image_id)
- if locations is None:
+ legacy_image = registry_model.get_legacy_image(repository_ref, image_id, include_blob=True)
+ if legacy_image is None:
logger.debug('Could not find any blob placement locations')
- abort(404, 'Image %(image_id)s not found', issue='unknown-image',
- image_id=image_id)
+ abort(404, 'Image %(image_id)s not found', issue='unknown-image', image_id=image_id)
# Add the Accept-Ranges header if the storage engine supports resumable
# downloads.
extra_headers = {}
- if store.get_supports_resumable_downloads(locations):
+ if store.get_supports_resumable_downloads(legacy_image.blob.placements):
logger.debug('Storage supports resumable downloads')
extra_headers['Accept-Ranges'] = 'bytes'
@@ -108,22 +108,31 @@ def head_image_layer(namespace, repository, image_id, headers):
@v1_bp.route('/images/<image_id>/layer', methods=['GET'])
@process_auth
@extract_namespace_repo_from_session
+@ensure_namespace_enabled
@require_completion
@set_cache_headers
+@check_region_blacklisted()
@anon_protect
def get_image_layer(namespace, repository, image_id, headers):
permission = ReadRepositoryPermission(namespace, repository)
+ repository_ref = registry_model.lookup_repository(namespace, repository, kind_filter='image')
logger.debug('Checking repo permissions')
- if permission.can() or model.repository_is_public(namespace, repository):
- logger.debug('Looking up placement locations and path')
- locations, path = model.placement_locations_and_path_docker_v1(namespace, repository, image_id)
- if not locations or not path:
- abort(404, 'Image %(image_id)s not found', issue='unknown-image',
- image_id=image_id)
+ if permission.can() or (repository_ref is not None and repository_ref.is_public):
+ if repository_ref is None:
+ abort(404)
+
+ legacy_image = registry_model.get_legacy_image(repository_ref, image_id, include_blob=True)
+ if legacy_image is None:
+ abort(404, 'Image %(image_id)s not found', issue='unknown-image', image_id=image_id)
+
+ path = legacy_image.blob.storage_path
+ metric_queue.pull_byte_count.Inc(legacy_image.blob.compressed_size, labelvalues=['v1'])
+
try:
logger.debug('Looking up the direct download URL for path: %s', path)
- direct_download_url = store.get_direct_download_url(locations, path)
+ direct_download_url = store.get_direct_download_url(legacy_image.blob.placements, path,
+ get_request_ip())
if direct_download_url:
logger.debug('Returning direct download URL')
resp = redirect(direct_download_url)
@@ -132,11 +141,10 @@ def get_image_layer(namespace, repository, image_id, headers):
# Close the database handle here for this process before we send the long download.
database.close_db_filter(None)
logger.debug('Streaming layer data')
- return Response(store.stream_read(locations, path), headers=headers)
+ return Response(store.stream_read(legacy_image.blob.placements, path), headers=headers)
except (IOError, AttributeError):
logger.exception('Image layer data not found')
- abort(404, 'Image %(image_id)s not found', issue='unknown-image',
- image_id=image_id)
+ abort(404, 'Image %(image_id)s not found', issue='unknown-image', image_id=image_id)
abort(403)
@@ -144,103 +152,94 @@ def get_image_layer(namespace, repository, image_id, headers):
@v1_bp.route('/images/<image_id>/layer', methods=['PUT'])
@process_auth
@extract_namespace_repo_from_session
+@check_v1_push_enabled()
+@ensure_namespace_enabled
+@check_repository_state
@anon_protect
+@check_readonly
def put_image_layer(namespace, repository, image_id):
logger.debug('Checking repo permissions')
permission = ModifyRepositoryPermission(namespace, repository)
if not permission.can():
abort(403)
- logger.debug('Retrieving image')
- if model.storage_exists(namespace, repository, image_id):
- exact_abort(409, 'Image already exists')
+ repository_ref = registry_model.lookup_repository(namespace, repository, kind_filter='image')
+ if repository_ref is None:
+ abort(403)
- v1_metadata = model.docker_v1_metadata(namespace, repository, image_id)
- if v1_metadata is None:
+ logger.debug('Checking for image in manifest builder')
+ builder = lookup_manifest_builder(repository_ref, session.get('manifest_builder'), store,
+ docker_v2_signing_key)
+ if builder is None:
+ abort(400)
+
+ layer = builder.lookup_layer(image_id)
+ if layer is None:
abort(404)
logger.debug('Storing layer data')
-
input_stream = request.stream
if request.headers.get('transfer-encoding') == 'chunked':
# Careful, might work only with WSGI servers supporting chunked
# encoding (Gunicorn)
input_stream = request.environ['wsgi.input']
- # Create a socket reader to read the input stream containing the layer data.
- sr = SocketReader(input_stream)
+ expiration_sec = app.config['PUSH_TEMP_TAG_EXPIRATION_SEC']
+ settings = BlobUploadSettings(maximum_blob_size=app.config['MAXIMUM_LAYER_SIZE'],
+ bittorrent_piece_size=app.config['BITTORRENT_PIECE_SIZE'],
+ committed_blob_expiration=expiration_sec)
+
+ extra_handlers = []
# Add a handler that copies the data into a temp file. This is used to calculate the tarsum,
# which is only needed for older versions of Docker.
- requires_tarsum = session.get('checksum_format') == 'tarsum'
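+  # If a checksum was already recorded for this layer, a tarsum must be computed for validation.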
+ requires_tarsum = bool(builder.get_layer_checksums(layer))
if requires_tarsum:
tmp, tmp_hndlr = store.temp_store_handler()
- sr.add_handler(tmp_hndlr)
+ extra_handlers.append(tmp_hndlr)
- # Add a handler to compute the compressed and uncompressed sizes of the layer.
- size_info, size_hndlr = gzipstream.calculate_size_handler()
- sr.add_handler(size_hndlr)
+ # Add a handler which computes the simple Docker V1 checksum.
+ h, sum_hndlr = checksums.simple_checksum_handler(layer.v1_metadata_string)
+ extra_handlers.append(sum_hndlr)
- # Add a handler to hash the chunks of the upload for torrenting
- piece_hasher = PieceHasher(app.config['BITTORRENT_PIECE_SIZE'])
- sr.add_handler(piece_hasher.update)
+ uploaded_blob = None
+ try:
+ with upload_blob(repository_ref, store, settings,
+ extra_blob_stream_handlers=extra_handlers) as manager:
+ manager.upload_chunk(app.config, input_stream)
+ uploaded_blob = manager.commit_to_blob(app.config)
+ except BlobUploadException:
+ logger.exception('Exception when writing image data')
+ abort(520, 'Image %(image_id)s could not be written. Please try again.', image_id=image_id)
- # Add a handler which computes the checksum.
- h, sum_hndlr = checksums.simple_checksum_handler(v1_metadata.compat_json)
- sr.add_handler(sum_hndlr)
-
- # Add a handler which computes the content checksum only
- ch, content_sum_hndlr = checksums.content_checksum_handler()
- sr.add_handler(content_sum_hndlr)
-
- # Stream write the data to storage.
- locations, path = model.placement_locations_and_path_docker_v1(namespace, repository, image_id)
- with database.CloseForLongOperation(app.config):
- try:
- start_time = time()
- store.stream_write(locations, path, sr)
- metric_queue.chunk_size.Observe(size_info.compressed_size,
- labelvalues=[list(locations)[0]])
- metric_queue.chunk_upload_time.Observe(time() - start_time,
- labelvalues=[list(locations)[0]])
- except IOError:
- logger.exception('Exception when writing image data')
- abort(520, 'Image %(image_id)s could not be written. Please try again.', image_id=image_id)
-
- # Save the size of the image.
- model.update_image_sizes(namespace, repository, image_id, size_info.compressed_size,
- size_info.uncompressed_size)
-
- # Save the BitTorrent pieces.
- model.create_bittorrent_pieces(namespace, repository, image_id, piece_hasher.final_piece_hashes())
-
- # Append the computed checksum.
+ # Compute the final checksum
csums = []
csums.append('sha256:{0}'.format(h.hexdigest()))
try:
if requires_tarsum:
tmp.seek(0)
- csums.append(checksums.compute_tarsum(tmp, v1_metadata.compat_json))
+ csums.append(checksums.compute_tarsum(tmp, layer.v1_metadata_string))
tmp.close()
except (IOError, checksums.TarError) as exc:
logger.debug('put_image_layer: Error when computing tarsum %s', exc)
- if v1_metadata.checksum is None:
- # We don't have a checksum stored yet, that's fine skipping the check.
- # Not removing the mark though, image is not downloadable yet.
- session['checksum'] = csums
- session['content_checksum'] = 'sha256:{0}'.format(ch.hexdigest())
- return make_response('true', 200)
+ # If there was already a precomputed checksum, validate against it now.
+ if builder.get_layer_checksums(layer):
+ checksum = builder.get_layer_checksums(layer)[0]
+ if not builder.validate_layer_checksum(layer, checksum):
+ logger.debug('put_image_checksum: Wrong checksum. Given: %s and expected: %s', checksum,
+ builder.get_layer_checksums(layer))
+ abort(400, 'Checksum mismatch for image: %(image_id)s', issue='checksum-mismatch',
+ image_id=image_id)
- # We check if the checksums provided matches one the one we computed
- if v1_metadata.checksum not in csums:
- logger.warning('put_image_layer: Wrong checksum')
- abort(400, 'Checksum mismatch; ignoring the layer for image %(image_id)s',
- issue='checksum-mismatch', image_id=image_id)
+ # Assign the blob to the layer in the manifest.
+ if not builder.assign_layer_blob(layer, uploaded_blob, csums):
+ abort(500, 'Something went wrong')
- # Mark the image as uploaded.
- _finish_image(namespace, repository, image_id)
+ # Send a job to the work queue to replicate the image layer.
+ # TODO: move this into a better place.
+ queue_storage_replication(namespace, uploaded_blob)
return make_response('true', 200)
@@ -248,66 +247,51 @@ def put_image_layer(namespace, repository, image_id):
@v1_bp.route('/images/<image_id>/checksum', methods=['PUT'])
@process_auth
@extract_namespace_repo_from_session
+@check_v1_push_enabled()
+@ensure_namespace_enabled
+@check_repository_state
@anon_protect
+@check_readonly
def put_image_checksum(namespace, repository, image_id):
logger.debug('Checking repo permissions')
permission = ModifyRepositoryPermission(namespace, repository)
if not permission.can():
abort(403)
+ repository_ref = registry_model.lookup_repository(namespace, repository, kind_filter='image')
+ if repository_ref is None:
+ abort(403)
+
# Docker Version < 0.10 (tarsum+sha):
old_checksum = request.headers.get('X-Docker-Checksum')
# Docker Version >= 0.10 (sha):
new_checksum = request.headers.get('X-Docker-Checksum-Payload')
- # Store whether we need to calculate the tarsum.
- if new_checksum:
- session['checksum_format'] = 'sha256'
- else:
- session['checksum_format'] = 'tarsum'
-
checksum = new_checksum or old_checksum
if not checksum:
abort(400, "Missing checksum for image %(image_id)s", issue='missing-checksum',
image_id=image_id)
- if not session.get('checksum'):
- abort(400, 'Checksum not found in Cookie for image %(image_id)s',
- issue='missing-checksum-cookie', image_id=image_id)
+ logger.debug('Checking for image in manifest builder')
+ builder = lookup_manifest_builder(repository_ref, session.get('manifest_builder'), store,
+ docker_v2_signing_key)
+ if builder is None:
+ abort(400)
- logger.debug('Looking up repo image')
- v1_metadata = model.docker_v1_metadata(namespace, repository, image_id)
- if not v1_metadata:
- abort(404, 'Image not found: %(image_id)s', issue='unknown-image', image_id=image_id)
+ layer = builder.lookup_layer(image_id)
+ if layer is None:
+ abort(404)
- logger.debug('Looking up repo layer data')
- if not v1_metadata.compat_json:
- abort(404, 'Image not found: %(image_id)s', issue='unknown-image', image_id=image_id)
+ if old_checksum:
+ builder.save_precomputed_checksum(layer, checksum)
+ return make_response('true', 200)
- logger.debug('Marking image path')
- if not model.is_image_uploading(namespace, repository, image_id):
- abort(409, 'Cannot set checksum for image %(image_id)s',
- issue='image-write-error', image_id=image_id)
-
- logger.debug('Storing image and content checksums')
-
- content_checksum = session.get('content_checksum', None)
- checksum_parts = checksum.split(':')
- if len(checksum_parts) != 2:
- abort(400, 'Invalid checksum format')
-
- model.store_docker_v1_checksums(namespace, repository, image_id, checksum, content_checksum)
-
- if checksum not in session.get('checksum', []):
- logger.debug('session checksums: %s', session.get('checksum', []))
- logger.debug('client supplied checksum: %s', checksum)
- logger.debug('put_image_checksum: Wrong checksum')
- abort(400, 'Checksum mismatch for image: %(image_id)s',
- issue='checksum-mismatch', image_id=image_id)
-
- # Mark the image as uploaded.
- _finish_image(namespace, repository, image_id)
+ if not builder.validate_layer_checksum(layer, checksum):
+ logger.debug('put_image_checksum: Wrong checksum. Given: %s and expected: %s', checksum,
+ builder.get_layer_checksums(layer))
+ abort(400, 'Checksum mismatch for image: %(image_id)s', issue='checksum-mismatch',
+ image_id=image_id)
return make_response('true', 200)
@@ -315,28 +299,29 @@ def put_image_checksum(namespace, repository, image_id):
@v1_bp.route('/images/<image_id>/json', methods=['GET'])
@process_auth
@extract_namespace_repo_from_session
+@ensure_namespace_enabled
@require_completion
@set_cache_headers
@anon_protect
def get_image_json(namespace, repository, image_id, headers):
logger.debug('Checking repo permissions')
permission = ReadRepositoryPermission(namespace, repository)
- if not permission.can() and not model.repository_is_public(namespace, repository):
+ repository_ref = registry_model.lookup_repository(namespace, repository, kind_filter='image')
+ if not permission.can() and not (repository_ref is not None and repository_ref.is_public):
abort(403)
logger.debug('Looking up repo image')
- v1_metadata = model.docker_v1_metadata(namespace, repository, image_id)
- if v1_metadata is None:
+ legacy_image = registry_model.get_legacy_image(repository_ref, image_id, include_blob=True)
+ if legacy_image is None:
flask_abort(404)
- logger.debug('Looking up repo layer size')
- size = model.get_image_size(namespace, repository, image_id)
+ size = legacy_image.blob.compressed_size
if size is not None:
# Note: X-Docker-Size is optional and we *can* end up with a NULL image_size,
# so handle this case rather than failing.
headers['X-Docker-Size'] = str(size)
- response = make_response(v1_metadata.compat_json, 200)
+ response = make_response(legacy_image.v1_metadata_string, 200)
response.headers.extend(headers)
return response
@@ -344,21 +329,26 @@ def get_image_json(namespace, repository, image_id, headers):
@v1_bp.route('/images/<image_id>/ancestry', methods=['GET'])
@process_auth
@extract_namespace_repo_from_session
+@ensure_namespace_enabled
@require_completion
@set_cache_headers
@anon_protect
def get_image_ancestry(namespace, repository, image_id, headers):
logger.debug('Checking repo permissions')
permission = ReadRepositoryPermission(namespace, repository)
- if not permission.can() and not model.repository_is_public(namespace, repository):
+ repository_ref = registry_model.lookup_repository(namespace, repository, kind_filter='image')
+ if not permission.can() and not (repository_ref is not None and repository_ref.is_public):
abort(403)
- ancestry_docker_ids = model.image_ancestry(namespace, repository, image_id)
- if ancestry_docker_ids is None:
+ logger.debug('Looking up repo image')
+ legacy_image = registry_model.get_legacy_image(repository_ref, image_id, include_parents=True)
+ if legacy_image is None:
abort(404, 'Image %(image_id)s not found', issue='unknown-image', image_id=image_id)
- # We can not use jsonify here because we are returning a list not an object
- response = make_response(json.dumps(ancestry_docker_ids), 200)
+  # NOTE: We cannot use jsonify here because we are returning a list, not an object.
+ ancestor_ids = ([legacy_image.docker_image_id] +
+ [a.docker_image_id for a in legacy_image.parents])
+ response = make_response(json.dumps(ancestor_ids), 200)
response.headers.extend(headers)
return response
@@ -366,13 +356,26 @@ def get_image_ancestry(namespace, repository, image_id, headers):
@v1_bp.route('/images/<image_id>/json', methods=['PUT'])
@process_auth
@extract_namespace_repo_from_session
+@check_v1_push_enabled()
+@ensure_namespace_enabled
+@check_repository_state
@anon_protect
+@check_readonly
def put_image_json(namespace, repository, image_id):
logger.debug('Checking repo permissions')
permission = ModifyRepositoryPermission(namespace, repository)
if not permission.can():
abort(403)
+ repository_ref = registry_model.lookup_repository(namespace, repository, kind_filter='image')
+ if repository_ref is None:
+ abort(403)
+
+ builder = lookup_manifest_builder(repository_ref, session.get('manifest_builder'), store,
+ docker_v2_signing_key)
+ if builder is None:
+ abort(400)
+
logger.debug('Parsing image JSON')
try:
uploaded_metadata = request.data
@@ -381,60 +384,24 @@ def put_image_json(namespace, repository, image_id):
pass
if not data or not isinstance(data, dict):
- abort(400, 'Invalid JSON for image: %(image_id)s\nJSON: %(json)s',
- issue='invalid-request', image_id=image_id, json=request.data)
+ abort(400, 'Invalid JSON for image: %(image_id)s\nJSON: %(json)s', issue='invalid-request',
+ image_id=image_id, json=request.data)
if 'id' not in data:
- abort(400, 'Missing key `id` in JSON for image: %(image_id)s',
- issue='invalid-request', image_id=image_id)
+ abort(400, 'Missing key `id` in JSON for image: %(image_id)s', issue='invalid-request',
+ image_id=image_id)
if image_id != data['id']:
- abort(400, 'JSON data contains invalid id for image: %(image_id)s',
- issue='invalid-request', image_id=image_id)
+ abort(400, 'JSON data contains invalid id for image: %(image_id)s', issue='invalid-request',
+ image_id=image_id)
logger.debug('Looking up repo image')
-
- if not model.repository_exists(namespace, repository):
- abort(404, 'Repository does not exist: %(namespace)s/%(repository)s', issue='no-repo',
- namespace=namespace, repository=repository)
-
- v1_metadata = model.docker_v1_metadata(namespace, repository, image_id)
- if v1_metadata is None:
- username = get_authenticated_user() and get_authenticated_user().username
- if not username:
- username = get_granted_username()
-
- logger.debug('Image not found, creating or linking image with initiating user context: %s',
- username)
- location_pref = store.preferred_locations[0]
- model.create_or_link_image(username, namespace, repository, image_id, location_pref)
- v1_metadata = model.docker_v1_metadata(namespace, repository, image_id)
-
- # Create a temporary tag to prevent this image from getting garbage collected while the push
- # is in progress.
- model.create_temp_hidden_tag(namespace, repository, image_id,
- app.config['PUSH_TEMP_TAG_EXPIRATION_SEC'])
-
- parent_id = data.get('parent', None)
- if parent_id:
- logger.debug('Looking up parent image')
- if model.docker_v1_metadata(namespace, repository, parent_id) is None:
- abort(400, 'Image %(image_id)s depends on non existing parent image %(parent_id)s',
- issue='invalid-request', image_id=image_id, parent_id=parent_id)
-
- logger.debug('Checking if image already exists')
- if v1_metadata and not model.is_image_uploading(namespace, repository, image_id):
- exact_abort(409, 'Image already exists')
-
- model.update_image_uploading(namespace, repository, image_id, True)
-
- # If we reach that point, it means that this is a new image or a retry
- # on a failed push, save the metadata
- command_list = data.get('container_config', {}).get('Cmd', None)
- command = json.dumps(command_list) if command_list else None
-
- logger.debug('Setting image metadata')
- model.update_docker_v1_metadata(namespace, repository, image_id, data.get('created'),
- data.get('comment'), command, uploaded_metadata, parent_id)
+ location_pref = store.preferred_locations[0]
+ username = get_authenticated_user() and get_authenticated_user().username
+ layer = builder.start_layer(image_id, uploaded_metadata, location_pref, username,
+ app.config['PUSH_TEMP_TAG_EXPIRATION_SEC'])
+ if layer is None:
+ abort(400, 'Image %(image_id)s has invalid metadata',
+ issue='invalid-request', image_id=image_id)
return make_response('true', 200)
diff --git a/endpoints/v1/tag.py b/endpoints/v1/tag.py
index 917cc6a6f..67f37e098 100644
--- a/endpoints/v1/tag.py
+++ b/endpoints/v1/tag.py
@@ -3,18 +3,16 @@ import json
from flask import abort, request, jsonify, make_response, session
-
+from app import storage, docker_v2_signing_key
+from auth.decorators import process_auth
+from auth.permissions import (ReadRepositoryPermission, ModifyRepositoryPermission)
+from data.registry_model import registry_model
+from data.registry_model.manifestbuilder import lookup_manifest_builder
+from endpoints.decorators import (anon_protect, parse_repository_name, check_repository_state,
+ check_readonly)
+from endpoints.v1 import v1_bp, check_v1_push_enabled
+from util.audit import track_and_log
from util.names import TAG_ERROR, TAG_REGEX
-from auth.permissions import (ReadRepositoryPermission,
- ModifyRepositoryPermission)
-from auth.process import process_auth
-from data import model
-from data.interfaces.v1 import pre_oci_model as model
-from endpoints.common import parse_repository_name
-from endpoints.decorators import anon_protect
-from endpoints.v1 import v1_bp
-from endpoints.trackhelper import track_and_log
-
logger = logging.getLogger(__name__)
@@ -25,10 +23,12 @@ logger = logging.getLogger(__name__)
@parse_repository_name()
def get_tags(namespace_name, repo_name):
permission = ReadRepositoryPermission(namespace_name, repo_name)
-  if permission.can() or model.repository_is_public(namespace_name, repo_name):
-    tags = model.list_tags(namespace_name, repo_name)
-    tag_map = {tag.name: tag.image.docker_image_id for tag in tags}
+  repository_ref = registry_model.lookup_repository(namespace_name, repo_name, kind_filter='image')
+  if permission.can() or (repository_ref is not None and repository_ref.is_public):
+    if repository_ref is None:
+      abort(404)
+
+    tag_map = registry_model.get_legacy_tags_map(repository_ref, storage)
return jsonify(tag_map)
abort(403)
@@ -40,9 +40,12 @@ def get_tags(namespace_name, repo_name):
@parse_repository_name()
def get_tag(namespace_name, repo_name, tag):
permission = ReadRepositoryPermission(namespace_name, repo_name)
-  if permission.can() or model.repository_is_public(namespace_name, repo_name):
-    image_id = model.find_image_id_by_tag(namespace_name, repo_name, tag)
+  repository_ref = registry_model.lookup_repository(namespace_name, repo_name, kind_filter='image')
+  if permission.can() or (repository_ref is not None and repository_ref.is_public):
+    if repository_ref is None:
+      abort(404)
+
+    image_id = registry_model.get_tag_legacy_image_id(repository_ref, tag, storage)
if image_id is None:
abort(404)
@@ -57,21 +60,40 @@ def get_tag(namespace_name, repo_name, tag):
@process_auth
@anon_protect
@parse_repository_name()
+@check_repository_state
+@check_v1_push_enabled()
+@check_readonly
def put_tag(namespace_name, repo_name, tag):
permission = ModifyRepositoryPermission(namespace_name, repo_name)
-  if permission.can():
+  repository_ref = registry_model.lookup_repository(namespace_name, repo_name, kind_filter='image')
+  if permission.can() and repository_ref is not None:
if not TAG_REGEX.match(tag):
abort(400, TAG_ERROR)
image_id = json.loads(request.data)
-      model.create_or_update_tag(namespace_name, repo_name, image_id, tag)
-      # Store the updated tag.
-      if 'pushed_tags' not in session:
-        session['pushed_tags'] = {}
-      session['pushed_tags'][tag] = image_id
+      # Check for the image ID first in a builder (for an in-progress push).
+      builder = lookup_manifest_builder(repository_ref, session.get('manifest_builder'), storage,
+                                        docker_v2_signing_key)
+      if builder is not None:
+        layer = builder.lookup_layer(image_id)
+        if layer is not None:
+          committed_tag = builder.commit_tag_and_manifest(tag, layer)
+          if committed_tag is None:
+            abort(400)
+
+          return make_response('Created', 200)
+
+ # Check if there is an existing image we should use (for PUT calls outside of a normal push
+ # operation).
+ legacy_image = registry_model.get_legacy_image(repository_ref, image_id)
+ if legacy_image is None:
+ abort(400)
+
+ if registry_model.retarget_tag(repository_ref, tag, legacy_image, storage,
+ docker_v2_signing_key) is None:
+ abort(400)
return make_response('Created', 200)
@@ -82,12 +104,18 @@ def put_tag(namespace_name, repo_name, tag):
@process_auth
@anon_protect
@parse_repository_name()
+@check_repository_state
+@check_v1_push_enabled()
+@check_readonly
def delete_tag(namespace_name, repo_name, tag):
permission = ModifyRepositoryPermission(namespace_name, repo_name)
-  if permission.can():
-    model.delete_tag(namespace_name, repo_name, tag)
-    track_and_log('delete_tag', model.get_repository(namespace_name, repo_name), tag=tag)
+  repository_ref = registry_model.lookup_repository(namespace_name, repo_name, kind_filter='image')
+  if permission.can() and repository_ref is not None:
+ if not registry_model.delete_tag(repository_ref, tag):
+ abort(404)
+
+ track_and_log('delete_tag', repository_ref, tag=tag)
return make_response('Deleted', 200)
abort(403)
diff --git a/endpoints/v2/__init__.py b/endpoints/v2/__init__.py
index d6af69db0..845ad258f 100644
--- a/endpoints/v2/__init__.py
+++ b/endpoints/v2/__init__.py
@@ -10,33 +10,30 @@ from semantic_version import Spec
import features
-from app import app, metric_queue, get_app_url, license_validator
-from auth.auth_context import get_grant_context
-from auth.permissions import (ReadRepositoryPermission, ModifyRepositoryPermission,
- AdministerRepositoryPermission)
+from app import app, metric_queue, get_app_url
+from auth.auth_context import get_authenticated_context
+from auth.permissions import (
+ ReadRepositoryPermission, ModifyRepositoryPermission, AdministerRepositoryPermission)
from auth.registry_jwt_auth import process_registry_jwt_auth, get_auth_headers
-from data import model
-from endpoints.decorators import anon_protect, anon_allowed
-from endpoints.v2.errors import V2RegistryException, Unauthorized
+from data.registry_model import registry_model
+from data.readreplica import ReadOnlyModeException
+from endpoints.decorators import anon_protect, anon_allowed, route_show_if
+from endpoints.v2.errors import (V2RegistryException, Unauthorized, Unsupported, NameUnknown,
+ ReadOnlyMode)
from util.http import abort
from util.metrics.metricqueue import time_blueprint
from util.registry.dockerver import docker_version
from util.pagination import encrypt_page_token, decrypt_page_token
-
logger = logging.getLogger(__name__)
-
v2_bp = Blueprint('v2', __name__)
-license_validator.enforce_license_before_request(v2_bp)
time_blueprint(v2_bp, metric_queue)
@v2_bp.app_errorhandler(V2RegistryException)
def handle_registry_v2_exception(error):
- response = jsonify({
- 'errors': [error.as_dict()]
- })
+ response = jsonify({'errors': [error.as_dict()]})
response.status_code = error.http_status_code
if response.status_code == 401:
@@ -45,14 +42,24 @@ def handle_registry_v2_exception(error):
return response
-_MAX_RESULTS_PER_PAGE = 50
+@v2_bp.app_errorhandler(ReadOnlyModeException)
+def handle_readonly(ex):
+ error = ReadOnlyMode()
+ response = jsonify({'errors': [error.as_dict()]})
+ response.status_code = error.http_status_code
+ logger.debug('sending response: %s', response.get_data())
+ return response
-def paginate(limit_kwarg_name='limit', offset_kwarg_name='offset',
+_MAX_RESULTS_PER_PAGE = app.config.get('V2_PAGINATION_SIZE', 100)
+
+
+def paginate(start_id_kwarg_name='start_id', limit_kwarg_name='limit',
callback_kwarg_name='pagination_callback'):
"""
Decorates a handler adding a parsed pagination token and a callback to encode a response token.
"""
+
def wrapper(func):
@wraps(func)
def wrapped(*args, **kwargs):
@@ -62,20 +69,19 @@ def paginate(limit_kwarg_name='limit', offset_kwarg_name='offset',
requested_limit = 0
limit = max(min(requested_limit, _MAX_RESULTS_PER_PAGE), 1)
- next_page_token = request.args.get('next_page', None)
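+      # The V2 registry API spec names this parameter `last`; `next_page` is kept for compatibility.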
+ next_page_token = request.args.get('next_page', request.args.get('last', None))
# Decrypt the next page token, if any.
- offset = 0
+ start_id = None
page_info = decrypt_page_token(next_page_token)
if page_info is not None:
- # Note: we use offset here instead of ID >= n because one of the V2 queries is a UNION.
- offset = page_info.get('offset', 0)
+ start_id = page_info.get('start_id', None)
- def callback(num_results, response):
- if num_results < limit:
+ def callback(results, response):
+ if len(results) <= limit:
return
- next_page_token = encrypt_page_token({'offset': limit + offset})
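+        # Keyset pagination: the next page token records the highest ID seen in this batch.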
+ next_page_token = encrypt_page_token({'start_id': max([obj.id for obj in results])})
link_url = os.path.join(get_app_url(), url_for(request.endpoint, **request.view_args))
link_param = urlencode({'n': limit, 'next_page': next_page_token})
@@ -83,7 +89,7 @@ def paginate(limit_kwarg_name='limit', offset_kwarg_name='offset',
response.headers['Link'] = link
kwargs[limit_kwarg_name] = limit
- kwargs[offset_kwarg_name] = offset
+ kwargs[start_id_kwarg_name] = start_id
kwargs[callback_kwarg_name] = callback
return func(*args, **kwargs)
return wrapped
@@ -94,26 +100,39 @@ def _require_repo_permission(permission_class, scopes=None, allow_public=False):
def wrapper(func):
@wraps(func)
def wrapped(namespace_name, repo_name, *args, **kwargs):
- logger.debug('Checking permission %s for repo: %s/%s', permission_class,
- namespace_name, repo_name)
+ logger.debug('Checking permission %s for repo: %s/%s', permission_class, namespace_name,
+ repo_name)
+
permission = permission_class(namespace_name, repo_name)
- if (permission.can() or
- (allow_public and
- model.repository.repository_is_public(namespace_name, repo_name))):
+ if permission.can():
return func(namespace_name, repo_name, *args, **kwargs)
+
repository = namespace_name + '/' + repo_name
+ if allow_public:
+ repository_ref = registry_model.lookup_repository(namespace_name, repo_name)
+ if repository_ref is None or not repository_ref.is_public:
+ raise Unauthorized(repository=repository, scopes=scopes)
+
+ if repository_ref.kind != 'image':
+ msg = 'This repository is for managing %s and not container images.' % repository_ref.kind
+ raise Unsupported(detail=msg)
+
+ if repository_ref.is_public:
+ if not features.ANONYMOUS_ACCESS:
+ raise Unauthorized(repository=repository, scopes=scopes)
+
+ return func(namespace_name, repo_name, *args, **kwargs)
+
raise Unauthorized(repository=repository, scopes=scopes)
return wrapped
return wrapper
-require_repo_read = _require_repo_permission(ReadRepositoryPermission,
- scopes=['pull'],
+require_repo_read = _require_repo_permission(ReadRepositoryPermission, scopes=['pull'],
allow_public=True)
-require_repo_write = _require_repo_permission(ModifyRepositoryPermission,
- scopes=['pull', 'push'])
-require_repo_admin = _require_repo_permission(AdministerRepositoryPermission,
- scopes=['pull', 'push'])
+require_repo_write = _require_repo_permission(ModifyRepositoryPermission, scopes=['pull', 'push'])
+require_repo_admin = _require_repo_permission(AdministerRepositoryPermission,
+                                              scopes=['pull', 'push'])
def get_input_stream(flask_request):
@@ -122,18 +141,6 @@ def get_input_stream(flask_request):
return flask_request.stream
-def route_show_if(value):
- def decorator(f):
- @wraps(f)
- def decorated_function(*args, **kwargs):
- if not value:
- abort(404)
-
- return f(*args, **kwargs)
- return decorated_function
- return decorator
-
-
@v2_bp.route('/')
@route_show_if(features.ADVERTISE_V2)
@process_registry_jwt_auth()
@@ -144,12 +151,12 @@ def v2_support_enabled():
  # Check if our version is one of the blacklisted versions; if we can't
  # identify the version (None), we fail open and assume that it is
# newer and therefore should not be blacklisted.
- if Spec(app.config['BLACKLIST_V2_SPEC']).match(docker_ver) and docker_ver is not None:
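+  # Test for None first, so that Spec.match is never invoked with an unidentifiable version.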
+ if docker_ver is not None and Spec(app.config['BLACKLIST_V2_SPEC']).match(docker_ver):
abort(404)
response = make_response('true', 200)
- if get_grant_context() is None:
+ if get_authenticated_context() is None:
response = make_response('true', 401)
response.headers.extend(get_auth_headers())
@@ -161,5 +168,4 @@ from endpoints.v2 import (
catalog,
manifest,
tag,
- v2auth,
-)
+ v2auth,)
diff --git a/endpoints/v2/blob.py b/endpoints/v2/blob.py
index ba0acf9ad..141c37990 100644
--- a/endpoints/v2/blob.py
+++ b/endpoints/v2/blob.py
@@ -1,30 +1,31 @@
import logging
import re
-import time
from flask import url_for, request, redirect, Response, abort as flask_abort
-import resumablehashlib
-
-from app import storage, app, get_app_url, metric_queue
+from app import storage, app, get_app_url, metric_queue, model_cache
from auth.registry_jwt_auth import process_registry_jwt_auth
+from auth.permissions import ReadRepositoryPermission
from data import database
-from data.interfaces.v2 import pre_oci_model as model
+from data.registry_model import registry_model
+from data.registry_model.blobuploader import (create_blob_upload, retrieve_blob_upload_manager,
+ complete_when_uploaded, BlobUploadSettings,
+ BlobUploadException, BlobTooLargeException,
+ BlobRangeMismatchException)
from digest import digest_tools
-from endpoints.common import parse_repository_name
+from endpoints.decorators import (anon_protect, anon_allowed, parse_repository_name,
+ check_region_blacklisted, check_readonly)
from endpoints.v2 import v2_bp, require_repo_read, require_repo_write, get_input_stream
-from endpoints.v2.errors import (BlobUnknown, BlobUploadInvalid, BlobUploadUnknown, Unsupported,
- NameUnknown)
-from endpoints.decorators import anon_protect
+from endpoints.v2.errors import (
+ BlobUnknown, BlobUploadInvalid, BlobUploadUnknown, Unsupported, NameUnknown, LayerTooLarge,
+ InvalidRequest, BlobDownloadGeoBlocked)
from util.cache import cache_control
-from util.registry.filelike import wrap_with_handler, StreamSlice
-from util.registry.gzipstream import calculate_size_handler
-from util.registry.torrent import PieceHasher
+from util.names import parse_namespace_repository
+from util.request import get_request_ip
logger = logging.getLogger(__name__)
-
BASE_BLOB_ROUTE = '/<repopath:repository>/blobs/<regex("{0}"):digest>'
BLOB_DIGEST_ROUTE = BASE_BLOB_ROUTE.format(digest_tools.DIGEST_PATTERN)
RANGE_HEADER_REGEX = re.compile(r'^bytes=([0-9]+)-([0-9]+)$')
@@ -39,23 +40,23 @@ class _InvalidRangeHeader(Exception):
@parse_repository_name()
@process_registry_jwt_auth(scopes=['pull'])
@require_repo_read
-@anon_protect
+@anon_allowed
@cache_control(max_age=31436000)
def check_blob_exists(namespace_name, repo_name, digest):
# Find the blob.
- blob = model.get_blob_by_digest(namespace_name, repo_name, digest)
+ blob = registry_model.get_cached_repo_blob(model_cache, namespace_name, repo_name, digest)
if blob is None:
raise BlobUnknown()
# Build the response headers.
headers = {
'Docker-Content-Digest': digest,
- 'Content-Length': blob.size,
+ 'Content-Length': blob.compressed_size,
'Content-Type': BLOB_CONTENT_TYPE,
}
# If our storage supports range requests, let the client know.
- if storage.get_supports_resumable_downloads(blob.locations):
+ if storage.get_supports_resumable_downloads(blob.placements):
headers['Accept-Ranges'] = 'bytes'
# Write the response to the client.
@@ -66,11 +67,12 @@ def check_blob_exists(namespace_name, repo_name, digest):
@parse_repository_name()
@process_registry_jwt_auth(scopes=['pull'])
@require_repo_read
-@anon_protect
+@anon_allowed
+@check_region_blacklisted(BlobDownloadGeoBlocked)
@cache_control(max_age=31536000)
def download_blob(namespace_name, repo_name, digest):
# Find the blob.
- blob = model.get_blob_by_digest(namespace_name, repo_name, digest)
+ blob = registry_model.get_cached_repo_blob(model_cache, namespace_name, repo_name, digest)
if blob is None:
raise BlobUnknown()
@@ -78,15 +80,15 @@ def download_blob(namespace_name, repo_name, digest):
headers = {'Docker-Content-Digest': digest}
# If our storage supports range requests, let the client know.
- if storage.get_supports_resumable_downloads(blob.locations):
+ if storage.get_supports_resumable_downloads(blob.placements):
headers['Accept-Ranges'] = 'bytes'
- # Find the storage path for the blob.
- path = model.get_blob_path(blob)
+ metric_queue.pull_byte_count.Inc(blob.compressed_size, labelvalues=['v2'])
# Short-circuit by redirecting if the storage supports it.
+ path = blob.storage_path
logger.debug('Looking up the direct download URL for path: %s', path)
- direct_download_url = storage.get_direct_download_url(blob.locations, path)
+ direct_download_url = storage.get_direct_download_url(blob.placements, path, get_request_ip())
if direct_download_url:
logger.debug('Returning direct download URL')
resp = redirect(direct_download_url)
@@ -98,69 +100,135 @@ def download_blob(namespace_name, repo_name, digest):
with database.CloseForLongOperation(app.config):
# Stream the response to the client.
return Response(
- storage.stream_read(blob.locations, path),
+ storage.stream_read(blob.placements, path),
headers=headers.update({
- 'Content-Length': blob.size,
+ 'Content-Length': blob.compressed_size,
'Content-Type': BLOB_CONTENT_TYPE,
}),
)
+def _try_to_mount_blob(repository_ref, mount_blob_digest):
+ """ Attempts to mount a blob requested by the user from another repository. """
+ logger.debug('Got mount request for blob `%s` into `%s`', mount_blob_digest, repository_ref)
+ from_repo = request.args.get('from', None)
+ if from_repo is None:
+ raise InvalidRequest(message='Missing `from` repository argument')
+
+ # Ensure the user has access to the repository.
+ logger.debug('Got mount request for blob `%s` under repository `%s` into `%s`',
+ mount_blob_digest, from_repo, repository_ref)
+ from_namespace, from_repo_name = parse_namespace_repository(from_repo,
+ app.config['LIBRARY_NAMESPACE'],
+ include_tag=False)
+
+ from_repository_ref = registry_model.lookup_repository(from_namespace, from_repo_name)
+ if from_repository_ref is None:
+ logger.debug('Could not find from repo: `%s/%s`', from_namespace, from_repo_name)
+ return None
+
+ # First check permission.
+ read_permission = ReadRepositoryPermission(from_namespace, from_repo_name).can()
+ if not read_permission:
+    # If there is no direct permission, check whether the repository is public.
+ if not from_repository_ref.is_public:
+ logger.debug('No permission to mount blob `%s` under repository `%s` into `%s`',
+ mount_blob_digest, from_repo, repository_ref)
+ return None
+
+  # Look up whether the mount blob's digest exists in the source repository.
+ mount_blob = registry_model.get_cached_repo_blob(model_cache, from_namespace, from_repo_name,
+ mount_blob_digest)
+ if mount_blob is None:
+ logger.debug('Blob `%s` under repository `%s` not found', mount_blob_digest, from_repo)
+ return None
+
+ logger.debug('Mounting blob `%s` under repository `%s` into `%s`', mount_blob_digest,
+ from_repo, repository_ref)
+
+ # Mount the blob into the current repository and return that we've completed the operation.
+ expiration_sec = app.config['PUSH_TEMP_TAG_EXPIRATION_SEC']
+ mounted = registry_model.mount_blob_into_repository(mount_blob, repository_ref, expiration_sec)
+ if not mounted:
+    logger.debug('Could not mount blob `%s` from repository `%s`', mount_blob_digest,
+                 from_repo)
+    return None
+
+ # Return the response for the blob indicating that it was mounted, and including its content
+ # digest.
+ logger.debug('Mounted blob `%s` under repository `%s` into `%s`', mount_blob_digest,
+ from_repo, repository_ref)
+
+ namespace_name = repository_ref.namespace_name
+ repo_name = repository_ref.name
+
+ return Response(
+ status=201,
+ headers={
+ 'Docker-Content-Digest': mount_blob_digest,
+ 'Location':
+ get_app_url() + url_for('v2.download_blob',
+ repository='%s/%s' % (namespace_name, repo_name),
+ digest=mount_blob_digest),
+ },
+ )
+
@v2_bp.route('/<repopath:repository>/blobs/uploads/', methods=['POST'])
@parse_repository_name()
@process_registry_jwt_auth(scopes=['pull', 'push'])
@require_repo_write
@anon_protect
+@check_readonly
def start_blob_upload(namespace_name, repo_name):
- # Begin the blob upload process in the database and storage.
- location_name = storage.preferred_locations[0]
- new_upload_uuid, upload_metadata = storage.initiate_chunked_upload(location_name)
- repository_exists = model.create_blob_upload(namespace_name, repo_name, new_upload_uuid,
- location_name, upload_metadata)
- if not repository_exists:
+ repository_ref = registry_model.lookup_repository(namespace_name, repo_name)
+ if repository_ref is None:
raise NameUnknown()
+ # Check for mounting of a blob from another repository.
+ mount_blob_digest = request.args.get('mount', None)
+ if mount_blob_digest is not None:
+ response = _try_to_mount_blob(repository_ref, mount_blob_digest)
+ if response is not None:
+ return response
+
+ # Begin the blob upload process.
+ blob_uploader = create_blob_upload(repository_ref, storage, _upload_settings())
+ if blob_uploader is None:
+ logger.debug('Could not create a blob upload for `%s/%s`', namespace_name, repo_name)
+ raise InvalidRequest(message='Unable to start blob upload for unknown repository')
+
+ # Check if the blob will be uploaded now or in follow-up calls. If the `digest` is given, then
+ # the upload will occur as a monolithic chunk in this call. Otherwise, we return a redirect
+ # for the client to upload the chunks as distinct operations.
digest = request.args.get('digest', None)
if digest is None:
# Short-circuit because the user will send the blob data in another request.
return Response(
status=202,
headers={
- 'Docker-Upload-UUID': new_upload_uuid,
+ 'Docker-Upload-UUID': blob_uploader.blob_upload_id,
'Range': _render_range(0),
- 'Location': get_app_url() + url_for('v2.upload_chunk',
- repository='%s/%s' % (namespace_name, repo_name),
- upload_uuid=new_upload_uuid)
+ 'Location':
+ get_app_url() + url_for('v2.upload_chunk',
+ repository='%s/%s' % (namespace_name, repo_name),
+ upload_uuid=blob_uploader.blob_upload_id)
},
)
- # The user plans to send us the entire body right now.
- # Find the upload.
- blob_upload = model.blob_upload_by_uuid(namespace_name, repo_name, new_upload_uuid)
- if blob_upload is None:
- raise BlobUploadUnknown()
-
- # Upload the chunk to storage while calculating some metadata and updating
- # the upload state.
- updated_blob_upload = _upload_chunk(blob_upload, request.headers.get('range'))
- if updated_blob_upload is None:
- _abort_range_not_satisfiable(blob_upload.byte_count, new_upload_uuid)
-
- # Save the upload state to the database.
- model.update_blob_upload(updated_blob_upload)
-
- # Finalize the upload process in the database and storage.
- _finish_upload(namespace_name, repo_name, updated_blob_upload, digest)
+ # Upload the data sent and commit it to a blob.
+ with complete_when_uploaded(blob_uploader):
+ _upload_chunk(blob_uploader, digest)
# Write the response to the client.
return Response(
status=201,
headers={
'Docker-Content-Digest': digest,
- 'Location': get_app_url() + url_for('v2.download_blob',
- repository='%s/%s' % (namespace_name, repo_name),
- digest=digest),
+ 'Location':
+ get_app_url() + url_for('v2.download_blob',
+ repository='%s/%s' % (namespace_name, repo_name),
+ digest=digest),
},
)
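
A hedged sketch of the two upload flows the endpoint above distinguishes: a monolithic push when `digest` accompanies the initial POST, versus the chunked PATCH/PUT sequence otherwise. URLs, filenames, and the digest are placeholders; real clients also set Content-Length/Content-Range headers on chunked requests.

```python
# Assumed client behavior per the Docker V2 spec; not registry code.
import requests

base = 'https://registry.example.com/v2/devtable/simple/blobs/uploads/'
data = open('layer.tar.gz', 'rb').read()  # hypothetical layer file
digest = 'sha256:<computed digest of data>'

# Monolithic: a single POST carrying the whole blob plus its digest.
requests.post(base, params={'digest': digest}, data=data)  # expect 201

# Chunked: POST opens a session, PATCH sends chunks, PUT commits the digest.
session = requests.post(base)  # expect 202
location = session.headers['Location']
requests.patch(location, data=data[:len(data) // 2])
requests.patch(location, data=data[len(data) // 2:])
requests.put(location, params={'digest': digest})  # expect 201
```
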
@@ -171,15 +239,19 @@ def start_blob_upload(namespace_name, repo_name):
@require_repo_write
@anon_protect
def fetch_existing_upload(namespace_name, repo_name, upload_uuid):
- blob_upload = model.blob_upload_by_uuid(namespace_name, repo_name, upload_uuid)
- if blob_upload is None:
+ repository_ref = registry_model.lookup_repository(namespace_name, repo_name)
+ if repository_ref is None:
+ raise NameUnknown()
+
+ uploader = retrieve_blob_upload_manager(repository_ref, upload_uuid, storage, _upload_settings())
+ if uploader is None:
raise BlobUploadUnknown()
return Response(
status=204,
headers={
'Docker-Upload-UUID': upload_uuid,
- 'Range': _render_range(blob_upload.byte_count+1), # byte ranges are exclusive
+ 'Range': _render_range(uploader.blob_upload.byte_count + 1), # byte ranges are exclusive
},
)
@@ -189,27 +261,25 @@ def fetch_existing_upload(namespace_name, repo_name, upload_uuid):
@process_registry_jwt_auth(scopes=['pull', 'push'])
@require_repo_write
@anon_protect
+@check_readonly
def upload_chunk(namespace_name, repo_name, upload_uuid):
- # Find the upload.
- blob_upload = model.blob_upload_by_uuid(namespace_name, repo_name, upload_uuid)
- if blob_upload is None:
+ repository_ref = registry_model.lookup_repository(namespace_name, repo_name)
+ if repository_ref is None:
+ raise NameUnknown()
+
+ uploader = retrieve_blob_upload_manager(repository_ref, upload_uuid, storage, _upload_settings())
+ if uploader is None:
raise BlobUploadUnknown()
- # Upload the chunk to storage while calculating some metadata and updating
- # the upload state.
- updated_blob_upload = _upload_chunk(blob_upload, request.headers.get('range'))
- if updated_blob_upload is None:
- _abort_range_not_satisfiable(blob_upload.byte_count, upload_uuid)
-
- # Save the upload state to the database.
- model.update_blob_upload(updated_blob_upload)
+ # Upload the chunk for the blob.
+ _upload_chunk(uploader)
# Write the response to the client.
return Response(
status=204,
headers={
'Location': _current_request_url(),
- 'Range': _render_range(updated_blob_upload.byte_count, with_bytes_prefix=False),
+ 'Range': _render_range(uploader.blob_upload.byte_count, with_bytes_prefix=False),
'Docker-Upload-UUID': upload_uuid,
},
)
@@ -220,6 +290,7 @@ def upload_chunk(namespace_name, repo_name, upload_uuid):
@process_registry_jwt_auth(scopes=['pull', 'push'])
@require_repo_write
@anon_protect
+@check_readonly
def monolithic_upload_or_last_chunk(namespace_name, repo_name, upload_uuid):
# Ensure the digest is present before proceeding.
digest = request.args.get('digest', None)
@@ -227,28 +298,26 @@ def monolithic_upload_or_last_chunk(namespace_name, repo_name, upload_uuid):
raise BlobUploadInvalid(detail={'reason': 'Missing digest arg on monolithic upload'})
# Find the upload.
- blob_upload = model.blob_upload_by_uuid(namespace_name, repo_name, upload_uuid)
- if blob_upload is None:
+ repository_ref = registry_model.lookup_repository(namespace_name, repo_name)
+ if repository_ref is None:
+ raise NameUnknown()
+
+ uploader = retrieve_blob_upload_manager(repository_ref, upload_uuid, storage, _upload_settings())
+ if uploader is None:
raise BlobUploadUnknown()
- # Upload the chunk to storage while calculating some metadata and updating
- # the upload state.
- updated_blob_upload = _upload_chunk(blob_upload, request.headers.get('range'))
- if updated_blob_upload is None:
- _abort_range_not_satisfiable(blob_upload.byte_count, upload_uuid)
-
- # Finalize the upload process in the database and storage.
- _finish_upload(namespace_name, repo_name, updated_blob_upload, digest)
+ # Upload the chunk for the blob and commit it once complete.
+ with complete_when_uploaded(uploader):
+ _upload_chunk(uploader, digest)
# Write the response to the client.
- return Response(
- status=201,
- headers={
- 'Docker-Content-Digest': digest,
- 'Location': get_app_url() + url_for('v2.download_blob',
- repository='%s/%s' % (namespace_name, repo_name),
- digest=digest),
- }
+ return Response(status=201, headers={
+ 'Docker-Content-Digest': digest,
+ 'Location':
+ get_app_url() + url_for('v2.download_blob',
+ repository='%s/%s' % (namespace_name, repo_name),
+ digest=digest),
+ },
)
@@ -257,17 +326,17 @@ def monolithic_upload_or_last_chunk(namespace_name, repo_name, upload_uuid):
@process_registry_jwt_auth(scopes=['pull', 'push'])
@require_repo_write
@anon_protect
+@check_readonly
def cancel_upload(namespace_name, repo_name, upload_uuid):
- blob_upload = model.blob_upload_by_uuid(namespace_name, repo_name, upload_uuid)
- if blob_upload is None:
+ repository_ref = registry_model.lookup_repository(namespace_name, repo_name)
+ if repository_ref is None:
+ raise NameUnknown()
+
+ uploader = retrieve_blob_upload_manager(repository_ref, upload_uuid, storage, _upload_settings())
+ if uploader is None:
raise BlobUploadUnknown()
- # We delete the record for the upload first, since if the partial upload in
- # storage fails to delete, it doesn't break anything.
- model.delete_blob_upload(namespace_name, repo_name, upload_uuid)
- storage.cancel_chunked_upload({blob_upload.location_name}, blob_upload.uuid,
- blob_upload.storage_metadata)
-
+ uploader.cancel_upload()
return Response(status=204)
@@ -276,6 +345,7 @@ def cancel_upload(namespace_name, repo_name, upload_uuid):
@process_registry_jwt_auth(scopes=['pull', 'push'])
@require_repo_write
@anon_protect
+@check_readonly
def delete_digest(namespace_name, repo_name, upload_uuid):
# We do not support deleting arbitrary digests, as they break repo images.
raise Unsupported()
@@ -297,11 +367,13 @@ def _abort_range_not_satisfiable(valid_end, upload_uuid):
Writes a failure response for scenarios where the registry cannot function
with the provided range.
- TODO(jzelinskie): Unify this with the V2RegistryException class.
+ TODO: Unify this with the V2RegistryException class.
"""
- flask_abort(Response(status=416, headers={'Location': _current_request_url(),
- 'Range': '0-{0}'.format(valid_end),
- 'Docker-Upload-UUID': upload_uuid}))
+ flask_abort(
+ Response(status=416, headers={
+ 'Location': _current_request_url(),
+ 'Range': '0-{0}'.format(valid_end),
+ 'Docker-Upload-UUID': upload_uuid}))
def _parse_range_header(range_header_text):
@@ -340,173 +412,39 @@ def _start_offset_and_length(range_header):
return start_offset, length
-def _upload_chunk(blob_upload, range_header):
+def _upload_settings():
+ """ Returns the settings for instantiating a blob upload manager. """
+ expiration_sec = app.config['PUSH_TEMP_TAG_EXPIRATION_SEC']
+ settings = BlobUploadSettings(maximum_blob_size=app.config['MAXIMUM_LAYER_SIZE'],
+ bittorrent_piece_size=app.config['BITTORRENT_PIECE_SIZE'],
+ committed_blob_expiration=expiration_sec)
+ return settings
+
+
+def _upload_chunk(blob_uploader, commit_digest=None):
+ """ Performs uploading of a chunk of data in the current request's stream, via the blob uploader
+ given. If commit_digest is specified, the upload is committed to a blob once the stream's
+ data has been read and stored.
"""
- Calculates metadata while uploading a chunk to storage.
+ start_offset, length = _start_offset_and_length(request.headers.get('range'))
+ if None in {start_offset, length}:
+ raise InvalidRequest(message='Invalid range header')
- Returns a BlobUpload object or None if there was a failure.
- """
- # Get the offset and length of the current chunk.
- start_offset, length = _start_offset_and_length(range_header)
- if blob_upload is None or None in {start_offset, length}:
- logger.error('Invalid arguments provided to _upload_chunk')
- return None
+ input_fp = get_input_stream(request)
- if start_offset > 0 and start_offset > blob_upload.byte_count:
- logger.error('start_offset provided to _upload_chunk greater than blob.upload.byte_count')
- return None
+ try:
+ # Upload the data received.
+ blob_uploader.upload_chunk(app.config, input_fp, start_offset, length, metric_queue)
- location_set = {blob_upload.location_name}
-
- upload_error = None
- with database.CloseForLongOperation(app.config):
- input_fp = get_input_stream(request)
-
- if start_offset > 0 and start_offset < blob_upload.byte_count:
- # Skip the bytes which were received on a previous push, which are already stored and
- # included in the sha calculation
- overlap_size = blob_upload.byte_count - start_offset
- input_fp = StreamSlice(input_fp, overlap_size)
-
- # Update our upload bounds to reflect the skipped portion of the overlap
- start_offset = blob_upload.byte_count
- length = max(length - overlap_size, 0)
-
- # We use this to escape early in case we have already processed all of the bytes the user
- # wants to upload
- if length == 0:
- return blob_upload
-
- input_fp = wrap_with_handler(input_fp, blob_upload.sha_state.update)
-
- # Add a hasher for calculating SHA1s for torrents if this is the first chunk and/or we have
- # already calculated hash data for the previous chunk(s).
- piece_hasher = None
- if blob_upload.chunk_count == 0 or blob_upload.piece_sha_state:
- initial_sha1_value = blob_upload.piece_sha_state or resumablehashlib.sha1()
- initial_sha1_pieces_value = blob_upload.piece_hashes or ''
-
- piece_hasher = PieceHasher(app.config['BITTORRENT_PIECE_SIZE'], start_offset,
- initial_sha1_pieces_value, initial_sha1_value)
-
- input_fp = wrap_with_handler(input_fp, piece_hasher.update)
-
- # If this is the first chunk and we're starting at the 0 offset, add a handler to gunzip the
- # stream so we can determine the uncompressed size. We'll throw out this data if another chunk
- # comes in, but in the common case the docker client only sends one chunk.
- size_info = None
- if start_offset == 0 and blob_upload.chunk_count == 0:
- size_info, fn = calculate_size_handler()
- input_fp = wrap_with_handler(input_fp, fn)
-
- start_time = time.time()
- length_written, new_metadata, upload_error = storage.stream_upload_chunk(
- location_set,
- blob_upload.uuid,
- start_offset,
- length,
- input_fp,
- blob_upload.storage_metadata,
- content_type=BLOB_CONTENT_TYPE,
- )
-
- if upload_error is not None:
- logger.error('storage.stream_upload_chunk returned error %s', upload_error)
- return None
-
- # Update the chunk upload time metric.
- metric_queue.chunk_upload_time.Observe(time.time() - start_time,
- labelvalues=[length_written, list(location_set)[0]])
-
- # If we determined an uncompressed size and this is the first chunk, add it to the blob.
- # Otherwise, we clear the size from the blob as it was uploaded in multiple chunks.
- if size_info is not None and blob_upload.chunk_count == 0 and size_info.is_valid:
- blob_upload.uncompressed_byte_count = size_info.uncompressed_size
- elif length_written > 0:
- # Otherwise, if we wrote some bytes and the above conditions were not met, then we don't
- # know the uncompressed size.
- blob_upload.uncompressed_byte_count = None
-
- if piece_hasher is not None:
- blob_upload.piece_hashes = piece_hasher.piece_hashes
- blob_upload.piece_sha_state = piece_hasher.hash_fragment
-
- blob_upload.storage_metadata = new_metadata
- blob_upload.byte_count += length_written
- blob_upload.chunk_count += 1
-
- return blob_upload
-
-
-def _validate_digest(blob_upload, expected_digest):
- """
- Verifies that the digest's SHA matches that of the uploaded data.
- """
- computed_digest = digest_tools.sha256_digest_from_hashlib(blob_upload.sha_state)
- if not digest_tools.digests_equal(computed_digest, expected_digest):
- logger.error('Digest mismatch for upload %s: Expected digest %s, found digest %s',
- blob_upload.uuid, expected_digest, computed_digest)
- raise BlobUploadInvalid(detail={'reason': 'Digest mismatch on uploaded blob'})
-
-
-def _finalize_blob_storage(blob_upload, expected_digest):
- """
- When an upload is successful, this ends the uploading process from the
- storage's perspective.
-
- Returns True if the blob already existed.
- """
- final_blob_location = digest_tools.content_path(expected_digest)
-
- # Move the storage into place, or if this was a re-upload, cancel it
- with database.CloseForLongOperation(app.config):
- already_existed = storage.exists({blob_upload.location_name}, final_blob_location)
- if already_existed:
- # It already existed, clean up our upload which served as proof that the
- # uploader had the blob.
- storage.cancel_chunked_upload({blob_upload.location_name}, blob_upload.uuid,
- blob_upload.storage_metadata)
-
- else:
- # We were the first ones to upload this image (at least to this location)
- # Let's copy it into place
- storage.complete_chunked_upload({blob_upload.location_name}, blob_upload.uuid,
- final_blob_location, blob_upload.storage_metadata)
- return already_existed
-
-
-def _finalize_blob_database(namespace_name, repo_name, blob_upload, digest, already_existed):
- """
- When an upload is successful, this ends the uploading process from the
- database's perspective.
- """
- # Create the blob and temporarily tag it.
- blob_storage = model.create_blob_and_temp_tag(
- namespace_name,
- repo_name,
- digest,
- blob_upload,
- app.config['PUSH_TEMP_TAG_EXPIRATION_SEC'],
- )
-
- # If it doesn't already exist, create the BitTorrent pieces for the blob.
- if blob_upload.piece_sha_state is not None and not already_existed:
- piece_bytes = blob_upload.piece_hashes + blob_upload.piece_sha_state.digest()
- model.save_bittorrent_pieces(blob_storage, app.config['BITTORRENT_PIECE_SIZE'], piece_bytes)
-
- # Delete the blob upload.
- model.delete_blob_upload(namespace_name, repo_name, blob_upload.uuid)
-
-
-def _finish_upload(namespace_name, repo_name, blob_upload, digest):
- """
- When an upload is successful, this ends the uploading process.
- """
- _validate_digest(blob_upload, digest)
- _finalize_blob_database(
- namespace_name,
- repo_name,
- blob_upload,
- digest,
- _finalize_blob_storage(blob_upload, digest),
- )
+ if commit_digest is not None:
+ # Commit the upload to a blob.
+ return blob_uploader.commit_to_blob(app.config, commit_digest)
+ except BlobTooLargeException as ble:
+ raise LayerTooLarge(uploaded=ble.uploaded, max_allowed=ble.max_allowed)
+ except BlobRangeMismatchException:
+ logger.exception('Exception when uploading blob to %s', blob_uploader.blob_upload_id)
+ _abort_range_not_satisfiable(blob_uploader.blob_upload.byte_count,
+ blob_uploader.blob_upload_id)
+ except BlobUploadException:
+ logger.exception('Exception when uploading blob to %s', blob_uploader.blob_upload_id)
+ raise BlobUploadInvalid()
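
The handlers above lean on `complete_when_uploaded`, which this diff imports but does not show. A plausible sketch of that context-manager pattern, assuming a `committed_blob` attribute that is set once `commit_to_blob` succeeds (both names are assumptions, not taken from this patch):

```python
# Assumed shape of the helper: any upload not committed to a blob by the end
# of the block gets cancelled, cleaning up the record and partial storage.
from contextlib import contextmanager

@contextmanager
def complete_when_uploaded(blob_uploader):
    try:
        yield blob_uploader
    finally:
        # `committed_blob` is assumed to be None until a successful commit.
        if blob_uploader.committed_blob is None:
            blob_uploader.cancel_upload()
```
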
diff --git a/endpoints/v2/catalog.py b/endpoints/v2/catalog.py
index 8ae243460..240ab6ac5 100644
--- a/endpoints/v2/catalog.py
+++ b/endpoints/v2/catalog.py
@@ -1,25 +1,55 @@
+from collections import namedtuple
+
from flask import jsonify
-from auth.registry_jwt_auth import process_registry_jwt_auth, get_granted_entity
+import features
+
+from app import model_cache
+from auth.auth_context import get_authenticated_user, get_authenticated_context
+from auth.registry_jwt_auth import process_registry_jwt_auth
+from data import model
+from data.cache import cache_key
from endpoints.decorators import anon_protect
from endpoints.v2 import v2_bp, paginate
-from data.interfaces.v2 import pre_oci_model as model
+
+
+class Repository(namedtuple('Repository', ['id', 'namespace_name', 'name'])):
+ pass
+
@v2_bp.route('/_catalog', methods=['GET'])
@process_registry_jwt_auth()
@anon_protect
@paginate()
-def catalog_search(limit, offset, pagination_callback):
- username = None
- entity = get_granted_entity()
- if entity:
- username = entity.user.username
+def catalog_search(start_id, limit, pagination_callback):
+ def _load_catalog():
+ include_public = bool(features.PUBLIC_CATALOG)
+ if not include_public and not get_authenticated_user():
+ return []
+
+ username = get_authenticated_user().username if get_authenticated_user() else None
+ if username and not get_authenticated_user().enabled:
+ return []
+
+ query = model.repository.get_visible_repositories(username,
+ kind_filter='image',
+ include_public=include_public,
+ start_id=start_id,
+ limit=limit + 1)
+ # NOTE: The repository ID is in `rid` (not `id`) here, as per the requirements of
+ # the `get_visible_repositories` call.
+ return [Repository(repo.rid, repo.namespace_user.username, repo.name)._asdict()
+ for repo in query]
+
+ context_key = get_authenticated_context().unique_key if get_authenticated_context() else None
+ catalog_cache_key = cache_key.for_catalog_page(context_key, start_id, limit)
+ visible_repositories = [Repository(**repo_dict) for repo_dict
+ in model_cache.retrieve(catalog_cache_key, _load_catalog)]
- visible_repositories = model.get_visible_repositories(username, limit+1, offset)
response = jsonify({
'repositories': ['%s/%s' % (repo.namespace_name, repo.name)
for repo in visible_repositories][0:limit],
})
- pagination_callback(len(visible_repositories), response)
+ pagination_callback(visible_repositories, response)
return response
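
The catalog handler above uses a retrieve-or-compute cache keyed on the auth context and page. A minimal standalone sketch of that pattern (the class and key tuple here are illustrative, not the `data.cache` API):

```python
# Retrieve-or-compute sketch mirroring model_cache.retrieve: return the cached
# value for `key` if present, else invoke the loader, store, and return it.
class InMemoryCache(object):
    def __init__(self):
        self._entries = {}

    def retrieve(self, key, loader):
        if key not in self._entries:
            self._entries[key] = loader()
        return self._entries[key]

cache = InMemoryCache()
page = cache.retrieve(('catalog', 'user:devtable', 0, 50),
                      lambda: ['devtable/simple', 'devtable/complex'])
```
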
diff --git a/endpoints/v2/errors.py b/endpoints/v2/errors.py
index 0f8a5284e..1479984db 100644
--- a/endpoints/v2/errors.py
+++ b/endpoints/v2/errors.py
@@ -1,131 +1,168 @@
+import bitmath
+
+
class V2RegistryException(Exception):
- def __init__(self, error_code_str, message, detail, http_status_code=400,
- repository=None, scopes=None):
+ def __init__(self, error_code_str, message, detail, http_status_code=400, repository=None,
+ scopes=None, is_read_only=False):
super(V2RegistryException, self).__init__(message)
self.http_status_code = http_status_code
self.repository = repository
self.scopes = scopes
+ self.is_read_only = is_read_only
self._error_code_str = error_code_str
self._detail = detail
def as_dict(self):
- return {
+ error_dict = {
'code': self._error_code_str,
- 'message': self.message,
+ 'message': str(self),
'detail': self._detail if self._detail is not None else {},
}
+ if self.is_read_only:
+ error_dict['is_readonly'] = True
+
+ return error_dict
+
class BlobUnknown(V2RegistryException):
def __init__(self, detail=None):
- super(BlobUnknown, self).__init__('BLOB_UNKNOWN',
- 'blob unknown to registry',
- detail,
- 404)
+ super(BlobUnknown, self).__init__('BLOB_UNKNOWN', 'blob unknown to registry', detail, 404)
class BlobUploadInvalid(V2RegistryException):
def __init__(self, detail=None):
- super(BlobUploadInvalid, self).__init__('BLOB_UPLOAD_INVALID',
- 'blob upload invalid',
- detail)
+ super(BlobUploadInvalid, self).__init__('BLOB_UPLOAD_INVALID', 'blob upload invalid', detail)
class BlobUploadUnknown(V2RegistryException):
def __init__(self, detail=None):
super(BlobUploadUnknown, self).__init__('BLOB_UPLOAD_UNKNOWN',
- 'blob upload unknown to registry',
- detail,
- 404)
+ 'blob upload unknown to registry', detail, 404)
class DigestInvalid(V2RegistryException):
def __init__(self, detail=None):
super(DigestInvalid, self).__init__('DIGEST_INVALID',
- 'provided digest did not match uploaded content',
- detail)
+ 'provided digest did not match uploaded content', detail)
class ManifestBlobUnknown(V2RegistryException):
def __init__(self, detail=None):
super(ManifestBlobUnknown, self).__init__('MANIFEST_BLOB_UNKNOWN',
- 'manifest blob unknown to registry',
- detail)
+ 'manifest blob unknown to registry', detail)
class ManifestInvalid(V2RegistryException):
def __init__(self, detail=None, http_status_code=400):
- super(ManifestInvalid, self).__init__('MANIFEST_INVALID',
- 'manifest invalid',
- detail,
+ super(ManifestInvalid, self).__init__('MANIFEST_INVALID', 'manifest invalid', detail,
http_status_code)
class ManifestUnknown(V2RegistryException):
def __init__(self, detail=None):
- super(ManifestUnknown, self).__init__('MANIFEST_UNKNOWN',
- 'manifest unknown',
- detail,
- 404)
+ super(ManifestUnknown, self).__init__('MANIFEST_UNKNOWN', 'manifest unknown', detail, 404)
+
+
+class TagExpired(V2RegistryException):
+ def __init__(self, message=None, detail=None):
+ super(TagExpired, self).__init__('TAG_EXPIRED',
+ message or 'Tag has expired',
+ detail,
+ 404)
class ManifestUnverified(V2RegistryException):
def __init__(self, detail=None):
super(ManifestUnverified, self).__init__('MANIFEST_UNVERIFIED',
- 'manifest failed signature verification',
- detail)
+ 'manifest failed signature verification', detail)
class NameInvalid(V2RegistryException):
- def __init__(self, detail=None):
- super(NameInvalid, self).__init__('NAME_INVALID',
- 'invalid repository name',
- detail)
+ def __init__(self, detail=None, message=None):
+ super(NameInvalid, self).__init__('NAME_INVALID', message or 'invalid repository name', detail)
class NameUnknown(V2RegistryException):
def __init__(self, detail=None):
- super(NameUnknown, self).__init__('NAME_UNKNOWN',
- 'repository name not known to registry',
- detail,
- 404)
+ super(NameUnknown, self).__init__('NAME_UNKNOWN', 'repository name not known to registry',
+ detail, 404)
class SizeInvalid(V2RegistryException):
def __init__(self, detail=None):
super(SizeInvalid, self).__init__('SIZE_INVALID',
- 'provided length did not match content length',
- detail)
+ 'provided length did not match content length', detail)
class TagAlreadyExists(V2RegistryException):
def __init__(self, detail=None):
- super(TagAlreadyExists, self).__init__('TAG_ALREADY_EXISTS',
- 'tag was already pushed',
- detail,
+ super(TagAlreadyExists, self).__init__('TAG_ALREADY_EXISTS', 'tag was already pushed', detail,
409)
+
class TagInvalid(V2RegistryException):
def __init__(self, detail=None):
- super(TagInvalid, self).__init__('TAG_INVALID',
- 'manifest tag did not match URI',
- detail)
+ super(TagInvalid, self).__init__('TAG_INVALID', 'manifest tag did not match URI', detail)
+
+
+class LayerTooLarge(V2RegistryException):
+ def __init__(self, uploaded=None, max_allowed=None):
+ detail = {}
+ message = 'Uploaded blob is larger than allowed by this registry'
+
+ if uploaded is not None and max_allowed is not None:
+ detail = {
+ 'reason': '%s is greater than maximum allowed size %s' % (uploaded, max_allowed),
+ 'max_allowed': max_allowed,
+ 'uploaded': uploaded,
+ }
+
+ up_str = bitmath.Byte(uploaded).best_prefix().format("{value:.2f} {unit}")
+ max_str = bitmath.Byte(max_allowed).best_prefix().format("{value:.2f} {unit}")
+ message = 'Uploaded blob of %s is larger than %s allowed by this registry' % (up_str,
+ max_str)
+
+ super(LayerTooLarge, self).__init__('BLOB_UPLOAD_INVALID', message, detail, 413)
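
For reference, `bitmath.Byte(...).best_prefix()` picks the most readable binary unit, which is what produces the humanized sizes in the message above. A quick illustration with a hypothetical upload size:

```python
# bitmath usage as in LayerTooLarge: humanize raw byte counts.
import bitmath

uploaded = 3 * 1024 * 1024 * 1024  # hypothetical 3 GiB upload
print(bitmath.Byte(uploaded).best_prefix().format("{value:.2f} {unit}"))
# -> "3.00 GiB"
```
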
class Unauthorized(V2RegistryException):
def __init__(self, detail=None, repository=None, scopes=None):
- super(Unauthorized, self).__init__('UNAUTHORIZED',
- 'access to the requested resource is not authorized',
- detail,
- 401,
- repository=repository,
- scopes=scopes)
+ super(Unauthorized, self).__init__('UNAUTHORIZED',
+ 'access to the requested resource is not authorized',
+ detail, 401, repository=repository, scopes=scopes)
class Unsupported(V2RegistryException):
+ def __init__(self, detail=None, message=None):
+ super(Unsupported, self).__init__('UNSUPPORTED', message or 'The operation is unsupported.',
+ detail, 405)
+
+
+class InvalidLogin(V2RegistryException):
+ def __init__(self, message=None):
+ super(InvalidLogin, self).__init__('UNAUTHORIZED', message or
+ 'Specified credentials are invalid', {}, 401)
+
+
+class InvalidRequest(V2RegistryException):
+ def __init__(self, message=None):
+ super(InvalidRequest, self).__init__('INVALID_REQUEST', message or 'Invalid request', {}, 400)
+
+
+class NamespaceDisabled(V2RegistryException):
+ def __init__(self, message=None):
+ message = message or 'This namespace is disabled. Please contact your system administrator.'
+ super(NamespaceDisabled, self).__init__('DENIED', message, {}, 405)
+
+
+class BlobDownloadGeoBlocked(V2RegistryException):
def __init__(self, detail=None):
- super(Unsupported, self).__init__('UNSUPPORTED',
- 'The operation is unsupported.',
- detail,
- 405)
+ message = ('The region from which you are pulling has been geo-ip blocked. ' +
+ 'Please contact the namespace owner.')
+ super(BlobDownloadGeoBlocked, self).__init__('DENIED', message, detail, 403)
+
+
+class ReadOnlyMode(V2RegistryException):
+ def __init__(self, detail=None):
+ message = ('System is currently read-only. Pulls will succeed but all write operations ' +
+ 'are currently suspended.')
+ super(ReadOnlyMode, self).__init__('DENIED', message, detail, 405, is_read_only=True)
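
Given `as_dict()` above, a read-only rejection would serialize roughly as follows; this is a sketch of the JSON body a client sees, not output captured from the registry:

```python
# Approximate error payload for ReadOnlyMode, built via as_dict().
error_body = {
    'code': 'DENIED',
    'message': 'System is currently read-only. Pulls will succeed but all '
               'write operations are currently suspended.',
    'detail': {},
    'is_readonly': True,
}
```
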
diff --git a/endpoints/v2/manifest.py b/endpoints/v2/manifest.py
index a4155add2..b71b3bb3f 100644
--- a/endpoints/v2/manifest.py
+++ b/endpoints/v2/manifest.py
@@ -6,58 +6,74 @@ from flask import request, url_for, Response
import features
-from app import docker_v2_signing_key, app, metric_queue
+from app import app, metric_queue, storage
from auth.registry_jwt_auth import process_registry_jwt_auth
-from data.interfaces.v2 import pre_oci_model as model, Label
from digest import digest_tools
-from endpoints.common import parse_repository_name
-from endpoints.decorators import anon_protect
+from data.registry_model import registry_model
+from data.model.oci.manifest import CreateManifestException
+from endpoints.decorators import anon_protect, parse_repository_name, check_readonly
from endpoints.v2 import v2_bp, require_repo_read, require_repo_write
-from endpoints.v2.errors import (BlobUnknown, ManifestInvalid, ManifestUnknown, TagInvalid,
- NameInvalid)
-from endpoints.trackhelper import track_and_log
-from endpoints.notificationhelper import spawn_notification
+from endpoints.v2.errors import (ManifestInvalid, ManifestUnknown, NameInvalid, TagExpired,
+ NameUnknown)
from image.docker import ManifestException
-from image.docker.schema1 import DockerSchema1Manifest, DockerSchema1ManifestBuilder
-from image.docker.schema2 import DOCKER_SCHEMA2_CONTENT_TYPES
+from image.docker.schema1 import DOCKER_SCHEMA1_MANIFEST_CONTENT_TYPE, DOCKER_SCHEMA1_CONTENT_TYPES
+from image.docker.schema2 import DOCKER_SCHEMA2_CONTENT_TYPES, OCI_CONTENT_TYPES
+from image.docker.schemas import parse_manifest_from_bytes
+from notifications import spawn_notification
+from util.audit import track_and_log
+from util.bytes import Bytes
from util.names import VALID_TAG_PATTERN
from util.registry.replication import queue_replication_batch
-from util.validation import is_json
logger = logging.getLogger(__name__)
-
BASE_MANIFEST_ROUTE = '//manifests/'
MANIFEST_DIGEST_ROUTE = BASE_MANIFEST_ROUTE.format(digest_tools.DIGEST_PATTERN)
MANIFEST_TAGNAME_ROUTE = BASE_MANIFEST_ROUTE.format(VALID_TAG_PATTERN)
+
@v2_bp.route(MANIFEST_TAGNAME_ROUTE, methods=['GET'])
@parse_repository_name()
@process_registry_jwt_auth(scopes=['pull'])
@require_repo_read
@anon_protect
def fetch_manifest_by_tagname(namespace_name, repo_name, manifest_ref):
- manifest = model.get_manifest_by_tag(namespace_name, repo_name, manifest_ref)
+ repository_ref = registry_model.lookup_repository(namespace_name, repo_name)
+ if repository_ref is None:
+ raise NameUnknown()
+
+ tag = registry_model.get_repo_tag(repository_ref, manifest_ref)
+ if tag is None:
+ if registry_model.has_expired_tag(repository_ref, manifest_ref):
+ logger.debug('Found expired tag %s for repository %s/%s', manifest_ref, namespace_name,
+ repo_name)
+ msg = 'Tag %s was deleted or has expired. To pull, revive via time machine' % manifest_ref
+ raise TagExpired(msg)
+
+ raise ManifestUnknown()
+
+ manifest = registry_model.get_manifest_for_tag(tag, backfill_if_necessary=True)
if manifest is None:
- has_tag = model.has_active_tag(namespace_name, repo_name, manifest_ref)
- if not has_tag:
- raise ManifestUnknown()
+ # Something went wrong.
+ raise ManifestInvalid()
- manifest = _generate_and_store_manifest(namespace_name, repo_name, manifest_ref)
- if manifest is None:
- raise ManifestUnknown()
+ manifest_bytes, manifest_digest, manifest_media_type = _rewrite_schema_if_necessary(
+ namespace_name, repo_name, manifest_ref, manifest)
+ if manifest_bytes is None:
+ raise ManifestUnknown()
- repo = model.get_repository(namespace_name, repo_name)
- if repo is not None:
- track_and_log('pull_repo', repo, analytics_name='pull_repo_100x', analytics_sample=0.01,
- tag=manifest_ref)
- metric_queue.repository_pull.Inc(labelvalues=[namespace_name, repo_name, 'v2', True])
+ track_and_log('pull_repo', repository_ref, analytics_name='pull_repo_100x', analytics_sample=0.01,
+ tag=manifest_ref)
+ metric_queue.repository_pull.Inc(labelvalues=[namespace_name, repo_name, 'v2', True])
return Response(
- manifest.json,
+ manifest_bytes.as_unicode(),
status=200,
- headers={'Content-Type': manifest.media_type, 'Docker-Content-Digest': manifest.digest},
+ headers={
+ 'Content-Type': manifest_media_type,
+ 'Docker-Content-Digest': manifest_digest,
+ },
)
@@ -67,163 +83,153 @@ def fetch_manifest_by_tagname(namespace_name, repo_name, manifest_ref):
@require_repo_read
@anon_protect
def fetch_manifest_by_digest(namespace_name, repo_name, manifest_ref):
- manifest = model.get_manifest_by_digest(namespace_name, repo_name, manifest_ref)
+ repository_ref = registry_model.lookup_repository(namespace_name, repo_name)
+ if repository_ref is None:
+ raise NameUnknown()
+
+ manifest = registry_model.lookup_manifest_by_digest(repository_ref, manifest_ref)
if manifest is None:
- # Without a tag name to reference, we can't make an attempt to generate the manifest
raise ManifestUnknown()
- repo = model.get_repository(namespace_name, repo_name)
- if repo is not None:
- track_and_log('pull_repo', repo, manifest_digest=manifest_ref)
- metric_queue.repository_pull.Inc(labelvalues=[namespace_name, repo_name, 'v2', True])
+ manifest_bytes, manifest_digest, manifest_media_type = _rewrite_schema_if_necessary(
+ namespace_name, repo_name, '$digest', manifest)
+ if manifest_digest is None:
+ raise ManifestUnknown()
- return Response(manifest.json, status=200, headers={'Content-Type': manifest.media_type,
- 'Docker-Content-Digest': manifest.digest})
+ track_and_log('pull_repo', repository_ref, manifest_digest=manifest_ref)
+ metric_queue.repository_pull.Inc(labelvalues=[namespace_name, repo_name, 'v2', True])
+
+ return Response(manifest_bytes.as_unicode(), status=200, headers={
+ 'Content-Type': manifest_media_type,
+ 'Docker-Content-Digest': manifest_digest,
+ })
+
+
+def _rewrite_schema_if_necessary(namespace_name, repo_name, tag_name, manifest):
+ # As per the Docker protocol, if the manifest is not schema version 1 and the manifest's
+ # media type is not in the Accept header, we return a schema 1 version of the manifest for
+ # the amd64+linux platform, if any, or None if none.
+ # See: https://docs.docker.com/registry/spec/manifest-v2-2
+ mimetypes = [mimetype for mimetype, _ in request.accept_mimetypes]
+ if manifest.media_type in mimetypes:
+ return manifest.internal_manifest_bytes, manifest.digest, manifest.media_type
+
+ # Short-circuit check: If the mimetypes list is empty or just `application/json`, verify that
+ # we have a schema 1 manifest and return it.
+ if not mimetypes or mimetypes == ['application/json']:
+ if manifest.media_type in DOCKER_SCHEMA1_CONTENT_TYPES:
+ return manifest.internal_manifest_bytes, manifest.digest, manifest.media_type
+
+ logger.debug('Manifest `%s` is not compatible with %s; checking for conversion',
+ manifest.digest, request.accept_mimetypes)
+ converted = registry_model.convert_manifest(manifest, namespace_name, repo_name, tag_name,
+ mimetypes, storage)
+ if converted is not None:
+ return converted.bytes, converted.digest, converted.media_type
+
+ # For back-compat, we always default to schema 1 if the manifest could not be converted.
+ schema1 = registry_model.get_schema1_parsed_manifest(manifest, namespace_name, repo_name,
+ tag_name, storage)
+ if schema1 is None:
+ return None, None, None
+
+ return schema1.bytes, schema1.digest, schema1.media_type
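
A hedged client-side illustration of the negotiation `_rewrite_schema_if_necessary` implements: the Accept header decides whether the stored manifest is returned verbatim, converted, or downgraded to schema 1. The host and repository are placeholders; the media type string is the standard Docker schema 2 type.

```python
# Illustrative pulls against the manifest endpoint (hypothetical host).
import requests

url = 'https://registry.example.com/v2/devtable/simple/manifests/latest'

# A schema 2-aware client receives the stored manifest as-is.
requests.get(url, headers={
    'Accept': 'application/vnd.docker.distribution.manifest.v2+json'})

# A legacy client (no Accept header, or application/json only) gets a
# converted or schema 1 manifest when possible, or a 404 when it is not.
requests.get(url, headers={'Accept': 'application/json'})
```
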
def _reject_manifest2_schema2(func):
@wraps(func)
def wrapped(*args, **kwargs):
- if request.content_type in DOCKER_SCHEMA2_CONTENT_TYPES:
+ namespace_name = kwargs['namespace_name']
+ if registry_model.supports_schema2(namespace_name):
+ return func(*args, **kwargs)
+
+ if _doesnt_accept_schema_v1() or \
+ request.content_type in DOCKER_SCHEMA2_CONTENT_TYPES | OCI_CONTENT_TYPES:
raise ManifestInvalid(detail={'message': 'manifest schema version not supported'},
http_status_code=415)
return func(*args, **kwargs)
+
return wrapped
+def _doesnt_accept_schema_v1():
+ # If the client doesn't specify anything, still give them Schema v1.
+ return len(request.accept_mimetypes) != 0 and \
+ DOCKER_SCHEMA1_MANIFEST_CONTENT_TYPE not in request.accept_mimetypes
+
+
@v2_bp.route(MANIFEST_TAGNAME_ROUTE, methods=['PUT'])
-@_reject_manifest2_schema2
@parse_repository_name()
+@_reject_manifest2_schema2
@process_registry_jwt_auth(scopes=['pull', 'push'])
@require_repo_write
@anon_protect
+@check_readonly
def write_manifest_by_tagname(namespace_name, repo_name, manifest_ref):
- try:
- manifest = DockerSchema1Manifest(request.data)
- except ManifestException as me:
- logger.exception("failed to parse manifest when writing by tagname")
- raise ManifestInvalid(detail={'message': 'failed to parse manifest: %s' % me.message})
-
- if manifest.tag != manifest_ref:
- raise TagInvalid()
-
- return _write_manifest_and_log(namespace_name, repo_name, manifest)
+ parsed = _parse_manifest()
+ return _write_manifest_and_log(namespace_name, repo_name, manifest_ref, parsed)
@v2_bp.route(MANIFEST_DIGEST_ROUTE, methods=['PUT'])
-@_reject_manifest2_schema2
@parse_repository_name()
+@_reject_manifest2_schema2
@process_registry_jwt_auth(scopes=['pull', 'push'])
@require_repo_write
@anon_protect
+@check_readonly
def write_manifest_by_digest(namespace_name, repo_name, manifest_ref):
- try:
- manifest = DockerSchema1Manifest(request.data)
- except ManifestException as me:
- logger.exception("failed to parse manifest when writing by digest")
- raise ManifestInvalid(detail={'message': 'failed to parse manifest: %s' % me.message})
-
- if manifest.digest != manifest_ref:
+ parsed = _parse_manifest()
+ if parsed.digest != manifest_ref:
raise ManifestInvalid(detail={'message': 'manifest digest mismatch'})
- return _write_manifest_and_log(namespace_name, repo_name, manifest)
+ if parsed.schema_version != 2:
+ return _write_manifest_and_log(namespace_name, repo_name, parsed.tag, parsed)
+ # If the manifest is schema version 2, then this cannot be a normal tag-based push, as the
+ # manifest does not contain the tag and this call was not given a tag name. Instead, we write the
+ # manifest with a temporary tag, as it is being pushed as part of a call for a manifest list.
+ repository_ref = registry_model.lookup_repository(namespace_name, repo_name)
+ if repository_ref is None:
+ raise NameUnknown()
-def _write_manifest(namespace_name, repo_name, manifest):
- if (manifest.namespace == '' and
- features.LIBRARY_SUPPORT and
- namespace_name == app.config['LIBRARY_NAMESPACE']):
- pass
- elif manifest.namespace != namespace_name:
- raise NameInvalid()
-
- if manifest.repo_name != repo_name:
- raise NameInvalid()
-
- # Ensure that the repository exists.
- repo = model.get_repository(namespace_name, repo_name)
- if repo is None:
- raise NameInvalid()
-
- if not manifest.layers:
- logger.info("manifest provided with no layers")
- raise ManifestInvalid(detail={'message': 'manifest does not reference any layers'})
-
- # Ensure all the blobs in the manifest exist.
- storage_map = model.lookup_blobs_by_digest(namespace_name, repo_name, manifest.checksums)
- for layer in manifest.layers:
- digest_str = str(layer.digest)
- if digest_str not in storage_map:
- raise BlobUnknown(detail={'digest': digest_str})
-
- # Lookup all the images and their parent images (if any) inside the manifest.
- # This will let us know which v1 images we need to synthesize and which ones are invalid.
- all_image_ids = list(manifest.parent_image_ids | manifest.image_ids)
- images_map = model.get_docker_v1_metadata_by_image_id(namespace_name, repo_name, all_image_ids)
-
- # Rewrite any v1 image IDs that do not match the checksum in the database.
- try:
- rewritten_images = list(manifest.rewrite_invalid_image_ids(images_map))
- for rewritten_image in rewritten_images:
- model.synthesize_v1_image(
- repo,
- storage_map[rewritten_image.content_checksum],
- rewritten_image.image_id,
- rewritten_image.created,
- rewritten_image.comment,
- rewritten_image.command,
- rewritten_image.compat_json,
- rewritten_image.parent_image_id,
- )
- except ManifestException as me:
- logger.exception("exception when rewriting v1 metadata")
- raise ManifestInvalid(detail={'message': 'failed synthesizing v1 metadata: %s' % me.message})
-
- # Store the manifest pointing to the tag.
- leaf_layer_id = rewritten_images[-1].image_id
- newly_created = model.save_manifest(namespace_name, repo_name, manifest.tag, leaf_layer_id,
- manifest.digest, manifest.bytes)
- if newly_created:
- labels = []
- for key, value in manifest.layers[-1].v1_metadata.labels.iteritems():
- media_type = 'application/json' if is_json(value) else 'text/plain'
- labels.append(Label(key=key, value=value, source_type='manifest', media_type=media_type))
- model.create_manifest_labels(namespace_name, repo_name, manifest.digest, labels)
-
- return repo, storage_map
-
-
-def _write_manifest_and_log(namespace_name, repo_name, manifest):
- repo, storage_map = _write_manifest(namespace_name, repo_name, manifest)
-
- # Queue all blob manifests for replication.
- if features.STORAGE_REPLICATION:
- with queue_replication_batch(namespace_name) as queue_storage_replication:
- for layer in manifest.layers:
- digest_str = str(layer.digest)
- queue_storage_replication(storage_map[digest_str])
-
- track_and_log('push_repo', repo, tag=manifest.tag)
- spawn_notification(repo, 'repo_push', {'updated_tags': [manifest.tag]})
- metric_queue.repository_push.Inc(labelvalues=[namespace_name, repo_name, 'v2', True])
+ expiration_sec = app.config['PUSH_TEMP_TAG_EXPIRATION_SEC']
+ manifest = registry_model.create_manifest_with_temp_tag(repository_ref, parsed, expiration_sec,
+ storage)
+ if manifest is None:
+ raise ManifestInvalid()
return Response(
'OK',
status=202,
headers={
'Docker-Content-Digest': manifest.digest,
- 'Location': url_for('v2.fetch_manifest_by_digest',
- repository='%s/%s' % (namespace_name, repo_name),
- manifest_ref=manifest.digest),
+ 'Location':
+ url_for('v2.fetch_manifest_by_digest',
+ repository='%s/%s' % (namespace_name, repo_name),
+ manifest_ref=manifest.digest),
},
)
+def _parse_manifest():
+ content_type = request.content_type or DOCKER_SCHEMA1_MANIFEST_CONTENT_TYPE
+ if content_type == 'application/json':
+ # For back-compat.
+ content_type = DOCKER_SCHEMA1_MANIFEST_CONTENT_TYPE
+
+ try:
+ return parse_manifest_from_bytes(Bytes.for_string_or_unicode(request.data), content_type)
+ except ManifestException as me:
+ logger.exception("failed to parse manifest when writing by tagname")
+ raise ManifestInvalid(detail={'message': 'failed to parse manifest: %s' % me})
+
+
@v2_bp.route(MANIFEST_DIGEST_ROUTE, methods=['DELETE'])
@parse_repository_name()
@process_registry_jwt_auth(scopes=['pull', 'push'])
@require_repo_write
@anon_protect
+@check_readonly
def delete_manifest_by_digest(namespace_name, repo_name, manifest_ref):
"""
Delete the manifest specified by the digest.
@@ -231,44 +237,88 @@ def delete_manifest_by_digest(namespace_name, repo_name, manifest_ref):
Note: there is no equivalent method for deleting by tag name because it is
forbidden by the spec.
"""
- tags = model.delete_manifest_by_digest(namespace_name, repo_name, manifest_ref)
+ repository_ref = registry_model.lookup_repository(namespace_name, repo_name)
+ if repository_ref is None:
+ raise NameUnknown()
+
+ manifest = registry_model.lookup_manifest_by_digest(repository_ref, manifest_ref)
+ if manifest is None:
+ raise ManifestUnknown()
+
+ tags = registry_model.delete_tags_for_manifest(manifest)
if not tags:
raise ManifestUnknown()
for tag in tags:
- track_and_log('delete_tag', tag.repository, tag=tag.name, digest=manifest_ref)
+ track_and_log('delete_tag', repository_ref, tag=tag.name, digest=manifest_ref)
return Response(status=202)
-def _generate_and_store_manifest(namespace_name, repo_name, tag_name):
- # Find the v1 metadata for this image and its parents.
- v1_metadata = model.get_docker_v1_metadata_by_tag(namespace_name, repo_name, tag_name)
- parents_v1_metadata = model.get_parents_docker_v1_metadata(namespace_name, repo_name,
- v1_metadata.image_id)
+def _write_manifest_and_log(namespace_name, repo_name, tag_name, manifest_impl):
+ repository_ref, manifest, tag = _write_manifest(namespace_name, repo_name, tag_name,
+ manifest_impl)
- # If the manifest is being generated under the library namespace, then we make its namespace
- # empty.
- manifest_namespace = namespace_name
- if features.LIBRARY_SUPPORT and namespace_name == app.config['LIBRARY_NAMESPACE']:
- manifest_namespace = ''
+ # Queue all blob manifests for replication.
+ if features.STORAGE_REPLICATION:
+ blobs = registry_model.get_manifest_local_blobs(manifest)
+ if blobs is None:
+ logger.error('Could not lookup blobs for manifest `%s`', manifest.digest)
+ else:
+ with queue_replication_batch(namespace_name) as queue_storage_replication:
+ for blob_digest in blobs:
+ queue_storage_replication(blob_digest)
- # Create and populate the manifest builder
- builder = DockerSchema1ManifestBuilder(manifest_namespace, repo_name, tag_name)
+ track_and_log('push_repo', repository_ref, tag=tag_name)
+ spawn_notification(repository_ref, 'repo_push', {'updated_tags': [tag_name]})
+ metric_queue.repository_push.Inc(labelvalues=[namespace_name, repo_name, 'v2', True])
- # Add the leaf layer
- builder.add_layer(v1_metadata.content_checksum, v1_metadata.compat_json)
+ return Response(
+ 'OK',
+ status=202,
+ headers={
+ 'Docker-Content-Digest': manifest.digest,
+ 'Location':
+ url_for('v2.fetch_manifest_by_digest',
+ repository='%s/%s' % (namespace_name, repo_name),
+ manifest_ref=manifest.digest),
+ },
+ )
- for parent_v1_metadata in parents_v1_metadata:
- builder.add_layer(parent_v1_metadata.content_checksum, parent_v1_metadata.compat_json)
- # Sign the manifest with our signing key.
- manifest = builder.build(docker_v2_signing_key)
+def _write_manifest(namespace_name, repo_name, tag_name, manifest_impl):
+ # NOTE: These extra checks are needed for schema version 1 because the manifests
+ # contain the repo namespace, name and tag name.
+ if manifest_impl.schema_version == 1:
+ if (manifest_impl.namespace == '' and features.LIBRARY_SUPPORT and
+ namespace_name == app.config['LIBRARY_NAMESPACE']):
+ pass
+ elif manifest_impl.namespace != namespace_name:
+ raise NameInvalid()
- # Write the manifest to the DB.
- model.create_manifest_and_update_tag(namespace_name, repo_name, tag_name, manifest.digest,
- manifest.bytes)
- return manifest
+ if manifest_impl.repo_name != repo_name:
+ raise NameInvalid()
-def _determine_media_type(value):
- media_type_name = 'application/json' if is_json(value) else 'text/plain'
+ try:
+ if not manifest_impl.layers:
+ raise ManifestInvalid(detail={'message': 'manifest does not reference any layers'})
+ except ManifestException as me:
+ raise ManifestInvalid(detail={'message': str(me)})
+
+ # Ensure that the repository exists.
+ repository_ref = registry_model.lookup_repository(namespace_name, repo_name)
+ if repository_ref is None:
+ raise NameUnknown()
+
+ # Create the manifest(s) and retarget the tag to point to it.
+ try:
+ manifest, tag = registry_model.create_manifest_and_retarget_tag(repository_ref, manifest_impl,
+ tag_name, storage,
+ raise_on_error=True)
+ except CreateManifestException as cme:
+ raise ManifestInvalid(detail={'message': str(cme)})
+
+ if manifest is None:
+ raise ManifestInvalid()
+
+ return repository_ref, manifest, tag
diff --git a/endpoints/v2/tag.py b/endpoints/v2/tag.py
index 6b1ce20ad..779a78351 100644
--- a/endpoints/v2/tag.py
+++ b/endpoints/v2/tag.py
@@ -1,11 +1,12 @@
from flask import jsonify
+from app import model_cache
from auth.registry_jwt_auth import process_registry_jwt_auth
-from endpoints.common import parse_repository_name
+from data.registry_model import registry_model
+from endpoints.decorators import anon_protect, parse_repository_name
from endpoints.v2 import v2_bp, require_repo_read, paginate
from endpoints.v2.errors import NameUnknown
-from endpoints.decorators import anon_protect
-from data.interfaces.v2 import pre_oci_model as model
+
@v2_bp.route('//tags/list', methods=['GET'])
@parse_repository_name()
@@ -13,16 +14,19 @@ from data.interfaces.v2 import pre_oci_model as model
@require_repo_read
@anon_protect
@paginate()
-def list_all_tags(namespace_name, repo_name, limit, offset, pagination_callback):
- repo = model.get_repository(namespace_name, repo_name)
- if repo is None:
+def list_all_tags(namespace_name, repo_name, start_id, limit, pagination_callback):
+ repository_ref = registry_model.lookup_repository(namespace_name, repo_name)
+ if repository_ref is None:
raise NameUnknown()
- tags = model.repository_tags(namespace_name, repo_name, limit, offset)
+ # NOTE: We add 1 to the limit because that's how pagination_callback knows if there are
+ # additional tags.
+ tags = registry_model.lookup_cached_active_repository_tags(model_cache, repository_ref, start_id,
+ limit + 1)
response = jsonify({
'name': '{0}/{1}'.format(namespace_name, repo_name),
- 'tags': [tag.name for tag in tags],
+ 'tags': [tag.name for tag in tags][0:limit],
})
- pagination_callback(len(tags), response)
+ pagination_callback(tags, response)
return response
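
The tags endpoint fetches `limit + 1` rows so the pagination callback can tell whether another page exists without a count query. A compact sketch of that id-based pattern (the helper and its names are illustrative):

```python
# Id-based pagination sketch: fetch one extra row to detect a next page.
def paginated_tags(load_tags, start_id, limit):
    rows = load_tags(start_id=start_id, limit=limit + 1)  # at most limit + 1
    has_more = len(rows) > limit
    page = rows[:limit]
    # A caller would emit an RFC 5988 Link header pointing at next_start_id
    # when has_more is True, exactly as pagination_callback does above.
    next_start_id = page[-1].id if has_more else None
    return page, next_start_id
```
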
diff --git a/endpoints/v2/test/test_blob.py b/endpoints/v2/test/test_blob.py
new file mode 100644
index 000000000..cd3b0932d
--- /dev/null
+++ b/endpoints/v2/test/test_blob.py
@@ -0,0 +1,127 @@
+import hashlib
+import pytest
+
+from mock import patch
+from flask import url_for
+from playhouse.test_utils import assert_query_count
+
+from app import instance_keys, app as realapp
+from auth.auth_context_type import ValidatedAuthContext
+from data import model
+from data.cache import InMemoryDataModelCache
+from data.database import ImageStorageLocation
+from endpoints.test.shared import conduct_call
+from util.security.registry_jwt import generate_bearer_token, build_context_and_subject
+from test.fixtures import *
+
+@pytest.mark.parametrize('method, endpoint', [
+ ('GET', 'download_blob'),
+ ('HEAD', 'check_blob_exists'),
+])
+def test_blob_caching(method, endpoint, client, app):
+ digest = 'sha256:' + hashlib.sha256("a").hexdigest()
+ location = ImageStorageLocation.get(name='local_us')
+ model.blob.store_blob_record_and_temp_link('devtable', 'simple', digest, location, 1, 10000000)
+
+ params = {
+ 'repository': 'devtable/simple',
+ 'digest': digest,
+ }
+
+ user = model.user.get_user('devtable')
+ access = [{
+ 'type': 'repository',
+ 'name': 'devtable/simple',
+ 'actions': ['pull'],
+ }]
+
+ context, subject = build_context_and_subject(ValidatedAuthContext(user=user))
+ token = generate_bearer_token(realapp.config['SERVER_HOSTNAME'], subject, context, access, 600,
+ instance_keys)
+
+ headers = {
+ 'Authorization': 'Bearer %s' % token,
+ }
+
+ # Run without caching to make sure the request works. This also preloads some of
+ # our global model caches.
+ conduct_call(client, 'v2.' + endpoint, url_for, method, params, expected_code=200,
+ headers=headers)
+
+ with patch('endpoints.v2.blob.model_cache', InMemoryDataModelCache()):
+ # First request should make a DB query to retrieve the blob.
+ conduct_call(client, 'v2.' + endpoint, url_for, method, params, expected_code=200,
+ headers=headers)
+
+ # Subsequent requests should use the cached blob.
+ with assert_query_count(0):
+ conduct_call(client, 'v2.' + endpoint, url_for, method, params, expected_code=200,
+ headers=headers)
+
+@pytest.mark.parametrize('mount_digest, source_repo, username, expect_success', [
+ # Unknown blob.
+ ('sha256:unknown', 'devtable/simple', 'devtable', False),
+
+ # Blob not in repo.
+ ('sha256:' + hashlib.sha256("a").hexdigest(), 'devtable/complex', 'devtable', False),
+
+ # Blob in repo.
+ ('sha256:' + hashlib.sha256("b").hexdigest(), 'devtable/complex', 'devtable', True),
+
+ # No access to repo.
+ ('sha256:' + hashlib.sha256("b").hexdigest(), 'devtable/complex', 'public', False),
+
+ # Public repo.
+ ('sha256:' + hashlib.sha256("c").hexdigest(), 'public/publicrepo', 'devtable', True),
+])
+def test_blob_mounting(mount_digest, source_repo, username, expect_success, client, app):
+ location = ImageStorageLocation.get(name='local_us')
+
+ # Store and link some blobs.
+ digest = 'sha256:' + hashlib.sha256("a").hexdigest()
+ model.blob.store_blob_record_and_temp_link('devtable', 'simple', digest, location, 1, 10000000)
+
+ digest = 'sha256:' + hashlib.sha256("b").hexdigest()
+ model.blob.store_blob_record_and_temp_link('devtable', 'complex', digest, location, 1, 10000000)
+
+ digest = 'sha256:' + hashlib.sha256("c").hexdigest()
+ model.blob.store_blob_record_and_temp_link('public', 'publicrepo', digest, location, 1, 10000000)
+
+ params = {
+ 'repository': 'devtable/building',
+ 'mount': mount_digest,
+ 'from': source_repo,
+ }
+
+ user = model.user.get_user(username)
+ access = [{
+ 'type': 'repository',
+ 'name': 'devtable/building',
+ 'actions': ['pull', 'push'],
+ }]
+
+ if source_repo.find(username) == 0:
+ access.append({
+ 'type': 'repository',
+ 'name': source_repo,
+ 'actions': ['pull'],
+ })
+
+ context, subject = build_context_and_subject(ValidatedAuthContext(user=user))
+ token = generate_bearer_token(realapp.config['SERVER_HOSTNAME'], subject, context, access, 600,
+ instance_keys)
+
+ headers = {
+ 'Authorization': 'Bearer %s' % token,
+ }
+
+ expected_code = 201 if expect_success else 202
+ conduct_call(client, 'v2.start_blob_upload', url_for, 'POST', params, expected_code=expected_code,
+ headers=headers)
+
+ if expect_success:
+ # Ensure the blob now exists under the repo.
+ model.blob.get_repo_blob_by_digest('devtable', 'building', mount_digest)
+ else:
+ with pytest.raises(model.blob.BlobDoesNotExist):
+ model.blob.get_repo_blob_by_digest('devtable', 'building', mount_digest)
diff --git a/endpoints/v2/test/test_manifest.py b/endpoints/v2/test/test_manifest.py
new file mode 100644
index 000000000..960501052
--- /dev/null
+++ b/endpoints/v2/test/test_manifest.py
@@ -0,0 +1,55 @@
+import hashlib
+import pytest
+import time
+
+from mock import patch
+
+from flask import url_for
+from playhouse.test_utils import count_queries
+
+from app import instance_keys, app as realapp
+from auth.auth_context_type import ValidatedAuthContext
+from data import model
+from endpoints.test.shared import conduct_call
+from util.security.registry_jwt import generate_bearer_token, build_context_and_subject
+from test.fixtures import *
+
+def test_e2e_query_count_manifest_norewrite(client, app):
+ tag_manifest = model.tag.load_tag_manifest('devtable', 'simple', 'latest')
+
+ params = {
+ 'repository': 'devtable/simple',
+ 'manifest_ref': tag_manifest.digest,
+ }
+
+ user = model.user.get_user('devtable')
+ access = [{
+ 'type': 'repository',
+ 'name': 'devtable/simple',
+ 'actions': ['pull', 'push'],
+ }]
+
+ context, subject = build_context_and_subject(ValidatedAuthContext(user=user))
+ token = generate_bearer_token(realapp.config['SERVER_HOSTNAME'], subject, context, access, 600,
+ instance_keys)
+
+ headers = {
+ 'Authorization': 'Bearer %s' % token,
+ }
+
+ # Conduct a call to prime the instance key and other caches.
+ conduct_call(client, 'v2.write_manifest_by_digest', url_for, 'PUT', params, expected_code=202,
+ headers=headers, raw_body=tag_manifest.json_data)
+
+ timecode = time.time()
+ def get_time():
+ return timecode + 10
+
+ with patch('time.time', get_time):
+ # Necessary in order to have the tag updates not occur in the same second, which is the
+ # granularity supported currently.
+ with count_queries() as counter:
+ conduct_call(client, 'v2.write_manifest_by_digest', url_for, 'PUT', params, expected_code=202,
+ headers=headers, raw_body=tag_manifest.json_data)
+
+ assert counter.count <= 27
diff --git a/endpoints/v2/test/test_manifest_cornercases.py b/endpoints/v2/test/test_manifest_cornercases.py
new file mode 100644
index 000000000..b08242343
--- /dev/null
+++ b/endpoints/v2/test/test_manifest_cornercases.py
@@ -0,0 +1,138 @@
+import hashlib
+
+from contextlib import contextmanager
+
+from app import storage, docker_v2_signing_key
+from data import model, database
+from data.registry_model import registry_model
+from endpoints.v2.manifest import _write_manifest
+from image.docker.schema1 import DockerSchema1ManifestBuilder
+
+from test.fixtures import *
+
+
+ADMIN_ACCESS_USER = 'devtable'
+REPO = 'simple'
+FIRST_TAG = 'first'
+SECOND_TAG = 'second'
+THIRD_TAG = 'third'
+
+
+@contextmanager
+def set_tag_expiration_policy(namespace, expiration_s=0):
+ namespace_user = model.user.get_user(namespace)
+ model.user.change_user_tag_expiration(namespace_user, expiration_s)
+ yield
+
+
+def _perform_cleanup():
+ database.RepositoryTag.delete().where(database.RepositoryTag.hidden == True).execute()
+ repo_object = model.repository.get_repository(ADMIN_ACCESS_USER, REPO)
+ model.gc.garbage_collect_repo(repo_object)
+
+
+def test_missing_link(initialized_db):
+ """ Tests for a corner case that could result in missing a link to a blob referenced by a
+ manifest. The test exercises the case as follows:
+
+ 1) Push a manifest of a single layer with a Docker ID `FIRST_ID`, pointing
+ to blob `FIRST_BLOB`. The database should contain the tag referencing the layer, with
+ no changed ID and the blob not being GCed.
+
+ 2) Push a manifest of two layers:
+
+ Layer 1: `FIRST_ID` with blob `SECOND_BLOB`: Will result in a new synthesized ID
+ Layer 2: `SECOND_ID` with blob `THIRD_BLOB`: Will result in `SECOND_ID` pointing to the
+ `THIRD_BLOB`, with a parent pointing to the new synthesized ID's layer.
+
+ 3) Push a manifest of two layers:
+
+ Layer 1: `THIRD_ID` with blob `FOURTH_BLOB`: Will result in a new `THIRD_ID` layer
+ Layer 2: `FIRST_ID` with blob `THIRD_BLOB`: Since `FIRST_ID` already points to `SECOND_BLOB`,
+ this will synthesize a new ID. With the current bug, the synthesized ID will match
+ that of `SECOND_ID`, leaving `THIRD_ID` unlinked and therefore, after a GC, missing
+ `FOURTH_BLOB`.
+ """
+ with set_tag_expiration_policy('devtable', 0):
+ location_name = storage.preferred_locations[0]
+ location = database.ImageStorageLocation.get(name=location_name)
+
+ # Create first blob.
+ first_blob_sha = 'sha256:' + hashlib.sha256("FIRST").hexdigest()
+ model.blob.store_blob_record_and_temp_link(ADMIN_ACCESS_USER, REPO, first_blob_sha, location, 0, 0, 0)
+
+ # Push the first manifest.
+ first_manifest = (DockerSchema1ManifestBuilder(ADMIN_ACCESS_USER, REPO, FIRST_TAG)
+ .add_layer(first_blob_sha, '{"id": "first"}')
+ .build(docker_v2_signing_key))
+
+ _write_manifest(ADMIN_ACCESS_USER, REPO, FIRST_TAG, first_manifest)
+
+ # Delete all temp tags and perform GC.
+ _perform_cleanup()
+
+ # Ensure that the first blob still exists, along with the first tag.
+ assert model.blob.get_repo_blob_by_digest(ADMIN_ACCESS_USER, REPO, first_blob_sha) is not None
+
+ repository_ref = registry_model.lookup_repository(ADMIN_ACCESS_USER, REPO)
+ found_tag = registry_model.get_repo_tag(repository_ref, FIRST_TAG, include_legacy_image=True)
+ assert found_tag is not None
+ assert found_tag.legacy_image.docker_image_id == 'first'
+
+ # Create the second and third blobs.
+ second_blob_sha = 'sha256:' + hashlib.sha256("SECOND").hexdigest()
+ third_blob_sha = 'sha256:' + hashlib.sha256("THIRD").hexdigest()
+
+ model.blob.store_blob_record_and_temp_link(ADMIN_ACCESS_USER, REPO, second_blob_sha, location, 0, 0, 0)
+ model.blob.store_blob_record_and_temp_link(ADMIN_ACCESS_USER, REPO, third_blob_sha, location, 0, 0, 0)
+
+ # Push the second manifest.
+ second_manifest = (DockerSchema1ManifestBuilder(ADMIN_ACCESS_USER, REPO, SECOND_TAG)
+ .add_layer(third_blob_sha, '{"id": "second", "parent": "first"}')
+ .add_layer(second_blob_sha, '{"id": "first"}')
+ .build(docker_v2_signing_key))
+
+ _write_manifest(ADMIN_ACCESS_USER, REPO, SECOND_TAG, second_manifest)
+
+ # Delete all temp tags and perform GC.
+ _perform_cleanup()
+
+ # Ensure that the first three blobs still exist, along with the first and second tags.
+ assert registry_model.get_repo_blob_by_digest(repository_ref, first_blob_sha) is not None
+ assert registry_model.get_repo_blob_by_digest(repository_ref, second_blob_sha) is not None
+ assert registry_model.get_repo_blob_by_digest(repository_ref, third_blob_sha) is not None
+
+ found_tag = registry_model.get_repo_tag(repository_ref, FIRST_TAG, include_legacy_image=True)
+ assert found_tag is not None
+ assert found_tag.legacy_image.docker_image_id == 'first'
+
+ # Ensure the IDs have changed.
+ found_tag = registry_model.get_repo_tag(repository_ref, SECOND_TAG, include_legacy_image=True)
+ assert found_tag is not None
+ assert found_tag.legacy_image.docker_image_id != 'second'
+
+ # Create the fourth blob.
+ fourth_blob_sha = 'sha256:' + hashlib.sha256("FOURTH").hexdigest()
+ model.blob.store_blob_record_and_temp_link(ADMIN_ACCESS_USER, REPO, fourth_blob_sha, location, 0, 0, 0)
+
+ # Push the third manifest.
+ third_manifest = (DockerSchema1ManifestBuilder(ADMIN_ACCESS_USER, REPO, THIRD_TAG)
+ .add_layer(third_blob_sha, '{"id": "second", "parent": "first"}')
+ .add_layer(fourth_blob_sha, '{"id": "first"}') # Note the change in BLOB from the second manifest.
+ .build(docker_v2_signing_key))
+
+ _write_manifest(ADMIN_ACCESS_USER, REPO, THIRD_TAG, third_manifest)
+
+ # Delete all temp tags and perform GC.
+ _perform_cleanup()
+
+ # Ensure all blobs are present.
+ assert registry_model.get_repo_blob_by_digest(repository_ref, first_blob_sha) is not None
+ assert registry_model.get_repo_blob_by_digest(repository_ref, second_blob_sha) is not None
+ assert registry_model.get_repo_blob_by_digest(repository_ref, third_blob_sha) is not None
+ assert registry_model.get_repo_blob_by_digest(repository_ref, fourth_blob_sha) is not None
+
+ # Ensure new synthesized IDs were created.
+ second_tag = registry_model.get_repo_tag(repository_ref, SECOND_TAG, include_legacy_image=True)
+ third_tag = registry_model.get_repo_tag(repository_ref, THIRD_TAG, include_legacy_image=True)
+ assert second_tag.legacy_image.docker_image_id != third_tag.legacy_image.docker_image_id
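
Aside: the corner case above comes down to how a legacy Docker ID is synthesized when a
pushed layer reuses an existing ID with a different blob: presumably the buggy scheme
derived the new ID purely from the layer's inputs, so repeated inputs collided and one
layer silently lost its blob link after GC. The final assertion in `test_missing_link`
checks the property a fix must have, namely that synthesized IDs are always distinct; a
salted scheme, sketched hypothetically below, has that property by construction:

    import hashlib
    import uuid

    def synthesize_unique_id(docker_id, blob_digest):
      # Hypothetical sketch, not the registry's actual synthesis code: mixing in
      # a random salt guarantees two synthesized layers never share an ID.
      salt = uuid.uuid4().hex
      return hashlib.sha256('%s|%s|%s' % (docker_id, blob_digest, salt)).hexdigest()
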
diff --git a/endpoints/v2/test/test_v2_tuf.py b/endpoints/v2/test/test_v2_tuf.py
new file mode 100644
index 000000000..efd0c0ce9
--- /dev/null
+++ b/endpoints/v2/test/test_v2_tuf.py
@@ -0,0 +1,68 @@
+import pytest
+import flask
+
+from flask_principal import Identity, Principal
+from mock import Mock
+
+from auth import permissions
+from endpoints.v2.v2auth import _get_tuf_root
+from test import testconfig
+from util.security.registry_jwt import QUAY_TUF_ROOT, SIGNER_TUF_ROOT, DISABLED_TUF_ROOT
+
+
+def admin_identity(namespace, reponame):
+ identity = Identity('admin')
+ identity.provides.add(permissions._RepositoryNeed(namespace, reponame, 'admin'))
+ identity.provides.add(permissions._OrganizationRepoNeed(namespace, 'admin'))
+ return identity
+
+
+def write_identity(namespace, reponame):
+ identity = Identity('writer')
+ identity.provides.add(permissions._RepositoryNeed(namespace, reponame, 'write'))
+ identity.provides.add(permissions._OrganizationRepoNeed(namespace, 'write'))
+ return identity
+
+
+def read_identity(namespace, reponame):
+ identity = Identity('reader')
+ identity.provides.add(permissions._RepositoryNeed(namespace, reponame, 'read'))
+ identity.provides.add(permissions._OrganizationRepoNeed(namespace, 'read'))
+ return identity
+
+
+def app_with_principal():
+ app = flask.Flask(__name__)
+ app.config.from_object(testconfig.TestConfig())
+ principal = Principal(app)
+ return app, principal
+
+
+@pytest.mark.parametrize('identity,expected', [
+ (Identity('anon'), QUAY_TUF_ROOT),
+ (read_identity("namespace", "repo"), QUAY_TUF_ROOT),
+ (read_identity("different", "repo"), QUAY_TUF_ROOT),
+ (admin_identity("different", "repo"), QUAY_TUF_ROOT),
+ (write_identity("different", "repo"), QUAY_TUF_ROOT),
+ (admin_identity("namespace", "repo"), SIGNER_TUF_ROOT),
+ (write_identity("namespace", "repo"), SIGNER_TUF_ROOT),
+])
+def test_get_tuf_root(identity, expected):
+ app, principal = app_with_principal()
+ with app.test_request_context('/'):
+ principal.set_identity(identity)
+ actual = _get_tuf_root(Mock(), "namespace", "repo")
+ assert actual == expected, "should be %s, but was %s" % (expected, actual)
+
+
+@pytest.mark.parametrize('trust_enabled,tuf_root', [
+ (True, QUAY_TUF_ROOT),
+ (False, DISABLED_TUF_ROOT),
+])
+def test_trust_disabled(trust_enabled, tuf_root):
+ app, principal = app_with_principal()
+ with app.test_request_context('/'):
+ principal.set_identity(read_identity("namespace", "repo"))
+ actual = _get_tuf_root(Mock(trust_enabled=trust_enabled), "namespace", "repo")
+ assert actual == tuf_root, "should be %s, but was %s" % (tuf_root, actual)
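
Aside: `_get_tuf_root` (implemented in the endpoints/v2/v2auth.py diff further below)
resolves which TUF root a token should reference. The decision order this
parametrization exercises, restated as a standalone sketch:

    from util.security.registry_jwt import QUAY_TUF_ROOT, SIGNER_TUF_ROOT, DISABLED_TUF_ROOT

    def pick_tuf_root(signing_enabled, trust_enabled, can_write):
      # Restatement of _get_tuf_root: the signing feature and the repository's
      # trust flag gate everything; write access then selects the signer root,
      # and all other readers get the Quay-rooted TUF metadata.
      if not signing_enabled or not trust_enabled:
        return DISABLED_TUF_ROOT
      return SIGNER_TUF_ROOT if can_write else QUAY_TUF_ROOT
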
diff --git a/endpoints/v2/test/test_v2auth.py b/endpoints/v2/test/test_v2auth.py
new file mode 100644
index 000000000..60c8f34b1
--- /dev/null
+++ b/endpoints/v2/test/test_v2auth.py
@@ -0,0 +1,150 @@
+import base64
+
+import pytest
+
+from flask import url_for
+
+from app import instance_keys, app as original_app
+from data.model.user import regenerate_robot_token, get_robot_and_metadata, get_user
+from endpoints.test.shared import conduct_call
+from util.security.registry_jwt import decode_bearer_token, CLAIM_TUF_ROOTS
+
+from test.fixtures import *
+
+
+def get_robot_password(username):
+ parent_name, robot_shortname = username.split('+', 1)
+ parent = get_user(parent_name)
+ _, token, _ = get_robot_and_metadata(robot_shortname, parent)
+ return token
+
+
+@pytest.mark.parametrize('scope, username, password, expected_code, expected_scopes', [
+ # Invalid repository.
+ ('repository:devtable/simple/foo/bar/baz:pull', 'devtable', 'password', 400, []),
+
+ # Invalid scopes.
+ ('some_invalid_scope', 'devtable', 'password', 400, []),
+
+ # Invalid credentials.
+ ('repository:devtable/simple:pull', 'devtable', 'invalid', 401, []),
+
+ # Valid credentials.
+ ('repository:devtable/simple:pull', 'devtable', 'password', 200,
+ ['devtable/simple:pull']),
+
+ ('repository:devtable/simple:push', 'devtable', 'password', 200,
+ ['devtable/simple:push']),
+
+ ('repository:devtable/simple:pull,push', 'devtable', 'password', 200,
+ ['devtable/simple:push,pull']),
+
+ ('repository:devtable/simple:pull,push,*', 'devtable', 'password', 200,
+ ['devtable/simple:push,pull,*']),
+
+ ('repository:buynlarge/orgrepo:pull,push,*', 'devtable', 'password', 200,
+ ['buynlarge/orgrepo:push,pull,*']),
+
+ ('', 'devtable', 'password', 200, []),
+
+ # No credentials, non-public repo.
+ ('repository:devtable/simple:pull', None, None, 200, ['devtable/simple:']),
+
+ # No credentials, public repo.
+ ('repository:public/publicrepo:pull', None, None, 200, ['public/publicrepo:pull']),
+
+ # Reader only.
+ ('repository:buynlarge/orgrepo:pull,push,*', 'reader', 'password', 200,
+ ['buynlarge/orgrepo:pull']),
+
+ # Unknown repository.
+ ('repository:devtable/unknownrepo:pull,push', 'devtable', 'password', 200,
+ ['devtable/unknownrepo:push,pull']),
+
+ # Unknown repository in another namespace.
+ ('repository:somenamespace/unknownrepo:pull,push', 'devtable', 'password', 200,
+ ['somenamespace/unknownrepo:']),
+
+ # Disabled namespace.
+ (['repository:devtable/simple:pull,push', 'repository:disabled/complex:pull'],
+ 'devtable', 'password', 405,
+ []),
+
+ # Multiple scopes.
+ (['repository:devtable/simple:pull,push', 'repository:devtable/complex:pull'],
+ 'devtable', 'password', 200,
+ ['devtable/simple:push,pull', 'devtable/complex:pull']),
+
+ # Multiple scopes with restricted behavior.
+ (['repository:devtable/simple:pull,push', 'repository:public/publicrepo:pull,push'],
+ 'devtable', 'password', 200,
+ ['devtable/simple:push,pull', 'public/publicrepo:pull']),
+
+ (['repository:devtable/simple:pull,push,*', 'repository:public/publicrepo:pull,push,*'],
+ 'devtable', 'password', 200,
+ ['devtable/simple:push,pull,*', 'public/publicrepo:pull']),
+
+ # Read Only State
+ ('repository:devtable/readonly:pull,push,*', 'devtable', 'password', 200,
+ ['devtable/readonly:pull']),
+
+ # Mirror State as a typical User
+ ('repository:devtable/mirrored:pull,push,*', 'devtable', 'password', 200,
+ ['devtable/mirrored:pull']),
+
+ # Mirror State as the robot User should have write access
+ ('repository:devtable/mirrored:pull,push,*', 'devtable+dtrobot', get_robot_password, 200,
+ ['devtable/mirrored:push,pull']),
+
+ # Organization repository, org admin
+ ('repository:buynlarge/orgrepo:pull,push,*', 'devtable', 'password', 200,
+ ['buynlarge/orgrepo:push,pull,*']),
+
+ # Organization repository, org creator
+ ('repository:buynlarge/orgrepo:pull,push,*', 'creator', 'password', 200,
+ ['buynlarge/orgrepo:']),
+
+ # Organization repository, org reader
+ ('repository:buynlarge/orgrepo:pull,push,*', 'reader', 'password', 200,
+ ['buynlarge/orgrepo:pull']),
+
+ # Organization repository, freshuser
+ ('repository:buynlarge/orgrepo:pull,push,*', 'freshuser', 'password', 200,
+ ['buynlarge/orgrepo:']),
+])
+def test_generate_registry_jwt(scope, username, password, expected_code, expected_scopes,
+ app, client):
+ params = {
+ 'service': original_app.config['SERVER_HOSTNAME'],
+ 'scope': scope,
+ }
+
+ if callable(password):
+ password = password(username)
+
+ headers = {}
+ if username and password:
+ headers['Authorization'] = 'Basic %s' % (base64.b64encode('%s:%s' % (username, password)))
+
+ resp = conduct_call(client, 'v2.generate_registry_jwt', url_for, 'GET', params, {}, expected_code,
+ headers=headers)
+ if expected_code != 200:
+ return
+
+ token = resp.json['token']
+ decoded = decode_bearer_token(token, instance_keys, original_app.config)
+ assert decoded['iss'] == 'quay'
+ assert decoded['aud'] == original_app.config['SERVER_HOSTNAME']
+ assert decoded['sub'] == (username if username else '(anonymous)')
+
+ expected_access = []
+ for scope in expected_scopes:
+ name, actions_str = scope.split(':')
+ actions = actions_str.split(',') if actions_str else []
+
+ expected_access.append({
+ 'type': 'repository',
+ 'name': name,
+ 'actions': actions,
+ })
+
+ assert decoded['access'] == expected_access
+ assert len(decoded['context'][CLAIM_TUF_ROOTS]) == len(expected_scopes)
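
Aside: for readers skimming the parametrization, the assertions at the bottom of this
test imply a decoded bearer token of roughly the following shape for
`scope=repository:devtable/simple:pull,push` (schematic only; `<SERVER_HOSTNAME>` stands
in for the configured value):

    {
      'iss': 'quay',
      'aud': '<SERVER_HOSTNAME>',
      'sub': 'devtable',
      'access': [{
        'type': 'repository',
        'name': 'devtable/simple',
        'actions': ['push', 'pull'],
      }],
      # plus decoded['context'][CLAIM_TUF_ROOTS] with one entry per granted scope
    }
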
diff --git a/endpoints/v2/v2auth.py b/endpoints/v2/v2auth.py
index 45f248961..c3a6aa3ce 100644
--- a/endpoints/v2/v2auth.py
+++ b/endpoints/v2/v2auth.py
@@ -1,41 +1,43 @@
import logging
import re
-from cachetools import lru_cache
-from flask import request, jsonify, abort
+from collections import namedtuple
+from cachetools.func import lru_cache
+from flask import request, jsonify
+import features
from app import app, userevents, instance_keys
-from auth.auth_context import get_authenticated_user, get_validated_token, get_validated_oauth_token
+from auth.auth_context import get_authenticated_context, get_authenticated_user
+from auth.decorators import process_basic_auth
from auth.permissions import (ModifyRepositoryPermission, ReadRepositoryPermission,
CreateRepositoryPermission, AdministerRepositoryPermission)
-from auth.process import process_auth
-from endpoints.v2 import v2_bp
+from data import model
+from data.database import RepositoryState
+from data.registry_model import registry_model
+from data.registry_model.datatypes import RepositoryReference
+from data.model.repo_mirror import get_mirroring_robot
from endpoints.decorators import anon_protect
-from data.interfaces.v2 import pre_oci_model as model
+from endpoints.v2 import v2_bp
+from endpoints.v2.errors import (InvalidLogin, NameInvalid, InvalidRequest, Unsupported,
+ Unauthorized, NamespaceDisabled)
from util.cache import no_cache
from util.names import parse_namespace_repository, REPOSITORY_NAME_REGEX
-from util.security.registry_jwt import generate_bearer_token, build_context_and_subject
-
+from util.security.registry_jwt import (generate_bearer_token, build_context_and_subject,
+ QUAY_TUF_ROOT, SIGNER_TUF_ROOT, DISABLED_TUF_ROOT)
logger = logging.getLogger(__name__)
-
TOKEN_VALIDITY_LIFETIME_S = 60 * 60 # 1 hour
-SCOPE_REGEX_TEMPLATE = r'^repository:((?:{}\/)?((?:[\.a-zA-Z0-9_\-]+\/)?[\.a-zA-Z0-9_\-]+)):((?:push|pull|\*)(?:,(?:push|pull|\*))*)$'
-
-
-@lru_cache(maxsize=1)
-def get_scope_regex():
- hostname = re.escape(app.config['SERVER_HOSTNAME'])
- scope_regex_string = SCOPE_REGEX_TEMPLATE.format(hostname)
- return re.compile(scope_regex_string)
+SCOPE_REGEX_TEMPLATE = r'^repository:((?:{}\/)?((?:[\.a-zA-Z0-9_\-]+\/)*[\.a-zA-Z0-9_\-]+)):((?:push|pull|\*)(?:,(?:push|pull|\*))*)$'
+scopeResult = namedtuple('scopeResult', ['actions', 'namespace', 'repository', 'registry_and_repo',
+ 'tuf_root'])
@v2_bp.route('/auth')
-@process_auth
+@process_basic_auth
@no_cache
@anon_protect
-def generate_registry_jwt():
+def generate_registry_jwt(auth_result):
"""
This endpoint will generate a JWT conforming to the Docker Registry v2 Auth Spec:
https://docs.docker.com/registry/spec/auth/token/
@@ -43,121 +45,217 @@ def generate_registry_jwt():
audience_param = request.args.get('service')
logger.debug('Request audience: %s', audience_param)
- scope_param = request.args.get('scope') or ''
- logger.debug('Scope request: %s', scope_param)
+ scope_params = request.args.getlist('scope') or []
+ logger.debug('Scope request: %s', scope_params)
- user = get_authenticated_user()
- logger.debug('Authenticated user: %s', user)
+ auth_header = request.headers.get('authorization', '')
+ auth_credentials_sent = bool(auth_header)
- token = get_validated_token()
- logger.debug('Authenticated token: %s', token)
+ # Load the auth context and verify that we've directly received credentials.
+ has_valid_auth_context = False
+ if get_authenticated_context():
+ has_valid_auth_context = not get_authenticated_context().is_anonymous
- oauthtoken = get_validated_oauth_token()
- logger.debug('Authenticated OAuth token: %s', oauthtoken)
-
- auth_credentials_sent = bool(request.headers.get('authorization', ''))
- if auth_credentials_sent and not user and not token:
+ if auth_credentials_sent and not has_valid_auth_context:
# The auth credentials sent for the user are invalid.
- logger.debug('Invalid auth credentials')
- abort(401)
+ raise InvalidLogin(auth_result.error_message)
+ if not has_valid_auth_context and len(scope_params) == 0:
+ # In this case, we are doing an auth flow, and it's not an anonymous pull.
+ logger.debug('No user and no token sent for empty scope list')
+ raise Unauthorized()
+
+ # Build the access list for the authenticated context.
access = []
+ scope_results = []
+ for scope_param in scope_params:
+ scope_result = _authorize_or_downscope_request(scope_param, has_valid_auth_context)
+ if scope_result is None:
+ continue
+
+ scope_results.append(scope_result)
+ access.append({
+ 'type': 'repository',
+ 'name': scope_result.registry_and_repo,
+ 'actions': scope_result.actions,
+ })
+
+ # Issue user events.
user_event_data = {
'action': 'login',
}
- if len(scope_param) > 0:
- match = get_scope_regex().match(scope_param)
- if match is None:
- logger.debug('Match: %s', match)
- logger.debug('len: %s', len(scope_param))
- logger.warning('Unable to decode repository and actions: %s', scope_param)
- abort(400)
-
- logger.debug('Match: %s', match.groups())
-
- registry_and_repo = match.group(1)
- namespace_and_repo = match.group(2)
- actions = match.group(3).split(',')
-
- lib_namespace = app.config['LIBRARY_NAMESPACE']
- namespace, reponame = parse_namespace_repository(namespace_and_repo, lib_namespace)
-
- # Ensure that we are never creating an invalid repository.
- if not REPOSITORY_NAME_REGEX.match(reponame):
- logger.debug('Found invalid repository name in auth flow: %s', reponame)
- abort(400)
-
- final_actions = []
-
- if 'push' in actions:
- # If there is no valid user or token, then the repository cannot be
- # accessed.
- if user is not None or token is not None:
- # Lookup the repository. If it exists, make sure the entity has modify
- # permission. Otherwise, make sure the entity has create permission.
- repo = model.get_repository(namespace, reponame)
- if repo:
- if ModifyRepositoryPermission(namespace, reponame).can():
- final_actions.append('push')
- else:
- logger.debug('No permission to modify repository %s/%s', namespace, reponame)
- else:
- if CreateRepositoryPermission(namespace).can() and user is not None:
- logger.debug('Creating repository: %s/%s', namespace, reponame)
- model.create_repository(namespace, reponame, user)
- final_actions.append('push')
- else:
- logger.debug('No permission to create repository %s/%s', namespace, reponame)
-
- if 'pull' in actions:
- # Grant pull if the user can read the repo or it is public.
- if (ReadRepositoryPermission(namespace, reponame).can() or
- model.repository_is_public(namespace, reponame)):
- final_actions.append('pull')
- else:
- logger.debug('No permission to pull repository %s/%s', namespace, reponame)
-
- if '*' in actions:
- # Grant * user is admin
- if (AdministerRepositoryPermission(namespace, reponame).can()):
- final_actions.append('*')
- else:
- logger.debug("No permission to administer repository %s/%s", namespace, reponame)
-
- # Add the access for the JWT.
- access.append({
- 'type': 'repository',
- 'name': registry_and_repo,
- 'actions': final_actions,
- })
-
- # Set the user event data for the auth.
- if 'push' in final_actions:
+ # Set the user event data for the authenticated request.
+ if len(scope_results) > 0:
+ if 'push' in scope_results[0].actions:
user_action = 'push_start'
- elif 'pull' in final_actions:
+ elif 'pull' in scope_results[0].actions:
user_action = 'pull_start'
else:
user_action = 'login'
user_event_data = {
'action': user_action,
- 'repository': reponame,
- 'namespace': namespace,
+ 'namespace': scope_results[0].namespace,
+ 'repository': scope_results[0].repository,
}
- elif user is None and token is None:
- # In this case, we are doing an auth flow, and it's not an anonymous pull
- logger.debug('No user and no token sent for empty scope list')
- abort(401)
-
# Send the user event.
- if user is not None:
- event = userevents.get_event(user.username)
+ if get_authenticated_user() is not None:
+ event = userevents.get_event(get_authenticated_user().username)
event.publish_event_data('docker-cli', user_event_data)
# Build the signed JWT.
- context, subject = build_context_and_subject(user, token, oauthtoken)
+ tuf_roots = {'%s/%s' % (scope_result.namespace, scope_result.repository): scope_result.tuf_root
+ for scope_result in scope_results}
+ context, subject = build_context_and_subject(get_authenticated_context(), tuf_roots=tuf_roots)
token = generate_bearer_token(audience_param, subject, context, access,
TOKEN_VALIDITY_LIFETIME_S, instance_keys)
return jsonify({'token': token})
+
+
+@lru_cache(maxsize=1)
+def _get_scope_regex():
+ hostname = re.escape(app.config['SERVER_HOSTNAME'])
+ scope_regex_string = SCOPE_REGEX_TEMPLATE.format(hostname)
+ return re.compile(scope_regex_string)
+
+
+def _get_tuf_root(repository_ref, namespace, reponame):
+ if not features.SIGNING or repository_ref is None or not repository_ref.trust_enabled:
+ return DISABLED_TUF_ROOT
+
+ # Users with write access to a repository will see signer-rooted TUF metadata
+ if ModifyRepositoryPermission(namespace, reponame).can():
+ return SIGNER_TUF_ROOT
+ return QUAY_TUF_ROOT
+
+
+def _authorize_or_downscope_request(scope_param, has_valid_auth_context):
+ # TODO: The complexity of this function is difficult to follow and maintain. Refactor/Cleanup.
+ if len(scope_param) == 0:
+ if not has_valid_auth_context:
+ # In this case, we are doing an auth flow, and it's not an anonymous pull.
+ logger.debug('No user and no token sent for empty scope list')
+ raise Unauthorized()
+
+ return None
+
+ match = _get_scope_regex().match(scope_param)
+ if match is None:
+ logger.debug('Match: %s', match)
+ logger.debug('len: %s', len(scope_param))
+ logger.warning('Unable to decode repository and actions: %s', scope_param)
+ raise InvalidRequest('Unable to decode repository and actions: %s' % scope_param)
+
+ logger.debug('Match: %s', match.groups())
+
+ registry_and_repo = match.group(1)
+ namespace_and_repo = match.group(2)
+ requested_actions = match.group(3).split(',')
+
+ lib_namespace = app.config['LIBRARY_NAMESPACE']
+ namespace, reponame = parse_namespace_repository(namespace_and_repo, lib_namespace)
+
+ # Ensure that we are never creating an invalid repository.
+ if not REPOSITORY_NAME_REGEX.match(reponame):
+ logger.debug('Found invalid repository name in auth flow: %s', reponame)
+ if len(namespace_and_repo.split('/')) > 1:
+ msg = 'Nested repositories are not supported. Found: %s' % namespace_and_repo
+ raise NameInvalid(message=msg)
+
+ raise NameInvalid(message='Invalid repository name: %s' % namespace_and_repo)
+
+ # Ensure the namespace is enabled.
+ if registry_model.is_existing_disabled_namespace(namespace):
+ msg = 'Namespace %s has been disabled. Please contact a system administrator.' % namespace
+ raise NamespaceDisabled(message=msg)
+
+ final_actions = []
+
+ repository_ref = registry_model.lookup_repository(namespace, reponame)
+ repo_is_public = repository_ref is not None and repository_ref.is_public
+ invalid_repo_message = ''
+ if repository_ref is not None and repository_ref.kind != 'image':
+ invalid_repo_message = ('This repository is for managing %s and not container images.' %
+ repository_ref.kind)
+
+ if 'push' in requested_actions:
+ # Check if there is a valid user or token, as otherwise the repository cannot be
+ # accessed.
+ if has_valid_auth_context:
+ user = get_authenticated_user()
+
+ # Lookup the repository. If it exists, make sure the entity has modify
+ # permission. Otherwise, make sure the entity has create permission.
+ if repository_ref:
+ if ModifyRepositoryPermission(namespace, reponame).can():
+ if repository_ref is not None and repository_ref.kind != 'image':
+ raise Unsupported(message=invalid_repo_message)
+
+ # Check for different repository states.
+ if repository_ref.state == RepositoryState.NORMAL:
+ # In NORMAL mode, if the user has permission, then they can push.
+ final_actions.append('push')
+ elif repository_ref.state == RepositoryState.MIRROR:
+ # In MIRROR mode, only the mirroring robot can push.
+ mirror = model.repo_mirror.get_mirror(repository_ref.id)
+ robot = mirror.internal_robot if mirror is not None else None
+ if robot is not None and user is not None and robot == user:
+ assert robot.robot
+ final_actions.append('push')
+ else:
+ logger.debug('Repository %s/%s push requested for non-mirror robot %s: %s', namespace,
+ reponame, robot, user)
+ elif repository_ref.state == RepositoryState.READ_ONLY:
+ # No pushing allowed in read-only state.
+ pass
+ else:
+ logger.warning('Unknown state for repository %s: %s', repository_ref, repository_ref.state)
+ else:
+ logger.debug('No permission to modify repository %s/%s', namespace, reponame)
+ else:
+ # TODO: Push-to-create functionality should be configurable
+ if CreateRepositoryPermission(namespace).can() and user is not None:
+ logger.debug('Creating repository: %s/%s', namespace, reponame)
+ repository_ref = RepositoryReference.for_repo_obj(
+ model.repository.create_repository(namespace, reponame, user))
+ final_actions.append('push')
+ else:
+ logger.debug('No permission to create repository %s/%s', namespace, reponame)
+
+ if 'pull' in requested_actions:
+ # Grant pull if the user can read the repo or it is public.
+ if ReadRepositoryPermission(namespace, reponame).can() or repo_is_public:
+ if repository_ref is not None and repository_ref.kind != 'image':
+ raise Unsupported(message=invalid_repo_message)
+
+ final_actions.append('pull')
+ else:
+ logger.debug('No permission to pull repository %s/%s', namespace, reponame)
+
+ if '*' in requested_actions:
+ # Grant * if the user is an admin.
+ if AdministerRepositoryPermission(namespace, reponame).can():
+ if repository_ref is not None and repository_ref.kind != 'image':
+ raise Unsupported(message=invalid_repo_message)
+
+ if repository_ref and repository_ref.state in (RepositoryState.MIRROR,
+ RepositoryState.READ_ONLY):
+ logger.debug('No permission to administer repository %s/%s', namespace, reponame)
+ else:
+ assert repository_ref.state == RepositoryState.NORMAL
+ final_actions.append('*')
+ else:
+ logger.debug("No permission to administer repository %s/%s", namespace, reponame)
+
+ # Final sanity checks.
+ if 'push' in final_actions:
+ assert repository_ref.state != RepositoryState.READ_ONLY
+
+ if '*' in final_actions:
+ assert repository_ref.state == RepositoryState.NORMAL
+
+ return scopeResult(actions=final_actions, namespace=namespace, repository=reponame,
+ registry_and_repo=registry_and_repo,
+ tuf_root=_get_tuf_root(repository_ref, namespace, reponame))
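
Aside: a quick, runnable illustration of the scope grammar compiled by
`_get_scope_regex` from `SCOPE_REGEX_TEMPLATE` above (the hostname is a stand-in):

    import re

    SCOPE_REGEX_TEMPLATE = (r'^repository:((?:{}\/)?((?:[\.a-zA-Z0-9_\-]+\/)*'
                            r'[\.a-zA-Z0-9_\-]+)):((?:push|pull|\*)(?:,(?:push|pull|\*))*)$')

    scope_regex = re.compile(SCOPE_REGEX_TEMPLATE.format(re.escape('quay.example.com')))

    match = scope_regex.match('repository:devtable/simple:pull,push')
    assert match.group(1) == 'devtable/simple'  # registry_and_repo
    assert match.group(2) == 'devtable/simple'  # namespace_and_repo
    assert match.group(3) == 'pull,push'        # requested actions

    # An optional hostname prefix lands in group 1 but is stripped from group 2:
    match = scope_regex.match('repository:quay.example.com/devtable/simple:pull')
    assert match.group(2) == 'devtable/simple'
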
diff --git a/endpoints/verbs/__init__.py b/endpoints/verbs/__init__.py
index ff2c28f76..1a7898ab8 100644
--- a/endpoints/verbs/__init__.py
+++ b/endpoints/verbs/__init__.py
@@ -1,37 +1,51 @@
-import logging
import hashlib
+import json
+import logging
+import uuid
from flask import redirect, Blueprint, abort, send_file, make_response, request
import features
-from app import app, signer, storage, metric_queue, license_validator
+from app import app, signer, storage, metric_queue, config_provider, ip_resolver, instance_keys
from auth.auth_context import get_authenticated_user
+from auth.decorators import process_auth
from auth.permissions import ReadRepositoryPermission
-from auth.process import process_auth
from data import database
-from data.interfaces.verbs import pre_oci_model as model
-from endpoints.common import route_show_if, parse_repository_name
-from endpoints.decorators import anon_protect
-from endpoints.trackhelper import track_and_log
+from data import model
+from data.registry_model import registry_model
+from endpoints.decorators import (anon_protect, anon_allowed, route_show_if, parse_repository_name,
+ check_region_blacklisted)
from endpoints.v2.blob import BLOB_DIGEST_ROUTE
from image.appc import AppCImageFormatter
+from image.docker import ManifestException
from image.docker.squashed import SquashedDockerImageFormatter
from storage import Storage
+from util.audit import track_and_log, wrap_repository
+from util.http import exact_abort
from util.registry.filelike import wrap_with_handler
from util.registry.queuefile import QueueFile
from util.registry.queueprocess import QueueProcess
+from util.registry.tarlayerformat import TarLayerFormatterReporter
from util.registry.torrent import (make_torrent, per_user_torrent_filename, public_torrent_filename,
- PieceHasher)
-
+ PieceHasher, TorrentConfiguration)
logger = logging.getLogger(__name__)
verbs = Blueprint('verbs', __name__)
-license_validator.enforce_license_before_request(verbs)
+
+LAYER_MIMETYPE = 'binary/octet-stream'
-def _open_stream(formatter, repo_image, tag, derived_image_id, handlers):
+class VerbReporter(TarLayerFormatterReporter):
+ def __init__(self, kind):
+ self.kind = kind
+
+ def report_pass(self, pass_count):
+ metric_queue.verb_action_passes.Inc(labelvalues=[self.kind, pass_count])
+
+
+def _open_stream(formatter, tag, schema1_manifest, derived_image_id, handlers, reporter):
"""
This method generates a stream of data which will be replicated and read from the queue files.
This method runs in a separate process.
@@ -39,24 +53,27 @@ def _open_stream(formatter, repo_image, tag, derived_image_id, handlers):
# For performance reasons, we load the full image list here, cache it, then disconnect from
# the database.
with database.UseThenDisconnect(app.config):
- image_list = list(model.get_manifest_layers_with_blobs(repo_image))
+ layers = registry_model.list_parsed_manifest_layers(tag.repository, schema1_manifest, storage,
+ include_placements=True)
- def get_next_image():
- for current_image in image_list:
- yield current_image
+ def image_stream_getter(store, blob):
+ def get_stream_for_storage():
+ current_image_stream = store.stream_read_file(blob.placements, blob.storage_path)
+ logger.debug('Returning blob %s: %s', blob.digest, blob.storage_path)
+ return current_image_stream
+ return get_stream_for_storage
- def get_next_layer():
+ def tar_stream_getter_iterator():
# Re-Initialize the storage engine because some may not respond well to forking (e.g. S3)
- store = Storage(app, metric_queue)
- for current_image in image_list:
- current_image_path = model.get_blob_path(current_image.blob)
- current_image_stream = store.stream_read_file(current_image.blob.locations,
- current_image_path)
+ store = Storage(app, metric_queue, config_provider=config_provider, ip_resolver=ip_resolver)
- logger.debug('Returning image layer %s: %s', current_image.image_id, current_image_path)
- yield current_image_stream
+ # Note: We reverse because we have to start at the leaf layer and move upward,
+ # as per the spec for the formatters.
+ for layer in reversed(layers):
+ yield image_stream_getter(store, layer.blob)
- stream = formatter.build_stream(repo_image, tag, derived_image_id, get_next_image, get_next_layer)
+ stream = formatter.build_stream(tag, schema1_manifest, derived_image_id, layers,
+ tar_stream_getter_iterator, reporter=reporter)
for handler_fn in handlers:
stream = wrap_with_handler(stream, handler_fn)
@@ -71,32 +88,40 @@ def _sign_derived_image(verb, derived_image, queue_file):
try:
signature = signer.detached_sign(queue_file)
except:
- logger.exception('Exception when signing %s deriving image %s', verb, derived_image.ref)
+ logger.exception('Exception when signing %s deriving image %s', verb, derived_image)
return
# Setup the database (since this is a new process) and then disconnect immediately
# once the operation completes.
if not queue_file.raised_exception:
with database.UseThenDisconnect(app.config):
- model.set_derived_image_signature(derived_image, signer.name, signature)
+ registry_model.set_derived_image_signature(derived_image, signer.name, signature)
def _write_derived_image_to_storage(verb, derived_image, queue_file):
""" Read from the generated stream and write it back to the storage engine. This method runs in a
separate process.
"""
+
def handle_exception(ex):
- logger.debug('Exception when building %s derived image %s: %s', verb, derived_image.ref, ex)
+ logger.debug('Exception when building %s derived image %s: %s', verb, derived_image, ex)
with database.UseThenDisconnect(app.config):
- model.delete_derived_image(derived_image)
+ registry_model.delete_derived_image(derived_image)
queue_file.add_exception_handler(handle_exception)
# Re-Initialize the storage engine because some may not respond well to forking (e.g. S3)
- store = Storage(app, metric_queue)
- image_path = model.get_blob_path(derived_image.blob)
- store.stream_write(derived_image.blob.locations, image_path, queue_file)
+ store = Storage(app, metric_queue, config_provider=config_provider, ip_resolver=ip_resolver)
+
+ try:
+ store.stream_write(derived_image.blob.placements, derived_image.blob.storage_path, queue_file)
+ except IOError as ex:
+ logger.debug('Exception when writing %s derived image %s: %s', verb, derived_image, ex)
+
+ with database.UseThenDisconnect(app.config):
+ registry_model.delete_derived_image(derived_image)
+
queue_file.close()
@@ -105,21 +130,23 @@ def _torrent_for_blob(blob, is_public):
with an error if the state is not valid (e.g. non-public, non-user request).
"""
# Make sure the storage has a size.
- if not blob.size:
+ if not blob.compressed_size:
abort(404)
# Lookup the torrent information for the storage.
- torrent_info = model.get_torrent_info(blob)
+ torrent_info = registry_model.get_torrent_info(blob)
if torrent_info is None:
abort(404)
# Lookup the webseed path for the storage.
- path = model.get_blob_path(blob)
- webseed = storage.get_direct_download_url(blob.locations, path,
+ webseed = storage.get_direct_download_url(blob.placements, blob.storage_path,
expires_in=app.config['BITTORRENT_WEBSEED_LIFETIME'])
if webseed is None:
# We cannot support webseeds for storages that cannot provide direct downloads.
- abort(make_response('Storage engine does not support seeding.', 501))
+ exact_abort(501, 'Storage engine does not support seeding.')
+
+ # Load the config for building torrents.
+ torrent_config = TorrentConfiguration.from_app_config(instance_keys, app.config)
# Build the filename for the torrent.
if is_public:
@@ -129,19 +156,20 @@ def _torrent_for_blob(blob, is_public):
if not user:
abort(403)
- name = per_user_torrent_filename(user.uuid, blob.uuid)
+ name = per_user_torrent_filename(torrent_config, user.uuid, blob.uuid)
# Return the torrent file.
- torrent_file = make_torrent(name, webseed, blob.size, torrent_info.piece_length,
- torrent_info.pieces)
+ torrent_file = make_torrent(torrent_config, name, webseed, blob.compressed_size,
+ torrent_info.piece_length, torrent_info.pieces)
- headers = {'Content-Type': 'application/x-bittorrent',
- 'Content-Disposition': 'attachment; filename={0}.torrent'.format(name)}
+ headers = {
+ 'Content-Type': 'application/x-bittorrent',
+ 'Content-Disposition': 'attachment; filename={0}.torrent'.format(name)}
return make_response(torrent_file, 200, headers)
-def _torrent_repo_verb(repo_image, tag, verb, **kwargs):
+def _torrent_repo_verb(repository, tag, manifest, verb, **kwargs):
""" Handles returning a torrent for the given verb on the given image and tag. """
if not features.BITTORRENT:
# Torrent feature is not enabled.
@@ -149,45 +177,89 @@ def _torrent_repo_verb(repo_image, tag, verb, **kwargs):
# Lookup an *existing* derived storage for the verb. If the verb's image storage doesn't exist,
# we cannot create it here, so we 406.
- derived_image = model.lookup_derived_image(repo_image, verb, varying_metadata={'tag': tag})
+ derived_image = registry_model.lookup_derived_image(manifest, verb, storage,
+ varying_metadata={'tag': tag.name},
+ include_placements=True)
if derived_image is None:
abort(406)
# Return the torrent.
- public_repo = model.repository_is_public(repo_image.repository.namespace_name,
- repo_image.repository.name)
- torrent = _torrent_for_blob(derived_image.blob, public_repo)
+ torrent = _torrent_for_blob(derived_image.blob, model.repository.is_repository_public(repository))
# Log the action.
- track_and_log('repo_verb', repo_image.repository, tag=tag, verb=verb, torrent=True, **kwargs)
+ track_and_log('repo_verb', wrap_repository(repository), tag=tag.name, verb=verb, torrent=True,
+ **kwargs)
return torrent
-def _verify_repo_verb(_, namespace, repository, tag, verb, checker=None):
- permission = ReadRepositoryPermission(namespace, repository)
- if not permission.can() and not model.repository_is_public(namespace, repository):
+def _verify_repo_verb(_, namespace, repo_name, tag_name, verb, checker=None):
+ permission = ReadRepositoryPermission(namespace, repo_name)
+ repo = model.repository.get_repository(namespace, repo_name)
+ repo_is_public = repo is not None and model.repository.is_repository_public(repo)
+ if not permission.can() and not repo_is_public:
+ logger.debug('No permission to read repository %s/%s for user %s with verb %s', namespace,
+ repo_name, get_authenticated_user(), verb)
abort(403)
+ if repo is not None and repo.kind.name != 'image':
+ logger.debug('Repository %s/%s for user %s is not an image repo', namespace, repo_name,
+ get_authenticated_user())
+ abort(405)
+
+ # Make sure the repo's namespace isn't disabled.
+ if not registry_model.is_namespace_enabled(namespace):
+ abort(400)
+
# Lookup the requested tag.
- tag_image = model.get_tag_image(namespace, repository, tag)
- if tag_image is None:
+ repo_ref = registry_model.lookup_repository(namespace, repo_name)
+ if repo_ref is None:
+ abort(404)
+
+ tag = registry_model.get_repo_tag(repo_ref, tag_name)
+ if tag is None:
+ logger.debug('Tag %s does not exist in repository %s/%s for user %s', tag_name, namespace,
+ repo_name, get_authenticated_user())
+ abort(404)
+
+ # Get its associated manifest.
+ manifest = registry_model.get_manifest_for_tag(tag, backfill_if_necessary=True)
+ if manifest is None:
+ logger.debug('Could not get manifest on %s/%s:%s::%s', namespace, repo_name, tag.name, verb)
+ abort(404)
+
+ # Retrieve the schema1-compatible version of the manifest.
+ try:
+ schema1_manifest = registry_model.get_schema1_parsed_manifest(manifest, namespace,
+ repo_name, tag.name,
+ storage)
+ except ManifestException:
+ logger.exception('Could not get manifest on %s/%s:%s::%s', namespace, repo_name, tag.name, verb)
+ abort(400)
+
+ if schema1_manifest is None:
abort(404)
# If there is a data checker, call it first.
if checker is not None:
- if not checker(tag_image):
- logger.debug('Check mismatch on %s/%s:%s, verb %s', namespace, repository, tag, verb)
+ if not checker(tag, schema1_manifest):
+ logger.debug('Check mismatch on %s/%s:%s, verb %s', namespace, repo_name, tag.name, verb)
abort(404)
- return tag_image
+ # Preload the tag's repository information, so it gets cached.
+ assert tag.repository.namespace_name
+ assert tag.repository.name
+
+ return tag, manifest, schema1_manifest
-def _repo_verb_signature(namespace, repository, tag, verb, checker=None, **kwargs):
- # Verify that the image exists and that we have access to it.
- repo_image = _verify_repo_verb(storage, namespace, repository, tag, verb, checker)
+def _repo_verb_signature(namespace, repository, tag_name, verb, checker=None, **kwargs):
+ # Verify that the tag exists and that we have access to it.
+ tag, manifest, _ = _verify_repo_verb(storage, namespace, repository, tag_name, verb, checker)
+
+ # Find the derived image storage for the verb.
+ derived_image = registry_model.lookup_derived_image(manifest, verb, storage,
+ varying_metadata={'tag': tag.name})
- # derived_image the derived image storage for the verb.
- derived_image = model.lookup_derived_image(repo_image, verb, varying_metadata={'tag': tag})
if derived_image is None or derived_image.blob.uploading:
return make_response('', 202)
@@ -196,7 +268,7 @@ def _repo_verb_signature(namespace, repository, tag, verb, checker=None, **kwarg
abort(404)
# Lookup the signature for the verb.
- signature_value = model.get_derived_image_signature(derived_image, signer.name)
+ signature_value = registry_model.get_derived_image_signature(derived_image, signer.name)
if signature_value is None:
abort(404)
@@ -204,44 +276,74 @@ def _repo_verb_signature(namespace, repository, tag, verb, checker=None, **kwarg
return make_response(signature_value)
-def _repo_verb(namespace, repository, tag, verb, formatter, sign=False, checker=None, **kwargs):
+@check_region_blacklisted()
+def _repo_verb(namespace, repository, tag_name, verb, formatter, sign=False, checker=None,
+ **kwargs):
# Verify that the image exists and that we have access to it.
- repo_image = _verify_repo_verb(storage, namespace, repository, tag, verb, checker)
+ logger.debug('Verifying repo verb %s for repository %s/%s with user %s with mimetype %s',
+ verb, namespace, repository, get_authenticated_user(), request.accept_mimetypes.best)
+ tag, manifest, schema1_manifest = _verify_repo_verb(storage, namespace, repository,
+ tag_name, verb, checker)
+
+ # Load the repository for later.
+ repo = model.repository.get_repository(namespace, repository)
+ if repo is None:
+ abort(404)
# Check for torrent. If found, we return a torrent for the repo verb image (if the derived
# image already exists).
if request.accept_mimetypes.best == 'application/x-bittorrent':
metric_queue.repository_pull.Inc(labelvalues=[namespace, repository, verb + '+torrent', True])
- return _torrent_repo_verb(repo_image, tag, verb, **kwargs)
+ return _torrent_repo_verb(repo, tag, manifest, verb, **kwargs)
# Log the action.
- track_and_log('repo_verb', repo_image.repository, tag=tag, verb=verb, **kwargs)
+ track_and_log('repo_verb', wrap_repository(repo), tag=tag.name, verb=verb, **kwargs)
metric_queue.repository_pull.Inc(labelvalues=[namespace, repository, verb, True])
+ is_readonly = app.config.get('REGISTRY_STATE', 'normal') == 'readonly'
+
# Lookup/create the derived image for the verb and repo image.
- derived_image = model.lookup_or_create_derived_image(repo_image, verb,
- storage.preferred_locations[0],
- varying_metadata={'tag': tag})
- if not derived_image.blob.uploading:
- logger.debug('Derived %s image %s exists in storage', verb, derived_image.ref)
- derived_layer_path = model.get_blob_path(derived_image.blob)
+ if is_readonly:
+ derived_image = registry_model.lookup_derived_image(
+ manifest, verb, storage,
+ varying_metadata={'tag': tag.name},
+ include_placements=True)
+ else:
+ derived_image = registry_model.lookup_or_create_derived_image(
+ manifest, verb, storage.preferred_locations[0], storage,
+ varying_metadata={'tag': tag.name},
+ include_placements=True)
+ if derived_image is None:
+ logger.error('Could not create or lookup a derived image for manifest %s', manifest)
+ abort(400)
+
+ if derived_image is not None and not derived_image.blob.uploading:
+ logger.debug('Derived %s image %s exists in storage', verb, derived_image)
is_head_request = request.method == 'HEAD'
- download_url = storage.get_direct_download_url(derived_image.blob.locations, derived_layer_path,
+
+ metric_queue.pull_byte_count.Inc(derived_image.blob.compressed_size, labelvalues=[verb])
+
+ download_url = storage.get_direct_download_url(derived_image.blob.placements,
+ derived_image.blob.storage_path,
head=is_head_request)
if download_url:
- logger.debug('Redirecting to download URL for derived %s image %s', verb, derived_image.ref)
+ logger.debug('Redirecting to download URL for derived %s image %s', verb, derived_image)
return redirect(download_url)
# Close the database handle here for this process before we send the long download.
database.close_db_filter(None)
- logger.debug('Sending cached derived %s image %s', verb, derived_image.ref)
- return send_file(storage.stream_read_file(derived_image.blob.locations, derived_layer_path))
+ logger.debug('Sending cached derived %s image %s', verb, derived_image)
+ return send_file(
+ storage.stream_read_file(derived_image.blob.placements, derived_image.blob.storage_path),
+ mimetype=LAYER_MIMETYPE)
- logger.debug('Building and returning derived %s image %s', verb, derived_image.ref)
+ logger.debug('Building and returning derived %s image', verb)
- # Calculate a derived image ID.
- derived_image_id = hashlib.sha256(repo_image.image_id + ':' + verb).hexdigest()
+ # Close the database connection before any process forking occurs. This is important because
+ # the Postgres driver does not react kindly to forking, so we need to make sure it is closed
+ # so that each process will get its own unique connection.
+ database.close_db_filter(None)
def _cleanup():
# Close any existing DB connection once the process has exited.
@@ -250,48 +352,68 @@ def _repo_verb(namespace, repository, tag, verb, formatter, sign=False, checker=
hasher = PieceHasher(app.config['BITTORRENT_PIECE_SIZE'])
def _store_metadata_and_cleanup():
+ if is_readonly:
+ return
+
with database.UseThenDisconnect(app.config):
- model.set_torrent_info(derived_image.blob, app.config['BITTORRENT_PIECE_SIZE'],
- hasher.final_piece_hashes())
- model.set_blob_size(derived_image.blob, hasher.hashed_bytes)
+ registry_model.set_torrent_info(derived_image.blob, app.config['BITTORRENT_PIECE_SIZE'],
+ hasher.final_piece_hashes())
+ registry_model.set_derived_image_size(derived_image, hasher.hashed_bytes)
# Create a queue process to generate the data. The queue files will read from the process
# and send the results to the client and storage.
+ unique_id = (derived_image.unique_id
+ if derived_image is not None
+ else hashlib.sha256('%s:%s' % (verb, uuid.uuid4())).hexdigest())
handlers = [hasher.update]
- args = (formatter, repo_image, tag, derived_image_id, handlers)
- queue_process = QueueProcess(_open_stream,
- 8 * 1024, 10 * 1024 * 1024, # 8K/10M chunk/max
- args, finished=_store_metadata_and_cleanup)
+ reporter = VerbReporter(verb)
+ args = (formatter, tag, schema1_manifest, unique_id, handlers, reporter)
+ queue_process = QueueProcess(
+ _open_stream,
+ 8 * 1024,
+ 10 * 1024 * 1024, # 8K/10M chunk/max
+ args,
+ finished=_store_metadata_and_cleanup)
client_queue_file = QueueFile(queue_process.create_queue(), 'client')
- storage_queue_file = QueueFile(queue_process.create_queue(), 'storage')
- # If signing is required, add a QueueFile for signing the image as we stream it out.
- signing_queue_file = None
- if sign and signer.name:
- signing_queue_file = QueueFile(queue_process.create_queue(), 'signing')
+ if not is_readonly:
+ storage_queue_file = QueueFile(queue_process.create_queue(), 'storage')
+
+ # If signing is required, add a QueueFile for signing the image as we stream it out.
+ signing_queue_file = None
+ if sign and signer.name:
+ signing_queue_file = QueueFile(queue_process.create_queue(), 'signing')
# Start building.
queue_process.run()
# Start the storage saving.
- storage_args = (verb, derived_image, storage_queue_file)
- QueueProcess.run_process(_write_derived_image_to_storage, storage_args, finished=_cleanup)
+ if not is_readonly:
+ storage_args = (verb, derived_image, storage_queue_file)
+ QueueProcess.run_process(_write_derived_image_to_storage, storage_args, finished=_cleanup)
- if sign and signer.name:
- signing_args = (verb, derived_image, signing_queue_file)
- QueueProcess.run_process(_sign_derived_image, signing_args, finished=_cleanup)
+ if sign and signer.name:
+ signing_args = (verb, derived_image, signing_queue_file)
+ QueueProcess.run_process(_sign_derived_image, signing_args, finished=_cleanup)
# Close the database handle here for this process before we send the long download.
database.close_db_filter(None)
# Return the client's data.
- return send_file(client_queue_file)
+ return send_file(client_queue_file, mimetype=LAYER_MIMETYPE)
def os_arch_checker(os, arch):
- def checker(repo_image):
- image_json = repo_image.compat_metadata
+ def checker(tag, manifest):
+ try:
+ image_json = json.loads(manifest.leaf_layer.raw_v1_metadata)
+ except (ValueError, TypeError):
+ logger.exception('Could not parse leaf layer JSON for manifest %s', manifest)
+ return False
# Verify the architecture and os.
operating_system = image_json.get('os', 'linux')
@@ -325,11 +447,13 @@ def get_aci_signature(server, namespace, repository, tag, os, arch):
@route_show_if(features.ACI_CONVERSION)
@anon_protect
-@verbs.route('/aci/<server>/<namespace>/<repository>/<tag>/aci/<os>/<arch>/', methods=['GET', 'HEAD'])
+@verbs.route('/aci/<server>/<namespace>/<repository>/<tag>/aci/<os>/<arch>/', methods=[
+ 'GET', 'HEAD'])
@process_auth
def get_aci_image(server, namespace, repository, tag, os, arch):
- return _repo_verb(namespace, repository, tag, 'aci', AppCImageFormatter(),
- sign=True, checker=os_arch_checker(os, arch), os=os, arch=arch)
+ return _repo_verb(namespace, repository, tag, 'aci',
+ AppCImageFormatter(), sign=True, checker=os_arch_checker(os, arch), os=os,
+ arch=arch)
@anon_protect
@@ -344,20 +468,36 @@ def get_squashed_tag(namespace, repository, tag):
@verbs.route('/torrent{0}'.format(BLOB_DIGEST_ROUTE), methods=['GET'])
@process_auth
@parse_repository_name()
+@check_region_blacklisted(namespace_name_kwarg='namespace_name')
def get_tag_torrent(namespace_name, repo_name, digest):
+ repo = model.repository.get_repository(namespace_name, repo_name)
+ repo_is_public = repo is not None and model.repository.is_repository_public(repo)
+
permission = ReadRepositoryPermission(namespace_name, repo_name)
- public_repo = model.repository_is_public(namespace_name, repo_name)
- if not permission.can() and not public_repo:
+ if not permission.can() and not repo_is_public:
abort(403)
user = get_authenticated_user()
- if user is None and not public_repo:
+ if user is None and not repo_is_public:
# We can not generate a private torrent cluster without a user uuid (e.g. token auth)
abort(403)
- blob = model.get_repo_blob_by_digest(namespace_name, repo_name, digest)
+ if repo is not None and repo.kind.name != 'image':
+ abort(405)
+
+ repo_ref = registry_model.lookup_repository(namespace_name, repo_name)
+ if repo_ref is None:
+ abort(404)
+
+ blob = registry_model.get_repo_blob_by_digest(repo_ref, digest, include_placements=True)
if blob is None:
abort(404)
metric_queue.repository_pull.Inc(labelvalues=[namespace_name, repo_name, 'torrent', True])
- return _torrent_for_blob(blob, public_repo)
+ return _torrent_for_blob(blob, repo_is_public)
+
+
+@verbs.route('/_internal_ping')
+@anon_allowed
+def internal_ping():
+ return make_response('true', 200)
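
Aside: the read-only branches above all hang off the same producer/consumer fan-out: one
forked process builds the derived-image stream, and each interested party (client
download, storage write-back, optional signer) reads an identical copy through its own
`QueueFile`. A conceptual sketch of that fan-out; the real `QueueProcess`/`QueueFile`
additionally bound buffering (the 10MB max above) and propagate exceptions:

    def fan_out(chunk_iter, queues):
      # Every consumer queue receives its own copy of every chunk, so each
      # consumer sees the full stream exactly once. None marks end-of-stream.
      for chunk in chunk_iter:
        for q in queues:
          q.put(chunk)
      for q in queues:
        q.put(None)

    # Usage sketch: run fan_out in the producer process with one
    # multiprocessing.Queue per consumer, e.g. [client_queue, storage_queue].
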
diff --git a/endpoints/verbs/test/test_security.py b/endpoints/verbs/test/test_security.py
new file mode 100644
index 000000000..eeb79c567
--- /dev/null
+++ b/endpoints/verbs/test/test_security.py
@@ -0,0 +1,74 @@
+import pytest
+
+from flask import url_for
+from endpoints.test.shared import conduct_call, gen_basic_auth
+from test.fixtures import *
+
+NO_ACCESS_USER = 'freshuser'
+READ_ACCESS_USER = 'reader'
+ADMIN_ACCESS_USER = 'devtable'
+CREATOR_ACCESS_USER = 'creator'
+
+PUBLIC_REPO = 'public/publicrepo'
+PRIVATE_REPO = 'devtable/shared'
+ORG_REPO = 'buynlarge/orgrepo'
+ANOTHER_ORG_REPO = 'buynlarge/anotherorgrepo'
+
+ACI_ARGS = {
+ 'server': 'someserver',
+ 'tag': 'fake',
+ 'os': 'linux',
+ 'arch': 'x64',}
+
+
+@pytest.mark.parametrize('user', [
+ (0, None),
+ (1, NO_ACCESS_USER),
+ (2, READ_ACCESS_USER),
+ (3, CREATOR_ACCESS_USER),
+ (4, ADMIN_ACCESS_USER),])
+@pytest.mark.parametrize(
+ 'endpoint,method,repository,single_repo_path,params,expected_statuses',
+ [
+ ('get_aci_signature', 'GET', PUBLIC_REPO, False, ACI_ARGS, (404, 404, 404, 404, 404)),
+ ('get_aci_signature', 'GET', PRIVATE_REPO, False, ACI_ARGS, (403, 403, 404, 403, 404)),
+ ('get_aci_signature', 'GET', ORG_REPO, False, ACI_ARGS, (403, 403, 404, 403, 404)),
+ ('get_aci_signature', 'GET', ANOTHER_ORG_REPO, False, ACI_ARGS, (403, 403, 403, 403, 404)),
+
+ # get_aci_image
+ ('get_aci_image', 'GET', PUBLIC_REPO, False, ACI_ARGS, (404, 404, 404, 404, 404)),
+ ('get_aci_image', 'GET', PRIVATE_REPO, False, ACI_ARGS, (403, 403, 404, 403, 404)),
+ ('get_aci_image', 'GET', ORG_REPO, False, ACI_ARGS, (403, 403, 404, 403, 404)),
+ ('get_aci_image', 'GET', ANOTHER_ORG_REPO, False, ACI_ARGS, (403, 403, 403, 403, 404)),
+
+ # get_squashed_tag
+ ('get_squashed_tag', 'GET', PUBLIC_REPO, False, dict(tag='fake'), (404, 404, 404, 404, 404)),
+ ('get_squashed_tag', 'GET', PRIVATE_REPO, False, dict(tag='fake'), (403, 403, 404, 403, 404)),
+ ('get_squashed_tag', 'GET', ORG_REPO, False, dict(tag='fake'), (403, 403, 404, 403, 404)),
+ ('get_squashed_tag', 'GET', ANOTHER_ORG_REPO, False, dict(tag='fake'), (403, 403, 403, 403,
+ 404)),
+
+ # get_tag_torrent
+ ('get_tag_torrent', 'GET', PUBLIC_REPO, True, dict(digest='sha256:1234'), (404, 404, 404, 404,
+ 404)),
+ ('get_tag_torrent', 'GET', PRIVATE_REPO, True, dict(digest='sha256:1234'), (403, 403, 404, 403,
+ 404)),
+ ('get_tag_torrent', 'GET', ORG_REPO, True, dict(digest='sha256:1234'), (403, 403, 404, 403,
+ 404)),
+ ('get_tag_torrent', 'GET', ANOTHER_ORG_REPO, True, dict(digest='sha256:1234'), (403, 403, 403,
+ 403, 404)),])
+def test_verbs_security(user, endpoint, method, repository, single_repo_path, params,
+ expected_statuses, app, client):
+ headers = {}
+ if user[1] is not None:
+ headers['Authorization'] = gen_basic_auth(user[1], 'password')
+
+ if single_repo_path:
+ params['repository'] = repository
+ else:
+ (namespace, repo_name) = repository.split('/')
+ params['namespace'] = namespace
+ params['repository'] = repo_name
+
+ conduct_call(client, 'verbs.' + endpoint, url_for, method, params,
+ expected_code=expected_statuses[user[0]], headers=headers)
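
Aside: `gen_basic_auth` is imported from endpoints.test.shared; presumably it builds the
same header the v2auth test above constructs inline, along these lines (sketch, using
the Python 2 string semantics this codebase relies on):

    import base64

    def gen_basic_auth(username, password):
      # RFC 7617 Basic credentials: base64 of 'username:password'.
      return 'Basic %s' % base64.b64encode('%s:%s' % (username, password))
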
diff --git a/endpoints/web.py b/endpoints/web.py
index f3a6f7ce7..df1f775b9 100644
--- a/endpoints/web.py
+++ b/endpoints/web.py
@@ -1,40 +1,48 @@
+import os
import json
import logging
from datetime import timedelta, datetime
-from cachetools import lru_cache
+from cachetools.func import lru_cache
from flask import (abort, redirect, request, url_for, make_response, Response, render_template,
- Blueprint, jsonify, send_file)
+ Blueprint, jsonify, send_file, session)
from flask_login import current_user
import features
from app import (app, billing as stripe, build_logs, avatar, signer, log_archive, config_provider,
- get_app_url, instance_keys, user_analytics)
+ get_app_url, instance_keys, user_analytics, storage)
from auth import scopes
from auth.auth_context import get_authenticated_user
+from auth.basic import has_basic_auth
+from auth.decorators import require_session_login, process_oauth, process_auth_or_cookie
from auth.permissions import (AdministerOrganizationPermission, ReadRepositoryPermission,
SuperUserPermission, AdministerRepositoryPermission,
ModifyRepositoryPermission, OrganizationMemberPermission)
-from auth.process import require_session_login, process_oauth, has_basic_auth, process_auth_or_cookie
from buildtrigger.basehandler import BuildTriggerHandler
from buildtrigger.bitbuckethandler import BitbucketBuildTrigger
from buildtrigger.customhandler import CustomBuildTrigger
from buildtrigger.triggerutil import TriggerProviderException
from data import model
-from data.database import db
+from data.database import db, RepositoryTag, TagToRepositoryTag
from endpoints.api.discovery import swagger_route_data
-from endpoints.common import (common_login, render_page_template, route_show_if, param_required,
- parse_repository_name)
+from endpoints.common import common_login, render_page_template
from endpoints.csrf import csrf_protect, generate_csrf_token, verify_csrf
-from endpoints.decorators import anon_protect, anon_allowed
+from endpoints.decorators import (anon_protect, anon_allowed, route_show_if, parse_repository_name,
+ param_required)
from health.healthcheck import get_healthchecker
from util.cache import no_cache
from util.headers import parse_basic_auth
from util.invoice import renderInvoiceToPdf
-from util.systemlogs import build_logs_archive
+from util.saas.useranalytics import build_error_callback
from util.useremails import send_email_changed
+from util.registry.gzipinputstream import GzipInputStream
+from util.request import get_request_ip
+from _init import ROOT_DIR
+
+
+PGP_KEY_MIMETYPE = 'application/pgp-keys'
@lru_cache(maxsize=1)
@@ -59,6 +67,10 @@ STATUS_TAGS = app.config['STATUS_TAGS']
def index(path, **kwargs):
return render_page_template_with_routedata('index.html', **kwargs)
+@web.route('/_internal_ping')
+@anon_allowed
+def internal_ping():
+ return make_response('true', 200)
@web.route('/500', methods=['GET'])
def internal_error_display():
@@ -71,13 +83,25 @@ def not_found_error_display(e = None):
resp.status_code = 404
return resp
+@web.route('/opensearch.xml')
+def opensearch():
+ template = render_template('opensearch.xml',
+ baseurl=get_app_url(),
+ registry_title=app.config.get('REGISTRY_TITLE', 'Quay'))
+ resp = make_response(template)
+ resp.headers['Content-Type'] = 'application/xml'
+ return resp
+
+
@web.route('/organization/<path:path>', methods=['GET'])
+@web.route('/organization/<path:path>/', methods=['GET'])
@no_cache
def org_view(path):
return index('')
@web.route('/user/<path:path>', methods=['GET'])
+@web.route('/user/<path:path>/', methods=['GET'])
@no_cache
def user_view(path):
return index('')
@@ -91,7 +115,8 @@ def aci_signing_key():
if not signer.name:
abort(404)
- return send_file(signer.open_public_key_file())
+ return send_file(signer.open_public_key_file(), mimetype=PGP_KEY_MIMETYPE)
+
@web.route('/plans/')
@no_cache
@@ -100,6 +125,12 @@ def plans():
return index('')
+@web.route('/search')
+@no_cache
+def search():
+ return index('')
+
+
@web.route('/guide/')
@no_cache
def guide():
@@ -140,6 +171,27 @@ def setup():
return index('')
+@web.route('/upgradeprogress/')
+@no_cache
+@route_show_if(not features.BILLING)
+@route_show_if(app.config.get('V3_UPGRADE_MODE') == 'background')
+def upgrade_progress():
+ total_tags = RepositoryTag.select().where(RepositoryTag.hidden == False).count()
+ if total_tags == 0:
+ return jsonify({
+ 'progress': 1.0,
+ 'tags_remaining': 0,
+ 'total_tags': 0,
+ })
+
+ upgraded_tags = TagToRepositoryTag.select().count()
+ return jsonify({
+ 'progress': float(upgraded_tags) / total_tags,
+ 'tags_remaining': total_tags - upgraded_tags,
+ 'total_tags': total_tags,
+ })
+
+
@web.route('/signin/')
@no_cache
def signin(redirect=None):
@@ -183,9 +235,18 @@ def confirm_invite():
def repository(path):
return index('')
-@web.route('/starred/')
+
+@web.route('/repository/<path:path>/trigger/<trigger>', methods=['GET'])
@no_cache
-def starred():
+def buildtrigger(path, trigger):
+ return index('')
+
+
+@route_show_if(features.APP_REGISTRY)
+@web.route('/application/', defaults={'path': ''})
+@web.route('/application/<path:path>', methods=['GET'])
+@no_cache
+def application(path):
return index('')
@@ -199,7 +260,7 @@ def security():
@no_cache
@route_show_if(features.BILLING)
def enterprise():
- return index('')
+ return redirect('/plans?tab=enterprise')
@web.route('/__exp/ |