Merge remote-tracking branch 'upstream/master' into python-registry-v2

commit 210ed7cf02
148 changed files with 1829 additions and 445 deletions

@@ -1,7 +1,7 @@
 import logging
 import datetime
 
-from app import app
+from app import app, metric_queue
 from flask import Blueprint, request, make_response, jsonify, session
 from flask.ext.restful import Resource, abort, Api, reqparse
 from flask.ext.restful.utils.cors import crossdomain
@@ -20,6 +20,7 @@ from auth.auth_context import get_authenticated_user, get_validated_oauth_token
 from auth.auth import process_oauth
 from endpoints.csrf import csrf_protect
 from endpoints.decorators import check_anon_protection
+from util.saas.metricqueue import time_decorator
 
 
 logger = logging.getLogger(__name__)
@@ -28,7 +29,7 @@ api = Api()
 api.init_app(api_bp)
 api.decorators = [csrf_protect,
                   crossdomain(origin='*', headers=['Authorization', 'Content-Type']),
-                  process_oauth]
+                  process_oauth, time_decorator(api_bp.name, metric_queue)]
 
 
 class ApiException(Exception):
@@ -3,8 +3,10 @@
 import logging
 import json
 import datetime
+import hashlib
 
 from flask import request
+from rfc3987 import parse as uri_parse
 
 from app import app, userfiles as user_files, build_logs, log_archive, dockerfile_build_queue
 from endpoints.api import (RepositoryParamResource, parse_args, query_param, nickname, resource,
@@ -134,8 +136,11 @@ def build_status_view(build_obj):
     }
   }
 
-  if can_write and build_obj.resource_key is not None:
-    resp['archive_url'] = user_files.get_file_url(build_obj.resource_key, requires_cors=True)
+  if can_write:
+    if build_obj.resource_key is not None:
+      resp['archive_url'] = user_files.get_file_url(build_obj.resource_key, requires_cors=True)
+    elif job_config.get('archive_url', None):
+      resp['archive_url'] = job_config['archive_url']
 
   return resp
 
@@ -148,14 +153,15 @@ class RepositoryBuildList(RepositoryParamResource):
     'RepositoryBuildRequest': {
       'type': 'object',
       'description': 'Description of a new repository build.',
-      'required': [
-        'file_id',
-      ],
       'properties': {
         'file_id': {
           'type': 'string',
           'description': 'The file id that was generated when the build spec was uploaded',
         },
+        'archive_url': {
+          'type': 'string',
+          'description': 'The URL of the .tar.gz to build. Must start with "http" or "https".',
+        },
         'subdirectory': {
           'type': 'string',
           'description': 'Subdirectory in which the Dockerfile can be found',
@@ -204,7 +210,26 @@ class RepositoryBuildList(RepositoryParamResource):
     logger.debug('User requested repository initialization.')
     request_json = request.get_json()
 
-    dockerfile_id = request_json['file_id']
+    dockerfile_id = request_json.get('file_id', None)
+    archive_url = request_json.get('archive_url', None)
+
+    if not dockerfile_id and not archive_url:
+      raise InvalidRequest('file_id or archive_url required')
+
+    if archive_url:
+      archive_match = None
+      try:
+        archive_match = uri_parse(archive_url, 'URI')
+      except ValueError:
+        pass
+
+      if not archive_match:
+        raise InvalidRequest('Invalid Archive URL: Must be a valid URI')
+
+      scheme = archive_match.get('scheme', None)
+      if scheme != 'http' and scheme != 'https':
+        raise InvalidRequest('Invalid Archive URL: Must be http or https')
+
     subdir = request_json['subdirectory'] if 'subdirectory' in request_json else ''
     tags = request_json.get('docker_tags', ['latest'])
     pull_robot_name = request_json.get('pull_robot', None)
@@ -228,18 +253,24 @@ class RepositoryBuildList(RepositoryParamResource):
     # Check if the dockerfile resource has already been used. If so, then it
     # can only be reused if the user has access to the repository in which the
     # dockerfile was previously built.
-    associated_repository = model.build.get_repository_for_resource(dockerfile_id)
-    if associated_repository:
-      if not ModifyRepositoryPermission(associated_repository.namespace_user.username,
-                                        associated_repository.name):
-        raise Unauthorized()
+    if dockerfile_id:
+      associated_repository = model.build.get_repository_for_resource(dockerfile_id)
+      if associated_repository:
+        if not ModifyRepositoryPermission(associated_repository.namespace_user.username,
+                                          associated_repository.name):
+          raise Unauthorized()
 
     # Start the build.
     repo = model.repository.get_repository(namespace, repository)
 
+    build_name = (user_files.get_file_checksum(dockerfile_id)
+                  if dockerfile_id
+                  else hashlib.sha224(archive_url).hexdigest()[0:7])
+
     prepared = PreparedBuild()
-    prepared.build_name = user_files.get_file_checksum(dockerfile_id)
+    prepared.build_name = build_name
     prepared.dockerfile_id = dockerfile_id
+    prepared.archive_url = archive_url
     prepared.tags = tags
     prepared.subdirectory = subdir
     prepared.is_manual = True
@@ -278,6 +278,46 @@ class OrganizationMemberList(ApiResource):
 class OrganizationMember(ApiResource):
   """ Resource for managing individual organization members. """
 
+  @require_scope(scopes.ORG_ADMIN)
+  @nickname('getOrganizationMember')
+  def get(self, orgname, membername):
+    """ Retrieves the details of a member of the organization.
+    """
+    permission = AdministerOrganizationPermission(orgname)
+    if permission.can():
+      # Lookup the user.
+      member = model.user.get_user(membername)
+      if not member:
+        raise NotFound()
+
+      organization = model.user.get_user_or_org(orgname)
+      if not organization:
+        raise NotFound()
+
+      # Lookup the user's information in the organization.
+      teams = list(model.team.get_user_teams_within_org(membername, organization))
+      if not teams:
+        raise NotFound()
+
+      repo_permissions = model.permission.list_organization_member_permissions(organization, member)
+
+      def local_team_view(team):
+        return {
+          'name': team.name,
+          'avatar': avatar.get_data_for_team(team),
+        }
+
+      return {
+        'name': member.username,
+        'kind': 'robot' if member.robot else 'user',
+        'avatar': avatar.get_data_for_user(member),
+        'teams': [local_team_view(team) for team in teams],
+        'repositories': [permission.repository.name for permission in repo_permissions]
+      }
+
+    raise Unauthorized()
+
+
   @require_scope(scopes.ORG_ADMIN)
   @nickname('removeOrganizationMember')
   def delete(self, orgname, membername):
@@ -26,7 +26,8 @@ def notification_view(note):
     'uuid': note.uuid,
     'event': note.event.name,
     'method': note.method.name,
-    'config': config
+    'config': config,
+    'title': note.title,
   }
 
 
@@ -55,7 +56,11 @@ class RepositoryNotificationList(RepositoryParamResource):
         'config': {
           'type': 'object',
          'description': 'JSON config information for the specific method of notification'
-        }
+        },
+        'title': {
+          'type': 'string',
+          'description': 'The human-readable title of the notification',
+        },
       }
     },
   }
@@ -78,7 +83,8 @@ class RepositoryNotificationList(RepositoryParamResource):
       raise request_error(message=ex.message)
 
     new_notification = model.notification.create_repo_notification(repo, parsed['event'],
-                                                                   parsed['method'], parsed['config'])
+                                                                   parsed['method'], parsed['config'],
+                                                                   parsed.get('title', None))
 
     resp = notification_view(new_notification)
     log_action('add_repo_notification', namespace,
@@ -461,6 +461,7 @@ class TriggerBuildList(RepositoryParamResource):
   }
 
 
+FIELD_VALUE_LIMIT = 30
 
 @resource('/v1/repository/<repopath:repository>/trigger/<trigger_uuid>/fields/<field_name>')
 @internal_only
@@ -479,7 +480,7 @@ class BuildTriggerFieldValues(RepositoryParamResource):
     user_permission = UserAdminPermission(trigger.connected_user.username)
     if user_permission.can():
       handler = BuildTriggerHandler.get_handler(trigger, config)
-      values = handler.list_field_values(field_name)
+      values = handler.list_field_values(field_name, limit=FIELD_VALUE_LIMIT)
 
       if values is None:
         raise NotFound()
@@ -28,7 +28,8 @@ def start_build(repository, prepared_build, pull_robot_name=None):
     'build_subdir': prepared_build.subdirectory,
     'trigger_metadata': prepared_build.metadata or {},
     'is_manual': prepared_build.is_manual,
-    'manual_user': get_authenticated_user().username if get_authenticated_user() else None
+    'manual_user': get_authenticated_user().username if get_authenticated_user() else None,
+    'archive_url': prepared_build.archive_url
   }
 
   with app.config['DB_TRANSACTION_FACTORY'](db):
@@ -83,6 +84,7 @@ class PreparedBuild(object):
   """
   def __init__(self, trigger=None):
     self._dockerfile_id = None
+    self._archive_url = None
     self._tags = None
     self._build_name = None
     self._subdirectory = None
@@ -124,6 +126,17 @@ class PreparedBuild(object):
   def trigger(self):
     return self._trigger
 
+  @property
+  def archive_url(self):
+    return self._archive_url
+
+  @archive_url.setter
+  def archive_url(self, value):
+    if self._archive_url:
+      raise Exception('Property archive_url already set')
+
+    self._archive_url = value
+
   @property
   def dockerfile_id(self):
     return self._dockerfile_id
@@ -4,8 +4,9 @@ import requests
 import re
 
 from flask.ext.mail import Message
-from app import mail, app
+from app import mail, app, OVERRIDE_CONFIG_DIRECTORY
 from data import model
+from util.config.validator import SSL_FILENAMES
 from workers.queueworker import JobException
 
 logger = logging.getLogger(__name__)
@@ -20,6 +21,11 @@ class NotificationMethodPerformException(JobException):
   pass
 
 
+SSLClientCert = None
+if app.config['PREFERRED_URL_SCHEME'] == 'https':
+  # TODO(jschorr): move this into the config provider library
+  SSLClientCert = [OVERRIDE_CONFIG_DIRECTORY + f for f in SSL_FILENAMES]
+
 class NotificationMethod(object):
   def __init__(self):
     pass
@@ -177,7 +183,7 @@ class WebhookMethod(NotificationMethod):
     headers = {'Content-type': 'application/json'}
 
     try:
-      resp = requests.post(url, data=json.dumps(payload), headers=headers)
+      resp = requests.post(url, data=json.dumps(payload), headers=headers, cert=SSLClientCert)
       if resp.status_code/100 != 2:
         error_message = '%s response for webhook to url: %s' % (resp.status_code, url)
         logger.error(error_message)
@@ -197,7 +197,7 @@ class BuildTriggerHandler(object):
     """
     raise NotImplementedError
 
-  def list_field_values(self, field_name):
+  def list_field_values(self, field_name, limit=None):
     """
     Lists all values for the given custom trigger field. For example, a trigger might have a
     field named "branches", and this method would return all branches.
@@ -434,7 +434,7 @@ class BitbucketBuildTrigger(BuildTriggerHandler):
 
     return data
 
-  def list_field_values(self, field_name):
+  def list_field_values(self, field_name, limit=None):
     source = self.config['build_source']
     (namespace, name) = source.split('/')
 
@@ -457,14 +457,22 @@ class BitbucketBuildTrigger(BuildTriggerHandler):
      if not result:
        return None
 
-      return data.keys()
+      tags = list(data.keys())
+      if limit:
+        tags = tags[0:limit]
+
+      return tags
 
     if field_name == 'branch_name':
       (result, data, _) = repository.get_branches()
       if not result:
         return None
 
-      return data.keys()
+      branches = list(data.keys())
+      if limit:
+        branches = branches[0:limit]
+
+      return branches
 
     return None
 
@@ -548,7 +556,7 @@ class BitbucketBuildTrigger(BuildTriggerHandler):
 
   def handle_trigger_request(self, request):
     payload = request.get_json()
-    if not 'push' in payload:
+    if not payload or not 'push' in payload:
       logger.debug('Skipping BitBucket request due to missing push data in payload')
       raise SkipRequestException()
 
@@ -1039,7 +1047,7 @@ class GithubBuildTrigger(BuildTriggerHandler):
     return self._prepare_build(ref, commit_sha, True, repo=repo)
 
 
-  def list_field_values(self, field_name):
+  def list_field_values(self, field_name, limit=None):
     if field_name == 'refs':
       branches = self.list_field_values('branch_name')
       tags = self.list_field_values('tag_name')
@@ -1053,7 +1061,11 @@ class GithubBuildTrigger(BuildTriggerHandler):
         gh_client = self._get_client()
         source = config['build_source']
         repo = gh_client.get_repo(source)
-        return [tag.name for tag in repo.get_tags()]
+        gh_tags = repo.get_tags()
+        if limit:
+          gh_tags = repo.get_tags()[0:limit]
+
+        return [tag.name for tag in gh_tags]
       except GitHubBadCredentialsException:
         return []
       except GithubException:
@@ -1066,7 +1078,11 @@ class GithubBuildTrigger(BuildTriggerHandler):
         gh_client = self._get_client()
         source = config['build_source']
         repo = gh_client.get_repo(source)
-        branches = [branch.name for branch in repo.get_branches()]
+        gh_branches = repo.get_branches()
+        if limit:
+          gh_branches = repo.get_branches()[0:limit]
+
+        branches = [branch.name for branch in gh_branches]
 
         if not repo.default_branch in branches:
           branches.insert(0, repo.default_branch)
@@ -1417,7 +1433,7 @@ class GitLabBuildTrigger(BuildTriggerHandler):
 
     return contents
 
-  def list_field_values(self, field_name):
+  def list_field_values(self, field_name, limit=None):
     if field_name == 'refs':
       branches = self.list_field_values('branch_name')
       tags = self.list_field_values('tag_name')
@@ -1434,12 +1450,20 @@ class GitLabBuildTrigger(BuildTriggerHandler):
       tags = gl_client.getrepositorytags(repo['id'])
       if tags is False:
         return []
 
+      if limit:
+        tags = tags[0:limit]
+
       return [tag['name'] for tag in tags]
 
     if field_name == 'branch_name':
       branches = gl_client.getbranches(repo['id'])
       if branches is False:
         return []
 
+      if limit:
+        branches = branches[0:limit]
+
       return [branch['name'] for branch in branches]
 
     return None
@@ -1,10 +1,13 @@
 from flask import Blueprint, make_response
 
+from app import metric_queue
 from endpoints.decorators import anon_protect, anon_allowed
+from util.saas.metricqueue import time_blueprint
 
 
 v1_bp = Blueprint('v1', __name__)
 
+time_blueprint(v1_bp, metric_queue)
 
 # Note: This is *not* part of the Docker index spec. This is here for our own health check,
 # since we have nginx handle the _ping below.
@@ -1,12 +1,13 @@
 import logging
 import json
+import features
 
 from flask import make_response, request, session, Response, redirect, abort as flask_abort
 from functools import wraps
 from datetime import datetime
 from time import time
 
-from app import storage as store, image_diff_queue, app
+from app import storage as store, image_diff_queue, image_replication_queue, app
 from auth.auth import process_auth, extract_namespace_repo_from_session
 from auth.auth_context import get_authenticated_user, get_grant_user_context
 from digest import checksums
@@ -36,6 +37,30 @@ def set_uploading_flag(repo_image, is_image_uploading):
   repo_image.storage.save()
 
 
+def _finish_image(namespace, repository, repo_image):
+  # Checksum is ok, we remove the marker
+  set_uploading_flag(repo_image, False)
+
+  image_id = repo_image.docker_image_id
+
+  # The layer is ready for download, send a job to the work queue to
+  # process it.
+  logger.debug('Adding layer to diff queue')
+  repo = model.repository.get_repository(namespace, repository)
+  image_diff_queue.put([repo.namespace_user.username, repository, image_id], json.dumps({
+    'namespace_user_id': repo.namespace_user.id,
+    'repository': repository,
+    'image_id': image_id,
+  }))
+
+  # Send a job to the work queue to replicate the image layer.
+  if features.STORAGE_REPLICATION:
+    image_replication_queue.put([repo_image.storage.uuid], json.dumps({
+      'namespace_user_id': repo.namespace_user.id,
+      'storage_id': repo_image.storage.uuid,
+    }))
+
+
 def require_completion(f):
   """This make sure that the image push correctly finished."""
   @wraps(f)
@@ -210,7 +235,11 @@ def put_image_layer(namespace, repository, image_id):
 
   # Stream write the data to storage.
   with database.CloseForLongOperation(app.config):
-    store.stream_write(repo_image.storage.locations, layer_path, sr)
+    try:
+      store.stream_write(repo_image.storage.locations, layer_path, sr)
+    except IOError:
+      logger.exception('Exception when writing image data')
+      abort(520, 'Image %(image_id)s could not be written. Please try again.', image_id=image_id)
 
   # Append the computed checksum.
   csums = []
@@ -243,18 +272,8 @@ def put_image_layer(namespace, repository, image_id):
     abort(400, 'Checksum mismatch; ignoring the layer for image %(image_id)s',
           issue='checksum-mismatch', image_id=image_id)
 
-  # Checksum is ok, we remove the marker
-  set_uploading_flag(repo_image, False)
-
-  # The layer is ready for download, send a job to the work queue to
-  # process it.
-  logger.debug('Adding layer to diff queue')
-  repo = model.repository.get_repository(namespace, repository)
-  image_diff_queue.put([repo.namespace_user.username, repository, image_id], json.dumps({
-    'namespace_user_id': repo.namespace_user.id,
-    'repository': repository,
-    'image_id': image_id,
-  }))
+  # Mark the image as uploaded.
+  _finish_image(namespace, repository, repo_image)
 
   return make_response('true', 200)
 
@@ -316,18 +335,8 @@ def put_image_checksum(namespace, repository, image_id):
     abort(400, 'Checksum mismatch for image: %(image_id)s',
           issue='checksum-mismatch', image_id=image_id)
 
-  # Checksum is ok, we remove the marker
-  set_uploading_flag(repo_image, False)
-
-  # The layer is ready for download, send a job to the work queue to
-  # process it.
-  logger.debug('Adding layer to diff queue')
-  repo = model.repository.get_repository(namespace, repository)
-  image_diff_queue.put([repo.namespace_user.username, repository, image_id], json.dumps({
-    'namespace_user_id': repo.namespace_user.id,
-    'repository': repository,
-    'image_id': image_id,
-  }))
+  # Mark the image as uploaded.
+  _finish_image(namespace, repository, repo_image)
 
   return make_response('true', 200)
 
@@ -7,6 +7,7 @@ from flask import Blueprint, make_response, url_for, request, jsonify
 from functools import wraps
 from urlparse import urlparse
 
+from app import metric_queue
 from endpoints.decorators import anon_protect, anon_allowed
 from endpoints.v2.errors import V2RegistryException
 from auth.jwt_auth import process_jwt_auth
@@ -15,13 +16,14 @@ from auth.permissions import (ReadRepositoryPermission, ModifyRepositoryPermissi
                               AdministerRepositoryPermission)
 from data import model
 from util.http import abort
+from util.saas.metricqueue import time_blueprint
 from app import app
 
 
 logger = logging.getLogger(__name__)
 v2_bp = Blueprint('v2', __name__)
+time_blueprint(v2_bp, metric_queue)
 
 @v2_bp.app_errorhandler(V2RegistryException)
 def handle_registry_v2_exception(error):