Merge remote-tracking branch 'origin/allyourbaseimage'

Conflicts:
	test/data/test.db
	test/test_api_usage.py
commit f4642be11a

126 changed files with 647 additions and 207332 deletions

@@ -25,7 +25,7 @@ from auth.permissions import (ReadRepositoryPermission,
                               AdministerOrganizationPermission,
                               OrganizationMemberPermission,
                               ViewTeamPermission)
-from endpoints.common import common_login
+from endpoints.common import common_login, truthy_param
 from util.cache import cache_control
 from datetime import datetime, timedelta

@@ -390,7 +390,7 @@ def get_matching_entities(prefix):
     if permission.can():
       robot_namespace = namespace_name

-    if request.args.get('includeTeams', False):
+    if truthy_param(request.args.get('includeTeams', False)):
       teams = model.get_matching_teams(prefix, organization)

   except model.InvalidOrganizationException:
@@ -984,20 +984,16 @@ def list_repos():
   page = request.args.get('page', None)
   limit = request.args.get('limit', None)
   namespace_filter = request.args.get('namespace', None)
-  include_public = request.args.get('public', 'true')
-  include_private = request.args.get('private', 'true')
-  sort = request.args.get('sort', 'false')
-  include_count = request.args.get('count', 'false')
+  include_public = truthy_param(request.args.get('public', True))
+  include_private = truthy_param(request.args.get('private', True))
+  sort = truthy_param(request.args.get('sort', False))
+  include_count = truthy_param(request.args.get('count', False))

   try:
     limit = int(limit) if limit else None
   except TypeError:
     limit = None

-  include_public = include_public == 'true'
-  include_private = include_private == 'true'
-  include_count = include_count == 'true'
-  sort = sort == 'true'
   if page:
     try:
       page = int(page)
@@ -1012,7 +1008,6 @@ def list_repos():
   if include_count:
     repo_count = model.get_visible_repository_count(username,
                                                     include_public=include_public,
-                                                    sort=sort,
                                                     namespace=namespace_filter)

   repo_query = model.get_visible_repositories(username, limit=limit, page=page,
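
Illustrative aside (not part of the diff): the hunks above replace string comparison against 'true' with the truthy_param helper imported from endpoints.common (defined in a hunk further down). A minimal standalone sketch of the behavioural difference, using a plain dict in place of Flask's request.args:

# Standalone sketch: how list_repos interprets a boolean query parameter
# before and after this change.  `args` stands in for Flask's request.args.
def truthy_param(param):
  return param not in {False, 'false', 'False', '0', 'FALSE', '', 'null'}

def old_include_public(args):
  # Old behaviour: fetch as a string and compare against the literal 'true'.
  return args.get('public', 'true') == 'true'

def new_include_public(args):
  # New behaviour: anything outside truthy_param's falsy set counts as true.
  return truthy_param(args.get('public', True))

for args in ({}, {'public': 'true'}, {'public': 'True'}, {'public': '0'}):
  print('%r -> old=%s new=%s' % (args, old_include_public(args),
                                 new_include_public(args)))
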
@@ -1089,14 +1084,19 @@ def delete_repository(namespace, repository):


 def image_view(image):
+  extended_props = image
+  if image.storage and image.storage.id:
+    extended_props = image.storage
+
+  command = extended_props.command
   return {
     'id': image.docker_image_id,
-    'created': image.created,
-    'comment': image.comment,
-    'command': json.loads(image.command) if image.command else None,
+    'created': extended_props.created,
+    'comment': extended_props.comment,
+    'command': json.loads(command) if command else None,
     'ancestors': image.ancestors,
     'dbid': image.id,
-    'size': image.image_size,
+    'size': extended_props.image_size,
   }

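
Illustrative sketch (not part of the commit): the fallback image_view now performs, preferring the attached storage row for created/comment/command/size when one exists. Simple stand-in objects are used here instead of the real database models.

class Obj(object):
  def __init__(self, **kwargs):
    self.__dict__.update(kwargs)

def pick_props(image):
  # Mirrors the hunk above: prefer the shared storage row when it is present.
  extended_props = image
  if image.storage and image.storage.id:
    extended_props = image.storage
  return extended_props

legacy = Obj(storage=None, created='2013-10-01', comment=None, command=None)
shared = Obj(storage=Obj(id=7, created='2013-10-02', comment=None,
                         command='["bash"]'))

assert pick_props(legacy) is legacy           # no storage row: use the image row
assert pick_props(shared) is shared.storage   # storage row present: use it
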
@@ -1432,7 +1432,14 @@ def get_image(namespace, repository, image_id):
 def get_image_changes(namespace, repository, image_id):
   permission = ReadRepositoryPermission(namespace, repository)
   if permission.can() or model.repository_is_public(namespace, repository):
-    diffs_path = store.image_file_diffs_path(namespace, repository, image_id)
+    image = model.get_repo_image(namespace, repository, image_id)
+
+    if not image:
+      abort(404)
+
+    uuid = image.storage and image.storage.uuid
+    diffs_path = store.image_file_diffs_path(namespace, repository, image_id,
+                                             uuid)

     try:
       response_json = store.get_content(diffs_path)
@@ -1449,7 +1456,8 @@ def get_image_changes(namespace, repository, image_id):
 def delete_full_tag(namespace, repository, tag):
   permission = AdministerRepositoryPermission(namespace, repository)
   if permission.can():
-    model.delete_tag_and_images(namespace, repository, tag)
+    model.delete_tag(namespace, repository, tag)
+    model.garbage_collect_repository(namespace, repository)

     username = current_user.db_user().username
     log_action('delete_tag', namespace,

@@ -14,6 +14,10 @@ from auth.permissions import QuayDeferredPermissionUser
 logger = logging.getLogger(__name__)


+def truthy_param(param):
+  return param not in {False, 'false', 'False', '0', 'FALSE', '', 'null'}
+
+
 @login_manager.user_loader
 def load_user(username):
   logger.debug('Loading user: %s' % username)

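
A quick sketch of the semantics of the truthy_param helper added above (the definition is copied verbatim from the hunk): only the values in the explicit set are treated as false; every other value, including arbitrary non-empty strings, is treated as true.

def truthy_param(param):
  return param not in {False, 'false', 'False', '0', 'FALSE', '', 'null'}

assert not truthy_param(False)
assert not truthy_param('false')
assert not truthy_param('FALSE')
assert not truthy_param('0')
assert not truthy_param('')
assert not truthy_param('null')
assert truthy_param(True)
assert truthy_param('true')
assert truthy_param('1')
assert truthy_param('anything-else')  # unrecognized strings are truthy
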
@@ -33,6 +33,11 @@ def generate_headers(role='read'):
     session['namespace'] = namespace
     session['repository'] = repository

+    if get_authenticated_user():
+      session['username'] = get_authenticated_user().username
+    else:
+      session.pop('username', None)
+
     # We run our index and registry on the same hosts for now
     registry_server = urlparse.urlparse(request.url).netloc
     response.headers['X-Docker-Endpoints'] = registry_server
@@ -179,8 +184,9 @@ def create_repository(namespace, repository):
     if existing.docker_image_id in new_repo_images:
       added_images.pop(existing.docker_image_id)

+  username = get_authenticated_user() and get_authenticated_user().username
   for image_description in added_images.values():
-    model.create_image(image_description['id'], repo)
+    model.create_or_link_image(image_description['id'], repo, username)

   response = make_response('Created', 201)

@@ -44,8 +44,12 @@ def require_completion(f):
   """This make sure that the image push correctly finished."""
   @wraps(f)
   def wrapper(namespace, repository, *args, **kwargs):
-    if store.exists(store.image_mark_path(namespace, repository,
-                                          kwargs['image_id'])):
+    image_id = kwargs['image_id']
+    repo_image = model.get_repo_image(namespace, repository, image_id)
+    uuid = repo_image and repo_image.storage and repo_image.storage.uuid
+
+    if store.exists(store.image_mark_path(namespace, repository, image_id,
+                                          uuid)):
       abort(400, 'Image %(image_id)s is being uploaded, retry later',
             issue='upload-in-progress', image_id=kwargs['image_id'])

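
Illustrative sketch (not part of the commit): the `repo_image and repo_image.storage and repo_image.storage.uuid` chain used throughout these registry hunks. It short-circuits to a falsy value (None here) whenever the image or its storage row is missing, and the store.*_path helpers are now expected to accept that value as their new trailing argument.

class Obj(object):
  def __init__(self, **kwargs):
    self.__dict__.update(kwargs)

def storage_uuid(repo_image):
  # Same short-circuit chain as in the hunks above.
  return repo_image and repo_image.storage and repo_image.storage.uuid

assert storage_uuid(None) is None                     # image row missing
assert storage_uuid(Obj(storage=None)) is None        # storage row missing
assert storage_uuid(Obj(storage=Obj(uuid='abc-123'))) == 'abc-123'
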
@@ -85,14 +89,18 @@ def set_cache_headers(f):
 def get_image_layer(namespace, repository, image_id, headers):
   permission = ReadRepositoryPermission(namespace, repository)
   if permission.can() or model.repository_is_public(namespace, repository):
-    path = store.image_layer_path(namespace, repository, image_id)
+    repo_image = model.get_repo_image(namespace, repository, image_id)
+    uuid = repo_image and repo_image.storage and repo_image.storage.uuid
+
+    path = store.image_layer_path(namespace, repository, image_id, uuid)
     direct_download_url = store.get_direct_download_url(path)
     if direct_download_url:
       return redirect(direct_download_url)
     try:
       return Response(store.stream_read(path), headers=headers)
     except IOError:
-      abort(404, 'Image %(image_id)s not found', issue='unknown-image', image_id=image_id)
+      abort(404, 'Image %(image_id)s not found', issue='unknown-image',
+            image_id=image_id)

   abort(403)

@@ -105,14 +113,17 @@ def put_image_layer(namespace, repository, image_id):
   if not permission.can():
     abort(403)

+  repo_image = model.get_repo_image(namespace, repository, image_id)
+  uuid = repo_image and repo_image.storage and repo_image.storage.uuid
   try:
     json_data = store.get_content(store.image_json_path(namespace, repository,
-                                                        image_id))
+                                                        image_id, uuid))
   except IOError:
-    abort(404, 'Image %(image_id)s not found', issue='unknown-image', image_id=image_id)
+    abort(404, 'Image %(image_id)s not found', issue='unknown-image',
+          image_id=image_id)

-  layer_path = store.image_layer_path(namespace, repository, image_id)
-  mark_path = store.image_mark_path(namespace, repository, image_id)
+  layer_path = store.image_layer_path(namespace, repository, image_id, uuid)
+  mark_path = store.image_mark_path(namespace, repository, image_id, uuid)

   if store.exists(layer_path) and not store.exists(mark_path):
     abort(409, 'Image already exists', issue='image-exists', image_id=image_id)
@@ -149,7 +160,7 @@ def put_image_layer(namespace, repository, image_id):
   try:
     checksum = store.get_content(store.image_checksum_path(namespace,
                                                            repository,
-                                                           image_id))
+                                                           image_id, uuid))
   except IOError:
     # We don't have a checksum stored yet, that's fine skipping the check.
     # Not removing the mark though, image is not downloadable yet.
@@ -193,15 +204,18 @@ def put_image_checksum(namespace, repository, image_id):
     abort(400, 'Checksum not found in Cookie for image %(imaage_id)s',
           issue='missing-checksum-cookie', image_id=image_id)

-  if not store.exists(store.image_json_path(namespace, repository, image_id)):
+  repo_image = model.get_repo_image(namespace, repository, image_id)
+  uuid = repo_image and repo_image.storage and repo_image.storage.uuid
+  if not store.exists(store.image_json_path(namespace, repository, image_id,
+                                            uuid)):
     abort(404, 'Image not found: %(image_id)s', issue='unknown-image', image_id=image_id)

-  mark_path = store.image_mark_path(namespace, repository, image_id)
+  mark_path = store.image_mark_path(namespace, repository, image_id, uuid)
   if not store.exists(mark_path):
     abort(409, 'Cannot set checksum for image %(image_id)s',
           issue='image-write-error', image_id=image_id)

-  err = store_checksum(namespace, repository, image_id, checksum)
+  err = store_checksum(namespace, repository, image_id, uuid, checksum)
   if err:
     abort(400, err)

@@ -238,20 +252,24 @@ def get_image_json(namespace, repository, image_id, headers):
                                                              repository):
     abort(403)

+  repo_image = model.get_repo_image(namespace, repository, image_id)
+  uuid = repo_image and repo_image.storage and repo_image.storage.uuid
+
   try:
     data = store.get_content(store.image_json_path(namespace, repository,
-                                                    image_id))
+                                                    image_id, uuid))
   except IOError:
     flask_abort(404)

   try:
     size = store.get_size(store.image_layer_path(namespace, repository,
-                                                 image_id))
+                                                 image_id, uuid))
     headers['X-Docker-Size'] = str(size)
   except OSError:
     pass

-  checksum_path = store.image_checksum_path(namespace, repository, image_id)
+  checksum_path = store.image_checksum_path(namespace, repository, image_id,
+                                            uuid)
   if store.exists(checksum_path):
     headers['X-Docker-Checksum'] = store.get_content(checksum_path)

@@ -271,39 +289,45 @@ def get_image_ancestry(namespace, repository, image_id, headers):
                                                              repository):
     abort(403)

+  repo_image = model.get_repo_image(namespace, repository, image_id)
+  uuid = repo_image and repo_image.storage and repo_image.storage.uuid
+
   try:
     data = store.get_content(store.image_ancestry_path(namespace, repository,
-                                                        image_id))
+                                                        image_id, uuid))
   except IOError:
-    abort(404, 'Image %(image_id)s not found', issue='unknown-image', image_id=image_id)
+    abort(404, 'Image %(image_id)s not found', issue='unknown-image',
+          image_id=image_id)

   response = make_response(json.dumps(json.loads(data)), 200)
   response.headers.extend(headers)
   return response


-def generate_ancestry(namespace, repository, image_id, parent_id=None):
+def generate_ancestry(namespace, repository, image_id, uuid, parent_id=None,
+                      parent_uuid=None):
   if not parent_id:
     store.put_content(store.image_ancestry_path(namespace, repository,
-                                                image_id),
+                                                image_id, uuid),
                       json.dumps([image_id]))
     return
   data = store.get_content(store.image_ancestry_path(namespace, repository,
-                                                     parent_id))
+                                                     parent_id, parent_uuid))
   data = json.loads(data)
   data.insert(0, image_id)
   store.put_content(store.image_ancestry_path(namespace, repository,
-                                              image_id),
+                                              image_id, uuid),
                     json.dumps(data))


-def store_checksum(namespace, repository, image_id, checksum):
+def store_checksum(namespace, repository, image_id, uuid, checksum):
   checksum_parts = checksum.split(':')
   if len(checksum_parts) != 2:
     return 'Invalid checksum format'

   # We store the checksum
-  checksum_path = store.image_checksum_path(namespace, repository, image_id)
+  checksum_path = store.image_checksum_path(namespace, repository, image_id,
+                                            uuid)
   store.put_content(checksum_path, checksum)

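
Illustrative sketch (not part of the commit) of what generate_ancestry writes: the ancestry file for an image is its own id prepended to its parent's ancestry list. The new uuid/parent_uuid arguments only change where the file is stored, so an in-memory dict stands in for the storage backend here.

import json

fake_store = {}

def generate_ancestry(image_id, parent_id=None):
  # Same logic as the hunk above, minus the storage-path plumbing.
  if not parent_id:
    fake_store[image_id] = json.dumps([image_id])
    return
  data = json.loads(fake_store[parent_id])
  data.insert(0, image_id)
  fake_store[image_id] = json.dumps(data)

generate_ancestry('base')
generate_ancestry('middle', 'base')
generate_ancestry('leaf', 'middle')
assert json.loads(fake_store['leaf']) == ['leaf', 'middle', 'base']
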
@@ -327,58 +351,69 @@ def put_image_json(namespace, repository, image_id):
     abort(400, 'Missing key `id` in JSON for image: %(image_id)s',
           issue='invalid-request', image_id=image_id)

+  repo_image = model.get_repo_image(namespace, repository, image_id)
+  uuid = repo_image and repo_image.storage and repo_image.storage.uuid
+
   # Read the checksum
   checksum = request.headers.get('X-Docker-Checksum')
   if checksum:
     # Storing the checksum is optional at this stage
-    err = store_checksum(namespace, repository, image_id, checksum)
+    err = store_checksum(namespace, repository, image_id, uuid, checksum)
     if err:
       abort(400, err, issue='write-error')

   else:
     # We cleanup any old checksum in case it's a retry after a fail
-    store.remove(store.image_checksum_path(namespace, repository, image_id))
+    store.remove(store.image_checksum_path(namespace, repository, image_id,
+                                           uuid))
   if image_id != data['id']:
     abort(400, 'JSON data contains invalid id for image: %(image_id)s',
           issue='invalid-request', image_id=image_id)

   parent_id = data.get('parent')

+  parent_image = None
+  if parent_id:
+    parent_image = model.get_repo_image(namespace, repository, parent_id)
+    parent_uuid = (parent_image and parent_image.storage and
+                   parent_image.storage.uuid)
+
   if (parent_id and not
-      store.exists(store.image_json_path(namespace, repository, parent_id))):
+      store.exists(store.image_json_path(namespace, repository, parent_id,
+                                         parent_uuid))):
     abort(400, 'Image %(image_id)s depends on non existing parent image %(parent_id)s',
           issue='invalid-request', image_id=image_id, parent_id=parent_id)

-  json_path = store.image_json_path(namespace, repository, image_id)
-  mark_path = store.image_mark_path(namespace, repository, image_id)
+  json_path = store.image_json_path(namespace, repository, image_id, uuid)
+  mark_path = store.image_mark_path(namespace, repository, image_id, uuid)
   if store.exists(json_path) and not store.exists(mark_path):
     abort(409, 'Image already exists', issue='image-exists', image_id=image_id)

   # If we reach that point, it means that this is a new image or a retry
   # on a failed push
   # save the metadata
-  if parent_id:
-    parent_obj = model.get_image_by_id(namespace, repository, parent_id)
-  else:
-    parent_obj = None

   command_list = data.get('container_config', {}).get('Cmd', None)
   command = json.dumps(command_list) if command_list else None
   model.set_image_metadata(image_id, namespace, repository,
                            data.get('created'), data.get('comment'), command,
-                           parent_obj)
+                           parent_image)
   store.put_content(mark_path, 'true')
   store.put_content(json_path, request.data)
-  generate_ancestry(namespace, repository, image_id, parent_id)
+  generate_ancestry(namespace, repository, image_id, uuid, parent_id,
+                    parent_uuid)
   return make_response('true', 200)


 def process_image_changes(namespace, repository, image_id):
   logger.debug('Generating diffs for image: %s' % image_id)

+  repo_image = model.get_repo_image(namespace, repository, image_id)
+  uuid = repo_image and repo_image.storage and repo_image.storage.uuid
+
   image_diffs_path = store.image_file_diffs_path(namespace, repository,
-                                                 image_id)
+                                                 image_id, uuid)
   image_trie_path = store.image_file_trie_path(namespace, repository,
-                                               image_id)
+                                               image_id, uuid)

   if store.exists(image_diffs_path):
     logger.debug('Diffs already exist for image: %s' % image_id)
@@ -400,7 +435,7 @@ def process_image_changes(namespace, repository, image_id):
     parent_trie.frombytes(parent_trie_bytes)

   # Read in the file entries from the layer tar file
-  layer_path = store.image_layer_path(namespace, repository, image_id)
+  layer_path = store.image_layer_path(namespace, repository, image_id, uuid)
   with store.stream_read_file(layer_path) as layer_tar_stream:
     removed_files = set()
     layer_files = changes.files_and_dirs_from_tar(layer_tar_stream,
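
Finally, a hypothetical sketch of the storage-path side of this change. The real store implementation is not part of this diff, so the path layout below is invented purely for illustration; what the diff does establish is that every image_*_path helper now takes the storage uuid as an extra trailing argument and must still work when that value is falsy (legacy images without a storage row).

def image_json_path(namespace, repository, image_id, uuid=None):
  # Hypothetical layout, for illustration only: fall back to the old
  # per-repository path when no storage uuid is available.
  if uuid:
    return 'sharedimages/%s/json' % uuid
  return 'images/%s/%s/%s/json' % (namespace, repository, image_id)

assert image_json_path('ns', 'repo', 'img') == 'images/ns/repo/img/json'
assert image_json_path('ns', 'repo', 'img', 'u-123') == 'sharedimages/u-123/json'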