2013-09-25 21:50:03 +00:00
|
|
|
import logging
|
2013-09-30 20:14:48 +00:00
|
|
|
import json
|
2013-09-25 21:50:03 +00:00
|
|
|
|
2014-01-24 22:00:42 +00:00
|
|
|
from flask import (make_response, request, session, Response, redirect,
|
2014-02-26 21:03:00 +00:00
|
|
|
Blueprint, abort as flask_abort)
|
2013-09-25 21:50:03 +00:00
|
|
|
from functools import wraps
|
|
|
|
from datetime import datetime
|
|
|
|
from time import time
|
|
|
|
|
2014-05-21 23:50:37 +00:00
|
|
|
from app import storage as store, image_diff_queue
|
2014-01-24 20:01:40 +00:00
|
|
|
from auth.auth import process_auth, extract_namespace_repo_from_session
|
2013-10-18 18:31:14 +00:00
|
|
|
from util import checksums, changes
|
2014-01-24 20:01:40 +00:00
|
|
|
from util.http import abort
|
2013-09-26 17:42:24 +00:00
|
|
|
from auth.permissions import (ReadRepositoryPermission,
|
|
|
|
ModifyRepositoryPermission)
|
2013-09-26 19:58:11 +00:00
|
|
|
from data import model
|
2013-09-25 21:50:03 +00:00
|
|
|
|
2014-04-03 21:31:46 +00:00
|
|
|
|
2013-12-30 22:05:27 +00:00
|
|
|
registry = Blueprint('registry', __name__)
|
2013-09-25 21:50:03 +00:00
|
|
|
|
|
|
|
logger = logging.getLogger(__name__)
|
2014-05-02 01:19:52 +00:00
|
|
|
profile = logging.getLogger('application.profiler')
|
2014-01-24 19:12:04 +00:00
|
|
|
|
2013-09-25 21:50:03 +00:00
|
|
|
class SocketReader(object):
  """File-like wrapper that tees every chunk read to registered handlers."""

  def __init__(self, fp):
    # Underlying readable stream; handlers observe each chunk read from it.
    self._fp = fp
    self.handlers = []

  def add_handler(self, handler):
    """Register a callable invoked with each non-empty chunk that is read."""
    self.handlers.append(handler)

  def read(self, n=-1):
    """Read up to n bytes from the wrapped stream.

    Every non-empty chunk is passed to all registered handlers before being
    returned; at EOF an empty string is returned and handlers are not called.
    """
    chunk = self._fp.read(n)
    if chunk:
      for observer in self.handlers:
        observer(chunk)
      return chunk
    return ''
|
2013-09-25 21:50:03 +00:00
|
|
|
|
|
|
|
|
2014-05-02 20:59:46 +00:00
|
|
|
def image_is_uploading(namespace, repository, image_id, repo_image):
  """Return whether the given image's layer upload is still in progress.

  Prefers the `uploading` flag on the image's storage row; when that flag has
  never been set (legacy images), falls back to checking the on-disk mark
  file in the storage backend.
  """
  storage = repo_image and repo_image.storage
  if storage and storage.uploading is not None:
    return storage.uploading

  # Legacy image: no database flag, consult the filesystem marker instead.
  logger.warning('Setting legacy upload flag')
  legacy_uuid = storage and storage.uuid
  mark_path = store.image_mark_path(namespace, repository, image_id,
                                    legacy_uuid)
  return store.exists(mark_path)
|
|
|
|
|
|
|
|
|
|
|
|
def mark_upload_complete(namespace, repository, image_id, repo_image):
  """Record that the image's layer upload has finished.

  Clears and persists the `uploading` flag on the storage row when it exists;
  otherwise (legacy images) removes the on-disk mark file, if present.
  """
  storage = repo_image and repo_image.storage
  if storage and storage.uploading is not None:
    storage.uploading = False
    storage.save()
    return

  # Legacy image: no database flag, clear the filesystem marker instead.
  logger.warning('Removing legacy upload flag')
  legacy_uuid = storage and storage.uuid
  mark_path = store.image_mark_path(namespace, repository, image_id,
                                    legacy_uuid)
  if store.exists(mark_path):
    store.remove(mark_path)
|
|
|
|
|
|
|
|
|
2013-09-25 21:50:03 +00:00
|
|
|
def require_completion(f):
  """Decorator that rejects requests for images whose push has not finished.

  Looks up the image named by the `image_id` keyword argument and aborts with
  a 400 while its layer is still uploading; otherwise calls through to the
  wrapped view.
  """
  @wraps(f)
  def wrapper(namespace, repository, *args, **kwargs):
    image_id = kwargs['image_id']
    repo_image = model.get_repo_image(namespace, repository, image_id)

    # Never serve a layer that is still being written.
    if image_is_uploading(namespace, repository, image_id, repo_image):
      abort(400, 'Image %(image_id)s is being uploaded, retry later',
            issue='upload-in-progress', image_id=image_id)

    return f(namespace, repository, *args, **kwargs)

  return wrapper
|
|
|
|
|
|
|
|
|
|
|
|
def set_cache_headers(f):
  """Decorator that supplies long-lived HTTP caching headers to a view.

  Injects a `headers` keyword argument (Cache-Control/Expires/Last-Modified)
  into the wrapped view, or short-circuits with a 304 response when the
  client sent `If-Modified-Since`.
  """
  @wraps(f)
  def wrapper(*args, **kwargs):
    # Responses are cacheable for one year.
    one_year = 31536000
    expiry = datetime.fromtimestamp(int(time()) + one_year)
    caching_headers = {
        'Cache-Control': 'public, max-age={0}'.format(one_year),
        'Expires': expiry.strftime('%a, %d %b %Y %H:%M:%S GMT'),
        'Last-Modified': 'Thu, 01 Jan 1970 00:00:00 GMT',
    }

    # Layer content is immutable, so any conditional request can be answered
    # with "not modified".
    if 'If-Modified-Since' in request.headers:
      not_modified = make_response('Not modified', 304)
      not_modified.headers.extend(caching_headers)
      return not_modified

    kwargs['headers'] = caching_headers
    # Suppress the session cookie so shared caches may store the response.
    session.modified = False
    return f(*args, **kwargs)

  return wrapper
|
|
|
|
|
|
|
|
|
2013-12-30 22:05:27 +00:00
|
|
|
@registry.route('/images/<image_id>/layer', methods=['GET'])
@process_auth
@extract_namespace_repo_from_session
@require_completion
@set_cache_headers
def get_image_layer(namespace, repository, image_id, headers):
  """Serve an image's layer blob, via redirect or by streaming it.

  Requires read permission (or a public repository). Prefers a direct
  download URL from the storage backend when one is available; otherwise
  streams the layer bytes through this process.
  """
  permission = ReadRepositoryPermission(namespace, repository)

  profile.debug('Checking repo permissions')
  if permission.can() or model.repository_is_public(namespace, repository):
    profile.debug('Looking up repo image')
    repo_image = model.get_repo_image(namespace, repository, image_id)

    # uuid is None when the image or its storage row is missing.
    uuid = repo_image and repo_image.storage and repo_image.storage.uuid

    profile.debug('Looking up the layer path')
    path = store.image_layer_path(namespace, repository, image_id, uuid)

    profile.debug('Looking up the direct download URL')
    direct_download_url = store.get_direct_download_url(path)

    if direct_download_url:
      # Offload the transfer to the storage backend (e.g. a signed URL).
      profile.debug('Returning direct download URL')
      return redirect(direct_download_url)
    try:
      profile.debug('Streaming layer data')
      return Response(store.stream_read(path), headers=headers)
    except IOError:
      profile.debug('Image not found')
      abort(404, 'Image %(image_id)s not found', issue='unknown-image',
            image_id=image_id)

  # No read permission and the repository is not public.
  abort(403)
|
2013-09-25 21:50:03 +00:00
|
|
|
|
|
|
|
|
2013-12-30 22:05:27 +00:00
|
|
|
@registry.route('/images/<image_id>/layer', methods=['PUT'])
@process_auth
@extract_namespace_repo_from_session
def put_image_layer(namespace, repository, image_id):
  """Receive an image layer upload, storing it and computing its checksums.

  The request body is streamed simultaneously into the storage backend and
  through checksum handlers. If a checksum was stored earlier (by the json
  endpoint) it is verified now; otherwise the computed checksums are stashed
  in the session for the later /checksum request to verify.
  """
  profile.debug('Checking repo permissions')
  permission = ModifyRepositoryPermission(namespace, repository)
  if not permission.can():
    abort(403)

  profile.debug('Retrieving image')
  repo_image = model.get_repo_image(namespace, repository, image_id)

  # uuid is None when the image or its storage row is missing.
  uuid = repo_image and repo_image.storage and repo_image.storage.uuid
  try:
    profile.debug('Retrieving image data')
    # The image's JSON metadata must have been PUT before the layer; it also
    # feeds the tarsum computation below.
    json_data = store.get_content(store.image_json_path(namespace, repository,
                                                        image_id, uuid))
  except IOError:
    abort(404, 'Image %(image_id)s not found', issue='unknown-image',
          image_id=image_id)

  profile.debug('Retrieving image path info')
  layer_path = store.image_layer_path(namespace, repository, image_id, uuid)

  # A layer that exists and is no longer marked as uploading is final.
  if (store.exists(layer_path) and not
      image_is_uploading(namespace, repository, image_id, repo_image)):
    abort(409, 'Image already exists', issue='image-exists', image_id=image_id)

  profile.debug('Storing layer data')

  input_stream = request.stream
  if request.headers.get('transfer-encoding') == 'chunked':
    # Careful, might work only with WSGI servers supporting chunked
    # encoding (Gunicorn)
    input_stream = request.environ['wsgi.input']

  # compute checksums while streaming the body straight into storage
  csums = []
  sr = SocketReader(input_stream)
  # tmp is a temp spool of the layer bytes, reused below for the tarsum.
  tmp, store_hndlr = store.temp_store_handler()
  sr.add_handler(store_hndlr)
  h, sum_hndlr = checksums.simple_checksum_handler(json_data)
  sr.add_handler(sum_hndlr)
  store.stream_write(layer_path, sr)
  csums.append('sha256:{0}'.format(h.hexdigest()))

  try:
    # After streaming, the temp file's position equals the layer size.
    image_size = tmp.tell()

    # Save the size of the image.
    model.set_image_size(image_id, namespace, repository, image_size)

    tmp.seek(0)
    # Tarsum is a second, content-aware checksum; failure here is non-fatal.
    csums.append(checksums.compute_tarsum(tmp, json_data))
    tmp.close()
  except (IOError, checksums.TarError) as e:
    logger.debug('put_image_layer: Error when computing tarsum '
                 '{0}'.format(e))

  try:
    checksum = store.get_content(store.image_checksum_path(namespace,
                                                           repository,
                                                           image_id, uuid))
  except IOError:
    # We don't have a checksum stored yet, that's fine skipping the check.
    # Not removing the mark though, image is not downloadable yet.
    # The /checksum endpoint will compare against this session value later.
    session['checksum'] = csums
    return make_response('true', 200)

  # We check if the checksums provided matches one the one we computed
  if checksum not in csums:
    logger.warning('put_image_layer: Wrong checksum')
    abort(400, 'Checksum mismatch; ignoring the layer for image %(image_id)s',
          issue='checksum-mismatch', image_id=image_id)

  # Checksum is ok, we remove the marker
  mark_upload_complete(namespace, repository, image_id, repo_image)

  # The layer is ready for download, send a job to the work queue to
  # process it.
  profile.debug('Adding layer to diff queue')
  image_diff_queue.put([namespace, repository, image_id], json.dumps({
      'namespace': namespace,
      'repository': repository,
      'image_id': image_id,
  }))

  return make_response('true', 200)
|
|
|
|
|
|
|
|
|
2013-12-30 22:05:27 +00:00
|
|
|
@registry.route('/images/<image_id>/checksum', methods=['PUT'])
@process_auth
@extract_namespace_repo_from_session
def put_image_checksum(namespace, repository, image_id):
  """Verify and store the client-supplied checksum for an uploaded layer.

  The checksum from the X-Docker-Checksum header is compared against the
  checksums computed during the layer upload (stashed in the session by
  put_image_layer). On success the upload marker is cleared and a diff job
  is queued.
  """
  profile.debug('Checking repo permissions')
  permission = ModifyRepositoryPermission(namespace, repository)
  if not permission.can():
    abort(403)

  checksum = request.headers.get('X-Docker-Checksum')
  if not checksum:
    abort(400, "Missing checksum for image %(image_id)s", issue='missing-checksum', image_id=image_id)

  # put_image_layer stores its computed checksums in the session; without
  # them there is nothing to compare against.
  if not session.get('checksum'):
    abort(400, 'Checksum not found in Cookie for image %(image_id)s',
          issue='missing-checksum-cookie', image_id=image_id)

  profile.debug('Looking up repo image')
  repo_image = model.get_repo_image(namespace, repository, image_id)

  # uuid is None when the image or its storage row is missing.
  uuid = repo_image and repo_image.storage and repo_image.storage.uuid

  profile.debug('Looking up repo layer data')
  if not store.exists(store.image_json_path(namespace, repository, image_id,
                                            uuid)):
    abort(404, 'Image not found: %(image_id)s', issue='unknown-image', image_id=image_id)

  profile.debug('Marking image path')
  # A checksum may only be set while the upload is still marked in progress.
  if not image_is_uploading(namespace, repository, image_id, repo_image):
    abort(409, 'Cannot set checksum for image %(image_id)s',
          issue='image-write-error', image_id=image_id)

  profile.debug('Storing image checksum')
  err = store_checksum(namespace, repository, image_id, uuid, checksum)
  if err:
    abort(400, err)

  if checksum not in session.get('checksum', []):
    logger.debug('session checksums: %s' % session.get('checksum', []))
    logger.debug('client supplied checksum: %s' % checksum)
    logger.debug('put_image_layer: Wrong checksum')
    abort(400, 'Checksum mismatch for image: %(image_id)s',
          issue='checksum-mismatch', image_id=image_id)

  # Checksum is ok, we remove the marker
  mark_upload_complete(namespace, repository, image_id, repo_image)

  # The layer is ready for download, send a job to the work queue to
  # process it.
  profile.debug('Adding layer to diff queue')
  image_diff_queue.put([namespace, repository, image_id], json.dumps({
      'namespace': namespace,
      'repository': repository,
      'image_id': image_id,
  }))

  return make_response('true', 200)
|
|
|
|
|
|
|
|
|
2013-12-30 22:05:27 +00:00
|
|
|
@registry.route('/images/<image_id>/json', methods=['GET'])
@process_auth
@extract_namespace_repo_from_session
@require_completion
@set_cache_headers
def get_image_json(namespace, repository, image_id, headers):
  """Serve an image's JSON metadata, with size and checksum headers.

  Requires read permission (or a public repository). Adds X-Docker-Size and
  X-Docker-Checksum headers when available.
  """
  profile.debug('Checking repo permissions')
  permission = ReadRepositoryPermission(namespace, repository)
  if not permission.can() and not model.repository_is_public(namespace,
                                                             repository):
    abort(403)

  profile.debug('Looking up repo image')
  repo_image = model.get_repo_image(namespace, repository, image_id)
  # uuid is None when the image or its storage row is missing.
  uuid = repo_image and repo_image.storage and repo_image.storage.uuid

  profile.debug('Looking up repo layer data')
  try:
    data = store.get_content(store.image_json_path(namespace, repository,
                                                   image_id, uuid))
  except IOError:
    flask_abort(404)

  profile.debug('Looking up repo layer size')
  try:
    # Prefer the size on the image row, falling back to the storage row.
    # NOTE(review): only OSError is caught here; an AttributeError from a
    # missing repo_image would propagate — presumably the IOError/404 path
    # above covers that case. Confirm before relying on it.
    size = repo_image.image_size or repo_image.storage.image_size
    headers['X-Docker-Size'] = str(size)
  except OSError:
    pass

  profile.debug('Retrieving checksum')
  checksum_path = store.image_checksum_path(namespace, repository, image_id,
                                            uuid)
  if store.exists(checksum_path):
    headers['X-Docker-Checksum'] = store.get_content(checksum_path)

  response = make_response(data, 200)
  response.headers.extend(headers)
  return response
|
|
|
|
|
|
|
|
|
2013-12-30 22:05:27 +00:00
|
|
|
@registry.route('/images/<image_id>/ancestry', methods=['GET'])
@process_auth
@extract_namespace_repo_from_session
@require_completion
@set_cache_headers
def get_image_ancestry(namespace, repository, image_id, headers):
  """Serve the JSON list of ancestor image ids for an image.

  Requires read permission (or a public repository). The ancestry document
  is stored by generate_ancestry at push time.
  """
  profile.debug('Checking repo permissions')
  permission = ReadRepositoryPermission(namespace, repository)
  if not permission.can() and not model.repository_is_public(namespace,
                                                             repository):
    abort(403)

  profile.debug('Looking up repo image')
  repo_image = model.get_repo_image(namespace, repository, image_id)
  # uuid is None when the image or its storage row is missing.
  uuid = repo_image and repo_image.storage and repo_image.storage.uuid

  profile.debug('Looking up image data')
  try:
    data = store.get_content(store.image_ancestry_path(namespace, repository,
                                                       image_id, uuid))
  except IOError:
    abort(404, 'Image %(image_id)s not found', issue='unknown-image',
          image_id=image_id)

  profile.debug('Converting to <-> from JSON')
  # Round-trip through json to normalize the stored document before serving.
  response = make_response(json.dumps(json.loads(data)), 200)
  response.headers.extend(headers)

  profile.debug('Done')
  return response
|
|
|
|
|
|
|
|
|
2014-02-16 22:38:47 +00:00
|
|
|
def generate_ancestry(namespace, repository, image_id, uuid, parent_id=None,
                      parent_uuid=None):
  """Write the ancestry document (a JSON list of image ids) for an image.

  A root image gets a single-entry list containing only itself; otherwise
  the parent's ancestry is loaded and the new image id is prepended.
  """
  if not parent_id:
    # Root image: its ancestry is just itself.
    store.put_content(store.image_ancestry_path(namespace, repository,
                                                image_id, uuid),
                      json.dumps([image_id]))
    return

  parent_ancestry = json.loads(
      store.get_content(store.image_ancestry_path(namespace, repository,
                                                  parent_id, parent_uuid)))
  store.put_content(store.image_ancestry_path(namespace, repository,
                                              image_id, uuid),
                    json.dumps([image_id] + parent_ancestry))
|
2013-09-25 21:50:03 +00:00
|
|
|
|
|
|
|
|
2014-02-16 22:38:47 +00:00
|
|
|
def store_checksum(namespace, repository, image_id, uuid, checksum):
  """Persist the client-supplied checksum for an image.

  Returns an error message string when the checksum is malformed, and None
  on success (matching the caller's `if err:` convention).
  """
  # A well-formed checksum looks like '<algorithm>:<hexdigest>'.
  if len(checksum.split(':')) != 2:
    return 'Invalid checksum format'

  # We store the checksum
  store.put_content(store.image_checksum_path(namespace, repository, image_id,
                                              uuid),
                    checksum)
|
|
|
|
|
|
|
|
|
2013-12-30 22:05:27 +00:00
|
|
|
@registry.route('/images/<image_id>/json', methods=['PUT'])
@process_auth
@extract_namespace_repo_from_session
def put_image_json(namespace, repository, image_id):
  """Store an image's JSON metadata and initialize its ancestry document.

  Validates the JSON body and parent linkage, optionally stores the checksum
  from X-Docker-Checksum, records the metadata in the database, writes the
  raw JSON to storage, and generates the ancestry file.
  """
  profile.debug('Checking repo permissions')
  permission = ModifyRepositoryPermission(namespace, repository)
  if not permission.can():
    abort(403)

  profile.debug('Parsing image JSON')
  # Pre-bind so a parse failure falls through to the invalid-JSON abort
  # below instead of raising NameError on the `if not data` check.
  data = None
  try:
    data = json.loads(request.data)
  except ValueError:
    # json.loads raises ValueError on malformed input (JSONDecodeError, where
    # it exists, subclasses ValueError); the guard below reports the 400.
    pass
  if not data or not isinstance(data, dict):
    abort(400, 'Invalid JSON for image: %(image_id)s\nJSON: %(json)s',
          issue='invalid-request', image_id=image_id, json=request.data)

  if 'id' not in data:
    abort(400, 'Missing key `id` in JSON for image: %(image_id)s',
          issue='invalid-request', image_id=image_id)

  profile.debug('Looking up repo image')
  repo_image = model.get_repo_image(namespace, repository, image_id)
  # uuid is None when the image or its storage row is missing.
  uuid = repo_image and repo_image.storage and repo_image.storage.uuid

  # Read the checksum
  checksum = request.headers.get('X-Docker-Checksum')
  if checksum:
    # Storing the checksum is optional at this stage
    profile.debug('Storing image checksum')
    err = store_checksum(namespace, repository, image_id, uuid, checksum)
    if err:
      abort(400, err, issue='write-error')

  else:
    # We cleanup any old checksum in case it's a retry after a fail
    profile.debug('Cleanup old checksum')
    store.remove(store.image_checksum_path(namespace, repository, image_id,
                                           uuid))
  if image_id != data['id']:
    abort(400, 'JSON data contains invalid id for image: %(image_id)s',
          issue='invalid-request', image_id=image_id)

  parent_id = data.get('parent')

  parent_image = None
  if parent_id:
    profile.debug('Looking up parent image')
    parent_image = model.get_repo_image(namespace, repository, parent_id)

  parent_uuid = (parent_image and parent_image.storage and
                 parent_image.storage.uuid)

  if parent_id:
    profile.debug('Looking up parent image data')

  # The parent's JSON must already be in storage before a child references it.
  if (parent_id and not
      store.exists(store.image_json_path(namespace, repository, parent_id,
                                         parent_uuid))):
    abort(400, 'Image %(image_id)s depends on non existing parent image %(parent_id)s',
          issue='invalid-request', image_id=image_id, parent_id=parent_id)

  profile.debug('Looking up image storage paths')
  json_path = store.image_json_path(namespace, repository, image_id, uuid)

  profile.debug('Checking if image already exists')
  # Existing JSON with a completed upload means a duplicate push.
  if (store.exists(json_path) and not
      image_is_uploading(namespace, repository, image_id, repo_image)):
    abort(409, 'Image already exists', issue='image-exists', image_id=image_id)

  # If we reach that point, it means that this is a new image or a retry
  # on a failed push
  # save the metadata
  command_list = data.get('container_config', {}).get('Cmd', None)
  command = json.dumps(command_list) if command_list else None

  profile.debug('Setting image metadata')
  model.set_image_metadata(image_id, namespace, repository,
                           data.get('created'), data.get('comment'), command,
                           parent_image)

  profile.debug('Putting json path')
  # Store the raw request body, not the re-serialized dict, so the bytes the
  # client pushed are exactly what later GETs return.
  store.put_content(json_path, request.data)

  profile.debug('Generating image ancestry')
  generate_ancestry(namespace, repository, image_id, uuid, parent_id,
                    parent_uuid)

  profile.debug('Done')
  return make_response('true', 200)
|
2013-10-01 16:13:25 +00:00
|
|
|
|
|
|
|
|
2013-10-17 22:25:19 +00:00
|
|
|
def process_image_changes(namespace, repository, image_id):
  """Compute and store the filesystem diffs and collapsed-fs trie for an image.

  Recursively ensures the parent chain is processed first, then merges the
  image's layer tar into the parent's filesystem trie. Returns the storage
  path of the image's trie so callers (including the recursion) can load it.
  Already-processed images are skipped.
  """
  logger.debug('Generating diffs for image: %s' % image_id)

  repo_image = model.get_repo_image(namespace, repository, image_id)
  # uuid is None when the image or its storage row is missing.
  uuid = repo_image and repo_image.storage and repo_image.storage.uuid

  image_diffs_path = store.image_file_diffs_path(namespace, repository,
                                                 image_id, uuid)
  image_trie_path = store.image_file_trie_path(namespace, repository,
                                               image_id, uuid)

  # The diffs file doubles as a "done" marker for this image.
  if store.exists(image_diffs_path):
    logger.debug('Diffs already exist for image: %s' % image_id)
    return image_trie_path

  image = model.get_image_by_id(namespace, repository, image_id)
  parents = model.get_parent_images(image)

  # Compute the diffs and fs for the parent first if necessary
  parent_trie_path = None
  if parents:
    # parents[-1] is the direct parent; the recursion handles the rest.
    parent_trie_path = process_image_changes(namespace, repository,
                                             parents[-1].docker_image_id)

  # Read in the collapsed layer state of the filesystem for the parent
  parent_trie = changes.empty_fs()
  if parent_trie_path:
    parent_trie_bytes = store.get_content(parent_trie_path)
    parent_trie.frombytes(parent_trie_bytes)

  # Read in the file entries from the layer tar file
  layer_path = store.image_layer_path(namespace, repository, image_id, uuid)
  with store.stream_read_file(layer_path) as layer_tar_stream:
    removed_files = set()
    layer_files = changes.files_and_dirs_from_tar(layer_tar_stream,
                                                  removed_files)

    # new_metadata is (new_trie, added, changed, removed).
    new_metadata = changes.compute_new_diffs_and_fs(parent_trie, layer_files,
                                                    removed_files)
    (new_trie, added, changed, removed) = new_metadata

    # Write out the new trie
    store.put_content(image_trie_path, new_trie.tobytes())

    # Write out the diffs
    diffs = {}
    sections = ('added', 'changed', 'removed')
    for section, source_trie in zip(sections, new_metadata[1:]):
      # Sorted lists keep the stored diff document deterministic.
      diffs[section] = list(source_trie)
      diffs[section].sort()
    store.put_content(image_diffs_path, json.dumps(diffs, indent=2))

  return image_trie_path
|