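"""Endpoints implementing the original Docker registry image API: pushing and pulling image
layers, checksums, JSON metadata and ancestry information."""
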
import logging
import json

from flask import (make_response, request, session, Response, redirect,
                   Blueprint, abort as flask_abort)
from functools import wraps
from datetime import datetime
from time import time

from app import storage as store, image_diff_queue
from auth.auth import process_auth, extract_namespace_repo_from_session
from util import checksums, changes
from util.http import abort, exact_abort
from auth.permissions import (ReadRepositoryPermission,
                              ModifyRepositoryPermission)
from data import model
from util import gzipstream


registry = Blueprint('registry', __name__)

logger = logging.getLogger(__name__)
profile = logging.getLogger('application.profiler')


class SocketReader(object):
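  """File-like reader that wraps a stream and invokes registered handler callbacks on each chunk read."""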
  def __init__(self, fp):
    self._fp = fp
    self.handlers = []

  def add_handler(self, handler):
    self.handlers.append(handler)

  def read(self, n=-1):
    buf = self._fp.read(n)
    if not buf:
      return ''
    for handler in self.handlers:
      handler(buf)
    return buf

  def tell(self):
    raise IOError('Stream is not seekable.')


def image_is_uploading(repo_image):
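  """Returns whether the given repository image is still marked as uploading."""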
  if repo_image is None:
    return False

  return repo_image.storage.uploading


def set_uploading_flag(repo_image, is_image_uploading):
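  """Sets and saves the uploading flag on the image's storage record."""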
  repo_image.storage.uploading = is_image_uploading
  repo_image.storage.save()


def require_completion(f):
  """Decorator that ensures the image push has correctly finished before serving the image."""
  @wraps(f)
  def wrapper(namespace, repository, *args, **kwargs):
    image_id = kwargs['image_id']
    repo_image = model.get_repo_image(namespace, repository, image_id)
    if image_is_uploading(repo_image):
      abort(400, 'Image %(image_id)s is being uploaded, retry later',
            issue='upload-in-progress', image_id=kwargs['image_id'])

    return f(namespace, repository, *args, **kwargs)
  return wrapper


def set_cache_headers(f):
  """Returns HTTP headers suitable for caching."""
  @wraps(f)
  def wrapper(*args, **kwargs):
    # Set TTL to 1 year by default
    ttl = 31536000
    expires = datetime.fromtimestamp(int(time()) + ttl)
    expires = expires.strftime('%a, %d %b %Y %H:%M:%S GMT')
    headers = {
      'Cache-Control': 'public, max-age={0}'.format(ttl),
      'Expires': expires,
      'Last-Modified': 'Thu, 01 Jan 1970 00:00:00 GMT',
    }
    if 'If-Modified-Since' in request.headers:
      response = make_response('Not modified', 304)
      response.headers.extend(headers)
      return response
    kwargs['headers'] = headers
    # Prevent the cookie from being sent when the object is cacheable
    session.modified = False
    return f(*args, **kwargs)
  return wrapper


@registry.route('/images/<image_id>/layer', methods=['HEAD'])
@process_auth
@extract_namespace_repo_from_session
@require_completion
@set_cache_headers
def head_image_layer(namespace, repository, image_id, headers):
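  """Handles HEAD requests for an image layer, returning caching headers and, when supported by
  the storage engine, an Accept-Ranges header."""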
  permission = ReadRepositoryPermission(namespace, repository)

  profile.debug('Checking repo permissions')
  if permission.can() or model.repository_is_public(namespace, repository):
    profile.debug('Looking up repo image')
    repo_image = model.get_repo_image(namespace, repository, image_id)
    if not repo_image:
      profile.debug('Image not found')
      abort(404, 'Image %(image_id)s not found', issue='unknown-image',
            image_id=image_id)

    extra_headers = {}

    # Add the Accept-Ranges header if the storage engine supports resumable
    # downloads.
    if store.get_supports_resumable_downloads(repo_image.storage.locations):
      profile.debug('Storage supports resumable downloads')
      extra_headers['Accept-Ranges'] = 'bytes'

    resp = make_response('')
    resp.headers.extend(headers)
    resp.headers.extend(extra_headers)
    return resp

  abort(403)


@registry.route('/images/<image_id>/layer', methods=['GET'])
@process_auth
@extract_namespace_repo_from_session
@require_completion
@set_cache_headers
def get_image_layer(namespace, repository, image_id, headers):
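  """Serves the layer data for an image, either by redirecting to a direct download URL or by
  streaming the layer from storage."""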
  permission = ReadRepositoryPermission(namespace, repository)

  profile.debug('Checking repo permissions')
  if permission.can() or model.repository_is_public(namespace, repository):
    profile.debug('Looking up repo image')
    repo_image = model.get_repo_image(namespace, repository, image_id)

    profile.debug('Looking up the layer path')
    try:
      path = store.image_layer_path(repo_image.storage.uuid)

      profile.debug('Looking up the direct download URL')
      direct_download_url = store.get_direct_download_url(repo_image.storage.locations, path)

      if direct_download_url:
        profile.debug('Returning direct download URL')
        resp = redirect(direct_download_url)
        return resp

      profile.debug('Streaming layer data')

      # TODO: DATABASE: We should disconnect from the database here, so that
      # we're not holding the DB handle during this long download.
      return Response(store.stream_read(repo_image.storage.locations, path), headers=headers)
    except (IOError, AttributeError):
      profile.debug('Image not found')
      abort(404, 'Image %(image_id)s not found', issue='unknown-image',
            image_id=image_id)

  abort(403)


@registry.route('/images/<image_id>/layer', methods=['PUT'])
@process_auth
@extract_namespace_repo_from_session
def put_image_layer(namespace, repository, image_id):
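  """Receives and stores the layer data for an image, computing its size and checksums as the
  data is streamed to storage."""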
  profile.debug('Checking repo permissions')
  permission = ModifyRepositoryPermission(namespace, repository)
  if not permission.can():
    abort(403)

  profile.debug('Retrieving image')
  repo_image = model.get_repo_image(namespace, repository, image_id)
  try:
    profile.debug('Retrieving image data')
    uuid = repo_image.storage.uuid
    json_data = store.get_content(repo_image.storage.locations, store.image_json_path(uuid))
  except (IOError, AttributeError):
    abort(404, 'Image %(image_id)s not found', issue='unknown-image',
          image_id=image_id)

  profile.debug('Retrieving image path info')
  layer_path = store.image_layer_path(uuid)

  if (store.exists(repo_image.storage.locations, layer_path) and not
      image_is_uploading(repo_image)):
    exact_abort(409, 'Image already exists')

  profile.debug('Storing layer data')

  input_stream = request.stream
  if request.headers.get('transfer-encoding') == 'chunked':
    # Careful: this may only work with WSGI servers that support chunked
    # transfer encoding (e.g. gunicorn).
    input_stream = request.environ['wsgi.input']

  # Create a socket reader to read the input stream containing the layer data.
  sr = SocketReader(input_stream)

  # Add a handler that copies the data into a temp file. This is used to calculate the tarsum,
  # which is only needed for older versions of Docker.
  requires_tarsum = session.get('checksum_format') == 'tarsum'
  if requires_tarsum:
    tmp, tmp_hndlr = store.temp_store_handler()
    sr.add_handler(tmp_hndlr)

  # Add a handler to compute the compressed and uncompressed sizes of the layer.
  size_info, size_hndlr = gzipstream.calculate_size_handler()
  sr.add_handler(size_hndlr)

  # Add a handler which computes the checksum.
  h, sum_hndlr = checksums.simple_checksum_handler(json_data)
  sr.add_handler(sum_hndlr)

  # TODO: DATABASE: We should disconnect from the database here and reconnect AFTER, so that
  # we're not holding the DB handle during this long upload.

  # Stream write the data to storage.
  store.stream_write(repo_image.storage.locations, layer_path, sr)

  # Append the computed checksum.
  csums = []
  csums.append('sha256:{0}'.format(h.hexdigest()))

  try:
    # Save the size of the image.
    model.set_image_size(image_id, namespace, repository, size_info.compressed_size,
                         size_info.uncompressed_size)

    if requires_tarsum:
      tmp.seek(0)
      csums.append(checksums.compute_tarsum(tmp, json_data))
      tmp.close()
  except (IOError, checksums.TarError) as e:
    logger.debug('put_image_layer: Error when computing tarsum '
                 '{0}'.format(e))

  if repo_image.storage.checksum is None:
    # We don't have a checksum stored yet, so skipping the check is fine.
    # Don't remove the upload marker though; the image is not downloadable yet.
    session['checksum'] = csums
    return make_response('true', 200)

  checksum = repo_image.storage.checksum

  # Check whether the stored checksum matches one of the checksums we computed.
  if checksum not in csums:
    logger.warning('put_image_layer: Wrong checksum')
    abort(400, 'Checksum mismatch; ignoring the layer for image %(image_id)s',
          issue='checksum-mismatch', image_id=image_id)

  # The checksum is OK; remove the upload marker.
  set_uploading_flag(repo_image, False)

  # The layer is ready for download, send a job to the work queue to
  # process it.
  profile.debug('Adding layer to diff queue')
  image_diff_queue.put([namespace, repository, image_id], json.dumps({
    'namespace': namespace,
    'repository': repository,
    'image_id': image_id,
  }))

  return make_response('true', 200)


@registry.route('/images/<image_id>/checksum', methods=['PUT'])
@process_auth
@extract_namespace_repo_from_session
def put_image_checksum(namespace, repository, image_id):
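  """Stores the client-supplied checksum for an image and, if it matches a checksum computed
  during the layer upload, marks the image as fully uploaded and queues it for diff processing."""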
  profile.debug('Checking repo permissions')
  permission = ModifyRepositoryPermission(namespace, repository)
  if not permission.can():
    abort(403)

  # Docker Version < 0.10 (tarsum+sha):
  old_checksum = request.headers.get('X-Docker-Checksum')

  # Docker Version >= 0.10 (sha):
  new_checksum = request.headers.get('X-Docker-Checksum-Payload')

  # Store whether we need to calculate the tarsum.
  if new_checksum:
    session['checksum_format'] = 'sha256'
  else:
    session['checksum_format'] = 'tarsum'

  checksum = new_checksum or old_checksum
  if not checksum:
    abort(400, "Missing checksum for image %(image_id)s", issue='missing-checksum',
          image_id=image_id)

  if not session.get('checksum'):
    abort(400, 'Checksum not found in Cookie for image %(image_id)s',
          issue='missing-checksum-cookie', image_id=image_id)

  profile.debug('Looking up repo image')
  repo_image = model.get_repo_image(namespace, repository, image_id)
  if not repo_image or not repo_image.storage:
    abort(404, 'Image not found: %(image_id)s', issue='unknown-image', image_id=image_id)

  uuid = repo_image.storage.uuid

  profile.debug('Looking up repo layer data')
  if not store.exists(repo_image.storage.locations, store.image_json_path(uuid)):
    abort(404, 'Image not found: %(image_id)s', issue='unknown-image', image_id=image_id)

  profile.debug('Marking image path')
  if not image_is_uploading(repo_image):
    abort(409, 'Cannot set checksum for image %(image_id)s',
          issue='image-write-error', image_id=image_id)

  profile.debug('Storing image checksum')
  err = store_checksum(repo_image.storage, checksum)
  if err:
    abort(400, err)

  if checksum not in session.get('checksum', []):
    logger.debug('session checksums: %s' % session.get('checksum', []))
    logger.debug('client supplied checksum: %s' % checksum)
    logger.debug('put_image_checksum: Wrong checksum')
    abort(400, 'Checksum mismatch for image: %(image_id)s',
          issue='checksum-mismatch', image_id=image_id)

  # The checksum is OK; remove the upload marker.
  set_uploading_flag(repo_image, False)

  # The layer is ready for download, send a job to the work queue to
  # process it.
  profile.debug('Adding layer to diff queue')
  image_diff_queue.put([namespace, repository, image_id], json.dumps({
    'namespace': namespace,
    'repository': repository,
    'image_id': image_id,
  }))

  return make_response('true', 200)


@registry.route('/images/<image_id>/json', methods=['GET'])
@process_auth
@extract_namespace_repo_from_session
@require_completion
@set_cache_headers
def get_image_json(namespace, repository, image_id, headers):
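  """Returns the JSON metadata for an image, along with its size in the X-Docker-Size header."""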
  profile.debug('Checking repo permissions')
  permission = ReadRepositoryPermission(namespace, repository)
  if not permission.can() and not model.repository_is_public(namespace,
                                                             repository):
    abort(403)

  profile.debug('Looking up repo image')
  repo_image = model.get_repo_image(namespace, repository, image_id)

  profile.debug('Looking up repo layer data')
  try:
    uuid = repo_image.storage.uuid
    data = store.get_content(repo_image.storage.locations, store.image_json_path(uuid))
  except (IOError, AttributeError):
    flask_abort(404)

  profile.debug('Looking up repo layer size')
  size = repo_image.storage.image_size
  headers['X-Docker-Size'] = str(size)

  response = make_response(data, 200)
  response.headers.extend(headers)
  return response


@registry.route('/images/<image_id>/ancestry', methods=['GET'])
@process_auth
@extract_namespace_repo_from_session
@require_completion
@set_cache_headers
def get_image_ancestry(namespace, repository, image_id, headers):
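  """Returns the ancestry of an image: the list of image IDs from the image itself back to its
  base image."""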
  profile.debug('Checking repo permissions')
  permission = ReadRepositoryPermission(namespace, repository)
  if not permission.can() and not model.repository_is_public(namespace,
                                                             repository):
    abort(403)

  profile.debug('Looking up repo image')
  repo_image = model.get_repo_image(namespace, repository, image_id)

  profile.debug('Looking up image data')
  try:
    uuid = repo_image.storage.uuid
    data = store.get_content(repo_image.storage.locations, store.image_ancestry_path(uuid))
  except (IOError, AttributeError):
    abort(404, 'Image %(image_id)s not found', issue='unknown-image',
          image_id=image_id)

  profile.debug('Converting to <-> from JSON')
  response = make_response(json.dumps(json.loads(data)), 200)
  response.headers.extend(headers)

  profile.debug('Done')
  return response


def generate_ancestry(image_id, uuid, locations, parent_id=None, parent_uuid=None,
                      parent_locations=None):
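  """Writes the ancestry file for an image, prepending its ID to the parent's ancestry list (or
  creating a single-entry list if there is no parent)."""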
  if not parent_id:
    store.put_content(locations, store.image_ancestry_path(uuid), json.dumps([image_id]))
    return

  data = store.get_content(parent_locations, store.image_ancestry_path(parent_uuid))
  data = json.loads(data)
  data.insert(0, image_id)
  store.put_content(locations, store.image_ancestry_path(uuid), json.dumps(data))


def store_checksum(image_storage, checksum):
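  """Validates the checksum format and persists it on the given image storage record. Returns an
  error message on failure and None on success."""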
  checksum_parts = checksum.split(':')
  if len(checksum_parts) != 2:
    return 'Invalid checksum format'

  # We store the checksum
  image_storage.checksum = checksum
  image_storage.save()


@registry.route('/images/<image_id>/json', methods=['PUT'])
@process_auth
@extract_namespace_repo_from_session
def put_image_json(namespace, repository, image_id):
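  """Stores the JSON metadata for an image, saves its metadata to the database and generates its
  ancestry file."""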
  profile.debug('Checking repo permissions')
  permission = ModifyRepositoryPermission(namespace, repository)
  if not permission.can():
    abort(403)

  profile.debug('Parsing image JSON')
  data = None
  try:
    data = json.loads(request.data.decode('utf8'))
  except ValueError:
    pass

  if not data or not isinstance(data, dict):
    abort(400, 'Invalid JSON for image: %(image_id)s\nJSON: %(json)s',
          issue='invalid-request', image_id=image_id, json=request.data)

  if 'id' not in data:
    abort(400, 'Missing key `id` in JSON for image: %(image_id)s',
          issue='invalid-request', image_id=image_id)

  profile.debug('Looking up repo image')
  repo_image = model.get_repo_image(namespace, repository, image_id)
  if not repo_image:
    profile.debug('Image not found')
    abort(404, 'Image %(image_id)s not found', issue='unknown-image',
          image_id=image_id)

  uuid = repo_image.storage.uuid

  if image_id != data['id']:
    abort(400, 'JSON data contains invalid id for image: %(image_id)s',
          issue='invalid-request', image_id=image_id)

  parent_id = data.get('parent')

  parent_image = None
  if parent_id:
    profile.debug('Looking up parent image')
    parent_image = model.get_repo_image(namespace, repository, parent_id)

  parent_uuid = parent_image and parent_image.storage.uuid
  parent_locations = parent_image and parent_image.storage.locations

  if parent_id:
    profile.debug('Looking up parent image data')

  if (parent_id and not
      store.exists(parent_locations, store.image_json_path(parent_uuid))):
    abort(400, 'Image %(image_id)s depends on non existing parent image %(parent_id)s',
          issue='invalid-request', image_id=image_id, parent_id=parent_id)

  profile.debug('Looking up image storage paths')
  json_path = store.image_json_path(uuid)

  profile.debug('Checking if image already exists')
  if (store.exists(repo_image.storage.locations, json_path) and not
      image_is_uploading(repo_image)):
    exact_abort(409, 'Image already exists')

  set_uploading_flag(repo_image, True)

  # If we reach this point, this is either a new image or a retry of a failed push.
  # Save the metadata.
  command_list = data.get('container_config', {}).get('Cmd', None)
  command = json.dumps(command_list) if command_list else None

  profile.debug('Setting image metadata')
  model.set_image_metadata(image_id, namespace, repository,
                           data.get('created'), data.get('comment'), command,
                           parent_image)

  profile.debug('Putting json path')
  store.put_content(repo_image.storage.locations, json_path, request.data)

  profile.debug('Generating image ancestry')

  try:
    generate_ancestry(image_id, uuid, repo_image.storage.locations, parent_id, parent_uuid,
                      parent_locations)
  except IOError as ioe:
    profile.debug('Error when generating ancestry: %s' % ioe.message)
    abort(404)

  profile.debug('Done')
  return make_response('true', 200)


def process_image_changes(namespace, repository, image_id):
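  """Computes the added/changed/removed file diffs and the collapsed filesystem trie for an image
  layer, storing both and returning the trie path together with its storage locations."""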
  logger.debug('Generating diffs for image: %s' % image_id)

  repo_image = model.get_repo_image(namespace, repository, image_id)
  if not repo_image:
    logger.warning('No image for id: %s', image_id)
    return None, None

  uuid = repo_image.storage.uuid

  image_diffs_path = store.image_file_diffs_path(uuid)
  image_trie_path = store.image_file_trie_path(uuid)

  if store.exists(repo_image.storage.locations, image_diffs_path):
    logger.debug('Diffs already exist for image: %s' % image_id)
    return image_trie_path, repo_image.storage.locations

  image = model.get_image_by_id(namespace, repository, image_id)
  parents = model.get_parent_images(namespace, repository, image)

  # Compute the diffs and fs for the parent first if necessary
  parent_trie_path = None
  if parents:
    parent_trie_path, parent_locations = process_image_changes(namespace, repository,
                                                               parents[-1].docker_image_id)

  # Read in the collapsed layer state of the filesystem for the parent
  parent_trie = changes.empty_fs()
  if parent_trie_path:
    parent_trie_bytes = store.get_content(parent_locations, parent_trie_path)
    parent_trie.frombytes(parent_trie_bytes)

  # Read in the file entries from the layer tar file
  layer_path = store.image_layer_path(uuid)
  with store.stream_read_file(image.storage.locations, layer_path) as layer_tar_stream:
    removed_files = set()
    layer_files = changes.files_and_dirs_from_tar(layer_tar_stream,
                                                  removed_files)

    new_metadata = changes.compute_new_diffs_and_fs(parent_trie, layer_files,
                                                    removed_files)
    (new_trie, added, changed, removed) = new_metadata

    # Write out the new trie
    store.put_content(image.storage.locations, image_trie_path, new_trie.tobytes())

    # Write out the diffs
    diffs = {}
    sections = ('added', 'changed', 'removed')
    for section, source_trie in zip(sections, new_metadata[1:]):
      diffs[section] = list(source_trie)
      diffs[section].sort()
    store.put_content(image.storage.locations, image_diffs_path, json.dumps(diffs, indent=2))

  return image_trie_path, image.storage.locations