Move shared V1/V2 code into common methods and fix verbs

Author: Joseph Schorr
Date:   2015-08-13 17:14:17 -04:00
Parent: 0e93dbb832
Commit: d246e68e68

5 changed files with 42 additions and 28 deletions


@@ -360,6 +360,27 @@ def get_repo_image_by_storage_checksum(namespace, repository_name, storage_check
     raise InvalidImageException(msg)
 
 
+def get_image_json(image, store):
+  """ Returns the JSON definition data for this image. """
+  if image.v1_json_metadata:
+    return image.v1_json_metadata
+
+  return store.get_content(image.storage.locations, store.image_json_path(image.storage.uuid))
+
+
+def get_image_ancestors(image, include_image=True):
+  """ Returns a query of the full ancestors of an image, including itself. """
+  ancestors = image.ancestors.split('/')[1:-1]
+  image_ids = [ancestor_id for ancestor_id in ancestors if ancestor_id]
+  if include_image:
+    image_ids.append(image.id)
+
+  if not image_ids:
+    return []
+
+  return Image.select().where(Image.id << image_ids)
+
+
 def synthesize_v1_image(namespace, repository_name, storage_checksum, docker_image_id,
                         created_date_str, comment, command, v1_json_metadata, parent_docker_id):
   """ Find an existing image with this docker image id, and if none exists, write one with the


@@ -216,3 +216,14 @@ def get_repo_storage_by_checksum(namespace, repository_name, checksum):
     return _get_storage(filter_to_repo_and_checksum)
   except InvalidImageException:
     raise InvalidImageException('No storage found with checksum {0}'.format(checksum))
+
+
+def get_layer_path(storage_record, store):
+  """ Returns the path in the storage engine to the layer data referenced by the storage row. """
+  if not storage_record.cas_path:
+    return store.v1_image_layer_path(storage_record.uuid)
+
+  return store.blob_path(storage_record.checksum)
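
The verbs hunk below switches to this helper when locating derived-image data; a hedged sketch of that call pattern follows, where derived stands in for a hypothetical derived-image storage row.

# Sketch only: derived is a hypothetical Storage row; store is the configured storage engine.
from app import storage as store
from data import model

# CAS-backed rows (cas_path set) resolve via blob_path(checksum); older rows keep the
# per-uuid v1 image layer path, so callers no longer need to know which scheme applies.
derived_layer_path = model.storage.get_layer_path(derived, store)
download_url = store.get_direct_download_url(derived.locations, derived_layer_path)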


@@ -168,14 +168,13 @@ def put_image_layer(namespace, repository, image_id):
   repo_image = model.image.get_repo_image_extended(namespace, repository, image_id)
   try:
     logger.debug('Retrieving image data')
-    uuid = repo_image.storage.uuid
-    json_data = (repo_image.v1_json_metadata or
-                 store.get_content(repo_image.storage.locations, store.image_json_path(uuid)))
+    json_data = model.image.get_image_json(repo_image, store)
   except (IOError, AttributeError):
     logger.exception('Exception when retrieving image data')
     abort(404, 'Image %(image_id)s not found', issue='unknown-image',
           image_id=image_id)
 
+  uuid = repo_image.storage.uuid
   layer_path = store.v1_image_layer_path(uuid)
   logger.info('Storing layer at v1 path: %s', layer_path)
@@ -296,11 +295,8 @@ def put_image_checksum(namespace, repository, image_id):
   if not repo_image or not repo_image.storage:
     abort(404, 'Image not found: %(image_id)s', issue='unknown-image', image_id=image_id)
 
-  uuid = repo_image.storage.uuid
-
   logger.debug('Looking up repo layer data')
-  if (repo_image.v1_json_metadata is None and
-      not store.exists(repo_image.storage.locations, store.image_json_path(uuid))):
+  if not model.image.get_image_json(repo_image, store):
     abort(404, 'Image not found: %(image_id)s', issue='unknown-image', image_id=image_id)
 
   logger.debug('Marking image path')
@@ -353,9 +349,7 @@ def get_image_json(namespace, repository, image_id, headers):
   logger.debug('Looking up repo layer data')
   try:
-    uuid = repo_image.storage.uuid
-    data = (repo_image.v1_json_metadata or
-            store.get_content(repo_image.storage.locations, store.image_json_path(uuid)))
+    data = repo_image.get_image_json(repo_image, store)
   except (IOError, AttributeError):
     flask_abort(404)
@@ -469,10 +463,7 @@ def put_image_json(namespace, repository, image_id):
     abort(400, 'Image %(image_id)s depends on non existing parent image %(parent_id)s',
           issue='invalid-request', image_id=image_id, parent_id=parent_id)
 
-  json_path = store.image_json_path(repo_image.storage.uuid)
-  if (not image_is_uploading(repo_image) and
-      (repo_image.v1_json_metadata is not None or
-       store.exists(repo_image.storage.locations, json_path))):
+  if not image_is_uploading(repo_image) and model.image.get_image_json(repo_image, store):
     exact_abort(409, 'Image already exists')
 
   set_uploading_flag(repo_image, True)


@@ -334,10 +334,7 @@ def __get_and_backfill_image_metadata(image):
   if image_metadata is None:
     logger.warning('Loading metadata from storage for image id: %s', image.id)
-    metadata_path = storage.image_json_path(image.storage.uuid)
-    image_metadata = storage.get_content(image.storage.locations, metadata_path)
-
-    image.v1_json_metadata = image_metadata
+    image.v1_json_metadata = model.image.get_image_json(image, storage)
 
     logger.info('Saving backfilled metadata for image id: %s', image.id)
     image.save()
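
A minimal sketch of the backfill pattern this hunk now delegates to the shared helper, assuming an Image row whose v1_json_metadata column is still empty.

# Sketch only: image is a hypothetical Image row loaded elsewhere by the backfill worker.
from app import storage
from data import model

if image.v1_json_metadata is None:
  # The helper falls through to the legacy JSON file in storage when the column is empty.
  image.v1_json_metadata = model.image.get_image_json(image, storage)
  image.save()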


@@ -119,13 +119,10 @@ def _verify_repo_verb(store, namespace, repository, tag, verb, checker=None):
     abort(404)
 
   # If there is a data checker, call it first.
-  uuid = repo_image.storage.uuid
-
   image_json = None
   if checker is not None:
-    image_json_data = store.get_content(repo_image.storage.locations, store.image_json_path(uuid))
-    image_json = json.loads(image_json_data)
+    image_json = json.loads(model.image.get_image_json(repo_image, store))
 
     if not checker(image_json):
       logger.debug('Check mismatch on %s/%s:%s, verb %s', namespace, repository, tag, verb)
       abort(404)
@@ -172,7 +169,7 @@ def _repo_verb(namespace, repository, tag, verb, formatter, sign=False, checker=
   if not derived.uploading:
     logger.debug('Derived %s image %s exists in storage', verb, derived.uuid)
-    derived_layer_path = store.image_layer_path(derived.uuid)
+    derived_layer_path = model.storage.get_layer_path(derived, store)
     download_url = store.get_direct_download_url(derived.locations, derived_layer_path)
     if download_url:
       logger.debug('Redirecting to download URL for derived %s image %s', verb, derived.uuid)
@@ -185,16 +182,13 @@ def _repo_verb(namespace, repository, tag, verb, formatter, sign=False, checker=
     return send_file(store.stream_read_file(derived.locations, derived_layer_path))
 
   # Load the ancestry for the image.
-  uuid = repo_image.storage.uuid
   logger.debug('Building and returning derived %s image %s', verb, derived.uuid)
-  ancestry_data = store.get_content(repo_image.storage.locations, store.image_ancestry_path(uuid))
-  full_image_list = json.loads(ancestry_data)
+  full_image_list = model.image.get_image_ancestors(repo_image)
 
   # Load the image's JSON layer.
   if not image_json:
-    image_json_data = store.get_content(repo_image.storage.locations, store.image_json_path(uuid))
-    image_json = json.loads(image_json_data)
+    image_json = json.loads(model.image.get_image_json(repo_image, store))
 
   # Calculate a synthetic image ID.
   synthetic_image_id = hashlib.sha256(tag_image.docker_image_id + ':' + verb).hexdigest()
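
For concreteness, a small worked example of the synthetic image ID computed on the last line above; the docker image id and verb here are made-up values.

import hashlib

# Hypothetical inputs; the real ones come from the tag's image row and the requested verb.
docker_image_id = '0123456789abcdef'
verb = 'squash'

# Same construction as the diff (the diff targets Python 2, where the encode step is implicit).
synthetic_image_id = hashlib.sha256((docker_image_id + ':' + verb).encode('utf-8')).hexdigest()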