# pylint: disable=protected-access
import logging

from collections import defaultdict

from peewee import IntegrityError

from data import database
from data import model
from data.registry_model.interface import RegistryDataInterface
from data.registry_model.datatypes import (Tag, RepositoryReference, Manifest, LegacyImage, Label,
                                           SecurityScanStatus, ManifestLayer, Blob, DerivedImage,
                                           TorrentInfo, BlobUpload)
from image.docker.schema1 import DockerSchema1ManifestBuilder, ManifestException


logger = logging.getLogger(__name__)


class PreOCIModel(RegistryDataInterface):
  """
  PreOCIModel implements the data model for the registry API using a database schema
  before it was changed to support the OCI specification.
  """

  def find_matching_tag(self, repository_ref, tag_names):
    """ Finds an alive tag in the repository matching one of the given tag names and returns it
        or None if none.
    """
    found_tag = model.tag.find_matching_tag(repository_ref._db_id, tag_names)
    assert found_tag is None or not found_tag.hidden
    return Tag.for_repository_tag(found_tag)

  def get_most_recent_tag(self, repository_ref):
    """ Returns the most recently pushed alive tag in the repository, if any. If none, returns
        None.
    """
    found_tag = model.tag.get_most_recent_tag(repository_ref._db_id)
    assert found_tag is None or not found_tag.hidden
    return Tag.for_repository_tag(found_tag)

  def lookup_repository(self, namespace_name, repo_name, kind_filter=None):
    """ Looks up and returns a reference to the repository with the given namespace and name,
        or None if none. """
    repo = model.repository.get_repository(namespace_name, repo_name, kind_filter=kind_filter)
    return RepositoryReference.for_repo_obj(repo)

  def get_manifest_for_tag(self, tag, backfill_if_necessary=False):
    """ Returns the manifest associated with the given tag or None if none. """
    try:
      tag_manifest = database.TagManifest.get(tag_id=tag._db_id)
    except database.TagManifest.DoesNotExist:
      if backfill_if_necessary:
        return self.backfill_manifest_for_tag(tag)

      return None

    return Manifest.for_tag_manifest(tag_manifest)
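
  # A hypothetical caller sketch (the 'devtable'/'simple' names and the tag list are
  # placeholders): resolve a repository reference, find one of its alive tags, then load
  # the tag's manifest, backfilling it if the tag predates the manifest table.
  #
  #   repo_ref = pre_oci_model.lookup_repository('devtable', 'simple')
  #   if repo_ref is not None:
  #     tag = pre_oci_model.find_matching_tag(repo_ref, ['latest', 'master'])
  #     if tag is not None:
  #       manifest = pre_oci_model.get_manifest_for_tag(tag, backfill_if_necessary=True)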

  def lookup_manifest_by_digest(self, repository_ref, manifest_digest, allow_dead=False,
                                include_legacy_image=False):
    """ Looks up the manifest with the given digest under the given repository and returns it
        or None if none. """
    repo = model.repository.lookup_repository(repository_ref._db_id)
    if repo is None:
      return None

    try:
      tag_manifest = model.tag.load_manifest_by_digest(repo.namespace_user.username,
                                                       repo.name,
                                                       manifest_digest,
                                                       allow_dead=allow_dead)
    except model.tag.InvalidManifestException:
      return None

    legacy_image = None
    if include_legacy_image:
      legacy_image = self.get_legacy_image(repository_ref, tag_manifest.tag.image.docker_image_id,
                                           include_parents=True)

    return Manifest.for_tag_manifest(tag_manifest, legacy_image)

  def get_legacy_images(self, repository_ref):
    """
    Returns an iterator of all the LegacyImages defined in the matching repository.
    """
    repo = model.repository.lookup_repository(repository_ref._db_id)
    if repo is None:
      return None

    all_images = model.image.get_repository_images_without_placements(repo)
    all_images_map = {image.id: image for image in all_images}

    all_tags = model.tag.list_repository_tags(repo.namespace_user.username, repo.name)
    tags_by_image_id = defaultdict(list)
    for tag in all_tags:
      tags_by_image_id[tag.image_id].append(tag)

    return [LegacyImage.for_image(image, images_map=all_images_map, tags_map=tags_by_image_id)
            for image in all_images]

  def get_legacy_image(self, repository_ref, docker_image_id, include_parents=False,
                       include_blob=False):
    """
    Returns the matching LegacyImage under the matching repository, if any. If none,
    returns None.
    """
    repo = model.repository.lookup_repository(repository_ref._db_id)
    if repo is None:
      return None

    image = model.image.get_image(repository_ref._db_id, docker_image_id)
    if image is None:
      return None

    parent_images_map = None
    if include_parents:
      parent_images = model.image.get_parent_images(repo.namespace_user.username, repo.name, image)
      parent_images_map = {image.id: image for image in parent_images}

    blob = None
    if include_blob:
      placements = list(model.storage.get_storage_locations(image.storage.uuid))
      blob = Blob.for_image_storage(image.storage,
                                    storage_path=model.storage.get_layer_path(image.storage),
                                    placements=placements)

    return LegacyImage.for_image(image, images_map=parent_images_map, blob=blob)

  def create_manifest_label(self, manifest, key, value, source_type_name, media_type_name=None):
    """ Creates a label on the manifest with the given key and value. """
    try:
      tag_manifest = database.TagManifest.get(id=manifest._db_id)
    except database.TagManifest.DoesNotExist:
      return None

    label = model.label.create_manifest_label(tag_manifest, key, value, source_type_name,
                                              media_type_name)
    return Label.for_label(label)

  def list_manifest_labels(self, manifest, key_prefix=None):
    """ Returns all labels found on the manifest. If specified, the key_prefix will filter the
        labels returned to those keys that start with the given prefix.
    """
    labels = model.label.list_manifest_labels(manifest._db_id, prefix_filter=key_prefix)
    return [Label.for_label(l) for l in labels]

  def get_manifest_label(self, manifest, label_uuid):
    """ Returns the label with the specified UUID on the manifest or None if none. """
    return Label.for_label(model.label.get_manifest_label(label_uuid, manifest._db_id))

  def delete_manifest_label(self, manifest, label_uuid):
    """ Deletes the label with the specified UUID on the manifest. Returns the label deleted
        or None if none.
    """
    return Label.for_label(model.label.delete_manifest_label(label_uuid, manifest._db_id))
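
  # A hypothetical label round trip, assuming `manifest` was returned by one of the manifest
  # lookup methods above; the key, value and source type shown are placeholders.
  #
  #   label = pre_oci_model.create_manifest_label(manifest, 'quay.expires-after', '4w', 'api')
  #   labels = pre_oci_model.list_manifest_labels(manifest, key_prefix='quay.')
  #   pre_oci_model.delete_manifest_label(manifest, label.uuid)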

  def list_repository_tags(self, repository_ref, include_legacy_images=False):
    """
    Returns a list of all the active tags in the repository. Note that this can be a *heavy*
    operation on repositories with a lot of tags, and should be avoided for more targeted
    operations wherever possible.
    """
    # NOTE: include_legacy_images isn't used here because `list_active_repo_tags` includes the
    # information already, so we might as well just use it. However, the new model classes will
    # *not* include it by default, so we make it a parameter now.
    tags = model.tag.list_active_repo_tags(repository_ref._db_id)
    return [Tag.for_repository_tag(tag,
                                   legacy_image=LegacyImage.for_image(tag.image),
                                   manifest_digest=(tag.tagmanifest.digest
                                                    if hasattr(tag, 'tagmanifest')
                                                    else None))
            for tag in tags]

  def list_repository_tag_history(self, repository_ref, page=1, size=100, specific_tag_name=None,
                                  active_tags_only=False):
    """
    Returns the history of all tags in the repository (unless filtered). This includes tags that
    have been made inactive due to newer versions of those tags coming into service.
    """
    tags, manifest_map, has_more = model.tag.list_repository_tag_history(repository_ref._db_id,
                                                                         page, size,
                                                                         specific_tag_name,
                                                                         active_tags_only)
    return [Tag.for_repository_tag(tag, manifest_map.get(tag.id),
                                   legacy_image=LegacyImage.for_image(tag.image))
            for tag in tags], has_more
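
  # A hypothetical history query, assuming `repo_ref` is a RepositoryReference; the page
  # size and tag name are placeholders.
  #
  #   history, has_more = pre_oci_model.list_repository_tag_history(repo_ref, page=1, size=50,
  #                                                                 specific_tag_name='latest')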

  def get_repo_tag(self, repository_ref, tag_name, include_legacy_image=False):
    """
    Returns the latest, *active* tag found in the repository, with the matching name
    or None if none.
    """
    assert isinstance(tag_name, basestring)
    tag = model.tag.get_active_tag_for_repo(repository_ref._db_id, tag_name)
    if tag is None:
      return None

    legacy_image = LegacyImage.for_image(tag.image) if include_legacy_image else None
    tag_manifest = model.tag.get_tag_manifest(tag)
    manifest_digest = tag_manifest.digest if tag_manifest else None
    return Tag.for_repository_tag(tag, legacy_image=legacy_image, manifest_digest=manifest_digest)

  def retarget_tag(self, repository_ref, tag_name, manifest_or_legacy_image,
                   is_reversion=False):
    """
    Creates, updates or moves a tag to a new entry in history, pointing to the manifest or
    legacy image specified. If is_reversion is set to True, this operation is considered a
    reversion over a previous tag move operation. Returns the updated Tag or None on error.
    """
    # TODO: unify this.
    if not is_reversion:
      if isinstance(manifest_or_legacy_image, Manifest):
        raise NotImplementedError('Not yet implemented')
      else:
        model.tag.create_or_update_tag_for_repo(repository_ref._db_id, tag_name,
                                                manifest_or_legacy_image.docker_image_id)
    else:
      if isinstance(manifest_or_legacy_image, Manifest):
        model.tag.restore_tag_to_manifest(repository_ref._db_id, tag_name,
                                          manifest_or_legacy_image.digest)
      else:
        model.tag.restore_tag_to_image(repository_ref._db_id, tag_name,
                                       manifest_or_legacy_image.docker_image_id)

    # Generate a manifest for the tag, if necessary.
    tag = self.get_repo_tag(repository_ref, tag_name, include_legacy_image=True)
    if tag is None:
      return None

    self.backfill_manifest_for_tag(tag)
    return tag
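
  # A hypothetical reversion sketch: moving a tag back to a previously tagged legacy image
  # (for example, one taken from tag history) goes through retarget_tag with
  # is_reversion=True; `previous_image` below is a placeholder LegacyImage.
  #
  #   pre_oci_model.retarget_tag(repo_ref, 'latest', previous_image, is_reversion=True)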

  def delete_tag(self, repository_ref, tag_name):
    """
    Deletes the latest, *active* tag with the given name in the repository.
    """
    repo = model.repository.lookup_repository(repository_ref._db_id)
    if repo is None:
      return None

    deleted_tag = model.tag.delete_tag(repo.namespace_user.username, repo.name, tag_name)
    return Tag.for_repository_tag(deleted_tag)

  def change_repository_tag_expiration(self, tag, expiration_date):
    """ Sets the expiration date of the tag under the matching repository to that given. If the
        expiration date is None, then the tag will not expire. Returns a tuple of the previous
        expiration timestamp in seconds (if any), and whether the operation succeeded.
    """
    try:
      tag_obj = database.RepositoryTag.get(id=tag._db_id)
    except database.RepositoryTag.DoesNotExist:
      return (None, False)

    return model.tag.change_tag_expiration(tag_obj, expiration_date)
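
  # A hypothetical expiration update, assuming `tag` came from get_repo_tag; passing a
  # datetime sets the expiration and passing None clears it.
  #
  #   from datetime import datetime, timedelta
  #   previous_expiration, ok = pre_oci_model.change_repository_tag_expiration(
  #       tag, datetime.utcnow() + timedelta(weeks=4))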

  def get_legacy_images_owned_by_tag(self, tag):
    """ Returns all legacy images *solely owned and used* by the given tag. """
    try:
      tag_obj = database.RepositoryTag.get(id=tag._db_id)
    except database.RepositoryTag.DoesNotExist:
      return None

    # Collect the IDs of all images that the tag uses.
    tag_image_ids = set()
    tag_image_ids.add(tag_obj.image.id)
    tag_image_ids.update(tag_obj.image.ancestor_id_list())

    # Remove any images shared by other tags.
    for current_tag in model.tag.list_active_repo_tags(tag_obj.repository_id):
      if current_tag == tag_obj:
        continue

      tag_image_ids.discard(current_tag.image.id)
      tag_image_ids = tag_image_ids.difference(current_tag.image.ancestor_id_list())
      if not tag_image_ids:
        return []

    if not tag_image_ids:
      return []

    # Load the images we need to return.
    images = database.Image.select().where(database.Image.id << list(tag_image_ids))
    all_image_ids = set()
    for image in images:
      all_image_ids.add(image.id)
      all_image_ids.update(image.ancestor_id_list())

    # Build a map of all the images and their parents.
    images_map = {}
    all_images = database.Image.select().where(database.Image.id << list(all_image_ids))
    for image in all_images:
      images_map[image.id] = image

    return [LegacyImage.for_image(image, images_map=images_map) for image in images]

  def get_security_status(self, manifest_or_legacy_image):
    """ Returns the security status for the given manifest or legacy image or None if none. """
    image = None

    if isinstance(manifest_or_legacy_image, Manifest):
      try:
        tag_manifest = database.TagManifest.get(id=manifest_or_legacy_image._db_id)
        image = tag_manifest.tag.image
      except database.TagManifest.DoesNotExist:
        return None
    else:
      try:
        image = database.Image.get(id=manifest_or_legacy_image._db_id)
      except database.Image.DoesNotExist:
        return None

    if image.security_indexed_engine is not None and image.security_indexed_engine >= 0:
      return SecurityScanStatus.SCANNED if image.security_indexed else SecurityScanStatus.FAILED

    return SecurityScanStatus.QUEUED
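
  # A hypothetical status check, assuming `tag` was loaded with include_legacy_image=True;
  # the result is one of the SecurityScanStatus values used above (QUEUED, SCANNED or FAILED).
  #
  #   status = pre_oci_model.get_security_status(tag.legacy_image)
  #   if status == SecurityScanStatus.SCANNED:
  #     pass  # vulnerability data is available for display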

  def backfill_manifest_for_tag(self, tag):
    """ Backfills a manifest for the V1 tag specified.
        If a manifest already exists for the tag, returns that manifest.

        NOTE: This method will only be necessary until we've completed the backfill, at which point
        it should be removed.
    """
    import features

    from app import app, docker_v2_signing_key

    # Ensure that there isn't already a manifest for the tag.
    tag_manifest = model.tag.get_tag_manifest(tag._db_id)
    if tag_manifest is not None:
      return Manifest.for_tag_manifest(tag_manifest)

    # Create the manifest.
    try:
      tag_obj = database.RepositoryTag.get(id=tag._db_id)
    except database.RepositoryTag.DoesNotExist:
      return None

    assert not tag_obj.hidden

    repo = tag_obj.repository
    namespace_name = repo.namespace_user.username
    repo_name = repo.name

    # Find the v1 metadata for this image and its parents.
    repo_image = tag_obj.image
    parents = model.image.get_parent_images(namespace_name, repo_name, repo_image)

    # If the manifest is being generated under the library namespace, then we make its namespace
    # empty.
    manifest_namespace = namespace_name
    if features.LIBRARY_SUPPORT and namespace_name == app.config['LIBRARY_NAMESPACE']:
      manifest_namespace = ''

    # Create and populate the manifest builder
    builder = DockerSchema1ManifestBuilder(manifest_namespace, repo_name, tag.name)

    # Add the leaf layer
    builder.add_layer(repo_image.storage.content_checksum, repo_image.v1_json_metadata)

    for parent_image in parents:
      builder.add_layer(parent_image.storage.content_checksum, parent_image.v1_json_metadata)

    # Sign the manifest with our signing key.
    manifest = builder.build(docker_v2_signing_key)

    # Write the manifest to the DB.
    blob_query = model.storage.lookup_repo_storages_by_content_checksum(repo,
                                                                        manifest.checksums)

    storage_map = {blob.content_checksum: blob.id for blob in blob_query}
    try:
      tag_manifest, _ = model.tag.associate_generated_tag_manifest(namespace_name, repo_name,
                                                                   tag.name, manifest, storage_map)
    except IntegrityError:
      tag_manifest = model.tag.get_tag_manifest(tag_obj)

    return Manifest.for_tag_manifest(tag_manifest)

  def is_namespace_enabled(self, namespace_name):
    """ Returns whether the given namespace exists and is enabled. """
    namespace = model.user.get_namespace_user(namespace_name)
    return namespace is not None and namespace.enabled

  def list_manifest_layers(self, manifest, include_placements=False):
    """ Returns an *ordered list* of the layers found in the manifest, starting at the base and
        working towards the leaf, including the associated Blob and its placements (if specified).
        Returns None if the manifest could not be parsed and validated.
    """
    try:
      parsed = manifest.get_parsed_manifest()
    except ManifestException:
      logger.exception('Could not parse and validate manifest `%s`', manifest._db_id)
      return None

    try:
      tag_manifest = database.TagManifest.get(id=manifest._db_id)
    except database.TagManifest.DoesNotExist:
      logger.exception('Could not find tag manifest for manifest `%s`', manifest._db_id)
      return None

    repo = tag_manifest.tag.repository
    blob_query = model.storage.lookup_repo_storages_by_content_checksum(repo, parsed.checksums)
    storage_map = {blob.content_checksum: blob for blob in blob_query}

    manifest_layers = []
    for layer in parsed.layers:
      digest_str = str(layer.digest)
      if digest_str not in storage_map:
        logger.error('Missing digest `%s` for manifest `%s`', layer.digest, manifest._db_id)
        return None

      image_storage = storage_map[digest_str]
      assert image_storage.cas_path is not None

      placements = None
      if include_placements:
        placements = list(model.storage.get_storage_locations(image_storage.uuid))

      blob = Blob.for_image_storage(image_storage,
                                    storage_path=model.storage.get_layer_path(image_storage),
                                    placements=placements)
      manifest_layers.append(ManifestLayer(layer, blob))

    return manifest_layers
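
  # A hypothetical walk over a manifest's layers, assuming `manifest` came from
  # get_manifest_for_tag; each returned entry pairs the parsed schema1 layer with its
  # backing Blob (and placements, when requested).
  #
  #   layers = pre_oci_model.list_manifest_layers(manifest, include_placements=True)
  #   for manifest_layer in layers or []:
  #     print(manifest_layer.layer.digest)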

  def lookup_derived_image(self, manifest, verb, varying_metadata=None, include_placements=False):
    """
    Looks up the derived image for the given manifest, verb and optional varying metadata and
    returns it or None if none.
    """
    try:
      tag_manifest = database.TagManifest.get(id=manifest._db_id)
    except database.TagManifest.DoesNotExist:
      logger.exception('Could not find tag manifest for manifest `%s`', manifest._db_id)
      return None

    repo_image = tag_manifest.tag.image
    derived = model.image.find_derived_storage_for_image(repo_image, verb, varying_metadata)
    return self._build_derived(derived, verb, varying_metadata, include_placements)

  def lookup_or_create_derived_image(self, manifest, verb, storage_location, varying_metadata=None,
                                     include_placements=False):
    """
    Looks up the derived image for the given manifest, verb and optional varying metadata
    and returns it. If none exists, a new derived image is created.
    """
    try:
      tag_manifest = database.TagManifest.get(id=manifest._db_id)
    except database.TagManifest.DoesNotExist:
      logger.exception('Could not find tag manifest for manifest `%s`', manifest._db_id)
      return None

    repo_image = tag_manifest.tag.image
    derived = model.image.find_or_create_derived_storage(repo_image, verb, storage_location,
                                                         varying_metadata)
    return self._build_derived(derived, verb, varying_metadata, include_placements)

  def _build_derived(self, derived, verb, varying_metadata, include_placements):
    if derived is None:
      return None

    derived_storage = derived.derivative
    placements = None
    if include_placements:
      placements = list(model.storage.get_storage_locations(derived_storage.uuid))

    blob = Blob.for_image_storage(derived_storage,
                                  storage_path=model.storage.get_layer_path(derived_storage),
                                  placements=placements)

    return DerivedImage.for_derived_storage(derived, verb, varying_metadata, blob)

  def get_derived_image_signature(self, derived_image, signer_name):
    """
    Returns the signature associated with the derived image and a specific signer or None if none.
    """
    try:
      derived_storage = database.DerivedStorageForImage.get(id=derived_image._db_id)
    except database.DerivedStorageForImage.DoesNotExist:
      return None

    storage = derived_storage.derivative
    signature_entry = model.storage.lookup_storage_signature(storage, signer_name)
    if signature_entry is None:
      return None

    return signature_entry.signature

  def set_derived_image_signature(self, derived_image, signer_name, signature):
    """
    Sets the calculated signature for the given derived image and signer to that specified.
    """
    try:
      derived_storage = database.DerivedStorageForImage.get(id=derived_image._db_id)
    except database.DerivedStorageForImage.DoesNotExist:
      return None

    storage = derived_storage.derivative
    signature_entry = model.storage.find_or_create_storage_signature(storage, signer_name)
    signature_entry.signature = signature
    signature_entry.uploading = False
    signature_entry.save()

  def delete_derived_image(self, derived_image):
    """
    Deletes a derived image and all of its storage.
    """
    try:
      derived_storage = database.DerivedStorageForImage.get(id=derived_image._db_id)
    except database.DerivedStorageForImage.DoesNotExist:
      return None

    model.image.delete_derived_storage(derived_storage)

  def set_derived_image_size(self, derived_image, compressed_size):
    """
    Sets the compressed size on the given derived image.
    """
    try:
      derived_storage = database.DerivedStorageForImage.get(id=derived_image._db_id)
    except database.DerivedStorageForImage.DoesNotExist:
      return None

    storage_entry = derived_storage.derivative
    storage_entry.image_size = compressed_size
    storage_entry.uploading = False
    storage_entry.save()

  def get_torrent_info(self, blob):
    """
    Returns the torrent information associated with the given blob or None if none.
    """
    try:
      image_storage = database.ImageStorage.get(id=blob._db_id)
    except database.ImageStorage.DoesNotExist:
      return None

    try:
      torrent_info = model.storage.get_torrent_info(image_storage)
    except model.TorrentInfoDoesNotExist:
      return None

    return TorrentInfo.for_torrent_info(torrent_info)

  def set_torrent_info(self, blob, piece_length, pieces):
    """
    Sets the torrent information associated with the given blob to that specified.
    """
    try:
      image_storage = database.ImageStorage.get(id=blob._db_id)
    except database.ImageStorage.DoesNotExist:
      return None

    torrent_info = model.storage.save_torrent_info(image_storage, piece_length, pieces)
    return TorrentInfo.for_torrent_info(torrent_info)

  def get_repo_blob_by_digest(self, repository_ref, blob_digest, include_placements=False):
    """
    Returns the blob in the repository with the given digest, if any, or None if none. Note that
    there may be multiple records in the same repository for the same blob digest, so the return
    value of this function may change.
    """
    try:
      image_storage = model.blob.get_repository_blob_by_digest(repository_ref._db_id, blob_digest)
    except model.BlobDoesNotExist:
      return None

    assert image_storage.cas_path is not None

    placements = None
    if include_placements:
      placements = list(model.storage.get_storage_locations(image_storage.uuid))

    return Blob.for_image_storage(image_storage,
                                  storage_path=model.storage.get_layer_path(image_storage),
                                  placements=placements)

  def create_blob_upload(self, repository_ref, new_upload_id, location_name, storage_metadata):
    """ Creates a new blob upload and returns a reference. If the blob upload could not be
        created, returns None. """
    repo = model.repository.lookup_repository(repository_ref._db_id)
    if repo is None:
      return None

    try:
      upload_record = model.blob.initiate_upload(repo.namespace_user.username, repo.name,
                                                 new_upload_id, location_name, storage_metadata)
      return BlobUpload.for_upload(upload_record)
    except database.Repository.DoesNotExist:
      return None

  def lookup_blob_upload(self, repository_ref, blob_upload_id):
    """ Looks up the blob upload with the given ID under the specified repository and returns it
        or None if none.
    """
    upload_record = model.blob.get_blob_upload_by_uuid(blob_upload_id)
    if upload_record is None:
      return None

    return BlobUpload.for_upload(upload_record)

  def update_blob_upload(self, blob_upload, uncompressed_byte_count, piece_hashes, piece_sha_state,
                         storage_metadata, byte_count, chunk_count, sha_state):
    """ Updates the fields of the blob upload to match those given. Returns the updated blob upload
        or None if the record does not exist.
    """
    upload_record = model.blob.get_blob_upload_by_uuid(blob_upload.upload_id)
    if upload_record is None:
      return None

    upload_record.uncompressed_byte_count = uncompressed_byte_count
    upload_record.piece_hashes = piece_hashes
    upload_record.piece_sha_state = piece_sha_state
    upload_record.storage_metadata = storage_metadata
    upload_record.byte_count = byte_count
    upload_record.chunk_count = chunk_count
    upload_record.sha_state = sha_state
    upload_record.save()
    return BlobUpload.for_upload(upload_record)

  def delete_blob_upload(self, blob_upload):
    """ Deletes a blob upload record. """
    upload_record = model.blob.get_blob_upload_by_uuid(blob_upload.upload_id)
    if upload_record is not None:
      upload_record.delete_instance()

  def commit_blob_upload(self, blob_upload, blob_digest_str, blob_expiration_seconds):
    """ Commits the blob upload into a blob and sets an expiration before that blob will be GCed.
    """
    upload_record = model.blob.get_blob_upload_by_uuid(blob_upload.upload_id)
    if upload_record is None:
      return None

    repository = upload_record.repository
    namespace_name = repository.namespace_user.username
    repo_name = repository.name

    # Create the blob and temporarily tag it.
    location_obj = model.storage.get_image_location_for_name(blob_upload.location_name)
    blob_record = model.blob.store_blob_record_and_temp_link(
      namespace_name, repo_name, blob_digest_str, location_obj.id, blob_upload.byte_count,
      blob_expiration_seconds, blob_upload.uncompressed_byte_count)

    # Delete the blob upload.
    upload_record.delete_instance()
    return Blob.for_image_storage(blob_record,
                                  storage_path=model.storage.get_layer_path(blob_record))
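
  # A hypothetical blob upload lifecycle, as driven by the registry protocol handlers:
  # create an upload record, update it as chunks arrive, then commit it into a blob. The
  # identifiers, location name and counters below are placeholders.
  #
  #   upload = pre_oci_model.create_blob_upload(repo_ref, str(uuid.uuid4()), 'local_us', {})
  #   upload = pre_oci_model.update_blob_upload(upload, uncompressed_byte_count, piece_hashes,
  #                                             piece_sha_state, storage_metadata, byte_count,
  #                                             chunk_count, sha_state)
  #   blob = pre_oci_model.commit_blob_upload(upload, blob_digest_str, 60 * 60)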


pre_oci_model = PreOCIModel()