Add new methods to registry data model interface in prep for moving verbs to using it
This commit is contained in:
parent
7fa3506723
commit
6c5c2f1a75
9 changed files with 585 additions and 29 deletions
|
@ -7,6 +7,23 @@ from data.database import (Repository, Namespace, ImageStorage, Image, ImageStor
|
||||||
BlobUpload, ImageStorageLocation, db_random_func)
|
BlobUpload, ImageStorageLocation, db_random_func)
|
||||||
|
|
||||||
|
|
||||||
|
def get_repository_blob_by_digest(repository, blob_digest):
|
||||||
|
""" Find the content-addressable blob linked to the specified repository.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
storage_id_query = (ImageStorage
|
||||||
|
.select(ImageStorage.id)
|
||||||
|
.join(Image)
|
||||||
|
.where(Image.repository == repository,
|
||||||
|
ImageStorage.content_checksum == blob_digest,
|
||||||
|
ImageStorage.uploading == False)
|
||||||
|
.limit(1))
|
||||||
|
|
||||||
|
return storage_model.get_storage_by_subquery(storage_id_query)
|
||||||
|
except InvalidImageException:
|
||||||
|
raise BlobDoesNotExist('Blob does not exist with digest: {0}'.format(blob_digest))
|
||||||
|
|
||||||
|
|
||||||
def get_repo_blob_by_digest(namespace, repo_name, blob_digest):
|
def get_repo_blob_by_digest(namespace, repo_name, blob_digest):
|
||||||
""" Find the content-addressable blob linked to the specified repository.
|
""" Find the content-addressable blob linked to the specified repository.
|
||||||
"""
|
"""
|
||||||
|
|
|
@ -489,44 +489,33 @@ def find_or_create_derived_storage(source_image, transformation_name, preferred_
|
||||||
new_storage = storage.create_v1_storage(preferred_location)
|
new_storage = storage.create_v1_storage(preferred_location)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
DerivedStorageForImage.create(source_image=source_image, derivative=new_storage,
|
derived = DerivedStorageForImage.create(source_image=source_image, derivative=new_storage,
|
||||||
transformation=trans, uniqueness_hash=uniqueness_hash)
|
transformation=trans, uniqueness_hash=uniqueness_hash)
|
||||||
except IntegrityError:
|
except IntegrityError:
|
||||||
# Storage was created while this method executed. Just return the existing.
|
# Storage was created while this method executed. Just return the existing.
|
||||||
new_storage.delete_instance(recursive=True)
|
new_storage.delete_instance(recursive=True)
|
||||||
return find_derived_storage_for_image(source_image, transformation_name, varying_metadata)
|
return find_derived_storage_for_image(source_image, transformation_name, varying_metadata)
|
||||||
|
|
||||||
return new_storage
|
return derived
|
||||||
|
|
||||||
|
|
||||||
def find_derived_storage_for_image(source_image, transformation_name, varying_metadata=None):
|
def find_derived_storage_for_image(source_image, transformation_name, varying_metadata=None):
|
||||||
uniqueness_hash = _get_uniqueness_hash(varying_metadata)
|
uniqueness_hash = _get_uniqueness_hash(varying_metadata)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
found = (ImageStorage
|
found = (DerivedStorageForImage
|
||||||
.select(ImageStorage, DerivedStorageForImage)
|
.select(ImageStorage, DerivedStorageForImage)
|
||||||
.join(DerivedStorageForImage)
|
.join(ImageStorage)
|
||||||
|
.switch(DerivedStorageForImage)
|
||||||
.join(ImageStorageTransformation)
|
.join(ImageStorageTransformation)
|
||||||
.where(DerivedStorageForImage.source_image == source_image,
|
.where(DerivedStorageForImage.source_image == source_image,
|
||||||
ImageStorageTransformation.name == transformation_name,
|
ImageStorageTransformation.name == transformation_name,
|
||||||
DerivedStorageForImage.uniqueness_hash == uniqueness_hash)
|
DerivedStorageForImage.uniqueness_hash == uniqueness_hash)
|
||||||
.get())
|
.get())
|
||||||
|
|
||||||
found.locations = {placement.location.name for placement in found.imagestorageplacement_set}
|
|
||||||
return found
|
return found
|
||||||
except ImageStorage.DoesNotExist:
|
except DerivedStorageForImage.DoesNotExist:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
def delete_derived_storage_by_uuid(storage_uuid):
|
def delete_derived_storage(derived_storage):
|
||||||
try:
|
derived_storage.derivative.delete_instance(recursive=True)
|
||||||
image_storage = storage.get_storage_by_uuid(storage_uuid)
|
|
||||||
except InvalidImageException:
|
|
||||||
return
|
|
||||||
|
|
||||||
try:
|
|
||||||
DerivedStorageForImage.get(derivative=image_storage)
|
|
||||||
except DerivedStorageForImage.DoesNotExist:
|
|
||||||
return
|
|
||||||
|
|
||||||
image_storage.delete_instance(recursive=True)
|
|
||||||
|
|
|
@ -336,7 +336,7 @@ def get_storage_locations(uuid):
|
||||||
|
|
||||||
def save_torrent_info(storage_object, piece_length, pieces):
|
def save_torrent_info(storage_object, piece_length, pieces):
|
||||||
try:
|
try:
|
||||||
TorrentInfo.create(storage=storage_object, piece_length=piece_length, pieces=pieces)
|
return TorrentInfo.create(storage=storage_object, piece_length=piece_length, pieces=pieces)
|
||||||
except IntegrityError:
|
except IntegrityError:
|
||||||
# TorrentInfo already exists for this storage.
|
# TorrentInfo already exists for this storage.
|
||||||
pass
|
pass
|
||||||
|
|
|
@ -30,6 +30,9 @@ def datatype(name, static_fields):
|
||||||
|
|
||||||
raise AttributeError('Unknown field `%s`' % name)
|
raise AttributeError('Unknown field `%s`' % name)
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
return '<%s> #%s' % (name, self._db_id)
|
||||||
|
|
||||||
return DataType
|
return DataType
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -1,5 +1,11 @@
|
||||||
|
import hashlib
|
||||||
|
|
||||||
|
from collections import namedtuple
|
||||||
from enum import Enum, unique
|
from enum import Enum, unique
|
||||||
|
|
||||||
|
from cachetools import lru_cache
|
||||||
|
|
||||||
|
from data import model
|
||||||
from data.registry_model.datatype import datatype, requiresinput
|
from data.registry_model.datatype import datatype, requiresinput
|
||||||
from image.docker.schema1 import DockerSchema1Manifest
|
from image.docker.schema1 import DockerSchema1Manifest
|
||||||
|
|
||||||
|
@ -13,6 +19,34 @@ class RepositoryReference(datatype('Repository', [])):
|
||||||
|
|
||||||
return RepositoryReference(db_id=repo_obj.id)
|
return RepositoryReference(db_id=repo_obj.id)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def for_id(cls, repo_id):
|
||||||
|
return RepositoryReference(db_id=repo_id)
|
||||||
|
|
||||||
|
@property
|
||||||
|
@lru_cache(maxsize=1)
|
||||||
|
def _repository_obj(self):
|
||||||
|
return model.repository.lookup_repository(self._db_id)
|
||||||
|
|
||||||
|
def namespace_name(self):
|
||||||
|
""" Returns the namespace name of this repository.
|
||||||
|
"""
|
||||||
|
repository = self._repository_obj
|
||||||
|
if repository is None:
|
||||||
|
return None
|
||||||
|
|
||||||
|
return repository.namespace_user.username
|
||||||
|
|
||||||
|
@property
|
||||||
|
def name(self):
|
||||||
|
""" Returns the name of this repository.
|
||||||
|
"""
|
||||||
|
repository = self._repository_obj
|
||||||
|
if repository is None:
|
||||||
|
return None
|
||||||
|
|
||||||
|
return repository.name
|
||||||
|
|
||||||
|
|
||||||
class Label(datatype('Label', ['key', 'value', 'uuid', 'source_type_name', 'media_type_name'])):
|
class Label(datatype('Label', ['key', 'value', 'uuid', 'source_type_name', 'media_type_name'])):
|
||||||
""" Label represents a label on a manifest. """
|
""" Label represents a label on a manifest. """
|
||||||
|
@ -40,7 +74,15 @@ class Tag(datatype('Tag', ['name', 'reversion', 'manifest_digest', 'lifetime_sta
|
||||||
lifetime_start_ts=repository_tag.lifetime_start_ts,
|
lifetime_start_ts=repository_tag.lifetime_start_ts,
|
||||||
lifetime_end_ts=repository_tag.lifetime_end_ts,
|
lifetime_end_ts=repository_tag.lifetime_end_ts,
|
||||||
manifest_digest=manifest_digest,
|
manifest_digest=manifest_digest,
|
||||||
inputs=dict(legacy_image=legacy_image))
|
inputs=dict(legacy_image=legacy_image,
|
||||||
|
repository=RepositoryReference.for_id(repository_tag.repository_id)))
|
||||||
|
|
||||||
|
@property
|
||||||
|
@requiresinput('repository')
|
||||||
|
def repository(self, repository):
|
||||||
|
""" Returns the repository under which this tag lives.
|
||||||
|
"""
|
||||||
|
return repository
|
||||||
|
|
||||||
@property
|
@property
|
||||||
@requiresinput('legacy_image')
|
@requiresinput('legacy_image')
|
||||||
|
@ -124,3 +166,76 @@ class SecurityScanStatus(Enum):
|
||||||
SCANNED = 'scanned'
|
SCANNED = 'scanned'
|
||||||
FAILED = 'failed'
|
FAILED = 'failed'
|
||||||
QUEUED = 'queued'
|
QUEUED = 'queued'
|
||||||
|
|
||||||
|
|
||||||
|
class ManifestLayer(namedtuple('ManifestLayer', ['layer_info', 'blob'])):
|
||||||
|
""" Represents a single layer in a manifest. The `layer_info` data will be manifest-type specific,
|
||||||
|
but will have a few expected fields (such as `digest`). The `blob` represents the associated
|
||||||
|
blob for this layer, optionally with placements.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def estimated_size(self, estimate_multiplier):
|
||||||
|
""" Returns the estimated size of this layer. If the layers' blob has an uncompressed size,
|
||||||
|
it is used. Otherwise, the compressed_size field in the layer is multiplied by the
|
||||||
|
multiplier.
|
||||||
|
"""
|
||||||
|
if self.blob.uncompressed_size:
|
||||||
|
return self.blob.uncompressed_size
|
||||||
|
|
||||||
|
return (self.layer_info.compressed_size or 0) * estimate_multiplier
|
||||||
|
|
||||||
|
|
||||||
|
class Blob(datatype('Blob', ['uuid', 'digest', 'compressed_size', 'uncompressed_size',
|
||||||
|
'uploading'])):
|
||||||
|
""" Blob represents a content-addressable piece of storage. """
|
||||||
|
@classmethod
|
||||||
|
def for_image_storage(cls, image_storage, storage_path, placements=None):
|
||||||
|
if image_storage is None:
|
||||||
|
return None
|
||||||
|
|
||||||
|
return Blob(db_id=image_storage.id,
|
||||||
|
uuid=image_storage.uuid,
|
||||||
|
inputs=dict(placements=placements, storage_path=storage_path),
|
||||||
|
digest=image_storage.content_checksum,
|
||||||
|
compressed_size=image_storage.image_size,
|
||||||
|
uncompressed_size=image_storage.uncompressed_size,
|
||||||
|
uploading=image_storage.uploading)
|
||||||
|
|
||||||
|
@property
|
||||||
|
@requiresinput('storage_path')
|
||||||
|
def storage_path(self, storage_path):
|
||||||
|
""" Returns the path of this blob in storage. """
|
||||||
|
# TODO: change this to take in the storage engine?
|
||||||
|
return storage_path
|
||||||
|
|
||||||
|
@property
|
||||||
|
@requiresinput('placements')
|
||||||
|
def placements(self, placements):
|
||||||
|
""" Returns all the storage placements at which the Blob can be found. """
|
||||||
|
return placements
|
||||||
|
|
||||||
|
|
||||||
|
class DerivedImage(datatype('DerivedImage', ['verb', 'varying_metadata', 'blob'])):
|
||||||
|
""" DerivedImage represents an image derived from a manifest via some form of verb. """
|
||||||
|
@classmethod
|
||||||
|
def for_derived_storage(cls, derived, verb, varying_metadata, blob):
|
||||||
|
return DerivedImage(db_id=derived.id,
|
||||||
|
verb=verb,
|
||||||
|
varying_metadata=varying_metadata,
|
||||||
|
blob=blob)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def unique_id(self):
|
||||||
|
""" Returns a unique ID for this derived image. This call will consistently produce the same
|
||||||
|
unique ID across calls in the same code base.
|
||||||
|
"""
|
||||||
|
return hashlib.sha256('%s:%s' % (self.verb, self._db_id)).hexdigest()
|
||||||
|
|
||||||
|
|
||||||
|
class TorrentInfo(datatype('TorrentInfo', ['pieces', 'piece_length'])):
|
||||||
|
""" TorrentInfo represents information to pull a blob via torrent. """
|
||||||
|
@classmethod
|
||||||
|
def for_torrent_info(cls, torrent_info):
|
||||||
|
return TorrentInfo(db_id=torrent_info.id,
|
||||||
|
pieces=torrent_info.pieces,
|
||||||
|
piece_length=torrent_info.piece_length)
|
||||||
|
|
|
@ -26,11 +26,12 @@ class RegistryDataInterface(object):
|
||||||
or None if none. """
|
or None if none. """
|
||||||
|
|
||||||
@abstractmethod
|
@abstractmethod
|
||||||
def get_manifest_for_tag(self, tag):
|
def get_manifest_for_tag(self, tag, backfill_if_necessary=False):
|
||||||
""" Returns the manifest associated with the given tag. """
|
""" Returns the manifest associated with the given tag. """
|
||||||
|
|
||||||
@abstractmethod
|
@abstractmethod
|
||||||
def lookup_manifest_by_digest(self, repository_ref, manifest_digest, allow_dead=False):
|
def lookup_manifest_by_digest(self, repository_ref, manifest_digest, allow_dead=False,
|
||||||
|
include_legacy_image=False):
|
||||||
""" Looks up the manifest with the given digest under the given repository and returns it
|
""" Looks up the manifest with the given digest under the given repository and returns it
|
||||||
or None if none. """
|
or None if none. """
|
||||||
|
|
||||||
|
@ -131,3 +132,73 @@ class RegistryDataInterface(object):
|
||||||
NOTE: This method will only be necessary until we've completed the backfill, at which point
|
NOTE: This method will only be necessary until we've completed the backfill, at which point
|
||||||
it should be removed.
|
it should be removed.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def is_namespace_enabled(self, namespace_name):
|
||||||
|
""" Returns whether the given namespace exists and is enabled. """
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def list_manifest_layers(self, manifest, include_placements=False):
|
||||||
|
""" Returns an *ordered list* of the layers found in the manifest, starting at the base and
|
||||||
|
working towards the leaf, including the associated Blob and its placements (if specified).
|
||||||
|
Returns None if the manifest could not be parsed and validated.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def lookup_derived_image(self, manifest, verb, varying_metadata=None, include_placements=False):
|
||||||
|
"""
|
||||||
|
Looks up the derived image for the given manifest, verb and optional varying metadata and
|
||||||
|
returns it or None if none.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def lookup_or_create_derived_image(self, manifest, verb, storage_location, varying_metadata=None,
|
||||||
|
include_placements=False):
|
||||||
|
"""
|
||||||
|
Looks up the derived image for the given maniest, verb and optional varying metadata
|
||||||
|
and returns it. If none exists, a new derived image is created.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def get_derived_image_signature(self, derived_image, signer_name):
|
||||||
|
"""
|
||||||
|
Returns the signature associated with the derived image and a specific signer or None if none.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def set_derived_image_signature(self, derived_image, signer_name, signature):
|
||||||
|
"""
|
||||||
|
Sets the calculated signature for the given derived image and signer to that specified.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def delete_derived_image(self, derived_image):
|
||||||
|
"""
|
||||||
|
Deletes a derived image and all of its storage.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def set_derived_image_size(self, derived_image, compressed_size):
|
||||||
|
"""
|
||||||
|
Sets the compressed size on the given derived image.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def get_torrent_info(self, blob):
|
||||||
|
"""
|
||||||
|
Returns the torrent information associated with the given blob or None if none.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def set_torrent_info(self, blob, piece_length, pieces):
|
||||||
|
"""
|
||||||
|
Sets the torrent infomation associated with the given blob to that specified.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def get_repo_blob_by_digest(self, repo_ref, blob_digest, include_placements=False):
|
||||||
|
"""
|
||||||
|
Returns the blob in the repository with the given digest, if any or None if none. Note that
|
||||||
|
there may be multiple records in the same repository for the same blob digest, so the return
|
||||||
|
value of this function may change.
|
||||||
|
"""
|
||||||
|
|
|
@ -1,4 +1,5 @@
|
||||||
# pylint: disable=protected-access
|
# pylint: disable=protected-access
|
||||||
|
import logging
|
||||||
|
|
||||||
from collections import defaultdict
|
from collections import defaultdict
|
||||||
|
|
||||||
|
@ -8,8 +9,12 @@ from data import database
|
||||||
from data import model
|
from data import model
|
||||||
from data.registry_model.interface import RegistryDataInterface
|
from data.registry_model.interface import RegistryDataInterface
|
||||||
from data.registry_model.datatypes import (Tag, RepositoryReference, Manifest, LegacyImage, Label,
|
from data.registry_model.datatypes import (Tag, RepositoryReference, Manifest, LegacyImage, Label,
|
||||||
SecurityScanStatus)
|
SecurityScanStatus, ManifestLayer, Blob, DerivedImage,
|
||||||
from image.docker.schema1 import DockerSchema1ManifestBuilder
|
TorrentInfo)
|
||||||
|
from image.docker.schema1 import DockerSchema1ManifestBuilder, ManifestException
|
||||||
|
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class PreOCIModel(RegistryDataInterface):
|
class PreOCIModel(RegistryDataInterface):
|
||||||
|
@ -38,11 +43,14 @@ class PreOCIModel(RegistryDataInterface):
|
||||||
repo = model.repository.get_repository(namespace_name, repo_name, kind_filter=kind_filter)
|
repo = model.repository.get_repository(namespace_name, repo_name, kind_filter=kind_filter)
|
||||||
return RepositoryReference.for_repo_obj(repo)
|
return RepositoryReference.for_repo_obj(repo)
|
||||||
|
|
||||||
def get_manifest_for_tag(self, tag):
|
def get_manifest_for_tag(self, tag, backfill_if_necessary=False):
|
||||||
""" Returns the manifest associated with the given tag. """
|
""" Returns the manifest associated with the given tag. """
|
||||||
try:
|
try:
|
||||||
tag_manifest = database.TagManifest.get(tag_id=tag._db_id)
|
tag_manifest = database.TagManifest.get(tag_id=tag._db_id)
|
||||||
except database.TagManifest.DoesNotExist:
|
except database.TagManifest.DoesNotExist:
|
||||||
|
if backfill_if_necessary:
|
||||||
|
return self.backfill_manifest_for_tag(tag)
|
||||||
|
|
||||||
return
|
return
|
||||||
|
|
||||||
return Manifest.for_tag_manifest(tag_manifest)
|
return Manifest.for_tag_manifest(tag_manifest)
|
||||||
|
@ -171,6 +179,7 @@ class PreOCIModel(RegistryDataInterface):
|
||||||
Returns the latest, *active* tag found in the repository, with the matching name
|
Returns the latest, *active* tag found in the repository, with the matching name
|
||||||
or None if none.
|
or None if none.
|
||||||
"""
|
"""
|
||||||
|
assert isinstance(tag_name, basestring)
|
||||||
tag = model.tag.get_active_tag_for_repo(repository_ref._db_id, tag_name)
|
tag = model.tag.get_active_tag_for_repo(repository_ref._db_id, tag_name)
|
||||||
if tag is None:
|
if tag is None:
|
||||||
return None
|
return None
|
||||||
|
@ -355,5 +364,200 @@ class PreOCIModel(RegistryDataInterface):
|
||||||
|
|
||||||
return Manifest.for_tag_manifest(tag_manifest)
|
return Manifest.for_tag_manifest(tag_manifest)
|
||||||
|
|
||||||
|
def is_namespace_enabled(self, namespace_name):
|
||||||
|
""" Returns whether the given namespace exists and is enabled. """
|
||||||
|
namespace = model.user.get_namespace_user(namespace_name)
|
||||||
|
return namespace is not None and namespace.enabled
|
||||||
|
|
||||||
|
def list_manifest_layers(self, manifest, include_placements=False):
|
||||||
|
""" Returns an *ordered list* of the layers found in the manifest, starting at the base and
|
||||||
|
working towards the leaf, including the associated Blob and its placements (if specified).
|
||||||
|
Returns None if the manifest could not be parsed and validated.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
parsed = manifest.get_parsed_manifest()
|
||||||
|
except ManifestException:
|
||||||
|
logger.exception('Could not parse and validate manifest `%s`', manifest._db_id)
|
||||||
|
return None
|
||||||
|
|
||||||
|
try:
|
||||||
|
tag_manifest = database.TagManifest.get(id=manifest._db_id)
|
||||||
|
except database.TagManifest.DoesNotExist:
|
||||||
|
logger.exception('Could not find tag manifest for manifest `%s`', manifest._db_id)
|
||||||
|
return None
|
||||||
|
|
||||||
|
repo = tag_manifest.tag.repository
|
||||||
|
blob_query = model.storage.lookup_repo_storages_by_content_checksum(repo, parsed.checksums)
|
||||||
|
storage_map = {blob.content_checksum: blob for blob in blob_query}
|
||||||
|
|
||||||
|
manifest_layers = []
|
||||||
|
for layer in parsed.layers:
|
||||||
|
digest_str = str(layer.digest)
|
||||||
|
if digest_str not in storage_map:
|
||||||
|
logger.error('Missing digest `%s` for manifest `%s`', layer.digest, manifest._db_id)
|
||||||
|
return None
|
||||||
|
|
||||||
|
image_storage = storage_map[digest_str]
|
||||||
|
placements = None
|
||||||
|
if include_placements:
|
||||||
|
placements = list(model.storage.get_storage_locations(image_storage.uuid))
|
||||||
|
|
||||||
|
blob = Blob.for_image_storage(image_storage,
|
||||||
|
storage_path=model.storage.get_layer_path(image_storage),
|
||||||
|
placements=placements)
|
||||||
|
manifest_layers.append(ManifestLayer(layer, blob))
|
||||||
|
|
||||||
|
return manifest_layers
|
||||||
|
|
||||||
|
def lookup_derived_image(self, manifest, verb, varying_metadata=None, include_placements=False):
|
||||||
|
"""
|
||||||
|
Looks up the derived image for the given manifest, verb and optional varying metadata and
|
||||||
|
returns it or None if none.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
tag_manifest = database.TagManifest.get(id=manifest._db_id)
|
||||||
|
except database.TagManifest.DoesNotExist:
|
||||||
|
logger.exception('Could not find tag manifest for manifest `%s`', manifest._db_id)
|
||||||
|
return None
|
||||||
|
|
||||||
|
repo_image = tag_manifest.tag.image
|
||||||
|
derived = model.image.find_derived_storage_for_image(repo_image, verb, varying_metadata)
|
||||||
|
return self._build_derived(derived, verb, varying_metadata, include_placements)
|
||||||
|
|
||||||
|
def lookup_or_create_derived_image(self, manifest, verb, storage_location, varying_metadata=None,
|
||||||
|
include_placements=False):
|
||||||
|
"""
|
||||||
|
Looks up the derived image for the given maniest, verb and optional varying metadata
|
||||||
|
and returns it. If none exists, a new derived image is created.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
tag_manifest = database.TagManifest.get(id=manifest._db_id)
|
||||||
|
except database.TagManifest.DoesNotExist:
|
||||||
|
logger.exception('Could not find tag manifest for manifest `%s`', manifest._db_id)
|
||||||
|
return None
|
||||||
|
|
||||||
|
repo_image = tag_manifest.tag.image
|
||||||
|
derived = model.image.find_or_create_derived_storage(repo_image, verb, storage_location,
|
||||||
|
varying_metadata)
|
||||||
|
return self._build_derived(derived, verb, varying_metadata, include_placements)
|
||||||
|
|
||||||
|
def _build_derived(self, derived, verb, varying_metadata, include_placements):
|
||||||
|
if derived is None:
|
||||||
|
return None
|
||||||
|
|
||||||
|
derived_storage = derived.derivative
|
||||||
|
placements = None
|
||||||
|
if include_placements:
|
||||||
|
placements = list(model.storage.get_storage_locations(derived_storage.uuid))
|
||||||
|
|
||||||
|
blob = Blob.for_image_storage(derived_storage,
|
||||||
|
storage_path=model.storage.get_layer_path(derived_storage),
|
||||||
|
placements=placements)
|
||||||
|
|
||||||
|
return DerivedImage.for_derived_storage(derived, verb, varying_metadata, blob)
|
||||||
|
|
||||||
|
def get_derived_image_signature(self, derived_image, signer_name):
|
||||||
|
"""
|
||||||
|
Returns the signature associated with the derived image and a specific signer or None if none.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
derived_storage = database.DerivedStorageForImage.get(id=derived_image._db_id)
|
||||||
|
except database.DerivedStorageForImage.DoesNotExist:
|
||||||
|
return None
|
||||||
|
|
||||||
|
storage = derived_storage.derivative
|
||||||
|
signature_entry = model.storage.lookup_storage_signature(storage, signer_name)
|
||||||
|
if signature_entry is None:
|
||||||
|
return None
|
||||||
|
|
||||||
|
return signature_entry.signature
|
||||||
|
|
||||||
|
def set_derived_image_signature(self, derived_image, signer_name, signature):
|
||||||
|
"""
|
||||||
|
Sets the calculated signature for the given derived image and signer to that specified.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
derived_storage = database.DerivedStorageForImage.get(id=derived_image._db_id)
|
||||||
|
except database.DerivedStorageForImage.DoesNotExist:
|
||||||
|
return None
|
||||||
|
|
||||||
|
storage = derived_storage.derivative
|
||||||
|
signature_entry = model.storage.find_or_create_storage_signature(storage, signer_name)
|
||||||
|
signature_entry.signature = signature
|
||||||
|
signature_entry.uploading = False
|
||||||
|
signature_entry.save()
|
||||||
|
|
||||||
|
def delete_derived_image(self, derived_image):
|
||||||
|
"""
|
||||||
|
Deletes a derived image and all of its storage.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
derived_storage = database.DerivedStorageForImage.get(id=derived_image._db_id)
|
||||||
|
except database.DerivedStorageForImage.DoesNotExist:
|
||||||
|
return None
|
||||||
|
|
||||||
|
model.image.delete_derived_storage(derived_storage)
|
||||||
|
|
||||||
|
def set_derived_image_size(self, derived_image, compressed_size):
|
||||||
|
"""
|
||||||
|
Sets the compressed size on the given derived image.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
derived_storage = database.DerivedStorageForImage.get(id=derived_image._db_id)
|
||||||
|
except database.DerivedStorageForImage.DoesNotExist:
|
||||||
|
return None
|
||||||
|
|
||||||
|
storage_entry = derived_storage.derivative
|
||||||
|
storage_entry.image_size = compressed_size
|
||||||
|
storage_entry.uploading = False
|
||||||
|
storage_entry.save()
|
||||||
|
|
||||||
|
def get_torrent_info(self, blob):
|
||||||
|
"""
|
||||||
|
Returns the torrent information associated with the given blob or None if none.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
image_storage = database.ImageStorage.get(id=blob._db_id)
|
||||||
|
except database.ImageStorage.DoesNotExist:
|
||||||
|
return None
|
||||||
|
|
||||||
|
try:
|
||||||
|
torrent_info = model.storage.get_torrent_info(image_storage)
|
||||||
|
except model.TorrentInfoDoesNotExist:
|
||||||
|
return None
|
||||||
|
|
||||||
|
return TorrentInfo.for_torrent_info(torrent_info)
|
||||||
|
|
||||||
|
def set_torrent_info(self, blob, piece_length, pieces):
|
||||||
|
"""
|
||||||
|
Sets the torrent infomation associated with the given blob to that specified.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
image_storage = database.ImageStorage.get(id=blob._db_id)
|
||||||
|
except database.ImageStorage.DoesNotExist:
|
||||||
|
return None
|
||||||
|
|
||||||
|
torrent_info = model.storage.save_torrent_info(image_storage, piece_length, pieces)
|
||||||
|
return TorrentInfo.for_torrent_info(torrent_info)
|
||||||
|
|
||||||
|
def get_repo_blob_by_digest(self, repo_ref, blob_digest, include_placements=False):
|
||||||
|
"""
|
||||||
|
Returns the blob in the repository with the given digest, if any or None if none. Note that
|
||||||
|
there may be multiple records in the same repository for the same blob digest, so the return
|
||||||
|
value of this function may change.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
image_storage = model.blob.get_repository_blob_by_digest(repo_ref._db_id, blob_digest)
|
||||||
|
except model.BlobDoesNotExist:
|
||||||
|
return None
|
||||||
|
|
||||||
|
placements = None
|
||||||
|
if include_placements:
|
||||||
|
placements = list(model.storage.get_storage_locations(image_storage.uuid))
|
||||||
|
|
||||||
|
return Blob.for_image_storage(image_storage,
|
||||||
|
storage_path=model.storage.get_layer_path(image_storage),
|
||||||
|
placements=placements)
|
||||||
|
|
||||||
|
|
||||||
pre_oci_model = PreOCIModel()
|
pre_oci_model = PreOCIModel()
|
||||||
|
|
|
@ -8,7 +8,8 @@ from app import docker_v2_signing_key
|
||||||
from data import model
|
from data import model
|
||||||
from data.database import (TagManifestLabelMap, TagManifestToManifest, Manifest, ManifestBlob,
|
from data.database import (TagManifestLabelMap, TagManifestToManifest, Manifest, ManifestBlob,
|
||||||
ManifestLegacyImage, ManifestLabel, TagManifest, RepositoryTag, Image,
|
ManifestLegacyImage, ManifestLabel, TagManifest, RepositoryTag, Image,
|
||||||
TagManifestLabel, TagManifest, TagManifestLabel)
|
TagManifestLabel, TagManifest, TagManifestLabel, DerivedStorageForImage,
|
||||||
|
TorrentInfo)
|
||||||
from data.registry_model.registry_pre_oci_model import PreOCIModel
|
from data.registry_model.registry_pre_oci_model import PreOCIModel
|
||||||
from data.registry_model.datatypes import RepositoryReference
|
from data.registry_model.datatypes import RepositoryReference
|
||||||
|
|
||||||
|
@ -363,3 +364,160 @@ def test_backfill_manifest_for_tag(repo_namespace, repo_name, clear_rows, pre_oc
|
||||||
parsed_manifest = manifest.get_parsed_manifest()
|
parsed_manifest = manifest.get_parsed_manifest()
|
||||||
assert parsed_manifest.leaf_layer_v1_image_id == legacy_image.docker_image_id
|
assert parsed_manifest.leaf_layer_v1_image_id == legacy_image.docker_image_id
|
||||||
assert parsed_manifest.parent_image_ids == {p.docker_image_id for p in legacy_image.parents}
|
assert parsed_manifest.parent_image_ids == {p.docker_image_id for p in legacy_image.parents}
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize('repo_namespace, repo_name', [
|
||||||
|
('devtable', 'simple'),
|
||||||
|
('devtable', 'complex'),
|
||||||
|
('devtable', 'history'),
|
||||||
|
('buynlarge', 'orgrepo'),
|
||||||
|
])
|
||||||
|
def test_backfill_manifest_on_lookup(repo_namespace, repo_name, clear_rows, pre_oci_model):
|
||||||
|
repository_ref = pre_oci_model.lookup_repository(repo_namespace, repo_name)
|
||||||
|
tags = pre_oci_model.list_repository_tags(repository_ref)
|
||||||
|
assert tags
|
||||||
|
|
||||||
|
for tag in tags:
|
||||||
|
assert not tag.manifest_digest
|
||||||
|
assert not pre_oci_model.get_manifest_for_tag(tag)
|
||||||
|
|
||||||
|
manifest = pre_oci_model.get_manifest_for_tag(tag, backfill_if_necessary=True)
|
||||||
|
assert manifest
|
||||||
|
|
||||||
|
updated_tag = pre_oci_model.get_repo_tag(repository_ref, tag.name)
|
||||||
|
assert updated_tag.manifest_digest == manifest.digest
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize('namespace, expect_enabled', [
|
||||||
|
('devtable', True),
|
||||||
|
('buynlarge', True),
|
||||||
|
|
||||||
|
('disabled', False),
|
||||||
|
])
|
||||||
|
def test_is_namespace_enabled(namespace, expect_enabled, pre_oci_model):
|
||||||
|
assert pre_oci_model.is_namespace_enabled(namespace) == expect_enabled
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize('repo_namespace, repo_name', [
  ('devtable', 'simple'),
  ('devtable', 'complex'),
  ('devtable', 'history'),
  ('buynlarge', 'orgrepo'),
])
def test_list_manifest_layers(repo_namespace, repo_name, pre_oci_model):
  """ For every tag's manifest, the layers returned by list_manifest_layers
      must line up one-to-one with the parsed manifest's layers, expose blob
      metadata (digest, storage path, placements), round-trip through
      get_repo_blob_by_digest, and report an estimated size.
  """
  repository_ref = pre_oci_model.lookup_repository(repo_namespace, repo_name)
  found_tags = pre_oci_model.list_repository_tags(repository_ref)
  assert found_tags

  for found_tag in found_tags:
    found_manifest = pre_oci_model.get_manifest_for_tag(found_tag)
    assert found_manifest

    # Listing without placements must stay within the expected query budget.
    with assert_query_count(4):
      layers = pre_oci_model.list_manifest_layers(found_manifest)

    assert layers

    # Re-list with placements included so blob.placements is populated below.
    layers = pre_oci_model.list_manifest_layers(found_manifest, include_placements=True)
    assert layers

    parsed_layers = list(found_manifest.get_parsed_manifest().layers)
    assert len(layers) == len(parsed_layers)

    for index, manifest_layer in enumerate(layers):
      parsed = parsed_layers[index]
      assert manifest_layer.layer_info == parsed
      assert manifest_layer.blob.digest == str(parsed.digest)
      assert manifest_layer.blob.storage_path
      assert manifest_layer.blob.placements

      # The blob must also be reachable via digest lookup on the repository.
      repo_blob = pre_oci_model.get_repo_blob_by_digest(repository_ref, manifest_layer.blob.digest)
      assert repo_blob.digest == manifest_layer.blob.digest

      assert manifest_layer.estimated_size(1) is not None
|
def test_derived_image(pre_oci_model):
  """ Exercise the full derived-image lifecycle: lookup miss, creation,
      idempotent re-creation, size update, metadata-keyed uniqueness,
      placement loading, and deletion.
  """
  # Start from a clean slate: drop any derived storage left by other tests.
  DerivedStorageForImage.delete().execute()

  repository_ref = pre_oci_model.lookup_repository('devtable', 'simple')
  latest_tag = pre_oci_model.get_repo_tag(repository_ref, 'latest')
  found_manifest = pre_oci_model.get_manifest_for_tag(latest_tag)

  # No squashed image should exist yet.
  assert pre_oci_model.lookup_derived_image(found_manifest, 'squash', {}) is None

  # Create one; a second create with identical arguments returns the same image.
  squashed = pre_oci_model.lookup_or_create_derived_image(found_manifest, 'squash', 'local_us', {})
  assert pre_oci_model.lookup_or_create_derived_image(found_manifest, 'squash', 'local_us', {}) == squashed
  assert squashed.unique_id

  # The size starts unset and can be written through the model.
  assert squashed.blob.compressed_size is None
  pre_oci_model.set_derived_image_size(squashed, 1234)
  assert pre_oci_model.lookup_derived_image(found_manifest, 'squash', {}).blob.compressed_size == 1234
  assert pre_oci_model.lookup_derived_image(found_manifest, 'squash', {}).unique_id == squashed.unique_id

  # Plain lookup now finds it.
  assert pre_oci_model.lookup_derived_image(found_manifest, 'squash', {}) == squashed

  # Different varying metadata must map to a *different* derived image.
  assert pre_oci_model.lookup_derived_image(found_manifest, 'squash', {'foo': 'bar'}) is None

  squashed_foo = pre_oci_model.lookup_or_create_derived_image(found_manifest, 'squash', 'local_us',
                                                              {'foo': 'bar'})
  assert squashed_foo != squashed
  assert pre_oci_model.lookup_derived_image(found_manifest, 'squash', {'foo': 'bar'}) == squashed_foo

  assert squashed.unique_id != squashed_foo.unique_id

  # Asking for placements should populate blob.placements.
  squashed = pre_oci_model.lookup_or_create_derived_image(found_manifest, 'squash', 'local_us', {},
                                                          include_placements=True)
  assert squashed.blob.placements

  # Deletion removes it from lookup.
  pre_oci_model.delete_derived_image(squashed)
  assert pre_oci_model.lookup_derived_image(found_manifest, 'squash', {}) is None
|
def test_derived_image_signatures(pre_oci_model):
  """ A derived image's signature starts absent for a kind ('gpg2') and is
      returned verbatim once set.
  """
  repository_ref = pre_oci_model.lookup_repository('devtable', 'simple')
  latest_tag = pre_oci_model.get_repo_tag(repository_ref, 'latest')
  found_manifest = pre_oci_model.get_manifest_for_tag(latest_tag)

  derived = pre_oci_model.lookup_derived_image(found_manifest, 'squash', {})
  assert derived

  # No signature stored yet.
  assert pre_oci_model.get_derived_image_signature(derived, 'gpg2') is None

  # Store one and read it back.
  pre_oci_model.set_derived_image_signature(derived, 'gpg2', 'foo')
  assert pre_oci_model.get_derived_image_signature(derived, 'gpg2') == 'foo'
|
def test_torrent_info(pre_oci_model):
  """ Torrent info for a blob can be set exactly once; a second set_torrent_info
      call is a silent no-op and the original values are retained.
  """
  # Start clean so the first lookup is guaranteed to miss.
  TorrentInfo.delete().execute()

  repository_ref = pre_oci_model.lookup_repository('devtable', 'simple')
  latest_tag = pre_oci_model.get_repo_tag(repository_ref, 'latest')
  found_manifest = pre_oci_model.get_manifest_for_tag(latest_tag)

  layers = pre_oci_model.list_manifest_layers(found_manifest)
  assert layers

  first_blob = layers[0].blob
  assert pre_oci_model.get_torrent_info(first_blob) is None
  pre_oci_model.set_torrent_info(first_blob, 2, 'foo')

  stored = pre_oci_model.get_torrent_info(first_blob)
  assert stored is not None
  assert stored.piece_length == 2
  assert stored.pieces == 'foo'

  # Setting again must not overwrite the existing info.
  pre_oci_model.set_torrent_info(first_blob, 3, 'bar')

  stored = pre_oci_model.get_torrent_info(first_blob)
  assert stored is not None
  assert stored.piece_length == 2
  assert stored.pieces == 'foo'
|
|
@ -1 +0,0 @@
|
||||||
Joseph Schorr <joseph.schorr@coreos.com> (@josephschorr)
|
|
Reference in a new issue