From 36db133b86663565738411cbce0b567a5b173c93 Mon Sep 17 00:00:00 2001
From: Joseph Schorr
Date: Mon, 29 Oct 2018 15:54:01 -0400
Subject: [PATCH] Pull out common shared code in the data model interface

---
 data/registry_model/registry_pre_oci_model.py | 250 +----------------
 data/registry_model/shared.py                 | 256 ++++++++++++++++++
 2 files changed, 260 insertions(+), 246 deletions(-)
 create mode 100644 data/registry_model/shared.py

diff --git a/data/registry_model/registry_pre_oci_model.py b/data/registry_model/registry_pre_oci_model.py
index f3bb14953..9748cf74d 100644
--- a/data/registry_model/registry_pre_oci_model.py
+++ b/data/registry_model/registry_pre_oci_model.py
@@ -8,13 +8,11 @@ from peewee import IntegrityError
 
 from data import database
 from data import model
-from data.cache import cache_key
 from data.database import db_transaction
 from data.registry_model.interface import RegistryDataInterface
-from data.registry_model.datatype import FromDictionaryException
-from data.registry_model.datatypes import (Tag, RepositoryReference, Manifest, LegacyImage, Label,
-                                           SecurityScanStatus, ManifestLayer, Blob, DerivedImage,
-                                           TorrentInfo, BlobUpload)
+from data.registry_model.datatypes import (Tag, Manifest, LegacyImage, Label,
+                                           SecurityScanStatus, ManifestLayer, Blob, DerivedImage)
+from data.registry_model.shared import SharedModel
 from data.registry_model.label_handlers import apply_label_to_manifest
 from image.docker.schema1 import (DockerSchema1ManifestBuilder, ManifestException,
                                   DockerSchema1Manifest)
@@ -24,7 +22,7 @@ from util.validation import is_json
 logger = logging.getLogger(__name__)
 
 
-class PreOCIModel(RegistryDataInterface):
+class PreOCIModel(SharedModel, RegistryDataInterface):
   """
   PreOCIModel implements the data model for the registry API using a database schema
   before it was changed to support the OCI specification.
@@ -46,12 +44,6 @@ class PreOCIModel(RegistryDataInterface):
     assert found_tag is None or not found_tag.hidden
     return Tag.for_repository_tag(found_tag)
 
-  def lookup_repository(self, namespace_name, repo_name, kind_filter=None):
-    """ Looks up and returns a reference to the repository with the given namespace and name,
-        or None if none. """
-    repo = model.repository.get_repository(namespace_name, repo_name, kind_filter=kind_filter)
-    return RepositoryReference.for_repo_obj(repo)
-
   def get_manifest_for_tag(self, tag, backfill_if_necessary=False):
     """ Returns the manifest associated with the given tag. """
     try:
@@ -535,16 +527,6 @@ class PreOCIModel(RegistryDataInterface):
 
     return Manifest.for_tag_manifest(tag_manifest)
 
-  def is_existing_disabled_namespace(self, namespace_name):
-    """ Returns whether the given namespace exists and is disabled. """
-    namespace = model.user.get_namespace_user(namespace_name)
-    return namespace is not None and not namespace.enabled
-
-  def is_namespace_enabled(self, namespace_name):
-    """ Returns whether the given namespace exists and is enabled. """
-    namespace = model.user.get_namespace_user(namespace_name)
-    return namespace is not None and namespace.enabled
-
   def list_manifest_layers(self, manifest, include_placements=False):
     """ Returns an *ordered list* of the layers found in the manifest, starting at the base and
         working towards the leaf, including the associated Blob and its placements (if specified).
@@ -634,230 +616,6 @@ class PreOCIModel(RegistryDataInterface):
 
     return DerivedImage.for_derived_storage(derived, verb, varying_metadata, blob)
 
-  def get_derived_image_signature(self, derived_image, signer_name):
-    """
-    Returns the signature associated with the derived image and a specific signer or None if none.
-    """
-    try:
-      derived_storage = database.DerivedStorageForImage.get(id=derived_image._db_id)
-    except database.DerivedStorageForImage.DoesNotExist:
-      return None
-
-    storage = derived_storage.derivative
-    signature_entry = model.storage.lookup_storage_signature(storage, signer_name)
-    if signature_entry is None:
-      return None
-
-    return signature_entry.signature
-
-  def set_derived_image_signature(self, derived_image, signer_name, signature):
-    """
-    Sets the calculated signature for the given derived image and signer to that specified.
-    """
-    try:
-      derived_storage = database.DerivedStorageForImage.get(id=derived_image._db_id)
-    except database.DerivedStorageForImage.DoesNotExist:
-      return None
-
-    storage = derived_storage.derivative
-    signature_entry = model.storage.find_or_create_storage_signature(storage, signer_name)
-    signature_entry.signature = signature
-    signature_entry.uploading = False
-    signature_entry.save()
-
-  def delete_derived_image(self, derived_image):
-    """
-    Deletes a derived image and all of its storage.
-    """
-    try:
-      derived_storage = database.DerivedStorageForImage.get(id=derived_image._db_id)
-    except database.DerivedStorageForImage.DoesNotExist:
-      return None
-
-    model.image.delete_derived_storage(derived_storage)
-
-  def set_derived_image_size(self, derived_image, compressed_size):
-    """
-    Sets the compressed size on the given derived image.
-    """
-    try:
-      derived_storage = database.DerivedStorageForImage.get(id=derived_image._db_id)
-    except database.DerivedStorageForImage.DoesNotExist:
-      return None
-
-    storage_entry = derived_storage.derivative
-    storage_entry.image_size = compressed_size
-    storage_entry.uploading = False
-    storage_entry.save()
-
-  def get_torrent_info(self, blob):
-    """
-    Returns the torrent information associated with the given blob or None if none.
-    """
-    try:
-      image_storage = database.ImageStorage.get(id=blob._db_id)
-    except database.ImageStorage.DoesNotExist:
-      return None
-
-    try:
-      torrent_info = model.storage.get_torrent_info(image_storage)
-    except model.TorrentInfoDoesNotExist:
-      return None
-
-    return TorrentInfo.for_torrent_info(torrent_info)
-
-  def set_torrent_info(self, blob, piece_length, pieces):
-    """
-    Sets the torrent infomation associated with the given blob to that specified.
-    """
-    try:
-      image_storage = database.ImageStorage.get(id=blob._db_id)
-    except database.ImageStorage.DoesNotExist:
-      return None
-
-    torrent_info = model.storage.save_torrent_info(image_storage, piece_length, pieces)
-    return TorrentInfo.for_torrent_info(torrent_info)
-
-  def get_cached_repo_blob(self, model_cache, namespace_name, repo_name, blob_digest):
-    """
-    Returns the blob in the repository with the given digest if any or None if none.
-    Caches the result in the caching system.
- """ - def load_blob(): - repository_ref = self.lookup_repository(namespace_name, repo_name) - if repository_ref is None: - return None - - blob_found = self.get_repo_blob_by_digest(repository_ref, blob_digest, - include_placements=True) - if blob_found is None: - return None - - return blob_found.asdict() - - blob_cache_key = cache_key.for_repository_blob(namespace_name, repo_name, blob_digest, 2) - blob_dict = model_cache.retrieve(blob_cache_key, load_blob) - - try: - return Blob.from_dict(blob_dict) if blob_dict is not None else None - except FromDictionaryException: - # The data was stale in some way. Simply reload. - repository_ref = self.lookup_repository(namespace_name, repo_name) - if repository_ref is None: - return None - - return self.get_repo_blob_by_digest(repository_ref, blob_digest, include_placements=True) - - def get_repo_blob_by_digest(self, repository_ref, blob_digest, include_placements=False): - """ - Returns the blob in the repository with the given digest, if any or None if none. Note that - there may be multiple records in the same repository for the same blob digest, so the return - value of this function may change. - """ - try: - image_storage = model.blob.get_repository_blob_by_digest(repository_ref._db_id, blob_digest) - except model.BlobDoesNotExist: - return None - - assert image_storage.cas_path is not None - - placements = None - if include_placements: - placements = list(model.storage.get_storage_locations(image_storage.uuid)) - - return Blob.for_image_storage(image_storage, - storage_path=model.storage.get_layer_path(image_storage), - placements=placements) - - def create_blob_upload(self, repository_ref, new_upload_id, location_name, storage_metadata): - """ Creates a new blob upload and returns a reference. If the blob upload could not be - created, returns None. """ - repo = model.repository.lookup_repository(repository_ref._db_id) - if repo is None: - return None - - try: - upload_record = model.blob.initiate_upload(repo.namespace_user.username, repo.name, - new_upload_id, location_name, storage_metadata) - return BlobUpload.for_upload(upload_record) - except database.Repository.DoesNotExist: - return None - - def lookup_blob_upload(self, repository_ref, blob_upload_id): - """ Looks up the blob upload withn the given ID under the specified repository and returns it - or None if none. - """ - upload_record = model.blob.get_blob_upload_by_uuid(blob_upload_id) - if upload_record is None: - return None - - return BlobUpload.for_upload(upload_record) - - def update_blob_upload(self, blob_upload, uncompressed_byte_count, piece_hashes, piece_sha_state, - storage_metadata, byte_count, chunk_count, sha_state): - """ Updates the fields of the blob upload to match those given. Returns the updated blob upload - or None if the record does not exists. - """ - upload_record = model.blob.get_blob_upload_by_uuid(blob_upload.upload_id) - if upload_record is None: - return None - - upload_record.uncompressed_byte_count = uncompressed_byte_count - upload_record.piece_hashes = piece_hashes - upload_record.piece_sha_state = piece_sha_state - upload_record.storage_metadata = storage_metadata - upload_record.byte_count = byte_count - upload_record.chunk_count = chunk_count - upload_record.sha_state = sha_state - upload_record.save() - return BlobUpload.for_upload(upload_record) - - def delete_blob_upload(self, blob_upload): - """ Deletes a blob upload record. 
""" - upload_record = model.blob.get_blob_upload_by_uuid(blob_upload.upload_id) - if upload_record is not None: - upload_record.delete_instance() - - def commit_blob_upload(self, blob_upload, blob_digest_str, blob_expiration_seconds): - """ Commits the blob upload into a blob and sets an expiration before that blob will be GCed. - """ - upload_record = model.blob.get_blob_upload_by_uuid(blob_upload.upload_id) - if upload_record is None: - return None - - repository = upload_record.repository - namespace_name = repository.namespace_user.username - repo_name = repository.name - - # Create the blob and temporarily tag it. - location_obj = model.storage.get_image_location_for_name(blob_upload.location_name) - blob_record = model.blob.store_blob_record_and_temp_link( - namespace_name, repo_name, blob_digest_str, location_obj.id, blob_upload.byte_count, - blob_expiration_seconds, blob_upload.uncompressed_byte_count) - - # Delete the blob upload. - upload_record.delete_instance() - return Blob.for_image_storage(blob_record, - storage_path=model.storage.get_layer_path(blob_record)) - - def mount_blob_into_repository(self, blob, target_repository_ref, expiration_sec): - """ - Mounts the blob from another repository into the specified target repository, and adds an - expiration before that blob is automatically GCed. This function is useful during push - operations if an existing blob from another repository is being pushed. Returns False if - the mounting fails. - """ - repo = model.repository.lookup_repository(target_repository_ref._db_id) - if repo is None: - return False - - namespace_name = repo.namespace_user.username - repo_name = repo.name - - storage = model.blob.temp_link_blob(namespace_name, repo_name, blob.digest, - expiration_sec) - return bool(storage) - def set_tags_expiration_for_manifest(self, manifest, expiration_sec): """ Sets the expiration on all tags that point to the given manifest to that specified. diff --git a/data/registry_model/shared.py b/data/registry_model/shared.py new file mode 100644 index 000000000..c226b27e8 --- /dev/null +++ b/data/registry_model/shared.py @@ -0,0 +1,256 @@ +# pylint: disable=protected-access +import logging + +from data import database +from data import model +from data.cache import cache_key +from data.registry_model.datatype import FromDictionaryException +from data.registry_model.datatypes import RepositoryReference, Blob, TorrentInfo, BlobUpload + +logger = logging.getLogger(__name__) + + +class SharedModel: + """ + SharedModel implements those data model operations for the registry API that are unchanged + between the old and new data models. + """ + def lookup_repository(self, namespace_name, repo_name, kind_filter=None): + """ Looks up and returns a reference to the repository with the given namespace and name, + or None if none. """ + repo = model.repository.get_repository(namespace_name, repo_name, kind_filter=kind_filter) + return RepositoryReference.for_repo_obj(repo) + + def is_existing_disabled_namespace(self, namespace_name): + """ Returns whether the given namespace exists and is disabled. """ + namespace = model.user.get_namespace_user(namespace_name) + return namespace is not None and not namespace.enabled + + def is_namespace_enabled(self, namespace_name): + """ Returns whether the given namespace exists and is enabled. 
""" + namespace = model.user.get_namespace_user(namespace_name) + return namespace is not None and namespace.enabled + + def get_derived_image_signature(self, derived_image, signer_name): + """ + Returns the signature associated with the derived image and a specific signer or None if none. + """ + try: + derived_storage = database.DerivedStorageForImage.get(id=derived_image._db_id) + except database.DerivedStorageForImage.DoesNotExist: + return None + + storage = derived_storage.derivative + signature_entry = model.storage.lookup_storage_signature(storage, signer_name) + if signature_entry is None: + return None + + return signature_entry.signature + + def set_derived_image_signature(self, derived_image, signer_name, signature): + """ + Sets the calculated signature for the given derived image and signer to that specified. + """ + try: + derived_storage = database.DerivedStorageForImage.get(id=derived_image._db_id) + except database.DerivedStorageForImage.DoesNotExist: + return None + + storage = derived_storage.derivative + signature_entry = model.storage.find_or_create_storage_signature(storage, signer_name) + signature_entry.signature = signature + signature_entry.uploading = False + signature_entry.save() + + def delete_derived_image(self, derived_image): + """ + Deletes a derived image and all of its storage. + """ + try: + derived_storage = database.DerivedStorageForImage.get(id=derived_image._db_id) + except database.DerivedStorageForImage.DoesNotExist: + return None + + model.image.delete_derived_storage(derived_storage) + + def set_derived_image_size(self, derived_image, compressed_size): + """ + Sets the compressed size on the given derived image. + """ + try: + derived_storage = database.DerivedStorageForImage.get(id=derived_image._db_id) + except database.DerivedStorageForImage.DoesNotExist: + return None + + storage_entry = derived_storage.derivative + storage_entry.image_size = compressed_size + storage_entry.uploading = False + storage_entry.save() + + def get_torrent_info(self, blob): + """ + Returns the torrent information associated with the given blob or None if none. + """ + try: + image_storage = database.ImageStorage.get(id=blob._db_id) + except database.ImageStorage.DoesNotExist: + return None + + try: + torrent_info = model.storage.get_torrent_info(image_storage) + except model.TorrentInfoDoesNotExist: + return None + + return TorrentInfo.for_torrent_info(torrent_info) + + def set_torrent_info(self, blob, piece_length, pieces): + """ + Sets the torrent infomation associated with the given blob to that specified. + """ + try: + image_storage = database.ImageStorage.get(id=blob._db_id) + except database.ImageStorage.DoesNotExist: + return None + + torrent_info = model.storage.save_torrent_info(image_storage, piece_length, pieces) + return TorrentInfo.for_torrent_info(torrent_info) + + def get_cached_repo_blob(self, model_cache, namespace_name, repo_name, blob_digest): + """ + Returns the blob in the repository with the given digest if any or None if none. + Caches the result in the caching system. 
+ """ + def load_blob(): + repository_ref = self.lookup_repository(namespace_name, repo_name) + if repository_ref is None: + return None + + blob_found = self.get_repo_blob_by_digest(repository_ref, blob_digest, + include_placements=True) + if blob_found is None: + return None + + return blob_found.asdict() + + blob_cache_key = cache_key.for_repository_blob(namespace_name, repo_name, blob_digest, 2) + blob_dict = model_cache.retrieve(blob_cache_key, load_blob) + + try: + return Blob.from_dict(blob_dict) if blob_dict is not None else None + except FromDictionaryException: + # The data was stale in some way. Simply reload. + repository_ref = self.lookup_repository(namespace_name, repo_name) + if repository_ref is None: + return None + + return self.get_repo_blob_by_digest(repository_ref, blob_digest, include_placements=True) + + def get_repo_blob_by_digest(self, repository_ref, blob_digest, include_placements=False): + """ + Returns the blob in the repository with the given digest, if any or None if none. Note that + there may be multiple records in the same repository for the same blob digest, so the return + value of this function may change. + """ + try: + image_storage = model.blob.get_repository_blob_by_digest(repository_ref._db_id, blob_digest) + except model.BlobDoesNotExist: + return None + + assert image_storage.cas_path is not None + + placements = None + if include_placements: + placements = list(model.storage.get_storage_locations(image_storage.uuid)) + + return Blob.for_image_storage(image_storage, + storage_path=model.storage.get_layer_path(image_storage), + placements=placements) + + def create_blob_upload(self, repository_ref, new_upload_id, location_name, storage_metadata): + """ Creates a new blob upload and returns a reference. If the blob upload could not be + created, returns None. """ + repo = model.repository.lookup_repository(repository_ref._db_id) + if repo is None: + return None + + try: + upload_record = model.blob.initiate_upload(repo.namespace_user.username, repo.name, + new_upload_id, location_name, storage_metadata) + return BlobUpload.for_upload(upload_record) + except database.Repository.DoesNotExist: + return None + + def lookup_blob_upload(self, repository_ref, blob_upload_id): + """ Looks up the blob upload withn the given ID under the specified repository and returns it + or None if none. + """ + upload_record = model.blob.get_blob_upload_by_uuid(blob_upload_id) + if upload_record is None: + return None + + return BlobUpload.for_upload(upload_record) + + def update_blob_upload(self, blob_upload, uncompressed_byte_count, piece_hashes, piece_sha_state, + storage_metadata, byte_count, chunk_count, sha_state): + """ Updates the fields of the blob upload to match those given. Returns the updated blob upload + or None if the record does not exists. + """ + upload_record = model.blob.get_blob_upload_by_uuid(blob_upload.upload_id) + if upload_record is None: + return None + + upload_record.uncompressed_byte_count = uncompressed_byte_count + upload_record.piece_hashes = piece_hashes + upload_record.piece_sha_state = piece_sha_state + upload_record.storage_metadata = storage_metadata + upload_record.byte_count = byte_count + upload_record.chunk_count = chunk_count + upload_record.sha_state = sha_state + upload_record.save() + return BlobUpload.for_upload(upload_record) + + def delete_blob_upload(self, blob_upload): + """ Deletes a blob upload record. 
""" + upload_record = model.blob.get_blob_upload_by_uuid(blob_upload.upload_id) + if upload_record is not None: + upload_record.delete_instance() + + def commit_blob_upload(self, blob_upload, blob_digest_str, blob_expiration_seconds): + """ Commits the blob upload into a blob and sets an expiration before that blob will be GCed. + """ + upload_record = model.blob.get_blob_upload_by_uuid(blob_upload.upload_id) + if upload_record is None: + return None + + repository = upload_record.repository + namespace_name = repository.namespace_user.username + repo_name = repository.name + + # Create the blob and temporarily tag it. + location_obj = model.storage.get_image_location_for_name(blob_upload.location_name) + blob_record = model.blob.store_blob_record_and_temp_link( + namespace_name, repo_name, blob_digest_str, location_obj.id, blob_upload.byte_count, + blob_expiration_seconds, blob_upload.uncompressed_byte_count) + + # Delete the blob upload. + upload_record.delete_instance() + return Blob.for_image_storage(blob_record, + storage_path=model.storage.get_layer_path(blob_record)) + + def mount_blob_into_repository(self, blob, target_repository_ref, expiration_sec): + """ + Mounts the blob from another repository into the specified target repository, and adds an + expiration before that blob is automatically GCed. This function is useful during push + operations if an existing blob from another repository is being pushed. Returns False if + the mounting fails. + """ + repo = model.repository.lookup_repository(target_repository_ref._db_id) + if repo is None: + return False + + namespace_name = repo.namespace_user.username + repo_name = repo.name + + storage = model.blob.temp_link_blob(namespace_name, repo_name, blob.digest, + expiration_sec) + return bool(storage)