Implement V2 interfaces and remaining V1 interfaces
Also adds some tests for the V1 interface to the registry tests. Note: all *registry* tests currently pass, but because the verbs are not yet converted, the verb tests in registry_tests.py still fail.
This commit is contained in:
parent
d67991987b
commit
db60df827d
21 changed files with 588 additions and 338 deletions
@@ -3,7 +3,7 @@ from uuid import uuid4
 from data.model import (tag, _basequery, BlobDoesNotExist, InvalidBlobUpload, db_transaction,
                         storage as storage_model, InvalidImageException)
 from data.database import (Repository, Namespace, ImageStorage, Image, ImageStoragePlacement,
-                           BlobUpload)
+                           BlobUpload, ImageStorageLocation)


 def get_repo_blob_by_digest(namespace, repo_name, blob_digest):
@@ -63,7 +63,9 @@ def get_blob_upload(namespace, repo_name, upload_uuid):
   """
   try:
     return (BlobUpload
-            .select()
+            .select(BlobUpload, ImageStorageLocation)
+            .join(ImageStorageLocation)
+            .switch(BlobUpload)
             .join(Repository)
             .join(Namespace, on=(Namespace.id == Repository.namespace_user))
             .where(Repository.name == repo_name, Namespace.username == namespace,
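For context, the reworked query joins ImageStorageLocation up front so a caller can read the upload's storage location straight off the returned row. A minimal, hypothetical caller sketch (the namespace, repository, and uuid values are placeholders, not part of this commit):

# Hypothetical caller of get_blob_upload.
upload = get_blob_upload('devtable', 'simple', 'some-upload-uuid')

# The joined ImageStorageLocation row is loaded in the same query, so no extra lookup is needed.
print('resuming upload %s stored in %s' % (upload.uuid, upload.location.name))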
@@ -333,6 +333,16 @@ def load_tag_manifest(namespace, repo_name, tag_name):
     raise InvalidManifestException(msg)


+def delete_manifest_by_digest(namespace, repo_name, digest):
+  tag_manifests = list(_load_repo_manifests(namespace, repo_name)
+                       .where(TagManifest.digest == digest))
+
+  for tag_manifest in tag_manifests:
+    delete_tag(namespace, repo_name, tag_manifest.tag.name)
+
+  return [tag_manifest.tag for tag_manifest in tag_manifests]
+
+
 def load_manifest_by_digest(namespace, repo_name, digest):
   try:
     return (_load_repo_manifests(namespace, repo_name)
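A rough sketch of how an endpoint could use the new delete_manifest_by_digest helper, assuming the hunk above is data/model/tag.py; the endpoint function itself is hypothetical:

from data import model

def handle_manifest_delete(namespace, repo_name, digest):
  # Delete every tag whose manifest matches the digest; the helper returns the deleted tags.
  tags = model.tag.delete_manifest_by_digest(namespace, repo_name, digest)
  if not tags:
    return None  # nothing referenced this digest

  return [tag.name for tag in tags]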
249  data/model/v1.py

@@ -1,249 +0,0 @@
from app import app, storage as store
from data import model
from data.model import db_transaction
from util.morecollections import AttrDict


def placement_locations_docker_v1(namespace_name, repo_name, image_id):
  """ Returns all the placements for the image with the given V1 Docker ID, found under the
      given repository, or None if no image was found.
  """
  repo_image = model.image.get_repo_image_and_storage(namespace_name, repo_name, image_id)
  if repo_image is None or repo_image.storage is None:
    return None

  return repo_image.storage.locations


def placement_locations_and_path_docker_v1(namespace_name, repo_name, image_id):
  """ Returns a tuple of the placements and storage path location for the image with the
      given V1 Docker ID, found under the given repository, or None if no image was found.
  """
  repo_image = model.image.get_repo_image_extended(namespace_name, repo_name, image_id)
  if not repo_image or repo_image.storage is None:
    return None, None

  return repo_image.storage.locations, model.storage.get_layer_path(repo_image.storage)


def docker_v1_metadata(namespace_name, repo_name, image_id):
  """ Returns various pieces of metadata associated with an image with the given V1 Docker ID,
      including the checksum and its V1 JSON metadata.
  """
  repo_image = model.image.get_repo_image(namespace_name, repo_name, image_id)
  if repo_image is None:
    return None

  return AttrDict({
    'namespace_name': namespace_name,
    'repo_name': repo_name,
    'image_id': image_id,
    'checksum': repo_image.v1_checksum,
    'compat_json': repo_image.v1_json_metadata,
  })

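A short sketch of a hypothetical consumer of docker_v1_metadata; only the field names come from the AttrDict above, the surrounding function is assumed:

def get_image_json(namespace_name, repo_name, image_id):
  # Hypothetical V1-style consumer: fetch the compat JSON and checksum for an image.
  metadata = docker_v1_metadata(namespace_name, repo_name, image_id)
  if metadata is None:
    return None  # the caller would turn this into a 404

  return metadata.compat_json, metadata.checksum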
def update_docker_v1_metadata(namespace_name, repo_name, image_id, created_date_str, comment,
                              command, compat_json, parent_image_id=None):
  """ Updates various pieces of V1 metadata associated with a particular image. """
  parent_image = None
  if parent_image_id is not None:
    parent_image = model.image.get_repo_image(namespace_name, repo_name, parent_image_id)

  model.image.set_image_metadata(image_id, namespace_name, repo_name, created_date_str, comment,
                                 command, compat_json, parent=parent_image)


def storage_exists(namespace_name, repo_name, image_id):
  """ Returns whether storage already exists for the image with the V1 Docker ID under the
      given repository.
  """
  repo_image = model.image.get_repo_image_and_storage(namespace_name, repo_name, image_id)
  if repo_image is None or repo_image.storage is None:
    return False

  if repo_image.storage.uploading:
    return False

  layer_path = model.storage.get_layer_path(repo_image.storage)
  return store.exists(repo_image.storage.locations, layer_path)


def store_docker_v1_checksums(namespace_name, repo_name, image_id, checksum, content_checksum):
  """ Stores the various V1 checksums for the image with the V1 Docker ID. """
  repo_image = model.image.get_repo_image_and_storage(namespace_name, repo_name, image_id)
  if repo_image is None or repo_image.storage is None:
    return

  with db_transaction():
    repo_image.storage.content_checksum = content_checksum
    repo_image.v1_checksum = checksum

    repo_image.storage.save()
    repo_image.save()


def is_image_uploading(namespace_name, repo_name, image_id):
  """ Returns whether the image with the V1 Docker ID is currently marked as uploading. """
  repo_image = model.image.get_repo_image_and_storage(namespace_name, repo_name, image_id)
  if repo_image is None or repo_image.storage is None:
    return False

  return repo_image.storage.uploading


def update_image_uploading(namespace_name, repo_name, image_id, is_uploading):
  """ Marks the image with the V1 Docker ID with the given uploading status. """
  repo_image = model.image.get_repo_image_and_storage(namespace_name, repo_name, image_id)
  if repo_image is None or repo_image.storage is None:
    return

  repo_image.storage.uploading = is_uploading
  repo_image.storage.save()
  return repo_image.storage

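These helpers cover the V1 layer-push lifecycle. A condensed, hypothetical sketch of the order a push handler would call them in (the handler itself is an assumption):

def finish_layer_push(namespace_name, repo_name, image_id, checksum, content_checksum):
  # Hypothetical flow: only finalize layers that are still marked as uploading.
  if not is_image_uploading(namespace_name, repo_name, image_id):
    return

  # Record the V1 checksum and content checksum, then clear the uploading flag.
  store_docker_v1_checksums(namespace_name, repo_name, image_id, checksum, content_checksum)
  update_image_uploading(namespace_name, repo_name, image_id, False)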
def update_image_sizes(namespace_name, repo_name, image_id, size, uncompressed_size):
  """ Updates the sizing information for the image with the given V1 Docker ID. """
  model.storage.set_image_storage_metadata(image_id, namespace_name, repo_name, size,
                                           uncompressed_size)


def get_image_size(namespace_name, repo_name, image_id):
  """ Returns the wire size of the image with the given Docker V1 ID. """
  repo_image = model.image.get_repo_image_and_storage(namespace_name, repo_name, image_id)
  if repo_image is None or repo_image.storage is None:
    return None

  return repo_image.storage.image_size


def create_bittorrent_pieces(namespace_name, repo_name, image_id, pieces_bytes):
  """ Saves the bittorrent piece hashes for the image with the given Docker V1 ID. """
  repo_image = model.image.get_repo_image_and_storage(namespace_name, repo_name, image_id)
  if repo_image is None or repo_image.storage is None:
    return

  model.storage.save_torrent_info(repo_image.storage, app.config['BITTORRENT_PIECE_SIZE'],
                                  pieces_bytes)


def image_ancestry(namespace_name, repo_name, image_id):
  """ Returns a list containing the full ancestry of Docker V1 IDs, in order, for the image with
      the given Docker V1 ID.
  """
  try:
    image = model.image.get_image_by_id(namespace_name, repo_name, image_id)
  except model.InvalidImageException:
    return None

  parents = model.image.get_parent_images(namespace_name, repo_name, image)
  ancestry_docker_ids = [image.docker_image_id]
  ancestry_docker_ids.extend([parent.docker_image_id for parent in parents])
  return ancestry_docker_ids

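image_ancestry returns the image's own ID first, followed by its parents, which is the shape the V1 ancestry response uses. A hypothetical usage sketch:

import json

def get_image_ancestry_json(namespace_name, repo_name, image_id):
  # Hypothetical caller: serialize the ancestry list, or signal an unknown image.
  ancestry = image_ancestry(namespace_name, repo_name, image_id)
  if ancestry is None:
    return None

  return json.dumps(ancestry)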
def repository_exists(namespace_name, repo_name):
  """ Returns whether the repository with the given name and namespace exists. """
  repo = model.repository.get_repository(namespace_name, repo_name)
  return repo is not None


def create_or_link_image(username, namespace_name, repo_name, image_id, storage_location):
  """ Adds the given image to the given repository, by either linking to an existing image
      visible to the user with the given username, or creating a new one if no existing image
      matches.
  """
  repo = model.repository.get_repository(namespace_name, repo_name)
  model.image.find_create_or_link_image(image_id, repo, username, {}, storage_location)


def create_temp_hidden_tag(namespace_name, repo_name, image_id, expiration):
  """ Creates a hidden tag under the matching namespace pointing to the image with the given V1
      Docker ID.
  """
  repo_image = model.image.get_repo_image(namespace_name, repo_name, image_id)
  if repo_image is None:
    return

  repo = repo_image.repository
  model.tag.create_temporary_hidden_tag(repo, repo_image, expiration)


def list_tags(namespace_name, repo_name):
  """ Returns all the tags defined in the repository with the given namespace and name. """
  return model.tag.list_repository_tags(namespace_name, repo_name)


def create_or_update_tag(namespace_name, repo_name, image_id, tag_name):
  """ Creates or updates a tag under the matching repository to point to the image with the given
      Docker V1 ID.
  """
  model.tag.create_or_update_tag(namespace_name, repo_name, tag_name, image_id)

def find_image_id_by_tag(namespace_name, repo_name, tag_name):
  """ Returns the Docker V1 image ID for the HEAD image for the tag with the given name under
      the matching repository, or None if none.
  """
  try:
    tag_image = model.tag.get_tag_image(namespace_name, repo_name, tag_name)
  except model.DataModelException:
    return None

  return tag_image.docker_image_id


def delete_tag(namespace_name, repo_name, tag_name):
  """ Deletes the given tag from the given repository. """
  model.tag.delete_tag(namespace_name, repo_name, tag_name)

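Taken together these back the V1 tag operations. A hypothetical sketch of the calls a set of tag routes would make (the route functions are assumptions):

def put_tag(namespace_name, repo_name, tag_name, image_id):
  # Point the tag at the given V1 image ID, creating it if needed.
  create_or_update_tag(namespace_name, repo_name, image_id, tag_name)

def get_tag(namespace_name, repo_name, tag_name):
  # Returns the HEAD image ID for the tag, or None if the tag does not exist.
  return find_image_id_by_tag(namespace_name, repo_name, tag_name)

def remove_tag(namespace_name, repo_name, tag_name):
  delete_tag(namespace_name, repo_name, tag_name)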
def load_token(password):
  try:
    model.token.load_token_data(password)
    return True
  except model.InvalidTokenException:
    return False


def verify_robot(username, password):
  try:
    model.user.verify_robot(username, password)
    return True
  except model.InvalidRobotException:
    return False


def change_user_password(user, new_password):
  model.user.change_password(user, new_password)


def change_user_email(user, new_email_address):
  model.user.update_email(user, new_email_address)


def get_repository(namespace_name, repo_name):
  #repo = model.repository.get_repository(namespace_name, repo_name)
  return Repository()


def create_repository(namespace_name, repo_name, user):
  #repo = model.repository.create_repository(namespace_name, repo_name, user)
  pass


def repository_is_public(namespace_name, repo_name):
  # return model.repository.repository_is_public(namespace_name, repo_name)
  pass


def validate_oauth_token(password):
  if model.oauth_access_token(password):
    return True
  return False


def get_sorted_matching_repositories(search_term, only_public, can_read, limit):
  matching_repos = model.repository.get_sorted_matching_repositories(search_term, only_public,
                                                                     can_read, limit=5)
  return [Repository()]
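load_token, verify_robot, and validate_oauth_token all reduce a credential check to a boolean. A hypothetical sketch of chaining them in a basic-auth check; the '$token' and '$oauthtoken' usernames and the '+' robot naming convention are assumptions, not part of this commit:

def check_v1_credentials(username, password):
  # Hypothetical dispatch over the boolean credential helpers above.
  if username == '$token':
    return load_token(password)

  if username == '$oauthtoken':
    return validate_oauth_token(password)

  if '+' in username:
    # Robot accounts are assumed to be named namespace+shortname.
    return verify_robot(username, password)

  return False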
183  data/model/v2.py

@@ -1,183 +0,0 @@
from peewee import IntegrityError

from app import app
from data import database, model
from data.database import RepositoryTag
from image import Blob, BlobUpload, ManifestJSON, Repository, Tag
from image.docker.v1 import DockerV1Metadata


def create_repository(namespace_name, repo_name, user):
  model.repository.create_repository(namespace_name, repo_name, user)


def repository_is_public(namespace_name, repo_name):
  return model.repository.repository_is_public(namespace_name, repo_name)


def get_repository(namespace_name, repo_name):
  repo = model.repository.get_repository(namespace_name, repo_name)
  if repo is None:
    return None

  return Repository(
    id=repo.id,
    name=repo.name,
    namespace_name=repo.namespace_user.username,
  )


def get_active_tag(namespace_name, repo_name, tag_name):
  try:
    return model.tag.get_active_tag(namespace_name, repo_name, tag_name)
  except RepositoryTag.DoesNotExist:
    return None

def get_manifest_by_tag(namespace_name, repo_name, tag_name):
  try:
    manifest = model.tag.load_tag_manifest(namespace_name, repo_name, tag_name)
    return ManifestJSON(digest=manifest.digest, json=manifest.json_data)
  except model.InvalidManifestException:
    return None


def get_manifest_by_digest(namespace_name, repo_name, digest):
  try:
    manifest = model.tag.load_manifest_by_digest(namespace_name, repo_name, digest)
    return ManifestJSON(digest=digest, json=manifest.json_data)
  except model.InvalidManifestException:
    return None

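Both helpers normalize to a ManifestJSON or None, so a manifest GET can treat tag and digest references the same way. A hypothetical dispatcher sketch:

def fetch_manifest(namespace_name, repo_name, manifest_ref):
  # Digest references are assumed to be prefixed with 'sha256:'; everything else is a tag name.
  if manifest_ref.startswith('sha256:'):
    return get_manifest_by_digest(namespace_name, repo_name, manifest_ref)

  return get_manifest_by_tag(namespace_name, repo_name, manifest_ref)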
def get_tag_by_manifest_digest(namespace_name, repo_name, digest):
  return Tag()


def delete_tag(namespace_name, repo_name, tag_name):
  model.tag.delete_tag(namespace_name, repo_name, tag_name)
  return True


def get_docker_v1_metadata_by_tag(namespace_name, repo_name, tag_name):
  try:
    repo_image = model.tag.get_tag_image(namespace_name, repo_name, tag_name)
  except model.DataModelException:
    return None

  if not repo_image:
    return None

  return DockerV1Metadata(
    namespace_name=namespace_name,
    repo_name=repo_name,
    image_id=repo_image.docker_image_id,
    checksum=repo_image.v1_checksum,
    content_checksum=repo_image.content_checksum,
    compat_json=repo_image.v1_json_metadata,
  )

def get_docker_v1_metadata_by_image_id(namespace_name, repo_name, image_ids):
  repo = model.repository.get_repository(namespace_name, repo_name)
  if repo is None:
    return {}

  images_query = model.image.lookup_repository_images(repo, image_ids)
  return {image.docker_image_id: DockerV1Metadata(namespace_name=namespace_name,
                                                  repo_name=repo_name,
                                                  image_id=image.docker_image_id,
                                                  checksum=image.v1_checksum,
                                                  content_checksum=image.content_checksum,
                                                  compat_json=image.v1_json_metadata)
          for image in images_query}


def get_parents_docker_v1_metadata(namespace_name, repo_name, image_id):
  # Old implementation:
  # parents = model.image.get_parent_images(namespace_name, repo_name, image)

  # Desired: return a list of the AttrDicts in docker_v1_metadata.
  return []

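Because the result is keyed by Docker image ID, a caller can decorate a manifest's layer list in a single round trip. A hypothetical sketch (the layer-ID list is assumed to come from a parsed manifest):

def metadata_for_layers(namespace_name, repo_name, layer_image_ids):
  # Hypothetical helper: resolve V1 metadata for each referenced layer, preserving order.
  by_id = get_docker_v1_metadata_by_image_id(namespace_name, repo_name, layer_image_ids)
  return [by_id.get(image_id) for image_id in layer_image_ids]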
def create_manifest_and_update_tag(namespace_name, repo_name, tag_name, manifest_digest,
                                   manifest_bytes):
  try:
    model.tag.associate_generated_tag_manifest(namespace_name, repo_name, tag_name,
                                               manifest_digest, manifest_bytes)
  except IntegrityError:
    # It's already there!
    pass


def synthesize_v1_image(repo, storage, image_id, created, comment, command, compat_json,
                        parent_image_id):
  model.image.synthesize_v1_image(repo, storage, image_id, created, comment, command, compat_json,
                                  parent_image_id)


def save_manifest(namespace_name, repo_name, tag_name, leaf_layer_id, manifest_digest,
                  manifest_bytes):
  model.tag.store_tag_manifest(namespace_name, repo_name, tag_name, leaf_layer_id, manifest_digest,
                               manifest_bytes)


def repository_tags(namespace_name, repo_name, limit, offset):
  return [Tag()]


def get_visible_repositories(username, limit, offset):
  return [Repository()]

def create_blob_upload(namespace_name, repo_name, upload_uuid, location_name, storage_metadata):
  """
  Creates a blob upload.

  Returns False if the upload's repository does not exist.
  """
  try:
    model.blob.initiate_upload(namespace_name, repo_name, upload_uuid, location_name,
                               storage_metadata)
    return True
  except database.Repository.DoesNotExist:
    return False


def blob_upload_by_uuid(uuid):
  # NOTE: the model call still expects namespace_name and repo_name, but only the uuid is
  # available in this signature so far.
  try:
    found = model.blob.get_blob_upload(namespace_name, repo_name, uuid)
  except model.InvalidBlobUpload:
    raise BlobUploadUnknown()

  return BlobUpload(
    uuid=uuid,
    byte_count=found.byte_count,
    uncompressed_byte_count=found.uncompressed_byte_count,
    chunk_count=found.chunk_count,
    location_name=found.location.name,
    storage_metadata=found.storage_metadata,
  )


def update_blob_upload(blob_upload):
  # Old implementation:
  # blob_upload.save()
  pass


def delete_blob_upload(uuid):
  # NOTE: see blob_upload_by_uuid; namespace_name and repo_name are not yet plumbed through.
  try:
    found = model.blob.get_blob_upload(namespace_name, repo_name, uuid)
  except model.InvalidBlobUpload:
    raise BlobUploadUnknown()

  found.delete_instance()

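A compressed, hypothetical sketch of the chunked-upload flow these helpers are meant to serve; the chunk-writing and digest-verification steps are elided and the flow itself is an assumption:

def start_upload(namespace_name, repo_name, upload_uuid, location_name, storage_metadata):
  # POST /v2/<repo>/blobs/uploads/: record the new upload, or report a missing repository.
  return create_blob_upload(namespace_name, repo_name, upload_uuid, location_name,
                            storage_metadata)

def finish_upload(upload_uuid):
  # PUT .../uploads/<uuid>: resolve the tracking row, commit the blob elsewhere,
  # then drop the upload record.
  upload = blob_upload_by_uuid(upload_uuid)
  delete_blob_upload(upload.uuid)
  return upload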
def create_blob_and_temp_tag(namespace_name, repo_name, expected_digest, upload_obj):
  return model.blob.store_blob_record_and_temp_link(namespace_name, repo_name, expected_digest,
                                                    upload_obj.location,
                                                    upload_obj.byte_count,
                                                    app.config['PUSH_TEMP_TAG_EXPIRATION_SEC'],
                                                    upload_obj.uncompressed_byte_count)


def get_blob_by_digest(namespace_name, repo_name, digest):
  try:
    return model.blob.get_repo_blob_by_digest(namespace_name, repo_name, digest)
  except model.BlobDoesNotExist:
    return None


def create_bittorrent_pieces(blob_storage, piece_size, piece_bytes):
  model.storage.save_torrent_info(blob_storage.id, piece_size, piece_bytes)


def get_blob_path(blob):
  # Once everything is moved over, this could be in util.registry and not even
  # touch the database.
  return model.storage.get_layer_path(blob)
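A hypothetical sketch of a blob GET built on get_blob_by_digest and get_blob_path; the store.stream_read call is an assumption based on the storage driver used by v1.py above:

from app import storage as store

def stream_blob(namespace_name, repo_name, digest):
  # Hypothetical fetch path: resolve the blob record, then stream it from its locations.
  blob = get_blob_by_digest(namespace_name, repo_name, digest)
  if blob is None:
    return None  # unknown blob

  path = get_blob_path(blob)
  return store.stream_read(blob.locations, path)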