Merge pull request #3278 from quay/joseph.schorr/QUAY-1124/new-data-model
Begin work on the new registry data model implementation
This commit is contained in: commit 876ebc9b2f
12 changed files with 886 additions and 482 deletions
@@ -1385,6 +1385,58 @@ class Manifest(BaseModel):
  )


class TagKind(BaseModel):
  """ TagKind describes the various kinds of tags that can be found in the registry.
  """
  name = CharField(index=True, unique=True)


class Tag(BaseModel):
  """ Tag represents a user-facing alias for referencing a Manifest or as an alias to another tag.
  """
  name = CharField()
  repository = ForeignKeyField(Repository)
  manifest = ForeignKeyField(Manifest, null=True)
  lifetime_start_ms = BigIntegerField(default=get_epoch_timestamp_ms)
  lifetime_end_ms = BigIntegerField(null=True, index=True)
  hidden = BooleanField(default=False)
  reversion = BooleanField(default=False)
  tag_kind = EnumField(TagKind)
  linked_tag = ForeignKeyField('self', null=True, backref='tag_parents')

  class Meta:
    database = db
    read_slaves = (read_slave,)
    indexes = (
      (('repository', 'name'), False),
      (('repository', 'name', 'hidden'), False),
      (('repository', 'name', 'tag_kind'), False),

      # This unique index prevents deadlocks when concurrently moving and deleting tags
      (('repository', 'name', 'lifetime_end_ms'), True),
    )


class ManifestChild(BaseModel):
  """ ManifestChild represents a relationship between a manifest and its child manifest(s).
      Multiple manifests can share the same children. Note that since Manifests are stored
      per-repository, the repository here is a bit redundant, but we do so to make cleanup easier.
  """
  repository = ForeignKeyField(Repository)
  manifest = ForeignKeyField(Manifest)
  child_manifest = ForeignKeyField(Manifest)

  class Meta:
    database = db
    read_slaves = (read_slave,)
    indexes = (
      (('repository', 'manifest'), False),
      (('repository', 'child_manifest'), False),
      (('repository', 'manifest', 'child_manifest'), False),
      (('manifest', 'child_manifest'), True),
    )


class ManifestLabel(BaseModel):
  """ ManifestLabel represents a label applied to a Manifest, within a repository.
      Note that since Manifests are stored per-repository, the repository here is
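As a rough illustration of how the new Tag rows are meant to be read, the sketch below selects the currently-alive tag of a given name in a repository by treating a NULL or future lifetime_end_ms as "alive". The helper name and the exact filter are assumptions and not part of this change:

# Editor's sketch only; assumes the Tag model and get_epoch_timestamp_ms shown above.
from data.database import Tag, get_epoch_timestamp_ms

def lookup_alive_tag(repository_id, tag_name):
  """ Returns the alive, non-hidden Tag with the given name in the repository, or None. """
  now_ms = get_epoch_timestamp_ms()
  try:
    return (Tag
            .select()
            .where(Tag.repository == repository_id,
                   Tag.name == tag_name,
                   Tag.hidden == False,
                   (Tag.lifetime_end_ms >> None) | (Tag.lifetime_end_ms > now_ms))
            .get())
  except Tag.DoesNotExist:
    return None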
@@ -1467,11 +1519,19 @@ class TagManifestLabelMap(BaseModel):
  broken_manifest = BooleanField(index=True, default=False)


class TagToRepositoryTag(BaseModel):
  """ NOTE: Only used for the duration of the migrations. """
  repository = ForeignKeyField(Repository, index=True)
  tag = ForeignKeyField(Tag, index=True, unique=True)
  repository_tag = ForeignKeyField(RepositoryTag, index=True, unique=True)


appr_classes = set([ApprTag, ApprTagKind, ApprBlobPlacementLocation, ApprManifestList,
                    ApprManifestBlob, ApprBlob, ApprManifestListManifest, ApprManifest,
                    ApprBlobPlacement])
v22_classes = set([Manifest, ManifestLabel, ManifestBlob, ManifestLegacyImage])
transition_classes = set([TagManifestToManifest, TagManifestLabelMap])
v22_classes = set([Manifest, ManifestLabel, ManifestBlob, ManifestLegacyImage, TagKind,
                   ManifestChild, Tag])
transition_classes = set([TagManifestToManifest, TagManifestLabelMap, TagToRepositoryTag])

is_model = lambda x: inspect.isclass(x) and issubclass(x, BaseModel) and x is not BaseModel
all_models = [model[1] for model in inspect.getmembers(sys.modules[__name__], is_model)]
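TagToRepositoryTag exists purely so old-style RepositoryTag rows can be matched up with their new-style Tag rows while both tables are written. A minimal sketch of that lookup follows; the helper name is an assumption, and the test changes later in this diff define a very similar _get_oci_tag helper:

from data.database import Tag, TagToRepositoryTag

def oci_tag_for_repository_tag(repository_tag):
  """ Returns the new-style Tag mapped to the given RepositoryTag, or None if not yet mapped. """
  try:
    return (Tag
            .select()
            .join(TagToRepositoryTag)
            .where(TagToRepositoryTag.repository_tag == repository_tag)
            .get())
  except Tag.DoesNotExist:
    return None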
@@ -0,0 +1,97 @@
"""Add Tag, TagKind and ManifestChild tables

Revision ID: 10f45ee2310b
Revises: 13411de1c0ff
Create Date: 2018-10-29 15:22:53.552216

"""

# revision identifiers, used by Alembic.
revision = '10f45ee2310b'
down_revision = '13411de1c0ff'

from alembic import op
import sqlalchemy as sa
from util.migrate import UTF8CharField

def upgrade(tables, tester):
  # ### commands auto generated by Alembic - please adjust! ###
  op.create_table('tagkind',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('name', sa.String(length=255), nullable=False),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_tagkind'))
  )
  op.create_index('tagkind_name', 'tagkind', ['name'], unique=True)
  op.create_table('manifestchild',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('repository_id', sa.Integer(), nullable=False),
    sa.Column('manifest_id', sa.Integer(), nullable=False),
    sa.Column('child_manifest_id', sa.Integer(), nullable=False),
    sa.ForeignKeyConstraint(['child_manifest_id'], ['manifest.id'], name=op.f('fk_manifestchild_child_manifest_id_manifest')),
    sa.ForeignKeyConstraint(['manifest_id'], ['manifest.id'], name=op.f('fk_manifestchild_manifest_id_manifest')),
    sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], name=op.f('fk_manifestchild_repository_id_repository')),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_manifestchild'))
  )
  op.create_index('manifestchild_child_manifest_id', 'manifestchild', ['child_manifest_id'], unique=False)
  op.create_index('manifestchild_manifest_id', 'manifestchild', ['manifest_id'], unique=False)
  op.create_index('manifestchild_manifest_id_child_manifest_id', 'manifestchild', ['manifest_id', 'child_manifest_id'], unique=True)
  op.create_index('manifestchild_repository_id', 'manifestchild', ['repository_id'], unique=False)
  op.create_index('manifestchild_repository_id_child_manifest_id', 'manifestchild', ['repository_id', 'child_manifest_id'], unique=False)
  op.create_index('manifestchild_repository_id_manifest_id', 'manifestchild', ['repository_id', 'manifest_id'], unique=False)
  op.create_index('manifestchild_repository_id_manifest_id_child_manifest_id', 'manifestchild', ['repository_id', 'manifest_id', 'child_manifest_id'], unique=False)
  op.create_table('tag',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('name', UTF8CharField(length=255), nullable=False),
    sa.Column('repository_id', sa.Integer(), nullable=False),
    sa.Column('manifest_id', sa.Integer(), nullable=True),
    sa.Column('lifetime_start_ms', sa.BigInteger(), nullable=False),
    sa.Column('lifetime_end_ms', sa.BigInteger(), nullable=True),
    sa.Column('hidden', sa.Boolean(), nullable=False, server_default=sa.sql.expression.false()),
    sa.Column('reversion', sa.Boolean(), nullable=False, server_default=sa.sql.expression.false()),
    sa.Column('tag_kind_id', sa.Integer(), nullable=False),
    sa.Column('linked_tag_id', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['linked_tag_id'], ['tag.id'], name=op.f('fk_tag_linked_tag_id_tag')),
    sa.ForeignKeyConstraint(['manifest_id'], ['manifest.id'], name=op.f('fk_tag_manifest_id_manifest')),
    sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], name=op.f('fk_tag_repository_id_repository')),
    sa.ForeignKeyConstraint(['tag_kind_id'], ['tagkind.id'], name=op.f('fk_tag_tag_kind_id_tagkind')),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_tag'))
  )
  op.create_index('tag_lifetime_end_ms', 'tag', ['lifetime_end_ms'], unique=False)
  op.create_index('tag_linked_tag_id', 'tag', ['linked_tag_id'], unique=False)
  op.create_index('tag_manifest_id', 'tag', ['manifest_id'], unique=False)
  op.create_index('tag_repository_id', 'tag', ['repository_id'], unique=False)
  op.create_index('tag_repository_id_name', 'tag', ['repository_id', 'name'], unique=False)
  op.create_index('tag_repository_id_name_hidden', 'tag', ['repository_id', 'name', 'hidden'], unique=False)
  op.create_index('tag_repository_id_name_lifetime_end_ms', 'tag', ['repository_id', 'name', 'lifetime_end_ms'], unique=True)
  op.create_index('tag_repository_id_name_tag_kind_id', 'tag', ['repository_id', 'name', 'tag_kind_id'], unique=False)
  op.create_index('tag_tag_kind_id', 'tag', ['tag_kind_id'], unique=False)
  # ### end Alembic commands ###

  op.bulk_insert(tables.tagkind,
                 [
                   {'name': 'tag'},
                 ])

  # ### population of test data ### #
  tester.populate_table('tag', [
    ('repository_id', tester.TestDataType.Foreign('repository')),
    ('tag_kind_id', tester.TestDataType.Foreign('tagkind')),
    ('name', tester.TestDataType.String),
    ('manifest_id', tester.TestDataType.Foreign('manifest')),
    ('lifetime_start_ms', tester.TestDataType.BigInteger),
  ])

  tester.populate_table('manifestchild', [
    ('repository_id', tester.TestDataType.Foreign('repository')),
    ('manifest_id', tester.TestDataType.Foreign('manifest')),
    ('child_manifest_id', tester.TestDataType.Foreign('manifest')),
  ])
  # ### end population of test data ### #


def downgrade(tables, tester):
  # ### commands auto generated by Alembic - please adjust! ###
  op.drop_table('tag')
  op.drop_table('manifestchild')
  op.drop_table('tagkind')
  # ### end Alembic commands ###
@@ -0,0 +1,44 @@
"""Add TagToRepositoryTag table

Revision ID: 67f0abd172ae
Revises: 10f45ee2310b
Create Date: 2018-10-30 11:31:06.615488

"""

# revision identifiers, used by Alembic.
revision = '67f0abd172ae'
down_revision = '10f45ee2310b'

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql

def upgrade(tables, tester):
  # ### commands auto generated by Alembic - please adjust! ###
  op.create_table('tagtorepositorytag',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('repository_id', sa.Integer(), nullable=False),
    sa.Column('tag_id', sa.Integer(), nullable=False),
    sa.Column('repository_tag_id', sa.Integer(), nullable=False),
    sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], name=op.f('fk_tagtorepositorytag_repository_id_repository')),
    sa.ForeignKeyConstraint(['repository_tag_id'], ['repositorytag.id'], name=op.f('fk_tagtorepositorytag_repository_tag_id_repositorytag')),
    sa.ForeignKeyConstraint(['tag_id'], ['tag.id'], name=op.f('fk_tagtorepositorytag_tag_id_tag')),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_tagtorepositorytag'))
  )
  op.create_index('tagtorepositorytag_repository_id', 'tagtorepositorytag', ['repository_id'], unique=False)
  op.create_index('tagtorepositorytag_repository_tag_id', 'tagtorepositorytag', ['repository_tag_id'], unique=True)
  op.create_index('tagtorepositorytag_tag_id', 'tagtorepositorytag', ['tag_id'], unique=True)
  # ### end Alembic commands ###

  tester.populate_table('tagtorepositorytag', [
    ('repository_id', tester.TestDataType.Foreign('repository')),
    ('tag_id', tester.TestDataType.Foreign('tag')),
    ('repository_tag_id', tester.TestDataType.Foreign('repositorytag')),
  ])


def downgrade(tables, tester):
  # ### commands auto generated by Alembic - please adjust! ###
  op.drop_table('tagtorepositorytag')
  # ### end Alembic commands ###
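These two revisions chain off each other (67f0abd172ae revises 10f45ee2310b), so targeting the second one pulls in the first. Quay drives migrations through its own runner, which is what supplies the extra tables and tester arguments to upgrade() and downgrade(); the plain Alembic command-API calls below are only a sketch of the revision chain and assume a stock env.py and alembic.ini, which are not part of this change:

# Generic Alembic usage for illustration only; not how Quay actually invokes these revisions.
from alembic import command
from alembic.config import Config

config = Config('alembic.ini')              # assumed configuration file
command.upgrade(config, '67f0abd172ae')     # applies 10f45ee2310b first, then 67f0abd172ae
command.downgrade(config, '13411de1c0ff')   # drops tagtorepositorytag, tag, manifestchild, tagkind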
@@ -12,7 +12,7 @@ from data.database import (RepositoryTag, Repository, Image, ImageStorage, Names
                           RepositoryNotification, Label, TagManifestLabel, get_epoch_timestamp,
                           db_for_update, Manifest, ManifestLabel, ManifestBlob,
                           ManifestLegacyImage, TagManifestToManifest,
                           TagManifestLabelMap)
                           TagManifestLabelMap, TagToRepositoryTag, Tag, get_epoch_timestamp_ms)
from util.timedeltastring import convert_to_timedelta


@@ -259,8 +259,10 @@ def create_or_update_tag(namespace_name, repository_name, tag_name, tag_docker_i
  return create_or_update_tag_for_repo(repo.id, tag_name, tag_docker_image_id, reversion=reversion)

def create_or_update_tag_for_repo(repository_id, tag_name, tag_docker_image_id, reversion=False):
  now_ts = get_epoch_timestamp()
def create_or_update_tag_for_repo(repository_id, tag_name, tag_docker_image_id, reversion=False,
                                  oci_manifest=None):
  now_ms = get_epoch_timestamp_ms()
  now_ts = int(now_ms / 1000)

  with db_transaction():
    try:
@@ -270,6 +272,17 @@ def create_or_update_tag_for_repo(repository_id, tag_name, tag_docker_image_id,
                                                      RepositoryTag.name == tag_name), now_ts)).get()
      tag.lifetime_end_ts = now_ts
      tag.save()

      # Check for an OCI tag.
      try:
        oci_tag = db_for_update(Tag
                                .select()
                                .join(TagToRepositoryTag)
                                .where(TagToRepositoryTag.repository_tag == tag)).get()
        oci_tag.lifetime_end_ms = now_ms
        oci_tag.save()
      except Tag.DoesNotExist:
        pass
    except RepositoryTag.DoesNotExist:
      pass
    except IntegrityError:

@@ -283,8 +296,16 @@ def create_or_update_tag_for_repo(repository_id, tag_name, tag_docker_image_id,
    raise DataModelException('Invalid image with id: %s' % tag_docker_image_id)

  try:
    return RepositoryTag.create(repository=repository_id, image=image_obj, name=tag_name,
                                lifetime_start_ts=now_ts, reversion=reversion)
    created = RepositoryTag.create(repository=repository_id, image=image_obj, name=tag_name,
                                   lifetime_start_ts=now_ts, reversion=reversion)
    if oci_manifest:
      # Create the OCI tag as well.
      oci_tag = Tag.create(repository=repository_id, manifest=oci_manifest, name=tag_name,
                           lifetime_start_ms=now_ms, reversion=reversion,
                           tag_kind=Tag.tag_kind.get_id('tag'))
      TagToRepositoryTag.create(tag=oci_tag, repository_tag=created, repository=repository_id)

    return created
  except IntegrityError:
    msg = 'Tag with name %s and lifetime start %s already exists'
    raise TagAlreadyCreatedException(msg % (tag_name, now_ts))
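Callers that already have a new-style Manifest row opt into the dual write by passing it through oci_manifest; callers that omit it keep the old behaviour and only a RepositoryTag is written. A small usage sketch, where repo_id, manifest_digest and leaf_docker_image_id are placeholders for illustration:

from data.database import Manifest
from data.model.tag import create_or_update_tag_for_repo

# Hypothetical values for illustration only.
oci_manifest = Manifest.get(repository=repo_id, digest=manifest_digest)
repo_tag = create_or_update_tag_for_repo(repo_id, 'latest', leaf_docker_image_id,
                                         reversion=False, oci_manifest=oci_manifest)
# repo_tag is the old-style RepositoryTag; the matching new-style Tag and the
# TagToRepositoryTag mapping row are created alongside it.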
@@ -302,7 +323,9 @@ def create_temporary_hidden_tag(repo, image_obj, expiration_s):


def delete_tag(namespace_name, repository_name, tag_name):
  now_ts = get_epoch_timestamp()
  now_ms = get_epoch_timestamp_ms()
  now_ts = int(now_ms / 1000)

  with db_transaction():
    try:
      query = _tag_alive(RepositoryTag

@@ -320,6 +343,15 @@ def delete_tag(namespace_name, repository_name, tag_name):

    found.lifetime_end_ts = now_ts
    found.save()

    try:
      oci_tag_query = TagToRepositoryTag.select().where(TagToRepositoryTag.repository_tag == found)
      oci_tag = db_for_update(oci_tag_query).get().tag
      oci_tag.lifetime_end_ms = now_ms
      oci_tag.save()
    except TagToRepositoryTag.DoesNotExist:
      pass

    return found
@@ -362,6 +394,24 @@ def _delete_tags(repo, query_modifier=None):
    return set()

  with db_transaction():
    # Delete any associated new-style OCI tags.
    # NOTE: This will *not* work once we have tags pointing to other tags (e.g. for channels),
    # but this should all be changed over to new-style-only before we make *that* change.
    oci_tags_to_delete = list(Tag
                              .select()
                              .join(TagToRepositoryTag)
                              .where(TagToRepositoryTag.repository == repo,
                                     TagToRepositoryTag.repository_tag << tags_to_delete))

    if oci_tags_to_delete:
      # Delete the mapping entries.
      (TagToRepositoryTag.delete().where(TagToRepositoryTag.repository == repo,
                                         TagToRepositoryTag.repository_tag << tags_to_delete)
       .execute())

      # Delete the tags themselves.
      Tag.delete().where(Tag.id << oci_tags_to_delete).execute()

    # TODO(jschorr): Update to not use TagManifest once that table has been deprecated.
    tag_manifests_to_delete = list(TagManifest
                                   .select()
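The << operator in these queries is peewee's spelling of SQL IN, so passing a list of model instances (or ids) expands to an IN clause. A tiny illustration of the same idiom outside this function; the id list is a placeholder:

from data.database import Tag

# Equivalent ways of expressing "id IN (...)" with peewee's << operator.
some_ids = [1, 2, 3]  # placeholder ids for illustration
alive_query = Tag.select().where(Tag.id << some_ids)
deleted_count = Tag.delete().where(Tag.id << some_ids).execute()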
@@ -548,10 +598,16 @@ def restore_tag_to_manifest(repo_obj, tag_name, manifest_digest):
  except DataModelException:
    existing_image = None

  # Change the tag manifest to point to the updated image.
  docker_image_id = tag_manifest.tag.image.docker_image_id
  oci_manifest = None
  try:
    oci_manifest = Manifest.get(repository=repo_obj, digest=manifest_digest)
  except Manifest.DoesNotExist:
    pass

  # Change the tag and tag manifest to point to the updated image.
  updated_tag = create_or_update_tag_for_repo(repo_obj, tag_name, docker_image_id,
                                              reversion=True)
                                              reversion=True, oci_manifest=oci_manifest)
  tag_manifest.tag = updated_tag
  tag_manifest.save()
  return existing_image

@@ -601,9 +657,13 @@ def store_tag_manifest_for_repo(repository_id, tag_name, manifest, leaf_layer_id
  """ Stores a tag manifest for a specific tag name in the database. Returns the TagManifest
      object, as well as a boolean indicating whether the TagManifest was created.
  """
  # Create the new-style OCI manifest and its blobs.
  oci_manifest = _populate_manifest_and_blobs(repository_id, manifest, storage_id_map,
                                              leaf_layer_id=leaf_layer_id)

  # Create the tag for the tag manifest.
  tag = create_or_update_tag_for_repo(repository_id, tag_name, leaf_layer_id,
                                      reversion=reversion)
                                      reversion=reversion, oci_manifest=oci_manifest)

  # Add a tag manifest pointing to that tag.
  try:

@@ -612,7 +672,8 @@ def store_tag_manifest_for_repo(repository_id, tag_name, manifest, leaf_layer_id
    manifest.save()
    return manifest, False
  except TagManifest.DoesNotExist:
    return _create_manifest(tag, manifest, storage_id_map), True
    created = _associate_manifest(tag, oci_manifest)
    return created, True


def get_active_tag(namespace, repo_name, tag_name):
@@ -661,10 +722,42 @@ def associate_generated_tag_manifest(namespace, repo_name, tag_name, manifest, s
    manifest.save()
    return manifest, False
  except TagManifest.DoesNotExist:
    return _create_manifest(tag, manifest, storage_id_map), True
    oci_manifest = _populate_manifest_and_blobs(tag.repository, manifest, storage_id_map)

    with db_transaction():
      try:
        (Tag
         .select()
         .join(TagToRepositoryTag)
         .where(TagToRepositoryTag.repository_tag == tag)).get()
      except Tag.DoesNotExist:
        oci_tag = Tag.create(repository=tag.repository, manifest=oci_manifest, name=tag_name,
                             reversion=tag.reversion,
                             lifetime_start_ms=tag.lifetime_start_ts * 1000,
                             lifetime_end_ms=(tag.lifetime_end_ts * 1000
                                              if tag.lifetime_end_ts else None),
                             tag_kind=Tag.tag_kind.get_id('tag'))
        TagToRepositoryTag.create(tag=oci_tag, repository_tag=tag, repository=tag.repository)

      return _associate_manifest(tag, oci_manifest), True


def _create_manifest(tag, manifest, storage_id_map):
def _associate_manifest(tag, oci_manifest):
  with db_transaction():
    tag_manifest = TagManifest.create(tag=tag, digest=oci_manifest.digest,
                                      json_data=oci_manifest.manifest_bytes)
    TagManifestToManifest.create(tag_manifest=tag_manifest, manifest=oci_manifest)
    return tag_manifest


def _populate_manifest_and_blobs(repository, manifest, storage_id_map, leaf_layer_id=None):
  leaf_layer_id = leaf_layer_id or manifest.leaf_layer_v1_image_id
  try:
    legacy_image = Image.get(Image.docker_image_id == leaf_layer_id,
                             Image.repository == repository)
  except Image.DoesNotExist:
    raise DataModelException('Invalid image with id: %s' % leaf_layer_id)

  storage_ids = set()
  for blob_digest in manifest.blob_digests:
    image_storage_id = storage_id_map.get(blob_digest)

@@ -677,12 +770,7 @@ def _create_manifest(tag, manifest, storage_id_map):

    storage_ids.add(image_storage_id)

  manifest_row = populate_manifest(tag.repository, manifest, tag.image, storage_ids)

  with db_transaction():
    tag_manifest = TagManifest.create(tag=tag, digest=manifest.digest, json_data=manifest.bytes)
    TagManifestToManifest.create(tag_manifest=tag_manifest, manifest=manifest_row)
    return tag_manifest
  return populate_manifest(repository, manifest, legacy_image, storage_ids)


def populate_manifest(repository, manifest, legacy_image, storage_ids):
@@ -800,15 +888,41 @@ def change_tag_expiration(tag, expiration_date):
  if end_ts == tag.lifetime_end_ts:
    return (None, True)

  # Note: We check not just the ID of the tag but also its lifetime_end_ts, to ensure that it has
  # not changed while we were updatings it expiration.
  result = (RepositoryTag
            .update(lifetime_end_ts=end_ts)
            .where(RepositoryTag.id == tag.id,
                   RepositoryTag.lifetime_end_ts == tag.lifetime_end_ts)
            .execute())
  return set_tag_end_ts(tag, end_ts)

  return (tag.lifetime_end_ts, result > 0)

def set_tag_end_ts(tag, end_ts):
  """ Sets the end timestamp for a tag. Should only be called by change_tag_expiration
      or tests.
  """
  end_ms = end_ts * 1000

  with db_transaction():
    # Note: We check not just the ID of the tag but also its lifetime_end_ts, to ensure that it has
    # not changed while we were updating it expiration.
    result = (RepositoryTag
              .update(lifetime_end_ts=end_ts)
              .where(RepositoryTag.id == tag.id,
                     RepositoryTag.lifetime_end_ts == tag.lifetime_end_ts)
              .execute())

    # Check for a mapping to an OCI tag.
    try:
      oci_tag = (Tag
                 .select()
                 .join(TagToRepositoryTag)
                 .where(TagToRepositoryTag.repository_tag == tag)
                 .get())

      (Tag
       .update(lifetime_end_ms=end_ms)
       .where(Tag.id == oci_tag.id,
              Tag.lifetime_end_ms == oci_tag.lifetime_end_ms)
       .execute())
    except Tag.DoesNotExist:
      pass

    return (tag.lifetime_end_ts, result > 0)


def find_matching_tag(repo_id, tag_names):
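change_tag_expiration now funnels through set_tag_end_ts, which keeps the second-granularity RepositoryTag column and the millisecond-granularity Tag column in step (end_ms is simply end_ts * 1000). A rough usage sketch; the tag lookup values are placeholders:

from data.model.tag import get_active_tag, set_tag_end_ts

# Hypothetical lookup for illustration.
tag = get_active_tag('devtable', 'somenewrepo', 'foo')

# Expire the tag one hour after it started; the old-style row gets lifetime_end_ts in
# seconds and the mapped OCI Tag row gets the same instant in milliseconds.
end_ts = tag.lifetime_start_ts + 3600
previous_end_ts, updated = set_tag_end_ts(tag, end_ts)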
@@ -12,7 +12,7 @@ from playhouse.test_utils import assert_query_count
from data import model, database
from data.database import (Image, ImageStorage, DerivedStorageForImage, Label, TagManifestLabel,
                           ApprBlob, Manifest, TagManifest, TagManifestToManifest, ManifestLabel,
                           TagManifestLabelMap, ManifestBlob)
                           TagManifestLabelMap, ManifestBlob, Tag, TagToRepositoryTag)
from image.docker.schema1 import DockerSchema1ManifestBuilder
from test.fixtures import *

@@ -223,6 +223,11 @@ def assert_gc_integrity(expect_storage_removed=True):
  for blob_row in ApprBlob.select():
    storage.get_content({preferred}, storage.blob_path(blob_row.digest))

  # Ensure there are no danglings OCI tags.
  oci_tags = {t.id for t in Tag.select()}
  referenced_oci_tags = {t.tag_id for t in TagToRepositoryTag.select()}
  assert not (oci_tags - referenced_oci_tags)


def test_has_garbage(default_tag_policy, initialized_db):
  """ Remove all existing repositories, then add one without garbage, check, then add one with
@@ -9,12 +9,13 @@ from mock import patch

from app import docker_v2_signing_key
from data.database import (Image, RepositoryTag, ImageStorage, Repository, Manifest, ManifestBlob,
                           ManifestLegacyImage, TagManifestToManifest)
                           ManifestLegacyImage, TagManifestToManifest, Tag, TagToRepositoryTag)
from data.model.repository import create_repository
from data.model.tag import (list_active_repo_tags, create_or_update_tag, delete_tag,
                            get_matching_tags, _tag_alive, get_matching_tags_for_images,
                            change_tag_expiration, get_active_tag, store_tag_manifest_for_testing,
                            get_most_recent_tag, get_active_tag_for_repo)
                            get_most_recent_tag, get_active_tag_for_repo,
                            create_or_update_tag_for_repo, set_tag_end_ts)
from data.model.image import find_create_or_link_image
from image.docker.schema1 import DockerSchema1ManifestBuilder
from util.timedeltastring import convert_to_timedelta

@@ -24,12 +25,13 @@ from test.fixtures import *

def _get_expected_tags(image):
  expected_query = (RepositoryTag
                    .select()
                    .join(Image)
                    .where(RepositoryTag.hidden == False)
                    .where((Image.id == image.id) | (Image.ancestors ** ('%%/%s/%%' % image.id))))
                    .select()
                    .join(Image)
                    .where(RepositoryTag.hidden == False)
                    .where((Image.id == image.id) | (Image.ancestors ** ('%%/%s/%%' % image.id))))
  return set([tag.id for tag in _tag_alive(expected_query)])


@pytest.mark.parametrize('max_subqueries,max_image_lookup_count', [
  (1, 1),
  (10, 10),
@@ -45,6 +47,12 @@ def test_get_matching_tags(max_subqueries, max_image_lookup_count, initialized_d
    expected_tags = _get_expected_tags(image)
    assert matching_tags == expected_tags, "mismatch for image %s" % image.id

    oci_tags = list(Tag
                    .select()
                    .join(TagToRepositoryTag)
                    .where(TagToRepositoryTag.repository_tag << expected_tags))
    assert len(oci_tags) == len(expected_tags)


@pytest.mark.parametrize('max_subqueries,max_image_lookup_count', [
  (1, 1),

@@ -116,6 +124,13 @@ def test_get_matching_tag_ids_images_filtered(initialized_db):
  assert matching_tags_ids == expected_tag_ids


def _get_oci_tag(tag):
  return (Tag
          .select()
          .join(TagToRepositoryTag)
          .where(TagToRepositoryTag.repository_tag == tag)).get()


def assert_tags(repository, *args):
  tags = list(list_active_repo_tags(repository))
  assert len(tags) == len(args)
@@ -128,12 +143,39 @@ def assert_tags(repository, *args):

    tags_dict[tag.name] = tag

    oci_tag = _get_oci_tag(tag)
    assert oci_tag.name == tag.name
    assert not oci_tag.hidden
    assert oci_tag.reversion == tag.reversion

    if tag.lifetime_end_ts:
      assert oci_tag.lifetime_end_ms == (tag.lifetime_end_ts * 1000)
    else:
      assert oci_tag.lifetime_end_ms is None

  for expected in args:
    assert expected in tags_dict


def test_create_reversion_tag(initialized_db):
  repository = create_repository('devtable', 'somenewrepo', None)
  manifest = Manifest.get()
  image1 = find_create_or_link_image('foobarimage1', repository, None, {}, 'local_us')

  footag = create_or_update_tag_for_repo(repository, 'foo', image1.docker_image_id,
                                         oci_manifest=manifest, reversion=True)
  assert footag.reversion

  oci_tag = _get_oci_tag(footag)
  assert oci_tag.name == footag.name
  assert not oci_tag.hidden
  assert oci_tag.reversion == footag.reversion


def test_list_active_tags(initialized_db):
  # Create a new repository.
  repository = create_repository('devtable', 'somenewrepo', None)
  manifest = Manifest.get()

  # Create some images.
  image1 = find_create_or_link_image('foobarimage1', repository, None, {}, 'local_us')
@@ -143,50 +185,62 @@ def test_list_active_tags(initialized_db):
  assert_tags(repository)

  # Add some new tags.
  footag = create_or_update_tag('devtable', 'somenewrepo', 'foo', image1.docker_image_id)
  bartag = create_or_update_tag('devtable', 'somenewrepo', 'bar', image1.docker_image_id)
  footag = create_or_update_tag_for_repo(repository, 'foo', image1.docker_image_id,
                                         oci_manifest=manifest)
  bartag = create_or_update_tag_for_repo(repository, 'bar', image1.docker_image_id,
                                         oci_manifest=manifest)

  # Since timestamps are stored on a second-granuality, we need to make the tags "start"
  # Since timestamps are stored on a second-granularity, we need to make the tags "start"
  # before "now", so when we recreate them below, they don't conflict.
  footag.lifetime_start_ts -= 5
  bartag.lifetime_start_ts -= 5

  footag.save()

  bartag.lifetime_start_ts -= 5
  bartag.save()

  footag_oci = _get_oci_tag(footag)
  footag_oci.lifetime_start_ms -= 5000
  footag_oci.save()

  bartag_oci = _get_oci_tag(bartag)
  bartag_oci.lifetime_start_ms -= 5000
  bartag_oci.save()

  # Make sure they are returned.
  assert_tags(repository, 'foo', 'bar')

  # Mark as a tag as expiring in the far future, and make sure it is still returned.
  footag.lifetime_end_ts = footag.lifetime_start_ts + 10000000
  footag.save()
  set_tag_end_ts(footag, footag.lifetime_start_ts + 10000000)

  # Make sure they are returned.
  assert_tags(repository, 'foo', 'bar')

  # Delete a tag and make sure it isn't returned.
  footag = delete_tag('devtable', 'somenewrepo', 'foo')
  footag.lifetime_end_ts -= 4
  footag.save()
  set_tag_end_ts(footag, footag.lifetime_end_ts - 4)

  assert_tags(repository, 'bar')

  # Add a new foo again.
  footag = create_or_update_tag('devtable', 'somenewrepo', 'foo', image1.docker_image_id)
  footag = create_or_update_tag_for_repo(repository, 'foo', image1.docker_image_id,
                                         oci_manifest=manifest)
  footag.lifetime_start_ts -= 3
  footag.save()

  footag_oci = _get_oci_tag(footag)
  footag_oci.lifetime_start_ms -= 3000
  footag_oci.save()

  assert_tags(repository, 'foo', 'bar')

  # Mark as a tag as expiring in the far future, and make sure it is still returned.
  footag.lifetime_end_ts = footag.lifetime_start_ts + 10000000
  footag.save()
  set_tag_end_ts(footag, footag.lifetime_start_ts + 10000000)

  # Make sure they are returned.
  assert_tags(repository, 'foo', 'bar')

  # "Move" foo by updating it and make sure we don't get duplicates.
  create_or_update_tag('devtable', 'somenewrepo', 'foo', image2.docker_image_id)
  create_or_update_tag_for_repo(repository, 'foo', image2.docker_image_id, oci_manifest=manifest)
  assert_tags(repository, 'foo', 'bar')
@@ -201,7 +255,10 @@ def test_list_active_tags(initialized_db):
def test_change_tag_expiration(expiration_offset, expected_offset, initialized_db):
  repository = create_repository('devtable', 'somenewrepo', None)
  image1 = find_create_or_link_image('foobarimage1', repository, None, {}, 'local_us')
  footag = create_or_update_tag('devtable', 'somenewrepo', 'foo', image1.docker_image_id)

  manifest = Manifest.get()
  footag = create_or_update_tag_for_repo(repository, 'foo', image1.docker_image_id,
                                         oci_manifest=manifest)

  expiration_date = None
  if expiration_offset is not None:

@@ -211,15 +268,19 @@ def test_change_tag_expiration(expiration_offset, expected_offset, initialized_d

  # Lookup the tag again.
  footag_updated = get_active_tag('devtable', 'somenewrepo', 'foo')
  oci_tag = _get_oci_tag(footag_updated)

  if expected_offset is None:
    assert footag_updated.lifetime_end_ts is None
    assert oci_tag.lifetime_end_ms is None
  else:
    start_date = datetime.utcfromtimestamp(footag_updated.lifetime_start_ts)
    end_date = datetime.utcfromtimestamp(footag_updated.lifetime_end_ts)
    expected_end_date = start_date + convert_to_timedelta(expected_offset)
    assert (expected_end_date - end_date).total_seconds() < 5 # variance in test

    assert oci_tag.lifetime_end_ms == (footag_updated.lifetime_end_ts * 1000)


def random_storages():
  return list(ImageStorage.select().where(~(ImageStorage.content_checksum >> None)).limit(10))
@@ -8,13 +8,11 @@ from peewee import IntegrityError

from data import database
from data import model
from data.cache import cache_key
from data.database import db_transaction
from data.registry_model.interface import RegistryDataInterface
from data.registry_model.datatype import FromDictionaryException
from data.registry_model.datatypes import (Tag, RepositoryReference, Manifest, LegacyImage, Label,
                                           SecurityScanStatus, ManifestLayer, Blob, DerivedImage,
                                           TorrentInfo, BlobUpload)
from data.registry_model.datatypes import (Tag, Manifest, LegacyImage, Label,
                                           SecurityScanStatus, ManifestLayer, Blob, DerivedImage)
from data.registry_model.shared import SharedModel
from data.registry_model.label_handlers import apply_label_to_manifest
from image.docker.schema1 import (DockerSchema1ManifestBuilder, ManifestException,
                                  DockerSchema1Manifest)
@@ -24,7 +22,7 @@ from util.validation import is_json
logger = logging.getLogger(__name__)


class PreOCIModel(RegistryDataInterface):
class PreOCIModel(SharedModel, RegistryDataInterface):
  """
  PreOCIModel implements the data model for the registry API using a database schema
  before it was changed to support the OCI specification.

@@ -46,12 +44,6 @@ class PreOCIModel(RegistryDataInterface):
    assert found_tag is None or not found_tag.hidden
    return Tag.for_repository_tag(found_tag)

  def lookup_repository(self, namespace_name, repo_name, kind_filter=None):
    """ Looks up and returns a reference to the repository with the given namespace and name,
        or None if none. """
    repo = model.repository.get_repository(namespace_name, repo_name, kind_filter=kind_filter)
    return RepositoryReference.for_repo_obj(repo)

  def get_manifest_for_tag(self, tag, backfill_if_necessary=False):
    """ Returns the manifest associated with the given tag. """
    try:
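Moving the unchanged operations into SharedModel means a future registry data model only has to mix it in and implement the parts that actually differ. A hypothetical sketch of that pattern; OCIModel here is illustrative and not part of this change:

from data.registry_model.interface import RegistryDataInterface
from data.registry_model.shared import SharedModel

class OCIModel(SharedModel, RegistryDataInterface):
  """ Hypothetical new-data-model implementation: blob, upload and torrent helpers come
      from SharedModel; only tag/manifest lookups would need new code here. """
  def find_matching_tag(self, repository_ref, tag_names):
    raise NotImplementedError  # would query the new Tag table instead of RepositoryTag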
@@ -530,21 +522,12 @@ class PreOCIModel(RegistryDataInterface):
    try:
      tag_manifest, _ = model.tag.associate_generated_tag_manifest(namespace_name, repo_name,
                                                                   tag.name, manifest, storage_map)
      assert tag_manifest
    except IntegrityError:
      tag_manifest = model.tag.get_tag_manifest(tag_obj)

    return Manifest.for_tag_manifest(tag_manifest)

  def is_existing_disabled_namespace(self, namespace_name):
    """ Returns whether the given namespace exists and is disabled. """
    namespace = model.user.get_namespace_user(namespace_name)
    return namespace is not None and not namespace.enabled

  def is_namespace_enabled(self, namespace_name):
    """ Returns whether the given namespace exists and is enabled. """
    namespace = model.user.get_namespace_user(namespace_name)
    return namespace is not None and namespace.enabled

  def list_manifest_layers(self, manifest, include_placements=False):
    """ Returns an *ordered list* of the layers found in the manifest, starting at the base and
        working towards the leaf, including the associated Blob and its placements (if specified).
@@ -634,230 +617,6 @@ class PreOCIModel(RegistryDataInterface):

    return DerivedImage.for_derived_storage(derived, verb, varying_metadata, blob)

  def get_derived_image_signature(self, derived_image, signer_name):
    """
    Returns the signature associated with the derived image and a specific signer or None if none.
    """
    try:
      derived_storage = database.DerivedStorageForImage.get(id=derived_image._db_id)
    except database.DerivedStorageForImage.DoesNotExist:
      return None

    storage = derived_storage.derivative
    signature_entry = model.storage.lookup_storage_signature(storage, signer_name)
    if signature_entry is None:
      return None

    return signature_entry.signature

  def set_derived_image_signature(self, derived_image, signer_name, signature):
    """
    Sets the calculated signature for the given derived image and signer to that specified.
    """
    try:
      derived_storage = database.DerivedStorageForImage.get(id=derived_image._db_id)
    except database.DerivedStorageForImage.DoesNotExist:
      return None

    storage = derived_storage.derivative
    signature_entry = model.storage.find_or_create_storage_signature(storage, signer_name)
    signature_entry.signature = signature
    signature_entry.uploading = False
    signature_entry.save()

  def delete_derived_image(self, derived_image):
    """
    Deletes a derived image and all of its storage.
    """
    try:
      derived_storage = database.DerivedStorageForImage.get(id=derived_image._db_id)
    except database.DerivedStorageForImage.DoesNotExist:
      return None

    model.image.delete_derived_storage(derived_storage)

  def set_derived_image_size(self, derived_image, compressed_size):
    """
    Sets the compressed size on the given derived image.
    """
    try:
      derived_storage = database.DerivedStorageForImage.get(id=derived_image._db_id)
    except database.DerivedStorageForImage.DoesNotExist:
      return None

    storage_entry = derived_storage.derivative
    storage_entry.image_size = compressed_size
    storage_entry.uploading = False
    storage_entry.save()

  def get_torrent_info(self, blob):
    """
    Returns the torrent information associated with the given blob or None if none.
    """
    try:
      image_storage = database.ImageStorage.get(id=blob._db_id)
    except database.ImageStorage.DoesNotExist:
      return None

    try:
      torrent_info = model.storage.get_torrent_info(image_storage)
    except model.TorrentInfoDoesNotExist:
      return None

    return TorrentInfo.for_torrent_info(torrent_info)

  def set_torrent_info(self, blob, piece_length, pieces):
    """
    Sets the torrent infomation associated with the given blob to that specified.
    """
    try:
      image_storage = database.ImageStorage.get(id=blob._db_id)
    except database.ImageStorage.DoesNotExist:
      return None

    torrent_info = model.storage.save_torrent_info(image_storage, piece_length, pieces)
    return TorrentInfo.for_torrent_info(torrent_info)

  def get_cached_repo_blob(self, model_cache, namespace_name, repo_name, blob_digest):
    """
    Returns the blob in the repository with the given digest if any or None if none.
    Caches the result in the caching system.
    """
    def load_blob():
      repository_ref = self.lookup_repository(namespace_name, repo_name)
      if repository_ref is None:
        return None

      blob_found = self.get_repo_blob_by_digest(repository_ref, blob_digest,
                                                include_placements=True)
      if blob_found is None:
        return None

      return blob_found.asdict()

    blob_cache_key = cache_key.for_repository_blob(namespace_name, repo_name, blob_digest, 2)
    blob_dict = model_cache.retrieve(blob_cache_key, load_blob)

    try:
      return Blob.from_dict(blob_dict) if blob_dict is not None else None
    except FromDictionaryException:
      # The data was stale in some way. Simply reload.
      repository_ref = self.lookup_repository(namespace_name, repo_name)
      if repository_ref is None:
        return None

      return self.get_repo_blob_by_digest(repository_ref, blob_digest, include_placements=True)

  def get_repo_blob_by_digest(self, repository_ref, blob_digest, include_placements=False):
    """
    Returns the blob in the repository with the given digest, if any or None if none. Note that
    there may be multiple records in the same repository for the same blob digest, so the return
    value of this function may change.
    """
    try:
      image_storage = model.blob.get_repository_blob_by_digest(repository_ref._db_id, blob_digest)
    except model.BlobDoesNotExist:
      return None

    assert image_storage.cas_path is not None

    placements = None
    if include_placements:
      placements = list(model.storage.get_storage_locations(image_storage.uuid))

    return Blob.for_image_storage(image_storage,
                                  storage_path=model.storage.get_layer_path(image_storage),
                                  placements=placements)

  def create_blob_upload(self, repository_ref, new_upload_id, location_name, storage_metadata):
    """ Creates a new blob upload and returns a reference. If the blob upload could not be
        created, returns None. """
    repo = model.repository.lookup_repository(repository_ref._db_id)
    if repo is None:
      return None

    try:
      upload_record = model.blob.initiate_upload(repo.namespace_user.username, repo.name,
                                                 new_upload_id, location_name, storage_metadata)
      return BlobUpload.for_upload(upload_record)
    except database.Repository.DoesNotExist:
      return None

  def lookup_blob_upload(self, repository_ref, blob_upload_id):
    """ Looks up the blob upload withn the given ID under the specified repository and returns it
        or None if none.
    """
    upload_record = model.blob.get_blob_upload_by_uuid(blob_upload_id)
    if upload_record is None:
      return None

    return BlobUpload.for_upload(upload_record)

  def update_blob_upload(self, blob_upload, uncompressed_byte_count, piece_hashes, piece_sha_state,
                         storage_metadata, byte_count, chunk_count, sha_state):
    """ Updates the fields of the blob upload to match those given. Returns the updated blob upload
        or None if the record does not exists.
    """
    upload_record = model.blob.get_blob_upload_by_uuid(blob_upload.upload_id)
    if upload_record is None:
      return None

    upload_record.uncompressed_byte_count = uncompressed_byte_count
    upload_record.piece_hashes = piece_hashes
    upload_record.piece_sha_state = piece_sha_state
    upload_record.storage_metadata = storage_metadata
    upload_record.byte_count = byte_count
    upload_record.chunk_count = chunk_count
    upload_record.sha_state = sha_state
    upload_record.save()
    return BlobUpload.for_upload(upload_record)

  def delete_blob_upload(self, blob_upload):
    """ Deletes a blob upload record. """
    upload_record = model.blob.get_blob_upload_by_uuid(blob_upload.upload_id)
    if upload_record is not None:
      upload_record.delete_instance()

  def commit_blob_upload(self, blob_upload, blob_digest_str, blob_expiration_seconds):
    """ Commits the blob upload into a blob and sets an expiration before that blob will be GCed.
    """
    upload_record = model.blob.get_blob_upload_by_uuid(blob_upload.upload_id)
    if upload_record is None:
      return None

    repository = upload_record.repository
    namespace_name = repository.namespace_user.username
    repo_name = repository.name

    # Create the blob and temporarily tag it.
    location_obj = model.storage.get_image_location_for_name(blob_upload.location_name)
    blob_record = model.blob.store_blob_record_and_temp_link(
      namespace_name, repo_name, blob_digest_str, location_obj.id, blob_upload.byte_count,
      blob_expiration_seconds, blob_upload.uncompressed_byte_count)

    # Delete the blob upload.
    upload_record.delete_instance()
    return Blob.for_image_storage(blob_record,
                                  storage_path=model.storage.get_layer_path(blob_record))

  def mount_blob_into_repository(self, blob, target_repository_ref, expiration_sec):
    """
    Mounts the blob from another repository into the specified target repository, and adds an
    expiration before that blob is automatically GCed. This function is useful during push
    operations if an existing blob from another repository is being pushed. Returns False if
    the mounting fails.
    """
    repo = model.repository.lookup_repository(target_repository_ref._db_id)
    if repo is None:
      return False

    namespace_name = repo.namespace_user.username
    repo_name = repo.name

    storage = model.blob.temp_link_blob(namespace_name, repo_name, blob.digest,
                                        expiration_sec)
    return bool(storage)

  def set_tags_expiration_for_manifest(self, manifest, expiration_sec):
    """
    Sets the expiration on all tags that point to the given manifest to that specified.
256  data/registry_model/shared.py  Normal file
@@ -0,0 +1,256 @@
# pylint: disable=protected-access
import logging

from data import database
from data import model
from data.cache import cache_key
from data.registry_model.datatype import FromDictionaryException
from data.registry_model.datatypes import RepositoryReference, Blob, TorrentInfo, BlobUpload

logger = logging.getLogger(__name__)


class SharedModel:
  """
  SharedModel implements those data model operations for the registry API that are unchanged
  between the old and new data models.
  """
  def lookup_repository(self, namespace_name, repo_name, kind_filter=None):
    """ Looks up and returns a reference to the repository with the given namespace and name,
        or None if none. """
    repo = model.repository.get_repository(namespace_name, repo_name, kind_filter=kind_filter)
    return RepositoryReference.for_repo_obj(repo)

  def is_existing_disabled_namespace(self, namespace_name):
    """ Returns whether the given namespace exists and is disabled. """
    namespace = model.user.get_namespace_user(namespace_name)
    return namespace is not None and not namespace.enabled

  def is_namespace_enabled(self, namespace_name):
    """ Returns whether the given namespace exists and is enabled. """
    namespace = model.user.get_namespace_user(namespace_name)
    return namespace is not None and namespace.enabled

  def get_derived_image_signature(self, derived_image, signer_name):
    """
    Returns the signature associated with the derived image and a specific signer or None if none.
    """
    try:
      derived_storage = database.DerivedStorageForImage.get(id=derived_image._db_id)
    except database.DerivedStorageForImage.DoesNotExist:
      return None

    storage = derived_storage.derivative
    signature_entry = model.storage.lookup_storage_signature(storage, signer_name)
    if signature_entry is None:
      return None

    return signature_entry.signature

  def set_derived_image_signature(self, derived_image, signer_name, signature):
    """
    Sets the calculated signature for the given derived image and signer to that specified.
    """
    try:
      derived_storage = database.DerivedStorageForImage.get(id=derived_image._db_id)
    except database.DerivedStorageForImage.DoesNotExist:
      return None

    storage = derived_storage.derivative
    signature_entry = model.storage.find_or_create_storage_signature(storage, signer_name)
    signature_entry.signature = signature
    signature_entry.uploading = False
    signature_entry.save()

  def delete_derived_image(self, derived_image):
    """
    Deletes a derived image and all of its storage.
    """
    try:
      derived_storage = database.DerivedStorageForImage.get(id=derived_image._db_id)
    except database.DerivedStorageForImage.DoesNotExist:
      return None

    model.image.delete_derived_storage(derived_storage)

  def set_derived_image_size(self, derived_image, compressed_size):
    """
    Sets the compressed size on the given derived image.
    """
    try:
      derived_storage = database.DerivedStorageForImage.get(id=derived_image._db_id)
    except database.DerivedStorageForImage.DoesNotExist:
      return None

    storage_entry = derived_storage.derivative
    storage_entry.image_size = compressed_size
    storage_entry.uploading = False
    storage_entry.save()

  def get_torrent_info(self, blob):
    """
    Returns the torrent information associated with the given blob or None if none.
    """
    try:
      image_storage = database.ImageStorage.get(id=blob._db_id)
    except database.ImageStorage.DoesNotExist:
      return None

    try:
      torrent_info = model.storage.get_torrent_info(image_storage)
    except model.TorrentInfoDoesNotExist:
      return None

    return TorrentInfo.for_torrent_info(torrent_info)

  def set_torrent_info(self, blob, piece_length, pieces):
    """
    Sets the torrent infomation associated with the given blob to that specified.
    """
    try:
      image_storage = database.ImageStorage.get(id=blob._db_id)
    except database.ImageStorage.DoesNotExist:
      return None

    torrent_info = model.storage.save_torrent_info(image_storage, piece_length, pieces)
    return TorrentInfo.for_torrent_info(torrent_info)

  def get_cached_repo_blob(self, model_cache, namespace_name, repo_name, blob_digest):
    """
    Returns the blob in the repository with the given digest if any or None if none.
    Caches the result in the caching system.
    """
    def load_blob():
      repository_ref = self.lookup_repository(namespace_name, repo_name)
      if repository_ref is None:
        return None

      blob_found = self.get_repo_blob_by_digest(repository_ref, blob_digest,
                                                include_placements=True)
      if blob_found is None:
        return None

      return blob_found.asdict()

    blob_cache_key = cache_key.for_repository_blob(namespace_name, repo_name, blob_digest, 2)
    blob_dict = model_cache.retrieve(blob_cache_key, load_blob)

    try:
      return Blob.from_dict(blob_dict) if blob_dict is not None else None
    except FromDictionaryException:
      # The data was stale in some way. Simply reload.
      repository_ref = self.lookup_repository(namespace_name, repo_name)
      if repository_ref is None:
        return None

      return self.get_repo_blob_by_digest(repository_ref, blob_digest, include_placements=True)

  def get_repo_blob_by_digest(self, repository_ref, blob_digest, include_placements=False):
    """
    Returns the blob in the repository with the given digest, if any or None if none. Note that
    there may be multiple records in the same repository for the same blob digest, so the return
    value of this function may change.
    """
    try:
      image_storage = model.blob.get_repository_blob_by_digest(repository_ref._db_id, blob_digest)
    except model.BlobDoesNotExist:
      return None

    assert image_storage.cas_path is not None

    placements = None
    if include_placements:
      placements = list(model.storage.get_storage_locations(image_storage.uuid))

    return Blob.for_image_storage(image_storage,
                                  storage_path=model.storage.get_layer_path(image_storage),
                                  placements=placements)

  def create_blob_upload(self, repository_ref, new_upload_id, location_name, storage_metadata):
    """ Creates a new blob upload and returns a reference. If the blob upload could not be
        created, returns None. """
    repo = model.repository.lookup_repository(repository_ref._db_id)
    if repo is None:
      return None

    try:
      upload_record = model.blob.initiate_upload(repo.namespace_user.username, repo.name,
                                                 new_upload_id, location_name, storage_metadata)
      return BlobUpload.for_upload(upload_record)
    except database.Repository.DoesNotExist:
      return None

  def lookup_blob_upload(self, repository_ref, blob_upload_id):
    """ Looks up the blob upload withn the given ID under the specified repository and returns it
        or None if none.
    """
    upload_record = model.blob.get_blob_upload_by_uuid(blob_upload_id)
    if upload_record is None:
      return None

    return BlobUpload.for_upload(upload_record)

  def update_blob_upload(self, blob_upload, uncompressed_byte_count, piece_hashes, piece_sha_state,
                         storage_metadata, byte_count, chunk_count, sha_state):
    """ Updates the fields of the blob upload to match those given. Returns the updated blob upload
        or None if the record does not exists.
    """
    upload_record = model.blob.get_blob_upload_by_uuid(blob_upload.upload_id)
    if upload_record is None:
      return None

    upload_record.uncompressed_byte_count = uncompressed_byte_count
    upload_record.piece_hashes = piece_hashes
    upload_record.piece_sha_state = piece_sha_state
    upload_record.storage_metadata = storage_metadata
    upload_record.byte_count = byte_count
    upload_record.chunk_count = chunk_count
    upload_record.sha_state = sha_state
    upload_record.save()
    return BlobUpload.for_upload(upload_record)

  def delete_blob_upload(self, blob_upload):
    """ Deletes a blob upload record. """
    upload_record = model.blob.get_blob_upload_by_uuid(blob_upload.upload_id)
    if upload_record is not None:
      upload_record.delete_instance()

  def commit_blob_upload(self, blob_upload, blob_digest_str, blob_expiration_seconds):
    """ Commits the blob upload into a blob and sets an expiration before that blob will be GCed.
    """
    upload_record = model.blob.get_blob_upload_by_uuid(blob_upload.upload_id)
    if upload_record is None:
      return None

    repository = upload_record.repository
    namespace_name = repository.namespace_user.username
    repo_name = repository.name

    # Create the blob and temporarily tag it.
    location_obj = model.storage.get_image_location_for_name(blob_upload.location_name)
    blob_record = model.blob.store_blob_record_and_temp_link(
      namespace_name, repo_name, blob_digest_str, location_obj.id, blob_upload.byte_count,
      blob_expiration_seconds, blob_upload.uncompressed_byte_count)

    # Delete the blob upload.
    upload_record.delete_instance()
    return Blob.for_image_storage(blob_record,
                                  storage_path=model.storage.get_layer_path(blob_record))

  def mount_blob_into_repository(self, blob, target_repository_ref, expiration_sec):
    """
    Mounts the blob from another repository into the specified target repository, and adds an
    expiration before that blob is automatically GCed. This function is useful during push
    operations if an existing blob from another repository is being pushed. Returns False if
    the mounting fails.
    """
    repo = model.repository.lookup_repository(target_repository_ref._db_id)
    if repo is None:
      return False

    namespace_name = repo.namespace_user.username
    repo_name = repo.name

    storage = model.blob.temp_link_blob(namespace_name, repo_name, blob.digest,
                                        expiration_sec)
    return bool(storage)
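The blob-upload helpers on SharedModel are meant to be used as a sequence: create an upload, stream chunks while recording their bookkeeping state, then commit the upload into a Blob. A condensed sketch of that flow; the repository, digest, sizes and the elided chunk-hashing state are placeholders for illustration:

import uuid
from data.registry_model.registry_pre_oci_model import PreOCIModel

registry_model = PreOCIModel()  # mixes in SharedModel

repo_ref = registry_model.lookup_repository('devtable', 'simple')
upload = registry_model.create_blob_upload(repo_ref, str(uuid.uuid4()), 'local_us',
                                           storage_metadata={})
# ... after streaming the data, record the bookkeeping fields (values are placeholders) ...
upload = registry_model.update_blob_upload(upload, uncompressed_byte_count=1024,
                                           piece_hashes=None, piece_sha_state=None,
                                           storage_metadata={}, byte_count=1024,
                                           chunk_count=1, sha_state=None)
blob = registry_model.commit_blob_upload(upload, 'sha256:<digest>', blob_expiration_seconds=300)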
@ -13,7 +13,7 @@ from data import model
|
|||
from data.database import (TagManifestLabelMap, TagManifestToManifest, Manifest, ManifestBlob,
|
||||
ManifestLegacyImage, ManifestLabel, TagManifest, RepositoryTag, Image,
|
||||
TagManifestLabel, TagManifest, TagManifestLabel, DerivedStorageForImage,
|
||||
TorrentInfo, close_db_filter)
|
||||
TorrentInfo, Tag, TagToRepositoryTag, close_db_filter)
|
||||
from data.cache.impl import InMemoryDataModelCache
|
||||
from data.registry_model.registry_pre_oci_model import PreOCIModel
|
||||
from data.registry_model.datatypes import RepositoryReference
|
||||
|
@ -21,9 +21,10 @@ from image.docker.schema1 import DockerSchema1ManifestBuilder
|
|||
|
||||
from test.fixtures import *
|
||||
|
||||
@pytest.fixture()
|
||||
def pre_oci_model(initialized_db):
|
||||
return PreOCIModel()
|
||||
|
||||
@pytest.fixture(params=[PreOCIModel])
|
||||
def registry_model(request, initialized_db):
|
||||
return request.param()
|
||||
|
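Parametrizing the fixture over the model class means this suite can later run unchanged against additional registry data-model implementations simply by extending the params list. A hypothetical sketch (OCIModel is not part of this change):

@pytest.fixture(params=[PreOCIModel, OCIModel])
def registry_model(request, initialized_db):
    # Each test in the module runs once per listed implementation.
    return request.param()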
||||
|
||||
@pytest.mark.parametrize('names, expected', [
|
||||
|
@ -33,10 +34,10 @@ def pre_oci_model(initialized_db):
|
|||
(['latest', 'prod', 'another'], {'latest', 'prod'}),
|
||||
(['foo', 'prod'], {'prod'}),
|
||||
])
|
||||
def test_find_matching_tag(names, expected, pre_oci_model):
|
||||
def test_find_matching_tag(names, expected, registry_model):
|
||||
repo = model.repository.get_repository('devtable', 'simple')
|
||||
repository_ref = RepositoryReference.for_repo_obj(repo)
|
||||
found = pre_oci_model.find_matching_tag(repository_ref, names)
|
||||
found = registry_model.find_matching_tag(repository_ref, names)
|
||||
if expected is None:
|
||||
assert found is None
|
||||
else:
|
||||
|
@ -49,10 +50,10 @@ def test_find_matching_tag(names, expected, pre_oci_model):
|
|||
('devtable', 'simple', {'latest', 'prod'}),
|
||||
('buynlarge', 'orgrepo', {'latest', 'prod'}),
|
||||
])
|
||||
def test_get_most_recent_tag(repo_namespace, repo_name, expected, pre_oci_model):
|
||||
def test_get_most_recent_tag(repo_namespace, repo_name, expected, registry_model):
|
||||
repo = model.repository.get_repository(repo_namespace, repo_name)
|
||||
repository_ref = RepositoryReference.for_repo_obj(repo)
|
||||
found = pre_oci_model.get_most_recent_tag(repository_ref)
|
||||
found = registry_model.get_most_recent_tag(repository_ref)
|
||||
if expected is None:
|
||||
assert found is None
|
||||
else:
|
||||
|
@ -64,8 +65,8 @@ def test_get_most_recent_tag(repo_namespace, repo_name, expected, pre_oci_model)
|
|||
('buynlarge', 'orgrepo', True),
|
||||
('buynlarge', 'unknownrepo', False),
|
||||
])
|
||||
def test_lookup_repository(repo_namespace, repo_name, expected, pre_oci_model):
|
||||
repo_ref = pre_oci_model.lookup_repository(repo_namespace, repo_name)
|
||||
def test_lookup_repository(repo_namespace, repo_name, expected, registry_model):
|
||||
repo_ref = registry_model.lookup_repository(repo_namespace, repo_name)
|
||||
if expected:
|
||||
assert repo_ref
|
||||
else:
|
||||
|
@ -76,22 +77,22 @@ def test_lookup_repository(repo_namespace, repo_name, expected, pre_oci_model):
|
|||
('devtable', 'simple'),
|
||||
('buynlarge', 'orgrepo'),
|
||||
])
|
||||
def test_lookup_manifests(repo_namespace, repo_name, pre_oci_model):
|
||||
def test_lookup_manifests(repo_namespace, repo_name, registry_model):
|
||||
repo = model.repository.get_repository(repo_namespace, repo_name)
|
||||
repository_ref = RepositoryReference.for_repo_obj(repo)
|
||||
found_tag = pre_oci_model.find_matching_tag(repository_ref, ['latest'])
|
||||
found_manifest = pre_oci_model.get_manifest_for_tag(found_tag)
|
||||
found = pre_oci_model.lookup_manifest_by_digest(repository_ref, found_manifest.digest,
|
||||
found_tag = registry_model.find_matching_tag(repository_ref, ['latest'])
|
||||
found_manifest = registry_model.get_manifest_for_tag(found_tag)
|
||||
found = registry_model.lookup_manifest_by_digest(repository_ref, found_manifest.digest,
|
||||
include_legacy_image=True)
|
||||
assert found._db_id == found_manifest._db_id
|
||||
assert found.digest == found_manifest.digest
|
||||
assert found.legacy_image
|
||||
|
||||
|
||||
def test_lookup_unknown_manifest(pre_oci_model):
|
||||
def test_lookup_unknown_manifest(registry_model):
|
||||
repo = model.repository.get_repository('devtable', 'simple')
|
||||
repository_ref = RepositoryReference.for_repo_obj(repo)
|
||||
found = pre_oci_model.lookup_manifest_by_digest(repository_ref, 'sha256:deadbeef')
|
||||
found = registry_model.lookup_manifest_by_digest(repository_ref, 'sha256:deadbeef')
|
||||
assert found is None
|
||||
|
||||
|
||||
|
@ -101,18 +102,18 @@ def test_lookup_unknown_manifest(pre_oci_model):
|
|||
('devtable', 'history'),
|
||||
('buynlarge', 'orgrepo'),
|
||||
])
|
||||
def test_legacy_images(repo_namespace, repo_name, pre_oci_model):
|
||||
repository_ref = pre_oci_model.lookup_repository(repo_namespace, repo_name)
|
||||
legacy_images = pre_oci_model.get_legacy_images(repository_ref)
|
||||
def test_legacy_images(repo_namespace, repo_name, registry_model):
|
||||
repository_ref = registry_model.lookup_repository(repo_namespace, repo_name)
|
||||
legacy_images = registry_model.get_legacy_images(repository_ref)
|
||||
assert len(legacy_images)
|
||||
|
||||
found_tags = set()
|
||||
for image in legacy_images:
|
||||
found_image = pre_oci_model.get_legacy_image(repository_ref, image.docker_image_id,
|
||||
found_image = registry_model.get_legacy_image(repository_ref, image.docker_image_id,
|
||||
include_parents=True)
|
||||
|
||||
with assert_query_count(5 if found_image.parents else 4):
|
||||
found_image = pre_oci_model.get_legacy_image(repository_ref, image.docker_image_id,
|
||||
found_image = registry_model.get_legacy_image(repository_ref, image.docker_image_id,
|
||||
include_parents=True, include_blob=True)
|
||||
assert found_image.docker_image_id == image.docker_image_id
|
||||
assert found_image.parents == image.parents
|
||||
|
@ -130,76 +131,76 @@ def test_legacy_images(repo_namespace, repo_name, pre_oci_model):
|
|||
[p._db_id for p in found_image.parents])
|
||||
|
||||
# Try without parents and ensure it raises an exception.
|
||||
found_image = pre_oci_model.get_legacy_image(repository_ref, image.docker_image_id,
|
||||
found_image = registry_model.get_legacy_image(repository_ref, image.docker_image_id,
|
||||
include_parents=False)
|
||||
with pytest.raises(Exception):
|
||||
assert not found_image.parents
|
||||
|
||||
assert found_tags
|
||||
|
||||
unknown = pre_oci_model.get_legacy_image(repository_ref, 'unknown', include_parents=True)
|
||||
unknown = registry_model.get_legacy_image(repository_ref, 'unknown', include_parents=True)
|
||||
assert unknown is None
|
||||
|
||||
|
||||
def test_manifest_labels(pre_oci_model):
|
||||
def test_manifest_labels(registry_model):
|
||||
repo = model.repository.get_repository('devtable', 'simple')
|
||||
repository_ref = RepositoryReference.for_repo_obj(repo)
|
||||
found_tag = pre_oci_model.find_matching_tag(repository_ref, ['latest'])
|
||||
found_manifest = pre_oci_model.get_manifest_for_tag(found_tag)
|
||||
found_tag = registry_model.find_matching_tag(repository_ref, ['latest'])
|
||||
found_manifest = registry_model.get_manifest_for_tag(found_tag)
|
||||
|
||||
# Create a new label.
|
||||
created = pre_oci_model.create_manifest_label(found_manifest, 'foo', 'bar', 'api')
|
||||
created = registry_model.create_manifest_label(found_manifest, 'foo', 'bar', 'api')
|
||||
assert created.key == 'foo'
|
||||
assert created.value == 'bar'
|
||||
assert created.source_type_name == 'api'
|
||||
assert created.media_type_name == 'text/plain'
|
||||
|
||||
# Ensure we can look it up.
|
||||
assert pre_oci_model.get_manifest_label(found_manifest, created.uuid) == created
|
||||
assert registry_model.get_manifest_label(found_manifest, created.uuid) == created
|
||||
|
||||
# Ensure it is in our list of labels.
|
||||
assert created in pre_oci_model.list_manifest_labels(found_manifest)
|
||||
assert created in pre_oci_model.list_manifest_labels(found_manifest, key_prefix='fo')
|
||||
assert created in registry_model.list_manifest_labels(found_manifest)
|
||||
assert created in registry_model.list_manifest_labels(found_manifest, key_prefix='fo')
|
||||
|
||||
# Ensure it is *not* in our filtered list.
|
||||
assert created not in pre_oci_model.list_manifest_labels(found_manifest, key_prefix='ba')
|
||||
assert created not in registry_model.list_manifest_labels(found_manifest, key_prefix='ba')
|
||||
|
||||
# Delete the label and ensure it is gone.
|
||||
assert pre_oci_model.delete_manifest_label(found_manifest, created.uuid)
|
||||
assert pre_oci_model.get_manifest_label(found_manifest, created.uuid) is None
|
||||
assert created not in pre_oci_model.list_manifest_labels(found_manifest)
|
||||
assert registry_model.delete_manifest_label(found_manifest, created.uuid)
|
||||
assert registry_model.get_manifest_label(found_manifest, created.uuid) is None
|
||||
assert created not in registry_model.list_manifest_labels(found_manifest)
|
||||
|
||||
|
||||
def test_manifest_label_handlers(pre_oci_model):
|
||||
def test_manifest_label_handlers(registry_model):
|
||||
repo = model.repository.get_repository('devtable', 'simple')
|
||||
repository_ref = RepositoryReference.for_repo_obj(repo)
|
||||
found_tag = pre_oci_model.get_repo_tag(repository_ref, 'latest')
|
||||
found_manifest = pre_oci_model.get_manifest_for_tag(found_tag)
|
||||
found_tag = registry_model.get_repo_tag(repository_ref, 'latest')
|
||||
found_manifest = registry_model.get_manifest_for_tag(found_tag)
|
||||
|
||||
# Ensure the tag has no expiration.
|
||||
assert found_tag.lifetime_end_ts is None
|
||||
|
||||
# Create a new label with an expires-after.
|
||||
pre_oci_model.create_manifest_label(found_manifest, 'quay.expires-after', '2h', 'api')
|
||||
registry_model.create_manifest_label(found_manifest, 'quay.expires-after', '2h', 'api')
|
||||
|
||||
# Ensure the tag now has an expiration.
|
||||
updated_tag = pre_oci_model.get_repo_tag(repository_ref, 'latest')
|
||||
updated_tag = registry_model.get_repo_tag(repository_ref, 'latest')
|
||||
assert updated_tag.lifetime_end_ts == (updated_tag.lifetime_start_ts + (60 * 60 * 2))
|
||||
|
||||
|
||||
def test_batch_labels(pre_oci_model):
|
||||
def test_batch_labels(registry_model):
|
||||
repo = model.repository.get_repository('devtable', 'history')
|
||||
repository_ref = RepositoryReference.for_repo_obj(repo)
|
||||
found_tag = pre_oci_model.find_matching_tag(repository_ref, ['latest'])
|
||||
found_manifest = pre_oci_model.get_manifest_for_tag(found_tag)
|
||||
found_tag = registry_model.find_matching_tag(repository_ref, ['latest'])
|
||||
found_manifest = registry_model.get_manifest_for_tag(found_tag)
|
||||
|
||||
with pre_oci_model.batch_create_manifest_labels(found_manifest) as add_label:
|
||||
with registry_model.batch_create_manifest_labels(found_manifest) as add_label:
|
||||
add_label('foo', '1', 'api')
|
||||
add_label('bar', '2', 'api')
|
||||
add_label('baz', '3', 'api')
|
||||
|
||||
# Ensure we can look them up.
|
||||
assert len(pre_oci_model.list_manifest_labels(found_manifest)) == 3
|
||||
assert len(registry_model.list_manifest_labels(found_manifest)) == 3
|
||||
|
||||
|
||||
@pytest.mark.parametrize('repo_namespace, repo_name', [
|
||||
|
@ -208,38 +209,38 @@ def test_batch_labels(pre_oci_model):
|
|||
('devtable', 'history'),
|
||||
('buynlarge', 'orgrepo'),
|
||||
])
|
||||
def test_repository_tags(repo_namespace, repo_name, pre_oci_model):
|
||||
repository_ref = pre_oci_model.lookup_repository(repo_namespace, repo_name)
|
||||
def test_repository_tags(repo_namespace, repo_name, registry_model):
|
||||
repository_ref = registry_model.lookup_repository(repo_namespace, repo_name)
|
||||
|
||||
with assert_query_count(1):
|
||||
tags = pre_oci_model.list_repository_tags(repository_ref, include_legacy_images=True)
|
||||
tags = registry_model.list_repository_tags(repository_ref, include_legacy_images=True)
|
||||
assert len(tags)
|
||||
|
||||
for tag in tags:
|
||||
with assert_query_count(2):
|
||||
found_tag = pre_oci_model.get_repo_tag(repository_ref, tag.name, include_legacy_image=True)
|
||||
found_tag = registry_model.get_repo_tag(repository_ref, tag.name, include_legacy_image=True)
|
||||
assert found_tag == tag
|
||||
|
||||
if found_tag.legacy_image is None:
|
||||
continue
|
||||
|
||||
with assert_query_count(2):
|
||||
found_image = pre_oci_model.get_legacy_image(repository_ref,
|
||||
found_image = registry_model.get_legacy_image(repository_ref,
|
||||
found_tag.legacy_image.docker_image_id)
|
||||
assert found_image == found_tag.legacy_image
|
||||
|
||||
|
||||
def test_repository_tag_history(pre_oci_model):
|
||||
repository_ref = pre_oci_model.lookup_repository('devtable', 'history')
|
||||
def test_repository_tag_history(registry_model):
|
||||
repository_ref = registry_model.lookup_repository('devtable', 'history')
|
||||
|
||||
with assert_query_count(2):
|
||||
history, has_more = pre_oci_model.list_repository_tag_history(repository_ref)
|
||||
history, has_more = registry_model.list_repository_tag_history(repository_ref)
|
||||
assert not has_more
|
||||
assert len(history) == 2
|
||||
|
||||
# Ensure the latest tag is marked expired, since there is an expired one.
|
||||
with assert_query_count(1):
|
||||
assert pre_oci_model.has_expired_tag(repository_ref, 'latest')
|
||||
assert registry_model.has_expired_tag(repository_ref, 'latest')
|
||||
|
||||
|
||||
@pytest.mark.parametrize('repo_namespace, repo_name', [
|
||||
|
@ -252,35 +253,35 @@ def test_repository_tag_history(pre_oci_model):
|
|||
False,
|
||||
True,
|
||||
])
|
||||
def test_delete_tags(repo_namespace, repo_name, via_manifest, pre_oci_model):
|
||||
repository_ref = pre_oci_model.lookup_repository(repo_namespace, repo_name)
|
||||
tags = pre_oci_model.list_repository_tags(repository_ref)
|
||||
def test_delete_tags(repo_namespace, repo_name, via_manifest, registry_model):
|
||||
repository_ref = registry_model.lookup_repository(repo_namespace, repo_name)
|
||||
tags = registry_model.list_repository_tags(repository_ref)
|
||||
assert len(tags)
|
||||
|
||||
# Save history before the deletions.
|
||||
previous_history, _ = pre_oci_model.list_repository_tag_history(repository_ref, size=1000)
|
||||
previous_history, _ = registry_model.list_repository_tag_history(repository_ref, size=1000)
|
||||
assert len(previous_history) >= len(tags)
|
||||
|
||||
# Delete every tag in the repository.
|
||||
for tag in tags:
|
||||
if via_manifest:
|
||||
assert pre_oci_model.delete_tag(repository_ref, tag.name)
|
||||
assert registry_model.delete_tag(repository_ref, tag.name)
|
||||
else:
|
||||
manifest = pre_oci_model.get_manifest_for_tag(tag)
|
||||
manifest = registry_model.get_manifest_for_tag(tag)
|
||||
if manifest is not None:
|
||||
assert pre_oci_model.delete_tags_for_manifest(manifest)
|
||||
assert registry_model.delete_tags_for_manifest(manifest)
|
||||
|
||||
# Make sure the tag is no longer found.
|
||||
with assert_query_count(1):
|
||||
found_tag = pre_oci_model.get_repo_tag(repository_ref, tag.name, include_legacy_image=True)
|
||||
found_tag = registry_model.get_repo_tag(repository_ref, tag.name, include_legacy_image=True)
|
||||
assert found_tag is None
|
||||
|
||||
# Ensure all tags have been deleted.
|
||||
tags = pre_oci_model.list_repository_tags(repository_ref)
|
||||
tags = registry_model.list_repository_tags(repository_ref)
|
||||
assert not len(tags)
|
||||
|
||||
# Ensure that the tags all live in history.
|
||||
history, _ = pre_oci_model.list_repository_tag_history(repository_ref, size=1000)
|
||||
history, _ = registry_model.list_repository_tag_history(repository_ref, size=1000)
|
||||
assert len(history) == len(previous_history)
|
||||
|
||||
|
||||
|
@ -288,12 +289,12 @@ def test_delete_tags(repo_namespace, repo_name, via_manifest, pre_oci_model):
|
|||
True,
|
||||
False,
|
||||
])
|
||||
def test_retarget_tag_history(use_manifest, pre_oci_model):
|
||||
repository_ref = pre_oci_model.lookup_repository('devtable', 'history')
|
||||
history, _ = pre_oci_model.list_repository_tag_history(repository_ref)
|
||||
def test_retarget_tag_history(use_manifest, registry_model):
|
||||
repository_ref = registry_model.lookup_repository('devtable', 'history')
|
||||
history, _ = registry_model.list_repository_tag_history(repository_ref)
|
||||
|
||||
if use_manifest:
|
||||
manifest_or_legacy_image = pre_oci_model.lookup_manifest_by_digest(repository_ref,
|
||||
manifest_or_legacy_image = registry_model.lookup_manifest_by_digest(repository_ref,
|
||||
history[1].manifest_digest,
|
||||
allow_dead=True)
|
||||
else:
|
||||
|
@ -301,7 +302,7 @@ def test_retarget_tag_history(use_manifest, pre_oci_model):
|
|||
|
||||
# Retarget the tag.
|
||||
assert manifest_or_legacy_image
|
||||
updated_tag = pre_oci_model.retarget_tag(repository_ref, 'latest', manifest_or_legacy_image,
|
||||
updated_tag = registry_model.retarget_tag(repository_ref, 'latest', manifest_or_legacy_image,
|
||||
is_reversion=True)
|
||||
|
||||
# Ensure the tag has changed targets.
|
||||
|
@ -311,39 +312,39 @@ def test_retarget_tag_history(use_manifest, pre_oci_model):
|
|||
assert updated_tag.legacy_image == manifest_or_legacy_image
|
||||
|
||||
# Ensure history has been updated.
|
||||
new_history, _ = pre_oci_model.list_repository_tag_history(repository_ref)
|
||||
new_history, _ = registry_model.list_repository_tag_history(repository_ref)
|
||||
assert len(new_history) == len(history) + 1
|
||||
|
||||
|
||||
def test_retarget_tag(pre_oci_model):
|
||||
repository_ref = pre_oci_model.lookup_repository('devtable', 'complex')
|
||||
history, _ = pre_oci_model.list_repository_tag_history(repository_ref)
|
||||
def test_retarget_tag(registry_model):
|
||||
repository_ref = registry_model.lookup_repository('devtable', 'complex')
|
||||
history, _ = registry_model.list_repository_tag_history(repository_ref)
|
||||
|
||||
prod_tag = pre_oci_model.get_repo_tag(repository_ref, 'prod', include_legacy_image=True)
|
||||
prod_tag = registry_model.get_repo_tag(repository_ref, 'prod', include_legacy_image=True)
|
||||
|
||||
# Retarget the tag.
|
||||
updated_tag = pre_oci_model.retarget_tag(repository_ref, 'latest', prod_tag.legacy_image)
|
||||
updated_tag = registry_model.retarget_tag(repository_ref, 'latest', prod_tag.legacy_image)
|
||||
|
||||
# Ensure the tag has changed targets.
|
||||
assert updated_tag.legacy_image == prod_tag.legacy_image
|
||||
|
||||
# Ensure history has been updated.
|
||||
new_history, _ = pre_oci_model.list_repository_tag_history(repository_ref)
|
||||
new_history, _ = registry_model.list_repository_tag_history(repository_ref)
|
||||
assert len(new_history) == len(history) + 1
|
||||
|
||||
|
||||
def test_change_repository_tag_expiration(pre_oci_model):
|
||||
repository_ref = pre_oci_model.lookup_repository('devtable', 'simple')
|
||||
tag = pre_oci_model.get_repo_tag(repository_ref, 'latest')
|
||||
def test_change_repository_tag_expiration(registry_model):
|
||||
repository_ref = registry_model.lookup_repository('devtable', 'simple')
|
||||
tag = registry_model.get_repo_tag(repository_ref, 'latest')
|
||||
assert tag.lifetime_end_ts is None
|
||||
|
||||
new_datetime = datetime.utcnow() + timedelta(days=2)
|
||||
previous, okay = pre_oci_model.change_repository_tag_expiration(tag, new_datetime)
|
||||
previous, okay = registry_model.change_repository_tag_expiration(tag, new_datetime)
|
||||
|
||||
assert okay
|
||||
assert previous is None
|
||||
|
||||
tag = pre_oci_model.get_repo_tag(repository_ref, 'latest')
|
||||
tag = registry_model.get_repo_tag(repository_ref, 'latest')
|
||||
assert tag.lifetime_end_ts is not None
|
||||
|
||||
|
||||
|
@ -355,31 +356,33 @@ def test_change_repository_tag_expiration(pre_oci_model):
|
|||
('devtable', 'gargantuan', ['v2.0', 'v3.0', 'v4.0', 'v5.0', 'v6.0']),
|
||||
])
|
||||
def test_get_legacy_images_owned_by_tag(repo_namespace, repo_name, expected_non_empty,
|
||||
pre_oci_model):
|
||||
repository_ref = pre_oci_model.lookup_repository(repo_namespace, repo_name)
|
||||
tags = pre_oci_model.list_repository_tags(repository_ref)
|
||||
registry_model):
|
||||
repository_ref = registry_model.lookup_repository(repo_namespace, repo_name)
|
||||
tags = registry_model.list_repository_tags(repository_ref)
|
||||
assert len(tags)
|
||||
|
||||
non_empty = set()
|
||||
for tag in tags:
|
||||
if pre_oci_model.get_legacy_images_owned_by_tag(tag):
|
||||
if registry_model.get_legacy_images_owned_by_tag(tag):
|
||||
non_empty.add(tag.name)
|
||||
|
||||
assert non_empty == set(expected_non_empty)
|
||||
|
||||
|
||||
def test_get_security_status(pre_oci_model):
|
||||
repository_ref = pre_oci_model.lookup_repository('devtable', 'simple')
|
||||
tags = pre_oci_model.list_repository_tags(repository_ref, include_legacy_images=True)
|
||||
def test_get_security_status(registry_model):
|
||||
repository_ref = registry_model.lookup_repository('devtable', 'simple')
|
||||
tags = registry_model.list_repository_tags(repository_ref, include_legacy_images=True)
|
||||
assert len(tags)
|
||||
|
||||
for tag in tags:
|
||||
assert pre_oci_model.get_security_status(tag.legacy_image)
|
||||
assert registry_model.get_security_status(tag.legacy_image)
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
def clear_rows(initialized_db):
|
||||
# Remove all new-style rows so we can backfill.
|
||||
TagToRepositoryTag.delete().execute()
|
||||
Tag.delete().execute()
|
||||
TagManifestLabelMap.delete().execute()
|
||||
ManifestLabel.delete().execute()
|
||||
ManifestBlob.delete().execute()
|
||||
|
@ -396,24 +399,24 @@ def clear_rows(initialized_db):
|
|||
('devtable', 'history'),
|
||||
('buynlarge', 'orgrepo'),
|
||||
])
|
||||
def test_backfill_manifest_for_tag(repo_namespace, repo_name, clear_rows, pre_oci_model):
|
||||
repository_ref = pre_oci_model.lookup_repository(repo_namespace, repo_name)
|
||||
tags = pre_oci_model.list_repository_tags(repository_ref)
|
||||
def test_backfill_manifest_for_tag(repo_namespace, repo_name, clear_rows, registry_model):
|
||||
repository_ref = registry_model.lookup_repository(repo_namespace, repo_name)
|
||||
tags = registry_model.list_repository_tags(repository_ref)
|
||||
assert tags
|
||||
|
||||
for tag in tags:
|
||||
assert not tag.manifest_digest
|
||||
assert pre_oci_model.backfill_manifest_for_tag(tag)
|
||||
assert registry_model.backfill_manifest_for_tag(tag)
|
||||
|
||||
tags = pre_oci_model.list_repository_tags(repository_ref, include_legacy_images=True)
|
||||
tags = registry_model.list_repository_tags(repository_ref, include_legacy_images=True)
|
||||
assert tags
|
||||
for tag in tags:
|
||||
assert tag.manifest_digest
|
||||
|
||||
manifest = pre_oci_model.get_manifest_for_tag(tag)
|
||||
manifest = registry_model.get_manifest_for_tag(tag)
|
||||
assert manifest
|
||||
|
||||
legacy_image = pre_oci_model.get_legacy_image(repository_ref, tag.legacy_image.docker_image_id,
|
||||
legacy_image = registry_model.get_legacy_image(repository_ref, tag.legacy_image.docker_image_id,
|
||||
include_parents=True)
|
||||
|
||||
parsed_manifest = manifest.get_parsed_manifest()
|
||||
|
@ -427,19 +430,19 @@ def test_backfill_manifest_for_tag(repo_namespace, repo_name, clear_rows, pre_oc
|
|||
('devtable', 'history'),
|
||||
('buynlarge', 'orgrepo'),
|
||||
])
|
||||
def test_backfill_manifest_on_lookup(repo_namespace, repo_name, clear_rows, pre_oci_model):
|
||||
repository_ref = pre_oci_model.lookup_repository(repo_namespace, repo_name)
|
||||
tags = pre_oci_model.list_repository_tags(repository_ref)
|
||||
def test_backfill_manifest_on_lookup(repo_namespace, repo_name, clear_rows, registry_model):
|
||||
repository_ref = registry_model.lookup_repository(repo_namespace, repo_name)
|
||||
tags = registry_model.list_repository_tags(repository_ref)
|
||||
assert tags
|
||||
|
||||
for tag in tags:
|
||||
assert not tag.manifest_digest
|
||||
assert not pre_oci_model.get_manifest_for_tag(tag)
|
||||
assert not registry_model.get_manifest_for_tag(tag)
|
||||
|
||||
manifest = pre_oci_model.get_manifest_for_tag(tag, backfill_if_necessary=True)
|
||||
manifest = registry_model.get_manifest_for_tag(tag, backfill_if_necessary=True)
|
||||
assert manifest
|
||||
|
||||
updated_tag = pre_oci_model.get_repo_tag(repository_ref, tag.name)
|
||||
updated_tag = registry_model.get_repo_tag(repository_ref, tag.name)
|
||||
assert updated_tag.manifest_digest == manifest.digest
|
||||
|
||||
|
||||
|
@ -449,8 +452,8 @@ def test_backfill_manifest_on_lookup(repo_namespace, repo_name, clear_rows, pre_
|
|||
|
||||
('disabled', False),
|
||||
])
|
||||
def test_is_namespace_enabled(namespace, expect_enabled, pre_oci_model):
|
||||
assert pre_oci_model.is_namespace_enabled(namespace) == expect_enabled
|
||||
def test_is_namespace_enabled(namespace, expect_enabled, registry_model):
|
||||
assert registry_model.is_namespace_enabled(namespace) == expect_enabled
|
||||
|
||||
|
||||
@pytest.mark.parametrize('repo_namespace, repo_name', [
|
||||
|
@ -459,20 +462,20 @@ def test_is_namespace_enabled(namespace, expect_enabled, pre_oci_model):
|
|||
('devtable', 'history'),
|
||||
('buynlarge', 'orgrepo'),
|
||||
])
|
||||
def test_list_manifest_layers(repo_namespace, repo_name, pre_oci_model):
|
||||
repository_ref = pre_oci_model.lookup_repository(repo_namespace, repo_name)
|
||||
tags = pre_oci_model.list_repository_tags(repository_ref)
|
||||
def test_list_manifest_layers(repo_namespace, repo_name, registry_model):
|
||||
repository_ref = registry_model.lookup_repository(repo_namespace, repo_name)
|
||||
tags = registry_model.list_repository_tags(repository_ref)
|
||||
assert tags
|
||||
|
||||
for tag in tags:
|
||||
manifest = pre_oci_model.get_manifest_for_tag(tag)
|
||||
manifest = registry_model.get_manifest_for_tag(tag)
|
||||
assert manifest
|
||||
|
||||
with assert_query_count(4):
|
||||
layers = pre_oci_model.list_manifest_layers(manifest)
|
||||
layers = registry_model.list_manifest_layers(manifest)
|
||||
assert layers
|
||||
|
||||
layers = pre_oci_model.list_manifest_layers(manifest, include_placements=True)
|
||||
layers = registry_model.list_manifest_layers(manifest, include_placements=True)
|
||||
assert layers
|
||||
|
||||
parsed_layers = list(manifest.get_parsed_manifest().layers)
|
||||
|
@ -484,123 +487,123 @@ def test_list_manifest_layers(repo_namespace, repo_name, pre_oci_model):
|
|||
assert manifest_layer.blob.storage_path
|
||||
assert manifest_layer.blob.placements
|
||||
|
||||
repo_blob = pre_oci_model.get_repo_blob_by_digest(repository_ref, manifest_layer.blob.digest)
|
||||
repo_blob = registry_model.get_repo_blob_by_digest(repository_ref, manifest_layer.blob.digest)
|
||||
assert repo_blob.digest == manifest_layer.blob.digest
|
||||
|
||||
assert manifest_layer.estimated_size(1) is not None
|
||||
|
||||
|
||||
def test_derived_image(pre_oci_model):
|
||||
def test_derived_image(registry_model):
|
||||
# Clear all existing derived storage.
|
||||
DerivedStorageForImage.delete().execute()
|
||||
|
||||
repository_ref = pre_oci_model.lookup_repository('devtable', 'simple')
|
||||
tag = pre_oci_model.get_repo_tag(repository_ref, 'latest')
|
||||
manifest = pre_oci_model.get_manifest_for_tag(tag)
|
||||
repository_ref = registry_model.lookup_repository('devtable', 'simple')
|
||||
tag = registry_model.get_repo_tag(repository_ref, 'latest')
|
||||
manifest = registry_model.get_manifest_for_tag(tag)
|
||||
|
||||
# Ensure the squashed image doesn't exist.
|
||||
assert pre_oci_model.lookup_derived_image(manifest, 'squash', {}) is None
|
||||
assert registry_model.lookup_derived_image(manifest, 'squash', {}) is None
|
||||
|
||||
# Create a new one.
|
||||
squashed = pre_oci_model.lookup_or_create_derived_image(manifest, 'squash', 'local_us', {})
|
||||
assert pre_oci_model.lookup_or_create_derived_image(manifest, 'squash', 'local_us', {}) == squashed
|
||||
squashed = registry_model.lookup_or_create_derived_image(manifest, 'squash', 'local_us', {})
|
||||
assert registry_model.lookup_or_create_derived_image(manifest, 'squash', 'local_us', {}) == squashed
|
||||
assert squashed.unique_id
|
||||
|
||||
# Check and set the size.
|
||||
assert squashed.blob.compressed_size is None
|
||||
pre_oci_model.set_derived_image_size(squashed, 1234)
|
||||
assert pre_oci_model.lookup_derived_image(manifest, 'squash', {}).blob.compressed_size == 1234
|
||||
assert pre_oci_model.lookup_derived_image(manifest, 'squash', {}).unique_id == squashed.unique_id
|
||||
registry_model.set_derived_image_size(squashed, 1234)
|
||||
assert registry_model.lookup_derived_image(manifest, 'squash', {}).blob.compressed_size == 1234
|
||||
assert registry_model.lookup_derived_image(manifest, 'squash', {}).unique_id == squashed.unique_id
|
||||
|
||||
# Ensure it's returned now.
|
||||
assert pre_oci_model.lookup_derived_image(manifest, 'squash', {}) == squashed
|
||||
assert registry_model.lookup_derived_image(manifest, 'squash', {}) == squashed
|
||||
|
||||
# Ensure different metadata results in a different derived image.
|
||||
assert pre_oci_model.lookup_derived_image(manifest, 'squash', {'foo': 'bar'}) is None
|
||||
assert registry_model.lookup_derived_image(manifest, 'squash', {'foo': 'bar'}) is None
|
||||
|
||||
squashed_foo = pre_oci_model.lookup_or_create_derived_image(manifest, 'squash', 'local_us',
|
||||
squashed_foo = registry_model.lookup_or_create_derived_image(manifest, 'squash', 'local_us',
|
||||
{'foo': 'bar'})
|
||||
assert squashed_foo != squashed
|
||||
assert pre_oci_model.lookup_derived_image(manifest, 'squash', {'foo': 'bar'}) == squashed_foo
|
||||
assert registry_model.lookup_derived_image(manifest, 'squash', {'foo': 'bar'}) == squashed_foo
|
||||
|
||||
assert squashed.unique_id != squashed_foo.unique_id
|
||||
|
||||
# Lookup with placements.
|
||||
squashed = pre_oci_model.lookup_or_create_derived_image(manifest, 'squash', 'local_us', {},
|
||||
squashed = registry_model.lookup_or_create_derived_image(manifest, 'squash', 'local_us', {},
|
||||
include_placements=True)
|
||||
assert squashed.blob.placements
|
||||
|
||||
# Delete the derived image.
|
||||
pre_oci_model.delete_derived_image(squashed)
|
||||
assert pre_oci_model.lookup_derived_image(manifest, 'squash', {}) is None
|
||||
registry_model.delete_derived_image(squashed)
|
||||
assert registry_model.lookup_derived_image(manifest, 'squash', {}) is None
|
||||
|
||||
|
||||
def test_derived_image_signatures(pre_oci_model):
|
||||
repository_ref = pre_oci_model.lookup_repository('devtable', 'simple')
|
||||
tag = pre_oci_model.get_repo_tag(repository_ref, 'latest')
|
||||
manifest = pre_oci_model.get_manifest_for_tag(tag)
|
||||
def test_derived_image_signatures(registry_model):
|
||||
repository_ref = registry_model.lookup_repository('devtable', 'simple')
|
||||
tag = registry_model.get_repo_tag(repository_ref, 'latest')
|
||||
manifest = registry_model.get_manifest_for_tag(tag)
|
||||
|
||||
derived = pre_oci_model.lookup_derived_image(manifest, 'squash', {})
|
||||
derived = registry_model.lookup_derived_image(manifest, 'squash', {})
|
||||
assert derived
|
||||
|
||||
signature = pre_oci_model.get_derived_image_signature(derived, 'gpg2')
|
||||
signature = registry_model.get_derived_image_signature(derived, 'gpg2')
|
||||
assert signature is None
|
||||
|
||||
pre_oci_model.set_derived_image_signature(derived, 'gpg2', 'foo')
|
||||
assert pre_oci_model.get_derived_image_signature(derived, 'gpg2') == 'foo'
|
||||
registry_model.set_derived_image_signature(derived, 'gpg2', 'foo')
|
||||
assert registry_model.get_derived_image_signature(derived, 'gpg2') == 'foo'
|
||||
|
||||
|
||||
def test_torrent_info(pre_oci_model):
|
||||
def test_torrent_info(registry_model):
|
||||
# Remove all existing info.
|
||||
TorrentInfo.delete().execute()
|
||||
|
||||
repository_ref = pre_oci_model.lookup_repository('devtable', 'simple')
|
||||
tag = pre_oci_model.get_repo_tag(repository_ref, 'latest')
|
||||
manifest = pre_oci_model.get_manifest_for_tag(tag)
|
||||
repository_ref = registry_model.lookup_repository('devtable', 'simple')
|
||||
tag = registry_model.get_repo_tag(repository_ref, 'latest')
|
||||
manifest = registry_model.get_manifest_for_tag(tag)
|
||||
|
||||
layers = pre_oci_model.list_manifest_layers(manifest)
|
||||
layers = registry_model.list_manifest_layers(manifest)
|
||||
assert layers
|
||||
|
||||
assert pre_oci_model.get_torrent_info(layers[0].blob) is None
|
||||
pre_oci_model.set_torrent_info(layers[0].blob, 2, 'foo')
|
||||
assert registry_model.get_torrent_info(layers[0].blob) is None
|
||||
registry_model.set_torrent_info(layers[0].blob, 2, 'foo')
|
||||
|
||||
# Set it again exactly, which should be a no-op.
|
||||
pre_oci_model.set_torrent_info(layers[0].blob, 2, 'foo')
|
||||
registry_model.set_torrent_info(layers[0].blob, 2, 'foo')
|
||||
|
||||
# Check the information we've set.
|
||||
torrent_info = pre_oci_model.get_torrent_info(layers[0].blob)
|
||||
torrent_info = registry_model.get_torrent_info(layers[0].blob)
|
||||
assert torrent_info is not None
|
||||
assert torrent_info.piece_length == 2
|
||||
assert torrent_info.pieces == 'foo'
|
||||
|
||||
# Try setting it again. Nothing should happen.
|
||||
pre_oci_model.set_torrent_info(layers[0].blob, 3, 'bar')
|
||||
registry_model.set_torrent_info(layers[0].blob, 3, 'bar')
|
||||
|
||||
torrent_info = pre_oci_model.get_torrent_info(layers[0].blob)
|
||||
torrent_info = registry_model.get_torrent_info(layers[0].blob)
|
||||
assert torrent_info is not None
|
||||
assert torrent_info.piece_length == 2
|
||||
assert torrent_info.pieces == 'foo'
|
||||
|
||||
|
||||
def test_blob_uploads(pre_oci_model):
|
||||
repository_ref = pre_oci_model.lookup_repository('devtable', 'simple')
|
||||
def test_blob_uploads(registry_model):
|
||||
repository_ref = registry_model.lookup_repository('devtable', 'simple')
|
||||
|
||||
blob_upload = pre_oci_model.create_blob_upload(repository_ref, str(uuid.uuid4()),
|
||||
blob_upload = registry_model.create_blob_upload(repository_ref, str(uuid.uuid4()),
|
||||
'local_us', {'some': 'metadata'})
|
||||
assert blob_upload
|
||||
assert blob_upload.storage_metadata == {'some': 'metadata'}
|
||||
assert blob_upload.location_name == 'local_us'
|
||||
|
||||
# Ensure we can find the blob upload.
|
||||
assert pre_oci_model.lookup_blob_upload(repository_ref, blob_upload.upload_id) == blob_upload
|
||||
assert registry_model.lookup_blob_upload(repository_ref, blob_upload.upload_id) == blob_upload
|
||||
|
||||
# Update and ensure the changes are saved.
|
||||
assert pre_oci_model.update_blob_upload(blob_upload, 1, 'the-pieces_hash',
|
||||
assert registry_model.update_blob_upload(blob_upload, 1, 'the-pieces_hash',
|
||||
blob_upload.piece_sha_state,
|
||||
{'new': 'metadata'}, 2, 3,
|
||||
blob_upload.sha_state)
|
||||
|
||||
updated = pre_oci_model.lookup_blob_upload(repository_ref, blob_upload.upload_id)
|
||||
updated = registry_model.lookup_blob_upload(repository_ref, blob_upload.upload_id)
|
||||
assert updated
|
||||
assert updated.uncompressed_byte_count == 1
|
||||
assert updated.piece_hashes == 'the-pieces_hash'
|
||||
|
@ -609,45 +612,45 @@ def test_blob_uploads(pre_oci_model):
|
|||
assert updated.chunk_count == 3
|
||||
|
||||
# Delete the upload.
|
||||
pre_oci_model.delete_blob_upload(blob_upload)
|
||||
registry_model.delete_blob_upload(blob_upload)
|
||||
|
||||
# Ensure it can no longer be found.
|
||||
assert not pre_oci_model.lookup_blob_upload(repository_ref, blob_upload.upload_id)
|
||||
assert not registry_model.lookup_blob_upload(repository_ref, blob_upload.upload_id)
|
||||
|
||||
|
||||
def test_commit_blob_upload(pre_oci_model):
|
||||
repository_ref = pre_oci_model.lookup_repository('devtable', 'simple')
|
||||
blob_upload = pre_oci_model.create_blob_upload(repository_ref, str(uuid.uuid4()),
|
||||
def test_commit_blob_upload(registry_model):
|
||||
repository_ref = registry_model.lookup_repository('devtable', 'simple')
|
||||
blob_upload = registry_model.create_blob_upload(repository_ref, str(uuid.uuid4()),
|
||||
'local_us', {'some': 'metadata'})
|
||||
|
||||
# Commit the blob upload and make sure it is written as a blob.
|
||||
digest = 'sha256:' + hashlib.sha256('hello').hexdigest()
|
||||
blob = pre_oci_model.commit_blob_upload(blob_upload, digest, 60)
|
||||
blob = registry_model.commit_blob_upload(blob_upload, digest, 60)
|
||||
assert blob.digest == digest
|
||||
|
||||
# Ensure the upload can no longer be found.
|
||||
assert not pre_oci_model.lookup_blob_upload(repository_ref, blob_upload.upload_id)
|
||||
assert not registry_model.lookup_blob_upload(repository_ref, blob_upload.upload_id)
|
||||
|
||||
|
||||
def test_mount_blob_into_repository(pre_oci_model):
|
||||
repository_ref = pre_oci_model.lookup_repository('devtable', 'simple')
|
||||
latest_tag = pre_oci_model.get_repo_tag(repository_ref, 'latest')
|
||||
manifest = pre_oci_model.get_manifest_for_tag(latest_tag)
|
||||
def test_mount_blob_into_repository(registry_model):
|
||||
repository_ref = registry_model.lookup_repository('devtable', 'simple')
|
||||
latest_tag = registry_model.get_repo_tag(repository_ref, 'latest')
|
||||
manifest = registry_model.get_manifest_for_tag(latest_tag)
|
||||
|
||||
target_repository_ref = pre_oci_model.lookup_repository('devtable', 'complex')
|
||||
target_repository_ref = registry_model.lookup_repository('devtable', 'complex')
|
||||
|
||||
layers = pre_oci_model.list_manifest_layers(manifest, include_placements=True)
|
||||
layers = registry_model.list_manifest_layers(manifest, include_placements=True)
|
||||
assert layers
|
||||
|
||||
for layer in layers:
|
||||
# Ensure the blob doesn't exist under the repository.
|
||||
assert not pre_oci_model.get_repo_blob_by_digest(target_repository_ref, layer.blob.digest)
|
||||
assert not registry_model.get_repo_blob_by_digest(target_repository_ref, layer.blob.digest)
|
||||
|
||||
# Mount the blob into the repository.
|
||||
assert pre_oci_model.mount_blob_into_repository(layer.blob, target_repository_ref, 60)
|
||||
assert registry_model.mount_blob_into_repository(layer.blob, target_repository_ref, 60)
|
||||
|
||||
# Ensure it now exists.
|
||||
found = pre_oci_model.get_repo_blob_by_digest(target_repository_ref, layer.blob.digest)
|
||||
found = registry_model.get_repo_blob_by_digest(target_repository_ref, layer.blob.digest)
|
||||
assert found == layer.blob
|
||||
|
||||
|
||||
|
@ -655,20 +658,20 @@ class SomeException(Exception):
|
|||
pass
|
||||
|
||||
|
||||
def test_get_cached_repo_blob(pre_oci_model):
|
||||
def test_get_cached_repo_blob(registry_model):
|
||||
model_cache = InMemoryDataModelCache()
|
||||
|
||||
repository_ref = pre_oci_model.lookup_repository('devtable', 'simple')
|
||||
latest_tag = pre_oci_model.get_repo_tag(repository_ref, 'latest')
|
||||
manifest = pre_oci_model.get_manifest_for_tag(latest_tag)
|
||||
repository_ref = registry_model.lookup_repository('devtable', 'simple')
|
||||
latest_tag = registry_model.get_repo_tag(repository_ref, 'latest')
|
||||
manifest = registry_model.get_manifest_for_tag(latest_tag)
|
||||
|
||||
layers = pre_oci_model.list_manifest_layers(manifest, include_placements=True)
|
||||
layers = registry_model.list_manifest_layers(manifest, include_placements=True)
|
||||
assert layers
|
||||
|
||||
blob = layers[0].blob
|
||||
|
||||
# Load a blob to add it to the cache.
|
||||
found = pre_oci_model.get_cached_repo_blob(model_cache, 'devtable', 'simple', blob.digest)
|
||||
found = registry_model.get_cached_repo_blob(model_cache, 'devtable', 'simple', blob.digest)
|
||||
assert found.digest == blob.digest
|
||||
assert found.uuid == blob.uuid
|
||||
assert found.compressed_size == blob.compressed_size
|
||||
|
@ -683,7 +686,7 @@ def test_get_cached_repo_blob(pre_oci_model):
|
|||
with patch('data.registry_model.registry_pre_oci_model.model.blob.get_repository_blob_by_digest',
|
||||
fail):
|
||||
# Make sure we can load again, which should hit the cache.
|
||||
cached = pre_oci_model.get_cached_repo_blob(model_cache, 'devtable', 'simple', blob.digest)
|
||||
cached = registry_model.get_cached_repo_blob(model_cache, 'devtable', 'simple', blob.digest)
|
||||
assert cached.digest == blob.digest
|
||||
assert cached.uuid == blob.uuid
|
||||
assert cached.compressed_size == blob.compressed_size
|
||||
|
@ -694,13 +697,13 @@ def test_get_cached_repo_blob(pre_oci_model):
|
|||
# Try another blob, which should fail since the DB is not connected and the cache
|
||||
# does not contain the blob.
|
||||
with pytest.raises(SomeException):
|
||||
pre_oci_model.get_cached_repo_blob(model_cache, 'devtable', 'simple', 'some other digest')
|
||||
registry_model.get_cached_repo_blob(model_cache, 'devtable', 'simple', 'some other digest')
|
||||
|
||||
|
||||
def test_create_manifest_and_retarget_tag(pre_oci_model):
|
||||
repository_ref = pre_oci_model.lookup_repository('devtable', 'simple')
|
||||
latest_tag = pre_oci_model.get_repo_tag(repository_ref, 'latest', include_legacy_image=True)
|
||||
manifest = pre_oci_model.get_manifest_for_tag(latest_tag).get_parsed_manifest()
|
||||
def test_create_manifest_and_retarget_tag(registry_model):
|
||||
repository_ref = registry_model.lookup_repository('devtable', 'simple')
|
||||
latest_tag = registry_model.get_repo_tag(repository_ref, 'latest', include_legacy_image=True)
|
||||
manifest = registry_model.get_manifest_for_tag(latest_tag).get_parsed_manifest()
|
||||
|
||||
builder = DockerSchema1ManifestBuilder('devtable', 'simple', 'anothertag')
|
||||
builder.add_layer(manifest.blob_digests[0],
|
||||
|
@ -708,7 +711,7 @@ def test_create_manifest_and_retarget_tag(pre_oci_model):
|
|||
sample_manifest = builder.build(docker_v2_signing_key)
|
||||
assert sample_manifest is not None
|
||||
|
||||
another_manifest, tag = pre_oci_model.create_manifest_and_retarget_tag(repository_ref,
|
||||
another_manifest, tag = registry_model.create_manifest_and_retarget_tag(repository_ref,
|
||||
sample_manifest,
|
||||
'anothertag')
|
||||
assert another_manifest is not None
|
||||
|
@ -717,5 +720,5 @@ def test_create_manifest_and_retarget_tag(pre_oci_model):
|
|||
assert tag.name == 'anothertag'
|
||||
assert another_manifest.get_parsed_manifest().manifest_dict == sample_manifest.manifest_dict
|
||||
|
||||
layers = pre_oci_model.list_manifest_layers(another_manifest)
|
||||
layers = registry_model.list_manifest_layers(another_manifest)
|
||||
assert len(layers) == 1
|
|
@ -46,10 +46,10 @@ def test_e2e_query_count_manifest_norewrite(client, app):
|
|||
return timecode + 10
|
||||
|
||||
with patch('time.time', get_time):
|
||||
# Necessary in order to have the tag updates not occurr in the same second, which is the
|
||||
# Necessary in order to have the tag updates not occur in the same second, which is the
|
||||
# granularity supported currently.
|
||||
with count_queries() as counter:
|
||||
conduct_call(client, 'v2.write_manifest_by_digest', url_for, 'PUT', params, expected_code=202,
|
||||
headers=headers, raw_body=tag_manifest.json_data)
|
||||
|
||||
assert counter.count <= 16
|
||||
assert counter.count <= 25
|
||||
|
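The patched clock exists because tag lifetime timestamps are currently tracked at one-second granularity, so two writes to the same tag within the same second would be indistinguishable in history. A minimal sketch of the pattern, with names assumed to match the visible fragment:

import time
from mock import patch

base_time = time.time()

def get_time():
    # Jump the clock forward so the second tag update lands in a later second.
    return base_time + 10

with patch('time.time', get_time):
    pass  # perform the second manifest/tag write here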
|
|
@ -20,7 +20,8 @@ from data.database import (db, all_models, Role, TeamRole, Visibility, LoginServ
|
|||
QuayRegion, QuayService, UserRegion, OAuthAuthorizationCode,
|
||||
ServiceKeyApprovalType, MediaType, LabelSourceType, UserPromptKind,
|
||||
RepositoryKind, User, DisableReason, DeletedNamespace, appr_classes,
|
||||
ApprTagKind, ApprBlobPlacementLocation, Repository)
|
||||
ApprTagKind, ApprBlobPlacementLocation, Repository, TagKind,
|
||||
ManifestChild)
|
||||
from data import model
|
||||
from data.queue import WorkQueue
|
||||
from data.registry_model import registry_model
|
||||
|
@ -450,6 +451,8 @@ def initialize_database():
|
|||
DisableReason.create(name='successive_build_failures')
|
||||
DisableReason.create(name='successive_build_internal_errors')
|
||||
|
||||
TagKind.create(name='tag')
|
||||
|
||||
|
||||
def wipe_database():
|
||||
logger.debug('Wiping all data from the DB.')
|
||||
|
@ -910,7 +913,7 @@ def populate_database(minimal=False, with_storage=False):
|
|||
model.repositoryactioncount.update_repository_score(to_count)
|
||||
|
||||
|
||||
WHITELISTED_EMPTY_MODELS = ['DeletedNamespace', 'LogEntry2']
|
||||
WHITELISTED_EMPTY_MODELS = ['DeletedNamespace', 'LogEntry2', 'ManifestChild']
|
||||
|
||||
def find_models_missing_data():
|
||||
# As a sanity check we are going to make sure that all db tables have some data, unless explicitly
|
||||
|
|
|
@ -2,7 +2,7 @@ from app import docker_v2_signing_key
|
|||
from data import model
|
||||
from data.database import (TagManifestLabelMap, TagManifestToManifest, Manifest, ManifestBlob,
|
||||
ManifestLegacyImage, ManifestLabel, TagManifest, RepositoryTag, Image,
|
||||
TagManifestLabel)
|
||||
TagManifestLabel, Tag, TagToRepositoryTag)
|
||||
from image.docker.schema1 import DockerSchema1ManifestBuilder
|
||||
from workers.manifestbackfillworker import backfill_manifest
|
||||
from workers.labelbackfillworker import backfill_label
|
||||
|
@ -12,6 +12,8 @@ from test.fixtures import *
|
|||
@pytest.fixture()
|
||||
def clear_rows(initialized_db):
|
||||
# Remove all new-style rows so we can backfill.
|
||||
TagToRepositoryTag.delete().execute()
|
||||
Tag.delete().execute()
|
||||
TagManifestLabelMap.delete().execute()
|
||||
ManifestLabel.delete().execute()
|
||||
ManifestBlob.delete().execute()
|
||||
|
|
Reference in a new issue