Merge remote-tracking branch 'upstream/master' into python-registry-v2

commit ab340e20ea
Jake Moshenko, 2015-11-11 16:41:40 -05:00
43 changed files with 862 additions and 248 deletions


@@ -19,7 +19,7 @@ class RedisBuildLogs(object):
   def __init__(self, redis_config):
     args = dict(redis_config)
-    args.update({'socket_connect_timeout': 5})
+    args.update({'socket_connect_timeout': 5, 'socket_timeout': 5})

     self._redis_config = redis_config
     self._redis = redis.StrictRedis(**args)
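Note on the change above: socket_connect_timeout only bounds the TCP handshake; without socket_timeout, a Redis server that accepts the connection and then stalls can block a build-logs call indefinitely. A minimal sketch of the resulting client construction (the host/port values are illustrative; in the diff they come from redis_config):

import redis

# Illustrative connection settings; the diff takes these from redis_config.
args = {'host': 'localhost', 'port': 6379}

# socket_connect_timeout bounds the TCP handshake; socket_timeout bounds
# every later read/write, so a stalled server raises redis.TimeoutError
# instead of hanging the caller.
args.update({'socket_connect_timeout': 5, 'socket_timeout': 5})

client = redis.StrictRedis(**args)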


@@ -574,7 +574,7 @@ class Image(BaseModel):
   security_indexed = BooleanField(default=False)
   security_indexed_engine = IntegerField(default=-1)
-  parent = ForeignKeyField('self', index=True, null=True, related_name='children')
+  parent_id = IntegerField(index=True, null=True)

   class Meta:
     database = db
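Replacing the self-referential ForeignKeyField with a plain IntegerField keeps the underlying column but stops peewee from treating it as a relation: no implicit join, no lazy fetch of the parent row on attribute access. A minimal sketch of the pattern, assuming an in-memory SQLite database:

from peewee import IntegerField, Model, SqliteDatabase

db = SqliteDatabase(':memory:')

class Image(Model):
    # A raw integer holding the parent's primary key; peewee no longer
    # resolves it to an Image instance automatically.
    parent_id = IntegerField(index=True, null=True)

    class Meta:
        database = db

db.connect()
db.create_tables([Image])

root = Image.create(parent_id=None)
child = Image.create(parent_id=root.id)

# Fetching the parent is now an explicit, visible query.
parent = Image.get(Image.id == child.parent_id)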


@ -1,7 +1,9 @@
"""add support for quay's security indexer
Revision ID: 57dad559ff2d
Revises: 154f2befdfbe
Create Date: 2015-07-13 16:51:41.669249
"""
# revision identifiers, used by Alembic.
@@ -17,14 +19,13 @@ def upgrade(tables):
   op.add_column('image', sa.Column('security_indexed', sa.Boolean(), nullable=False, default=False, server_default=sa.sql.expression.false()))
   op.add_column('image', sa.Column('security_indexed_engine', sa.Integer(), nullable=False, default=-1, server_default="-1"))
+  op.create_index('image_parent_id', 'image', ['parent_id'], unique=False)
+  op.create_foreign_key(op.f('fk_image_parent_id_image'), 'image', 'image', ['parent_id'], ['id'])
-  ### end Alembic commands ###
   op.create_index('image_security_indexed_engine_security_indexed', 'image', ['security_indexed_engine', 'security_indexed'])


 def downgrade(tables):
-  ### commands auto generated by Alembic - please adjust! ###
   op.drop_index('image_security_indexed_engine_security_indexed', 'image')
+  op.drop_constraint(op.f('fk_image_parent_id_image'), 'image', type_='foreignkey')
+  op.drop_index('image_parent_id', table_name='image')
   op.drop_column('image', 'security_indexed')
   op.drop_column('image', 'security_indexed_engine')
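For context, the composite index created in upgrade() is shaped for the security worker's scan: find images whose indexing engine version is stale and which are not yet marked indexed. A hedged peewee sketch of that query shape (the model, engine value, and predicate are illustrative, not Quay's exact worker code):

from peewee import BooleanField, IntegerField, Model, SqliteDatabase

db = SqliteDatabase(':memory:')

class Image(Model):
    security_indexed = BooleanField(default=False)
    security_indexed_engine = IntegerField(default=-1)

    class Meta:
        database = db
        # Mirrors the migration's composite index: both columns are
        # filtered together, so one index covers the whole scan.
        indexes = ((('security_indexed_engine', 'security_indexed'), False),)

db.connect()
db.create_tables([Image])

CURRENT_ENGINE = 2  # illustrative engine version

# Images the current engine still needs to analyze.
pending = Image.select().where((Image.security_indexed_engine != CURRENT_ENGINE) &
                               (Image.security_indexed == False))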


@@ -49,7 +49,7 @@ def get_repo_image(namespace_name, repository_name, docker_image_id):
 def get_repo_image_extended(namespace_name, repository_name, docker_image_id):
   def limit_to_image_id(query):
-    return query.where(Image.docker_image_id == docker_image_id).limit(1)
+    return query.where(Image.docker_image_id == docker_image_id)

   images = get_repository_images_base(namespace_name, repository_name, limit_to_image_id)
   if not images:
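limit_to_image_id is a query-modifier callback: get_repository_images_base builds the shared base query and hands it to the callback for narrowing. Dropping .limit(1) lets every matching row come back, which the extended lookup needs. A minimal sketch of the callback pattern with plain lists standing in for peewee queries (names mirror the diff; the bodies are illustrative):

def get_repository_images_base(rows, query_modifier):
    # Quay builds the namespace/repository join here; this stand-in just
    # applies the caller-supplied filter to the candidate rows.
    return query_modifier(rows)

def make_image_id_filter(docker_image_id):
    def limit_to_image_id(rows):
        # No limit(1): all rows for the id are returned.
        return [r for r in rows if r['docker_image_id'] == docker_image_id]
    return limit_to_image_id

rows = [{'docker_image_id': 'abc'}, {'docker_image_id': 'abc'},
        {'docker_image_id': 'def'}]
assert len(get_repository_images_base(rows, make_image_id_filter('abc'))) == 2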
@@ -195,14 +195,21 @@ def _find_or_link_image(existing_image, repo_obj, username, translations, prefer
     copied_storage.locations = {placement.location.name
                                 for placement in copied_storage.imagestorageplacement_set}

+    translated_parent_id = None
+    if new_image_ancestry != '/':
+      translated_parent_id = int(new_image_ancestry.split('/')[-2])
+
     new_image = Image.create(docker_image_id=existing_image.docker_image_id,
-                             repository=repo_obj, storage=copied_storage,
+                             repository=repo_obj,
+                             storage=copied_storage,
                              ancestors=new_image_ancestry,
                              command=existing_image.command,
                              created=existing_image.created,
                              comment=existing_image.comment,
                              v1_json_metadata=existing_image.v1_json_metadata,
-                             aggregate_size=existing_image.aggregate_size)
+                             aggregate_size=existing_image.aggregate_size,
+                             parent_id=translated_parent_id,
+                             v1_checksum=existing_image.v1_checksum)

     logger.debug('Storing translation %s -> %s', existing_image.id, new_image.id)
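The translated_parent_id computation relies on the ancestry encoding: each image stores its ancestor chain as '/'-separated ids with leading and trailing slashes (a root image has ancestry '/'), so the direct parent is always the second-to-last segment. A small sketch of that parsing, assuming the same encoding:

def parse_parent_id(ancestry):
    # Ancestry '/1/4/9/' splits into ['', '1', '4', '9', ''], so the
    # direct parent sits at index -2. A root image ('/') has none.
    if ancestry == '/':
        return None
    return int(ancestry.split('/')[-2])

assert parse_parent_id('/') is None
assert parse_parent_id('/1/4/9/') == 9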
@@ -301,7 +308,7 @@ def set_image_metadata(docker_image_id, namespace_name, repository_name, created
     if parent:
       fetched.ancestors = '%s%s/' % (parent.ancestors, parent.id)
-      fetched.parent = parent
+      fetched.parent_id = parent.id

     fetched.save()
     return fetched
@@ -417,3 +424,19 @@ def synthesize_v1_image(repo, image_storage, docker_image_id, created_date_str,
   return Image.create(docker_image_id=docker_image_id, ancestors=ancestors, comment=comment,
                       command=command, v1_json_metadata=v1_json_metadata, created=created,
                       storage=image_storage, repository=repo)
+
+
+def ensure_image_locations(*names):
+  with db_transaction():
+    locations = ImageStorageLocation.select().where(ImageStorageLocation.name << names)
+
+    insert_names = list(names)
+    for location in locations:
+      insert_names.remove(location.name)
+
+    if not insert_names:
+      return
+
+    data = [{'name': name} for name in insert_names]
+    ImageStorageLocation.insert_many(data).execute()
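ensure_image_locations is an idempotent select-then-insert-missing helper: within one transaction it loads the rows that already exist, removes them from the requested names, and bulk-inserts only the remainder. A self-contained sketch of the same pattern (minimal stand-in model; db.atomic() stands in for Quay's db_transaction helper):

from peewee import CharField, Model, SqliteDatabase

db = SqliteDatabase(':memory:')

class ImageStorageLocation(Model):
    name = CharField(unique=True)

    class Meta:
        database = db

db.connect()
db.create_tables([ImageStorageLocation])

def ensure_image_locations(*names):
    with db.atomic():  # stand-in for db_transaction()
        existing = ImageStorageLocation.select().where(ImageStorageLocation.name << names)
        insert_names = list(names)
        for location in existing:
            insert_names.remove(location.name)
        if insert_names:
            ImageStorageLocation.insert_many([{'name': n} for n in insert_names]).execute()

# Repeated and overlapping calls insert each name exactly once.
ensure_image_locations('local_us', 'local_eu')
ensure_image_locations('local_eu', 'local_asia')
assert ImageStorageLocation.select().count() == 3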


@@ -137,12 +137,12 @@ def garbage_collect_repo(repo):
   # iterable of tuples containing [(k, v), (k, v), ...]
   all_repo_images = Image.select(Image.id, Image.storage).where(Image.repository == repo).tuples()
   images_to_storages = dict(all_repo_images)
-  to_remove = set(images_to_storages.keys()).difference(referenced_ancestors)
+  to_remove = list(set(images_to_storages.keys()).difference(referenced_ancestors))

   if len(to_remove) > 0:
     logger.info('Cleaning up unreferenced images: %s', to_remove)
     storage_id_whitelist = {images_to_storages[to_remove_id] for to_remove_id in to_remove}
-    Image.delete().where(Image.id << list(to_remove)).execute()
+    Image.delete().where(Image.id << to_remove).execute()

   if len(to_remove) > 0:
     logger.info('Garbage collecting storage for images: %s', to_remove)
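Converting the set difference to a list once up front means the same to_remove value feeds the log messages, the whitelist comprehension, and the << (SQL IN) clause without re-wrapping at each use. A minimal sketch of peewee's << operator on an illustrative model:

from peewee import IntegerField, Model, SqliteDatabase

db = SqliteDatabase(':memory:')

class Image(Model):
    storage = IntegerField()  # illustrative stand-in for the storage FK

    class Meta:
        database = db

db.connect()
db.create_tables([Image])
for n in range(5):
    Image.create(storage=n)

to_remove = [1, 3]  # ids produced by the set difference in the diff

# << renders as SQL IN; the one list value is reusable for logging too.
Image.delete().where(Image.id << to_remove).execute()
assert Image.select().count() == 3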