Move the upload flag to the database, and use the database-stored image size rather than going to S3.
This commit is contained in:
parent 21304a5678
commit ef9fe871fc
7 changed files with 35 additions and 36 deletions
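In short: the registry endpoints stop round-tripping to the backing store for per-image state. The upload marker becomes an uploading column on ImageStorage, and the layer size is read from the database row instead of issuing an S3 HEAD request. Below is a minimal before/after sketch, reusing the store, repo_image, and uuid names from the registry endpoints in this diff; the wrapper function itself is hypothetical and exists only for illustration.

def size_and_upload_state(store, repo_image, namespace, repository, image_id, uuid):
  # Old behaviour: both answers required a trip to the backing store
  # (an S3 HEAD request for the size, a key-existence check for the mark file).
  old_size = store.get_size(store.image_layer_path(namespace, repository, image_id, uuid))
  old_uploading = store.exists(store.image_mark_path(namespace, repository, image_id, uuid))

  # New behaviour: both answers come from the ImageStorage row that
  # get_repo_image() now joins in; image_is_uploading() (added in this commit)
  # still falls back to the legacy mark file for rows created before the flag existed.
  new_size = repo_image.image_size or repo_image.storage.image_size
  new_uploading = image_is_uploading(namespace, repository, image_id, repo_image)

  return (old_size, old_uploading), (new_size, new_uploading)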
@@ -220,7 +220,8 @@ class ImageStorage(BaseModel):
   created = DateTimeField(null=True)
   comment = TextField(null=True)
   command = TextField(null=True)
   image_size = BigIntegerField(null=True)
+  uploading = BooleanField(default=True, null=True)
 
 
 class Image(BaseModel):
@@ -817,7 +817,7 @@ def get_repository(namespace_name, repository_name):
 
 def get_repo_image(namespace_name, repository_name, image_id):
   query = (Image
-           .select()
+           .select(Image, ImageStorage)
            .join(Repository)
            .switch(Image)
            .join(ImageStorage, JOIN_LEFT_OUTER)
@@ -40,16 +40,35 @@ class SocketReader(object):
     return buf
 
 
+def image_is_uploading(namespace, repository, image_id, repo_image):
+  if repo_image and repo_image.storage and repo_image.storage.uploading is not None:
+    return repo_image.storage.uploading
+
+  logger.warning('Setting legacy upload flag')
+  uuid = repo_image and repo_image.storage and repo_image.storage.uuid
+  mark_path = store.image_mark_path(namespace, repository, image_id, uuid)
+  return store.exists(mark_path)
+
+
+def mark_upload_complete(namespace, repository, image_id, repo_image):
+  if repo_image and repo_image.storage and repo_image.storage.uploading is not None:
+    repo_image.storage.uploading = False
+    repo_image.storage.save()
+  else:
+    logger.warning('Removing legacy upload flag')
+    uuid = repo_image and repo_image.storage and repo_image.storage.uuid
+    mark_path = store.image_mark_path(namespace, repository, image_id, uuid)
+    if store.exists(mark_path):
+      store.remove(mark_path)
+
+
 def require_completion(f):
   """This make sure that the image push correctly finished."""
   @wraps(f)
   def wrapper(namespace, repository, *args, **kwargs):
     image_id = kwargs['image_id']
     repo_image = model.get_repo_image(namespace, repository, image_id)
-    uuid = repo_image and repo_image.storage and repo_image.storage.uuid
-    if store.exists(store.image_mark_path(namespace, repository, image_id,
-                                          uuid)):
+    if image_is_uploading(namespace, repository, image_id, repo_image):
       abort(400, 'Image %(image_id)s is being uploaded, retry later',
             issue='upload-in-progress', image_id=kwargs['image_id'])
 
@@ -139,9 +158,9 @@ def put_image_layer(namespace, repository, image_id):
 
   profile.debug('Retrieving image path info')
   layer_path = store.image_layer_path(namespace, repository, image_id, uuid)
-  mark_path = store.image_mark_path(namespace, repository, image_id, uuid)
 
-  if store.exists(layer_path) and not store.exists(mark_path):
+  if (store.exists(layer_path) and not
+      image_is_uploading(namespace, repository, image_id, repo_image)):
     abort(409, 'Image already exists', issue='image-exists', image_id=image_id)
 
   profile.debug('Storing layer data')
@@ -192,7 +211,7 @@ def put_image_layer(namespace, repository, image_id):
           issue='checksum-mismatch', image_id=image_id)
 
   # Checksum is ok, we remove the marker
-  store.remove(mark_path)
+  mark_upload_complete(namespace, repository, image_id, repo_image)
 
   # The layer is ready for download, send a job to the work queue to
   # process it.
@@ -234,8 +253,7 @@ def put_image_checksum(namespace, repository, image_id):
     abort(404, 'Image not found: %(image_id)s', issue='unknown-image', image_id=image_id)
 
   profile.debug('Marking image path')
-  mark_path = store.image_mark_path(namespace, repository, image_id, uuid)
-  if not store.exists(mark_path):
+  if not image_is_uploading(namespace, repository, image_id, repo_image):
     abort(409, 'Cannot set checksum for image %(image_id)s',
           issue='image-write-error', image_id=image_id)
 
@@ -252,7 +270,7 @@ def put_image_checksum(namespace, repository, image_id):
           issue='checksum-mismatch', image_id=image_id)
 
   # Checksum is ok, we remove the marker
-  store.remove(mark_path)
+  mark_upload_complete(namespace, repository, image_id, repo_image)
 
   # The layer is ready for download, send a job to the work queue to
   # process it.
@@ -291,8 +309,7 @@ def get_image_json(namespace, repository, image_id, headers):
 
   profile.debug('Looking up repo layer size')
   try:
-    size = store.get_size(store.image_layer_path(namespace, repository,
-                                                 image_id, uuid))
+    size = repo_image.image_size or repo_image.storage.image_size
     headers['X-Docker-Size'] = str(size)
   except OSError:
     pass
@@ -429,13 +446,13 @@ def put_image_json(namespace, repository, image_id):
                                              parent_uuid))):
     abort(400, 'Image %(image_id)s depends on non existing parent image %(parent_id)s',
           issue='invalid-request', image_id=image_id, parent_id=parent_id)
 
   profile.debug('Looking up image storage paths')
   json_path = store.image_json_path(namespace, repository, image_id, uuid)
-  mark_path = store.image_mark_path(namespace, repository, image_id, uuid)
 
   profile.debug('Checking if image already exists')
-  if store.exists(json_path) and not store.exists(mark_path):
+  if (store.exists(json_path) and not
+      image_is_uploading(namespace, repository, image_id, repo_image)):
     abort(409, 'Image already exists', issue='image-exists', image_id=image_id)
 
   # If we reach that point, it means that this is a new image or a retry
@@ -449,9 +466,6 @@ def put_image_json(namespace, repository, image_id):
                            data.get('created'), data.get('comment'), command,
                            parent_image)
 
-  profile.debug('Putting mark path')
-  store.put_content(mark_path, 'true')
-
   profile.debug('Putting json path')
   store.put_content(json_path, request.data)
 
@@ -92,6 +92,3 @@ class BaseStorage(object):
 
   def remove(self, path):
     raise NotImplementedError
-
-  def get_size(self, path):
-    raise NotImplementedError
@@ -80,7 +80,3 @@ class LocalStorage(BaseStorage):
       os.remove(path)
     except OSError:
       pass
-
-  def get_size(self, path):
-    path = self._init_path(path)
-    return os.path.getsize(path)
@@ -171,12 +171,3 @@ class S3Storage(BaseStorage):
       path += '/'
     for key in self._s3_bucket.list(prefix=path):
       key.delete()
-
-  def get_size(self, path):
-    self._initialize_s3()
-    path = self._init_path(path)
-    # Lookup does a HEAD HTTP Request on the object
-    key = self._s3_bucket.lookup(path)
-    if not key:
-      raise OSError('No such key: \'{0}\''.format(path))
-    return key.size
Binary file not shown.