Save the compressed image size on blob upload completion
parent d246e68e68
commit cf030e2a98
2 changed files with 10 additions and 3 deletions
@@ -27,7 +27,7 @@ def get_repo_blob_by_digest(namespace, repo_name, blob_digest):
   return found
 
 
-def store_blob_record_and_temp_link(namespace, repo_name, blob_digest, location_obj,
+def store_blob_record_and_temp_link(namespace, repo_name, blob_digest, location_obj, byte_count,
                                     link_expiration_s):
   """ Store a record of the blob and temporarily link it to the specified repository.
   """
@@ -36,9 +36,12 @@ def store_blob_record_and_temp_link(namespace, repo_name, blob_digest, location_
   repo = _basequery.get_existing_repository(namespace, repo_name)
   try:
     storage = ImageStorage.get(checksum=blob_digest)
+    storage.image_size = byte_count
+    storage.save()
+
     ImageStoragePlacement.get(storage=storage, location=location_obj)
   except ImageStorage.DoesNotExist:
-    storage = ImageStorage.create(checksum=blob_digest, uploading=False)
+    storage = ImageStorage.create(checksum=blob_digest, uploading=False, image_size=byte_count)
     ImageStoragePlacement.create(storage=storage, location=location_obj)
   except ImageStoragePlacement.DoesNotExist:
     ImageStoragePlacement.create(storage=storage, location=location_obj)
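The model change above threads the new byte_count argument through both branches: if an ImageStorage row already exists for the digest, its image_size is backfilled and saved; otherwise the row is created with image_size set up front. Below is a minimal, self-contained sketch of that update-or-create pattern, using hypothetical peewee models in place of the project's real tables (the table and field definitions here are illustrative stand-ins, not the actual schema).

# Sketch only: stand-in peewee models, not the project's real ImageStorage schema.
from peewee import SqliteDatabase, Model, CharField, BigIntegerField, BooleanField

db = SqliteDatabase(':memory:')

class BaseModel(Model):
    class Meta:
        database = db

class ImageStorage(BaseModel):
    checksum = CharField(unique=True)
    image_size = BigIntegerField(null=True)
    uploading = BooleanField(default=False)

db.connect()
db.create_tables([ImageStorage])

def record_blob_size(blob_digest, byte_count):
    """Update the stored size if the blob row exists; otherwise create it with the size."""
    try:
        storage = ImageStorage.get(checksum=blob_digest)
        storage.image_size = byte_count   # existing row: backfill the compressed size
        storage.save()
    except ImageStorage.DoesNotExist:
        storage = ImageStorage.create(checksum=blob_digest, uploading=False,
                                      image_size=byte_count)
    return storage

# Usage: the compressed size ends up on the row regardless of which branch runs.
record_blob_size('sha256:abc123', 1024)
assert ImageStorage.get(checksum='sha256:abc123').image_size == 1024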
@@ -214,15 +214,19 @@ def _upload_chunk(namespace, repo_name, upload_uuid):
 
 
 def _finish_upload(namespace, repo_name, upload_obj, expected_digest):
+  # Verify that the digest's SHA matches that of the uploaded data.
   computed_digest = digest_tools.sha256_digest_from_hashlib(upload_obj.sha_state)
   if not digest_tools.digests_equal(computed_digest, expected_digest):
     raise BlobUploadInvalid()
 
+  # Mark the blob as uploaded.
   final_blob_location = digest_tools.content_path(expected_digest)
   storage.complete_chunked_upload({upload_obj.location.name}, upload_obj.uuid, final_blob_location)
   model.blob.store_blob_record_and_temp_link(namespace, repo_name, expected_digest,
-                                             upload_obj.location,
+                                             upload_obj.location, upload_obj.byte_count,
                                              app.config['PUSH_TEMP_TAG_EXPIRATION_SEC'])
 
+  # Delete the upload tracking row.
   upload_obj.delete_instance()
 
   response = make_response('', 201)
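The endpoint change relies on the upload tracking row already carrying a running byte_count alongside its sha_state, so that _finish_upload can pass both the digest and the compressed size to the model layer. The sketch below illustrates, under stated assumptions, how such a tracker could accumulate those two values per chunk; it is not the project's actual upload code, and ChunkedUploadState / append_chunk are made-up names used only for this example.

# Hedged sketch: a hypothetical per-upload tracker, not the real tracking row.
import hashlib

class ChunkedUploadState(object):
    """Stand-in for the upload tracking row; attribute names are illustrative."""
    def __init__(self):
        self.sha_state = hashlib.sha256()
        self.byte_count = 0

    def append_chunk(self, chunk):
        # Each uploaded chunk advances both the digest state and the running size.
        self.sha_state.update(chunk)
        self.byte_count += len(chunk)

upload = ChunkedUploadState()
for chunk in (b'layer-part-1', b'layer-part-2'):
    upload.append_chunk(chunk)

computed_digest = 'sha256:' + upload.sha_state.hexdigest()
print(computed_digest, upload.byte_count)  # digest plus compressed size, ready to persist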