mv data/types image

This change also merges formats into the new image module.
Jimmy Zelinskie 2016-08-02 18:45:30 -04:00
parent a516c08deb
commit 32a6c22b43
14 changed files with 342 additions and 258 deletions


@@ -216,7 +216,7 @@ def monolithic_upload_or_last_chunk(namespace_name, repo_name, upload_uuid):
  # Ensure the digest is present before proceeding.
  digest = request.args.get('digest', None)
  if digest is None:
-    raise BlobUploadInvalid()
+    raise BlobUploadInvalid(detail={'reason': 'Missing digest arg on monolithic upload'})

  # Find the upload.
  blob_upload = v2.blob_upload_by_uuid(namespace_name, repo_name, upload_uuid)
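For reference, BlobUploadInvalid maps to the registry v2 error envelope, so a client hitting this path should receive a body roughly like the following (the message text is the spec default and is assumed here; the detail dict is the one passed above):

{
  "errors": [{
    "code": "BLOB_UPLOAD_INVALID",
    "message": "blob upload invalid",
    "detail": {"reason": "Missing digest arg on monolithic upload"}
  }]
}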
@@ -271,6 +271,9 @@ def delete_digest(namespace_name, repo_name, upload_uuid):
def _render_range(num_uploaded_bytes, with_bytes_prefix=True):
  """
  Returns a string formatted to be used in the Range header.
  """
  return '{0}0-{1}'.format('bytes=' if with_bytes_prefix else '', num_uploaded_bytes - 1)
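A quick sanity check on the formatting above: the helper emits a zero-indexed, inclusive range, with the 'bytes=' prefix optional.

# Behavior implied by the body above (values are illustrative):
_render_range(10)                           # 'bytes=0-9'
_render_range(10, with_bytes_prefix=False)  # '0-9'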
@@ -327,6 +330,7 @@ def _start_offset_and_length(headers):
    start_offset, length = _parse_range_header(range_header)
  except _InvalidRangeHeader:
    return None, None

  return start_offset, length
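_parse_range_header itself is outside this diff; assuming it accepts a 'start-end' style value and raises _InvalidRangeHeader otherwise, the helper behaves roughly like:

# Illustrative only; the header name and parse rules are assumptions.
_start_offset_and_length({'Range': 'bytes=0-9999'})  # e.g. (0, 10000)
_start_offset_and_length({'Range': 'garbage'})       # (None, None)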
@@ -339,6 +343,7 @@ def _upload_chunk(blob_upload, start_offset, length):
  # Check for invalid arguments.
  if None in {blob_upload, start_offset, length}:
    return None

  if start_offset > 0 and start_offset > blob_upload.byte_count:
    return None
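The guard only rejects a chunk that would start past the data already stored; a retry that overlaps existing bytes still passes this check.

# Hypothetical values, not taken from the diff:
# blob_upload.byte_count == 100
# start_offset == 150  -> 150 > 100, so return None (would leave a 50-byte gap)
# start_offset == 100  -> passes; the chunk continues exactly where the upload stopped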
@@ -425,7 +430,7 @@ def _validate_digest(blob_upload, expected_digest):
  computed_digest = digest_tools.sha256_digest_from_hashlib(blob_upload.sha_state)
  if not digest_tools.digests_equal(computed_digest, expected_digest):
    logger.error('Digest mismatch for upload %s: Expected digest %s, found digest %s',
-                 upload_obj.uuid, expected_digest, computed_digest)
+                 blob_upload.uuid, expected_digest, computed_digest)
    raise BlobUploadInvalid(detail={'reason': 'Digest mismatch on uploaded blob'})
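digest_tools is not part of this diff; as a rough sketch of the comparison being performed (an approximation, not the actual implementation), assuming sha_state is an incrementally updated hashlib.sha256() object:

import hashlib
import hmac

def _sketch_validate(sha_state, expected_digest):
  # Render the digest in the 'sha256:<hex>' form used by the registry.
  computed = 'sha256:' + sha_state.hexdigest()
  # Constant-time comparison, analogous to digest_tools.digests_equal.
  return hmac.compare_digest(computed, expected_digest)

# Usage sketch:
state = hashlib.sha256()
state.update(b'blob bytes...')
_sketch_validate(state, 'sha256:' + hashlib.sha256(b'blob bytes...').hexdigest())  # True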