v2/blob: _upload_chunk parse range header
This commit is contained in:
parent
9f743fd6cd
commit
c77a7bc0b9
1 changed file with 8 additions and 8 deletions
|
@@ -140,7 +140,7 @@ def start_blob_upload(namespace_name, repo_name):
|
|||
|
||||
# Upload the chunk to storage while calculating some metadata and updating
|
||||
# the upload state.
|
||||
updated_blob_upload = _upload_chunk(blob_upload, *_start_offset_and_length(request.headers))
|
||||
updated_blob_upload = _upload_chunk(blob_upload, request.headers.get('range'))
|
||||
if updated_blob_upload is None:
|
||||
_abort_range_not_satisfiable(updated_blob_upload.byte_count, new_upload_uuid)
|
||||
|
||||
|
@@ -193,7 +193,7 @@ def upload_chunk(namespace_name, repo_name, upload_uuid):
|
|||
|
||||
# Upload the chunk to storage while calculating some metadata and updating
|
||||
# the upload state.
|
||||
updated_blob_upload = _upload_chunk(blob_upload, *_start_offset_and_length(request.headers))
|
||||
updated_blob_upload = _upload_chunk(blob_upload, request.headers.get('range'))
|
||||
if updated_blob_upload is None:
|
||||
_abort_range_not_satisfiable(updated_blob_upload.byte_count, upload_uuid)
|
||||
|
||||
|
@@ -229,7 +229,7 @@ def monolithic_upload_or_last_chunk(namespace_name, repo_name, upload_uuid):
|
|||
|
||||
# Upload the chunk to storage while calculating some metadata and updating
|
||||
# the upload state.
|
||||
updated_blob_upload = _upload_chunk(blob_upload, *_start_offset_and_length(request.headers))
|
||||
updated_blob_upload = _upload_chunk(blob_upload, request.headers.get('range'))
|
||||
if updated_blob_upload is None:
|
||||
_abort_range_not_satisfiable(updated_blob_upload.byte_count, upload_uuid)
|
||||
|
||||
|
@@ -318,14 +318,13 @@ def _parse_range_header(range_header_text):
|
|||
return (start, length)
|
||||
|
||||
|
||||
def _start_offset_and_length(headers):
|
||||
def _start_offset_and_length(range_header):
|
||||
"""
|
||||
Returns a tuple of the start offset and the length.
|
||||
If the range header doesn't exist, defaults to (0, -1).
|
||||
If parsing fails, returns (None, None).
|
||||
"""
|
||||
start_offset, length = 0, -1
|
||||
range_header = headers.get('range', None)
|
||||
if range_header is not None:
|
||||
try:
|
||||
start_offset, length = _parse_range_header(range_header)
|
||||
|
@@ -335,15 +334,16 @@ def _start_offset_and_length(headers):
|
|||
return start_offset, length
|
||||
|
||||
|
||||
def _upload_chunk(blob_upload, start_offset, length):
|
||||
def _upload_chunk(blob_upload, range_header):
|
||||
"""
|
||||
Calculates metadata while uploading a chunk to storage.
|
||||
|
||||
Returns a BlobUpload object or None if there was a failure.
|
||||
"""
|
||||
# Check for invalidate arguments.
|
||||
# Get the offset and length of the current chunk.
|
||||
start_offset, length = _start_offset_and_length(range_header)
|
||||
if None in {blob_upload, start_offset, length}:
|
||||
logger.error('None provided as argument to _upload_chunk')
|
||||
logger.error('Invalid arguments provided to _upload_chunk')
|
||||
return None
|
||||
|
||||
if start_offset > 0 and start_offset > blob_upload.byte_count:
|
||||
|
|
Reference in a new issue