Close for long operation before we upload chunks

This commit is contained in:
Joseph Schorr 2015-12-07 14:07:10 -05:00
parent 4b01c915c7
commit bcd7f45905

View file

@@ -190,23 +190,26 @@ def _upload_chunk(namespace, repo_name, upload_uuid):
   if start_offset > 0 and start_offset > found.byte_count:
     _range_not_satisfiable(found.byte_count)

-  input_fp = get_input_stream(request)
+  location_set = {found.location.name}
+  with database.CloseForLongOperation(app.config):
+    input_fp = get_input_stream(request)

-  if start_offset > 0 and start_offset < found.byte_count:
-    # Skip the bytes which were received on a previous push, which are already stored and
-    # included in the sha calculation
-    input_fp = StreamSlice(input_fp, found.byte_count - start_offset)
-    start_offset = found.byte_count
+    if start_offset > 0 and start_offset < found.byte_count:
+      # Skip the bytes which were received on a previous push, which are already stored and
+      # included in the sha calculation
+      input_fp = StreamSlice(input_fp, found.byte_count - start_offset)
+      start_offset = found.byte_count

-  input_fp = wrap_with_handler(input_fp, found.sha_state.update)
+    input_fp = wrap_with_handler(input_fp, found.sha_state.update)

-  try:
-    length_written, new_metadata = storage.stream_upload_chunk({found.location.name}, upload_uuid,
-                                                               start_offset, length, input_fp,
-                                                               found.storage_metadata,
-                                                               content_type=BLOB_CONTENT_TYPE)
-  except InvalidChunkException:
-    _range_not_satisfiable(found.byte_count)
+    try:
+      length_written, new_metadata = storage.stream_upload_chunk(location_set, upload_uuid,
+                                                                 start_offset, length, input_fp,
+                                                                 found.storage_metadata,
+                                                                 content_type=BLOB_CONTENT_TYPE)
+    except InvalidChunkException:
+      _range_not_satisfiable(found.byte_count)

   found.storage_metadata = new_metadata
   found.byte_count += length_written