diff --git a/endpoints/v2/blob.py b/endpoints/v2/blob.py
index f5cfb8601..42439650a 100644
--- a/endpoints/v2/blob.py
+++ b/endpoints/v2/blob.py
@@ -196,23 +196,26 @@ def _upload_chunk(namespace, repo_name, upload_uuid):
   if start_offset > 0 and start_offset > found.byte_count:
     _range_not_satisfiable(found.byte_count)

-  input_fp = get_input_stream(request)
+  location_set = {found.location.name}

-  if start_offset > 0 and start_offset < found.byte_count:
-    # Skip the bytes which were received on a previous push, which are already stored and
-    # included in the sha calculation
-    input_fp = StreamSlice(input_fp, found.byte_count - start_offset)
-    start_offset = found.byte_count
+  with database.CloseForLongOperation(app.config):
+    input_fp = get_input_stream(request)

-  input_fp = wrap_with_handler(input_fp, found.sha_state.update)
+    if start_offset > 0 and start_offset < found.byte_count:
+      # Skip the bytes which were received on a previous push, which are already stored and
+      # included in the sha calculation
+      input_fp = StreamSlice(input_fp, found.byte_count - start_offset)
+      start_offset = found.byte_count

-  try:
-    length_written, new_metadata = storage.stream_upload_chunk({found.location.name}, upload_uuid,
-                                                               start_offset, length, input_fp,
-                                                               found.storage_metadata,
-                                                               content_type=BLOB_CONTENT_TYPE)
-  except InvalidChunkException:
-    _range_not_satisfiable(found.byte_count)
+    input_fp = wrap_with_handler(input_fp, found.sha_state.update)
+
+    try:
+      length_written, new_metadata = storage.stream_upload_chunk(location_set, upload_uuid,
+                                                                 start_offset, length, input_fp,
+                                                                 found.storage_metadata,
+                                                                 content_type=BLOB_CONTENT_TYPE)
+    except InvalidChunkException:
+      _range_not_satisfiable(found.byte_count)

   found.storage_metadata = new_metadata
   found.byte_count += length_written
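
The point of the change is to not hold a database connection open while the request body is streamed to blob storage, which can take a long time for large chunks. Note that `{found.location.name}` is hoisted into `location_set` before entering the block, presumably because that attribute access may lazily query the database, so it must happen while the connection is still usable. Below is a minimal sketch of what a `CloseForLongOperation` context manager can look like; this is inferred from how the diff uses it, not necessarily Quay's implementation, and `_close_pooled_connection` is a hypothetical helper:

class CloseForLongOperation(object):
  """Closes the database connection before a long operation (such as streaming
  a large blob upload) so a pooled connection is not held idle, then lets the
  driver reconnect lazily on the next query."""

  def __init__(self, config_object):
    self.config_object = config_object

  def __enter__(self):
    # Skip in tests, where an in-memory database must stay open.
    if not self.config_object.get('TESTING', False):
      _close_pooled_connection()  # hypothetical helper: release the connection

  def __exit__(self, exc_type, exc_value, traceback):
    # Nothing to do: a lazily-connecting ORM (e.g. peewee) reopens the
    # connection automatically on the next database access.
    pass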
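
Likewise, hedged sketches of the two stream helpers, inferred purely from their call sites in the hunk (the real implementations live elsewhere in the repo):

class StreamSlice(object):
  """File-like wrapper that discards the first `offset` bytes of the
  underlying stream; the diff uses it to skip bytes already stored and
  hashed by a previous push of the same chunk."""

  def __init__(self, fileobj, offset):
    self._fileobj = fileobj
    self._to_skip = offset

  def read(self, size=-1):
    while self._to_skip > 0:
      skipped = self._fileobj.read(min(self._to_skip, 8192))
      if not skipped:
        return b''  # stream ended inside the skipped region
      self._to_skip -= len(skipped)
    return self._fileobj.read(size)


def wrap_with_handler(fileobj, handler):
  """Returns a file-like object that feeds every chunk it reads to `handler`;
  here the handler is `found.sha_state.update`, so the digest is computed
  incrementally while the data streams to storage."""
  class _HandlerWrapper(object):
    def read(self, size=-1):
      buf = fileobj.read(size)
      if buf:
        handler(buf)
      return buf
  return _HandlerWrapper()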