Add additional Swift chunking tests

Joseph Schorr 2017-11-28 09:31:40 +02:00
parent 773ea9fc65
commit 3bbcb93977
2 changed files with 33 additions and 14 deletions

storage/swift.py

@@ -266,8 +266,8 @@ class SwiftStorage(BaseStorage):
     if length == 0:
       return 0, storage_metadata, None
 
-    # Note: Swift limits segments to a maximum of 5GB, so we keep writing segments until we
-    # are finished hitting the data limit.
+    # Note: Swift limits segments in size, so we need to sub-divide chunks into segments
+    # based on the configured maximum.
     total_bytes_written = 0
     upload_error = None
     read_until_end = length == filelike.READ_UNTIL_END
@@ -289,6 +289,7 @@ class SwiftStorage(BaseStorage):
       offset = offset + bytes_written
       total_bytes_written = total_bytes_written + bytes_written
       if bytes_written == 0 or (not read_until_end and length <= 0):
         return total_bytes_written, storage_metadata, upload_error
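
For context on the rewritten comment: Swift rejects objects above a configured segment size (5GB by default), so a single uploaded chunk may need to be written as several segments. Below is a minimal sketch of that sub-division idea, not the module's actual implementation; split_into_segments is a hypothetical helper, and only the _MAXIMUM_SEGMENT_SIZE name (which the tests below patch) comes from the code itself.

import io

# Hypothetical stand-in for storage.swift._MAXIMUM_SEGMENT_SIZE; Swift's
# default per-object limit is 5GB.
_MAXIMUM_SEGMENT_SIZE = 5 * 1024 * 1024 * 1024

READ_UNTIL_END = -1  # mirrors filelike.READ_UNTIL_END in the real code

def split_into_segments(filelike, length, max_segment_size=_MAXIMUM_SEGMENT_SIZE):
  # Yields (index, data) pairs, each no larger than max_segment_size.
  # A length of READ_UNTIL_END means "read until the stream is exhausted".
  read_until_end = length == READ_UNTIL_END
  index = 0
  while read_until_end or length > 0:
    to_read = max_segment_size if read_until_end else min(length, max_segment_size)
    data = filelike.read(to_read)
    if not data:
      break
    yield index, data
    index += 1
    if not read_until_end:
      length -= len(data)

# A 25-byte stream with a 10-byte cap splits into 10 + 10 + 5 bytes.
segments = list(split_into_segments(io.BytesIO(b'a' * 25), 25, max_segment_size=10))
assert [len(data) for _, data in segments] == [10, 10, 5]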

storage/test/test_swift.py

@@ -4,7 +4,7 @@ import hashlib
 import copy
 
 from collections import defaultdict
-from mock import MagicMock
+from mock import MagicMock, patch
 
 from storage import StorageContext
 from storage.swift import SwiftStorage
@@ -84,7 +84,7 @@ class FakeSwift(object):
       if ('container-name/' + key).startswith(prefix):
         new_contents.append((key, value['content']))
 
-    new_contents.sort(key=lambda value: value[0])
+    new_contents.sort(key=lambda value: int(value[0].split('/')[-1]))
     data = dict(data)
     data['content'] = ''.join([nc[1] for nc in new_contents])
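
The sort-key fix matters because segment names ending in a numeric index stop sorting correctly as plain strings once the index reaches two digits. A quick illustration (the 'segments/N' key shape is an assumption; the new key function only requires that keys end in an integer suffix):

paths = ['segments/10', 'segments/2', 'segments/1']

# Lexicographic order interleaves '10' between '1' and '2'...
assert sorted(paths) == ['segments/1', 'segments/10', 'segments/2']

# ...while sorting on the integer suffix, as the fixture now does,
# restores the true upload order.
assert sorted(paths, key=lambda p: int(p.split('/')[-1])) == \
    ['segments/1', 'segments/2', 'segments/10']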
@@ -204,21 +204,39 @@ def test_checksum():
   swift.put_content('somepath', 'hello world!')
   assert swift.get_checksum('somepath') is not None
 
-def test_chunked_upload():
+@pytest.mark.parametrize('read_until_end', [
+  (True,),
+  (False,),
+])
+@pytest.mark.parametrize('max_chunk_size', [
+  (10000000),
+  (10),
+  (5),
+  (2),
+  (1),
+])
+@pytest.mark.parametrize('chunks', [
+  (['this', 'is', 'some', 'chunked', 'data', '']),
+  (['this is a very large chunk of data', '']),
+  (['h', 'e', 'l', 'l', 'o', '']),
+])
+def test_chunked_upload(chunks, max_chunk_size, read_until_end):
   swift = FakeSwiftStorage(**base_args)
   uuid, metadata = swift.initiate_chunked_upload()
-  chunks = ['this', 'is', 'some', 'chunked', 'data', '']
 
   offset = 0
-  for chunk in chunks:
-    bytes_written, metadata, error = swift.stream_upload_chunk(uuid, offset, len(chunk),
-                                                               io.BytesIO(chunk), metadata)
-    assert error is None
-    assert len(chunk) == bytes_written
-    offset += len(chunk)
-
-  swift.complete_chunked_upload(uuid, 'somepath', metadata)
+  with patch('storage.swift._MAXIMUM_SEGMENT_SIZE', max_chunk_size):
+    for chunk in chunks:
+      chunk_length = len(chunk) if not read_until_end else -1
+      bytes_written, metadata, error = swift.stream_upload_chunk(uuid, offset, chunk_length,
+                                                                 io.BytesIO(chunk), metadata)
+      assert error is None
+      assert len(chunk) == bytes_written
+      offset += len(chunk)
+
+    swift.complete_chunked_upload(uuid, 'somepath', metadata)
   assert swift.get_content('somepath') == ''.join(chunks)
 
 def test_cancel_chunked_upload():
   swift = FakeSwiftStorage(**base_args)
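
The parametrized test drives chunks through artificially small segment caps by patching the module-level constant for the duration of the upload; mock.patch restores the original value on exit. Here is a standalone sketch of the same pattern, using patch.object on a stand-in class so it runs outside this repository (FakeConfig and MAX_SEGMENT are hypothetical names):

from mock import patch

class FakeConfig(object):
  MAX_SEGMENT = 10000000

def test_patch_restores_cap():
  # Inside the block, readers of FakeConfig.MAX_SEGMENT see the tiny cap,
  # which forces the segment-splitting path; the original value is put
  # back afterwards, even if the test body raises.
  with patch.object(FakeConfig, 'MAX_SEGMENT', 2):
    assert FakeConfig.MAX_SEGMENT == 2
  assert FakeConfig.MAX_SEGMENT == 10000000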