Fix stream_write to properly raise an exception on failure, instead of just silently failing
This was causing problems for customers using georeplication over unstable storage engines. Also adds tests for stream_write and copy, to ensure we detect failure.
This commit is contained in:
parent
a048ff3633
commit
3a0adfcb11
3 changed files with 59 additions and 18 deletions
|
@ -7,6 +7,8 @@ import pytest
|
|||
import moto
|
||||
import boto
|
||||
|
||||
from moto import mock_s3
|
||||
|
||||
from storage import S3Storage, StorageContext
|
||||
from storage.cloud import _CloudStorage, _PartUploadMetadata
|
||||
from storage.cloud import _CHUNKS_KEY
|
||||
|
@ -56,28 +58,29 @@ def test_basicop(storage_engine):
|
|||
assert not storage_engine.exists(_TEST_PATH)
|
||||
|
||||
|
||||
def test_copy_samecreds(storage_engine):
|
||||
@pytest.mark.parametrize('bucket, username, password', [
  pytest.param(_TEST_BUCKET, _TEST_USER, _TEST_PASSWORD, id='same credentials'),
  pytest.param('another_bucket', 'blech', 'password', id='different credentials'),
])
def test_copy(bucket, username, password, storage_engine):
  """Copy _TEST_PATH to a second S3 engine and verify the content round-trips.

  Parametrized over same-vs-different credentials/bucket for the target engine.
  """
  # Use the parametrized credentials. Previously these were hard-coded to the
  # _TEST_* values, so the 'different credentials' case exercised nothing new.
  another_engine = S3Storage(_TEST_CONTEXT, 'another/path', bucket, username, password)

  # The default test bucket already exists in the mocked S3; any alternate
  # bucket must be created before the copy can succeed.
  if bucket != _TEST_BUCKET:
    boto.connect_s3().create_bucket(bucket)

  storage_engine.copy_to(another_engine, _TEST_PATH)

  # Verify it can be retrieved.
  assert another_engine.get_content(_TEST_PATH) == _TEST_CONTENT
|
||||
|
||||
|
||||
def test_copy_differentcreds(storage_engine):
  """Copy the test content into an engine backed by a different bucket and credentials."""
  # Target engine lives in a separate bucket under different credentials.
  target_engine = S3Storage(_TEST_CONTEXT, 'another/path', 'another_bucket', 'blech', 'password')

  # The alternate bucket does not exist in the mocked S3 yet; create it first.
  boto.connect_s3().create_bucket('another_bucket')
  storage_engine.copy_to(target_engine, _TEST_PATH)

  # The copied content must be retrievable through the target engine.
  assert target_engine.get_content(_TEST_PATH) == _TEST_CONTENT
|
||||
|
||||
|
||||
def test_copy_with_error(storage_engine):
  """Copying to an engine whose bucket was never created must raise IOError."""
  broken_engine = S3Storage(_TEST_CONTEXT, 'another/path', 'anotherbucket', 'foo', 'bar')

  # No bucket exists for this engine, so the copy must fail loudly rather
  # than silently (the bug this test guards against).
  with pytest.raises(IOError):
    storage_engine.copy_to(broken_engine, _TEST_PATH)
|
||||
|
||||
|
||||
def test_stream_read(storage_engine):
|
||||
# Read the streaming content.
|
||||
data = ''.join(storage_engine.stream_read(_TEST_PATH))
|
||||
|
@ -95,6 +98,18 @@ def test_stream_write(storage_engine):
|
|||
assert storage_engine.get_content(_TEST_PATH) == new_data
|
||||
|
||||
|
||||
def test_stream_write_error():
  """stream_write against a missing bucket must raise IOError instead of failing silently."""
  with mock_s3():
    # Build an engine whose backing bucket was deliberately never created.
    engine = S3Storage(_TEST_CONTEXT, 'some/path', _TEST_BUCKET, _TEST_USER, _TEST_PASSWORD)

    # The write targets a nonexistent bucket, so it should surface an IOError.
    with pytest.raises(IOError):
      engine.stream_write(_TEST_PATH, StringIO('hello world'), content_type='Cool/Type')

    # The failed write must not have persisted anything.
    assert not engine.exists(_TEST_PATH)
|
||||
|
||||
|
||||
@pytest.mark.parametrize('chunk_count', [
|
||||
0,
|
||||
1,
|
||||
|
@ -107,7 +122,7 @@ def test_stream_write(storage_engine):
|
|||
def test_chunk_upload(storage_engine, chunk_count, force_client_side):
|
||||
if chunk_count == 0 and force_client_side:
|
||||
return
|
||||
|
||||
|
||||
upload_id, metadata = storage_engine.initiate_chunked_upload()
|
||||
final_data = ''
|
||||
|
||||
|
|
Reference in a new issue