diff --git a/storage/basestorage.py b/storage/basestorage.py
index 28e49c8fa..e5db17788 100644
--- a/storage/basestorage.py
+++ b/storage/basestorage.py
@@ -1,8 +1,11 @@
+import logging
 import tempfile
 
 from digest.digest_tools import content_path
 from util.registry.filelike import READ_UNTIL_END
 
+logger = logging.getLogger(__name__)
+
 
 class StoragePaths(object):
   shared_images = 'sharedimages'
@@ -104,6 +107,7 @@ class BaseStorage(StoragePaths):
         out_fp.write(buf)
         bytes_copied += len(buf)
-      except IOError:
+      except IOError as err:
+        logger.error('Failed to stream_write_to_fp: %s', err)
         break
 
     return bytes_copied
diff --git a/storage/cloud.py b/storage/cloud.py
index 2db756d7f..9b6c41eaa 100644
--- a/storage/cloud.py
+++ b/storage/cloud.py
@@ -175,6 +175,8 @@ class _CloudStorage(BaseStorageV2):
     # We are going to reuse this but be VERY careful to only read the number of bytes written to it
     buf = StringIO.StringIO()
 
+    chunk_sizes = []
+
     num_part = 1
     total_bytes_written = 0
     while size == filelike.READ_UNTIL_END or total_bytes_written < size:
@@ -190,6 +192,8 @@ class _CloudStorage(BaseStorageV2):
       if bytes_staged == 0:
         break
 
+      chunk_sizes.append(bytes_staged)
+
       buf.seek(0)
       mp.upload_part_from_file(buf, num_part, size=bytes_staged)
       total_bytes_written += bytes_staged
@@ -202,7 +206,11 @@ class _CloudStorage(BaseStorageV2):
     if total_bytes_written > 0:
       app.metric_queue.put('MultipartUploadSuccess', 1)
 
-    mp.complete_upload()
+    try:
+      mp.complete_upload()
+    except Exception:
+      logger.error('complete_upload failed: chunk_sizes: %s', chunk_sizes)
+      raise
     return total_bytes_written
 
   def list_directory(self, path=None):