Log stream_write_to_fp ioerrors

Silas Sewell 2015-12-07 15:40:18 -05:00
parent 0876fc754a
commit 76fd744453
2 changed files with 13 additions and 1 deletion


@@ -1,8 +1,11 @@
+import logging
 import tempfile
 
 from digest.digest_tools import content_path
 from util.registry.filelike import READ_UNTIL_END
 
+logger = logging.getLogger(__name__)
+
 
 class StoragePaths(object):
   shared_images = 'sharedimages'
@@ -104,6 +107,7 @@ class BaseStorage(StoragePaths):
         out_fp.write(buf)
         bytes_copied += len(buf)
       except IOError as err:
+        logger.error('Failed to stream_write_to_fp: %s', err)
         break
 
     return bytes_copied
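Taken together, this change makes the copy loop report read/write failures instead of silently stopping. For context, a minimal self-contained sketch of the same pattern (the buffer size, the READ_UNTIL_END sentinel value, and the standalone function shape here are assumptions for illustration, not the module's actual definitions):

  import logging

  logger = logging.getLogger(__name__)

  READ_UNTIL_END = -1       # assumed sentinel: copy until the input stream ends
  BUFFER_SIZE = 64 * 1024   # assumed chunk size

  def stream_write_to_fp(in_fp, out_fp, num_bytes=READ_UNTIL_END):
    """Copy up to num_bytes from in_fp to out_fp, logging IOErrors
    rather than raising them."""
    bytes_copied = 0
    while bytes_copied < num_bytes or num_bytes == READ_UNTIL_END:
      size_to_read = BUFFER_SIZE
      if num_bytes != READ_UNTIL_END:
        size_to_read = min(num_bytes - bytes_copied, BUFFER_SIZE)
      try:
        buf = in_fp.read(size_to_read)
        if not buf:
          break
        out_fp.write(buf)
        bytes_copied += len(buf)
      except IOError as err:
        # Log and stop; callers can compare bytes_copied to what they expected.
        logger.error('Failed to stream_write_to_fp: %s', err)
        break
    return bytes_copied

Since the function already signals short writes through its return value, logging before the break preserves the existing control flow while leaving a record of why the copy stopped early.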


@@ -175,6 +175,8 @@ class _CloudStorage(BaseStorageV2):
     # We are going to reuse this but be VERY careful to only read the number of bytes written to it
     buf = StringIO.StringIO()
 
+    chunk_sizes = []
+
     num_part = 1
     total_bytes_written = 0
     while size == filelike.READ_UNTIL_END or total_bytes_written < size:
@@ -190,6 +192,8 @@ class _CloudStorage(BaseStorageV2):
       if bytes_staged == 0:
         break
 
+      chunk_sizes.append(bytes_staged)
+
       buf.seek(0)
       mp.upload_part_from_file(buf, num_part, size=bytes_staged)
       total_bytes_written += bytes_staged
@@ -202,7 +206,11 @@ class _CloudStorage(BaseStorageV2):
     if total_bytes_written > 0:
       app.metric_queue.put('MultipartUploadSuccess', 1)
-      mp.complete_upload()
+      try:
+        mp.complete_upload()
+      except:
+        logger.error('complete_upload failed: chunk_sizes: %s', chunk_sizes)
+        raise
+
     return total_bytes_written
 
   def list_directory(self, path=None):
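The cloud-storage change records the size of every staged part so that a failed complete_upload leaves enough context to diagnose which chunking pattern triggered it. A hedged sketch of the same pattern against boto's multipart API (the upload_in_parts function and its chunks argument are hypothetical; mp stands for a boto MultiPartUpload, as in the diff above):

  import logging
  from StringIO import StringIO  # Python 2, matching the StringIO.StringIO usage above

  logger = logging.getLogger(__name__)

  def upload_in_parts(mp, chunks):
    """Upload each chunk as a multipart part, then complete the upload,
    logging the recorded part sizes if completion fails."""
    chunk_sizes = []
    for num_part, chunk in enumerate(chunks, start=1):
      buf = StringIO(chunk)
      mp.upload_part_from_file(buf, num_part, size=len(chunk))
      chunk_sizes.append(len(chunk))
    try:
      mp.complete_upload()
    except:
      # Re-raise after logging; the sizes show how the parts were staged.
      logger.error('complete_upload failed: chunk_sizes: %s', chunk_sizes)
      raise

Because the except clause re-raises, callers still see the original failure; the log line only adds the per-part sizes that would otherwise be lost once the buffers are discarded.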