Adapt the S3 key object to the Python stream file interface, one that is compatible with tarfile.
parent 2df40957c7
commit cccfe29c77
1 changed file with 26 additions and 2 deletions
@@ -1,6 +1,6 @@
 import cStringIO as StringIO
 import os
 import logging
 
 import boto.s3.connection
 import boto.s3.key
@@ -8,6 +8,30 @@ import boto.s3.key
 from . import Storage
 
 
+logger = logging.getLogger(__name__)
+
+
+class StreamReadKeyAsFile(object):
+  def __init__(self, key):
+    self._key = key
+    self._finished = False
+
+  def __enter__(self):
+    return self
+
+  def __exit__(self, type, value, tb):
+    self._key.close(fast=True)
+
+  def read(self, amt=None):
+    if self._finished:
+      return None
+
+    resp = self._key.read(amt)
+    if not resp:
+      self._finished = True
+    return resp
+
+
 class S3Storage(Storage):
 
   def __init__(self, storage_path, s3_access_key, s3_secret_key, s3_bucket):
@@ -65,7 +89,7 @@ class S3Storage(Storage):
     key = boto.s3.key.Key(self._s3_bucket, path)
     if not key.exists():
       raise IOError('No such key: \'{0}\''.format(path))
-    return key
+    return StreamReadKeyAsFile(key)
 
   def stream_write(self, path, fp):
     # Minimum size of upload part size on S3 is 5MB
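For reference, the class added above only needs to expose read() and the context-manager protocol, which is what tarfile's streaming modes require to walk a tarball stored in S3 without downloading the whole object first. The sketch below shows one way a caller might use it; the credentials, bucket name, and key name are hypothetical placeholders, not part of this commit, and it assumes StreamReadKeyAsFile is in scope (it is written against Python 2 and boto, like the code above).

import tarfile

import boto.s3.connection
import boto.s3.key

# Hypothetical credentials and object names, purely for illustration.
conn = boto.s3.connection.S3Connection('my-access-key', 'my-secret-key')
bucket = conn.get_bucket('my-bucket')
key = boto.s3.key.Key(bucket, 'images/layer.tar')

# tarfile's 'r|*' mode reads the archive as a forward-only stream with
# transparent compression detection, pulling data via fileobj.read(amt),
# so the wrapper never has to buffer the whole object in memory.
with StreamReadKeyAsFile(key) as fileobj:
  tar = tarfile.open(mode='r|*', fileobj=fileobj)
  for member in tar:
    print(member.name)
  tar.close()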