Try to use a 301 redirect to download directly from s3. Allow the s3 and userfiles to run without internet access.
This commit is contained in:
parent 82229fd8c8
commit bf85013ef6
4 changed files with 50 additions and 14 deletions
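The core of the change is the new `get_direct_download_url` method in the diff below: instead of proxying object bytes through the registry process, a request handler can redirect the client straight to a signed S3 URL. A minimal sketch of how a handler might use it — assuming a Flask app, a hypothetical `/download/<path>` route, and an illustrative import path; none of these appear in this diff:

```python
from flask import Flask, redirect

from storage.s3 import S3Storage  # hypothetical import path for this module

app = Flask(__name__)
# Placeholder credentials and bucket; real values come from configuration.
storage = S3Storage('registry/', 'ACCESS_KEY', 'SECRET_KEY', 'my-bucket')

@app.route('/download/<path:path>')
def download(path):
  # Sign a short-lived URL and redirect the client straight to S3,
  # so the object bytes never flow through this server.
  url = storage.get_direct_download_url(path, expires_in=60)
  return redirect(url, code=301)
```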
@@ -5,7 +5,7 @@ import logging
 import boto.s3.connection
 import boto.s3.key
 
-from basestorage import Storage
+from storage.basestorage import Storage
 
 
 logger = logging.getLogger(__name__)
@@ -35,10 +35,20 @@ class StreamReadKeyAsFile(object):
 class S3Storage(Storage):
 
   def __init__(self, storage_path, s3_access_key, s3_secret_key, s3_bucket):
-    self._s3_conn = \
-      boto.s3.connection.S3Connection(s3_access_key, s3_secret_key)
-    self._s3_bucket = self._s3_conn.get_bucket(s3_bucket)
+    self._initialized = False
+    self._bucket = s3_bucket
+    self._access_key = s3_access_key
+    self._secret_key = s3_secret_key
     self._root_path = storage_path
+    self._s3_conn = None
+    self._s3_bucket = None
+
+  def _initialize_s3(self):
+    if not self._initialized:
+      self._s3_conn = boto.s3.connection.S3Connection(self._access_key,
+                                                      self._secret_key)
+      self._s3_bucket = self._s3_conn.get_bucket(self._bucket)
+      self._initialized = True
 
   def _debug_key(self, key):
     """Used for debugging only."""
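The point of the `_initialize_s3` refactor is that constructing the storage object no longer opens a network connection; the S3 handshake is deferred until the first operation that actually needs it, which is what lets the process start without internet access. A small usage sketch (credentials, bucket, and path are placeholders):

```python
# No network access needed here: __init__ only records the settings.
storage = S3Storage('registry/', 'ACCESS_KEY', 'SECRET_KEY', 'my-bucket')

# The S3 connection is opened lazily, on the first real operation.
exists = storage.exists('some/path')  # triggers _initialize_s3()
```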
@@ -59,6 +69,7 @@ class S3Storage(Storage):
     return path
 
   def get_content(self, path):
+    self._initialize_s3()
     path = self._init_path(path)
     key = boto.s3.key.Key(self._s3_bucket, path)
     if not key.exists():
@@ -66,12 +77,20 @@ class S3Storage(Storage):
     return key.get_contents_as_string()
 
   def put_content(self, path, content):
+    self._initialize_s3()
     path = self._init_path(path)
     key = boto.s3.key.Key(self._s3_bucket, path)
     key.set_contents_from_string(content, encrypt_key=True)
     return path
 
+  def get_direct_download_url(self, path, expires_in=60):
+    self._initialize_s3()
+    path = self._init_path(path)
+    k = boto.s3.key.Key(self._s3_bucket, path)
+    return k.generate_url(expires_in)
+
   def stream_read(self, path):
+    self._initialize_s3()
     path = self._init_path(path)
     key = boto.s3.key.Key(self._s3_bucket, path)
     if not key.exists():
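`generate_url` here is boto's pre-signed URL helper: it signs a time-limited GET URL for the key, which is what makes the direct-download redirect possible. Roughly equivalent standalone boto code, with placeholder credentials, bucket, and key names:

```python
import boto.s3.connection
import boto.s3.key

conn = boto.s3.connection.S3Connection('ACCESS_KEY', 'SECRET_KEY')
bucket = conn.get_bucket('my-bucket')
key = boto.s3.key.Key(bucket, 'registry/some/path')

# A signed GET URL that expires 60 seconds from now; anyone holding it
# can fetch the object from S3 without further authentication.
url = key.generate_url(60)
```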
@@ -83,6 +102,7 @@ class S3Storage(Storage):
       yield buf
 
   def stream_read_file(self, path):
+    self._initialize_s3()
     path = self._init_path(path)
     key = boto.s3.key.Key(self._s3_bucket, path)
     if not key.exists():
@@ -91,6 +111,7 @@ class S3Storage(Storage):
 
   def stream_write(self, path, fp):
     # Minimum size of upload part size on S3 is 5MB
+    self._initialize_s3()
     buffer_size = 5 * 1024 * 1024
     if self.buffer_size > buffer_size:
       buffer_size = self.buffer_size
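The body of `stream_write` is truncated in this hunk, but the 5MB clamp exists because S3's multipart-upload API rejects any non-final part smaller than 5MB. A minimal sketch of the multipart pattern it relies on, with `bucket` and `fp` as placeholders for the real bucket and input stream:

```python
import io

buffer_size = 5 * 1024 * 1024  # S3's minimum size for a non-final part

mp = bucket.initiate_multipart_upload('registry/some/path', encrypt_key=True)
num_part = 1
while True:
  buf = fp.read(buffer_size)
  if not buf:
    break
  # Each chunk is uploaded as one numbered part of the multipart upload.
  mp.upload_part_from_file(io.BytesIO(buf), num_part)
  num_part += 1
mp.complete_upload()  # S3 assembles the parts into the final object
```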
@@ -111,6 +132,7 @@ class S3Storage(Storage):
     mp.complete_upload()
 
   def list_directory(self, path=None):
+    self._initialize_s3()
     path = self._init_path(path)
     if not path.endswith('/'):
       path += '/'
@@ -131,11 +153,13 @@ class S3Storage(Storage):
     raise OSError('No such directory: \'{0}\''.format(path))
 
   def exists(self, path):
+    self._initialize_s3()
     path = self._init_path(path)
     key = boto.s3.key.Key(self._s3_bucket, path)
     return key.exists()
 
   def remove(self, path):
+    self._initialize_s3()
     path = self._init_path(path)
     key = boto.s3.key.Key(self._s3_bucket, path)
     if key.exists():
@@ -149,6 +173,7 @@ class S3Storage(Storage):
       key.delete()
 
   def get_size(self, path):
+    self._initialize_s3()
     path = self._init_path(path)
     # Lookup does a HEAD HTTP Request on the object
     key = self._s3_bucket.lookup(path)
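As the inline comment notes, `Bucket.lookup` issues a HEAD request rather than downloading the object, so `get_size` never transfers the body. The remainder of the method is truncated here, but the usual boto idiom is along these lines (placeholder path):

```python
key = bucket.lookup('registry/some/path')  # HEAD request; None if missing
if key is None:
  raise OSError("No such key: 'registry/some/path'")
size = key.size  # populated from the HEAD response, no body download
```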