Use the secure s3 endpoint and store our files encrypted.

yackob03 2013-10-31 11:32:08 -04:00
parent 65aad1a2d9
commit b96f678df8
2 changed files with 127 additions and 129 deletions

View file

@@ -20,8 +20,7 @@ class S3FileWriteException(Exception):

 class UserRequestFiles(object):
   def __init__(self, s3_access_key, s3_secret_key, bucket_name):
-    self._s3_conn = boto.connect_s3(s3_access_key, s3_secret_key,
-                                    is_secure=False)
+    self._s3_conn = boto.connect_s3(s3_access_key, s3_secret_key)
     self._bucket_name = bucket_name
     self._bucket = self._s3_conn.get_bucket(bucket_name)
     self._access_key = s3_access_key
@@ -34,7 +33,8 @@ class UserRequestFiles(object):
     file_id = str(uuid4())
     full_key = os.path.join(self._prefix, file_id)
     k = Key(self._bucket, full_key)
-    url = k.generate_url(300, 'PUT', headers={'Content-Type': mime_type})
+    url = k.generate_url(300, 'PUT', headers={'Content-Type': mime_type},
+                         encrypt_key=True)
     return (url, file_id)

   def store_file(self, flask_file):
@@ -43,7 +43,7 @@ class UserRequestFiles(object):
     k = Key(self._bucket, full_key)
     logger.debug('Setting s3 content type to: %s' % flask_file.content_type)
     k.set_metadata('Content-Type', flask_file.content_type)
-    bytes_written = k.set_contents_from_file(flask_file)
+    bytes_written = k.set_contents_from_file(flask_file, encrypt_key=True)

     if bytes_written == 0:
       raise S3FileWriteException('Unable to write file to S3')
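
Note that generate_url(..., encrypt_key=True) signs an extra
x-amz-server-side-encryption: AES256 header into the presigned URL, so the
client that performs the PUT must send that header (and the signed
Content-Type) exactly, or S3 rejects the request with a signature mismatch.
A minimal sketch of the client side, assuming the requests library; the
function name and its arguments are placeholders, not part of this repo:

import requests

def upload_via_presigned_url(url, file_path, mime_type):
  # `url` is the presigned value returned alongside file_id in the diff
  # above. Both headers were included in the signature, so both are
  # mandatory on the actual PUT.
  with open(file_path, 'rb') as f:
    return requests.put(url, data=f, headers={
      'Content-Type': mime_type,
      'x-amz-server-side-encryption': 'AES256',
    })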

View file

@@ -12,148 +12,146 @@ logger = logging.getLogger(__name__)

 class StreamReadKeyAsFile(object):
   def __init__(self, key):
     self._key = key
     self._finished = False

   def __enter__(self):
     return self

   def __exit__(self, type, value, tb):
     self._key.close(fast=True)

   def read(self, amt=None):
     if self._finished:
       return None

     resp = self._key.read(amt)
     if not resp:
       self._finished = True
     return resp


 class S3Storage(Storage):
   def __init__(self, storage_path, s3_access_key, s3_secret_key, s3_bucket):
     self._s3_conn = \
-        boto.s3.connection.S3Connection(s3_access_key,
-                                        s3_secret_key,
-                                        is_secure=False)
+        boto.s3.connection.S3Connection(s3_access_key, s3_secret_key)
     self._s3_bucket = self._s3_conn.get_bucket(s3_bucket)
     self._root_path = storage_path

   def _debug_key(self, key):
     """Used for debugging only."""
     orig_meth = key.bucket.connection.make_request

     def new_meth(*args, **kwargs):
       print '#' * 16
       print args
       print kwargs
       print '#' * 16
       return orig_meth(*args, **kwargs)
     key.bucket.connection.make_request = new_meth

   def _init_path(self, path=None):
     path = os.path.join(self._root_path, path) if path else self._root_path
     if path and path[0] == '/':
       return path[1:]
     return path

   def get_content(self, path):
     path = self._init_path(path)
     key = boto.s3.key.Key(self._s3_bucket, path)
     if not key.exists():
       raise IOError('No such key: \'{0}\''.format(path))
     return key.get_contents_as_string()

   def put_content(self, path, content):
     path = self._init_path(path)
     key = boto.s3.key.Key(self._s3_bucket, path)
-    key.set_contents_from_string(content)
+    key.set_contents_from_string(content, encrypt_key=True)
     return path

   def stream_read(self, path):
     path = self._init_path(path)
     key = boto.s3.key.Key(self._s3_bucket, path)
     if not key.exists():
       raise IOError('No such key: \'{0}\''.format(path))
     while True:
       buf = key.read(self.buffer_size)
       if not buf:
         break
       yield buf

   def stream_read_file(self, path):
     path = self._init_path(path)
     key = boto.s3.key.Key(self._s3_bucket, path)
     if not key.exists():
       raise IOError('No such key: \'{0}\''.format(path))
     return StreamReadKeyAsFile(key)

   def stream_write(self, path, fp):
     # Minimum size of upload part size on S3 is 5MB
     buffer_size = 5 * 1024 * 1024
     if self.buffer_size > buffer_size:
       buffer_size = self.buffer_size
     path = self._init_path(path)
-    mp = self._s3_bucket.initiate_multipart_upload(path)
+    mp = self._s3_bucket.initiate_multipart_upload(path, encrypt_key=True)
     num_part = 1
     while True:
       try:
         buf = fp.read(buffer_size)
         if not buf:
           break
         io = StringIO.StringIO(buf)
         mp.upload_part_from_file(io, num_part)
         num_part += 1
         io.close()
       except IOError:
         break
     mp.complete_upload()

   def list_directory(self, path=None):
     path = self._init_path(path)
     if not path.endswith('/'):
       path += '/'
     ln = 0
     if self._root_path != '/':
       ln = len(self._root_path)
     exists = False
     for key in self._s3_bucket.list(prefix=path, delimiter='/'):
       exists = True
       name = key.name
       if name.endswith('/'):
         yield name[ln:-1]
       else:
         yield name[ln:]
     if exists is False:
       # In order to be compliant with the LocalStorage API. Even though
       # S3 does not have a concept of folders.
       raise OSError('No such directory: \'{0}\''.format(path))

   def exists(self, path):
     path = self._init_path(path)
     key = boto.s3.key.Key(self._s3_bucket, path)
     return key.exists()

   def remove(self, path):
     path = self._init_path(path)
     key = boto.s3.key.Key(self._s3_bucket, path)
     if key.exists():
       # It's a file
       key.delete()
       return
     # We assume it's a directory
     if not path.endswith('/'):
       path += '/'
     for key in self._s3_bucket.list(prefix=path):
       key.delete()

   def get_size(self, path):
     path = self._init_path(path)
     # Lookup does a HEAD HTTP Request on the object
     key = self._s3_bucket.lookup(path)
     if not key:
       raise OSError('No such key: \'{0}\''.format(path))
     return key.size
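
Both write paths in S3Storage now request server-side encryption: put_content
passes encrypt_key=True per object, and stream_write requests it once at
initiate_multipart_upload(), which covers every subsequent
upload_part_from_file() call. One way to confirm the setting took effect is to
re-fetch a freshly written key and inspect Key.encrypted, which boto fills in
from the x-amz-server-side-encryption response header. A minimal sketch
(Python 2 / boto 2.x to match the code above; credentials and bucket name are
placeholders):

import boto

conn = boto.connect_s3('ACCESS_KEY', 'SECRET_KEY')  # is_secure defaults to True (HTTPS)
bucket = conn.get_bucket('my-bucket')

key = bucket.new_key('sanity/check.txt')
key.set_contents_from_string('hello', encrypt_key=True)

# A re-fetched key should report 'AES256' once SSE has been applied.
print bucket.get_key('sanity/check.txt').encrypted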