First attempt at making the config loadable via string config overrides supplied through an environment variable.

This commit is contained in:
jakedt 2014-04-03 17:31:46 -04:00
parent b95d3ec329
commit e87ffa20cf
21 changed files with 367 additions and 397 deletions

View file

@ -0,0 +1,36 @@
from storage.local import LocalStorage
from storage.s3 import S3Storage
from storage.fakestorage import FakeStorage
class Storage(object):
    """Flask-style extension that selects a storage backend from app config.

    The backend is chosen by the ``STORAGE_TYPE`` config key
    ('LocalStorage', 'S3Storage', anything else -> FakeStorage); attribute
    access on this object is delegated to the chosen backend instance.
    """

    def __init__(self, app=None):
        self.app = app
        # Standard Flask extension factory pattern: defer backend
        # construction to init_app() when no app is supplied yet.
        self.state = self.init_app(app) if app is not None else None

    def init_app(self, app):
        """Build the backend described by *app*'s config and register it."""
        kind = app.config.get('STORAGE_TYPE', 'LocalStorage')
        root = app.config.get('STORAGE_PATH', '')

        if kind == 'LocalStorage':
            backend = LocalStorage(root)
        elif kind == 'S3Storage':
            backend = S3Storage(
                root,
                app.config.get('STORAGE_AWS_ACCESS_KEY', ''),
                app.config.get('STORAGE_AWS_SECRET_KEY', ''),
                app.config.get('STORAGE_S3_BUCKET', ''),
            )
        else:
            # Unknown type: fall back to the inert fake backend.
            backend = FakeStorage()

        # register extension with app
        app.extensions = getattr(app, 'extensions', {})
        app.extensions['storage'] = backend
        return backend

    def __getattr__(self, name):
        # Delegate unknown attributes to the active backend; attributes the
        # backend lacks resolve to None rather than raising AttributeError.
        return getattr(self.state, name, None)

View file

@ -1,7 +1,7 @@
import tempfile
class Storage(object):
class BaseStorage(object):
"""Storage is organized as follow:
$ROOT/images/<image_id>/json

24
storage/fakestorage.py Normal file
View file

@ -0,0 +1,24 @@
from storage.basestorage import BaseStorage
class FakeStorage(BaseStorage):
    """Inert storage backend: writes vanish, reads fail or come back empty.

    Useful for tests and configurations where no real persistence is wanted.
    """

    def _init_path(self, path=None, create=False):
        # Nothing to create on disk; echo the path back untouched.
        return path

    def exists(self, path):
        # Nothing is ever stored, so nothing ever exists.
        return False

    def get_content(self, path):
        # Reads always fail: there is no backing store.
        raise IOError('Fake files are fake!')

    def put_content(self, path, content):
        # Pretend the write succeeded by returning the target path.
        return path

    def stream_read(self, path):
        # Generator yielding a single empty chunk.
        yield ''

    def stream_write(self, path, fp):
        # Discard the incoming stream entirely.
        pass

    def remove(self, path):
        # Nothing to delete.
        pass

View file

@ -2,85 +2,85 @@
import os
import shutil
from basestorage import Storage
from storage.basestorage import BaseStorage
class LocalStorage(BaseStorage):
    """Filesystem-backed storage rooted at *storage_path*.

    All public ``path`` arguments are interpreted relative to the root;
    ``self.buffer_size`` (inherited from BaseStorage) sets the chunk size
    for streaming reads/writes.

    NOTE(review): the diff hunk interleaved the old and new copies of every
    method and spliced ``remove()`` lines into ``stream_write``; this is the
    deduplicated new-version class.
    """

    def __init__(self, storage_path):
        self._root_path = storage_path

    def _init_path(self, path=None, create=False):
        """Join *path* onto the root; optionally create parent directories."""
        path = os.path.join(self._root_path, path) if path else self._root_path
        if create is True:
            dirname = os.path.dirname(path)
            if not os.path.exists(dirname):
                os.makedirs(dirname)
        return path

    def get_content(self, path):
        """Return the entire contents of *path* as a string."""
        path = self._init_path(path)
        with open(path, mode='r') as f:
            return f.read()

    def put_content(self, path, content):
        """Write *content* to *path*, creating parent dirs; return the path."""
        path = self._init_path(path, create=True)
        with open(path, mode='w') as f:
            f.write(content)
        return path

    def stream_read(self, path):
        """Yield the file's bytes in buffer_size chunks."""
        path = self._init_path(path)
        with open(path, mode='rb') as f:
            while True:
                buf = f.read(self.buffer_size)
                if not buf:
                    break
                yield buf

    def stream_read_file(self, path):
        """Return an open binary file object for *path* (caller closes it)."""
        path = self._init_path(path)
        return open(path, mode='rb')

    def stream_write(self, path, fp):
        """Copy the readable *fp* into *path* in buffer_size chunks.

        An IOError while reading from *fp* ends the copy silently,
        keeping whatever was written so far.
        """
        # Size is mandatory
        path = self._init_path(path, create=True)
        with open(path, mode='wb') as f:
            while True:
                try:
                    buf = fp.read(self.buffer_size)
                    if not buf:
                        break
                    f.write(buf)
                except IOError:
                    break

    def list_directory(self, path=None):
        """Yield root-relative entries of the directory at *path*.

        Raises OSError when the directory is empty or missing.
        """
        path = self._init_path(path)
        prefix = path[len(self._root_path) + 1:] + '/'
        exists = False
        for d in os.listdir(path):
            exists = True
            yield prefix + d
        if exists is False:
            # Raises OSError even when the directory is empty
            # (to be consistent with S3)
            raise OSError('No such directory: \'{0}\''.format(path))

    def exists(self, path):
        """Return True if *path* exists under the root."""
        path = self._init_path(path)
        return os.path.exists(path)

    def remove(self, path):
        """Delete *path* (file or directory tree); missing files are ignored."""
        path = self._init_path(path)
        if os.path.isdir(path):
            shutil.rmtree(path)
            return
        try:
            os.remove(path)
        except OSError:
            # Best-effort: a missing file is not an error.
            pass

    def get_size(self, path):
        """Return the size of *path* in bytes."""
        path = self._init_path(path)
        return os.path.getsize(path)

View file

@ -5,7 +5,7 @@ import logging
import boto.s3.connection
import boto.s3.key
from storage.basestorage import Storage
from storage.basestorage import BaseStorage
logger = logging.getLogger(__name__)
@ -32,7 +32,7 @@ class StreamReadKeyAsFile(object):
return resp
class S3Storage(Storage):
class S3Storage(BaseStorage):
def __init__(self, storage_path, s3_access_key, s3_secret_key, s3_bucket):
self._initialized = False