Add the basics of geographic data distribution and get the tests to work.
This commit is contained in:
parent 2bf12996f5
commit bf98575feb

23 changed files with 198 additions and 100 deletions
@@ -1,6 +1,7 @@
 from storage.local import LocalStorage
 from storage.s3 import S3Storage
 from storage.fakestorage import FakeStorage
+from storage.distributedstorage import DistributedStorage


 class Storage(object):
@@ -12,25 +13,32 @@ class Storage(object):
       self.state = None

   def init_app(self, app):
-    storage_type = app.config.get('STORAGE_TYPE', 'LocalStorage')
-    path = app.config.get('STORAGE_PATH', '')
+    # storage_type = app.config.get('STORAGE_TYPE', 'LocalStorage')
+    # path = app.config.get('STORAGE_PATH', '')

-    if storage_type == 'LocalStorage':
-      storage = LocalStorage(path)
+    storages = {}
+    for location, storage_params in app.config.get('DISTRIBUTED_STORAGE_CONFIG').items():
+      driver = storage_params[0]

-    elif storage_type == 'S3Storage':
-      access_key = app.config.get('STORAGE_AWS_ACCESS_KEY', '')
-      secret_key = app.config.get('STORAGE_AWS_SECRET_KEY', '')
-      bucket = app.config.get('STORAGE_S3_BUCKET', '')
-      storage = S3Storage(path, access_key, secret_key, bucket)
+      if driver == 'LocalStorage':
+        storage = LocalStorage(*storage_params[1:])
+      elif driver == 'S3Storage':
+        storage = S3Storage(*storage_params[1:])
+      else:
+        storage = FakeStorage()

-    else:
-      storage = FakeStorage()
+      storages[location] = storage
+
+    preference = app.config.get('DISTRIBUTED_STORAGE_PREFERENCE', None)
+    if not preference:
+      preference = storages.keys()
+
+    d_storage = DistributedStorage(storages, preference)

     # register extension with app
     app.extensions = getattr(app, 'extensions', {})
-    app.extensions['storage'] = storage
-    return storage
+    app.extensions['storage'] = d_storage
+    return d_storage

   def __getattr__(self, name):
     return getattr(self.state, name, None)
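For context, a minimal sketch of the settings the new init_app consumes. The location names, paths, and credentials below are hypothetical, not part of this commit; each value is a tuple of the driver name followed by its constructor arguments (storage_params[0] and storage_params[1:] above), with the S3 arguments inferred from the old S3Storage(path, access_key, secret_key, bucket) call.

# Hypothetical config sketch; location names and values are placeholders.
DISTRIBUTED_STORAGE_CONFIG = {
  # (driver name, *constructor args) -- LocalStorage takes a root path
  'local_us': ('LocalStorage', 'test/data/registry'),
  # S3Storage args follow the old call above: path, access key, secret key, bucket
  's3_us_east': ('S3Storage', '/registry', 'access-key', 'secret-key', 'some-bucket'),
}

# Optional ordered preference; when unset, init_app falls back to storages.keys()
DISTRIBUTED_STORAGE_PREFERENCE = ['local_us']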
@@ -1,33 +1,8 @@
 import tempfile


-class BaseStorage(object):
-
-  """Storage is organized as follow:
-  $ROOT/images/<image_id>/json
-  $ROOT/images/<image_id>/layer
-  $ROOT/repositories/<namespace>/<repository_name>/<tag_name>
-  """
-
-  # Useful if we want to change those locations later without rewriting
-  # the code which uses Storage
-  repositories = 'repositories'
-  images = 'images'
+class StoragePaths(object):
   shared_images = 'sharedimages'
-  # Set the IO buffer to 64kB
-  buffer_size = 64 * 1024
-
-  @staticmethod
-  def temp_store_handler():
-    tmpf = tempfile.TemporaryFile()
-
-    def fn(buf):
-      try:
-        tmpf.write(buf)
-      except IOError:
-        pass
-
-    return tmpf, fn

   def image_path(self, storage_uuid):
     return '{0}/{1}/'.format(self.shared_images, storage_uuid)
@@ -52,6 +27,33 @@ class BaseStorage(object):
     base_path = self.image_path(storage_uuid)
     return '{0}diffs.json'.format(base_path)


+class BaseStorage(StoragePaths):
+  """Storage is organized as follow:
+  $ROOT/images/<image_id>/json
+  $ROOT/images/<image_id>/layer
+  $ROOT/repositories/<namespace>/<repository_name>/<tag_name>
+  """
+
+  # Useful if we want to change those locations later without rewriting
+  # the code which uses Storage
+  repositories = 'repositories'
+  images = 'images'
+  # Set the IO buffer to 64kB
+  buffer_size = 64 * 1024
+
+  @staticmethod
+  def temp_store_handler():
+    tmpf = tempfile.TemporaryFile()
+
+    def fn(buf):
+      try:
+        tmpf.write(buf)
+      except IOError:
+        pass
+
+    return tmpf, fn
+
+  def get_direct_download_url(self, path, expires_in=60):
+    return None
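The split above pulls the pure path-building helpers into StoragePaths so they can be reused without committing to an I/O backend; the new DistributedStorage below inherits from it for exactly that reason. A small illustrative sketch, assuming the module layout in this commit:

from storage.basestorage import StoragePaths

# StoragePaths only formats keys/paths, so it needs no storage driver to be used.
paths = StoragePaths()
print(paths.image_path('some-storage-uuid'))  # 'sharedimages/some-storage-uuid/'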
storage/distributedstorage.py (new file, 41 additions)
@@ -0,0 +1,41 @@
+import random
+import logging
+
+from functools import wraps
+
+from storage.basestorage import StoragePaths, BaseStorage
+
+
+logger = logging.getLogger(__name__)
+
+
+def _location_aware(unbound_func):
+  @wraps(unbound_func)
+  def wrapper(self, locations, *args, **kwargs):
+    storage = None
+    for preferred in self.preferred_locations:
+      if preferred in locations:
+        storage = self._storages[preferred]
+
+    if not storage:
+      storage = self._storages[random.sample(locations, 1)[0]]
+
+    storage_func = getattr(storage, unbound_func.__name__)
+    return storage_func(*args, **kwargs)
+  return wrapper
+
+
+class DistributedStorage(StoragePaths):
+  def __init__(self, storages, preferred_locations=[]):
+    self._storages = dict(storages)
+    self.preferred_locations = list(preferred_locations)
+
+  get_direct_download_url = _location_aware(BaseStorage.get_direct_download_url)
+  get_content = _location_aware(BaseStorage.get_content)
+  put_content = _location_aware(BaseStorage.put_content)
+  stream_read = _location_aware(BaseStorage.stream_read)
+  stream_read_file = _location_aware(BaseStorage.stream_read_file)
+  stream_write = _location_aware(BaseStorage.stream_write)
+  list_directory = _location_aware(BaseStorage.list_directory)
+  exists = _location_aware(BaseStorage.exists)
+  remove = _location_aware(BaseStorage.remove)
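A rough usage sketch of the new class. The location names are made up, and the (path, content) signatures on the proxied methods are assumed from the conventional storage drivers rather than shown in this diff; the point is that every proxied call takes the set of candidate locations as its first argument, which _location_aware resolves to a preferred (or random) concrete driver before forwarding the rest of the call.

from storage.local import LocalStorage
from storage.distributedstorage import DistributedStorage

# Hypothetical wiring: two local drivers standing in for two regions.
storage = DistributedStorage({'local_us': LocalStorage('/tmp/registry-us'),
                              'local_eu': LocalStorage('/tmp/registry-eu')},
                             preferred_locations=['local_eu'])

# 'local_eu' is preferred, so these calls are served by that driver.
storage.put_content(['local_us', 'local_eu'], 'hello.txt', 'hello world')
data = storage.get_content(['local_us', 'local_eu'], 'hello.txt')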