Add a test for swift path computation
parent c6d7eba98d
commit 909e7d45b7

7 changed files with 74 additions and 27 deletions
@@ -15,7 +15,6 @@ from collections import namedtuple
 from util.registry import filelike
 from storage.basestorage import BaseStorageV2, InvalidChunkException
-import app


 logger = logging.getLogger(__name__)

@@ -48,8 +47,8 @@ class StreamReadKeyAsFile(BufferedIOBase):


 class _CloudStorage(BaseStorageV2):
-  def __init__(self, connection_class, key_class, connect_kwargs, upload_params, storage_path,
-               access_key, secret_key, bucket_name):
+  def __init__(self, metric_queue, connection_class, key_class, connect_kwargs, upload_params,
+               storage_path, access_key, secret_key, bucket_name):
     super(_CloudStorage, self).__init__()

     self.automatic_chunk_size = 5 * 1024 * 1024
@@ -65,6 +64,7 @@ class _CloudStorage(BaseStorageV2):
     self._connect_kwargs = connect_kwargs
     self._cloud_conn = None
     self._cloud_bucket = None
+    self._metric_queue = metric_queue

   def _initialize_cloud_conn(self):
     if not self._initialized:
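
The hunk above injects the metric queue through the constructor instead of reaching for the removed global import app. As the later hunks show, the storage code only relies on a put(metric_name, value) call, so a unit test can pass in a trivial stand-in. A minimal sketch, assuming nothing beyond that method (FakeMetricQueue is a hypothetical helper, not part of this change):

# Hypothetical test double: records metrics instead of reporting them.
# It only implements the put() call the storage code below relies on.
class FakeMetricQueue(object):
  def __init__(self):
    self.recorded = []

  def put(self, metric_name, value, **kwargs):
    self.recorded.append((metric_name, value, kwargs))

Because the queue is now an explicit dependency, a test can assert on recorded entries after exercising an upload path, without patching module-level state.
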
@@ -161,7 +161,7 @@ class _CloudStorage(BaseStorageV2):
     if content_encoding is not None:
       metadata['Content-Encoding'] = content_encoding

-    app.metric_queue.put('MultipartUploadStart', 1)
+    self._metric_queue.put('MultipartUploadStart', 1)
     return self._cloud_bucket.initiate_multipart_upload(path, metadata=metadata,
                                                         **self._upload_params)

@@ -198,7 +198,7 @@ class _CloudStorage(BaseStorageV2):
       except IOError as ex:
         logger.warn('stream write error: %s', ex)
         error = ex
-        app.metric_queue.put('MultipartUploadFailure', 1)
+        self._metric_queue.put('MultipartUploadFailure', 1)
         if cancel_on_error:
           mp.cancel_upload()
           return 0, error
@@ -206,7 +206,7 @@ class _CloudStorage(BaseStorageV2):
           break

     if total_bytes_written > 0:
-      app.metric_queue.put('MultipartUploadSuccess', 1)
+      self._metric_queue.put('MultipartUploadSuccess', 1)
       mp.complete_upload()
     return total_bytes_written, error

@@ -380,7 +380,8 @@ class _CloudStorage(BaseStorageV2):


 class S3Storage(_CloudStorage):
-  def __init__(self, storage_path, s3_access_key, s3_secret_key, s3_bucket, host=None):
+  def __init__(self, metric_queue, storage_path, s3_access_key, s3_secret_key, s3_bucket,
+               host=None):
     upload_params = {
       'encrypt_key': True,
     }
@@ -390,7 +391,7 @@ class S3Storage(_CloudStorage):
         raise ValueError('host name must not start with http:// or https://')

       connect_kwargs['host'] = host
-    super(S3Storage, self).__init__(boto.s3.connection.S3Connection, boto.s3.key.Key,
+    super(S3Storage, self).__init__(metric_queue, boto.s3.connection.S3Connection, boto.s3.key.Key,
                                     connect_kwargs, upload_params, storage_path, s3_access_key,
                                     s3_secret_key, s3_bucket)

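
With the signature change above, callers pass the queue as the first positional argument when wiring up the driver. A rough usage sketch, using the FakeMetricQueue stand-in sketched earlier; the path, credentials, and bucket name below are placeholders, not values from this diff:

# Placeholder wiring example for illustration only.
metric_queue = FakeMetricQueue()
storage = S3Storage(metric_queue, 'some/storage/path', 'access-key', 'secret-key', 'some-bucket')
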
@@ -414,12 +415,12 @@ class S3Storage(_CloudStorage):
 </CORSConfiguration>""")

 class GoogleCloudStorage(_CloudStorage):
-  def __init__(self, storage_path, access_key, secret_key, bucket_name):
+  def __init__(self, metric_queue, storage_path, access_key, secret_key, bucket_name):
     upload_params = {}
     connect_kwargs = {}
-    super(GoogleCloudStorage, self).__init__(boto.gs.connection.GSConnection, boto.gs.key.Key,
-                                             connect_kwargs, upload_params, storage_path,
-                                             access_key, secret_key, bucket_name)
+    super(GoogleCloudStorage, self).__init__(metric_queue, boto.gs.connection.GSConnection,
+                                             boto.gs.key.Key, connect_kwargs, upload_params,
+                                             storage_path, access_key, secret_key, bucket_name)

   def setup(self):
     self.get_cloud_bucket().set_cors_xml("""<?xml version="1.0" encoding="UTF-8"?>
@@ -474,16 +475,17 @@ class GoogleCloudStorage(_CloudStorage):


 class RadosGWStorage(_CloudStorage):
-  def __init__(self, hostname, is_secure, storage_path, access_key, secret_key, bucket_name):
+  def __init__(self, metric_queue, hostname, is_secure, storage_path, access_key, secret_key,
+               bucket_name):
     upload_params = {}
     connect_kwargs = {
       'host': hostname,
       'is_secure': is_secure,
       'calling_format': boto.s3.connection.OrdinaryCallingFormat(),
     }
-    super(RadosGWStorage, self).__init__(boto.s3.connection.S3Connection, boto.s3.key.Key,
-                                         connect_kwargs, upload_params, storage_path, access_key,
-                                         secret_key, bucket_name)
+    super(RadosGWStorage, self).__init__(metric_queue, boto.s3.connection.S3Connection,
+                                         boto.s3.key.Key, connect_kwargs, upload_params,
+                                         storage_path, access_key, secret_key, bucket_name)

   # TODO remove when radosgw supports cors: http://tracker.ceph.com/issues/8718#change-38624
   def get_direct_download_url(self, path, expires_in=60, requires_cors=False):
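
The swift path computation test named in the commit title lives in one of the other changed files and is not shown in this diff. As a rough illustration of the property such a test typically pins down, here is a self-contained sketch; the _object_path helper and the expected value are assumptions for illustration, not the repository's actual code:

import unittest
from posixpath import join


def _object_path(storage_path, object_name):
  # Illustrative helper (not the repository's implementation): join the
  # configured storage path and the object name into a single Swift key,
  # tolerating leading and trailing slashes in the configured path.
  return join(storage_path.strip('/'), object_name.lstrip('/'))


class TestSwiftPathComputation(unittest.TestCase):
  def test_slash_variants_produce_same_path(self):
    # Different spellings of the configured storage path should compute
    # the same object path.
    for prefix in ('basepath', '/basepath', 'basepath/', '/basepath/'):
      self.assertEqual('basepath/some/object', _object_path(prefix, 'some/object'))


if __name__ == '__main__':
  unittest.main()
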