Merge pull request #1713 from coreos-inc/enable-iam
Enable IAM support for S3 storage
commit 2caa82d091
4 changed files with 14 additions and 12 deletions

@@ -1,6 +1,6 @@
 import logging
 
-from peewee import JOIN_LEFT_OUTER, fn, SQL, IntegrityError
+from peewee import SQL, IntegrityError
 
 from cachetools import lru_cache
 from collections import namedtuple

@@ -76,10 +76,10 @@ angular.module("core-config-setup", ['angularFileUpload'])
     ],
     'S3Storage': [
-      {'name': 's3_access_key', 'title': 'AWS Access Key', 'placeholder': 'accesskeyhere', 'kind': 'text'},
-      {'name': 's3_secret_key', 'title': 'AWS Secret Key', 'placeholder': 'secretkeyhere', 'kind': 'text'},
       {'name': 's3_bucket', 'title': 'S3 Bucket', 'placeholder': 'my-cool-bucket', 'kind': 'text'},
       {'name': 'storage_path', 'title': 'Storage Directory', 'placeholder': '/path/inside/bucket', 'kind': 'text'},
+      {'name': 's3_access_key', 'title': 'AWS Access Key (optional if using IAM)', 'placeholder': 'accesskeyhere', 'kind': 'text', 'optional': true},
+      {'name': 's3_secret_key', 'title': 'AWS Secret Key (optional if using IAM)', 'placeholder': 'secretkeyhere', 'kind': 'text', 'optional': true},
       {'name': 'host', 'title': 'S3 Host (optional)', 'placeholder': 's3.amazonaws.com', 'kind': 'text', 'optional': true}
     ],
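
With both key fields now marked optional, the setup form can save an S3 engine that names only a bucket and path, deferring credential resolution to IAM at connection time. A minimal sketch of the engine parameters this produces, assuming Quay's usual storage-config shape (the surrounding key names here are illustrative, not taken from the diff):

  # Hypothetical engine definition with no explicit AWS credentials.
  DISTRIBUTED_STORAGE_CONFIG = {
      'default': ('S3Storage', {
          's3_bucket': 'my-cool-bucket',
          'storage_path': '/path/inside/bucket',
          # 's3_access_key' / 's3_secret_key' omitted: IAM supplies them
      }),
  }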

@@ -47,7 +47,7 @@ class StreamReadKeyAsFile(BufferedIOBase):
 
 class _CloudStorage(BaseStorageV2):
   def __init__(self, metric_queue, connection_class, key_class, connect_kwargs, upload_params,
-               storage_path, access_key, secret_key, bucket_name):
+               storage_path, bucket_name, access_key=None, secret_key=None):
     super(_CloudStorage, self).__init__()
 
     self.automatic_chunk_size = 5 * 1024 * 1024
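
Defaulting access_key and secret_key to None is what actually enables IAM: when boto is handed no explicit keys, it resolves credentials through its own provider chain, which ends at the EC2 instance metadata service backing IAM roles. A small illustration of the two modes, independent of the Quay code:

  import boto.s3.connection

  # Explicit credentials: boto uses exactly these.
  conn = boto.s3.connection.S3Connection('AKIA...', '...')

  # No credentials: boto falls back to its provider chain, i.e. the
  # AWS_ACCESS_KEY_ID/AWS_SECRET_ACCESS_KEY environment variables, the
  # boto config file, and finally the instance metadata service (IAM role).
  conn = boto.s3.connection.S3Connection()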
@@ -258,6 +258,7 @@ class _CloudStorage(BaseStorageV2):
     # First try to copy directly via boto, but only if the storages are the
     # same type, with the same access information.
     if (self.__class__ == destination.__class__ and
+        self._access_key and self._secret_key and
         self._access_key == destination._access_key and
         self._secret_key == destination._secret_key):
       logger.debug('Copying file from %s to %s via a direct boto copy', self._cloud_bucket,
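
The added conjunct is not redundant. Under IAM both engines hold None for both keys, so the equality tests alone (None == None) would steer every copy down the direct boto path even though the effective credentials are unknown; requiring truthy keys forces such copies onto the generic streaming fallback. The guard restated as a standalone predicate (the helper name is ours, not the code's):

  def can_direct_copy(src, dst):
      # Direct bucket-to-bucket copy only when both engines carry the
      # same explicit credentials; IAM-backed engines (keys of None)
      # always fail the truthiness check and stream instead.
      return (src.__class__ == dst.__class__ and
              src._access_key and src._secret_key and
              src._access_key == dst._access_key and
              src._secret_key == dst._secret_key)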
@@ -397,7 +398,7 @@ class _CloudStorage(BaseStorageV2):
 
 
 class S3Storage(_CloudStorage):
-  def __init__(self, metric_queue, storage_path, s3_access_key, s3_secret_key, s3_bucket,
+  def __init__(self, metric_queue, storage_path, s3_bucket, s3_access_key=None, s3_secret_key=None,
                host=None):
     upload_params = {
       'encrypt_key': True,
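
Moving s3_bucket ahead of the credentials keeps the required arguments first, so IAM deployments never have to pass placeholder keys. Both call styles under the new signature (metric_queue shown as None, as in the tests below):

  # Explicit credentials, in the new argument order.
  engine = S3Storage(None, 'some/path', 'my-cool-bucket', 'AKIA...', '...')

  # IAM-based: omit the keys and let boto resolve them.
  engine = S3Storage(None, 'some/path', 'my-cool-bucket')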
@@ -409,8 +410,9 @@ class S3Storage(_CloudStorage):
       connect_kwargs['host'] = host
     super(S3Storage, self).__init__(metric_queue, boto.s3.connection.S3Connection, boto.s3.key.Key,
-                                    connect_kwargs, upload_params, storage_path, s3_access_key,
-                                    s3_secret_key, s3_bucket)
+                                    connect_kwargs, upload_params, storage_path, s3_bucket,
+                                    access_key=s3_access_key or None,
+                                    secret_key=s3_secret_key or None)
 
   def setup(self):
     self.get_cloud_bucket().set_cors_xml("""<?xml version="1.0" encoding="UTF-8"?>
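
The `or None` coercion is doing real work here: since the form fields are now optional, the config can plausibly deliver empty strings, and boto would treat '' as a literal (and invalid) credential rather than falling back to IAM. Truthiness folds both absent and blank values to None:

  # '' and None normalize identically, so a blank form field behaves
  # exactly like an omitted one.
  for raw in ('', None, 'AKIA123'):
      print(repr(raw or None))   # None, None, 'AKIA123'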
@@ -437,7 +439,7 @@ class GoogleCloudStorage(_CloudStorage):
     connect_kwargs = {}
     super(GoogleCloudStorage, self).__init__(metric_queue, boto.gs.connection.GSConnection,
                                              boto.gs.key.Key, connect_kwargs, upload_params,
-                                             storage_path, access_key, secret_key, bucket_name)
+                                             storage_path, bucket_name, access_key, secret_key)
 
   def setup(self):
     self.get_cloud_bucket().set_cors_xml("""<?xml version="1.0" encoding="UTF-8"?>
@@ -502,7 +504,7 @@ class RadosGWStorage(_CloudStorage):
     }
     super(RadosGWStorage, self).__init__(metric_queue, boto.s3.connection.S3Connection,
                                          boto.s3.key.Key, connect_kwargs, upload_params,
-                                         storage_path, access_key, secret_key, bucket_name)
+                                         storage_path, bucket_name, access_key, secret_key)
 
   # TODO remove when radosgw supports cors: http://tracker.ceph.com/issues/8718#change-38624
   def get_direct_download_url(self, path, expires_in=60, requires_cors=False):
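
GoogleCloudStorage and RadosGWStorage still require explicit keys, so both simply reorder their positional arguments to match the new base signature. Because a silently swapped bucket and key would still run, passing the trailing arguments by keyword is a defensive variant of the same call (a sketch, not what the patch does):

  super(RadosGWStorage, self).__init__(
      metric_queue, boto.s3.connection.S3Connection, boto.s3.key.Key,
      connect_kwargs, upload_params, storage_path,
      bucket_name,               # bucket now precedes the credentials
      access_key=access_key,     # keywords make the new order explicit
      secret_key=secret_key)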
@@ -20,7 +20,7 @@ class TestCloudStorage(unittest.TestCase):
 
     # Create a test bucket and put some test content.
     boto.connect_s3().create_bucket(_TEST_BUCKET)
-    self.engine = S3Storage(None, 'some/path', _TEST_USER, _TEST_PASSWORD, _TEST_BUCKET)
+    self.engine = S3Storage(None, 'some/path', _TEST_BUCKET, _TEST_USER, _TEST_PASSWORD)
     self.engine.put_content(_TEST_PATH, _TEST_CONTENT)
 
   def tearDown(self):
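
setUp creates the bucket through a plain boto.connect_s3(), which only works against a stubbed S3; the moto library is the usual stub for boto-era test suites (an assumption here, not stated by the diff). A self-contained sketch of the pattern:

  import boto
  import unittest
  from moto import mock_s3   # assumption: moto provides the S3 stub

  class SetupSketch(unittest.TestCase):
      @mock_s3
      def test_bucket_roundtrip(self):
          # Runs entirely in memory; no real AWS credentials needed.
          boto.connect_s3().create_bucket('some_bucket')
          # S3Storage(None, 'some/path', 'some_bucket', 'user', 'pass')
          # would now read and write against this fake bucket.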
@@ -50,7 +50,7 @@ class TestCloudStorage(unittest.TestCase):
 
   def test_copy_samecreds(self):
     # Copy the content to another engine.
-    another_engine = S3Storage(None, 'another/path', _TEST_USER, _TEST_PASSWORD, _TEST_BUCKET)
+    another_engine = S3Storage(None, 'another/path', _TEST_BUCKET, _TEST_USER, _TEST_PASSWORD)
     self.engine.copy_to(another_engine, _TEST_PATH)
 
     # Verify it can be retrieved.
@@ -58,7 +58,7 @@ class TestCloudStorage(unittest.TestCase):
 
   def test_copy_differentcreds(self):
     # Copy the content to another engine.
-    another_engine = S3Storage(None, 'another/path', 'blech', 'password', 'another_bucket')
+    another_engine = S3Storage(None, 'another/path', 'another_bucket', 'blech', 'password')
     boto.connect_s3().create_bucket('another_bucket')
 
     self.engine.copy_to(another_engine, _TEST_PATH)