From b3c23886186a1a3cea210f597d537f6209683da8 Mon Sep 17 00:00:00 2001
From: Matt Jibson
Date: Wed, 18 Nov 2015 17:19:33 -0500
Subject: [PATCH] Allow setting of boto's S3 host for SIGv4

The problem only happens when a user has configured the new AWS Frankfurt
region for their S3 backend. It is the only region to require the new v4
signature; all other regions support both v2 and v4. I'm not sure which
version is used by default on US Standard.

We could attempt to figure out where the bucket is hosted based on its DNS
resolution and auto-populate the host field that way, but I think the amount
of effort needed to make that work correctly outweighs its benefit over this
simpler solution.

fixes #863
fixes #764
---
 static/js/core-config-setup.js | 3 ++-
 storage/cloud.py               | 4 +++-
 2 files changed, 5 insertions(+), 2 deletions(-)

diff --git a/static/js/core-config-setup.js b/static/js/core-config-setup.js
index 13b49b865..e100a01b5 100644
--- a/static/js/core-config-setup.js
+++ b/static/js/core-config-setup.js
@@ -69,7 +69,8 @@ angular.module("core-config-setup", ['angularFileUpload'])
         {'name': 's3_access_key', 'title': 'AWS Access Key', 'placeholder': 'accesskeyhere', 'kind': 'text'},
         {'name': 's3_secret_key', 'title': 'AWS Secret Key', 'placeholder': 'secretkeyhere', 'kind': 'text'},
         {'name': 's3_bucket', 'title': 'S3 Bucket', 'placeholder': 'my-cool-bucket', 'kind': 'text'},
-        {'name': 'storage_path', 'title': 'Storage Directory', 'placeholder': '/path/inside/bucket', 'kind': 'text'}
+        {'name': 'storage_path', 'title': 'Storage Directory', 'placeholder': '/path/inside/bucket', 'kind': 'text'},
+        {'name': 'host', 'title': 'S3 Host (optional)', 'placeholder': 's3.amazonaws.com', 'kind': 'text', 'optional': true}
       ],
 
       'GoogleCloudStorage': [
diff --git a/storage/cloud.py b/storage/cloud.py
index cc2750dae..5e7ab772e 100644
--- a/storage/cloud.py
+++ b/storage/cloud.py
@@ -250,11 +250,13 @@ class _CloudStorage(BaseStorage):
 
 
 class S3Storage(_CloudStorage):
-  def __init__(self, storage_path, s3_access_key, s3_secret_key, s3_bucket):
+  def __init__(self, storage_path, s3_access_key, s3_secret_key, s3_bucket, host=None):
     upload_params = {
       'encrypt_key': True,
     }
     connect_kwargs = {}
+    if host:
+      connect_kwargs['host'] = host
     super(S3Storage, self).__init__(boto.s3.connection.S3Connection, boto.s3.key.Key,
                                     connect_kwargs, upload_params, storage_path,
                                     s3_access_key, s3_secret_key, s3_bucket)
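
Note (not part of the patch): the new optional `host` field is passed straight
through `connect_kwargs` to boto's `S3Connection`. A minimal sketch of what that
amounts to at the boto level; the Frankfurt endpoint, credentials, and bucket
name below are illustrative assumptions, not values taken from the patch:

    # Sketch only: shows the effect of supplying the "S3 Host (optional)" value.
    # Endpoint, credentials, and bucket name are placeholders for illustration.
    import boto.s3.connection

    conn = boto.s3.connection.S3Connection(
        aws_access_key_id='accesskeyhere',
        aws_secret_access_key='secretkeyhere',
        host='s3.eu-central-1.amazonaws.com',  # region-specific endpoint, e.g. Frankfurt
    )
    bucket = conn.get_bucket('my-cool-bucket', validate=False)

Leaving the field blank keeps boto's default endpoint, so existing configurations
are unaffected.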