From 756e8ec84868a1f678b3efab5a18def9bc97ef9c Mon Sep 17 00:00:00 2001
From: Jake Moshenko
Date: Tue, 9 Sep 2014 16:52:53 -0400
Subject: [PATCH] Send the content type through to the cloud engines.

---
 data/userfiles.py      |  6 ++++--
 storage/basestorage.py |  2 +-
 storage/cloud.py       | 16 +++++++++++++---
 storage/fakestorage.py |  2 +-
 storage/local.py       |  2 +-
 5 files changed, 20 insertions(+), 8 deletions(-)

diff --git a/data/userfiles.py b/data/userfiles.py
index e6d21c1c1..c3113802f 100644
--- a/data/userfiles.py
+++ b/data/userfiles.py
@@ -33,8 +33,10 @@ class UserfilesHandlers(View):
       # encoding (Gunicorn)
       input_stream = request.environ['wsgi.input']
 
+    c_type = request.headers.get('Content-Type', None)
+
     path = self._files.get_file_id_path(file_id)
-    self._storage.stream_write(self._locations, path, input_stream)
+    self._storage.stream_write(self._locations, path, input_stream, c_type)
 
     return make_response('Okay')
 
@@ -78,7 +80,7 @@ class DelegateUserfiles(object):
   def store_file(self, file_like_obj, content_type):
     file_id = str(uuid4())
     path = self.get_file_id_path(file_id)
-    self._storage.stream_write(self._locations, path, file_like_obj)
+    self._storage.stream_write(self._locations, path, file_like_obj, content_type)
     return file_id
 
   def get_file_url(self, file_id, expires_in=300, requires_cors=False):
diff --git a/storage/basestorage.py b/storage/basestorage.py
index aa6434b8e..78d49aa1f 100644
--- a/storage/basestorage.py
+++ b/storage/basestorage.py
@@ -75,7 +75,7 @@ class BaseStorage(StoragePaths):
   def stream_read_file(self, path):
     raise NotImplementedError
 
-  def stream_write(self, path, fp):
+  def stream_write(self, path, fp, content_type=None):
     raise NotImplementedError
 
   def list_directory(self, path=None):
diff --git a/storage/cloud.py b/storage/cloud.py
index a576a6401..28325e187 100644
--- a/storage/cloud.py
+++ b/storage/cloud.py
@@ -125,14 +125,20 @@ class _CloudStorage(BaseStorage):
       raise IOError('No such key: \'{0}\''.format(path))
     return StreamReadKeyAsFile(key)
 
-  def stream_write(self, path, fp):
+  def stream_write(self, path, fp, content_type=None):
     # Minimum size of upload part size on S3 is 5MB
     self._initialize_cloud_conn()
     buffer_size = 5 * 1024 * 1024
     if self.buffer_size > buffer_size:
       buffer_size = self.buffer_size
     path = self._init_path(path)
-    mp = self._cloud_bucket.initiate_multipart_upload(path, **self._upload_params)
+
+    metadata = {}
+    if content_type is not None:
+      metadata['Content-Type'] = content_type
+
+    mp = self._cloud_bucket.initiate_multipart_upload(path, metadata=metadata,
+                                                      **self._upload_params)
     num_part = 1
     while True:
       try:
@@ -215,11 +221,15 @@ class GoogleCloudStorage(_CloudStorage):
                                              connect_kwargs, upload_params, storage_path,
                                              access_key, secret_key, bucket_name)
 
-  def stream_write(self, path, fp):
+  def stream_write(self, path, fp, content_type=None):
     # Minimum size of upload part size on S3 is 5MB
     self._initialize_cloud_conn()
     path = self._init_path(path)
     key = self._key_class(self._cloud_bucket, path)
+
+    if content_type is not None:
+      key.set_metadata('Content-Type', content_type)
+
     key.set_contents_from_stream(fp)
 
 
diff --git a/storage/fakestorage.py b/storage/fakestorage.py
index 5761acf2f..232f5af24 100644
--- a/storage/fakestorage.py
+++ b/storage/fakestorage.py
@@ -14,7 +14,7 @@ class FakeStorage(BaseStorage):
   def stream_read(self, path):
     yield ''
 
-  def stream_write(self, path, fp):
+  def stream_write(self, path, fp, content_type=None):
     pass
 
   def remove(self, path):
diff --git a/storage/local.py b/storage/local.py
index 55e79077b..a800645a8 100644
--- a/storage/local.py
+++ b/storage/local.py
@@ -41,7 +41,7 @@ class LocalStorage(BaseStorage):
     path = self._init_path(path)
     return open(path, mode='rb')
 
-  def stream_write(self, path, fp):
+  def stream_write(self, path, fp, content_type=None):
     # Size is mandatory
     path = self._init_path(path, create=True)
     with open(path, mode='wb') as f:
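
Note for reviewers: below is a minimal runnable sketch (not part of the patch)
of how the new optional content_type argument flows from a caller into a
storage engine. InMemoryStorage and the demo at the bottom are hypothetical
stand-ins; only the stream_write(path, fp, content_type=None) signature and
the pass-through in store_file come from the change above.

    from io import BytesIO
    from uuid import uuid4


    class InMemoryStorage(object):
      """Hypothetical engine; records the content type next to the bytes,
      much as the cloud engines in the patch attach it as key metadata."""

      def __init__(self):
        self._blobs = {}

      def stream_write(self, path, fp, content_type=None):
        # Engines with nowhere to record a content type (LocalStorage,
        # FakeStorage) simply ignore the new optional argument.
        self._blobs[path] = (fp.read(), content_type)


    def store_file(storage, file_like_obj, content_type):
      # Mirrors DelegateUserfiles.store_file: the caller forwards the
      # uploaded file's Content-Type to the storage engine.
      file_id = str(uuid4())
      storage.stream_write('userfiles/' + file_id, file_like_obj, content_type)
      return file_id


    storage = InMemoryStorage()
    file_id = store_file(storage, BytesIO(b'{"ok": true}'), 'application/json')
    print(storage._blobs['userfiles/' + file_id])
    # (b'{"ok": true}', 'application/json')

Defaulting content_type to None keeps the change backwards compatible:
existing callers, and engines that cannot store a type, need no further
changes.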