Send the content type through to the cloud engines.
parent 29d40db5ea
commit 756e8ec848
5 changed files with 20 additions and 8 deletions
@@ -33,8 +33,10 @@ class UserfilesHandlers(View):
       # encoding (Gunicorn)
       input_stream = request.environ['wsgi.input']
 
+    c_type = request.headers.get('Content-Type', None)
+
     path = self._files.get_file_id_path(file_id)
-    self._storage.stream_write(self._locations, path, input_stream)
+    self._storage.stream_write(self._locations, path, input_stream, c_type)
 
     return make_response('Okay')
 
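With this change, whatever Content-Type the client sends on the upload is forwarded to the storage engine instead of being dropped. A minimal sketch of exercising the handler; the URL and route here are hypothetical, not part of this diff:

    import requests

    # Hypothetical endpoint; only the header forwarding is what this commit changes.
    resp = requests.put('https://quay.example/userfiles/some-file-id',
                        data=open('report.pdf', 'rb'),
                        headers={'Content-Type': 'application/pdf'})
    assert resp.text == 'Okay'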
@@ -78,7 +80,7 @@ class DelegateUserfiles(object):
   def store_file(self, file_like_obj, content_type):
     file_id = str(uuid4())
     path = self.get_file_id_path(file_id)
-    self._storage.stream_write(self._locations, path, file_like_obj)
+    self._storage.stream_write(self._locations, path, file_like_obj, content_type)
     return file_id
 
   def get_file_url(self, file_id, expires_in=300, requires_cors=False):
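Note that store_file already accepted a content_type argument; before this commit it simply was not forwarded to the engine. A sketch of a call site under that interface, where the userfiles instance name is assumed for illustration:

    from io import BytesIO

    # 'userfiles' is an illustrative DelegateUserfiles instance, not from this diff.
    file_id = userfiles.store_file(BytesIO(b'{"ok": true}'), 'application/json')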
@@ -75,7 +75,7 @@ class BaseStorage(StoragePaths):
   def stream_read_file(self, path):
     raise NotImplementedError
 
-  def stream_write(self, path, fp):
+  def stream_write(self, path, fp, content_type=None):
     raise NotImplementedError
 
   def list_directory(self, path=None):
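Defaulting content_type to None in the base class keeps every existing caller and engine source-compatible; engines with nowhere to record the value can simply ignore it. A minimal sketch of a conforming engine (hypothetical class, not part of this commit):

    class DiscardStorage(BaseStorage):
      def stream_write(self, path, fp, content_type=None):
        # Drain the stream; content_type is accepted but unused,
        # mirroring what FakeStorage does below.
        while fp.read(4096):
          pass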
@@ -125,14 +125,20 @@ class _CloudStorage(BaseStorage):
       raise IOError('No such key: \'{0}\''.format(path))
     return StreamReadKeyAsFile(key)
 
-  def stream_write(self, path, fp):
+  def stream_write(self, path, fp, content_type=None):
     # Minimum size of upload part size on S3 is 5MB
     self._initialize_cloud_conn()
     buffer_size = 5 * 1024 * 1024
     if self.buffer_size > buffer_size:
       buffer_size = self.buffer_size
     path = self._init_path(path)
-    mp = self._cloud_bucket.initiate_multipart_upload(path, **self._upload_params)
+
+    metadata = {}
+    if content_type is not None:
+      metadata['Content-Type'] = content_type
+
+    mp = self._cloud_bucket.initiate_multipart_upload(path, metadata=metadata,
+                                                      **self._upload_params)
     num_part = 1
     while True:
       try:
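A note on the boto 2 API used here: initiate_multipart_upload accepts a metadata dict, and boto treats recognized user-settable fields such as Content-Type as real HTTP headers rather than x-amz-meta-* pairs, so the stored object should be served back with that type. A hedged way to check after an upload, assuming an already-configured bucket:

    # Sketch only: 'bucket' and 'path' are assumed to exist in scope.
    key = bucket.get_key(path)
    print(key.content_type)  # e.g. 'application/pdf'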
@@ -215,11 +221,15 @@ class GoogleCloudStorage(_CloudStorage):
                                              connect_kwargs, upload_params, storage_path,
                                              access_key, secret_key, bucket_name)
 
-  def stream_write(self, path, fp):
+  def stream_write(self, path, fp, content_type=None):
     # Minimum size of upload part size on S3 is 5MB
     self._initialize_cloud_conn()
     path = self._init_path(path)
     key = self._key_class(self._cloud_bucket, path)
+
+    if content_type is not None:
+      key.set_metadata('Content-Type', content_type)
+
     key.set_contents_from_stream(fp)
 
 
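Ordering matters on this path: set_metadata only records the value on the key object, and boto sends the accumulated metadata with the upload request itself, so it has to be called before set_contents_from_stream. The same pattern in isolation, with bucket and fp assumed:

    key = bucket.new_key('some/path')
    key.set_metadata('Content-Type', 'image/png')  # must precede the upload
    key.set_contents_from_stream(fp)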
@@ -14,7 +14,7 @@ class FakeStorage(BaseStorage):
   def stream_read(self, path):
     yield ''
 
-  def stream_write(self, path, fp):
+  def stream_write(self, path, fp, content_type=None):
     pass
 
   def remove(self, path):
@@ -41,7 +41,7 @@ class LocalStorage(BaseStorage):
     path = self._init_path(path)
     return open(path, mode='rb')
 
-  def stream_write(self, path, fp):
+  def stream_write(self, path, fp, content_type=None):
     # Size is mandatory
     path = self._init_path(path, create=True)
     with open(path, mode='wb') as f:
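The LocalStorage hunk is cut off by the diff context; the body that follows presumably copies fp into the opened file, with content_type accepted purely for interface parity. A sketch of that buffered-copy pattern:

    import shutil

    # content_type is ignored locally; the filesystem has nowhere to store it.
    with open(path, mode='wb') as f:
      shutil.copyfileobj(fp, f)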