Finish the build logs archiver; add handlers for cloud and local storage that handle gzip-encoded archived content.
parent 2455c17f96
commit 8b3a3178b0
10 changed files with 82 additions and 18 deletions
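
The archiver's write path is not part of the excerpt below. As a rough caller-side sketch (the function name, archive path layout, and JSON payload format are assumptions, not taken from this commit), gzip-compressing the serialized logs and handing the encoding to the updated stream_write could look like this:

    # Hypothetical caller-side sketch; not code from this commit.
    import gzip
    import json
    from io import BytesIO

    def archive_build_logs(storage, build_id, log_entries):
      # Serialize the log entries and gzip them in memory before upload.
      payload = json.dumps({'logs': log_entries}).encode('utf-8')
      compressed = BytesIO()
      zipped = gzip.GzipFile(fileobj=compressed, mode='wb')
      zipped.write(payload)
      zipped.close()
      compressed.seek(0)

      # Assumed archive path layout; content_encoding tells the storage
      # backend to record the object as gzip-encoded.
      storage.stream_write('logarchive/{0}'.format(build_id), compressed,
                           content_type='application/json',
                           content_encoding='gzip')
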
@@ -128,7 +128,7 @@ class _CloudStorage(BaseStorage):
       raise IOError('No such key: \'{0}\''.format(path))
     return StreamReadKeyAsFile(key)
 
-  def stream_write(self, path, fp, content_type=None):
+  def stream_write(self, path, fp, content_type=None, content_encoding=None):
     # Minimum size of upload part size on S3 is 5MB
     self._initialize_cloud_conn()
     buffer_size = 5 * 1024 * 1024
@@ -140,6 +140,9 @@ class _CloudStorage(BaseStorage):
     if content_type is not None:
       metadata['Content-Type'] = content_type
 
+    if content_encoding is not None:
+      metadata['Content-Encoding'] = content_encoding
+
     mp = self._cloud_bucket.initiate_multipart_upload(path, metadata=metadata,
                                                       **self._upload_params)
     num_part = 1
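
Storing Content-Encoding in the upload metadata means S3 returns a Content-Encoding: gzip header when the object is fetched; boto does not decompress on read, so a consumer of the archived logs still inflates the bytes itself. A minimal read-side sketch, assuming the existing stream_read_file handler and a JSON payload (both assumptions about the surrounding code):

    import gzip
    import json
    from io import BytesIO

    def read_archived_logs(storage, path):
      # stream_read_file returns the raw stored bytes; the archive was written
      # gzip-encoded, so decompress before parsing the JSON payload.
      raw = storage.stream_read_file(path).read()
      zipped = gzip.GzipFile(fileobj=BytesIO(raw), mode='rb')
      return json.loads(zipped.read())
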
@@ -224,7 +227,7 @@ class GoogleCloudStorage(_CloudStorage):
                                             connect_kwargs, upload_params, storage_path,
                                             access_key, secret_key, bucket_name)
 
-  def stream_write(self, path, fp, content_type=None):
+  def stream_write(self, path, fp, content_type=None, content_encoding=None):
     # Minimum size of upload part size on S3 is 5MB
     self._initialize_cloud_conn()
     path = self._init_path(path)
@@ -233,6 +236,9 @@ class GoogleCloudStorage(_CloudStorage):
     if content_type is not None:
       key.set_metadata('Content-Type', content_type)
 
+    if content_encoding is not None:
+      key.set_metadata('Content-Encoding', content_encoding)
+
     key.set_contents_from_stream(fp)
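
The local-storage handler mentioned in the commit message is among the 10 changed files but is not shown in this excerpt. Purely as a hypothetical sketch of what a filesystem backend accepting the same parameter might look like (class name and path handling are assumptions):

    import os

    class LocalStorageSketch(object):
      # Hypothetical filesystem handler; not the code from this commit.
      def __init__(self, storage_root):
        self._root = storage_root

      def stream_write(self, path, fp, content_type=None, content_encoding=None):
        # The archiver hands over already-gzipped bytes, so a local backend
        # can write them verbatim; the encoding matters when serving them back.
        target = os.path.join(self._root, path)
        parent_dir = os.path.dirname(target)
        if not os.path.exists(parent_dir):
          os.makedirs(parent_dir)
        out = open(target, 'wb')
        try:
          while True:
            chunk = fp.read(64 * 1024)
            if not chunk:
              break
            out.write(chunk)
        finally:
          out.close()
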