Merge remote-tracking branch 'upstream/master' into python-registry-v2

commit 210ed7cf02
148 changed files with 1829 additions and 445 deletions
@@ -14,8 +14,8 @@ from uuid import uuid4
from collections import namedtuple

from util.registry import filelike

from storage.basestorage import BaseStorageV2, InvalidChunkException
import app

logger = logging.getLogger(__name__)
@@ -161,6 +161,7 @@ class _CloudStorage(BaseStorageV2):
    if content_encoding is not None:
      metadata['Content-Encoding'] = content_encoding

    app.metric_queue.put('MultipartUploadStart', 1)
    return self._cloud_bucket.initiate_multipart_upload(path, metadata=metadata,
                                                        **self._upload_params)

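This hunk (and the one that follows) reports multipart upload events through app.metric_queue.put(name, value). Only that put call is visible in the diff, so the following is a minimal, hypothetical sketch of such a queue — the QueuedMetric and MetricQueue names and the background-consumer design are assumptions, not Quay's actual implementation — showing how a hot storage path can record a counter without blocking on the metrics backend.

# Hypothetical sketch only: names and structure are assumed, not taken from the
# real app.metric_queue. The only interface mirrored from the diff is put(name, value).
import logging
import Queue
from collections import namedtuple
from threading import Thread

logger = logging.getLogger(__name__)

QueuedMetric = namedtuple('QueuedMetric', ['name', 'value'])


class MetricQueue(object):
  def __init__(self, maxsize=1000):
    self._queue = Queue.Queue(maxsize=maxsize)

  def put(self, name, value):
    # Called from upload code paths; never block, and drop the metric rather
    # than stall an upload if the queue is full.
    try:
      self._queue.put_nowait(QueuedMetric(name, value))
    except Queue.Full:
      logger.warning('Metric queue full; dropping metric %s', name)

  def _consume(self):
    while True:
      metric = self._queue.get()
      # A real consumer would ship this to CloudWatch, statsd, etc.
      logger.debug('Reporting metric %s=%s', metric.name, metric.value)

  def start(self):
    worker = Thread(target=self._consume)
    worker.daemon = True
    worker.start()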
@@ -194,11 +195,13 @@ class _CloudStorage(BaseStorageV2):
        total_bytes_written += bytes_staged
        num_part += 1
      except IOError:
        app.metric_queue.put('MultipartUploadFailure', 1)
        if cancel_on_error:
          mp.cancel_upload()
          return 0

    if total_bytes_written > 0:
      app.metric_queue.put('MultipartUploadSuccess', 1)
      mp.complete_upload()
    return total_bytes_written

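For context on the calls being instrumented here, the boto 2 multipart API driven by the code above follows the lifecycle sketched below. This is a standalone illustration, not the method from the diff: the bucket name, credentials, file path, and chunking are placeholders, and error handling is reduced to the cancel-on-failure branch mirrored from the hunk.

# Standalone boto 2 sketch of the multipart lifecycle instrumented above.
# Bucket name, credentials, and chunk size are placeholders.
from cStringIO import StringIO

import boto.s3.connection

conn = boto.s3.connection.S3Connection('ACCESS-KEY', 'SECRET-KEY')
bucket = conn.get_bucket('example-bucket')

mp = bucket.initiate_multipart_upload('uploads/example-object')
try:
  with open('/tmp/example.blob', 'rb') as fp:
    part_num = 1
    while True:
      chunk = fp.read(5 * 1024 * 1024)  # S3 requires >= 5 MB for all but the last part
      if not chunk:
        break
      # upload_part_from_file reads the part's bytes from a file-like object.
      mp.upload_part_from_file(StringIO(chunk), part_num)
      part_num += 1
  mp.complete_upload()
except IOError:
  # Mirrors the cancel_on_error branch above: abandon the upload on failure.
  mp.cancel_upload()
  raise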
@@ -253,6 +256,28 @@ class _CloudStorage(BaseStorageV2):
    return k.etag[1:-1][:7]

  def copy_to(self, destination, path):
    # First try to copy directly via boto, but only if the storages are the
    # same type, with the same access information.
    if (self.__class__ == destination.__class__ and
        self._access_key == destination._access_key and
        self._secret_key == destination._secret_key):
      logger.debug('Copying file from %s to %s via a direct boto copy', self._cloud_bucket,
                   destination._cloud_bucket)

      source_path = self._init_path(path)
      source_key = self._key_class(self._cloud_bucket, source_path)

      dest_path = destination._init_path(path)
      source_key.copy(destination._cloud_bucket, dest_path)
      return

    # Fallback to a slower, default copy.
    logger.debug('Copying file from %s to %s via a streamed copy', self._cloud_bucket,
                 destination)
    with self.stream_read_file(path) as fp:
      destination.stream_write(path, fp)

  def _rel_upload_path(self, uuid):
    return 'uploads/{0}'.format(uuid)

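The new copy_to method prefers a server-side copy when the two storage engines are the same class and share credentials, and only falls back to streaming the object through the registry process otherwise. A rough usage sketch follows; it assumes S3Storage takes the same (storage_path, access_key, secret_key, bucket_name) arguments as the GoogleCloudStorage constructor visible further down in this diff, and the module path, credentials, bucket names, and object path are all placeholders.

# Illustrative only: module path, constructor signature, and all values are assumptions.
from storage.cloud import S3Storage

source = S3Storage('/registry', 'ACCESS-KEY', 'SECRET-KEY', 'primary-bucket')
replica = S3Storage('/registry', 'ACCESS-KEY', 'SECRET-KEY', 'replica-bucket')

# Same class and same credentials, so copy_to takes the direct boto path and the
# object is copied inside S3 without its bytes passing through this process.
source.copy_to(replica, 'some/object/path')

# With a different engine type (or different credentials), copy_to would instead
# open stream_read_file(path) on the source and stream_write it to the destination.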
@@ -371,7 +396,6 @@ class S3Storage(_CloudStorage):
        </CORSRule>
      </CORSConfiguration>""")


class GoogleCloudStorage(_CloudStorage):
  def __init__(self, storage_path, access_key, secret_key, bucket_name):
    upload_params = {}