Add automatic storage replication

Adds a worker to automatically replicate data between storage engines and to update the database accordingly.
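
In outline, such a worker drains a queue of replication requests, calls the new copy_to method (added in the diff below) on the source storage engine for each target, and records the new placement in the database. A minimal sketch, assuming a hypothetical queue and database API; the helper names here are invented for illustration and are not Quay's actual internals:

    import logging
    import time

    logger = logging.getLogger(__name__)

    def replicate_forever(queue, storage_engines, db, poll_seconds=30):
      """ Drain replication jobs forever, copying blobs between engines. """
      while True:
        job = queue.get()  # hypothetical: returns a job dict or None
        if job is None:
          time.sleep(poll_seconds)
          continue

        source = storage_engines[job['source_location']]
        for target_location in job['target_locations']:
          # copy_to (added below) performs a direct boto copy when both
          # engines share a class and credentials, else a streamed copy.
          source.copy_to(storage_engines[target_location], job['path'])
          # Record the new placement so lookups see the replicated copy.
          db.add_storage_placement(job['storage_id'], target_location)

        queue.complete(job)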
Joseph Schorr 2015-06-28 13:29:22 +03:00 committed by Joseph Schorr
parent c693afca6a
commit 724b1607d7
18 changed files with 259 additions and 35 deletions


@@ -222,6 +222,28 @@ class _CloudStorage(BaseStorage):
    return k.etag[1:-1][:7]

  def copy_to(self, destination, path):
    # First try to copy directly via boto, but only if the storages are the
    # same type, with the same access information.
    if (self.__class__ == destination.__class__ and
        self._access_key == destination._access_key and
        self._secret_key == destination._secret_key):
      logger.debug('Copying file from %s to %s via a direct boto copy', self._cloud_bucket,
                   destination._cloud_bucket)

      source_path = self._init_path(path)
      source_key = self._key_class(self._cloud_bucket, source_path)
      dest_path = destination._init_path(path)
      source_key.copy(destination._cloud_bucket, dest_path)
      return

    # Fallback to a slower, default copy.
    logger.debug('Copying file from %s to %s via a streamed copy', self._cloud_bucket,
                 destination)
    with self.stream_read_file(path) as fp:
      destination.stream_write(path, fp)


class S3Storage(_CloudStorage):
  def __init__(self, storage_path, s3_access_key, s3_secret_key, s3_bucket):
@@ -252,7 +274,6 @@ class S3Storage(_CloudStorage):
        </CORSRule>
      </CORSConfiguration>""")


class GoogleCloudStorage(_CloudStorage):
  def __init__(self, storage_path, access_key, secret_key, bucket_name):
    upload_params = {}
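
As a usage sketch of the new method: when both engines are the same class with the same credentials, copy_to performs a server-side boto key copy; otherwise it streams the data through the calling host. All keys, bucket names, and paths below are placeholders, though the constructor signatures match the diff above:

    # Hypothetical engines; the argument values are invented for illustration.
    primary = S3Storage('/registry', 'access-key', 'secret-key', 'bucket-east')
    replica = S3Storage('/registry', 'access-key', 'secret-key', 'bucket-west')
    gcs = GoogleCloudStorage('/registry', 'gcs-access', 'gcs-secret', 'bucket-gcs')

    # Same class and credentials: direct source_key.copy(), no local streaming.
    primary.copy_to(replica, 'some/blob/path')

    # Different engine class: falls back to stream_read_file -> stream_write.
    primary.copy_to(gcs, 'some/blob/path')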