diff --git a/storage/cloud.py b/storage/cloud.py
index 6b6296042..d204efea9 100644
--- a/storage/cloud.py
+++ b/storage/cloud.py
@@ -307,9 +307,11 @@ class _CloudStorage(BaseStorageV2):
   def _client_side_chunk_join(self, final_path, chunk_list):
     # If there's only one chunk, just "move" (copy and delete) the key and call it a day.
     if len(chunk_list) == 1:
-      chunk_path = chunk_list[0].path
+      chunk_path = self._init_path(chunk_list[0].path)
+      abs_final_path = self._init_path(final_path)
+
       # Let the copy raise an exception if it fails.
-      self._cloud_bucket.copy_key(final_path, self._bucket_name, chunk_path)
+      self._cloud_bucket.copy_key(abs_final_path, self._bucket_name, chunk_path)
 
       # Attempt to clean up the old chunk.
       try:
@@ -317,7 +319,7 @@ class _CloudStorage(BaseStorageV2):
       except IOError:
         # We failed to delete a chunk. This sucks, but we shouldn't fail the push.
         msg = 'Failed to clean up chunk %s for move of %s'
-        logger.exception(msg, chunk_path, final_path)
+        logger.exception(msg, chunk_path, abs_final_path)
     else:
       # Concatenate and write all the chunks as one key.
       concatenated = filelike.FilelikeStreamConcat(self._chunk_generator(chunk_list))
@@ -326,7 +328,7 @@ class _CloudStorage(BaseStorageV2):
       # Attempt to clean up all the chunks.
       for chunk in chunk_list:
         try:
-          self._cloud_bucket.delete_key(chunk.path)
+          self._cloud_bucket.delete_key(self._init_path(chunk.path))
         except IOError:
           # We failed to delete a chunk. This sucks, but we shouldn't fail the push.
           msg = 'Failed to clean up chunk %s for reupload of %s'