2015-12-07 20:40:18 +00:00
|
|
|
import logging
|
2013-11-11 21:41:33 +00:00
|
|
|
import tempfile
|
|
|
|
|
2015-08-12 20:39:32 +00:00
|
|
|
from digest.digest_tools import content_path
|
2015-09-02 21:31:44 +00:00
|
|
|
from util.registry.filelike import READ_UNTIL_END
|
2013-11-11 21:41:33 +00:00
|
|
|
|
2015-12-07 20:40:18 +00:00
|
|
|
# Module-level logger, named after this module per the standard convention.
logger = logging.getLogger(__name__)
|
|
|
|
|
2014-06-17 20:03:43 +00:00
|
|
|
class StoragePaths(object):
  """ Mixin that computes the paths under which image and blob data are stored. """

  # Directory prefix shared by all image storage paths.
  shared_images = 'sharedimages'

  @staticmethod
  def temp_store_handler():
    """ Returns a (file, writer) pair backed by an anonymous temporary file.

        The writer callable appends a buffer to the file; any IOError raised
        while spooling is deliberately swallowed so a failed write never
        interrupts the caller.
    """
    spool = tempfile.TemporaryFile()

    def _append(chunk):
      try:
        spool.write(chunk)
      except IOError:
        # Best-effort spooling: ignore write failures.
        pass

    return spool, _append

  def _image_path(self, storage_uuid):
    """ Returns the base directory (with trailing slash) for the given storage uuid. """
    return '%s/%s/' % (self.shared_images, storage_uuid)

  def v1_image_layer_path(self, storage_uuid):
    """ Returns the path at which the V1 layer data for the storage uuid lives. """
    return self._image_path(storage_uuid) + 'layer'

  def blob_path(self, digest_str):
    """ Returns the content-addressed path for the blob with the given digest. """
    return content_path(digest_str)

  def image_file_trie_path(self, storage_uuid):
    """ Returns the path at which the file trie for the storage uuid lives. """
    return self._image_path(storage_uuid) + 'files.trie'
|
2013-11-07 04:21:12 +00:00
|
|
|
|
2014-06-17 20:03:43 +00:00
|
|
|
class BaseStorage(StoragePaths):
  """ Abstract interface implemented by every storage engine. Engines override the
      NotImplementedError stubs; the direct-URL methods default to "unsupported".
  """

  def __init__(self):
    # Stream copies are performed in 64kB chunks.
    self.buffer_size = 64 * 1024

  def setup(self):
    """ Called to perform any storage system setup. """
    pass

  def validate(self, client):
    """ Called to perform any custom storage system validation. The client is an HTTP
        client to use for any external calls. """
    pass

  def get_direct_download_url(self, path, expires_in=60, requires_cors=False):
    """ Returns a URL for downloading directly from storage, or None when the engine
        has no direct-download support (the default). """
    return None

  def get_direct_upload_url(self, path, mime_type, requires_cors=True):
    """ Returns a URL for uploading directly to storage, or None when the engine
        has no direct-upload support (the default). """
    return None

  def get_supports_resumable_downloads(self):
    """ Whether this engine supports resuming downloads; False by default. """
    return False

  def get_content(self, path):
    """ Returns the content stored under the given path. """
    raise NotImplementedError

  def put_content(self, path, content):
    """ Stores the given content under the given path. """
    raise NotImplementedError

  def stream_read(self, path):
    """ Reads the data under the given path as a stream. """
    raise NotImplementedError

  def stream_read_file(self, path):
    """ Returns a file-like object over the data under the given path. """
    raise NotImplementedError

  def stream_write(self, path, fp, content_type=None, content_encoding=None):
    """ Writes the data read from fp under the given path. """
    raise NotImplementedError

  def list_directory(self, path=None):
    """ Lists the entries under the given path. """
    raise NotImplementedError

  def exists(self, path):
    """ Returns whether data exists under the given path. """
    raise NotImplementedError

  def remove(self, path):
    """ Removes the data under the given path. """
    raise NotImplementedError

  def get_checksum(self, path):
    """ Returns a checksum of the data under the given path. """
    raise NotImplementedError

  def stream_write_to_fp(self, in_fp, out_fp, num_bytes=READ_UNTIL_END):
    """ Copy the specified number of bytes from the input file stream to the output stream. If
        num_bytes < 0 copy until the stream ends. Returns the number of bytes copied.
    """
    copied = 0
    while num_bytes == READ_UNTIL_END or copied < num_bytes:
      # Read either a full buffer or just the remainder, whichever is smaller;
      # when copying until EOF there is no remainder, so use a full buffer.
      if num_bytes == READ_UNTIL_END:
        to_read = self.buffer_size
      else:
        to_read = min(num_bytes - copied, self.buffer_size)

      chunk = in_fp.read(to_read)
      if not chunk:
        # Input stream exhausted.
        break

      out_fp.write(chunk)
      copied += len(chunk)

    return copied

  def copy_to(self, destination, path):
    """ Copies the data under the given path to the destination storage engine. """
    raise NotImplementedError
2015-07-06 19:00:07 +00:00
|
|
|
|
2015-08-12 20:39:32 +00:00
|
|
|
class InvalidChunkException(RuntimeError):
  """ Raised by chunked-upload operations when a chunk's offset or length
      cannot be accepted by the storage engine.
  """
|
|
|
|
class BaseStorageV2(BaseStorage):
  """ Extends BaseStorage with the chunked (resumable) upload operations. All
      methods are stubs for concrete engines to implement.
  """

  def initiate_chunked_upload(self):
    """ Begins a new chunked upload. Returns the upload's uuid along with any
        engine-specific storage metadata to be threaded through later calls.
    """
    raise NotImplementedError

  def stream_upload_chunk(self, uuid, offset, length, in_fp, storage_metadata, content_type=None):
    """ Uploads data read from in_fp to the chunked upload identified by uuid,
        starting at the given offset. A length of -1 means upload as much data
        from in_fp as possible. Returns the number of bytes uploaded and an
        updated storage_metadata. Raises InvalidChunkException when the offset
        or length cannot be accepted.
    """
    raise NotImplementedError

  def complete_chunked_upload(self, uuid, final_path, storage_metadata):
    """ Finishes the chunked upload identified by uuid, storing the final
        result under final_path. Returns nothing.
    """
    raise NotImplementedError

  def cancel_chunked_upload(self, uuid, storage_metadata):
    """ Aborts the chunked upload identified by uuid and cleans up any
        partially uploaded data. Returns nothing.
    """
    raise NotImplementedError