Another huge batch of registry v2 changes

Add patch support and resumable sha
Implement all actual registry methods
Add a simple database generation option
This commit is contained in:
Jake Moshenko 2015-08-12 16:39:32 -04:00
parent 5ba3521e67
commit e1b3e9e6ae
29 changed files with 1095 additions and 430 deletions

View file

@ -1,5 +1,7 @@
import tempfile
from digest.digest_tools import content_path
class StoragePaths(object):
shared_images = 'sharedimages'
@ -23,13 +25,12 @@ class StoragePaths(object):
base_path = self.image_path(storage_uuid)
return '{0}json'.format(base_path)
def image_layer_path(self, storage_uuid):
def v1_image_layer_path(self, storage_uuid):
base_path = self.image_path(storage_uuid)
return '{0}layer'.format(base_path)
def image_ancestry_path(self, storage_uuid):
base_path = self.image_path(storage_uuid)
return '{0}ancestry'.format(base_path)
def blob_path(self, digest_str):
return content_path(digest_str)
def image_file_trie_path(self, storage_uuid):
base_path = self.image_path(storage_uuid)
@ -99,26 +100,30 @@ class BaseStorage(StoragePaths):
raise NotImplementedError
class DigestInvalidException(RuntimeError):
class InvalidChunkException(RuntimeError):
pass
class BaseStorageV2(BaseStorage):
def initiate_chunked_upload(self):
""" Start a new chunked upload, and return a handle with which the upload can be referenced.
def initiate_chunked_upload(self, upload_uuid):
""" Start a new chunked upload
"""
raise NotImplementedError
def stream_upload_chunk(self, uuid, offset, length, in_fp):
def stream_upload_chunk(self, uuid, offset, length, in_fp, hash_obj):
""" Upload the specified amount of data from the given file pointer to the chunked destination
specified, starting at the given offset. Returns the number of bytes written.
specified, starting at the given offset. Raises InvalidChunkException if the offset or
length can not be accepted.
"""
raise NotImplementedError
def complete_chunked_upload(self, uuid, final_path, digest_to_verify):
def complete_chunked_upload(self, uuid, final_path):
""" Complete the chunked upload and store the final results in the path indicated.
"""
raise NotImplementedError
def cancel_chunked_upload(self, uuid):
""" Cancel the chunked upload and clean up any outstanding partially uploaded data.
"""
raise NotImplementedError