More changes for registry-v2 in Python.
Implement the minimal changes to the local filesystem storage driver and feed them through the distributed storage driver. Create a digest package containing digest_tools and checksums. Fix the tests to use the new v1 endpoint locations. Fix repository.delete_instance to filter the generated queries properly, avoiding most subquery deletes while still generating them when no explicit filter is given (sketched below).
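The repository.delete_instance fix itself is not among the hunks reproduced below. As a rough peewee-style sketch of the idea, with hypothetical models and names not taken from this commit: when the parent row is known, delete children with a direct WHERE on the foreign key instead of letting recursive deletion emit a subquery delete.

# Hypothetical peewee models, for illustration only; not from this commit.
from peewee import CharField, ForeignKeyField, Model, SqliteDatabase

db = SqliteDatabase(':memory:')

class BaseModel(Model):
  class Meta:
    database = db

class Repository(BaseModel):
  name = CharField()

class Image(BaseModel):
  repository = ForeignKeyField(Repository)

db.create_tables([Repository, Image])

def delete_repository(repo):
  # Explicitly filtered: remove children with a direct WHERE on the
  # foreign key column...
  Image.delete().where(Image.repository == repo).execute()

  # ...instead of the subquery form an unfiltered recursive delete would
  # generate, roughly:
  #   DELETE FROM image WHERE repository_id IN (SELECT id FROM repository ...)
  repo.delete_instance()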
parent acbcc2e206
commit bea8b9ac53
23 changed files with 397 additions and 179 deletions
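Only the local storage driver hunks are reproduced below. For context, here is a minimal sketch of the digest helpers those hunks call; the names sha256_digest_from_generator, digests_equal, and InvalidDigestException all appear in the diff itself, but the bodies here are illustrative assumptions, not the package's actual implementation.

# Hypothetical sketch of digest_tools; only the names are taken from this diff.
import hashlib


class InvalidDigestException(RuntimeError):
  pass


def sha256_digest_from_generator(content_generator):
  """ Compute a sha256 digest string from a generator of byte chunks. """
  hasher = hashlib.sha256()
  for chunk in content_generator:
    hasher.update(chunk)
  return 'sha256:{0}'.format(hasher.hexdigest())


def digests_equal(lhs_digest_string, rhs_digest_string):
  """ Compare two digest strings for equality. """
  return lhs_digest_string == rhs_digest_string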
@@ -3,10 +3,13 @@ import shutil
 import hashlib
 import io
 
-from storage.basestorage import BaseStorage
+from uuid import uuid4
+
+from storage.basestorage import BaseStorageV2
+from digest import digest_tools
 
 
-class LocalStorage(BaseStorage):
+class LocalStorage(BaseStorageV2):
 
   def __init__(self, storage_path):
     self._root_path = storage_path
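The hunk above swaps the driver's base class from BaseStorage to BaseStorageV2. The v2 base class lives in storage/basestorage.py and is not part of the hunks shown here; the sketch below infers a plausible contract purely from the methods LocalStorage implements later in this diff.

# Hypothetical sketch of the BaseStorageV2 contract; inferred from the
# LocalStorage methods in this diff, not copied from storage/basestorage.py.
from storage.basestorage import BaseStorage


class BaseStorageV2(BaseStorage):
  def initiate_chunked_upload(self):
    """ Begin a chunked upload and return an opaque upload id. """
    raise NotImplementedError

  def stream_upload_chunk(self, uuid, offset, length, in_fp):
    """ Write up to length bytes from in_fp at offset; return the bytes copied. """
    raise NotImplementedError

  def complete_chunked_upload(self, uuid, final_path, digest_to_verify):
    """ Check the uploaded content against digest_to_verify, then move it to final_path. """
    raise NotImplementedError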
@@ -46,15 +49,29 @@ class LocalStorage(BaseStorage):
   def stream_write(self, path, fp, content_type=None, content_encoding=None):
     # Size is mandatory
     path = self._init_path(path, create=True)
-    with open(path, mode='wb') as f:
-      while True:
-        try:
-          buf = fp.read(self.buffer_size)
-          if not buf:
-            break
-          f.write(buf)
-        except IOError:
-          break
+    with open(path, mode='wb') as out_fp:
+      self._stream_write_to_fp(fp, out_fp)
+
+  def _stream_write_to_fp(self, in_fp, out_fp, num_bytes=-1):
+    """ Copy the specified number of bytes from the input file stream to the output stream. If
+        num_bytes < 0, copy until the stream ends. Returns the number of bytes copied.
+    """
+    bytes_copied = 0
+    bytes_remaining = num_bytes
+    while bytes_remaining > 0 or num_bytes < 0:
+      # Bound each read so a sized copy never consumes past num_bytes.
+      size_to_read = self.buffer_size if num_bytes < 0 else min(self.buffer_size, bytes_remaining)
+      try:
+        buf = in_fp.read(size_to_read)
+        if not buf:
+          break
+        out_fp.write(buf)
+        bytes_copied += len(buf)
+        bytes_remaining -= len(buf)
+      except IOError:
+        break
+
+    return bytes_copied
 
   def list_directory(self, path=None):
     path = self._init_path(path)
@@ -92,3 +109,36 @@ class LocalStorage(BaseStorage):
           break
         sha_hash.update(buf)
     return sha_hash.hexdigest()[:7]
+
+
+  def _rel_upload_path(self, uuid):
+    return 'uploads/{0}'.format(uuid)
+
+
+  def initiate_chunked_upload(self):
+    new_uuid = str(uuid4())
+
+    # Just create an empty file at the path
+    with open(self._init_path(self._rel_upload_path(new_uuid), create=True), 'w'):
+      pass
+
+    return new_uuid
+
+  def stream_upload_chunk(self, uuid, offset, length, in_fp):
+    with open(self._init_path(self._rel_upload_path(uuid)), 'r+b') as upload_storage:
+      upload_storage.seek(offset)
+      return self._stream_write_to_fp(in_fp, upload_storage, length)
+
+  def complete_chunked_upload(self, uuid, final_path, digest_to_verify):
+    content_path = self._rel_upload_path(uuid)
+    content_digest = digest_tools.sha256_digest_from_generator(self.stream_read(content_path))
+
+    if not digest_tools.digests_equal(content_digest, digest_to_verify):
+      msg = 'Given: {0} Computed: {1}'.format(digest_to_verify, content_digest)
+      raise digest_tools.InvalidDigestException(msg)
+
+    final_path = self._init_path(final_path, create=True)
+    shutil.move(self._init_path(content_path), final_path)
+
+
+
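Taken together, the new methods give the driver a three-step chunked-upload lifecycle: initiate, stream chunks at offsets, then verify and move. A usage sketch follows; the storage root, final path, content, and sha256-prefixed digest format are illustrative assumptions matching the digest_tools sketch above.

# Hypothetical walk-through of the chunked-upload lifecycle.
import hashlib
import io

storage = LocalStorage('/tmp/registry-scratch')  # hypothetical storage root

# 1. Start an upload; the driver creates an empty file under uploads/<uuid>.
upload_id = storage.initiate_chunked_upload()

# 2. Stream the content in two chunks, advancing the offset by the number
#    of bytes each call reports as copied.
offset = 0
for chunk in [b'hello ', b'world']:
  offset += storage.stream_upload_chunk(upload_id, offset, len(chunk), io.BytesIO(chunk))

# 3. Finalize: the driver re-reads the upload, verifies the digest, and moves
#    the file to its final location (raising InvalidDigestException on mismatch).
expected = 'sha256:{0}'.format(hashlib.sha256(b'hello world').hexdigest())
storage.complete_chunked_upload(upload_id, 'sha256/aa/hello-world-blob', expected)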