Fix resumable upload support and add another test
parent 7ca04f41dd
commit 09f8ad695b
2 changed files with 55 additions and 11 deletions
@@ -167,7 +167,7 @@ def _range_not_satisfiable(valid_end):
   flask_abort(invalid_range)
 
 
-def _parse_range_header(range_header_text, valid_start):
+def _parse_range_header(range_header_text):
   """ Parses the range header, and returns a tuple of the start offset and the length,
       or raises an _InvalidRangeHeader exception.
   """
@@ -178,7 +178,7 @@ def _parse_range_header(range_header_text, valid_start):
   start = int(found.group(1))
   length = int(found.group(2)) - start
 
-  if start != valid_start or length <= 0:
+  if length <= 0:
     raise _InvalidRangeHeader()
 
   return (start, length)
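For readers following along, here is a minimal, self-contained sketch of what the parser with the new one-argument signature could look like. It assumes the chunked-upload header has the form 'bytes=<start>-<end>' with an exclusive end (the tests below build 'bytes=%s-%s' from Python slice bounds), and the regex name is hypothetical; only the identifiers visible in the diff are taken from the repository:

import re

# Hypothetical module-level pattern; the real one lives elsewhere in the file.
_RANGE_REGEX = re.compile(r'^bytes=([0-9]+)-([0-9]+)$')


class _InvalidRangeHeader(Exception):
  pass


def _parse_range_header(range_header_text):
  """ Parses the range header, and returns a tuple of the start offset and the length,
      or raises an _InvalidRangeHeader exception.
  """
  found = _RANGE_REGEX.match(range_header_text)
  if found is None:
    raise _InvalidRangeHeader()

  start = int(found.group(1))
  length = int(found.group(2)) - start

  # A zero or negative length means the header describes no new data.
  if length <= 0:
    raise _InvalidRangeHeader()

  return (start, length)

Unlike the removed version, the start offset is no longer compared against the number of bytes already received; that responsibility moves to the caller, which can now tolerate an overlapping resend.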
@@ -197,7 +197,7 @@ def _upload_chunk(namespace, repo_name, upload_uuid):
   range_header = request.headers.get('range', None)
   if range_header is not None:
     try:
-      start_offset, length = _parse_range_header(range_header, found.byte_count)
+      start_offset, length = _parse_range_header(range_header)
     except _InvalidRangeHeader:
       _range_not_satisfiable(found.byte_count)
 
@@ -205,10 +205,12 @@ def _upload_chunk(namespace, repo_name, upload_uuid):
     _range_not_satisfiable(found.byte_count)
 
   input_fp = get_input_stream(request)
 
   if start_offset > 0 and start_offset < found.byte_count:
     # Skip the bytes which were received on a previous push, which are already stored and
     # included in the sha calculation
     input_fp = StreamSlice(input_fp, found.byte_count - start_offset)
     start_offset = found.byte_count
 
   input_fp = wrap_with_handler(input_fp, found.sha_state.update)
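The block above relies on StreamSlice (imported elsewhere in the module) to discard the overlap between a re-sent chunk and the bytes already stored, so found.sha_state is only updated with genuinely new data and the storage offset stays consistent. As a rough illustration only, a hypothetical minimal stand-in for such a wrapper might look like this; the real class in the repository may differ:

class StreamSlice(object):
  """ Hypothetical sketch: wraps a file-like object and throws away the first
      `start_offset` bytes before passing reads through unchanged.
  """
  def __init__(self, fileobj, start_offset):
    self._fileobj = fileobj
    self._to_skip = start_offset

  def read(self, size=-1):
    # Drain and discard the prefix that was already stored on a previous push.
    while self._to_skip > 0:
      discarded = self._fileobj.read(min(self._to_skip, 8192))
      if not discarded:
        return ''
      self._to_skip -= len(discarded)
    return self._fileobj.read(size)

With bytes 0-100 already stored, a client that re-sends bytes 10-150 therefore only contributes bytes 100-150 to the blob and to the sha calculation. The second changed file, diffed below, is the registry test suite.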
@@ -130,8 +130,6 @@ def get_new_database_uri():
 
 
 class RegistryTestCaseMixin(LiveServerTestCase):
-  maxDiff = None
-
   def create_app(self):
     global _PORT_NUMBER
     _PORT_NUMBER = _PORT_NUMBER + 1
@@ -366,11 +364,19 @@ class V2RegistryPushMixin(V2RegistryMixin):
       self.conduct('PATCH', location, data=contents, expected_code=204, auth='jwt')
     else:
       for chunk in chunks:
-        (start_byte, end_byte) = chunk
+        if len(chunk) == 3:
+          (start_byte, end_byte, expected_code) = chunk
+        else:
+          (start_byte, end_byte) = chunk
+          expected_code = 204
+
         contents_chunk = full_contents[start_byte:end_byte]
-        self.conduct('PATCH', location, data=contents_chunk, expected_code=204, auth='jwt',
+        self.conduct('PATCH', location, data=contents_chunk, expected_code=expected_code, auth='jwt',
                      headers={'Range': 'bytes=%s-%s' % (start_byte, end_byte)})
 
+        if expected_code != 204:
+          return
+
     # Finish the layer upload with a PUT.
     self.conduct('PUT', location, params=dict(digest=checksum), expected_code=201, auth='jwt')
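The push helper now accepts each chunk either as (start_byte, end_byte) or as (start_byte, end_byte, expected_code), where the optional third element lets a test assert that a particular PATCH is rejected instead of returning 204. Pulled out on its own, the unpacking convention looks like this (the helper name is invented for illustration):

def _normalize_chunk(chunk):
  # Two-element chunks default to expecting HTTP 204; a third element overrides it.
  if len(chunk) == 3:
    (start_byte, end_byte, expected_code) = chunk
  else:
    (start_byte, end_byte) = chunk
    expected_code = 204
  return (start_byte, end_byte, expected_code)


assert _normalize_chunk((0, 100)) == (0, 100, 204)
assert _normalize_chunk((101, 150, 416)) == (101, 150, 416)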
@@ -622,11 +628,10 @@ class V2RegistryTests(V2RegistryPullMixin, V2RegistryPushMixin, RegistryTestsMix
     self.assertEquals(len(blobs.items()), 1)
     self.assertEquals(blobs.items()[0][1], contents)
 
-  def test_partial_upload_resend_below_5mb(self):
-    size = 1024 * 1024 * 2
+  def test_partial_upload_way_below_5mb(self):
+    size = 1024
     contents = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(size))
 
-    chunks = [(0, 10), (0, 100), (100, size)]
+    chunks = [(0, 100), (100, size)]
 
     images = {
       'someid': {
@@ -643,6 +648,43 @@ class V2RegistryTests(V2RegistryPullMixin, V2RegistryPushMixin, RegistryTestsMix
     self.assertEquals(len(blobs.items()), 1)
     self.assertEquals(blobs.items()[0][1], contents)
 
+  def test_partial_upload_resend_below_5mb(self):
+    size = 150
+    contents = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(size))
+
+    chunks = [(0, 100), (10, size)]
+
+    images = {
+      'someid': {
+        'contents': contents,
+        'chunks': chunks
+      }
+    }
+
+    # Push the chunked upload.
+    self.do_push('devtable', 'newrepo', 'devtable', 'password', images)
+
+    # Pull the image back and verify the contents.
+    blobs = self.do_pull('devtable', 'newrepo', 'devtable', 'password')
+    self.assertEquals(len(blobs.items()), 1)
+    self.assertEquals(blobs.items()[0][1], contents)
+
+  def test_partial_upload_try_resend_with_gap(self):
+    size = 150
+    contents = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(size))
+
+    chunks = [(0, 100), (101, size, 416)]
+
+    images = {
+      'someid': {
+        'contents': contents,
+        'chunks': chunks
+      }
+    }
+
+    # Attempt to push the chunked upload, which should fail.
+    self.do_push('devtable', 'newrepo', 'devtable', 'password', images)
+
 
 class V1PushV2PullRegistryTests(V2RegistryPullMixin, V1RegistryPushMixin, RegistryTestsMixin,
                                 RegistryTestCaseMixin, LiveServerTestCase):
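The new test_partial_upload_try_resend_with_gap covers the failure path: after bytes 0-100 are stored, a chunk starting at offset 101 would leave byte 100 missing, so the PATCH is expected to come back as 416 Range Not Satisfiable, while test_partial_upload_resend_below_5mb shows that an overlapping resend (bytes 10-150 after 0-100) is now accepted. A tiny worked check of that acceptance rule, with an invented helper name purely for illustration and inferred from the tests rather than copied from the endpoint code:

def _chunk_is_acceptable(start_offset, stored_byte_count):
  # A chunk may start at or before the end of the data already stored (the overlap
  # is skipped), but starting past it would leave a gap in the blob.
  return start_offset <= stored_byte_count


assert _chunk_is_acceptable(10, 100)        # overlapping resend: accepted
assert not _chunk_is_acceptable(101, 100)   # gap at byte 100: expect 416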