initial import for Open Source 🎉
parent 1898c361f3
commit 9c0dd3b722
2048 changed files with 218743 additions and 0 deletions
util/registry/test/test_dockerver.py (new normal file, 47 additions)
@@ -0,0 +1,47 @@
import pytest

from util.registry.dockerver import docker_version
from semantic_version import Version, Spec

@pytest.mark.parametrize('ua_string, ver_info', [
  # Old "semantic" versioning.
  ('docker/1.6.0 go/go1.4.2 git-commit/1234567 kernel/4.2.0-18-generic os/linux arch/amd64',
   Version('1.6.0')),
  ('docker/1.7.1 go/go1.4.2 kernel/4.1.7-15.23.amzn1.x86_64 os/linux arch/amd64',
   Version('1.7.1')),
  ('docker/1.6.2 go/go1.4.2 git-commit/7c8fca2-dirty kernel/4.0.5 os/linux arch/amd64',
   Version('1.6.2')),
  ('docker/1.9.0 go/go1.4.2 git-commit/76d6bc9 kernel/3.16.0-4-amd64 os/linux arch/amd64',
   Version('1.9.0')),
  ('docker/1.9.1 go/go1.4.2 git-commit/a34a1d5 kernel/3.10.0-229.20.1.el7.x86_64 os/linux arch/amd64',
   Version('1.9.1')),
  ('docker/1.8.2-circleci go/go1.4.2 git-commit/a8b52f5 kernel/3.13.0-71-generic os/linux arch/amd64',
   Version('1.8.2')),
  ('Go 1.1 package http', Version('1.5.0')),
  ('curl', None),
  ('docker/1.8 stuff', Version('1.8', partial=True)),

  # Newer date-based versioning: YY.MM.revnum
  ('docker/17.03.0 my_version_sucks', Version('17.3.0')),
  ('docker/17.03.0-foobar my_version_sucks', Version('17.3.0')),
  ('docker/17.10.2 go/go1.4.2 git-commit/a34a1d5 kernel/3.10.0-229.20.1.el7.x86_64 os/linux arch/amd64',
   Version('17.10.2')),
  ('docker/17.00.4 my_version_sucks', Version('17.0.4')),
  ('docker/17.12.00 my_version_sucks', Version('17.12.0')),
])
def test_parsing(ua_string, ver_info):
  parsed_ver = docker_version(ua_string)
  assert parsed_ver == ver_info, 'Expected %s, Found %s' % (ver_info, parsed_ver)

@pytest.mark.parametrize('spec, no_match_cases, match_cases', [
  (Spec('<1.6.0'), ['1.6.0', '1.6.1', '1.9.0', '100.5.2'], ['0.0.0', '1.5.99']),
  (Spec('<1.9.0'), ['1.9.0', '100.5.2'], ['0.0.0', '1.5.99', '1.6.0', '1.6.1']),
  (Spec('<1.6.0,>0.0.1'), ['1.6.0', '1.6.1', '1.9.0', '0.0.0'], ['1.5.99']),
  (Spec('>17.3.0'), ['17.3.0', '1.13.0'], ['17.4.0', '17.12.1']),
])
def test_specs(spec, no_match_cases, match_cases):
  for no_match_case in no_match_cases:
    assert not spec.match(Version(no_match_case))

  for match_case in match_cases:
    assert spec.match(Version(match_case))
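Note (not part of the commit): the parametrized cases above pin down the expected behavior of docker_version fairly tightly: it extracts the docker/x.y.z token from a User-Agent string, tolerates suffixes such as -circleci, treats bare Go HTTP clients as 1.5.0, returns None for unknown agents, and normalizes date-based versions such as 17.03.0 into valid semver. The sketch below is purely illustrative; the helper name and regex are assumptions, not the contents of util/registry/dockerver.py.

import re

from semantic_version import Version

# Hypothetical regex; the real parser may differ.
_UA_VERSION = re.compile(r'docker/([0-9]+(?:\.[0-9]+){0,2})')

def sketch_docker_version(user_agent):
  match = _UA_VERSION.search(user_agent)
  if match is None:
    # Old Go HTTP clients identify themselves without a docker/x.y.z token.
    return Version('1.5.0') if user_agent.startswith('Go ') else None

  parts = match.group(1).split('.')
  # Normalize date-based versions (17.03.0 -> 17.3.0) into valid semver.
  normalized = '.'.join(str(int(part)) for part in parts)
  return Version(normalized, partial=len(parts) < 3)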
util/registry/test/test_filelike.py (new normal file, 132 additions)
@@ -0,0 +1,132 @@
from StringIO import StringIO
from util.registry.filelike import FilelikeStreamConcat, LimitingStream, StreamSlice

def somegenerator():
  yield 'some'
  yield 'cool'
  yield 'file-contents'

def test_parts():
  gens = iter([StringIO(s) for s in somegenerator()])
  fileobj = FilelikeStreamConcat(gens)

  assert fileobj.read(2) == 'so'
  assert fileobj.read(3) == 'mec'
  assert fileobj.read(7) == 'oolfile'
  assert fileobj.read(-1) == '-contents'

def test_entire():
  gens = iter([StringIO(s) for s in somegenerator()])
  fileobj = FilelikeStreamConcat(gens)
  assert fileobj.read(-1) == 'somecoolfile-contents'

def test_nolimit():
  fileobj = StringIO('this is a cool test')
  stream = LimitingStream(fileobj)
  assert stream.read(-1) == 'this is a cool test'
  assert len('this is a cool test') == stream.tell()

def test_simplelimit():
  fileobj = StringIO('this is a cool test')
  stream = LimitingStream(fileobj, 4)
  assert stream.read(-1) == 'this'
  assert 4 == stream.tell()

def test_simplelimit_readdefined():
  fileobj = StringIO('this is a cool test')
  stream = LimitingStream(fileobj, 4)
  assert stream.read(2) == 'th'
  assert 2 == stream.tell()

def test_nolimit_readdefined():
  fileobj = StringIO('this is a cool test')
  stream = LimitingStream(fileobj, -1)
  assert stream.read(2) == 'th'
  assert 2 == stream.tell()

def test_limit_multiread():
  fileobj = StringIO('this is a cool test')
  stream = LimitingStream(fileobj, 7)
  assert stream.read(4) == 'this'
  assert stream.read(3) == ' is'
  assert stream.read(2) == ''
  assert 7 == stream.tell()

def test_limit_multiread2():
  fileobj = StringIO('this is a cool test')
  stream = LimitingStream(fileobj, 7)
  assert stream.read(4) == 'this'
  assert stream.read(-1) == ' is'
  assert 7 == stream.tell()

def test_seek():
  fileobj = StringIO('this is a cool test')
  stream = LimitingStream(fileobj)
  stream.seek(2)

  assert stream.read(2) == 'is'
  assert 4 == stream.tell()

def test_seek_withlimit():
  fileobj = StringIO('this is a cool test')
  stream = LimitingStream(fileobj, 3)
  stream.seek(2)

  assert stream.read(2) == 'i'
  assert 3 == stream.tell()

def test_seek_pastlimit():
  fileobj = StringIO('this is a cool test')
  stream = LimitingStream(fileobj, 3)
  stream.seek(4)

  assert stream.read(1) == ''
  assert 3 == stream.tell()

def test_seek_to_tell():
  fileobj = StringIO('this is a cool test')
  stream = LimitingStream(fileobj, 3)
  stream.seek(stream.tell())

  assert stream.read(4) == 'thi'
  assert 3 == stream.tell()

def test_none_read():
  class NoneReader(object):
    def read(self, size=None):
      return None

  stream = StreamSlice(NoneReader(), 0)
  assert stream.read(-1) == None
  assert stream.tell() == 0

def test_noslice():
  fileobj = StringIO('this is a cool test')
  stream = StreamSlice(fileobj, 0)
  assert stream.read(-1) == 'this is a cool test'
  assert len('this is a cool test') == stream.tell()

def test_startindex():
  fileobj = StringIO('this is a cool test')
  stream = StreamSlice(fileobj, 5)
  assert stream.read(-1) == 'is a cool test'
  assert len('is a cool test') == stream.tell()

def test_startindex_limitedread():
  fileobj = StringIO('this is a cool test')
  stream = StreamSlice(fileobj, 5)
  assert stream.read(4) == 'is a'
  assert 4 == stream.tell()

def test_slice():
  fileobj = StringIO('this is a cool test')
  stream = StreamSlice(fileobj, 5, 9)
  assert stream.read(-1) == 'is a'
  assert len('is a') == stream.tell()

def test_slice_explictread():
  fileobj = StringIO('this is a cool test')
  stream = StreamSlice(fileobj, 5, 9)
  assert stream.read(2) == 'is'
  assert stream.read(5) == ' a'
  assert len('is a') == stream.tell()
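Note (not part of the commit): these tests describe LimitingStream as a wrapper that caps how many bytes can be read in total, clamps seeks to that limit, and reports progress via tell(). A self-contained sketch of that contract, intended to satisfy the read/seek/tell cases above; it is illustrative only and not the real util/registry/filelike.py class.

class SketchLimitingStream(object):
  # Illustrative stand-in for LimitingStream; the real class may differ.
  def __init__(self, fileobj, limit=-1):
    self._fileobj = fileobj
    self._limit = limit
    self._offset = 0

  def tell(self):
    return self._offset

  def seek(self, position):
    # Seeks past the limit are clamped, so later reads return nothing.
    if self._limit >= 0:
      position = min(position, self._limit)
    self._offset = position
    self._fileobj.seek(position)

  def read(self, size=-1):
    if self._limit >= 0:
      remaining = self._limit - self._offset
      size = remaining if size < 0 else min(size, remaining)
    data = self._fileobj.read(size)
    self._offset += len(data)
    return data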
util/registry/test/test_generatorfile.py (new normal file, 98 additions)
@@ -0,0 +1,98 @@
from _pyio import BufferedReader

import magic

from util.registry.generatorfile import GeneratorFile

def sample_generator():
  yield 'this'
  yield 'is'
  yield 'a'
  yield 'test'

def test_basic_generator():
  with GeneratorFile(sample_generator()) as f:
    assert f.tell() == 0
    assert f.read() == "thisisatest"
    assert f.tell() == len("thisisatest")

def test_same_lengths():
  with GeneratorFile(sample_generator()) as f:
    assert f.read(4) == "this"
    assert f.tell() == 4

    assert f.read(2) == "is"
    assert f.tell() == 6

    assert f.read(1) == "a"
    assert f.tell() == 7

    assert f.read(4) == "test"
    assert f.tell() == 11

def test_indexed_lengths():
  with GeneratorFile(sample_generator()) as f:
    assert f.read(6) == "thisis"
    assert f.tell() == 6

    assert f.read(5) == "atest"
    assert f.tell() == 11

def test_misindexed_lengths():
  with GeneratorFile(sample_generator()) as f:
    assert f.read(6) == "thisis"
    assert f.tell() == 6

    assert f.read(3) == "ate"
    assert f.tell() == 9

    assert f.read(2) == "st"
    assert f.tell() == 11

    assert f.read(2) == ""
    assert f.tell() == 11

def test_misindexed_lengths_2():
  with GeneratorFile(sample_generator()) as f:
    assert f.read(8) == "thisisat"
    assert f.tell() == 8

    assert f.read(1) == "e"
    assert f.tell() == 9

    assert f.read(2) == "st"
    assert f.tell() == 11

    assert f.read(2) == ""
    assert f.tell() == 11

def test_overly_long():
  with GeneratorFile(sample_generator()) as f:
    assert f.read(60) == "thisisatest"
    assert f.tell() == 11

def test_with_bufferedreader():
  with GeneratorFile(sample_generator()) as f:
    buffered = BufferedReader(f)
    assert buffered.peek(10) == "thisisatest"
    assert buffered.read(10) == "thisisates"

def mimed_html_generator():
  yield '<html>'
  yield '<body>'
  yield 'sometext' * 1024
  yield '</body>'
  yield '</html>'

def test_magic():
  mgc = magic.Magic(mime=True)

  with GeneratorFile(mimed_html_generator()) as f:
    buffered = BufferedReader(f)
    file_header_bytes = buffered.peek(1024)
    assert mgc.from_buffer(file_header_bytes) == "text/html"

  with GeneratorFile(sample_generator()) as f:
    buffered = BufferedReader(f)
    file_header_bytes = buffered.peek(1024)
    assert mgc.from_buffer(file_header_bytes) == "text/plain"
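Note (not part of the commit): GeneratorFile is exercised here as a context manager that exposes a string generator through a file-like read()/tell() interface, buffering partial chunks between reads. Below is an illustrative, self-contained sketch of that behavior; it does not attempt the raw-IO protocol that _pyio.BufferedReader relies on in test_with_bufferedreader and test_magic, and it is not the real util/registry/generatorfile.py class.

class SketchGeneratorFile(object):
  # Illustrative stand-in for GeneratorFile; the real class may differ.
  def __init__(self, generator):
    self._generator = generator
    self._buffer = ''
    self._position = 0

  def __enter__(self):
    return self

  def __exit__(self, exc_type, exc_value, traceback):
    return False

  def tell(self):
    return self._position

  def read(self, size=-1):
    # Pull chunks from the generator until the request can be satisfied.
    while size < 0 or len(self._buffer) < size:
      try:
        self._buffer += next(self._generator)
      except StopIteration:
        break
    if size < 0:
      data, self._buffer = self._buffer, ''
    else:
      data, self._buffer = self._buffer[:size], self._buffer[size:]
    self._position += len(data)
    return data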
util/registry/test/test_queuefile.py (new normal file, 112 additions)
@@ -0,0 +1,112 @@
import os

import pytest

from util.registry.queueprocess import QueueResult
from util.registry.queuefile import QueueFile

class FakeQueue(object):
  def __init__(self):
    self.items = []

  def get(self, block):
    return self.items.pop(0)

  def put(self, data):
    self.items.append(data)


def test_basic():
  queue = FakeQueue()
  queue.put(QueueResult('hello world', None))
  queue.put(QueueResult('! how goes there?', None))
  queue.put(QueueResult(None, None))

  queuefile = QueueFile(queue)
  assert queuefile.read() == 'hello world! how goes there?'

def test_chunk_reading():
  queue = FakeQueue()
  queue.put(QueueResult('hello world', None))
  queue.put(QueueResult('! how goes there?', None))
  queue.put(QueueResult(None, None))

  queuefile = QueueFile(queue)
  data = ''

  while True:
    result = queuefile.read(size=2)
    if not result:
      break

    data += result

  assert data == 'hello world! how goes there?'

def test_unhandled_exception():
  queue = FakeQueue()
  queue.put(QueueResult('hello world', None))
  queue.put(QueueResult(None, IOError('some exception')))
  queue.put(QueueResult('! how goes there?', None))
  queue.put(QueueResult(None, None))

  queuefile = QueueFile(queue)

  with pytest.raises(IOError):
    queuefile.read(size=12)

def test_handled_exception():
  queue = FakeQueue()
  queue.put(QueueResult('hello world', None))
  queue.put(QueueResult(None, IOError('some exception')))
  queue.put(QueueResult('! how goes there?', None))
  queue.put(QueueResult(None, None))

  ex_found = [None]

  def handler(ex):
    ex_found[0] = ex

  queuefile = QueueFile(queue)
  queuefile.add_exception_handler(handler)
  queuefile.read(size=12)

  assert ex_found[0] is not None

def test_binary_data():
  queue = FakeQueue()

  # Generate some binary data.
  binary_data = os.urandom(1024)
  queue.put(QueueResult(binary_data, None))
  queue.put(QueueResult(None, None))

  queuefile = QueueFile(queue)
  found_data = ''
  while True:
    current_data = queuefile.read(size=37)
    if len(current_data) == 0:
      break

    found_data = found_data + current_data

  assert found_data == binary_data

def test_empty_data():
  queue = FakeQueue()

  # Generate some empty binary data.
  binary_data = '\0' * 1024
  queue.put(QueueResult(binary_data, None))
  queue.put(QueueResult(None, None))

  queuefile = QueueFile(queue)
  found_data = ''
  while True:
    current_data = queuefile.read(size=37)
    if len(current_data) == 0:
      break

    found_data = found_data + current_data

  assert found_data == binary_data
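Note (not part of the commit): QueueFile is tested as a file-like reader over a queue of QueueResult items, where a None payload signals end of stream and an attached exception is either raised or routed to registered handlers. The sketch below shows that reading loop using local stand-in types; the names are hypothetical and the real util/registry classes may be shaped differently.

from collections import namedtuple

# Local stand-in for the result objects pulled off the queue.
SketchQueueResult = namedtuple('SketchQueueResult', ['data', 'exception'])

class SketchQueueFile(object):
  # Illustrative stand-in for QueueFile; the real class may differ.
  def __init__(self, queue):
    self._queue = queue
    self._buffer = ''
    self._done = False
    self._handlers = []

  def add_exception_handler(self, handler):
    self._handlers.append(handler)

  def read(self, size=-1):
    while not self._done and (size < 0 or len(self._buffer) < size):
      result = self._queue.get(block=True)
      if result.exception is not None:
        # Route the error to handlers if any are registered; otherwise raise.
        self._done = True
        if not self._handlers:
          raise result.exception
        for handler in self._handlers:
          handler(result.exception)
        break
      if result.data is None:
        self._done = True
        break
      self._buffer += result.data
    if size < 0:
      data, self._buffer = self._buffer, ''
    else:
      data, self._buffer = self._buffer[:size], self._buffer[size:]
    return data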
util/registry/test/test_streamlayerformat.py (new normal file, 492 additions)
@@ -0,0 +1,492 @@
import tarfile

import pytest

from StringIO import StringIO
from util.registry.streamlayerformat import StreamLayerMerger
from util.registry.aufs import AUFS_WHITEOUT
from util.registry.tarlayerformat import TarLayerReadException

def create_layer(*file_pairs):
  output = StringIO()
  with tarfile.open(fileobj=output, mode='w:gz') as tar:
    for current_filename, current_contents in file_pairs:
      if current_contents is None:
        # This is a deleted file.
        if current_filename.endswith('/'):
          current_filename = current_filename[:-1]

        parts = current_filename.split('/')
        if len(parts) > 1:
          current_filename = '/'.join(parts[:-1]) + '/' + AUFS_WHITEOUT + parts[-1]
        else:
          current_filename = AUFS_WHITEOUT + parts[-1]

        current_contents = ''

      if current_contents.startswith('linkto:'):
        info = tarfile.TarInfo(name=current_filename)
        info.linkname = current_contents[len('linkto:'):]
        info.type = tarfile.LNKTYPE
        tar.addfile(info)
      else:
        info = tarfile.TarInfo(name=current_filename)
        info.size = len(current_contents)
        tar.addfile(info, fileobj=StringIO(current_contents))

  return output.getvalue()


def create_empty_layer():
  return ''


def squash_layers(layers, path_prefix=None):
  def getter_for_layer(layer):
    return lambda: StringIO(layer)

  def layer_stream_getter():
    return [getter_for_layer(layer) for layer in layers]

  merger = StreamLayerMerger(layer_stream_getter, path_prefix=path_prefix)
  merged_data = ''.join(merger.get_generator())
  return merged_data


def assertHasFile(squashed, filename, contents):
  with tarfile.open(fileobj=StringIO(squashed), mode='r:*') as tar:
    member = tar.getmember(filename)
    assert contents == '\n'.join(tar.extractfile(member).readlines())


def assertDoesNotHaveFile(squashed, filename):
  with tarfile.open(fileobj=StringIO(squashed), mode='r:*') as tar:
    try:
      member = tar.getmember(filename)
    except Exception as ex:
      return

    assert False, 'Filename %s found' % filename


def test_single_layer():
  tar_layer = create_layer(
    ('some_file', 'foo'),
    ('another_file', 'bar'),
    ('third_file', 'meh'))

  squashed = squash_layers([tar_layer])

  assertHasFile(squashed, 'some_file', 'foo')
  assertHasFile(squashed, 'another_file', 'bar')
  assertHasFile(squashed, 'third_file', 'meh')

def test_multiple_layers():
  second_layer = create_layer(
    ('some_file', 'foo'),
    ('another_file', 'bar'),
    ('third_file', 'meh'))

  first_layer = create_layer(
    ('top_file', 'top'))

  squashed = squash_layers([first_layer, second_layer])

  assertHasFile(squashed, 'some_file', 'foo')
  assertHasFile(squashed, 'another_file', 'bar')
  assertHasFile(squashed, 'third_file', 'meh')
  assertHasFile(squashed, 'top_file', 'top')

def test_multiple_layers_dot():
  second_layer = create_layer(
    ('./some_file', 'foo'),
    ('another_file', 'bar'),
    ('./third_file', 'meh'))

  first_layer = create_layer(
    ('top_file', 'top'))

  squashed = squash_layers([first_layer, second_layer])

  assertHasFile(squashed, './some_file', 'foo')
  assertHasFile(squashed, 'another_file', 'bar')
  assertHasFile(squashed, './third_file', 'meh')
  assertHasFile(squashed, 'top_file', 'top')

def test_multiple_layers_overwrite():
  second_layer = create_layer(
    ('some_file', 'foo'),
    ('another_file', 'bar'),
    ('third_file', 'meh'))

  first_layer = create_layer(
    ('another_file', 'top'))

  squashed = squash_layers([first_layer, second_layer])

  assertHasFile(squashed, 'some_file', 'foo')
  assertHasFile(squashed, 'third_file', 'meh')
  assertHasFile(squashed, 'another_file', 'top')

def test_multiple_layers_overwrite_base_dot():
  second_layer = create_layer(
    ('some_file', 'foo'),
    ('./another_file', 'bar'),
    ('third_file', 'meh'))

  first_layer = create_layer(
    ('another_file', 'top'))

  squashed = squash_layers([first_layer, second_layer])

  assertHasFile(squashed, 'some_file', 'foo')
  assertHasFile(squashed, 'third_file', 'meh')
  assertHasFile(squashed, 'another_file', 'top')
  assertDoesNotHaveFile(squashed, './another_file')

def test_multiple_layers_overwrite_top_dot():
  second_layer = create_layer(
    ('some_file', 'foo'),
    ('another_file', 'bar'),
    ('third_file', 'meh'))

  first_layer = create_layer(
    ('./another_file', 'top'))

  squashed = squash_layers([first_layer, second_layer])

  assertHasFile(squashed, 'some_file', 'foo')
  assertHasFile(squashed, 'third_file', 'meh')
  assertHasFile(squashed, './another_file', 'top')
  assertDoesNotHaveFile(squashed, 'another_file')

def test_deleted_file():
  second_layer = create_layer(
    ('some_file', 'foo'),
    ('another_file', 'bar'),
    ('third_file', 'meh'))

  first_layer = create_layer(
    ('another_file', None))

  squashed = squash_layers([first_layer, second_layer])

  assertHasFile(squashed, 'some_file', 'foo')
  assertHasFile(squashed, 'third_file', 'meh')
  assertDoesNotHaveFile(squashed, 'another_file')

def test_deleted_readded_file():
  third_layer = create_layer(
    ('another_file', 'bar'))

  second_layer = create_layer(
    ('some_file', 'foo'),
    ('another_file', None),
    ('third_file', 'meh'))

  first_layer = create_layer(
    ('another_file', 'newagain'))

  squashed = squash_layers([first_layer, second_layer, third_layer])

  assertHasFile(squashed, 'some_file', 'foo')
  assertHasFile(squashed, 'third_file', 'meh')
  assertHasFile(squashed, 'another_file', 'newagain')

def test_deleted_in_lower_layer():
  third_layer = create_layer(
    ('deleted_file', 'bar'))

  second_layer = create_layer(
    ('some_file', 'foo'),
    ('deleted_file', None),
    ('third_file', 'meh'))

  first_layer = create_layer(
    ('top_file', 'top'))

  squashed = squash_layers([first_layer, second_layer, third_layer])

  assertHasFile(squashed, 'some_file', 'foo')
  assertHasFile(squashed, 'third_file', 'meh')
  assertHasFile(squashed, 'top_file', 'top')
  assertDoesNotHaveFile(squashed, 'deleted_file')

def test_deleted_in_lower_layer_with_added_dot():
  third_layer = create_layer(
    ('./deleted_file', 'something'))

  second_layer = create_layer(
    ('deleted_file', None))

  squashed = squash_layers([second_layer, third_layer])
  assertDoesNotHaveFile(squashed, 'deleted_file')

def test_deleted_in_lower_layer_with_deleted_dot():
  third_layer = create_layer(
    ('./deleted_file', 'something'))

  second_layer = create_layer(
    ('./deleted_file', None))

  squashed = squash_layers([second_layer, third_layer])
  assertDoesNotHaveFile(squashed, 'deleted_file')

def test_directory():
  second_layer = create_layer(
    ('foo/some_file', 'foo'),
    ('foo/another_file', 'bar'))

  first_layer = create_layer(
    ('foo/some_file', 'top'))

  squashed = squash_layers([first_layer, second_layer])

  assertHasFile(squashed, 'foo/some_file', 'top')
  assertHasFile(squashed, 'foo/another_file', 'bar')

def test_sub_directory():
  second_layer = create_layer(
    ('foo/some_file', 'foo'),
    ('foo/bar/another_file', 'bar'))

  first_layer = create_layer(
    ('foo/some_file', 'top'))

  squashed = squash_layers([first_layer, second_layer])

  assertHasFile(squashed, 'foo/some_file', 'top')
  assertHasFile(squashed, 'foo/bar/another_file', 'bar')

def test_delete_directory():
  second_layer = create_layer(
    ('foo/some_file', 'foo'),
    ('foo/another_file', 'bar'))

  first_layer = create_layer(
    ('foo/', None))

  squashed = squash_layers([first_layer, second_layer])

  assertDoesNotHaveFile(squashed, 'foo/some_file')
  assertDoesNotHaveFile(squashed, 'foo/another_file')

def test_delete_sub_directory():
  second_layer = create_layer(
    ('foo/some_file', 'foo'),
    ('foo/bar/another_file', 'bar'))

  first_layer = create_layer(
    ('foo/bar/', None))

  squashed = squash_layers([first_layer, second_layer])

  assertDoesNotHaveFile(squashed, 'foo/bar/another_file')
  assertHasFile(squashed, 'foo/some_file', 'foo')

def test_delete_sub_directory_with_dot():
  second_layer = create_layer(
    ('foo/some_file', 'foo'),
    ('foo/bar/another_file', 'bar'))

  first_layer = create_layer(
    ('./foo/bar/', None))

  squashed = squash_layers([first_layer, second_layer])

  assertDoesNotHaveFile(squashed, 'foo/bar/another_file')
  assertHasFile(squashed, 'foo/some_file', 'foo')

def test_delete_sub_directory_with_subdot():
  second_layer = create_layer(
    ('./foo/some_file', 'foo'),
    ('./foo/bar/another_file', 'bar'))

  first_layer = create_layer(
    ('foo/bar/', None))

  squashed = squash_layers([first_layer, second_layer])

  assertDoesNotHaveFile(squashed, 'foo/bar/another_file')
  assertDoesNotHaveFile(squashed, './foo/bar/another_file')
  assertHasFile(squashed, './foo/some_file', 'foo')

def test_delete_directory_recreate():
  third_layer = create_layer(
    ('foo/some_file', 'foo'),
    ('foo/another_file', 'bar'))

  second_layer = create_layer(
    ('foo/', None))

  first_layer = create_layer(
    ('foo/some_file', 'baz'))

  squashed = squash_layers([first_layer, second_layer, third_layer])

  assertHasFile(squashed, 'foo/some_file', 'baz')
  assertDoesNotHaveFile(squashed, 'foo/another_file')

def test_delete_directory_prefix():
  third_layer = create_layer(
    ('foobar/some_file', 'foo'),
    ('foo/another_file', 'bar'))

  second_layer = create_layer(
    ('foo/', None))

  squashed = squash_layers([second_layer, third_layer])

  assertHasFile(squashed, 'foobar/some_file', 'foo')
  assertDoesNotHaveFile(squashed, 'foo/another_file')


def test_delete_directory_pre_prefix():
  third_layer = create_layer(
    ('foobar/baz/some_file', 'foo'),
    ('foo/another_file', 'bar'))

  second_layer = create_layer(
    ('foo/', None))

  squashed = squash_layers([second_layer, third_layer])

  assertHasFile(squashed, 'foobar/baz/some_file', 'foo')
  assertDoesNotHaveFile(squashed, 'foo/another_file')


def test_delete_root_directory():
  third_layer = create_layer(
    ('build/first_file', 'foo'),
    ('build/second_file', 'bar'))

  second_layer = create_layer(
    ('build', None))

  squashed = squash_layers([second_layer, third_layer])

  assertDoesNotHaveFile(squashed, 'build/first_file')
  assertDoesNotHaveFile(squashed, 'build/second_file')


def test_tar_empty_layer():
  third_layer = create_layer(
    ('build/first_file', 'foo'),
    ('build/second_file', 'bar'))

  empty_layer = create_layer()

  squashed = squash_layers([empty_layer, third_layer])

  assertHasFile(squashed, 'build/first_file', 'foo')
  assertHasFile(squashed, 'build/second_file', 'bar')


def test_data_empty_layer():
  third_layer = create_layer(
    ('build/first_file', 'foo'),
    ('build/second_file', 'bar'))

  empty_layer = create_empty_layer()

  squashed = squash_layers([empty_layer, third_layer])

  assertHasFile(squashed, 'build/first_file', 'foo')
  assertHasFile(squashed, 'build/second_file', 'bar')


def test_broken_layer():
  third_layer = create_layer(
    ('build/first_file', 'foo'),
    ('build/second_file', 'bar'))

  broken_layer = 'not valid data'

  with pytest.raises(TarLayerReadException):
    squash_layers([broken_layer, third_layer])


def test_single_layer_with_prefix():
  tar_layer = create_layer(
    ('some_file', 'foo'),
    ('another_file', 'bar'),
    ('third_file', 'meh'))

  squashed = squash_layers([tar_layer], path_prefix='foo/')

  assertHasFile(squashed, 'foo/some_file', 'foo')
  assertHasFile(squashed, 'foo/another_file', 'bar')
  assertHasFile(squashed, 'foo/third_file', 'meh')


def test_multiple_layers_overwrite_with_prefix():
  second_layer = create_layer(
    ('some_file', 'foo'),
    ('another_file', 'bar'),
    ('third_file', 'meh'))

  first_layer = create_layer(
    ('another_file', 'top'))

  squashed = squash_layers([first_layer, second_layer], path_prefix='foo/')

  assertHasFile(squashed, 'foo/some_file', 'foo')
  assertHasFile(squashed, 'foo/third_file', 'meh')
  assertHasFile(squashed, 'foo/another_file', 'top')


def test_superlong_filename():
  tar_layer = create_layer(
    ('this_is_the_filename_that_never_ends_it_goes_on_and_on_my_friend_some_people_started', 'meh'))

  squashed = squash_layers([tar_layer], path_prefix='foo/')
  assertHasFile(squashed, 'foo/this_is_the_filename_that_never_ends_it_goes_on_and_on_my_friend_some_people_started', 'meh')


def test_superlong_prefix():
  tar_layer = create_layer(
    ('some_file', 'foo'),
    ('another_file', 'bar'),
    ('third_file', 'meh'))

  squashed = squash_layers([tar_layer],
    path_prefix='foo/bar/baz/something/foo/bar/baz/anotherthing/whatever/this/is/a/really/long/filename/that/goes/here/')

  assertHasFile(squashed, 'foo/bar/baz/something/foo/bar/baz/anotherthing/whatever/this/is/a/really/long/filename/that/goes/here/some_file', 'foo')
  assertHasFile(squashed, 'foo/bar/baz/something/foo/bar/baz/anotherthing/whatever/this/is/a/really/long/filename/that/goes/here/another_file', 'bar')
  assertHasFile(squashed, 'foo/bar/baz/something/foo/bar/baz/anotherthing/whatever/this/is/a/really/long/filename/that/goes/here/third_file', 'meh')


def test_hardlink_to_deleted_file():
  first_layer = create_layer(
    ('tobedeletedfile', 'somecontents'),
    ('link_to_deleted_file', 'linkto:tobedeletedfile'),
    ('third_file', 'meh'))

  second_layer = create_layer(
    ('tobedeletedfile', None))

  squashed = squash_layers([second_layer, first_layer], path_prefix='foo/')

  assertHasFile(squashed, 'foo/third_file', 'meh')
  assertHasFile(squashed, 'foo/link_to_deleted_file', 'somecontents')
  assertDoesNotHaveFile(squashed, 'foo/tobedeletedfile')


def test_multiple_hardlink_to_deleted_file():
  first_layer = create_layer(
    ('tobedeletedfile', 'somecontents'),
    ('link_to_deleted_file', 'linkto:tobedeletedfile'),
    ('another_link_to_deleted_file', 'linkto:tobedeletedfile'),
    ('third_file', 'meh'))

  second_layer = create_layer(
    ('tobedeletedfile', None))

  squashed = squash_layers([second_layer, first_layer], path_prefix='foo/')

  assertHasFile(squashed, 'foo/third_file', 'meh')
  assertHasFile(squashed, 'foo/link_to_deleted_file', 'somecontents')
  assertHasFile(squashed, 'foo/another_link_to_deleted_file', 'somecontents')

  assertDoesNotHaveFile(squashed, 'foo/tobedeletedfile')
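Note (not part of the commit): the create_layer helper above encodes deletions with the AUFS whiteout convention (an AUFS_WHITEOUT prefix on the base name), and the tests expect StreamLayerMerger to honor those markers across layers, including './'-prefixed paths and whole-directory whiteouts. The sketch below is a rough, path-level illustration of that bookkeeping only; it is not the actual streaming tar merger, and the names are hypothetical.

import os.path

# '.wh.' mirrors the AUFS whiteout prefix these tests exercise via AUFS_WHITEOUT.
SKETCH_WHITEOUT = '.wh.'

def sketch_surviving_paths(layers):
  # 'layers' is ordered newest to oldest, each a list of entry names.
  kept = []
  seen = set()
  deleted = []
  for layer in layers:
    for path in layer:
      normalized = path[2:] if path.startswith('./') else path
      directory, filename = os.path.split(normalized)
      if filename.startswith(SKETCH_WHITEOUT):
        # A whiteout entry hides the named file (or directory) in lower layers.
        deleted.append(os.path.join(directory, filename[len(SKETCH_WHITEOUT):]))
        continue
      hidden = any(normalized == d or normalized.startswith(d + '/') for d in deleted)
      if normalized in seen or hidden:
        continue
      seen.add(normalized)
      kept.append(path)
  return kept

For example, sketch_surviving_paths([['.wh.foo'], ['foo/some_file', 'bar']]) keeps only 'bar', matching the directory-whiteout behavior asserted in test_delete_directory and test_delete_root_directory.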