initial import for Open Source 🎉

Jimmy Zelinskie 2019-11-12 11:09:47 -05:00
parent 1898c361f3
commit 9c0dd3b722
2048 changed files with 218743 additions and 0 deletions

@@ -0,0 +1,145 @@
import hashlib
import os
import tarfile

from io import BytesIO
from contextlib import closing

import pytest

from data.registry_model.blobuploader import (retrieve_blob_upload_manager,
                                              upload_blob, BlobUploadException,
                                              BlobDigestMismatchException,
                                              BlobTooLargeException,
                                              BlobUploadSettings)
from data.registry_model.registry_pre_oci_model import PreOCIModel
from storage.distributedstorage import DistributedStorage
from storage.fakestorage import FakeStorage
from test.fixtures import *


@pytest.fixture()
def pre_oci_model(initialized_db):
    return PreOCIModel()


@pytest.mark.parametrize('chunk_count', [
    0,
    1,
    2,
    10,
])
@pytest.mark.parametrize('subchunk', [
    True,
    False,
])
def test_basic_upload_blob(chunk_count, subchunk, pre_oci_model):
    repository_ref = pre_oci_model.lookup_repository('devtable', 'complex')
    storage = DistributedStorage({'local_us': FakeStorage(None)}, ['local_us'])
    settings = BlobUploadSettings('2M', 512 * 1024, 3600)
    app_config = {'TESTING': True}

    data = ''
    with upload_blob(repository_ref, storage, settings) as manager:
        assert manager
        assert manager.blob_upload_id

        for index in range(0, chunk_count):
            chunk_data = os.urandom(100)
            data += chunk_data

            if subchunk:
                manager.upload_chunk(app_config, BytesIO(chunk_data))
                manager.upload_chunk(app_config, BytesIO(chunk_data), (index * 100) + 50)
            else:
                manager.upload_chunk(app_config, BytesIO(chunk_data))

        blob = manager.commit_to_blob(app_config)

    # Check the blob.
    assert blob.compressed_size == len(data)
    assert not blob.uploading
    assert blob.digest == 'sha256:' + hashlib.sha256(data).hexdigest()

    # Ensure the blob exists in storage and has the expected data.
    assert storage.get_content(['local_us'], blob.storage_path) == data


def test_cancel_upload(pre_oci_model):
    repository_ref = pre_oci_model.lookup_repository('devtable', 'complex')
    storage = DistributedStorage({'local_us': FakeStorage(None)}, ['local_us'])
    settings = BlobUploadSettings('2M', 512 * 1024, 3600)
    app_config = {'TESTING': True}

    blob_upload_id = None
    with upload_blob(repository_ref, storage, settings) as manager:
        blob_upload_id = manager.blob_upload_id
        assert pre_oci_model.lookup_blob_upload(repository_ref, blob_upload_id) is not None
        manager.upload_chunk(app_config, BytesIO('hello world'))

    # Since the blob was not committed, the upload should be deleted.
    assert blob_upload_id
    assert pre_oci_model.lookup_blob_upload(repository_ref, blob_upload_id) is None


def test_too_large(pre_oci_model):
    repository_ref = pre_oci_model.lookup_repository('devtable', 'complex')
    storage = DistributedStorage({'local_us': FakeStorage(None)}, ['local_us'])
    settings = BlobUploadSettings('1K', 512 * 1024, 3600)
    app_config = {'TESTING': True}

    with upload_blob(repository_ref, storage, settings) as manager:
        with pytest.raises(BlobTooLargeException):
            manager.upload_chunk(app_config, BytesIO(os.urandom(1024 * 1024 * 2)))


def test_extra_blob_stream_handlers(pre_oci_model):
    handler1_result = []
    handler2_result = []

    def handler1(bytes):
        handler1_result.append(bytes)

    def handler2(bytes):
        handler2_result.append(bytes)

    repository_ref = pre_oci_model.lookup_repository('devtable', 'complex')
    storage = DistributedStorage({'local_us': FakeStorage(None)}, ['local_us'])
    settings = BlobUploadSettings('1K', 512 * 1024, 3600)
    app_config = {'TESTING': True}

    with upload_blob(repository_ref, storage, settings,
                     extra_blob_stream_handlers=[handler1, handler2]) as manager:
        manager.upload_chunk(app_config, BytesIO('hello '))
        manager.upload_chunk(app_config, BytesIO('world'))

    assert ''.join(handler1_result) == 'hello world'
    assert ''.join(handler2_result) == 'hello world'


def valid_tar_gz(contents):
    with closing(BytesIO()) as layer_data:
        with closing(tarfile.open(fileobj=layer_data, mode='w|gz')) as tar_file:
            tar_file_info = tarfile.TarInfo(name='somefile')
            tar_file_info.type = tarfile.REGTYPE
            tar_file_info.size = len(contents)
            tar_file_info.mtime = 1
            tar_file.addfile(tar_file_info, BytesIO(contents))

        layer_bytes = layer_data.getvalue()

    return layer_bytes


def test_uncompressed_size(pre_oci_model):
    repository_ref = pre_oci_model.lookup_repository('devtable', 'complex')
    storage = DistributedStorage({'local_us': FakeStorage(None)}, ['local_us'])
    settings = BlobUploadSettings('1K', 512 * 1024, 3600)
    app_config = {'TESTING': True}

    with upload_blob(repository_ref, storage, settings) as manager:
        manager.upload_chunk(app_config, BytesIO(valid_tar_gz('hello world')))
        blob = manager.commit_to_blob(app_config)

    assert blob.compressed_size is not None
    assert blob.uncompressed_size is not None
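
All five tests above exercise the same context-manager flow from blobuploader: open an upload, stream one or more chunks, then either commit or let the exit path clean up. Distilled into one helper, that flow looks roughly like the sketch below; push_blob is an illustrative name, and of the three positional BlobUploadSettings values only the first (the maximum blob size) is pinned down by these tests, via test_too_large.

    from io import BytesIO

    from data.registry_model.blobuploader import BlobUploadSettings, upload_blob
    from storage.distributedstorage import DistributedStorage
    from storage.fakestorage import FakeStorage

    def push_blob(repository_ref, payload):
        # Illustrative helper, not part of this commit: upload 'payload' as a
        # single chunk and return the committed blob.
        app_config = {'TESTING': True}
        storage = DistributedStorage({'local_us': FakeStorage(None)}, ['local_us'])
        # '2M' caps the blob size (see test_too_large); the other two values
        # are copied verbatim from the tests above.
        settings = BlobUploadSettings('2M', 512 * 1024, 3600)
        with upload_blob(repository_ref, storage, settings) as manager:
            manager.upload_chunk(app_config, BytesIO(payload))
            return manager.commit_to_blob(app_config)

If commit_to_blob is never reached, leaving the with block cancels and deletes the upload, which is exactly the behavior test_cancel_upload pins down.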

File diff suppressed because it is too large

@@ -0,0 +1,104 @@
import hashlib
import json

from io import BytesIO

import pytest
from mock import patch

from app import docker_v2_signing_key
from data.registry_model.blobuploader import BlobUploadSettings, upload_blob
from data.registry_model.manifestbuilder import create_manifest_builder, lookup_manifest_builder
from data.registry_model.registry_pre_oci_model import PreOCIModel
from data.registry_model.registry_oci_model import OCIModel
from storage.distributedstorage import DistributedStorage
from storage.fakestorage import FakeStorage
from test.fixtures import *


@pytest.fixture(params=[PreOCIModel, OCIModel])
def registry_model(request, initialized_db):
    return request.param()


@pytest.fixture()
def fake_session():
    with patch('data.registry_model.manifestbuilder.session', {}):
        yield


@pytest.mark.parametrize('layers', [
    pytest.param([('someid', None, 'some data')], id='Single layer'),
    pytest.param([('parentid', None, 'some parent data'),
                  ('someid', 'parentid', 'some data')],
                 id='Multi layer'),
])
def test_build_manifest(layers, fake_session, registry_model):
    repository_ref = registry_model.lookup_repository('devtable', 'complex')
    storage = DistributedStorage({'local_us': FakeStorage(None)}, ['local_us'])
    settings = BlobUploadSettings('2M', 512 * 1024, 3600)
    app_config = {'TESTING': True}

    builder = create_manifest_builder(repository_ref, storage, docker_v2_signing_key)
    assert lookup_manifest_builder(repository_ref, 'anotherid', storage,
                                   docker_v2_signing_key) is None
    assert lookup_manifest_builder(repository_ref, builder.builder_id, storage,
                                   docker_v2_signing_key) is not None

    blobs_by_layer = {}
    for layer_id, parent_id, layer_bytes in layers:
        # Start a new layer.
        assert builder.start_layer(layer_id, json.dumps({'id': layer_id, 'parent': parent_id}),
                                   'local_us', None, 60)

        checksum = hashlib.sha1(layer_bytes).hexdigest()

        # Assign it a blob.
        with upload_blob(repository_ref, storage, settings) as uploader:
            uploader.upload_chunk(app_config, BytesIO(layer_bytes))
            blob = uploader.commit_to_blob(app_config)

        blobs_by_layer[layer_id] = blob
        builder.assign_layer_blob(builder.lookup_layer(layer_id), blob, [checksum])

        # Validate the checksum.
        assert builder.validate_layer_checksum(builder.lookup_layer(layer_id), checksum)

    # Commit the manifest to a tag.
    tag = builder.commit_tag_and_manifest('somenewtag', builder.lookup_layer(layers[-1][0]))
    assert tag
    assert tag in builder.committed_tags

    # Mark the builder as done.
    builder.done()

    # Verify the legacy image for the tag.
    found = registry_model.get_repo_tag(repository_ref, 'somenewtag', include_legacy_image=True)
    assert found
    assert found.name == 'somenewtag'
    assert found.legacy_image.docker_image_id == layers[-1][0]

    # Verify the blob and manifest.
    manifest = registry_model.get_manifest_for_tag(found)
    assert manifest

    parsed = manifest.get_parsed_manifest()
    assert len(list(parsed.layers)) == len(layers)

    for index, (layer_id, parent_id, layer_bytes) in enumerate(layers):
        assert list(parsed.blob_digests)[index] == blobs_by_layer[layer_id].digest
        assert list(parsed.layers)[index].v1_metadata.image_id == layer_id
        assert list(parsed.layers)[index].v1_metadata.parent_image_id == parent_id

    assert parsed.leaf_layer_v1_image_id == layers[-1][0]


def test_build_manifest_missing_parent(fake_session, registry_model):
    storage = DistributedStorage({'local_us': FakeStorage(None)}, ['local_us'])
    repository_ref = registry_model.lookup_repository('devtable', 'complex')
    builder = create_manifest_builder(repository_ref, storage, docker_v2_signing_key)

    assert builder.start_layer('somelayer', json.dumps({'id': 'somelayer', 'parent': 'someparent'}),
                               'local_us', None, 60) is None
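
The parametrization in test_build_manifest makes the builder's call sequence a little hard to follow; reduced to a single layer and stripped of assertions, the happy path is roughly the sketch below (build_single_layer_tag is an illustrative name; every call and argument is copied from the test above).

    import hashlib
    import json
    from io import BytesIO

    from app import docker_v2_signing_key
    from data.registry_model.blobuploader import BlobUploadSettings, upload_blob
    from data.registry_model.manifestbuilder import create_manifest_builder

    def build_single_layer_tag(repository_ref, storage, layer_bytes):
        # Illustrative reduction of test_build_manifest, not part of this commit.
        app_config = {'TESTING': True}
        settings = BlobUploadSettings('2M', 512 * 1024, 3600)
        builder = create_manifest_builder(repository_ref, storage, docker_v2_signing_key)

        # 1. Declare the layer along with its Docker v1 metadata JSON.
        builder.start_layer('someid', json.dumps({'id': 'someid', 'parent': None}),
                            'local_us', None, 60)

        # 2. Upload the layer bytes as a blob and attach it to the layer.
        with upload_blob(repository_ref, storage, settings) as uploader:
            uploader.upload_chunk(app_config, BytesIO(layer_bytes))
            blob = uploader.commit_to_blob(app_config)
        checksum = hashlib.sha1(layer_bytes).hexdigest()
        builder.assign_layer_blob(builder.lookup_layer('someid'), blob, [checksum])

        # 3. Commit the manifest under a tag, then mark the builder finished.
        tag = builder.commit_tag_and_manifest('sometag', builder.lookup_layer('someid'))
        builder.done()
        return tag

test_build_manifest_missing_parent shows the guard at step 1: start_layer returns None when the layer's v1 metadata references a parent the builder has never seen.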