Merge pull request #2730 from jzelinskie/v2-interface

endpoints.v2: new fs layout for data interface
Jimmy Zelinskie, 2017-06-28 13:51:43 -04:00 (committed by GitHub)
commit 80c9b611a4
9 changed files with 420 additions and 473 deletions
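
In practice the new layout boils down to an import swap in every v2 endpoint module; the following recaps what the hunks below show (it is a summary of the diff, not code added by the commit):

# Removed from the endpoint modules:
from data.interfaces.v2 import pre_oci_model as model

# Added in its place: the record types now live in endpoints.v2.models_interface
# (the manifest endpoint imports Label from there), and the concrete pre-OCI
# implementation is exposed as data_model in endpoints.v2.models_pre_oci.
from endpoints.v2.models_interface import Label
from endpoints.v2.models_pre_oci import data_model as model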


@@ -12,21 +12,19 @@ import features
from app import app, metric_queue, get_app_url, license_validator
from auth.auth_context import get_grant_context
from auth.permissions import (ReadRepositoryPermission, ModifyRepositoryPermission,
AdministerRepositoryPermission)
from auth.permissions import (
ReadRepositoryPermission, ModifyRepositoryPermission, AdministerRepositoryPermission)
from auth.registry_jwt_auth import process_registry_jwt_auth, get_auth_headers
from data.interfaces.v2 import pre_oci_model as model
from endpoints.decorators import anon_protect, anon_allowed
from endpoints.v2.errors import V2RegistryException, Unauthorized, Unsupported, NameUnknown
from endpoints.v2.models_pre_oci import data_model as model
from util.http import abort
from util.metrics.metricqueue import time_blueprint
from util.registry.dockerver import docker_version
from util.pagination import encrypt_page_token, decrypt_page_token
logger = logging.getLogger(__name__)
v2_bp = Blueprint('v2', __name__)
license_validator.enforce_license_before_request(v2_bp)
time_blueprint(v2_bp, metric_queue)
@@ -34,9 +32,7 @@ time_blueprint(v2_bp, metric_queue)
@v2_bp.app_errorhandler(V2RegistryException)
def handle_registry_v2_exception(error):
response = jsonify({
'errors': [error.as_dict()]
})
response = jsonify({'errors': [error.as_dict()]})
response.status_code = error.http_status_code
if response.status_code == 401:
@@ -53,6 +49,7 @@ def paginate(limit_kwarg_name='limit', offset_kwarg_name='offset',
"""
Decorates a handler adding a parsed pagination token and a callback to encode a response token.
"""
def wrapper(func):
@wraps(func)
def wrapped(*args, **kwargs):
@@ -86,7 +83,9 @@ def paginate(limit_kwarg_name='limit', offset_kwarg_name='offset',
kwargs[offset_kwarg_name] = offset
kwargs[callback_kwarg_name] = callback
return func(*args, **kwargs)
return wrapped
return wrapper
@@ -94,17 +93,15 @@ def _require_repo_permission(permission_class, scopes=None, allow_public=False):
def wrapper(func):
@wraps(func)
def wrapped(namespace_name, repo_name, *args, **kwargs):
logger.debug('Checking permission %s for repo: %s/%s', permission_class,
namespace_name, repo_name)
logger.debug('Checking permission %s for repo: %s/%s', permission_class, namespace_name,
repo_name)
repository = namespace_name + '/' + repo_name
repo = model.get_repository(namespace_name, repo_name)
if repo is None:
raise Unauthorized(repository=repository, scopes=scopes)
permission = permission_class(namespace_name, repo_name)
if (permission.can() or
(allow_public and
repo.is_public)):
if (permission.can() or (allow_public and repo.is_public)):
if repo.kind != 'image':
msg = 'This repository is for managing %s resources and not container images.' % repo.kind
raise Unsupported(detail=msg)
@@ -112,16 +109,15 @@ def _require_repo_permission(permission_class, scopes=None, allow_public=False):
raise Unauthorized(repository=repository, scopes=scopes)
return wrapped
return wrapper
require_repo_read = _require_repo_permission(ReadRepositoryPermission,
scopes=['pull'],
require_repo_read = _require_repo_permission(ReadRepositoryPermission, scopes=['pull'],
allow_public=True)
require_repo_write = _require_repo_permission(ModifyRepositoryPermission,
scopes=['pull', 'push'])
require_repo_admin = _require_repo_permission(AdministerRepositoryPermission,
scopes=['pull', 'push'])
require_repo_write = _require_repo_permission(ModifyRepositoryPermission, scopes=['pull', 'push'])
require_repo_admin = _require_repo_permission(AdministerRepositoryPermission, scopes=[
'pull', 'push'])
def get_input_stream(flask_request):
@@ -138,7 +134,9 @@ def route_show_if(value):
abort(404)
return f(*args, **kwargs)
return decorated_function
return decorator
@@ -169,5 +167,4 @@ from endpoints.v2 import (
catalog,
manifest,
tag,
v2auth,
)
v2auth,)
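
As a quick orientation for this module: the decorators defined here get stacked onto each endpoint in the files that follow. A minimal sketch, modeled on the catalog and tag endpoint hunks further down; the route path and handler name are hypothetical:

from flask import jsonify

from auth.registry_jwt_auth import process_registry_jwt_auth
from endpoints.common import parse_repository_name
from endpoints.decorators import anon_protect
from endpoints.v2 import v2_bp, require_repo_read, paginate

@v2_bp.route('/<repopath:repository>/example', methods=['GET'])
@parse_repository_name()
@process_registry_jwt_auth(scopes=['pull'])
@require_repo_read      # resolves the repository and checks ReadRepositoryPermission
@anon_protect
@paginate()             # injects limit, offset and the pagination callback
def example_list(namespace_name, repo_name, limit, offset, pagination_callback):
    response = jsonify({'results': []})
    pagination_callback(0, response)
    return response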


@@ -10,22 +10,20 @@ import resumablehashlib
from app import storage, app, get_app_url, metric_queue
from auth.registry_jwt_auth import process_registry_jwt_auth
from data import database
from data.interfaces.v2 import pre_oci_model as model
from digest import digest_tools
from endpoints.common import parse_repository_name
from endpoints.v2 import v2_bp, require_repo_read, require_repo_write, get_input_stream
from endpoints.v2.errors import (BlobUnknown, BlobUploadInvalid, BlobUploadUnknown, Unsupported,
NameUnknown, LayerTooLarge)
from endpoints.decorators import anon_protect
from endpoints.v2 import v2_bp, require_repo_read, require_repo_write, get_input_stream
from endpoints.v2.errors import (
BlobUnknown, BlobUploadInvalid, BlobUploadUnknown, Unsupported, NameUnknown, LayerTooLarge)
from endpoints.v2.models_pre_oci import data_model as model
from util.cache import cache_control
from util.registry.filelike import wrap_with_handler, StreamSlice
from util.registry.gzipstream import calculate_size_handler
from util.registry.torrent import PieceHasher
logger = logging.getLogger(__name__)
BASE_BLOB_ROUTE = '/<repopath:repository>/blobs/<regex("{0}"):digest>'
BLOB_DIGEST_ROUTE = BASE_BLOB_ROUTE.format(digest_tools.DIGEST_PATTERN)
RANGE_HEADER_REGEX = re.compile(r'^bytes=([0-9]+)-([0-9]+)$')
@@ -52,8 +50,7 @@ def check_blob_exists(namespace_name, repo_name, digest):
headers = {
'Docker-Content-Digest': digest,
'Content-Length': blob.size,
'Content-Type': BLOB_CONTENT_TYPE,
}
'Content-Type': BLOB_CONTENT_TYPE,}
# If our storage supports range requests, let the client know.
if storage.get_supports_resumable_downloads(blob.locations):
@@ -102,10 +99,7 @@ def download_blob(namespace_name, repo_name, digest):
storage.stream_read(blob.locations, path),
headers=headers.update({
'Content-Length': blob.size,
'Content-Type': BLOB_CONTENT_TYPE,
}),
)
'Content-Type': BLOB_CONTENT_TYPE,}),)
@v2_bp.route('/<repopath:repository>/blobs/uploads/', methods=['POST'])
@@ -128,13 +122,13 @@ def start_blob_upload(namespace_name, repo_name):
return Response(
status=202,
headers={
'Docker-Upload-UUID': new_upload_uuid,
'Range': _render_range(0),
'Location': get_app_url() + url_for('v2.upload_chunk',
repository='%s/%s' % (namespace_name, repo_name),
upload_uuid=new_upload_uuid)
},
)
'Docker-Upload-UUID':
new_upload_uuid,
'Range':
_render_range(0),
'Location':
get_app_url() + url_for('v2.upload_chunk', repository='%s/%s' %
(namespace_name, repo_name), upload_uuid=new_upload_uuid)},)
# The user plans to send us the entire body right now.
# Find the upload.
@@ -158,12 +152,11 @@ def start_blob_upload(namespace_name, repo_name):
return Response(
status=201,
headers={
'Docker-Content-Digest': digest,
'Location': get_app_url() + url_for('v2.download_blob',
repository='%s/%s' % (namespace_name, repo_name),
digest=digest),
},
)
'Docker-Content-Digest':
digest,
'Location':
get_app_url() + url_for('v2.download_blob', repository='%s/%s' %
(namespace_name, repo_name), digest=digest),},)
@v2_bp.route('/<repopath:repository>/blobs/uploads/<upload_uuid>', methods=['GET'])
@@ -180,9 +173,8 @@ def fetch_existing_upload(namespace_name, repo_name, upload_uuid):
status=204,
headers={
'Docker-Upload-UUID': upload_uuid,
'Range': _render_range(blob_upload.byte_count+1), # byte ranges are exclusive
},
)
'Range': _render_range(blob_upload.byte_count + 1), # byte ranges are exclusive
},)
@v2_bp.route('/<repopath:repository>/blobs/uploads/<upload_uuid>', methods=['PATCH'])
@@ -211,9 +203,7 @@ def upload_chunk(namespace_name, repo_name, upload_uuid):
headers={
'Location': _current_request_url(),
'Range': _render_range(updated_blob_upload.byte_count, with_bytes_prefix=False),
'Docker-Upload-UUID': upload_uuid,
},
)
'Docker-Upload-UUID': upload_uuid,},)
@v2_bp.route('/<repopath:repository>/blobs/uploads/<upload_uuid>', methods=['PUT'])
@@ -242,15 +232,12 @@ def monolithic_upload_or_last_chunk(namespace_name, repo_name, upload_uuid):
_finish_upload(namespace_name, repo_name, updated_blob_upload, digest)
# Write the response to the client.
return Response(
status=201,
headers={
'Docker-Content-Digest': digest,
'Location': get_app_url() + url_for('v2.download_blob',
repository='%s/%s' % (namespace_name, repo_name),
digest=digest),
}
)
return Response(status=201, headers={
'Docker-Content-Digest':
digest,
'Location':
get_app_url() + url_for('v2.download_blob', repository='%s/%s' %
(namespace_name, repo_name), digest=digest),})
@v2_bp.route('/<repopath:repository>/blobs/uploads/<upload_uuid>', methods=['DELETE'])
@@ -300,9 +287,11 @@ def _abort_range_not_satisfiable(valid_end, upload_uuid):
TODO(jzelinskie): Unify this with the V2RegistryException class.
"""
flask_abort(Response(status=416, headers={'Location': _current_request_url(),
'Range': '0-{0}'.format(valid_end),
'Docker-Upload-UUID': upload_uuid}))
flask_abort(
Response(status=416, headers={
'Location': _current_request_url(),
'Range': '0-{0}'.format(valid_end),
'Docker-Upload-UUID': upload_uuid}))
def _parse_range_header(range_header_text):
@@ -415,16 +404,15 @@ def _upload_chunk(blob_upload, range_header):
length,
input_fp,
blob_upload.storage_metadata,
content_type=BLOB_CONTENT_TYPE,
)
content_type=BLOB_CONTENT_TYPE,)
if upload_error is not None:
logger.error('storage.stream_upload_chunk returned error %s', upload_error)
return None
# Update the chunk upload time metric.
metric_queue.chunk_upload_time.Observe(time.time() - start_time,
labelvalues=[length_written, list(location_set)[0]])
metric_queue.chunk_upload_time.Observe(time.time() - start_time, labelvalues=[
length_written, list(location_set)[0]])
# If we determined an uncompressed size and this is the first chunk, add it to the blob.
# Otherwise, we clear the size from the blob as it was uploaded in multiple chunks.
@@ -499,8 +487,7 @@ def _finalize_blob_database(namespace_name, repo_name, blob_upload, digest, alre
repo_name,
digest,
blob_upload,
app.config['PUSH_TEMP_TAG_EXPIRATION_SEC'],
)
app.config['PUSH_TEMP_TAG_EXPIRATION_SEC'],)
# If it doesn't already exist, create the BitTorrent pieces for the blob.
if blob_upload.piece_sha_state is not None and not already_existed:
@@ -521,5 +508,4 @@ def _finish_upload(namespace_name, repo_name, blob_upload, digest):
repo_name,
blob_upload,
digest,
_finalize_blob_storage(blob_upload, digest),
)
_finalize_blob_storage(blob_upload, digest),)


@@ -5,7 +5,8 @@ from flask import jsonify
from auth.registry_jwt_auth import process_registry_jwt_auth, get_granted_entity
from endpoints.decorators import anon_protect
from endpoints.v2 import v2_bp, paginate
from data.interfaces.v2 import pre_oci_model as model
from endpoints.v2.models_pre_oci import data_model as model
@v2_bp.route('/_catalog', methods=['GET'])
@process_registry_jwt_auth()
@@ -18,12 +19,11 @@ def catalog_search(limit, offset, pagination_callback):
username = entity.user.username
include_public = bool(features.PUBLIC_CATALOG)
visible_repositories = model.get_visible_repositories(username, limit+1, offset,
visible_repositories = model.get_visible_repositories(username, limit + 1, offset,
include_public=include_public)
response = jsonify({
'repositories': ['%s/%s' % (repo.namespace_name, repo.name)
for repo in visible_repositories][0:limit],
})
for repo in visible_repositories][0:limit],})
pagination_callback(len(visible_repositories), response)
return response
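
The handler above relies on a small over-fetch trick: the model is asked for limit + 1 rows, evidently so the pagination callback can tell whether another page exists, while the JSON body returns at most limit entries. A standalone sketch of the idea (the repository names are made up):

def _page(rows, limit):
    # rows were fetched with limit + 1; an extra row means another page exists.
    has_additional = len(rows) > limit
    return rows[:limit], has_additional

page, has_more = _page(['devtable/simple', 'devtable/complex', 'buynlarge/orgrepo'], 2)
# page == ['devtable/simple', 'devtable/complex']; has_more is True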


@@ -1,8 +1,9 @@
import bitmath
class V2RegistryException(Exception):
def __init__(self, error_code_str, message, detail, http_status_code=400,
repository=None, scopes=None):
def __init__(self, error_code_str, message, detail, http_status_code=400, repository=None,
scopes=None):
super(V2RegistryException, self).__init__(message)
self.http_status_code = http_status_code
self.repository = repository
@@ -15,104 +16,81 @@ class V2RegistryException(Exception):
return {
'code': self._error_code_str,
'message': self.message,
'detail': self._detail if self._detail is not None else {},
}
'detail': self._detail if self._detail is not None else {},}
class BlobUnknown(V2RegistryException):
def __init__(self, detail=None):
super(BlobUnknown, self).__init__('BLOB_UNKNOWN',
'blob unknown to registry',
detail,
404)
super(BlobUnknown, self).__init__('BLOB_UNKNOWN', 'blob unknown to registry', detail, 404)
class BlobUploadInvalid(V2RegistryException):
def __init__(self, detail=None):
super(BlobUploadInvalid, self).__init__('BLOB_UPLOAD_INVALID',
'blob upload invalid',
detail)
super(BlobUploadInvalid, self).__init__('BLOB_UPLOAD_INVALID', 'blob upload invalid', detail)
class BlobUploadUnknown(V2RegistryException):
def __init__(self, detail=None):
super(BlobUploadUnknown, self).__init__('BLOB_UPLOAD_UNKNOWN',
'blob upload unknown to registry',
detail,
404)
'blob upload unknown to registry', detail, 404)
class DigestInvalid(V2RegistryException):
def __init__(self, detail=None):
super(DigestInvalid, self).__init__('DIGEST_INVALID',
'provided digest did not match uploaded content',
detail)
'provided digest did not match uploaded content', detail)
class ManifestBlobUnknown(V2RegistryException):
def __init__(self, detail=None):
super(ManifestBlobUnknown, self).__init__('MANIFEST_BLOB_UNKNOWN',
'manifest blob unknown to registry',
detail)
'manifest blob unknown to registry', detail)
class ManifestInvalid(V2RegistryException):
def __init__(self, detail=None, http_status_code=400):
super(ManifestInvalid, self).__init__('MANIFEST_INVALID',
'manifest invalid',
detail,
super(ManifestInvalid, self).__init__('MANIFEST_INVALID', 'manifest invalid', detail,
http_status_code)
class ManifestUnknown(V2RegistryException):
def __init__(self, detail=None):
super(ManifestUnknown, self).__init__('MANIFEST_UNKNOWN',
'manifest unknown',
detail,
404)
super(ManifestUnknown, self).__init__('MANIFEST_UNKNOWN', 'manifest unknown', detail, 404)
class ManifestUnverified(V2RegistryException):
def __init__(self, detail=None):
super(ManifestUnverified, self).__init__('MANIFEST_UNVERIFIED',
'manifest failed signature verification',
detail)
'manifest failed signature verification', detail)
class NameInvalid(V2RegistryException):
def __init__(self, detail=None, message=None):
super(NameInvalid, self).__init__('NAME_INVALID',
message or 'invalid repository name',
detail)
super(NameInvalid, self).__init__('NAME_INVALID', message or 'invalid repository name', detail)
class NameUnknown(V2RegistryException):
def __init__(self, detail=None):
super(NameUnknown, self).__init__('NAME_UNKNOWN',
'repository name not known to registry',
detail,
404)
super(NameUnknown, self).__init__('NAME_UNKNOWN', 'repository name not known to registry',
detail, 404)
class SizeInvalid(V2RegistryException):
def __init__(self, detail=None):
super(SizeInvalid, self).__init__('SIZE_INVALID',
'provided length did not match content length',
detail)
'provided length did not match content length', detail)
class TagAlreadyExists(V2RegistryException):
def __init__(self, detail=None):
super(TagAlreadyExists, self).__init__('TAG_ALREADY_EXISTS',
'tag was already pushed',
detail,
super(TagAlreadyExists, self).__init__('TAG_ALREADY_EXISTS', 'tag was already pushed', detail,
409)
class TagInvalid(V2RegistryException):
def __init__(self, detail=None):
super(TagInvalid, self).__init__('TAG_INVALID',
'manifest tag did not match URI',
detail)
super(TagInvalid, self).__init__('TAG_INVALID', 'manifest tag did not match URI', detail)
class LayerTooLarge(V2RegistryException):
def __init__(self, uploaded=None, max_allowed=None):
@@ -123,43 +101,33 @@ class LayerTooLarge(V2RegistryException):
detail = {
'reason': '%s is greater than maximum allowed size %s' % (uploaded, max_allowed),
'max_allowed': max_allowed,
'uploaded': uploaded,
}
'uploaded': uploaded,}
up_str = bitmath.Byte(uploaded).best_prefix().format("{value:.2f} {unit}")
max_str = bitmath.Byte(max_allowed).best_prefix().format("{value:.2f} {unit}")
message = 'Uploaded blob of %s is larger than %s allowed by this registry' % (up_str, max_str)
message = 'Uploaded blob of %s is larger than %s allowed by this registry' % (up_str,
max_str)
class Unauthorized(V2RegistryException):
def __init__(self, detail=None, repository=None, scopes=None):
super(Unauthorized, self).__init__('UNAUTHORIZED',
'access to the requested resource is not authorized',
detail,
401,
repository=repository,
scopes=scopes)
super(Unauthorized,
self).__init__('UNAUTHORIZED', 'access to the requested resource is not authorized',
detail, 401, repository=repository, scopes=scopes)
class Unsupported(V2RegistryException):
def __init__(self, detail=None, message=None):
super(Unsupported, self).__init__('UNSUPPORTED',
message or 'The operation is unsupported.',
detail,
405)
super(Unsupported, self).__init__('UNSUPPORTED', message or 'The operation is unsupported.',
detail, 405)
class InvalidLogin(V2RegistryException):
def __init__(self, message=None):
super(InvalidLogin, self).__init__('UNAUTHORIZED',
message or 'Specified credentials are invalid',
{},
401)
super(InvalidLogin, self).__init__('UNAUTHORIZED', message or
'Specified credentials are invalid', {}, 401)
class InvalidRequest(V2RegistryException):
def __init__(self, message=None):
super(InvalidRequest, self).__init__('INVALID_REQUEST',
message or 'Invalid request',
{},
400)
super(InvalidRequest, self).__init__('INVALID_REQUEST', message or 'Invalid request', {}, 400)
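
Together with the app_errorhandler shown in the first file above, these classes translate straight into the registry's JSON error envelope. A minimal sketch of that round trip (the detail payload and repository name are made up):

from endpoints.v2.errors import NameUnknown

try:
    raise NameUnknown(detail={'name': 'devtable/missing'})
except NameUnknown as nu:
    status = nu.http_status_code       # 404
    body = {'errors': [nu.as_dict()]}
    # body == {'errors': [{'code': 'NAME_UNKNOWN',
    #                      'message': 'repository name not known to registry',
    #                      'detail': {'name': 'devtable/missing'}}]}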


@@ -8,14 +8,15 @@ import features
from app import docker_v2_signing_key, app, metric_queue
from auth.registry_jwt_auth import process_registry_jwt_auth
from data.interfaces.v2 import pre_oci_model as model, Label
from digest import digest_tools
from endpoints.common import parse_repository_name
from endpoints.decorators import anon_protect
from endpoints.v2 import v2_bp, require_repo_read, require_repo_write
from endpoints.v2.errors import (BlobUnknown, ManifestInvalid, ManifestUnknown, TagInvalid,
NameInvalid)
from endpoints.notificationhelper import spawn_notification
from endpoints.v2 import v2_bp, require_repo_read, require_repo_write
from endpoints.v2.errors import (
BlobUnknown, ManifestInvalid, ManifestUnknown, TagInvalid, NameInvalid)
from endpoints.v2.models_interface import Label
from endpoints.v2.models_pre_oci import data_model as model
from image.docker import ManifestException
from image.docker.schema1 import DockerSchema1Manifest, DockerSchema1ManifestBuilder
from image.docker.schema2 import DOCKER_SCHEMA2_CONTENT_TYPES
@@ -24,14 +25,13 @@ from util.names import VALID_TAG_PATTERN
from util.registry.replication import queue_replication_batch
from util.validation import is_json
logger = logging.getLogger(__name__)
BASE_MANIFEST_ROUTE = '/<repopath:repository>/manifests/<regex("{0}"):manifest_ref>'
MANIFEST_DIGEST_ROUTE = BASE_MANIFEST_ROUTE.format(digest_tools.DIGEST_PATTERN)
MANIFEST_TAGNAME_ROUTE = BASE_MANIFEST_ROUTE.format(VALID_TAG_PATTERN)
@v2_bp.route(MANIFEST_TAGNAME_ROUTE, methods=['GET'])
@parse_repository_name()
@process_registry_jwt_auth(scopes=['pull'])
@@ -51,14 +51,14 @@ def fetch_manifest_by_tagname(namespace_name, repo_name, manifest_ref):
repo = model.get_repository(namespace_name, repo_name)
if repo is not None:
track_and_log('pull_repo', repo, analytics_name='pull_repo_100x', analytics_sample=0.01,
tag=manifest_ref)
tag=manifest_ref)
metric_queue.repository_pull.Inc(labelvalues=[namespace_name, repo_name, 'v2', True])
return Response(
manifest.json,
status=200,
headers={'Content-Type': manifest.media_type, 'Docker-Content-Digest': manifest.digest},
)
headers={'Content-Type': manifest.media_type,
'Docker-Content-Digest': manifest.digest},)
@v2_bp.route(MANIFEST_DIGEST_ROUTE, methods=['GET'])
@@ -77,8 +77,9 @@ def fetch_manifest_by_digest(namespace_name, repo_name, manifest_ref):
track_and_log('pull_repo', repo, manifest_digest=manifest_ref)
metric_queue.repository_pull.Inc(labelvalues=[namespace_name, repo_name, 'v2', True])
return Response(manifest.json, status=200, headers={'Content-Type': manifest.media_type,
'Docker-Content-Digest': manifest.digest})
return Response(manifest.json, status=200, headers={
'Content-Type': manifest.media_type,
'Docker-Content-Digest': manifest.digest})
def _reject_manifest2_schema2(func):
@@ -88,6 +89,7 @@ def _reject_manifest2_schema2(func):
raise ManifestInvalid(detail={'message': 'manifest schema version not supported'},
http_status_code=415)
return func(*args, **kwargs)
return wrapped
@@ -130,8 +132,7 @@ def write_manifest_by_digest(namespace_name, repo_name, manifest_ref):
def _write_manifest(namespace_name, repo_name, manifest):
if (manifest.namespace == '' and
features.LIBRARY_SUPPORT and
if (manifest.namespace == '' and features.LIBRARY_SUPPORT and
namespace_name == app.config['LIBRARY_NAMESPACE']):
pass
elif manifest.namespace != namespace_name:
@@ -173,8 +174,7 @@ def _write_manifest(namespace_name, repo_name, manifest):
rewritten_image.comment,
rewritten_image.command,
rewritten_image.compat_json,
rewritten_image.parent_image_id,
)
rewritten_image.parent_image_id,)
except ManifestException as me:
logger.exception("exception when rewriting v1 metadata")
raise ManifestInvalid(detail={'message': 'failed synthesizing v1 metadata: %s' % me.message})
@@ -211,12 +211,11 @@ def _write_manifest_and_log(namespace_name, repo_name, manifest):
'OK',
status=202,
headers={
'Docker-Content-Digest': manifest.digest,
'Location': url_for('v2.fetch_manifest_by_digest',
repository='%s/%s' % (namespace_name, repo_name),
manifest_ref=manifest.digest),
},
)
'Docker-Content-Digest':
manifest.digest,
'Location':
url_for('v2.fetch_manifest_by_digest', repository='%s/%s' % (namespace_name, repo_name),
manifest_ref=manifest.digest),},)
@v2_bp.route(MANIFEST_DIGEST_ROUTE, methods=['DELETE'])
@@ -270,5 +269,6 @@ def _generate_and_store_manifest(namespace_name, repo_name, tag_name):
manifest.bytes)
return manifest
def _determine_media_type(value):
media_type_name = 'application/json' if is_json(value) else 'text/plain'


@@ -0,0 +1,258 @@
from abc import ABCMeta, abstractmethod
from collections import namedtuple
from namedlist import namedlist
from six import add_metaclass
class Repository(
namedtuple('Repository', [
'id', 'name', 'namespace_name', 'description', 'is_public', 'kind', 'trust_enabled'])):
"""
Repository represents a namespaced collection of tags.
:type id: int
:type name: string
:type namespace_name: string
:type description: string
:type is_public: bool
:type kind: string
:type trust_enabled: bool
"""
class ManifestJSON(namedtuple('ManifestJSON', ['digest', 'json', 'media_type'])):
"""
ManifestJSON represents a Manifest of any format.
"""
class Tag(namedtuple('Tag', ['name', 'repository'])):
"""
Tag represents a user-facing alias for referencing a set of Manifests.
"""
class BlobUpload(
namedlist('BlobUpload', [
'uuid', 'byte_count', 'uncompressed_byte_count', 'chunk_count', 'sha_state', 'location_name',
'storage_metadata', 'piece_sha_state', 'piece_hashes', 'repo_namespace_name', 'repo_name'])):
"""
BlobUpload represents the current state of a Blob being uploaded.
"""
class Blob(namedtuple('Blob', ['uuid', 'digest', 'size', 'locations'])):
"""
Blob represents an opaque binary blob saved to the storage system.
"""
class RepositoryReference(namedtuple('RepositoryReference', ['id', 'name', 'namespace_name'])):
"""
RepositoryReference represents a reference to a Repository, without its full metadata.
"""
class Label(namedtuple('Label', ['key', 'value', 'source_type', 'media_type'])):
"""
Label represents a key-value pair that describes a particular Manifest.
"""
@add_metaclass(ABCMeta)
class DockerRegistryV2DataInterface(object):
"""
Interface that represents all data store interactions required by the v2 Docker Registry protocol.
"""
@abstractmethod
def create_repository(self, namespace_name, repo_name, creating_user=None):
"""
Creates a new repository under the specified namespace with the given name. The user supplied is
the user creating the repository, if any.
"""
pass
@abstractmethod
def get_repository(self, namespace_name, repo_name):
"""
Returns a repository tuple for the repository with the given name under the given namespace.
Returns None if no such repository was found.
"""
pass
@abstractmethod
def has_active_tag(self, namespace_name, repo_name, tag_name):
"""
Returns whether there is an active tag with the given name under the matching repository.
"""
pass
@abstractmethod
def get_manifest_by_tag(self, namespace_name, repo_name, tag_name):
"""
Returns the current manifest for the tag with the given name under the matching repository, if
any, or None if none.
"""
pass
@abstractmethod
def get_manifest_by_digest(self, namespace_name, repo_name, digest):
"""
Returns the manifest matching the given digest under the matching repository, if any, or None if
none.
"""
pass
@abstractmethod
def delete_manifest_by_digest(self, namespace_name, repo_name, digest):
"""
Deletes the manifest with the associated digest (if any) and returns all removed tags that
pointed to that manifest. If the manifest was not found, returns an empty list.
"""
pass
@abstractmethod
def get_docker_v1_metadata_by_tag(self, namespace_name, repo_name, tag_name):
"""
Returns the Docker V1 metadata associated with the tag with the given name under the matching
repository, if any. If none, returns None.
"""
pass
@abstractmethod
def get_docker_v1_metadata_by_image_id(self, namespace_name, repo_name, docker_image_ids):
"""
Returns a map of Docker V1 metadata for each given image ID, matched under the repository with
the given namespace and name. Returns an empty map if the matching repository was not found.
"""
pass
@abstractmethod
def get_parents_docker_v1_metadata(self, namespace_name, repo_name, docker_image_id):
"""
Returns an ordered list containing the Docker V1 metadata for each parent of the image with the
given docker ID under the matching repository. Returns an empty list if the image was not found.
"""
pass
@abstractmethod
def create_manifest_and_update_tag(self, namespace_name, repo_name, tag_name, manifest_digest,
manifest_bytes):
"""
Creates a new manifest with the given digest and byte data, and assigns the tag with the given
name under the matching repository to it.
"""
pass
@abstractmethod
def synthesize_v1_image(self, repository, storage, image_id, created, comment, command,
compat_json, parent_image_id):
"""
Synthesizes a V1 image under the specified repository, pointing to the given storage and returns
the V1 metadata for the synthesized image.
"""
pass
@abstractmethod
def save_manifest(self, namespace_name, repo_name, tag_name, leaf_layer_docker_id,
manifest_digest, manifest_bytes):
"""
Saves a manifest pointing to the given leaf image, with the given manifest, under the matching
repository as a tag with the given name.
Returns a boolean indicating whether the tag was newly created.
"""
pass
@abstractmethod
def repository_tags(self, namespace_name, repo_name, limit, offset):
"""
Returns the active tags under the repository with the given name and namespace.
"""
pass
@abstractmethod
def get_visible_repositories(self, username, limit, offset):
"""
Returns the repositories visible to the user with the given username, if any.
"""
pass
@abstractmethod
def create_blob_upload(self, namespace_name, repo_name, upload_uuid, location_name,
storage_metadata):
"""
Creates a blob upload under the matching repository with the given UUID and metadata.
Returns whether the matching repository exists.
"""
pass
@abstractmethod
def blob_upload_by_uuid(self, namespace_name, repo_name, upload_uuid):
"""
Searches for a blob upload with the given UUID under the given repository and returns it or None
if none.
"""
pass
@abstractmethod
def update_blob_upload(self, blob_upload):
"""
Saves any changes to the blob upload object given to the backing data store.
Fields that can change:
- uncompressed_byte_count
- piece_hashes
- piece_sha_state
- storage_metadata
- byte_count
- chunk_count
- sha_state
"""
pass
@abstractmethod
def delete_blob_upload(self, namespace_name, repo_name, uuid):
"""
Deletes the blob upload with the given uuid under the matching repository. If none, does
nothing.
"""
pass
@abstractmethod
def create_blob_and_temp_tag(self, namespace_name, repo_name, blob_digest, blob_upload,
expiration_sec):
"""
Creates a blob and links a temporary tag with the specified expiration to it under the matching
repository.
"""
pass
@abstractmethod
def get_blob_by_digest(self, namespace_name, repo_name, digest):
"""
Returns the blob with the given digest under the matching repository or None if none.
"""
pass
@abstractmethod
def save_bittorrent_pieces(self, blob, piece_size, piece_bytes):
"""
Saves the BitTorrent piece hashes for the given blob.
"""
pass
@abstractmethod
def create_manifest_labels(self, namespace_name, repo_name, manifest_digest, labels):
"""
Creates new labels for the provided manifest.
"""
pass
@abstractmethod
def get_blob_path(self, blob):
"""
Once everything is moved over, this could be in util.registry and not even touch the database.
"""
pass
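
One design note worth calling out: the read-only records (Repository, ManifestJSON, Tag, Blob, RepositoryReference, Label) are namedtuples, while BlobUpload is a namedlist so that upload state can be mutated in place and then persisted through update_blob_upload(). A minimal sketch with made-up values:

from endpoints.v2.models_interface import Blob, BlobUpload

blob = Blob(uuid='some-uuid', digest='sha256:0123abcd', size=1024, locations=['local_us'])
# blob.size = 0           # namedtuple fields are immutable; this would raise AttributeError

upload = BlobUpload(uuid='some-uuid', byte_count=0, uncompressed_byte_count=0, chunk_count=0,
                    sha_state=None, location_name='local_us', storage_metadata={},
                    piece_sha_state=None, piece_hashes=None,
                    repo_namespace_name='devtable', repo_name='simple')
upload.byte_count += 1024  # namedlist fields are mutable, matching what update_blob_upload() saves
upload.chunk_count += 1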


@@ -1,272 +1,26 @@
from abc import ABCMeta, abstractmethod
from collections import namedtuple
from namedlist import namedlist
from peewee import IntegrityError
from six import add_metaclass
from data import model, database
from data.model import DataModelException
from endpoints.v2.models_interface import (
Blob,
BlobUpload,
DockerRegistryV2DataInterface,
ManifestJSON,
Repository,
RepositoryReference,
Tag,)
from image.docker.v1 import DockerV1Metadata
_MEDIA_TYPE = "application/vnd.docker.distribution.manifest.v1+prettyjws"
class Repository(namedtuple('Repository', ['id', 'name', 'namespace_name', 'description',
'is_public', 'kind', 'trust_enabled'])):
"""
Repository represents a namespaced collection of tags.
:type id: int
:type name: string
:type namespace_name: string
:type description: string
:type is_public: bool
:type kind: string
:type trust_enabled: bool
"""
class ManifestJSON(namedtuple('ManifestJSON', ['digest', 'json', 'media_type'])):
"""
ManifestJSON represents a Manifest of any format.
"""
class Tag(namedtuple('Tag', ['name', 'repository'])):
"""
Tag represents a user-facing alias for referencing a set of Manifests.
"""
class BlobUpload(namedlist('BlobUpload', ['uuid', 'byte_count', 'uncompressed_byte_count',
'chunk_count', 'sha_state', 'location_name',
'storage_metadata', 'piece_sha_state', 'piece_hashes',
'repo_namespace_name', 'repo_name'])):
"""
BlobUpload represents the current state of an Blob being uploaded.
"""
class Blob(namedtuple('Blob', ['uuid', 'digest', 'size', 'locations'])):
"""
Blob represents an opaque binary blob saved to the storage system.
"""
class RepositoryReference(namedtuple('RepositoryReference', ['id', 'name', 'namespace_name'])):
"""
RepositoryReference represents a reference to a Repository, without its full metadata.
"""
class Label(namedtuple('Label', ['key', 'value', 'source_type', 'media_type'])):
"""
Label represents a key-value pair that describes a particular Manifest.
"""
@add_metaclass(ABCMeta)
class DockerRegistryV2DataInterface(object):
"""
Interface that represents all data store interactions required by a Docker Registry v1.
"""
@abstractmethod
def create_repository(self, namespace_name, repo_name, creating_user=None):
"""
Creates a new repository under the specified namespace with the given name. The user supplied is
the user creating the repository, if any.
"""
pass
@abstractmethod
def get_repository(self, namespace_name, repo_name):
"""
Returns a repository tuple for the repository with the given name under the given namespace.
Returns None if no such repository was found.
"""
pass
@abstractmethod
def has_active_tag(self, namespace_name, repo_name, tag_name):
"""
Returns whether there is an active tag for the tag with the given name under the matching
repository, if any, or none if none.
"""
pass
@abstractmethod
def get_manifest_by_tag(self, namespace_name, repo_name, tag_name):
"""
Returns the current manifest for the tag with the given name under the matching repository, if
any, or None if none.
"""
pass
@abstractmethod
def get_manifest_by_digest(self, namespace_name, repo_name, digest):
"""
Returns the manifest matching the given digest under the matching repository, if any, or None if
none.
"""
pass
@abstractmethod
def delete_manifest_by_digest(self, namespace_name, repo_name, digest):
"""
Deletes the manifest with the associated digest (if any) and returns all removed tags that
pointed to that manifest. If the manifest was not found, returns an empty list.
"""
pass
@abstractmethod
def get_docker_v1_metadata_by_tag(self, namespace_name, repo_name, tag_name):
"""
Returns the Docker V1 metadata associated with the tag with the given name under the matching
repository, if any. If none, returns None.
"""
pass
@abstractmethod
def get_docker_v1_metadata_by_image_id(self, namespace_name, repo_name, docker_image_ids):
"""
Returns a map of Docker V1 metadata for each given image ID, matched under the repository with
the given namespace and name. Returns an empty map if the matching repository was not found.
"""
pass
@abstractmethod
def get_parents_docker_v1_metadata(self, namespace_name, repo_name, docker_image_id):
"""
Returns an ordered list containing the Docker V1 metadata for each parent of the image with the
given docker ID under the matching repository. Returns an empty list if the image was not found.
"""
pass
@abstractmethod
def create_manifest_and_update_tag(self, namespace_name, repo_name, tag_name, manifest_digest,
manifest_bytes):
"""
Creates a new manifest with the given digest and byte data, and assigns the tag with the given
name under the matching repository to it.
"""
pass
@abstractmethod
def synthesize_v1_image(self, repository, storage, image_id, created, comment, command,
compat_json, parent_image_id):
"""
Synthesizes a V1 image under the specified repository, pointing to the given storage and returns
the V1 metadata for the synthesized image.
"""
pass
@abstractmethod
def save_manifest(self, namespace_name, repo_name, tag_name, leaf_layer_docker_id,
manifest_digest, manifest_bytes):
"""
Saves a manifest pointing to the given leaf image, with the given manifest, under the matching
repository as a tag with the given name.
Returns a boolean whether or not the tag was newly created or not.
"""
pass
@abstractmethod
def repository_tags(self, namespace_name, repo_name, limit, offset):
"""
Returns the active tags under the repository with the given name and namespace.
"""
pass
@abstractmethod
def get_visible_repositories(self, username, limit, offset):
"""
Returns the repositories visible to the user with the given username, if any.
"""
pass
@abstractmethod
def create_blob_upload(self, namespace_name, repo_name, upload_uuid, location_name, storage_metadata):
"""
Creates a blob upload under the matching repository with the given UUID and metadata.
Returns whether the matching repository exists.
"""
pass
@abstractmethod
def blob_upload_by_uuid(self, namespace_name, repo_name, upload_uuid):
"""
Searches for a blob upload with the given UUID under the given repository and returns it or None
if none.
"""
pass
@abstractmethod
def update_blob_upload(self, blob_upload):
"""
Saves any changes to the blob upload object given to the backing data store.
Fields that can change:
- uncompressed_byte_count
- piece_hashes
- piece_sha_state
- storage_metadata
- byte_count
- chunk_count
- sha_state
"""
pass
@abstractmethod
def delete_blob_upload(self, namespace_name, repo_name, uuid):
"""
Deletes the blob upload with the given uuid under the matching repository. If none, does
nothing.
"""
pass
@abstractmethod
def create_blob_and_temp_tag(self, namespace_name, repo_name, blob_digest, blob_upload,
expiration_sec):
"""
Creates a blob and links a temporary tag with the specified expiration to it under the matching
repository.
"""
pass
@abstractmethod
def get_blob_by_digest(self, namespace_name, repo_name, digest):
"""
Returns the blob with the given digest under the matching repository or None if none.
"""
pass
@abstractmethod
def save_bittorrent_pieces(self, blob, piece_size, piece_bytes):
"""
Saves the BitTorrent piece hashes for the given blob.
"""
pass
@abstractmethod
def create_manifest_labels(self, namespace_name, repo_name, manifest_digest, labels):
"""
Creates a new labels for the provided manifest.
"""
pass
@abstractmethod
def get_blob_path(self, blob):
"""
Once everything is moved over, this could be in util.registry and not even touch the database.
"""
pass
class PreOCIModel(DockerRegistryV2DataInterface):
"""
PreOCIModel implements the data model for the v2 Docker Registry protocol using a database schema
before it was changed to support the OCI specification.
"""
def create_repository(self, namespace_name, repo_name, creating_user=None):
return model.repository.create_repository(namespace_name, repo_name, creating_user)
@@ -299,14 +53,10 @@ class PreOCIModel(DockerRegistryV2DataInterface):
def delete_manifest_by_digest(self, namespace_name, repo_name, digest):
def _tag_view(tag):
return Tag(
name=tag.name,
repository=RepositoryReference(
id=tag.repository_id,
name=repo_name,
namespace_name=namespace_name,
)
)
return Tag(name=tag.name, repository=RepositoryReference(
id=tag.repository_id,
name=repo_name,
namespace_name=namespace_name,))
tags = model.tag.delete_manifest_by_digest(namespace_name, repo_name, digest)
return [_tag_view(tag) for tag in tags]
@@ -324,8 +74,9 @@ class PreOCIModel(DockerRegistryV2DataInterface):
return {}
images_query = model.image.lookup_repository_images(repo, docker_image_ids)
return {image.docker_image_id: _docker_v1_metadata(namespace_name, repo_name, image)
for image in images_query}
return {
image.docker_image_id: _docker_v1_metadata(namespace_name, repo_name, image)
for image in images_query}
def get_parents_docker_v1_metadata(self, namespace_name, repo_name, docker_image_id):
repo_image = model.image.get_repo_image(namespace_name, repo_name, docker_image_id)
@@ -367,21 +118,16 @@ class PreOCIModel(DockerRegistryV2DataInterface):
def save_manifest(self, namespace_name, repo_name, tag_name, leaf_layer_docker_id,
manifest_digest, manifest_bytes):
(_, newly_created) = model.tag.store_tag_manifest(namespace_name, repo_name, tag_name,
leaf_layer_docker_id, manifest_digest,
manifest_bytes)
(_, newly_created) = model.tag.store_tag_manifest(
namespace_name, repo_name, tag_name, leaf_layer_docker_id, manifest_digest, manifest_bytes)
return newly_created
def repository_tags(self, namespace_name, repo_name, limit, offset):
def _tag_view(tag):
return Tag(
name=tag.name,
repository=RepositoryReference(
id=tag.repository_id,
name=repo_name,
namespace_name=namespace_name,
)
)
return Tag(name=tag.name, repository=RepositoryReference(
id=tag.repository_id,
name=repo_name,
namespace_name=namespace_name,))
tags_query = model.tag.list_repository_tags(namespace_name, repo_name)
tags_query = tags_query.limit(limit).offset(offset)
@@ -396,7 +142,8 @@ class PreOCIModel(DockerRegistryV2DataInterface):
query = query.limit(limit).offset(offset)
return [_repository_for_repo(repo) for repo in query]
def create_blob_upload(self, namespace_name, repo_name, upload_uuid, location_name, storage_metadata):
def create_blob_upload(self, namespace_name, repo_name, upload_uuid, location_name,
storage_metadata):
try:
model.blob.initiate_upload(namespace_name, repo_name, upload_uuid, location_name,
storage_metadata)
@@ -421,8 +168,7 @@ class PreOCIModel(DockerRegistryV2DataInterface):
piece_sha_state=found.piece_sha_state,
piece_hashes=found.piece_hashes,
location_name=found.location.name,
storage_metadata=found.storage_metadata,
)
storage_metadata=found.storage_metadata,)
def update_blob_upload(self, blob_upload):
# Lookup the blob upload object.
@@ -451,17 +197,14 @@ class PreOCIModel(DockerRegistryV2DataInterface):
def create_blob_and_temp_tag(self, namespace_name, repo_name, blob_digest, blob_upload,
expiration_sec):
location_obj = model.storage.get_image_location_for_name(blob_upload.location_name)
blob_record = model.blob.store_blob_record_and_temp_link(namespace_name, repo_name,
blob_digest, location_obj.id,
blob_upload.byte_count,
expiration_sec,
blob_upload.uncompressed_byte_count)
blob_record = model.blob.store_blob_record_and_temp_link(
namespace_name, repo_name, blob_digest, location_obj.id, blob_upload.byte_count,
expiration_sec, blob_upload.uncompressed_byte_count)
return Blob(
uuid=blob_record.uuid,
digest=blob_digest,
size=blob_upload.byte_count,
locations=[blob_upload.location_name],
)
locations=[blob_upload.location_name],)
def lookup_blobs_by_digest(self, namespace_name, repo_name, digests):
def _blob_view(blob_record):
@@ -469,7 +212,7 @@ class PreOCIModel(DockerRegistryV2DataInterface):
uuid=blob_record.uuid,
digest=blob_record.content_checksum,
size=blob_record.image_size,
locations=None, # Note: Locations is None in this case.
locations=None, # Note: Locations is None in this case.
)
repo = model.repository.get_repository(namespace_name, repo_name)
@@ -485,8 +228,7 @@ class PreOCIModel(DockerRegistryV2DataInterface):
uuid=blob_record.uuid,
digest=digest,
size=blob_record.image_size,
locations=blob_record.locations,
)
locations=blob_record.locations,)
except model.BlobDoesNotExist:
return None
@@ -527,8 +269,7 @@ def _docker_v1_metadata(namespace_name, repo_name, repo_image):
comment=repo_image.comment,
command=repo_image.command,
# TODO: make sure this isn't needed anywhere, as it is expensive to lookup
parent_image_id=None,
)
parent_image_id=None,)
def _repository_for_repo(repo):
@@ -540,8 +281,7 @@ def _repository_for_repo(repo):
description=repo.description,
is_public=model.repository.is_repository_public(repo),
kind=model.repository.get_repo_kind_name(repo),
trust_enabled=repo.trust_enabled,
)
trust_enabled=repo.trust_enabled,)
pre_oci_model = PreOCIModel()
data_model = PreOCIModel()
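
The module-level singleton is what the endpoint modules import (it was previously exported as pre_oci_model). A minimal sketch of the assumed consumption pattern; the namespace and repository names are hypothetical:

from endpoints.v2.models_pre_oci import data_model as model

repo = model.get_repository('devtable', 'simple')      # Repository namedtuple, or None
if repo is not None and repo.kind == 'image':
    tags = model.repository_tags('devtable', 'simple', limit=10, offset=0)
    tag_names = [tag.name for tag in tags]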


@@ -2,9 +2,10 @@ from flask import jsonify
from auth.registry_jwt_auth import process_registry_jwt_auth
from endpoints.common import parse_repository_name
from endpoints.v2 import v2_bp, require_repo_read, paginate
from endpoints.decorators import anon_protect
from data.interfaces.v2 import pre_oci_model as model
from endpoints.v2 import v2_bp, require_repo_read, paginate
from endpoints.v2.models_pre_oci import data_model as model
@v2_bp.route('/<repopath:repository>/tags/list', methods=['GET'])
@parse_repository_name()
@@ -16,8 +17,7 @@ def list_all_tags(namespace_name, repo_name, limit, offset, pagination_callback)
tags = model.repository_tags(namespace_name, repo_name, limit, offset)
response = jsonify({
'name': '{0}/{1}'.format(namespace_name, repo_name),
'tags': [tag.name for tag in tags],
})
'tags': [tag.name for tag in tags],})
pagination_callback(len(tags), response)
return response


@@ -13,18 +13,18 @@ from auth.permissions import (ModifyRepositoryPermission, ReadRepositoryPermissi
from endpoints.decorators import anon_protect
from endpoints.v2 import v2_bp
from endpoints.v2.errors import InvalidLogin, NameInvalid, InvalidRequest, Unsupported, Unauthorized
from data.interfaces.v2 import pre_oci_model as model
from endpoints.v2.models_pre_oci import data_model as model
from util.cache import no_cache
from util.names import parse_namespace_repository, REPOSITORY_NAME_REGEX
from util.security.registry_jwt import (generate_bearer_token, build_context_and_subject, QUAY_TUF_ROOT,
SIGNER_TUF_ROOT, DISABLED_TUF_ROOT)
from util.security.registry_jwt import (generate_bearer_token, build_context_and_subject,
QUAY_TUF_ROOT, SIGNER_TUF_ROOT, DISABLED_TUF_ROOT)
logger = logging.getLogger(__name__)
TOKEN_VALIDITY_LIFETIME_S = 60 * 60 # 1 hour
SCOPE_REGEX_TEMPLATE = r'^repository:((?:{}\/)?((?:[\.a-zA-Z0-9_\-]+\/)*[\.a-zA-Z0-9_\-]+)):((?:push|pull|\*)(?:,(?:push|pull|\*))*)$'
@lru_cache(maxsize=1)
def get_scope_regex():
hostname = re.escape(app.config['SERVER_HOSTNAME'])
@@ -64,8 +64,7 @@ def generate_registry_jwt(auth_result):
access = []
user_event_data = {
'action': 'login',
}
'action': 'login',}
tuf_root = DISABLED_TUF_ROOT
if len(scope_param) > 0:
@@ -101,8 +100,8 @@ def generate_registry_jwt(auth_result):
repo_is_public = repo is not None and repo.is_public
invalid_repo_message = ''
if repo is not None and repo.kind != 'image':
invalid_repo_message = (('This repository is for managing %s resources ' +
'and not container images.') % repo.kind)
invalid_repo_message = ((
'This repository is for managing %s resources ' + 'and not container images.') % repo.kind)
if 'push' in actions:
# If there is no valid user or token, then the repository cannot be
@@ -150,8 +149,7 @@ def generate_registry_jwt(auth_result):
access.append({
'type': 'repository',
'name': registry_and_repo,
'actions': final_actions,
})
'actions': final_actions,})
# Set the user event data for the auth.
if 'push' in final_actions:
@@ -164,8 +162,7 @@ def generate_registry_jwt(auth_result):
user_event_data = {
'action': user_action,
'repository': reponame,
'namespace': namespace,
}
'namespace': namespace,}
tuf_root = get_tuf_root(repo, namespace, reponame)
elif user is None and token is None:
@@ -179,7 +176,8 @@ def generate_registry_jwt(auth_result):
event.publish_event_data('docker-cli', user_event_data)
# Build the signed JWT.
context, subject = build_context_and_subject(user=user, token=token, oauthtoken=oauthtoken, tuf_root=tuf_root)
context, subject = build_context_and_subject(user=user, token=token, oauthtoken=oauthtoken,
tuf_root=tuf_root)
token = generate_bearer_token(audience_param, subject, context, access,
TOKEN_VALIDITY_LIFETIME_S, instance_keys)
return jsonify({'token': token})
@@ -188,7 +186,7 @@ def generate_registry_jwt(auth_result):
def get_tuf_root(repo, namespace, reponame):
if not features.SIGNING or repo is None or not repo.trust_enabled:
return DISABLED_TUF_ROOT
# Users with write access to a repo will see signer-rooted TUF metadata
if ModifyRepositoryPermission(namespace, reponame).can():
return SIGNER_TUF_ROOT
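
For reference, the scope strings that SCOPE_REGEX_TEMPLATE parses look like the example below. This is a sketch with a hypothetical hostname and repository, showing what the capture groups yield:

import re

SCOPE_REGEX_TEMPLATE = r'^repository:((?:{}\/)?((?:[\.a-zA-Z0-9_\-]+\/)*[\.a-zA-Z0-9_\-]+)):((?:push|pull|\*)(?:,(?:push|pull|\*))*)$'
scope_regex = re.compile(SCOPE_REGEX_TEMPLATE.format(re.escape('quay.example.com')))

match = scope_regex.match('repository:devtable/simple:pull,push')
assert match.group(1) == 'devtable/simple'    # registry-qualified (or bare) repository
assert match.group(2) == 'devtable/simple'    # namespace/repository path
assert match.group(3) == 'pull,push'          # requested actions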