use kwargs for parse_repository_name

Jimmy Zelinskie 2016-03-09 16:20:28 -05:00
parent 3b52a255b2
commit bb46cc933d
15 changed files with 285 additions and 270 deletions
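The rename from namespace to namespace_name and the switch from @parse_repository_name to @parse_repository_name() at every call site indicate that the decorator is now a factory which resolves the <repopath:repository> route value into namespace_name and repo_name keyword arguments. The new implementation lives in endpoints/common.py, which is not among the hunks shown below; the following is a minimal sketch of the idea, assuming a simple split on '/' with a library-namespace fallback. The kwarg names namespace_name, repo_name, and repository come from the call sites in this diff; everything else is illustrative.

# Minimal sketch of a kwargs-based parse_repository_name decorator factory.
# Not the actual endpoints/common.py code (which this diff does not show);
# the split and fallback behaviour are assumptions.
from functools import wraps

def parse_repository_name(incoming_kwarg='repository'):
  def decorator(func):
    @wraps(func)
    def wrapper(*args, **kwargs):
      # Flask passes the <repopath:repository> route value as a keyword arg.
      repository = kwargs.pop(incoming_kwarg)
      if '/' in repository:
        namespace_name, repo_name = repository.split('/', 1)
      else:
        # Assumed fallback for single-segment names (library namespace).
        namespace_name, repo_name = 'library', repository
      return func(*args, namespace_name=namespace_name, repo_name=repo_name,
                  **kwargs)
    return wrapper
  return decorator

With this shape, each view keeps explicit namespace_name and repo_name parameters while its route only declares <repopath:repository>, which is why every @parse_repository_name call site in the hunks below gains parentheses and why the permission wrapper's signature changes in step.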

View file

@@ -41,13 +41,14 @@ def handle_registry_v2_exception(error):
def _require_repo_permission(permission_class, allow_public=False):
def wrapper(func):
@wraps(func)
def wrapped(namespace, repo_name, *args, **kwargs):
logger.debug('Checking permission %s for repo: %s/%s', permission_class, namespace, repo_name)
permission = permission_class(namespace, repo_name)
def wrapped(namespace_name, repo_name, *args, **kwargs):
logger.debug('Checking permission %s for repo: %s/%s', permission_class,
namespace_name, repo_name)
permission = permission_class(namespace_name, repo_name)
if (permission.can() or
(allow_public and
model.repository.repository_is_public(namespace, repo_name))):
return func(namespace, repo_name, *args, **kwargs)
model.repository.repository_is_public(namespace_name, repo_name))):
return func(namespace_name, repo_name, *args, **kwargs)
raise Unauthorized()
return wrapped
return wrapper

View file

@@ -9,6 +9,7 @@ from app import storage, app
from auth.registry_jwt_auth import process_registry_jwt_auth
from data import model, database
from digest import digest_tools
from endpoints.common import parse_repository_name
from endpoints.v2 import v2_bp, require_repo_read, require_repo_write, get_input_stream
from endpoints.v2.errors import (BlobUnknown, BlobUploadInvalid, BlobUploadUnknown, Unsupported,
NameUnknown)
@@ -17,7 +18,6 @@ from util.cache import cache_control
from util.registry.filelike import wrap_with_handler, StreamSlice
from util.registry.gzipstream import calculate_size_handler
from util.registry.torrent import PieceHasher
from endpoints.common import parse_repository_name
from storage.basestorage import InvalidChunkException
@@ -34,12 +34,12 @@ class _InvalidRangeHeader(Exception):
pass
def _base_blob_fetch(namespace, repo_name, digest):
def _base_blob_fetch(namespace_name, repo_name, digest):
""" Some work that is common to both GET and HEAD requests. Callers MUST check for proper
authorization before calling this method.
"""
try:
found = model.blob.get_repo_blob_by_digest(namespace, repo_name, digest)
found = model.blob.get_repo_blob_by_digest(namespace_name, repo_name, digest)
except model.BlobDoesNotExist:
raise BlobUnknown()
@@ -58,12 +58,12 @@ def _base_blob_fetch(namespace, repo_name, digest):
@v2_bp.route(BLOB_DIGEST_ROUTE, methods=['HEAD'])
@process_registry_jwt_auth
@parse_repository_name
@parse_repository_name()
@require_repo_read
@anon_protect
@cache_control(max_age=31436000)
def check_blob_exists(namespace, repo_name, digest):
found, headers = _base_blob_fetch(namespace, repo_name, digest)
def check_blob_exists(namespace_name, repo_name, digest):
found, headers = _base_blob_fetch(namespace_name, repo_name, digest)
response = make_response('')
response.headers.extend(headers)
@@ -74,12 +74,12 @@ def check_blob_exists(namespace, repo_name, digest):
@v2_bp.route(BLOB_DIGEST_ROUTE, methods=['GET'])
@process_registry_jwt_auth
@parse_repository_name
@parse_repository_name()
@require_repo_read
@anon_protect
@cache_control(max_age=31536000)
def download_blob(namespace, repo_name, digest):
found, headers = _base_blob_fetch(namespace, repo_name, digest)
def download_blob(namespace_name, repo_name, digest):
found, headers = _base_blob_fetch(namespace_name, repo_name, digest)
path = model.storage.get_layer_path(found)
logger.debug('Looking up the direct download URL for path: %s', path)
@@ -108,15 +108,15 @@ def _render_range(num_uploaded_bytes, with_bytes_prefix=True):
@v2_bp.route('/<repopath:repository>/blobs/uploads/', methods=['POST'])
@process_registry_jwt_auth
@parse_repository_name
@parse_repository_name()
@require_repo_write
@anon_protect
def start_blob_upload(namespace, repo_name):
def start_blob_upload(namespace_name, repo_name):
location_name = storage.preferred_locations[0]
new_upload_uuid, upload_metadata = storage.initiate_chunked_upload(location_name)
try:
model.blob.initiate_upload(namespace, repo_name, new_upload_uuid, location_name,
model.blob.initiate_upload(namespace_name, repo_name, new_upload_uuid, location_name,
upload_metadata)
except database.Repository.DoesNotExist:
raise NameUnknown()
@@ -126,7 +126,7 @@ def start_blob_upload(namespace, repo_name):
# The user will send the blob data in another request
accepted = make_response('', 202)
accepted.headers['Location'] = url_for('v2.upload_chunk',
repository='%s/%s' % (namespace, repo_name),
repository='%s/%s' % (namespace_name, repo_name),
upload_uuid=new_upload_uuid)
accepted.headers['Range'] = _render_range(0)
@@ -134,22 +134,22 @@ def start_blob_upload(namespace, repo_name):
return accepted
else:
# The user plans to send us the entire body right now
uploaded, error = _upload_chunk(namespace, repo_name, new_upload_uuid)
uploaded, error = _upload_chunk(namespace_name, repo_name, new_upload_uuid)
uploaded.save()
if error:
_range_not_satisfiable(uploaded.byte_count)
return _finish_upload(namespace, repo_name, uploaded, digest)
return _finish_upload(namespace_name, repo_name, uploaded, digest)
@v2_bp.route('/<repopath:repository>/blobs/uploads/<upload_uuid>', methods=['GET'])
@process_registry_jwt_auth
@parse_repository_name
@parse_repository_name()
@require_repo_write
@anon_protect
def fetch_existing_upload(namespace, repo_name, upload_uuid):
def fetch_existing_upload(namespace_name, repo_name, upload_uuid):
try:
found = model.blob.get_blob_upload(namespace, repo_name, upload_uuid)
found = model.blob.get_blob_upload(namespace_name, repo_name, upload_uuid)
except model.InvalidBlobUpload:
raise BlobUploadUnknown()
@@ -189,12 +189,12 @@ def _parse_range_header(range_header_text):
return (start, length)
def _upload_chunk(namespace, repo_name, upload_uuid):
def _upload_chunk(namespace_name, repo_name, upload_uuid):
""" Common code among the various uploading paths for appending data to blobs.
Callers MUST call .save() or .delete_instance() on the returned database object.
"""
try:
found = model.blob.get_blob_upload(namespace, repo_name, upload_uuid)
found = model.blob.get_blob_upload(namespace_name, repo_name, upload_uuid)
except model.InvalidBlobUpload:
raise BlobUploadUnknown()
@@ -280,7 +280,7 @@ def _upload_chunk(namespace, repo_name, upload_uuid):
return found, error
def _finish_upload(namespace, repo_name, upload_obj, expected_digest):
def _finish_upload(namespace_name, repo_name, upload_obj, expected_digest):
# Verify that the digest's SHA matches that of the uploaded data.
computed_digest = digest_tools.sha256_digest_from_hashlib(upload_obj.sha_state)
if not digest_tools.digests_equal(computed_digest, expected_digest):
@@ -303,7 +303,7 @@ def _finish_upload(namespace, repo_name, upload_obj, expected_digest):
final_blob_location, upload_obj.storage_metadata)
# Mark the blob as uploaded.
blob_storage = model.blob.store_blob_record_and_temp_link(namespace, repo_name, expected_digest,
blob_storage = model.blob.store_blob_record_and_temp_link(namespace_name, repo_name, expected_digest,
upload_obj.location,
upload_obj.byte_count,
app.config['PUSH_TEMP_TAG_EXPIRATION_SEC'],
@@ -319,18 +319,18 @@ def _finish_upload(namespace, repo_name, upload_obj, expected_digest):
response = make_response('', 201)
response.headers['Docker-Content-Digest'] = expected_digest
response.headers['Location'] = url_for('v2.download_blob',
repository='%s/%s' % (namespace, repo_name),
repository='%s/%s' % (namespace_name, repo_name),
digest=expected_digest)
return response
@v2_bp.route('/<repopath:repository>/blobs/uploads/<upload_uuid>', methods=['PATCH'])
@process_registry_jwt_auth
@parse_repository_name
@parse_repository_name()
@require_repo_write
@anon_protect
def upload_chunk(namespace, repo_name, upload_uuid):
upload, error = _upload_chunk(namespace, repo_name, upload_uuid)
def upload_chunk(namespace_name, repo_name, upload_uuid):
upload, error = _upload_chunk(namespace_name, repo_name, upload_uuid)
upload.save()
if error:
@@ -345,31 +345,31 @@ def upload_chunk(namespace, repo_name, upload_uuid):
@v2_bp.route('/<repopath:repository>/blobs/uploads/<upload_uuid>', methods=['PUT'])
@process_registry_jwt_auth
@parse_repository_name
@parse_repository_name()
@require_repo_write
@anon_protect
def monolithic_upload_or_last_chunk(namespace, repo_name, upload_uuid):
def monolithic_upload_or_last_chunk(namespace_name, repo_name, upload_uuid):
digest = request.args.get('digest', None)
if digest is None:
raise BlobUploadInvalid()
found, error = _upload_chunk(namespace, repo_name, upload_uuid)
found, error = _upload_chunk(namespace_name, repo_name, upload_uuid)
if error:
found.save()
_range_not_satisfiable(found.byte_count)
return _finish_upload(namespace, repo_name, found, digest)
return _finish_upload(namespace_name, repo_name, found, digest)
@v2_bp.route('/<repopath:repository>/blobs/uploads/<upload_uuid>', methods=['DELETE'])
@parse_repository_name()
@process_registry_jwt_auth
@parse_repository_name
@require_repo_write
@anon_protect
def cancel_upload(namespace, repo_name, upload_uuid):
def cancel_upload(namespace_name, repo_name, upload_uuid):
try:
found = model.blob.get_blob_upload(namespace, repo_name, upload_uuid)
found = model.blob.get_blob_upload(namespace_name, repo_name, upload_uuid)
except model.InvalidBlobUpload:
raise BlobUploadUnknown()
@@ -384,11 +384,9 @@ def cancel_upload(namespace, repo_name, upload_uuid):
@v2_bp.route('/<repopath:repository>/blobs/<digest>', methods=['DELETE'])
@process_registry_jwt_auth
@parse_repository_name
@parse_repository_name()
@require_repo_write
@anon_protect
def delete_digest(namespace, repo_name, upload_uuid):
def delete_digest(namespace_name, repo_name, upload_uuid):
# We do not support deleting arbitrary digests, as they break repo images.
raise Unsupported()

View file

@@ -1,18 +1,22 @@
import logging
import jwt.utils
import json
import features
import hashlib
from peewee import IntegrityError
from flask import make_response, request, url_for
from collections import namedtuple, OrderedDict
from jwkest.jws import SIGNER_ALGS, keyrep
from datetime import datetime
from functools import wraps
import jwt.utils
from peewee import IntegrityError
from flask import make_response, request, url_for
from jwkest.jws import SIGNER_ALGS, keyrep
import features
from app import docker_v2_signing_key, app
from auth.registry_jwt_auth import process_registry_jwt_auth
from endpoints.common import parse_repository_name
from endpoints.decorators import anon_protect
from endpoints.v2 import v2_bp, require_repo_read, require_repo_write
from endpoints.v2.errors import (BlobUnknown, ManifestInvalid, ManifestUnknown, TagInvalid,
@@ -22,7 +26,6 @@ from endpoints.notificationhelper import spawn_notification
from digest import digest_tools
from data import model
from data.database import RepositoryTag
from endpoints.common import parse_repository_name
logger = logging.getLogger(__name__)
@@ -84,7 +87,8 @@ class SignedManifest(object):
def _validate(self):
for signature in self._signatures:
bytes_to_verify = '{0}.{1}'.format(signature['protected'], jwt.utils.base64url_encode(self.payload))
bytes_to_verify = '{0}.{1}'.format(signature['protected'],
jwt.utils.base64url_encode(self.payload))
signer = SIGNER_ALGS[signature['header']['alg']]
key = keyrep(signature['header']['jwk'])
gk = key.get_key()
@@ -163,9 +167,9 @@ class SignedManifest(object):
class SignedManifestBuilder(object):
""" Class which represents a manifest which is currently being built.
"""
def __init__(self, namespace, repo_name, tag, architecture='amd64', schema_ver=1):
repo_name_key = '{0}/{1}'.format(namespace, repo_name)
if namespace == '':
def __init__(self, namespace_name, repo_name, tag, architecture='amd64', schema_ver=1):
repo_name_key = '{0}/{1}'.format(namespace_name, repo_name)
if namespace_name == '':
repo_name_key = repo_name
self._base_payload = {
@@ -238,26 +242,26 @@ class SignedManifestBuilder(object):
@v2_bp.route(MANIFEST_TAGNAME_ROUTE, methods=['GET'])
@process_registry_jwt_auth
@parse_repository_name
@parse_repository_name()
@require_repo_read
@anon_protect
def fetch_manifest_by_tagname(namespace, repo_name, manifest_ref):
def fetch_manifest_by_tagname(namespace_name, repo_name, manifest_ref):
try:
manifest = model.tag.load_tag_manifest(namespace, repo_name, manifest_ref)
manifest = model.tag.load_tag_manifest(namespace_name, repo_name, manifest_ref)
except model.InvalidManifestException:
try:
model.tag.get_active_tag(namespace, repo_name, manifest_ref)
model.tag.get_active_tag(namespace_name, repo_name, manifest_ref)
except RepositoryTag.DoesNotExist:
raise ManifestUnknown()
raise ManifestUnknown()
try:
manifest = _generate_and_store_manifest(namespace, repo_name, manifest_ref)
manifest = _generate_and_store_manifest(namespace_name, repo_name, manifest_ref)
except model.DataModelException:
logger.exception('Exception when generating manifest for %s/%s:%s', namespace, repo_name,
logger.exception('Exception when generating manifest for %s/%s:%s', namespace_name, repo_name,
manifest_ref)
raise ManifestUnknown()
repo = model.repository.get_repository(namespace, repo_name)
repo = model.repository.get_repository(namespace_name, repo_name)
if repo is not None:
track_and_log('pull_repo', repo, analytics_name='pull_repo_100x', analytics_sample=0.01)
@@ -269,17 +273,17 @@ def fetch_manifest_by_tagname(namespace, repo_name, manifest_ref):
@v2_bp.route(MANIFEST_DIGEST_ROUTE, methods=['GET'])
@process_registry_jwt_auth
@parse_repository_name
@parse_repository_name()
@require_repo_read
@anon_protect
def fetch_manifest_by_digest(namespace, repo_name, manifest_ref):
def fetch_manifest_by_digest(namespace_name, repo_name, manifest_ref):
try:
manifest = model.tag.load_manifest_by_digest(namespace, repo_name, manifest_ref)
manifest = model.tag.load_manifest_by_digest(namespace_name, repo_name, manifest_ref)
except model.InvalidManifestException:
# Without a tag name to reference, we can't make an attempt to generate the manifest
raise ManifestUnknown()
repo = model.repository.get_repository(namespace, repo_name)
repo = model.repository.get_repository(namespace_name, repo_name)
if repo is not None:
track_and_log('pull_repo', repo)
@@ -301,11 +305,11 @@ def _reject_manifest2_schema2(func):
@v2_bp.route(MANIFEST_TAGNAME_ROUTE, methods=['PUT'])
@process_registry_jwt_auth
@parse_repository_name
@parse_repository_name()
@require_repo_write
@anon_protect
@_reject_manifest2_schema2
def write_manifest_by_tagname(namespace, repo_name, manifest_ref):
def write_manifest_by_tagname(namespace_name, repo_name, manifest_ref):
try:
manifest = SignedManifest(request.data)
except ValueError:
@@ -314,16 +318,16 @@ def write_manifest_by_tagname(namespace, repo_name, manifest_ref):
if manifest.tag != manifest_ref:
raise TagInvalid()
return _write_manifest(namespace, repo_name, manifest)
return _write_manifest(namespace_name, repo_name, manifest)
@v2_bp.route(MANIFEST_DIGEST_ROUTE, methods=['PUT'])
@process_registry_jwt_auth
@parse_repository_name
@parse_repository_name()
@require_repo_write
@anon_protect
@_reject_manifest2_schema2
def write_manifest_by_digest(namespace, repo_name, manifest_ref):
def write_manifest_by_digest(namespace_name, repo_name, manifest_ref):
try:
manifest = SignedManifest(request.data)
except ValueError:
@@ -332,7 +336,7 @@ def write_manifest_by_digest(namespace, repo_name, manifest_ref):
if manifest.digest != manifest_ref:
raise ManifestInvalid(detail={'message': 'manifest digest mismatch'})
return _write_manifest(namespace, repo_name, manifest)
return _write_manifest(namespace_name, repo_name, manifest)
def _updated_v1_metadata(v1_metadata_json, updated_id_map):
@@ -350,21 +354,21 @@ def _updated_v1_metadata(v1_metadata_json, updated_id_map):
return json.dumps(parsed)
def _write_manifest(namespace, repo_name, manifest):
def _write_manifest(namespace_name, repo_name, manifest):
# Ensure that the manifest is for this repository. If the manifest's namespace is empty, then
# it is for the library namespace and we need an extra check.
if (manifest.namespace == '' and features.LIBRARY_SUPPORT and
namespace == app.config['LIBRARY_NAMESPACE']):
namespace_name == app.config['LIBRARY_NAMESPACE']):
# This is a library manifest. All good.
pass
elif manifest.namespace != namespace:
elif manifest.namespace != namespace_name:
raise NameInvalid()
if manifest.repo_name != repo_name:
raise NameInvalid()
# Ensure that the repository exists.
repo = model.repository.get_repository(namespace, repo_name)
repo = model.repository.get_repository(namespace_name, repo_name)
if repo is None:
raise NameInvalid()
@@ -411,8 +415,8 @@ def _write_manifest(namespace, repo_name, manifest):
v1_metadata_str = mdata.v1_metadata_str.encode('utf-8')
working_docker_id = hashlib.sha256(v1_metadata_str + '@' + digest_str).hexdigest()
logger.debug('Rewriting docker_id %s/%s %s -> %s', namespace, repo_name, v1_mdata.docker_id,
working_docker_id)
logger.debug('Rewriting docker_id %s/%s %s -> %s', namespace_name, repo_name,
v1_mdata.docker_id, working_docker_id)
has_rewritten_ids = True
# Store the new docker id in the map
@@ -447,7 +451,7 @@ def _write_manifest(namespace, repo_name, manifest):
# Store the manifest pointing to the tag.
manifest_digest = manifest.digest
leaf_layer_id = images_map[layers[-1].v1_metadata.docker_id].docker_image_id
model.tag.store_tag_manifest(namespace, repo_name, tag_name, leaf_layer_id, manifest_digest,
model.tag.store_tag_manifest(namespace_name, repo_name, tag_name, leaf_layer_id, manifest_digest,
manifest.bytes)
# Spawn the repo_push event.
@@ -461,29 +465,29 @@ def _write_manifest(namespace, repo_name, manifest):
response = make_response('OK', 202)
response.headers['Docker-Content-Digest'] = manifest_digest
response.headers['Location'] = url_for('v2.fetch_manifest_by_digest',
repository='%s/%s' % (namespace, repo_name),
repository='%s/%s' % (namespace_name, repo_name),
manifest_ref=manifest_digest)
return response
@v2_bp.route(MANIFEST_DIGEST_ROUTE, methods=['DELETE'])
@process_registry_jwt_auth
@parse_repository_name
@parse_repository_name()
@require_repo_write
@anon_protect
def delete_manifest_by_digest(namespace, repo_name, manifest_ref):
def delete_manifest_by_digest(namespace_name, repo_name, manifest_ref):
""" Delete the manifest specified by the digest. Note: there is no equivalent
method for deleting by tag name because it is forbidden by the spec.
"""
try:
manifest = model.tag.load_manifest_by_digest(namespace, repo_name, manifest_ref)
manifest = model.tag.load_manifest_by_digest(namespace_name, repo_name, manifest_ref)
except model.InvalidManifestException:
# Without a tag name to reference, we can't make an attempt to generate the manifest
raise ManifestUnknown()
# Mark the tag as no longer alive.
try:
model.tag.delete_tag(namespace, repo_name, manifest.tag.name)
model.tag.delete_tag(namespace_name, repo_name, manifest.tag.name)
except model.DataModelException:
# Tag is not alive.
raise ManifestUnknown()
@@ -494,15 +498,15 @@ def delete_manifest_by_digest(namespace, repo_name, manifest_ref):
return make_response('', 202)
def _generate_and_store_manifest(namespace, repo_name, tag_name):
def _generate_and_store_manifest(namespace_name, repo_name, tag_name):
# First look up the tag object and its ancestors
image = model.tag.get_tag_image(namespace, repo_name, tag_name)
parents = model.image.get_parent_images(namespace, repo_name, image)
image = model.tag.get_tag_image(namespace_name, repo_name, tag_name)
parents = model.image.get_parent_images(namespace_name, repo_name, image)
# If the manifest is being generated under the library namespace, then we make its namespace
# empty.
manifest_namespace = namespace
if features.LIBRARY_SUPPORT and namespace == app.config['LIBRARY_NAMESPACE']:
manifest_namespace = namespace_name
if features.LIBRARY_SUPPORT and namespace_name == app.config['LIBRARY_NAMESPACE']:
manifest_namespace = ''
# Create and populate the manifest builder
@@ -520,13 +524,12 @@ def _generate_and_store_manifest(namespace, repo_name, tag_name):
# Write the manifest to the DB. If an existing manifest already exists, return the
# one found.
try:
return model.tag.associate_generated_tag_manifest(namespace, repo_name, tag_name,
return model.tag.associate_generated_tag_manifest(namespace_name, repo_name, tag_name,
manifest.digest, manifest.bytes)
except IntegrityError as ie:
logger.debug('Got integrity error: %s', ie)
try:
return model.tag.load_tag_manifest(namespace, repo_name, tag_name)
return model.tag.load_tag_manifest(namespace_name, repo_name, tag_name)
except model.InvalidManifestException:
logger.exception('Exception when generating manifest')
raise model.DataModelException('Could not load or generate manifest')

View file

@@ -1,29 +1,29 @@
from flask import jsonify, url_for
from auth.registry_jwt_auth import process_registry_jwt_auth
from endpoints.common import parse_repository_name
from endpoints.v2 import v2_bp, require_repo_read
from endpoints.v2.errors import NameUnknown
from endpoints.v2.v2util import add_pagination
from endpoints.decorators import anon_protect
from data import model
from endpoints.common import parse_repository_name
@v2_bp.route('/<repopath:repository>/tags/list', methods=['GET'])
@process_registry_jwt_auth
@parse_repository_name
@parse_repository_name()
@require_repo_read
@anon_protect
def list_all_tags(namespace, repo_name):
repository = model.repository.get_repository(namespace, repo_name)
def list_all_tags(namespace_name, repo_name):
repository = model.repository.get_repository(namespace_name, repo_name)
if repository is None:
raise NameUnknown()
query = model.tag.list_repository_tags(namespace, repo_name)
url = url_for('v2.list_all_tags', repository='%s/%s' % (namespace, repo_name))
query = model.tag.list_repository_tags(namespace_name, repo_name)
url = url_for('v2.list_all_tags', repository='%s/%s' % (namespace_name, repo_name))
link, query = add_pagination(query, url)
response = jsonify({
'name': '{0}/{1}'.format(namespace, repo_name),
'name': '{0}/{1}'.format(namespace_name, repo_name),
'tags': [tag.name for tag in query],
})