More changes for registry-v2 in python.
Implement the minimal changes to the local filesystem storage driver and feed them through the distributed storage driver. Create a digest package which contains digest_tools and checksums. Fix the tests to use the new v1 endpoint locations. Fix repository.delete_instance to properly filter the generated queries to avoid most subquery deletes, but still generate them when not explicitly filtered.
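The repository.delete_instance change is not visible in the hunks below. As a rough, hypothetical sketch only (peewee-style models; the Tag/Image names and the repository foreign keys are illustrative, not the actual Quay schema), the idea is:

def delete_repository(repo, Tag, Image):
  # Explicitly filtered deletes become flat "DELETE FROM ... WHERE repository_id = ?"
  # statements, one per dependent table.
  Tag.delete().where(Tag.repository == repo).execute()
  Image.delete().where(Image.repository == repo).execute()

  # Anything not explicitly filtered still goes through the recursive delete,
  # which is where the subquery-style deletes are generated.
  repo.delete_instance(recursive=True)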
parent acbcc2e206
commit bea8b9ac53

23 changed files with 397 additions and 179 deletions
@@ -10,7 +10,8 @@ from time import time
from app import storage as store, image_diff_queue, app
from auth.auth import process_auth, extract_namespace_repo_from_session
from auth.auth_context import get_authenticated_user, get_grant_user_context
from util import checksums, changes
from digest import checksums
from util import changes
from util.http import abort, exact_abort
from auth.permissions import (ReadRepositoryPermission,
                              ModifyRepositoryPermission)
@@ -59,4 +59,4 @@ def v2_support_enabled():

from endpoints.v2 import v2auth
from endpoints.v2 import manifest
from endpoints.v2 import blobs
from endpoints.v2 import blob
endpoints/v2/blob.py  (new file, 77 lines)

@@ -0,0 +1,77 @@
import logging

from flask import make_response, url_for, request

import data.model.blob

from app import storage
from digest import digest_tools
from endpoints.v2 import v2_bp, require_repo_read, require_repo_write, get_input_stream
from auth.jwt_auth import process_jwt_auth
from endpoints.decorators import anon_protect
from util.http import abort


logger = logging.getLogger(__name__)


@v2_bp.route('/<namespace>/<repo_name>/blobs/<regex("' + digest_tools.DIGEST_PATTERN + '"):digest>',
             methods=['HEAD'])
@process_jwt_auth
@require_repo_read
@anon_protect
def check_blob_existence(namespace, repo_name, digest):
  try:
    found = data.model.blob.get_blob_by_digest(digest)

    # The response body must be empty for a successful HEAD request
    return make_response('')
  except data.model.blob.BlobDoesNotExist:
    abort(404)


@v2_bp.route('/<namespace>/<repo_name>/blobs/<regex("' + digest_tools.DIGEST_PATTERN + '"):digest>',
             methods=['GET'])
@process_jwt_auth
@require_repo_read
@anon_protect
def download_blob(namespace, repo_name, digest):
  return make_response('')


@v2_bp.route('/<namespace>/<repo_name>/blobs/uploads/', methods=['POST'])
@process_jwt_auth
@require_repo_write
@anon_protect
def start_blob_upload(namespace, repo_name):
  new_upload_uuid = storage.initiate_chunked_upload(storage.preferred_locations[0])
  accepted = make_response('', 202)
  accepted.headers['Location'] = url_for('v2.upload_chunk', namespace=namespace,
                                         repo_name=repo_name, upload_uuid=new_upload_uuid)
  accepted.headers['Range'] = 'bytes=0-0'
  accepted.headers['Docker-Upload-UUID'] = new_upload_uuid
  return accepted


@v2_bp.route('/<namespace>/<repo_name>/blobs/uploads/<upload_uuid>', methods=['PUT'])
@process_jwt_auth
@require_repo_write
@anon_protect
def upload_chunk(namespace, repo_name, upload_uuid):
  digest = request.args.get('digest', None)
  upload_location = storage.preferred_locations[0]
  bytes_written = storage.stream_upload_chunk(upload_location, upload_uuid, 0, -1,
                                              get_input_stream(request))

  if digest is not None:
    final_blob_location = digest_tools.content_path(digest)
    storage.complete_chunked_upload(upload_location, upload_uuid, final_blob_location, digest)
    data.model.blob.store_blob_record(digest, upload_location)

    response = make_response('', 201)
    response.headers['Docker-Content-Digest'] = digest
    response.headers['Location'] = url_for('v2.download_blob', namespace=namespace,
                                           repo_name=repo_name, digest=digest)
    return response

  return make_response('', 202)
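For context, start_blob_upload and upload_chunk above implement the registry v2 monolithic upload flow: a POST opens an upload and returns its location, and a single PUT carrying ?digest= finishes it. A hypothetical client-side walkthrough (the host, repository, and token are placeholders, and the blueprint is assumed to be mounted under /v2):

import hashlib
import requests

blob = b'example layer bytes'
digest = 'sha256:' + hashlib.sha256(blob).hexdigest()
auth = {'Authorization': 'Bearer <jwt>'}

# 1. Start an upload; the 202 response carries the upload URL and UUID.
start = requests.post('https://registry.example.com/v2/myorg/myrepo/blobs/uploads/', headers=auth)

# 2. The Location header points at the upload endpoint (it may be a relative path).
upload_url = 'https://registry.example.com' + start.headers['Location']

# 3. Send the whole blob in one PUT with the expected digest; a 201 means the bytes
#    were written under digest_tools.content_path(digest) and a blob record was stored.
done = requests.put(upload_url, params={'digest': digest}, data=blob, headers=auth)
assert done.status_code == 201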
@@ -1,20 +0,0 @@
import logging

from flask import make_response

from endpoints.v2 import v2_bp, require_repo_read, require_repo_write, require_repo_admin
from auth.jwt_auth import process_jwt_auth
from auth.permissions import ReadRepositoryPermission
from endpoints.decorators import anon_protect


logger = logging.getLogger(__name__)


@v2_bp.route('/<namespace>/<repo_name>/blobs/<tarsum>', methods=['HEAD'])
@process_jwt_auth
@require_repo_read
@anon_protect
def check_blob_existence(namespace, repo_name, tarsum):
  logger.debug('Fetching blob with tarsum: %s', tarsum)
  return make_response('Blob {0}'.format(tarsum))
@@ -1,46 +0,0 @@
import re
import os.path
import hashlib

from collections import namedtuple


Digest = namedtuple('Digest', ['is_tarsum', 'tarsum_version', 'hash_alg', 'hash_bytes'])


DIGEST_PATTERN = r'(tarsum\.(v[\w]+)\+)?([\w]+):([0-9a-f]+)'
DIGEST_REGEX = re.compile(DIGEST_PATTERN)


class InvalidDigestException(RuntimeError):
  pass


def parse_digest(digest):
  """ Returns the digest parsed out to its components. """
  match = DIGEST_REGEX.match(digest)
  if match is None or match.end() != len(digest):
    raise InvalidDigestException('Not a valid digest: %s', digest)

  is_tarsum = match.group(1) is not None
  return Digest(is_tarsum, match.group(2), match.group(3), match.group(4))


def content_path(digest):
  """ Returns a relative path to the parsed digest. """
  parsed = parse_digest(digest)
  components = []

  if parsed.is_tarsum:
    components.extend(['tarsum', parsed.tarsum_version])

  prefix = parsed.hash_bytes[0:2].zfill(2)
  components.extend([parsed.hash_alg, prefix, parsed.hash_bytes])

  return os.path.join(*components)


def sha256_digest(content):
  """ Returns a sha256 hash of the content bytes in digest form. """
  digest = hashlib.sha256(content)
  return 'sha256:{0}'.format(digest.hexdigest())
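These helpers move into the new digest package; for reference, this is how the parsing and path layout shown above behave (a small usage sketch, with made-up digests):

from digest import digest_tools

# Plain digests have the form <algorithm>:<hex>.
digest_tools.parse_digest('sha256:abcdef0123456789')
# -> Digest(is_tarsum=False, tarsum_version=None, hash_alg='sha256', hash_bytes='abcdef0123456789')
digest_tools.content_path('sha256:abcdef0123456789')
# -> 'sha256/ab/abcdef0123456789'

# Tarsum digests carry a 'tarsum.<version>+' prefix, which becomes two extra path components.
digest_tools.content_path('tarsum.v1+sha256:abcdef0123456789')
# -> 'tarsum/v1/sha256/ab/abcdef0123456789'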
@@ -1,16 +1,15 @@
import logging
import re
import hashlib
import jwt.utils
import yaml

from flask import make_response, request

from app import storage
from auth.jwt_auth import process_jwt_auth
from auth.permissions import ReadRepositoryPermission
from endpoints.decorators import anon_protect
from endpoints.v2 import (v2_bp, require_repo_read, require_repo_write, require_repo_admin,
                          get_input_stream)
from endpoints.v2 import digest_tools
from endpoints.v2 import v2_bp, require_repo_read, require_repo_write, get_input_stream
from digest import digest_tools


logger = logging.getLogger(__name__)
@@ -20,9 +19,53 @@ VALID_TAG_PATTERN = r'[\w][\w.-]{0,127}'
VALID_TAG_REGEX = re.compile(VALID_TAG_PATTERN)


def is_tag_name(reference):
  match = VALID_TAG_REGEX.match(reference)
  return match is not None and match.end() == len(reference)
class SignedManifest(object):
  SIGNATURES_KEY = 'signatures'
  PROTECTED_KEY = 'protected'
  FORMAT_LENGTH_KEY = 'formatLength'
  FORMAT_TAIL_KEY = 'formatTail'
  REPO_NAME_KEY = 'name'
  REPO_TAG_KEY = 'tag'

  def __init__(self, manifest_bytes):
    self._bytes = manifest_bytes
    parsed = yaml.safe_load(manifest_bytes)

    self._signatures = parsed[self.SIGNATURES_KEY]
    self._namespace, self._repo_name = parsed[self.REPO_NAME_KEY].split('/')
    self._tag = parsed[self.REPO_TAG_KEY]

    self._validate()

  def _validate(self):
    pass

  @property
  def signatures(self):
    return self._signatures

  @property
  def namespace(self):
    return self._namespace

  @property
  def repo_name(self):
    return self._repo_name

  @property
  def tag(self):
    return self._tag

  @property
  def payload(self):
    protected = self._signatures[0][self.PROTECTED_KEY]
    parsed_protected = yaml.safe_load(jwt.utils.base64url_decode(protected))
    logger.debug('parsed_protected: %s', parsed_protected)
    signed_content_head = self._bytes[:parsed_protected[self.FORMAT_LENGTH_KEY]]
    logger.debug('signed content head: %s', signed_content_head)
    signed_content_tail = jwt.utils.base64url_decode(parsed_protected[self.FORMAT_TAIL_KEY])
    logger.debug('signed content tail: %s', signed_content_tail)
    return signed_content_head + signed_content_tail


@v2_bp.route('/<namespace>/<repo_name>/manifests/<regex("' + VALID_TAG_PATTERN + '"):tag_name>',
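SignedManifest above consumes the docker signed-manifest layout, in which the trailing JWS block records how to reconstruct the exact bytes that were signed. Illustratively (all field values made up; real manifests also carry fsLayers, history, and real signatures), the parsed document looks like:

{
  'name': 'namespace/repo_name',   # REPO_NAME_KEY, split into namespace and repo
  'tag': 'latest',                 # REPO_TAG_KEY
  'signatures': [                  # SIGNATURES_KEY
    {
      'protected': '<base64url JSON containing formatLength and formatTail>',
      'signature': '...',
    },
  ],
}

# payload == manifest_bytes[:formatLength] + base64url_decode(formatTail), i.e.
# the manifest body exactly as it was before the signatures block was appended.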
@@ -41,23 +84,24 @@ def fetch_manifest_by_tagname(namespace, repo_name, tag_name):
@require_repo_write
@anon_protect
def write_manifest_by_tagname(namespace, repo_name, tag_name):
  manifest_data = request.data
  logger.debug('Manifest data: %s', manifest_data)
  manifest = SignedManifest(request.data)
  manifest_digest = digest_tools.sha256_digest(manifest.payload)

  response = make_response('OK', 202)
  response.headers['Docker-Content-Digest'] = digest_tools.sha256_digest(manifest_data)
  response.headers['Docker-Content-Digest'] = manifest_digest
  response.headers['Location'] = 'https://fun.com'
  return response


@v2_bp.route('/<namespace>/<repo_name>/manifests/<regex("' + digest_tools.DIGEST_PATTERN + '"):tag_digest>',
             methods=['PUT'])
@process_jwt_auth
@require_repo_write
@anon_protect
def write_manifest(namespace, repo_name, tag_digest):
  logger.debug('Writing tag manifest with name: %s', tag_digest)
# @v2_bp.route('/<namespace>/<repo_name>/manifests/<regex("' + digest_tools.DIGEST_PATTERN + '"):tag_digest>',
#              methods=['PUT'])
# @process_jwt_auth
# @require_repo_write
# @anon_protect
# def write_manifest(namespace, repo_name, tag_digest):
#   logger.debug('Writing tag manifest with name: %s', tag_digest)

  manifest_path = digest_tools.content_path(tag_digest)
  storage.stream_write('local_us', manifest_path, get_input_stream(request))
#   manifest_path = digest_tools.content_path(tag_digest)
#   storage.stream_write('local_us', manifest_path, get_input_stream(request))

  return make_response('Manifest {0}'.format(tag_digest))
#   return make_response('Manifest {0}'.format(tag_digest))
@@ -1,12 +0,0 @@
import logging

from endpoints.v2 import v2_bp


logging.getLogger(__name__)


@v2_bp.route()

@process_auth
@anon_protect