endpoints.v2: yapf format

parent ecd5996386
commit 92e978fa0f

6 changed files with 56 additions and 70 deletions
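The hunks below only reflow Python source with yapf; no behavior changes. For reference, here is a rough sketch of driving yapf programmatically. The style options are assumptions inferred from the reformatted lines (2-space indents, a roughly 100-column limit, closing brackets coalesced onto the last element), not the project's actual configuration.

```python
# Sketch only: reproduce this kind of reflow with yapf's Python API.
from yapf.yapflib.yapf_api import FormatCode

SOURCE = """\
return_value = jsonify({
  'username': '$token',
  'email': None,
})
"""

# The inline style string is an assumption; the repository may ship its own .style.yapf.
# FormatCode returns (formatted_source, changed) in recent yapf releases.
formatted, changed = FormatCode(
    SOURCE,
    style_config='{based_on_style: pep8, indent_width: 2, column_limit: 100, '
                 'coalesce_brackets: true}')
print(formatted)
```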
@@ -4,7 +4,6 @@ from app import metric_queue, license_validator
 from endpoints.decorators import anon_protect, anon_allowed
 from util.metrics.metricqueue import time_blueprint
 
-
 v1_bp = Blueprint('v1', __name__)
 license_validator.enforce_license_before_request(v1_bp)
 time_blueprint(v1_bp, metric_queue)
@@ -31,5 +30,4 @@ def ping():
 from endpoints.v1 import (
   index,
   registry,
-  tag,
-)
+  tag,)
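In the blueprint module above, `license_validator.enforce_license_before_request(v1_bp)` and `time_blueprint(v1_bp, metric_queue)` both attach per-request hooks to the `v1` blueprint. A minimal sketch of that hook pattern in plain Flask follows; only the names `v1_bp` and `time_blueprint` come from the diff, and the hook bodies are hypothetical.

```python
import time

from flask import Blueprint, g

v1_bp = Blueprint('v1', __name__)


def time_blueprint(bp, metric_queue=None):
  """Hypothetical sketch: attach request-timing hooks to a blueprint."""

  @bp.before_request
  def _start_timer():
    g._request_start_time = time.time()

  @bp.after_request
  def _report_duration(response):
    duration = time.time() - g._request_start_time
    if metric_queue is not None:
      # The real implementation presumably records the duration on metric_queue.
      pass
    response.headers['X-Request-Duration'] = '%.3f' % duration
    return response


time_blueprint(v1_bp, metric_queue=None)
```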
@@ -9,9 +9,9 @@ from flask import request, make_response, jsonify, session
 from app import authentication, userevents, metric_queue
 from auth.auth_context import get_authenticated_user, get_validated_token, get_validated_oauth_token
 from auth.decorators import process_auth
-from auth.permissions import (ModifyRepositoryPermission, UserAdminPermission,
-                              ReadRepositoryPermission, CreateRepositoryPermission,
-                              repository_read_grant, repository_write_grant)
+from auth.permissions import (
+    ModifyRepositoryPermission, UserAdminPermission, ReadRepositoryPermission,
+    CreateRepositoryPermission, repository_read_grant, repository_write_grant)
 from auth.signedgrant import generate_signed_token
 from endpoints.common import parse_repository_name
 from endpoints.decorators import anon_protect, anon_allowed
@@ -22,7 +22,6 @@ from util.audit import track_and_log
 from util.http import abort
 from util.names import REPOSITORY_NAME_REGEX
 
-
 logger = logging.getLogger(__name__)
 
 
@@ -66,7 +65,9 @@ def generate_headers(scope=GrantType.READ_REPOSITORY, add_grant_for_status=None)
       response.headers['X-Docker-Token'] = signature
 
       return response
+
     return wrapper
+
   return decorator_method
 
 
@@ -122,18 +123,15 @@ def get_user():
   if get_validated_oauth_token():
     return jsonify({
       'username': '$oauthtoken',
-      'email': None,
-    })
+      'email': None,})
   elif get_authenticated_user():
     return jsonify({
       'username': get_authenticated_user().username,
-      'email': get_authenticated_user().email,
-    })
+      'email': get_authenticated_user().email,})
   elif get_validated_token():
     return jsonify({
       'username': '$token',
-      'email': None,
-    })
+      'email': None,})
   abort(404)
 
 
@@ -151,8 +149,7 @@ def update_user(username):
 
     return jsonify({
       'username': get_authenticated_user().username,
-      'email': get_authenticated_user().email
-    })
+      'email': get_authenticated_user().email})
 
   abort(403)
 
@@ -181,8 +178,7 @@ def create_repository(namespace_name, repo_name):
   if not modify_perm.can():
     abort(403,
           message='You do not have permission to modify repository %(namespace)s/%(repository)s',
-          issue='no-repo-write-permission',
-          namespace=namespace_name, repository=repo_name)
+          issue='no-repo-write-permission', namespace=namespace_name, repository=repo_name)
   elif repo.kind != 'image':
     msg = 'This repository is for managing %s resources and not container images.' % repo.kind
     abort(405, message=msg, namespace=namespace_name)
@@ -205,8 +201,7 @@ def create_repository(namespace_name, repo_name):
   user_event_data = {
     'action': 'push_start',
     'repository': repo_name,
-    'namespace': namespace_name,
-  }
+    'namespace': namespace_name,}
 
   event = userevents.get_event(get_authenticated_user().username)
   event.publish_event_data('docker-cli', user_event_data)
@@ -237,8 +232,7 @@ def update_images(namespace_name, repo_name):
 
   updated_tags = session.get('pushed_tags', {})
   event_data = {
-    'updated_tags': updated_tags,
-  }
+    'updated_tags': updated_tags,}
 
   track_and_log('push_repo', repo)
   spawn_notification(repo, 'repo_push', event_data)
@@ -329,7 +323,7 @@ def _conduct_repo_search(username, query, limit=25, page=1):
   offset = (page - 1) * limit
 
   if query:
-    matching_repos = model.get_sorted_matching_repositories(query, username, limit=limit+1,
+    matching_repos = model.get_sorted_matching_repositories(query, username, limit=limit + 1,
                                                             offset=offset)
   else:
     matching_repos = []
@@ -340,8 +334,7 @@ def _conduct_repo_search(username, query, limit=25, page=1):
       'name': repo.namespace_name + '/' + repo.name,
       'description': repo.description,
       'is_public': repo.is_public,
-      'href': '/repository/' + repo.namespace_name + '/' + repo.name
-    })
+      'href': '/repository/' + repo.namespace_name + '/' + repo.name})
 
   # Defined: https://docs.docker.com/v1.6/reference/api/registry_api/
   return {
@@ -350,5 +343,4 @@ def _conduct_repo_search(username, query, limit=25, page=1):
     'num_pages': page + 1 if len(matching_repos) > limit else page,
     'page': page,
     'page_size': limit,
-    'results': results,
-  }
+    'results': results,}
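The search helper above fetches `limit + 1` rows so it can tell whether another page exists without a second count query; `num_pages` then becomes `page + 1` when the extra row shows up. A small self-contained sketch of that pattern follows; the fetch callable and field handling are stand-ins, not Quay's API.

```python
def paginate(fetch_rows, query, page=1, limit=25):
  """Fetch one extra row to detect whether a next page exists.

  `fetch_rows(query, limit, offset)` is a stand-in for a call like
  model.get_sorted_matching_repositories(); it only has to return a list.
  """
  offset = (page - 1) * limit
  rows = fetch_rows(query, limit + 1, offset) if query else []

  return {
    'query': query,
    'num_pages': page + 1 if len(rows) > limit else page,
    'page': page,
    'page_size': limit,
    'results': rows[:limit],  # drop the sentinel row before returning
  }


# Example with an in-memory "table":
REPOS = ['repo-%d' % i for i in range(60)]
page_one = paginate(lambda q, l, o: [r for r in REPOS if q in r][o:o + l], 'repo', page=1)
assert page_one['num_pages'] == 2 and len(page_one['results']) == 25
```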
@@ -4,8 +4,9 @@ from collections import namedtuple
 from six import add_metaclass
 
 
-class Repository(namedtuple('Repository', ['id', 'name', 'namespace_name', 'description',
-                                           'is_public', 'kind'])):
+class Repository(
+    namedtuple('Repository', ['id', 'name', 'namespace_name', 'description', 'is_public',
+                              'kind'])):
   """
   Repository represents a namespaced collection of tags.
   :type id: int
@@ -212,7 +213,8 @@ class DockerRegistryV1DataInterface(object):
     pass
 
   @abstractmethod
-  def get_sorted_matching_repositories(self, search_term, filter_username=None, offset=0, limit=25):
+  def get_sorted_matching_repositories(self, search_term, filter_username=None, offset=0,
+                                       limit=25):
     """
     Returns a sorted list of repositories matching the given search term.
     """
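The interface module pairs a `namedtuple` value type with an abstract base class declared through `six.add_metaclass`, so the v1 endpoints can be backed by different data-model implementations (such as the PreOCIModel in the next file). A compressed sketch of that shape, with the field docstrings shortened and the other abstract methods omitted:

```python
from abc import ABCMeta, abstractmethod
from collections import namedtuple

from six import add_metaclass


class Repository(namedtuple('Repository', ['id', 'name', 'namespace_name', 'description',
                                           'is_public', 'kind'])):
  """Immutable value object handed back to the v1 endpoints."""


@add_metaclass(ABCMeta)
class DockerRegistryV1DataInterface(object):
  """Methods the v1 protocol endpoints need from the data layer (abridged)."""

  @abstractmethod
  def get_sorted_matching_repositories(self, search_term, filter_username=None, offset=0,
                                       limit=25):
    """Returns a sorted list of repositories matching the given search term."""
```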
@@ -9,6 +9,7 @@ class PreOCIModel(DockerRegistryV1DataInterface):
   PreOCIModel implements the data model for the v1 Docker Registry protocol using a database schema
   before it was changed to support the OCI specification.
   """
+
   def placement_locations_and_path_docker_v1(self, namespace_name, repo_name, image_id):
     repo_image = model.image.get_repo_image_extended(namespace_name, repo_name, image_id)
     if not repo_image or repo_image.storage is None:
@@ -25,8 +26,7 @@ class PreOCIModel(DockerRegistryV1DataInterface):
       'repo_name': repo_name,
       'image_id': image_id,
       'checksum': repo_image.v1_checksum,
-      'compat_json': repo_image.v1_json_metadata,
-    })
+      'compat_json': repo_image.v1_json_metadata,})
 
   def update_docker_v1_metadata(self, namespace_name, repo_name, image_id, created_date_str,
                                 comment, command, compat_json, parent_image_id=None):
@@ -168,10 +168,10 @@ class PreOCIModel(DockerRegistryV1DataInterface):
   def validate_oauth_token(self, token):
     return bool(model.oauth.validate_access_token(token))
 
-  def get_sorted_matching_repositories(self, search_term, filter_username=None, offset=0, limit=25):
-    repos = model.repository.get_filtered_matching_repositories(search_term,
-                                                                filter_username=filter_username,
-                                                                offset=offset, limit=limit)
+  def get_sorted_matching_repositories(self, search_term, filter_username=None, offset=0,
+                                       limit=25):
+    repos = model.repository.get_filtered_matching_repositories(
+        search_term, filter_username=filter_username, offset=offset, limit=limit)
     return [_repository_for_repo(repo) for repo in repos]
 
 
@@ -183,8 +183,7 @@ def _repository_for_repo(repo):
     namespace_name=repo.namespace_user.username,
     description=repo.description,
     is_public=model.repository.is_repository_public(repo),
-    kind=model.repository.get_repo_kind_name(repo),
-  )
+    kind=model.repository.get_repo_kind_name(repo),)
 
 
 pre_oci_model = PreOCIModel()
@@ -10,8 +10,7 @@ from flask import make_response, request, session, Response, redirect, abort as
 from app import storage as store, app, metric_queue
 from auth.auth_context import get_authenticated_user
 from auth.decorators import extract_namespace_repo_from_session, process_auth
-from auth.permissions import (ReadRepositoryPermission,
-                              ModifyRepositoryPermission)
+from auth.permissions import (ReadRepositoryPermission, ModifyRepositoryPermission)
 from auth.registry_jwt_auth import get_granted_username
 from data import model, database
 from digest import checksums
@@ -24,7 +23,6 @@ from util.registry import gzipstream
 from util.registry.replication import queue_storage_replication
 from util.registry.torrent import PieceHasher
 
-
 logger = logging.getLogger(__name__)
 
 
@@ -38,18 +36,21 @@ def _finish_image(namespace, repository, image_id):
 
 def require_completion(f):
   """This make sure that the image push correctly finished."""
+
   @wraps(f)
   def wrapper(namespace, repository, *args, **kwargs):
     image_id = kwargs['image_id']
     if model.is_image_uploading(namespace, repository, image_id):
-      abort(400, 'Image %(image_id)s is being uploaded, retry later',
-            issue='upload-in-progress', image_id=image_id)
+      abort(400, 'Image %(image_id)s is being uploaded, retry later', issue='upload-in-progress',
+            image_id=image_id)
     return f(namespace, repository, *args, **kwargs)
+
   return wrapper
 
 
 def set_cache_headers(f):
   """Returns HTTP headers suitable for caching."""
+
   @wraps(f)
   def wrapper(*args, **kwargs):
     # Set TTL to 1 year by default
@@ -59,8 +60,7 @@ def set_cache_headers(f):
     headers = {
       'Cache-Control': 'public, max-age={0}'.format(ttl),
       'Expires': expires,
-      'Last-Modified': 'Thu, 01 Jan 1970 00:00:00 GMT',
-    }
+      'Last-Modified': 'Thu, 01 Jan 1970 00:00:00 GMT',}
     if 'If-Modified-Since' in request.headers:
       response = make_response('Not modified', 304)
       response.headers.extend(headers)
@@ -69,6 +69,7 @@ def set_cache_headers(f):
     # Prevent the Cookie to be sent when the object is cacheable
     session.modified = False
     return f(*args, **kwargs)
+
   return wrapper
 
 
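The `set_cache_headers` decorator above lets immutable layer data be served with long-lived cache headers and an unconditional 304 whenever the client sends `If-Modified-Since` (the content never changes, so the date is not compared). A standalone sketch of the same decorator follows; the one-year TTL mirrors the "Set TTL to 1 year" comment in the diff, while the `Expires` formatting and the `headers` kwarg plumbing are assumptions.

```python
from datetime import datetime, timedelta
from functools import wraps

from flask import make_response, request


def set_cache_headers(f):
  """Sketch: serve immutable content with long-lived cache headers."""

  @wraps(f)
  def wrapper(*args, **kwargs):
    ttl = 31536000  # one year, an assumption based on the comment in the diff
    expires = (datetime.utcnow() + timedelta(seconds=ttl)).strftime('%a, %d %b %Y %H:%M:%S GMT')
    headers = {
      'Cache-Control': 'public, max-age={0}'.format(ttl),
      'Expires': expires,
      'Last-Modified': 'Thu, 01 Jan 1970 00:00:00 GMT',
    }
    if 'If-Modified-Since' in request.headers:
      # The content never changes, so any conditional request gets a 304.
      response = make_response('Not modified', 304)
      response.headers.extend(headers)
      return response
    kwargs['headers'] = headers  # hand the headers to the wrapped view
    return f(*args, **kwargs)

  return wrapper
```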
@@ -92,8 +93,7 @@ def head_image_layer(namespace, repository, image_id, headers):
   locations, _ = model.placement_locations_and_path_docker_v1(namespace, repository, image_id)
   if locations is None:
     logger.debug('Could not find any blob placement locations')
-    abort(404, 'Image %(image_id)s not found', issue='unknown-image',
-          image_id=image_id)
+    abort(404, 'Image %(image_id)s not found', issue='unknown-image', image_id=image_id)
 
   # Add the Accept-Ranges header if the storage engine supports resumable
   # downloads.
@@ -129,8 +129,7 @@ def get_image_layer(namespace, repository, image_id, headers):
   logger.debug('Looking up placement locations and path')
   locations, path = model.placement_locations_and_path_docker_v1(namespace, repository, image_id)
   if not locations or not path:
-    abort(404, 'Image %(image_id)s not found', issue='unknown-image',
-          image_id=image_id)
+    abort(404, 'Image %(image_id)s not found', issue='unknown-image', image_id=image_id)
   try:
     logger.debug('Looking up the direct download URL for path: %s', path)
     direct_download_url = store.get_direct_download_url(locations, path)
@@ -145,8 +144,7 @@ def get_image_layer(namespace, repository, image_id, headers):
     return Response(store.stream_read(locations, path), headers=headers)
   except (IOError, AttributeError):
     logger.exception('Image layer data not found')
-    abort(404, 'Image %(image_id)s not found', issue='unknown-image',
-          image_id=image_id)
+    abort(404, 'Image %(image_id)s not found', issue='unknown-image', image_id=image_id)
 
   abort(403)
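As the hunk above shows, `get_image_layer` first asks the storage engine for a direct-download URL and, only when none is available or the read fails, streams the bytes through the registry process itself. A minimal sketch of that redirect-or-stream fallback with a hypothetical storage driver:

```python
from flask import Response, redirect


def serve_blob(store, locations, path, headers):
  """Redirect to object storage when possible, otherwise stream through the app.

  `store` is a hypothetical driver exposing get_direct_download_url() and
  stream_read(); the real Quay storage layer has more behavior than shown here.
  """
  direct_download_url = store.get_direct_download_url(locations, path)
  if direct_download_url:
    # Cheap path: let the client fetch the layer straight from blob storage.
    return redirect(direct_download_url, code=302)

  # Fallback: proxy the bytes ourselves, which ties up an application worker.
  return Response(store.stream_read(locations, path), headers=headers)
```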
@@ -214,10 +212,8 @@ def put_image_layer(namespace, repository, image_id):
   try:
     start_time = time()
     store.stream_write(locations, path, sr)
-    metric_queue.chunk_size.Observe(size_info.compressed_size,
-                                    labelvalues=[list(locations)[0]])
-    metric_queue.chunk_upload_time.Observe(time() - start_time,
-                                           labelvalues=[list(locations)[0]])
+    metric_queue.chunk_size.Observe(size_info.compressed_size, labelvalues=[list(locations)[0]])
+    metric_queue.chunk_upload_time.Observe(time() - start_time, labelvalues=[list(locations)[0]])
   except IOError:
     logger.exception('Exception when writing image data')
     abort(520, 'Image %(image_id)s could not be written. Please try again.', image_id=image_id)
@@ -227,7 +223,8 @@ def put_image_layer(namespace, repository, image_id):
                                     size_info.uncompressed_size)
 
   # Save the BitTorrent pieces.
-  model.create_bittorrent_pieces(namespace, repository, image_id, piece_hasher.final_piece_hashes())
+  model.create_bittorrent_pieces(namespace, repository, image_id,
+                                 piece_hasher.final_piece_hashes())
 
   # Append the computed checksum.
   csums = []
@@ -307,8 +304,8 @@ def put_image_checksum(namespace, repository, image_id):
 
   logger.debug('Marking image path')
   if not model.is_image_uploading(namespace, repository, image_id):
-    abort(409, 'Cannot set checksum for image %(image_id)s',
-          issue='image-write-error', image_id=image_id)
+    abort(409, 'Cannot set checksum for image %(image_id)s', issue='image-write-error',
+          image_id=image_id)
 
   logger.debug('Storing image and content checksums')
 
@@ -323,8 +320,8 @@ def put_image_checksum(namespace, repository, image_id):
     logger.debug('session checksums: %s', session.get('checksum', []))
     logger.debug('client supplied checksum: %s', checksum)
     logger.debug('put_image_checksum: Wrong checksum')
-    abort(400, 'Checksum mismatch for image: %(image_id)s',
-          issue='checksum-mismatch', image_id=image_id)
+    abort(400, 'Checksum mismatch for image: %(image_id)s', issue='checksum-mismatch',
+          image_id=image_id)
 
   # Mark the image as uploaded.
   _finish_image(namespace, repository, image_id)
@@ -416,16 +413,16 @@ def put_image_json(namespace, repository, image_id):
     pass
 
   if not data or not isinstance(data, dict):
-    abort(400, 'Invalid JSON for image: %(image_id)s\nJSON: %(json)s',
-          issue='invalid-request', image_id=image_id, json=request.data)
+    abort(400, 'Invalid JSON for image: %(image_id)s\nJSON: %(json)s', issue='invalid-request',
+          image_id=image_id, json=request.data)
 
   if 'id' not in data:
-    abort(400, 'Missing key `id` in JSON for image: %(image_id)s',
-          issue='invalid-request', image_id=image_id)
+    abort(400, 'Missing key `id` in JSON for image: %(image_id)s', issue='invalid-request',
+          image_id=image_id)
 
   if image_id != data['id']:
-    abort(400, 'JSON data contains invalid id for image: %(image_id)s',
-          issue='invalid-request', image_id=image_id)
+    abort(400, 'JSON data contains invalid id for image: %(image_id)s', issue='invalid-request',
+          image_id=image_id)
 
   logger.debug('Looking up repo image')
 
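Before writing any metadata, `put_image_json` rejects a body that is not a JSON object, lacks an `id`, or carries an `id` that disagrees with the URL. A compact sketch of those three checks as a standalone helper; raising an exception stands in for Quay's `abort(400, ...)` helper, and the class name is hypothetical.

```python
import json


class InvalidImageJson(ValueError):
  """Stand-in for abort(400, ...); carries the machine-readable issue code."""

  def __init__(self, message, issue):
    super(InvalidImageJson, self).__init__(message)
    self.issue = issue


def validate_image_json(image_id, raw_body):
  """Apply the same three checks put_image_json performs before storing metadata."""
  try:
    data = json.loads(raw_body)
  except ValueError:
    data = None

  if not data or not isinstance(data, dict):
    raise InvalidImageJson('Invalid JSON for image: %s' % image_id, issue='invalid-request')
  if 'id' not in data:
    raise InvalidImageJson('Missing key `id` in JSON for image: %s' % image_id,
                           issue='invalid-request')
  if image_id != data['id']:
    raise InvalidImageJson('JSON data contains invalid id for image: %s' % image_id,
                           issue='invalid-request')
  return data
```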
@@ -469,7 +466,8 @@ def put_image_json(namespace, repository, image_id):
   command = json.dumps(command_list) if command_list else None
 
   logger.debug('Setting image metadata')
-  model.update_docker_v1_metadata(namespace, repository, image_id, data.get('created'),
+  model.update_docker_v1_metadata(namespace, repository, image_id,
+                                  data.get('created'),
                                   data.get('comment'), command, uploaded_metadata, parent_id)
 
   return make_response('true', 200)
@@ -3,10 +3,8 @@ import json
 
 from flask import abort, request, jsonify, make_response, session
 
-
 from auth.decorators import process_auth
-from auth.permissions import (ReadRepositoryPermission,
-                              ModifyRepositoryPermission)
+from auth.permissions import (ReadRepositoryPermission, ModifyRepositoryPermission)
 from data import model
 from endpoints.common import parse_repository_name
 from endpoints.decorators import anon_protect
@@ -15,7 +13,6 @@ from endpoints.v1.models_pre_oci import pre_oci_model as model
 from util.audit import track_and_log
 from util.names import TAG_ERROR, TAG_REGEX
 
-
 logger = logging.getLogger(__name__)
 
 