endpoints.v2: yapf format

Jimmy Zelinskie 2017-06-29 13:24:00 -04:00
parent ecd5996386
commit 92e978fa0f
6 changed files with 56 additions and 70 deletions
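
The change is mechanical: the files were run through yapf, so only line wrapping and whitespace differ. Below is a minimal sketch of reproducing this kind of rewrap with yapf's Python API; it assumes yapf is installed and guesses at a Chromium-based style with a 100-character column limit and 2-space indents, since the repository's actual .style.yapf is not part of this diff.

# Sketch only: shows the kind of rewrap seen in this commit; not taken from the repo.
# Assumes `pip install yapf`; the style options below are assumptions, not the
# repository's actual .style.yapf.
from yapf.yapflib.yapf_api import FormatCode

OLD_WRAPPING = """\
abort(400, 'Image %(image_id)s is being uploaded, retry later',
      issue='upload-in-progress', image_id=image_id)
"""

# FormatCode returns (formatted_source, changed) in yapf releases of this era.
formatted, changed = FormatCode(
    OLD_WRAPPING,
    style_config='{based_on_style: chromium, column_limit: 100, indent_width: 2}')

print(changed)    # True if yapf rewrapped the call
print(formatted)  # keyword arguments packed up to the 100-column limit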


@@ -10,8 +10,7 @@ from flask import make_response, request, session, Response, redirect, abort as
 from app import storage as store, app, metric_queue
 from auth.auth_context import get_authenticated_user
 from auth.decorators import extract_namespace_repo_from_session, process_auth
-from auth.permissions import (ReadRepositoryPermission,
-                              ModifyRepositoryPermission)
+from auth.permissions import (ReadRepositoryPermission, ModifyRepositoryPermission)
 from auth.registry_jwt_auth import get_granted_username
 from data import model, database
 from digest import checksums
@@ -24,7 +23,6 @@ from util.registry import gzipstream
 from util.registry.replication import queue_storage_replication
 from util.registry.torrent import PieceHasher

 logger = logging.getLogger(__name__)
@@ -38,18 +36,21 @@ def _finish_image(namespace, repository, image_id):
 def require_completion(f):
   """This make sure that the image push correctly finished."""
   @wraps(f)
   def wrapper(namespace, repository, *args, **kwargs):
     image_id = kwargs['image_id']
     if model.is_image_uploading(namespace, repository, image_id):
-      abort(400, 'Image %(image_id)s is being uploaded, retry later',
-            issue='upload-in-progress', image_id=image_id)
+      abort(400, 'Image %(image_id)s is being uploaded, retry later', issue='upload-in-progress',
+            image_id=image_id)

     return f(namespace, repository, *args, **kwargs)
   return wrapper


 def set_cache_headers(f):
   """Returns HTTP headers suitable for caching."""
   @wraps(f)
   def wrapper(*args, **kwargs):
     # Set TTL to 1 year by default
@@ -59,8 +60,7 @@ def set_cache_headers(f):
     headers = {
       'Cache-Control': 'public, max-age={0}'.format(ttl),
       'Expires': expires,
-      'Last-Modified': 'Thu, 01 Jan 1970 00:00:00 GMT',
-    }
+      'Last-Modified': 'Thu, 01 Jan 1970 00:00:00 GMT',}
     if 'If-Modified-Since' in request.headers:
       response = make_response('Not modified', 304)
       response.headers.extend(headers)
@@ -69,6 +69,7 @@ def set_cache_headers(f):
     # Prevent the Cookie to be sent when the object is cacheable
     session.modified = False
     return f(*args, **kwargs)

   return wrapper
@@ -92,8 +93,7 @@ def head_image_layer(namespace, repository, image_id, headers):
     locations, _ = model.placement_locations_and_path_docker_v1(namespace, repository, image_id)
     if locations is None:
       logger.debug('Could not find any blob placement locations')
-      abort(404, 'Image %(image_id)s not found', issue='unknown-image',
-            image_id=image_id)
+      abort(404, 'Image %(image_id)s not found', issue='unknown-image', image_id=image_id)

     # Add the Accept-Ranges header if the storage engine supports resumable
     # downloads.
@@ -129,8 +129,7 @@ def get_image_layer(namespace, repository, image_id, headers):
     logger.debug('Looking up placement locations and path')
     locations, path = model.placement_locations_and_path_docker_v1(namespace, repository, image_id)
     if not locations or not path:
-      abort(404, 'Image %(image_id)s not found', issue='unknown-image',
-            image_id=image_id)
+      abort(404, 'Image %(image_id)s not found', issue='unknown-image', image_id=image_id)
     try:
       logger.debug('Looking up the direct download URL for path: %s', path)
       direct_download_url = store.get_direct_download_url(locations, path)
@@ -145,8 +144,7 @@ def get_image_layer(namespace, repository, image_id, headers):
       return Response(store.stream_read(locations, path), headers=headers)
     except (IOError, AttributeError):
       logger.exception('Image layer data not found')
-      abort(404, 'Image %(image_id)s not found', issue='unknown-image',
-            image_id=image_id)
+      abort(404, 'Image %(image_id)s not found', issue='unknown-image', image_id=image_id)

   abort(403)
@@ -214,10 +212,8 @@ def put_image_layer(namespace, repository, image_id):
   try:
     start_time = time()
     store.stream_write(locations, path, sr)
-    metric_queue.chunk_size.Observe(size_info.compressed_size,
-                                    labelvalues=[list(locations)[0]])
-    metric_queue.chunk_upload_time.Observe(time() - start_time,
-                                           labelvalues=[list(locations)[0]])
+    metric_queue.chunk_size.Observe(size_info.compressed_size, labelvalues=[list(locations)[0]])
+    metric_queue.chunk_upload_time.Observe(time() - start_time, labelvalues=[list(locations)[0]])
   except IOError:
     logger.exception('Exception when writing image data')
     abort(520, 'Image %(image_id)s could not be written. Please try again.', image_id=image_id)
@@ -227,7 +223,8 @@ def put_image_layer(namespace, repository, image_id):
                        size_info.uncompressed_size)

   # Save the BitTorrent pieces.
-  model.create_bittorrent_pieces(namespace, repository, image_id, piece_hasher.final_piece_hashes())
+  model.create_bittorrent_pieces(namespace, repository, image_id,
+                                 piece_hasher.final_piece_hashes())

   # Append the computed checksum.
   csums = []
@@ -307,8 +304,8 @@ def put_image_checksum(namespace, repository, image_id):
   logger.debug('Marking image path')
   if not model.is_image_uploading(namespace, repository, image_id):
-    abort(409, 'Cannot set checksum for image %(image_id)s',
-          issue='image-write-error', image_id=image_id)
+    abort(409, 'Cannot set checksum for image %(image_id)s', issue='image-write-error',
+          image_id=image_id)

   logger.debug('Storing image and content checksums')
@@ -323,8 +320,8 @@ def put_image_checksum(namespace, repository, image_id):
     logger.debug('session checksums: %s', session.get('checksum', []))
     logger.debug('client supplied checksum: %s', checksum)
     logger.debug('put_image_checksum: Wrong checksum')
-    abort(400, 'Checksum mismatch for image: %(image_id)s',
-          issue='checksum-mismatch', image_id=image_id)
+    abort(400, 'Checksum mismatch for image: %(image_id)s', issue='checksum-mismatch',
+          image_id=image_id)

   # Mark the image as uploaded.
   _finish_image(namespace, repository, image_id)
@@ -416,16 +413,16 @@ def put_image_json(namespace, repository, image_id):
     pass

   if not data or not isinstance(data, dict):
-    abort(400, 'Invalid JSON for image: %(image_id)s\nJSON: %(json)s',
-          issue='invalid-request', image_id=image_id, json=request.data)
+    abort(400, 'Invalid JSON for image: %(image_id)s\nJSON: %(json)s', issue='invalid-request',
+          image_id=image_id, json=request.data)

   if 'id' not in data:
-    abort(400, 'Missing key `id` in JSON for image: %(image_id)s',
-          issue='invalid-request', image_id=image_id)
+    abort(400, 'Missing key `id` in JSON for image: %(image_id)s', issue='invalid-request',
+          image_id=image_id)

   if image_id != data['id']:
-    abort(400, 'JSON data contains invalid id for image: %(image_id)s',
-          issue='invalid-request', image_id=image_id)
+    abort(400, 'JSON data contains invalid id for image: %(image_id)s', issue='invalid-request',
+          image_id=image_id)

   logger.debug('Looking up repo image')
@@ -469,7 +466,8 @@ def put_image_json(namespace, repository, image_id):
   command = json.dumps(command_list) if command_list else None

   logger.debug('Setting image metadata')
-  model.update_docker_v1_metadata(namespace, repository, image_id, data.get('created'),
+  model.update_docker_v1_metadata(namespace, repository, image_id,
+                                  data.get('created'),
                                   data.get('comment'), command, uploaded_metadata, parent_id)
   return make_response('true', 200)