Work in progress. This is currently broken!
parent 820d5c0476
commit e3c52fa0eb
8 changed files with 344 additions and 0 deletions
endpoints/verbs.py (new file, 75 lines)
@@ -0,0 +1,75 @@
import logging
import json
import hashlib

from flask import (make_response, request, session, Response, redirect,
                   Blueprint, abort, send_file)

from app import storage as store, app
from auth.auth import process_auth
from auth.permissions import ReadRepositoryPermission
from data import model
from endpoints.registry import set_cache_headers

from util.dockerimportformat import build_docker_import_stream

from werkzeug.wsgi import wrap_file


verbs = Blueprint('verbs', __name__)
logger = logging.getLogger(__name__)


@verbs.route('/<namespace>/<repository>/<tag>/squash', methods=['GET'])
@process_auth
@set_cache_headers
def get_squashed_tag(namespace, repository, tag, headers):
  permission = ReadRepositoryPermission(namespace, repository)
  if permission.can() or model.repository_is_public(namespace, repository):
    # Lookup the requested tag.
    tag_image = model.get_tag_image(namespace, repository, tag)
    if not tag_image:
      abort(404)

    # Lookup the tag's image and storage.
    repo_image = model.get_repo_image(namespace, repository, tag_image.docker_image_id)
    if not repo_image:
      abort(404)

    # Calculate a synthetic image ID by hashing the *image storage ID* with our
    # secret. This is done to prevent the ID being guessable/overwritable by
    # external pushes.
    unhashed = str(repo_image.storage.id) + ':' + app.config['SECRET_KEY']
    synthetic_image_id = hashlib.sha256(unhashed).hexdigest()

    # Load the ancestry for the image.
    uuid = repo_image.storage.uuid
    ancestry_data = store.get_content(repo_image.storage.locations, store.image_ancestry_path(uuid))
    full_image_list = json.loads(ancestry_data)

    # Load the JSON for the image.
    json_data = store.get_content(repo_image.storage.locations, store.image_json_path(uuid))
    layer_json = json.loads(json_data)

    def get_next_image():
      for current_image_id in full_image_list:
        yield model.get_repo_image(namespace, repository, current_image_id)

    def get_next_layer():
      for current_image_id in full_image_list:
        current_image_entry = model.get_repo_image(namespace, repository, current_image_id)
        current_image_path = store.image_layer_path(current_image_entry.storage.uuid)
        current_image_stream = store.stream_read_file(current_image_entry.storage.locations,
                                                      current_image_path)

        logger.debug('Returning image layer %s: %s' % (current_image_id, current_image_path))
        yield current_image_stream

    stream = build_docker_import_stream(namespace, repository, tag, synthetic_image_id,
                                        layer_json, get_next_image, get_next_layer)

    return app.response_class(wrap_file(request.environ, stream, 1024 * 16),
                              mimetype='application/octet-stream',
                              direct_passthrough=True)

  abort(403)