Merge branch 'master' into ackbar

Joseph Schorr 2015-02-09 17:16:42 -05:00
commit 045614c6c8
29 changed files with 920 additions and 246 deletions


@@ -3,6 +3,7 @@ import urlparse
import json
import string
import datetime
import os
# Register the various exceptions via decorators.
import endpoints.decorated
@@ -32,6 +33,23 @@ profile = logging.getLogger('application.profiler')
route_data = None
CACHE_BUSTERS_JSON = 'static/dist/cachebusters.json'
CACHE_BUSTERS = None
def get_cache_busters():
""" Retrieves the cache busters hashes. """
global CACHE_BUSTERS
if CACHE_BUSTERS is not None:
return CACHE_BUSTERS
if not os.path.exists(CACHE_BUSTERS_JSON):
return {}
with open(CACHE_BUSTERS_JSON, 'r') as f:
CACHE_BUSTERS = json.loads(f.read())
return CACHE_BUSTERS
class RepoPathConverter(BaseConverter):
regex = '[\.a-zA-Z0-9_\-]+/[\.a-zA-Z0-9_\-]+'
weight = 200
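
For context, a minimal sketch of the cache-buster mechanism introduced above: the build step presumably writes a JSON map of static file paths to content hashes at static/dist/cachebusters.json, and get_cache_busters() lazily loads and memoizes it. The filenames and hashes below are made up for illustration, not taken from this commit.

# Illustrative only: an assumed shape for static/dist/cachebusters.json and the
# lookup behavior relied on later in this diff.
import json, os, tempfile

sample = {'dist/quay-frontend.min.js': '9f86d081', 'dist/quay-frontend.css': '60303ae2'}
path = os.path.join(tempfile.mkdtemp(), 'cachebusters.json')
with open(path, 'w') as f:
    json.dump(sample, f)

with open(path, 'r') as f:
    busters = json.loads(f.read())

# Unknown files fall back to a default, mirroring the .get(filename, random_string())
# fallback used by add_cachebusters() further down.
print(busters.get('dist/quay-frontend.min.js', 'fallback'))  # 9f86d081
print(busters.get('js/app.js', 'fallback'))                  # fallback
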
@@ -113,17 +131,15 @@ def list_files(path, extension):
filepath = 'static/' + path
return [join_path(dp, f) for dp, dn, files in os.walk(filepath) for f in files if matches(f)]
SAVED_CACHE_STRING = random_string()
def render_page_template(name, **kwargs):
if app.config.get('DEBUGGING', False):
debugging = app.config.get('DEBUGGING', False)
if debugging:
# If DEBUGGING is enabled, then we load the full set of individual JS and CSS files
# from the file system.
library_styles = list_files('lib', 'css')
main_styles = list_files('css', 'css')
library_scripts = list_files('lib', 'js')
main_scripts = list_files('js', 'js')
cache_buster = 'debugging'
file_lists = [library_styles, main_styles, library_scripts, main_scripts]
for file_list in file_lists:
@@ -133,7 +149,6 @@ def render_page_template(name, **kwargs):
main_styles = ['dist/quay-frontend.css']
library_scripts = []
main_scripts = ['dist/quay-frontend.min.js']
cache_buster = SAVED_CACHE_STRING
use_cdn = app.config.get('USE_CDN', True)
if request.args.get('use_cdn') is not None:
@@ -142,6 +157,12 @@ def render_page_template(name, **kwargs):
external_styles = get_external_css(local=not use_cdn)
external_scripts = get_external_javascript(local=not use_cdn)
def add_cachebusters(filenames):
cachebusters = get_cache_busters()
for filename in filenames:
cache_buster = cachebusters.get(filename, random_string()) if not debugging else 'debugging'
yield (filename, cache_buster)
def get_oauth_config():
oauth_config = {}
for oauth_app in oauth_apps:
@@ -153,13 +174,14 @@ def render_page_template(name, **kwargs):
if len(app.config.get('CONTACT_INFO', [])) == 1:
contact_href = app.config['CONTACT_INFO'][0]
resp = make_response(render_template(name, route_data=json.dumps(get_route_data()),
resp = make_response(render_template(name,
route_data=json.dumps(get_route_data()),
external_styles=external_styles,
external_scripts=external_scripts,
main_styles=main_styles,
library_styles=library_styles,
main_scripts=main_scripts,
library_scripts=library_scripts,
main_styles=add_cachebusters(main_styles),
library_styles=add_cachebusters(library_styles),
main_scripts=add_cachebusters(main_scripts),
library_scripts=add_cachebusters(library_scripts),
feature_set=json.dumps(features.get_features()),
config_set=json.dumps(getFrontendVisibleConfig(app.config)),
oauth_set=json.dumps(get_oauth_config()),
@@ -169,9 +191,10 @@ def render_page_template(name, **kwargs):
sentry_public_dsn=app.config.get('SENTRY_PUBLIC_DSN', ''),
is_debug=str(app.config.get('DEBUGGING', False)).lower(),
show_chat=features.OLARK_CHAT,
cache_buster=cache_buster,
has_billing=features.BILLING,
contact_href=contact_href,
hostname=app.config['SERVER_HOSTNAME'],
preferred_scheme=app.config['PREFERRED_URL_SCHEME'],
**kwargs))
resp.headers['X-FRAME-OPTIONS'] = 'DENY'
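
As a rough illustration (not the actual Quay template), the (filename, cache_buster) pairs yielded by add_cachebusters() would typically be expanded into versioned asset URLs; in debug mode every buster is the literal string 'debugging'. The URL format and filenames below are assumptions.

# Hypothetical consumption of add_cachebusters() output.
def asset_urls(pairs):
    for filename, cache_buster in pairs:
        yield '/static/%s?v=%s' % (filename, cache_buster)

pairs = [('dist/quay-frontend.min.js', '9f86d081'),
         ('dist/quay-frontend.css', '60303ae2')]
for url in asset_urls(pairs):
    print(url)
# /static/dist/quay-frontend.min.js?v=9f86d081
# /static/dist/quay-frontend.css?v=60303ae2
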


@@ -2,11 +2,10 @@ import logging
import json
import hashlib
from flask import redirect, Blueprint, abort, send_file, request
from flask import redirect, Blueprint, abort, send_file, make_response
from app import app
from app import app, signer
from auth.auth import process_auth
from auth.auth_context import get_authenticated_user
from auth.permissions import ReadRepositoryPermission
from data import model
from data import database
@@ -15,13 +14,16 @@ from storage import Storage
from util.queuefile import QueueFile
from util.queueprocess import QueueProcess
from util.gzipwrap import GzipWrap
from util.dockerloadformat import build_docker_load_stream
from formats.squashed import SquashedDockerImage
from formats.aci import ACIImage
# pylint: disable=invalid-name
verbs = Blueprint('verbs', __name__)
logger = logging.getLogger(__name__)
def _open_stream(namespace, repository, tag, synthetic_image_id, image_json, image_id_list):
def _open_stream(formatter, namespace, repository, tag, synthetic_image_id, image_json,
image_id_list):
store = Storage(app)
# For performance reasons, we load the full image list here, cache it, then disconnect from
@@ -42,20 +44,43 @@ def _open_stream(namespace, repository, tag, synthetic_image_id, image_json, image_id_list):
current_image_path)
current_image_id = current_image_entry.id
logger.debug('Returning image layer %s: %s' % (current_image_id, current_image_path))
logger.debug('Returning image layer %s: %s', current_image_id, current_image_path)
yield current_image_stream
stream = build_docker_load_stream(namespace, repository, tag, synthetic_image_id, image_json,
stream = formatter.build_stream(namespace, repository, tag, synthetic_image_id, image_json,
get_next_image, get_next_layer)
return stream.read
def _write_synthetic_image_to_storage(linked_storage_uuid, linked_locations, queue_file):
def _sign_synthetic_image(verb, linked_storage_uuid, queue_file):
signature = None
try:
signature = signer.detached_sign(queue_file)
except:
logger.exception('Exception when signing %s image %s', verb, linked_storage_uuid)
return
# Setup the database (since this is a new process) and then disconnect immediately
# once the operation completes.
if not queue_file.raised_exception:
with database.UseThenDisconnect(app.config):
try:
derived = model.get_storage_by_uuid(linked_storage_uuid)
except model.InvalidImageException:
return
signature_entry = model.find_or_create_storage_signature(derived, signer.name)
signature_entry.signature = signature
signature_entry.uploading = False
signature_entry.save()
def _write_synthetic_image_to_storage(verb, linked_storage_uuid, linked_locations, queue_file):
store = Storage(app)
def handle_exception(ex):
logger.debug('Exception when building squashed image %s: %s', linked_storage_uuid, ex)
logger.debug('Exception when building %s image %s: %s', verb, linked_storage_uuid, ex)
with database.UseThenDisconnect(app.config):
model.delete_derived_storage_by_uuid(linked_storage_uuid)
@@ -67,86 +92,193 @@ def _write_synthetic_image_to_storage(linked_storage_uuid, linked_locations, queue_file):
queue_file.close()
if not queue_file.raised_exception:
# Setup the database (since this is a new process) and then disconnect immediately
# once the operation completes.
with database.UseThenDisconnect(app.config):
done_uploading = model.get_storage_by_uuid(linked_storage_uuid)
done_uploading.uploading = False
done_uploading.save()
@verbs.route('/squash/<namespace>/<repository>/<tag>', methods=['GET'])
@process_auth
def get_squashed_tag(namespace, repository, tag):
# pylint: disable=too-many-locals
def _verify_repo_verb(store, namespace, repository, tag, verb, checker=None):
permission = ReadRepositoryPermission(namespace, repository)
if permission.can() or model.repository_is_public(namespace, repository):
# Lookup the requested tag.
try:
tag_image = model.get_tag_image(namespace, repository, tag)
except model.DataModelException:
abort(404)
# Lookup the tag's image and storage.
repo_image = model.get_repo_image_extended(namespace, repository, tag_image.docker_image_id)
if not repo_image:
abort(404)
# pylint: disable=no-member
if not permission.can() and not model.repository_is_public(namespace, repository):
abort(403)
# Log the action.
track_and_log('repo_verb', repo_image.repository, tag=tag, verb='squash')
# Lookup the requested tag.
try:
tag_image = model.get_tag_image(namespace, repository, tag)
except model.DataModelException:
abort(404)
store = Storage(app)
derived = model.find_or_create_derived_storage(repo_image.storage, 'squash',
store.preferred_locations[0])
if not derived.uploading:
logger.debug('Derived image %s exists in storage', derived.uuid)
derived_layer_path = store.image_layer_path(derived.uuid)
download_url = store.get_direct_download_url(derived.locations, derived_layer_path)
if download_url:
logger.debug('Redirecting to download URL for derived image %s', derived.uuid)
return redirect(download_url)
# Lookup the tag's image and storage.
repo_image = model.get_repo_image_extended(namespace, repository, tag_image.docker_image_id)
if not repo_image:
abort(404)
# Close the database handle here for this process before we send the long download.
database.close_db_filter(None)
# If there is a data checker, call it first.
uuid = repo_image.storage.uuid
image_json = None
logger.debug('Sending cached derived image %s', derived.uuid)
return send_file(store.stream_read_file(derived.locations, derived_layer_path))
# Load the ancestry for the image.
logger.debug('Building and returning derived image %s', derived.uuid)
uuid = repo_image.storage.uuid
ancestry_data = store.get_content(repo_image.storage.locations, store.image_ancestry_path(uuid))
full_image_list = json.loads(ancestry_data)
# Load the image's JSON layer.
if checker is not None:
image_json_data = store.get_content(repo_image.storage.locations, store.image_json_path(uuid))
image_json = json.loads(image_json_data)
# Calculate a synthetic image ID.
synthetic_image_id = hashlib.sha256(tag_image.docker_image_id + ':squash').hexdigest()
if not checker(image_json):
logger.debug('Check mismatch on %s/%s:%s, verb %s', namespace, repository, tag, verb)
abort(404)
# Create a queue process to generate the data. The queue files will read from the process
# and send the results to the client and storage.
def _cleanup():
# Close any existing DB connection once the process has exited.
database.close_db_filter(None)
return (repo_image, tag_image, image_json)
args = (namespace, repository, tag, synthetic_image_id, image_json, full_image_list)
queue_process = QueueProcess(_open_stream,
8 * 1024, 10 * 1024 * 1024, # 8K/10M chunk/max
args, finished=_cleanup)
client_queue_file = QueueFile(queue_process.create_queue(), 'client')
storage_queue_file = QueueFile(queue_process.create_queue(), 'storage')
# pylint: disable=too-many-locals
def _repo_verb_signature(namespace, repository, tag, verb, checker=None, **kwargs):
# Verify that the image exists and that we have access to it.
store = Storage(app)
result = _verify_repo_verb(store, namespace, repository, tag, verb, checker)
(repo_image, tag_image, image_json) = result
# Start building.
queue_process.run()
# Lookup the derived image storage for the verb.
derived = model.find_derived_storage(repo_image.storage, verb)
if derived is None or derived.uploading:
abort(404)
# Start the storage saving.
storage_args = (derived.uuid, derived.locations, storage_queue_file)
QueueProcess.run_process(_write_synthetic_image_to_storage, storage_args, finished=_cleanup)
# Check if we have a valid signer configured.
if not signer.name:
abort(404)
# Lookup the signature for the verb.
signature_entry = model.lookup_storage_signature(derived, signer.name)
if signature_entry is None:
abort(404)
# Return the signature.
return make_response(signature_entry.signature)
# pylint: disable=too-many-locals
def _repo_verb(namespace, repository, tag, verb, formatter, sign=False, checker=None, **kwargs):
# Verify that the image exists and that we have access to it.
store = Storage(app)
result = _verify_repo_verb(store, namespace, repository, tag, verb, checker)
(repo_image, tag_image, image_json) = result
# Log the action.
track_and_log('repo_verb', repo_image.repository, tag=tag, verb=verb, **kwargs)
# Lookup/create the derived image storage for the verb.
derived = model.find_or_create_derived_storage(repo_image.storage, verb,
store.preferred_locations[0])
if not derived.uploading:
logger.debug('Derived %s image %s exists in storage', verb, derived.uuid)
derived_layer_path = store.image_layer_path(derived.uuid)
download_url = store.get_direct_download_url(derived.locations, derived_layer_path)
if download_url:
logger.debug('Redirecting to download URL for derived %s image %s', verb, derived.uuid)
return redirect(download_url)
# Close the database handle here for this process before we send the long download.
database.close_db_filter(None)
# Return the client's data.
return send_file(client_queue_file)
logger.debug('Sending cached derived %s image %s', verb, derived.uuid)
return send_file(store.stream_read_file(derived.locations, derived_layer_path))
# Load the ancestry for the image.
uuid = repo_image.storage.uuid
logger.debug('Building and returning derived %s image %s', verb, derived.uuid)
ancestry_data = store.get_content(repo_image.storage.locations, store.image_ancestry_path(uuid))
full_image_list = json.loads(ancestry_data)
# Load the image's JSON layer.
if not image_json:
image_json_data = store.get_content(repo_image.storage.locations, store.image_json_path(uuid))
image_json = json.loads(image_json_data)
# Calculate a synthetic image ID.
synthetic_image_id = hashlib.sha256(tag_image.docker_image_id + ':' + verb).hexdigest()
def _cleanup():
# Close any existing DB connection once the process has exited.
database.close_db_filter(None)
# Create a queue process to generate the data. The queue files will read from the process
# and send the results to the client and storage.
args = (formatter, namespace, repository, tag, synthetic_image_id, image_json, full_image_list)
queue_process = QueueProcess(_open_stream,
8 * 1024, 10 * 1024 * 1024, # 8K/10M chunk/max
args, finished=_cleanup)
client_queue_file = QueueFile(queue_process.create_queue(), 'client')
storage_queue_file = QueueFile(queue_process.create_queue(), 'storage')
# If signing is required, add a QueueFile for signing the image as we stream it out.
signing_queue_file = None
if sign and signer.name:
signing_queue_file = QueueFile(queue_process.create_queue(), 'signing')
# Start building.
queue_process.run()
# Start the storage saving.
storage_args = (verb, derived.uuid, derived.locations, storage_queue_file)
QueueProcess.run_process(_write_synthetic_image_to_storage, storage_args, finished=_cleanup)
if sign and signer.name:
signing_args = (verb, derived.uuid, signing_queue_file)
QueueProcess.run_process(_sign_synthetic_image, signing_args, finished=_cleanup)
# Close the database handle here for this process before we send the long download.
database.close_db_filter(None)
# Return the client's data.
return send_file(client_queue_file)
def os_arch_checker(os, arch):
def checker(image_json):
# Verify the architecture and os.
operating_system = image_json.get('os', 'linux')
if operating_system != os:
return False
architecture = image_json.get('architecture', 'amd64')
# Note: Some older Docker images have 'x86_64' rather than 'amd64'.
# We allow the conversion here.
if architecture == 'x86_64' and operating_system == 'linux':
architecture = 'amd64'
if architecture != arch:
return False
return True
return checker
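
A quick usage sketch of os_arch_checker as defined above; the import path endpoints.verbs is an assumption about where this blueprint module lives.

# Assumes the verbs blueprint is importable as endpoints.verbs.
from endpoints.verbs import os_arch_checker

checker = os_arch_checker('linux', 'amd64')
# Older images reporting 'x86_64' on linux are normalized to 'amd64' before comparison.
print(checker({'os': 'linux', 'architecture': 'x86_64'}))    # True
print(checker({'os': 'windows', 'architecture': 'amd64'}))   # False
# Missing fields default to linux/amd64.
print(checker({}))                                           # True
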
@verbs.route('/aci/<server>/<namespace>/<repository>/<tag>/sig/<os>/<arch>/', methods=['GET'])
@process_auth
# pylint: disable=unused-argument
def get_aci_signature(server, namespace, repository, tag, os, arch):
return _repo_verb_signature(namespace, repository, tag, 'aci', checker=os_arch_checker(os, arch),
os=os, arch=arch)
@verbs.route('/aci/<server>/<namespace>/<repository>/<tag>/aci/<os>/<arch>/', methods=['GET'])
@process_auth
# pylint: disable=unused-argument
def get_aci_image(server, namespace, repository, tag, os, arch):
return _repo_verb(namespace, repository, tag, 'aci', ACIImage(),
sign=True, checker=os_arch_checker(os, arch), os=os, arch=arch)
@verbs.route('/squash/<namespace>/<repository>/<tag>', methods=['GET'])
@process_auth
def get_squashed_tag(namespace, repository, tag):
return _repo_verb(namespace, repository, tag, 'squash', SquashedDockerImage())
abort(403)
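
Taken together, the routes above expose the squashed-image and ACI renderings of a tag. A hedged client-side sketch follows; the hostname, the /c1 URL prefix, the repository names, and the lack of auth are placeholders, not taken from this commit.

# Hypothetical client calls against the new verb endpoints.
import requests

BASE = 'https://quay.example.com/c1'

# Squashed Docker image for a tag (a synthesized single-layer tarball is streamed back).
squashed = requests.get(BASE + '/squash/myorg/myrepo/latest', stream=True)

# ACI rendering of the same tag, plus its detached signature, for linux/amd64.
aci = requests.get(BASE + '/aci/quay.example.com/myorg/myrepo/latest/aci/linux/amd64/', stream=True)
sig = requests.get(BASE + '/aci/quay.example.com/myorg/myrepo/latest/sig/linux/amd64/')
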


@@ -1,7 +1,7 @@
import logging
from flask import (abort, redirect, request, url_for, make_response, Response,
Blueprint, send_from_directory, jsonify)
Blueprint, send_from_directory, jsonify, send_file)
from avatar_generator import Avatar
from flask.ext.login import current_user
@@ -10,7 +10,7 @@ from health.healthcheck import get_healthchecker
from data import model
from data.model.oauth import DatabaseAuthorizationProvider
from app import app, billing as stripe, build_logs, avatar
from app import app, billing as stripe, build_logs, avatar, signer
from auth.auth import require_session_login, process_oauth
from auth.permissions import (AdministerOrganizationPermission, ReadRepositoryPermission,
SuperUserPermission)
@@ -63,6 +63,14 @@ def snapshot(path = ''):
abort(404)
@web.route('/aci-signing-key')
@no_cache
def aci_signing_key():
if not signer.name:
abort(404)
return send_file(signer.public_key_path)
@web.route('/plans/')
@no_cache
@route_show_if(features.BILLING)
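
Finally, the new /aci-signing-key route serves the signer's public key, so clients can check the detached ACI signatures produced above. A rough sketch of that flow; the hostname is a placeholder and the signature format (a detached GPG signature) is inferred from signer.detached_sign, not stated in the diff.

# Hypothetical verification flow for the new signing-key endpoint.
import requests

HOST = 'https://quay.example.com'

# Save the registry's public signing key advertised by the new route.
with open('pubkey.gpg', 'wb') as f:
    f.write(requests.get(HOST + '/aci-signing-key').content)

# With the ACI and its detached signature downloaded (see the previous sketch), a
# client would verify them with standard GPG tooling, e.g.:
#   gpg --import pubkey.gpg
#   gpg --verify image.aci.asc image.aci
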