Merge branch 'master' of github.com:coreos-inc/quay

commit ef61145b2c
6 changed files with 46 additions and 21 deletions
@@ -15,7 +15,7 @@ from data import model
 from data.model import oauth
 from app import app, authentication
 from permissions import QuayDeferredPermissionUser
-from auth_context import (set_authenticated_user, set_validated_token,
+from auth_context import (set_authenticated_user, set_validated_token, set_grant_user_context,
                           set_authenticated_user_deferred, set_validated_oauth_token)
 from util.http import abort
 
@@ -131,10 +131,11 @@ def _process_basic_auth(auth):
   logger.debug('Basic auth present but could not be validated.')
 
 
-def generate_signed_token(grants):
+def generate_signed_token(grants, user_context):
   ser = SecureCookieSessionInterface().get_signing_serializer(app)
   data_to_sign = {
     'grants': grants,
+    'user_context': user_context,
   }
 
   encrypted = ser.dumps(data_to_sign)
@@ -164,6 +165,7 @@ def _process_signed_grant(auth):
     logger.debug('Successfully validated signed grant with data: %s', token_data)
 
     loaded_identity = Identity(None, 'signed_grant')
+    set_grant_user_context(token_data['user_context'])
     loaded_identity.provides.update(token_data['grants'])
     identity_changed.send(app, identity=loaded_identity)
 
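Note: the hunks above add a user context to the payload that gets signed with the app's secret key and later read back by _process_signed_grant. A minimal sketch of that round trip, using the same Flask signing serializer; the secret, grant structure, and username below are illustrative, not the project's real values:

  from flask import Flask
  from flask.sessions import SecureCookieSessionInterface

  app = Flask(__name__)
  app.secret_key = 'not-a-real-secret'

  def generate_signed_token(grants, user_context):
    # Sign the grants plus the initiating user's context with the app secret.
    ser = SecureCookieSessionInterface().get_signing_serializer(app)
    return ser.dumps({'grants': grants, 'user_context': user_context})

  def validate_signed_token(token):
    # Read the payload back; a tampered token raises a BadSignature error.
    ser = SecureCookieSessionInterface().get_signing_serializer(app)
    return ser.loads(token)

  token = generate_signed_token([{'repository': 'devtable/simple'}], 'devtable')
  assert validate_signed_token(token)['user_context'] == 'devtable'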
@@ -30,6 +30,15 @@ def set_authenticated_user(user_or_robot):
   ctx.authenticated_user = user_or_robot
 
 
+def get_grant_user_context():
+  return getattr(_request_ctx_stack.top, 'grant_user_context', None)
+
+
+def set_grant_user_context(username_or_robotname):
+  ctx = _request_ctx_stack.top
+  ctx.grant_user_context = username_or_robotname
+
+
 def set_authenticated_user_deferred(user_or_robot_db_uuid):
   logger.debug('Deferring loading of authenticated user object with uuid: %s', user_or_robot_db_uuid)
   ctx = _request_ctx_stack.top
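The new accessors stash the grant's username on Flask's per-request context, so it lives and dies with the request. A small usage sketch of that round trip, assuming a Flask version that still exposes flask._request_ctx_stack (it was removed in Flask 2.3); the robot name is made up:

  from flask import Flask, _request_ctx_stack

  app = Flask(__name__)

  def get_grant_user_context():
    return getattr(_request_ctx_stack.top, 'grant_user_context', None)

  def set_grant_user_context(username_or_robotname):
    _request_ctx_stack.top.grant_user_context = username_or_robotname

  with app.test_request_context('/'):
    assert get_grant_user_context() is None  # nothing stored yet
    set_grant_user_context('devtable+builder')
    assert get_grant_user_context() == 'devtable+builder'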
@@ -3,3 +3,4 @@ workers = 4
 logconfig = 'conf/logging.conf'
 pythonpath = '.'
 preload_app = True
+timeout = 2000 # Because sync workers
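For context, a gunicorn config file is just a Python module whose top-level names are read as settings, so the whole file after this change is only a few lines. A sketch of how it reads as a whole; the bind line is an assumption (it sits above the hunk and is not shown), the rest comes from the diff:

  bind = 'unix:/tmp/gunicorn.sock'  # assumption; not shown in the hunk
  workers = 4
  logconfig = 'conf/logging.conf'
  pythonpath = '.'
  preload_app = True
  timeout = 2000 # Because sync workers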
@@ -1668,6 +1668,7 @@ def _garbage_collect_storage(storage_id_whitelist):
   logger.debug('Garbage collecting derived storage from candidates: %s', storage_id_whitelist)
   with config.app_config['DB_TRANSACTION_FACTORY'](db):
     # Find out which derived storages will be removed, and add them to the whitelist
+    # The comma after ImageStorage.id is VERY important, it makes it a tuple, which is a sequence
     orphaned_from_candidates = list(orphaned_storage_query(ImageStorage.select(ImageStorage.id),
                                                            storage_id_whitelist,
                                                            (ImageStorage.id,)))
@@ -1707,22 +1708,32 @@ def _garbage_collect_storage(storage_id_whitelist):
     paths_to_remove = placements_query_to_paths_set(placements_to_remove.clone())
 
     # Remove the placements for orphaned storages
-    placements_subquery = list(placements_to_remove.clone().select(ImageStoragePlacement.id))
-    if len(placements_subquery) > 0:
-      (ImageStoragePlacement
-       .delete()
-       .where(ImageStoragePlacement.id << list(placements_subquery))
-       .execute())
+    placements_subquery = (placements_to_remove
+                           .clone()
+                           .select(ImageStoragePlacement.id)
+                           .alias('ps'))
+    inner = (ImageStoragePlacement
+             .select(placements_subquery.c.id)
+             .from_(placements_subquery))
+    placements_removed = (ImageStoragePlacement
+                          .delete()
+                          .where(ImageStoragePlacement.id << inner)
+                          .execute())
+    logger.debug('Removed %s image storage placements', placements_removed)
 
-    # Remove the all orphaned storages
-    orphaned_storages = list(orphaned_storage_query(ImageStorage.select(ImageStorage.id),
-                                                    storage_id_whitelist,
-                                                    (ImageStorage.id,)))
-    if len(orphaned_storages) > 0:
-      (ImageStorage
-       .delete()
-       .where(ImageStorage.id << orphaned_storages)
-       .execute())
+    # Remove all orphaned storages
+    # The comma after ImageStorage.id is VERY important, it makes it a tuple, which is a sequence
+    orphaned_storages = orphaned_storage_query(ImageStorage.select(ImageStorage.id),
+                                               storage_id_whitelist,
+                                               (ImageStorage.id,)).alias('osq')
+    orphaned_storage_inner = (ImageStorage
+                              .select(orphaned_storages.c.id)
+                              .from_(orphaned_storages))
+    storages_removed = (ImageStorage
+                        .delete()
+                        .where(ImageStorage.id << orphaned_storage_inner)
+                        .execute())
+    logger.debug('Removed %s image storage records', storages_removed)
 
     # We are going to make the conscious decision to not delete image storage blobs inside
     # transactions.
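The rewrite above stops materializing candidate IDs into Python lists and instead issues a single DELETE whose IN clause selects from an aliased copy of the candidate query; aliasing the subquery is the usual workaround for MySQL refusing to delete from a table that the subquery reads directly, and the row counts are now logged. A standalone sketch of that pattern in peewee 2.x style, mirroring the .alias(), .c, .from_() and << calls in the hunk; the model and field names here are made up:

  from peewee import SqliteDatabase, Model, BooleanField

  db = SqliteDatabase(':memory:')

  class Storage(Model):
    orphaned = BooleanField(default=False)

    class Meta:
      database = db

  db.create_tables([Storage])
  Storage.create(orphaned=False)
  Storage.create(orphaned=True)

  # Alias the candidate query so the DELETE's subquery does not read from the
  # same table it deletes from (MySQL rejects that form).
  candidates = Storage.select(Storage.id).where(Storage.orphaned == True).alias('osq')
  inner = Storage.select(candidates.c.id).from_(candidates)
  removed = (Storage
             .delete()
             .where(Storage.id << inner)
             .execute())
  print('Removed %s rows' % removed)  # -> Removed 1 rows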
@@ -60,7 +60,8 @@ def generate_headers(scope=GrantType.READ_REPOSITORY):
 
     if permission.can():
       # Generate a signed grant which expires here
-      signature = generate_signed_token(grants)
+      user_context = get_authenticated_user() and get_authenticated_user().username
+      signature = generate_signed_token(grants, user_context)
       response.headers['WWW-Authenticate'] = signature
       response.headers['X-Docker-Token'] = signature
     else:
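Here the caller derives user_context with the `user and user.username` guard (None for anonymous requests) and hands the resulting signature to the client in both headers, as the docker v1 index protocol expects. A minimal sketch of that response shape; the route and token value are assumptions, only the two header names come from the hunk:

  from flask import Flask, make_response

  app = Flask(__name__)

  @app.route('/v1/repositories/devtable/simple/images')
  def example():
    signature = 'signed-grant-goes-here'  # would come from generate_signed_token(...)
    response = make_response('OK')
    # The same signed grant rides in both headers.
    response.headers['WWW-Authenticate'] = signature
    response.headers['X-Docker-Token'] = signature
    return response

  with app.test_client() as client:
    headers = client.get('/v1/repositories/devtable/simple/images').headers
    assert headers['X-Docker-Token'] == headers['WWW-Authenticate']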
@@ -9,7 +9,7 @@ from time import time
 
 from app import storage as store, image_diff_queue, app
 from auth.auth import process_auth, extract_namespace_repo_from_session
-from auth.auth_context import get_authenticated_user
+from auth.auth_context import get_authenticated_user, get_grant_user_context
 from util import checksums, changes
 from util.http import abort, exact_abort
 from auth.permissions import (ReadRepositoryPermission,
@@ -463,8 +463,9 @@ def put_image_json(namespace, repository, image_id):
 
   repo_image = model.get_repo_image_extended(namespace, repository, image_id)
   if not repo_image:
-    logger.debug('Image not found, creating image')
-    username = get_authenticated_user() and get_authenticated_user().username
+    username = (get_authenticated_user() and get_authenticated_user().username or
+                get_grant_user_context())
+    logger.debug('Image not found, creating image with initiating user context: %s', username)
     repo_image = model.find_create_or_link_image(image_id, repo, username, {},
                                                  store.preferred_locations[0])
 
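With the grant context wired through, image creation can be attributed even when the request was authorized purely by a signed grant: the expression prefers the authenticated user's username and falls back to the username carried in the grant. A hypothetical helper mirroring that fallback (the helper and class names are made up; the expression matches the hunk):

  def initiating_username(authenticated_user, grant_user_context):
    # `x and x.username` yields None instead of raising when x is None;
    # `or` then falls through to the grant's user context.
    return (authenticated_user and authenticated_user.username or
            grant_user_context)

  class Robot(object):
    def __init__(self, username):
      self.username = username

  assert initiating_username(Robot('devtable+builder'), None) == 'devtable+builder'
  assert initiating_username(None, 'devtable') == 'devtable'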