Merge branch 'master' of https://bitbucket.org/yackob03/quay

Commit 3080c47ef2
33 changed files with 477 additions and 257 deletions
@@ -25,7 +25,7 @@ def _load_user_from_cookie():
   if not current_user.is_anonymous():
     logger.debug('Loading user from cookie: %s', current_user.get_id())
     set_authenticated_user_deferred(current_user.get_id())
-    loaded = QuayDeferredPermissionUser(current_user.get_id(), 'user_db_id', {scopes.DIRECT_LOGIN})
+    loaded = QuayDeferredPermissionUser(current_user.get_id(), 'user_uuid', {scopes.DIRECT_LOGIN})
     identity_changed.send(app, identity=loaded)
     return current_user.db_user()
   return None
@@ -58,7 +58,7 @@ def _validate_and_apply_oauth_token(token):
   set_authenticated_user(validated.authorized_user)
   set_validated_oauth_token(validated)

-  new_identity = QuayDeferredPermissionUser(validated.authorized_user.id, 'user_db_id', scope_set)
+  new_identity = QuayDeferredPermissionUser(validated.authorized_user.uuid, 'user_uuid', scope_set)
   identity_changed.send(app, identity=new_identity)

@@ -98,7 +98,7 @@ def process_basic_auth(auth):
       logger.debug('Successfully validated robot: %s' % credentials[0])
       set_authenticated_user(robot)

-      deferred_robot = QuayDeferredPermissionUser(robot.id, 'user_db_id', {scopes.DIRECT_LOGIN})
+      deferred_robot = QuayDeferredPermissionUser(robot.uuid, 'user_uuid', {scopes.DIRECT_LOGIN})
       identity_changed.send(app, identity=deferred_robot)
       return
     except model.InvalidRobotException:
@@ -111,7 +111,7 @@ def process_basic_auth(auth):
       logger.debug('Successfully validated user: %s' % authenticated.username)
       set_authenticated_user(authenticated)

-      new_identity = QuayDeferredPermissionUser(authenticated.id, 'user_db_id',
+      new_identity = QuayDeferredPermissionUser(authenticated.uuid, 'user_uuid',
                                                 {scopes.DIRECT_LOGIN})
       identity_changed.send(app, identity=new_identity)
       return
@@ -10,13 +10,13 @@ logger = logging.getLogger(__name__)
 def get_authenticated_user():
   user = getattr(_request_ctx_stack.top, 'authenticated_user', None)
   if not user:
-    db_id = getattr(_request_ctx_stack.top, 'authenticated_db_id', None)
-    if not db_id:
-      logger.debug('No authenticated user or deferred database id.')
+    user_uuid = getattr(_request_ctx_stack.top, 'authenticated_user_uuid', None)
+    if not user_uuid:
+      logger.debug('No authenticated user or deferred database uuid.')
       return None

     logger.debug('Loading deferred authenticated user.')
-    loaded = model.get_user_by_id(db_id)
+    loaded = model.get_user_by_uuid(user_uuid)
     set_authenticated_user(loaded)
     user = loaded

@@ -30,10 +30,10 @@ def set_authenticated_user(user_or_robot):
   ctx.authenticated_user = user_or_robot


-def set_authenticated_user_deferred(user_or_robot_db_id):
-  logger.debug('Deferring loading of authenticated user object: %s', user_or_robot_db_id)
+def set_authenticated_user_deferred(user_or_robot_uuid):
+  logger.debug('Deferring loading of authenticated user object with uuid: %s', user_or_robot_uuid)
   ctx = _request_ctx_stack.top
-  ctx.authenticated_db_id = user_or_robot_db_id
+  ctx.authenticated_user_uuid = user_or_robot_uuid


 def get_validated_oauth_token():
@@ -58,8 +58,8 @@ SCOPE_MAX_USER_ROLES.update({


 class QuayDeferredPermissionUser(Identity):
-  def __init__(self, db_id, auth_type, scopes):
-    super(QuayDeferredPermissionUser, self).__init__(db_id, auth_type)
+  def __init__(self, uuid, auth_type, scopes):
+    super(QuayDeferredPermissionUser, self).__init__(uuid, auth_type)

     self._permissions_loaded = False
     self._scope_set = scopes
@@ -88,7 +88,7 @@ class QuayDeferredPermissionUser(Identity):
   def can(self, permission):
     if not self._permissions_loaded:
       logger.debug('Loading user permissions after deferring.')
-      user_object = model.get_user_by_id(self.id)
+      user_object = model.get_user_by_uuid(self.id)

       # Add the superuser need, if applicable.
       if (user_object.username is not None and
@@ -228,11 +228,11 @@ def on_identity_loaded(sender, identity):
   # We have verified an identity, load in all of the permissions

   if isinstance(identity, QuayDeferredPermissionUser):
-    logger.debug('Deferring permissions for user: %s', identity.id)
+    logger.debug('Deferring permissions for user with uuid: %s', identity.id)

-  elif identity.auth_type == 'user_db_id':
-    logger.debug('Switching username permission to deferred object: %s', identity.id)
-    switch_to_deferred = QuayDeferredPermissionUser(identity.id, 'user_db_id', {scopes.DIRECT_LOGIN})
+  elif identity.auth_type == 'user_uuid':
+    logger.debug('Switching username permission to deferred object with uuid: %s', identity.id)
+    switch_to_deferred = QuayDeferredPermissionUser(identity.id, 'user_uuid', {scopes.DIRECT_LOGIN})
     identity_changed.send(app, identity=switch_to_deferred)

   elif identity.auth_type == 'token':
@@ -3,6 +3,3 @@ set -e

 # Run the database migration
 PYTHONPATH=. venv/bin/alembic upgrade head
-
-# Run the uncompressed size migration
-PYTHONPATH=. venv/bin/python -m util.uncompressedsize
@@ -137,6 +137,7 @@ class BaseModel(ReadSlaveModel):


 class User(BaseModel):
+  uuid = CharField(default=uuid_generator, max_length=36, null=True)
   username = CharField(unique=True, index=True)
   password_hash = CharField(null=True)
   email = CharField(unique=True, index=True,
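Note: the uuid_generator default referenced above is presumably a thin wrapper over the standard library; a minimal sketch, under that assumption (the real helper in the quay codebase may differ):

import uuid

def uuid_generator():
  # Return a random UUID as a 36-character string, matching the CharField's max_length.
  return str(uuid.uuid4())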
@ -13,24 +13,8 @@ from app import app
|
||||||
from data.model.sqlalchemybridge import gen_sqlalchemy_metadata
|
from data.model.sqlalchemybridge import gen_sqlalchemy_metadata
|
||||||
from util.morecollections import AttrDict
|
from util.morecollections import AttrDict
|
||||||
|
|
||||||
# this is the Alembic Config object, which provides
|
|
||||||
# access to the values within the .ini file in use.
|
|
||||||
db_uri = unquote(app.config['DB_URI'])
|
|
||||||
if 'GENMIGRATE' in os.environ:
|
|
||||||
docker_host = os.environ.get('DOCKER_HOST')
|
|
||||||
docker_host_ip = docker_host[len('tcp://'):].split(':')[0]
|
|
||||||
if os.environ.get('GENMIGRATE') == 'mysql':
|
|
||||||
db_uri = 'mysql+pymysql://root:password@%s/genschema' % (docker_host_ip)
|
|
||||||
else:
|
|
||||||
db_uri = 'postgresql://postgres@%s/genschema' % (docker_host_ip)
|
|
||||||
|
|
||||||
if 'DB_URI' in os.environ:
|
|
||||||
db_uri = os.environ['DB_URI']
|
|
||||||
|
|
||||||
app.config['DB_URI'] = db_uri
|
|
||||||
|
|
||||||
config = context.config
|
config = context.config
|
||||||
config.set_main_option('sqlalchemy.url', db_uri)
|
config.set_main_option('sqlalchemy.url', unquote(app.config['DB_URI']))
|
||||||
|
|
||||||
# Interpret the config file for Python logging.
|
# Interpret the config file for Python logging.
|
||||||
# This line sets up loggers basically.
|
# This line sets up loggers basically.
|
||||||
|
@@ -86,7 +70,8 @@ def run_migrations_online():
   connection = engine.connect()
   context.configure(
     connection=connection,
-    target_metadata=target_metadata
+    target_metadata=target_metadata,
+    transactional_ddl=False,
   )

   try:
@@ -1,5 +1,9 @@
 set -e

+DOCKER_IP=`echo $DOCKER_HOST | sed 's/tcp:\/\///' | sed 's/:.*//'`
+MYSQL_CONFIG_OVERRIDE="{\"DB_URI\":\"mysql+pymysql://root:password@$DOCKER_IP/genschema\"}"
+PGSQL_CONFIG_OVERRIDE="{\"DB_URI\":\"postgresql://postgres@$DOCKER_IP/genschema\"}"
+
 up_mysql() {
   # Run a SQL database on port 3306 inside of Docker.
   docker run --name mysql -p 3306:3306 -e MYSQL_ROOT_PASSWORD=password -d mysql
@@ -36,19 +40,19 @@ down_postgres() {

 gen_migrate() {
   # Generate a SQLite database with the schema as defined by the existing alembic model.
-  GENMIGRATE=$1 PYTHONPATH=. alembic upgrade head
+  QUAY_OVERRIDE_CONFIG=$1 PYTHONPATH=. alembic upgrade head

   # Generate the migration to the current model.
-  GENMIGRATE=$1 PYTHONPATH=. alembic revision --autogenerate -m "$2"
+  QUAY_OVERRIDE_CONFIG=$1 PYTHONPATH=. alembic revision --autogenerate -m "$2"
 }

 test_migrate() {
   # Generate a SQLite database with the schema as defined by the existing alembic model.
-  GENMIGRATE=$1 PYTHONPATH=. alembic upgrade head
+  QUAY_OVERRIDE_CONFIG=$1 PYTHONPATH=. alembic upgrade head

   # Downgrade to verify it works in both directions.
   COUNT=`ls data/migrations/versions/*.py | wc -l | tr -d ' '`
-  GENMIGRATE=$1 PYTHONPATH=. alembic downgrade "-$COUNT"
+  QUAY_OVERRIDE_CONFIG=$1 PYTHONPATH=. alembic downgrade "-$COUNT"
 }

 # Test (and generate, if requested) via MySQL.
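Note: the migration scripts now pass a JSON blob of config overrides instead of the old GENMIGRATE flag. A minimal sketch of how a QUAY_OVERRIDE_CONFIG value like this could be merged into the app config; the actual hook in quay's config loading may differ:

import json
import os

def apply_config_overrides(config):
  # QUAY_OVERRIDE_CONFIG carries a JSON object, e.g. {"DB_URI": "mysql+pymysql://..."}.
  overrides = json.loads(os.environ.get('QUAY_OVERRIDE_CONFIG', '{}'))
  config.update(overrides)
  return config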
@@ -59,13 +63,13 @@ if [ ! -z "$@" ]
 then
   set +e
   echo '> Generating Migration'
-  gen_migrate "mysql" "$@"
+  gen_migrate $MYSQL_CONFIG_OVERRIDE "$@"
   set -e
 fi

 echo '> Testing Migration (mysql)'
 set +e
-test_migrate "mysql"
+test_migrate $MYSQL_CONFIG_OVERRIDE
 set -e
 down_mysql

@@ -75,8 +79,6 @@ up_postgres

 echo '> Testing Migration (postgres)'
 set +e
-test_migrate "postgres"
+test_migrate $PGSQL_CONFIG_OVERRIDE
 set -e
 down_postgres

@@ -0,0 +1,25 @@
+"""add uuid field to user
+
+Revision ID: 17f11e265e13
+Revises: 313d297811c4
+Create Date: 2014-11-11 14:32:54.866188
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '17f11e265e13'
+down_revision = '313d297811c4'
+
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import mysql
+from util.backfill_user_uuids import backfill_user_uuids
+
+
+def upgrade(tables):
+  op.add_column('user', sa.Column('uuid', sa.String(length=36), nullable=True))
+  backfill_user_uuids()
+
+
+def downgrade(tables):
+  op.drop_column('user', 'uuid')
@@ -16,9 +16,7 @@ from util.uncompressedsize import backfill_sizes_from_data


 def upgrade(tables):
-  # Note: Doing non-alembic operations inside alembic can cause a deadlock. This call has been
-  # moved to runmigration.sh.
-  pass
+  backfill_sizes_from_data()


 def downgrade(tables):
   pass
@@ -0,0 +1,59 @@
+"""Translate the queue names to reference namespace by id, remove the namespace column.
+
+Revision ID: 2430f55c41d5
+Revises: 17f11e265e13
+Create Date: 2014-09-30 17:31:33.308490
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '2fb36d4be80d'
+down_revision = '17f11e265e13'
+
+from alembic import op
+import sqlalchemy as sa
+
+import re
+from app import app
+from data.database import QueueItem, User, db
+
+
+NAMESPACE_EXTRACTOR = re.compile(r'^([a-z]+/)([a-z0-9_]+)(/.*$)')
+
+
+def upgrade(tables):
+  # Rename the namespace component of the existing queue items to reference user ids
+  with app.config['DB_TRANSACTION_FACTORY'](db):
+    for item in QueueItem.select():
+      namespace_match = NAMESPACE_EXTRACTOR.match(item.queue_name)
+      if namespace_match is not None:
+        namespace_name = namespace_match.group(2)
+        namespace_user = User.get(User.username == namespace_name)
+        item.queue_name = '%s%s%s' % (namespace_match.group(1), str(namespace_user.id),
+                                      namespace_match.group(3))
+        item.save()
+      else:
+        raise RuntimeError('Invalid queue name: %s' % item.queue_name)
+
+  op.create_index('repository_namespace_user_id', 'repository', ['namespace_user_id'], unique=False)
+  op.drop_column('repository', 'namespace')
+
+
+def downgrade(tables):
+  # Add the namespace column back in and fill it in
+  op.add_column('repository', sa.Column('namespace', sa.String(length=255)))
+  op.drop_index('repository_namespace_user_id', table_name='repository')
+
+  # Rename the namespace component of existing queue items to reference namespace strings
+  with app.config['DB_TRANSACTION_FACTORY'](db):
+    for item in QueueItem.select():
+      namespace_match = NAMESPACE_EXTRACTOR.match(item.queue_name)
+      if namespace_match is not None:
+        namespace_id = namespace_match.group(2)
+        namespace_user = User.get(User.id == namespace_id)
+        item.queue_name = '%s%s%s' % (namespace_match.group(1),
+                                      str(namespace_user.username),
+                                      namespace_match.group(3))
+        item.save()
+      else:
+        raise RuntimeError('Invalid queue name: %s' % item.queue_name)
@@ -18,7 +18,7 @@ from data.database import (User, Repository, Image, AccessToken, Role, Repositor
 from peewee import JOIN_LEFT_OUTER, fn
 from util.validation import (validate_username, validate_email, validate_password,
                              INVALID_PASSWORD_MESSAGE)
-from util.names import format_robot_username
+from util.names import format_robot_username, parse_robot_username
 from util.backoff import exponential_backoff


@@ -294,11 +294,17 @@ def delete_robot(robot_username):
                                     robot_username)


-def list_entity_robots(entity_name):
-  selected = User.select(User.username, FederatedLogin.service_ident)
-  joined = selected.join(FederatedLogin)
-  return joined.where(User.robot == True,
-                      User.username ** (entity_name + '+%')).tuples()
+def _list_entity_robots(entity_name):
+  return (User
+          .select()
+          .join(FederatedLogin)
+          .where(User.robot == True, User.username ** (entity_name + '+%')))
+
+
+def list_entity_robot_tuples(entity_name):
+  return (_list_entity_robots(entity_name)
+          .select(User.username, FederatedLogin.service_ident)
+          .tuples())


 def convert_user_to_organization(user, admin_user):
@@ -601,6 +607,20 @@ def get_user_by_id(user_db_id):
     return None


+def get_namespace_by_user_id(namespace_user_db_id):
+  try:
+    return User.get(User.id == namespace_user_db_id, User.robot == False).username
+  except User.DoesNotExist:
+    raise InvalidUsernameException('User with id does not exist: %s' % namespace_user_db_id)
+
+
+def get_user_by_uuid(user_uuid):
+  try:
+    return User.get(User.uuid == user_uuid, User.organization == False)
+  except User.DoesNotExist:
+    return None
+
+
 def get_user_or_org_by_customer_id(customer_id):
   try:
     return User.get(User.stripe_id == customer_id)
@@ -879,6 +899,24 @@ def change_password(user, new_password):
   delete_notifications_by_kind(user, 'password_required')


+def change_username(user, new_username):
+  (username_valid, username_issue) = validate_username(new_username)
+  if not username_valid:
+    raise InvalidUsernameException('Invalid username %s: %s' % (new_username, username_issue))
+
+  with config.app_config['DB_TRANSACTION_FACTORY'](db):
+    # Rename the robots
+    for robot in _list_entity_robots(user.username):
+      _, robot_shortname = parse_robot_username(robot.username)
+      new_robot_name = format_robot_username(new_username, robot_shortname)
+      robot.username = new_robot_name
+      robot.save()
+
+    # Rename the user
+    user.username = new_username
+    user.save()
+
+
 def change_invoice_email(user, invoice_email):
   user.invoice_email = invoice_email
   user.save()
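Note: robot account usernames are of the form namespace+shortname (see the `entity_name + '+%'` prefix match above), so change_username renames robots by splitting and rejoining on the '+'. A minimal sketch of the two util.names helpers, assuming that format; the real implementations may differ:

def parse_robot_username(robot_username):
  # 'alice+builder' -> ('alice', 'builder')
  return tuple(robot_username.split('+', 1))


def format_robot_username(username, shortname):
  # ('alice', 'builder') -> 'alice+builder'
  return '%s+%s' % (username, shortname)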
@@ -1878,10 +1916,21 @@ def load_token_data(code):
     raise InvalidTokenException('Invalid delegate token code: %s' % code)


-def get_repository_build(namespace_name, repository_name, build_uuid):
+def _get_build_base_query():
+  return (RepositoryBuild
+          .select(RepositoryBuild, RepositoryBuildTrigger, BuildTriggerService, Repository,
+                  Namespace)
+          .join(Repository)
+          .join(Namespace, on=(Repository.namespace_user == Namespace.id))
+          .switch(RepositoryBuild)
+          .join(RepositoryBuildTrigger, JOIN_LEFT_OUTER)
+          .join(BuildTriggerService, JOIN_LEFT_OUTER)
+          .order_by(RepositoryBuild.started.desc()))
+
+
+def get_repository_build(build_uuid):
   try:
-    query = list_repository_builds(namespace_name, repository_name, 1)
-    return query.where(RepositoryBuild.uuid == build_uuid).get()
+    return _get_build_base_query().where(RepositoryBuild.uuid == build_uuid).get()
   except RepositoryBuild.DoesNotExist:
     msg = 'Unable to locate a build by id: %s' % build_uuid
@@ -1890,15 +1939,8 @@ def get_repository_build(namespace_name, repository_name, build_uuid):

 def list_repository_builds(namespace_name, repository_name, limit,
                            include_inactive=True):
-  query = (RepositoryBuild
-           .select(RepositoryBuild, RepositoryBuildTrigger, BuildTriggerService)
-           .join(Repository)
-           .join(Namespace, on=(Repository.namespace_user == Namespace.id))
-           .switch(RepositoryBuild)
-           .join(RepositoryBuildTrigger, JOIN_LEFT_OUTER)
-           .join(BuildTriggerService, JOIN_LEFT_OUTER)
+  query = (_get_build_base_query()
           .where(Repository.name == repository_name, Namespace.username == namespace_name)
-           .order_by(RepositoryBuild.started.desc())
           .limit(limit))

   if not include_inactive:
@@ -1962,21 +2004,23 @@ def create_repo_notification(repo, event_name, method_name, config):
                                        config_json=json.dumps(config))


-def get_repo_notification(namespace_name, repository_name, uuid):
+def get_repo_notification(uuid):
   try:
     return (RepositoryNotification
             .select(RepositoryNotification, Repository, Namespace)
             .join(Repository)
             .join(Namespace, on=(Repository.namespace_user == Namespace.id))
-            .where(Namespace.username == namespace_name, Repository.name == repository_name,
-                   RepositoryNotification.uuid == uuid)
+            .where(RepositoryNotification.uuid == uuid)
             .get())
   except RepositoryNotification.DoesNotExist:
     raise InvalidNotificationException('No repository notification found with id: %s' % uuid)


 def delete_repo_notification(namespace_name, repository_name, uuid):
-  found = get_repo_notification(namespace_name, repository_name, uuid)
+  found = get_repo_notification(uuid)
+  if (found.repository.namespace_user.username != namespace_name or
+      found.repository.name != repository_name):
+    raise InvalidNotificationException('No repository notifiation found with id: %s' % uuid)
   found.delete_instance()
   return found

@@ -2035,7 +2079,7 @@ def create_build_trigger(repo, service_name, auth_token, user, pull_robot=None):
   return trigger


-def get_build_trigger(namespace_name, repository_name, trigger_uuid):
+def get_build_trigger(trigger_uuid):
   try:
     return (RepositoryBuildTrigger
             .select(RepositoryBuildTrigger, BuildTriggerService, Repository, Namespace)
@@ -2045,9 +2089,7 @@ def get_build_trigger(namespace_name, repository_name, trigger_uuid):
             .join(Namespace, on=(Repository.namespace_user == Namespace.id))
             .switch(RepositoryBuildTrigger)
             .join(User)
-            .where(RepositoryBuildTrigger.uuid == trigger_uuid,
-                   Namespace.username == namespace_name,
-                   Repository.name == repository_name)
+            .where(RepositoryBuildTrigger.uuid == trigger_uuid)
             .get())
   except RepositoryBuildTrigger.DoesNotExist:
     msg = 'No build trigger with uuid: %s' % trigger_uuid
@@ -1,6 +1,7 @@
 from datetime import datetime, timedelta

 from data.database import QueueItem, db
+from util.morecollections import AttrDict


 MINIMUM_EXTENSION = timedelta(seconds=20)
@@ -89,41 +90,49 @@ class WorkQueue(object):

       item = None
       try:
-        item = avail.order_by(QueueItem.id).get()
-        item.available = False
-        item.processing_expires = now + timedelta(seconds=processing_time)
-        item.retries_remaining -= 1
-        item.save()
+        db_item = avail.order_by(QueueItem.id).get()
+        db_item.available = False
+        db_item.processing_expires = now + timedelta(seconds=processing_time)
+        db_item.retries_remaining -= 1
+        db_item.save()
+
+        item = AttrDict({
+          'id': db_item.id,
+          'body': db_item.body,
+        })

         self._currently_processing = True
       except QueueItem.DoesNotExist:
         self._currently_processing = False
-        pass

+    # Return a view of the queue item rather than an active db object
     return item

   def complete(self, completed_item):
     with self._transaction_factory(db):
-      completed_item.delete_instance()
+      completed_item_obj = QueueItem.get(QueueItem.id == completed_item.id)
+      completed_item_obj.delete_instance()
       self._currently_processing = False

   def incomplete(self, incomplete_item, retry_after=300, restore_retry=False):
     with self._transaction_factory(db):
       retry_date = datetime.utcnow() + timedelta(seconds=retry_after)
-      incomplete_item.available_after = retry_date
-      incomplete_item.available = True
+      incomplete_item_obj = QueueItem.get(QueueItem.id == incomplete_item.id)
+      incomplete_item_obj.available_after = retry_date
+      incomplete_item_obj.available = True

       if restore_retry:
-        incomplete_item.retries_remaining += 1
+        incomplete_item_obj.retries_remaining += 1

-      incomplete_item.save()
+      incomplete_item_obj.save()
       self._currently_processing = False

-  @staticmethod
-  def extend_processing(queue_item, seconds_from_now):
+  def extend_processing(self, queue_item, seconds_from_now):
     new_expiration = datetime.utcnow() + timedelta(seconds=seconds_from_now)

     # Only actually write the new expiration to the db if it moves the expiration some minimum
-    if new_expiration - queue_item.processing_expires > MINIMUM_EXTENSION:
-      queue_item.processing_expires = new_expiration
-      queue_item.save()
+    queue_item_obj = QueueItem.get(QueueItem.id == queue_item.id)
+    if new_expiration - queue_item_obj.processing_expires > MINIMUM_EXTENSION:
+      with self._transaction_factory(db):
+        queue_item_obj.processing_expires = new_expiration
+        queue_item_obj.save()
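Note: the queue now hands callers a detached AttrDict view ({'id': ..., 'body': ...}) instead of a live peewee row, so complete/incomplete/extend_processing re-fetch the QueueItem by id before mutating it. A minimal sketch of an AttrDict, assuming it simply exposes dict keys as attributes; the real class lives in util.morecollections and may differ:

class AttrDict(dict):
  # A dict whose keys are also readable as attributes: d.id == d['id'].
  def __init__(self, *args, **kwargs):
    super(AttrDict, self).__init__(*args, **kwargs)
    self.__dict__ = self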
@@ -196,8 +196,9 @@ class RepositoryBuildStatus(RepositoryParamResource):
   @nickname('getRepoBuildStatus')
   def get(self, namespace, repository, build_uuid):
     """ Return the status for the builds specified by the build uuids. """
-    build = model.get_repository_build(namespace, repository, build_uuid)
-    if not build:
+    build = model.get_repository_build(build_uuid)
+    if (not build or build.repository.name != repository or
+        build.repository.namespace_user.username != namespace):
       raise NotFound()

     can_write = ModifyRepositoryPermission(namespace, repository).can()
@@ -213,7 +214,10 @@ class RepositoryBuildLogs(RepositoryParamResource):
     """ Return the build logs for the build specified by the build uuid. """
     response_obj = {}

-    build = model.get_repository_build(namespace, repository, build_uuid)
+    build = model.get_repository_build(build_uuid)
+    if (not build or build.repository.name != repository or
+        build.repository.namespace_user.username != namespace):
+      raise NotFound()

     # If the logs have been archived, just redirect to the completed archive
     if build.logs_archived:
@@ -102,10 +102,14 @@ class RepositoryNotification(RepositoryParamResource):
   def get(self, namespace, repository, uuid):
     """ Get information for the specified notification. """
     try:
-      notification = model.get_repo_notification(namespace, repository, uuid)
+      notification = model.get_repo_notification(uuid)
     except model.InvalidNotificationException:
       raise NotFound()

+    if (notification.repository.namespace_user.username != namespace or
+        notification.repository.name != repository):
+      raise NotFound()
+
     return notification_view(notification)

   @require_repo_admin
@@ -129,14 +133,18 @@ class TestRepositoryNotification(RepositoryParamResource):
   def post(self, namespace, repository, uuid):
     """ Queues a test notification for this repository. """
     try:
-      notification = model.get_repo_notification(namespace, repository, uuid)
+      notification = model.get_repo_notification(uuid)
     except model.InvalidNotificationException:
       raise NotFound()

+    if (notification.repository.namespace_user.username != namespace or
+        notification.repository.name != repository):
+      raise NotFound()
+
     event_info = NotificationEvent.get_event(notification.event.name)
     sample_data = event_info.get_sample_data(repository=notification.repository)
     notification_data = build_notification_data(notification, sample_data)
-    notification_queue.put([namespace, repository, notification.event.name],
-                           json.dumps(notification_data))
+    notification_queue.put([str(notification.repository.namespace_user.id), repository,
+                            notification.event.name], json.dumps(notification_data))

     return {}
@@ -24,7 +24,7 @@ class UserRobotList(ApiResource):
   def get(self):
     """ List the available robots for the user. """
     user = get_authenticated_user()
-    robots = model.list_entity_robots(user.username)
+    robots = model.list_entity_robot_tuples(user.username)
     return {
       'robots': [robot_view(name, password) for name, password in robots]
     }
@@ -73,7 +73,7 @@ class OrgRobotList(ApiResource):
     """ List the organization's robots. """
     permission = OrganizationMemberPermission(orgname)
     if permission.can():
-      robots = model.list_entity_robots(orgname)
+      robots = model.list_entity_robot_tuples(orgname)
       return {
         'robots': [robot_view(name, password) for name, password in robots]
       }
@@ -52,7 +52,7 @@ class BuildTrigger(RepositoryParamResource):
   def get(self, namespace, repository, trigger_uuid):
     """ Get information for the specified build trigger. """
     try:
-      trigger = model.get_build_trigger(namespace, repository, trigger_uuid)
+      trigger = model.get_build_trigger(trigger_uuid)
     except model.InvalidBuildTriggerException:
       raise NotFound()

@@ -63,7 +63,7 @@ class BuildTrigger(RepositoryParamResource):
   def delete(self, namespace, repository, trigger_uuid):
     """ Delete the specified build trigger. """
     try:
-      trigger = model.get_build_trigger(namespace, repository, trigger_uuid)
+      trigger = model.get_build_trigger(trigger_uuid)
     except model.InvalidBuildTriggerException:
       raise NotFound()

@@ -107,7 +107,7 @@ class BuildTriggerSubdirs(RepositoryParamResource):
   def post(self, namespace, repository, trigger_uuid):
     """ List the subdirectories available for the specified build trigger and source. """
     try:
-      trigger = model.get_build_trigger(namespace, repository, trigger_uuid)
+      trigger = model.get_build_trigger(trigger_uuid)
     except model.InvalidBuildTriggerException:
       raise NotFound()

@@ -167,7 +167,7 @@ class BuildTriggerActivate(RepositoryParamResource):
   def post(self, namespace, repository, trigger_uuid):
     """ Activate the specified build trigger. """
     try:
-      trigger = model.get_build_trigger(namespace, repository, trigger_uuid)
+      trigger = model.get_build_trigger(trigger_uuid)
     except model.InvalidBuildTriggerException:
       raise NotFound()

@@ -205,10 +205,7 @@ class BuildTriggerActivate(RepositoryParamResource):
                                            'write')

       try:
-        repository_path = '%s/%s' % (trigger.repository.namespace_user.username,
-                                     trigger.repository.name)
-        path = url_for('webhooks.build_trigger_webhook',
-                       repository=repository_path, trigger_uuid=trigger.uuid)
+        path = url_for('webhooks.build_trigger_webhook', trigger_uuid=trigger.uuid)
         authed_url = _prepare_webhook_url(app.config['PREFERRED_URL_SCHEME'], '$token', token.code,
                                           app.config['SERVER_HOSTNAME'], path)

@@ -264,7 +261,7 @@ class BuildTriggerAnalyze(RepositoryParamResource):
   def post(self, namespace, repository, trigger_uuid):
     """ Analyze the specified build trigger configuration. """
     try:
-      trigger = model.get_build_trigger(namespace, repository, trigger_uuid)
+      trigger = model.get_build_trigger(trigger_uuid)
     except model.InvalidBuildTriggerException:
       raise NotFound()

@@ -395,7 +392,7 @@ class ActivateBuildTrigger(RepositoryParamResource):
   def post(self, namespace, repository, trigger_uuid):
     """ Manually start a build from the specified trigger. """
     try:
-      trigger = model.get_build_trigger(namespace, repository, trigger_uuid)
+      trigger = model.get_build_trigger(trigger_uuid)
     except model.InvalidBuildTriggerException:
       raise NotFound()

@@ -453,7 +450,7 @@ class BuildTriggerFieldValues(RepositoryParamResource):
   def post(self, namespace, repository, trigger_uuid, field_name):
     """ List the field values for a custom run field. """
     try:
-      trigger = model.get_build_trigger(namespace, repository, trigger_uuid)
+      trigger = model.get_build_trigger(trigger_uuid)
     except model.InvalidBuildTriggerException:
       raise NotFound()

@@ -482,7 +479,7 @@ class BuildTriggerSources(RepositoryParamResource):
   def get(self, namespace, repository, trigger_uuid):
     """ List the build sources for the trigger configuration thus far. """
     try:
-      trigger = model.get_build_trigger(namespace, repository, trigger_uuid)
+      trigger = model.get_build_trigger(trigger_uuid)
     except model.InvalidBuildTriggerException:
       raise NotFound()

@@ -143,6 +143,10 @@ class User(ApiResource):
           'type': 'string',
           'description': 'The user\'s email address',
         },
+        'username': {
+          'type': 'string',
+          'description': 'The user\'s username',
+        },
       },
     },
   }
@@ -194,6 +198,14 @@ class User(ApiResource):
       else:
         model.update_email(user, new_email, auto_verify=not features.MAILING)

+      if 'username' in user_data and user_data['username'] != user.username:
+        new_username = user_data['username']
+        if model.get_user_or_org(new_username) is not None:
+          # Username already used
+          raise request_error(message='Username is already in use')
+
+        model.change_username(user, new_username)
+
     except model.InvalidPasswordException, ex:
       raise request_error(exception=ex)

@@ -85,23 +85,19 @@ def param_required(param_name):


 @login_manager.user_loader
-def load_user(user_db_id):
-  logger.debug('User loader loading deferred user id: %s' % user_db_id)
-  try:
-    user_db_id_int = int(user_db_id)
-    return _LoginWrappedDBUser(user_db_id_int)
-  except ValueError:
-    return None
+def load_user(user_uuid):
+  logger.debug('User loader loading deferred user with uuid: %s' % user_uuid)
+  return _LoginWrappedDBUser(user_uuid)


 class _LoginWrappedDBUser(UserMixin):
-  def __init__(self, user_db_id, db_user=None):
-    self._db_id = user_db_id
+  def __init__(self, user_uuid, db_user=None):
+    self._uuid = user_uuid
     self._db_user = db_user

   def db_user(self):
     if not self._db_user:
-      self._db_user = model.get_user_by_id(self._db_id)
+      self._db_user = model.get_user_by_uuid(self._uuid)
     return self._db_user

   def is_authenticated(self):
@@ -111,13 +107,13 @@ class _LoginWrappedDBUser(UserMixin):
     return self.db_user().verified

   def get_id(self):
-    return unicode(self._db_id)
+    return unicode(self._uuid)


 def common_login(db_user):
-  if login_user(_LoginWrappedDBUser(db_user.id, db_user)):
-    logger.debug('Successfully signed in as: %s' % db_user.username)
-    new_identity = QuayDeferredPermissionUser(db_user.id, 'user_db_id', {scopes.DIRECT_LOGIN})
+  if login_user(_LoginWrappedDBUser(db_user.uuid, db_user)):
+    logger.debug('Successfully signed in as: %s (%s)' % (db_user.username, db_user.uuid))
+    new_identity = QuayDeferredPermissionUser(db_user.uuid, 'user_uuid', {scopes.DIRECT_LOGIN})
     identity_changed.send(app, identity=new_identity)
     session['login_time'] = datetime.datetime.now()
     return True
@@ -233,7 +229,7 @@ def start_build(repository, dockerfile_id, tags, build_name, subdir, manual,

   job_config = {
     'docker_tags': tags,
-    'repository': repo_path,
+    'registry': host,
     'build_subdir': subdir
   }

@@ -242,10 +238,8 @@ def start_build(repository, dockerfile_id, tags, build_name, subdir, manual,
                                              dockerfile_id, build_name,
                                              trigger, pull_robot_name=pull_robot_name)

-  dockerfile_build_queue.put([repository.namespace_user.username, repository.name], json.dumps({
+  dockerfile_build_queue.put([str(repository.namespace_user.id), repository.name], json.dumps({
     'build_uuid': build_request.uuid,
-    'namespace': repository.namespace_user.username,
-    'repository': repository.name,
     'pull_credentials': model.get_pull_credentials(pull_robot_name) if pull_robot_name else None
   }), retries_remaining=1)

@@ -45,10 +45,8 @@ def build_notification_data(notification, event_data, performer_data=None):

   return {
     'notification_uuid': notification.uuid,
-    'repository_namespace': notification.repository.namespace_user.username,
-    'repository_name': notification.repository.name,
     'event_data': event_data,
-    'performer_data': performer_data
+    'performer_data': performer_data,
   }


@@ -60,5 +58,5 @@ def spawn_notification(repo, event_name, extra_data={}, subpage=None, pathargs=[
                                                        event_name=event_name)
   for notification in list(notifications):
     notification_data = build_notification_data(notification, event_data, performer_data)
-    path = [repo.namespace_user.username, repo.name, event_name] + pathargs
+    path = [str(repo.namespace_user.id), repo.name, event_name] + pathargs
     notification_queue.put(path, json.dumps(notification_data))
@@ -258,8 +258,9 @@ def put_image_layer(namespace, repository, image_id):
   # The layer is ready for download, send a job to the work queue to
   # process it.
   profile.debug('Adding layer to diff queue')
-  image_diff_queue.put([namespace, repository, image_id], json.dumps({
-    'namespace': namespace,
+  repo = model.get_repository(namespace, repository)
+  image_diff_queue.put([str(repo.namespace_user.id), repository, image_id], json.dumps({
+    'namespace_user_id': repo.namespace_user.id,
     'repository': repository,
     'image_id': image_id,
   }))
@@ -331,8 +332,9 @@ def put_image_checksum(namespace, repository, image_id):
   # The layer is ready for download, send a job to the work queue to
   # process it.
   profile.debug('Adding layer to diff queue')
-  image_diff_queue.put([namespace, repository, image_id], json.dumps({
-    'namespace': namespace,
+  repo = model.get_repository(namespace, repository)
+  image_diff_queue.put([str(repo.namespace_user.id), repository, image_id], json.dumps({
+    'namespace_user_id': repo.namespace_user.id,
     'repository': repository,
     'image_id': image_id,
   }))
@@ -67,20 +67,23 @@ def stripe_webhook():
   return make_response('Okay')


-@webhooks.route('/push/<path:repository>/trigger/<trigger_uuid>',
-                methods=['POST'])
+@webhooks.route('/push/<path:repository>/trigger/<trigger_uuid>', methods=['POST'])
+@webhooks.route('/push/trigger/<trigger_uuid>', methods=['POST'], defaults={'repository': ''})
 @process_auth
-@parse_repository_name
-def build_trigger_webhook(namespace, repository, trigger_uuid):
-  logger.debug('Webhook received for %s/%s with uuid %s', namespace,
-               repository, trigger_uuid)
-  permission = ModifyRepositoryPermission(namespace, repository)
-  if permission.can():
-    try:
-      trigger = model.get_build_trigger(namespace, repository, trigger_uuid)
-    except model.InvalidBuildTriggerException:
-      abort(404)
+def build_trigger_webhook(trigger_uuid, **kwargs):
+  logger.debug('Webhook received with uuid %s', trigger_uuid)
+  try:
+    trigger = model.get_build_trigger(trigger_uuid)
+  except model.InvalidBuildTriggerException:
+    # It is ok to return 404 here, since letting an attacker know that a trigger UUID is valid
+    # doesn't leak anything
+    abort(404)
+
+  namespace = trigger.repository.namespace_user.username
+  repository = trigger.repository.name
+  permission = ModifyRepositoryPermission(namespace, repository)
+  if permission.can():
     handler = BuildTrigger.get_trigger_for_service(trigger.service.name)

     logger.debug('Passing webhook request to handler %s', handler)
@@ -1,5 +1,6 @@
 var TEAM_PATTERN = '^[a-zA-Z][a-zA-Z0-9]+$';
 var ROBOT_PATTERN = '^[a-zA-Z][a-zA-Z0-9]{3,29}$';
+var USER_PATTERN = '^[a-z0-9_]{4,30}$';

 $.fn.clipboardCopy = function() {
   if (zeroClipboardSupported) {
@@ -1766,6 +1766,8 @@ function UserAdminCtrl($scope, $timeout, $location, ApiService, PlanService, Use
   $scope.logsShown = 0;
   $scope.invoicesShown = 0;

+  $scope.USER_PATTERN = USER_PATTERN;
+
   $scope.loadAuthedApps = function() {
     if ($scope.authorizedApps) { return; }

@@ -1841,6 +1843,24 @@ function UserAdminCtrl($scope, $timeout, $location, ApiService, PlanService, Use
     });
   };

+  $scope.changeUsername = function() {
+    UserService.load();
+
+    $scope.updatingUser = true;
+
+    ApiService.changeUserDetails($scope.cuser).then(function() {
+      $scope.updatingUser = false;
+
+      // Reset the form.
+      delete $scope.cuser['username'];
+
+      $scope.changeUsernameForm.$setPristine();
+    }, function(result) {
+      $scope.updatingUser = false;
+      UIService.showFormError('#changeUsernameForm', result);
+    });
+  };
+
   $scope.changeEmail = function() {
     UIService.hidePopover('#changeEmailForm');

@@ -1853,7 +1873,7 @@ function UserAdminCtrl($scope, $timeout, $location, ApiService, PlanService, Use
       $scope.sentEmail = $scope.cuser.email;

       // Reset the form.
-      delete $scope.cuser['repeatEmail'];
+      delete $scope.cuser['email'];

       $scope.changeEmailForm.$setPristine();
     }, function(result) {
@@ -38,6 +38,7 @@
       <li quay-show="Features.USER_LOG_ACCESS || hasPaidBusinessPlan">
         <a href="javascript:void(0)" data-toggle="tab" data-target="#logs" ng-click="loadLogs()">Usage Logs</a>
       </li>
+      <li><a href="javascript:void(0)" data-toggle="tab" data-target="#username">Change Username</a></li>
      <li quay-show="Config.AUTHENTICATION_TYPE == 'Database'">
        <a href="javascript:void(0)" data-toggle="tab" data-target="#migrate" id="migrateTab">Convert to Organization</a>
      </li>
@@ -234,6 +235,31 @@
         <div class="billing-invoices" user="user" makevisible="invoicesShown"></div>
       </div>

+      <!-- Change username tab -->
+      <div id="username" class="tab-pane">
+        <div class="row">
+          <div class="panel">
+            <div class="panel-title">Change Username</div>
+
+            <div class="loading" ng-show="updatingUser">
+              <div class="quay-spinner 3x"></div>
+            </div>
+
+            <span class="help-block" ng-show="changeUsernameSuccess">Username changed successfully</span>
+
+            <div ng-show="!updatingUser" class="panel-body">
+              <form class="form-change col-md-6" id="changeUsernameForm" name="changeUsernameForm" ng-submit="changePassword()"
+                    ng-show="!awaitingConfirmation && !registering">
+                <input type="text" class="form-control" placeholder="Your new username" ng-model="cuser.username" required
+                       ng-pattern="/{{ USER_PATTERN }}/">
+                <button class="btn btn-danger" ng-disabled="changeUsernameForm.$invalid" type="submit"
+                        analytics-on analytics-event="change_username">Change Username</button>
+              </form>
+            </div>
+          </div>
+        </div>
+      </div>
+
       <!-- Convert to organization tab -->
       <div id="migrate" class="tab-pane" quay-show="Config.AUTHENTICATION_TYPE == 'Database'">
         <!-- Step 0 -->
Binary file not shown.
@@ -79,7 +79,7 @@ class ApiTestCase(unittest.TestCase):
     with client.session_transaction() as sess:
       if auth_username:
         loaded = model.get_user(auth_username)
-        sess['user_id'] = loaded.id
+        sess['user_id'] = loaded.uuid
         sess['login_time'] = datetime.datetime.now()
         sess[CSRF_TOKEN_KEY] = CSRF_TOKEN

@ -2342,7 +2342,7 @@ class TestBuildTriggers(ApiTestCase):
|
||||||
self.assertEquals(True, activate_json['is_active'])
|
self.assertEquals(True, activate_json['is_active'])
|
||||||
|
|
||||||
# Make sure the trigger has a write token.
|
# Make sure the trigger has a write token.
|
||||||
trigger = model.get_build_trigger(ADMIN_ACCESS_USER, 'simple', trigger.uuid)
|
trigger = model.get_build_trigger(trigger.uuid)
|
||||||
self.assertNotEquals(None, trigger.write_token)
|
self.assertNotEquals(None, trigger.write_token)
|
||||||
self.assertEquals(True, py_json.loads(trigger.config)['active'])
|
self.assertEquals(True, py_json.loads(trigger.config)['active'])
|
||||||
|
|
||||||
|
|
|
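Note: these hunks consistently drop namespace and repository arguments from the model helpers in favor of uuid-only lookups. The model side of the change is not part of this diff; the following is only a minimal sketch of what such a uuid-keyed helper could look like in peewee, assuming a unique `uuid` column on `BuildTrigger` (the names here are assumptions, not code from this commit):

    from data.database import BuildTrigger

    def get_build_trigger(trigger_uuid):
      # A unique uuid identifies the trigger on its own, so no
      # namespace/repository scoping is required for the lookup.
      return BuildTrigger.get(BuildTrigger.uuid == trigger_uuid)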
@@ -67,8 +67,7 @@ class TestBuildLogs(RedisBuildLogs):
    (phase, status) = status_wrapper

    from data import model
-   build_obj = model.get_repository_build(self.namespace, self.repository,
-                                          self.test_build_id)
+   build_obj = model.get_repository_build(self.test_build_id)
    build_obj.phase = phase
    build_obj.save()

@@ -88,8 +87,7 @@ class TestBuildLogs(RedisBuildLogs):
    total_commands = random.randint(5, 20)
    for command_num in range(1, total_commands + 1):
      command_weight = random.randint(50, 100)
-     script.append(self._generate_command(command_num, total_commands,
-                                          command_weight))
+     script.append(self._generate_command(command_num, total_commands, command_weight))

    # we want 0 logs some percent of the time
    num_logs = max(0, random.randint(-50, 400))

 54  util/backfill_user_uuids.py  Normal file

@@ -0,0 +1,54 @@
+import logging
+import uuid
+
+from data.database import User, db
+from app import app
+
+LOGGER = logging.getLogger(__name__)
+
+def backfill_user_uuids():
+  """ Generates UUIDs for any Users without them. """
+  LOGGER.setLevel(logging.DEBUG)
+  LOGGER.debug('User UUID Backfill: Began execution')
+
+
+  # Check to see if any users are missing uuids.
+  has_missing_uuids = True
+  try:
+    User.select().where(User.uuid >> None).get()
+  except User.DoesNotExist:
+    has_missing_uuids = False
+
+  if not has_missing_uuids:
+    LOGGER.debug('User UUID Backfill: No migration needed')
+    return
+
+  LOGGER.debug('User UUID Backfill: Starting migration')
+  while True:
+    batch_user_ids = list(User
+                          .select(User.id)
+                          .where(User.uuid >> None)
+                          .limit(100))
+
+    if len(batch_user_ids) == 0:
+      # There are no users left to backfill. We're done!
+      LOGGER.debug('User UUID Backfill: Backfill completed')
+      return
+
+    LOGGER.debug('User UUID Backfill: Found %s records to update', len(batch_user_ids))
+    for user_id in batch_user_ids:
+      with app.config['DB_TRANSACTION_FACTORY'](db):
+        try:
+          user = User.get(User.id == user_id)
+          user.uuid = str(uuid.uuid4())
+          user.save()
+        except User.DoesNotExist:
+          pass
+
+
+if __name__ == "__main__":
+  logging.basicConfig(level=logging.DEBUG)
+  logging.getLogger('boto').setLevel(logging.CRITICAL)
+  logging.getLogger('peewee').setLevel(logging.CRITICAL)
+
+  backfill_user_uuids()
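The script selects candidates in batches of 100 and wraps each row in its own transaction, so an interrupted run can simply be restarted, and the early-exit check makes reruns on an already-migrated database cheap. A minimal sketch of verifying a completed run, assuming the same peewee `User` model (in peewee, `User.uuid >> None` compiles to `WHERE uuid IS NULL`):

    from data.database import User
    from util.backfill_user_uuids import backfill_user_uuids

    backfill_user_uuids()

    # After a completed run, no user rows should remain without a uuid.
    remaining = User.select().where(User.uuid >> None).count()
    assert remaining == 0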
@@ -1,9 +1,8 @@
 import logging
 import zlib
-import sys

 from data import model
-from data.database import ImageStorage, configure
+from data.database import ImageStorage
 from app import app, storage as store
 from data.database import db, db_random_func
 from util.gzipstream import ZLIB_GZIP_WINDOW

@@ -20,11 +19,6 @@ def backfill_sizes_from_data():
   logger.debug('Starting uncompressed image size backfill')
   logger.debug('NOTE: This can be a LONG RUNNING OPERATION. Please wait!')

-  # Make sure we have a reference to the current DB.
-  configure(app.config)
-
-  logger.debug('Uncompressed backfill: Database configured')
-
   # Check for any uncompressed images.
   has_images = bool(list(ImageStorage
                          .select(ImageStorage.uuid)
@@ -1,30 +1,28 @@
 import logging
-import argparse

 from app import image_diff_queue
-from data.model import DataModelException
+from data import model
 from endpoints.registry import process_image_changes
 from workers.worker import Worker

-root_logger = logging.getLogger('')
-root_logger.setLevel(logging.DEBUG)
-
-FORMAT = '%(asctime)-15s - %(levelname)s - %(pathname)s - %(funcName)s - %(message)s'
-formatter = logging.Formatter(FORMAT)
-
 logger = logging.getLogger(__name__)


 class DiffsWorker(Worker):
   def process_queue_item(self, job_details):
     image_id = job_details['image_id']
-    namespace = job_details['namespace']
     repository = job_details['repository']

+    # TODO switch to the namespace_user_id branch only once existing jobs have all gone through
+    if 'namespace_user_id' in job_details:
+      namespace = model.get_namespace_by_user_id(job_details['namespace_user_id'])
+    else:
+      namespace = job_details['namespace']
+
     try:
       process_image_changes(namespace, repository, image_id)
-    except DataModelException:
+    except model.DataModelException:
       # This exception is unrecoverable, and the item should continue and be
       # marked as complete.
       msg = ('Image does not exist in database \'%s\' for repo \'%s/\'%s\'' %
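The guarded lookup lets the worker drain queue items enqueued in either format while producers move to the id-based payload. Illustrative payloads only; the keys come from this commit, but the values are made up:

    # Jobs enqueued before this change carry the namespace name directly.
    old_style_job = {'image_id': 'abc123', 'namespace': 'devtable', 'repository': 'simple'}

    # Newer jobs carry the namespace user's database id instead, which the
    # worker resolves through model.get_namespace_by_user_id().
    new_style_job = {'image_id': 'abc123', 'namespace_user_id': 42, 'repository': 'simple'}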
@@ -40,6 +40,7 @@ NO_TAGS = ['<none>:<none>']
 RESERVATION_TIME = (TIMEOUT_PERIOD_MINUTES + 5) * 60
 DOCKER_BASE_URL = os.environ.get('DOCKER_HOST', None)

+
 def matches_system_error(status_str):
   """ Returns true if the given status string matches a known system error in the
       Docker builder.
@@ -242,8 +243,7 @@ class DockerfileBuildContext(object):

       self._build_cl.login(self._pull_credentials['username'], self._pull_credentials['password'],
                            registry=self._pull_credentials['registry'], reauth=True)
-
-      # Pull the image, in case it was updated since the last build
+    else:
       self._build_logger('Pulling base image: %s' % image_and_tag, log_data={
         'phasestep': 'pull',
         'repo_url': image_and_tag
@@ -501,9 +501,7 @@ class DockerfileBuildWorker(Worker):
     # Make sure we have more information for debugging problems
     sentry.client.user_context(job_details)

-    repository_build = model.get_repository_build(job_details['namespace'],
-                                                  job_details['repository'],
-                                                  job_details['build_uuid'])
+    repository_build = model.get_repository_build(job_details['build_uuid'])

     pull_credentials = job_details.get('pull_credentials', None)

@@ -512,12 +510,18 @@ class DockerfileBuildWorker(Worker):
     resource_url = user_files.get_file_url(repository_build.resource_key, requires_cors=False)
     tag_names = job_config['docker_tags']
     build_subdir = job_config['build_subdir']
-    repo = job_config['repository']
+
+    # TODO remove the top branch when there are no more jobs with a repository config
+    if 'repository' in job_config:
+      repo = job_config['repository']
+    else:
+      repo = '%s/%s/%s' % (job_config['registry'],
+                           repository_build.repository.namespace_user.username,
+                           repository_build.repository.name)

     access_token = repository_build.access_token.code

-    log_appender = partial(build_logs.append_log_message,
-                           repository_build.uuid)
+    log_appender = partial(build_logs.append_log_message, repository_build.uuid)

     # Lookup and save the version of docker being used.
     try:

@@ -538,8 +542,7 @@ class DockerfileBuildWorker(Worker):
     log_appender('Docker version: %s' % docker_version)

-    start_msg = ('Starting job with resource url: %s repo: %s' % (resource_url,
-                                                                  repo))
+    start_msg = ('Starting job with resource url: %s repo: %s' % (resource_url, repo))
     logger.debug(start_msg)

     docker_resource = requests.get(resource_url, stream=True)
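The build worker follows the same transition pattern: older job configs embed a precomputed `repository` string, while newer ones let the worker assemble it from the registry host and the build's repository row. A hypothetical example of the fallback's output, with made-up values:

    # job_config = {'registry': 'quay.io', ...} for a build on devtable/simple
    repo = '%s/%s/%s' % ('quay.io', 'devtable', 'simple')
    # -> 'quay.io/devtable/simple'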
@@ -1,7 +1,4 @@
 import logging
-import argparse
-import requests
-import json

 from app import notification_queue
 from workers.worker import Worker

@@ -12,11 +9,6 @@ from workers.worker import JobException

 from data import model

-root_logger = logging.getLogger('')
-root_logger.setLevel(logging.DEBUG)
-
-FORMAT = '%(asctime)-15s - %(levelname)s - %(pathname)s - %(funcName)s - %(message)s'
-formatter = logging.Formatter(FORMAT)
-
 logger = logging.getLogger(__name__)

@@ -24,10 +16,8 @@ logger = logging.getLogger(__name__)
 class NotificationWorker(Worker):
   def process_queue_item(self, job_details):
     notification_uuid = job_details['notification_uuid'];
-    repo_namespace = job_details['repository_namespace']
-    repo_name = job_details['repository_name']

-    notification = model.get_repo_notification(repo_namespace, repo_name, notification_uuid)
+    notification = model.get_repo_notification(notification_uuid)
     if not notification:
       # Probably deleted.
       return