Merge branch 'master' of https://bitbucket.org/yackob03/quay
This commit is contained in: commit 3080c47ef2

33 changed files with 477 additions and 257 deletions
auth/auth.py (10 changed lines)

@@ -25,7 +25,7 @@ def _load_user_from_cookie():
   if not current_user.is_anonymous():
     logger.debug('Loading user from cookie: %s', current_user.get_id())
     set_authenticated_user_deferred(current_user.get_id())
-    loaded = QuayDeferredPermissionUser(current_user.get_id(), 'user_db_id', {scopes.DIRECT_LOGIN})
+    loaded = QuayDeferredPermissionUser(current_user.get_id(), 'user_uuid', {scopes.DIRECT_LOGIN})
     identity_changed.send(app, identity=loaded)
     return current_user.db_user()
   return None

@@ -58,7 +58,7 @@ def _validate_and_apply_oauth_token(token):
   set_authenticated_user(validated.authorized_user)
   set_validated_oauth_token(validated)

-  new_identity = QuayDeferredPermissionUser(validated.authorized_user.id, 'user_db_id', scope_set)
+  new_identity = QuayDeferredPermissionUser(validated.authorized_user.uuid, 'user_uuid', scope_set)
   identity_changed.send(app, identity=new_identity)

@@ -97,8 +97,8 @@ def process_basic_auth(auth):
       robot = model.verify_robot(credentials[0], credentials[1])
       logger.debug('Successfully validated robot: %s' % credentials[0])
       set_authenticated_user(robot)

-      deferred_robot = QuayDeferredPermissionUser(robot.id, 'user_db_id', {scopes.DIRECT_LOGIN})
+      deferred_robot = QuayDeferredPermissionUser(robot.uuid, 'user_uuid', {scopes.DIRECT_LOGIN})
       identity_changed.send(app, identity=deferred_robot)
       return
     except model.InvalidRobotException:

@@ -111,7 +111,7 @@ def process_basic_auth(auth):
       logger.debug('Successfully validated user: %s' % authenticated.username)
       set_authenticated_user(authenticated)

-      new_identity = QuayDeferredPermissionUser(authenticated.id, 'user_db_id',
+      new_identity = QuayDeferredPermissionUser(authenticated.uuid, 'user_uuid',
                                                 {scopes.DIRECT_LOGIN})
       identity_changed.send(app, identity=new_identity)
       return
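Note: every hunk in auth/auth.py makes the same substitution: the identity now carries the user's uuid under the 'user_uuid' auth type instead of the database primary key. A minimal sketch of the deferred-identity pattern these call sites rely on (names invented for illustration; the real class is QuayDeferredPermissionUser, shown further down):

class DeferredUser(object):
  def __init__(self, user_uuid, loader):
    self.id = user_uuid      # opaque uuid; safe to place in cookies and sessions
    self._loader = loader    # e.g. model.get_user_by_uuid
    self._user = None

  def user(self):
    # One database hit, deferred until the user record is actually needed.
    if self._user is None:
      self._user = self._loader(self.id)
    return self._user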
@@ -10,13 +10,13 @@ logger = logging.getLogger(__name__)

 def get_authenticated_user():
   user = getattr(_request_ctx_stack.top, 'authenticated_user', None)
   if not user:
-    db_id = getattr(_request_ctx_stack.top, 'authenticated_db_id', None)
-    if not db_id:
-      logger.debug('No authenticated user or deferred database id.')
+    user_uuid = getattr(_request_ctx_stack.top, 'authenticated_user_uuid', None)
+    if not user_uuid:
+      logger.debug('No authenticated user or deferred database uuid.')
       return None

     logger.debug('Loading deferred authenticated user.')
-    loaded = model.get_user_by_id(db_id)
+    loaded = model.get_user_by_uuid(user_uuid)
     set_authenticated_user(loaded)
     user = loaded

@@ -30,10 +30,10 @@ def set_authenticated_user(user_or_robot):
   ctx.authenticated_user = user_or_robot


-def set_authenticated_user_deferred(user_or_robot_db_id):
-  logger.debug('Deferring loading of authenticated user object: %s', user_or_robot_db_id)
+def set_authenticated_user_deferred(user_or_robot_uuid):
+  logger.debug('Deferring loading of authenticated user object with uuid: %s', user_or_robot_uuid)
   ctx = _request_ctx_stack.top
-  ctx.authenticated_db_id = user_or_robot_db_id
+  ctx.authenticated_user_uuid = user_or_robot_uuid


 def get_validated_oauth_token():
@@ -58,8 +58,8 @@ SCOPE_MAX_USER_ROLES.update({


 class QuayDeferredPermissionUser(Identity):
-  def __init__(self, db_id, auth_type, scopes):
-    super(QuayDeferredPermissionUser, self).__init__(db_id, auth_type)
+  def __init__(self, uuid, auth_type, scopes):
+    super(QuayDeferredPermissionUser, self).__init__(uuid, auth_type)

     self._permissions_loaded = False
     self._scope_set = scopes

@@ -88,14 +88,14 @@ class QuayDeferredPermissionUser(Identity):
   def can(self, permission):
     if not self._permissions_loaded:
       logger.debug('Loading user permissions after deferring.')
-      user_object = model.get_user_by_id(self.id)
+      user_object = model.get_user_by_uuid(self.id)

       # Add the superuser need, if applicable.
       if (user_object.username is not None and
           user_object.username in app.config.get('SUPER_USERS', [])):
         self.provides.add(_SuperUserNeed())

       # Add the user specific permissions, only for non-oauth permission
       user_grant = _UserNeed(user_object.username, self._user_role_for_scopes('admin'))
       logger.debug('User permission: {0}'.format(user_grant))
       self.provides.add(user_grant)

@@ -217,7 +217,7 @@ class ViewTeamPermission(Permission):
     team_admin = _TeamNeed(org_name, team_name, 'admin')
     team_creator = _TeamNeed(org_name, team_name, 'creator')
     team_member = _TeamNeed(org_name, team_name, 'member')
     admin_org = _OrganizationNeed(org_name, 'admin')
     super(ViewTeamPermission, self).__init__(team_admin, team_creator,
                                              team_member, admin_org)

@@ -228,11 +228,11 @@ def on_identity_loaded(sender, identity):
   # We have verified an identity, load in all of the permissions

   if isinstance(identity, QuayDeferredPermissionUser):
-    logger.debug('Deferring permissions for user: %s', identity.id)
+    logger.debug('Deferring permissions for user with uuid: %s', identity.id)

-  elif identity.auth_type == 'user_db_id':
-    logger.debug('Switching username permission to deferred object: %s', identity.id)
-    switch_to_deferred = QuayDeferredPermissionUser(identity.id, 'user_db_id', {scopes.DIRECT_LOGIN})
+  elif identity.auth_type == 'user_uuid':
+    logger.debug('Switching username permission to deferred object with uuid: %s', identity.id)
+    switch_to_deferred = QuayDeferredPermissionUser(identity.id, 'user_uuid', {scopes.DIRECT_LOGIN})
     identity_changed.send(app, identity=switch_to_deferred)

   elif identity.auth_type == 'token':
@@ -2,7 +2,4 @@
 set -e

 # Run the database migration
 PYTHONPATH=. venv/bin/alembic upgrade head
-
-# Run the uncompressed size migration
-PYTHONPATH=. venv/bin/python -m util.uncompressedsize
@@ -26,7 +26,7 @@ SCHEME_RANDOM_FUNCTION = {
   'mysql+pymysql': fn.Rand,
   'sqlite': fn.Random,
   'postgresql': fn.Random,
   'postgresql+psycopg2': fn.Random,
 }

 class CallableProxy(Proxy):

@@ -137,6 +137,7 @@ class BaseModel(ReadSlaveModel):


 class User(BaseModel):
+  uuid = CharField(default=uuid_generator, max_length=36, null=True)
   username = CharField(unique=True, index=True)
   password_hash = CharField(null=True)
   email = CharField(unique=True, index=True,

@@ -212,7 +213,7 @@ class FederatedLogin(BaseModel):
   user = QuayUserField(allows_robots=True, index=True)
   service = ForeignKeyField(LoginService, index=True)
   service_ident = CharField()
   metadata_json = TextField(default='{}')

   class Meta:
     database = db

@@ -250,7 +251,7 @@ class Repository(BaseModel):
     # Therefore, we define our own deletion order here and use the dependency system to verify it.
     ordered_dependencies = [RepositoryAuthorizedEmail, RepositoryTag, Image, LogEntry,
                             RepositoryBuild, RepositoryBuildTrigger, RepositoryNotification,
                             RepositoryPermission, AccessToken]

     for query, fk in self.dependencies(search_nullable=True):
       model = fk.model_class

@@ -457,7 +458,7 @@ class LogEntry(BaseModel):
   kind = ForeignKeyField(LogEntryKind, index=True)
   account = QuayUserField(index=True, related_name='account')
   performer = QuayUserField(allows_robots=True, index=True, null=True,
                             related_name='performer')
   repository = ForeignKeyField(Repository, index=True, null=True)
   datetime = DateTimeField(default=datetime.now, index=True)
   ip = CharField(null=True)

@@ -537,7 +538,7 @@ class RepositoryAuthorizedEmail(BaseModel):
     # create a unique index on email and repository
     (('email', 'repository'), True),
   )


 all_models = [User, Repository, Image, AccessToken, Role, RepositoryPermission, Visibility,
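The new User.uuid column defaults to uuid_generator, a helper not shown in this diff. Judging from the backfill script at the bottom of this commit, which assigns str(uuid.uuid4()), it is presumably a thin wrapper along these lines:

import uuid

def uuid_generator():
  # 36-character canonical form, matching max_length=36 above
  return str(uuid.uuid4())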
@@ -13,24 +13,8 @@ from app import app
 from data.model.sqlalchemybridge import gen_sqlalchemy_metadata
 from util.morecollections import AttrDict

 # this is the Alembic Config object, which provides
 # access to the values within the .ini file in use.
-db_uri = unquote(app.config['DB_URI'])
-if 'GENMIGRATE' in os.environ:
-  docker_host = os.environ.get('DOCKER_HOST')
-  docker_host_ip = docker_host[len('tcp://'):].split(':')[0]
-  if os.environ.get('GENMIGRATE') == 'mysql':
-    db_uri = 'mysql+pymysql://root:password@%s/genschema' % (docker_host_ip)
-  else:
-    db_uri = 'postgresql://postgres@%s/genschema' % (docker_host_ip)
-
-if 'DB_URI' in os.environ:
-  db_uri = os.environ['DB_URI']
-
-app.config['DB_URI'] = db_uri
-
 config = context.config
-config.set_main_option('sqlalchemy.url', db_uri)
+config.set_main_option('sqlalchemy.url', unquote(app.config['DB_URI']))

 # Interpret the config file for Python logging.
 # This line sets up loggers basically.

@@ -86,7 +70,8 @@ def run_migrations_online():
   connection = engine.connect()
   context.configure(
     connection=connection,
-    target_metadata=target_metadata
+    target_metadata=target_metadata,
+    transactional_ddl=False,
   )

   try:
@@ -1,4 +1,8 @@
 set -e
+
+DOCKER_IP=`echo $DOCKER_HOST | sed 's/tcp:\/\///' | sed 's/:.*//'`
+MYSQL_CONFIG_OVERRIDE="{\"DB_URI\":\"mysql+pymysql://root:password@$DOCKER_IP/genschema\"}"
+PGSQL_CONFIG_OVERRIDE="{\"DB_URI\":\"postgresql://postgres@$DOCKER_IP/genschema\"}"

 up_mysql() {
   # Run a SQL database on port 3306 inside of Docker.

@@ -36,19 +40,19 @@ down_postgres() {

 gen_migrate() {
   # Generate a SQLite database with the schema as defined by the existing alembic model.
-  GENMIGRATE=$1 PYTHONPATH=. alembic upgrade head
+  QUAY_OVERRIDE_CONFIG=$1 PYTHONPATH=. alembic upgrade head

   # Generate the migration to the current model.
-  GENMIGRATE=$1 PYTHONPATH=. alembic revision --autogenerate -m "$2"
+  QUAY_OVERRIDE_CONFIG=$1 PYTHONPATH=. alembic revision --autogenerate -m "$2"
 }

 test_migrate() {
   # Generate a SQLite database with the schema as defined by the existing alembic model.
-  GENMIGRATE=$1 PYTHONPATH=. alembic upgrade head
+  QUAY_OVERRIDE_CONFIG=$1 PYTHONPATH=. alembic upgrade head

   # Downgrade to verify it works in both directions.
   COUNT=`ls data/migrations/versions/*.py | wc -l | tr -d ' '`
-  GENMIGRATE=$1 PYTHONPATH=. alembic downgrade "-$COUNT"
+  QUAY_OVERRIDE_CONFIG=$1 PYTHONPATH=. alembic downgrade "-$COUNT"
 }

 # Test (and generate, if requested) via MySQL.

@@ -59,13 +63,13 @@ if [ ! -z "$@" ]
 then
   set +e
   echo '> Generating Migration'
-  gen_migrate "mysql" "$@"
+  gen_migrate $MYSQL_CONFIG_OVERRIDE "$@"
   set -e
 fi

 echo '> Testing Migration (mysql)'
 set +e
-test_migrate "mysql"
+test_migrate $MYSQL_CONFIG_OVERRIDE
 set -e
 down_mysql

@@ -75,8 +79,6 @@ up_postgres

 echo '> Testing Migration (postgres)'
 set +e
-test_migrate "postgres"
+test_migrate $PGSQL_CONFIG_OVERRIDE
 set -e
 down_postgres
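The script now passes a JSON blob through QUAY_OVERRIDE_CONFIG instead of the old GENMIGRATE flag. The hook that consumes the variable is not part of this diff; a plausible sketch of such an override, assuming the value is a JSON object whose keys replace entries in the loaded app config:

import json
import os

def apply_env_overrides(config):
  overrides = os.environ.get('QUAY_OVERRIDE_CONFIG')
  if overrides:
    config.update(json.loads(overrides))  # e.g. {"DB_URI": "mysql+pymysql://..."}
  return config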
@@ -0,0 +1,25 @@
+"""add uuid field to user
+
+Revision ID: 17f11e265e13
+Revises: 313d297811c4
+Create Date: 2014-11-11 14:32:54.866188
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '17f11e265e13'
+down_revision = '313d297811c4'
+
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import mysql
+from util.backfill_user_uuids import backfill_user_uuids
+
+
+def upgrade(tables):
+  op.add_column('user', sa.Column('uuid', sa.String(length=36), nullable=True))
+  backfill_user_uuids()
+
+
+def downgrade(tables):
+  op.drop_column('user', 'uuid')
@@ -16,9 +16,7 @@ from util.uncompressedsize import backfill_sizes_from_data


 def upgrade(tables):
-  # Note: Doing non-alembic operations inside alembic can cause a deadlock. This call has been
-  # moved to runmigration.sh.
-  pass
+  backfill_sizes_from_data()

 def downgrade(tables):
   pass
@@ -0,0 +1,59 @@
+"""Translate the queue names to reference namespace by id, remove the namespace column.
+
+Revision ID: 2430f55c41d5
+Revises: 17f11e265e13
+Create Date: 2014-09-30 17:31:33.308490
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '2fb36d4be80d'
+down_revision = '17f11e265e13'
+
+from alembic import op
+import sqlalchemy as sa
+
+import re
+from app import app
+from data.database import QueueItem, User, db
+
+
+NAMESPACE_EXTRACTOR = re.compile(r'^([a-z]+/)([a-z0-9_]+)(/.*$)')
+
+
+def upgrade(tables):
+  # Rename the namespace component of the existing queue items to reference user ids
+  with app.config['DB_TRANSACTION_FACTORY'](db):
+    for item in QueueItem.select():
+      namespace_match = NAMESPACE_EXTRACTOR.match(item.queue_name)
+      if namespace_match is not None:
+        namespace_name = namespace_match.group(2)
+        namespace_user = User.get(User.username == namespace_name)
+        item.queue_name = '%s%s%s' % (namespace_match.group(1), str(namespace_user.id),
+                                      namespace_match.group(3))
+        item.save()
+      else:
+        raise RuntimeError('Invalid queue name: %s' % item.queue_name)
+
+  op.create_index('repository_namespace_user_id', 'repository', ['namespace_user_id'], unique=False)
+  op.drop_column('repository', 'namespace')
+
+
+def downgrade(tables):
+  # Add the namespace column back in and fill it in
+  op.add_column('repository', sa.Column('namespace', sa.String(length=255)))
+  op.drop_index('repository_namespace_user_id', table_name='repository')
+
+  # Rename the namespace component of existing queue items to reference namespace strings
+  with app.config['DB_TRANSACTION_FACTORY'](db):
+    for item in QueueItem.select():
+      namespace_match = NAMESPACE_EXTRACTOR.match(item.queue_name)
+      if namespace_match is not None:
+        namespace_id = namespace_match.group(2)
+        namespace_user = User.get(User.id == namespace_id)
+        item.queue_name = '%s%s%s' % (namespace_match.group(1),
+                                      str(namespace_user.username),
+                                      namespace_match.group(3))
+        item.save()
+      else:
+        raise RuntimeError('Invalid queue name: %s' % item.queue_name)
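A worked example of the rename this migration performs (the queue prefix and user id are made up for illustration):

import re

NAMESPACE_EXTRACTOR = re.compile(r'^([a-z]+/)([a-z0-9_]+)(/.*$)')

match = NAMESPACE_EXTRACTOR.match('dockerfilebuild/devtable/some-item')
renamed = '%s%s%s' % (match.group(1), '42', match.group(3))
print renamed  # dockerfilebuild/42/some-item: the mutable username gives way to a stable id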
@@ -18,7 +18,7 @@ from data.database import (User, Repository, Image, AccessToken, Role, Repositor
 from peewee import JOIN_LEFT_OUTER, fn
 from util.validation import (validate_username, validate_email, validate_password,
                              INVALID_PASSWORD_MESSAGE)
-from util.names import format_robot_username
+from util.names import format_robot_username, parse_robot_username
 from util.backoff import exponential_backoff


@@ -132,7 +132,7 @@ def create_user(username, password, email, auto_verify=False):

   created = _create_user(username, email)
   created.password_hash = hash_password(password)
   created.verified = auto_verify
   created.save()

   return created

@@ -194,7 +194,7 @@ def create_organization(name, email, creating_user):
     return new_org
   except InvalidUsernameException:
     msg = ('Invalid organization name: %s Organization names must consist ' +
            'solely of lower case letters, numbers, and underscores. ' +
            '[a-z0-9_]') % name
     raise InvalidOrganizationException(msg)

@@ -294,11 +294,17 @@ def delete_robot(robot_username):
                              robot_username)


-def list_entity_robots(entity_name):
-  selected = User.select(User.username, FederatedLogin.service_ident)
-  joined = selected.join(FederatedLogin)
-  return joined.where(User.robot == True,
-                      User.username ** (entity_name + '+%')).tuples()
+def _list_entity_robots(entity_name):
+  return (User
+          .select()
+          .join(FederatedLogin)
+          .where(User.robot == True, User.username ** (entity_name + '+%')))
+
+
+def list_entity_robot_tuples(entity_name):
+  return (_list_entity_robots(entity_name)
+          .select(User.username, FederatedLogin.service_ident)
+          .tuples())


 def convert_user_to_organization(user, admin_user):

@@ -374,7 +380,7 @@ def remove_team(org_name, team_name, removed_by_username):
 def add_or_invite_to_team(inviter, team, user=None, email=None, requires_invite=True):
   # If the user is a member of the organization, then we simply add the
   # user directly to the team. Otherwise, an invite is created for the user/email.
   # We return None if the user was directly added and the invite object if the user was invited.
   if user and requires_invite:
     orgname = team.organization.username

@@ -384,7 +390,7 @@ def add_or_invite_to_team(inviter, team, user=None, email=None, requires_invite=
       if not user.username.startswith(orgname + '+'):
         raise InvalidTeamMemberException('Cannot add the specified robot to this team, ' +
                                          'as it is not a member of the organization')
     else:
       Org = User.alias()
       found = User.select(User.username)
       found = found.where(User.username == user.username).join(TeamMember).join(Team)

@@ -519,7 +525,7 @@ def confirm_user_email(code):
     code = EmailConfirmation.get(EmailConfirmation.code == code,
                                  EmailConfirmation.email_confirm == True)
   except EmailConfirmation.DoesNotExist:
     raise DataModelException('Invalid email confirmation code.')

   user = code.user
   user.verified = True

@@ -528,11 +534,11 @@ def confirm_user_email(code):
   new_email = code.new_email
   if new_email:
     if find_user_by_email(new_email):
       raise DataModelException('E-mail address already used.')

     old_email = user.email
     user.email = new_email

   user.save()

   code.delete_instance()

@@ -601,13 +607,27 @@ def get_user_by_id(user_db_id):
     return None


+def get_namespace_by_user_id(namespace_user_db_id):
+  try:
+    return User.get(User.id == namespace_user_db_id, User.robot == False).username
+  except User.DoesNotExist:
+    raise InvalidUsernameException('User with id does not exist: %s' % namespace_user_db_id)
+
+
+def get_user_by_uuid(user_uuid):
+  try:
+    return User.get(User.uuid == user_uuid, User.organization == False)
+  except User.DoesNotExist:
+    return None
+
+
 def get_user_or_org_by_customer_id(customer_id):
   try:
     return User.get(User.stripe_id == customer_id)
   except User.DoesNotExist:
     return None

 def get_matching_teams(team_prefix, organization):
   query = Team.select().where(Team.name ** (team_prefix + '%'),
                               Team.organization == organization)
   return query.limit(10)

@@ -615,13 +635,13 @@ def get_matching_teams(team_prefix, organization):

 def get_matching_users(username_prefix, robot_namespace=None,
                        organization=None):
   direct_user_query = (User.username ** (username_prefix + '%') &
                        (User.organization == False) & (User.robot == False))

   if robot_namespace:
     robot_prefix = format_robot_username(robot_namespace, username_prefix)
     direct_user_query = (direct_user_query |
                          (User.username ** (robot_prefix + '%') &
                           (User.robot == True)))

   query = (User

@@ -879,6 +899,24 @@ def change_password(user, new_password):
   delete_notifications_by_kind(user, 'password_required')


+def change_username(user, new_username):
+  (username_valid, username_issue) = validate_username(new_username)
+  if not username_valid:
+    raise InvalidUsernameException('Invalid username %s: %s' % (new_username, username_issue))
+
+  with config.app_config['DB_TRANSACTION_FACTORY'](db):
+    # Rename the robots
+    for robot in _list_entity_robots(user.username):
+      _, robot_shortname = parse_robot_username(robot.username)
+      new_robot_name = format_robot_username(new_username, robot_shortname)
+      robot.username = new_robot_name
+      robot.save()
+
+    # Rename the user
+    user.username = new_username
+    user.save()
+
+
 def change_invoice_email(user, invoice_email):
   user.invoice_email = invoice_email
   user.save()
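Hypothetical usage of the new helper (names are illustrative): renaming a user renames that user's robot accounts in the same transaction, so names built from format_robot_username stay consistent.

user = model.get_user('devtable')
model.change_username(user, 'devtable2')
# a robot previously named 'devtable+builder' is now 'devtable2+builder'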
@@ -1167,7 +1205,7 @@ def __translate_ancestry(old_ancestry, translations, repository, username, prefe
       translations[old_id] = image_in_repo.id
     return translations[old_id]

   # Select all the ancestor Docker IDs in a single query.
   old_ids = [int(id_str) for id_str in old_ancestry.split('/')[1:-1]]
   query = Image.select(Image.id, Image.docker_image_id).where(Image.id << old_ids)
   old_images = {i.id: i.docker_image_id for i in query}

@@ -1561,7 +1599,7 @@ def garbage_collect_storage(storage_id_whitelist):
                                             storage_id_whitelist,
                                             (ImageStorage, ImageStoragePlacement,
                                              ImageStorageLocation))

   paths_to_remove = placements_query_to_paths_set(placements_to_remove.clone())

   # Remove the placements for orphaned storages

@@ -1576,7 +1614,7 @@ def garbage_collect_storage(storage_id_whitelist):
   orphaned_storages = list(orphaned_storage_query(ImageStorage.select(ImageStorage.id),
                                                   storage_id_whitelist,
                                                   (ImageStorage.id,)))
   if len(orphaned_storages) > 0:
     (ImageStorage
      .delete()
      .where(ImageStorage.id << orphaned_storages)

@@ -1878,10 +1916,21 @@ def load_token_data(code):
   raise InvalidTokenException('Invalid delegate token code: %s' % code)


-def get_repository_build(namespace_name, repository_name, build_uuid):
+def _get_build_base_query():
+  return (RepositoryBuild
+          .select(RepositoryBuild, RepositoryBuildTrigger, BuildTriggerService, Repository,
+                  Namespace)
+          .join(Repository)
+          .join(Namespace, on=(Repository.namespace_user == Namespace.id))
+          .switch(RepositoryBuild)
+          .join(RepositoryBuildTrigger, JOIN_LEFT_OUTER)
+          .join(BuildTriggerService, JOIN_LEFT_OUTER)
+          .order_by(RepositoryBuild.started.desc()))
+
+
+def get_repository_build(build_uuid):
   try:
-    query = list_repository_builds(namespace_name, repository_name, 1)
-    return query.where(RepositoryBuild.uuid == build_uuid).get()
+    return _get_build_base_query().where(RepositoryBuild.uuid == build_uuid).get()

   except RepositoryBuild.DoesNotExist:
     msg = 'Unable to locate a build by id: %s' % build_uuid

@@ -1890,15 +1939,8 @@ def get_repository_build(namespace_name, repository_name, build_uuid):

 def list_repository_builds(namespace_name, repository_name, limit,
                            include_inactive=True):
-  query = (RepositoryBuild
-           .select(RepositoryBuild, RepositoryBuildTrigger, BuildTriggerService)
-           .join(Repository)
-           .join(Namespace, on=(Repository.namespace_user == Namespace.id))
-           .switch(RepositoryBuild)
-           .join(RepositoryBuildTrigger, JOIN_LEFT_OUTER)
-           .join(BuildTriggerService, JOIN_LEFT_OUTER)
-           .where(Repository.name == repository_name, Namespace.username == namespace_name)
-           .order_by(RepositoryBuild.started.desc())
-           .limit(limit))
+  query = (_get_build_base_query()
+           .where(Repository.name == repository_name, Namespace.username == namespace_name)
+           .limit(limit))

   if not include_inactive:

@@ -1932,7 +1974,7 @@ def create_repository_build(repo, access_token, job_config_obj, dockerfile_id,
 def get_pull_robot_name(trigger):
   if not trigger.pull_robot:
     return None

   return trigger.pull_robot.username


@@ -1962,21 +2004,23 @@ def create_repo_notification(repo, event_name, method_name, config):
                                         config_json=json.dumps(config))


-def get_repo_notification(namespace_name, repository_name, uuid):
+def get_repo_notification(uuid):
   try:
     return (RepositoryNotification
             .select(RepositoryNotification, Repository, Namespace)
             .join(Repository)
             .join(Namespace, on=(Repository.namespace_user == Namespace.id))
-            .where(Namespace.username == namespace_name, Repository.name == repository_name,
-                   RepositoryNotification.uuid == uuid)
+            .where(RepositoryNotification.uuid == uuid)
             .get())
   except RepositoryNotification.DoesNotExist:
     raise InvalidNotificationException('No repository notification found with id: %s' % uuid)


 def delete_repo_notification(namespace_name, repository_name, uuid):
-  found = get_repo_notification(namespace_name, repository_name, uuid)
+  found = get_repo_notification(uuid)
+  if (found.repository.namespace_user.username != namespace_name or
+      found.repository.name != repository_name):
+    raise InvalidNotificationException('No repository notification found with id: %s' % uuid)
   found.delete_instance()
   return found
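get_repo_notification above and get_build_trigger below now look rows up by uuid alone. Because the uuid is globally unique, the namespace and repository filters move out of the SQL and become an ownership check at the call site, as delete_repo_notification shows:

found = get_repo_notification(uuid)
if (found.repository.namespace_user.username != namespace_name or
    found.repository.name != repository_name):
  raise InvalidNotificationException('No repository notification found with id: %s' % uuid)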
@@ -2035,7 +2079,7 @@ def create_build_trigger(repo, service_name, auth_token, user, pull_robot=None):
   return trigger


-def get_build_trigger(namespace_name, repository_name, trigger_uuid):
+def get_build_trigger(trigger_uuid):
   try:
     return (RepositoryBuildTrigger
             .select(RepositoryBuildTrigger, BuildTriggerService, Repository, Namespace)

@@ -2045,9 +2089,7 @@ def get_build_trigger(namespace_name, repository_name, trigger_uuid):
             .join(Namespace, on=(Repository.namespace_user == Namespace.id))
             .switch(RepositoryBuildTrigger)
             .join(User)
-            .where(RepositoryBuildTrigger.uuid == trigger_uuid,
-                   Namespace.username == namespace_name,
-                   Repository.name == repository_name)
+            .where(RepositoryBuildTrigger.uuid == trigger_uuid)
             .get())
   except RepositoryBuildTrigger.DoesNotExist:
     msg = 'No build trigger with uuid: %s' % trigger_uuid

@@ -2111,14 +2153,14 @@ def list_notifications(user, kind_name=None, id_filter=None, include_dismissed=F
                                                     AdminTeamMember.team))
            .join(AdminUser, JOIN_LEFT_OUTER, on=(AdminTeamMember.user ==
                                                  AdminUser.id))
            .where((Notification.target == user) |
                   ((AdminUser.id == user) & (TeamRole.name == 'admin')))
            .order_by(Notification.created)
            .desc())

   if not include_dismissed:
     query = query.switch(Notification).where(Notification.dismissed == False)

   if kind_name:
     query = (query
              .switch(Notification)

@@ -2243,7 +2285,7 @@ def confirm_email_authorization_for_repo(code):
             .where(RepositoryAuthorizedEmail.code == code)
             .get())
   except RepositoryAuthorizedEmail.DoesNotExist:
     raise DataModelException('Invalid confirmation code.')

   found.confirmed = True
   found.save()

@@ -2275,7 +2317,7 @@ def lookup_team_invite(code, user=None):
     raise DataModelException('Invalid confirmation code.')

   if user and found.user != user:
     raise DataModelException('Invalid confirmation code.')

   return found

@@ -2295,7 +2337,7 @@ def confirm_team_invite(code, user):

   # If the invite is for a specific user, we have to confirm that here.
   if found.user is not None and found.user != user:
     message = """This invite is intended for user "%s".
Please login to that account and try again.""" % found.user.username
     raise DataModelException(message)
@@ -1,6 +1,7 @@
 from datetime import datetime, timedelta

 from data.database import QueueItem, db
+from util.morecollections import AttrDict


 MINIMUM_EXTENSION = timedelta(seconds=20)

@@ -25,17 +26,17 @@ class WorkQueue(object):

   def _running_jobs(self, now, name_match_query):
     return (QueueItem
             .select(QueueItem.queue_name)
             .where(QueueItem.available == False,
                    QueueItem.processing_expires > now,
                    QueueItem.queue_name ** name_match_query))

   def _available_jobs(self, now, name_match_query, running_query):
     return (QueueItem
             .select()
             .where(QueueItem.queue_name ** name_match_query, QueueItem.available_after <= now,
                    ((QueueItem.available == True) | (QueueItem.processing_expires <= now)),
                    QueueItem.retries_remaining > 0, ~(QueueItem.queue_name << running_query)))

   def _name_match_query(self):
     return '%s%%' % self._canonical_name([self._queue_name] + self._canonical_name_match_list)

@@ -89,41 +90,49 @@ class WorkQueue(object):

       item = None
       try:
-        item = avail.order_by(QueueItem.id).get()
-        item.available = False
-        item.processing_expires = now + timedelta(seconds=processing_time)
-        item.retries_remaining -= 1
-        item.save()
+        db_item = avail.order_by(QueueItem.id).get()
+        db_item.available = False
+        db_item.processing_expires = now + timedelta(seconds=processing_time)
+        db_item.retries_remaining -= 1
+        db_item.save()
+
+        item = AttrDict({
+          'id': db_item.id,
+          'body': db_item.body,
+        })

         self._currently_processing = True
       except QueueItem.DoesNotExist:
         self._currently_processing = False
         pass

+      # Return a view of the queue item rather than an active db object
       return item

   def complete(self, completed_item):
     with self._transaction_factory(db):
-      completed_item.delete_instance()
+      completed_item_obj = QueueItem.get(QueueItem.id == completed_item.id)
+      completed_item_obj.delete_instance()
       self._currently_processing = False

   def incomplete(self, incomplete_item, retry_after=300, restore_retry=False):
     with self._transaction_factory(db):
       retry_date = datetime.utcnow() + timedelta(seconds=retry_after)
-      incomplete_item.available_after = retry_date
-      incomplete_item.available = True
+      incomplete_item_obj = QueueItem.get(QueueItem.id == incomplete_item.id)
+      incomplete_item_obj.available_after = retry_date
+      incomplete_item_obj.available = True

       if restore_retry:
-        incomplete_item.retries_remaining += 1
+        incomplete_item_obj.retries_remaining += 1

-      incomplete_item.save()
+      incomplete_item_obj.save()
       self._currently_processing = False

-  @staticmethod
-  def extend_processing(queue_item, seconds_from_now):
+  def extend_processing(self, queue_item, seconds_from_now):
     new_expiration = datetime.utcnow() + timedelta(seconds=seconds_from_now)

     # Only actually write the new expiration to the db if it moves the expiration some minimum
-    if new_expiration - queue_item.processing_expires > MINIMUM_EXTENSION:
-      queue_item.processing_expires = new_expiration
-      queue_item.save()
+    queue_item_obj = QueueItem.get(QueueItem.id == queue_item.id)
+    if new_expiration - queue_item_obj.processing_expires > MINIMUM_EXTENSION:
+      with self._transaction_factory(db):
+        queue_item_obj.processing_expires = new_expiration
+        queue_item_obj.save()
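get() now hands workers an AttrDict snapshot holding only 'id' and 'body', and complete, incomplete, and extend_processing re-fetch the row by id before mutating it, so no live peewee object crosses the queue boundary. A minimal AttrDict in the spirit of util.morecollections (assumed shape, not the actual implementation):

class AttrDict(dict):
  def __getattr__(self, name):
    try:
      return self[name]
    except KeyError:
      raise AttributeError(name)

item = AttrDict({'id': 42, 'body': '{"build_uuid": "abc"}'})
print item.id, item.body  # attribute reads over plain dict data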
@@ -196,8 +196,9 @@ class RepositoryBuildStatus(RepositoryParamResource):
   @nickname('getRepoBuildStatus')
   def get(self, namespace, repository, build_uuid):
     """ Return the status for the builds specified by the build uuids. """
-    build = model.get_repository_build(namespace, repository, build_uuid)
-    if not build:
+    build = model.get_repository_build(build_uuid)
+    if (not build or build.repository.name != repository or
+        build.repository.namespace_user.username != namespace):
       raise NotFound()

     can_write = ModifyRepositoryPermission(namespace, repository).can()

@@ -213,7 +214,10 @@ class RepositoryBuildLogs(RepositoryParamResource):
     """ Return the build logs for the build specified by the build uuid. """
     response_obj = {}

-    build = model.get_repository_build(namespace, repository, build_uuid)
+    build = model.get_repository_build(build_uuid)
+    if (not build or build.repository.name != repository or
+        build.repository.namespace_user.username != namespace):
+      raise NotFound()

     # If the logs have been archived, just redirect to the completed archive
     if build.logs_archived:
@@ -102,10 +102,14 @@ class RepositoryNotification(RepositoryParamResource):
   def get(self, namespace, repository, uuid):
     """ Get information for the specified notification. """
     try:
-      notification = model.get_repo_notification(namespace, repository, uuid)
+      notification = model.get_repo_notification(uuid)
     except model.InvalidNotificationException:
       raise NotFound()

+    if (notification.repository.namespace_user.username != namespace or
+        notification.repository.name != repository):
+      raise NotFound()
+
     return notification_view(notification)

   @require_repo_admin

@@ -129,14 +133,18 @@ class TestRepositoryNotification(RepositoryParamResource):
   def post(self, namespace, repository, uuid):
     """ Queues a test notification for this repository. """
     try:
-      notification = model.get_repo_notification(namespace, repository, uuid)
+      notification = model.get_repo_notification(uuid)
     except model.InvalidNotificationException:
       raise NotFound()

+    if (notification.repository.namespace_user.username != namespace or
+        notification.repository.name != repository):
+      raise NotFound()
+
     event_info = NotificationEvent.get_event(notification.event.name)
     sample_data = event_info.get_sample_data(repository=notification.repository)
     notification_data = build_notification_data(notification, sample_data)
-    notification_queue.put([namespace, repository, notification.event.name],
-                           json.dumps(notification_data))
+    notification_queue.put([str(notification.repository.namespace_user.id), repository,
+                            notification.event.name], json.dumps(notification_data))

     return {}
@@ -24,7 +24,7 @@ class UserRobotList(ApiResource):
   def get(self):
     """ List the available robots for the user. """
     user = get_authenticated_user()
-    robots = model.list_entity_robots(user.username)
+    robots = model.list_entity_robot_tuples(user.username)
     return {
       'robots': [robot_view(name, password) for name, password in robots]
     }

@@ -73,7 +73,7 @@ class OrgRobotList(ApiResource):
     """ List the organization's robots. """
     permission = OrganizationMemberPermission(orgname)
     if permission.can():
-      robots = model.list_entity_robots(orgname)
+      robots = model.list_entity_robot_tuples(orgname)
       return {
         'robots': [robot_view(name, password) for name, password in robots]
       }
@@ -52,7 +52,7 @@ class BuildTrigger(RepositoryParamResource):
   def get(self, namespace, repository, trigger_uuid):
     """ Get information for the specified build trigger. """
     try:
-      trigger = model.get_build_trigger(namespace, repository, trigger_uuid)
+      trigger = model.get_build_trigger(trigger_uuid)
     except model.InvalidBuildTriggerException:
       raise NotFound()

@@ -63,7 +63,7 @@ class BuildTrigger(RepositoryParamResource):
   def delete(self, namespace, repository, trigger_uuid):
     """ Delete the specified build trigger. """
     try:
-      trigger = model.get_build_trigger(namespace, repository, trigger_uuid)
+      trigger = model.get_build_trigger(trigger_uuid)
     except model.InvalidBuildTriggerException:
       raise NotFound()

@@ -107,7 +107,7 @@ class BuildTriggerSubdirs(RepositoryParamResource):
   def post(self, namespace, repository, trigger_uuid):
     """ List the subdirectories available for the specified build trigger and source. """
     try:
-      trigger = model.get_build_trigger(namespace, repository, trigger_uuid)
+      trigger = model.get_build_trigger(trigger_uuid)
     except model.InvalidBuildTriggerException:
       raise NotFound()

@@ -167,7 +167,7 @@ class BuildTriggerActivate(RepositoryParamResource):
   def post(self, namespace, repository, trigger_uuid):
     """ Activate the specified build trigger. """
     try:
-      trigger = model.get_build_trigger(namespace, repository, trigger_uuid)
+      trigger = model.get_build_trigger(trigger_uuid)
     except model.InvalidBuildTriggerException:
       raise NotFound()

@@ -205,10 +205,7 @@ class BuildTriggerActivate(RepositoryParamResource):
                                          'write')

     try:
-      repository_path = '%s/%s' % (trigger.repository.namespace_user.username,
-                                   trigger.repository.name)
-      path = url_for('webhooks.build_trigger_webhook',
-                     repository=repository_path, trigger_uuid=trigger.uuid)
+      path = url_for('webhooks.build_trigger_webhook', trigger_uuid=trigger.uuid)
       authed_url = _prepare_webhook_url(app.config['PREFERRED_URL_SCHEME'], '$token', token.code,
                                         app.config['SERVER_HOSTNAME'], path)

@@ -264,7 +261,7 @@ class BuildTriggerAnalyze(RepositoryParamResource):
   def post(self, namespace, repository, trigger_uuid):
     """ Analyze the specified build trigger configuration. """
     try:
-      trigger = model.get_build_trigger(namespace, repository, trigger_uuid)
+      trigger = model.get_build_trigger(trigger_uuid)
     except model.InvalidBuildTriggerException:
       raise NotFound()

@@ -395,7 +392,7 @@ class ActivateBuildTrigger(RepositoryParamResource):
   def post(self, namespace, repository, trigger_uuid):
     """ Manually start a build from the specified trigger. """
     try:
-      trigger = model.get_build_trigger(namespace, repository, trigger_uuid)
+      trigger = model.get_build_trigger(trigger_uuid)
     except model.InvalidBuildTriggerException:
       raise NotFound()

@@ -453,7 +450,7 @@ class BuildTriggerFieldValues(RepositoryParamResource):
   def post(self, namespace, repository, trigger_uuid, field_name):
     """ List the field values for a custom run field. """
     try:
-      trigger = model.get_build_trigger(namespace, repository, trigger_uuid)
+      trigger = model.get_build_trigger(trigger_uuid)
     except model.InvalidBuildTriggerException:
       raise NotFound()

@@ -482,7 +479,7 @@ class BuildTriggerSources(RepositoryParamResource):
   def get(self, namespace, repository, trigger_uuid):
     """ List the build sources for the trigger configuration thus far. """
     try:
-      trigger = model.get_build_trigger(namespace, repository, trigger_uuid)
+      trigger = model.get_build_trigger(trigger_uuid)
     except model.InvalidBuildTriggerException:
       raise NotFound()
@@ -143,6 +143,10 @@ class User(ApiResource):
           'type': 'string',
           'description': 'The user\'s email address',
         },
+        'username': {
+          'type': 'string',
+          'description': 'The user\'s username',
+        },
       },
     },
   }

@@ -193,6 +197,14 @@ class User(ApiResource):
           send_change_email(user.username, user_data['email'], code.code)
         else:
           model.update_email(user, new_email, auto_verify=not features.MAILING)

+      if 'username' in user_data and user_data['username'] != user.username:
+        new_username = user_data['username']
+        if model.get_user_or_org(new_username) is not None:
+          # Username already used
+          raise request_error(message='Username is already in use')
+
+        model.change_username(user, new_username)
+
     except model.InvalidPasswordException, ex:
       raise request_error(exception=ex)
@@ -85,23 +85,19 @@ def param_required(param_name):


 @login_manager.user_loader
-def load_user(user_db_id):
-  logger.debug('User loader loading deferred user id: %s' % user_db_id)
-  try:
-    user_db_id_int = int(user_db_id)
-    return _LoginWrappedDBUser(user_db_id_int)
-  except ValueError:
-    return None
+def load_user(user_uuid):
+  logger.debug('User loader loading deferred user with uuid: %s' % user_uuid)
+  return _LoginWrappedDBUser(user_uuid)


 class _LoginWrappedDBUser(UserMixin):
-  def __init__(self, user_db_id, db_user=None):
-    self._db_id = user_db_id
+  def __init__(self, user_uuid, db_user=None):
+    self._uuid = user_uuid
     self._db_user = db_user

   def db_user(self):
     if not self._db_user:
-      self._db_user = model.get_user_by_id(self._db_id)
+      self._db_user = model.get_user_by_uuid(self._uuid)
     return self._db_user

   def is_authenticated(self):

@@ -111,13 +107,13 @@ class _LoginWrappedDBUser(UserMixin):
     return self.db_user().verified

   def get_id(self):
-    return unicode(self._db_id)
+    return unicode(self._uuid)


 def common_login(db_user):
-  if login_user(_LoginWrappedDBUser(db_user.id, db_user)):
-    logger.debug('Successfully signed in as: %s' % db_user.username)
-    new_identity = QuayDeferredPermissionUser(db_user.id, 'user_db_id', {scopes.DIRECT_LOGIN})
+  if login_user(_LoginWrappedDBUser(db_user.uuid, db_user)):
+    logger.debug('Successfully signed in as: %s (%s)' % (db_user.username, db_user.uuid))
+    new_identity = QuayDeferredPermissionUser(db_user.uuid, 'user_uuid', {scopes.DIRECT_LOGIN})
     identity_changed.send(app, identity=new_identity)
     session['login_time'] = datetime.datetime.now()
     return True

@@ -233,7 +229,7 @@ def start_build(repository, dockerfile_id, tags, build_name, subdir, manual,

   job_config = {
     'docker_tags': tags,
     'repository': repo_path,
     'registry': host,
     'build_subdir': subdir
   }

@@ -242,10 +238,8 @@ def start_build(repository, dockerfile_id, tags, build_name, subdir, manual,
                                                dockerfile_id, build_name,
                                                trigger, pull_robot_name=pull_robot_name)

-  dockerfile_build_queue.put([repository.namespace_user.username, repository.name], json.dumps({
+  dockerfile_build_queue.put([str(repository.namespace_user.id), repository.name], json.dumps({
     'build_uuid': build_request.uuid,
     'namespace': repository.namespace_user.username,
     'repository': repository.name,
     'pull_credentials': model.get_pull_credentials(pull_robot_name) if pull_robot_name else None
   }), retries_remaining=1)

@@ -281,4 +275,4 @@ def start_build(repository, dockerfile_id, tags, build_name, subdir, manual,
   spawn_notification(repository, 'build_queued', event_data,
                      subpage='build?current=%s' % build_request.uuid,
                      pathargs=['build', build_request.uuid])
   return build_request
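After this change the only value Flask-Login persists in the session is the user's uuid, so database primary keys never leave the server and a logged-in session survives a username change. The round trip, condensed into a sketch with a plain dict standing in for Flask's session object:

session = {}

def log_in(db_user):
  session['user_id'] = unicode(db_user.uuid)  # what get_id() now returns

def current_user(get_user_by_uuid):
  return get_user_by_uuid(session['user_id'])  # no integer ids in cookies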
@@ -45,10 +45,8 @@ def build_notification_data(notification, event_data, performer_data=None):

   return {
     'notification_uuid': notification.uuid,
-    'repository_namespace': notification.repository.namespace_user.username,
-    'repository_name': notification.repository.name,
     'event_data': event_data,
-    'performer_data': performer_data
+    'performer_data': performer_data,
   }


@@ -60,5 +58,5 @@ def spawn_notification(repo, event_name, extra_data={}, subpage=None, pathargs=[
                                          event_name=event_name)
   for notification in list(notifications):
     notification_data = build_notification_data(notification, event_data, performer_data)
-    path = [repo.namespace_user.username, repo.name, event_name] + pathargs
+    path = [str(repo.namespace_user.id), repo.name, event_name] + pathargs
     notification_queue.put(path, json.dumps(notification_data))
@@ -258,8 +258,9 @@ def put_image_layer(namespace, repository, image_id):
   # The layer is ready for download, send a job to the work queue to
   # process it.
   profile.debug('Adding layer to diff queue')
-  image_diff_queue.put([namespace, repository, image_id], json.dumps({
-    'namespace': namespace,
+  repo = model.get_repository(namespace, repository)
+  image_diff_queue.put([str(repo.namespace_user.id), repository, image_id], json.dumps({
+    'namespace_user_id': repo.namespace_user.id,
     'repository': repository,
     'image_id': image_id,
   }))

@@ -331,8 +332,9 @@ def put_image_checksum(namespace, repository, image_id):
   # The layer is ready for download, send a job to the work queue to
   # process it.
   profile.debug('Adding layer to diff queue')
-  image_diff_queue.put([namespace, repository, image_id], json.dumps({
-    'namespace': namespace,
+  repo = model.get_repository(namespace, repository)
+  image_diff_queue.put([str(repo.namespace_user.id), repository, image_id], json.dumps({
+    'namespace_user_id': repo.namespace_user.id,
     'repository': repository,
     'image_id': image_id,
   }))
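put_image_layer and put_image_checksum now derive the diff-queue item name from the immutable namespace user id, matching the queue-name migration earlier in this commit. Reduced to a sketch:

def diff_queue_path(repo, repository_name, image_id):
  # id-based path: renaming the namespace no longer strands queued work
  return [str(repo.namespace_user.id), repository_name, image_id]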
@@ -67,20 +67,23 @@ def stripe_webhook():
   return make_response('Okay')


-@webhooks.route('/push/<path:repository>/trigger/<trigger_uuid>',
-                methods=['POST'])
+@webhooks.route('/push/<path:repository>/trigger/<trigger_uuid>', methods=['POST'])
+@webhooks.route('/push/trigger/<trigger_uuid>', methods=['POST'], defaults={'repository': ''})
 @process_auth
 @parse_repository_name
-def build_trigger_webhook(namespace, repository, trigger_uuid):
-  logger.debug('Webhook received for %s/%s with uuid %s', namespace,
-               repository, trigger_uuid)
+def build_trigger_webhook(trigger_uuid, **kwargs):
+  logger.debug('Webhook received with uuid %s', trigger_uuid)

+  try:
+    trigger = model.get_build_trigger(trigger_uuid)
+  except model.InvalidBuildTriggerException:
+    # It is ok to return 404 here, since letting an attacker know that a trigger UUID is valid
+    # doesn't leak anything
+    abort(404)
+
+  namespace = trigger.repository.namespace_user.username
+  repository = trigger.repository.name
   permission = ModifyRepositoryPermission(namespace, repository)
   if permission.can():
-    try:
-      trigger = model.get_build_trigger(namespace, repository, trigger_uuid)
-    except model.InvalidBuildTriggerException:
-      abort(404)
-
     handler = BuildTrigger.get_trigger_for_service(trigger.service.name)

     logger.debug('Passing webhook request to handler %s', handler)
@@ -1,5 +1,6 @@
 var TEAM_PATTERN = '^[a-zA-Z][a-zA-Z0-9]+$';
 var ROBOT_PATTERN = '^[a-zA-Z][a-zA-Z0-9]{3,29}$';
+var USER_PATTERN = '^[a-z0-9_]{4,30}$';

 $.fn.clipboardCopy = function() {
   if (zeroClipboardSupported) {

@@ -1765,6 +1765,8 @@ function UserAdminCtrl($scope, $timeout, $location, ApiService, PlanService, Use
   $scope.logsShown = 0;
   $scope.invoicesShown = 0;

+  $scope.USER_PATTERN = USER_PATTERN;
+
   $scope.loadAuthedApps = function() {
     if ($scope.authorizedApps) { return; }

@@ -1841,6 +1843,24 @@ function UserAdminCtrl($scope, $timeout, $location, ApiService, PlanService, Use
     });
   };

+  $scope.changeUsername = function() {
+    UserService.load();
+
+    $scope.updatingUser = true;
+
+    ApiService.changeUserDetails($scope.cuser).then(function() {
+      $scope.updatingUser = false;
+
+      // Reset the form.
+      delete $scope.cuser['username'];
+
+      $scope.changeUsernameForm.$setPristine();
+    }, function(result) {
+      $scope.updatingUser = false;
+      UIService.showFormError('#changeUsernameForm', result);
+    });
+  };
+
   $scope.changeEmail = function() {
     UIService.hidePopover('#changeEmailForm');

@@ -1853,7 +1873,7 @@ function UserAdminCtrl($scope, $timeout, $location, ApiService, PlanService, Use
       $scope.sentEmail = $scope.cuser.email;

       // Reset the form.
       delete $scope.cuser['repeatEmail'];
       delete $scope.cuser['email'];

       $scope.changeEmailForm.$setPristine();
     }, function(result) {
@@ -38,6 +38,7 @@
             <li quay-show="Features.USER_LOG_ACCESS || hasPaidBusinessPlan">
               <a href="javascript:void(0)" data-toggle="tab" data-target="#logs" ng-click="loadLogs()">Usage Logs</a>
             </li>
+            <li><a href="javascript:void(0)" data-toggle="tab" data-target="#username">Change Username</a></li>
             <li quay-show="Config.AUTHENTICATION_TYPE == 'Database'">
               <a href="javascript:void(0)" data-toggle="tab" data-target="#migrate" id="migrateTab">Convert to Organization</a>
             </li>

@@ -234,6 +235,31 @@
           <div class="billing-invoices" user="user" makevisible="invoicesShown"></div>
         </div>

+        <!-- Change username tab -->
+        <div id="username" class="tab-pane">
+          <div class="row">
+            <div class="panel">
+              <div class="panel-title">Change Username</div>
+
+              <div class="loading" ng-show="updatingUser">
+                <div class="quay-spinner 3x"></div>
+              </div>
+
+              <span class="help-block" ng-show="changeUsernameSuccess">Username changed successfully</span>
+
+              <div ng-show="!updatingUser" class="panel-body">
+                <form class="form-change col-md-6" id="changeUsernameForm" name="changeUsernameForm" ng-submit="changeUsername()"
+                      ng-show="!awaitingConfirmation && !registering">
+                  <input type="text" class="form-control" placeholder="Your new username" ng-model="cuser.username" required
+                         ng-pattern="/{{ USER_PATTERN }}/">
+                  <button class="btn btn-danger" ng-disabled="changeUsernameForm.$invalid" type="submit"
+                          analytics-on analytics-event="change_username">Change Username</button>
+                </form>
+              </div>
+            </div>
+          </div>
+        </div>
+
         <!-- Convert to organization tab -->
         <div id="migrate" class="tab-pane" quay-show="Config.AUTHENTICATION_TYPE == 'Database'">
           <!-- Step 0 -->
Binary file not shown.
@@ -79,7 +79,7 @@ class ApiTestCase(unittest.TestCase):
     with client.session_transaction() as sess:
       if auth_username:
         loaded = model.get_user(auth_username)
-        sess['user_id'] = loaded.id
+        sess['user_id'] = loaded.uuid
       sess['login_time'] = datetime.datetime.now()
       sess[CSRF_TOKEN_KEY] = CSRF_TOKEN

@@ -93,7 +93,7 @@ class ApiTestCase(unittest.TestCase):
     final_url = self.url
     if method != 'GET' and method != 'HEAD':
       final_url = self._add_csrf(self.url)

     open_kwargs.update({
       'data': json.dumps(request_body),
       'content_type': 'application/json',

@@ -1069,7 +1069,7 @@ class TestBuildTriggerActivateSwo1BuynlargeOrgrepo(ApiTestCase):
 class TestBuildTriggerFieldValuesSwo1PublicPublicrepo(ApiTestCase):
   def setUp(self):
     ApiTestCase.setUp(self)
     self._set_url(BuildTriggerFieldValues, trigger_uuid="SWO1", repository="public/publicrepo",
                   field_name="test_field")

   def test_post_anonymous(self):

@@ -1088,7 +1088,7 @@ class TestBuildTriggerFieldValuesSwo1PublicPublicrepo(ApiTestCase):
 class TestBuildTriggerFieldValuesSwo1DevtableShared(ApiTestCase):
   def setUp(self):
     ApiTestCase.setUp(self)
     self._set_url(BuildTriggerFieldValues, trigger_uuid="SWO1", repository="devtable/shared",
                   field_name="test_field")

   def test_post_anonymous(self):

@@ -1107,7 +1107,7 @@ class TestBuildTriggerFieldValuesSwo1DevtableShared(ApiTestCase):
 class TestBuildTriggerFieldValuesSwo1BuynlargeOrgrepo(ApiTestCase):
   def setUp(self):
     ApiTestCase.setUp(self)
     self._set_url(BuildTriggerFieldValues, trigger_uuid="SWO1", repository="buynlarge/orgrepo",
                   field_name="test_field")

   def test_post_anonymous(self):

@@ -2342,7 +2342,7 @@ class TestBuildTriggers(ApiTestCase):
     self.assertEquals(True, activate_json['is_active'])

     # Make sure the trigger has a write token.
-    trigger = model.get_build_trigger(ADMIN_ACCESS_USER, 'simple', trigger.uuid)
+    trigger = model.get_build_trigger(trigger.uuid)
     self.assertNotEquals(None, trigger.write_token)
     self.assertEquals(True, py_json.loads(trigger.config)['active'])

@@ -67,8 +67,7 @@ class TestBuildLogs(RedisBuildLogs):
     (phase, status) = status_wrapper

     from data import model
-    build_obj = model.get_repository_build(self.namespace, self.repository,
-                                           self.test_build_id)
+    build_obj = model.get_repository_build(self.test_build_id)
     build_obj.phase = phase
     build_obj.save()

@@ -88,8 +87,7 @@ class TestBuildLogs(RedisBuildLogs):
     total_commands = random.randint(5, 20)
     for command_num in range(1, total_commands + 1):
       command_weight = random.randint(50, 100)
-      script.append(self._generate_command(command_num, total_commands,
-                                           command_weight))
+      script.append(self._generate_command(command_num, total_commands, command_weight))

     # we want 0 logs some percent of the time
     num_logs = max(0, random.randint(-50, 400))
util/backfill_user_uuids.py (new file, 54 lines)
@@ -0,0 +1,54 @@
+import logging
+import uuid
+
+from data.database import User, db
+from app import app
+
+LOGGER = logging.getLogger(__name__)
+
+def backfill_user_uuids():
+  """ Generates UUIDs for any Users without them. """
+  LOGGER.setLevel(logging.DEBUG)
+  LOGGER.debug('User UUID Backfill: Began execution')
+
+
+  # Check to see if any users are missing uuids.
+  has_missing_uuids = True
+  try:
+    User.select().where(User.uuid >> None).get()
+  except User.DoesNotExist:
+    has_missing_uuids = False
+
+  if not has_missing_uuids:
+    LOGGER.debug('User UUID Backfill: No migration needed')
+    return
+
+  LOGGER.debug('User UUID Backfill: Starting migration')
+  while True:
+    batch_user_ids = list(User
+                          .select(User.id)
+                          .where(User.uuid >> None)
+                          .limit(100))
+
+    if len(batch_user_ids) == 0:
+      # There are no users left to backfill. We're done!
+      LOGGER.debug('User UUID Backfill: Backfill completed')
+      return
+
+    LOGGER.debug('User UUID Backfill: Found %s records to update', len(batch_user_ids))
+    for user_id in batch_user_ids:
+      with app.config['DB_TRANSACTION_FACTORY'](db):
+        try:
+          user = User.get(User.id == user_id)
+          user.uuid = str(uuid.uuid4())
+          user.save()
+        except User.DoesNotExist:
+          pass
+
+
+if __name__ == "__main__":
+  logging.basicConfig(level=logging.DEBUG)
+  logging.getLogger('boto').setLevel(logging.CRITICAL)
+  logging.getLogger('peewee').setLevel(logging.CRITICAL)
+
+  backfill_user_uuids()
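The script batches 100 rows at a time, gives each update its own transaction, and ignores rows deleted mid-run, so it can safely be re-run until it logs completion. A quick post-run sanity check built from the same peewee expression the script uses (the >> None operator is peewee's IS NULL):

  # After the backfill, no User rows should be left with a NULL uuid.
  from data.database import User

  remaining = User.select().where(User.uuid >> None).count()
  assert remaining == 0, '%s users still missing uuids' % remaining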
@@ -1,9 +1,8 @@
 import logging
 import zlib
 import sys

 from data import model
-from data.database import ImageStorage, configure
+from data.database import ImageStorage
 from app import app, storage as store
 from data.database import db, db_random_func
 from util.gzipstream import ZLIB_GZIP_WINDOW
@@ -15,16 +14,11 @@ logger = logging.getLogger(__name__)
 CHUNK_SIZE = 5 * 1024 * 1024

 def backfill_sizes_from_data():
   logger.setLevel(logging.DEBUG)

   logger.debug('Starting uncompressed image size backfill')
   logger.debug('NOTE: This can be a LONG RUNNING OPERATION. Please wait!')

-  # Make sure we have a reference to the current DB.
-  configure(app.config)
-
-  logger.debug('Uncompressed backfill: Database configured')
-
   # Check for any uncompressed images.
   has_images = bool(list(ImageStorage
                          .select(ImageStorage.uuid)
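The imports sketch the approach: stored layer blobs are gzip-compressed, so recovering the uncompressed size means streaming each blob through a zlib decompressor in CHUNK_SIZE pieces rather than loading it whole. An illustrative core loop under that assumption; the function and stream names are not from the diff:

  # Count the bytes produced by decompressing the stored stream chunk by chunk.
  def compute_uncompressed_size(stream):
    decompressor = zlib.decompressobj(ZLIB_GZIP_WINDOW)
    size = 0
    while True:
      chunk = stream.read(CHUNK_SIZE)
      if not chunk:
        break
      size += len(decompressor.decompress(chunk))
    return size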
@@ -1,30 +1,28 @@
 import logging
 import argparse

 from app import image_diff_queue
-from data.model import DataModelException
+from data import model
 from endpoints.registry import process_image_changes
 from workers.worker import Worker


-root_logger = logging.getLogger('')
-root_logger.setLevel(logging.DEBUG)
-
-FORMAT = '%(asctime)-15s - %(levelname)s - %(pathname)s - %(funcName)s - %(message)s'
-formatter = logging.Formatter(FORMAT)

 logger = logging.getLogger(__name__)


 class DiffsWorker(Worker):
   def process_queue_item(self, job_details):
     image_id = job_details['image_id']
-    namespace = job_details['namespace']
     repository = job_details['repository']

+    # TODO switch to the namespace_user_id branch only once existing jobs have all gone through
+    if 'namespace_user_id' in job_details:
+      namespace = model.get_namespace_by_user_id(job_details['namespace_user_id'])
+    else:
+      namespace = job_details['namespace']

     try:
       process_image_changes(namespace, repository, image_id)
-    except DataModelException:
+    except model.DataModelException:
       # This exception is unrecoverable, and the item should continue and be
       # marked as complete.
       msg = ('Image does not exist in database \'%s\' for repo \'%s/%s\'' %
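The new branch implies producers have started enqueuing a namespace_user_id instead of a bare namespace string; the worker accepts both shapes until old jobs drain, at which point the legacy branch can be deleted per the TODO. Illustrative payloads only, with made-up values; the enqueue call itself is not part of this diff:

  # New-style job body: the worker resolves the namespace from the user id.
  new_job = {'namespace_user_id': 42, 'repository': 'somerepo', 'image_id': 'abc123'}

  # Legacy job body, still honored by the else branch above.
  old_job = {'namespace': 'someuser', 'repository': 'somerepo', 'image_id': 'abc123'}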
@@ -40,6 +40,7 @@ NO_TAGS = ['<none>:<none>']
 RESERVATION_TIME = (TIMEOUT_PERIOD_MINUTES + 5) * 60
+DOCKER_BASE_URL = os.environ.get('DOCKER_HOST', None)


 def matches_system_error(status_str):
   """ Returns true if the given status string matches a known system error in the
       Docker builder.
@@ -129,8 +130,8 @@ class DockerfileBuildContext(object):
     # Note: We have two different clients here because we (potentially) login
     # with both, but with different credentials that we do not want shared between
     # the build and push operations.
-    self._push_cl = StreamingDockerClient(timeout=1200, base_url = DOCKER_BASE_URL)
-    self._build_cl = StreamingDockerClient(timeout=1200, base_url = DOCKER_BASE_URL)
+    self._push_cl = StreamingDockerClient(timeout=1200, base_url=DOCKER_BASE_URL)
+    self._build_cl = StreamingDockerClient(timeout=1200, base_url=DOCKER_BASE_URL)

     dockerfile_path = os.path.join(self._build_dir, dockerfile_subdir,
                                    'Dockerfile')
@@ -233,8 +234,8 @@ class DockerfileBuildContext(object):
     if self._pull_credentials:
       logger.debug('Logging in with pull credentials: %s@%s',
                    self._pull_credentials['username'], self._pull_credentials['registry'])
-      self._build_logger('Pulling base image: %s' % image_and_tag, log_data = {
+      self._build_logger('Pulling base image: %s' % image_and_tag, log_data={
         'phasestep': 'login',
         'username': self._pull_credentials['username'],
         'registry': self._pull_credentials['registry']
@@ -242,12 +243,11 @@ class DockerfileBuildContext(object):
       self._build_cl.login(self._pull_credentials['username'], self._pull_credentials['password'],
                            registry=self._pull_credentials['registry'], reauth=True)
-
-    # Pull the image, in case it was updated since the last build
-    self._build_logger('Pulling base image: %s' % image_and_tag, log_data = {
-      'phasestep': 'pull',
-      'repo_url': image_and_tag
-    })
+    else:
+      self._build_logger('Pulling base image: %s' % image_and_tag, log_data={
+        'phasestep': 'pull',
+        'repo_url': image_and_tag
+      })

     pull_status = self._build_cl.pull(image_and_tag, stream=True)
@@ -278,7 +278,7 @@ class DockerfileBuildContext(object):
         if key in status:
           fully_unwrapped = status[key]
           break

       if not fully_unwrapped:
         logger.debug('Status dict did not have any extractable keys and was: %s', status)
       elif isinstance(status, basestring):
@@ -288,7 +288,7 @@ class DockerfileBuildContext(object):
       # Check for system errors when building.
       if matches_system_error(status_str):
-        raise WorkerUnhealthyException(status_str)
+        raise WorkerUnhealthyException(status_str)

       logger.debug('Status: %s', status_str)
       step_increment = re.search(r'Step ([0-9]+) :', status_str)
@@ -480,8 +480,8 @@ class DockerfileBuildWorker(Worker):
   def watchdog(self):
     logger.debug('Running build watchdog code.')
     try:
-      docker_cl = Client(base_url = DOCKER_BASE_URL)
+      docker_cl = Client(base_url=DOCKER_BASE_URL)

       # Iterate the running containers and kill ones that have been running more than 20 minutes
       for container in docker_cl.containers():
         start_time = datetime.fromtimestamp(container['Created'])
@@ -501,9 +501,7 @@ class DockerfileBuildWorker(Worker):
     # Make sure we have more information for debugging problems
     sentry.client.user_context(job_details)

-    repository_build = model.get_repository_build(job_details['namespace'],
-                                                  job_details['repository'],
-                                                  job_details['build_uuid'])
+    repository_build = model.get_repository_build(job_details['build_uuid'])

     pull_credentials = job_details.get('pull_credentials', None)
@@ -512,16 +510,22 @@ class DockerfileBuildWorker(Worker):
     resource_url = user_files.get_file_url(repository_build.resource_key, requires_cors=False)
     tag_names = job_config['docker_tags']
     build_subdir = job_config['build_subdir']
-    repo = job_config['repository']
+
+    # TODO remove the top branch when there are no more jobs with a repository config
+    if 'repository' in job_config:
+      repo = job_config['repository']
+    else:
+      repo = '%s/%s/%s' % (job_config['registry'],
+                           repository_build.repository.namespace_user.username,
+                           repository_build.repository.name)

     access_token = repository_build.access_token.code

-    log_appender = partial(build_logs.append_log_message,
-                           repository_build.uuid)
+    log_appender = partial(build_logs.append_log_message, repository_build.uuid)

     # Lookup and save the version of docker being used.
     try:
-      docker_cl = Client(base_url = DOCKER_BASE_URL)
+      docker_cl = Client(base_url=DOCKER_BASE_URL)
       docker_version = docker_cl.version().get('Version', '')
     except ConnectionError as exc:
       raise WorkerUnhealthyException(exc.message)
@@ -532,14 +536,13 @@ class DockerfileBuildWorker(Worker):
     if dash > 0:
       docker_version = docker_version[:dash]

-    log_appender('initializing', build_logs.PHASE, log_data = {
+    log_appender('initializing', build_logs.PHASE, log_data={
       'docker_version': docker_version
     })

     log_appender('Docker version: %s' % docker_version)

-    start_msg = ('Starting job with resource url: %s repo: %s' % (resource_url,
-                                                                  repo))
+    start_msg = ('Starting job with resource url: %s repo: %s' % (resource_url, repo))
     logger.debug(start_msg)

     docker_resource = requests.get(resource_url, stream=True)
@@ -595,7 +598,7 @@ class DockerfileBuildWorker(Worker):
       cur_message = ex.message or 'Error while unpacking build package'
       log_appender(cur_message, build_logs.ERROR)
       spawn_failure(cur_message, event_data)
-      raise JobException(cur_message)
+      raise JobException(cur_message)

     # Start the build process.
     try:
@@ -640,14 +643,14 @@ class DockerfileBuildWorker(Worker):
       # Spawn a notification that the build has completed.
       spawn_notification(repository_build.repository, 'build_success', event_data,
-                         subpage='build?current=%s' % repository_build.uuid,
-                         pathargs=['build', repository_build.uuid])
+                         subpage='build?current=%s' % repository_build.uuid,
+                         pathargs=['build', repository_build.uuid])

     except WorkerUnhealthyException as exc:
       # Spawn a notification that the build has failed.
       log_appender('Worker has become unhealthy. Will retry shortly.', build_logs.ERROR)
       spawn_failure(exc.message, event_data)

       # Raise the exception to the queue.
       raise exc
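The 'repository' membership test in job_config is the same drain-then-delete migration pattern used in the diffs worker above. Note that the commit checks membership with in rather than reaching for dict.get: a get-based variant, sketched below for comparison, would wrongly take the fallback path for an old job that carried an empty 'repository' value.

  # Sketch only: dict.get variant of the fallback above. Unlike the committed
  # 'in' test, an empty 'repository' value would fall through to the new path.
  repo = job_config.get('repository') or '%s/%s/%s' % (
      job_config['registry'],
      repository_build.repository.namespace_user.username,
      repository_build.repository.name)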
@@ -1,7 +1,4 @@
 import logging
-import argparse
-import requests
-import json

 from app import notification_queue
 from workers.worker import Worker
@@ -12,11 +9,6 @@ from workers.worker import JobException

 from data import model

-root_logger = logging.getLogger('')
-root_logger.setLevel(logging.DEBUG)
-
-FORMAT = '%(asctime)-15s - %(levelname)s - %(pathname)s - %(funcName)s - %(message)s'
-formatter = logging.Formatter(FORMAT)

 logger = logging.getLogger(__name__)
@@ -24,10 +16,8 @@ logger = logging.getLogger(__name__)
 class NotificationWorker(Worker):
   def process_queue_item(self, job_details):
     notification_uuid = job_details['notification_uuid'];
-    repo_namespace = job_details['repository_namespace']
-    repo_name = job_details['repository_name']

-    notification = model.get_repo_notification(repo_namespace, repo_name, notification_uuid)
+    notification = model.get_repo_notification(notification_uuid)
     if not notification:
       # Probably deleted.
       return
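As with build triggers and repository builds, get_repo_notification is narrowed to a uuid-only lookup, so queue items no longer need to carry the repository namespace and name. A hypothetical sketch of the narrowed helper, with the model name an assumption (the real code is in data/model.py):

  # Hypothetical uuid-only lookup; returning None when the row is gone lets
  # the worker treat a deleted notification as nothing to do.
  def get_repo_notification(uuid):
    try:
      return (RepositoryNotification
              .select()
              .where(RepositoryNotification.uuid == uuid)
              .get())
    except RepositoryNotification.DoesNotExist:
      return None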