diff --git a/Dockerfile.web b/Dockerfile.web
index 1c1b60c69..605b088b3 100644
--- a/Dockerfile.web
+++ b/Dockerfile.web
@@ -47,7 +47,7 @@ RUN venv/bin/python -m external_libraries
# Run the tests
RUN TEST=true venv/bin/python -m unittest discover
-VOLUME ["/conf/stack", "/var/log", "/datastorage"]
+VOLUME ["/conf/stack", "/var/log", "/datastorage", "/tmp"]
EXPOSE 443 80
diff --git a/application.py b/application.py
index e7f8548f8..4c0adb9b9 100644
--- a/application.py
+++ b/application.py
@@ -7,7 +7,7 @@ from peewee import Proxy
from app import app as application
from flask import request, Request
from util.names import urn_generator
-from data.model import db as model_db, read_slave
+from data.database import db as model_db, read_slave
# Turn off debug logging for boto
logging.getLogger('boto').setLevel(logging.CRITICAL)
diff --git a/auth/auth.py b/auth/auth.py
index a81876e54..ed0c8d82a 100644
--- a/auth/auth.py
+++ b/auth/auth.py
@@ -25,7 +25,7 @@ def _load_user_from_cookie():
if not current_user.is_anonymous():
logger.debug('Loading user from cookie: %s', current_user.get_id())
set_authenticated_user_deferred(current_user.get_id())
- loaded = QuayDeferredPermissionUser(current_user.get_id(), 'username', {scopes.DIRECT_LOGIN})
+ loaded = QuayDeferredPermissionUser(current_user.get_id(), 'user_db_id', {scopes.DIRECT_LOGIN})
identity_changed.send(app, identity=loaded)
return current_user.db_user()
return None
@@ -58,12 +58,10 @@ def _validate_and_apply_oauth_token(token):
set_authenticated_user(validated.authorized_user)
set_validated_oauth_token(validated)
- new_identity = QuayDeferredPermissionUser(validated.authorized_user.username, 'username',
- scope_set)
+ new_identity = QuayDeferredPermissionUser(validated.authorized_user.id, 'user_db_id', scope_set)
identity_changed.send(app, identity=new_identity)
-
def process_basic_auth(auth):
normalized = [part.strip() for part in auth.split(' ') if part]
if normalized[0].lower() != 'basic' or len(normalized) != 2:
@@ -100,8 +98,7 @@ def process_basic_auth(auth):
logger.debug('Successfully validated robot: %s' % credentials[0])
set_authenticated_user(robot)
- deferred_robot = QuayDeferredPermissionUser(robot.username, 'username',
- {scopes.DIRECT_LOGIN})
+ deferred_robot = QuayDeferredPermissionUser(robot.id, 'user_db_id', {scopes.DIRECT_LOGIN})
identity_changed.send(app, identity=deferred_robot)
return
except model.InvalidRobotException:
@@ -114,7 +111,7 @@ def process_basic_auth(auth):
logger.debug('Successfully validated user: %s' % authenticated.username)
set_authenticated_user(authenticated)
- new_identity = QuayDeferredPermissionUser(authenticated.username, 'username',
+ new_identity = QuayDeferredPermissionUser(authenticated.id, 'user_db_id',
{scopes.DIRECT_LOGIN})
identity_changed.send(app, identity=new_identity)
return
diff --git a/auth/auth_context.py b/auth/auth_context.py
index b97ffa02d..6c587f901 100644
--- a/auth/auth_context.py
+++ b/auth/auth_context.py
@@ -10,13 +10,13 @@ logger = logging.getLogger(__name__)
def get_authenticated_user():
user = getattr(_request_ctx_stack.top, 'authenticated_user', None)
if not user:
- username = getattr(_request_ctx_stack.top, 'authenticated_username', None)
- if not username:
- logger.debug('No authenticated user or deferred username.')
+ db_id = getattr(_request_ctx_stack.top, 'authenticated_db_id', None)
+ if not db_id:
+ logger.debug('No authenticated user or deferred database id.')
return None
logger.debug('Loading deferred authenticated user.')
- loaded = model.get_user(username)
+ loaded = model.get_user_by_id(db_id)
set_authenticated_user(loaded)
user = loaded
@@ -30,10 +30,10 @@ def set_authenticated_user(user_or_robot):
ctx.authenticated_user = user_or_robot
-def set_authenticated_user_deferred(username_or_robotname):
- logger.debug('Deferring loading of authenticated user object: %s', username_or_robotname)
+def set_authenticated_user_deferred(user_or_robot_db_id):
+ logger.debug('Deferring loading of authenticated user object: %s', user_or_robot_db_id)
ctx = _request_ctx_stack.top
- ctx.authenticated_username = username_or_robotname
+ ctx.authenticated_db_id = user_or_robot_db_id
def get_validated_oauth_token():
diff --git a/auth/permissions.py b/auth/permissions.py
index 2b27f9583..eb9059c22 100644
--- a/auth/permissions.py
+++ b/auth/permissions.py
@@ -58,8 +58,8 @@ SCOPE_MAX_USER_ROLES.update({
class QuayDeferredPermissionUser(Identity):
- def __init__(self, id, auth_type, scopes):
- super(QuayDeferredPermissionUser, self).__init__(id, auth_type)
+ def __init__(self, db_id, auth_type, scopes):
+ super(QuayDeferredPermissionUser, self).__init__(db_id, auth_type)
self._permissions_loaded = False
self._scope_set = scopes
@@ -88,7 +88,7 @@ class QuayDeferredPermissionUser(Identity):
def can(self, permission):
if not self._permissions_loaded:
logger.debug('Loading user permissions after deferring.')
- user_object = model.get_user(self.id)
+ user_object = model.get_user_by_id(self.id)
# Add the superuser need, if applicable.
if (user_object.username is not None and
@@ -112,7 +112,7 @@ class QuayDeferredPermissionUser(Identity):
# Add repository permissions
for perm in model.get_all_user_permissions(user_object):
- repo_grant = _RepositoryNeed(perm.repository.namespace, perm.repository.name,
+ repo_grant = _RepositoryNeed(perm.repository.namespace_user.username, perm.repository.name,
self._repo_role_for_scopes(perm.role.name))
logger.debug('User added permission: {0}'.format(repo_grant))
self.provides.add(repo_grant)
@@ -230,16 +230,16 @@ def on_identity_loaded(sender, identity):
if isinstance(identity, QuayDeferredPermissionUser):
logger.debug('Deferring permissions for user: %s', identity.id)
- elif identity.auth_type == 'username':
+ elif identity.auth_type == 'user_db_id':
logger.debug('Switching username permission to deferred object: %s', identity.id)
- switch_to_deferred = QuayDeferredPermissionUser(identity.id, 'username', {scopes.DIRECT_LOGIN})
+ switch_to_deferred = QuayDeferredPermissionUser(identity.id, 'user_db_id', {scopes.DIRECT_LOGIN})
identity_changed.send(app, identity=switch_to_deferred)
elif identity.auth_type == 'token':
logger.debug('Loading permissions for token: %s', identity.id)
token_data = model.load_token_data(identity.id)
- repo_grant = _RepositoryNeed(token_data.repository.namespace,
+ repo_grant = _RepositoryNeed(token_data.repository.namespace_user.username,
token_data.repository.name,
token_data.role.name)
logger.debug('Delegate token added permission: {0}'.format(repo_grant))
diff --git a/conf/nginx-nossl.conf b/conf/nginx-nossl.conf
index 73a9c7605..fbcce63c0 100644
--- a/conf/nginx-nossl.conf
+++ b/conf/nginx-nossl.conf
@@ -13,10 +13,5 @@ http {
include server-base.conf;
listen 80 default;
-
- location /static/ {
- # checks for static file, if not found proxy to app
- alias /static/;
- }
}
}
diff --git a/conf/nginx.conf b/conf/nginx.conf
index 43c21b6ca..e208d30e0 100644
--- a/conf/nginx.conf
+++ b/conf/nginx.conf
@@ -23,10 +23,5 @@ http {
ssl_protocols SSLv3 TLSv1;
ssl_ciphers ALL:!ADH:!EXPORT56:RC4+RSA:+HIGH:+MEDIUM:+LOW:+SSLv3:+EXP;
ssl_prefer_server_ciphers on;
-
- location /static/ {
- # checks for static file, if not found proxy to app
- alias /static/;
- }
}
}
diff --git a/conf/server-base.conf b/conf/server-base.conf
index a13cf1424..4636afdde 100644
--- a/conf/server-base.conf
+++ b/conf/server-base.conf
@@ -24,4 +24,16 @@ location / {
proxy_pass http://app_server;
proxy_read_timeout 2000;
proxy_temp_path /var/log/nginx/proxy_temp 1 2;
+}
+
+location /static/ {
+ # checks for static file, if not found proxy to app
+ alias /static/;
+}
+
+location /v1/_ping {
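+    # Answer Docker registry ping checks directly from nginx without proxying to the
+    # app server; the headers advertise the registry API version and standalone mode.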
+ add_header Content-Type text/plain;
+ add_header X-Docker-Registry-Version 0.6.0;
+ add_header X-Docker-Registry-Standalone 0;
+ return 200 'okay';
}
\ No newline at end of file
diff --git a/config.py b/config.py
index f810007e8..6742d1a43 100644
--- a/config.py
+++ b/config.py
@@ -80,11 +80,11 @@ class DefaultConfig(object):
AUTHENTICATION_TYPE = 'Database'
# Build logs
- BUILDLOGS_REDIS_HOSTNAME = 'logs.quay.io'
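+  # A dict of Redis connection parameters (host here; port, password, etc. may be
+  # added), expanded into the Redis client by data/buildlogs.py.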
+ BUILDLOGS_REDIS = {'host': 'logs.quay.io'}
BUILDLOGS_OPTIONS = []
# Real-time user events
- USER_EVENTS_REDIS_HOSTNAME = 'logs.quay.io'
+ USER_EVENTS_REDIS = {'host': 'logs.quay.io'}
# Stripe config
BILLING_TYPE = 'FakeStripe'
@@ -162,6 +162,12 @@ class DefaultConfig(object):
# Feature Flag: Dockerfile build support.
FEATURE_BUILD_SUPPORT = True
+ # Feature Flag: Whether emails are enabled.
+ FEATURE_MAILING = True
+
+ # Feature Flag: Whether users can be created (by non-super users).
+ FEATURE_USER_CREATION = True
+
DISTRIBUTED_STORAGE_CONFIG = {
'local_eu': ['LocalStorage', {'storage_path': 'test/data/registry/eu'}],
'local_us': ['LocalStorage', {'storage_path': 'test/data/registry/us'}],
diff --git a/data/buildlogs.py b/data/buildlogs.py
index 9128390af..6e24f501b 100644
--- a/data/buildlogs.py
+++ b/data/buildlogs.py
@@ -16,8 +16,8 @@ class RedisBuildLogs(object):
COMMAND = 'command'
PHASE = 'phase'
- def __init__(self, redis_host):
- self._redis = redis.StrictRedis(host=redis_host)
+ def __init__(self, redis_config):
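+    # redis_config is expanded directly into StrictRedis, so it can carry host and,
+    # optionally, port, password, db, etc.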
+ self._redis = redis.StrictRedis(socket_connect_timeout=5, **redis_config)
@staticmethod
def _logs_key(build_id):
@@ -104,7 +104,13 @@ class BuildLogs(object):
self.state = None
def init_app(self, app):
- buildlogs_hostname = app.config.get('BUILDLOGS_REDIS_HOSTNAME')
+ buildlogs_config = app.config.get('BUILDLOGS_REDIS')
+ if not buildlogs_config:
+ # This is the old key name.
+ buildlogs_config = {
+ 'host': app.config.get('BUILDLOGS_REDIS_HOSTNAME')
+ }
+
buildlogs_options = app.config.get('BUILDLOGS_OPTIONS', [])
buildlogs_import = app.config.get('BUILDLOGS_MODULE_AND_CLASS', None)
@@ -113,7 +119,7 @@ class BuildLogs(object):
else:
klass = import_class(buildlogs_import[0], buildlogs_import[1])
- buildlogs = klass(buildlogs_hostname, *buildlogs_options)
+ buildlogs = klass(buildlogs_config, *buildlogs_options)
# register extension with app
app.extensions = getattr(app, 'extensions', {})
diff --git a/data/database.py b/data/database.py
index fb060a041..3ad10b7b2 100644
--- a/data/database.py
+++ b/data/database.py
@@ -8,7 +8,7 @@ from peewee import *
from data.read_slave import ReadSlaveModel
from sqlalchemy.engine.url import make_url
from urlparse import urlparse
-
+from util.names import urn_generator
logger = logging.getLogger(__name__)
@@ -21,8 +21,24 @@ SCHEME_DRIVERS = {
'postgresql+psycopg2': PostgresqlDatabase,
}
+SCHEME_RANDOM_FUNCTION = {
+ 'mysql': fn.Rand,
+ 'mysql+pymysql': fn.Rand,
+ 'sqlite': fn.Random,
+ 'postgresql': fn.Random,
+ 'postgresql+psycopg2': fn.Random,
+}
+
+class CallableProxy(Proxy):
+ def __call__(self, *args, **kwargs):
+ if self.obj is None:
+ raise AttributeError('Cannot use uninitialized Proxy.')
+ return self.obj(*args, **kwargs)
+
db = Proxy()
read_slave = Proxy()
+db_random_func = CallableProxy()
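+# Once configure() runs, db_random_func() resolves to the driver-appropriate random
+# function (fn.Rand on MySQL, fn.Random on SQLite/Postgres); calling it before then
+# raises AttributeError. Illustrative use: SomeModel.select().order_by(db_random_func()).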
+
def _db_from_url(url, db_kwargs):
parsed_url = make_url(url)
@@ -38,11 +54,15 @@ def _db_from_url(url, db_kwargs):
return SCHEME_DRIVERS[parsed_url.drivername](parsed_url.database, **db_kwargs)
+
def configure(config_object):
db_kwargs = dict(config_object['DB_CONNECTION_ARGS'])
write_db_uri = config_object['DB_URI']
db.initialize(_db_from_url(write_db_uri, db_kwargs))
+ parsed_write_uri = make_url(write_db_uri)
+ db_random_func.initialize(SCHEME_RANDOM_FUNCTION[parsed_write_uri.drivername])
+
read_slave_uri = config_object.get('DB_READ_SLAVE_URI', None)
if read_slave_uri is not None:
read_slave.initialize(_db_from_url(read_slave_uri, db_kwargs))
@@ -112,6 +132,15 @@ class TeamMember(BaseModel):
)
+class TeamMemberInvite(BaseModel):
+ # Note: Either user OR email will be filled in, but not both.
+ user = ForeignKeyField(User, index=True, null=True)
+ email = CharField(null=True)
+ team = ForeignKeyField(Team, index=True)
+ inviter = ForeignKeyField(User, related_name='inviter')
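+  # invite_token identifies this invite when it is later accepted or declined.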
+ invite_token = CharField(default=urn_generator(['teaminvite']))
+
+
class LoginService(BaseModel):
name = CharField(unique=True, index=True)
@@ -139,7 +168,7 @@ class Visibility(BaseModel):
class Repository(BaseModel):
- namespace = CharField()
+ namespace_user = ForeignKeyField(User)
name = CharField()
visibility = ForeignKeyField(Visibility)
description = TextField(null=True)
@@ -150,7 +179,7 @@ class Repository(BaseModel):
read_slaves = (read_slave,)
indexes = (
# create a unique index on namespace and name
- (('namespace', 'name'), True),
+ (('namespace_user', 'name'), True),
)
@@ -227,7 +256,7 @@ class EmailConfirmation(BaseModel):
class ImageStorage(BaseModel):
- uuid = CharField(default=uuid_generator)
+ uuid = CharField(default=uuid_generator, index=True)
checksum = CharField(null=True)
created = DateTimeField(null=True)
comment = TextField(null=True)
@@ -333,7 +362,7 @@ class RepositoryBuild(BaseModel):
class QueueItem(BaseModel):
queue_name = CharField(index=True, max_length=1024)
body = TextField()
- available_after = DateTimeField(default=datetime.now, index=True)
+ available_after = DateTimeField(default=datetime.utcnow, index=True)
available = BooleanField(default=True, index=True)
processing_expires = DateTimeField(null=True, index=True)
retries_remaining = IntegerField(default=5)
@@ -438,4 +467,5 @@ all_models = [User, Repository, Image, AccessToken, Role, RepositoryPermission,
OAuthApplication, OAuthAuthorizationCode, OAuthAccessToken, NotificationKind,
Notification, ImageStorageLocation, ImageStoragePlacement,
ExternalNotificationEvent, ExternalNotificationMethod, RepositoryNotification,
- RepositoryAuthorizedEmail, ImageStorageTransformation, DerivedImageStorage]
+ RepositoryAuthorizedEmail, ImageStorageTransformation, DerivedImageStorage,
+ TeamMemberInvite]
diff --git a/data/migrations/versions/13da56878560_migrate_registry_namespaces_to_.py b/data/migrations/versions/13da56878560_migrate_registry_namespaces_to_.py
new file mode 100644
index 000000000..30ac75c96
--- /dev/null
+++ b/data/migrations/versions/13da56878560_migrate_registry_namespaces_to_.py
@@ -0,0 +1,24 @@
+"""Migrate registry namespaces to reference a user.
+
+Revision ID: 13da56878560
+Revises: 51d04d0e7e6f
+Create Date: 2014-09-18 13:56:45.130455
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '13da56878560'
+down_revision = '51d04d0e7e6f'
+
+from alembic import op
+import sqlalchemy as sa
+
+from data.database import Repository, User
+
+def upgrade(tables):
+ # Add the namespace_user column, allowing it to be nullable
+ op.add_column('repository', sa.Column('namespace_user_id', sa.Integer(), sa.ForeignKey('user.id')))
+
+
+def downgrade(tables):
+ op.drop_column('repository', 'namespace_user_id')
diff --git a/data/migrations/versions/201d55b38649_remove_fields_from_image_table_that_.py b/data/migrations/versions/201d55b38649_remove_fields_from_image_table_that_.py
index d50c3a592..8185c1118 100644
--- a/data/migrations/versions/201d55b38649_remove_fields_from_image_table_that_.py
+++ b/data/migrations/versions/201d55b38649_remove_fields_from_image_table_that_.py
@@ -44,11 +44,11 @@ def downgrade(tables):
op.create_index('notificationkind_name', 'notificationkind', ['name'], unique=False)
op.drop_index('logentrykind_name', table_name='logentrykind')
op.create_index('logentrykind_name', 'logentrykind', ['name'], unique=False)
- op.add_column('image', sa.Column('created', mysql.DATETIME(), nullable=True))
- op.add_column('image', sa.Column('command', mysql.LONGTEXT(), nullable=True))
- op.add_column('image', sa.Column('image_size', mysql.BIGINT(display_width=20), nullable=True))
- op.add_column('image', sa.Column('checksum', mysql.VARCHAR(length=255), nullable=True))
- op.add_column('image', sa.Column('comment', mysql.LONGTEXT(), nullable=True))
+ op.add_column('image', sa.Column('created', sa.DateTime(), nullable=True))
+ op.add_column('image', sa.Column('command', sa.Text(), nullable=True))
+ op.add_column('image', sa.Column('image_size', sa.BigInteger(), nullable=True))
+ op.add_column('image', sa.Column('checksum', sa.String(length=255), nullable=True))
+ op.add_column('image', sa.Column('comment', sa.Text(), nullable=True))
op.drop_index('buildtriggerservice_name', table_name='buildtriggerservice')
op.create_index('buildtriggerservice_name', 'buildtriggerservice', ['name'], unique=False)
### end Alembic commands ###
diff --git a/data/migrations/versions/3f4fe1194671_backfill_the_namespace_user_fields.py b/data/migrations/versions/3f4fe1194671_backfill_the_namespace_user_fields.py
new file mode 100644
index 000000000..4a1e2fe9d
--- /dev/null
+++ b/data/migrations/versions/3f4fe1194671_backfill_the_namespace_user_fields.py
@@ -0,0 +1,26 @@
+"""Backfill the namespace_user fields.
+
+Revision ID: 3f4fe1194671
+Revises: 6f2ecf5afcf
+Create Date: 2014-09-24 14:29:45.192179
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '3f4fe1194671'
+down_revision = '6f2ecf5afcf'
+
+from alembic import op
+import sqlalchemy as sa
+
+
+def upgrade(tables):
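+  # Backfill namespace_user_id by matching the legacy namespace string against
+  # user.username, then enforce uniqueness of (namespace_user_id, name).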
+ conn = op.get_bind()
+ user_table_name_escaped = conn.dialect.identifier_preparer.format_table(tables['user'])
+ conn.execute('update repository set namespace_user_id = (select id from {0} where {0}.username = repository.namespace) where namespace_user_id is NULL'.format(user_table_name_escaped))
+ op.create_index('repository_namespace_user_id_name', 'repository', ['namespace_user_id', 'name'], unique=True)
+
+
+def downgrade(tables):
+ op.drop_constraint('fk_repository_namespace_user_id_user', table_name='repository', type_='foreignkey')
+ op.drop_index('repository_namespace_user_id_name', table_name='repository')
diff --git a/data/migrations/versions/51d04d0e7e6f_email_invites_for_joining_a_team.py b/data/migrations/versions/51d04d0e7e6f_email_invites_for_joining_a_team.py
new file mode 100644
index 000000000..c18335adb
--- /dev/null
+++ b/data/migrations/versions/51d04d0e7e6f_email_invites_for_joining_a_team.py
@@ -0,0 +1,78 @@
+"""Email invites for joining a team.
+
+Revision ID: 51d04d0e7e6f
+Revises: 34fd69f63809
+Create Date: 2014-09-15 23:51:35.478232
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '51d04d0e7e6f'
+down_revision = '34fd69f63809'
+
+from alembic import op
+import sqlalchemy as sa
+
+
+def upgrade(tables):
+ ### commands auto generated by Alembic - please adjust! ###
+ op.create_table('teammemberinvite',
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('user_id', sa.Integer(), nullable=True),
+ sa.Column('email', sa.String(length=255), nullable=True),
+ sa.Column('team_id', sa.Integer(), nullable=False),
+ sa.Column('inviter_id', sa.Integer(), nullable=False),
+ sa.Column('invite_token', sa.String(length=255), nullable=False),
+ sa.ForeignKeyConstraint(['inviter_id'], ['user.id'], ),
+ sa.ForeignKeyConstraint(['team_id'], ['team.id'], ),
+ sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
+ sa.PrimaryKeyConstraint('id')
+ )
+ op.create_index('teammemberinvite_inviter_id', 'teammemberinvite', ['inviter_id'], unique=False)
+ op.create_index('teammemberinvite_team_id', 'teammemberinvite', ['team_id'], unique=False)
+ op.create_index('teammemberinvite_user_id', 'teammemberinvite', ['user_id'], unique=False)
+ ### end Alembic commands ###
+
+ # Manually add the new logentrykind types
+ op.bulk_insert(tables.logentrykind,
+ [
+ {'id':42, 'name':'org_invite_team_member'},
+ {'id':43, 'name':'org_team_member_invite_accepted'},
+ {'id':44, 'name':'org_team_member_invite_declined'},
+ {'id':45, 'name':'org_delete_team_member_invite'},
+ ])
+
+ op.bulk_insert(tables.notificationkind,
+ [
+ {'id':10, 'name':'org_team_invite'},
+ ])
+
+def downgrade(tables):
+ ### commands auto generated by Alembic - please adjust! ###
+ op.execute(
+ (tables.logentrykind.delete()
+ .where(tables.logentrykind.c.name == op.inline_literal('org_invite_team_member')))
+ )
+
+ op.execute(
+ (tables.logentrykind.delete()
+ .where(tables.logentrykind.c.name == op.inline_literal('org_team_member_invite_accepted')))
+ )
+
+ op.execute(
+ (tables.logentrykind.delete()
+ .where(tables.logentrykind.c.name == op.inline_literal('org_team_member_invite_declined')))
+ )
+
+ op.execute(
+ (tables.logentrykind.delete()
+ .where(tables.logentrykind.c.name == op.inline_literal('org_delete_team_member_invite')))
+ )
+
+ op.execute(
+ (tables.notificationkind.delete()
+ .where(tables.notificationkind.c.name == op.inline_literal('org_team_invite')))
+ )
+
+ op.drop_table('teammemberinvite')
+ ### end Alembic commands ###
diff --git a/data/migrations/versions/6f2ecf5afcf_add_the_uncompressed_size_to_image_.py b/data/migrations/versions/6f2ecf5afcf_add_the_uncompressed_size_to_image_.py
index 1081df651..0022ae128 100644
--- a/data/migrations/versions/6f2ecf5afcf_add_the_uncompressed_size_to_image_.py
+++ b/data/migrations/versions/6f2ecf5afcf_add_the_uncompressed_size_to_image_.py
@@ -1,17 +1,16 @@
"""add the uncompressed size to image storage
Revision ID: 6f2ecf5afcf
-Revises: 3f6d26399bd2
+Revises: 13da56878560
Create Date: 2014-09-22 14:39:13.470566
"""
# revision identifiers, used by Alembic.
revision = '6f2ecf5afcf'
-down_revision = '3f6d26399bd2'
+down_revision = '13da56878560'
from alembic import op
-from tools.uncompressedsize import backfill_sizes
import sqlalchemy as sa
@@ -20,9 +19,6 @@ def upgrade(tables):
op.add_column('imagestorage', sa.Column('uncompressed_size', sa.BigInteger(), nullable=True))
### end Alembic commands ###
- # Backfill the uncompressed size to the image storage table.
- backfill_sizes()
-
def downgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.drop_column('imagestorage', 'uncompressed_size')
diff --git a/data/migrations/versions/9a1087b007d_allow_the_namespace_column_to_be_.py b/data/migrations/versions/9a1087b007d_allow_the_namespace_column_to_be_.py
new file mode 100644
index 000000000..a0726bf3b
--- /dev/null
+++ b/data/migrations/versions/9a1087b007d_allow_the_namespace_column_to_be_.py
@@ -0,0 +1,29 @@
+"""Allow the namespace column to be nullable.
+
+Revision ID: 9a1087b007d
+Revises: 3f4fe1194671
+Create Date: 2014-10-01 16:11:21.277226
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '9a1087b007d'
+down_revision = '3f4fe1194671'
+
+from alembic import op
+import sqlalchemy as sa
+
+
+def upgrade(tables):
+ op.drop_index('repository_namespace_name', table_name='repository')
+ op.alter_column('repository', 'namespace', nullable=True, existing_type=sa.String(length=255),
+ server_default=sa.text('NULL'))
+
+
+def downgrade(tables):
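+  # Restore the legacy namespace string from the linked namespace_user row before
+  # re-creating the old unique index and NOT NULL constraint.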
+ conn = op.get_bind()
+ user_table_name_escaped = conn.dialect.identifier_preparer.format_table(tables['user'])
+ conn.execute('update repository set namespace = (select username from {0} where {0}.id = repository.namespace_user_id) where namespace is NULL'.format(user_table_name_escaped))
+
+ op.create_index('repository_namespace_name', 'repository', ['namespace', 'name'], unique=True)
+ op.alter_column('repository', 'namespace', nullable=False, existing_type=sa.String(length=255))
diff --git a/data/migrations/versions/b1d41e2071b_add_an_index_to_the_uuid_in_the_image_.py b/data/migrations/versions/b1d41e2071b_add_an_index_to_the_uuid_in_the_image_.py
new file mode 100644
index 000000000..71a9df794
--- /dev/null
+++ b/data/migrations/versions/b1d41e2071b_add_an_index_to_the_uuid_in_the_image_.py
@@ -0,0 +1,22 @@
+"""Add an index to the uuid in the image storage table.
+
+Revision ID: b1d41e2071b
+Revises: 9a1087b007d
+Create Date: 2014-10-06 18:42:10.021235
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = 'b1d41e2071b'
+down_revision = '9a1087b007d'
+
+from alembic import op
+import sqlalchemy as sa
+
+
+def upgrade(tables):
+ op.create_index('imagestorage_uuid', 'imagestorage', ['uuid'], unique=True)
+
+
+def downgrade(tables):
+ op.drop_index('imagestorage_uuid', table_name='imagestorage')
diff --git a/data/migrations/versions/f42b0ea7a4d_remove_the_old_webhooks_table.py b/data/migrations/versions/f42b0ea7a4d_remove_the_old_webhooks_table.py
index 9ceab4218..5b3f6c812 100644
--- a/data/migrations/versions/f42b0ea7a4d_remove_the_old_webhooks_table.py
+++ b/data/migrations/versions/f42b0ea7a4d_remove_the_old_webhooks_table.py
@@ -23,13 +23,11 @@ def upgrade(tables):
def downgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.create_table('webhook',
- sa.Column('id', mysql.INTEGER(display_width=11), nullable=False),
- sa.Column('public_id', mysql.VARCHAR(length=255), nullable=False),
- sa.Column('repository_id', mysql.INTEGER(display_width=11), autoincrement=False, nullable=False),
- sa.Column('parameters', mysql.LONGTEXT(), nullable=False),
- sa.ForeignKeyConstraint(['repository_id'], [u'repository.id'], name=u'fk_webhook_repository_repository_id'),
- sa.PrimaryKeyConstraint('id'),
- mysql_default_charset=u'latin1',
- mysql_engine=u'InnoDB'
+ sa.Column('id', sa.Integer(), nullable=False),
+ sa.Column('public_id', sa.String(length=255), nullable=False),
+ sa.Column('repository_id', sa.Integer(), nullable=False),
+ sa.Column('parameters', sa.Text(), nullable=False),
+ sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], ),
+ sa.PrimaryKeyConstraint('id')
)
### end Alembic commands ###
diff --git a/data/model/legacy.py b/data/model/legacy.py
index faf9de223..fae1d694a 100644
--- a/data/model/legacy.py
+++ b/data/model/legacy.py
@@ -5,8 +5,18 @@ import json
from datetime import datetime, timedelta
-from data.database import *
-from util.validation import *
+from data.database import (User, Repository, Image, AccessToken, Role, RepositoryPermission,
+ Visibility, RepositoryTag, EmailConfirmation, FederatedLogin,
+ LoginService, RepositoryBuild, Team, TeamMember, TeamRole,
+ LogEntryKind, LogEntry, PermissionPrototype, ImageStorage,
+ BuildTriggerService, RepositoryBuildTrigger, NotificationKind,
+ Notification, ImageStorageLocation, ImageStoragePlacement,
+ ExternalNotificationEvent, ExternalNotificationMethod,
+ RepositoryNotification, RepositoryAuthorizedEmail, TeamMemberInvite,
+ random_string_generator, db, BUILD_PHASE)
+from peewee import JOIN_LEFT_OUTER, fn
+from util.validation import (validate_username, validate_email, validate_password,
+ INVALID_PASSWORD_MESSAGE)
from util.names import format_robot_username
from util.backoff import exponential_backoff
@@ -15,6 +25,9 @@ EXPONENTIAL_BACKOFF_SCALE = timedelta(seconds=1)
PRESUMED_DEAD_BUILD_AGE = timedelta(days=15)
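+# "Namespace" is an alias of User, used whenever a query must join the repository
+# owner (its namespace) separately from any other User join in the same query.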
+Namespace = User.alias()
+
+
logger = logging.getLogger(__name__)
@@ -49,6 +62,9 @@ class InvalidRobotException(DataModelException):
class InvalidTeamException(DataModelException):
pass
+class InvalidTeamMemberException(DataModelException):
+ pass
+
class InvalidPasswordException(DataModelException):
pass
@@ -78,16 +94,34 @@ class TooManyUsersException(DataModelException):
pass
+class UserAlreadyInTeam(DataModelException):
+ pass
+
+
class TooManyLoginAttemptsException(Exception):
def __init__(self, message, retry_after):
super(TooManyLoginAttemptsException, self).__init__(message)
self.retry_after = retry_after
+def _get_repository(namespace_name, repository_name):
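+  # Fetch a repository by owner username and repository name, selecting the owning
+  # user row (via the Namespace alias) in the same query.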
+ return (Repository
+ .select(Repository, Namespace)
+ .join(Namespace, on=(Repository.namespace_user == Namespace.id))
+ .where(Namespace.username == namespace_name, Repository.name == repository_name)
+ .get())
+
+
+def hash_password(password, salt=None):
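+  # bcrypt accepts an existing hash as the salt, so hash_password(pw, stored_hash) can
+  # be compared against stored_hash to verify a password (see verify_user below).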
+ salt = salt or bcrypt.gensalt()
+ return bcrypt.hashpw(password.encode('utf-8'), salt)
+
+
def is_create_user_allowed():
return True
-def create_user(username, password, email):
+
+def create_user(username, password, email, auto_verify=False):
""" Creates a regular user, if allowed. """
if not validate_password(password):
raise InvalidPasswordException(INVALID_PASSWORD_MESSAGE)
@@ -96,15 +130,13 @@ def create_user(username, password, email):
raise TooManyUsersException()
created = _create_user(username, email)
-
- # Store the password hash
- pw_hash = bcrypt.hashpw(password, bcrypt.gensalt())
- created.password_hash = pw_hash
-
+ created.password_hash = hash_password(password)
+ created.verified = auto_verify
created.save()
return created
+
def _create_user(username, email):
if not validate_email(email):
raise InvalidEmailAddressException('Invalid email address: %s' % email)
@@ -337,12 +369,41 @@ def remove_team(org_name, team_name, removed_by_username):
team.delete_instance(recursive=True, delete_nullable=True)
+def add_or_invite_to_team(inviter, team, user=None, email=None, requires_invite=True):
+ # If the user is a member of the organization, then we simply add the
+ # user directly to the team. Otherwise, an invite is created for the user/email.
+ # We return None if the user was directly added and the invite object if the user was invited.
+ if user and requires_invite:
+ orgname = team.organization.username
+
+ # If the user is part of the organization (or a robot), then no invite is required.
+ if user.robot:
+ requires_invite = False
+ if not user.username.startswith(orgname + '+'):
+ raise InvalidTeamMemberException('Cannot add the specified robot to this team, ' +
+ 'as it is not a member of the organization')
+ else:
+ Org = User.alias()
+ found = User.select(User.username)
+ found = found.where(User.username == user.username).join(TeamMember).join(Team)
+ found = found.join(Org, on=(Org.username == orgname)).limit(1)
+ requires_invite = not any(found)
+
+ # If we have a valid user and no invite is required, simply add the user to the team.
+ if user and not requires_invite:
+ add_user_to_team(user, team)
+ return None
+
+ email_address = email if not user else None
+ return TeamMemberInvite.create(user=user, email=email_address, team=team, inviter=inviter)
+
+
def add_user_to_team(user, team):
try:
return TeamMember.create(user=user, team=team)
except Exception:
- raise DataModelException('Unable to add user \'%s\' to team: \'%s\'' %
- (user.username, team.name))
+ raise UserAlreadyInTeam('User \'%s\' is already a member of team \'%s\'' %
+ (user.username, team.name))
def remove_user_from_team(org_name, team_name, username, removed_by_username):
@@ -461,18 +522,20 @@ def confirm_user_email(code):
user = code.user
user.verified = True
+ old_email = None
new_email = code.new_email
if new_email:
if find_user_by_email(new_email):
raise DataModelException('E-mail address already used.')
+ old_email = user.email
user.email = new_email
user.save()
code.delete_instance()
- return user, new_email
+ return user, new_email, old_email
def create_reset_password_email_code(email):
@@ -515,6 +578,20 @@ def get_user(username):
return None
+def get_user_or_org(username):
+ try:
+ return User.get(User.username == username, User.robot == False)
+ except User.DoesNotExist:
+ return None
+
+
+def get_user_by_id(user_db_id):
+ try:
+ return User.get(User.id == user_db_id, User.organization == False)
+ except User.DoesNotExist:
+ return None
+
+
def get_user_or_org_by_customer_id(customer_id):
try:
return User.get(User.stripe_id == customer_id)
@@ -539,12 +616,13 @@ def get_matching_users(username_prefix, robot_namespace=None,
(User.robot == True)))
query = (User
- .select(User.username, fn.Sum(Team.id), User.robot)
+ .select(User.username, User.robot)
.group_by(User.username)
.where(direct_user_query))
if organization:
query = (query
+ .select(User.username, User.robot, fn.Sum(Team.id))
.join(TeamMember, JOIN_LEFT_OUTER)
.join(Team, JOIN_LEFT_OUTER, on=((Team.id == TeamMember.team) &
(Team.organization == organization))))
@@ -553,9 +631,9 @@ def get_matching_users(username_prefix, robot_namespace=None,
class MatchingUserResult(object):
def __init__(self, *args):
self.username = args[0]
- self.is_robot = args[2]
+ self.is_robot = args[1]
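+      # args mirror the select() column order: (username, robot[, Sum(Team.id)]); the
+      # team sum is only selected when scoping to an organization.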
if organization:
- self.is_org_member = (args[1] != None)
+ self.is_org_member = (args[2] != None)
else:
self.is_org_member = None
@@ -580,10 +658,8 @@ def verify_user(username_or_email, password):
retry_after = can_retry_at - now
raise TooManyLoginAttemptsException('Too many login attempts.', retry_after.total_seconds())
- if (fetched.password_hash and
- bcrypt.hashpw(password, fetched.password_hash) ==
- fetched.password_hash):
-
+ if (fetched.password_hash and
+ hash_password(password, fetched.password_hash) == fetched.password_hash):
if fetched.invalid_login_attempts > 0:
fetched.invalid_login_attempts = 0
fetched.save()
@@ -638,6 +714,10 @@ def get_organization_team_members(teamid):
query = joined.where(Team.id == teamid)
return query
+def get_organization_team_member_invites(teamid):
+ joined = TeamMemberInvite.select().join(Team).join(User)
+ query = joined.where(Team.id == teamid)
+ return query
def get_organization_member_set(orgname):
Org = User.alias()
@@ -668,7 +748,7 @@ def get_visible_repositories(username=None, include_public=True, page=None,
limit=None, sort=False, namespace=None):
query = _visible_repository_query(username=username, include_public=include_public, page=page,
limit=limit, namespace=namespace,
- select_models=[Repository, Visibility])
+ select_models=[Repository, Namespace, Visibility])
if sort:
query = query.order_by(Repository.description.desc())
@@ -682,11 +762,13 @@ def get_visible_repositories(username=None, include_public=True, page=None,
def _visible_repository_query(username=None, include_public=True, limit=None,
page=None, namespace=None, select_models=[]):
query = (Repository
- .select(*select_models) # Note: We need to leave this blank for the get_count case. Otherwise, MySQL/RDS complains.
- .distinct()
- .join(Visibility)
- .switch(Repository)
- .join(RepositoryPermission, JOIN_LEFT_OUTER))
+           .select(*select_models) # MySQL/RDS complains if there are selected models for counts; leave empty for the get_count case.
+ .distinct()
+ .join(Visibility)
+ .switch(Repository)
+ .join(Namespace, on=(Repository.namespace_user == Namespace.id))
+ .switch(Repository)
+ .join(RepositoryPermission, JOIN_LEFT_OUTER))
query = _filter_to_repos_for_user(query, username, namespace, include_public)
@@ -712,31 +794,25 @@ def _filter_to_repos_for_user(query, username=None, namespace=None,
AdminUser = User.alias()
query = (query
- .switch(RepositoryPermission)
- .join(User, JOIN_LEFT_OUTER)
- .switch(RepositoryPermission)
- .join(Team, JOIN_LEFT_OUTER)
- .join(TeamMember, JOIN_LEFT_OUTER)
- .join(UserThroughTeam, JOIN_LEFT_OUTER, on=(UserThroughTeam.id ==
- TeamMember.user))
- .switch(Repository)
- .join(Org, JOIN_LEFT_OUTER, on=(Org.username == Repository.namespace))
- .join(AdminTeam, JOIN_LEFT_OUTER, on=(Org.id ==
- AdminTeam.organization))
- .join(TeamRole, JOIN_LEFT_OUTER, on=(AdminTeam.role == TeamRole.id))
- .switch(AdminTeam)
- .join(AdminTeamMember, JOIN_LEFT_OUTER, on=(AdminTeam.id ==
- AdminTeamMember.team))
- .join(AdminUser, JOIN_LEFT_OUTER, on=(AdminTeamMember.user ==
- AdminUser.id)))
+ .switch(RepositoryPermission)
+ .join(User, JOIN_LEFT_OUTER)
+ .switch(RepositoryPermission)
+ .join(Team, JOIN_LEFT_OUTER)
+ .join(TeamMember, JOIN_LEFT_OUTER)
+ .join(UserThroughTeam, JOIN_LEFT_OUTER, on=(UserThroughTeam.id == TeamMember.user))
+ .switch(Repository)
+ .join(Org, JOIN_LEFT_OUTER, on=(Repository.namespace_user == Org.id))
+ .join(AdminTeam, JOIN_LEFT_OUTER, on=(Org.id == AdminTeam.organization))
+ .join(TeamRole, JOIN_LEFT_OUTER, on=(AdminTeam.role == TeamRole.id))
+ .switch(AdminTeam)
+ .join(AdminTeamMember, JOIN_LEFT_OUTER, on=(AdminTeam.id == AdminTeamMember.team))
+ .join(AdminUser, JOIN_LEFT_OUTER, on=(AdminTeamMember.user == AdminUser.id)))
- where_clause = ((User.username == username) |
- (UserThroughTeam.username == username) |
- ((AdminUser.username == username) &
- (TeamRole.name == 'admin')))
+ where_clause = ((User.username == username) | (UserThroughTeam.username == username) |
+ ((AdminUser.username == username) & (TeamRole.name == 'admin')))
if namespace:
- where_clause = where_clause & (Repository.namespace == namespace)
+ where_clause = where_clause & (Namespace.username == namespace)
if include_public:
new_clause = (Visibility.name == 'public')
@@ -755,7 +831,7 @@ def get_matching_repositories(repo_term, username=None):
visible = get_visible_repositories(username)
search_clauses = (Repository.name ** ('%' + name_term + '%') |
- Repository.namespace ** ('%' + namespace_term + '%'))
+ Namespace.username ** ('%' + namespace_term + '%'))
# Handle the case where the user has already entered a namespace path.
if repo_term.find('/') > 0:
@@ -764,7 +840,7 @@ def get_matching_repositories(repo_term, username=None):
name_term = parts[-1]
search_clauses = (Repository.name ** ('%' + name_term + '%') &
- Repository.namespace ** ('%' + namespace_term + '%'))
+ Namespace.username ** ('%' + namespace_term + '%'))
final = visible.where(search_clauses).limit(10)
return list(final)
@@ -774,7 +850,7 @@ def change_password(user, new_password):
if not validate_password(new_password):
raise InvalidPasswordException(INVALID_PASSWORD_MESSAGE)
- pw_hash = bcrypt.hashpw(new_password, bcrypt.gensalt())
+ pw_hash = hash_password(new_password)
user.password_hash = pw_hash
user.save()
@@ -787,29 +863,27 @@ def change_invoice_email(user, invoice_email):
user.save()
-def update_email(user, new_email):
+def update_email(user, new_email, auto_verify=False):
user.email = new_email
- user.verified = False
+ user.verified = auto_verify
user.save()
def get_all_user_permissions(user):
- select = RepositoryPermission.select(RepositoryPermission, Role, Repository)
- with_role = select.join(Role)
- with_repo = with_role.switch(RepositoryPermission).join(Repository)
- through_user = with_repo.switch(RepositoryPermission).join(User,
- JOIN_LEFT_OUTER)
- as_perm = through_user.switch(RepositoryPermission)
- through_team = as_perm.join(Team, JOIN_LEFT_OUTER).join(TeamMember,
- JOIN_LEFT_OUTER)
-
UserThroughTeam = User.alias()
- with_team_member = through_team.join(UserThroughTeam, JOIN_LEFT_OUTER,
- on=(UserThroughTeam.id ==
- TeamMember.user))
- return with_team_member.where((User.id == user) |
- (UserThroughTeam.id == user))
+ return (RepositoryPermission
+ .select(RepositoryPermission, Role, Repository, Namespace)
+ .join(Role)
+ .switch(RepositoryPermission)
+ .join(Repository)
+ .join(Namespace, on=(Repository.namespace_user == Namespace.id))
+ .switch(RepositoryPermission)
+ .join(User, JOIN_LEFT_OUTER)
+ .switch(RepositoryPermission)
+ .join(Team, JOIN_LEFT_OUTER).join(TeamMember, JOIN_LEFT_OUTER)
+ .join(UserThroughTeam, JOIN_LEFT_OUTER, on=(UserThroughTeam.id == TeamMember.user))
+ .where((User.id == user) | (UserThroughTeam.id == user)))
def delete_prototype_permission(org, uid):
@@ -874,33 +948,37 @@ def get_org_wide_permissions(user):
def get_all_repo_teams(namespace_name, repository_name):
- select = RepositoryPermission.select(Team.name.alias('team_name'),
- Role.name, RepositoryPermission)
- with_team = select.join(Team)
- with_role = with_team.switch(RepositoryPermission).join(Role)
- with_repo = with_role.switch(RepositoryPermission).join(Repository)
- return with_repo.where(Repository.namespace == namespace_name,
- Repository.name == repository_name)
+ return (RepositoryPermission.select(Team.name.alias('team_name'), Role.name, RepositoryPermission)
+ .join(Team)
+ .switch(RepositoryPermission)
+ .join(Role)
+ .switch(RepositoryPermission)
+ .join(Repository)
+ .join(Namespace, on=(Repository.namespace_user == Namespace.id))
+ .where(Namespace.username == namespace_name, Repository.name == repository_name))
def get_all_repo_users(namespace_name, repository_name):
- select = RepositoryPermission.select(User.username, User.robot, Role.name,
- RepositoryPermission)
- with_user = select.join(User)
- with_role = with_user.switch(RepositoryPermission).join(Role)
- with_repo = with_role.switch(RepositoryPermission).join(Repository)
- return with_repo.where(Repository.namespace == namespace_name,
- Repository.name == repository_name)
+ return (RepositoryPermission.select(User.username, User.robot, Role.name, RepositoryPermission)
+ .join(User)
+ .switch(RepositoryPermission)
+ .join(Role)
+ .switch(RepositoryPermission)
+ .join(Repository)
+ .join(Namespace, on=(Repository.namespace_user == Namespace.id))
+ .where(Namespace.username == namespace_name, Repository.name == repository_name))
def get_all_repo_users_transitive_via_teams(namespace_name, repository_name):
- select = User.select().distinct()
- with_team_member = select.join(TeamMember)
- with_team = with_team_member.join(Team)
- with_perm = with_team.join(RepositoryPermission)
- with_repo = with_perm.join(Repository)
- return with_repo.where(Repository.namespace == namespace_name,
- Repository.name == repository_name)
+ return (User
+ .select()
+ .distinct()
+ .join(TeamMember)
+ .join(Team)
+ .join(RepositoryPermission)
+ .join(Repository)
+ .join(Namespace, on=(Repository.namespace_user == Namespace.id))
+ .where(Namespace.username == namespace_name, Repository.name == repository_name))
def get_all_repo_users_transitive(namespace_name, repository_name):
@@ -924,10 +1002,12 @@ def get_all_repo_users_transitive(namespace_name, repository_name):
def get_repository_for_resource(resource_key):
try:
return (Repository
- .select()
- .join(RepositoryBuild)
- .where(RepositoryBuild.resource_key == resource_key)
- .get())
+ .select(Repository, Namespace)
+ .join(Namespace, on=(Repository.namespace_user == Namespace.id))
+ .switch(Repository)
+ .join(RepositoryBuild)
+ .where(RepositoryBuild.resource_key == resource_key)
+ .get())
except Repository.DoesNotExist:
return None
@@ -941,8 +1021,7 @@ def lookup_repository(repo_id):
def get_repository(namespace_name, repository_name):
try:
- return Repository.get(Repository.name == repository_name,
- Repository.namespace == namespace_name)
+ return _get_repository(namespace_name, repository_name)
except Repository.DoesNotExist:
return None
@@ -959,11 +1038,18 @@ def get_repo_image(namespace_name, repository_name, image_id):
def repository_is_public(namespace_name, repository_name):
- joined = Repository.select().join(Visibility)
- query = joined.where(Repository.namespace == namespace_name,
- Repository.name == repository_name,
- Visibility.name == 'public')
- return len(list(query)) > 0
+ try:
+ (Repository
+ .select()
+ .join(Namespace, on=(Repository.namespace_user == Namespace.id))
+ .switch(Repository)
+ .join(Visibility)
+ .where(Namespace.username == namespace_name, Repository.name == repository_name,
+ Visibility.name == 'public')
+ .get())
+ return True
+ except Repository.DoesNotExist:
+ return False
def set_repository_visibility(repo, visibility):
@@ -995,8 +1081,8 @@ def __apply_default_permissions(repo, proto_query, name_property,
def create_repository(namespace, name, creating_user, visibility='private'):
private = Visibility.get(name=visibility)
- repo = Repository.create(namespace=namespace, name=name,
- visibility=private)
+ namespace_user = User.get(username=namespace)
+ repo = Repository.create(name=name, visibility=private, namespace_user=namespace_user)
admin = Role.get(name='admin')
if creating_user and not creating_user.organization:
@@ -1070,32 +1156,34 @@ def _create_storage(location_name):
def find_create_or_link_image(docker_image_id, repository, username, translations,
preferred_location):
with config.app_config['DB_TRANSACTION_FACTORY'](db):
- repo_image = get_repo_image(repository.namespace, repository.name,
+ repo_image = get_repo_image(repository.namespace_user.username, repository.name,
docker_image_id)
if repo_image:
return repo_image
query = (Image
- .select(Image, ImageStorage)
- .distinct()
- .join(ImageStorage)
- .switch(Image)
- .join(Repository)
- .join(Visibility)
- .switch(Repository)
- .join(RepositoryPermission, JOIN_LEFT_OUTER)
- .where(ImageStorage.uploading == False))
+ .select(Image, ImageStorage)
+ .distinct()
+ .join(ImageStorage)
+ .switch(Image)
+ .join(Repository)
+ .join(Visibility)
+ .switch(Repository)
+ .join(RepositoryPermission, JOIN_LEFT_OUTER)
+ .switch(Repository)
+ .join(Namespace, on=(Repository.namespace_user == Namespace.id))
+ .where(ImageStorage.uploading == False))
query = (_filter_to_repos_for_user(query, username)
- .where(Image.docker_image_id == docker_image_id))
-
+ .where(Image.docker_image_id == docker_image_id))
+
new_image_ancestry = '/'
origin_image_id = None
try:
to_copy = query.get()
msg = 'Linking image to existing storage with docker id: %s and uuid: %s'
logger.debug(msg, docker_image_id, to_copy.storage.uuid)
-
+
new_image_ancestry = __translate_ancestry(to_copy.ancestors, translations, repository,
username, preferred_location)
@@ -1145,11 +1233,11 @@ def find_or_create_derived_storage(source, transformation_name, preferred_locati
def get_storage_by_uuid(storage_uuid):
placements = list(ImageStoragePlacement
- .select(ImageStoragePlacement, ImageStorage, ImageStorageLocation)
- .join(ImageStorageLocation)
- .switch(ImageStoragePlacement)
- .join(ImageStorage)
- .where(ImageStorage.uuid == storage_uuid))
+ .select(ImageStoragePlacement, ImageStorage, ImageStorageLocation)
+ .join(ImageStorageLocation)
+ .switch(ImageStoragePlacement)
+ .join(ImageStorage)
+ .where(ImageStorage.uuid == storage_uuid))
if not placements:
raise InvalidImageException('No storage found with uuid: %s', storage_uuid)
@@ -1160,49 +1248,47 @@ def get_storage_by_uuid(storage_uuid):
return found
-def set_image_size(docker_image_id, namespace_name, repository_name,
- image_size):
+def set_image_size(docker_image_id, namespace_name, repository_name, image_size, uncompressed_size):
try:
image = (Image
- .select(Image, ImageStorage)
- .join(Repository)
- .switch(Image)
- .join(ImageStorage, JOIN_LEFT_OUTER)
- .where(Repository.name == repository_name,
- Repository.namespace == namespace_name,
- Image.docker_image_id == docker_image_id)
- .get())
+ .select(Image, ImageStorage)
+ .join(Repository)
+ .join(Namespace, on=(Repository.namespace_user == Namespace.id))
+ .switch(Image)
+ .join(ImageStorage, JOIN_LEFT_OUTER)
+ .where(Repository.name == repository_name, Namespace.username == namespace_name,
+ Image.docker_image_id == docker_image_id)
+ .get())
except Image.DoesNotExist:
raise DataModelException('No image with specified id and repository')
- if image.storage and image.storage.id:
- image.storage.image_size = image_size
- image.storage.save()
- else:
- image.image_size = image_size
- image.save()
+ image.storage.image_size = image_size
+ image.storage.uncompressed_size = uncompressed_size
+ image.storage.save()
return image
-def set_image_metadata(docker_image_id, namespace_name, repository_name,
- created_date_str, comment, command, parent=None):
+def set_image_metadata(docker_image_id, namespace_name, repository_name, created_date_str, comment,
+ command, parent=None):
with config.app_config['DB_TRANSACTION_FACTORY'](db):
query = (Image
- .select(Image, ImageStorage)
- .join(Repository)
- .switch(Image)
- .join(ImageStorage)
- .where(Repository.name == repository_name,
- Repository.namespace == namespace_name,
- Image.docker_image_id == docker_image_id))
+ .select(Image, ImageStorage)
+ .join(Repository)
+ .join(Namespace, on=(Repository.namespace_user == Namespace.id))
+ .switch(Image)
+ .join(ImageStorage)
+ .where(Repository.name == repository_name, Namespace.username == namespace_name,
+ Image.docker_image_id == docker_image_id))
try:
fetched = query.get()
except Image.DoesNotExist:
raise DataModelException('No image with specified id and repository')
+    # Clean up any old checksum in case this is a retry after a failure.
+ fetched.storage.checksum = None
fetched.storage.created = dateutil.parser.parse(created_date_str).replace(tzinfo=None)
fetched.storage.comment = comment
fetched.storage.command = command
@@ -1217,14 +1303,14 @@ def set_image_metadata(docker_image_id, namespace_name, repository_name,
def _get_repository_images_base(namespace_name, repository_name, query_modifier):
query = (ImageStoragePlacement
- .select(ImageStoragePlacement, Image, ImageStorage, ImageStorageLocation)
- .join(ImageStorageLocation)
- .switch(ImageStoragePlacement)
- .join(ImageStorage, JOIN_LEFT_OUTER)
- .join(Image)
- .join(Repository)
- .where(Repository.name == repository_name,
- Repository.namespace == namespace_name))
+ .select(ImageStoragePlacement, Image, ImageStorage, ImageStorageLocation)
+ .join(ImageStorageLocation)
+ .switch(ImageStoragePlacement)
+ .join(ImageStorage, JOIN_LEFT_OUTER)
+ .join(Image)
+ .join(Repository)
+ .join(Namespace, on=(Repository.namespace_user == Namespace.id))
+ .where(Repository.name == repository_name, Namespace.username == namespace_name))
query = query_modifier(query)
@@ -1255,24 +1341,26 @@ def get_repository_images(namespace_name, repository_name):
def list_repository_tags(namespace_name, repository_name):
- select = RepositoryTag.select(RepositoryTag, Image)
- with_repo = select.join(Repository)
- with_image = with_repo.switch(RepositoryTag).join(Image)
- return with_image.where(Repository.name == repository_name,
- Repository.namespace == namespace_name)
+ return (RepositoryTag
+ .select(RepositoryTag, Image)
+ .join(Repository)
+ .join(Namespace, on=(Repository.namespace_user == Namespace.id))
+ .switch(RepositoryTag)
+ .join(Image)
+ .where(Repository.name == repository_name, Namespace.username == namespace_name))
def garbage_collect_repository(namespace_name, repository_name):
with config.app_config['DB_TRANSACTION_FACTORY'](db):
# Get a list of all images used by tags in the repository
tag_query = (RepositoryTag
- .select(RepositoryTag, Image, ImageStorage)
- .join(Image)
- .join(ImageStorage, JOIN_LEFT_OUTER)
- .switch(RepositoryTag)
- .join(Repository)
- .where(Repository.name == repository_name,
- Repository.namespace == namespace_name))
+ .select(RepositoryTag, Image, ImageStorage)
+ .join(Image)
+ .join(ImageStorage, JOIN_LEFT_OUTER)
+ .switch(RepositoryTag)
+ .join(Repository)
+ .join(Namespace, on=(Repository.namespace_user == Namespace.id))
+ .where(Repository.name == repository_name, Namespace.username == namespace_name))
referenced_anscestors = set()
for tag in tag_query:
@@ -1312,11 +1400,11 @@ def garbage_collect_repository(namespace_name, repository_name):
if uuids_to_check_for_gc:
storage_to_remove = (ImageStorage
- .select()
- .join(Image, JOIN_LEFT_OUTER)
- .group_by(ImageStorage)
- .where(ImageStorage.uuid << list(uuids_to_check_for_gc))
- .having(fn.Count(Image.id) == 0))
+ .select()
+ .join(Image, JOIN_LEFT_OUTER)
+ .group_by(ImageStorage)
+ .where(ImageStorage.uuid << list(uuids_to_check_for_gc))
+ .having(fn.Count(Image.id) == 0))
remove_storages(storage_to_remove)
@@ -1333,9 +1421,9 @@ def garbage_collect_repository(namespace_name, repository_name):
def get_tag_image(namespace_name, repository_name, tag_name):
def limit_to_tag(query):
return (query
- .switch(Image)
- .join(RepositoryTag)
- .where(RepositoryTag.name == tag_name))
+ .switch(Image)
+ .join(RepositoryTag)
+ .where(RepositoryTag.name == tag_name))
images = _get_repository_images_base(namespace_name, repository_name, limit_to_tag)
if not images:
@@ -1373,22 +1461,17 @@ def get_parent_images(namespace_name, repository_name, image_obj):
def create_or_update_tag(namespace_name, repository_name, tag_name,
tag_docker_image_id):
try:
- repo = Repository.get(Repository.name == repository_name,
- Repository.namespace == namespace_name)
+ repo = _get_repository(namespace_name, repository_name)
except Repository.DoesNotExist:
- raise DataModelException('Invalid repository %s/%s' %
- (namespace_name, repository_name))
+ raise DataModelException('Invalid repository %s/%s' % (namespace_name, repository_name))
try:
- image = Image.get(Image.docker_image_id == tag_docker_image_id,
- Image.repository == repo)
+ image = Image.get(Image.docker_image_id == tag_docker_image_id, Image.repository == repo)
except Image.DoesNotExist:
- raise DataModelException('Invalid image with id: %s' %
- tag_docker_image_id)
+ raise DataModelException('Invalid image with id: %s' % tag_docker_image_id)
try:
- tag = RepositoryTag.get(RepositoryTag.repository == repo,
- RepositoryTag.name == tag_name)
+ tag = RepositoryTag.get(RepositoryTag.repository == repo, RepositoryTag.name == tag_name)
tag.image = image
tag.save()
except RepositoryTag.DoesNotExist:
@@ -1398,41 +1481,46 @@ def create_or_update_tag(namespace_name, repository_name, tag_name,
def delete_tag(namespace_name, repository_name, tag_name):
- joined = RepositoryTag.select().join(Repository)
- found = list(joined.where(Repository.name == repository_name,
- Repository.namespace == namespace_name,
- RepositoryTag.name == tag_name))
+ try:
+ found = (RepositoryTag
+ .select()
+ .join(Repository)
+ .join(Namespace, on=(Repository.namespace_user == Namespace.id))
+ .where(Repository.name == repository_name, Namespace.username == namespace_name,
+ RepositoryTag.name == tag_name)
+ .get())
- if not found:
+ except RepositoryTag.DoesNotExist:
msg = ('Invalid repository tag \'%s\' on repository \'%s/%s\'' %
(tag_name, namespace_name, repository_name))
raise DataModelException(msg)
- found[0].delete_instance()
+ found.delete_instance()
def delete_all_repository_tags(namespace_name, repository_name):
try:
- repo = Repository.get(Repository.name == repository_name,
- Repository.namespace == namespace_name)
+ repo = _get_repository(namespace_name, repository_name)
except Repository.DoesNotExist:
raise DataModelException('Invalid repository \'%s/%s\'' %
(namespace_name, repository_name))
RepositoryTag.delete().where(RepositoryTag.repository == repo.id).execute()
-def __entity_permission_repo_query(entity_id, entity_table,
- entity_id_property, namespace_name,
+def __entity_permission_repo_query(entity_id, entity_table, entity_id_property, namespace_name,
repository_name):
""" This method works for both users and teams. """
- selected = RepositoryPermission.select(entity_table, Repository, Role,
- RepositoryPermission)
- with_user = selected.join(entity_table)
- with_role = with_user.switch(RepositoryPermission).join(Role)
- with_repo = with_role.switch(RepositoryPermission).join(Repository)
- return with_repo.where(Repository.name == repository_name,
- Repository.namespace == namespace_name,
- entity_id_property == entity_id)
+
+ return (RepositoryPermission
+ .select(entity_table, Repository, Namespace, Role, RepositoryPermission)
+ .join(entity_table)
+ .switch(RepositoryPermission)
+ .join(Role)
+ .switch(RepositoryPermission)
+ .join(Repository)
+ .join(Namespace, on=(Repository.namespace_user == Namespace.id))
+ .where(Repository.name == repository_name, Namespace.username == namespace_name,
+ entity_id_property == entity_id))
def get_user_reponame_permission(username, namespace_name, repository_name):
@@ -1480,8 +1568,7 @@ def delete_team_permission(team_name, namespace_name, repository_name):
def __set_entity_repo_permission(entity, permission_entity_property,
namespace_name, repository_name, role_name):
- repo = Repository.get(Repository.name == repository_name,
- Repository.namespace == namespace_name)
+ repo = _get_repository(namespace_name, repository_name)
new_role = Role.get(Role.name == role_name)
# Fetch any existing permission for this entity on the repo
@@ -1532,15 +1619,18 @@ def purge_repository(namespace_name, repository_name):
garbage_collect_repository(namespace_name, repository_name)
# Delete the rest of the repository metadata
- fetched = Repository.get(Repository.name == repository_name,
- Repository.namespace == namespace_name)
+ fetched = _get_repository(namespace_name, repository_name)
fetched.delete_instance(recursive=True)
def get_private_repo_count(username):
- joined = Repository.select().join(Visibility)
- return joined.where(Repository.namespace == username,
- Visibility.name == 'private').count()
+ return (Repository
+ .select()
+ .join(Visibility)
+ .switch(Repository)
+ .join(Namespace, on=(Repository.namespace_user == Namespace.id))
+ .where(Namespace.username == username, Visibility.name == 'private')
+ .count())
def create_access_token(repository, role):
@@ -1553,22 +1643,23 @@ def create_access_token(repository, role):
def create_delegate_token(namespace_name, repository_name, friendly_name,
role='read'):
read_only = Role.get(name=role)
- repo = Repository.get(Repository.name == repository_name,
- Repository.namespace == namespace_name)
+ repo = _get_repository(namespace_name, repository_name)
new_token = AccessToken.create(repository=repo, role=read_only,
friendly_name=friendly_name, temporary=False)
return new_token
def get_repository_delegate_tokens(namespace_name, repository_name):
- return (AccessToken.select(AccessToken, Role)
- .join(Repository)
- .switch(AccessToken)
- .join(Role)
- .switch(AccessToken)
- .join(RepositoryBuildTrigger, JOIN_LEFT_OUTER)
- .where(Repository.name == repository_name, Repository.namespace == namespace_name,
- AccessToken.temporary == False, RepositoryBuildTrigger.uuid >> None))
+ return (AccessToken
+ .select(AccessToken, Role)
+ .join(Repository)
+ .join(Namespace, on=(Repository.namespace_user == Namespace.id))
+ .switch(AccessToken)
+ .join(Role)
+ .switch(AccessToken)
+ .join(RepositoryBuildTrigger, JOIN_LEFT_OUTER)
+ .where(Repository.name == repository_name, Namespace.username == namespace_name,
+ AccessToken.temporary == False, RepositoryBuildTrigger.uuid >> None))
def get_repo_delegate_token(namespace_name, repository_name, code):
@@ -1602,14 +1693,17 @@ def delete_delegate_token(namespace_name, repository_name, code):
def load_token_data(code):
""" Load the permissions for any token by code. """
- selected = AccessToken.select(AccessToken, Repository, Role)
- with_role = selected.join(Role)
- with_repo = with_role.switch(AccessToken).join(Repository)
- fetched = list(with_repo.where(AccessToken.code == code))
+ try:
+ return (AccessToken
+ .select(AccessToken, Repository, Namespace, Role)
+ .join(Role)
+ .switch(AccessToken)
+ .join(Repository)
+ .join(Namespace, on=(Repository.namespace_user == Namespace.id))
+ .where(AccessToken.code == code)
+ .get())
- if fetched:
- return fetched[0]
- else:
+ except AccessToken.DoesNotExist:
raise InvalidTokenException('Invalid delegate token code: %s' % code)
@@ -1626,15 +1720,15 @@ def get_repository_build(namespace_name, repository_name, build_uuid):
def list_repository_builds(namespace_name, repository_name, limit,
include_inactive=True):
query = (RepositoryBuild
- .select(RepositoryBuild, RepositoryBuildTrigger, BuildTriggerService)
- .join(Repository)
- .switch(RepositoryBuild)
- .join(RepositoryBuildTrigger, JOIN_LEFT_OUTER)
- .join(BuildTriggerService, JOIN_LEFT_OUTER)
- .where(Repository.name == repository_name,
- Repository.namespace == namespace_name)
- .order_by(RepositoryBuild.started.desc())
- .limit(limit))
+ .select(RepositoryBuild, RepositoryBuildTrigger, BuildTriggerService)
+ .join(Repository)
+ .join(Namespace, on=(Repository.namespace_user == Namespace.id))
+ .switch(RepositoryBuild)
+ .join(RepositoryBuildTrigger, JOIN_LEFT_OUTER)
+ .join(BuildTriggerService, JOIN_LEFT_OUTER)
+ .where(Repository.name == repository_name, Namespace.username == namespace_name)
+ .order_by(RepositoryBuild.started.desc())
+ .limit(limit))
if not include_inactive:
query = query.where(RepositoryBuild.phase != 'error',
@@ -1698,16 +1792,17 @@ def create_repo_notification(repo, event_name, method_name, config):
def get_repo_notification(namespace_name, repository_name, uuid):
- joined = RepositoryNotification.select().join(Repository)
- found = list(joined.where(Repository.namespace == namespace_name,
- Repository.name == repository_name,
- RepositoryNotification.uuid == uuid))
-
- if not found:
+ try:
+ return (RepositoryNotification
+ .select(RepositoryNotification, Repository, Namespace)
+ .join(Repository)
+ .join(Namespace, on=(Repository.namespace_user == Namespace.id))
+ .where(Namespace.username == namespace_name, Repository.name == repository_name,
+ RepositoryNotification.uuid == uuid)
+ .get())
+ except RepositoryNotification.DoesNotExist:
raise InvalidNotificationException('No repository notification found with id: %s' % uuid)
- return found[0]
-
def delete_repo_notification(namespace_name, repository_name, uuid):
found = get_repo_notification(namespace_name, repository_name, uuid)
@@ -1716,15 +1811,19 @@ def delete_repo_notification(namespace_name, repository_name, uuid):
def list_repo_notifications(namespace_name, repository_name, event_name=None):
- joined = RepositoryNotification.select().join(Repository)
- where = joined.where(Repository.namespace == namespace_name,
- Repository.name == repository_name)
+ query = (RepositoryNotification
+ .select(RepositoryNotification, Repository, Namespace)
+ .join(Repository)
+ .join(Namespace, on=(Repository.namespace_user == Namespace.id))
+ .where(Namespace.username == namespace_name, Repository.name == repository_name))
if event_name:
- event = ExternalNotificationEvent.get(ExternalNotificationEvent.name == event_name)
- where = where.where(RepositoryNotification.event == event)
+ query = (query
+ .switch(RepositoryNotification)
+ .join(ExternalNotificationEvent)
+ .where(ExternalNotificationEvent.name == event_name))
- return where
+ return query
def list_logs(start_time, end_time, performer=None, repository=None, namespace=None):
@@ -1768,16 +1867,17 @@ def create_build_trigger(repo, service_name, auth_token, user, pull_robot=None):
def get_build_trigger(namespace_name, repository_name, trigger_uuid):
try:
return (RepositoryBuildTrigger
- .select(RepositoryBuildTrigger, BuildTriggerService, Repository)
- .join(BuildTriggerService)
- .switch(RepositoryBuildTrigger)
- .join(Repository)
- .switch(RepositoryBuildTrigger)
- .join(User)
- .where(RepositoryBuildTrigger.uuid == trigger_uuid,
- Repository.namespace == namespace_name,
- Repository.name == repository_name)
- .get())
+ .select(RepositoryBuildTrigger, BuildTriggerService, Repository, Namespace)
+ .join(BuildTriggerService)
+ .switch(RepositoryBuildTrigger)
+ .join(Repository)
+ .join(Namespace, on=(Repository.namespace_user == Namespace.id))
+ .switch(RepositoryBuildTrigger)
+ .join(User)
+ .where(RepositoryBuildTrigger.uuid == trigger_uuid,
+ Namespace.username == namespace_name,
+ Repository.name == repository_name)
+ .get())
except RepositoryBuildTrigger.DoesNotExist:
msg = 'No build trigger with uuid: %s' % trigger_uuid
raise InvalidBuildTriggerException(msg)
@@ -1785,12 +1885,12 @@ def get_build_trigger(namespace_name, repository_name, trigger_uuid):
def list_build_triggers(namespace_name, repository_name):
return (RepositoryBuildTrigger
- .select(RepositoryBuildTrigger, BuildTriggerService, Repository)
- .join(BuildTriggerService)
- .switch(RepositoryBuildTrigger)
- .join(Repository)
- .where(Repository.namespace == namespace_name,
- Repository.name == repository_name))
+ .select(RepositoryBuildTrigger, BuildTriggerService, Repository)
+ .join(BuildTriggerService)
+ .switch(RepositoryBuildTrigger)
+ .join(Repository)
+ .join(Namespace, on=(Repository.namespace_user == Namespace.id))
+ .where(Namespace.username == namespace_name, Repository.name == repository_name))
def list_trigger_builds(namespace_name, repository_name, trigger_uuid,
@@ -1880,9 +1980,37 @@ def delete_notifications_by_kind(target, kind_name):
Notification.kind == kind_ref).execute()
+def delete_matching_notifications(target, kind_name, **kwargs):
+ kind_ref = NotificationKind.get(name=kind_name)
+
+ # Load all notifications for the user with the given kind.
+ notifications = Notification.select().where(
+ Notification.target == target,
+ Notification.kind == kind_ref)
+
+ # For each, match the metadata to the specified values.
+ for notification in notifications:
+ matches = True
+ try:
+ metadata = json.loads(notification.metadata_json)
+ except (TypeError, ValueError):
+ continue
+
+ for (key, value) in kwargs.iteritems():
+ if key not in metadata or metadata[key] != value:
+ matches = False
+ break
+
+ if not matches:
+ continue
+
+ notification.delete_instance()
+
+
def get_active_users():
return User.select().where(User.organization == False, User.robot == False)
+
def get_active_user_count():
return get_active_users().count()
@@ -1896,11 +2024,13 @@ def detach_external_login(user, service_name):
FederatedLogin.delete().where(FederatedLogin.user == user,
FederatedLogin.service == service).execute()
+
def delete_user(user):
user.delete_instance(recursive=True, delete_nullable=True)
# TODO: also delete any repository data associated
+
def check_health():
# We will connect to the db, check that it contains some log entry kinds
try:
@@ -1909,24 +2039,23 @@ def check_health():
except:
return False
-def get_email_authorized_for_repo(namespace, repository, email):
- found = list(RepositoryAuthorizedEmail.select()
- .join(Repository)
- .where(Repository.namespace == namespace,
- Repository.name == repository,
- RepositoryAuthorizedEmail.email == email)
- .switch(RepositoryAuthorizedEmail)
- .limit(1))
- if not found or len(found) < 1:
- return None
- return found[0]
+def get_email_authorized_for_repo(namespace, repository, email):
+ try:
+ return (RepositoryAuthorizedEmail
+ .select(RepositoryAuthorizedEmail, Repository, Namespace)
+ .join(Repository)
+ .join(Namespace, on=(Repository.namespace_user == Namespace.id))
+ .where(Namespace.username == namespace, Repository.name == repository,
+ RepositoryAuthorizedEmail.email == email)
+ .get())
+ except RepositoryAuthorizedEmail.DoesNotExist:
+ return None
def create_email_authorization_for_repo(namespace_name, repository_name, email):
try:
- repo = Repository.get(Repository.name == repository_name,
- Repository.namespace == namespace_name)
+ repo = _get_repository(namespace_name, repository_name)
except Repository.DoesNotExist:
raise DataModelException('Invalid repository %s/%s' %
(namespace_name, repository_name))
@@ -1936,7 +2065,12 @@ def create_email_authorization_for_repo(namespace_name, repository_name, email):
def confirm_email_authorization_for_repo(code):
try:
- found = RepositoryAuthorizedEmail.get(RepositoryAuthorizedEmail.code == code)
+ found = (RepositoryAuthorizedEmail
+ .select(RepositoryAuthorizedEmail, Repository, Namespace)
+ .join(Repository)
+ .join(Namespace, on=(Repository.namespace_user == Namespace.id))
+ .where(RepositoryAuthorizedEmail.code == code)
+ .get())
except RepositoryAuthorizedEmail.DoesNotExist:
raise DataModelException('Invalid confirmation code.')
@@ -1946,6 +2080,67 @@ def confirm_email_authorization_for_repo(code):
return found
+def delete_team_email_invite(team, email):
+ found = TeamMemberInvite.get(TeamMemberInvite.email == email, TeamMemberInvite.team == team)
+ found.delete_instance()
+
+def delete_team_user_invite(team, user):
+ try:
+ found = TeamMemberInvite.get(TeamMemberInvite.user == user, TeamMemberInvite.team == team)
+ except TeamMemberInvite.DoesNotExist:
+ return False
+
+ found.delete_instance()
+ return True
+
+def lookup_team_invites(user):
+ return TeamMemberInvite.select().where(TeamMemberInvite.user == user)
+
+def lookup_team_invite(code, user=None):
+ # Lookup the invite code.
+ try:
+ found = TeamMemberInvite.get(TeamMemberInvite.invite_token == code)
+ except TeamMemberInvite.DoesNotExist:
+ raise DataModelException('Invalid confirmation code.')
+
+ if user and found.user != user:
+ raise DataModelException('Invalid confirmation code.')
+
+ return found
+
+def delete_team_invite(code, user=None):
+ found = lookup_team_invite(code, user)
+
+ team = found.team
+ inviter = found.inviter
+
+ found.delete_instance()
+
+ return (team, inviter)
+
+
+def confirm_team_invite(code, user):
+ found = lookup_team_invite(code)
+
+ # If the invite is for a specific user, we have to confirm that here.
+ if found.user is not None and found.user != user:
+ message = """This invite is intended for user "%s".
+ Please log in to that account and try again.""" % found.user.username
+ raise DataModelException(message)
+
+ # Add the user to the team.
+ try:
+ add_user_to_team(user, found.team)
+ except UserAlreadyInTeam:
+ # Ignore.
+ pass
+
+ # Delete the invite and return the team.
+ team = found.team
+ inviter = found.inviter
+ found.delete_instance()
+ return (team, inviter)
+
def archivable_buildlogs_query():
presumed_dead_date = datetime.utcnow() - PRESUMED_DEAD_BUILD_AGE
return (RepositoryBuild.select()
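
All of the data/model.py rewrites above resolve a repository's namespace through a join against the Namespace alias of User instead of the old Repository.namespace string column. A minimal usage sketch of one rewritten helper; the namespace and repository names are illustrative, not taken from the source:

    # Non-temporary delegate tokens for a hypothetical repo; each row carries the
    # joined Role, so no extra query is issued per token.
    for token in get_repository_delegate_tokens('exampleorg', 'examplerepo'):
        print(token.friendly_name, token.role.name)
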
diff --git a/data/model/sqlalchemybridge.py b/data/model/sqlalchemybridge.py
index 46809fb21..8b7d8b664 100644
--- a/data/model/sqlalchemybridge.py
+++ b/data/model/sqlalchemybridge.py
@@ -17,7 +17,12 @@ OPTION_TRANSLATIONS = {
def gen_sqlalchemy_metadata(peewee_model_list):
- metadata = MetaData()
+ metadata = MetaData(naming_convention={
+ "ix": 'ix_%(column_0_label)s',
+ "uq": "uq_%(table_name)s_%(column_0_name)s",
+ "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
+ "pk": "pk_%(table_name)s"
+ })
for model in peewee_model_list:
meta = model._meta
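
The naming_convention added above gives otherwise-anonymous indexes and constraints deterministic names in the DDL generated from the peewee models. A self-contained sketch of the effect; the table and columns are invented for illustration:

    from sqlalchemy import Column, Integer, MetaData, String, Table
    from sqlalchemy.schema import CreateTable

    metadata = MetaData(naming_convention={
        "ix": 'ix_%(column_0_label)s',
        "uq": "uq_%(table_name)s_%(column_0_name)s",
        "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
        "pk": "pk_%(table_name)s"
    })

    user = Table('user', metadata,
                 Column('id', Integer, primary_key=True),
                 Column('email', String(255), unique=True))

    # The emitted CREATE TABLE names the constraints pk_user and uq_user_email.
    print(CreateTable(user))
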
diff --git a/data/queue.py b/data/queue.py
index 44d7ad531..79e645ebf 100644
--- a/data/queue.py
+++ b/data/queue.py
@@ -68,9 +68,8 @@ class WorkQueue(object):
'retries_remaining': retries_remaining,
}
- if available_after:
- available_date = datetime.utcnow() + timedelta(seconds=available_after)
- params['available_after'] = available_date
+ available_date = datetime.utcnow() + timedelta(seconds=available_after or 0)
+ params['available_after'] = available_date
with self._transaction_factory(db):
QueueItem.create(**params)
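
The queue change above always stamps an availability date, so an unset delay simply collapses to "available now":

    from datetime import datetime, timedelta

    available_after = None  # caller asked for no delay
    available_date = datetime.utcnow() + timedelta(seconds=available_after or 0)
    # available_date is effectively utcnow(); a positive value pushes it into the future.
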
diff --git a/data/userevent.py b/data/userevent.py
index bcdafd078..b45d4e4fa 100644
--- a/data/userevent.py
+++ b/data/userevent.py
@@ -7,14 +7,14 @@ class UserEventBuilder(object):
Defines a helper class for constructing UserEvent and UserEventListener
instances.
"""
- def __init__(self, redis_host):
- self._redis_host = redis_host
+ def __init__(self, redis_config):
+ self._redis_config = redis_config
def get_event(self, username):
- return UserEvent(self._redis_host, username)
+ return UserEvent(self._redis_config, username)
def get_listener(self, username, events):
- return UserEventListener(self._redis_host, username, events)
+ return UserEventListener(self._redis_config, username, events)
class UserEventsBuilderModule(object):
@@ -26,8 +26,14 @@ class UserEventsBuilderModule(object):
self.state = None
def init_app(self, app):
- redis_hostname = app.config.get('USER_EVENTS_REDIS_HOSTNAME')
- user_events = UserEventBuilder(redis_hostname)
+ redis_config = app.config.get('USER_EVENTS_REDIS')
+ if not redis_config:
+ # This is the old key name.
+ redis_config = {
+ 'host': app.config.get('USER_EVENTS_REDIS_HOSTNAME')
+ }
+
+ user_events = UserEventBuilder(redis_config)
# register extension with app
app.extensions = getattr(app, 'extensions', {})
@@ -43,8 +49,8 @@ class UserEvent(object):
Defines a helper class for publishing to realtime user events
as backed by Redis.
"""
- def __init__(self, redis_host, username):
- self._redis = redis.StrictRedis(host=redis_host)
+ def __init__(self, redis_config, username):
+ self._redis = redis.StrictRedis(socket_connect_timeout=5, **redis_config)
self._username = username
@staticmethod
@@ -74,10 +80,10 @@ class UserEventListener(object):
Defines a helper class for subscribing to realtime user events as
backed by Redis.
"""
- def __init__(self, redis_host, username, events=set([])):
+ def __init__(self, redis_config, username, events=set([])):
channels = [self._user_event_key(username, e) for e in events]
- self._redis = redis.StrictRedis(host=redis_host)
+ self._redis = redis.StrictRedis(socket_connect_timeout=5, **redis_config)
self._pubsub = self._redis.pubsub()
self._pubsub.subscribe(channels)
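
UserEventBuilder now takes a full Redis connection dictionary, which is splatted into redis.StrictRedis, and falls back to the old USER_EVENTS_REDIS_HOSTNAME key. A hedged config sketch; the host, port and password values are placeholders, and any keyword redis.StrictRedis accepts may appear:

    # New-style configuration.
    USER_EVENTS_REDIS = {
        'host': 'redis.example.com',
        'port': 6379,
        'password': 'not-a-real-password',
    }

    # Old-style configuration, still honored through the fallback above.
    USER_EVENTS_REDIS_HOSTNAME = 'redis.example.com'
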
diff --git a/emails/base.html b/emails/base.html
new file mode 100644
index 000000000..33dac53de
--- /dev/null
+++ b/emails/base.html
@@ -0,0 +1,45 @@
+<!DOCTYPE html>
+<html>
+  <head>
+    <meta charset="utf-8">
+    <title>{{ subject }}</title>
+  </head>
+  <body>
+    <table width="100%" cellpadding="0" cellspacing="0">
+      <tr>
+        <td align="center">
+          <table cellpadding="0" cellspacing="0">
+            <tr>
+              <td>
+                {% block content %}{% endblock %}
+              </td>
+            </tr>
+          </table>
+        </td>
+      </tr>
+    </table>
+  </body>
+</html>
diff --git a/emails/changeemail.html b/emails/changeemail.html
new file mode 100644
index 000000000..ee9b909fc
--- /dev/null
+++ b/emails/changeemail.html
@@ -0,0 +1,13 @@
+{% extends "base.html" %}
+
+{% block content %}
+
+E-mail Address Change Requested
+
+A request was recently made to use this e-mail address as the new address for user {{ username | user_reference }}.
+
+
+To confirm this change, please click the following link:
+{{ app_link('confirm?code=' + token) }}
+
+{% endblock %}
diff --git a/emails/confirmemail.html b/emails/confirmemail.html
new file mode 100644
index 000000000..de94372cd
--- /dev/null
+++ b/emails/confirmemail.html
@@ -0,0 +1,13 @@
+{% extends "base.html" %}
+
+{% block content %}
+
+Please Confirm E-mail Address
+
+This email address was recently used to register user {{ username | user_reference }}.
+
+
+To confirm this email address, please click the following link:
+{{ app_link('confirm?code=' + token) }}
+
+{% endblock %}
diff --git a/emails/emailchanged.html b/emails/emailchanged.html
new file mode 100644
index 000000000..ce6de5565
--- /dev/null
+++ b/emails/emailchanged.html
@@ -0,0 +1,12 @@
+{% extends "base.html" %}
+
+{% block content %}
+
+Account E-mail Address Changed
+
+The email address for user {{ username | user_reference }} has been changed from this e-mail address to {{ new_email }}.
+
+
+If this change was not expected, please immediately log into your {{ username | admin_reference }} and reset your email address.
+
+{% endblock %}
diff --git a/emails/passwordchanged.html b/emails/passwordchanged.html
new file mode 100644
index 000000000..07c6232cc
--- /dev/null
+++ b/emails/passwordchanged.html
@@ -0,0 +1,13 @@
+{% extends "base.html" %}
+
+{% block content %}
+
+Account Password Changed
+
+The password for user {{ username | user_reference }} has been updated.
+
+
+If this change was not expected, please immediately log into your account settings and reset your password,
+or contact support.
+
+{% endblock %}
diff --git a/emails/paymentfailure.html b/emails/paymentfailure.html
new file mode 100644
index 000000000..790f590b4
--- /dev/null
+++ b/emails/paymentfailure.html
@@ -0,0 +1,13 @@
+{% extends "base.html" %}
+
+{% block content %}
+
+Subscription Payment Failure
+
+Your recent payment for account {{ username | user_reference }} failed, which usually results in our payments processor canceling
+your subscription automatically. If you would like to continue to use {{ app_title }} without interruption,
+please add a new card to {{ app_title }} and re-subscribe to your plan.
+
+You can find the card and subscription management features under your {{ username | admin_reference }}.
+
+{% endblock %}
diff --git a/emails/recovery.html b/emails/recovery.html
new file mode 100644
index 000000000..6f0267e39
--- /dev/null
+++ b/emails/recovery.html
@@ -0,0 +1,18 @@
+{% extends "base.html" %}
+
+{% block content %}
+
+Account recovery
+
+A user at {{ app_link() }} has attempted to recover their account
+using this email address.
+
+
+If you made this request, please click the following link to recover your account and
+change your password:
+{{ app_link('recovery?code=' + token) }}
+
+If you did not make this request, your account has not been compromised and the user was
+not given access. Please disregard this email.
+
+{% endblock %}
diff --git a/emails/repoauthorizeemail.html b/emails/repoauthorizeemail.html
new file mode 100644
index 000000000..7ae33975c
--- /dev/null
+++ b/emails/repoauthorizeemail.html
@@ -0,0 +1,13 @@
+{% extends "base.html" %}
+
+{% block content %}
+
+Verify e-mail to receive repository notifications
+
+A request has been made to send notifications to this email address for repository {{ (namespace, repository) | repository_reference }}
+
+
+To verify this email address, please click the following link:
+{{ app_link('authrepoemail?code=' + token) }}
+
+{% endblock %}
diff --git a/emails/teaminvite.html b/emails/teaminvite.html
new file mode 100644
index 000000000..3d8ff9c14
--- /dev/null
+++ b/emails/teaminvite.html
@@ -0,0 +1,17 @@
+{% extends "base.html" %}
+
+{% block content %}
+
+Invitation to join team: {{ teamname }}
+
+{{ inviter | user_reference }} has invited you to join the team {{ teamname }} under organization {{ organization | user_reference }}.
+
+
+
+To join the team, please click the following link:
+{{ app_link('confirminvite?code=' + token) }}
+
+
+If you were not expecting this invitation, you can ignore this email.
+
+{% endblock %}
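
The templates above all extend emails/base.html and rely on app-provided Jinja filters (user_reference, admin_reference, repository_reference) plus the app_link and app_title globals. A minimal rendering sketch with stand-in filter implementations; the real ones live elsewhere in the codebase, and every value below is illustrative:

    from jinja2 import Environment, FileSystemLoader

    env = Environment(loader=FileSystemLoader('emails'))
    env.filters['user_reference'] = lambda username: username
    env.filters['admin_reference'] = lambda username: '%s account settings' % username
    env.filters['repository_reference'] = lambda pair: '%s/%s' % pair
    env.globals['app_link'] = lambda suffix='': 'https://quay.example.com/' + suffix
    env.globals['app_title'] = 'Quay.io'

    html = env.get_template('teaminvite.html').render(
        subject='Invitation to join a team', teamname='owners',
        organization='exampleorg', inviter='exampleuser', token='invite-code')
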
diff --git a/endpoints/api/__init__.py b/endpoints/api/__init__.py
index 2f5e2045e..1943051e0 100644
--- a/endpoints/api/__init__.py
+++ b/endpoints/api/__init__.py
@@ -27,8 +27,8 @@ api_bp = Blueprint('api', __name__)
api = Api()
api.init_app(api_bp)
api.decorators = [csrf_protect,
- process_oauth,
- crossdomain(origin='*', headers=['Authorization', 'Content-Type'])]
+ crossdomain(origin='*', headers=['Authorization', 'Content-Type']),
+ process_oauth]
class ApiException(Exception):
@@ -90,6 +90,7 @@ def handle_api_error(error):
if error.error_type is not None:
response.headers['WWW-Authenticate'] = ('Bearer error="%s" error_description="%s"' %
(error.error_type, error.error_description))
+
return response
@@ -191,6 +192,7 @@ def query_param(name, help_str, type=reqparse.text_type, default=None,
'default': default,
'choices': choices,
'required': required,
+ 'location': 'args'
})
return func
return add_param
diff --git a/endpoints/api/build.py b/endpoints/api/build.py
index d792234dd..adf6f43ec 100644
--- a/endpoints/api/build.py
+++ b/endpoints/api/build.py
@@ -169,7 +169,7 @@ class RepositoryBuildList(RepositoryParamResource):
# was used.
associated_repository = model.get_repository_for_resource(dockerfile_id)
if associated_repository:
- if not ModifyRepositoryPermission(associated_repository.namespace,
+ if not ModifyRepositoryPermission(associated_repository.namespace_user.username,
associated_repository.name):
raise Unauthorized()
diff --git a/endpoints/api/discovery.py b/endpoints/api/discovery.py
index 1995c6b42..c3525639f 100644
--- a/endpoints/api/discovery.py
+++ b/endpoints/api/discovery.py
@@ -125,7 +125,11 @@ def swagger_route_data(include_internal=False, compact=False):
new_operation['requires_fresh_login'] = True
if not internal or (internal and include_internal):
- operations.append(new_operation)
+ # Swagger requires valid nicknames on all operations.
+ if new_operation.get('nickname'):
+ operations.append(new_operation)
+ else:
+ logger.debug('Operation missing nickname: %s' % method)
swagger_path = PARAM_REGEX.sub(r'{\2}', rule.rule)
new_resource = {
diff --git a/endpoints/api/image.py b/endpoints/api/image.py
index 3060053ad..3a6c62507 100644
--- a/endpoints/api/image.py
+++ b/endpoints/api/image.py
@@ -9,22 +9,33 @@ from data import model
from util.cache import cache_control_flask_restful
-def image_view(image):
+def image_view(image, image_map):
extended_props = image
if image.storage and image.storage.id:
extended_props = image.storage
command = extended_props.command
+
+ def docker_id(aid):
+ if not aid:
+ return ''
+
+ return image_map[aid]
+
+ # Calculate the ancestors string, with the DB ids replaced by the Docker image ids.
+ ancestors = [docker_id(a) for a in image.ancestors.split('/')]
+ ancestors_string = '/'.join(ancestors)
+
return {
'id': image.docker_image_id,
'created': format_date(extended_props.created),
'comment': extended_props.comment,
'command': json.loads(command) if command else None,
- 'ancestors': image.ancestors,
- 'dbid': image.id,
'size': extended_props.image_size,
'locations': list(image.storage.locations),
'uploading': image.storage.uploading,
+ 'ancestors': ancestors_string,
+ 'sort_index': len(image.ancestors)
}
@@ -42,14 +53,16 @@ class RepositoryImageList(RepositoryParamResource):
for tag in all_tags:
tags_by_image_id[tag.image.docker_image_id].append(tag.name)
+ image_map = {}
+ for image in all_images:
+ image_map[str(image.id)] = image.docker_image_id
def add_tags(image_json):
image_json['tags'] = tags_by_image_id[image_json['id']]
return image_json
-
return {
- 'images': [add_tags(image_view(image)) for image in all_images]
+ 'images': [add_tags(image_view(image, image_map)) for image in all_images]
}
@@ -64,7 +77,12 @@ class RepositoryImage(RepositoryParamResource):
if not image:
raise NotFound()
- return image_view(image)
+ # Lookup all the ancestor images for the image.
+ image_map = {}
+ for current_image in model.get_parent_images(namespace, repository, image):
+ image_map[str(current_image.id)] = current_image.docker_image_id
+
+ return image_view(image, image_map)
@resource('/v1/repository//image//changes')
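
image_view now reports ancestors by Docker image id instead of database id. The translation above, reduced to plain Python with made-up values:

    image_map = {'1': 'aaaa1111', '2': 'bbbb2222', '3': 'cccc3333'}
    ancestors = '/1/2/3/'  # DB ids, slash-delimited, with empty ends

    def docker_id(aid):
        return image_map[aid] if aid else ''

    print('/'.join(docker_id(a) for a in ancestors.split('/')))
    # -> '/aaaa1111/bbbb2222/cccc3333/'
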
diff --git a/endpoints/api/repoemail.py b/endpoints/api/repoemail.py
index 6585bbc49..db4c9b571 100644
--- a/endpoints/api/repoemail.py
+++ b/endpoints/api/repoemail.py
@@ -3,7 +3,8 @@ import logging
from flask import request, abort
from endpoints.api import (resource, nickname, require_repo_admin, RepositoryParamResource,
- log_action, validate_json_request, NotFound, internal_only)
+ log_action, validate_json_request, NotFound, internal_only,
+ show_if)
from app import tf
from data import model
@@ -19,12 +20,13 @@ def record_view(record):
return {
'email': record.email,
'repository': record.repository.name,
- 'namespace': record.repository.namespace,
+ 'namespace': record.repository.namespace_user.username,
'confirmed': record.confirmed
}
@internal_only
+@show_if(features.MAILING)
@resource('/v1/repository//authorizedemail/')
class RepositoryAuthorizedEmail(RepositoryParamResource):
""" Resource for checking and authorizing e-mail addresses to receive repo notifications. """
diff --git a/endpoints/api/repository.py b/endpoints/api/repository.py
index 17a35fea1..be8c9e8f9 100644
--- a/endpoints/api/repository.py
+++ b/endpoints/api/repository.py
@@ -80,8 +80,7 @@ class RepositoryList(ApiResource):
visibility = req['visibility']
- repo = model.create_repository(namespace_name, repository_name, owner,
- visibility)
+ repo = model.create_repository(namespace_name, repository_name, owner, visibility)
repo.description = req['description']
repo.save()
@@ -110,7 +109,7 @@ class RepositoryList(ApiResource):
"""Fetch the list of repositories under a variety of situations."""
def repo_view(repo_obj):
return {
- 'namespace': repo_obj.namespace,
+ 'namespace': repo_obj.namespace_user.username,
'name': repo_obj.name,
'description': repo_obj.description,
'is_public': repo_obj.visibility.name == 'public',
@@ -134,7 +133,8 @@ class RepositoryList(ApiResource):
response['repositories'] = [repo_view(repo) for repo in repo_query
if (repo.visibility.name == 'public' or
- ReadRepositoryPermission(repo.namespace, repo.name).can())]
+ ReadRepositoryPermission(repo.namespace_user.username,
+ repo.name).can())]
return response
@@ -168,8 +168,7 @@ class Repository(RepositoryParamResource):
def tag_view(tag):
return {
'name': tag.name,
- 'image_id': tag.image.docker_image_id,
- 'dbid': tag.image.id
+ 'image_id': tag.image.docker_image_id
}
organization = None
diff --git a/endpoints/api/search.py b/endpoints/api/search.py
index 7cb1a1fda..1cce618d9 100644
--- a/endpoints/api/search.py
+++ b/endpoints/api/search.py
@@ -111,7 +111,7 @@ class FindRepositories(ApiResource):
def repo_view(repo):
return {
- 'namespace': repo.namespace,
+ 'namespace': repo.namespace_user.username,
'name': repo.name,
'description': repo.description
}
@@ -125,5 +125,5 @@ class FindRepositories(ApiResource):
return {
'repositories': [repo_view(repo) for repo in matching
if (repo.visibility.name == 'public' or
- ReadRepositoryPermission(repo.namespace, repo.name).can())]
+ ReadRepositoryPermission(repo.namespace_user.username, repo.name).can())]
}
diff --git a/endpoints/api/superuser.py b/endpoints/api/superuser.py
index 5a117289b..c41c6a46c 100644
--- a/endpoints/api/superuser.py
+++ b/endpoints/api/superuser.py
@@ -1,20 +1,22 @@
+import string
import logging
import json
+from random import SystemRandom
from app import app
-
from flask import request
from endpoints.api import (ApiResource, nickname, resource, validate_json_request, request_error,
log_action, internal_only, NotFound, require_user_admin, format_date,
InvalidToken, require_scope, format_date, hide_if, show_if, parse_args,
- query_param, abort)
+ query_param, abort, require_fresh_login)
from endpoints.api.logs import get_logs
from data import model
from auth.permissions import SuperUserPermission
from auth.auth_context import get_authenticated_user
+from util.useremails import send_confirmation_email, send_recovery_email
import features
@@ -55,6 +57,26 @@ def user_view(user):
@show_if(features.SUPER_USERS)
class SuperUserList(ApiResource):
""" Resource for listing users in the system. """
+ schemas = {
+ 'CreateInstallUser': {
+ 'id': 'CreateInstallUser',
+ 'description': 'Data for creating a user',
+ 'required': ['username', 'email'],
+ 'properties': {
+ 'username': {
+ 'type': 'string',
+ 'description': 'The username of the user being created'
+ },
+
+ 'email': {
+ 'type': 'string',
+ 'description': 'The email address of the user being created'
+ }
+ }
+ }
+ }
+
+ @require_fresh_login
@nickname('listAllUsers')
def get(self):
""" Returns a list of all users in the system. """
@@ -67,6 +89,63 @@ class SuperUserList(ApiResource):
abort(403)
+ @require_fresh_login
+ @nickname('createInstallUser')
+ @validate_json_request('CreateInstallUser')
+ def post(self):
+ """ Creates a new user. """
+ user_information = request.get_json()
+ if SuperUserPermission().can():
+ username = user_information['username']
+ email = user_information['email']
+
+ # Generate a temporary password for the user.
+ random = SystemRandom()
+ password = ''.join([random.choice(string.ascii_uppercase + string.digits) for _ in range(32)])
+
+ # Create the user.
+ user = model.create_user(username, password, email, auto_verify=not features.MAILING)
+
+ # If mailing is turned on, send the user a verification email.
+ if features.MAILING:
+ confirmation = model.create_confirm_email_code(user, new_email=user.email)
+ send_confirmation_email(user.username, user.email, confirmation.code)
+
+ return {
+ 'username': username,
+ 'email': email,
+ 'password': password
+ }
+
+ abort(403)
+
+
+@resource('/v1/superusers/users//sendrecovery')
+@internal_only
+@show_if(features.SUPER_USERS)
+@show_if(features.MAILING)
+class SuperUserSendRecoveryEmail(ApiResource):
+ """ Resource for sending a recovery user on behalf of a user. """
+ @require_fresh_login
+ @nickname('sendInstallUserRecoveryEmail')
+ def post(self, username):
+ if SuperUserPermission().can():
+ user = model.get_user(username)
+ if not user or user.organization or user.robot:
+ abort(404)
+
+ if username in app.config['SUPER_USERS']:
+ abort(403)
+
+ code = model.create_reset_password_email_code(user.email)
+ send_recovery_email(user.email, code.code)
+ return {
+ 'email': user.email
+ }
+
+ abort(403)
+
+
@resource('/v1/superuser/users/')
@internal_only
@show_if(features.SUPER_USERS)
@@ -90,18 +169,20 @@ class SuperUserManagement(ApiResource):
},
}
+ @require_fresh_login
@nickname('getInstallUser')
def get(self, username):
""" Returns information about the specified user. """
if SuperUserPermission().can():
- user = model.get_user(username)
- if not user or user.organization or user.robot:
- abort(404)
-
- return user_view(user)
+ user = model.get_user(username)
+ if not user or user.organization or user.robot:
+ abort(404)
+
+ return user_view(user)
abort(403)
+ @require_fresh_login
@nickname('deleteInstallUser')
def delete(self, username):
""" Deletes the specified user. """
@@ -118,6 +199,7 @@ class SuperUserManagement(ApiResource):
abort(403)
+ @require_fresh_login
@nickname('changeInstallUser')
@validate_json_request('UpdateUser')
def put(self, username):
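
SuperUserList.post above hands the new user a 32-character temporary password drawn from the OS CSPRNG and returns it once in the API response. The generator in isolation:

    import string
    from random import SystemRandom

    random = SystemRandom()
    password = ''.join(random.choice(string.ascii_uppercase + string.digits)
                       for _ in range(32))
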
diff --git a/endpoints/api/tag.py b/endpoints/api/tag.py
index f9210881c..779b821ae 100644
--- a/endpoints/api/tag.py
+++ b/endpoints/api/tag.py
@@ -85,11 +85,14 @@ class RepositoryTagImages(RepositoryParamResource):
raise NotFound()
parent_images = model.get_parent_images(namespace, repository, tag_image)
+ image_map = {}
+ for image in parent_images:
+ image_map[str(image.id)] = image.docker_image_id
parents = list(parent_images)
parents.reverse()
all_images = [tag_image] + parents
return {
- 'images': [image_view(image) for image in all_images]
+ 'images': [image_view(image, image_map) for image in all_images]
}
diff --git a/endpoints/api/team.py b/endpoints/api/team.py
index 0631cc028..a448cefc9 100644
--- a/endpoints/api/team.py
+++ b/endpoints/api/team.py
@@ -1,12 +1,51 @@
from flask import request
from endpoints.api import (resource, nickname, ApiResource, validate_json_request, request_error,
- log_action, Unauthorized, NotFound, internal_only, require_scope)
+ log_action, Unauthorized, NotFound, internal_only, require_scope,
+ query_param, truthy_bool, parse_args, require_user_admin, show_if)
from auth.permissions import AdministerOrganizationPermission, ViewTeamPermission
from auth.auth_context import get_authenticated_user
from auth import scopes
from data import model
+from util.useremails import send_org_invite_email
+from util.gravatar import compute_hash
+import features
+
+def try_accept_invite(code, user):
+ (team, inviter) = model.confirm_team_invite(code, user)
+
+ model.delete_matching_notifications(user, 'org_team_invite', code=code)
+
+ orgname = team.organization.username
+ log_action('org_team_member_invite_accepted', orgname, {
+ 'member': user.username,
+ 'team': team.name,
+ 'inviter': inviter.username
+ })
+
+ return team
+
+
+def handle_addinvite_team(inviter, team, user=None, email=None):
+ invite = model.add_or_invite_to_team(inviter, team, user, email,
+ requires_invite = features.MAILING)
+ if not invite:
+ # User was added to the team directly.
+ return
+
+ orgname = team.organization.username
+ if user:
+ model.create_notification('org_team_invite', user, metadata = {
+ 'code': invite.invite_token,
+ 'inviter': inviter.username,
+ 'org': orgname,
+ 'team': team.name
+ })
+
+ send_org_invite_email(user.username if user else email, user.email if user else email,
+ orgname, team.name, inviter.username, invite.invite_token)
+ return invite
def team_view(orgname, team):
view_permission = ViewTeamPermission(orgname, team.name)
@@ -19,14 +58,28 @@ def team_view(orgname, team):
'role': role
}
-def member_view(member):
+def member_view(member, invited=False):
return {
'name': member.username,
'kind': 'user',
'is_robot': member.robot,
+ 'gravatar': compute_hash(member.email) if not member.robot else None,
+ 'invited': invited,
}
+def invite_view(invite):
+ if invite.user:
+ return member_view(invite.user, invited=True)
+ else:
+ return {
+ 'email': invite.email,
+ 'kind': 'invite',
+ 'gravatar': compute_hash(invite.email),
+ 'invited': True
+ }
+
+
@resource('/v1/organization//team/')
@internal_only
class OrganizationTeam(ApiResource):
@@ -114,8 +167,10 @@ class OrganizationTeam(ApiResource):
@internal_only
class TeamMemberList(ApiResource):
""" Resource for managing the list of members for a team. """
+ @parse_args
+ @query_param('includePending', 'Whether to include pending members', type=truthy_bool, default=False)
@nickname('getOrganizationTeamMembers')
- def get(self, orgname, teamname):
+ def get(self, args, orgname, teamname):
""" Retrieve the list of members for the specified team. """
view_permission = ViewTeamPermission(orgname, teamname)
edit_permission = AdministerOrganizationPermission(orgname)
@@ -128,11 +183,18 @@ class TeamMemberList(ApiResource):
raise NotFound()
members = model.get_organization_team_members(team.id)
- return {
- 'members': {m.username : member_view(m) for m in members},
+ invites = []
+
+ if args['includePending'] and edit_permission.can():
+ invites = model.get_organization_team_member_invites(team.id)
+
+ data = {
+ 'members': [member_view(m) for m in members] + [invite_view(i) for i in invites],
'can_edit': edit_permission.can()
}
+ return data
+
raise Unauthorized()
@@ -142,7 +204,7 @@ class TeamMember(ApiResource):
@require_scope(scopes.ORG_ADMIN)
@nickname('updateOrganizationTeamMember')
def put(self, orgname, teamname, membername):
- """ Add a member to an existing team. """
+ """ Adds or invites a member to an existing team. """
permission = AdministerOrganizationPermission(orgname)
if permission.can():
team = None
@@ -159,23 +221,151 @@ class TeamMember(ApiResource):
if not user:
raise request_error(message='Unknown user')
- # Add the user to the team.
- model.add_user_to_team(user, team)
- log_action('org_add_team_member', orgname, {'member': membername, 'team': teamname})
- return member_view(user)
+ # Add or invite the user to the team.
+ inviter = get_authenticated_user()
+ invite = handle_addinvite_team(inviter, team, user=user)
+ if not invite:
+ log_action('org_add_team_member', orgname, {'member': membername, 'team': teamname})
+ return member_view(user, invited=False)
+
+ # User was invited.
+ log_action('org_invite_team_member', orgname, {
+ 'user': membername,
+ 'member': membername,
+ 'team': teamname
+ })
+ return member_view(user, invited=True)
raise Unauthorized()
@require_scope(scopes.ORG_ADMIN)
@nickname('deleteOrganizationTeamMember')
def delete(self, orgname, teamname, membername):
- """ Delete an existing member of a team. """
+ """ Delete a member of a team. If the user is merely invited to join
+ the team, then the invite is removed instead.
+ """
permission = AdministerOrganizationPermission(orgname)
if permission.can():
# Remove the user from the team.
invoking_user = get_authenticated_user().username
+
+ # Find the team.
+ try:
+ team = model.get_organization_team(orgname, teamname)
+ except model.InvalidTeamException:
+ raise NotFound()
+
+ # Find the member.
+ member = model.get_user(membername)
+ if not member:
+ raise NotFound()
+
+ # First attempt to delete an invite for the user to this team. If none found,
+ # then we try to remove the user directly.
+ if model.delete_team_user_invite(team, member):
+ log_action('org_delete_team_member_invite', orgname, {
+ 'user': membername,
+ 'team': teamname,
+ 'member': membername
+ })
+ return 'Deleted', 204
+
model.remove_user_from_team(orgname, teamname, membername, invoking_user)
log_action('org_remove_team_member', orgname, {'member': membername, 'team': teamname})
return 'Deleted', 204
raise Unauthorized()
+
+
+@resource('/v1/organization//team//invite/')
+@show_if(features.MAILING)
+class InviteTeamMember(ApiResource):
+ """ Resource for inviting a team member via email address. """
+ @require_scope(scopes.ORG_ADMIN)
+ @nickname('inviteTeamMemberEmail')
+ def put(self, orgname, teamname, email):
+ """ Invites an email address to an existing team. """
+ permission = AdministerOrganizationPermission(orgname)
+ if permission.can():
+ team = None
+
+ # Find the team.
+ try:
+ team = model.get_organization_team(orgname, teamname)
+ except model.InvalidTeamException:
+ raise NotFound()
+
+ # Invite the email to the team.
+ inviter = get_authenticated_user()
+ invite = handle_addinvite_team(inviter, team, email=email)
+ log_action('org_invite_team_member', orgname, {
+ 'email': email,
+ 'team': teamname,
+ 'member': email
+ })
+ return invite_view(invite)
+
+ raise Unauthorized()
+
+ @require_scope(scopes.ORG_ADMIN)
+ @nickname('deleteTeamMemberEmailInvite')
+ def delete(self, orgname, teamname, email):
+ """ Delete an invite of an email address to join a team. """
+ permission = AdministerOrganizationPermission(orgname)
+ if permission.can():
+ team = None
+
+ # Find the team.
+ try:
+ team = model.get_organization_team(orgname, teamname)
+ except model.InvalidTeamException:
+ raise NotFound()
+
+ # Delete the invite.
+ model.delete_team_email_invite(team, email)
+ log_action('org_delete_team_member_invite', orgname, {
+ 'email': email,
+ 'team': teamname,
+ 'member': email
+ })
+ return 'Deleted', 204
+
+ raise Unauthorized()
+
+
+@resource('/v1/teaminvite/')
+@internal_only
+@show_if(features.MAILING)
+class TeamMemberInvite(ApiResource):
+ """ Resource for managing invites to jon a team. """
+ @require_user_admin
+ @nickname('acceptOrganizationTeamInvite')
+ def put(self, code):
+ """ Accepts an invite to join a team in an organization. """
+ # Accept the invite for the current user.
+ team = try_accept_invite(code, get_authenticated_user())
+ if not team:
+ raise NotFound()
+
+ orgname = team.organization.username
+ return {
+ 'org': orgname,
+ 'team': team.name
+ }
+
+ @nickname('declineOrganizationTeamInvite')
+ @require_user_admin
+ def delete(self, code):
+ """ Delete an existing member of a team. """
+ (team, inviter) = model.delete_team_invite(code, get_authenticated_user())
+
+ model.delete_matching_notifications(get_authenticated_user(), 'org_team_invite', code=code)
+
+ orgname = team.organization.username
+ log_action('org_team_member_invite_declined', orgname, {
+ 'member': get_authenticated_user().username,
+ 'team': team.name,
+ 'inviter': inviter.username
+ })
+
+ return 'Deleted', 204
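
Both the accept and decline paths call delete_matching_notifications(user, 'org_team_invite', code=code), which compares the keyword arguments against each notification's JSON metadata. The matching rule, reduced to plain Python with invented values:

    import json

    metadata = json.loads('{"code": "abc123", "org": "exampleorg", "team": "owners"}')
    kwargs = {'code': 'abc123'}

    matches = all(key in metadata and metadata[key] == value
                  for key, value in kwargs.items())
    print(matches)  # True, so this notification would be deleted
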
diff --git a/endpoints/api/trigger.py b/endpoints/api/trigger.py
index 4ec20bfdc..081641e00 100644
--- a/endpoints/api/trigger.py
+++ b/endpoints/api/trigger.py
@@ -14,7 +14,7 @@ from endpoints.api.build import (build_status_view, trigger_view, RepositoryBuil
from endpoints.common import start_build
from endpoints.trigger import (BuildTrigger as BuildTriggerBase, TriggerDeactivationException,
TriggerActivationException, EmptyRepositoryException,
- RepositoryReadException)
+ RepositoryReadException, TriggerStartException)
from data import model
from auth.permissions import UserAdminPermission, AdministerOrganizationPermission, ReadRepositoryPermission
from util.names import parse_robot_username
@@ -205,7 +205,7 @@ class BuildTriggerActivate(RepositoryParamResource):
'write')
try:
- repository_path = '%s/%s' % (trigger.repository.namespace,
+ repository_path = '%s/%s' % (trigger.repository.namespace_user.username,
trigger.repository.name)
path = url_for('webhooks.build_trigger_webhook',
repository=repository_path, trigger_uuid=trigger.uuid)
@@ -374,9 +374,24 @@ class BuildTriggerAnalyze(RepositoryParamResource):
@resource('/v1/repository//trigger//start')
class ActivateBuildTrigger(RepositoryParamResource):
""" Custom verb to manually activate a build trigger. """
+ schemas = {
+ 'RunParameters': {
+ 'id': 'RunParameters',
+ 'type': 'object',
+ 'description': 'Optional run parameters for activating the build trigger',
+ 'additionalProperties': False,
+ 'properties': {
+ 'branch_name': {
+ 'type': 'string',
+ 'description': '(GitHub Only) If specified, the name of the GitHub branch to build.'
+ }
+ }
+ }
+ }
@require_repo_admin
@nickname('manuallyStartBuildTrigger')
+ @validate_json_request('RunParameters')
def post(self, namespace, repository, trigger_uuid):
""" Manually start a build from the specified trigger. """
try:
@@ -389,14 +404,18 @@ class ActivateBuildTrigger(RepositoryParamResource):
if not handler.is_active(config_dict):
raise InvalidRequest('Trigger is not active.')
- specs = handler.manual_start(trigger.auth_token, config_dict)
- dockerfile_id, tags, name, subdir = specs
+ try:
+ run_parameters = request.get_json()
+ specs = handler.manual_start(trigger.auth_token, config_dict, run_parameters=run_parameters)
+ dockerfile_id, tags, name, subdir = specs
- repo = model.get_repository(namespace, repository)
- pull_robot_name = model.get_pull_robot_name(trigger)
+ repo = model.get_repository(namespace, repository)
+ pull_robot_name = model.get_pull_robot_name(trigger)
- build_request = start_build(repo, dockerfile_id, tags, name, subdir, True,
- pull_robot_name=pull_robot_name)
+ build_request = start_build(repo, dockerfile_id, tags, name, subdir, True,
+ pull_robot_name=pull_robot_name)
+ except TriggerStartException as tse:
+ raise InvalidRequest(tse.message)
resp = build_status_view(build_request, True)
repo_string = '%s/%s' % (namespace, repository)
@@ -424,6 +443,36 @@ class TriggerBuildList(RepositoryParamResource):
}
+
+@resource('/v1/repository//trigger//fields/')
+@internal_only
+class BuildTriggerFieldValues(RepositoryParamResource):
+ """ Custom verb to fetch a values list for a particular field name. """
+ @require_repo_admin
+ @nickname('listTriggerFieldValues')
+ def get(self, namespace, repository, trigger_uuid, field_name):
+ """ List the field values for a custom run field. """
+ try:
+ trigger = model.get_build_trigger(namespace, repository, trigger_uuid)
+ except model.InvalidBuildTriggerException:
+ raise NotFound()
+
+ user_permission = UserAdminPermission(trigger.connected_user.username)
+ if user_permission.can():
+ trigger_handler = BuildTriggerBase.get_trigger_for_service(trigger.service.name)
+ values = trigger_handler.list_field_values(trigger.auth_token, json.loads(trigger.config),
+ field_name)
+
+ if values is None:
+ raise NotFound()
+
+ return {
+ 'values': values
+ }
+ else:
+ raise Unauthorized()
+
+
@resource('/v1/repository//trigger//sources')
@internal_only
class BuildTriggerSources(RepositoryParamResource):
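
ActivateBuildTrigger now validates an optional RunParameters body, so a manual start of a GitHub-backed trigger can pin a branch. A hypothetical client call; the URL, trigger uuid and bearer token are placeholders, not taken from the source:

    import json
    import requests

    resp = requests.post(
        'https://quay.example.com/api/v1/repository/exampleorg/examplerepo'
        '/trigger/TRIGGER_UUID/start',
        headers={'Content-Type': 'application/json',
                 'Authorization': 'Bearer OAUTH_TOKEN'},
        data=json.dumps({'branch_name': 'master'}))
    print(resp.status_code, resp.json())
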
diff --git a/endpoints/api/user.py b/endpoints/api/user.py
index 43a08508a..3fd59d86e 100644
--- a/endpoints/api/user.py
+++ b/endpoints/api/user.py
@@ -12,6 +12,8 @@ from endpoints.api import (ApiResource, nickname, resource, validate_json_reques
license_error, require_fresh_login)
from endpoints.api.subscribe import subscribe
from endpoints.common import common_login
+from endpoints.api.team import try_accept_invite
+
from data import model
from data.billing import get_plan
from auth.permissions import (AdministerOrganizationPermission, CreateRepositoryPermission,
@@ -19,7 +21,8 @@ from auth.permissions import (AdministerOrganizationPermission, CreateRepository
from auth.auth_context import get_authenticated_user
from auth import scopes
from util.gravatar import compute_hash
-from util.useremails import (send_confirmation_email, send_recovery_email, send_change_email)
+from util.useremails import (send_confirmation_email, send_recovery_email, send_change_email, send_password_changed)
+from util.names import parse_single_urn
import features
@@ -117,6 +120,10 @@ class User(ApiResource):
'type': 'string',
'description': 'The user\'s email address',
},
+ 'invite_code': {
+ 'type': 'string',
+ 'description': 'The optional invite code'
+ }
}
},
'UpdateUser': {
@@ -166,6 +173,9 @@ class User(ApiResource):
log_action('account_change_password', user.username)
model.change_password(user, user_data['password'])
+ if features.MAILING:
+ send_password_changed(user.username, user.email)
+
if 'invoice_email' in user_data:
logger.debug('Changing invoice_email for user: %s', user.username)
model.change_invoice_email(user, user_data['invoice_email'])
@@ -176,22 +186,27 @@ class User(ApiResource):
# Email already used.
raise request_error(message='E-mail address already used')
- logger.debug('Sending email to change email address for user: %s',
- user.username)
- code = model.create_confirm_email_code(user, new_email=new_email)
- send_change_email(user.username, user_data['email'], code.code)
+ if features.MAILING:
+ logger.debug('Sending email to change email address for user: %s',
+ user.username)
+ code = model.create_confirm_email_code(user, new_email=new_email)
+ send_change_email(user.username, user_data['email'], code.code)
+ else:
+ model.update_email(user, new_email, auto_verify=not features.MAILING)
except model.InvalidPasswordException, ex:
raise request_error(exception=ex)
return user_view(user)
+ @show_if(features.USER_CREATION)
@nickname('createNewUser')
@internal_only
@validate_json_request('NewUser')
def post(self):
""" Create a new user. """
user_data = request.get_json()
+ invite_code = user_data.get('invite_code', '')
existing_user = model.get_user(user_data['username'])
if existing_user:
@@ -199,10 +214,29 @@ class User(ApiResource):
try:
new_user = model.create_user(user_data['username'], user_data['password'],
- user_data['email'])
- code = model.create_confirm_email_code(new_user)
- send_confirmation_email(new_user.username, new_user.email, code.code)
- return 'Created', 201
+ user_data['email'], auto_verify=not features.MAILING)
+
+ # Handle any invite codes.
+ parsed_invite = parse_single_urn(invite_code)
+ if parsed_invite is not None:
+ if parsed_invite[0] == 'teaminvite':
+ # Add the user to the team.
+ try:
+ try_accept_invite(invite_code, new_user)
+ except model.DataModelException:
+ pass
+
+
+ if features.MAILING:
+ code = model.create_confirm_email_code(new_user)
+ send_confirmation_email(new_user.username, new_user.email, code.code)
+ return {
+ 'awaiting_verification': True
+ }
+ else:
+ common_login(new_user)
+ return user_view(new_user)
+
except model.TooManyUsersException as ex:
raise license_error(exception=ex)
except model.DataModelException as ex:
@@ -422,6 +456,7 @@ class DetachExternal(ApiResource):
@resource("/v1/recovery")
+@show_if(features.MAILING)
@internal_only
class Recovery(ApiResource):
""" Resource for requesting a password recovery email. """
diff --git a/endpoints/callbacks.py b/endpoints/callbacks.py
index 1cbd46192..637033ab6 100644
--- a/endpoints/callbacks.py
+++ b/endpoints/callbacks.py
@@ -26,7 +26,8 @@ def render_ologin_error(service_name,
error_message='Could not load user data. The token may have expired.'):
return render_page_template('ologinerror.html', service_name=service_name,
error_message=error_message,
- service_url=get_app_url())
+ service_url=get_app_url(),
+ user_creation=features.USER_CREATION)
def exchange_code_for_token(code, service_name='GITHUB', for_login=True, form_encode=False,
redirect_suffix=''):
@@ -85,7 +86,12 @@ def get_google_user(token):
def conduct_oauth_login(service_name, user_id, username, email, metadata={}):
to_login = model.verify_federated_login(service_name.lower(), user_id)
if not to_login:
- # try to create the user
+ # See if we can create a new user.
+ if not features.USER_CREATION:
+ error_message = 'User creation is disabled. Please contact your administrator'
+ return render_ologin_error(service_name, error_message)
+
+ # Try to create the user
try:
valid = next(generate_valid_usernames(username))
to_login = model.create_federated_user(valid, email, service_name.lower(),
@@ -147,7 +153,7 @@ def github_oauth_callback():
token = exchange_code_for_token(request.args.get('code'), service_name='GITHUB')
user_data = get_github_user(token)
- if not user_data:
+ if not user_data or 'login' not in user_data:
return render_ologin_error('GitHub')
username = user_data['login']
diff --git a/endpoints/common.py b/endpoints/common.py
index 52715a1d1..37ae80ee8 100644
--- a/endpoints/common.py
+++ b/endpoints/common.py
@@ -82,20 +82,23 @@ def param_required(param_name):
@login_manager.user_loader
-def load_user(username):
- logger.debug('User loader loading deferred user: %s' % username)
- return _LoginWrappedDBUser(username)
+def load_user(user_db_id):
+ logger.debug('User loader loading deferred user id: %s' % user_db_id)
+ try:
+ user_db_id_int = int(user_db_id)
+ return _LoginWrappedDBUser(user_db_id_int)
+ except ValueError:
+ return None
class _LoginWrappedDBUser(UserMixin):
- def __init__(self, db_username, db_user=None):
-
- self._db_username = db_username
+ def __init__(self, user_db_id, db_user=None):
+ self._db_id = user_db_id
self._db_user = db_user
def db_user(self):
if not self._db_user:
- self._db_user = model.get_user(self._db_username)
+ self._db_user = model.get_user_by_id(self._db_id)
return self._db_user
def is_authenticated(self):
@@ -105,13 +108,13 @@ class _LoginWrappedDBUser(UserMixin):
return self.db_user().verified
def get_id(self):
- return unicode(self._db_username)
+ return unicode(self._db_id)
def common_login(db_user):
- if login_user(_LoginWrappedDBUser(db_user.username, db_user)):
+ if login_user(_LoginWrappedDBUser(db_user.id, db_user)):
logger.debug('Successfully signed in as: %s' % db_user.username)
- new_identity = QuayDeferredPermissionUser(db_user.username, 'username', {scopes.DIRECT_LOGIN})
+ new_identity = QuayDeferredPermissionUser(db_user.id, 'user_db_id', {scopes.DIRECT_LOGIN})
identity_changed.send(app, identity=new_identity)
session['login_time'] = datetime.datetime.now()
return True
@@ -202,7 +205,7 @@ def check_repository_usage(user_or_org, plan_found):
def start_build(repository, dockerfile_id, tags, build_name, subdir, manual,
trigger=None, pull_robot_name=None):
host = urlparse.urlparse(request.url).netloc
- repo_path = '%s/%s/%s' % (host, repository.namespace, repository.name)
+ repo_path = '%s/%s/%s' % (host, repository.namespace_user.username, repository.name)
token = model.create_access_token(repository, 'write')
logger.debug('Creating build %s with repo %s tags %s and dockerfile_id %s',
@@ -218,9 +221,9 @@ def start_build(repository, dockerfile_id, tags, build_name, subdir, manual,
dockerfile_id, build_name,
trigger, pull_robot_name=pull_robot_name)
- dockerfile_build_queue.put([repository.namespace, repository.name], json.dumps({
+ dockerfile_build_queue.put([repository.namespace_user.username, repository.name], json.dumps({
'build_uuid': build_request.uuid,
- 'namespace': repository.namespace,
+ 'namespace': repository.namespace_user.username,
'repository': repository.name,
'pull_credentials': model.get_pull_credentials(pull_robot_name) if pull_robot_name else None
}), retries_remaining=1)
@@ -228,7 +231,7 @@ def start_build(repository, dockerfile_id, tags, build_name, subdir, manual,
# Add the build to the repo's log.
metadata = {
'repo': repository.name,
- 'namespace': repository.namespace,
+ 'namespace': repository.namespace_user.username,
'fileid': dockerfile_id,
'manual': manual,
}
@@ -238,9 +241,8 @@ def start_build(repository, dockerfile_id, tags, build_name, subdir, manual,
metadata['config'] = json.loads(trigger.config)
metadata['service'] = trigger.service.name
- model.log_action('build_dockerfile', repository.namespace,
- ip=request.remote_addr, metadata=metadata,
- repository=repository)
+ model.log_action('build_dockerfile', repository.namespace_user.username, ip=request.remote_addr,
+ metadata=metadata, repository=repository)
# Add notifications for the build queue.
profile.debug('Adding notifications for repository')
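
With the common.py change, Flask-Login sessions carry the database id rather than the username. A short sketch of the round trip using the classes above; the id 42 is made up:

    wrapped = _LoginWrappedDBUser(42)
    session_value = wrapped.get_id()     # u'42', what lands in the session cookie
    restored = load_user(session_value)  # a _LoginWrappedDBUser for id 42
    assert load_user('not-an-int') is None
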
diff --git a/endpoints/index.py b/endpoints/index.py
index 5c8d7058a..eb52971cf 100644
--- a/endpoints/index.py
+++ b/endpoints/index.py
@@ -19,6 +19,7 @@ from auth.permissions import (ModifyRepositoryPermission, UserAdminPermission,
from util.http import abort
from endpoints.notificationhelper import spawn_notification
+import features
logger = logging.getLogger(__name__)
profile = logging.getLogger('application.profiler')
@@ -65,6 +66,9 @@ def generate_headers(role='read'):
@index.route('/users', methods=['POST'])
@index.route('/users/', methods=['POST'])
def create_user():
+ if not features.USER_CREATION:
+ abort(400, 'User creation is disabled. Please speak to your administrator.')
+
user_data = request.get_json()
if not 'username' in user_data:
abort(400, 'Missing username')
@@ -420,7 +424,7 @@ def put_repository_auth(namespace, repository):
def get_search():
def result_view(repo):
return {
- "name": repo.namespace + '/' + repo.name,
+ "name": repo.namespace_user.username + '/' + repo.name,
"description": repo.description
}
@@ -438,7 +442,7 @@ def get_search():
results = [result_view(repo) for repo in matching
if (repo.visibility.name == 'public' or
- ReadRepositoryPermission(repo.namespace, repo.name).can())]
+ ReadRepositoryPermission(repo.namespace_user.username, repo.name).can())]
data = {
"query": query,
@@ -454,6 +458,7 @@ def get_search():
@index.route('/_ping')
@index.route('/_ping')
def ping():
+ # NOTE: any changes made here must also be reflected in the nginx config
response = make_response('true', 200)
response.headers['X-Docker-Registry-Version'] = '0.6.0'
response.headers['X-Docker-Registry-Standalone'] = '0'
diff --git a/endpoints/notificationevent.py b/endpoints/notificationevent.py
index f3f4d6a77..4a195fbd7 100644
--- a/endpoints/notificationevent.py
+++ b/endpoints/notificationevent.py
@@ -1,8 +1,4 @@
import logging
-import io
-import os.path
-import tarfile
-import base64
from notificationhelper import build_event_data
diff --git a/endpoints/notificationhelper.py b/endpoints/notificationhelper.py
index 773779fb7..6f80f83d0 100644
--- a/endpoints/notificationhelper.py
+++ b/endpoints/notificationhelper.py
@@ -4,7 +4,7 @@ from data import model
import json
def build_event_data(repo, extra_data={}, subpage=None):
- repo_string = '%s/%s' % (repo.namespace, repo.name)
+ repo_string = '%s/%s' % (repo.namespace_user.username, repo.name)
homepage = '%s://%s/repository/%s' % (app.config['PREFERRED_URL_SCHEME'],
app.config['SERVER_HOSTNAME'],
repo_string)
@@ -17,7 +17,7 @@ def build_event_data(repo, extra_data={}, subpage=None):
event_data = {
'repository': repo_string,
- 'namespace': repo.namespace,
+ 'namespace': repo.namespace_user.username,
'name': repo.name,
'docker_url': '%s/%s' % (app.config['SERVER_HOSTNAME'], repo_string),
'homepage': homepage,
@@ -30,7 +30,7 @@ def build_event_data(repo, extra_data={}, subpage=None):
def build_notification_data(notification, event_data):
return {
'notification_uuid': notification.uuid,
- 'repository_namespace': notification.repository.namespace,
+ 'repository_namespace': notification.repository.namespace_user.username,
'repository_name': notification.repository.name,
'event_data': event_data
}
@@ -39,8 +39,9 @@ def build_notification_data(notification, event_data):
def spawn_notification(repo, event_name, extra_data={}, subpage=None, pathargs=[]):
event_data = build_event_data(repo, extra_data=extra_data, subpage=subpage)
- notifications = model.list_repo_notifications(repo.namespace, repo.name, event_name=event_name)
+ notifications = model.list_repo_notifications(repo.namespace_user.username, repo.name,
+ event_name=event_name)
for notification in notifications:
notification_data = build_notification_data(notification, event_data)
- path = [repo.namespace, repo.name, event_name] + pathargs
+ path = [repo.namespace_user.username, repo.name, event_name] + pathargs
notification_queue.put(path, json.dumps(notification_data))
diff --git a/endpoints/notificationmethod.py b/endpoints/notificationmethod.py
index 9650e79f6..589ebd06d 100644
--- a/endpoints/notificationmethod.py
+++ b/endpoints/notificationmethod.py
@@ -10,6 +10,7 @@ import re
from flask.ext.mail import Message
from app import mail, app, get_app_url
from data import model
+from workers.worker import JobException
logger = logging.getLogger(__name__)
@@ -19,6 +20,9 @@ class InvalidNotificationMethodException(Exception):
class CannotValidateNotificationMethodException(Exception):
pass
+class NotificationMethodPerformException(JobException):
+ pass
+
class NotificationMethod(object):
def __init__(self):
@@ -84,7 +88,7 @@ class QuayNotificationMethod(NotificationMethod):
return (True, 'Unknown organization %s' % target_info['name'], None)
# Only repositories under the organization can cause notifications to that org.
- if target_info['name'] != repository.namespace:
+ if target_info['name'] != repository.namespace_user.username:
return (False, 'Organization name must match repository namespace')
return (True, None, [target])
@@ -92,7 +96,7 @@ class QuayNotificationMethod(NotificationMethod):
# Lookup the team.
team = None
try:
- team = model.get_organization_team(repository.namespace, target_info['name'])
+ team = model.get_organization_team(repository.namespace_user.username, target_info['name'])
except model.InvalidTeamException:
# Probably deleted.
return (True, 'Unknown team %s' % target_info['name'], None)
@@ -105,19 +109,18 @@ class QuayNotificationMethod(NotificationMethod):
repository = notification.repository
if not repository:
# Probably deleted.
- return True
+ return
# Lookup the target user or team to which we'll send the notification.
config_data = json.loads(notification.config_json)
status, err_message, target_users = self.find_targets(repository, config_data)
if not status:
- return False
+ raise NotificationMethodPerformException(err_message)
# For each of the target users, create a notification.
for target_user in set(target_users or []):
model.create_notification(event_handler.event_name(), target_user,
metadata=notification_data['event_data'])
- return True
class EmailMethod(NotificationMethod):
@@ -130,7 +133,8 @@ class EmailMethod(NotificationMethod):
if not email:
raise CannotValidateNotificationMethodException('Missing e-mail address')
- record = model.get_email_authorized_for_repo(repository.namespace, repository.name, email)
+ record = model.get_email_authorized_for_repo(repository.namespace_user.username,
+ repository.name, email)
if not record or not record.confirmed:
raise CannotValidateNotificationMethodException('The specified e-mail address '
'is not authorized to receive '
@@ -141,7 +145,7 @@ class EmailMethod(NotificationMethod):
config_data = json.loads(notification.config_json)
email = config_data.get('email', '')
if not email:
- return False
+ return
msg = Message(event_handler.get_summary(notification_data['event_data'], notification_data),
sender='support@quay.io',
@@ -153,9 +157,7 @@ class EmailMethod(NotificationMethod):
mail.send(msg)
except Exception as ex:
logger.exception('Email was unable to be sent: %s' % ex.message)
- return False
-
- return True
+ raise NotificationMethodPerformException(ex.message)
class WebhookMethod(NotificationMethod):
@@ -172,7 +174,7 @@ class WebhookMethod(NotificationMethod):
config_data = json.loads(notification.config_json)
url = config_data.get('url', '')
if not url:
- return False
+ return
payload = notification_data['event_data']
headers = {'Content-type': 'application/json'}
@@ -180,15 +182,14 @@ class WebhookMethod(NotificationMethod):
try:
resp = requests.post(url, data=json.dumps(payload), headers=headers)
if resp.status_code/100 != 2:
- logger.error('%s response for webhook to url: %s' % (resp.status_code,
- url))
- return False
+ error_message = '%s response for webhook to url: %s' % (resp.status_code, url)
+ logger.error(error_message)
+ logger.error(resp.content)
+ raise NotificationMethodPerformException(error_message)
except requests.exceptions.RequestException as ex:
logger.exception('Webhook was unable to be sent: %s' % ex.message)
- return False
-
- return True
+ raise NotificationMethodPerformException(ex.message)
class FlowdockMethod(NotificationMethod):
@@ -208,12 +209,12 @@ class FlowdockMethod(NotificationMethod):
config_data = json.loads(notification.config_json)
token = config_data.get('flow_api_token', '')
if not token:
- return False
+ return
- owner = model.get_user(notification.repository.namespace)
+ owner = model.get_user(notification.repository.namespace_user.username)
if not owner:
# Something went wrong.
- return False
+ return
url = 'https://api.flowdock.com/v1/messages/team_inbox/%s' % token
headers = {'Content-type': 'application/json'}
@@ -223,7 +224,8 @@ class FlowdockMethod(NotificationMethod):
'subject': event_handler.get_summary(notification_data['event_data'], notification_data),
'content': event_handler.get_message(notification_data['event_data'], notification_data),
'from_name': owner.username,
- 'project': notification.repository.namespace + ' ' + notification.repository.name,
+ 'project': (notification.repository.namespace_user.username + ' ' +
+ notification.repository.name),
'tags': ['#' + event_handler.event_name()],
'link': notification_data['event_data']['homepage']
}
@@ -231,16 +233,14 @@ class FlowdockMethod(NotificationMethod):
try:
resp = requests.post(url, data=json.dumps(payload), headers=headers)
if resp.status_code/100 != 2:
- logger.error('%s response for flowdock to url: %s' % (resp.status_code,
- url))
+ error_message = '%s response for flowdock to url: %s' % (resp.status_code, url)
+ logger.error(error_message)
logger.error(resp.content)
- return False
+ raise NotificationMethodPerformException(error_message)
except requests.exceptions.RequestException as ex:
logger.exception('Flowdock method was unable to be sent: %s' % ex.message)
- return False
-
- return True
+ raise NotificationMethodPerformException(ex.message)
class HipchatMethod(NotificationMethod):
@@ -265,12 +265,12 @@ class HipchatMethod(NotificationMethod):
room_id = config_data.get('room_id', '')
if not token or not room_id:
- return False
+ return
- owner = model.get_user(notification.repository.namespace)
+ owner = model.get_user(notification.repository.namespace_user.username)
if not owner:
# Something went wrong.
- return False
+ return
url = 'https://api.hipchat.com/v2/room/%s/notification?auth_token=%s' % (room_id, token)
@@ -293,16 +293,14 @@ class HipchatMethod(NotificationMethod):
try:
resp = requests.post(url, data=json.dumps(payload), headers=headers)
if resp.status_code/100 != 2:
- logger.error('%s response for hipchat to url: %s' % (resp.status_code,
- url))
+ error_message = '%s response for hipchat to url: %s' % (resp.status_code, url)
+ logger.error(error_message)
logger.error(resp.content)
- return False
+ raise NotificationMethodPerformException(error_message)
except requests.exceptions.RequestException as ex:
logger.exception('Hipchat method was unable to be sent: %s' % ex.message)
- return False
-
- return True
+ raise NotificationMethodPerformException(ex.message)
class SlackMethod(NotificationMethod):
@@ -334,12 +332,12 @@ class SlackMethod(NotificationMethod):
subdomain = config_data.get('subdomain', '')
if not token or not subdomain:
- return False
+ return
- owner = model.get_user(notification.repository.namespace)
+ owner = model.get_user(notification.repository.namespace_user.username)
if not owner:
# Something went wrong.
- return False
+ return
url = 'https://%s.slack.com/services/hooks/incoming-webhook?token=%s' % (subdomain, token)
@@ -370,13 +368,11 @@ class SlackMethod(NotificationMethod):
try:
resp = requests.post(url, data=json.dumps(payload), headers=headers)
if resp.status_code/100 != 2:
- logger.error('%s response for Slack to url: %s' % (resp.status_code,
- url))
+ error_message = '%s response for Slack to url: %s' % (resp.status_code, url)
+ logger.error(error_message)
logger.error(resp.content)
- return False
+ raise NotificationMethodPerformException(error_message)
except requests.exceptions.RequestException as ex:
logger.exception('Slack method was unable to be sent: %s' % ex.message)
- return False
-
- return True
+ raise NotificationMethodPerformException(ex.message)
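
Each notification method above now either returns None (nothing to do) or raises a JobException subclass instead of returning True/False, so the queue worker can treat exceptions as job failures. A minimal sketch of that contract with an illustrative WebhookMethod; the post callable and process_job helper are stand-ins, not Quay's worker API.

# Minimal sketch of the return-vs-raise contract introduced above: a notification method
# either returns (success, or nothing configured) or raises a JobException subclass so
# the queue worker can mark the job as failed.
import logging

logger = logging.getLogger(__name__)

class JobException(Exception):
  """Raised by a job to signal that processing failed and should not be marked complete."""

class NotificationMethodPerformException(JobException):
  pass

class WebhookMethod(object):
  def perform(self, config_data, payload, post):
    url = config_data.get('url', '')
    if not url:
      return  # Nothing configured; a no-op rather than a failure.

    try:
      status_code = post(url, payload)
    except IOError as ex:
      raise NotificationMethodPerformException(str(ex))

    if status_code // 100 != 2:
      raise NotificationMethodPerformException('%s response for webhook to url: %s' %
                                               (status_code, url))

def process_job(method, config_data, payload, post):
  try:
    method.perform(config_data, payload, post)
    return 'complete'
  except JobException as je:
    logger.error('Job failed: %s', je)
    return 'failed'

print(process_job(WebhookMethod(), {'url': 'http://example.invalid/hook'}, {'event': 'push'},
                  lambda url, payload: 500))  # -> failed
print(process_job(WebhookMethod(), {}, {'event': 'push'},
                  lambda url, payload: 200))  # -> complete (no url configured)
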
diff --git a/endpoints/registry.py b/endpoints/registry.py
index 4713ddd75..5699f0db2 100644
--- a/endpoints/registry.py
+++ b/endpoints/registry.py
@@ -14,6 +14,7 @@ from util.http import abort, exact_abort
from auth.permissions import (ReadRepositoryPermission,
ModifyRepositoryPermission)
from data import model
+from util import gzipstream
registry = Blueprint('registry', __name__)
@@ -193,21 +194,33 @@ def put_image_layer(namespace, repository, image_id):
# encoding (Gunicorn)
input_stream = request.environ['wsgi.input']
- # compute checksums
- csums = []
+ # Create a socket reader to read the input stream containing the layer data.
sr = SocketReader(input_stream)
+
+ # Add a handler that stores the data in storage.
tmp, store_hndlr = store.temp_store_handler()
sr.add_handler(store_hndlr)
+
+ # Add a handler to compute the uncompressed size of the layer.
+ uncompressed_size_info, size_hndlr = gzipstream.calculate_size_handler()
+ sr.add_handler(size_hndlr)
+
+ # Add a handler which computes the checksum.
h, sum_hndlr = checksums.simple_checksum_handler(json_data)
sr.add_handler(sum_hndlr)
+
+ # Stream write the data to storage.
store.stream_write(repo_image.storage.locations, layer_path, sr)
+
+ # Append the computed checksum.
+ csums = []
csums.append('sha256:{0}'.format(h.hexdigest()))
try:
image_size = tmp.tell()
# Save the size of the image.
- model.set_image_size(image_id, namespace, repository, image_size)
+ model.set_image_size(image_id, namespace, repository, image_size, uncompressed_size_info.size)
tmp.seek(0)
csums.append(checksums.compute_tarsum(tmp, json_data))
@@ -451,12 +464,6 @@ def put_image_json(namespace, repository, image_id):
set_uploading_flag(repo_image, True)
- # We cleanup any old checksum in case it's a retry after a fail
- profile.debug('Cleanup old checksum')
- repo_image.storage.uncompressed_size = data.get('Size')
- repo_image.storage.checksum = None
- repo_image.storage.save()
-
# If we reach that point, it means that this is a new image or a retry
# on a failed push
# save the metadata
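
put_image_layer now attaches several handlers to one SocketReader so the layer stream is stored, checksummed, and measured (uncompressed) in a single pass. A self-contained sketch of that fan-out pattern with in-memory stand-ins; SimpleReader and the handler factories below are illustrative, not the actual SocketReader or util.gzipstream API.

# Sketch of the single-pass handler fan-out used in put_image_layer: every chunk read
# from the input stream is handed to each registered handler, so storage, checksumming
# and uncompressed-size accounting happen without re-reading the stream.
import gzip
import hashlib
import io
import zlib

class SimpleReader(object):
  def __init__(self, fp):
    self._fp = fp
    self._handlers = []

  def add_handler(self, handler):
    self._handlers.append(handler)

  def read(self, size=-1):
    buf = self._fp.read(size)
    for handler in self._handlers:
      handler(buf)
    return buf

def checksum_handler():
  digest = hashlib.sha256()
  return digest, lambda buf: digest.update(buf)

def uncompressed_size_handler():
  # Counts the uncompressed size of a gzip stream as the compressed chunks flow past.
  state = {'size': 0}
  decompressor = zlib.decompressobj(16 + zlib.MAX_WBITS)  # expect a gzip wrapper
  def handler(buf):
    if buf:
      state['size'] += len(decompressor.decompress(buf))
  return state, handler

# A small gzipped payload stands in for an uploaded layer.
payload = b'x' * 10000
compressed = io.BytesIO()
with gzip.GzipFile(fileobj=compressed, mode='wb') as gz:
  gz.write(payload)
compressed.seek(0)

reader = SimpleReader(compressed)
digest, sum_hndlr = checksum_handler()
reader.add_handler(sum_hndlr)
size_info, size_hndlr = uncompressed_size_handler()
reader.add_handler(size_hndlr)

stored = io.BytesIO()
while True:
  chunk = reader.read(4096)
  if not chunk:
    break
  stored.write(chunk)

print('compressed size: %d' % stored.tell())
print('uncompressed size: %d' % size_info['size'])
print('sha256: %s' % digest.hexdigest())
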
diff --git a/endpoints/trigger.py b/endpoints/trigger.py
index ae0b4b2b7..c7c47db79 100644
--- a/endpoints/trigger.py
+++ b/endpoints/trigger.py
@@ -36,6 +36,9 @@ class TriggerActivationException(Exception):
class TriggerDeactivationException(Exception):
pass
+class TriggerStartException(Exception):
+ pass
+
class ValidationRequestException(Exception):
pass
@@ -109,12 +112,19 @@ class BuildTrigger(object):
"""
raise NotImplementedError
- def manual_start(self, auth_token, config):
+ def manual_start(self, auth_token, config, run_parameters=None):
"""
Manually creates a repository build for this trigger.
"""
raise NotImplementedError
+ def list_field_values(self, auth_token, config, field_name):
+ """
+ Lists all values for the given custom trigger field. For example, a trigger might have a
+ field named "branches", and this method would return all branches.
+ """
+ raise NotImplementedError
+
@classmethod
def service_name(cls):
"""
@@ -345,14 +355,37 @@ class GithubBuildTrigger(BuildTrigger):
return GithubBuildTrigger._prepare_build(config, repo, commit_sha,
short_sha, ref)
- def manual_start(self, auth_token, config):
- source = config['build_source']
+ def manual_start(self, auth_token, config, run_parameters=None):
+ try:
+ source = config['build_source']
+ run_parameters = run_parameters or {}
- gh_client = self._get_client(auth_token)
- repo = gh_client.get_repo(source)
- master = repo.get_branch(repo.default_branch)
- master_sha = master.commit.sha
- short_sha = GithubBuildTrigger.get_display_name(master_sha)
- ref = 'refs/heads/%s' % repo.default_branch
+ gh_client = self._get_client(auth_token)
+ repo = gh_client.get_repo(source)
+ master = repo.get_branch(repo.default_branch)
+ master_sha = master.commit.sha
+ short_sha = GithubBuildTrigger.get_display_name(master_sha)
+ ref = 'refs/heads/%s' % (run_parameters.get('branch_name') or repo.default_branch)
- return self._prepare_build(config, repo, master_sha, short_sha, ref)
+ return self._prepare_build(config, repo, master_sha, short_sha, ref)
+ except GithubException as ghe:
+ raise TriggerStartException(ghe.data['message'])
+
+
+ def list_field_values(self, auth_token, config, field_name):
+ if field_name == 'branch_name':
+ gh_client = self._get_client(auth_token)
+ source = config['build_source']
+ repo = gh_client.get_repo(source)
+ branches = [branch.name for branch in repo.get_branches()]
+
+ if repo.default_branch not in branches:
+ branches.insert(0, repo.default_branch)
+
+ if branches[0] != repo.default_branch:
+ branches.remove(repo.default_branch)
+ branches.insert(0, repo.default_branch)
+
+ return branches
+
+ return None
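
list_field_values feeds the new run-trigger dialog (currently just the branch_name field), and the branch list comes back with the repository's default branch first. The ordering rule in isolation, with sample branch names standing in for the GitHub API call:

# Sketch of the branch-ordering rule used by list_field_values: every branch is returned,
# and the repository's default branch is guaranteed to be present and listed first.
def order_branches(branches, default_branch):
  branches = list(branches)
  if default_branch not in branches:
    branches.insert(0, default_branch)
  elif branches[0] != default_branch:
    branches.remove(default_branch)
    branches.insert(0, default_branch)
  return branches

print(order_branches(['feature-a', 'master', 'feature-b'], 'master'))
# ['master', 'feature-a', 'feature-b']
print(order_branches(['feature-a'], 'master'))
# ['master', 'feature-a']
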
diff --git a/endpoints/web.py b/endpoints/web.py
index 19f9bb7f1..63e463666 100644
--- a/endpoints/web.py
+++ b/endpoints/web.py
@@ -18,6 +18,7 @@ from endpoints.common import common_login, render_page_template, route_show_if,
from endpoints.csrf import csrf_protect, generate_csrf_token
from util.names import parse_repository_name
from util.gravatar import compute_hash
+from util.useremails import send_email_changed
from auth import scopes
import features
@@ -32,8 +33,8 @@ STATUS_TAGS = app.config['STATUS_TAGS']
@web.route('/', methods=['GET'], defaults={'path': ''})
@web.route('/organization/', methods=['GET'])
@no_cache
-def index(path):
- return render_page_template('index.html')
+def index(path, **kwargs):
+ return render_page_template('index.html', **kwargs)
@web.route('/500', methods=['GET'])
@@ -101,7 +102,7 @@ def superuser():
@web.route('/signin/')
@no_cache
-def signin():
+def signin(redirect=None):
return index('')
@@ -123,6 +124,13 @@ def new():
return index('')
+@web.route('/confirminvite')
+@no_cache
+def confirm_invite():
+ code = request.values['code']
+ return index('', code=code)
+
+
@web.route('/repository/', defaults={'path': ''})
@web.route('/repository/', methods=['GET'])
@no_cache
@@ -215,6 +223,7 @@ def receipt():
@web.route('/authrepoemail', methods=['GET'])
+@route_show_if(features.MAILING)
def confirm_repo_email():
code = request.values['code']
record = None
@@ -228,23 +237,27 @@ def confirm_repo_email():
Your E-mail address has been authorized to receive notifications for repository
%s/%s.
""" % (app.config['PREFERRED_URL_SCHEME'], app.config['SERVER_HOSTNAME'],
- record.repository.namespace, record.repository.name,
- record.repository.namespace, record.repository.name)
+ record.repository.namespace_user.username, record.repository.name,
+ record.repository.namespace_user.username, record.repository.name)
return render_page_template('message.html', message=message)
@web.route('/confirm', methods=['GET'])
+@route_show_if(features.MAILING)
def confirm_email():
code = request.values['code']
user = None
new_email = None
try:
- user, new_email = model.confirm_user_email(code)
+ user, new_email, old_email = model.confirm_user_email(code)
except model.DataModelException as ex:
return render_page_template('confirmerror.html', error_message=ex.message)
+ if new_email:
+ send_email_changed(user.username, old_email, new_email)
+
common_login(user)
return redirect(url_for('web.user', tab='email'))
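
model.confirm_user_email now returns the previous address as well, so a change notice can be mailed when the e-mail actually changes. A compact sketch of that control flow; confirm_user_email() and send_email_changed() below are stand-ins for the real model and mail utilities.

# Sketch of the updated confirm_email flow: the confirmation code yields the user plus the
# old and new addresses, and a change notice is sent only when the address actually changed.
class DataModelException(Exception):
  pass

CODES = {'abc123': ('devtable', 'new@devtable.com', 'old@devtable.com')}

def confirm_user_email(code):
  # Stand-in for model.confirm_user_email: look the code up and return the affected addresses.
  if code not in CODES:
    raise DataModelException('Invalid confirmation code')
  return CODES[code]  # (username, new_email, old_email)

def send_email_changed(username, old_email, new_email):
  print('Mailing %s: address changed from %s to %s' % (username, old_email, new_email))

def confirm_email(code):
  try:
    username, new_email, old_email = confirm_user_email(code)
  except DataModelException as ex:
    return 'error: %s' % str(ex)
  if new_email:
    send_email_changed(username, old_email, new_email)
  return 'confirmed %s' % username

print(confirm_email('abc123'))
print(confirm_email('nope'))
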
diff --git a/initdb.py b/initdb.py
index 26ef28678..87208a8d6 100644
--- a/initdb.py
+++ b/initdb.py
@@ -51,7 +51,7 @@ def __gen_checksum(image_id):
def __gen_image_id(repo, image_num):
- str_to_hash = "%s/%s/%s" % (repo.namespace, repo.name, image_num)
+ str_to_hash = "%s/%s/%s" % (repo.namespace_user.username, repo.name, image_num)
h = hashlib.md5(str_to_hash)
return h.hexdigest() + h.hexdigest()
@@ -79,12 +79,12 @@ def __create_subtree(repo, structure, creator_username, parent):
creation_time = REFERENCE_DATE + timedelta(days=image_num)
command_list = SAMPLE_CMDS[image_num % len(SAMPLE_CMDS)]
command = json.dumps(command_list) if command_list else None
- new_image = model.set_image_metadata(docker_image_id, repo.namespace,
- repo.name, str(creation_time),
- 'no comment', command, parent)
+ new_image = model.set_image_metadata(docker_image_id, repo.namespace_user.username, repo.name,
+ str(creation_time), 'no comment', command, parent)
- model.set_image_size(docker_image_id, repo.namespace, repo.name,
- random.randrange(1, 1024 * 1024 * 1024))
+ compressed_size = random.randrange(1, 1024 * 1024 * 1024)
+ model.set_image_size(docker_image_id, repo.namespace_user.username, repo.name, compressed_size,
+ int(compressed_size * 1.4))
# Populate the diff file
diff_path = store.image_file_diffs_path(new_image.storage.uuid)
@@ -100,7 +100,7 @@ def __create_subtree(repo, structure, creator_username, parent):
last_node_tags = [last_node_tags]
for tag_name in last_node_tags:
- model.create_or_update_tag(repo.namespace, repo.name, tag_name,
+ model.create_or_update_tag(repo.namespace_user.username, repo.name, tag_name,
new_image.docker_image_id)
for subtree in subtrees:
@@ -214,7 +214,11 @@ def initialize_database():
LogEntryKind.create(name='org_create_team')
LogEntryKind.create(name='org_delete_team')
+ LogEntryKind.create(name='org_invite_team_member')
+ LogEntryKind.create(name='org_delete_team_member_invite')
LogEntryKind.create(name='org_add_team_member')
+ LogEntryKind.create(name='org_team_member_invite_accepted')
+ LogEntryKind.create(name='org_team_member_invite_declined')
LogEntryKind.create(name='org_remove_team_member')
LogEntryKind.create(name='org_set_team_description')
LogEntryKind.create(name='org_set_team_role')
@@ -271,6 +275,7 @@ def initialize_database():
NotificationKind.create(name='over_private_usage')
NotificationKind.create(name='expiring_license')
NotificationKind.create(name='maintenance')
+ NotificationKind.create(name='org_team_invite')
NotificationKind.create(name='test_notification')
@@ -302,7 +307,7 @@ def populate_database():
new_user_2.verified = True
new_user_2.save()
- new_user_3 = model.create_user('freshuser', 'password', 'no@thanks.com')
+ new_user_3 = model.create_user('freshuser', 'password', 'jschorr+test@devtable.com')
new_user_3.verified = True
new_user_3.save()
@@ -323,7 +328,8 @@ def populate_database():
outside_org.verified = True
outside_org.save()
- model.create_notification('test_notification', new_user_1, metadata={'some': 'value', 'arr': [1,2,3], 'obj': {'a': 1, 'b': 2}})
+ model.create_notification('test_notification', new_user_1,
+ metadata={'some':'value', 'arr':[1, 2, 3], 'obj':{'a':1, 'b':2}})
from_date = datetime.utcnow()
to_date = from_date + timedelta(hours=1)
@@ -387,18 +393,20 @@ def populate_database():
})
trigger.save()
- repo = 'ci.devtable.com:5000/%s/%s' % (building.namespace, building.name)
+ repo = 'ci.devtable.com:5000/%s/%s' % (building.namespace_user.username, building.name)
job_config = {
'repository': repo,
'docker_tags': ['latest'],
'build_subdir': '',
}
- record = model.create_email_authorization_for_repo(new_user_1.username, 'simple', 'jschorr@devtable.com')
+ record = model.create_email_authorization_for_repo(new_user_1.username, 'simple',
+ 'jschorr@devtable.com')
record.confirmed = True
record.save()
- model.create_email_authorization_for_repo(new_user_1.username, 'simple', 'jschorr+other@devtable.com')
+ model.create_email_authorization_for_repo(new_user_1.username, 'simple',
+ 'jschorr+other@devtable.com')
build2 = model.create_repository_build(building, token, job_config,
'68daeebd-a5b9-457f-80a0-4363b882f8ea',
@@ -425,12 +433,12 @@ def populate_database():
model.create_robot('coolrobot', org)
- oauth.create_application(org, 'Some Test App', 'http://localhost:8000', 'http://localhost:8000/o2c.html',
- client_id='deadbeef')
+ oauth.create_application(org, 'Some Test App', 'http://localhost:8000',
+ 'http://localhost:8000/o2c.html', client_id='deadbeef')
- oauth.create_application(org, 'Some Other Test App', 'http://quay.io', 'http://localhost:8000/o2c.html',
- client_id='deadpork',
- description = 'This is another test application')
+ oauth.create_application(org, 'Some Other Test App', 'http://quay.io',
+ 'http://localhost:8000/o2c.html', client_id='deadpork',
+ description='This is another test application')
model.oauth.create_access_token_for_testing(new_user_1, 'deadbeef', 'repo:admin')
@@ -452,8 +460,8 @@ def populate_database():
reader_team = model.create_team('readers', org, 'member',
'Readers of orgrepo.')
- model.set_team_repo_permission(reader_team.name, org_repo.namespace,
- org_repo.name, 'read')
+ model.set_team_repo_permission(reader_team.name, org_repo.namespace_user.username, org_repo.name,
+ 'read')
model.add_user_to_team(new_user_2, reader_team)
model.add_user_to_team(reader, reader_team)
@@ -475,12 +483,9 @@ def populate_database():
(2, [], 'latest17'),
(2, [], 'latest18'),])
- model.add_prototype_permission(org, 'read', activating_user=new_user_1,
- delegate_user=new_user_2)
- model.add_prototype_permission(org, 'read', activating_user=new_user_1,
- delegate_team=reader_team)
- model.add_prototype_permission(org, 'write', activating_user=new_user_2,
- delegate_user=new_user_1)
+ model.add_prototype_permission(org, 'read', activating_user=new_user_1, delegate_user=new_user_2)
+ model.add_prototype_permission(org, 'read', activating_user=new_user_1, delegate_team=reader_team)
+ model.add_prototype_permission(org, 'write', activating_user=new_user_2, delegate_user=new_user_1)
today = datetime.today()
week_ago = today - timedelta(6)
diff --git a/static/css/quay.css b/static/css/quay.css
index 15c71634b..08d55c970 100644
--- a/static/css/quay.css
+++ b/static/css/quay.css
@@ -144,6 +144,15 @@ nav.navbar-default .navbar-nav>li>a.active {
max-width: 320px;
}
+.notification-view-element .right-controls button {
+ margin-left: 10px;
+}
+
+.notification-view-element .message i.fa {
+ margin-right: 6px;
+}
+
+
.notification-view-element .orginfo {
margin-top: 8px;
float: left;
@@ -3593,6 +3602,12 @@ p.editable:hover i {
white-space: nowrap;
}
+.tt-message {
+ padding: 10px;
+ font-size: 12px;
+ white-space: nowrap;
+}
+
.tt-suggestion p {
margin: 0;
}
@@ -4284,7 +4299,7 @@ pre.command:before {
}
.user-row.super-user td {
- background-color: #d9edf7;
+ background-color: #eeeeee;
}
.user-row .user-class {
@@ -4672,4 +4687,68 @@ i.slack-icon {
.external-notification-view-element:hover .side-controls button {
border: 1px solid #eee;
+}
+
+.member-listing {
+ width: 100%;
+}
+
+.member-listing .section-header {
+ color: #ccc;
+ margin-top: 20px;
+ margin-bottom: 10px;
+}
+
+.member-listing .gravatar {
+ vertical-align: middle;
+ margin-right: 10px;
+}
+
+.member-listing .entity-reference {
+ margin-bottom: 10px;
+ display: inline-block;
+}
+
+.member-listing .invite-listing {
+ margin-bottom: 10px;
+ display: inline-block;
+}
+
+.team-view .organization-header .popover {
+ max-width: none !important;
+}
+
+.team-view .organization-header .popover.bottom-right .arrow:after {
+ border-bottom-color: #f7f7f7;
+ top: 2px;
+}
+
+.team-view .organization-header .popover-content {
+ font-size: 14px;
+ padding-top: 6px;
+}
+
+.team-view .organization-header .popover-content input {
+ background: white;
+}
+
+.team-view .team-view-add-element .help-text {
+ font-size: 13px;
+ color: #ccc;
+ margin-top: 10px;
+}
+
+.team-view .organization-header .popover-content {
+ min-width: 500px;
+}
+
+#startTriggerDialog .trigger-description {
+ margin-bottom: 20px;
+ padding-bottom: 20px;
+ border-bottom: 1px solid #eee;
+}
+
+#startTriggerDialog #runForm .field-title {
+ width: 120px;
+ padding-right: 10px;
}
\ No newline at end of file
diff --git a/static/directives/entity-reference.html b/static/directives/entity-reference.html
index d01b100ee..ea65db875 100644
--- a/static/directives/entity-reference.html
+++ b/static/directives/entity-reference.html
@@ -7,15 +7,19 @@
-
+
{{entity.name}}
{{entity.name}}
-
-
+
+
+
+
+
+
{{ getPrefix(entity.name) }}+{{ getShortenedName(entity.name) }}
diff --git a/static/directives/entity-search.html b/static/directives/entity-search.html
index fec00b393..63abb1528 100644
--- a/static/directives/entity-search.html
+++ b/static/directives/entity-search.html
@@ -5,7 +5,7 @@
ng-click="lazyLoad()">
-