Merge master into laffa
commit f38ce51943
94 changed files with 3132 additions and 871 deletions
@@ -16,8 +16,8 @@ class RedisBuildLogs(object):
   COMMAND = 'command'
   PHASE = 'phase'
 
-  def __init__(self, redis_host):
-    self._redis = redis.StrictRedis(host=redis_host)
+  def __init__(self, redis_config):
+    self._redis = redis.StrictRedis(socket_connect_timeout=5, **redis_config)
 
   @staticmethod
   def _logs_key(build_id):
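As an aside, a minimal sketch of what the new constructor expects. The keys shown here (host, port, password) are only an assumption; any keyword accepted by redis.StrictRedis can appear in the dict, and the 5-second connect timeout is always applied on top:

import redis

# Hypothetical connection settings; every key is passed through to StrictRedis.
buildlogs_redis_config = {
  'host': 'redis.example.com',
  'port': 6379,
  'password': 'example-secret',
}

# Equivalent to the new RedisBuildLogs.__init__ call above.
client = redis.StrictRedis(socket_connect_timeout=5, **buildlogs_redis_config)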
@@ -104,7 +104,13 @@ class BuildLogs(object):
     self.state = None
 
   def init_app(self, app):
-    buildlogs_hostname = app.config.get('BUILDLOGS_REDIS_HOSTNAME')
+    buildlogs_config = app.config.get('BUILDLOGS_REDIS')
+    if not buildlogs_config:
+      # This is the old key name.
+      buildlogs_config = {
+        'host': app.config.get('BUILDLOGS_REDIS_HOSTNAME')
+      }
+
     buildlogs_options = app.config.get('BUILDLOGS_OPTIONS', [])
     buildlogs_import = app.config.get('BUILDLOGS_MODULE_AND_CLASS', None)
 
@@ -113,7 +119,7 @@ class BuildLogs(object):
     else:
       klass = import_class(buildlogs_import[0], buildlogs_import[1])
 
-    buildlogs = klass(buildlogs_hostname, *buildlogs_options)
+    buildlogs = klass(buildlogs_config, *buildlogs_options)
 
     # register extension with app
     app.extensions = getattr(app, 'extensions', {})
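For reference, a sketch of the two configuration shapes this fallback accepts; the hostname value and the helper function are hypothetical, not part of the commit. The new BUILDLOGS_REDIS dict is preferred, and the legacy BUILDLOGS_REDIS_HOSTNAME string is wrapped into an equivalent dict when the new key is absent:

def resolve_buildlogs_config(app_config):
  # Mirrors the fallback in init_app above.
  buildlogs_config = app_config.get('BUILDLOGS_REDIS')
  if not buildlogs_config:
    # Old-style configuration: only a hostname string was provided.
    buildlogs_config = {'host': app_config.get('BUILDLOGS_REDIS_HOSTNAME')}
  return buildlogs_config

print(resolve_buildlogs_config({'BUILDLOGS_REDIS': {'host': 'redis.example.com', 'port': 6379}}))
print(resolve_buildlogs_config({'BUILDLOGS_REDIS_HOSTNAME': 'redis.example.com'}))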
@@ -8,7 +8,7 @@ from peewee import *
 from data.read_slave import ReadSlaveModel
 from sqlalchemy.engine.url import make_url
 from urlparse import urlparse
-
+from util.names import urn_generator
 
 logger = logging.getLogger(__name__)
 
@@ -21,8 +21,24 @@ SCHEME_DRIVERS = {
   'postgresql+psycopg2': PostgresqlDatabase,
 }
 
+SCHEME_RANDOM_FUNCTION = {
+  'mysql': fn.Rand,
+  'mysql+pymysql': fn.Rand,
+  'sqlite': fn.Random,
+  'postgresql': fn.Random,
+  'postgresql+psycopg2': fn.Random,
+}
+
+class CallableProxy(Proxy):
+  def __call__(self, *args, **kwargs):
+    if self.obj is None:
+      raise AttributeError('Cannot use uninitialized Proxy.')
+    return self.obj(*args, **kwargs)
+
 db = Proxy()
 read_slave = Proxy()
+db_random_func = CallableProxy()
+
 
 def _db_from_url(url, db_kwargs):
   parsed_url = make_url(url)
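A CallableProxy is used for db_random_func because call sites need to invoke it like a function, while the concrete random function (fn.Rand on MySQL, fn.Random elsewhere) is only known once configure() has parsed the database URI. A small sketch of the mechanism; the order_by usage in the comment is illustrative only:

from peewee import Proxy, fn

class CallableProxy(Proxy):
  # Same idea as the class added above: forward calls to the wrapped object.
  def __call__(self, *args, **kwargs):
    if self.obj is None:
      raise AttributeError('Cannot use uninitialized Proxy.')
    return self.obj(*args, **kwargs)

db_random_func = CallableProxy()
db_random_func.initialize(fn.Random)  # configure() would pick fn.Rand for MySQL

# Call sites stay driver-agnostic, e.g. Repository.select().order_by(db_random_func())
print(db_random_func())  # a peewee function expression for RANDOM()/RAND()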
@@ -38,11 +54,15 @@ def _db_from_url(url, db_kwargs):
 
   return SCHEME_DRIVERS[parsed_url.drivername](parsed_url.database, **db_kwargs)
 
 
 def configure(config_object):
   db_kwargs = dict(config_object['DB_CONNECTION_ARGS'])
   write_db_uri = config_object['DB_URI']
   db.initialize(_db_from_url(write_db_uri, db_kwargs))
+
+  parsed_write_uri = make_url(write_db_uri)
+  db_random_func.initialize(SCHEME_RANDOM_FUNCTION[parsed_write_uri.drivername])
+
   read_slave_uri = config_object.get('DB_READ_SLAVE_URI', None)
   if read_slave_uri is not None:
     read_slave.initialize(_db_from_url(read_slave_uri, db_kwargs))
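To show how the driver name selects the random function, a hedged sketch using a placeholder DB_URI; configure() performs the same lookup with the real configuration value:

from sqlalchemy.engine.url import make_url
from peewee import fn

SCHEME_RANDOM_FUNCTION = {
  'mysql': fn.Rand,
  'mysql+pymysql': fn.Rand,
  'sqlite': fn.Random,
  'postgresql': fn.Random,
  'postgresql+psycopg2': fn.Random,
}

# Placeholder URI; only the drivername prefix matters for the lookup.
write_db_uri = 'mysql+pymysql://quay:secret@db.example.com/quay'
parsed_write_uri = make_url(write_db_uri)
print(parsed_write_uri.drivername)                          # 'mysql+pymysql'
print(SCHEME_RANDOM_FUNCTION[parsed_write_uri.drivername])  # fn.Rand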
@@ -112,6 +132,15 @@ class TeamMember(BaseModel):
   )
 
 
+class TeamMemberInvite(BaseModel):
+  # Note: Either user OR email will be filled in, but not both.
+  user = ForeignKeyField(User, index=True, null=True)
+  email = CharField(null=True)
+  team = ForeignKeyField(Team, index=True)
+  inviter = ForeignKeyField(User, related_name='inviter')
+  invite_token = CharField(default=urn_generator(['teaminvite']))
+
+
 class LoginService(BaseModel):
   name = CharField(unique=True, index=True)
 
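The model comment implies an invariant worth spelling out: exactly one of user or email identifies the invitee. A hypothetical helper (not part of this commit) sketching that check before a TeamMemberInvite row would be created:

def validate_invitee(user=None, email=None):
  # Either user OR email must be filled in, but not both (and not neither).
  if (user is None) == (email is None):
    raise ValueError('Provide exactly one of user or email')
  return {'user': user, 'email': email}

print(validate_invitee(email='new.member@example.com'))
print(validate_invitee(user='existinguser'))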
@@ -139,7 +168,7 @@ class Visibility(BaseModel):
 
 
 class Repository(BaseModel):
-  namespace = CharField()
+  namespace_user = ForeignKeyField(User)
   name = CharField()
   visibility = ForeignKeyField(Visibility)
   description = TextField(null=True)
@@ -150,7 +179,7 @@ class Repository(BaseModel):
     read_slaves = (read_slave,)
     indexes = (
       # create a unique index on namespace and name
-      (('namespace', 'name'), True),
+      (('namespace_user', 'name'), True),
     )
 
 
@@ -227,7 +256,7 @@ class EmailConfirmation(BaseModel):
 
 
 class ImageStorage(BaseModel):
-  uuid = CharField(default=uuid_generator)
+  uuid = CharField(default=uuid_generator, index=True)
   checksum = CharField(null=True)
   created = DateTimeField(null=True)
   comment = TextField(null=True)
@@ -333,7 +362,7 @@ class RepositoryBuild(BaseModel):
 class QueueItem(BaseModel):
   queue_name = CharField(index=True, max_length=1024)
   body = TextField()
-  available_after = DateTimeField(default=datetime.now, index=True)
+  available_after = DateTimeField(default=datetime.utcnow, index=True)
   available = BooleanField(default=True, index=True)
   processing_expires = DateTimeField(null=True, index=True)
   retries_remaining = IntegerField(default=5)
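One note on the default change: peewee stores the callable and invokes it when each row is created, so switching to datetime.utcnow makes available_after independent of the server's local timezone. A tiny sketch:

from datetime import datetime

# The default is the function itself (no parentheses); it runs at insert time.
new_default = datetime.utcnow
old_default = datetime.now

print(new_default())  # UTC timestamp, same meaning on every worker
print(old_default())  # local-time timestamp the old default produced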
@@ -438,4 +467,5 @@ all_models = [User, Repository, Image, AccessToken, Role, RepositoryPermission,
               OAuthApplication, OAuthAuthorizationCode, OAuthAccessToken, NotificationKind,
               Notification, ImageStorageLocation, ImageStoragePlacement,
               ExternalNotificationEvent, ExternalNotificationMethod, RepositoryNotification,
-              RepositoryAuthorizedEmail, ImageStorageTransformation, DerivedImageStorage]
+              RepositoryAuthorizedEmail, ImageStorageTransformation, DerivedImageStorage,
+              TeamMemberInvite]
@@ -0,0 +1,24 @@
+"""Migrate registry namespaces to reference a user.
+
+Revision ID: 13da56878560
+Revises: 51d04d0e7e6f
+Create Date: 2014-09-18 13:56:45.130455
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '13da56878560'
+down_revision = '51d04d0e7e6f'
+
+from alembic import op
+import sqlalchemy as sa
+
+from data.database import Repository, User
+
+def upgrade(tables):
+    # Add the namespace_user column, allowing it to be nullable
+    op.add_column('repository', sa.Column('namespace_user_id', sa.Integer(), sa.ForeignKey('user.id')))
+
+
+def downgrade(tables):
+    op.drop_column('repository', 'namespace_user_id')
@@ -44,11 +44,11 @@ def downgrade(tables):
     op.create_index('notificationkind_name', 'notificationkind', ['name'], unique=False)
     op.drop_index('logentrykind_name', table_name='logentrykind')
     op.create_index('logentrykind_name', 'logentrykind', ['name'], unique=False)
-    op.add_column('image', sa.Column('created', mysql.DATETIME(), nullable=True))
-    op.add_column('image', sa.Column('command', mysql.LONGTEXT(), nullable=True))
-    op.add_column('image', sa.Column('image_size', mysql.BIGINT(display_width=20), nullable=True))
-    op.add_column('image', sa.Column('checksum', mysql.VARCHAR(length=255), nullable=True))
-    op.add_column('image', sa.Column('comment', mysql.LONGTEXT(), nullable=True))
+    op.add_column('image', sa.Column('created', sa.DateTime(), nullable=True))
+    op.add_column('image', sa.Column('command', sa.Text(), nullable=True))
+    op.add_column('image', sa.Column('image_size', sa.BigInteger(), nullable=True))
+    op.add_column('image', sa.Column('checksum', sa.String(length=255), nullable=True))
+    op.add_column('image', sa.Column('comment', sa.Text(), nullable=True))
     op.drop_index('buildtriggerservice_name', table_name='buildtriggerservice')
     op.create_index('buildtriggerservice_name', 'buildtriggerservice', ['name'], unique=False)
     ### end Alembic commands ###
@@ -0,0 +1,26 @@
+"""Backfill the namespace_user fields.
+
+Revision ID: 3f4fe1194671
+Revises: 6f2ecf5afcf
+Create Date: 2014-09-24 14:29:45.192179
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '3f4fe1194671'
+down_revision = '6f2ecf5afcf'
+
+from alembic import op
+import sqlalchemy as sa
+
+
+def upgrade(tables):
+    conn = op.get_bind()
+    user_table_name_escaped = conn.dialect.identifier_preparer.format_table(tables['user'])
+    conn.execute('update repository set namespace_user_id = (select id from {0} where {0}.username = repository.namespace) where namespace_user_id is NULL'.format(user_table_name_escaped))
+    op.create_index('repository_namespace_user_id_name', 'repository', ['namespace_user_id', 'name'], unique=True)
+
+
+def downgrade(tables):
+    op.drop_constraint('fk_repository_namespace_user_id_user', table_name='repository', type_='foreignkey')
+    op.drop_index('repository_namespace_user_id_name', table_name='repository')
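The table name is escaped through the dialect's identifier preparer because user is a reserved word on some backends (notably PostgreSQL), so it cannot be hard-coded safely. A hedged sketch of what the rendered statement looks like when the preparer quotes the name:

# Hypothetical rendering: a PostgreSQL preparer would emit "user", while MySQL
# would typically leave the name unquoted.
user_table_name_escaped = '"user"'
backfill_sql = ('update repository set namespace_user_id = '
                '(select id from {0} where {0}.username = repository.namespace) '
                'where namespace_user_id is NULL').format(user_table_name_escaped)
print(backfill_sql)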
@@ -0,0 +1,78 @@
+"""Email invites for joining a team.
+
+Revision ID: 51d04d0e7e6f
+Revises: 34fd69f63809
+Create Date: 2014-09-15 23:51:35.478232
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '51d04d0e7e6f'
+down_revision = '34fd69f63809'
+
+from alembic import op
+import sqlalchemy as sa
+
+
+def upgrade(tables):
+    ### commands auto generated by Alembic - please adjust! ###
+    op.create_table('teammemberinvite',
+    sa.Column('id', sa.Integer(), nullable=False),
+    sa.Column('user_id', sa.Integer(), nullable=True),
+    sa.Column('email', sa.String(length=255), nullable=True),
+    sa.Column('team_id', sa.Integer(), nullable=False),
+    sa.Column('inviter_id', sa.Integer(), nullable=False),
+    sa.Column('invite_token', sa.String(length=255), nullable=False),
+    sa.ForeignKeyConstraint(['inviter_id'], ['user.id'], ),
+    sa.ForeignKeyConstraint(['team_id'], ['team.id'], ),
+    sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
+    sa.PrimaryKeyConstraint('id')
+    )
+    op.create_index('teammemberinvite_inviter_id', 'teammemberinvite', ['inviter_id'], unique=False)
+    op.create_index('teammemberinvite_team_id', 'teammemberinvite', ['team_id'], unique=False)
+    op.create_index('teammemberinvite_user_id', 'teammemberinvite', ['user_id'], unique=False)
+    ### end Alembic commands ###
+
+    # Manually add the new logentrykind types
+    op.bulk_insert(tables.logentrykind,
+      [
+        {'id':42, 'name':'org_invite_team_member'},
+        {'id':43, 'name':'org_team_member_invite_accepted'},
+        {'id':44, 'name':'org_team_member_invite_declined'},
+        {'id':45, 'name':'org_delete_team_member_invite'},
+      ])
+
+    op.bulk_insert(tables.notificationkind,
+      [
+        {'id':10, 'name':'org_team_invite'},
+      ])
+
+def downgrade(tables):
+    ### commands auto generated by Alembic - please adjust! ###
+    op.execute(
+      (tables.logentrykind.delete()
+       .where(tables.logentrykind.c.name == op.inline_literal('org_invite_team_member')))
+    )
+
+    op.execute(
+      (tables.logentrykind.delete()
+       .where(tables.logentrykind.c.name == op.inline_literal('org_team_member_invite_accepted')))
+    )
+
+    op.execute(
+      (tables.logentrykind.delete()
+       .where(tables.logentrykind.c.name == op.inline_literal('org_team_member_invite_declined')))
+    )
+
+    op.execute(
+      (tables.logentrykind.delete()
+       .where(tables.logentrykind.c.name == op.inline_literal('org_delete_team_member_invite')))
+    )
+
+    op.execute(
+      (tables.notificationkind.delete()
+       .where(tables.notificationkind.c.name == op.inline_literal('org_team_invite')))
+    )
+
+    op.drop_table('teammemberinvite')
+    ### end Alembic commands ###
@@ -1,17 +1,16 @@
 """add the uncompressed size to image storage
 
 Revision ID: 6f2ecf5afcf
-Revises: 3f6d26399bd2
+Revises: 13da56878560
 Create Date: 2014-09-22 14:39:13.470566
 
 """
 
 # revision identifiers, used by Alembic.
 revision = '6f2ecf5afcf'
-down_revision = '3f6d26399bd2'
+down_revision = '13da56878560'
 
 from alembic import op
-from tools.uncompressedsize import backfill_sizes
 import sqlalchemy as sa
 
 
@@ -20,9 +19,6 @@ def upgrade(tables):
     op.add_column('imagestorage', sa.Column('uncompressed_size', sa.BigInteger(), nullable=True))
     ### end Alembic commands ###
 
-    # Backfill the uncompressed size to the image storage table.
-    backfill_sizes()
-
 def downgrade(tables):
     ### commands auto generated by Alembic - please adjust! ###
     op.drop_column('imagestorage', 'uncompressed_size')
@@ -0,0 +1,29 @@
+"""Allow the namespace column to be nullable.
+
+Revision ID: 9a1087b007d
+Revises: 3f4fe1194671
+Create Date: 2014-10-01 16:11:21.277226
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '9a1087b007d'
+down_revision = '3f4fe1194671'
+
+from alembic import op
+import sqlalchemy as sa
+
+
+def upgrade(tables):
+    op.drop_index('repository_namespace_name', table_name='repository')
+    op.alter_column('repository', 'namespace', nullable=True, existing_type=sa.String(length=255),
+                    server_default=sa.text('NULL'))
+
+
+def downgrade(tables):
+    conn = op.get_bind()
+    user_table_name_escaped = conn.dialect.identifier_preparer.format_table(tables['user'])
+    conn.execute('update repository set namespace = (select username from {0} where {0}.id = repository.namespace_user_id) where namespace is NULL'.format(user_table_name_escaped))
+
+    op.create_index('repository_namespace_name', 'repository', ['namespace', 'name'], unique=True)
+    op.alter_column('repository', 'namespace', nullable=False, existing_type=sa.String(length=255))
@@ -0,0 +1,22 @@
+"""Add an index to the uuid in the image storage table.
+
+Revision ID: b1d41e2071b
+Revises: 9a1087b007d
+Create Date: 2014-10-06 18:42:10.021235
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = 'b1d41e2071b'
+down_revision = '9a1087b007d'
+
+from alembic import op
+import sqlalchemy as sa
+
+
+def upgrade(tables):
+    op.create_index('imagestorage_uuid', 'imagestorage', ['uuid'], unique=True)
+
+
+def downgrade(tables):
+    op.drop_index('imagestorage_uuid', table_name='imagestorage')
@@ -23,13 +23,11 @@ def upgrade(tables):
 def downgrade(tables):
     ### commands auto generated by Alembic - please adjust! ###
     op.create_table('webhook',
-    sa.Column('id', mysql.INTEGER(display_width=11), nullable=False),
-    sa.Column('public_id', mysql.VARCHAR(length=255), nullable=False),
-    sa.Column('repository_id', mysql.INTEGER(display_width=11), autoincrement=False, nullable=False),
-    sa.Column('parameters', mysql.LONGTEXT(), nullable=False),
-    sa.ForeignKeyConstraint(['repository_id'], [u'repository.id'], name=u'fk_webhook_repository_repository_id'),
-    sa.PrimaryKeyConstraint('id'),
-    mysql_default_charset=u'latin1',
-    mysql_engine=u'InnoDB'
+    sa.Column('id', sa.Integer(), nullable=False),
+    sa.Column('public_id', sa.String(length=255), nullable=False),
+    sa.Column('repository_id', sa.Integer(), nullable=False),
+    sa.Column('parameters', sa.Text(), nullable=False),
+    sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], ),
+    sa.PrimaryKeyConstraint('id')
    )
     ### end Alembic commands ###
File diff suppressed because it is too large
@@ -17,7 +17,12 @@ OPTION_TRANSLATIONS = {
 
 
 def gen_sqlalchemy_metadata(peewee_model_list):
-  metadata = MetaData()
+  metadata = MetaData(naming_convention={
+    "ix": 'ix_%(column_0_label)s',
+    "uq": "uq_%(table_name)s_%(column_0_name)s",
+    "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
+    "pk": "pk_%(table_name)s"
+  })
 
   for model in peewee_model_list:
     meta = model._meta
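The naming_convention tokens are SQLAlchemy's standard MetaData convention keys, so constraints generated from the peewee models get deterministic names, which is what allows the backfill migration above to drop 'fk_repository_namespace_user_id_user' by name. A small runnable sketch with an illustrative pair of tables:

from sqlalchemy import MetaData, Table, Column, Integer, ForeignKey
from sqlalchemy.schema import CreateTable

metadata = MetaData(naming_convention={
  "ix": 'ix_%(column_0_label)s',
  "uq": "uq_%(table_name)s_%(column_0_name)s",
  "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
  "pk": "pk_%(table_name)s"
})

user = Table('user', metadata, Column('id', Integer, primary_key=True))
repository = Table('repository', metadata,
                   Column('id', Integer, primary_key=True),
                   Column('namespace_user_id', Integer, ForeignKey('user.id')))

# The emitted DDL names the constraints pk_repository and
# fk_repository_namespace_user_id_user, so later migrations can drop them by name.
print(CreateTable(repository))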
@@ -68,9 +68,8 @@ class WorkQueue(object):
       'retries_remaining': retries_remaining,
     }
 
-    if available_after:
-      available_date = datetime.utcnow() + timedelta(seconds=available_after)
-      params['available_after'] = available_date
+    available_date = datetime.utcnow() + timedelta(seconds=available_after or 0)
+    params['available_after'] = available_date
 
     with self._transaction_factory(db):
       QueueItem.create(**params)
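The rewritten branch always stamps available_after, treating a missing delay as zero seconds. A small sketch of the behavior (the helper name is hypothetical):

from datetime import datetime, timedelta

def availability_time(available_after=None):
  # Mirrors the new queue logic: None or 0 means "available immediately".
  return datetime.utcnow() + timedelta(seconds=available_after or 0)

print(availability_time())     # roughly now, in UTC
print(availability_time(300))  # five minutes from now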
@@ -7,14 +7,14 @@ class UserEventBuilder(object):
   Defines a helper class for constructing UserEvent and UserEventListener
   instances.
   """
-  def __init__(self, redis_host):
-    self._redis_host = redis_host
+  def __init__(self, redis_config):
+    self._redis_config = redis_config
 
   def get_event(self, username):
-    return UserEvent(self._redis_host, username)
+    return UserEvent(self._redis_config, username)
 
   def get_listener(self, username, events):
-    return UserEventListener(self._redis_host, username, events)
+    return UserEventListener(self._redis_config, username, events)
 
 
 class UserEventsBuilderModule(object):
@@ -26,8 +26,14 @@ class UserEventsBuilderModule(object):
     self.state = None
 
   def init_app(self, app):
-    redis_hostname = app.config.get('USER_EVENTS_REDIS_HOSTNAME')
-    user_events = UserEventBuilder(redis_hostname)
+    redis_config = app.config.get('USER_EVENTS_REDIS')
+    if not redis_config:
+      # This is the old key name.
+      redis_config = {
+        'host': app.config.get('USER_EVENTS_REDIS_HOSTNAME')
+      }
+
+    user_events = UserEventBuilder(redis_config)
 
     # register extension with app
     app.extensions = getattr(app, 'extensions', {})
@@ -43,8 +49,8 @@ class UserEvent(object):
   Defines a helper class for publishing to realtime user events
   as backed by Redis.
   """
-  def __init__(self, redis_host, username):
-    self._redis = redis.StrictRedis(host=redis_host)
+  def __init__(self, redis_config, username):
+    self._redis = redis.StrictRedis(socket_connect_timeout=5, **redis_config)
     self._username = username
 
   @staticmethod
@@ -74,10 +80,10 @@ class UserEventListener(object):
   Defines a helper class for subscribing to realtime user events as
   backed by Redis.
   """
-  def __init__(self, redis_host, username, events=set([])):
+  def __init__(self, redis_config, username, events=set([])):
     channels = [self._user_event_key(username, e) for e in events]
 
-    self._redis = redis.StrictRedis(host=redis_host)
+    self._redis = redis.StrictRedis(socket_connect_timeout=5, **redis_config)
     self._pubsub = self._redis.pubsub()
     self._pubsub.subscribe(channels)
 