Merge remote-tracking branch 'origin/master' into yellowalert

Conflicts:
	data/migrations/versions/82297d834ad_add_us_west_location.py
	test/data/test.db
This commit is contained in:
Jake Moshenko 2014-09-05 11:30:30 -04:00
commit 64480fd4ed
81 changed files with 1550 additions and 822 deletions

View file

@ -3,6 +3,8 @@ import stripe
from datetime import datetime, timedelta
from calendar import timegm
from util.collections import AttrDict
PLANS = [
# Deprecated Plans
{
@ -118,20 +120,6 @@ def get_plan(plan_id):
return None
class AttrDict(dict):
def __init__(self, *args, **kwargs):
super(AttrDict, self).__init__(*args, **kwargs)
self.__dict__ = self
@classmethod
def deep_copy(cls, attr_dict):
copy = AttrDict(attr_dict)
for key, value in copy.items():
if isinstance(value, AttrDict):
copy[key] = cls.deep_copy(value)
return copy
class FakeStripe(object):
class Customer(AttrDict):
FAKE_PLAN = AttrDict({

View file

@ -17,6 +17,8 @@ SCHEME_DRIVERS = {
'mysql': MySQLDatabase,
'mysql+pymysql': MySQLDatabase,
'sqlite': SqliteDatabase,
'postgresql': PostgresqlDatabase,
'postgresql+psycopg2': PostgresqlDatabase,
}
db = Proxy()
@ -32,7 +34,7 @@ def _db_from_url(url, db_kwargs):
if parsed_url.username:
db_kwargs['user'] = parsed_url.username
if parsed_url.password:
db_kwargs['passwd'] = parsed_url.password
db_kwargs['password'] = parsed_url.password
return SCHEME_DRIVERS[parsed_url.drivername](parsed_url.database, **db_kwargs)
@ -74,6 +76,8 @@ class User(BaseModel):
organization = BooleanField(default=False, index=True)
robot = BooleanField(default=False, index=True)
invoice_email = BooleanField(default=False)
invalid_login_attempts = IntegerField(default=0)
last_invalid_login = DateTimeField(default=datetime.utcnow)
class TeamRole(BaseModel):
@ -116,6 +120,7 @@ class FederatedLogin(BaseModel):
user = ForeignKeyField(User, index=True)
service = ForeignKeyField(LoginService, index=True)
service_ident = CharField()
metadata_json = TextField(default='{}')
class Meta:
database = db

View file

@ -8,6 +8,7 @@ from peewee import SqliteDatabase
from data.database import all_models, db
from app import app
from data.model.sqlalchemybridge import gen_sqlalchemy_metadata
from util.collections import AttrDict
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
@ -23,6 +24,7 @@ fileConfig(config.config_file_name)
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = gen_sqlalchemy_metadata(all_models)
tables = AttrDict(target_metadata.tables)
# other values from the config, defined by the needs of env.py,
# can be acquired:
@ -45,7 +47,7 @@ def run_migrations_offline():
context.configure(url=url, target_metadata=target_metadata, transactional_ddl=True)
with context.begin_transaction():
context.run_migrations()
context.run_migrations(tables=tables)
def run_migrations_online():
"""Run migrations in 'online' mode.
@ -72,7 +74,7 @@ def run_migrations_online():
try:
with context.begin_transaction():
context.run_migrations()
context.run_migrations(tables=tables)
finally:
connection.close()

View file

@ -14,9 +14,9 @@ from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
def upgrade():
def upgrade(tables):
${upgrades if upgrades else "pass"}
def downgrade():
def downgrade(tables):
${downgrades if downgrades else "pass"}

View file

@@ -0,0 +1,35 @@
"""add metadata field to external logins

Revision ID: 1594a74a74ca
Revises: f42b0ea7a4d
Create Date: 2014-09-04 18:17:35.205698

"""

# revision identifiers, used by Alembic.
revision = '1594a74a74ca'
down_revision = 'f42b0ea7a4d'

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql


def upgrade(tables):
    """Add the metadata_json column to federatedlogin and register the
    'google' login service.
    """
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('federatedlogin', sa.Column('metadata_json', sa.Text(), nullable=False))
    ### end Alembic commands ###

    loginservice = tables.loginservice
    op.bulk_insert(loginservice, [
        {'id':4, 'name':'google'},
    ])


def downgrade(tables):
    """Drop the metadata_json column and remove the 'google' login service."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('federatedlogin', 'metadata_json')
    ### end Alembic commands ###

    loginservice = tables.loginservice
    op.execute(
        loginservice.delete()
        .where(loginservice.c.name == op.inline_literal('google'))
    )

View file

@ -14,7 +14,7 @@ from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def upgrade():
def upgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.drop_index('buildtriggerservice_name', table_name='buildtriggerservice')
op.create_index('buildtriggerservice_name', 'buildtriggerservice', ['name'], unique=True)
@ -34,7 +34,7 @@ def upgrade():
### end Alembic commands ###
def downgrade():
def downgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.drop_index('visibility_name', table_name='visibility')
op.create_index('visibility_name', 'visibility', ['name'], unique=False)

View file

@ -13,12 +13,8 @@ down_revision = '4b7ef0c7bdb2'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
from data.model.sqlalchemybridge import gen_sqlalchemy_metadata
from data.database import all_models
def upgrade():
schema = gen_sqlalchemy_metadata(all_models)
def upgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.create_table('externalnotificationmethod',
sa.Column('id', sa.Integer(), nullable=False),
@ -26,7 +22,7 @@ def upgrade():
sa.PrimaryKeyConstraint('id')
)
op.create_index('externalnotificationmethod_name', 'externalnotificationmethod', ['name'], unique=True)
op.bulk_insert(schema.tables['externalnotificationmethod'],
op.bulk_insert(tables.externalnotificationmethod,
[
{'id':1, 'name':'quay_notification'},
{'id':2, 'name':'email'},
@ -38,7 +34,7 @@ def upgrade():
sa.PrimaryKeyConstraint('id')
)
op.create_index('externalnotificationevent_name', 'externalnotificationevent', ['name'], unique=True)
op.bulk_insert(schema.tables['externalnotificationevent'],
op.bulk_insert(tables.externalnotificationevent,
[
{'id':1, 'name':'repo_push'},
{'id':2, 'name':'build_queued'},
@ -77,7 +73,7 @@ def upgrade():
op.add_column(u'notification', sa.Column('dismissed', sa.Boolean(), nullable=False))
# Manually add the new notificationkind types
op.bulk_insert(schema.tables['notificationkind'],
op.bulk_insert(tables.notificationkind,
[
{'id':5, 'name':'repo_push'},
{'id':6, 'name':'build_queued'},
@ -87,7 +83,7 @@ def upgrade():
])
# Manually add the new logentrykind types
op.bulk_insert(schema.tables['logentrykind'],
op.bulk_insert(tables.logentrykind,
[
{'id':39, 'name':'add_repo_notification'},
{'id':40, 'name':'delete_repo_notification'},
@ -97,61 +93,49 @@ def upgrade():
### end Alembic commands ###
def downgrade():
schema = gen_sqlalchemy_metadata(all_models)
def downgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.drop_column(u'notification', 'dismissed')
op.drop_index('repositorynotification_uuid', table_name='repositorynotification')
op.drop_index('repositorynotification_repository_id', table_name='repositorynotification')
op.drop_index('repositorynotification_method_id', table_name='repositorynotification')
op.drop_index('repositorynotification_event_id', table_name='repositorynotification')
op.drop_table('repositorynotification')
op.drop_index('repositoryauthorizedemail_repository_id', table_name='repositoryauthorizedemail')
op.drop_index('repositoryauthorizedemail_email_repository_id', table_name='repositoryauthorizedemail')
op.drop_index('repositoryauthorizedemail_code', table_name='repositoryauthorizedemail')
op.drop_table('repositoryauthorizedemail')
op.drop_index('externalnotificationevent_name', table_name='externalnotificationevent')
op.drop_table('externalnotificationevent')
op.drop_index('externalnotificationmethod_name', table_name='externalnotificationmethod')
op.drop_table('externalnotificationmethod')
# Manually remove the notificationkind and logentrykind types
notificationkind = schema.tables['notificationkind']
op.execute(
(notificationkind.delete()
.where(notificationkind.c.name == op.inline_literal('repo_push')))
(tables.notificationkind.delete()
.where(tables.notificationkind.c.name == op.inline_literal('repo_push')))
)
op.execute(
(notificationkind.delete()
.where(notificationkind.c.name == op.inline_literal('build_queued')))
(tables.notificationkind.delete()
.where(tables.notificationkind.c.name == op.inline_literal('build_queued')))
)
op.execute(
(notificationkind.delete()
.where(notificationkind.c.name == op.inline_literal('build_start')))
(tables.notificationkind.delete()
.where(tables.notificationkind.c.name == op.inline_literal('build_start')))
)
op.execute(
(notificationkind.delete()
.where(notificationkind.c.name == op.inline_literal('build_success')))
(tables.notificationkind.delete()
.where(tables.notificationkind.c.name == op.inline_literal('build_success')))
)
op.execute(
(notificationkind.delete()
.where(notificationkind.c.name == op.inline_literal('build_failure')))
(tables.notificationkind.delete()
.where(tables.notificationkind.c.name == op.inline_literal('build_failure')))
)
op.execute(
(logentrykind.delete()
.where(logentrykind.c.name == op.inline_literal('add_repo_notification')))
(tables.logentrykind.delete()
.where(tables.logentrykind.c.name == op.inline_literal('add_repo_notification')))
)
op.execute(
(logentrykind.delete()
.where(logentrykind.c.name == op.inline_literal('delete_repo_notification')))
(tables.logentrykind.delete()
.where(tables.logentrykind.c.name == op.inline_literal('delete_repo_notification')))
)
### end Alembic commands ###

View file

@@ -0,0 +1,29 @@
"""add log kind for regenerating robot tokens

Revision ID: 43e943c0639f
Revises: 82297d834ad
Create Date: 2014-08-25 17:14:42.784518

"""

# revision identifiers, used by Alembic.
revision = '43e943c0639f'
down_revision = '82297d834ad'

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql


def upgrade(tables):
    """Register the 'regenerate_robot_token' log entry kind."""
    logentrykind = tables.logentrykind
    op.bulk_insert(logentrykind, [
        {'id': 41, 'name':'regenerate_robot_token'},
    ])


def downgrade(tables):
    """Remove the 'regenerate_robot_token' log entry kind."""
    logentrykind = tables.logentrykind
    op.execute(
        logentrykind.delete()
        .where(logentrykind.c.name == op.inline_literal('regenerate_robot_token'))
    )

View file

@ -18,14 +18,14 @@ def get_id(query):
conn = op.get_bind()
return list(conn.execute(query, ()).fetchall())[0][0]
def upgrade():
def upgrade(tables):
conn = op.get_bind()
event_id = get_id('Select id From externalnotificationevent Where name="repo_push" Limit 1')
method_id = get_id('Select id From externalnotificationmethod Where name="webhook" Limit 1')
event_id = get_id('Select id From externalnotificationevent Where name=\'repo_push\' Limit 1')
method_id = get_id('Select id From externalnotificationmethod Where name=\'webhook\' Limit 1')
conn.execute('Insert Into repositorynotification (uuid, repository_id, event_id, method_id, config_json) Select public_id, repository_id, %s, %s, parameters FROM webhook' % (event_id, method_id))
def downgrade():
def downgrade(tables):
conn = op.get_bind()
event_id = get_id('Select id From externalnotificationevent Where name="repo_push" Limit 1')
method_id = get_id('Select id From externalnotificationmethod Where name="webhook" Limit 1')
event_id = get_id('Select id From externalnotificationevent Where name=\'repo_push\' Limit 1')
method_id = get_id('Select id From externalnotificationmethod Where name=\'webhook\' Limit 1')
conn.execute('Insert Into webhook (public_id, repository_id, parameters) Select uuid, repository_id, config_json FROM repositorynotification Where event_id=%s And method_id=%s' % (event_id, method_id))

View file

@ -1,50 +1,39 @@
"""add new notification kinds
Revision ID: 4a0c94399f38
Revises: 82297d834ad
Revises: 1594a74a74ca
Create Date: 2014-08-28 16:17:01.898269
"""
# revision identifiers, used by Alembic.
revision = '4a0c94399f38'
down_revision = '82297d834ad'
down_revision = '1594a74a74ca'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
from data.model.sqlalchemybridge import gen_sqlalchemy_metadata
from data.database import all_models
def upgrade():
schema = gen_sqlalchemy_metadata(all_models)
op.bulk_insert(schema.tables['externalnotificationmethod'],
def upgrade(tables):
op.bulk_insert(tables.externalnotificationmethod,
[
{'id':4, 'name':'flowdock'},
{'id':5, 'name':'hipchat'},
{'id':6, 'name':'slack'},
])
def downgrade():
schema = gen_sqlalchemy_metadata(all_models)
externalnotificationmethod = schema.tables['externalnotificationmethod']
def downgrade(tables):
op.execute(
(externalnotificationmethod.delete()
.where(externalnotificationmethod.c.name == op.inline_literal('flowdock')))
(tables.externalnotificationmethod.delete()
.where(tables.externalnotificationmethod.c.name == op.inline_literal('flowdock')))
)
op.execute(
(externalnotificationmethod.delete()
.where(externalnotificationmethod.c.name == op.inline_literal('hipchat')))
(tables.externalnotificationmethod.delete()
.where(tables.externalnotificationmethod.c.name == op.inline_literal('hipchat')))
)
op.execute(
(externalnotificationmethod.delete()
.where(externalnotificationmethod.c.name == op.inline_literal('slack')))
(tables.externalnotificationmethod.delete()
.where(tables.externalnotificationmethod.c.name == op.inline_literal('slack')))
)

View file

@ -11,23 +11,18 @@ revision = '4b7ef0c7bdb2'
down_revision = 'bcdde200a1b'
from alembic import op
from data.model.sqlalchemybridge import gen_sqlalchemy_metadata
from data.database import all_models
import sqlalchemy as sa
def upgrade():
schema = gen_sqlalchemy_metadata(all_models)
op.bulk_insert(schema.tables['notificationkind'],
def upgrade(tables):
op.bulk_insert(tables.notificationkind,
[
{'id':4, 'name':'maintenance'},
])
def downgrade():
notificationkind = schema.tables['notificationkind']
def downgrade(tables):
op.execute(
(notificationkind.delete()
.where(notificationkind.c.name == op.inline_literal('maintenance')))
(tables.notificationkind.delete()
.where(tables.notificationkind.c.name == op.inline_literal('maintenance')))
)

View file

@@ -0,0 +1,28 @@
"""Add brute force prevention metadata to the user table.

Revision ID: 4fdb65816b8d
Revises: 43e943c0639f
Create Date: 2014-09-03 12:35:33.722435

"""

# revision identifiers, used by Alembic.
revision = '4fdb65816b8d'
down_revision = '43e943c0639f'

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql


def upgrade(tables):
    """Add the login-throttling counters to the user table.

    Server defaults are required so existing rows satisfy NOT NULL.
    """
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('user', sa.Column('invalid_login_attempts', sa.Integer(), nullable=False,
                                    server_default="0"))
    op.add_column('user', sa.Column('last_invalid_login', sa.DateTime(), nullable=False,
                                    server_default=sa.func.now()))
    ### end Alembic commands ###


def downgrade(tables):
    """Drop the login-throttling counters from the user table."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('user', 'last_invalid_login')
    op.drop_column('user', 'invalid_login_attempts')
    ### end Alembic commands ###

View file

@ -11,14 +11,9 @@ revision = '5a07499ce53f'
down_revision = None
from alembic import op
from data.model.sqlalchemybridge import gen_sqlalchemy_metadata
from data.database import all_models
import sqlalchemy as sa
def upgrade():
schema = gen_sqlalchemy_metadata(all_models)
def upgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.create_table('loginservice',
sa.Column('id', sa.Integer(), nullable=False),
@ -27,7 +22,7 @@ def upgrade():
)
op.create_index('loginservice_name', 'loginservice', ['name'], unique=True)
op.bulk_insert(schema.tables['loginservice'],
op.bulk_insert(tables.loginservice,
[
{'id':1, 'name':'github'},
{'id':2, 'name':'quayrobot'},
@ -66,7 +61,7 @@ def upgrade():
)
op.create_index('role_name', 'role', ['name'], unique=False)
op.bulk_insert(schema.tables['role'],
op.bulk_insert(tables.role,
[
{'id':1, 'name':'admin'},
{'id':2, 'name':'write'},
@ -80,7 +75,7 @@ def upgrade():
)
op.create_index('logentrykind_name', 'logentrykind', ['name'], unique=False)
op.bulk_insert(schema.tables['logentrykind'],
op.bulk_insert(tables.logentrykind,
[
{'id':1, 'name':'account_change_plan'},
{'id':2, 'name':'account_change_cc'},
@ -136,7 +131,7 @@ def upgrade():
)
op.create_index('notificationkind_name', 'notificationkind', ['name'], unique=False)
op.bulk_insert(schema.tables['notificationkind'],
op.bulk_insert(tables.notificationkind,
[
{'id':1, 'name':'password_required'},
{'id':2, 'name':'over_private_usage'},
@ -150,7 +145,7 @@ def upgrade():
)
op.create_index('teamrole_name', 'teamrole', ['name'], unique=False)
op.bulk_insert(schema.tables['teamrole'],
op.bulk_insert(tables.teamrole,
[
{'id':1, 'name':'admin'},
{'id':2, 'name':'creator'},
@ -164,7 +159,7 @@ def upgrade():
)
op.create_index('visibility_name', 'visibility', ['name'], unique=False)
op.bulk_insert(schema.tables['visibility'],
op.bulk_insert(tables.visibility,
[
{'id':1, 'name':'public'},
{'id':2, 'name':'private'},
@ -194,7 +189,7 @@ def upgrade():
)
op.create_index('buildtriggerservice_name', 'buildtriggerservice', ['name'], unique=False)
op.bulk_insert(schema.tables['buildtriggerservice'],
op.bulk_insert(tables.buildtriggerservice,
[
{'id':1, 'name':'github'},
])
@ -203,7 +198,7 @@ def upgrade():
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('service_id', sa.Integer(), nullable=False),
sa.Column('service_ident', sa.String(length=255, collation='utf8_general_ci'), nullable=False),
sa.Column('service_ident', sa.String(length=255), nullable=False),
sa.ForeignKeyConstraint(['service_id'], ['loginservice.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('id')
@ -375,7 +370,7 @@ def upgrade():
sa.Column('command', sa.Text(), nullable=True),
sa.Column('repository_id', sa.Integer(), nullable=False),
sa.Column('image_size', sa.BigInteger(), nullable=True),
sa.Column('ancestors', sa.String(length=60535, collation='latin1_swedish_ci'), nullable=True),
sa.Column('ancestors', sa.String(length=60535), nullable=True),
sa.Column('storage_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], ),
sa.ForeignKeyConstraint(['storage_id'], ['imagestorage.id'], ),
@ -490,119 +485,34 @@ def upgrade():
### end Alembic commands ###
def downgrade():
def downgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.drop_index('repositorybuild_uuid', table_name='repositorybuild')
op.drop_index('repositorybuild_trigger_id', table_name='repositorybuild')
op.drop_index('repositorybuild_resource_key', table_name='repositorybuild')
op.drop_index('repositorybuild_repository_id', table_name='repositorybuild')
op.drop_index('repositorybuild_pull_robot_id', table_name='repositorybuild')
op.drop_index('repositorybuild_access_token_id', table_name='repositorybuild')
op.drop_table('repositorybuild')
op.drop_index('repositorybuildtrigger_write_token_id', table_name='repositorybuildtrigger')
op.drop_index('repositorybuildtrigger_service_id', table_name='repositorybuildtrigger')
op.drop_index('repositorybuildtrigger_repository_id', table_name='repositorybuildtrigger')
op.drop_index('repositorybuildtrigger_pull_robot_id', table_name='repositorybuildtrigger')
op.drop_index('repositorybuildtrigger_connected_user_id', table_name='repositorybuildtrigger')
op.drop_table('repositorybuildtrigger')
op.drop_index('logentry_repository_id', table_name='logentry')
op.drop_index('logentry_performer_id', table_name='logentry')
op.drop_index('logentry_kind_id', table_name='logentry')
op.drop_index('logentry_datetime', table_name='logentry')
op.drop_index('logentry_account_id', table_name='logentry')
op.drop_index('logentry_access_token_id', table_name='logentry')
op.drop_table('logentry')
op.drop_index('repositorytag_repository_id_name', table_name='repositorytag')
op.drop_index('repositorytag_repository_id', table_name='repositorytag')
op.drop_index('repositorytag_image_id', table_name='repositorytag')
op.drop_table('repositorytag')
op.drop_index('permissionprototype_role_id', table_name='permissionprototype')
op.drop_index('permissionprototype_org_id_activating_user_id', table_name='permissionprototype')
op.drop_index('permissionprototype_org_id', table_name='permissionprototype')
op.drop_index('permissionprototype_delegate_user_id', table_name='permissionprototype')
op.drop_index('permissionprototype_delegate_team_id', table_name='permissionprototype')
op.drop_index('permissionprototype_activating_user_id', table_name='permissionprototype')
op.drop_table('permissionprototype')
op.drop_index('image_storage_id', table_name='image')
op.drop_index('image_repository_id_docker_image_id', table_name='image')
op.drop_index('image_repository_id', table_name='image')
op.drop_index('image_ancestors', table_name='image')
op.drop_table('image')
op.drop_index('oauthauthorizationcode_code', table_name='oauthauthorizationcode')
op.drop_index('oauthauthorizationcode_application_id', table_name='oauthauthorizationcode')
op.drop_table('oauthauthorizationcode')
op.drop_index('webhook_repository_id', table_name='webhook')
op.drop_index('webhook_public_id', table_name='webhook')
op.drop_table('webhook')
op.drop_index('teammember_user_id_team_id', table_name='teammember')
op.drop_index('teammember_user_id', table_name='teammember')
op.drop_index('teammember_team_id', table_name='teammember')
op.drop_table('teammember')
op.drop_index('oauthaccesstoken_uuid', table_name='oauthaccesstoken')
op.drop_index('oauthaccesstoken_refresh_token', table_name='oauthaccesstoken')
op.drop_index('oauthaccesstoken_authorized_user_id', table_name='oauthaccesstoken')
op.drop_index('oauthaccesstoken_application_id', table_name='oauthaccesstoken')
op.drop_index('oauthaccesstoken_access_token', table_name='oauthaccesstoken')
op.drop_table('oauthaccesstoken')
op.drop_index('repositorypermission_user_id_repository_id', table_name='repositorypermission')
op.drop_index('repositorypermission_user_id', table_name='repositorypermission')
op.drop_index('repositorypermission_team_id_repository_id', table_name='repositorypermission')
op.drop_index('repositorypermission_team_id', table_name='repositorypermission')
op.drop_index('repositorypermission_role_id', table_name='repositorypermission')
op.drop_index('repositorypermission_repository_id', table_name='repositorypermission')
op.drop_table('repositorypermission')
op.drop_index('accesstoken_role_id', table_name='accesstoken')
op.drop_index('accesstoken_repository_id', table_name='accesstoken')
op.drop_index('accesstoken_code', table_name='accesstoken')
op.drop_table('accesstoken')
op.drop_index('repository_visibility_id', table_name='repository')
op.drop_index('repository_namespace_name', table_name='repository')
op.drop_table('repository')
op.drop_index('team_role_id', table_name='team')
op.drop_index('team_organization_id', table_name='team')
op.drop_index('team_name_organization_id', table_name='team')
op.drop_index('team_name', table_name='team')
op.drop_table('team')
op.drop_index('emailconfirmation_user_id', table_name='emailconfirmation')
op.drop_index('emailconfirmation_code', table_name='emailconfirmation')
op.drop_table('emailconfirmation')
op.drop_index('notification_uuid', table_name='notification')
op.drop_index('notification_target_id', table_name='notification')
op.drop_index('notification_kind_id', table_name='notification')
op.drop_index('notification_created', table_name='notification')
op.drop_table('notification')
op.drop_index('oauthapplication_organization_id', table_name='oauthapplication')
op.drop_index('oauthapplication_client_id', table_name='oauthapplication')
op.drop_table('oauthapplication')
op.drop_index('federatedlogin_user_id', table_name='federatedlogin')
op.drop_index('federatedlogin_service_id_user_id', table_name='federatedlogin')
op.drop_index('federatedlogin_service_id_service_ident', table_name='federatedlogin')
op.drop_index('federatedlogin_service_id', table_name='federatedlogin')
op.drop_table('federatedlogin')
op.drop_index('buildtriggerservice_name', table_name='buildtriggerservice')
op.drop_table('buildtriggerservice')
op.drop_index('user_username', table_name='user')
op.drop_index('user_stripe_id', table_name='user')
op.drop_index('user_robot', table_name='user')
op.drop_index('user_organization', table_name='user')
op.drop_index('user_email', table_name='user')
op.drop_table('user')
op.drop_index('visibility_name', table_name='visibility')
op.drop_table('visibility')
op.drop_index('teamrole_name', table_name='teamrole')
op.drop_table('teamrole')
op.drop_index('notificationkind_name', table_name='notificationkind')
op.drop_table('notificationkind')
op.drop_index('logentrykind_name', table_name='logentrykind')
op.drop_table('logentrykind')
op.drop_index('role_name', table_name='role')
op.drop_table('role')
op.drop_index('queueitem_queue_name', table_name='queueitem')
op.drop_index('queueitem_processing_expires', table_name='queueitem')
op.drop_index('queueitem_available_after', table_name='queueitem')
op.drop_index('queueitem_available', table_name='queueitem')
op.drop_table('queueitem')
op.drop_table('imagestorage')
op.drop_index('loginservice_name', table_name='loginservice')
op.drop_table('loginservice')
### end Alembic commands ###

View file

@ -13,24 +13,17 @@ down_revision = '47670cbeced'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
from data.model.sqlalchemybridge import gen_sqlalchemy_metadata
from data.database import all_models
def upgrade():
schema = gen_sqlalchemy_metadata(all_models)
op.bulk_insert(schema.tables['imagestoragelocation'],
def upgrade(tables):
op.bulk_insert(tables.imagestoragelocation,
[
{'id':8, 'name':'s3_us_west_1'},
])
def downgrade():
schema = gen_sqlalchemy_metadata(all_models)
imagestoragelocation = schema.tables['imagestoragelocation']
def downgrade(tables):
op.execute(
(imagestoragelocation.delete()
.where(imagestoragelocation.c.name == op.inline_literal('s3_us_west_1')))
(tables.imagestoragelocation.delete()
.where(tables.imagestoragelocation.c.name == op.inline_literal('s3_us_west_1')))
)

View file

@ -11,14 +11,10 @@ revision = 'bcdde200a1b'
down_revision = '201d55b38649'
from alembic import op
from data.model.sqlalchemybridge import gen_sqlalchemy_metadata
from data.database import all_models
import sqlalchemy as sa
def upgrade():
schema = gen_sqlalchemy_metadata(all_models)
def upgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.create_table('imagestoragelocation',
sa.Column('id', sa.Integer(), nullable=False),
@ -27,7 +23,7 @@ def upgrade():
)
op.create_index('imagestoragelocation_name', 'imagestoragelocation', ['name'], unique=True)
op.bulk_insert(schema.tables['imagestoragelocation'],
op.bulk_insert(tables.imagestoragelocation,
[
{'id':1, 'name':'s3_us_east_1'},
{'id':2, 'name':'s3_eu_west_1'},
@ -52,12 +48,8 @@ def upgrade():
### end Alembic commands ###
def downgrade():
def downgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.drop_index('imagestorageplacement_storage_id_location_id', table_name='imagestorageplacement')
op.drop_index('imagestorageplacement_storage_id', table_name='imagestorageplacement')
op.drop_index('imagestorageplacement_location_id', table_name='imagestorageplacement')
op.drop_table('imagestorageplacement')
op.drop_index('imagestoragelocation_name', table_name='imagestoragelocation')
op.drop_table('imagestoragelocation')
### end Alembic commands ###

View file

@@ -0,0 +1,35 @@
"""Remove the old webhooks table.

Revision ID: f42b0ea7a4d
Revises: 4fdb65816b8d
Create Date: 2014-09-03 13:43:23.391464

"""

# revision identifiers, used by Alembic.
revision = 'f42b0ea7a4d'
down_revision = '4fdb65816b8d'

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql


def upgrade(tables):
    """Drop the legacy webhook table (superseded by repositorynotification)."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('webhook')
    ### end Alembic commands ###


def downgrade(tables):
    """Recreate the legacy webhook table with its original MySQL definition."""
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'webhook',
        sa.Column('id', mysql.INTEGER(display_width=11), nullable=False),
        sa.Column('public_id', mysql.VARCHAR(length=255), nullable=False),
        sa.Column('repository_id', mysql.INTEGER(display_width=11), autoincrement=False,
                  nullable=False),
        sa.Column('parameters', mysql.LONGTEXT(), nullable=False),
        sa.ForeignKeyConstraint(['repository_id'], [u'repository.id'],
                                name=u'fk_webhook_repository_repository_id'),
        sa.PrimaryKeyConstraint('id'),
        mysql_default_charset=u'latin1',
        mysql_engine=u'InnoDB'
    )
    ### end Alembic commands ###

View file

@ -1,12 +1,17 @@
import bcrypt
import logging
import datetime
import dateutil.parser
import json
from datetime import datetime, timedelta
from data.database import *
from util.validation import *
from util.names import format_robot_username
from util.backoff import exponential_backoff
EXPONENTIAL_BACKOFF_SCALE = timedelta(seconds=1)
logger = logging.getLogger(__name__)
@ -68,10 +73,15 @@ class TooManyUsersException(DataModelException):
pass
def is_create_user_allowed():
return get_active_user_count() < config.app_config['LICENSE_USER_LIMIT']
class TooManyLoginAttemptsException(Exception):
def __init__(self, message, retry_after):
super(TooManyLoginAttemptsException, self).__init__(message)
self.retry_after = retry_after
def is_create_user_allowed():
return True
def create_user(username, password, email):
""" Creates a regular user, if allowed. """
if not validate_password(password):
@ -181,6 +191,19 @@ def create_robot(robot_shortname, parent):
except Exception as ex:
raise DataModelException(ex.message)
def get_robot(robot_shortname, parent):
robot_username = format_robot_username(parent.username, robot_shortname)
robot = lookup_robot(robot_username)
if not robot:
msg = ('Could not find robot with username: %s' %
robot_username)
raise InvalidRobotException(msg)
service = LoginService.get(name='quayrobot')
login = FederatedLogin.get(FederatedLogin.user == robot, FederatedLogin.service == service)
return robot, login.service_ident
def lookup_robot(robot_username):
joined = User.select().join(FederatedLogin).join(LoginService)
@ -191,7 +214,6 @@ def lookup_robot(robot_username):
return found[0]
def verify_robot(robot_username, password):
joined = User.select().join(FederatedLogin).join(LoginService)
found = list(joined.where(FederatedLogin.service_ident == password,
@ -204,6 +226,25 @@ def verify_robot(robot_username, password):
return found[0]
def regenerate_robot_token(robot_shortname, parent):
robot_username = format_robot_username(parent.username, robot_shortname)
robot = lookup_robot(robot_username)
if not robot:
raise InvalidRobotException('Could not find robot with username: %s' %
robot_username)
password = random_string_generator(length=64)()
robot.email = password
service = LoginService.get(name='quayrobot')
login = FederatedLogin.get(FederatedLogin.user == robot, FederatedLogin.service == service)
login.service_ident = password
login.save()
robot.save()
return robot, password
def delete_robot(robot_username):
try:
@ -346,7 +387,8 @@ def set_team_org_permission(team, team_role_name, set_by_username):
return team
def create_federated_user(username, email, service_name, service_id, set_password_notification):
def create_federated_user(username, email, service_name, service_id,
set_password_notification, metadata={}):
if not is_create_user_allowed():
raise TooManyUsersException()
@ -356,7 +398,8 @@ def create_federated_user(username, email, service_name, service_id, set_passwor
service = LoginService.get(LoginService.name == service_name)
FederatedLogin.create(user=new_user, service=service,
service_ident=service_id)
service_ident=service_id,
metadata_json=json.dumps(metadata))
if set_password_notification:
create_notification('password_required', new_user)
@ -364,9 +407,10 @@ def create_federated_user(username, email, service_name, service_id, set_passwor
return new_user
def attach_federated_login(user, service_name, service_id):
def attach_federated_login(user, service_name, service_id, metadata={}):
service = LoginService.get(LoginService.name == service_name)
FederatedLogin.create(user=user, service=service, service_ident=service_id)
FederatedLogin.create(user=user, service=service, service_ident=service_id,
metadata_json=json.dumps(metadata))
return user
@ -385,7 +429,7 @@ def verify_federated_login(service_name, service_id):
def list_federated_logins(user):
selected = FederatedLogin.select(FederatedLogin.service_ident,
LoginService.name)
LoginService.name, FederatedLogin.metadata_json)
joined = selected.join(LoginService)
return joined.where(LoginService.name != 'quayrobot',
FederatedLogin.user == user)
@ -521,11 +565,30 @@ def verify_user(username_or_email, password):
except User.DoesNotExist:
return None
now = datetime.utcnow()
if fetched.invalid_login_attempts > 0:
can_retry_at = exponential_backoff(fetched.invalid_login_attempts, EXPONENTIAL_BACKOFF_SCALE,
fetched.last_invalid_login)
if can_retry_at > now:
retry_after = can_retry_at - now
raise TooManyLoginAttemptsException('Too many login attempts.', retry_after.total_seconds())
if (fetched.password_hash and
bcrypt.hashpw(password, fetched.password_hash) ==
fetched.password_hash):
if fetched.invalid_login_attempts > 0:
fetched.invalid_login_attempts = 0
fetched.save()
return fetched
fetched.invalid_login_attempts += 1
fetched.last_invalid_login = now
fetched.save()
# We weren't able to authorize the user
return None
@ -1007,7 +1070,8 @@ def find_create_or_link_image(docker_image_id, repository, username, translation
.join(Repository)
.join(Visibility)
.switch(Repository)
.join(RepositoryPermission, JOIN_LEFT_OUTER))
.join(RepositoryPermission, JOIN_LEFT_OUTER)
.where(ImageStorage.uploading == False))
query = (_filter_to_repos_for_user(query, username)
.where(Image.docker_image_id == docker_image_id))
@ -1687,19 +1751,20 @@ def create_notification(kind_name, target, metadata={}):
def create_unique_notification(kind_name, target, metadata={}):
with config.app_config['DB_TRANSACTION_FACTORY'](db):
if list_notifications(target, kind_name).count() == 0:
if list_notifications(target, kind_name, limit=1).count() == 0:
create_notification(kind_name, target, metadata)
def lookup_notification(user, uuid):
results = list(list_notifications(user, id_filter=uuid, include_dismissed=True))
results = list(list_notifications(user, id_filter=uuid, include_dismissed=True, limit=1))
if not results:
return None
return results[0]
def list_notifications(user, kind_name=None, id_filter=None, include_dismissed=False):
def list_notifications(user, kind_name=None, id_filter=None, include_dismissed=False,
page=None, limit=None):
Org = User.alias()
AdminTeam = Team.alias()
AdminTeamMember = TeamMember.alias()
@ -1737,6 +1802,11 @@ def list_notifications(user, kind_name=None, id_filter=None, include_dismissed=F
.switch(Notification)
.where(Notification.uuid == id_filter))
if page:
query = query.paginate(page, limit)
elif limit:
query = query.limit(limit)
return query