Collapse all migrations prior to 2.0.0 into one.

This commit is contained in:
Jake Moshenko 2016-11-10 14:56:56 -05:00
parent 6de039dc97
commit b5834a8a66
105 changed files with 957 additions and 4692 deletions
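
Collapsing a migration history replaces the long chain of revisions below with a single base revision; deployments that already have the schema are expected to be stamped at the new base rather than upgraded through it. A minimal sketch (not part of this commit) of the guard such a collapsed base typically carries; the revision ids and table name here are hypothetical:

from alembic import op
import sqlalchemy as sa

# Hypothetical identifiers; not the actual ids introduced by this commit.
revision = 'collapsed_base'
down_revision = None

def upgrade(tables):
  # If the legacy schema already exists, the operator should `alembic stamp`
  # the new base instead of re-creating tables through it.
  inspector = sa.inspect(op.get_bind())
  if 'repository' in inspector.get_table_names():
    raise Exception('Schema already exists; stamp revision %s instead.' % revision)
  # ... create the full pre-2.0.0 schema here ...

def downgrade(tables):
  raise Exception('Cannot downgrade past the collapsed base revision.')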


@@ -806,7 +806,9 @@ class LogEntry(BaseModel):
 class Messages(BaseModel):
   content = TextField()
-  uuid = CharField(default=uuid_generator, index=True)
+  # TODO: This should be non-nullable and indexed
+  uuid = CharField(default=uuid_generator, max_length=36, null=True)

 class RepositoryActionCount(BaseModel):
   repository = ForeignKeyField(Repository)
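
The TODO above implies a follow-up migration that backfills Messages.uuid and then tightens the column. A sketch of what that later Alembic migration could look like (assumed, not included in this commit):

import sqlalchemy as sa
from alembic import op

def upgrade(tables):
  # Assumes every existing row has had its uuid backfilled first.
  op.alter_column('messages', 'uuid', existing_type=sa.String(length=36),
    nullable=False)
  op.create_index('messages_uuid', 'messages', ['uuid'], unique=False)

def downgrade(tables):
  op.drop_index('messages_uuid', table_name='messages')
  op.alter_column('messages', 'uuid', existing_type=sa.String(length=36),
    nullable=True)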


@@ -1,26 +0,0 @@
"""Add take_ownership log entry kind

Revision ID: 0f17d94d11eb
Revises: a3ba52d02dec
Create Date: 2016-06-07 17:22:20.438873

"""

# revision identifiers, used by Alembic.
revision = '0f17d94d11eb'
down_revision = 'a3ba52d02dec'

from alembic import op

def upgrade(tables):
  op.bulk_insert(tables.logentrykind,
    [
      {'name':'take_ownership'},
    ])

def downgrade(tables):
  op.execute(
    (tables.logentrykind.delete()
     .where(tables.logentrykind.c.name == op.inline_literal('take_ownership')))
  )


@@ -1,29 +0,0 @@
"""Add uniqueness hash column for derived image storage

Revision ID: 1093d8b212bb
Revises: 0f17d94d11eb
Create Date: 2016-06-06 15:27:21.735669

"""

# revision identifiers, used by Alembic.
revision = '1093d8b212bb'
down_revision = '0f17d94d11eb'

from alembic import op
import sqlalchemy as sa

def upgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.drop_index('derivedstorageforimage_source_image_id_transformation_id', table_name='derivedstorageforimage')
  op.add_column('derivedstorageforimage', sa.Column('uniqueness_hash', sa.String(length=255), nullable=True))
  op.create_index('uniqueness_index', 'derivedstorageforimage', ['source_image_id', 'transformation_id', 'uniqueness_hash'], unique=True)
  ### end Alembic commands ###

def downgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.drop_index('uniqueness_index', table_name='derivedstorageforimage')
  op.drop_column('derivedstorageforimage', 'uniqueness_hash')
  op.create_index('derivedstorageforimage_source_image_id_transformation_id', 'derivedstorageforimage', ['source_image_id', 'transformation_id'], unique=True)
  ### end Alembic commands ###


@@ -1,24 +0,0 @@
"""Fix sequences in postgres

Revision ID: 10b999e8db1f
Revises: 1849ca8199fc
Create Date: 2015-11-16 14:00:05.383227

"""

# revision identifiers, used by Alembic.
revision = '10b999e8db1f'
down_revision = '1849ca8199fc'

from alembic import op
import sqlalchemy as sa
import uuid

from peewee import CharField, IntegrityError

from util.migrate.fixsequences import reset_enum_sequences

def upgrade(tables):
  reset_enum_sequences()

def downgrade(tables):
  pass
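
reset_enum_sequences lives in util/migrate/fixsequences.py and is not shown in this diff. On Postgres, realigning a sequence after rows were inserted with explicit ids generally reduces to a setval call; an illustrative sketch, with the table name assumed:

def reset_sequence(conn, table_name):
  # Align the serial sequence with the current maximum id (Postgres only).
  conn.execute(
    "SELECT setval(pg_get_serial_sequence('%s', 'id'), "
    "(SELECT COALESCE(MAX(id), 1) FROM %s))" % (table_name, table_name))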


@@ -1,31 +0,0 @@
"""Remove the deprecated imagestorage columns.

Revision ID: 127905a52fdd
Revises: 2e0380215d01
Create Date: 2015-09-17 15:48:56.667823

"""

# revision identifiers, used by Alembic.
revision = '127905a52fdd'
down_revision = '2e0380215d01'

from alembic import op
import sqlalchemy as sa

def upgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.drop_column('imagestorage', 'comment')
  op.drop_column('imagestorage', 'aggregate_size')
  op.drop_column('imagestorage', 'command')
  op.drop_column('imagestorage', 'created')
  ### end Alembic commands ###

def downgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.add_column('imagestorage', sa.Column('created', sa.DateTime(), nullable=True))
  op.add_column('imagestorage', sa.Column('command', sa.Text(), nullable=True))
  op.add_column('imagestorage', sa.Column('aggregate_size', sa.BigInteger(), nullable=True))
  op.add_column('imagestorage', sa.Column('comment', sa.Text(), nullable=True))
  ### end Alembic commands ###


@@ -1,24 +0,0 @@
"""Migrate registry namespaces to reference a user.

Revision ID: 13da56878560
Revises: 51d04d0e7e6f
Create Date: 2014-09-18 13:56:45.130455

"""

# revision identifiers, used by Alembic.
revision = '13da56878560'
down_revision = '51d04d0e7e6f'

from alembic import op
import sqlalchemy as sa

from data.database import Repository, User

def upgrade(tables):
  # Add the namespace_user column, allowing it to be nullable
  op.add_column('repository', sa.Column('namespace_user_id', sa.Integer(), sa.ForeignKey('user.id')))

def downgrade(tables):
  op.drop_column('repository', 'namespace_user_id')


@@ -1,29 +0,0 @@
"""Add build queue item reference to the repositorybuild table

Revision ID: 14fe12ade3df
Revises: 5ad999136045
Create Date: 2015-02-12 16:11:57.814645

"""

# revision identifiers, used by Alembic.
revision = '14fe12ade3df'
down_revision = '5ad999136045'

from alembic import op
import sqlalchemy as sa

def upgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.add_column('repositorybuild', sa.Column('queue_item_id', sa.Integer(), nullable=True))
  op.create_index('repositorybuild_queue_item_id', 'repositorybuild', ['queue_item_id'], unique=False)
  op.create_foreign_key(op.f('fk_repositorybuild_queue_item_id_queueitem'), 'repositorybuild', 'queueitem', ['queue_item_id'], ['id'])
  ### end Alembic commands ###

def downgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.drop_constraint(op.f('fk_repositorybuild_queue_item_id_queueitem'), 'repositorybuild', type_='foreignkey')
  op.drop_index('repositorybuild_queue_item_id', table_name='repositorybuild')
  op.drop_column('repositorybuild', 'queue_item_id')
  ### end Alembic commands ###


@@ -1,26 +0,0 @@
"""Add enabled column to the user system

Revision ID: 154f2befdfbe
Revises: 41f4587c84ae
Create Date: 2015-05-11 17:02:43.507847

"""

# revision identifiers, used by Alembic.
revision = '154f2befdfbe'
down_revision = '41f4587c84ae'

from alembic import op
import sqlalchemy as sa

def upgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.add_column('user', sa.Column('enabled', sa.Boolean(), nullable=False, default=True, server_default="1"))
  ### end Alembic commands ###

def downgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.drop_column('user', 'enabled')
  ### end Alembic commands ###


@@ -1,34 +0,0 @@
"""add metadata field to external logins

Revision ID: 1594a74a74ca
Revises: f42b0ea7a4d
Create Date: 2014-09-04 18:17:35.205698

"""

# revision identifiers, used by Alembic.
revision = '1594a74a74ca'
down_revision = 'f42b0ea7a4d'

from alembic import op
import sqlalchemy as sa

def upgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.add_column('federatedlogin', sa.Column('metadata_json', sa.Text(), nullable=False))
  ### end Alembic commands ###

  op.bulk_insert(tables.loginservice,
    [
      {'name':'google'},
    ])

def downgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.drop_column('federatedlogin', 'metadata_json')
  ### end Alembic commands ###

  op.execute(
    (tables.loginservice.delete()
     .where(tables.loginservice.c.name == op.inline_literal('google')))
  )


@@ -1,22 +0,0 @@
"""add uuid field to user

Revision ID: 17f11e265e13
Revises: 313d297811c4
Create Date: 2014-11-11 14:32:54.866188

"""

# revision identifiers, used by Alembic.
revision = '17f11e265e13'
down_revision = '313d297811c4'

from alembic import op
import sqlalchemy as sa

def upgrade(tables):
  op.add_column('user', sa.Column('uuid', sa.String(length=36), nullable=True))

def downgrade(tables):
  op.drop_column('user', 'uuid')


@@ -1,37 +0,0 @@
"""Remove DerivedImageStorage table

Revision ID: 1849ca8199fc
Revises: 5a2445ffe21b
Create Date: 2015-11-25 11:45:32.928533

"""

# revision identifiers, used by Alembic.
revision = '1849ca8199fc'
down_revision = '5a2445ffe21b'

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql

def upgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.drop_table('derivedimagestorage')
  ### end Alembic commands ###

def downgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.create_table('derivedimagestorage',
    sa.Column('id', mysql.INTEGER(display_width=11), nullable=False),
    sa.Column('source_id', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True),
    sa.Column('derivative_id', mysql.INTEGER(display_width=11), autoincrement=False, nullable=False),
    sa.Column('transformation_id', mysql.INTEGER(display_width=11), autoincrement=False, nullable=False),
    sa.ForeignKeyConstraint(['derivative_id'], [u'imagestorage.id'], name=u'fk_derivedimagestorage_derivative_id_imagestorage'),
    sa.ForeignKeyConstraint(['source_id'], [u'imagestorage.id'], name=u'fk_derivedimagestorage_source_id_imagestorage'),
    sa.ForeignKeyConstraint(['transformation_id'], [u'imagestoragetransformation.id'], name=u'fk_dis_transformation_id_ist'),
    sa.PrimaryKeyConstraint('id'),
    mysql_default_charset=u'latin1',
    mysql_engine=u'InnoDB'
  )
  ### end Alembic commands ###


@@ -1,26 +0,0 @@
"""Add indices for security worker queries.

Revision ID: 1b2bb93ceb82
Revises: 22af01f81722
Create Date: 2015-11-18 13:27:41.161898

"""

# revision identifiers, used by Alembic.
revision = '1b2bb93ceb82'
down_revision = '22af01f81722'

from alembic import op

def upgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.create_index('image_security_indexed', 'image', ['security_indexed'], unique=False)
  op.create_index('image_security_indexed_engine', 'image', ['security_indexed_engine'], unique=False)
  ### end Alembic commands ###

def downgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.drop_index('image_security_indexed_engine', table_name='image')
  op.drop_index('image_security_indexed', table_name='image')
  ### end Alembic commands ###


@@ -1,55 +0,0 @@
"""Quay releases

Revision ID: 1c0f6ede8992
Revises: 545794454f49
Create Date: 2015-09-15 15:46:09.784607

"""

# revision identifiers, used by Alembic.
revision = '1c0f6ede8992'
down_revision = '545794454f49'

from alembic import op
import sqlalchemy as sa

def upgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.create_table('quayregion',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('name', sa.String(length=255), nullable=False),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_quayregion'))
  )
  op.create_index('quayregion_name', 'quayregion', ['name'], unique=True)

  op.create_table('quayservice',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('name', sa.String(length=255), nullable=False),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_quayservice'))
  )
  op.create_index('quayservice_name', 'quayservice', ['name'], unique=True)

  op.create_table('quayrelease',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('service_id', sa.Integer(), nullable=False),
    sa.Column('version', sa.String(length=255), nullable=False),
    sa.Column('region_id', sa.Integer(), nullable=False),
    sa.Column('reverted', sa.Boolean(), nullable=False),
    sa.Column('created', sa.DateTime(), nullable=False),
    sa.ForeignKeyConstraint(['region_id'], ['quayregion.id'], name=op.f('fk_quayrelease_region_id_quayregion')),
    sa.ForeignKeyConstraint(['service_id'], ['quayservice.id'], name=op.f('fk_quayrelease_service_id_quayservice')),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_quayrelease'))
  )
  op.create_index('quayrelease_created', 'quayrelease', ['created'], unique=False)
  op.create_index('quayrelease_region_id', 'quayrelease', ['region_id'], unique=False)
  op.create_index('quayrelease_service_id', 'quayrelease', ['service_id'], unique=False)
  op.create_index('quayrelease_service_id_region_id_created', 'quayrelease', ['service_id', 'region_id', 'created'], unique=False)
  op.create_index('quayrelease_service_id_version_region_id', 'quayrelease', ['service_id', 'version', 'region_id'], unique=True)
  ### end Alembic commands ###

def downgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.drop_table('quayrelease')
  op.drop_table('quayservice')
  op.drop_table('quayregion')
  ### end Alembic commands ###


@@ -1,29 +0,0 @@
"""Add revert_tag log entry kind

Revision ID: 1c3decf6b9c4
Revises: 4ce2169efd3b
Create Date: 2015-04-16 17:14:11.154856

"""

# revision identifiers, used by Alembic.
revision = '1c3decf6b9c4'
down_revision = '4ce2169efd3b'

from alembic import op
import sqlalchemy as sa

def upgrade(tables):
  op.bulk_insert(tables.logentrykind,
    [
      {'name':'revert_tag'},
    ])

def downgrade(tables):
  op.execute(
    (tables.logentrykind.delete()
     .where(tables.logentrykind.c.name == op.inline_literal('revert_tag')))
  )


@@ -1,22 +0,0 @@
"""backfill user uuids

Revision ID: 1c5b738283a5
Revises: 2fb36d4be80d
Create Date: 2014-11-20 18:22:03.418215

"""

# revision identifiers, used by Alembic.
revision = '1c5b738283a5'
down_revision = '2fb36d4be80d'

from alembic import op
import sqlalchemy as sa

from util.migrate.backfill_user_uuids import backfill_user_uuids

def upgrade(tables):
  backfill_user_uuids()

def downgrade(tables):
  pass
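
backfill_user_uuids (util/migrate/backfill_user_uuids.py) is likewise not part of this diff. Such helpers usually walk the table in small batches, filling in NULL uuids as they go; a sketch under that assumption, using peewee as the surrounding code does:

import uuid
from data.database import User

def backfill_user_uuids(batch_size=100):
  while True:
    batch = list(User.select().where(User.uuid >> None).limit(batch_size))
    if not batch:
      break
    for user in batch:
      # '>> None' is peewee's IS NULL; re-checking it avoids racing writers.
      (User.update(uuid=str(uuid.uuid4()))
           .where((User.id == user.id) & (User.uuid >> None))
           .execute())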


@@ -1,36 +0,0 @@
"""Actually remove the column access_token_id

Revision ID: 1d2d86d09fcd
Revises: 14fe12ade3df
Create Date: 2015-02-12 16:27:30.260797

"""

# revision identifiers, used by Alembic.
revision = '1d2d86d09fcd'
down_revision = '14fe12ade3df'

from alembic import op
import sqlalchemy as sa
from sqlalchemy.exc import InternalError

def upgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  try:
    op.drop_constraint(u'fk_logentry_access_token_id_accesstoken', 'logentry', type_='foreignkey')
    op.drop_index('logentry_access_token_id', table_name='logentry')
    op.drop_column('logentry', 'access_token_id')
  except InternalError:
    pass
  ### end Alembic commands ###

def downgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  try:
    op.add_column('logentry', sa.Column('access_token_id', sa.Integer(), nullable=True))
    op.create_foreign_key(u'fk_logentry_access_token_id_accesstoken', 'logentry', 'accesstoken', ['access_token_id'], ['id'])
    op.create_index('logentry_access_token_id', 'logentry', ['access_token_id'], unique=False)
  except InternalError:
    pass
  ### end Alembic commands ###


@@ -1,25 +0,0 @@
"""Add gitlab trigger type

Revision ID: 1f116e06b68
Revises: 313179799c8b
Create Date: 2015-05-03 10:45:06.257913

"""

# revision identifiers, used by Alembic.
revision = '1f116e06b68'
down_revision = '313179799c8b'

from alembic import op
import sqlalchemy as sa

def upgrade(tables):
  op.bulk_insert(tables.buildtriggerservice, [{'name': 'gitlab'}])

def downgrade(tables):
  op.execute(
    tables.buildtriggerservice.delete()
      .where(tables.buildtriggerservice.c.name == op.inline_literal('gitlab'))
  )


@@ -1,53 +0,0 @@
"""Remove fields from image table that were migrated to imagestorage.

Revision ID: 201d55b38649
Revises: 5a07499ce53f
Create Date: 2014-06-12 19:48:53.861115

"""

# revision identifiers, used by Alembic.
revision = '201d55b38649'
down_revision = '5a07499ce53f'

from alembic import op
import sqlalchemy as sa

def upgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.drop_index('buildtriggerservice_name', table_name='buildtriggerservice')
  op.create_index('buildtriggerservice_name', 'buildtriggerservice', ['name'], unique=True)
  op.drop_column('image', 'comment')
  op.drop_column('image', 'checksum')
  op.drop_column('image', 'image_size')
  op.drop_column('image', 'command')
  op.drop_column('image', 'created')
  op.drop_index('logentrykind_name', table_name='logentrykind')
  op.create_index('logentrykind_name', 'logentrykind', ['name'], unique=True)
  op.drop_index('notificationkind_name', table_name='notificationkind')
  op.create_index('notificationkind_name', 'notificationkind', ['name'], unique=True)
  op.drop_index('role_name', table_name='role')
  op.create_index('role_name', 'role', ['name'], unique=True)
  op.drop_index('visibility_name', table_name='visibility')
  op.create_index('visibility_name', 'visibility', ['name'], unique=True)
  ### end Alembic commands ###

def downgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.drop_index('visibility_name', table_name='visibility')
  op.create_index('visibility_name', 'visibility', ['name'], unique=False)
  op.drop_index('role_name', table_name='role')
  op.create_index('role_name', 'role', ['name'], unique=False)
  op.drop_index('notificationkind_name', table_name='notificationkind')
  op.create_index('notificationkind_name', 'notificationkind', ['name'], unique=False)
  op.drop_index('logentrykind_name', table_name='logentrykind')
  op.create_index('logentrykind_name', 'logentrykind', ['name'], unique=False)
  op.add_column('image', sa.Column('created', sa.DateTime(), nullable=True))
  op.add_column('image', sa.Column('command', sa.Text(), nullable=True))
  op.add_column('image', sa.Column('image_size', sa.BigInteger(), nullable=True))
  op.add_column('image', sa.Column('checksum', sa.String(length=255), nullable=True))
  op.add_column('image', sa.Column('comment', sa.Text(), nullable=True))
  op.drop_index('buildtriggerservice_name', table_name='buildtriggerservice')
  op.create_index('buildtriggerservice_name', 'buildtriggerservice', ['name'], unique=False)
  ### end Alembic commands ###


@@ -1,28 +0,0 @@
"""Add log entry kind for verbs

Revision ID: 204abf14783d
Revises: 2430f55c41d5
Create Date: 2014-10-29 15:38:06.100915

"""

# revision identifiers, used by Alembic.
revision = '204abf14783d'
down_revision = '2430f55c41d5'

from alembic import op
import sqlalchemy as sa

def upgrade(tables):
  op.bulk_insert(tables.logentrykind,
    [
      {'name':'repo_verb'},
    ])

def downgrade(tables):
  op.execute(
    (tables.logentrykind.delete()
     .where(tables.logentrykind.c.name == op.inline_literal('repo_verb')))
  )


@@ -1,39 +0,0 @@
"""add stars

Revision ID: 2088f2b81010
Revises: 707d5191eda
Create Date: 2014-12-02 17:45:00.707498

"""

# revision identifiers, used by Alembic.
revision = '2088f2b81010'
down_revision = '707d5191eda'

from alembic import op
import sqlalchemy as sa

def upgrade(tables):
  op.create_table('star',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('user_id', sa.Integer(), nullable=False),
    sa.Column('repository_id', sa.Integer(), nullable=False),
    sa.Column('created', sa.DateTime(), nullable=False),
    sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], name=op.f('fk_star_repository_id_repository')),
    sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_star_user_id_user')),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_star'))
  )
  with op.batch_alter_table('star', schema=None) as batch_op:
    batch_op.create_index('star_repository_id', ['repository_id'], unique=False)
    batch_op.create_index('star_user_id', ['user_id'], unique=False)
    batch_op.create_index('star_user_id_repository_id', ['user_id', 'repository_id'], unique=True)

def downgrade(tables):
  op.drop_constraint('fk_star_repository_id_repository', 'star', type_='foreignkey')
  op.drop_constraint('fk_star_user_id_user', 'star', type_='foreignkey')
  with op.batch_alter_table('star', schema=None) as batch_op:
    batch_op.drop_index('star_user_id_repository_id')
    batch_op.drop_index('star_user_id')
    batch_op.drop_index('star_repository_id')
  op.drop_table('star')


@@ -1,26 +0,0 @@
"""add private key to build triggers

Revision ID: 214350b6a8b1
Revises: 67eb43c778b
Create Date: 2015-03-19 14:23:52.604505

"""

# revision identifiers, used by Alembic.
revision = '214350b6a8b1'
down_revision = '67eb43c778b'

from alembic import op
import sqlalchemy as sa

def upgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.add_column('repositorybuildtrigger', sa.Column('private_key', sa.Text(), nullable=True))
  ### end Alembic commands ###

def downgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.drop_column('repositorybuildtrigger', 'private_key')
  ### end Alembic commands ###


@@ -1,24 +0,0 @@
"""mysql max index lengths

Revision ID: 228d1af6af1c
Revises: 5b84373e5db
Create Date: 2015-01-06 14:35:24.651424

"""

# revision identifiers, used by Alembic.
revision = '228d1af6af1c'
down_revision = '5b84373e5db'

from alembic import op
import sqlalchemy as sa

def upgrade(tables):
  op.drop_index('queueitem_queue_name', table_name='queueitem')
  op.create_index('queueitem_queue_name', 'queueitem', ['queue_name'], unique=False, mysql_length=767)

  op.drop_index('image_ancestors', table_name='image')
  op.create_index('image_ancestors', 'image', ['ancestors'], unique=False, mysql_length=767)

def downgrade(tables):
  pass


@@ -1,21 +0,0 @@
"""Backfill parent id and v1 checksums

Revision ID: 22af01f81722
Revises: 2827d36939e4
Create Date: 2015-11-05 16:24:43.679323

"""

# revision identifiers, used by Alembic.
revision = '22af01f81722'
down_revision = '2827d36939e4'

from util.migrate.backfill_v1_checksums import backfill_checksums
from util.migrate.backfill_parent_id import backfill_parent_id

def upgrade(tables):
  backfill_parent_id()
  backfill_checksums()

def downgrade(tables):
  pass


@@ -1,40 +0,0 @@
"""Add the torrentinfo table and torrent fields on blobuploads.

Revision ID: 23ca04d0bc8e
Revises: 471caec2cb66
Create Date: 2016-01-06 13:25:24.597037

"""

# revision identifiers, used by Alembic.
revision = '23ca04d0bc8e'
down_revision = '471caec2cb66'

from alembic import op
import sqlalchemy as sa

from util.migrate import UTF8LongText

def upgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.create_table('torrentinfo',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('storage_id', sa.Integer(), nullable=False),
    sa.Column('piece_length', sa.Integer(), nullable=False),
    sa.Column('pieces', sa.Text(), nullable=False),
    sa.ForeignKeyConstraint(['storage_id'], ['imagestorage.id'], name=op.f('fk_torrentinfo_storage_id_imagestorage')),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_torrentinfo'))
  )
  op.create_index('torrentinfo_storage_id', 'torrentinfo', ['storage_id'], unique=False)
  op.create_index('torrentinfo_storage_id_piece_length', 'torrentinfo', ['storage_id', 'piece_length'], unique=True)
  op.add_column(u'blobupload', sa.Column('piece_hashes', UTF8LongText(), nullable=True))
  op.add_column(u'blobupload', sa.Column('piece_sha_state', UTF8LongText(), nullable=True))
  ### end Alembic commands ###

def downgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.drop_column(u'blobupload', 'piece_sha_state')
  op.drop_column(u'blobupload', 'piece_hashes')
  op.drop_table('torrentinfo')
  ### end Alembic commands ###


@@ -1,22 +0,0 @@
"""Calculate uncompressed sizes for all images

Revision ID: 2430f55c41d5
Revises: 3b4d3a4461dc
Create Date: 2014-10-07 14:50:04.660315

"""

# revision identifiers, used by Alembic.
revision = '2430f55c41d5'
down_revision = '3b4d3a4461dc'

from alembic import op
import sqlalchemy as sa

from util.migrate.uncompressedsize import backfill_sizes_from_data

def upgrade(tables):
  backfill_sizes_from_data()

def downgrade(tables):
  pass


@@ -1,26 +0,0 @@
"""Add index to retries_remaining

Revision ID: 246df01a6d51
Revises: 5232a5610a0a
Create Date: 2015-08-04 17:59:42.262877

"""

# revision identifiers, used by Alembic.
revision = '246df01a6d51'
down_revision = '5232a5610a0a'

from alembic import op
import sqlalchemy as sa

def upgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.create_index('queueitem_retries_remaining', 'queueitem', ['retries_remaining'], unique=False)
  ### end Alembic commands ###

def downgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.drop_index('queueitem_retries_remaining', table_name='queueitem')
  ### end Alembic commands ###


@@ -1,30 +0,0 @@
"""Separate v1 and v2 checksums.

Revision ID: 2827d36939e4
Revises: 5cdc2d819c5
Create Date: 2015-11-04 16:29:48.905775

"""

# revision identifiers, used by Alembic.
revision = '2827d36939e4'
down_revision = '5cdc2d819c5'

from alembic import op
import sqlalchemy as sa

def upgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.add_column('image', sa.Column('v1_checksum', sa.String(length=255), nullable=True))
  op.add_column('imagestorage', sa.Column('content_checksum', sa.String(length=255), nullable=True))
  op.create_index('imagestorage_content_checksum', 'imagestorage', ['content_checksum'], unique=False)
  ### end Alembic commands ###

def downgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.drop_index('imagestorage_content_checksum', table_name='imagestorage')
  op.drop_column('imagestorage', 'content_checksum')
  op.drop_column('image', 'v1_checksum')
  ### end Alembic commands ###


@@ -1,26 +0,0 @@
"""Add aggregate size column

Revision ID: 2b2529fd23ff
Revises: 2088f2b81010
Create Date: 2015-03-16 17:36:53.321458

"""

# revision identifiers, used by Alembic.
revision = '2b2529fd23ff'
down_revision = '2088f2b81010'

from alembic import op
import sqlalchemy as sa

def upgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.add_column('imagestorage', sa.Column('aggregate_size', sa.BigInteger(), nullable=True))
  ### end Alembic commands ###

def downgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.drop_column('imagestorage', 'aggregate_size')
  ### end Alembic commands ###


@@ -1,26 +0,0 @@
"""Add a unique index to prevent deadlocks with tags.

Revision ID: 2b4dc0818a5e
Revises: 2b2529fd23ff
Create Date: 2015-03-20 23:37:10.558179

"""

# revision identifiers, used by Alembic.
revision = '2b4dc0818a5e'
down_revision = '2b2529fd23ff'

from alembic import op
import sqlalchemy as sa

def upgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.create_index('repositorytag_repository_id_name_lifetime_end_ts', 'repositorytag', ['repository_id', 'name', 'lifetime_end_ts'], unique=True)
  ### end Alembic commands ###

def downgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.drop_index('repositorytag_repository_id_name_lifetime_end_ts', table_name='repositorytag')
  ### end Alembic commands ###


@@ -1,26 +0,0 @@
"""Add keystone login service

Revision ID: 2bf8af5bad95
Revises: 154f2befdfbe
Create Date: 2015-06-29 21:19:13.053165

"""

# revision identifiers, used by Alembic.
revision = '2bf8af5bad95'
down_revision = '154f2befdfbe'

from alembic import op
import sqlalchemy as sa

def upgrade(tables):
  op.bulk_insert(tables.loginservice, [{'name': 'keystone'}])

def downgrade(tables):
  op.execute(
    tables.loginservice.delete()
      .where(tables.loginservice.c.name == op.inline_literal('keystone'))
  )


@@ -1,24 +0,0 @@
"""Backfill image fields from image storages

Revision ID: 2e0380215d01
Revises: 3ff4fbc94644
Create Date: 2015-09-15 16:57:42.850246

"""

# revision identifiers, used by Alembic.
revision = '2e0380215d01'
down_revision = '3ff4fbc94644'

from alembic import op
import sqlalchemy as sa

from util.migrate.backfill_image_fields import backfill_image_fields
from util.migrate.backfill_v1_metadata import backfill_v1_metadata

def upgrade(tables):
  backfill_image_fields()
  backfill_v1_metadata()

def downgrade(tables):
  pass


@@ -1,35 +0,0 @@
"""Add missing tag manifest table

Revision ID: 2e09ad97b06c
Revises: 2bf8af5bad95
Create Date: 2015-07-22 16:10:42.549566

"""

# revision identifiers, used by Alembic.
revision = '2e09ad97b06c'
down_revision = '2bf8af5bad95'

from alembic import op
import sqlalchemy as sa

def upgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.create_table('tagmanifest',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('tag_id', sa.Integer(), nullable=False),
    sa.Column('digest', sa.String(length=255), nullable=False),
    sa.Column('json_data', sa.Text(), nullable=False),
    sa.ForeignKeyConstraint(['tag_id'], ['repositorytag.id'], name=op.f('fk_tagmanifest_tag_id_repositorytag')),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_tagmanifest'))
  )
  op.create_index('tagmanifest_digest', 'tagmanifest', ['digest'], unique=True)
  op.create_index('tagmanifest_tag_id', 'tagmanifest', ['tag_id'], unique=True)
  ### end Alembic commands ###

def downgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.drop_table('tagmanifest')
  ### end Alembic commands ###


@@ -1,30 +0,0 @@
"""remove the namespace column.

Revision ID: 2fb36d4be80d
Revises: 17f11e265e13
Create Date: 2014-09-30 17:31:33.308490

"""

# revision identifiers, used by Alembic.
revision = '2fb36d4be80d'
down_revision = '17f11e265e13'

from alembic import op
import sqlalchemy as sa
import re

from app import app

NAMESPACE_EXTRACTOR = re.compile(r'^([a-z]+/)([a-z0-9_]+)(/.*$)')

def upgrade(tables):
  op.create_index('repository_namespace_user_id', 'repository', ['namespace_user_id'], unique=False)
  op.drop_column('repository', 'namespace')

def downgrade(tables):
  op.add_column('repository', sa.Column('namespace', sa.String(length=255)))
  op.drop_index('repository_namespace_user_id', table_name='repository')


@@ -1,36 +0,0 @@
"""Add RepositoryActionCount table

Revision ID: 30c044b75632
Revises: 2b4dc0818a5e
Create Date: 2015-04-13 13:21:18.159602

"""

# revision identifiers, used by Alembic.
revision = '30c044b75632'
down_revision = '2b4dc0818a5e'

from alembic import op
import sqlalchemy as sa

def upgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.create_table('repositoryactioncount',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('repository_id', sa.Integer(), nullable=False),
    sa.Column('count', sa.Integer(), nullable=False),
    sa.Column('date', sa.Date(), nullable=False),
    sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], name=op.f('fk_repositoryactioncount_repository_id_repository')),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_repositoryactioncount'))
  )
  op.create_index('repositoryactioncount_date', 'repositoryactioncount', ['date'], unique=False)
  op.create_index('repositoryactioncount_repository_id', 'repositoryactioncount', ['repository_id'], unique=False)
  op.create_index('repositoryactioncount_repository_id_date', 'repositoryactioncount', ['repository_id', 'date'], unique=True)
  ### end Alembic commands ###

def downgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.drop_table('repositoryactioncount')
  ### end Alembic commands ###


@@ -1,29 +0,0 @@
"""make resource_key nullable

Revision ID: 31288f79df53
Revises: 214350b6a8b1
Create Date: 2015-03-23 14:34:04.816295

"""

# revision identifiers, used by Alembic.
revision = '31288f79df53'
down_revision = '214350b6a8b1'

from alembic import op
import sqlalchemy as sa

def upgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.alter_column('repositorybuild', 'resource_key',
    existing_type=sa.String(length=255),
    nullable=True)
  ### end Alembic commands ###

def downgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.alter_column('repositorybuild', 'resource_key',
    existing_type=sa.String(length=255),
    nullable=False)
  ### end Alembic commands ###


@@ -1,25 +0,0 @@
"""Add bitbucket build trigger type

Revision ID: 313179799c8b
Revises: 37c47a7af956
Create Date: 2015-04-30 15:52:33.388825

"""

# revision identifiers, used by Alembic.
revision = '313179799c8b'
down_revision = '37c47a7af956'

from alembic import op
import sqlalchemy as sa

def upgrade(tables):
  op.bulk_insert(tables.buildtriggerservice, [{'name': 'bitbucket'}])

def downgrade(tables):
  op.execute(
    tables.buildtriggerservice.delete()
      .where(tables.buildtriggerservice.c.name == op.inline_literal('bitbucket'))
  )


@@ -1,25 +0,0 @@
"""Add an index to the docker_image_id field

Revision ID: 313d297811c4
Revises: 204abf14783d
Create Date: 2014-11-13 12:40:57.414787

"""

# revision identifiers, used by Alembic.
revision = '313d297811c4'
down_revision = '204abf14783d'

from alembic import op
import sqlalchemy as sa

def upgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.create_index('image_docker_image_id', 'image', ['docker_image_id'], unique=False)
  ### end Alembic commands ###

def downgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.drop_index('image_docker_image_id', table_name='image')
  ### end Alembic commands ###


@@ -1,140 +0,0 @@
"""Prepare the database for the new notifications system

Revision ID: 325a4d7c79d9
Revises: 4b7ef0c7bdb2
Create Date: 2014-07-31 13:08:18.667393

"""

# revision identifiers, used by Alembic.
revision = '325a4d7c79d9'
down_revision = '4b7ef0c7bdb2'

from alembic import op
import sqlalchemy as sa

def upgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.create_table('externalnotificationmethod',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('name', sa.String(length=255), nullable=False),
    sa.PrimaryKeyConstraint('id')
  )
  op.create_index('externalnotificationmethod_name', 'externalnotificationmethod', ['name'], unique=True)
  op.bulk_insert(tables.externalnotificationmethod,
    [
      {'name':'quay_notification'},
      {'name':'email'},
      {'name':'webhook'},
    ])

  op.create_table('externalnotificationevent',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('name', sa.String(length=255), nullable=False),
    sa.PrimaryKeyConstraint('id')
  )
  op.create_index('externalnotificationevent_name', 'externalnotificationevent', ['name'], unique=True)
  op.bulk_insert(tables.externalnotificationevent,
    [
      {'name':'repo_push'},
      {'name':'build_queued'},
      {'name':'build_start'},
      {'name':'build_success'},
      {'name':'build_failure'},
    ])

  op.create_table('repositoryauthorizedemail',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('repository_id', sa.Integer(), nullable=False),
    sa.Column('email', sa.String(length=255), nullable=False),
    sa.Column('code', sa.String(length=255), nullable=False),
    sa.Column('confirmed', sa.Boolean(), nullable=False),
    sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], ),
    sa.PrimaryKeyConstraint('id')
  )
  op.create_index('repositoryauthorizedemail_code', 'repositoryauthorizedemail', ['code'], unique=True)
  op.create_index('repositoryauthorizedemail_email_repository_id', 'repositoryauthorizedemail', ['email', 'repository_id'], unique=True)
  op.create_index('repositoryauthorizedemail_repository_id', 'repositoryauthorizedemail', ['repository_id'], unique=False)

  op.create_table('repositorynotification',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('uuid', sa.String(length=255), nullable=False),
    sa.Column('repository_id', sa.Integer(), nullable=False),
    sa.Column('event_id', sa.Integer(), nullable=False),
    sa.Column('method_id', sa.Integer(), nullable=False),
    sa.Column('config_json', sa.Text(), nullable=False),
    sa.ForeignKeyConstraint(['event_id'], ['externalnotificationevent.id'], ),
    sa.ForeignKeyConstraint(['method_id'], ['externalnotificationmethod.id'], ),
    sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], ),
    sa.PrimaryKeyConstraint('id')
  )
  op.create_index('repositorynotification_event_id', 'repositorynotification', ['event_id'], unique=False)
  op.create_index('repositorynotification_method_id', 'repositorynotification', ['method_id'], unique=False)
  op.create_index('repositorynotification_repository_id', 'repositorynotification', ['repository_id'], unique=False)
  op.create_index('repositorynotification_uuid', 'repositorynotification', ['uuid'], unique=False)

  op.add_column(u'notification', sa.Column('dismissed', sa.Boolean(), nullable=False))

  # Manually add the new notificationkind types
  op.bulk_insert(tables.notificationkind,
    [
      {'name':'repo_push'},
      {'name':'build_queued'},
      {'name':'build_start'},
      {'name':'build_success'},
      {'name':'build_failure'},
    ])

  # Manually add the new logentrykind types
  op.bulk_insert(tables.logentrykind,
    [
      {'name':'add_repo_notification'},
      {'name':'delete_repo_notification'},
    ])
  ### end Alembic commands ###

def downgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.drop_column(u'notification', 'dismissed')
  op.drop_table('repositorynotification')
  op.drop_table('repositoryauthorizedemail')
  op.drop_table('externalnotificationevent')
  op.drop_table('externalnotificationmethod')

  # Manually remove the notificationkind and logentrykind types
  op.execute(
    (tables.notificationkind.delete()
     .where(tables.notificationkind.c.name == op.inline_literal('repo_push')))
  )
  op.execute(
    (tables.notificationkind.delete()
     .where(tables.notificationkind.c.name == op.inline_literal('build_queued')))
  )
  op.execute(
    (tables.notificationkind.delete()
     .where(tables.notificationkind.c.name == op.inline_literal('build_start')))
  )
  op.execute(
    (tables.notificationkind.delete()
     .where(tables.notificationkind.c.name == op.inline_literal('build_success')))
  )
  op.execute(
    (tables.notificationkind.delete()
     .where(tables.notificationkind.c.name == op.inline_literal('build_failure')))
  )
  op.execute(
    (tables.logentrykind.delete()
     .where(tables.logentrykind.c.name == op.inline_literal('add_repo_notification')))
  )
  op.execute(
    (tables.logentrykind.delete()
     .where(tables.logentrykind.c.name == op.inline_literal('delete_repo_notification')))
  )
  ### end Alembic commands ###


@@ -1,43 +0,0 @@
"""Backport v2 db changes.

Revision ID: 33bd39ef5ed6
Revises: 127905a52fdd
Create Date: 2015-10-23 12:34:22.776542

"""

# revision identifiers, used by Alembic.
revision = '33bd39ef5ed6'
down_revision = '127905a52fdd'

from alembic import op
import sqlalchemy as sa

def upgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.create_table('blobupload',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('repository_id', sa.Integer(), nullable=False),
    sa.Column('uuid', sa.String(length=255), nullable=False),
    sa.Column('byte_count', sa.Integer(), nullable=False),
    sa.Column('sha_state', sa.Text(), nullable=True),
    sa.Column('location_id', sa.Integer(), nullable=False),
    sa.Column('storage_metadata', sa.Text(), nullable=True),
    sa.ForeignKeyConstraint(['location_id'], ['imagestoragelocation.id'], name=op.f('fk_blobupload_location_id_imagestoragelocation')),
    sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], name=op.f('fk_blobupload_repository_id_repository')),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_blobupload'))
  )
  op.create_index('blobupload_location_id', 'blobupload', ['location_id'], unique=False)
  op.create_index('blobupload_repository_id', 'blobupload', ['repository_id'], unique=False)
  op.create_index('blobupload_repository_id_uuid', 'blobupload', ['repository_id', 'uuid'], unique=True)
  op.create_index('blobupload_uuid', 'blobupload', ['uuid'], unique=True)
  op.add_column(u'imagestorage', sa.Column('cas_path', sa.Boolean(), nullable=False, server_default="0"))
  ### end Alembic commands ###

def downgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.drop_column(u'imagestorage', 'cas_path')
  op.drop_table('blobupload')
  ### end Alembic commands ###


@@ -1,26 +0,0 @@
"""Add support for build log migration.

Revision ID: 34fd69f63809
Revises: 4a0c94399f38
Create Date: 2014-09-12 11:50:09.217777

"""

# revision identifiers, used by Alembic.
revision = '34fd69f63809'
down_revision = '4a0c94399f38'

from alembic import op
import sqlalchemy as sa

def upgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.add_column('repositorybuild', sa.Column('logs_archived', sa.Boolean(), nullable=False, server_default=sa.sql.expression.false()))
  ### end Alembic commands ###

def downgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.drop_column('repositorybuild', 'logs_archived')
  ### end Alembic commands ###


@@ -1,32 +0,0 @@
"""Switch manifest text to a longtext.

Revision ID: 35f538da62
Revises: 33bd39ef5ed6
Create Date: 2015-10-23 15:31:27.353995

"""

# revision identifiers, used by Alembic.
revision = '35f538da62'
down_revision = '33bd39ef5ed6'

from alembic import op
import sqlalchemy as sa
from sqlalchemy.types import TypeDecorator, Text
from sqlalchemy.dialects.mysql import LONGTEXT

from util.migrate import UTF8LongText

def upgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.drop_column(u'tagmanifest', 'json_data')
  op.add_column(u'tagmanifest', sa.Column('json_data', UTF8LongText(), nullable=False))
  ### end Alembic commands ###

def downgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.drop_column(u'tagmanifest', 'json_data')
  op.add_column(u'tagmanifest', sa.Column('json_data', sa.Text(), nullable=False))
  ### end Alembic commands ###
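
UTF8LongText comes from util/migrate and its definition is not in this diff. Given the TypeDecorator and LONGTEXT imports above, a plausible shape is a decorator that emits LONGTEXT on MySQL and falls back to TEXT elsewhere; this is an inference, not the actual implementation:

from sqlalchemy.types import TypeDecorator, Text
from sqlalchemy.dialects.mysql import LONGTEXT

class UTF8LongText(TypeDecorator):
  # Inferred sketch; the real helper may choose charset/collation differently.
  impl = Text

  def load_dialect_impl(self, dialect):
    if dialect.name == 'mysql':
      return dialect.type_descriptor(LONGTEXT(charset='utf8mb4'))
    return dialect.type_descriptor(Text())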


@@ -1,25 +0,0 @@
"""add custom-git trigger type to database

Revision ID: 37c47a7af956
Revises: 3fee6f979c2a
Create Date: 2015-04-24 14:50:26.275516

"""

# revision identifiers, used by Alembic.
revision = '37c47a7af956'
down_revision = '3fee6f979c2a'

from alembic import op
import sqlalchemy as sa

def upgrade(tables):
  op.bulk_insert(tables.buildtriggerservice, [{'name': 'custom-git'}])

def downgrade(tables):
  op.execute(
    tables.buildtriggerservice.delete()
      .where(tables.buildtriggerservice.c.name == op.inline_literal('custom-git'))
  )


@@ -1,26 +0,0 @@
"""Add support for Dex login

Revision ID: 3a3bb77e17d5
Revises: 9512773a4a2
Create Date: 2015-09-04 15:57:38.007822

"""

# revision identifiers, used by Alembic.
revision = '3a3bb77e17d5'
down_revision = '9512773a4a2'

from alembic import op
import sqlalchemy as sa

def upgrade(tables):
  op.bulk_insert(tables.loginservice, [{'name': 'dex'}])

def downgrade(tables):
  op.execute(
    tables.loginservice.delete()
      .where(tables.loginservice.c.name == op.inline_literal('dex'))
  )


@@ -1,49 +0,0 @@
"""Add support for squashed images

Revision ID: 3b4d3a4461dc
Revises: b1d41e2071b
Create Date: 2014-10-07 14:49:13.105746

"""

# revision identifiers, used by Alembic.
revision = '3b4d3a4461dc'
down_revision = 'b1d41e2071b'

from alembic import op
import sqlalchemy as sa

def upgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.create_table('imagestoragetransformation',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('name', sa.String(length=255), nullable=False),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_imagestoragetransformation'))
  )
  op.create_index('imagestoragetransformation_name', 'imagestoragetransformation', ['name'], unique=True)
  op.bulk_insert(tables.imagestoragetransformation,
    [
      {'name':'squash'},
    ])

  op.create_table('derivedimagestorage',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('source_id', sa.Integer(), nullable=True),
    sa.Column('derivative_id', sa.Integer(), nullable=False),
    sa.Column('transformation_id', sa.Integer(), nullable=False),
    sa.ForeignKeyConstraint(['derivative_id'], ['imagestorage.id'], name=op.f('fk_derivedimagestorage_derivative_id_imagestorage')),
    sa.ForeignKeyConstraint(['source_id'], ['imagestorage.id'], name=op.f('fk_derivedimagestorage_source_id_imagestorage')),
    sa.ForeignKeyConstraint(['transformation_id'], ['imagestoragetransformation.id'], name=op.f('fk_dis_transformation_id_ist')),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_derivedimagestorage'))
  )
  op.create_index('derivedimagestorage_derivative_id', 'derivedimagestorage', ['derivative_id'], unique=False)
  op.create_index('derivedimagestorage_source_id', 'derivedimagestorage', ['source_id'], unique=False)
  op.create_index('derivedimagestorage_source_id_transformation_id', 'derivedimagestorage', ['source_id', 'transformation_id'], unique=True)
  op.create_index('derivedimagestorage_transformation_id', 'derivedimagestorage', ['transformation_id'], unique=False)
  ### end Alembic commands ###

def downgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.drop_table('derivedimagestorage')
  op.drop_table('imagestoragetransformation')
  ### end Alembic commands ###


@@ -1,44 +0,0 @@
"""Add access token kinds type

Revision ID: 3e2d38b52a75
Revises: 1d2d86d09fcd
Create Date: 2015-02-17 12:03:26.422485

"""

# revision identifiers, used by Alembic.
revision = '3e2d38b52a75'
down_revision = '1d2d86d09fcd'

from alembic import op
import sqlalchemy as sa

def upgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.create_table('accesstokenkind',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('name', sa.String(length=255), nullable=False),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_accesstokenkind'))
  )
  op.create_index('accesstokenkind_name', 'accesstokenkind', ['name'], unique=True)
  op.add_column(u'accesstoken', sa.Column('kind_id', sa.Integer(), nullable=True))
  op.create_index('accesstoken_kind_id', 'accesstoken', ['kind_id'], unique=False)
  op.create_foreign_key(op.f('fk_accesstoken_kind_id_accesstokenkind'), 'accesstoken', 'accesstokenkind', ['kind_id'], ['id'])
  ### end Alembic commands ###

  op.bulk_insert(tables.accesstokenkind,
    [
      {'name':'build-worker'},
      {'name':'pushpull-token'},
    ])

def downgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.drop_constraint(op.f('fk_accesstoken_kind_id_accesstokenkind'), 'accesstoken', type_='foreignkey')
  op.drop_index('accesstoken_kind_id', table_name='accesstoken')
  op.drop_column(u'accesstoken', 'kind_id')
  op.drop_index('accesstokenkind_name', table_name='accesstokenkind')
  op.drop_table('accesstokenkind')
  ### end Alembic commands ###


@@ -1,26 +0,0 @@
"""Backfill the namespace_user fields.

Revision ID: 3f4fe1194671
Revises: 6f2ecf5afcf
Create Date: 2014-09-24 14:29:45.192179

"""

# revision identifiers, used by Alembic.
revision = '3f4fe1194671'
down_revision = '6f2ecf5afcf'

from alembic import op
import sqlalchemy as sa

def upgrade(tables):
  conn = op.get_bind()
  user_table_name_escaped = conn.dialect.identifier_preparer.format_table(tables['user'])
  conn.execute('update repository set namespace_user_id = (select id from {0} where {0}.username = repository.namespace) where namespace_user_id is NULL'.format(user_table_name_escaped))
  op.create_index('repository_namespace_user_id_name', 'repository', ['namespace_user_id', 'name'], unique=True)

def downgrade(tables):
  op.drop_constraint('fk_repository_namespace_user_id_user', table_name='repository', type_='foreignkey')
  op.drop_index('repository_namespace_user_id_name', table_name='repository')


@@ -1,29 +0,0 @@
"""make auth_token nullable

Revision ID: 3fee6f979c2a
Revises: 31288f79df53
Create Date: 2015-03-27 11:11:24.046996

"""

# revision identifiers, used by Alembic.
revision = '3fee6f979c2a'
down_revision = '31288f79df53'

from alembic import op
import sqlalchemy as sa

def upgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.alter_column('repositorybuildtrigger', 'auth_token',
    existing_type=sa.String(length=255),
    nullable=True)
  ### end Alembic commands ###

def downgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.alter_column('repositorybuildtrigger', 'auth_token',
    existing_type=sa.String(length=255),
    nullable=False)
  ### end Alembic commands ###


@@ -1,28 +0,0 @@
"""Migrate GitHub triggers to use deploy keys

Revision ID: 3ff4fbc94644
Revises: 4d5f6716df0
Create Date: 2015-09-16 17:50:22.034146

"""

# revision identifiers, used by Alembic.
revision = '3ff4fbc94644'
down_revision = '4d5f6716df0'

from alembic import op
import sqlalchemy as sa

from util.migrate.migrategithubdeploykeys import backfill_github_deploykeys

def upgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  backfill_github_deploykeys()
  ### end Alembic commands ###

def downgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  pass
  ### end Alembic commands ###


@@ -1,28 +0,0 @@
"""Add new blobupload columns

Revision ID: 403d02fea323
Revises: 10b999e8db1f
Create Date: 2015-11-30 14:25:46.822730

"""

# revision identifiers, used by Alembic.
revision = '403d02fea323'
down_revision = '10b999e8db1f'

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql

def upgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.add_column('blobupload', sa.Column('chunk_count', sa.Integer(), server_default="0", nullable=False))
  op.add_column('blobupload', sa.Column('uncompressed_byte_count', sa.Integer(), nullable=True))
  ### end Alembic commands ###

def downgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.drop_column('blobupload', 'uncompressed_byte_count')
  op.drop_column('blobupload', 'chunk_count')
  ### end Alembic commands ###


@@ -1,28 +0,0 @@
"""Add JWT Authentication login service

Revision ID: 41f4587c84ae
Revises: 1f116e06b68
Create Date: 2015-06-02 16:13:02.636590

"""

# revision identifiers, used by Alembic.
revision = '41f4587c84ae'
down_revision = '1f116e06b68'

from alembic import op
import sqlalchemy as sa

def upgrade(tables):
  op.bulk_insert(tables.loginservice,
    [
      {'name':'jwtauthn'},
    ])

def downgrade(tables):
  op.execute(
    (tables.loginservice.delete()
     .where(tables.loginservice.c.name == op.inline_literal('jwtauthn')))
  )


@@ -1,24 +0,0 @@
"""Migrate BitBucket services to webhooks

Revision ID: 437ee6269a9d
Revises: 2e09ad97b06c
Create Date: 2015-07-21 14:03:44.964200

"""

from util.migrate.migratebitbucketservices import run_bitbucket_migration

# revision identifiers, used by Alembic.
revision = '437ee6269a9d'
down_revision = '2e09ad97b06c'

from alembic import op
import sqlalchemy as sa

def upgrade(tables):
  run_bitbucket_migration()

def downgrade(tables):
  pass


@@ -1,28 +0,0 @@
"""add log kind for regenerating robot tokens

Revision ID: 43e943c0639f
Revises: 82297d834ad
Create Date: 2014-08-25 17:14:42.784518

"""

# revision identifiers, used by Alembic.
revision = '43e943c0639f'
down_revision = '82297d834ad'

from alembic import op
import sqlalchemy as sa

def upgrade(tables):
  op.bulk_insert(tables.logentrykind,
    [
      {'name':'regenerate_robot_token'},
    ])

def downgrade(tables):
  op.execute(
    (tables.logentrykind.delete()
     .where(tables.logentrykind.c.name == op.inline_literal('regenerate_robot_token')))
  )


@@ -1,27 +0,0 @@
"""Add invoice email address to user

Revision ID: 471caec2cb66
Revises: 88e0f440a2f
Create Date: 2015-12-28 13:57:17.761334

"""

# revision identifiers, used by Alembic.
revision = '471caec2cb66'
down_revision = '88e0f440a2f'

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql

def upgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.add_column('user', sa.Column('invoice_email_address', sa.String(length=255), nullable=True))
  op.create_index('user_invoice_email_address', 'user', ['invoice_email_address'], unique=False)
  ### end Alembic commands ###

def downgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.drop_column('user', 'invoice_email_address')
  ### end Alembic commands ###


@@ -1,31 +0,0 @@
"""Migrate existing webhooks to notifications.

Revision ID: 47670cbeced
Revises: 325a4d7c79d9
Create Date: 2014-07-31 13:49:38.332807
Hand Edited By Joseph Schorr

"""

# revision identifiers, used by Alembic.
revision = '47670cbeced'
down_revision = '325a4d7c79d9'

from alembic import op, context
import sqlalchemy as sa

def get_id(query):
  conn = op.get_bind()
  return list(conn.execute(query, ()).fetchall())[0][0]

def upgrade(tables):
  conn = op.get_bind()
  event_id = get_id('Select id From externalnotificationevent Where name=\'repo_push\' Limit 1')
  method_id = get_id('Select id From externalnotificationmethod Where name=\'webhook\' Limit 1')
  conn.execute('Insert Into repositorynotification (uuid, repository_id, event_id, method_id, config_json) Select public_id, repository_id, %s, %s, parameters FROM webhook' % (event_id, method_id))

def downgrade(tables):
  conn = op.get_bind()
  event_id = get_id('Select id From externalnotificationevent Where name=\'repo_push\' Limit 1')
  method_id = get_id('Select id From externalnotificationmethod Where name=\'webhook\' Limit 1')
  conn.execute('Insert Into webhook (public_id, repository_id, parameters) Select uuid, repository_id, config_json FROM repositorynotification Where event_id=%s And method_id=%s' % (event_id, method_id))


@@ -1,26 +0,0 @@
"""Add title field to notification

Revision ID: 499f6f08de3
Revises: 246df01a6d51
Create Date: 2015-08-21 14:18:07.287743

"""

# revision identifiers, used by Alembic.
revision = '499f6f08de3'
down_revision = '246df01a6d51'

from alembic import op
import sqlalchemy as sa

def upgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.add_column('repositorynotification', sa.Column('title', sa.String(length=255), nullable=True))
  ### end Alembic commands ###

def downgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.drop_column('repositorynotification', 'title')
  ### end Alembic commands ###


@@ -1,38 +0,0 @@
"""add new notification kinds

Revision ID: 4a0c94399f38
Revises: 1594a74a74ca
Create Date: 2014-08-28 16:17:01.898269

"""

# revision identifiers, used by Alembic.
revision = '4a0c94399f38'
down_revision = '1594a74a74ca'

from alembic import op
import sqlalchemy as sa

def upgrade(tables):
  op.bulk_insert(tables.externalnotificationmethod,
    [
      {'name':'flowdock'},
      {'name':'hipchat'},
      {'name':'slack'},
    ])

def downgrade(tables):
  op.execute(
    (tables.externalnotificationmethod.delete()
     .where(tables.externalnotificationmethod.c.name == op.inline_literal('flowdock')))
  )
  op.execute(
    (tables.externalnotificationmethod.delete()
     .where(tables.externalnotificationmethod.c.name == op.inline_literal('hipchat')))
  )
  op.execute(
    (tables.externalnotificationmethod.delete()
     .where(tables.externalnotificationmethod.c.name == op.inline_literal('slack')))
  )


@@ -1,28 +0,0 @@
"""Add the maintenance notification type.

Revision ID: 4b7ef0c7bdb2
Revises: bcdde200a1b
Create Date: 2014-06-27 19:09:56.387534

"""

# revision identifiers, used by Alembic.
revision = '4b7ef0c7bdb2'
down_revision = 'bcdde200a1b'

from alembic import op
import sqlalchemy as sa

def upgrade(tables):
  op.bulk_insert(tables.notificationkind,
    [
      {'name':'maintenance'},
    ])

def downgrade(tables):
  op.execute(
    (tables.notificationkind.delete()
     .where(tables.notificationkind.c.name == op.inline_literal('maintenance')))
  )


@@ -1,26 +0,0 @@
"""Add reversion column to the tags table

Revision ID: 4ce2169efd3b
Revises: 30c044b75632
Create Date: 2015-04-16 17:10:16.039835

"""

# revision identifiers, used by Alembic.
revision = '4ce2169efd3b'
down_revision = '30c044b75632'

from alembic import op
import sqlalchemy as sa

def upgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.add_column('repositorytag', sa.Column('reversion', sa.Boolean(), nullable=False, server_default=sa.sql.expression.false()))
  ### end Alembic commands ###

def downgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.drop_column('repositorytag', 'reversion')
  ### end Alembic commands ###


@@ -1,26 +0,0 @@
"""Add legacy column for GitHub backfill tracking

Revision ID: 4d5f6716df0
Revises: 1c0f6ede8992
Create Date: 2015-09-16 17:49:40.334540

"""

# revision identifiers, used by Alembic.
revision = '4d5f6716df0'
down_revision = '1c0f6ede8992'

from alembic import op
import sqlalchemy as sa

def upgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.add_column('repositorybuildtrigger', sa.Column('used_legacy_github', sa.Boolean(), nullable=True))
  ### end Alembic commands ###

def downgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.drop_column('repositorybuildtrigger', 'used_legacy_github')
  ### end Alembic commands ###


@@ -1,26 +0,0 @@
"""Allow tags to be marked as hidden.

Revision ID: 4ef04c61fcf9
Revises: 509d2857566f
Create Date: 2015-02-18 16:34:16.586129

"""

# revision identifiers, used by Alembic.
revision = '4ef04c61fcf9'
down_revision = '509d2857566f'

from alembic import op
import sqlalchemy as sa

def upgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.add_column('repositorytag', sa.Column('hidden', sa.Boolean(), nullable=False, server_default=sa.sql.expression.false()))
  ### end Alembic commands ###

def downgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.drop_column('repositorytag', 'hidden')
  ### end Alembic commands ###


@@ -1,27 +0,0 @@
"""Add brute force prevention metadata to the user table.

Revision ID: 4fdb65816b8d
Revises: 43e943c0639f
Create Date: 2014-09-03 12:35:33.722435

"""

# revision identifiers, used by Alembic.
revision = '4fdb65816b8d'
down_revision = '43e943c0639f'

from alembic import op
import sqlalchemy as sa

def upgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.add_column('user', sa.Column('invalid_login_attempts', sa.Integer(), nullable=False, server_default="0"))
  op.add_column('user', sa.Column('last_invalid_login', sa.DateTime(), nullable=False))
  ### end Alembic commands ###

def downgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.drop_column('user', 'last_invalid_login')
  op.drop_column('user', 'invalid_login_attempts')
  ### end Alembic commands ###


@@ -1,26 +0,0 @@
"""Add event-specific config

Revision ID: 50925110da8c
Revises: 57dad559ff2d
Create Date: 2015-10-13 18:03:14.859839

"""

# revision identifiers, used by Alembic.
revision = '50925110da8c'
down_revision = '57dad559ff2d'

from alembic import op
import sqlalchemy as sa

from util.migrate import UTF8LongText

def upgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.add_column('repositorynotification', sa.Column('event_config_json', UTF8LongText, nullable=False))
  ### end Alembic commands ###

def downgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.drop_column('repositorynotification', 'event_config_json')
  ### end Alembic commands ###

View file

@ -1,36 +0,0 @@
"""Track the lifetime start and end for tags to allow the state of a repository to be rewound.
Revision ID: 509d2857566f
Revises: 3e2d38b52a75
Create Date: 2015-02-13 14:35:38.939049
"""
# revision identifiers, used by Alembic.
revision = '509d2857566f'
down_revision = '3e2d38b52a75'
from alembic import op
import sqlalchemy as sa
def upgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.add_column('repositorytag', sa.Column('lifetime_end_ts', sa.Integer(), nullable=True))
op.add_column('repositorytag', sa.Column('lifetime_start_ts', sa.Integer(), nullable=False, server_default="0"))
op.create_index('repositorytag_lifetime_end_ts', 'repositorytag', ['lifetime_end_ts'], unique=False)
op.drop_index('repositorytag_repository_id_name', table_name='repositorytag')
op.create_index('repositorytag_repository_id_name', 'repositorytag', ['repository_id', 'name'], unique=False)
op.add_column('user', sa.Column('removed_tag_expiration_s', sa.Integer(), nullable=False, server_default="1209600"))
### end Alembic commands ###
def downgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.drop_column('user', 'removed_tag_expiration_s')
op.drop_index('repositorytag_repository_id_name', table_name='repositorytag')
op.create_index('repositorytag_repository_id_name', 'repositorytag', ['repository_id', 'name'], unique=True)
op.drop_index('repositorytag_lifetime_end_ts', table_name='repositorytag')
op.drop_column('repositorytag', 'lifetime_start_ts')
op.drop_column('repositorytag', 'lifetime_end_ts')
### end Alembic commands ###
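# A minimal sketch (not part of the migration) of how these columns let the
# repository state be rewound: a tag was live at unix time `ts` if it started
# at or before `ts` and had not yet ended. The column names are the real ones
# above; the query itself is illustrative only.
#
#   SELECT name, image_id
#   FROM repositorytag
#   WHERE repository_id = :repo
#     AND lifetime_start_ts <= :ts
#     AND (lifetime_end_ts IS NULL OR lifetime_end_ts > :ts);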

View file

@ -1,78 +0,0 @@
"""Email invites for joining a team.
Revision ID: 51d04d0e7e6f
Revises: 34fd69f63809
Create Date: 2014-09-15 23:51:35.478232
"""
# revision identifiers, used by Alembic.
revision = '51d04d0e7e6f'
down_revision = '34fd69f63809'
from alembic import op
import sqlalchemy as sa
def upgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.create_table('teammemberinvite',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=True),
sa.Column('email', sa.String(length=255), nullable=True),
sa.Column('team_id', sa.Integer(), nullable=False),
sa.Column('inviter_id', sa.Integer(), nullable=False),
sa.Column('invite_token', sa.String(length=255), nullable=False),
sa.ForeignKeyConstraint(['inviter_id'], ['user.id'], ),
sa.ForeignKeyConstraint(['team_id'], ['team.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index('teammemberinvite_inviter_id', 'teammemberinvite', ['inviter_id'], unique=False)
op.create_index('teammemberinvite_team_id', 'teammemberinvite', ['team_id'], unique=False)
op.create_index('teammemberinvite_user_id', 'teammemberinvite', ['user_id'], unique=False)
### end Alembic commands ###
# Manually add the new logentrykind types
op.bulk_insert(tables.logentrykind,
[
{'name':'org_invite_team_member'},
{'name':'org_team_member_invite_accepted'},
{'name':'org_team_member_invite_declined'},
{'name':'org_delete_team_member_invite'},
])
op.bulk_insert(tables.notificationkind,
[
{'name':'org_team_invite'},
])
def downgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.execute(
(tables.logentrykind.delete()
.where(tables.logentrykind.c.name == op.inline_literal('org_invite_team_member')))
)
op.execute(
(tables.logentrykind.delete()
.where(tables.logentrykind.c.name == op.inline_literal('org_team_member_invite_accepted')))
)
op.execute(
(tables.logentrykind.delete()
.where(tables.logentrykind.c.name == op.inline_literal('org_team_member_invite_declined')))
)
op.execute(
(tables.logentrykind.delete()
.where(tables.logentrykind.c.name == op.inline_literal('org_delete_team_member_invite')))
)
op.execute(
(tables.notificationkind.delete()
.where(tables.notificationkind.c.name == op.inline_literal('org_team_invite')))
)
op.drop_table('teammemberinvite')
### end Alembic commands ###

View file

@ -1,26 +0,0 @@
"""Add LogEntry repo-datetime-kind index
Revision ID: 5232a5610a0a
Revises: 437ee6269a9d
Create Date: 2015-07-31 13:25:41.877733
"""
# revision identifiers, used by Alembic.
revision = '5232a5610a0a'
down_revision = '437ee6269a9d'
from alembic import op
import sqlalchemy as sa
def upgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.create_index('logentry_repository_id_datetime_kind_id', 'logentry', ['repository_id', 'datetime', 'kind_id'], unique=False)
### end Alembic commands ###
def downgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.drop_index('logentry_repository_id_datetime_kind_id', table_name='logentry')
### end Alembic commands ###
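# Illustrative only: an index ordered (repository_id, datetime, kind_id) is
# shaped for the audit-log listing query, which pins one repository, scans a
# time range, and optionally narrows by kind without leaving the index:
#
#   SELECT * FROM logentry
#   WHERE repository_id = :repo
#     AND datetime BETWEEN :start AND :end
#     AND kind_id = :kind;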

View file

@ -1,35 +0,0 @@
"""Migrate image data back to image table
Revision ID: 545794454f49
Revises: 3a3bb77e17d5
Create Date: 2015-09-15 11:48:47.554255
"""
# revision identifiers, used by Alembic.
revision = '545794454f49'
down_revision = '3a3bb77e17d5'
from alembic import op
import sqlalchemy as sa
from util.migrate import UTF8LongText
def upgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.add_column('image', sa.Column('aggregate_size', sa.BigInteger(), nullable=True))
op.add_column('image', sa.Column('command', sa.Text(), nullable=True))
op.add_column('image', sa.Column('comment', UTF8LongText(), nullable=True))
op.add_column('image', sa.Column('created', sa.DateTime(), nullable=True))
op.add_column('image', sa.Column('v1_json_metadata', UTF8LongText(), nullable=True))
### end Alembic commands ###
def downgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.drop_column('image', 'v1_json_metadata')
op.drop_column('image', 'created')
op.drop_column('image', 'comment')
op.drop_column('image', 'command')
op.drop_column('image', 'aggregate_size')
### end Alembic commands ###

View file

@ -1,33 +0,0 @@
"""add support for quay's security indexer
Revision ID: 57dad559ff2d
Revises: 154f2befdfbe
Create Date: 2015-07-13 16:51:41.669249
"""
# revision identifiers, used by Alembic.
revision = '57dad559ff2d'
down_revision = '73669db7e12'
from alembic import op
import sqlalchemy as sa
def upgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.add_column('image', sa.Column('parent_id', sa.Integer(), nullable=True))
op.add_column('image', sa.Column('security_indexed', sa.Boolean(), nullable=False, default=False, server_default=sa.sql.expression.false()))
op.add_column('image', sa.Column('security_indexed_engine', sa.Integer(), nullable=False, default=-1, server_default="-1"))
op.create_index('image_parent_id', 'image', ['parent_id'], unique=False)
### end Alembic commands ###
op.create_index('image_security_indexed_engine_security_indexed', 'image', ['security_indexed_engine', 'security_indexed'])
def downgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.drop_index('image_security_indexed_engine_security_indexed', 'image')
op.drop_index('image_parent_id', table_name='image')
op.drop_column('image', 'security_indexed')
op.drop_column('image', 'security_indexed_engine')
op.drop_column('image', 'parent_id')
### end Alembic commands ###
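# Illustrative only: with server_default "-1", never-indexed images and images
# indexed by an older engine version both satisfy the scan a security-indexer
# worker would run against the new composite index, e.g.
#
#   SELECT id FROM image
#   WHERE security_indexed_engine < :current_engine_version;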

View file

@ -1,518 +0,0 @@
"""Set up initial database
Revision ID: 5a07499ce53f
Revises: None
Create Date: 2014-05-13 11:26:51.808426
"""
# revision identifiers, used by Alembic.
revision = '5a07499ce53f'
down_revision = None
from alembic import op
import sqlalchemy as sa
def upgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.create_table('loginservice',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.PrimaryKeyConstraint('id')
)
op.create_index('loginservice_name', 'loginservice', ['name'], unique=True)
op.bulk_insert(tables.loginservice,
[
{'name':'github'},
{'name':'quayrobot'},
{'name':'ldap'},
])
op.create_table('imagestorage',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('uuid', sa.String(length=255), nullable=False),
sa.Column('checksum', sa.String(length=255), nullable=True),
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('comment', sa.Text(), nullable=True),
sa.Column('command', sa.Text(), nullable=True),
sa.Column('image_size', sa.BigInteger(), nullable=True),
sa.Column('uploading', sa.Boolean(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_table('queueitem',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('queue_name', sa.String(length=1024), nullable=False),
sa.Column('body', sa.Text(), nullable=False),
sa.Column('available_after', sa.DateTime(), nullable=False),
sa.Column('available', sa.Boolean(), nullable=False),
sa.Column('processing_expires', sa.DateTime(), nullable=True),
sa.Column('retries_remaining', sa.Integer(), nullable=False),
sa.PrimaryKeyConstraint('id')
)
op.create_index('queueitem_available', 'queueitem', ['available'], unique=False)
op.create_index('queueitem_available_after', 'queueitem', ['available_after'], unique=False)
op.create_index('queueitem_processing_expires', 'queueitem', ['processing_expires'], unique=False)
op.create_index('queueitem_queue_name', 'queueitem', ['queue_name'], unique=False, mysql_length=767)
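# Annotation: mysql_length=767 above limits the indexed prefix to InnoDB's
# historical maximum key length in bytes; queue_name itself may be up to 1024
# characters, so MySQL indexes only a prefix of it.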
op.create_table('role',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.PrimaryKeyConstraint('id')
)
op.create_index('role_name', 'role', ['name'], unique=False)
op.bulk_insert(tables.role,
[
{'name':'admin'},
{'name':'write'},
{'name':'read'},
])
op.create_table('logentrykind',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.PrimaryKeyConstraint('id')
)
op.create_index('logentrykind_name', 'logentrykind', ['name'], unique=False)
op.bulk_insert(tables.logentrykind,
[
{'name':'account_change_plan'},
{'name':'account_change_cc'},
{'name':'account_change_password'},
{'name':'account_convert'},
{'name':'create_robot'},
{'name':'delete_robot'},
{'name':'create_repo'},
{'name':'push_repo'},
{'name':'pull_repo'},
{'name':'delete_repo'},
{'name':'create_tag'},
{'name':'move_tag'},
{'name':'delete_tag'},
{'name':'add_repo_permission'},
{'name':'change_repo_permission'},
{'name':'delete_repo_permission'},
{'name':'change_repo_visibility'},
{'name':'add_repo_accesstoken'},
{'name':'delete_repo_accesstoken'},
{'name':'add_repo_webhook'},
{'name':'delete_repo_webhook'},
{'name':'set_repo_description'},
{'name':'build_dockerfile'},
{'name':'org_create_team'},
{'name':'org_delete_team'},
{'name':'org_add_team_member'},
{'name':'org_remove_team_member'},
{'name':'org_set_team_description'},
{'name':'org_set_team_role'},
{'name':'create_prototype_permission'},
{'name':'modify_prototype_permission'},
{'name':'delete_prototype_permission'},
{'name':'setup_repo_trigger'},
{'name':'delete_repo_trigger'},
{'name':'create_application'},
{'name':'update_application'},
{'name':'delete_application'},
{'name':'reset_application_client_secret'},
])
op.create_table('notificationkind',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.PrimaryKeyConstraint('id')
)
op.create_index('notificationkind_name', 'notificationkind', ['name'], unique=False)
op.bulk_insert(tables.notificationkind,
[
{'name':'password_required'},
{'name':'over_private_usage'},
{'name':'expiring_license'},
])
op.create_table('teamrole',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.PrimaryKeyConstraint('id')
)
op.create_index('teamrole_name', 'teamrole', ['name'], unique=False)
op.bulk_insert(tables.teamrole,
[
{'name':'admin'},
{'name':'creator'},
{'name':'member'},
])
op.create_table('visibility',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.PrimaryKeyConstraint('id')
)
op.create_index('visibility_name', 'visibility', ['name'], unique=False)
op.bulk_insert(tables.visibility,
[
{'name':'public'},
{'name':'private'},
])
op.create_table('user',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('username', sa.String(length=255), nullable=False),
sa.Column('password_hash', sa.String(length=255), nullable=True),
sa.Column('email', sa.String(length=255), nullable=False),
sa.Column('verified', sa.Boolean(), nullable=False),
sa.Column('stripe_id', sa.String(length=255), nullable=True),
sa.Column('organization', sa.Boolean(), nullable=False),
sa.Column('robot', sa.Boolean(), nullable=False),
sa.Column('invoice_email', sa.Boolean(), nullable=False),
sa.PrimaryKeyConstraint('id')
)
op.create_index('user_email', 'user', ['email'], unique=True)
op.create_index('user_organization', 'user', ['organization'], unique=False)
op.create_index('user_robot', 'user', ['robot'], unique=False)
op.create_index('user_stripe_id', 'user', ['stripe_id'], unique=False)
op.create_index('user_username', 'user', ['username'], unique=True)
op.create_table('buildtriggerservice',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.PrimaryKeyConstraint('id')
)
op.create_index('buildtriggerservice_name', 'buildtriggerservice', ['name'], unique=False)
op.bulk_insert(tables.buildtriggerservice,
[
{'name':'github'},
])
op.create_table('federatedlogin',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('service_id', sa.Integer(), nullable=False),
sa.Column('service_ident', sa.String(length=255), nullable=False),
sa.ForeignKeyConstraint(['service_id'], ['loginservice.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index('federatedlogin_service_id', 'federatedlogin', ['service_id'], unique=False)
op.create_index('federatedlogin_service_id_service_ident', 'federatedlogin', ['service_id', 'service_ident'], unique=True)
op.create_index('federatedlogin_service_id_user_id', 'federatedlogin', ['service_id', 'user_id'], unique=True)
op.create_index('federatedlogin_user_id', 'federatedlogin', ['user_id'], unique=False)
op.create_table('oauthapplication',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('client_id', sa.String(length=255), nullable=False),
sa.Column('client_secret', sa.String(length=255), nullable=False),
sa.Column('redirect_uri', sa.String(length=255), nullable=False),
sa.Column('application_uri', sa.String(length=255), nullable=False),
sa.Column('organization_id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('description', sa.Text(), nullable=False),
sa.Column('gravatar_email', sa.String(length=255), nullable=True),
sa.ForeignKeyConstraint(['organization_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index('oauthapplication_client_id', 'oauthapplication', ['client_id'], unique=False)
op.create_index('oauthapplication_organization_id', 'oauthapplication', ['organization_id'], unique=False)
op.create_table('notification',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('uuid', sa.String(length=255), nullable=False),
sa.Column('kind_id', sa.Integer(), nullable=False),
sa.Column('target_id', sa.Integer(), nullable=False),
sa.Column('metadata_json', sa.Text(), nullable=False),
sa.Column('created', sa.DateTime(), nullable=False),
sa.ForeignKeyConstraint(['kind_id'], ['notificationkind.id'], ),
sa.ForeignKeyConstraint(['target_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index('notification_created', 'notification', ['created'], unique=False)
op.create_index('notification_kind_id', 'notification', ['kind_id'], unique=False)
op.create_index('notification_target_id', 'notification', ['target_id'], unique=False)
op.create_index('notification_uuid', 'notification', ['uuid'], unique=False)
op.create_table('emailconfirmation',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('code', sa.String(length=255), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('pw_reset', sa.Boolean(), nullable=False),
sa.Column('new_email', sa.String(length=255), nullable=True),
sa.Column('email_confirm', sa.Boolean(), nullable=False),
sa.Column('created', sa.DateTime(), nullable=False),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index('emailconfirmation_code', 'emailconfirmation', ['code'], unique=True)
op.create_index('emailconfirmation_user_id', 'emailconfirmation', ['user_id'], unique=False)
op.create_table('team',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('organization_id', sa.Integer(), nullable=False),
sa.Column('role_id', sa.Integer(), nullable=False),
sa.Column('description', sa.Text(), nullable=False),
sa.ForeignKeyConstraint(['organization_id'], ['user.id'], ),
sa.ForeignKeyConstraint(['role_id'], ['teamrole.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index('team_name', 'team', ['name'], unique=False)
op.create_index('team_name_organization_id', 'team', ['name', 'organization_id'], unique=True)
op.create_index('team_organization_id', 'team', ['organization_id'], unique=False)
op.create_index('team_role_id', 'team', ['role_id'], unique=False)
op.create_table('repository',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('namespace', sa.String(length=255), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('visibility_id', sa.Integer(), nullable=False),
sa.Column('description', sa.Text(), nullable=True),
sa.Column('badge_token', sa.String(length=255), nullable=False),
sa.ForeignKeyConstraint(['visibility_id'], ['visibility.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index('repository_namespace_name', 'repository', ['namespace', 'name'], unique=True)
op.create_index('repository_visibility_id', 'repository', ['visibility_id'], unique=False)
op.create_table('accesstoken',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('friendly_name', sa.String(length=255), nullable=True),
sa.Column('code', sa.String(length=255), nullable=False),
sa.Column('repository_id', sa.Integer(), nullable=False),
sa.Column('created', sa.DateTime(), nullable=False),
sa.Column('role_id', sa.Integer(), nullable=False),
sa.Column('temporary', sa.Boolean(), nullable=False),
sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], ),
sa.ForeignKeyConstraint(['role_id'], ['role.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index('accesstoken_code', 'accesstoken', ['code'], unique=True)
op.create_index('accesstoken_repository_id', 'accesstoken', ['repository_id'], unique=False)
op.create_index('accesstoken_role_id', 'accesstoken', ['role_id'], unique=False)
op.create_table('repositorypermission',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('team_id', sa.Integer(), nullable=True),
sa.Column('user_id', sa.Integer(), nullable=True),
sa.Column('repository_id', sa.Integer(), nullable=False),
sa.Column('role_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], ),
sa.ForeignKeyConstraint(['role_id'], ['role.id'], ),
sa.ForeignKeyConstraint(['team_id'], ['team.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index('repositorypermission_repository_id', 'repositorypermission', ['repository_id'], unique=False)
op.create_index('repositorypermission_role_id', 'repositorypermission', ['role_id'], unique=False)
op.create_index('repositorypermission_team_id', 'repositorypermission', ['team_id'], unique=False)
op.create_index('repositorypermission_team_id_repository_id', 'repositorypermission', ['team_id', 'repository_id'], unique=True)
op.create_index('repositorypermission_user_id', 'repositorypermission', ['user_id'], unique=False)
op.create_index('repositorypermission_user_id_repository_id', 'repositorypermission', ['user_id', 'repository_id'], unique=True)
op.create_table('oauthaccesstoken',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('uuid', sa.String(length=255), nullable=False),
sa.Column('application_id', sa.Integer(), nullable=False),
sa.Column('authorized_user_id', sa.Integer(), nullable=False),
sa.Column('scope', sa.String(length=255), nullable=False),
sa.Column('access_token', sa.String(length=255), nullable=False),
sa.Column('token_type', sa.String(length=255), nullable=False),
sa.Column('expires_at', sa.DateTime(), nullable=False),
sa.Column('refresh_token', sa.String(length=255), nullable=True),
sa.Column('data', sa.Text(), nullable=False),
sa.ForeignKeyConstraint(['application_id'], ['oauthapplication.id'], ),
sa.ForeignKeyConstraint(['authorized_user_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index('oauthaccesstoken_access_token', 'oauthaccesstoken', ['access_token'], unique=False)
op.create_index('oauthaccesstoken_application_id', 'oauthaccesstoken', ['application_id'], unique=False)
op.create_index('oauthaccesstoken_authorized_user_id', 'oauthaccesstoken', ['authorized_user_id'], unique=False)
op.create_index('oauthaccesstoken_refresh_token', 'oauthaccesstoken', ['refresh_token'], unique=False)
op.create_index('oauthaccesstoken_uuid', 'oauthaccesstoken', ['uuid'], unique=False)
op.create_table('teammember',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('team_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['team_id'], ['team.id'], ),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index('teammember_team_id', 'teammember', ['team_id'], unique=False)
op.create_index('teammember_user_id', 'teammember', ['user_id'], unique=False)
op.create_index('teammember_user_id_team_id', 'teammember', ['user_id', 'team_id'], unique=True)
op.create_table('webhook',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('public_id', sa.String(length=255), nullable=False),
sa.Column('repository_id', sa.Integer(), nullable=False),
sa.Column('parameters', sa.Text(), nullable=False),
sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index('webhook_public_id', 'webhook', ['public_id'], unique=True)
op.create_index('webhook_repository_id', 'webhook', ['repository_id'], unique=False)
op.create_table('oauthauthorizationcode',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('application_id', sa.Integer(), nullable=False),
sa.Column('code', sa.String(length=255), nullable=False),
sa.Column('scope', sa.String(length=255), nullable=False),
sa.Column('data', sa.Text(), nullable=False),
sa.ForeignKeyConstraint(['application_id'], ['oauthapplication.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index('oauthauthorizationcode_application_id', 'oauthauthorizationcode', ['application_id'], unique=False)
op.create_index('oauthauthorizationcode_code', 'oauthauthorizationcode', ['code'], unique=False)
op.create_table('image',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('docker_image_id', sa.String(length=255), nullable=False),
sa.Column('checksum', sa.String(length=255), nullable=True),
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('comment', sa.Text(), nullable=True),
sa.Column('command', sa.Text(), nullable=True),
sa.Column('repository_id', sa.Integer(), nullable=False),
sa.Column('image_size', sa.BigInteger(), nullable=True),
sa.Column('ancestors', sa.String(length=60535), nullable=True),
sa.Column('storage_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], ),
sa.ForeignKeyConstraint(['storage_id'], ['imagestorage.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index('image_ancestors', 'image', ['ancestors'], unique=False, mysql_length=767)
op.create_index('image_repository_id', 'image', ['repository_id'], unique=False)
op.create_index('image_repository_id_docker_image_id', 'image', ['repository_id', 'docker_image_id'], unique=True)
op.create_index('image_storage_id', 'image', ['storage_id'], unique=False)
op.create_table('permissionprototype',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('org_id', sa.Integer(), nullable=False),
sa.Column('uuid', sa.String(length=255), nullable=False),
sa.Column('activating_user_id', sa.Integer(), nullable=True),
sa.Column('delegate_user_id', sa.Integer(), nullable=True),
sa.Column('delegate_team_id', sa.Integer(), nullable=True),
sa.Column('role_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['activating_user_id'], ['user.id'], ),
sa.ForeignKeyConstraint(['delegate_team_id'], ['team.id'], ),
sa.ForeignKeyConstraint(['delegate_user_id'], ['user.id'], ),
sa.ForeignKeyConstraint(['org_id'], ['user.id'], ),
sa.ForeignKeyConstraint(['role_id'], ['role.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index('permissionprototype_activating_user_id', 'permissionprototype', ['activating_user_id'], unique=False)
op.create_index('permissionprototype_delegate_team_id', 'permissionprototype', ['delegate_team_id'], unique=False)
op.create_index('permissionprototype_delegate_user_id', 'permissionprototype', ['delegate_user_id'], unique=False)
op.create_index('permissionprototype_org_id', 'permissionprototype', ['org_id'], unique=False)
op.create_index('permissionprototype_org_id_activating_user_id', 'permissionprototype', ['org_id', 'activating_user_id'], unique=False)
op.create_index('permissionprototype_role_id', 'permissionprototype', ['role_id'], unique=False)
op.create_table('repositorytag',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('image_id', sa.Integer(), nullable=False),
sa.Column('repository_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['image_id'], ['image.id'], ),
sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index('repositorytag_image_id', 'repositorytag', ['image_id'], unique=False)
op.create_index('repositorytag_repository_id', 'repositorytag', ['repository_id'], unique=False)
op.create_index('repositorytag_repository_id_name', 'repositorytag', ['repository_id', 'name'], unique=True)
op.create_table('logentry',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('kind_id', sa.Integer(), nullable=False),
sa.Column('account_id', sa.Integer(), nullable=False),
sa.Column('performer_id', sa.Integer(), nullable=True),
sa.Column('repository_id', sa.Integer(), nullable=True),
sa.Column('access_token_id', sa.Integer(), nullable=True),
sa.Column('datetime', sa.DateTime(), nullable=False),
sa.Column('ip', sa.String(length=255), nullable=True),
sa.Column('metadata_json', sa.Text(), nullable=False),
sa.ForeignKeyConstraint(['access_token_id'], ['accesstoken.id'], ),
sa.ForeignKeyConstraint(['account_id'], ['user.id'], ),
sa.ForeignKeyConstraint(['kind_id'], ['logentrykind.id'], ),
sa.ForeignKeyConstraint(['performer_id'], ['user.id'], ),
sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index('logentry_access_token_id', 'logentry', ['access_token_id'], unique=False)
op.create_index('logentry_account_id', 'logentry', ['account_id'], unique=False)
op.create_index('logentry_datetime', 'logentry', ['datetime'], unique=False)
op.create_index('logentry_kind_id', 'logentry', ['kind_id'], unique=False)
op.create_index('logentry_performer_id', 'logentry', ['performer_id'], unique=False)
op.create_index('logentry_repository_id', 'logentry', ['repository_id'], unique=False)
op.create_table('repositorybuildtrigger',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('uuid', sa.String(length=255), nullable=False),
sa.Column('service_id', sa.Integer(), nullable=False),
sa.Column('repository_id', sa.Integer(), nullable=False),
sa.Column('connected_user_id', sa.Integer(), nullable=False),
sa.Column('auth_token', sa.String(length=255), nullable=False),
sa.Column('config', sa.Text(), nullable=False),
sa.Column('write_token_id', sa.Integer(), nullable=True),
sa.Column('pull_robot_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['connected_user_id'], ['user.id'], ),
sa.ForeignKeyConstraint(['pull_robot_id'], ['user.id'], ),
sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], ),
sa.ForeignKeyConstraint(['service_id'], ['buildtriggerservice.id'], ),
sa.ForeignKeyConstraint(['write_token_id'], ['accesstoken.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index('repositorybuildtrigger_connected_user_id', 'repositorybuildtrigger', ['connected_user_id'], unique=False)
op.create_index('repositorybuildtrigger_pull_robot_id', 'repositorybuildtrigger', ['pull_robot_id'], unique=False)
op.create_index('repositorybuildtrigger_repository_id', 'repositorybuildtrigger', ['repository_id'], unique=False)
op.create_index('repositorybuildtrigger_service_id', 'repositorybuildtrigger', ['service_id'], unique=False)
op.create_index('repositorybuildtrigger_write_token_id', 'repositorybuildtrigger', ['write_token_id'], unique=False)
op.create_table('repositorybuild',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('uuid', sa.String(length=255), nullable=False),
sa.Column('repository_id', sa.Integer(), nullable=False),
sa.Column('access_token_id', sa.Integer(), nullable=False),
sa.Column('resource_key', sa.String(length=255), nullable=False),
sa.Column('job_config', sa.Text(), nullable=False),
sa.Column('phase', sa.String(length=255), nullable=False),
sa.Column('started', sa.DateTime(), nullable=False),
sa.Column('display_name', sa.String(length=255), nullable=False),
sa.Column('trigger_id', sa.Integer(), nullable=True),
sa.Column('pull_robot_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['access_token_id'], ['accesstoken.id'], ),
sa.ForeignKeyConstraint(['pull_robot_id'], ['user.id'], ),
sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], ),
sa.ForeignKeyConstraint(['trigger_id'], ['repositorybuildtrigger.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index('repositorybuild_access_token_id', 'repositorybuild', ['access_token_id'], unique=False)
op.create_index('repositorybuild_pull_robot_id', 'repositorybuild', ['pull_robot_id'], unique=False)
op.create_index('repositorybuild_repository_id', 'repositorybuild', ['repository_id'], unique=False)
op.create_index('repositorybuild_resource_key', 'repositorybuild', ['resource_key'], unique=False)
op.create_index('repositorybuild_trigger_id', 'repositorybuild', ['trigger_id'], unique=False)
op.create_index('repositorybuild_uuid', 'repositorybuild', ['uuid'], unique=False)
### end Alembic commands ###
def downgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.drop_table('repositorybuild')
op.drop_table('repositorybuildtrigger')
op.drop_table('logentry')
op.drop_table('repositorytag')
op.drop_table('permissionprototype')
op.drop_table('image')
op.drop_table('oauthauthorizationcode')
op.drop_table('webhook')
op.drop_table('teammember')
op.drop_table('oauthaccesstoken')
op.drop_table('repositorypermission')
op.drop_table('accesstoken')
op.drop_table('repository')
op.drop_table('team')
op.drop_table('emailconfirmation')
op.drop_table('notification')
op.drop_table('oauthapplication')
op.drop_table('federatedlogin')
op.drop_table('buildtriggerservice')
op.drop_table('user')
op.drop_table('visibility')
op.drop_table('teamrole')
op.drop_table('notificationkind')
op.drop_table('logentrykind')
op.drop_table('role')
op.drop_table('queueitem')
op.drop_table('imagestorage')
op.drop_table('loginservice')
### end Alembic commands ###
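# Annotation: the loginservice, role, logentrykind, notificationkind,
# teamrole, visibility, and buildtriggerservice tables created above are
# lookup tables standing in for enums; most of the other migrations deleted
# in this commit grow or shrink their rows rather than altering columns.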

View file

@ -1,39 +0,0 @@
"""Add new DerivedStorageForImage table
Revision ID: 5a2445ffe21b
Revises: 1b2bb93ceb82
Create Date: 2015-11-24 11:58:02.956687
"""
# revision identifiers, used by Alembic.
revision = '5a2445ffe21b'
down_revision = '1b2bb93ceb82'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def upgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.create_table('derivedstorageforimage',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('source_image_id', sa.Integer(), nullable=False),
sa.Column('derivative_id', sa.Integer(), nullable=False),
sa.Column('transformation_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['derivative_id'], ['imagestorage.id'], name=op.f('fk_derivedstorageforimage_derivative_id_imagestorage')),
sa.ForeignKeyConstraint(['source_image_id'], ['image.id'], name=op.f('fk_derivedstorageforimage_source_image_id_image')),
sa.ForeignKeyConstraint(['transformation_id'], ['imagestoragetransformation.id'], name=op.f('fk_derivedstorageforimage_transformation_constraint')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_derivedstorageforimage'))
)
op.create_index('derivedstorageforimage_derivative_id', 'derivedstorageforimage', ['derivative_id'], unique=False)
op.create_index('derivedstorageforimage_source_image_id', 'derivedstorageforimage', ['source_image_id'], unique=False)
op.create_index('derivedstorageforimage_source_image_id_transformation_id', 'derivedstorageforimage', ['source_image_id', 'transformation_id'], unique=True)
op.create_index('derivedstorageforimage_transformation_id', 'derivedstorageforimage', ['transformation_id'], unique=False)
### end Alembic commands ###
def downgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.drop_table('derivedstorageforimage')
### end Alembic commands ###
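# Annotation: wrapping a name in op.f() marks it as already conventionalized,
# so Alembic's active naming convention is not applied to it again; that is
# why these constraint names are spelled out in full.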

View file

@ -1,54 +0,0 @@
"""Add signature storage
Revision ID: 5ad999136045
Revises: 228d1af6af1c
Create Date: 2015-02-05 15:01:54.989573
"""
# revision identifiers, used by Alembic.
revision = '5ad999136045'
down_revision = '228d1af6af1c'
from alembic import op
import sqlalchemy as sa
def upgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.create_table('imagestoragesignaturekind',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.PrimaryKeyConstraint('id', name=op.f('pk_imagestoragesignaturekind'))
)
op.create_index('imagestoragesignaturekind_name', 'imagestoragesignaturekind', ['name'], unique=True)
op.create_table('imagestoragesignature',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('storage_id', sa.Integer(), nullable=False),
sa.Column('kind_id', sa.Integer(), nullable=False),
sa.Column('signature', sa.Text(), nullable=True),
sa.Column('uploading', sa.Boolean(), nullable=True),
sa.ForeignKeyConstraint(['kind_id'], ['imagestoragesignaturekind.id'], name=op.f('fk_imagestoragesignature_kind_id_imagestoragesignaturekind')),
sa.ForeignKeyConstraint(['storage_id'], ['imagestorage.id'], name=op.f('fk_imagestoragesignature_storage_id_imagestorage')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_imagestoragesignature'))
)
op.create_index('imagestoragesignature_kind_id', 'imagestoragesignature', ['kind_id'], unique=False)
op.create_index('imagestoragesignature_kind_id_storage_id', 'imagestoragesignature', ['kind_id', 'storage_id'], unique=True)
op.create_index('imagestoragesignature_storage_id', 'imagestoragesignature', ['storage_id'], unique=False)
### end Alembic commands ###
op.bulk_insert(tables.imagestoragetransformation,
[
{'name':'aci'},
])
op.bulk_insert(tables.imagestoragesignaturekind,
[
{'name':'gpg2'},
])
def downgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.drop_table('imagestoragesignature')
op.drop_table('imagestoragesignaturekind')
### end Alembic commands ###

View file

@ -1,24 +0,0 @@
"""Convert slack webhook data
Revision ID: 5b84373e5db
Revises: 1c5b738283a5
Create Date: 2014-12-16 12:02:55.167744
"""
# revision identifiers, used by Alembic.
revision = '5b84373e5db'
down_revision = '1c5b738283a5'
from alembic import op
import sqlalchemy as sa
from util.migrate.migrateslackwebhook import run_slackwebhook_migration
def upgrade(tables):
run_slackwebhook_migration()
def downgrade(tables):
pass

View file

@ -1,41 +0,0 @@
"""Add vulnerability_found event
Revision ID: 5cdc2d819c5
Revises: 50925110da8c
Create Date: 2015-10-13 18:05:32.157858
"""
# revision identifiers, used by Alembic.
revision = '5cdc2d819c5'
down_revision = '50925110da8c'
from alembic import op
import sqlalchemy as sa
def upgrade(tables):
op.bulk_insert(tables.externalnotificationevent,
[
{'name':'vulnerability_found'},
])
op.bulk_insert(tables.notificationkind,
[
{'name':'vulnerability_found'},
])
def downgrade(tables):
op.execute(
(tables.externalnotificationevent.delete()
.where(tables.externalnotificationevent.c.name == op.inline_literal('vulnerability_found')))
)
op.execute(
(tables.notificationkind.delete()
.where(tables.notificationkind.c.name == op.inline_literal('vulnerability_found')))
)

View file

@ -1,26 +0,0 @@
"""add index for repository+datetime to logentry
Revision ID: 67eb43c778b
Revises: 1c3decf6b9c4
Create Date: 2015-04-19 16:00:39.126289
"""
# revision identifiers, used by Alembic.
revision = '67eb43c778b'
down_revision = '1c3decf6b9c4'
from alembic import op
import sqlalchemy as sa
def upgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.create_index('logentry_repository_id_datetime', 'logentry', ['repository_id', 'datetime'], unique=False)
### end Alembic commands ###
def downgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.drop_index('logentry_repository_id_datetime', table_name='logentry')
### end Alembic commands ###

View file

@ -1,25 +0,0 @@
"""add the uncompressed size to image storage
Revision ID: 6f2ecf5afcf
Revises: 13da56878560
Create Date: 2014-09-22 14:39:13.470566
"""
# revision identifiers, used by Alembic.
revision = '6f2ecf5afcf'
down_revision = '13da56878560'
from alembic import op
import sqlalchemy as sa
def upgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.add_column('imagestorage', sa.Column('uncompressed_size', sa.BigInteger(), nullable=True))
### end Alembic commands ###
def downgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.drop_column('imagestorage', 'uncompressed_size')
### end Alembic commands ###

View file

@ -1,33 +0,0 @@
"""Change build queue reference from foreign key to an id.
Revision ID: 707d5191eda
Revises: 4ef04c61fcf9
Create Date: 2015-02-23 12:36:33.814528
"""
# revision identifiers, used by Alembic.
revision = '707d5191eda'
down_revision = '4ef04c61fcf9'
from alembic import op
import sqlalchemy as sa
def upgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.add_column('repositorybuild', sa.Column('queue_id', sa.String(length=255), nullable=True))
op.create_index('repositorybuild_queue_id', 'repositorybuild', ['queue_id'], unique=False)
op.drop_constraint(u'fk_repositorybuild_queue_item_id_queueitem', 'repositorybuild', type_='foreignkey')
op.drop_index('repositorybuild_queue_item_id', table_name='repositorybuild')
op.drop_column('repositorybuild', 'queue_item_id')
### end Alembic commands ###
def downgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.add_column('repositorybuild', sa.Column('queue_item_id', sa.Integer(), autoincrement=False, nullable=True))
op.create_foreign_key(u'fk_repositorybuild_queue_item_id_queueitem', 'repositorybuild', 'queueitem', ['queue_item_id'], ['id'])
op.create_index('repositorybuild_queue_item_id', 'repositorybuild', ['queue_item_id'], unique=False)
op.drop_index('repositorybuild_queue_id', table_name='repositorybuild')
op.drop_column('repositorybuild', 'queue_id')
### end Alembic commands ###
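# Annotation (an inference, not stated in the migration): replacing the real
# foreign key with a plain string queue_id lets queueitem rows be reaped by
# the queue worker without repositorybuild rows holding dangling references.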

View file

@ -1,25 +0,0 @@
"""Remove legacy github column
Revision ID: 73669db7e12
Revises: 35f538da62
Create Date: 2015-11-04 16:18:18.107314
"""
# revision identifiers, used by Alembic.
revision = '73669db7e12'
down_revision = '35f538da62'
from alembic import op
import sqlalchemy as sa
def upgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.drop_column('repositorybuildtrigger', 'used_legacy_github')
### end Alembic commands ###
def downgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.add_column('repositorybuildtrigger', sa.Column('used_legacy_github', sa.Boolean(), nullable=True))
### end Alembic commands ###

View file

@ -1,29 +0,0 @@
"""Add missing indexes
Revision ID: 790d91952fa8
Revises: 1093d8b212bb
Create Date: 2016-08-03 17:05:06.675520
"""
# revision identifiers, used by Alembic.
revision = '790d91952fa8'
down_revision = '1093d8b212bb'
from alembic import op
import sqlalchemy as sa
def upgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.create_index('repositorybuild_repository_id_started_phase', 'repositorybuild', ['repository_id', 'started', 'phase'], unique=False)
op.create_index('repositorybuild_started', 'repositorybuild', ['started'], unique=False)
op.create_index('repositorybuild_started_logs_archived_phase', 'repositorybuild', ['started', 'logs_archived', 'phase'], unique=False)
### end Alembic commands ###
def downgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.drop_index('repositorybuild_started_logs_archived_phase', table_name='repositorybuild')
op.drop_index('repositorybuild_started', table_name='repositorybuild')
op.drop_index('repositorybuild_repository_id_started_phase', table_name='repositorybuild')
### end Alembic commands ###

View file

@ -1,28 +0,0 @@
"""add US West location
Revision ID: 82297d834ad
Revises: 47670cbeced
Create Date: 2014-08-15 13:35:23.834079
"""
# revision identifiers, used by Alembic.
revision = '82297d834ad'
down_revision = '47670cbeced'
from alembic import op
import sqlalchemy as sa
def upgrade(tables):
op.bulk_insert(tables.imagestoragelocation,
[
{'name':'s3_us_west_1'},
])
def downgrade(tables):
op.execute(
(tables.imagestoragelocation.delete()
.where(tables.imagestoragelocation.c.name == op.inline_literal('s3_us_west_1')))
)

View file

@ -1,29 +0,0 @@
"""Add created field to the BlobUpload table
Revision ID: 88e0f440a2f
Revises: 403d02fea323
Create Date: 2015-12-14 15:19:11.825279
"""
# revision identifiers, used by Alembic.
revision = '88e0f440a2f'
down_revision = '403d02fea323'
from alembic import op
import sqlalchemy as sa
from datetime import datetime
def upgrade(tables):
### commands auto generated by Alembic - please adjust! ###
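# The strftime format below deliberately embeds single quotes so that `now`
# renders as a quoted SQL literal inside sa.text(); the column default is
# therefore frozen at migration time rather than computed by the database.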
now = datetime.now().strftime("'%Y-%m-%d %H:%M:%S'")
op.add_column('blobupload', sa.Column('created', sa.DateTime(), nullable=False, server_default=sa.text(now)))
op.create_index('blobupload_created', 'blobupload', ['created'], unique=False)
### end Alembic commands ###
def downgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.drop_index('blobupload_created', table_name='blobupload')
op.drop_column('blobupload', 'created')
### end Alembic commands ###

View file

@ -1,26 +0,0 @@
"""Add logentry index for lookup by account
Revision ID: 8981dabd329f
Revises: 790d91952fa8
Create Date: 2016-08-12 16:50:15.816120
"""
# revision identifiers, used by Alembic.
revision = '8981dabd329f'
down_revision = '790d91952fa8'
from alembic import op
def upgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.create_index('logentry_account_id_datetime', 'logentry', ['account_id', 'datetime'], unique=False)
op.create_index('logentry_performer_id_datetime', 'logentry', ['performer_id', 'datetime'], unique=False)
### end Alembic commands ###
def downgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.drop_index('logentry_account_id_datetime', table_name='logentry')
op.drop_index('logentry_performer_id_datetime', table_name='logentry')
### end Alembic commands ###

View file

@ -1,35 +0,0 @@
"""Add UserRegion table
Revision ID: 9512773a4a2
Revises: 499f6f08de3
Create Date: 2015-09-01 14:17:08.628052
"""
# revision identifiers, used by Alembic.
revision = '9512773a4a2'
down_revision = '499f6f08de3'
from alembic import op
import sqlalchemy as sa
def upgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.create_table('userregion',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('location_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['location_id'], ['imagestoragelocation.id'], name=op.f('fk_userregion_location_id_imagestoragelocation')),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_userregion_user_id_user')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_userregion'))
)
op.create_index('userregion_location_id', 'userregion', ['location_id'], unique=False)
op.create_index('userregion_user_id', 'userregion', ['user_id'], unique=False)
### end Alembic commands ###
def downgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.drop_table('userregion')
### end Alembic commands ###

View file

@ -1,26 +0,0 @@
"""Add backfill for torrent shas and checksums
Revision ID: 956a0833223
Revises: 23ca04d0bc8e
Create Date: 2016-01-08 17:11:07.261123
"""
# revision identifiers, used by Alembic.
revision = '956a0833223'
down_revision = '23ca04d0bc8e'
from app import app
from util.migrate.backfill_content_checksums_and_torrent_pieces import backfill_content_checksums_and_torrent_pieces
def upgrade(tables):
### commands auto generated by Alembic - please adjust! ###
backfill_content_checksums_and_torrent_pieces(app.config['BITTORRENT_PIECE_SIZE'])
### end Alembic commands ###
def downgrade(tables):
### commands auto generated by Alembic - please adjust! ###
pass
### end Alembic commands ###

View file

@ -1,141 +0,0 @@
"""Rename all foreign key constraints to have predictable names
Revision ID: 983247d75af3
Revises: 8981dabd329f
Create Date: 2016-08-08 16:33:00.198527
"""
# revision identifiers, used by Alembic.
revision = '983247d75af3'
down_revision = '8981dabd329f'
from alembic import op
from functools import wraps
import sqlalchemy as sa
import logging
logger = logging.getLogger(__name__)
CORRECT_FKC_NAMES = {
# Names that existed according to the constraint in previous migrations
"fk_accesstoken_kind_id_accesstokenkind",
"fk_blobupload_location_id_imagestoragelocation",
"fk_blobupload_repository_id_repository",
"fk_derivedstorageforimage_derivative_id_imagestorage",
"fk_derivedstorageforimage_source_image_id_image",
"fk_imagestoragesignature_kind_id_imagestoragesignaturekind",
"fk_imagestoragesignature_storage_id_imagestorage",
"fk_quayrelease_region_id_quayregion",
"fk_quayrelease_service_id_quayservice",
"fk_repositoryactioncount_repository_id_repository",
"fk_servicekey_approval_id_servicekeyapproval",
"fk_star_repository_id_repository",
"fk_star_user_id_user",
"fk_tagmanifest_tag_id_repositorytag",
"fk_torrentinfo_storage_id_imagestorage",
"fk_userregion_location_id_imagestoragelocation",
"fk_userregion_user_id_user",
# Names that had to be set manually as truncated versions
'fk_derivedstorageforimage_transformation_constraint',
}
def _generate_all_foreign_key_constraint_names():
conn = op.get_bind()
inspector = sa.inspect(conn.engine)
for table_name, fkc_list in inspector.get_sorted_table_and_fkc_names():
for fkc_name in fkc_list:
yield table_name, fkc_name
def _wrap_create_op(create_foreign_key_func, existing_fk_tuple_set):
fk_name_set = set()
@wraps(create_foreign_key_func)
def wrapped(fk_name, table_name, *args, **kwargs):
fk_name_set.add(fk_name)
if (table_name, fk_name) in existing_fk_tuple_set:
logger.debug('Skipping already correct fkc: %s', fk_name)
else:
logger.debug('Creating foreign key constraint: %s', fk_name)
return create_foreign_key_func(op.f(fk_name), table_name, *args, **kwargs)
return wrapped, fk_name_set
def _disable_constraints():
conn = op.get_bind()
if conn.dialect.name == 'mysql':
logger.debug('Setting mysql foreign_key_checks=0')
op.execute('SET FOREIGN_KEY_CHECKS=0')
else:
logger.warning('Unable to disable foreign key checks for dialect: %s', conn.dialect.name)
def upgrade(tables):
existing_fk_tuples = set(_generate_all_foreign_key_constraint_names())
create_fk, new_fk_name_set = _wrap_create_op(op.create_foreign_key, existing_fk_tuples)
_disable_constraints()
create_fk('fk_accesstoken_role_id_role', 'accesstoken', 'role', ['role_id'], ['id'])
create_fk('fk_accesstoken_repository_id_repository', 'accesstoken', 'repository', ['repository_id'], ['id'])
create_fk('fk_emailconfirmation_user_id_user', 'emailconfirmation', 'user', ['user_id'], ['id'])
create_fk('fk_federatedlogin_user_id_user', 'federatedlogin', 'user', ['user_id'], ['id'])
create_fk('fk_federatedlogin_service_id_loginservice', 'federatedlogin', 'loginservice', ['service_id'], ['id'])
create_fk('fk_image_repository_id_repository', 'image', 'repository', ['repository_id'], ['id'])
create_fk('fk_image_storage_id_imagestorage', 'image', 'imagestorage', ['storage_id'], ['id'])
create_fk('fk_imagestorageplacement_location_id_imagestoragelocation', 'imagestorageplacement', 'imagestoragelocation', ['location_id'], ['id'])
create_fk('fk_imagestorageplacement_storage_id_imagestorage', 'imagestorageplacement', 'imagestorage', ['storage_id'], ['id'])
create_fk('fk_logentry_kind_id_logentrykind', 'logentry', 'logentrykind', ['kind_id'], ['id'])
create_fk('fk_notification_target_id_user', 'notification', 'user', ['target_id'], ['id'])
create_fk('fk_notification_kind_id_notificationkind', 'notification', 'notificationkind', ['kind_id'], ['id'])
create_fk('fk_oauthaccesstoken_authorized_user_id_user', 'oauthaccesstoken', 'user', ['authorized_user_id'], ['id'])
create_fk('fk_oauthaccesstoken_application_id_oauthapplication', 'oauthaccesstoken', 'oauthapplication', ['application_id'], ['id'])
create_fk('fk_oauthapplication_organization_id_user', 'oauthapplication', 'user', ['organization_id'], ['id'])
create_fk('fk_oauthauthorizationcode_application_id_oauthapplication', 'oauthauthorizationcode', 'oauthapplication', ['application_id'], ['id'])
create_fk('fk_permissionprototype_delegate_team_id_team', 'permissionprototype', 'team', ['delegate_team_id'], ['id'])
create_fk('fk_permissionprototype_role_id_role', 'permissionprototype', 'role', ['role_id'], ['id'])
create_fk('fk_permissionprototype_delegate_user_id_user', 'permissionprototype', 'user', ['delegate_user_id'], ['id'])
create_fk('fk_permissionprototype_activating_user_id_user', 'permissionprototype', 'user', ['activating_user_id'], ['id'])
create_fk('fk_permissionprototype_org_id_user', 'permissionprototype', 'user', ['org_id'], ['id'])
create_fk('fk_repository_namespace_user_id_user', 'repository', 'user', ['namespace_user_id'], ['id'])
create_fk('fk_repository_visibility_id_visibility', 'repository', 'visibility', ['visibility_id'], ['id'])
create_fk('fk_repositoryauthorizedemail_repository_id_repository', 'repositoryauthorizedemail', 'repository', ['repository_id'], ['id'])
create_fk('fk_repositorybuild_access_token_id_accesstoken', 'repositorybuild', 'accesstoken', ['access_token_id'], ['id'])
create_fk('fk_repositorybuild_pull_robot_id_user', 'repositorybuild', 'user', ['pull_robot_id'], ['id'])
create_fk('fk_repositorybuild_repository_id_repository', 'repositorybuild', 'repository', ['repository_id'], ['id'])
create_fk('fk_repositorybuild_trigger_id_repositorybuildtrigger', 'repositorybuild', 'repositorybuildtrigger', ['trigger_id'], ['id'])
create_fk('fk_repositorybuildtrigger_repository_id_repository', 'repositorybuildtrigger', 'repository', ['repository_id'], ['id'])
create_fk('fk_repositorybuildtrigger_connected_user_id_user', 'repositorybuildtrigger', 'user', ['connected_user_id'], ['id'])
create_fk('fk_repositorybuildtrigger_service_id_buildtriggerservice', 'repositorybuildtrigger', 'buildtriggerservice', ['service_id'], ['id'])
create_fk('fk_repositorybuildtrigger_pull_robot_id_user', 'repositorybuildtrigger', 'user', ['pull_robot_id'], ['id'])
create_fk('fk_repositorybuildtrigger_write_token_id_accesstoken', 'repositorybuildtrigger', 'accesstoken', ['write_token_id'], ['id'])
create_fk('fk_repositorynotification_method_id_externalnotificationmethod', 'repositorynotification', 'externalnotificationmethod', ['method_id'], ['id'])
create_fk('fk_repositorynotification_repository_id_repository', 'repositorynotification', 'repository', ['repository_id'], ['id'])
create_fk('fk_repositorynotification_event_id_externalnotificationevent', 'repositorynotification', 'externalnotificationevent', ['event_id'], ['id'])
create_fk('fk_repositorypermission_role_id_role', 'repositorypermission', 'role', ['role_id'], ['id'])
create_fk('fk_repositorypermission_user_id_user', 'repositorypermission', 'user', ['user_id'], ['id'])
create_fk('fk_repositorypermission_repository_id_repository', 'repositorypermission', 'repository', ['repository_id'], ['id'])
create_fk('fk_repositorypermission_team_id_team', 'repositorypermission', 'team', ['team_id'], ['id'])
create_fk('fk_repositorytag_repository_id_repository', 'repositorytag', 'repository', ['repository_id'], ['id'])
create_fk('fk_repositorytag_image_id_image', 'repositorytag', 'image', ['image_id'], ['id'])
create_fk('fk_team_organization_id_user', 'team', 'user', ['organization_id'], ['id'])
create_fk('fk_team_role_id_teamrole', 'team', 'teamrole', ['role_id'], ['id'])
create_fk('fk_teammember_user_id_user', 'teammember', 'user', ['user_id'], ['id'])
create_fk('fk_teammember_team_id_team', 'teammember', 'team', ['team_id'], ['id'])
create_fk('fk_teammemberinvite_inviter_id_user', 'teammemberinvite', 'user', ['inviter_id'], ['id'])
create_fk('fk_teammemberinvite_team_id_team', 'teammemberinvite', 'team', ['team_id'], ['id'])
create_fk('fk_teammemberinvite_user_id_user', 'teammemberinvite', 'user', ['user_id'], ['id'])
# Drop all of the fk names that aren't correct
final_correct_fk_names = new_fk_name_set | CORRECT_FKC_NAMES
for table_name, fk_name in existing_fk_tuples:
if fk_name not in final_correct_fk_names:
logger.debug("dropping: %s", fk_name)
op.drop_constraint(fk_name, table_name, type_='foreignkey')
def downgrade(tables):
# Intentionally a no-op -- the constraint renames are not worth reversing.
pass
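# Annotation: the wrapper above makes the rename pass idempotent -- any
# (table, name) pair that already matches is skipped, everything else is
# recreated under its predictable name, and leftovers outside the final
# correct set are dropped. FOREIGN_KEY_CHECKS=0 keeps MySQL from re-validating
# existing rows while constraints are dropped and recreated.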

View file

@ -1,29 +0,0 @@
"""Allow the namespace column to be nullable.
Revision ID: 9a1087b007d
Revises: 3f4fe1194671
Create Date: 2014-10-01 16:11:21.277226
"""
# revision identifiers, used by Alembic.
revision = '9a1087b007d'
down_revision = '3f4fe1194671'
from alembic import op
import sqlalchemy as sa
def upgrade(tables):
op.drop_index('repository_namespace_name', table_name='repository')
op.alter_column('repository', 'namespace', nullable=True, existing_type=sa.String(length=255),
server_default=sa.text('NULL'))
def downgrade(tables):
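# Repopulate the denormalized namespace column from the owning user's
# username before the NOT NULL constraint and unique index are restored;
# format_table() is used so the table name is quoted correctly ("user" is
# a reserved word on PostgreSQL).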
conn = op.get_bind()
user_table_name_escaped = conn.dialect.identifier_preparer.format_table(tables['user'])
conn.execute('update repository set namespace = (select username from {0} where {0}.id = repository.namespace_user_id) where namespace is NULL'.format(user_table_name_escaped))
op.create_index('repository_namespace_name', 'repository', ['namespace', 'name'], unique=True)
op.alter_column('repository', 'namespace', nullable=False, existing_type=sa.String(length=255))

View file

@ -1,26 +0,0 @@
"""Adding in messages table
Revision ID: a3002f7638d5
Revises: c9b91bee7554
Create Date: 2016-10-07 11:14:15.054546
"""
# revision identifiers, used by Alembic.
revision = 'a3002f7638d5'
down_revision = 'c9b91bee7554'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def upgrade(tables):
op.create_table('messages',
sa.Column("id", sa.INTEGER, primary_key=True),
sa.Column("content", sa.UnicodeText, nullable=False),
sa.PrimaryKeyConstraint('id', name=op.f('pk_messages'))
)
def downgrade(tables):
op.drop_table('messages')

View file

@ -1,91 +0,0 @@
"""initial keyserver
Revision ID: a3ba52d02dec
Revises: e4129c93e477
Create Date: 2016-03-30 15:28:32.036753
"""
# revision identifiers, used by Alembic.
revision = 'a3ba52d02dec'
down_revision = 'e4129c93e477'
from alembic import op
import sqlalchemy as sa
from util.migrate import UTF8LongText
def upgrade(tables):
op.create_table(
'servicekeyapproval',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('approver_id', sa.Integer(), nullable=True),
sa.Column('approval_type', sa.String(length=255), nullable=False),
sa.Column('approved_date', sa.DateTime(), nullable=False),
sa.Column('notes', UTF8LongText(), nullable=False),
sa.PrimaryKeyConstraint('id', name=op.f('pk_servicekeyapproval')),
)
op.create_index('servicekeyapproval_approval_type', 'servicekeyapproval', ['approval_type'], unique=False)
op.create_index('servicekeyapproval_approver_id', 'servicekeyapproval', ['approver_id'], unique=False)
op.bulk_insert(
tables.notificationkind,
[{'name':'service_key_submitted'}],
)
op.bulk_insert(tables.logentrykind, [
{'name':'service_key_create'},
{'name':'service_key_approve'},
{'name':'service_key_delete'},
{'name':'service_key_modify'},
{'name':'service_key_extend'},
{'name':'service_key_rotate'},
])
op.create_table(
'servicekey',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('kid', sa.String(length=255), nullable=False),
sa.Column('service', sa.String(length=255), nullable=False),
sa.Column('jwk', UTF8LongText(), nullable=False),
sa.Column('metadata', UTF8LongText(), nullable=False),
sa.Column('created_date', sa.DateTime(), nullable=False),
sa.Column('expiration_date', sa.DateTime(), nullable=True),
sa.Column('rotation_duration', sa.Integer(), nullable=True),
sa.Column('approval_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['approval_id'], ['servicekeyapproval.id'],
name=op.f('fk_servicekey_approval_id_servicekeyapproval')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_servicekey')),
)
op.create_index('servicekey_approval_id', 'servicekey', ['approval_id'], unique=False)
op.create_index('servicekey_kid', 'servicekey', ['kid'], unique=True)
op.create_index('servicekey_service', 'servicekey', ['service'], unique=False)
op.add_column(u'notification', sa.Column('lookup_path', sa.String(length=255), nullable=True))
op.create_index('notification_lookup_path', 'notification', ['lookup_path'], unique=False)
def downgrade(tables):
op.execute(tables.logentrykind.delete().where(tables.logentrykind.c.name == op.inline_literal('service_key_create')))
op.execute(tables.logentrykind.delete().where(tables.logentrykind.c.name == op.inline_literal('service_key_approve')))
op.execute(tables.logentrykind.delete().where(tables.logentrykind.c.name == op.inline_literal('service_key_delete')))
op.execute(tables.logentrykind.delete().where(tables.logentrykind.c.name == op.inline_literal('service_key_modify')))
op.execute(tables.logentrykind.delete().where(tables.logentrykind.c.name == op.inline_literal('service_key_extend')))
op.execute(tables.logentrykind.delete().where(tables.logentrykind.c.name == op.inline_literal('service_key_rotate')))
op.execute(tables.notificationkind.delete().where(tables.notificationkind.c.name == op.inline_literal('service_key_submitted')))
op.drop_column(u'notification', 'lookup_path')
op.drop_table('servicekey')
op.drop_table('servicekeyapproval')
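The six service_key_* deletes differ only in the kind name; the same cleanup can be written as a loop. A sketch, reusing the op and tables objects already in scope in this migration:

SERVICE_KEY_LOG_KINDS = [
    'service_key_create', 'service_key_approve', 'service_key_delete',
    'service_key_modify', 'service_key_extend', 'service_key_rotate',
]

def delete_service_key_log_kinds(tables):
    # Equivalent to the repeated op.execute() calls above.
    for kind in SERVICE_KEY_LOG_KINDS:
        op.execute(tables.logentrykind.delete()
                   .where(tables.logentrykind.c.name == op.inline_literal(kind)))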

View file

@ -1,22 +0,0 @@
"""Add an index to the uuid in the image storage table.
Revision ID: b1d41e2071b
Revises: 9a1087b007d
Create Date: 2014-10-06 18:42:10.021235
"""
# revision identifiers, used by Alembic.
revision = 'b1d41e2071b'
down_revision = '9a1087b007d'
from alembic import op
import sqlalchemy as sa
def upgrade(tables):
op.create_index('imagestorage_uuid', 'imagestorage', ['uuid'], unique=True)
def downgrade(tables):
op.drop_index('imagestorage_uuid', table_name='imagestorage')

View file

@ -1,55 +0,0 @@
"""Add placements and locations to the db.
Revision ID: bcdde200a1b
Revises: 201d55b38649
Create Date: 2014-06-18 13:32:42.907922
"""
# revision identifiers, used by Alembic.
revision = 'bcdde200a1b'
down_revision = '201d55b38649'
from alembic import op
import sqlalchemy as sa
def upgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.create_table('imagestoragelocation',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.PrimaryKeyConstraint('id')
)
op.create_index('imagestoragelocation_name', 'imagestoragelocation', ['name'], unique=True)
op.bulk_insert(tables.imagestoragelocation,
[
{'name':'s3_us_east_1'},
{'name':'s3_eu_west_1'},
{'name':'s3_ap_southeast_1'},
{'name':'s3_ap_southeast_2'},
{'name':'s3_ap_northeast_1'},
{'name':'s3_sa_east_1'},
{'name':'local'},
])
op.create_table('imagestorageplacement',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('storage_id', sa.Integer(), nullable=False),
sa.Column('location_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['location_id'], ['imagestoragelocation.id'], ),
sa.ForeignKeyConstraint(['storage_id'], ['imagestorage.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index('imagestorageplacement_location_id', 'imagestorageplacement', ['location_id'], unique=False)
op.create_index('imagestorageplacement_storage_id', 'imagestorageplacement', ['storage_id'], unique=False)
op.create_index('imagestorageplacement_storage_id_location_id', 'imagestorageplacement', ['storage_id', 'location_id'], unique=True)
### end Alembic commands ###
def downgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.drop_table('imagestorageplacement')
op.drop_table('imagestoragelocation')
### end Alembic commands ###
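imagestorageplacement is a plain many-to-many join table between storages and locations. A sketch of the lookup it enables, written against the column names above (SQLAlchemy core; the helper and connection are illustrative):

import sqlalchemy as sa
from sqlalchemy.sql import table, column

placement = table('imagestorageplacement', column('storage_id'), column('location_id'))
location = table('imagestoragelocation', column('id'), column('name'))

def locations_for_storage(conn, storage_id):
    # Follow the placement rows for one storage to their location names.
    query = (sa.select([location.c.name])
             .select_from(placement.join(location,
                                         placement.c.location_id == location.c.id))
             .where(placement.c.storage_id == storage_id))
    return [name for (name,) in conn.execute(query)]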

View file

@ -1,23 +0,0 @@
"""Add uuid to messages
Revision ID: c156deb8845d
Revises: a3002f7638d5
Create Date: 2016-10-11 15:44:29.450181
"""
# revision identifiers, used by Alembic.
revision = 'c156deb8845d'
down_revision = 'a3002f7638d5'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def upgrade(tables):
op.add_column('messages', sa.Column('uuid', sa.String(length=36), nullable=True))
def downgrade(tables):
op.drop_column('messages', 'uuid')

View file

@ -0,0 +1,954 @@
"""Reset our migrations with a required update
Revision ID: c156deb8845d
Revises: None
Create Date: 2016-11-08 11:58:11.110762
"""
# revision identifiers, used by Alembic.
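# Note: this intentionally reuses the revision ID of the last pre-collapse
# migration ('Add uuid to messages' above), so databases already stamped at
# c156deb8845d treat this collapsed base migration as applied and skip it.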
revision = 'c156deb8845d'
down_revision = None
from alembic import op
import sqlalchemy as sa
from util.migrate import UTF8LongText, UTF8CharField
from datetime import datetime
def upgrade(tables):
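# The strftime pattern includes literal single quotes, so `now` is already a
# quoted SQL string literal, ready to be inlined via sa.text() as a server
# default further down (see the blobupload 'created' column).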
now = datetime.now().strftime("'%Y-%m-%d %H:%M:%S'")
op.create_table('accesstokenkind',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.PrimaryKeyConstraint('id', name=op.f('pk_accesstokenkind'))
)
op.create_index('accesstokenkind_name', 'accesstokenkind', ['name'], unique=True)
op.create_table('buildtriggerservice',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.PrimaryKeyConstraint('id', name=op.f('pk_buildtriggerservice'))
)
op.create_index('buildtriggerservice_name', 'buildtriggerservice', ['name'], unique=True)
op.create_table('externalnotificationevent',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.PrimaryKeyConstraint('id', name=op.f('pk_externalnotificationevent'))
)
op.create_index('externalnotificationevent_name', 'externalnotificationevent', ['name'], unique=True)
op.create_table('externalnotificationmethod',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.PrimaryKeyConstraint('id', name=op.f('pk_externalnotificationmethod'))
)
op.create_index('externalnotificationmethod_name', 'externalnotificationmethod', ['name'], unique=True)
op.create_table('imagestorage',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('uuid', sa.String(length=255), nullable=False),
sa.Column('checksum', sa.String(length=255), nullable=True),
sa.Column('image_size', sa.BigInteger(), nullable=True),
sa.Column('uncompressed_size', sa.BigInteger(), nullable=True),
sa.Column('uploading', sa.Boolean(), nullable=True),
sa.Column('cas_path', sa.Boolean(), nullable=False, server_default=sa.sql.expression.false()),
sa.Column('content_checksum', sa.String(length=255), nullable=True),
sa.PrimaryKeyConstraint('id', name=op.f('pk_imagestorage'))
)
op.create_index('imagestorage_content_checksum', 'imagestorage', ['content_checksum'], unique=False)
op.create_index('imagestorage_uuid', 'imagestorage', ['uuid'], unique=True)
op.create_table('imagestoragelocation',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.PrimaryKeyConstraint('id', name=op.f('pk_imagestoragelocation'))
)
op.create_index('imagestoragelocation_name', 'imagestoragelocation', ['name'], unique=True)
op.create_table('imagestoragesignaturekind',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.PrimaryKeyConstraint('id', name=op.f('pk_imagestoragesignaturekind'))
)
op.create_index('imagestoragesignaturekind_name', 'imagestoragesignaturekind', ['name'], unique=True)
op.create_table('imagestoragetransformation',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.PrimaryKeyConstraint('id', name=op.f('pk_imagestoragetransformation'))
)
op.create_index('imagestoragetransformation_name', 'imagestoragetransformation', ['name'], unique=True)
op.create_table('labelsourcetype',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('mutable', sa.Boolean(), nullable=False),
sa.PrimaryKeyConstraint('id', name=op.f('pk_labelsourcetype'))
)
op.create_index('labelsourcetype_name', 'labelsourcetype', ['name'], unique=True)
op.create_table('logentrykind',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.PrimaryKeyConstraint('id', name=op.f('pk_logentrykind'))
)
op.create_index('logentrykind_name', 'logentrykind', ['name'], unique=True)
op.create_table('loginservice',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.PrimaryKeyConstraint('id', name=op.f('pk_loginservice'))
)
op.create_index('loginservice_name', 'loginservice', ['name'], unique=True)
op.create_table('mediatype',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.PrimaryKeyConstraint('id', name=op.f('pk_mediatype'))
)
op.create_index('mediatype_name', 'mediatype', ['name'], unique=True)
op.create_table('messages',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('content', sa.Text(), nullable=False),
sa.Column('uuid', sa.String(length=36), nullable=True),
sa.PrimaryKeyConstraint('id', name=op.f('pk_messages'))
)
op.create_table('notificationkind',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.PrimaryKeyConstraint('id', name=op.f('pk_notificationkind'))
)
op.create_index('notificationkind_name', 'notificationkind', ['name'], unique=True)
op.create_table('quayregion',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.PrimaryKeyConstraint('id', name=op.f('pk_quayregion'))
)
op.create_index('quayregion_name', 'quayregion', ['name'], unique=True)
op.create_table('quayservice',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.PrimaryKeyConstraint('id', name=op.f('pk_quayservice'))
)
op.create_index('quayservice_name', 'quayservice', ['name'], unique=True)
op.create_table('queueitem',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('queue_name', sa.String(length=1024), nullable=False),
sa.Column('body', sa.Text(), nullable=False),
sa.Column('available_after', sa.DateTime(), nullable=False),
sa.Column('available', sa.Boolean(), nullable=False),
sa.Column('processing_expires', sa.DateTime(), nullable=True),
sa.Column('retries_remaining', sa.Integer(), nullable=False),
sa.PrimaryKeyConstraint('id', name=op.f('pk_queueitem'))
)
op.create_index('queueitem_available', 'queueitem', ['available'], unique=False)
op.create_index('queueitem_available_after', 'queueitem', ['available_after'], unique=False)
op.create_index('queueitem_processing_expires', 'queueitem', ['processing_expires'], unique=False)
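# queue_name is VARCHAR(1024); mysql_length caps the indexed prefix at 767
# bytes, InnoDB's historical key-prefix limit.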
op.create_index('queueitem_queue_name', 'queueitem', ['queue_name'], unique=False, mysql_length=767)
op.create_index('queueitem_retries_remaining', 'queueitem', ['retries_remaining'], unique=False)
op.create_table('role',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.PrimaryKeyConstraint('id', name=op.f('pk_role'))
)
op.create_index('role_name', 'role', ['name'], unique=True)
op.create_table('servicekeyapproval',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('approver_id', sa.Integer(), nullable=True),
sa.Column('approval_type', sa.String(length=255), nullable=False),
sa.Column('approved_date', sa.DateTime(), nullable=False),
sa.Column('notes', UTF8LongText(), nullable=False),
sa.PrimaryKeyConstraint('id', name=op.f('pk_servicekeyapproval'))
)
op.create_index('servicekeyapproval_approval_type', 'servicekeyapproval', ['approval_type'], unique=False)
op.create_index('servicekeyapproval_approver_id', 'servicekeyapproval', ['approver_id'], unique=False)
op.create_table('teamrole',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.PrimaryKeyConstraint('id', name=op.f('pk_teamrole'))
)
op.create_index('teamrole_name', 'teamrole', ['name'], unique=False)
op.create_table('user',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('uuid', sa.String(length=36), nullable=True),
sa.Column('username', sa.String(length=255), nullable=False),
sa.Column('password_hash', sa.String(length=255), nullable=True),
sa.Column('email', sa.String(length=255), nullable=False),
sa.Column('verified', sa.Boolean(), nullable=False),
sa.Column('stripe_id', sa.String(length=255), nullable=True),
sa.Column('organization', sa.Boolean(), nullable=False),
sa.Column('robot', sa.Boolean(), nullable=False),
sa.Column('invoice_email', sa.Boolean(), nullable=False),
sa.Column('invalid_login_attempts', sa.Integer(), nullable=False, server_default='0'),
sa.Column('last_invalid_login', sa.DateTime(), nullable=False),
sa.Column('removed_tag_expiration_s', sa.Integer(), nullable=False, server_default='1209600'),
sa.Column('enabled', sa.Boolean(), nullable=False, server_default=sa.sql.expression.true()),
sa.Column('invoice_email_address', sa.String(length=255), nullable=True),
sa.PrimaryKeyConstraint('id', name=op.f('pk_user'))
)
op.create_index('user_email', 'user', ['email'], unique=True)
op.create_index('user_invoice_email_address', 'user', ['invoice_email_address'], unique=False)
op.create_index('user_organization', 'user', ['organization'], unique=False)
op.create_index('user_robot', 'user', ['robot'], unique=False)
op.create_index('user_stripe_id', 'user', ['stripe_id'], unique=False)
op.create_index('user_username', 'user', ['username'], unique=True)
op.create_table('visibility',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.PrimaryKeyConstraint('id', name=op.f('pk_visibility'))
)
op.create_index('visibility_name', 'visibility', ['name'], unique=True)
op.create_table('emailconfirmation',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('code', sa.String(length=255), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('pw_reset', sa.Boolean(), nullable=False),
sa.Column('new_email', sa.String(length=255), nullable=True),
sa.Column('email_confirm', sa.Boolean(), nullable=False),
sa.Column('created', sa.DateTime(), nullable=False),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_emailconfirmation_user_id_user')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_emailconfirmation'))
)
op.create_index('emailconfirmation_code', 'emailconfirmation', ['code'], unique=True)
op.create_index('emailconfirmation_user_id', 'emailconfirmation', ['user_id'], unique=False)
op.create_table('federatedlogin',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('service_id', sa.Integer(), nullable=False),
sa.Column('service_ident', sa.String(length=255), nullable=False),
sa.Column('metadata_json', sa.Text(), nullable=False),
sa.ForeignKeyConstraint(['service_id'], ['loginservice.id'], name=op.f('fk_federatedlogin_service_id_loginservice')),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_federatedlogin_user_id_user')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_federatedlogin'))
)
op.create_index('federatedlogin_service_id', 'federatedlogin', ['service_id'], unique=False)
op.create_index('federatedlogin_service_id_service_ident', 'federatedlogin', ['service_id', 'service_ident'], unique=True)
op.create_index('federatedlogin_service_id_user_id', 'federatedlogin', ['service_id', 'user_id'], unique=True)
op.create_index('federatedlogin_user_id', 'federatedlogin', ['user_id'], unique=False)
op.create_table('imagestorageplacement',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('storage_id', sa.Integer(), nullable=False),
sa.Column('location_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['location_id'], ['imagestoragelocation.id'], name=op.f('fk_imagestorageplacement_location_id_imagestoragelocation')),
sa.ForeignKeyConstraint(['storage_id'], ['imagestorage.id'], name=op.f('fk_imagestorageplacement_storage_id_imagestorage')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_imagestorageplacement'))
)
op.create_index('imagestorageplacement_location_id', 'imagestorageplacement', ['location_id'], unique=False)
op.create_index('imagestorageplacement_storage_id', 'imagestorageplacement', ['storage_id'], unique=False)
op.create_index('imagestorageplacement_storage_id_location_id', 'imagestorageplacement', ['storage_id', 'location_id'], unique=True)
op.create_table('imagestoragesignature',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('storage_id', sa.Integer(), nullable=False),
sa.Column('kind_id', sa.Integer(), nullable=False),
sa.Column('signature', sa.Text(), nullable=True),
sa.Column('uploading', sa.Boolean(), nullable=True),
sa.ForeignKeyConstraint(['kind_id'], ['imagestoragesignaturekind.id'], name=op.f('fk_imagestoragesignature_kind_id_imagestoragesignaturekind')),
sa.ForeignKeyConstraint(['storage_id'], ['imagestorage.id'], name=op.f('fk_imagestoragesignature_storage_id_imagestorage')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_imagestoragesignature'))
)
op.create_index('imagestoragesignature_kind_id', 'imagestoragesignature', ['kind_id'], unique=False)
op.create_index('imagestoragesignature_kind_id_storage_id', 'imagestoragesignature', ['kind_id', 'storage_id'], unique=True)
op.create_index('imagestoragesignature_storage_id', 'imagestoragesignature', ['storage_id'], unique=False)
op.create_table('label',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('uuid', sa.String(length=255), nullable=False),
sa.Column('key', UTF8CharField(length=255), nullable=False),
sa.Column('value', UTF8LongText(), nullable=False),
sa.Column('media_type_id', sa.Integer(), nullable=False),
sa.Column('source_type_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['media_type_id'], ['mediatype.id'], name=op.f('fk_label_media_type_id_mediatype')),
sa.ForeignKeyConstraint(['source_type_id'], ['labelsourcetype.id'], name=op.f('fk_label_source_type_id_labelsourcetype')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_label'))
)
op.create_index('label_key', 'label', ['key'], unique=False)
op.create_index('label_media_type_id', 'label', ['media_type_id'], unique=False)
op.create_index('label_source_type_id', 'label', ['source_type_id'], unique=False)
op.create_index('label_uuid', 'label', ['uuid'], unique=True)
op.create_table('logentry',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('kind_id', sa.Integer(), nullable=False),
sa.Column('account_id', sa.Integer(), nullable=False),
sa.Column('performer_id', sa.Integer(), nullable=True),
sa.Column('repository_id', sa.Integer(), nullable=True),
sa.Column('datetime', sa.DateTime(), nullable=False),
sa.Column('ip', sa.String(length=255), nullable=True),
sa.Column('metadata_json', sa.Text(), nullable=False),
sa.ForeignKeyConstraint(['kind_id'], ['logentrykind.id'], name=op.f('fk_logentry_kind_id_logentrykind')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_logentry'))
)
op.create_index('logentry_account_id', 'logentry', ['account_id'], unique=False)
op.create_index('logentry_account_id_datetime', 'logentry', ['account_id', 'datetime'], unique=False)
op.create_index('logentry_datetime', 'logentry', ['datetime'], unique=False)
op.create_index('logentry_kind_id', 'logentry', ['kind_id'], unique=False)
op.create_index('logentry_performer_id', 'logentry', ['performer_id'], unique=False)
op.create_index('logentry_performer_id_datetime', 'logentry', ['performer_id', 'datetime'], unique=False)
op.create_index('logentry_repository_id', 'logentry', ['repository_id'], unique=False)
op.create_index('logentry_repository_id_datetime', 'logentry', ['repository_id', 'datetime'], unique=False)
op.create_index('logentry_repository_id_datetime_kind_id', 'logentry', ['repository_id', 'datetime', 'kind_id'], unique=False)
op.create_table('notification',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('uuid', sa.String(length=255), nullable=False),
sa.Column('kind_id', sa.Integer(), nullable=False),
sa.Column('target_id', sa.Integer(), nullable=False),
sa.Column('metadata_json', sa.Text(), nullable=False),
sa.Column('created', sa.DateTime(), nullable=False),
sa.Column('dismissed', sa.Boolean(), nullable=False),
sa.Column('lookup_path', sa.String(length=255), nullable=True),
sa.ForeignKeyConstraint(['kind_id'], ['notificationkind.id'], name=op.f('fk_notification_kind_id_notificationkind')),
sa.ForeignKeyConstraint(['target_id'], ['user.id'], name=op.f('fk_notification_target_id_user')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_notification'))
)
op.create_index('notification_created', 'notification', ['created'], unique=False)
op.create_index('notification_kind_id', 'notification', ['kind_id'], unique=False)
op.create_index('notification_lookup_path', 'notification', ['lookup_path'], unique=False)
op.create_index('notification_target_id', 'notification', ['target_id'], unique=False)
op.create_index('notification_uuid', 'notification', ['uuid'], unique=False)
op.create_table('oauthapplication',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('client_id', sa.String(length=255), nullable=False),
sa.Column('client_secret', sa.String(length=255), nullable=False),
sa.Column('redirect_uri', sa.String(length=255), nullable=False),
sa.Column('application_uri', sa.String(length=255), nullable=False),
sa.Column('organization_id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('description', sa.Text(), nullable=False),
sa.Column('gravatar_email', sa.String(length=255), nullable=True),
sa.ForeignKeyConstraint(['organization_id'], ['user.id'], name=op.f('fk_oauthapplication_organization_id_user')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_oauthapplication'))
)
op.create_index('oauthapplication_client_id', 'oauthapplication', ['client_id'], unique=False)
op.create_index('oauthapplication_organization_id', 'oauthapplication', ['organization_id'], unique=False)
op.create_table('quayrelease',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('service_id', sa.Integer(), nullable=False),
sa.Column('version', sa.String(length=255), nullable=False),
sa.Column('region_id', sa.Integer(), nullable=False),
sa.Column('reverted', sa.Boolean(), nullable=False),
sa.Column('created', sa.DateTime(), nullable=False),
sa.ForeignKeyConstraint(['region_id'], ['quayregion.id'], name=op.f('fk_quayrelease_region_id_quayregion')),
sa.ForeignKeyConstraint(['service_id'], ['quayservice.id'], name=op.f('fk_quayrelease_service_id_quayservice')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_quayrelease'))
)
op.create_index('quayrelease_created', 'quayrelease', ['created'], unique=False)
op.create_index('quayrelease_region_id', 'quayrelease', ['region_id'], unique=False)
op.create_index('quayrelease_service_id', 'quayrelease', ['service_id'], unique=False)
op.create_index('quayrelease_service_id_region_id_created', 'quayrelease', ['service_id', 'region_id', 'created'], unique=False)
op.create_index('quayrelease_service_id_version_region_id', 'quayrelease', ['service_id', 'version', 'region_id'], unique=True)
op.create_table('repository',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('namespace_user_id', sa.Integer(), nullable=True),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('visibility_id', sa.Integer(), nullable=False),
sa.Column('description', sa.Text(), nullable=True),
sa.Column('badge_token', sa.String(length=255), nullable=False),
sa.ForeignKeyConstraint(['namespace_user_id'], ['user.id'], name=op.f('fk_repository_namespace_user_id_user')),
sa.ForeignKeyConstraint(['visibility_id'], ['visibility.id'], name=op.f('fk_repository_visibility_id_visibility')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_repository'))
)
op.create_index('repository_namespace_user_id', 'repository', ['namespace_user_id'], unique=False)
op.create_index('repository_namespace_user_id_name', 'repository', ['namespace_user_id', 'name'], unique=True)
op.create_index('repository_visibility_id', 'repository', ['visibility_id'], unique=False)
op.create_table('servicekey',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('kid', sa.String(length=255), nullable=False),
sa.Column('service', sa.String(length=255), nullable=False),
sa.Column('jwk', UTF8LongText(), nullable=False),
sa.Column('metadata', UTF8LongText(), nullable=False),
sa.Column('created_date', sa.DateTime(), nullable=False),
sa.Column('expiration_date', sa.DateTime(), nullable=True),
sa.Column('rotation_duration', sa.Integer(), nullable=True),
sa.Column('approval_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['approval_id'], ['servicekeyapproval.id'], name=op.f('fk_servicekey_approval_id_servicekeyapproval')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_servicekey'))
)
op.create_index('servicekey_approval_id', 'servicekey', ['approval_id'], unique=False)
op.create_index('servicekey_kid', 'servicekey', ['kid'], unique=True)
op.create_index('servicekey_service', 'servicekey', ['service'], unique=False)
op.create_table('team',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('organization_id', sa.Integer(), nullable=False),
sa.Column('role_id', sa.Integer(), nullable=False),
sa.Column('description', sa.Text(), nullable=False),
sa.ForeignKeyConstraint(['organization_id'], ['user.id'], name=op.f('fk_team_organization_id_user')),
sa.ForeignKeyConstraint(['role_id'], ['teamrole.id'], name=op.f('fk_team_role_id_teamrole')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_team'))
)
op.create_index('team_name', 'team', ['name'], unique=False)
op.create_index('team_name_organization_id', 'team', ['name', 'organization_id'], unique=True)
op.create_index('team_organization_id', 'team', ['organization_id'], unique=False)
op.create_index('team_role_id', 'team', ['role_id'], unique=False)
op.create_table('torrentinfo',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('storage_id', sa.Integer(), nullable=False),
sa.Column('piece_length', sa.Integer(), nullable=False),
sa.Column('pieces', sa.Text(), nullable=False),
sa.ForeignKeyConstraint(['storage_id'], ['imagestorage.id'], name=op.f('fk_torrentinfo_storage_id_imagestorage')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_torrentinfo'))
)
op.create_index('torrentinfo_storage_id', 'torrentinfo', ['storage_id'], unique=False)
op.create_index('torrentinfo_storage_id_piece_length', 'torrentinfo', ['storage_id', 'piece_length'], unique=True)
op.create_table('userregion',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('location_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['location_id'], ['imagestoragelocation.id'], name=op.f('fk_userregion_location_id_imagestoragelocation')),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_userregion_user_id_user')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_userregion'))
)
op.create_index('userregion_location_id', 'userregion', ['location_id'], unique=False)
op.create_index('userregion_user_id', 'userregion', ['user_id'], unique=False)
op.create_table('accesstoken',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('friendly_name', sa.String(length=255), nullable=True),
sa.Column('code', sa.String(length=255), nullable=False),
sa.Column('repository_id', sa.Integer(), nullable=False),
sa.Column('created', sa.DateTime(), nullable=False),
sa.Column('role_id', sa.Integer(), nullable=False),
sa.Column('temporary', sa.Boolean(), nullable=False),
sa.Column('kind_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['kind_id'], ['accesstokenkind.id'], name=op.f('fk_accesstoken_kind_id_accesstokenkind')),
sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], name=op.f('fk_accesstoken_repository_id_repository')),
sa.ForeignKeyConstraint(['role_id'], ['role.id'], name=op.f('fk_accesstoken_role_id_role')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_accesstoken'))
)
op.create_index('accesstoken_code', 'accesstoken', ['code'], unique=True)
op.create_index('accesstoken_kind_id', 'accesstoken', ['kind_id'], unique=False)
op.create_index('accesstoken_repository_id', 'accesstoken', ['repository_id'], unique=False)
op.create_index('accesstoken_role_id', 'accesstoken', ['role_id'], unique=False)
op.create_table('blobupload',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('repository_id', sa.Integer(), nullable=False),
sa.Column('uuid', sa.String(length=255), nullable=False),
sa.Column('byte_count', sa.Integer(), nullable=False),
sa.Column('sha_state', sa.Text(), nullable=True),
sa.Column('location_id', sa.Integer(), nullable=False),
sa.Column('storage_metadata', sa.Text(), nullable=True),
sa.Column('chunk_count', sa.Integer(), nullable=False, server_default='0'),
sa.Column('uncompressed_byte_count', sa.Integer(), nullable=True),
sa.Column('created', sa.DateTime(), nullable=False, server_default=sa.text(now)),
sa.Column('piece_sha_state', UTF8LongText(), nullable=True),
sa.Column('piece_hashes', UTF8LongText(), nullable=True),
sa.ForeignKeyConstraint(['location_id'], ['imagestoragelocation.id'], name=op.f('fk_blobupload_location_id_imagestoragelocation')),
sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], name=op.f('fk_blobupload_repository_id_repository')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_blobupload'))
)
op.create_index('blobupload_created', 'blobupload', ['created'], unique=False)
op.create_index('blobupload_location_id', 'blobupload', ['location_id'], unique=False)
op.create_index('blobupload_repository_id', 'blobupload', ['repository_id'], unique=False)
op.create_index('blobupload_repository_id_uuid', 'blobupload', ['repository_id', 'uuid'], unique=True)
op.create_index('blobupload_uuid', 'blobupload', ['uuid'], unique=True)
op.create_table('image',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('docker_image_id', sa.String(length=255), nullable=False),
sa.Column('repository_id', sa.Integer(), nullable=False),
sa.Column('ancestors', sa.String(length=60535), nullable=True),
sa.Column('storage_id', sa.Integer(), nullable=True),
sa.Column('created', sa.DateTime(), nullable=True),
sa.Column('comment', UTF8LongText(), nullable=True),
sa.Column('command', sa.Text(), nullable=True),
sa.Column('aggregate_size', sa.BigInteger(), nullable=True),
sa.Column('v1_json_metadata', UTF8LongText(), nullable=True),
sa.Column('v1_checksum', sa.String(length=255), nullable=True),
sa.Column('security_indexed', sa.Boolean(), nullable=False, server_default=sa.sql.expression.false()),
sa.Column('security_indexed_engine', sa.Integer(), nullable=False, server_default='-1'),
sa.Column('parent_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], name=op.f('fk_image_repository_id_repository')),
sa.ForeignKeyConstraint(['storage_id'], ['imagestorage.id'], name=op.f('fk_image_storage_id_imagestorage')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_image'))
)
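# ancestors can be extremely long, so as with queueitem_queue_name the index
# below is capped at a 767-byte prefix for MySQL.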
op.create_index('image_ancestors', 'image', ['ancestors'], unique=False, mysql_length=767)
op.create_index('image_docker_image_id', 'image', ['docker_image_id'], unique=False)
op.create_index('image_parent_id', 'image', ['parent_id'], unique=False)
op.create_index('image_repository_id', 'image', ['repository_id'], unique=False)
op.create_index('image_repository_id_docker_image_id', 'image', ['repository_id', 'docker_image_id'], unique=True)
op.create_index('image_security_indexed', 'image', ['security_indexed'], unique=False)
op.create_index('image_security_indexed_engine', 'image', ['security_indexed_engine'], unique=False)
op.create_index('image_security_indexed_engine_security_indexed', 'image', ['security_indexed_engine', 'security_indexed'], unique=False)
op.create_index('image_storage_id', 'image', ['storage_id'], unique=False)
op.create_table('oauthaccesstoken',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('uuid', sa.String(length=255), nullable=False),
sa.Column('application_id', sa.Integer(), nullable=False),
sa.Column('authorized_user_id', sa.Integer(), nullable=False),
sa.Column('scope', sa.String(length=255), nullable=False),
sa.Column('access_token', sa.String(length=255), nullable=False),
sa.Column('token_type', sa.String(length=255), nullable=False),
sa.Column('expires_at', sa.DateTime(), nullable=False),
sa.Column('refresh_token', sa.String(length=255), nullable=True),
sa.Column('data', sa.Text(), nullable=False),
sa.ForeignKeyConstraint(['application_id'], ['oauthapplication.id'], name=op.f('fk_oauthaccesstoken_application_id_oauthapplication')),
sa.ForeignKeyConstraint(['authorized_user_id'], ['user.id'], name=op.f('fk_oauthaccesstoken_authorized_user_id_user')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_oauthaccesstoken'))
)
op.create_index('oauthaccesstoken_access_token', 'oauthaccesstoken', ['access_token'], unique=False)
op.create_index('oauthaccesstoken_application_id', 'oauthaccesstoken', ['application_id'], unique=False)
op.create_index('oauthaccesstoken_authorized_user_id', 'oauthaccesstoken', ['authorized_user_id'], unique=False)
op.create_index('oauthaccesstoken_refresh_token', 'oauthaccesstoken', ['refresh_token'], unique=False)
op.create_index('oauthaccesstoken_uuid', 'oauthaccesstoken', ['uuid'], unique=False)
op.create_table('oauthauthorizationcode',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('application_id', sa.Integer(), nullable=False),
sa.Column('code', sa.String(length=255), nullable=False),
sa.Column('scope', sa.String(length=255), nullable=False),
sa.Column('data', sa.Text(), nullable=False),
sa.ForeignKeyConstraint(['application_id'], ['oauthapplication.id'], name=op.f('fk_oauthauthorizationcode_application_id_oauthapplication')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_oauthauthorizationcode'))
)
op.create_index('oauthauthorizationcode_application_id', 'oauthauthorizationcode', ['application_id'], unique=False)
op.create_index('oauthauthorizationcode_code', 'oauthauthorizationcode', ['code'], unique=False)
op.create_table('permissionprototype',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('org_id', sa.Integer(), nullable=False),
sa.Column('uuid', sa.String(length=255), nullable=False),
sa.Column('activating_user_id', sa.Integer(), nullable=True),
sa.Column('delegate_user_id', sa.Integer(), nullable=True),
sa.Column('delegate_team_id', sa.Integer(), nullable=True),
sa.Column('role_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['activating_user_id'], ['user.id'], name=op.f('fk_permissionprototype_activating_user_id_user')),
sa.ForeignKeyConstraint(['delegate_team_id'], ['team.id'], name=op.f('fk_permissionprototype_delegate_team_id_team')),
sa.ForeignKeyConstraint(['delegate_user_id'], ['user.id'], name=op.f('fk_permissionprototype_delegate_user_id_user')),
sa.ForeignKeyConstraint(['org_id'], ['user.id'], name=op.f('fk_permissionprototype_org_id_user')),
sa.ForeignKeyConstraint(['role_id'], ['role.id'], name=op.f('fk_permissionprototype_role_id_role')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_permissionprototype'))
)
op.create_index('permissionprototype_activating_user_id', 'permissionprototype', ['activating_user_id'], unique=False)
op.create_index('permissionprototype_delegate_team_id', 'permissionprototype', ['delegate_team_id'], unique=False)
op.create_index('permissionprototype_delegate_user_id', 'permissionprototype', ['delegate_user_id'], unique=False)
op.create_index('permissionprototype_org_id', 'permissionprototype', ['org_id'], unique=False)
op.create_index('permissionprototype_org_id_activating_user_id', 'permissionprototype', ['org_id', 'activating_user_id'], unique=False)
op.create_index('permissionprototype_role_id', 'permissionprototype', ['role_id'], unique=False)
op.create_table('repositoryactioncount',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('repository_id', sa.Integer(), nullable=False),
sa.Column('count', sa.Integer(), nullable=False),
sa.Column('date', sa.Date(), nullable=False),
sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], name=op.f('fk_repositoryactioncount_repository_id_repository')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_repositoryactioncount'))
)
op.create_index('repositoryactioncount_date', 'repositoryactioncount', ['date'], unique=False)
op.create_index('repositoryactioncount_repository_id', 'repositoryactioncount', ['repository_id'], unique=False)
op.create_index('repositoryactioncount_repository_id_date', 'repositoryactioncount', ['repository_id', 'date'], unique=True)
op.create_table('repositoryauthorizedemail',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('repository_id', sa.Integer(), nullable=False),
sa.Column('email', sa.String(length=255), nullable=False),
sa.Column('code', sa.String(length=255), nullable=False),
sa.Column('confirmed', sa.Boolean(), nullable=False),
sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], name=op.f('fk_repositoryauthorizedemail_repository_id_repository')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_repositoryauthorizedemail'))
)
op.create_index('repositoryauthorizedemail_code', 'repositoryauthorizedemail', ['code'], unique=True)
op.create_index('repositoryauthorizedemail_email_repository_id', 'repositoryauthorizedemail', ['email', 'repository_id'], unique=True)
op.create_index('repositoryauthorizedemail_repository_id', 'repositoryauthorizedemail', ['repository_id'], unique=False)
op.create_table('repositorynotification',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('uuid', sa.String(length=255), nullable=False),
sa.Column('repository_id', sa.Integer(), nullable=False),
sa.Column('event_id', sa.Integer(), nullable=False),
sa.Column('method_id', sa.Integer(), nullable=False),
sa.Column('title', sa.String(length=255), nullable=True),
sa.Column('config_json', sa.Text(), nullable=False),
sa.Column('event_config_json', UTF8LongText(), nullable=False),
sa.ForeignKeyConstraint(['event_id'], ['externalnotificationevent.id'], name=op.f('fk_repositorynotification_event_id_externalnotificationevent')),
sa.ForeignKeyConstraint(['method_id'], ['externalnotificationmethod.id'], name=op.f('fk_repositorynotification_method_id_externalnotificationmethod')),
sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], name=op.f('fk_repositorynotification_repository_id_repository')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_repositorynotification'))
)
op.create_index('repositorynotification_event_id', 'repositorynotification', ['event_id'], unique=False)
op.create_index('repositorynotification_method_id', 'repositorynotification', ['method_id'], unique=False)
op.create_index('repositorynotification_repository_id', 'repositorynotification', ['repository_id'], unique=False)
op.create_index('repositorynotification_uuid', 'repositorynotification', ['uuid'], unique=False)
op.create_table('repositorypermission',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('team_id', sa.Integer(), nullable=True),
sa.Column('user_id', sa.Integer(), nullable=True),
sa.Column('repository_id', sa.Integer(), nullable=False),
sa.Column('role_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], name=op.f('fk_repositorypermission_repository_id_repository')),
sa.ForeignKeyConstraint(['role_id'], ['role.id'], name=op.f('fk_repositorypermission_role_id_role')),
sa.ForeignKeyConstraint(['team_id'], ['team.id'], name=op.f('fk_repositorypermission_team_id_team')),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_repositorypermission_user_id_user')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_repositorypermission'))
)
op.create_index('repositorypermission_repository_id', 'repositorypermission', ['repository_id'], unique=False)
op.create_index('repositorypermission_role_id', 'repositorypermission', ['role_id'], unique=False)
op.create_index('repositorypermission_team_id', 'repositorypermission', ['team_id'], unique=False)
op.create_index('repositorypermission_team_id_repository_id', 'repositorypermission', ['team_id', 'repository_id'], unique=True)
op.create_index('repositorypermission_user_id', 'repositorypermission', ['user_id'], unique=False)
op.create_index('repositorypermission_user_id_repository_id', 'repositorypermission', ['user_id', 'repository_id'], unique=True)
op.create_table('star',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('repository_id', sa.Integer(), nullable=False),
sa.Column('created', sa.DateTime(), nullable=False),
sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], name=op.f('fk_star_repository_id_repository')),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_star_user_id_user')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_star'))
)
op.create_index('star_repository_id', 'star', ['repository_id'], unique=False)
op.create_index('star_user_id', 'star', ['user_id'], unique=False)
op.create_index('star_user_id_repository_id', 'star', ['user_id', 'repository_id'], unique=True)
op.create_table('teammember',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('team_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['team_id'], ['team.id'], name=op.f('fk_teammember_team_id_team')),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_teammember_user_id_user')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_teammember'))
)
op.create_index('teammember_team_id', 'teammember', ['team_id'], unique=False)
op.create_index('teammember_user_id', 'teammember', ['user_id'], unique=False)
op.create_index('teammember_user_id_team_id', 'teammember', ['user_id', 'team_id'], unique=True)
op.create_table('teammemberinvite',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=True),
sa.Column('email', sa.String(length=255), nullable=True),
sa.Column('team_id', sa.Integer(), nullable=False),
sa.Column('inviter_id', sa.Integer(), nullable=False),
sa.Column('invite_token', sa.String(length=255), nullable=False),
sa.ForeignKeyConstraint(['inviter_id'], ['user.id'], name=op.f('fk_teammemberinvite_inviter_id_user')),
sa.ForeignKeyConstraint(['team_id'], ['team.id'], name=op.f('fk_teammemberinvite_team_id_team')),
sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_teammemberinvite_user_id_user')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_teammemberinvite'))
)
op.create_index('teammemberinvite_inviter_id', 'teammemberinvite', ['inviter_id'], unique=False)
op.create_index('teammemberinvite_team_id', 'teammemberinvite', ['team_id'], unique=False)
op.create_index('teammemberinvite_user_id', 'teammemberinvite', ['user_id'], unique=False)
op.create_table('derivedstorageforimage',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('source_image_id', sa.Integer(), nullable=False),
sa.Column('derivative_id', sa.Integer(), nullable=False),
sa.Column('transformation_id', sa.Integer(), nullable=False),
sa.Column('uniqueness_hash', sa.String(length=255), nullable=True),
sa.ForeignKeyConstraint(['derivative_id'], ['imagestorage.id'], name=op.f('fk_derivedstorageforimage_derivative_id_imagestorage')),
sa.ForeignKeyConstraint(['source_image_id'], ['image.id'], name=op.f('fk_derivedstorageforimage_source_image_id_image')),
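# The conventional fk_<table>_<column>_<referred table> name would exceed
# MySQL's 64-character identifier limit here, hence (presumably) the shortened
# 'transformation_constraint' form below.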
sa.ForeignKeyConstraint(['transformation_id'], ['imagestoragetransformation.id'], name=op.f('fk_derivedstorageforimage_transformation_constraint')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_derivedstorageforimage'))
)
op.create_index('derivedstorageforimage_derivative_id', 'derivedstorageforimage', ['derivative_id'], unique=False)
op.create_index('derivedstorageforimage_source_image_id', 'derivedstorageforimage', ['source_image_id'], unique=False)
op.create_index('uniqueness_index', 'derivedstorageforimage', ['source_image_id', 'transformation_id', 'uniqueness_hash'], unique=True)
op.create_index('derivedstorageforimage_transformation_id', 'derivedstorageforimage', ['transformation_id'], unique=False)
op.create_table('repositorybuildtrigger',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('uuid', sa.String(length=255), nullable=False),
sa.Column('service_id', sa.Integer(), nullable=False),
sa.Column('repository_id', sa.Integer(), nullable=False),
sa.Column('connected_user_id', sa.Integer(), nullable=False),
sa.Column('auth_token', sa.String(length=255), nullable=True),
sa.Column('private_key', sa.Text(), nullable=True),
sa.Column('config', sa.Text(), nullable=False),
sa.Column('write_token_id', sa.Integer(), nullable=True),
sa.Column('pull_robot_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['connected_user_id'], ['user.id'], name=op.f('fk_repositorybuildtrigger_connected_user_id_user')),
sa.ForeignKeyConstraint(['pull_robot_id'], ['user.id'], name=op.f('fk_repositorybuildtrigger_pull_robot_id_user')),
sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], name=op.f('fk_repositorybuildtrigger_repository_id_repository')),
sa.ForeignKeyConstraint(['service_id'], ['buildtriggerservice.id'], name=op.f('fk_repositorybuildtrigger_service_id_buildtriggerservice')),
sa.ForeignKeyConstraint(['write_token_id'], ['accesstoken.id'], name=op.f('fk_repositorybuildtrigger_write_token_id_accesstoken')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_repositorybuildtrigger'))
)
op.create_index('repositorybuildtrigger_connected_user_id', 'repositorybuildtrigger', ['connected_user_id'], unique=False)
op.create_index('repositorybuildtrigger_pull_robot_id', 'repositorybuildtrigger', ['pull_robot_id'], unique=False)
op.create_index('repositorybuildtrigger_repository_id', 'repositorybuildtrigger', ['repository_id'], unique=False)
op.create_index('repositorybuildtrigger_service_id', 'repositorybuildtrigger', ['service_id'], unique=False)
op.create_index('repositorybuildtrigger_write_token_id', 'repositorybuildtrigger', ['write_token_id'], unique=False)
op.create_table('repositorytag',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('image_id', sa.Integer(), nullable=False),
sa.Column('repository_id', sa.Integer(), nullable=False),
sa.Column('lifetime_start_ts', sa.Integer(), nullable=False, server_default='0'),
sa.Column('lifetime_end_ts', sa.Integer(), nullable=True),
sa.Column('hidden', sa.Boolean(), nullable=False, server_default=sa.sql.expression.false()),
sa.Column('reversion', sa.Boolean(), nullable=False, server_default=sa.sql.expression.false()),
sa.ForeignKeyConstraint(['image_id'], ['image.id'], name=op.f('fk_repositorytag_image_id_image')),
sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], name=op.f('fk_repositorytag_repository_id_repository')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_repositorytag'))
)
op.create_index('repositorytag_image_id', 'repositorytag', ['image_id'], unique=False)
op.create_index('repositorytag_lifetime_end_ts', 'repositorytag', ['lifetime_end_ts'], unique=False)
op.create_index('repositorytag_repository_id', 'repositorytag', ['repository_id'], unique=False)
op.create_index('repositorytag_repository_id_name', 'repositorytag', ['repository_id', 'name'], unique=False)
op.create_index('repositorytag_repository_id_name_lifetime_end_ts', 'repositorytag', ['repository_id', 'name', 'lifetime_end_ts'], unique=True)
op.create_table('repositorybuild',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('uuid', sa.String(length=255), nullable=False),
sa.Column('repository_id', sa.Integer(), nullable=False),
sa.Column('access_token_id', sa.Integer(), nullable=False),
sa.Column('resource_key', sa.String(length=255), nullable=True),
sa.Column('job_config', sa.Text(), nullable=False),
sa.Column('phase', sa.String(length=255), nullable=False),
sa.Column('started', sa.DateTime(), nullable=False),
sa.Column('display_name', sa.String(length=255), nullable=False),
sa.Column('trigger_id', sa.Integer(), nullable=True),
sa.Column('pull_robot_id', sa.Integer(), nullable=True),
sa.Column('logs_archived', sa.Boolean(), nullable=False, server_default=sa.sql.expression.false()),
sa.Column('queue_id', sa.String(length=255), nullable=True),
sa.ForeignKeyConstraint(['access_token_id'], ['accesstoken.id'], name=op.f('fk_repositorybuild_access_token_id_accesstoken')),
sa.ForeignKeyConstraint(['pull_robot_id'], ['user.id'], name=op.f('fk_repositorybuild_pull_robot_id_user')),
sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], name=op.f('fk_repositorybuild_repository_id_repository')),
sa.ForeignKeyConstraint(['trigger_id'], ['repositorybuildtrigger.id'], name=op.f('fk_repositorybuild_trigger_id_repositorybuildtrigger')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_repositorybuild'))
)
op.create_index('repositorybuild_access_token_id', 'repositorybuild', ['access_token_id'], unique=False)
op.create_index('repositorybuild_pull_robot_id', 'repositorybuild', ['pull_robot_id'], unique=False)
op.create_index('repositorybuild_queue_id', 'repositorybuild', ['queue_id'], unique=False)
op.create_index('repositorybuild_repository_id', 'repositorybuild', ['repository_id'], unique=False)
op.create_index('repositorybuild_repository_id_started_phase', 'repositorybuild', ['repository_id', 'started', 'phase'], unique=False)
op.create_index('repositorybuild_resource_key', 'repositorybuild', ['resource_key'], unique=False)
op.create_index('repositorybuild_started', 'repositorybuild', ['started'], unique=False)
op.create_index('repositorybuild_started_logs_archived_phase', 'repositorybuild', ['started', 'logs_archived', 'phase'], unique=False)
op.create_index('repositorybuild_trigger_id', 'repositorybuild', ['trigger_id'], unique=False)
op.create_index('repositorybuild_uuid', 'repositorybuild', ['uuid'], unique=False)
op.create_table('tagmanifest',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('tag_id', sa.Integer(), nullable=False),
sa.Column('digest', sa.String(length=255), nullable=False),
sa.Column('json_data', UTF8LongText(), nullable=False),
sa.ForeignKeyConstraint(['tag_id'], ['repositorytag.id'], name=op.f('fk_tagmanifest_tag_id_repositorytag')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_tagmanifest'))
)
op.create_index('tagmanifest_digest', 'tagmanifest', ['digest'], unique=False)
op.create_index('tagmanifest_tag_id', 'tagmanifest', ['tag_id'], unique=True)
op.create_table('tagmanifestlabel',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('repository_id', sa.Integer(), nullable=False),
sa.Column('annotated_id', sa.Integer(), nullable=False),
sa.Column('label_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['annotated_id'], ['tagmanifest.id'], name=op.f('fk_tagmanifestlabel_annotated_id_tagmanifest')),
sa.ForeignKeyConstraint(['label_id'], ['label.id'], name=op.f('fk_tagmanifestlabel_label_id_label')),
sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], name=op.f('fk_tagmanifestlabel_repository_id_repository')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_tagmanifestlabel'))
)
op.create_index('tagmanifestlabel_annotated_id', 'tagmanifestlabel', ['annotated_id'], unique=False)
op.create_index('tagmanifestlabel_annotated_id_label_id', 'tagmanifestlabel', ['annotated_id', 'label_id'], unique=True)
op.create_index('tagmanifestlabel_label_id', 'tagmanifestlabel', ['label_id'], unique=False)
op.create_index('tagmanifestlabel_repository_id', 'tagmanifestlabel', ['repository_id'], unique=False)
op.bulk_insert(tables.accesstokenkind,
[
{'name':'build-worker'},
{'name':'pushpull-token'},
])
op.bulk_insert(tables.buildtriggerservice,
[
{'name':'github'},
{'name':'gitlab'},
{'name':'bitbucket'},
{'name':'custom-git'},
])
op.bulk_insert(tables.externalnotificationevent,
[
{'name':'build_failure'},
{'name':'build_queued'},
{'name':'build_start'},
{'name':'build_success'},
{'name':'repo_push'},
{'name':'vulnerability_found'},
])
op.bulk_insert(tables.externalnotificationmethod,
[
{'name':'email'},
{'name':'flowdock'},
{'name':'hipchat'},
{'name':'quay_notification'},
{'name':'slack'},
{'name':'webhook'},
])
op.bulk_insert(tables.imagestoragelocation,
[
{'name':'s3_us_east_1'},
{'name':'s3_eu_west_1'},
{'name':'s3_ap_southeast_1'},
{'name':'s3_ap_southeast_2'},
{'name':'s3_ap_northeast_1'},
{'name':'s3_sa_east_1'},
{'name':'local'},
{'name':'s3_us_west_1'},
])
op.bulk_insert(tables.imagestoragesignaturekind,
[
{'name':'gpg2'},
])
op.bulk_insert(tables.imagestoragetransformation,
[
{'name':'squash'},
{'name':'aci'},
])
op.bulk_insert(tables.labelsourcetype,
[
{'name':'manifest', 'mutable': False},
{'name':'api', 'mutable': True},
{'name':'internal', 'mutable': False},
])
op.bulk_insert(tables.logentrykind,
[
{'name':'account_change_cc'},
{'name':'account_change_password'},
{'name':'account_change_plan'},
{'name':'account_convert'},
{'name':'add_repo_accesstoken'},
{'name':'add_repo_notification'},
{'name':'add_repo_permission'},
{'name':'add_repo_webhook'},
{'name':'build_dockerfile'},
{'name':'change_repo_permission'},
{'name':'change_repo_visibility'},
{'name':'create_application'},
{'name':'create_prototype_permission'},
{'name':'create_repo'},
{'name':'create_robot'},
{'name':'create_tag'},
{'name':'delete_application'},
{'name':'delete_prototype_permission'},
{'name':'delete_repo'},
{'name':'delete_repo_accesstoken'},
{'name':'delete_repo_notification'},
{'name':'delete_repo_permission'},
{'name':'delete_repo_trigger'},
{'name':'delete_repo_webhook'},
{'name':'delete_robot'},
{'name':'delete_tag'},
{'name':'manifest_label_add'},
{'name':'manifest_label_delete'},
{'name':'modify_prototype_permission'},
{'name':'move_tag'},
{'name':'org_add_team_member'},
{'name':'org_create_team'},
{'name':'org_delete_team'},
{'name':'org_delete_team_member_invite'},
{'name':'org_invite_team_member'},
{'name':'org_remove_team_member'},
{'name':'org_set_team_description'},
{'name':'org_set_team_role'},
{'name':'org_team_member_invite_accepted'},
{'name':'org_team_member_invite_declined'},
{'name':'pull_repo'},
{'name':'push_repo'},
{'name':'regenerate_robot_token'},
{'name':'repo_verb'},
{'name':'reset_application_client_secret'},
{'name':'revert_tag'},
{'name':'service_key_approve'},
{'name':'service_key_create'},
{'name':'service_key_delete'},
{'name':'service_key_extend'},
{'name':'service_key_modify'},
{'name':'service_key_rotate'},
{'name':'setup_repo_trigger'},
{'name':'set_repo_description'},
{'name':'take_ownership'},
{'name':'update_application'},
])
op.bulk_insert(tables.loginservice,
[
{'name':'github'},
{'name':'quayrobot'},
{'name':'ldap'},
{'name':'google'},
{'name':'keystone'},
{'name':'dex'},
{'name':'jwtauthn'},
])
op.bulk_insert(tables.mediatype,
[
{'name':'text/plain'},
{'name':'application/json'},
])
op.bulk_insert(tables.notificationkind,
[
{'name':'build_failure'},
{'name':'build_queued'},
{'name':'build_start'},
{'name':'build_success'},
{'name':'expiring_license'},
{'name':'maintenance'},
{'name':'org_team_invite'},
{'name':'over_private_usage'},
{'name':'password_required'},
{'name':'repo_push'},
{'name':'service_key_submitted'},
{'name':'vulnerability_found'},
])
op.bulk_insert(tables.role,
[
{'name':'admin'},
{'name':'write'},
{'name':'read'},
])
op.bulk_insert(tables.teamrole,
[
{'name':'admin'},
{'name':'creator'},
{'name':'member'},
])
op.bulk_insert(tables.visibility,
[
{'name':'public'},
{'name':'private'},
])
def downgrade(tables):
op.drop_table('tagmanifestlabel')
op.drop_table('tagmanifest')
op.drop_table('repositorybuild')
op.drop_table('repositorytag')
op.drop_table('repositorybuildtrigger')
op.drop_table('derivedstorageforimage')
op.drop_table('teammemberinvite')
op.drop_table('teammember')
op.drop_table('star')
op.drop_table('repositorypermission')
op.drop_table('repositorynotification')
op.drop_table('repositoryauthorizedemail')
op.drop_table('repositoryactioncount')
op.drop_table('permissionprototype')
op.drop_table('oauthauthorizationcode')
op.drop_table('oauthaccesstoken')
op.drop_table('image')
op.drop_table('blobupload')
op.drop_table('accesstoken')
op.drop_table('userregion')
op.drop_table('torrentinfo')
op.drop_table('team')
op.drop_table('servicekey')
op.drop_table('repository')
op.drop_table('quayrelease')
op.drop_table('oauthapplication')
op.drop_table('notification')
op.drop_table('logentry')
op.drop_table('label')
op.drop_table('imagestoragesignature')
op.drop_table('imagestorageplacement')
op.drop_table('federatedlogin')
op.drop_table('emailconfirmation')
op.drop_table('visibility')
op.drop_table('user')
op.drop_table('teamrole')
op.drop_table('servicekeyapproval')
op.drop_table('role')
op.drop_table('queueitem')
op.drop_table('quayservice')
op.drop_table('quayregion')
op.drop_table('notificationkind')
op.drop_table('messages')
op.drop_table('mediatype')
op.drop_table('loginservice')
op.drop_table('logentrykind')
op.drop_table('labelsourcetype')
op.drop_table('imagestoragetransformation')
op.drop_table('imagestoragesignaturekind')
op.drop_table('imagestoragelocation')
op.drop_table('imagestorage')
op.drop_table('externalnotificationmethod')
op.drop_table('externalnotificationevent')
op.drop_table('buildtriggerservice')
op.drop_table('accesstokenkind')


@ -1,89 +0,0 @@
"""Add labels to the schema
Revision ID: c9b91bee7554
Revises: 983247d75af3
Create Date: 2016-08-22 15:40:25.226541
"""
# revision identifiers, used by Alembic.
revision = 'c9b91bee7554'
down_revision = '983247d75af3'
from alembic import op
import sqlalchemy as sa
from util.migrate import UTF8LongText, UTF8CharField
def upgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.create_table('labelsourcetype',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.Column('mutable', sa.Boolean(), nullable=False),
sa.PrimaryKeyConstraint('id', name=op.f('pk_labelsourcetype'))
)
op.create_index('labelsourcetype_name', 'labelsourcetype', ['name'], unique=True)
op.create_table('mediatype',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.PrimaryKeyConstraint('id', name=op.f('pk_mediatype'))
)
op.create_index('mediatype_name', 'mediatype', ['name'], unique=True)
op.create_table('label',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('uuid', sa.String(length=255), nullable=False),
sa.Column('key', UTF8CharField(length=255), nullable=False),
sa.Column('value', UTF8LongText(), nullable=False),
sa.Column('media_type_id', sa.Integer(), nullable=False),
sa.Column('source_type_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['media_type_id'], ['mediatype.id'], name=op.f('fk_label_media_type_id_mediatype')),
sa.ForeignKeyConstraint(['source_type_id'], ['labelsourcetype.id'], name=op.f('fk_label_source_type_id_labelsourcetype')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_label'))
)
op.create_index('label_key', 'label', ['key'], unique=False)
op.create_index('label_media_type_id', 'label', ['media_type_id'], unique=False)
op.create_index('label_source_type_id', 'label', ['source_type_id'], unique=False)
op.create_index('label_uuid', 'label', ['uuid'], unique=True)
op.create_table('tagmanifestlabel',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('repository_id', sa.Integer(), nullable=False),
sa.Column('annotated_id', sa.Integer(), nullable=False),
sa.Column('label_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['annotated_id'], ['tagmanifest.id'], name=op.f('fk_tagmanifestlabel_annotated_id_tagmanifest')),
sa.ForeignKeyConstraint(['label_id'], ['label.id'], name=op.f('fk_tagmanifestlabel_label_id_label')),
sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], name=op.f('fk_tagmanifestlabel_repository_id_repository')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_tagmanifestlabel'))
)
op.create_index('tagmanifestlabel_annotated_id', 'tagmanifestlabel', ['annotated_id'], unique=False)
op.create_index('tagmanifestlabel_annotated_id_label_id', 'tagmanifestlabel', ['annotated_id', 'label_id'], unique=True)
op.create_index('tagmanifestlabel_label_id', 'tagmanifestlabel', ['label_id'], unique=False)
op.create_index('tagmanifestlabel_repository_id', 'tagmanifestlabel', ['repository_id'], unique=False)
### end Alembic commands ###
op.bulk_insert(tables.logentrykind, [
{'name':'manifest_label_add'},
{'name':'manifest_label_delete'},
])
op.bulk_insert(tables.mediatype, [
{'name':'text/plain'},
{'name':'application/json'},
])
op.bulk_insert(tables.labelsourcetype, [
{'name':'manifest', 'mutable': False},
{'name':'api', 'mutable': True},
{'name':'internal', 'mutable': False},
])
def downgrade(tables):
op.execute(tables.logentrykind.delete().where(tables.logentrykind.c.name == op.inline_literal('manifest_label_add')))
op.execute(tables.logentrykind.delete().where(tables.logentrykind.c.name == op.inline_literal('manifest_label_delete')))
### commands auto generated by Alembic - please adjust! ###
op.drop_table('tagmanifestlabel')
op.drop_table('label')
op.drop_table('mediatype')
op.drop_table('labelsourcetype')
### end Alembic commands ###


@ -1,28 +0,0 @@
"""Remove uniqueness constraint on the TagManifest digest column
Revision ID: e4129c93e477
Revises: 956a0833223
Create Date: 2016-02-12 17:22:48.039791
"""
# revision identifiers, used by Alembic.
revision = 'e4129c93e477'
down_revision = '956a0833223'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def upgrade(tables):
### commands auto generated by Alembic - please adjust! ###
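  # Index uniqueness can't be altered in place, so the index is dropped and recreated without the unique flag.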
op.drop_index('tagmanifest_digest', table_name='tagmanifest')
op.create_index('tagmanifest_digest', 'tagmanifest', ['digest'], unique=False)
### end Alembic commands ###
def downgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.drop_index('tagmanifest_digest', table_name='tagmanifest')
op.create_index('tagmanifest_digest', 'tagmanifest', ['digest'], unique=True)
### end Alembic commands ###


@ -1,32 +0,0 @@
"""Remove the old webhooks table.
Revision ID: f42b0ea7a4d
Revises: 4fdb65816b8d
Create Date: 2014-09-03 13:43:23.391464
"""
# revision identifiers, used by Alembic.
revision = 'f42b0ea7a4d'
down_revision = '4fdb65816b8d'
from alembic import op
import sqlalchemy as sa
def upgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.drop_table('webhook')
### end Alembic commands ###
def downgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.create_table('webhook',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('public_id', sa.String(length=255), nullable=False),
sa.Column('repository_id', sa.Integer(), nullable=False),
sa.Column('parameters', sa.Text(), nullable=False),
sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], ),
sa.PrimaryKeyConstraint('id')
)
### end Alembic commands ###


@ -1,55 +0,0 @@
import logging
from data.database import ImageStorage, Image, db, db_for_update
from app import app
logger = logging.getLogger(__name__)
def backfill_aggregate_sizes():
""" Generates aggregate sizes for any image storage entries without them """
logger.debug('Aggregate sizes backfill: Began execution')
while True:
batch_image_ids = list(Image
.select(Image.id)
.where(Image.aggregate_size >> None)
.limit(100))
if len(batch_image_ids) == 0:
      # There are no images left to backfill. We're done!
logger.debug('Aggregate sizes backfill: Backfill completed')
return
logger.debug('Aggregate sizes backfill: Found %s records to update', len(batch_image_ids))
for image_id in batch_image_ids:
      logger.debug('Updating image: %s', image_id.id)
with app.config['DB_TRANSACTION_FACTORY'](db):
try:
image = (Image
.select(Image, ImageStorage)
.join(ImageStorage)
.where(Image.id == image_id)
.get())
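          # The aggregate size is this image's own layer size plus the layer sizes of all its ancestors.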
aggregate_size = image.storage.image_size
image_ids = image.ancestor_id_list()
          for ancestor_id in image_ids:
            to_add = db_for_update(Image
                                   .select(Image, ImageStorage)
                                   .join(ImageStorage)
                                   .where(Image.id == ancestor_id)).get()
aggregate_size += to_add.storage.image_size
image.aggregate_size = aggregate_size
image.save()
except Image.DoesNotExist:
pass
if __name__ == "__main__":
logging.basicConfig(level=logging.DEBUG)
logging.getLogger('peewee').setLevel(logging.CRITICAL)
backfill_aggregate_sizes()
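
Several of these deleted backfills share the same drive-to-empty shape: select a small batch of rows whose target column is still NULL, repair each one inside a transaction, and loop until the selection comes back empty. A minimal sketch of the pattern against a hypothetical peewee model (illustrative names, not the production schema):

import logging

from peewee import CharField, Model, SqliteDatabase

logger = logging.getLogger(__name__)
db = SqliteDatabase(':memory:')  # Hypothetical database, for illustration only.

class Item(Model):
  value = CharField(null=True)  # The column being backfilled.

  class Meta:
    database = db

def backfill_values(compute_value):
  """ Drive-to-empty backfill: fix NULL rows in small batches until none remain. """
  while True:
    batch = list(Item.select(Item.id).where(Item.value >> None).limit(100))
    if len(batch) == 0:
      logger.debug('Backfill completed')
      return
    for row in batch:
      with db.atomic():
        # Fixing one row per transaction keeps each repair atomic and the locks short-lived.
        Item.update(value=compute_value(row.id)).where(Item.id == row.id).execute()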


@ -1,145 +0,0 @@
import logging
from peewee import (JOIN_LEFT_OUTER, CharField, BigIntegerField, BooleanField, ForeignKeyField,
IntegerField, IntegrityError, fn)
from data.database import BaseModel, CloseForLongOperation
from data.fields import Base64BinaryField
from app import app, storage
from digest import checksums
from util.migrate.allocator import yield_random_entries
from util.registry.torrent import PieceHasher
from util.registry.filelike import wrap_with_handler
BATCH_SIZE = 1000
logger = logging.getLogger(__name__)
# Vendor the information from tables we will be writing to at the time of this migration
class ImageStorage(BaseModel):
uuid = CharField(index=True, unique=True)
checksum = CharField(null=True)
image_size = BigIntegerField(null=True)
uncompressed_size = BigIntegerField(null=True)
uploading = BooleanField(default=True, null=True)
cas_path = BooleanField(default=True)
content_checksum = CharField(null=True, index=True)
class ImageStorageLocation(BaseModel):
name = CharField(unique=True, index=True)
class ImageStoragePlacement(BaseModel):
storage = ForeignKeyField(ImageStorage)
location = ForeignKeyField(ImageStorageLocation)
class TorrentInfo(BaseModel):
storage = ForeignKeyField(ImageStorage)
piece_length = IntegerField()
pieces = Base64BinaryField()
def _get_image_storage_locations(storage_id):
placements_query = (ImageStoragePlacement
.select(ImageStoragePlacement, ImageStorageLocation)
.join(ImageStorageLocation)
.switch(ImageStoragePlacement)
.join(ImageStorage, JOIN_LEFT_OUTER)
.where(ImageStorage.id == storage_id))
locations = set()
for placement in placements_query:
locations.add(placement.location.name)
return locations
def _get_layer_path(storage_record):
""" Returns the path in the storage engine to the layer data referenced by the storage row. """
if not storage_record.cas_path:
logger.debug('Serving layer from legacy v1 path: %s', storage_record.uuid)
return storage.v1_image_layer_path(storage_record.uuid)
return storage.blob_path(storage_record.content_checksum)
def backfill_content_checksums_and_torrent_pieces(piece_length):
""" Hashes the entire file for the content associated with an imagestorage. """
logger.debug('Began execution')
logger.debug('This may be a long operation!')
def batch_query():
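    # Left-join TorrentInfo at this piece length so we can select storages missing either their torrent pieces or their content checksum.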
return (ImageStorage
.select(ImageStorage.id, ImageStorage.uuid, ImageStorage.content_checksum,
ImageStorage.cas_path)
.join(TorrentInfo, JOIN_LEFT_OUTER, on=((TorrentInfo.storage == ImageStorage.id) &
(TorrentInfo.piece_length == piece_length)))
.where((TorrentInfo.id >> None) | (ImageStorage.content_checksum >> None)))
max_id = ImageStorage.select(fn.Max(ImageStorage.id)).scalar()
checksums_written = 0
pieces_written = 0
for candidate_storage, abort in yield_random_entries(batch_query, ImageStorage.id, BATCH_SIZE,
max_id):
locations = _get_image_storage_locations(candidate_storage.id)
checksum = candidate_storage.content_checksum
torrent_pieces = ''
with CloseForLongOperation(app.config):
try:
# Compute the checksum
layer_path = _get_layer_path(candidate_storage)
with storage.stream_read_file(locations, layer_path) as layer_data_handle:
hasher = PieceHasher(piece_length)
wrapped = wrap_with_handler(layer_data_handle, hasher.update)
checksum = 'sha256:{0}'.format(checksums.sha256_file(wrapped))
torrent_pieces = hasher.final_piece_hashes()
except Exception as exc:
logger.exception('Unable to compute hashes for storage: %s', candidate_storage.uuid)
# Create a fallback value for the checksum
if checksum is None:
checksum = 'unknown:{0}'.format(exc.__class__.__name__)
torrent_collision = False
checksum_collision = False
# Now update the ImageStorage with the checksum
num_updated = (ImageStorage
.update(content_checksum=checksum)
.where(ImageStorage.id == candidate_storage.id,
ImageStorage.content_checksum >> None)).execute()
checksums_written += num_updated
if num_updated == 0:
checksum_collision = True
try:
TorrentInfo.create(storage=candidate_storage.id, piece_length=piece_length,
pieces=torrent_pieces)
pieces_written += 1
except IntegrityError:
torrent_collision = True
if torrent_collision and checksum_collision:
logger.info('Another worker pre-empted us for storage: %s', candidate_storage.uuid)
abort.set()
if (pieces_written % BATCH_SIZE) == 0 or (checksums_written % BATCH_SIZE) == 0:
logger.debug('%s checksums written, %s torrent pieces written', checksums_written,
pieces_written)
logger.debug('Completed, %s checksums written, %s torrent pieces written', checksums_written,
pieces_written)
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
#logging.getLogger('peewee').setLevel(logging.WARNING)
logging.getLogger('boto').setLevel(logging.WARNING)
logging.getLogger('data.database').setLevel(logging.WARNING)
backfill_content_checksums_and_torrent_pieces(app.config['BITTORRENT_PIECE_SIZE'])
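
The interesting part of this deleted backfill is that it derives two digests from a single read of the layer stream: wrap_with_handler feeds every chunk consumed by the SHA-256 pass to the PieceHasher as a side effect, so a layer sitting in remote storage is only downloaded once. A self-contained sketch of the same single-pass idea using nothing but hashlib (a hypothetical helper, not the production PieceHasher):

import hashlib

def sha256_and_piece_hashes(fileobj, piece_length, chunk_size=65536):
  """ Reads a stream once, returning the whole-file SHA-256 and the
      concatenated SHA-1 piece hashes (the digest BitTorrent uses). """
  whole = hashlib.sha256()
  piece = hashlib.sha1()
  filled = 0
  pieces = []
  while True:
    chunk = fileobj.read(chunk_size)
    if not chunk:
      break
    whole.update(chunk)
    # Split the chunk across piece boundaries, finalizing a piece
    # hash every piece_length bytes.
    while chunk:
      take = min(len(chunk), piece_length - filled)
      piece.update(chunk[:take])
      filled += take
      chunk = chunk[take:]
      if filled == piece_length:
        pieces.append(piece.digest())
        piece = hashlib.sha1()
        filled = 0
  if filled:
    pieces.append(piece.digest())
  return 'sha256:{0}'.format(whole.hexdigest()), b''.join(pieces)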


@ -1,87 +0,0 @@
import logging
from peewee import (CharField, BigIntegerField, BooleanField, ForeignKeyField, DateTimeField,
TextField)
from data.database import BaseModel, db, db_for_update
from app import app
logger = logging.getLogger(__name__)
class Repository(BaseModel):
pass
# Vendor the information from tables we will be writing to at the time of this migration
class ImageStorage(BaseModel):
created = DateTimeField(null=True)
comment = TextField(null=True)
command = TextField(null=True)
aggregate_size = BigIntegerField(null=True)
uploading = BooleanField(default=True, null=True)
class Image(BaseModel):
# This class is intentionally denormalized. Even though images are supposed
# to be globally unique we can't treat them as such for permissions and
# security reasons. So rather than Repository <-> Image being many to many
# each image now belongs to exactly one repository.
docker_image_id = CharField(index=True)
repository = ForeignKeyField(Repository)
  # '/' separated list of ancestry ids, e.g. /1/2/6/7/10/
ancestors = CharField(index=True, default='/', max_length=64535, null=True)
storage = ForeignKeyField(ImageStorage, index=True, null=True)
created = DateTimeField(null=True)
comment = TextField(null=True)
command = TextField(null=True)
aggregate_size = BigIntegerField(null=True)
v1_json_metadata = TextField(null=True)
def backfill_image_fields():
""" Copies metadata from image storages to their images. """
logger.debug('Image metadata backfill: Began execution')
while True:
batch_image_ids = list(Image
.select(Image.id)
.join(ImageStorage)
.where(Image.created >> None, Image.comment >> None,
Image.command >> None, Image.aggregate_size >> None,
ImageStorage.uploading == False,
~((ImageStorage.created >> None) &
(ImageStorage.comment >> None) &
(ImageStorage.command >> None) &
(ImageStorage.aggregate_size >> None)))
.limit(100))
if len(batch_image_ids) == 0:
logger.debug('Image metadata backfill: Backfill completed')
return
logger.debug('Image metadata backfill: Found %s records to update', len(batch_image_ids))
for image_id in batch_image_ids:
logger.debug('Updating image: %s', image_id.id)
with app.config['DB_TRANSACTION_FACTORY'](db):
try:
image = db_for_update(Image
.select(Image, ImageStorage)
.join(ImageStorage)
.where(Image.id == image_id.id)).get()
image.created = image.storage.created
image.comment = image.storage.comment
image.command = image.storage.command
image.aggregate_size = image.storage.aggregate_size
image.save()
except Image.DoesNotExist:
pass
if __name__ == "__main__":
logging.basicConfig(level=logging.DEBUG)
logging.getLogger('peewee').setLevel(logging.CRITICAL)
backfill_image_fields()


@ -1,82 +0,0 @@
import logging
from data.database import BaseModel
from peewee import (fn, CharField, BigIntegerField, ForeignKeyField, BooleanField, DateTimeField,
TextField, IntegerField)
from app import app
from util.migrate.allocator import yield_random_entries
BATCH_SIZE = 1000
class Repository(BaseModel):
pass
# Vendor the information from tables we will be writing to at the time of this migration
class ImageStorage(BaseModel):
uuid = CharField(index=True, unique=True)
checksum = CharField(null=True)
image_size = BigIntegerField(null=True)
uncompressed_size = BigIntegerField(null=True)
uploading = BooleanField(default=True, null=True)
cas_path = BooleanField(default=True)
content_checksum = CharField(null=True, index=True)
class Image(BaseModel):
docker_image_id = CharField(index=True)
repository = ForeignKeyField(Repository)
ancestors = CharField(index=True, default='/', max_length=64535, null=True)
storage = ForeignKeyField(ImageStorage, index=True, null=True)
created = DateTimeField(null=True)
comment = TextField(null=True)
command = TextField(null=True)
aggregate_size = BigIntegerField(null=True)
v1_json_metadata = TextField(null=True)
v1_checksum = CharField(null=True)
security_indexed = BooleanField(default=False)
security_indexed_engine = IntegerField(default=-1)
parent_id = IntegerField(index=True, null=True)
logger = logging.getLogger(__name__)
def backfill_parent_id():
logger.setLevel(logging.DEBUG)
logger.debug('backfill_parent_id: Starting')
logger.debug('backfill_parent_id: This can be a LONG RUNNING OPERATION. Please wait!')
def fetch_batch():
return (Image
.select(Image.id, Image.ancestors)
.join(ImageStorage)
.where(Image.parent_id >> None, Image.ancestors != '/',
ImageStorage.uploading == False))
max_id = Image.select(fn.Max(Image.id)).scalar()
written = 0
for to_backfill, abort in yield_random_entries(fetch_batch, Image.id, BATCH_SIZE, max_id):
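    # ancestors has the form '/1/2/6/7/10/': split('/') yields ['', '1', ..., '10', ''], so index -2 is the immediate parent's id.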
computed_parent = int(to_backfill.ancestors.split('/')[-2])
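    # The update repeats the is-NULL check: if another worker already wrote parent_id, zero rows change and the batch is aborted below.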
num_changed = (Image
.update(parent_id=computed_parent)
.where(Image.id == to_backfill.id, Image.parent_id >> None)).execute()
if num_changed == 0:
logger.info('Collision with another worker, aborting batch')
abort.set()
written += num_changed
if (written % BATCH_SIZE) == 0:
logger.debug('%s entries written', written)
logger.debug('backfill_parent_id: Completed, updated %s entries', written)
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
logging.getLogger('peewee').setLevel(logging.CRITICAL)
backfill_parent_id()


@ -1,54 +0,0 @@
import logging
import uuid
from data.database import User, db
from app import app
LOGGER = logging.getLogger(__name__)
def backfill_user_uuids():
""" Generates UUIDs for any Users without them. """
LOGGER.setLevel(logging.DEBUG)
LOGGER.debug('User UUID Backfill: Began execution')
# Check to see if any users are missing uuids.
has_missing_uuids = True
try:
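    # A bare .get() on the filtered query is a cheap existence probe: it raises User.DoesNotExist when no user is missing a uuid.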
User.select(User.id).where(User.uuid >> None).get()
except User.DoesNotExist:
has_missing_uuids = False
if not has_missing_uuids:
LOGGER.debug('User UUID Backfill: No migration needed')
return
LOGGER.debug('User UUID Backfill: Starting migration')
while True:
batch_user_ids = list(User
.select(User.id)
.where(User.uuid >> None)
.limit(100))
if len(batch_user_ids) == 0:
# There are no users left to backfill. We're done!
LOGGER.debug('User UUID Backfill: Backfill completed')
return
LOGGER.debug('User UUID Backfill: Found %s records to update', len(batch_user_ids))
for user_id in batch_user_ids:
with app.config['DB_TRANSACTION_FACTORY'](db):
try:
user = User.select(User.id, User.uuid).where(User.id == user_id).get()
user.uuid = str(uuid.uuid4())
user.save(only=[User.uuid])
except User.DoesNotExist:
pass
if __name__ == "__main__":
logging.basicConfig(level=logging.DEBUG)
logging.getLogger('boto').setLevel(logging.CRITICAL)
logging.getLogger('peewee').setLevel(logging.CRITICAL)
backfill_user_uuids()


@ -1,75 +0,0 @@
import logging
from peewee import (CharField, BigIntegerField, BooleanField, ForeignKeyField, DateTimeField,
TextField, fn)
from data.database import BaseModel
from util.migrate.allocator import yield_random_entries
from app import app
BATCH_SIZE = 1000
logger = logging.getLogger(__name__)
class Repository(BaseModel):
pass
# Vendor the information from tables we will be writing to at the time of this migration
class ImageStorage(BaseModel):
uuid = CharField(index=True, unique=True)
checksum = CharField(null=True)
image_size = BigIntegerField(null=True)
uncompressed_size = BigIntegerField(null=True)
uploading = BooleanField(default=True, null=True)
cas_path = BooleanField(default=True)
content_checksum = CharField(null=True, index=True)
class Image(BaseModel):
docker_image_id = CharField(index=True)
repository = ForeignKeyField(Repository)
ancestors = CharField(index=True, default='/', max_length=64535, null=True)
storage = ForeignKeyField(ImageStorage, index=True, null=True)
created = DateTimeField(null=True)
comment = TextField(null=True)
command = TextField(null=True)
aggregate_size = BigIntegerField(null=True)
v1_json_metadata = TextField(null=True)
v1_checksum = CharField(null=True)
def backfill_checksums():
""" Copies checksums from image storages to their images. """
logger.debug('Began execution')
logger.debug('This may be a long operation!')
def batch_query():
return (Image
.select(Image, ImageStorage)
.join(ImageStorage)
.where(Image.v1_checksum >> None, ImageStorage.uploading == False,
~(ImageStorage.checksum >> None)))
max_id = Image.select(fn.Max(Image.id)).scalar()
written = 0
for candidate_image, abort in yield_random_entries(batch_query, Image.id, BATCH_SIZE, max_id):
num_changed = (Image
.update(v1_checksum=candidate_image.storage.checksum)
.where(Image.id == candidate_image.id, Image.v1_checksum >> None)).execute()
if num_changed == 0:
logger.info('Collision with another worker, aborting batch')
abort.set()
written += num_changed
if (written % BATCH_SIZE) == 0:
logger.debug('%s entries written', written)
logger.debug('Completed, updated %s entries', written)
if __name__ == "__main__":
logging.basicConfig(level=logging.DEBUG)
logging.getLogger('peewee').setLevel(logging.CRITICAL)
backfill_checksums()


@ -1,117 +0,0 @@
import logging
from peewee import JOIN_LEFT_OUTER
from peewee import (CharField, BigIntegerField, BooleanField, ForeignKeyField, DateTimeField,
TextField)
from data.database import BaseModel, db, db_for_update
from app import app, storage
from data import model
logger = logging.getLogger(__name__)
class Repository(BaseModel):
pass
# Vendor the information from tables we will be writing to at the time of this migration
class ImageStorage(BaseModel):
uuid = CharField(index=True, unique=True)
checksum = CharField(null=True)
image_size = BigIntegerField(null=True)
uncompressed_size = BigIntegerField(null=True)
uploading = BooleanField(default=True, null=True)
class Image(BaseModel):
# This class is intentionally denormalized. Even though images are supposed
# to be globally unique we can't treat them as such for permissions and
# security reasons. So rather than Repository <-> Image being many to many
# each image now belongs to exactly one repository.
docker_image_id = CharField(index=True)
repository = ForeignKeyField(Repository)
  # '/' separated list of ancestry ids, e.g. /1/2/6/7/10/
ancestors = CharField(index=True, default='/', max_length=64535, null=True)
storage = ForeignKeyField(ImageStorage, index=True, null=True)
created = DateTimeField(null=True)
comment = TextField(null=True)
command = TextField(null=True)
aggregate_size = BigIntegerField(null=True)
v1_json_metadata = TextField(null=True)
class ImageStorageLocation(BaseModel):
name = CharField(unique=True, index=True)
class ImageStoragePlacement(BaseModel):
storage = ForeignKeyField(ImageStorage)
location = ForeignKeyField(ImageStorageLocation)
def image_json_path(storage_uuid):
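  # The legacy v1 layout stores the JSON metadata blob beside the layer data, at '<image path>json'.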
base_path = storage._image_path(storage_uuid)
return '{0}json'.format(base_path)
def backfill_v1_metadata():
""" Copies metadata from image storages to their images. """
logger.debug('Image v1 metadata backfill: Began execution')
while True:
batch_image_ids = list(Image
.select(Image.id)
.join(ImageStorage)
.where(Image.v1_json_metadata >> None, ImageStorage.uploading == False)
.limit(100))
if len(batch_image_ids) == 0:
logger.debug('Image v1 metadata backfill: Backfill completed')
return
logger.debug('Image v1 metadata backfill: Found %s records to update', len(batch_image_ids))
for one_id in batch_image_ids:
with app.config['DB_TRANSACTION_FACTORY'](db):
try:
logger.debug('Loading image: %s', one_id.id)
raw_query = (ImageStoragePlacement
.select(ImageStoragePlacement, Image, ImageStorage, ImageStorageLocation)
.join(ImageStorageLocation)
.switch(ImageStoragePlacement)
.join(ImageStorage, JOIN_LEFT_OUTER)
.join(Image)
.where(Image.id == one_id.id))
placement_query = db_for_update(raw_query)
repo_image_list = model.image.invert_placement_query_results(placement_query)
if len(repo_image_list) > 1:
            logger.error('Found more images than requested; something is wrong with the query')
return
repo_image = repo_image_list[0]
uuid = repo_image.storage.uuid
json_path = image_json_path(uuid)
logger.debug('Updating image: %s from: %s', repo_image.id, json_path)
try:
data = storage.get_content(repo_image.storage.locations, json_path)
except IOError:
data = "{}"
            logger.warning('Failed to find v1 metadata, defaulting to {}')
repo_image.v1_json_metadata = data
repo_image.save()
except ImageStoragePlacement.DoesNotExist:
pass
if __name__ == "__main__":
logging.basicConfig(level=logging.DEBUG)
# logging.getLogger('peewee').setLevel(logging.CRITICAL)
backfill_v1_metadata()

Some files were not shown because too many files have changed in this diff.