diff --git a/data/database.py b/data/database.py
index 7fda6738b..acc8f3fe7 100644
--- a/data/database.py
+++ b/data/database.py
@@ -806,7 +806,9 @@ class LogEntry(BaseModel):
 class Messages(BaseModel):
   content = TextField()
-  uuid = CharField(default=uuid_generator, index=True)
+
+  # TODO: This should be non-nullable and indexed
+  uuid = CharField(default=uuid_generator, max_length=36, null=True)
 
 
 class RepositoryActionCount(BaseModel):
   repository = ForeignKeyField(Repository)
diff --git a/data/migrations/versions/0f17d94d11eb_add_take_ownership_log_entry_kind.py b/data/migrations/versions/0f17d94d11eb_add_take_ownership_log_entry_kind.py
deleted file mode 100644
index 5293c756a..000000000
--- a/data/migrations/versions/0f17d94d11eb_add_take_ownership_log_entry_kind.py
+++ /dev/null
@@ -1,26 +0,0 @@
-"""Add take_ownership log entry kind
-
-Revision ID: 0f17d94d11eb
-Revises: a3ba52d02dec
-Create Date: 2016-06-07 17:22:20.438873
-
-"""
-
-# revision identifiers, used by Alembic.
-revision = '0f17d94d11eb'
-down_revision = 'a3ba52d02dec'
-
-from alembic import op
-
-def upgrade(tables):
-    op.bulk_insert(tables.logentrykind,
-        [
-            {'name':'take_ownership'},
-        ])
-
-
-def downgrade(tables):
-    op.execute(
-        (tables.logentrykind.delete()
-         .where(tables.logentrykind.c.name == op.inline_literal('take_ownership')))
-    )
diff --git a/data/migrations/versions/1093d8b212bb_add_uniqueness_hash_column_for_derived_.py b/data/migrations/versions/1093d8b212bb_add_uniqueness_hash_column_for_derived_.py
deleted file mode 100644
index cc2c70e2d..000000000
--- a/data/migrations/versions/1093d8b212bb_add_uniqueness_hash_column_for_derived_.py
+++ /dev/null
@@ -1,29 +0,0 @@
-"""Add uniqueness hash column for derived image storage
-
-Revision ID: 1093d8b212bb
-Revises: 0f17d94d11eb
-Create Date: 2016-06-06 15:27:21.735669
-
-"""
-
-# revision identifiers, used by Alembic.
-revision = '1093d8b212bb'
-down_revision = '0f17d94d11eb'
-
-from alembic import op
-import sqlalchemy as sa
-
-def upgrade(tables):
-    ### commands auto generated by Alembic - please adjust! ###
-    op.drop_index('derivedstorageforimage_source_image_id_transformation_id', table_name='derivedstorageforimage')
-    op.add_column('derivedstorageforimage', sa.Column('uniqueness_hash', sa.String(length=255), nullable=True))
-    op.create_index('uniqueness_index', 'derivedstorageforimage', ['source_image_id', 'transformation_id', 'uniqueness_hash'], unique=True)
-    ### end Alembic commands ###
-
-
-def downgrade(tables):
-    ### commands auto generated by Alembic - please adjust! ###
-    op.drop_index('uniqueness_index', table_name='derivedstorageforimage')
-    op.drop_column('derivedstorageforimage', 'uniqueness_hash')
-    op.create_index('derivedstorageforimage_source_image_id_transformation_id', 'derivedstorageforimage', ['source_image_id', 'transformation_id'], unique=True)
-    ### end Alembic commands ###
diff --git a/data/migrations/versions/10b999e8db1f_fix_sequences_in_postgres.py b/data/migrations/versions/10b999e8db1f_fix_sequences_in_postgres.py
deleted file mode 100644
index b28ab18b1..000000000
--- a/data/migrations/versions/10b999e8db1f_fix_sequences_in_postgres.py
+++ /dev/null
@@ -1,24 +0,0 @@
-"""Fix sequences in postgres
-
-Revision ID: 10b999e8db1f
-Revises: 22af01f81722
-Create Date: 2015-11-16 14:00:05.383227
-
-"""
-
-# revision identifiers, used by Alembic.
-revision = '10b999e8db1f'
-down_revision = '1849ca8199fc'
-
-from alembic import op
-import sqlalchemy as sa
-
-import uuid
-from peewee import CharField, IntegrityError
-from util.migrate.fixsequences import reset_enum_sequences
-
-def upgrade(tables):
-    reset_enum_sequences()
-
-def downgrade(tables):
-    pass
diff --git a/data/migrations/versions/127905a52fdd_remove_the_deprecated_imagestorage_.py b/data/migrations/versions/127905a52fdd_remove_the_deprecated_imagestorage_.py
deleted file mode 100644
index 6ab6d79b7..000000000
--- a/data/migrations/versions/127905a52fdd_remove_the_deprecated_imagestorage_.py
+++ /dev/null
@@ -1,31 +0,0 @@
-"""Remove the deprecated imagestorage columns.
-
-Revision ID: 127905a52fdd
-Revises: 2e0380215d01
-Create Date: 2015-09-17 15:48:56.667823
-
-"""
-
-# revision identifiers, used by Alembic.
-revision = '127905a52fdd'
-down_revision = '2e0380215d01'
-
-from alembic import op
-import sqlalchemy as sa
-
-def upgrade(tables):
-    ### commands auto generated by Alembic - please adjust! ###
-    op.drop_column('imagestorage', 'comment')
-    op.drop_column('imagestorage', 'aggregate_size')
-    op.drop_column('imagestorage', 'command')
-    op.drop_column('imagestorage', 'created')
-    ### end Alembic commands ###
-
-
-def downgrade(tables):
-    ### commands auto generated by Alembic - please adjust! ###
-    op.add_column('imagestorage', sa.Column('created', sa.DateTime(), nullable=True))
-    op.add_column('imagestorage', sa.Column('command', sa.Text(), nullable=True))
-    op.add_column('imagestorage', sa.Column('aggregate_size', sa.BigInteger(), nullable=True))
-    op.add_column('imagestorage', sa.Column('comment', sa.Text(), nullable=True))
-    ### end Alembic commands ###
diff --git a/data/migrations/versions/13da56878560_migrate_registry_namespaces_to_.py b/data/migrations/versions/13da56878560_migrate_registry_namespaces_to_.py
deleted file mode 100644
index 30ac75c96..000000000
--- a/data/migrations/versions/13da56878560_migrate_registry_namespaces_to_.py
+++ /dev/null
@@ -1,24 +0,0 @@
-"""Migrate registry namespaces to reference a user.
-
-Revision ID: 13da56878560
-Revises: 51d04d0e7e6f
-Create Date: 2014-09-18 13:56:45.130455
-
-"""
-
-# revision identifiers, used by Alembic.
-revision = '13da56878560'
-down_revision = '51d04d0e7e6f'
-
-from alembic import op
-import sqlalchemy as sa
-
-from data.database import Repository, User
-
-def upgrade(tables):
-    # Add the namespace_user column, allowing it to be nullable
-    op.add_column('repository', sa.Column('namespace_user_id', sa.Integer(), sa.ForeignKey('user.id')))
-
-
-def downgrade(tables):
-    op.drop_column('repository', 'namespace_user_id')
diff --git a/data/migrations/versions/14fe12ade3df_add_build_queue_item_reference_to_the_.py b/data/migrations/versions/14fe12ade3df_add_build_queue_item_reference_to_the_.py
deleted file mode 100644
index 561a32dca..000000000
--- a/data/migrations/versions/14fe12ade3df_add_build_queue_item_reference_to_the_.py
+++ /dev/null
@@ -1,29 +0,0 @@
-"""Add build queue item reference to the repositorybuild table
-
-Revision ID: 14fe12ade3df
-Revises: 5ad999136045
-Create Date: 2015-02-12 16:11:57.814645
-
-"""
-
-# revision identifiers, used by Alembic.
-revision = '14fe12ade3df'
-down_revision = '5ad999136045'
-
-from alembic import op
-import sqlalchemy as sa
-
-def upgrade(tables):
-    ### commands auto generated by Alembic - please adjust! ###
-    op.add_column('repositorybuild', sa.Column('queue_item_id', sa.Integer(), nullable=True))
-    op.create_index('repositorybuild_queue_item_id', 'repositorybuild', ['queue_item_id'], unique=False)
-    op.create_foreign_key(op.f('fk_repositorybuild_queue_item_id_queueitem'), 'repositorybuild', 'queueitem', ['queue_item_id'], ['id'])
-    ### end Alembic commands ###
-
-
-def downgrade(tables):
-    ### commands auto generated by Alembic - please adjust! ###
-    op.drop_constraint(op.f('fk_repositorybuild_queue_item_id_queueitem'), 'repositorybuild', type_='foreignkey')
-    op.drop_index('repositorybuild_queue_item_id', table_name='repositorybuild')
-    op.drop_column('repositorybuild', 'queue_item_id')
-    ### end Alembic commands ###
diff --git a/data/migrations/versions/154f2befdfbe_add_enabled_column_to_the_user_system.py b/data/migrations/versions/154f2befdfbe_add_enabled_column_to_the_user_system.py
deleted file mode 100644
index 9e6532197..000000000
--- a/data/migrations/versions/154f2befdfbe_add_enabled_column_to_the_user_system.py
+++ /dev/null
@@ -1,26 +0,0 @@
-"""Add enabled column to the user system
-
-Revision ID: 154f2befdfbe
-Revises: 41f4587c84ae
-Create Date: 2015-05-11 17:02:43.507847
-
-"""
-
-# revision identifiers, used by Alembic.
-revision = '154f2befdfbe'
-down_revision = '41f4587c84ae'
-
-from alembic import op
-import sqlalchemy as sa
-
-
-def upgrade(tables):
-    ### commands auto generated by Alembic - please adjust! ###
-    op.add_column('user', sa.Column('enabled', sa.Boolean(), nullable=False, default=True, server_default="1"))
-    ### end Alembic commands ###
-
-
-def downgrade(tables):
-    ### commands auto generated by Alembic - please adjust! ###
-    op.drop_column('user', 'enabled')
-    ### end Alembic commands ###
diff --git a/data/migrations/versions/1594a74a74ca_add_metadata_field_to_external_logins.py b/data/migrations/versions/1594a74a74ca_add_metadata_field_to_external_logins.py
deleted file mode 100644
index 324e38c36..000000000
--- a/data/migrations/versions/1594a74a74ca_add_metadata_field_to_external_logins.py
+++ /dev/null
@@ -1,34 +0,0 @@
-"""add metadata field to external logins
-
-Revision ID: 1594a74a74ca
-Revises: f42b0ea7a4d
-Create Date: 2014-09-04 18:17:35.205698
-
-"""
-
-# revision identifiers, used by Alembic.
-revision = '1594a74a74ca'
-down_revision = 'f42b0ea7a4d'
-
-from alembic import op
-import sqlalchemy as sa
-
-def upgrade(tables):
-    ### commands auto generated by Alembic - please adjust! ###
-    op.add_column('federatedlogin', sa.Column('metadata_json', sa.Text(), nullable=False))
-    ### end Alembic commands ###
-
-    op.bulk_insert(tables.loginservice,
-        [
-            {'name':'google'},
-        ])
-
-def downgrade(tables):
-    ### commands auto generated by Alembic - please adjust! ###
-    op.drop_column('federatedlogin', 'metadata_json')
-    ### end Alembic commands ###
-
-    op.execute(
-        (tables.loginservice.delete()
-         .where(tables.loginservice.c.name == op.inline_literal('google')))
-    )
diff --git a/data/migrations/versions/17f11e265e13_add_uuid_field_to_user.py b/data/migrations/versions/17f11e265e13_add_uuid_field_to_user.py
deleted file mode 100644
index 3bf692fe6..000000000
--- a/data/migrations/versions/17f11e265e13_add_uuid_field_to_user.py
+++ /dev/null
@@ -1,22 +0,0 @@
-"""add uuid field to user
-
-Revision ID: 17f11e265e13
-Revises: 313d297811c4
-Create Date: 2014-11-11 14:32:54.866188
-
-"""
-
-# revision identifiers, used by Alembic.
-revision = '17f11e265e13'
-down_revision = '313d297811c4'
-
-from alembic import op
-import sqlalchemy as sa
-
-
-def upgrade(tables):
-    op.add_column('user', sa.Column('uuid', sa.String(length=36), nullable=True))
-
-
-def downgrade(tables):
-    op.drop_column('user', 'uuid')
diff --git a/data/migrations/versions/1849ca8199fc_remove_derivedimagestorage_table.py b/data/migrations/versions/1849ca8199fc_remove_derivedimagestorage_table.py
deleted file mode 100644
index 2594e5368..000000000
--- a/data/migrations/versions/1849ca8199fc_remove_derivedimagestorage_table.py
+++ /dev/null
@@ -1,37 +0,0 @@
-"""Remove DerivedImageStorage table
-
-Revision ID: 1849ca8199fc
-Revises: 5a2445ffe21b
-Create Date: 2015-11-25 11:45:32.928533
-
-"""
-
-# revision identifiers, used by Alembic.
-revision = '1849ca8199fc'
-down_revision = '5a2445ffe21b'
-
-from alembic import op
-import sqlalchemy as sa
-from sqlalchemy.dialects import mysql
-
-def upgrade(tables):
-    ### commands auto generated by Alembic - please adjust! ###
-    op.drop_table('derivedimagestorage')
-    ### end Alembic commands ###
-
-
-def downgrade(tables):
-    ### commands auto generated by Alembic - please adjust! ###
-    op.create_table('derivedimagestorage',
-        sa.Column('id', mysql.INTEGER(display_width=11), nullable=False),
-        sa.Column('source_id', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True),
-        sa.Column('derivative_id', mysql.INTEGER(display_width=11), autoincrement=False, nullable=False),
-        sa.Column('transformation_id', mysql.INTEGER(display_width=11), autoincrement=False, nullable=False),
-        sa.ForeignKeyConstraint(['derivative_id'], [u'imagestorage.id'], name=u'fk_derivedimagestorage_derivative_id_imagestorage'),
-        sa.ForeignKeyConstraint(['source_id'], [u'imagestorage.id'], name=u'fk_derivedimagestorage_source_id_imagestorage'),
-        sa.ForeignKeyConstraint(['transformation_id'], [u'imagestoragetransformation.id'], name=u'fk_dis_transformation_id_ist'),
-        sa.PrimaryKeyConstraint('id'),
-        mysql_default_charset=u'latin1',
-        mysql_engine=u'InnoDB'
-    )
-    ### end Alembic commands ###
diff --git a/data/migrations/versions/1b2bb93ceb82_add_indices_for_security_worker_queries.py b/data/migrations/versions/1b2bb93ceb82_add_indices_for_security_worker_queries.py
deleted file mode 100644
index 7dd464384..000000000
--- a/data/migrations/versions/1b2bb93ceb82_add_indices_for_security_worker_queries.py
+++ /dev/null
@@ -1,26 +0,0 @@
-"""Add indices for security worker queries.
-
-Revision ID: 1b2bb93ceb82
-Revises: 22af01f81722
-Create Date: 2015-11-18 13:27:41.161898
-
-"""
-
-# revision identifiers, used by Alembic.
-revision = '1b2bb93ceb82'
-down_revision = '22af01f81722'
-
-from alembic import op
-
-def upgrade(tables):
-    ### commands auto generated by Alembic - please adjust! ###
-    op.create_index('image_security_indexed', 'image', ['security_indexed'], unique=False)
-    op.create_index('image_security_indexed_engine', 'image', ['security_indexed_engine'], unique=False)
-    ### end Alembic commands ###
-
-
-def downgrade(tables):
-    ### commands auto generated by Alembic - please adjust! ###
-    op.drop_index('image_security_indexed_engine', table_name='image')
-    op.drop_index('image_security_indexed', table_name='image')
-    ### end Alembic commands ###
diff --git a/data/migrations/versions/1c0f6ede8992_quay_releases.py b/data/migrations/versions/1c0f6ede8992_quay_releases.py
deleted file mode 100644
index 92583881d..000000000
--- a/data/migrations/versions/1c0f6ede8992_quay_releases.py
+++ /dev/null
@@ -1,55 +0,0 @@
-"""Quay releases
-
-Revision ID: 1c0f6ede8992
-Revises: 545794454f49
-Create Date: 2015-09-15 15:46:09.784607
-
-"""
-
-# revision identifiers, used by Alembic.
-revision = '1c0f6ede8992'
-down_revision = '545794454f49'
-
-from alembic import op
-import sqlalchemy as sa
-
-
-def upgrade(tables):
-    ### commands auto generated by Alembic - please adjust! ###
-    op.create_table('quayregion',
-        sa.Column('id', sa.Integer(), nullable=False),
-        sa.Column('name', sa.String(length=255), nullable=False),
-        sa.PrimaryKeyConstraint('id', name=op.f('pk_quayregion'))
-    )
-    op.create_index('quayregion_name', 'quayregion', ['name'], unique=True)
-    op.create_table('quayservice',
-        sa.Column('id', sa.Integer(), nullable=False),
-        sa.Column('name', sa.String(length=255), nullable=False),
-        sa.PrimaryKeyConstraint('id', name=op.f('pk_quayservice'))
-    )
-    op.create_index('quayservice_name', 'quayservice', ['name'], unique=True)
-    op.create_table('quayrelease',
-        sa.Column('id', sa.Integer(), nullable=False),
-        sa.Column('service_id', sa.Integer(), nullable=False),
-        sa.Column('version', sa.String(length=255), nullable=False),
-        sa.Column('region_id', sa.Integer(), nullable=False),
-        sa.Column('reverted', sa.Boolean(), nullable=False),
-        sa.Column('created', sa.DateTime(), nullable=False),
-        sa.ForeignKeyConstraint(['region_id'], ['quayregion.id'], name=op.f('fk_quayrelease_region_id_quayregion')),
-        sa.ForeignKeyConstraint(['service_id'], ['quayservice.id'], name=op.f('fk_quayrelease_service_id_quayservice')),
-        sa.PrimaryKeyConstraint('id', name=op.f('pk_quayrelease'))
-    )
-    op.create_index('quayrelease_created', 'quayrelease', ['created'], unique=False)
-    op.create_index('quayrelease_region_id', 'quayrelease', ['region_id'], unique=False)
-    op.create_index('quayrelease_service_id', 'quayrelease', ['service_id'], unique=False)
-    op.create_index('quayrelease_service_id_region_id_created', 'quayrelease', ['service_id', 'region_id', 'created'], unique=False)
-    op.create_index('quayrelease_service_id_version_region_id', 'quayrelease', ['service_id', 'version', 'region_id'], unique=True)
-    ### end Alembic commands ###
-
-
-def downgrade(tables):
-    ### commands auto generated by Alembic - please adjust! ###
-    op.drop_table('quayrelease')
-    op.drop_table('quayservice')
-    op.drop_table('quayregion')
-    ### end Alembic commands ###
diff --git a/data/migrations/versions/1c3decf6b9c4_add_revert_tag_log_entry_kind.py b/data/migrations/versions/1c3decf6b9c4_add_revert_tag_log_entry_kind.py
deleted file mode 100644
index 8965dfd06..000000000
--- a/data/migrations/versions/1c3decf6b9c4_add_revert_tag_log_entry_kind.py
+++ /dev/null
@@ -1,29 +0,0 @@
-"""Add revert_tag log entry kind
-
-Revision ID: 1c3decf6b9c4
-Revises: 4ce2169efd3b
-Create Date: 2015-04-16 17:14:11.154856
-
-"""
-
-# revision identifiers, used by Alembic.
-revision = '1c3decf6b9c4'
-down_revision = '4ce2169efd3b'
-
-from alembic import op
-import sqlalchemy as sa
-
-
-def upgrade(tables):
-    op.bulk_insert(tables.logentrykind,
-        [
-            {'name':'revert_tag'},
-        ])
-
-
-def downgrade(tables):
-    op.execute(
-        (tables.logentrykind.delete()
-         .where(tables.logentrykind.c.name == op.inline_literal('revert_tag')))
-
-    )
diff --git a/data/migrations/versions/1c5b738283a5_backfill_user_uuids.py b/data/migrations/versions/1c5b738283a5_backfill_user_uuids.py
deleted file mode 100644
index baa78465b..000000000
--- a/data/migrations/versions/1c5b738283a5_backfill_user_uuids.py
+++ /dev/null
@@ -1,22 +0,0 @@
-"""backfill user uuids
-
-Revision ID: 1c5b738283a5
-Revises: 2fb36d4be80d
-Create Date: 2014-11-20 18:22:03.418215
-
-"""
-
-# revision identifiers, used by Alembic.
-revision = '1c5b738283a5'
-down_revision = '2fb36d4be80d'
-
-from alembic import op
-import sqlalchemy as sa
-from util.migrate.backfill_user_uuids import backfill_user_uuids
-
-def upgrade(tables):
-    backfill_user_uuids()
-
-
-def downgrade(tables):
-    pass
diff --git a/data/migrations/versions/1d2d86d09fcd_actually_remove_the_column.py b/data/migrations/versions/1d2d86d09fcd_actually_remove_the_column.py
deleted file mode 100644
index 460296f17..000000000
--- a/data/migrations/versions/1d2d86d09fcd_actually_remove_the_column.py
+++ /dev/null
@@ -1,36 +0,0 @@
-"""Actually remove the column access_token_id
-
-Revision ID: 1d2d86d09fcd
-Revises: 14fe12ade3df
-Create Date: 2015-02-12 16:27:30.260797
-
-"""
-
-# revision identifiers, used by Alembic.
-revision = '1d2d86d09fcd'
-down_revision = '14fe12ade3df'
-
-from alembic import op
-import sqlalchemy as sa
-from sqlalchemy.exc import InternalError
-
-def upgrade(tables):
-    ### commands auto generated by Alembic - please adjust! ###
-    try:
-        op.drop_constraint(u'fk_logentry_access_token_id_accesstoken', 'logentry', type_='foreignkey')
-        op.drop_index('logentry_access_token_id', table_name='logentry')
-        op.drop_column('logentry', 'access_token_id')
-    except InternalError:
-        pass
-    ### end Alembic commands ###
-
-
-def downgrade(tables):
-    ### commands auto generated by Alembic - please adjust! ###
-    try:
-        op.add_column('logentry', sa.Column('access_token_id', sa.Integer(), nullable=True))
-        op.create_foreign_key(u'fk_logentry_access_token_id_accesstoken', 'logentry', 'accesstoken', ['access_token_id'], ['id'])
-        op.create_index('logentry_access_token_id', 'logentry', ['access_token_id'], unique=False)
-    except InternalError:
-        pass
-    ### end Alembic commands ###
diff --git a/data/migrations/versions/1f116e06b68_add_gitlab_trigger_type.py b/data/migrations/versions/1f116e06b68_add_gitlab_trigger_type.py
deleted file mode 100644
index ecee9ab16..000000000
--- a/data/migrations/versions/1f116e06b68_add_gitlab_trigger_type.py
+++ /dev/null
@@ -1,25 +0,0 @@
-"""Add gitlab trigger type
-
-Revision ID: 1f116e06b68
-Revises: 313179799c8b
-Create Date: 2015-05-03 10:45:06.257913
-
-"""
-
-# revision identifiers, used by Alembic.
-revision = '1f116e06b68'
-down_revision = '313179799c8b'
-
-from alembic import op
-import sqlalchemy as sa
-
-
-def upgrade(tables):
-    op.bulk_insert(tables.buildtriggerservice, [{'name': 'gitlab'}])
-
-
-def downgrade(tables):
-    op.execute(
-        tables.buildtriggerservice.delete()
-        .where(tables.buildtriggerservice.c.name == op.inline_literal('gitlab'))
-    )
diff --git a/data/migrations/versions/201d55b38649_remove_fields_from_image_table_that_.py b/data/migrations/versions/201d55b38649_remove_fields_from_image_table_that_.py
deleted file mode 100644
index 02a119074..000000000
--- a/data/migrations/versions/201d55b38649_remove_fields_from_image_table_that_.py
+++ /dev/null
@@ -1,53 +0,0 @@
-"""Remove fields from image table that were migrated to imagestorage.
-
-Revision ID: 201d55b38649
-Revises: 5a07499ce53f
-Create Date: 2014-06-12 19:48:53.861115
-
-"""
-
-# revision identifiers, used by Alembic.
-revision = '201d55b38649'
-down_revision = '5a07499ce53f'
-
-from alembic import op
-import sqlalchemy as sa
-
-def upgrade(tables):
-    ### commands auto generated by Alembic - please adjust! ###
-    op.drop_index('buildtriggerservice_name', table_name='buildtriggerservice')
-    op.create_index('buildtriggerservice_name', 'buildtriggerservice', ['name'], unique=True)
-    op.drop_column('image', 'comment')
-    op.drop_column('image', 'checksum')
-    op.drop_column('image', 'image_size')
-    op.drop_column('image', 'command')
-    op.drop_column('image', 'created')
-    op.drop_index('logentrykind_name', table_name='logentrykind')
-    op.create_index('logentrykind_name', 'logentrykind', ['name'], unique=True)
-    op.drop_index('notificationkind_name', table_name='notificationkind')
-    op.create_index('notificationkind_name', 'notificationkind', ['name'], unique=True)
-    op.drop_index('role_name', table_name='role')
-    op.create_index('role_name', 'role', ['name'], unique=True)
-    op.drop_index('visibility_name', table_name='visibility')
-    op.create_index('visibility_name', 'visibility', ['name'], unique=True)
-    ### end Alembic commands ###
-
-
-def downgrade(tables):
-    ### commands auto generated by Alembic - please adjust! ###
-    op.drop_index('visibility_name', table_name='visibility')
-    op.create_index('visibility_name', 'visibility', ['name'], unique=False)
-    op.drop_index('role_name', table_name='role')
-    op.create_index('role_name', 'role', ['name'], unique=False)
-    op.drop_index('notificationkind_name', table_name='notificationkind')
-    op.create_index('notificationkind_name', 'notificationkind', ['name'], unique=False)
-    op.drop_index('logentrykind_name', table_name='logentrykind')
-    op.create_index('logentrykind_name', 'logentrykind', ['name'], unique=False)
-    op.add_column('image', sa.Column('created', sa.DateTime(), nullable=True))
-    op.add_column('image', sa.Column('command', sa.Text(), nullable=True))
-    op.add_column('image', sa.Column('image_size', sa.BigInteger(), nullable=True))
-    op.add_column('image', sa.Column('checksum', sa.String(length=255), nullable=True))
-    op.add_column('image', sa.Column('comment', sa.Text(), nullable=True))
-    op.drop_index('buildtriggerservice_name', table_name='buildtriggerservice')
-    op.create_index('buildtriggerservice_name', 'buildtriggerservice', ['name'], unique=False)
-    ### end Alembic commands ###
diff --git a/data/migrations/versions/204abf14783d_add_log_entry_kind_for_verbs.py b/data/migrations/versions/204abf14783d_add_log_entry_kind_for_verbs.py
deleted file mode 100644
index 8e9176db8..000000000
--- a/data/migrations/versions/204abf14783d_add_log_entry_kind_for_verbs.py
+++ /dev/null
@@ -1,28 +0,0 @@
-"""Add log entry kind for verbs
-
-Revision ID: 204abf14783d
-Revises: 2430f55c41d5
-Create Date: 2014-10-29 15:38:06.100915
-
-"""
-
-# revision identifiers, used by Alembic.
-revision = '204abf14783d'
-down_revision = '2430f55c41d5'
-
-from alembic import op
-import sqlalchemy as sa
-
-def upgrade(tables):
-    op.bulk_insert(tables.logentrykind,
-        [
-            {'name':'repo_verb'},
-        ])
-
-
-def downgrade(tables):
-    op.execute(
-        (tables.logentrykind.delete()
-         .where(tables.logentrykind.c.name == op.inline_literal('repo_verb')))
-
-    )
diff --git a/data/migrations/versions/2088f2b81010_add_stars.py b/data/migrations/versions/2088f2b81010_add_stars.py
deleted file mode 100644
index af27da83e..000000000
--- a/data/migrations/versions/2088f2b81010_add_stars.py
+++ /dev/null
@@ -1,39 +0,0 @@
-"""add stars
-
-Revision ID: 2088f2b81010
-Revises: 1c5b738283a5
-Create Date: 2014-12-02 17:45:00.707498
-
-"""
-
-# revision identifiers, used by Alembic.
-revision = '2088f2b81010'
-down_revision = '707d5191eda'
-
-from alembic import op
-import sqlalchemy as sa
-
-def upgrade(tables):
-    op.create_table('star',
-        sa.Column('id', sa.Integer(), nullable=False),
-        sa.Column('user_id', sa.Integer(), nullable=False),
-        sa.Column('repository_id', sa.Integer(), nullable=False),
-        sa.Column('created', sa.DateTime(), nullable=False),
-        sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], name=op.f('fk_star_repository_id_repository')),
-        sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_star_user_id_user')),
-        sa.PrimaryKeyConstraint('id', name=op.f('pk_star'))
-    )
-    with op.batch_alter_table('star', schema=None) as batch_op:
-        batch_op.create_index('star_repository_id', ['repository_id'], unique=False)
-        batch_op.create_index('star_user_id', ['user_id'], unique=False)
-        batch_op.create_index('star_user_id_repository_id', ['user_id', 'repository_id'], unique=True)
-
-def downgrade(tables):
-    op.drop_constraint('fk_star_repository_id_repository', 'star', type_='foreignkey')
-    op.drop_constraint('fk_star_user_id_user', 'star', type_='foreignkey')
-    with op.batch_alter_table('star', schema=None) as batch_op:
-        batch_op.drop_index('star_user_id_repository_id')
-        batch_op.drop_index('star_user_id')
-        batch_op.drop_index('star_repository_id')
-
-    op.drop_table('star')
diff --git a/data/migrations/versions/214350b6a8b1_add_private_key_to_build_triggers.py b/data/migrations/versions/214350b6a8b1_add_private_key_to_build_triggers.py
deleted file mode 100644
index dc7e052bc..000000000
--- a/data/migrations/versions/214350b6a8b1_add_private_key_to_build_triggers.py
+++ /dev/null
@@ -1,26 +0,0 @@
-"""add private key to build triggers
-
-Revision ID: 214350b6a8b1
-Revises: 2b2529fd23ff
-Create Date: 2015-03-19 14:23:52.604505
-
-"""
-
-# revision identifiers, used by Alembic.
-revision = '214350b6a8b1'
-down_revision = '67eb43c778b'
-
-from alembic import op
-import sqlalchemy as sa
-
-
-def upgrade(tables):
-    ### commands auto generated by Alembic - please adjust! ###
-    op.add_column('repositorybuildtrigger', sa.Column('private_key', sa.Text(), nullable=True))
-    ### end Alembic commands ###
-
-
-def downgrade(tables):
-    ### commands auto generated by Alembic - please adjust! ###
-    op.drop_column('repositorybuildtrigger', 'private_key')
-    ### end Alembic commands ###
diff --git a/data/migrations/versions/228d1af6af1c_mysql_max_index_lengths.py b/data/migrations/versions/228d1af6af1c_mysql_max_index_lengths.py
deleted file mode 100644
index ed7fdc8be..000000000
--- a/data/migrations/versions/228d1af6af1c_mysql_max_index_lengths.py
+++ /dev/null
@@ -1,24 +0,0 @@
-"""mysql max index lengths
-
-Revision ID: 228d1af6af1c
-Revises: 5b84373e5db
-Create Date: 2015-01-06 14:35:24.651424
-
-"""
-
-# revision identifiers, used by Alembic.
-revision = '228d1af6af1c'
-down_revision = '5b84373e5db'
-
-from alembic import op
-import sqlalchemy as sa
-
-def upgrade(tables):
-    op.drop_index('queueitem_queue_name', table_name='queueitem')
-    op.create_index('queueitem_queue_name', 'queueitem', ['queue_name'], unique=False, mysql_length=767)
-
-    op.drop_index('image_ancestors', table_name='image')
-    op.create_index('image_ancestors', 'image', ['ancestors'], unique=False, mysql_length=767)
-
-def downgrade(tables):
-    pass
diff --git a/data/migrations/versions/22af01f81722_backfill_parent_id_and_v1_checksums.py b/data/migrations/versions/22af01f81722_backfill_parent_id_and_v1_checksums.py
deleted file mode 100644
index 2f6772b66..000000000
--- a/data/migrations/versions/22af01f81722_backfill_parent_id_and_v1_checksums.py
+++ /dev/null
@@ -1,21 +0,0 @@
-"""Backfill parent id and v1 checksums
-
-Revision ID: 22af01f81722
-Revises: 2827d36939e4
-Create Date: 2015-11-05 16:24:43.679323
-
-"""
-
-# revision identifiers, used by Alembic.
-revision = '22af01f81722'
-down_revision = '2827d36939e4'
-
-from util.migrate.backfill_v1_checksums import backfill_checksums
-from util.migrate.backfill_parent_id import backfill_parent_id
-
-def upgrade(tables):
-    backfill_parent_id()
-    backfill_checksums()
-
-def downgrade(tables):
-    pass
diff --git a/data/migrations/versions/23ca04d0bc8e_add_the_torrentinfo_table_and_torrent_.py b/data/migrations/versions/23ca04d0bc8e_add_the_torrentinfo_table_and_torrent_.py
deleted file mode 100644
index 1c5539cec..000000000
--- a/data/migrations/versions/23ca04d0bc8e_add_the_torrentinfo_table_and_torrent_.py
+++ /dev/null
@@ -1,40 +0,0 @@
-"""Add the torrentinfo table and torrent fields on blobuploads.
-
-Revision ID: 23ca04d0bc8e
-Revises: 471caec2cb66
-Create Date: 2016-01-06 13:25:24.597037
-
-"""
-
-# revision identifiers, used by Alembic.
-revision = '23ca04d0bc8e'
-down_revision = '471caec2cb66'
-
-from alembic import op
-import sqlalchemy as sa
-
-from util.migrate import UTF8LongText
-
-def upgrade(tables):
-    ### commands auto generated by Alembic - please adjust! ###
-    op.create_table('torrentinfo',
-        sa.Column('id', sa.Integer(), nullable=False),
-        sa.Column('storage_id', sa.Integer(), nullable=False),
-        sa.Column('piece_length', sa.Integer(), nullable=False),
-        sa.Column('pieces', sa.Text(), nullable=False),
-        sa.ForeignKeyConstraint(['storage_id'], ['imagestorage.id'], name=op.f('fk_torrentinfo_storage_id_imagestorage')),
-        sa.PrimaryKeyConstraint('id', name=op.f('pk_torrentinfo'))
-    )
-    op.create_index('torrentinfo_storage_id', 'torrentinfo', ['storage_id'], unique=False)
-    op.create_index('torrentinfo_storage_id_piece_length', 'torrentinfo', ['storage_id', 'piece_length'], unique=True)
-    op.add_column(u'blobupload', sa.Column('piece_hashes', UTF8LongText(), nullable=True))
-    op.add_column(u'blobupload', sa.Column('piece_sha_state', UTF8LongText(), nullable=True))
-    ### end Alembic commands ###
-
-
-def downgrade(tables):
-    ### commands auto generated by Alembic - please adjust! ###
-    op.drop_column(u'blobupload', 'piece_sha_state')
-    op.drop_column(u'blobupload', 'piece_hashes')
-    op.drop_table('torrentinfo')
-    ### end Alembic commands ###
diff --git a/data/migrations/versions/2430f55c41d5_calculate_uncompressed_sizes_for_all_.py b/data/migrations/versions/2430f55c41d5_calculate_uncompressed_sizes_for_all_.py
deleted file mode 100644
index 20d6fb094..000000000
--- a/data/migrations/versions/2430f55c41d5_calculate_uncompressed_sizes_for_all_.py
+++ /dev/null
@@ -1,22 +0,0 @@
-"""Calculate uncompressed sizes for all images
-
-Revision ID: 2430f55c41d5
-Revises: 3b4d3a4461dc
-Create Date: 2014-10-07 14:50:04.660315
-
-"""
-
-# revision identifiers, used by Alembic.
-revision = '2430f55c41d5'
-down_revision = '3b4d3a4461dc'
-
-from alembic import op
-import sqlalchemy as sa
-from util.migrate.uncompressedsize import backfill_sizes_from_data
-
-
-def upgrade(tables):
-    backfill_sizes_from_data()
-
-def downgrade(tables):
-    pass
diff --git a/data/migrations/versions/246df01a6d51_add_index_to_retries_remaining.py b/data/migrations/versions/246df01a6d51_add_index_to_retries_remaining.py
deleted file mode 100644
index 41488b17b..000000000
--- a/data/migrations/versions/246df01a6d51_add_index_to_retries_remaining.py
+++ /dev/null
@@ -1,26 +0,0 @@
-"""Add index to retries_remaining
-
-Revision ID: 246df01a6d51
-Revises: 5232a5610a0a
-Create Date: 2015-08-04 17:59:42.262877
-
-"""
-
-# revision identifiers, used by Alembic.
-revision = '246df01a6d51'
-down_revision = '5232a5610a0a'
-
-from alembic import op
-import sqlalchemy as sa
-
-
-def upgrade(tables):
-    ### commands auto generated by Alembic - please adjust! ###
-    op.create_index('queueitem_retries_remaining', 'queueitem', ['retries_remaining'], unique=False)
-    ### end Alembic commands ###
-
-
-def downgrade(tables):
-    ### commands auto generated by Alembic - please adjust! ###
-    op.drop_index('queueitem_retries_remaining', table_name='queueitem')
-    ### end Alembic commands ###
diff --git a/data/migrations/versions/2827d36939e4_separate_v1_and_v2_checksums.py b/data/migrations/versions/2827d36939e4_separate_v1_and_v2_checksums.py
deleted file mode 100644
index f3ee69d0e..000000000
--- a/data/migrations/versions/2827d36939e4_separate_v1_and_v2_checksums.py
+++ /dev/null
@@ -1,30 +0,0 @@
-"""Separate v1 and v2 checksums.
-
-Revision ID: 2827d36939e4
-Revises: 73669db7e12
-Create Date: 2015-11-04 16:29:48.905775
-
-"""
-
-# revision identifiers, used by Alembic.
-revision = '2827d36939e4'
-down_revision = '5cdc2d819c5'
-
-from alembic import op
-import sqlalchemy as sa
-
-
-def upgrade(tables):
-    ### commands auto generated by Alembic - please adjust! ###
-    op.add_column('image', sa.Column('v1_checksum', sa.String(length=255), nullable=True))
-    op.add_column('imagestorage', sa.Column('content_checksum', sa.String(length=255), nullable=True))
-    op.create_index('imagestorage_content_checksum', 'imagestorage', ['content_checksum'], unique=False)
-    ### end Alembic commands ###
-
-
-def downgrade(tables):
-    ### commands auto generated by Alembic - please adjust! ###
-    op.drop_index('imagestorage_content_checksum', table_name='imagestorage')
-    op.drop_column('imagestorage', 'content_checksum')
-    op.drop_column('image', 'v1_checksum')
-    ### end Alembic commands ###
diff --git a/data/migrations/versions/2b2529fd23ff_add_aggregate_size_column.py b/data/migrations/versions/2b2529fd23ff_add_aggregate_size_column.py
deleted file mode 100644
index 32a6e7f2f..000000000
--- a/data/migrations/versions/2b2529fd23ff_add_aggregate_size_column.py
+++ /dev/null
@@ -1,26 +0,0 @@
-"""Add aggregate size column
-
-Revision ID: 2b2529fd23ff
-Revises: 2088f2b81010
-Create Date: 2015-03-16 17:36:53.321458
-
-"""
-
-# revision identifiers, used by Alembic.
-revision = '2b2529fd23ff'
-down_revision = '2088f2b81010'
-
-from alembic import op
-import sqlalchemy as sa
-
-
-def upgrade(tables):
-    ### commands auto generated by Alembic - please adjust! ###
-    op.add_column('imagestorage', sa.Column('aggregate_size', sa.BigInteger(), nullable=True))
-    ### end Alembic commands ###
-
-
-def downgrade(tables):
-    ### commands auto generated by Alembic - please adjust! ###
-    op.drop_column('imagestorage', 'aggregate_size')
-    ### end Alembic commands ###
diff --git a/data/migrations/versions/2b4dc0818a5e_add_a_unique_index_to_prevent_deadlocks_.py b/data/migrations/versions/2b4dc0818a5e_add_a_unique_index_to_prevent_deadlocks_.py
deleted file mode 100644
index 8efe0c123..000000000
--- a/data/migrations/versions/2b4dc0818a5e_add_a_unique_index_to_prevent_deadlocks_.py
+++ /dev/null
@@ -1,26 +0,0 @@
-"""Add a unique index to prevent deadlocks with tags.
-
-Revision ID: 2b4dc0818a5e
-Revises: 2b2529fd23ff
-Create Date: 2015-03-20 23:37:10.558179
-
-"""
-
-# revision identifiers, used by Alembic.
-revision = '2b4dc0818a5e'
-down_revision = '2b2529fd23ff'
-
-from alembic import op
-import sqlalchemy as sa
-
-
-def upgrade(tables):
-    ### commands auto generated by Alembic - please adjust! ###
-    op.create_index('repositorytag_repository_id_name_lifetime_end_ts', 'repositorytag', ['repository_id', 'name', 'lifetime_end_ts'], unique=True)
-    ### end Alembic commands ###
-
-
-def downgrade(tables):
-    ### commands auto generated by Alembic - please adjust! ###
-    op.drop_index('repositorytag_repository_id_name_lifetime_end_ts', table_name='repositorytag')
-    ### end Alembic commands ###
diff --git a/data/migrations/versions/2bf8af5bad95_add_keystone_login_service.py b/data/migrations/versions/2bf8af5bad95_add_keystone_login_service.py
deleted file mode 100644
index 6e0cf9658..000000000
--- a/data/migrations/versions/2bf8af5bad95_add_keystone_login_service.py
+++ /dev/null
@@ -1,26 +0,0 @@
-"""Add keystone login service
-
-Revision ID: 2bf8af5bad95
-Revises: 154f2befdfbe
-Create Date: 2015-06-29 21:19:13.053165
-
-"""
-
-# revision identifiers, used by Alembic.
-revision = '2bf8af5bad95'
-down_revision = '154f2befdfbe'
-
-from alembic import op
-import sqlalchemy as sa
-
-
-def upgrade(tables):
-    op.bulk_insert(tables.loginservice, [{'name': 'keystone'}])
-
-
-def downgrade(tables):
-    op.execute(
-        tables.loginservice.delete()
-        .where(tables.loginservice.c.name == op.inline_literal('keystone'))
-    )
-
diff --git a/data/migrations/versions/2e0380215d01_backfill_image_fields_from_image_.py b/data/migrations/versions/2e0380215d01_backfill_image_fields_from_image_.py
deleted file mode 100644
index 93d89ed6e..000000000
--- a/data/migrations/versions/2e0380215d01_backfill_image_fields_from_image_.py
+++ /dev/null
@@ -1,24 +0,0 @@
-"""Backfill image fields from image storages
-
-Revision ID: 2e0380215d01
-Revises: 3ff4fbc94644
-Create Date: 2015-09-15 16:57:42.850246
-
-"""
-
-# revision identifiers, used by Alembic.
-revision = '2e0380215d01'
-down_revision = '3ff4fbc94644'
-
-from alembic import op
-import sqlalchemy as sa
-from util.migrate.backfill_image_fields import backfill_image_fields
-from util.migrate.backfill_v1_metadata import backfill_v1_metadata
-
-
-def upgrade(tables):
-    backfill_image_fields()
-    backfill_v1_metadata()
-
-def downgrade(tables):
-    pass
diff --git a/data/migrations/versions/2e09ad97b06c_add_missing_tag_manifest_table.py b/data/migrations/versions/2e09ad97b06c_add_missing_tag_manifest_table.py
deleted file mode 100644
index 9659ba2ae..000000000
--- a/data/migrations/versions/2e09ad97b06c_add_missing_tag_manifest_table.py
+++ /dev/null
@@ -1,35 +0,0 @@
-"""Add missing tag manifest table
-
-Revision ID: 2e09ad97b06c
-Revises: 2bf8af5bad95
-Create Date: 2015-07-22 16:10:42.549566
-
-"""
-
-# revision identifiers, used by Alembic.
-revision = '2e09ad97b06c'
-down_revision = '2bf8af5bad95'
-
-from alembic import op
-import sqlalchemy as sa
-
-
-def upgrade(tables):
-    ### commands auto generated by Alembic - please adjust! ###
-    op.create_table('tagmanifest',
-        sa.Column('id', sa.Integer(), nullable=False),
-        sa.Column('tag_id', sa.Integer(), nullable=False),
-        sa.Column('digest', sa.String(length=255), nullable=False),
-        sa.Column('json_data', sa.Text(), nullable=False),
-        sa.ForeignKeyConstraint(['tag_id'], ['repositorytag.id'], name=op.f('fk_tagmanifest_tag_id_repositorytag')),
-        sa.PrimaryKeyConstraint('id', name=op.f('pk_tagmanifest'))
-    )
-    op.create_index('tagmanifest_digest', 'tagmanifest', ['digest'], unique=True)
-    op.create_index('tagmanifest_tag_id', 'tagmanifest', ['tag_id'], unique=True)
-    ### end Alembic commands ###
-
-
-def downgrade(tables):
-    ### commands auto generated by Alembic - please adjust! ###
-    op.drop_table('tagmanifest')
-    ### end Alembic commands ###
diff --git a/data/migrations/versions/2fb36d4be80d_remove_the_namespace_column.py b/data/migrations/versions/2fb36d4be80d_remove_the_namespace_column.py
deleted file mode 100644
index 19b9582f3..000000000
--- a/data/migrations/versions/2fb36d4be80d_remove_the_namespace_column.py
+++ /dev/null
@@ -1,30 +0,0 @@
-"""remove the namespace column.
-
-Revision ID: 2430f55c41d5
-Revises: 17f11e265e13
-Create Date: 2014-09-30 17:31:33.308490
-
-"""
-
-# revision identifiers, used by Alembic.
-revision = '2fb36d4be80d'
-down_revision = '17f11e265e13'
-
-from alembic import op
-import sqlalchemy as sa
-
-import re
-from app import app
-
-
-NAMESPACE_EXTRACTOR = re.compile(r'^([a-z]+/)([a-z0-9_]+)(/.*$)')
-
-
-def upgrade(tables):
-    op.create_index('repository_namespace_user_id', 'repository', ['namespace_user_id'], unique=False)
-    op.drop_column('repository', 'namespace')
-
-
-def downgrade(tables):
-    op.add_column('repository', sa.Column('namespace', sa.String(length=255)))
-    op.drop_index('repository_namespace_user_id', table_name='repository')
diff --git a/data/migrations/versions/30c044b75632_add_repositoryactioncount_table.py b/data/migrations/versions/30c044b75632_add_repositoryactioncount_table.py
deleted file mode 100644
index 8df45958e..000000000
--- a/data/migrations/versions/30c044b75632_add_repositoryactioncount_table.py
+++ /dev/null
@@ -1,36 +0,0 @@
-"""Add RepositoryActionCount table
-
-Revision ID: 30c044b75632
-Revises: 2b4dc0818a5e
-Create Date: 2015-04-13 13:21:18.159602
-
-"""
-
-# revision identifiers, used by Alembic.
-revision = '30c044b75632'
-down_revision = '2b4dc0818a5e'
-
-from alembic import op
-import sqlalchemy as sa
-
-
-def upgrade(tables):
-    ### commands auto generated by Alembic - please adjust! ###
-    op.create_table('repositoryactioncount',
-        sa.Column('id', sa.Integer(), nullable=False),
-        sa.Column('repository_id', sa.Integer(), nullable=False),
-        sa.Column('count', sa.Integer(), nullable=False),
-        sa.Column('date', sa.Date(), nullable=False),
-        sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], name=op.f('fk_repositoryactioncount_repository_id_repository')),
-        sa.PrimaryKeyConstraint('id', name=op.f('pk_repositoryactioncount'))
-    )
-    op.create_index('repositoryactioncount_date', 'repositoryactioncount', ['date'], unique=False)
-    op.create_index('repositoryactioncount_repository_id', 'repositoryactioncount', ['repository_id'], unique=False)
-    op.create_index('repositoryactioncount_repository_id_date', 'repositoryactioncount', ['repository_id', 'date'], unique=True)
-    ### end Alembic commands ###
-
-
-def downgrade(tables):
-    ### commands auto generated by Alembic - please adjust! ###
-    op.drop_table('repositoryactioncount')
-    ### end Alembic commands ###
diff --git a/data/migrations/versions/31288f79df53_make_resource_key_nullable.py b/data/migrations/versions/31288f79df53_make_resource_key_nullable.py
deleted file mode 100644
index e14dfaca1..000000000
--- a/data/migrations/versions/31288f79df53_make_resource_key_nullable.py
+++ /dev/null
@@ -1,29 +0,0 @@
-"""make resource_key nullable
-
-Revision ID: 31288f79df53
-Revises: 214350b6a8b1
-Create Date: 2015-03-23 14:34:04.816295
-
-"""
-
-# revision identifiers, used by Alembic.
-revision = '31288f79df53'
-down_revision = '214350b6a8b1'
-
-from alembic import op
-import sqlalchemy as sa
-
-def upgrade(tables):
-    ### commands auto generated by Alembic - please adjust! ###
-    op.alter_column('repositorybuild', 'resource_key',
-                    existing_type=sa.String(length=255),
-                    nullable=True)
-    ### end Alembic commands ###
-
-
-def downgrade(tables):
-    ### commands auto generated by Alembic - please adjust! ###
-    op.alter_column('repositorybuild', 'resource_key',
-                    existing_type=sa.String(length=255),
-                    nullable=False)
-    ### end Alembic commands ###
diff --git a/data/migrations/versions/313179799c8b_add_bitbucket_build_trigger_type.py b/data/migrations/versions/313179799c8b_add_bitbucket_build_trigger_type.py
deleted file mode 100644
index bce3a679c..000000000
--- a/data/migrations/versions/313179799c8b_add_bitbucket_build_trigger_type.py
+++ /dev/null
@@ -1,25 +0,0 @@
-"""Add bitbucket build trigger type
-
-Revision ID: 313179799c8b
-Revises: 37c47a7af956
-Create Date: 2015-04-30 15:52:33.388825
-
-"""
-
-# revision identifiers, used by Alembic.
-revision = '313179799c8b'
-down_revision = '37c47a7af956'
-
-from alembic import op
-import sqlalchemy as sa
-
-
-def upgrade(tables):
-    op.bulk_insert(tables.buildtriggerservice, [{'name': 'bitbucket'}])
-
-
-def downgrade(tables):
-    op.execute(
-        tables.buildtriggerservice.delete()
-        .where(tables.buildtriggerservice.c.name == op.inline_literal('bitbucket'))
-    )
diff --git a/data/migrations/versions/313d297811c4_add_an_index_to_the_docker_image_id_.py b/data/migrations/versions/313d297811c4_add_an_index_to_the_docker_image_id_.py
deleted file mode 100644
index 3987fe2cc..000000000
--- a/data/migrations/versions/313d297811c4_add_an_index_to_the_docker_image_id_.py
+++ /dev/null
@@ -1,25 +0,0 @@
-"""Add an index to the docker_image_id field
-
-Revision ID: 313d297811c4
-Revises: 204abf14783d
-Create Date: 2014-11-13 12:40:57.414787
-
-"""
-
-# revision identifiers, used by Alembic.
-revision = '313d297811c4'
-down_revision = '204abf14783d'
-
-from alembic import op
-import sqlalchemy as sa
-
-def upgrade(tables):
-    ### commands auto generated by Alembic - please adjust! ###
-    op.create_index('image_docker_image_id', 'image', ['docker_image_id'], unique=False)
-    ### end Alembic commands ###
-
-
-def downgrade(tables):
-    ### commands auto generated by Alembic - please adjust! ###
-    op.drop_index('image_docker_image_id', table_name='image')
-    ### end Alembic commands ###
diff --git a/data/migrations/versions/325a4d7c79d9_prepare_the_database_for_the_new_.py b/data/migrations/versions/325a4d7c79d9_prepare_the_database_for_the_new_.py
deleted file mode 100644
index 970a01df5..000000000
--- a/data/migrations/versions/325a4d7c79d9_prepare_the_database_for_the_new_.py
+++ /dev/null
@@ -1,140 +0,0 @@
-"""Prepare the database for the new notifications system
-
-Revision ID: 325a4d7c79d9
-Revises: 4b7ef0c7bdb2
-Create Date: 2014-07-31 13:08:18.667393
-
-"""
-
-# revision identifiers, used by Alembic.
-revision = '325a4d7c79d9'
-down_revision = '4b7ef0c7bdb2'
-
-from alembic import op
-import sqlalchemy as sa
-
-def upgrade(tables):
-    ### commands auto generated by Alembic - please adjust! ###
-    op.create_table('externalnotificationmethod',
-        sa.Column('id', sa.Integer(), nullable=False),
-        sa.Column('name', sa.String(length=255), nullable=False),
-        sa.PrimaryKeyConstraint('id')
-    )
-    op.create_index('externalnotificationmethod_name', 'externalnotificationmethod', ['name'], unique=True)
-    op.bulk_insert(tables.externalnotificationmethod,
-        [
-            {'name':'quay_notification'},
-            {'name':'email'},
-            {'name':'webhook'},
-        ])
-    op.create_table('externalnotificationevent',
-        sa.Column('id', sa.Integer(), nullable=False),
-        sa.Column('name', sa.String(length=255), nullable=False),
-        sa.PrimaryKeyConstraint('id')
-    )
-    op.create_index('externalnotificationevent_name', 'externalnotificationevent', ['name'], unique=True)
-    op.bulk_insert(tables.externalnotificationevent,
-        [
-            {'name':'repo_push'},
-            {'name':'build_queued'},
-            {'name':'build_start'},
-            {'name':'build_success'},
-            {'name':'build_failure'},
-        ])
-    op.create_table('repositoryauthorizedemail',
-        sa.Column('id', sa.Integer(), nullable=False),
-        sa.Column('repository_id', sa.Integer(), nullable=False),
-        sa.Column('email', sa.String(length=255), nullable=False),
-        sa.Column('code', sa.String(length=255), nullable=False),
-        sa.Column('confirmed', sa.Boolean(), nullable=False),
-        sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], ),
-        sa.PrimaryKeyConstraint('id')
-    )
-    op.create_index('repositoryauthorizedemail_code', 'repositoryauthorizedemail', ['code'], unique=True)
-    op.create_index('repositoryauthorizedemail_email_repository_id', 'repositoryauthorizedemail', ['email', 'repository_id'], unique=True)
-    op.create_index('repositoryauthorizedemail_repository_id', 'repositoryauthorizedemail', ['repository_id'], unique=False)
-    op.create_table('repositorynotification',
-        sa.Column('id', sa.Integer(), nullable=False),
-        sa.Column('uuid', sa.String(length=255), nullable=False),
-        sa.Column('repository_id', sa.Integer(), nullable=False),
-        sa.Column('event_id', sa.Integer(), nullable=False),
-        sa.Column('method_id', sa.Integer(), nullable=False),
-        sa.Column('config_json', sa.Text(), nullable=False),
-        sa.ForeignKeyConstraint(['event_id'], ['externalnotificationevent.id'], ),
-        sa.ForeignKeyConstraint(['method_id'], ['externalnotificationmethod.id'], ),
-        sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], ),
-        sa.PrimaryKeyConstraint('id')
-    )
-    op.create_index('repositorynotification_event_id', 'repositorynotification', ['event_id'], unique=False)
-    op.create_index('repositorynotification_method_id', 'repositorynotification', ['method_id'], unique=False)
-    op.create_index('repositorynotification_repository_id', 'repositorynotification', ['repository_id'], unique=False)
-    op.create_index('repositorynotification_uuid', 'repositorynotification', ['uuid'], unique=False)
-    op.add_column(u'notification', sa.Column('dismissed', sa.Boolean(), nullable=False))
-
-    # Manually add the new notificationkind types
-    op.bulk_insert(tables.notificationkind,
-        [
-            {'name':'repo_push'},
-            {'name':'build_queued'},
-            {'name':'build_start'},
-            {'name':'build_success'},
-            {'name':'build_failure'},
-        ])
-
-    # Manually add the new logentrykind types
-    op.bulk_insert(tables.logentrykind,
-        [
-            {'name':'add_repo_notification'},
-            {'name':'delete_repo_notification'},
-        ])
-
-
-    ### end Alembic commands ###
-
-
-def downgrade(tables):
-    ### commands auto generated by Alembic - please adjust! ###
-    op.drop_column(u'notification', 'dismissed')
-    op.drop_table('repositorynotification')
-    op.drop_table('repositoryauthorizedemail')
-    op.drop_table('externalnotificationevent')
-    op.drop_table('externalnotificationmethod')
-
-    # Manually remove the notificationkind and logentrykind types
-    op.execute(
-        (tables.notificationkind.delete()
-         .where(tables.notificationkind.c.name == op.inline_literal('repo_push')))
-
-    )
-    op.execute(
-        (tables.notificationkind.delete()
-         .where(tables.notificationkind.c.name == op.inline_literal('build_queued')))
-
-    )
-    op.execute(
-        (tables.notificationkind.delete()
-         .where(tables.notificationkind.c.name == op.inline_literal('build_start')))
-
-    )
-    op.execute(
-        (tables.notificationkind.delete()
-         .where(tables.notificationkind.c.name == op.inline_literal('build_success')))
-
-    )
-    op.execute(
-        (tables.notificationkind.delete()
-         .where(tables.notificationkind.c.name == op.inline_literal('build_failure')))
-
-    )
-
-    op.execute(
-        (tables.logentrykind.delete()
-         .where(tables.logentrykind.c.name == op.inline_literal('add_repo_notification')))
-
-    )
-    op.execute(
-        (tables.logentrykind.delete()
-         .where(tables.logentrykind.c.name == op.inline_literal('delete_repo_notification')))
-
-    )
-    ### end Alembic commands ###
diff --git a/data/migrations/versions/33bd39ef5ed6_backport_v2_db_changes.py b/data/migrations/versions/33bd39ef5ed6_backport_v2_db_changes.py
deleted file mode 100644
index c63924c93..000000000
--- a/data/migrations/versions/33bd39ef5ed6_backport_v2_db_changes.py
+++ /dev/null
@@ -1,43 +0,0 @@
-"""Backport v2 db changes.
-
-Revision ID: 33bd39ef5ed6
-Revises: 127905a52fdd
-Create Date: 2015-10-23 12:34:22.776542
-
-"""
-
-# revision identifiers, used by Alembic.
-revision = '33bd39ef5ed6'
-down_revision = '127905a52fdd'
-
-from alembic import op
-import sqlalchemy as sa
-
-
-def upgrade(tables):
-    ### commands auto generated by Alembic - please adjust! ###
-    op.create_table('blobupload',
-        sa.Column('id', sa.Integer(), nullable=False),
-        sa.Column('repository_id', sa.Integer(), nullable=False),
-        sa.Column('uuid', sa.String(length=255), nullable=False),
-        sa.Column('byte_count', sa.Integer(), nullable=False),
-        sa.Column('sha_state', sa.Text(), nullable=True),
-        sa.Column('location_id', sa.Integer(), nullable=False),
-        sa.Column('storage_metadata', sa.Text(), nullable=True),
-        sa.ForeignKeyConstraint(['location_id'], ['imagestoragelocation.id'], name=op.f('fk_blobupload_location_id_imagestoragelocation')),
-        sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], name=op.f('fk_blobupload_repository_id_repository')),
-        sa.PrimaryKeyConstraint('id', name=op.f('pk_blobupload'))
-    )
-    op.create_index('blobupload_location_id', 'blobupload', ['location_id'], unique=False)
-    op.create_index('blobupload_repository_id', 'blobupload', ['repository_id'], unique=False)
-    op.create_index('blobupload_repository_id_uuid', 'blobupload', ['repository_id', 'uuid'], unique=True)
-    op.create_index('blobupload_uuid', 'blobupload', ['uuid'], unique=True)
-    op.add_column(u'imagestorage', sa.Column('cas_path', sa.Boolean(), nullable=False, server_default="0"))
-    ### end Alembic commands ###
-
-
-def downgrade(tables):
-    ### commands auto generated by Alembic - please adjust! ###
-    op.drop_column(u'imagestorage', 'cas_path')
-    op.drop_table('blobupload')
-    ### end Alembic commands ###
diff --git a/data/migrations/versions/34fd69f63809_add_support_for_build_log_migration.py b/data/migrations/versions/34fd69f63809_add_support_for_build_log_migration.py
deleted file mode 100644
index a731d0158..000000000
--- a/data/migrations/versions/34fd69f63809_add_support_for_build_log_migration.py
+++ /dev/null
@@ -1,26 +0,0 @@
-"""Add support for build log migration.
-
-Revision ID: 34fd69f63809
-Revises: 4a0c94399f38
-Create Date: 2014-09-12 11:50:09.217777
-
-"""
-
-# revision identifiers, used by Alembic.
-revision = '34fd69f63809'
-down_revision = '4a0c94399f38'
-
-from alembic import op
-import sqlalchemy as sa
-
-
-def upgrade(tables):
-    ### commands auto generated by Alembic - please adjust! ###
-    op.add_column('repositorybuild', sa.Column('logs_archived', sa.Boolean(), nullable=False, server_default=sa.sql.expression.false()))
-    ### end Alembic commands ###
-
-
-def downgrade(tables):
-    ### commands auto generated by Alembic - please adjust! ###
-    op.drop_column('repositorybuild', 'logs_archived')
-    ### end Alembic commands ###
diff --git a/data/migrations/versions/35f538da62_switch_manifest_text_to_a_longtext.py b/data/migrations/versions/35f538da62_switch_manifest_text_to_a_longtext.py
deleted file mode 100644
index bd139158c..000000000
--- a/data/migrations/versions/35f538da62_switch_manifest_text_to_a_longtext.py
+++ /dev/null
@@ -1,32 +0,0 @@
-"""Switch manifest text to a longtext.
-
-Revision ID: 35f538da62
-Revises: 33bd39ef5ed6
-Create Date: 2015-10-23 15:31:27.353995
-
-"""
-
-# revision identifiers, used by Alembic.
-revision = '35f538da62'
-down_revision = '33bd39ef5ed6'
-
-from alembic import op
-import sqlalchemy as sa
-
-from sqlalchemy.types import TypeDecorator, Text
-from sqlalchemy.dialects.mysql import LONGTEXT
-
-from util.migrate import UTF8LongText
-
-def upgrade(tables):
-    ### commands auto generated by Alembic - please adjust! ###
-    op.drop_column(u'tagmanifest', 'json_data')
-    op.add_column(u'tagmanifest', sa.Column('json_data', UTF8LongText(), nullable=False))
-    ### end Alembic commands ###
-
-
-def downgrade(tables):
-    ### commands auto generated by Alembic - please adjust! ###
-    op.drop_column(u'tagmanifest', 'json_data')
-    op.add_column(u'tagmanifest', sa.Column('json_data', sa.Text(), nullable=False))
-    ### end Alembic commands ###
diff --git a/data/migrations/versions/37c47a7af956_add_custom_git_trigger_type_to_database.py b/data/migrations/versions/37c47a7af956_add_custom_git_trigger_type_to_database.py
deleted file mode 100644
index ce92fcb0e..000000000
--- a/data/migrations/versions/37c47a7af956_add_custom_git_trigger_type_to_database.py
+++ /dev/null
@@ -1,25 +0,0 @@
-"""add custom-git trigger type to database
-
-Revision ID: 37c47a7af956
-Revises: 3fee6f979c2a
-Create Date: 2015-04-24 14:50:26.275516
-
-"""
-
-# revision identifiers, used by Alembic.
-revision = '37c47a7af956' -down_revision = '3fee6f979c2a' - -from alembic import op -import sqlalchemy as sa - - -def upgrade(tables): - op.bulk_insert(tables.buildtriggerservice, [{'name': 'custom-git'}]) - - -def downgrade(tables): - op.execute( - tables.buildtriggerservice.delete() - .where(tables.buildtriggerservice.c.name == op.inline_literal('custom-git')) - ) diff --git a/data/migrations/versions/3a3bb77e17d5_add_support_for_dex_login.py b/data/migrations/versions/3a3bb77e17d5_add_support_for_dex_login.py deleted file mode 100644 index 537285b3d..000000000 --- a/data/migrations/versions/3a3bb77e17d5_add_support_for_dex_login.py +++ /dev/null @@ -1,26 +0,0 @@ -"""Add support for Dex login - -Revision ID: 3a3bb77e17d5 -Revises: 9512773a4a2 -Create Date: 2015-09-04 15:57:38.007822 - -""" - -# revision identifiers, used by Alembic. -revision = '3a3bb77e17d5' -down_revision = '9512773a4a2' - -from alembic import op -import sqlalchemy as sa - - -def upgrade(tables): - op.bulk_insert(tables.loginservice, [{'name': 'dex'}]) - - -def downgrade(tables): - op.execute( - tables.loginservice.delete() - .where(tables.loginservice.c.name == op.inline_literal('dex')) - ) - diff --git a/data/migrations/versions/3b4d3a4461dc_add_support_for_squashed_images.py b/data/migrations/versions/3b4d3a4461dc_add_support_for_squashed_images.py deleted file mode 100644 index 4ac52b3a1..000000000 --- a/data/migrations/versions/3b4d3a4461dc_add_support_for_squashed_images.py +++ /dev/null @@ -1,49 +0,0 @@ -"""Add support for squashed images - -Revision ID: 3b4d3a4461dc -Revises: b1d41e2071b -Create Date: 2014-10-07 14:49:13.105746 - -""" - -# revision identifiers, used by Alembic. -revision = '3b4d3a4461dc' -down_revision = 'b1d41e2071b' - -from alembic import op -import sqlalchemy as sa - -def upgrade(tables): - ### commands auto generated by Alembic - please adjust! 
### - op.create_table('imagestoragetransformation', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('name', sa.String(length=255), nullable=False), - sa.PrimaryKeyConstraint('id', name=op.f('pk_imagestoragetransformation')) - ) - op.create_index('imagestoragetransformation_name', 'imagestoragetransformation', ['name'], unique=True) - op.bulk_insert(tables.imagestoragetransformation, - [ - {'name':'squash'}, - ]) - op.create_table('derivedimagestorage', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('source_id', sa.Integer(), nullable=True), - sa.Column('derivative_id', sa.Integer(), nullable=False), - sa.Column('transformation_id', sa.Integer(), nullable=False), - sa.ForeignKeyConstraint(['derivative_id'], ['imagestorage.id'], name=op.f('fk_derivedimagestorage_derivative_id_imagestorage')), - sa.ForeignKeyConstraint(['source_id'], ['imagestorage.id'], name=op.f('fk_derivedimagestorage_source_id_imagestorage')), - sa.ForeignKeyConstraint(['transformation_id'], ['imagestoragetransformation.id'], name=op.f('fk_dis_transformation_id_ist')), - sa.PrimaryKeyConstraint('id', name=op.f('pk_derivedimagestorage')) - ) - op.create_index('derivedimagestorage_derivative_id', 'derivedimagestorage', ['derivative_id'], unique=False) - op.create_index('derivedimagestorage_source_id', 'derivedimagestorage', ['source_id'], unique=False) - op.create_index('derivedimagestorage_source_id_transformation_id', 'derivedimagestorage', ['source_id', 'transformation_id'], unique=True) - op.create_index('derivedimagestorage_transformation_id', 'derivedimagestorage', ['transformation_id'], unique=False) - ### end Alembic commands ### - - -def downgrade(tables): - ### commands auto generated by Alembic - please adjust! ### - op.drop_table('derivedimagestorage') - op.drop_table('imagestoragetransformation') - ### end Alembic commands ### diff --git a/data/migrations/versions/3e2d38b52a75_add_access_token_kinds_type.py b/data/migrations/versions/3e2d38b52a75_add_access_token_kinds_type.py deleted file mode 100644 index f74673f53..000000000 --- a/data/migrations/versions/3e2d38b52a75_add_access_token_kinds_type.py +++ /dev/null @@ -1,44 +0,0 @@ -"""Add access token kinds type - -Revision ID: 3e2d38b52a75 -Revises: 1d2d86d09fcd -Create Date: 2015-02-17 12:03:26.422485 - -""" - -# revision identifiers, used by Alembic. -revision = '3e2d38b52a75' -down_revision = '1d2d86d09fcd' - -from alembic import op -import sqlalchemy as sa - - -def upgrade(tables): - ### commands auto generated by Alembic - please adjust! ### - op.create_table('accesstokenkind', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('name', sa.String(length=255), nullable=False), - sa.PrimaryKeyConstraint('id', name=op.f('pk_accesstokenkind')) - ) - op.create_index('accesstokenkind_name', 'accesstokenkind', ['name'], unique=True) - op.add_column(u'accesstoken', sa.Column('kind_id', sa.Integer(), nullable=True)) - op.create_index('accesstoken_kind_id', 'accesstoken', ['kind_id'], unique=False) - op.create_foreign_key(op.f('fk_accesstoken_kind_id_accesstokenkind'), 'accesstoken', 'accesstokenkind', ['kind_id'], ['id']) - ### end Alembic commands ### - - op.bulk_insert(tables.accesstokenkind, - [ - {'name':'build-worker'}, - {'name':'pushpull-token'}, - ]) - - -def downgrade(tables): - ### commands auto generated by Alembic - please adjust! 
### - op.drop_constraint(op.f('fk_accesstoken_kind_id_accesstokenkind'), 'accesstoken', type_='foreignkey') - op.drop_index('accesstoken_kind_id', table_name='accesstoken') - op.drop_column(u'accesstoken', 'kind_id') - op.drop_index('accesstokenkind_name', table_name='accesstokenkind') - op.drop_table('accesstokenkind') - ### end Alembic commands ### diff --git a/data/migrations/versions/3f4fe1194671_backfill_the_namespace_user_fields.py b/data/migrations/versions/3f4fe1194671_backfill_the_namespace_user_fields.py deleted file mode 100644 index 4a1e2fe9d..000000000 --- a/data/migrations/versions/3f4fe1194671_backfill_the_namespace_user_fields.py +++ /dev/null @@ -1,26 +0,0 @@ -"""Backfill the namespace_user fields. - -Revision ID: 3f4fe1194671 -Revises: 6f2ecf5afcf -Create Date: 2014-09-24 14:29:45.192179 - -""" - -# revision identifiers, used by Alembic. -revision = '3f4fe1194671' -down_revision = '6f2ecf5afcf' - -from alembic import op -import sqlalchemy as sa - - -def upgrade(tables): - conn = op.get_bind() - user_table_name_escaped = conn.dialect.identifier_preparer.format_table(tables['user']) - conn.execute('update repository set namespace_user_id = (select id from {0} where {0}.username = repository.namespace) where namespace_user_id is NULL'.format(user_table_name_escaped)) - op.create_index('repository_namespace_user_id_name', 'repository', ['namespace_user_id', 'name'], unique=True) - - -def downgrade(tables): - op.drop_constraint('fk_repository_namespace_user_id_user', table_name='repository', type_='foreignkey') - op.drop_index('repository_namespace_user_id_name', table_name='repository') diff --git a/data/migrations/versions/3fee6f979c2a_make_auth_token_nullable.py b/data/migrations/versions/3fee6f979c2a_make_auth_token_nullable.py deleted file mode 100644 index 04379eb60..000000000 --- a/data/migrations/versions/3fee6f979c2a_make_auth_token_nullable.py +++ /dev/null @@ -1,29 +0,0 @@ -"""make auth_token nullable - -Revision ID: 3fee6f979c2a -Revises: 31288f79df53 -Create Date: 2015-03-27 11:11:24.046996 - -""" - -# revision identifiers, used by Alembic. -revision = '3fee6f979c2a' -down_revision = '31288f79df53' - -from alembic import op -import sqlalchemy as sa - -def upgrade(tables): - ### commands auto generated by Alembic - please adjust! ### - op.alter_column('repositorybuildtrigger', 'auth_token', - existing_type=sa.String(length=255), - nullable=True) - ### end Alembic commands ### - - -def downgrade(tables): - ### commands auto generated by Alembic - please adjust! ### - op.alter_column('repositorybuildtrigger', 'auth_token', - existing_type=sa.String(length=255), - nullable=False) - ### end Alembic commands ### diff --git a/data/migrations/versions/3ff4fbc94644_migrate_github_triggers_to_use_deploy_.py b/data/migrations/versions/3ff4fbc94644_migrate_github_triggers_to_use_deploy_.py deleted file mode 100644 index 820b21548..000000000 --- a/data/migrations/versions/3ff4fbc94644_migrate_github_triggers_to_use_deploy_.py +++ /dev/null @@ -1,28 +0,0 @@ -"""Migrate GitHub triggers to use deploy keys - -Revision ID: 3ff4fbc94644 -Revises: 4d5f6716df0 -Create Date: 2015-09-16 17:50:22.034146 - -""" - -# revision identifiers, used by Alembic. -revision = '3ff4fbc94644' -down_revision = '4d5f6716df0' - -from alembic import op -import sqlalchemy as sa - -from util.migrate.migrategithubdeploykeys import backfill_github_deploykeys - - -def upgrade(tables): - ### commands auto generated by Alembic - please adjust! 
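A caveat on 3fee6f979c2a above: its downgrade restores NOT NULL on auth_token, which will fail if any NULL rows were written while the column was nullable. A safer revert would backfill first — a sketch, with a hypothetical empty-string placeholder value:

    import sqlalchemy as sa
    from alembic import op

    def downgrade(tables):
        conn = op.get_bind()
        # Hypothetical backfill: NOT NULL cannot be restored while NULLs remain.
        conn.execute("UPDATE repositorybuildtrigger SET auth_token = '' WHERE auth_token IS NULL")
        op.alter_column('repositorybuildtrigger', 'auth_token',
                        existing_type=sa.String(length=255), nullable=False)
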
### - backfill_github_deploykeys() - ### end Alembic commands ### - - -def downgrade(tables): - ### commands auto generated by Alembic - please adjust! ### - pass - ### end Alembic commands ### diff --git a/data/migrations/versions/403d02fea323_add_new_blobupload_columns.py b/data/migrations/versions/403d02fea323_add_new_blobupload_columns.py deleted file mode 100644 index a99afb017..000000000 --- a/data/migrations/versions/403d02fea323_add_new_blobupload_columns.py +++ /dev/null @@ -1,28 +0,0 @@ -"""Add new blobupload columns - -Revision ID: 403d02fea323 -Revises: 10b999e8db1f -Create Date: 2015-11-30 14:25:46.822730 - -""" - -# revision identifiers, used by Alembic. -revision = '403d02fea323' -down_revision = '10b999e8db1f' - -from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects import mysql - -def upgrade(tables): - ### commands auto generated by Alembic - please adjust! ### - op.add_column('blobupload', sa.Column('chunk_count', sa.Integer(), server_default="0", nullable=False)) - op.add_column('blobupload', sa.Column('uncompressed_byte_count', sa.Integer(), nullable=True)) - ### end Alembic commands ### - - -def downgrade(tables): - ### commands auto generated by Alembic - please adjust! ### - op.drop_column('blobupload', 'uncompressed_byte_count') - op.drop_column('blobupload', 'chunk_count') - ### end Alembic commands ### diff --git a/data/migrations/versions/41f4587c84ae_add_jwt_authentication_login_service.py b/data/migrations/versions/41f4587c84ae_add_jwt_authentication_login_service.py deleted file mode 100644 index a0819fa58..000000000 --- a/data/migrations/versions/41f4587c84ae_add_jwt_authentication_login_service.py +++ /dev/null @@ -1,28 +0,0 @@ -"""Add JWT Authentication login service - -Revision ID: 41f4587c84ae -Revises: 1f116e06b68 -Create Date: 2015-06-02 16:13:02.636590 - -""" - -# revision identifiers, used by Alembic. -revision = '41f4587c84ae' -down_revision = '1f116e06b68' - -from alembic import op -import sqlalchemy as sa - - -def upgrade(tables): - op.bulk_insert(tables.loginservice, - [ - {'name':'jwtauthn'}, - ]) - - -def downgrade(tables): - op.execute( - (tables.loginservice.delete() - .where(tables.loginservice.c.name == op.inline_literal('jwtauthn'))) - ) diff --git a/data/migrations/versions/437ee6269a9d_migrate_bitbucket_services_to_webhooks.py b/data/migrations/versions/437ee6269a9d_migrate_bitbucket_services_to_webhooks.py deleted file mode 100644 index 9ebf6bcd2..000000000 --- a/data/migrations/versions/437ee6269a9d_migrate_bitbucket_services_to_webhooks.py +++ /dev/null @@ -1,24 +0,0 @@ -"""Migrate BitBucket services to webhooks - -Revision ID: 437ee6269a9d -Revises: 2e09ad97b06c -Create Date: 2015-07-21 14:03:44.964200 - -""" - -from util.migrate.migratebitbucketservices import run_bitbucket_migration - -# revision identifiers, used by Alembic.
-revision = '437ee6269a9d' -down_revision = '2e09ad97b06c' - -from alembic import op -import sqlalchemy as sa - - -def upgrade(tables): - run_bitbucket_migration() - - -def downgrade(tables): - pass diff --git a/data/migrations/versions/43e943c0639f_add_log_kind_for_regenerating_robot_.py b/data/migrations/versions/43e943c0639f_add_log_kind_for_regenerating_robot_.py deleted file mode 100644 index d7ea6cb20..000000000 --- a/data/migrations/versions/43e943c0639f_add_log_kind_for_regenerating_robot_.py +++ /dev/null @@ -1,28 +0,0 @@ -"""add log kind for regenerating robot tokens - -Revision ID: 43e943c0639f -Revises: 82297d834ad -Create Date: 2014-08-25 17:14:42.784518 - -""" - -# revision identifiers, used by Alembic. -revision = '43e943c0639f' -down_revision = '82297d834ad' - -from alembic import op -import sqlalchemy as sa - -def upgrade(tables): - op.bulk_insert(tables.logentrykind, - [ - {'name':'regenerate_robot_token'}, - ]) - - -def downgrade(tables): - op.execute( - (tables.logentrykind.delete() - .where(tables.logentrykind.c.name == op.inline_literal('regenerate_robot_token'))) - - ) diff --git a/data/migrations/versions/471caec2cb66_add_invoice_email_address_to_user.py b/data/migrations/versions/471caec2cb66_add_invoice_email_address_to_user.py deleted file mode 100644 index 173f23624..000000000 --- a/data/migrations/versions/471caec2cb66_add_invoice_email_address_to_user.py +++ /dev/null @@ -1,27 +0,0 @@ -"""Add invoice email address to user - -Revision ID: 471caec2cb66 -Revises: 88e0f440a2f -Create Date: 2015-12-28 13:57:17.761334 - -""" - -# revision identifiers, used by Alembic. -revision = '471caec2cb66' -down_revision = '88e0f440a2f' - -from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects import mysql - -def upgrade(tables): - ### commands auto generated by Alembic - please adjust! ### - op.add_column('user', sa.Column('invoice_email_address', sa.String(length=255), nullable=True)) - op.create_index('user_invoice_email_address', 'user', ['invoice_email_address'], unique=False) - ### end Alembic commands ### - - -def downgrade(tables): - ### commands auto generated by Alembic - please adjust! ### - op.drop_column('user', 'invoice_email_address') - ### end Alembic commands ### diff --git a/data/migrations/versions/47670cbeced_migrate_existing_webhooks_to_.py b/data/migrations/versions/47670cbeced_migrate_existing_webhooks_to_.py deleted file mode 100644 index eaa687c73..000000000 --- a/data/migrations/versions/47670cbeced_migrate_existing_webhooks_to_.py +++ /dev/null @@ -1,31 +0,0 @@ -"""Migrate existing webhooks to notifications. - -Revision ID: 47670cbeced -Revises: 325a4d7c79d9 -Create Date: 2014-07-31 13:49:38.332807 -Hand Edited By Joseph Schorr - -""" - -# revision identifiers, used by Alembic. 
-revision = '47670cbeced' -down_revision = '325a4d7c79d9' - -from alembic import op, context -import sqlalchemy as sa - -def get_id(query): - conn = op.get_bind() - return list(conn.execute(query, ()).fetchall())[0][0] - -def upgrade(tables): - conn = op.get_bind() - event_id = get_id('Select id From externalnotificationevent Where name=\'repo_push\' Limit 1') - method_id = get_id('Select id From externalnotificationmethod Where name=\'webhook\' Limit 1') - conn.execute('Insert Into repositorynotification (uuid, repository_id, event_id, method_id, config_json) Select public_id, repository_id, %s, %s, parameters FROM webhook' % (event_id, method_id)) - -def downgrade(tables): - conn = op.get_bind() - event_id = get_id('Select id From externalnotificationevent Where name=\'repo_push\' Limit 1') - method_id = get_id('Select id From externalnotificationmethod Where name=\'webhook\' Limit 1') - conn.execute('Insert Into webhook (public_id, repository_id, parameters) Select uuid, repository_id, config_json FROM repositorynotification Where event_id=%s And method_id=%s' % (event_id, method_id)) diff --git a/data/migrations/versions/499f6f08de3_add_title_field_to_notification.py b/data/migrations/versions/499f6f08de3_add_title_field_to_notification.py deleted file mode 100644 index 7d203b176..000000000 --- a/data/migrations/versions/499f6f08de3_add_title_field_to_notification.py +++ /dev/null @@ -1,26 +0,0 @@ -"""Add title field to notification - -Revision ID: 499f6f08de3 -Revises: 246df01a6d51 -Create Date: 2015-08-21 14:18:07.287743 - -""" - -# revision identifiers, used by Alembic. -revision = '499f6f08de3' -down_revision = '246df01a6d51' - -from alembic import op -import sqlalchemy as sa - - -def upgrade(tables): - ### commands auto generated by Alembic - please adjust! ### - op.add_column('repositorynotification', sa.Column('title', sa.String(length=255), nullable=True)) - ### end Alembic commands ### - - -def downgrade(tables): - ### commands auto generated by Alembic - please adjust! ### - op.drop_column('repositorynotification', 'title') - ### end Alembic commands ### diff --git a/data/migrations/versions/4a0c94399f38_add_new_notification_kinds.py b/data/migrations/versions/4a0c94399f38_add_new_notification_kinds.py deleted file mode 100644 index 833a4fe90..000000000 --- a/data/migrations/versions/4a0c94399f38_add_new_notification_kinds.py +++ /dev/null @@ -1,38 +0,0 @@ -"""add new notification kinds - -Revision ID: 4a0c94399f38 -Revises: 1594a74a74ca -Create Date: 2014-08-28 16:17:01.898269 - -""" - -# revision identifiers, used by Alembic. 
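The webhook-to-notification migration 47670cbeced above splices event_id and method_id into its INSERT ... SELECT with % formatting; both values come straight from the database, so that is safe here, but the same statement can be written with bound parameters. A sketch of that variant under the same schema, using the keyword-parameter execute() style of the SQLAlchemy versions of this era:

    import sqlalchemy as sa
    from alembic import op

    def upgrade(tables):
        conn = op.get_bind()
        event_id = conn.execute("SELECT id FROM externalnotificationevent WHERE name = 'repo_push' LIMIT 1").fetchone()[0]
        method_id = conn.execute("SELECT id FROM externalnotificationmethod WHERE name = 'webhook' LIMIT 1").fetchone()[0]
        # Bound parameters instead of string interpolation.
        conn.execute(
            sa.text('INSERT INTO repositorynotification (uuid, repository_id, event_id, method_id, config_json) '
                    'SELECT public_id, repository_id, :event_id, :method_id, parameters FROM webhook'),
            event_id=event_id, method_id=method_id)
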
-revision = '4a0c94399f38' -down_revision = '1594a74a74ca' - -from alembic import op -import sqlalchemy as sa - -def upgrade(tables): - op.bulk_insert(tables.externalnotificationmethod, - [ - {'name':'flowdock'}, - {'name':'hipchat'}, - {'name':'slack'}, - ]) - -def downgrade(tables): - op.execute( - (tables.externalnotificationmethod.delete() - .where(tables.externalnotificationmethod.c.name == op.inline_literal('flowdock'))) - ) - - op.execute( - (tables.externalnotificationmethod.delete() - .where(tables.externalnotificationmethod.c.name == op.inline_literal('hipchat'))) - ) - - op.execute( - (tables.externalnotificationmethod.delete() - .where(tables.externalnotificationmethod.c.name == op.inline_literal('slack'))) - ) diff --git a/data/migrations/versions/4b7ef0c7bdb2_add_the_maintenance_notification_type.py b/data/migrations/versions/4b7ef0c7bdb2_add_the_maintenance_notification_type.py deleted file mode 100644 index 9e8e854ff..000000000 --- a/data/migrations/versions/4b7ef0c7bdb2_add_the_maintenance_notification_type.py +++ /dev/null @@ -1,28 +0,0 @@ -"""Add the maintenance notification type. - -Revision ID: 4b7ef0c7bdb2 -Revises: bcdde200a1b -Create Date: 2014-06-27 19:09:56.387534 - -""" - -# revision identifiers, used by Alembic. -revision = '4b7ef0c7bdb2' -down_revision = 'bcdde200a1b' - -from alembic import op -import sqlalchemy as sa - -def upgrade(tables): - op.bulk_insert(tables.notificationkind, - [ - {'name':'maintenance'}, - ]) - - -def downgrade(tables): - op.execute( - (tables.notificationkind.delete() - .where(tables.notificationkind.c.name == op.inline_literal('maintenance'))) - - ) diff --git a/data/migrations/versions/4ce2169efd3b_add_reversion_column_to_the_tags_table.py b/data/migrations/versions/4ce2169efd3b_add_reversion_column_to_the_tags_table.py deleted file mode 100644 index 19a1d6ba6..000000000 --- a/data/migrations/versions/4ce2169efd3b_add_reversion_column_to_the_tags_table.py +++ /dev/null @@ -1,26 +0,0 @@ -"""Add reversion column to the tags table - -Revision ID: 4ce2169efd3b -Revises: 30c044b75632 -Create Date: 2015-04-16 17:10:16.039835 - -""" - -# revision identifiers, used by Alembic. -revision = '4ce2169efd3b' -down_revision = '30c044b75632' - -from alembic import op -import sqlalchemy as sa - - -def upgrade(tables): - ### commands auto generated by Alembic - please adjust! ### - op.add_column('repositorytag', sa.Column('reversion', sa.Boolean(), nullable=False, server_default=sa.sql.expression.false())) - ### end Alembic commands ### - - -def downgrade(tables): - ### commands auto generated by Alembic - please adjust! ### - op.drop_column('repositorytag', 'reversion') - ### end Alembic commands ### diff --git a/data/migrations/versions/4d5f6716df0_add_legacy_column_for_github_backfill_.py b/data/migrations/versions/4d5f6716df0_add_legacy_column_for_github_backfill_.py deleted file mode 100644 index 17f4360b5..000000000 --- a/data/migrations/versions/4d5f6716df0_add_legacy_column_for_github_backfill_.py +++ /dev/null @@ -1,26 +0,0 @@ -"""Add legacy column for GitHub backfill tracking - -Revision ID: 4d5f6716df0 -Revises: 1c0f6ede8992 -Create Date: 2015-09-16 17:49:40.334540 - -""" - -# revision identifiers, used by Alembic. -revision = '4d5f6716df0' -down_revision = '1c0f6ede8992' - -from alembic import op -import sqlalchemy as sa - - -def upgrade(tables): - ### commands auto generated by Alembic - please adjust! 
### - op.add_column('repositorybuildtrigger', sa.Column('used_legacy_github', sa.Boolean(), nullable=True)) - ### end Alembic commands ### - - -def downgrade(tables): - ### commands auto generated by Alembic - please adjust! ### - op.drop_column('repositorybuildtrigger', 'used_legacy_github') - ### end Alembic commands ### diff --git a/data/migrations/versions/4ef04c61fcf9_allow_tags_to_be_marked_as_hidden.py b/data/migrations/versions/4ef04c61fcf9_allow_tags_to_be_marked_as_hidden.py deleted file mode 100644 index e4fc1ea5e..000000000 --- a/data/migrations/versions/4ef04c61fcf9_allow_tags_to_be_marked_as_hidden.py +++ /dev/null @@ -1,26 +0,0 @@ -"""Allow tags to be marked as hidden. - -Revision ID: 4ef04c61fcf9 -Revises: 509d2857566f -Create Date: 2015-02-18 16:34:16.586129 - -""" - -# revision identifiers, used by Alembic. -revision = '4ef04c61fcf9' -down_revision = '509d2857566f' - -from alembic import op -import sqlalchemy as sa - - -def upgrade(tables): - ### commands auto generated by Alembic - please adjust! ### - op.add_column('repositorytag', sa.Column('hidden', sa.Boolean(), nullable=False, server_default=sa.sql.expression.false())) - ### end Alembic commands ### - - -def downgrade(tables): - ### commands auto generated by Alembic - please adjust! ### - op.drop_column('repositorytag', 'hidden') - ### end Alembic commands ### diff --git a/data/migrations/versions/4fdb65816b8d_add_brute_force_prevention_metadata_to_.py b/data/migrations/versions/4fdb65816b8d_add_brute_force_prevention_metadata_to_.py deleted file mode 100644 index 42afef28f..000000000 --- a/data/migrations/versions/4fdb65816b8d_add_brute_force_prevention_metadata_to_.py +++ /dev/null @@ -1,27 +0,0 @@ -"""Add brute force prevention metadata to the user table. - -Revision ID: 4fdb65816b8d -Revises: 43e943c0639f -Create Date: 2014-09-03 12:35:33.722435 - -""" - -# revision identifiers, used by Alembic. -revision = '4fdb65816b8d' -down_revision = '43e943c0639f' - -from alembic import op -import sqlalchemy as sa - -def upgrade(tables): - ### commands auto generated by Alembic - please adjust! ### - op.add_column('user', sa.Column('invalid_login_attempts', sa.Integer(), nullable=False, server_default="0")) - op.add_column('user', sa.Column('last_invalid_login', sa.DateTime(), nullable=False)) - ### end Alembic commands ### - - -def downgrade(tables): - ### commands auto generated by Alembic - please adjust! ### - op.drop_column('user', 'last_invalid_login') - op.drop_column('user', 'invalid_login_attempts') - ### end Alembic commands ### diff --git a/data/migrations/versions/50925110da8c_add_event_specific_config.py b/data/migrations/versions/50925110da8c_add_event_specific_config.py deleted file mode 100644 index 8b67fe51a..000000000 --- a/data/migrations/versions/50925110da8c_add_event_specific_config.py +++ /dev/null @@ -1,26 +0,0 @@ -"""Add event-specific config - -Revision ID: 50925110da8c -Revises: 57dad559ff2d -Create Date: 2015-10-13 18:03:14.859839 - -""" - -# revision identifiers, used by Alembic. -revision = '50925110da8c' -down_revision = '57dad559ff2d' - -from alembic import op -import sqlalchemy as sa -from util.migrate import UTF8LongText - -def upgrade(tables): - ### commands auto generated by Alembic - please adjust! ### - op.add_column('repositorynotification', sa.Column('event_config_json', UTF8LongText, nullable=False)) - ### end Alembic commands ### - - -def downgrade(tables): - ### commands auto generated by Alembic - please adjust!
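A note on 4fdb65816b8d above: last_invalid_login is added NOT NULL with no server_default, which only succeeds where the backend supplies an implicit value (e.g. MySQL's zero datetime in non-strict mode); PostgreSQL would reject the ALTER on a populated table. A portable sketch supplies a default for the backfill and then drops it so future rows must set the value explicitly:

    import sqlalchemy as sa
    from alembic import op

    def upgrade(tables):
        # server_default backfills existing rows so the NOT NULL add works everywhere.
        op.add_column('user', sa.Column('last_invalid_login', sa.DateTime(),
                                        nullable=False, server_default=sa.func.now()))
        # Passing server_default=None explicitly drops the default afterwards.
        op.alter_column('user', 'last_invalid_login', server_default=None,
                        existing_type=sa.DateTime(), existing_nullable=False)
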
### - op.drop_column('repositorynotification', 'event_config_json') - ### end Alembic commands ### diff --git a/data/migrations/versions/509d2857566f_track_the_lifetime_start_and_end_for_.py b/data/migrations/versions/509d2857566f_track_the_lifetime_start_and_end_for_.py deleted file mode 100644 index a13ec00d1..000000000 --- a/data/migrations/versions/509d2857566f_track_the_lifetime_start_and_end_for_.py +++ /dev/null @@ -1,36 +0,0 @@ -"""Track the lifetime start and end for tags to allow the state of a repository to be rewound. - -Revision ID: 509d2857566f -Revises: 3e2d38b52a75 -Create Date: 2015-02-13 14:35:38.939049 - -""" - -# revision identifiers, used by Alembic. -revision = '509d2857566f' -down_revision = '3e2d38b52a75' - -from alembic import op -import sqlalchemy as sa - - -def upgrade(tables): - ### commands auto generated by Alembic - please adjust! ### - op.add_column('repositorytag', sa.Column('lifetime_end_ts', sa.Integer(), nullable=True)) - op.add_column('repositorytag', sa.Column('lifetime_start_ts', sa.Integer(), nullable=False, server_default="0")) - op.create_index('repositorytag_lifetime_end_ts', 'repositorytag', ['lifetime_end_ts'], unique=False) - op.drop_index('repositorytag_repository_id_name', table_name='repositorytag') - op.create_index('repositorytag_repository_id_name', 'repositorytag', ['repository_id', 'name'], unique=False) - op.add_column('user', sa.Column('removed_tag_expiration_s', sa.Integer(), nullable=False, server_default="1209600")) - ### end Alembic commands ### - - -def downgrade(tables): - ### commands auto generated by Alembic - please adjust! ### - op.drop_column('user', 'removed_tag_expiration_s') - op.drop_index('repositorytag_repository_id_name', table_name='repositorytag') - op.create_index('repositorytag_repository_id_name', 'repositorytag', ['repository_id', 'name'], unique=True) - op.drop_index('repositorytag_lifetime_end_ts', table_name='repositorytag') - op.drop_column('repositorytag', 'lifetime_start_ts') - op.drop_column('repositorytag', 'lifetime_end_ts') - ### end Alembic commands ### diff --git a/data/migrations/versions/51d04d0e7e6f_email_invites_for_joining_a_team.py b/data/migrations/versions/51d04d0e7e6f_email_invites_for_joining_a_team.py deleted file mode 100644 index d3062793b..000000000 --- a/data/migrations/versions/51d04d0e7e6f_email_invites_for_joining_a_team.py +++ /dev/null @@ -1,78 +0,0 @@ -"""Email invites for joining a team. - -Revision ID: 51d04d0e7e6f -Revises: 34fd69f63809 -Create Date: 2014-09-15 23:51:35.478232 - -""" - -# revision identifiers, used by Alembic. -revision = '51d04d0e7e6f' -down_revision = '34fd69f63809' - -from alembic import op -import sqlalchemy as sa - - -def upgrade(tables): - ### commands auto generated by Alembic - please adjust! 
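509d2857566f above deliberately rebuilds repositorytag_repository_id_name as non-unique: once tags carry lifetime ranges, many expired rows can share a name, and only the row whose lifetime_end_ts IS NULL is live. An illustrative query shape for the "rewind" read this enables — the SQL is a sketch, not code from this changeset:

    # Resolve the tag that was alive at epoch timestamp :ts (illustrative only).
    REWIND_QUERY = (
        'SELECT id FROM repositorytag '
        'WHERE repository_id = :repo_id AND name = :name '
        'AND lifetime_start_ts <= :ts '
        'AND (lifetime_end_ts IS NULL OR lifetime_end_ts > :ts)')
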
### - op.create_table('teammemberinvite', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('user_id', sa.Integer(), nullable=True), - sa.Column('email', sa.String(length=255), nullable=True), - sa.Column('team_id', sa.Integer(), nullable=False), - sa.Column('inviter_id', sa.Integer(), nullable=False), - sa.Column('invite_token', sa.String(length=255), nullable=False), - sa.ForeignKeyConstraint(['inviter_id'], ['user.id'], ), - sa.ForeignKeyConstraint(['team_id'], ['team.id'], ), - sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), - sa.PrimaryKeyConstraint('id') - ) - op.create_index('teammemberinvite_inviter_id', 'teammemberinvite', ['inviter_id'], unique=False) - op.create_index('teammemberinvite_team_id', 'teammemberinvite', ['team_id'], unique=False) - op.create_index('teammemberinvite_user_id', 'teammemberinvite', ['user_id'], unique=False) - ### end Alembic commands ### - - # Manually add the new logentrykind types - op.bulk_insert(tables.logentrykind, - [ - {'name':'org_invite_team_member'}, - {'name':'org_team_member_invite_accepted'}, - {'name':'org_team_member_invite_declined'}, - {'name':'org_delete_team_member_invite'}, - ]) - - op.bulk_insert(tables.notificationkind, - [ - {'name':'org_team_invite'}, - ]) - -def downgrade(tables): - ### commands auto generated by Alembic - please adjust! ### - op.execute( - (tables.logentrykind.delete() - .where(tables.logentrykind.c.name == op.inline_literal('org_invite_team_member'))) - ) - - op.execute( - (tables.logentrykind.delete() - .where(tables.logentrykind.c.name == op.inline_literal('org_team_member_invite_accepted'))) - ) - - op.execute( - (tables.logentrykind.delete() - .where(tables.logentrykind.c.name == op.inline_literal('org_team_member_invite_declined'))) - ) - - op.execute( - (tables.logentrykind.delete() - .where(tables.logentrykind.c.name == op.inline_literal('org_delete_team_member_invite'))) - ) - - op.execute( - (tables.notificationkind.delete() - .where(tables.notificationkind.c.name == op.inline_literal('org_team_invite'))) - ) - - op.drop_table('teammemberinvite') - ### end Alembic commands ### diff --git a/data/migrations/versions/5232a5610a0a_add_logentry_repo_datetime_kind_index.py b/data/migrations/versions/5232a5610a0a_add_logentry_repo_datetime_kind_index.py deleted file mode 100644 index c8bff8bbd..000000000 --- a/data/migrations/versions/5232a5610a0a_add_logentry_repo_datetime_kind_index.py +++ /dev/null @@ -1,26 +0,0 @@ -"""Add LogEntry repo-datetime-kind index - -Revision ID: 5232a5610a0a -Revises: 437ee6269a9d -Create Date: 2015-07-31 13:25:41.877733 - -""" - -# revision identifiers, used by Alembic. -revision = '5232a5610a0a' -down_revision = '437ee6269a9d' - -from alembic import op -import sqlalchemy as sa - - -def upgrade(tables): - ### commands auto generated by Alembic - please adjust! ### - op.create_index('logentry_repository_id_datetime_kind_id', 'logentry', ['repository_id', 'datetime', 'kind_id'], unique=False) - ### end Alembic commands ### - - -def downgrade(tables): - ### commands auto generated by Alembic - please adjust! 
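Column order matters for the index 5232a5610a0a adds just above: (repository_id, datetime, kind_id) serves queries that equality-filter on repository and range-scan on datetime, with kind_id narrowing further, while a filter on kind_id alone cannot use it. The query shape it targets, sketched with peewee; LogEntry, repo and kind are assumed application names, not defined in this changeset:

    from datetime import datetime, timedelta

    # Leftmost-prefix match: repository (equality), then datetime (range), then kind.
    # `repo` and `kind` are assumed to be model instances fetched elsewhere.
    end = datetime.utcnow()
    start = end - timedelta(days=7)
    query = (LogEntry
             .select()
             .where((LogEntry.repository == repo) &
                    (LogEntry.datetime >= start) &
                    (LogEntry.datetime < end) &
                    (LogEntry.kind == kind)))
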
### - op.drop_index('logentry_repository_id_datetime_kind_id', table_name='logentry') - ### end Alembic commands ### diff --git a/data/migrations/versions/545794454f49_migrate_image_data_back_to_image_table.py b/data/migrations/versions/545794454f49_migrate_image_data_back_to_image_table.py deleted file mode 100644 index 270968aae..000000000 --- a/data/migrations/versions/545794454f49_migrate_image_data_back_to_image_table.py +++ /dev/null @@ -1,35 +0,0 @@ -"""Migrate image data back to image table - -Revision ID: 545794454f49 -Revises: 3a3bb77e17d5 -Create Date: 2015-09-15 11:48:47.554255 - -""" - -# revision identifiers, used by Alembic. -revision = '545794454f49' -down_revision = '3a3bb77e17d5' - -from alembic import op -import sqlalchemy as sa - -from util.migrate import UTF8LongText - -def upgrade(tables): - ### commands auto generated by Alembic - please adjust! ### - op.add_column('image', sa.Column('aggregate_size', sa.BigInteger(), nullable=True)) - op.add_column('image', sa.Column('command', sa.Text(), nullable=True)) - op.add_column('image', sa.Column('comment', UTF8LongText(), nullable=True)) - op.add_column('image', sa.Column('created', sa.DateTime(), nullable=True)) - op.add_column('image', sa.Column('v1_json_metadata', UTF8LongText(), nullable=True)) - ### end Alembic commands ### - - -def downgrade(tables): - ### commands auto generated by Alembic - please adjust! ### - op.drop_column('image', 'v1_json_metadata') - op.drop_column('image', 'created') - op.drop_column('image', 'comment') - op.drop_column('image', 'command') - op.drop_column('image', 'aggregate_size') - ### end Alembic commands ### diff --git a/data/migrations/versions/57dad559ff2d_add_support_for_quay_s_security_indexer.py b/data/migrations/versions/57dad559ff2d_add_support_for_quay_s_security_indexer.py deleted file mode 100644 index 078137c61..000000000 --- a/data/migrations/versions/57dad559ff2d_add_support_for_quay_s_security_indexer.py +++ /dev/null @@ -1,33 +0,0 @@ -"""add support for quay's security indexer - -Revision ID: 57dad559ff2d -Revises: 73669db7e12 -Create Date: 2015-07-13 16:51:41.669249 - -""" - -# revision identifiers, used by Alembic. -revision = '57dad559ff2d' -down_revision = '73669db7e12' - -from alembic import op -import sqlalchemy as sa - -def upgrade(tables): - ### commands auto generated by Alembic - please adjust! ### - op.add_column('image', sa.Column('parent_id', sa.Integer(), nullable=True)) - op.add_column('image', sa.Column('security_indexed', sa.Boolean(), nullable=False, default=False, server_default=sa.sql.expression.false())) - op.add_column('image', sa.Column('security_indexed_engine', sa.Integer(), nullable=False, default=-1, server_default="-1")) - op.create_index('image_parent_id', 'image', ['parent_id'], unique=False) - ### end Alembic commands ### - - op.create_index('image_security_indexed_engine_security_indexed', 'image', ['security_indexed_engine', 'security_indexed']) -def downgrade(tables): - ### commands auto generated by Alembic - please adjust!
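A subtlety in 57dad559ff2d above: the security_indexed columns pass both default= and server_default=. In a migration only server_default reaches the emitted DDL; default= is a client-side SQLAlchemy Core value and is inert in ADD COLUMN, so the duplication is harmless but redundant. A sketch of the part that actually matters:

    import sqlalchemy as sa
    from alembic import op

    def upgrade(tables):
        # server_default is what backfills existing rows and lands in the DDL;
        # a Core-level default= is never seen by ALTER TABLE.
        op.add_column('image', sa.Column('security_indexed', sa.Boolean(),
                                         nullable=False,
                                         server_default=sa.sql.expression.false()))
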
### - op.drop_index('image_security_indexed_engine_security_indexed', 'image') - op.drop_index('image_parent_id', table_name='image') - op.drop_column('image', 'security_indexed') - op.drop_column('image', 'security_indexed_engine') - op.drop_column('image', 'parent_id') - ### end Alembic commands ### diff --git a/data/migrations/versions/5a07499ce53f_set_up_initial_database.py b/data/migrations/versions/5a07499ce53f_set_up_initial_database.py deleted file mode 100644 index db9bb69e9..000000000 --- a/data/migrations/versions/5a07499ce53f_set_up_initial_database.py +++ /dev/null @@ -1,518 +0,0 @@ -"""Set up initial database - -Revision ID: 5a07499ce53f -Revises: None -Create Date: 2014-05-13 11:26:51.808426 - -""" - -# revision identifiers, used by Alembic. -revision = '5a07499ce53f' -down_revision = None - -from alembic import op -import sqlalchemy as sa - -def upgrade(tables): - ### commands auto generated by Alembic - please adjust! ### - op.create_table('loginservice', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('name', sa.String(length=255), nullable=False), - sa.PrimaryKeyConstraint('id') - ) - op.create_index('loginservice_name', 'loginservice', ['name'], unique=True) - - op.bulk_insert(tables.loginservice, - [ - {'name':'github'}, - {'name':'quayrobot'}, - {'name':'ldap'}, - ]) - - op.create_table('imagestorage', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('uuid', sa.String(length=255), nullable=False), - sa.Column('checksum', sa.String(length=255), nullable=True), - sa.Column('created', sa.DateTime(), nullable=True), - sa.Column('comment', sa.Text(), nullable=True), - sa.Column('command', sa.Text(), nullable=True), - sa.Column('image_size', sa.BigInteger(), nullable=True), - sa.Column('uploading', sa.Boolean(), nullable=True), - sa.PrimaryKeyConstraint('id') - ) - op.create_table('queueitem', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('queue_name', sa.String(length=1024), nullable=False), - sa.Column('body', sa.Text(), nullable=False), - sa.Column('available_after', sa.DateTime(), nullable=False), - sa.Column('available', sa.Boolean(), nullable=False), - sa.Column('processing_expires', sa.DateTime(), nullable=True), - sa.Column('retries_remaining', sa.Integer(), nullable=False), - sa.PrimaryKeyConstraint('id') - ) - op.create_index('queueitem_available', 'queueitem', ['available'], unique=False) - op.create_index('queueitem_available_after', 'queueitem', ['available_after'], unique=False) - op.create_index('queueitem_processing_expires', 'queueitem', ['processing_expires'], unique=False) - op.create_index('queueitem_queue_name', 'queueitem', ['queue_name'], unique=False, mysql_length=767) - op.create_table('role', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('name', sa.String(length=255), nullable=False), - sa.PrimaryKeyConstraint('id') - ) - op.create_index('role_name', 'role', ['name'], unique=False) - - op.bulk_insert(tables.role, - [ - {'name':'admin'}, - {'name':'write'}, - {'name':'read'}, - ]) - - op.create_table('logentrykind', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('name', sa.String(length=255), nullable=False), - sa.PrimaryKeyConstraint('id') - ) - op.create_index('logentrykind_name', 'logentrykind', ['name'], unique=False) - - op.bulk_insert(tables.logentrykind, - [ - {'name':'account_change_plan'}, - {'name':'account_change_cc'}, - {'name':'account_change_password'}, - {'name':'account_convert'}, - - {'name':'create_robot'}, - {'name':'delete_robot'}, - - {'name':'create_repo'}, - 
{'name':'push_repo'}, - {'name':'pull_repo'}, - {'name':'delete_repo'}, - {'name':'create_tag'}, - {'name':'move_tag'}, - {'name':'delete_tag'}, - {'name':'add_repo_permission'}, - {'name':'change_repo_permission'}, - {'name':'delete_repo_permission'}, - {'name':'change_repo_visibility'}, - {'name':'add_repo_accesstoken'}, - {'name':'delete_repo_accesstoken'}, - {'name':'add_repo_webhook'}, - {'name':'delete_repo_webhook'}, - {'name':'set_repo_description'}, - - {'name':'build_dockerfile'}, - - {'name':'org_create_team'}, - {'name':'org_delete_team'}, - {'name':'org_add_team_member'}, - {'name':'org_remove_team_member'}, - {'name':'org_set_team_description'}, - {'name':'org_set_team_role'}, - - {'name':'create_prototype_permission'}, - {'name':'modify_prototype_permission'}, - {'name':'delete_prototype_permission'}, - - {'name':'setup_repo_trigger'}, - {'name':'delete_repo_trigger'}, - - {'name':'create_application'}, - {'name':'update_application'}, - {'name':'delete_application'}, - {'name':'reset_application_client_secret'}, - ]) - - op.create_table('notificationkind', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('name', sa.String(length=255), nullable=False), - sa.PrimaryKeyConstraint('id') - ) - op.create_index('notificationkind_name', 'notificationkind', ['name'], unique=False) - - op.bulk_insert(tables.notificationkind, - [ - {'name':'password_required'}, - {'name':'over_private_usage'}, - {'name':'expiring_license'}, - ]) - - op.create_table('teamrole', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('name', sa.String(length=255), nullable=False), - sa.PrimaryKeyConstraint('id') - ) - op.create_index('teamrole_name', 'teamrole', ['name'], unique=False) - - op.bulk_insert(tables.teamrole, - [ - {'name':'admin'}, - {'name':'creator'}, - {'name':'member'}, - ]) - - op.create_table('visibility', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('name', sa.String(length=255), nullable=False), - sa.PrimaryKeyConstraint('id') - ) - op.create_index('visibility_name', 'visibility', ['name'], unique=False) - - op.bulk_insert(tables.visibility, - [ - {'name':'public'}, - {'name':'private'}, - ]) - - op.create_table('user', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('username', sa.String(length=255), nullable=False), - sa.Column('password_hash', sa.String(length=255), nullable=True), - sa.Column('email', sa.String(length=255), nullable=False), - sa.Column('verified', sa.Boolean(), nullable=False), - sa.Column('stripe_id', sa.String(length=255), nullable=True), - sa.Column('organization', sa.Boolean(), nullable=False), - sa.Column('robot', sa.Boolean(), nullable=False), - sa.Column('invoice_email', sa.Boolean(), nullable=False), - sa.PrimaryKeyConstraint('id') - ) - op.create_index('user_email', 'user', ['email'], unique=True) - op.create_index('user_organization', 'user', ['organization'], unique=False) - op.create_index('user_robot', 'user', ['robot'], unique=False) - op.create_index('user_stripe_id', 'user', ['stripe_id'], unique=False) - op.create_index('user_username', 'user', ['username'], unique=True) - op.create_table('buildtriggerservice', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('name', sa.String(length=255), nullable=False), - sa.PrimaryKeyConstraint('id') - ) - op.create_index('buildtriggerservice_name', 'buildtriggerservice', ['name'], unique=False) - - op.bulk_insert(tables.buildtriggerservice, - [ - {'name':'github'}, - ]) - - op.create_table('federatedlogin', - sa.Column('id', sa.Integer(), 
nullable=False), - sa.Column('user_id', sa.Integer(), nullable=False), - sa.Column('service_id', sa.Integer(), nullable=False), - sa.Column('service_ident', sa.String(length=255), nullable=False), - sa.ForeignKeyConstraint(['service_id'], ['loginservice.id'], ), - sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), - sa.PrimaryKeyConstraint('id') - ) - op.create_index('federatedlogin_service_id', 'federatedlogin', ['service_id'], unique=False) - op.create_index('federatedlogin_service_id_service_ident', 'federatedlogin', ['service_id', 'service_ident'], unique=True) - op.create_index('federatedlogin_service_id_user_id', 'federatedlogin', ['service_id', 'user_id'], unique=True) - op.create_index('federatedlogin_user_id', 'federatedlogin', ['user_id'], unique=False) - op.create_table('oauthapplication', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('client_id', sa.String(length=255), nullable=False), - sa.Column('client_secret', sa.String(length=255), nullable=False), - sa.Column('redirect_uri', sa.String(length=255), nullable=False), - sa.Column('application_uri', sa.String(length=255), nullable=False), - sa.Column('organization_id', sa.Integer(), nullable=False), - sa.Column('name', sa.String(length=255), nullable=False), - sa.Column('description', sa.Text(), nullable=False), - sa.Column('gravatar_email', sa.String(length=255), nullable=True), - sa.ForeignKeyConstraint(['organization_id'], ['user.id'], ), - sa.PrimaryKeyConstraint('id') - ) - op.create_index('oauthapplication_client_id', 'oauthapplication', ['client_id'], unique=False) - op.create_index('oauthapplication_organization_id', 'oauthapplication', ['organization_id'], unique=False) - op.create_table('notification', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('uuid', sa.String(length=255), nullable=False), - sa.Column('kind_id', sa.Integer(), nullable=False), - sa.Column('target_id', sa.Integer(), nullable=False), - sa.Column('metadata_json', sa.Text(), nullable=False), - sa.Column('created', sa.DateTime(), nullable=False), - sa.ForeignKeyConstraint(['kind_id'], ['notificationkind.id'], ), - sa.ForeignKeyConstraint(['target_id'], ['user.id'], ), - sa.PrimaryKeyConstraint('id') - ) - op.create_index('notification_created', 'notification', ['created'], unique=False) - op.create_index('notification_kind_id', 'notification', ['kind_id'], unique=False) - op.create_index('notification_target_id', 'notification', ['target_id'], unique=False) - op.create_index('notification_uuid', 'notification', ['uuid'], unique=False) - op.create_table('emailconfirmation', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('code', sa.String(length=255), nullable=False), - sa.Column('user_id', sa.Integer(), nullable=False), - sa.Column('pw_reset', sa.Boolean(), nullable=False), - sa.Column('new_email', sa.String(length=255), nullable=True), - sa.Column('email_confirm', sa.Boolean(), nullable=False), - sa.Column('created', sa.DateTime(), nullable=False), - sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), - sa.PrimaryKeyConstraint('id') - ) - op.create_index('emailconfirmation_code', 'emailconfirmation', ['code'], unique=True) - op.create_index('emailconfirmation_user_id', 'emailconfirmation', ['user_id'], unique=False) - op.create_table('team', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('name', sa.String(length=255), nullable=False), - sa.Column('organization_id', sa.Integer(), nullable=False), - sa.Column('role_id', sa.Integer(), nullable=False), - sa.Column('description', sa.Text(), 
nullable=False), - sa.ForeignKeyConstraint(['organization_id'], ['user.id'], ), - sa.ForeignKeyConstraint(['role_id'], ['teamrole.id'], ), - sa.PrimaryKeyConstraint('id') - ) - op.create_index('team_name', 'team', ['name'], unique=False) - op.create_index('team_name_organization_id', 'team', ['name', 'organization_id'], unique=True) - op.create_index('team_organization_id', 'team', ['organization_id'], unique=False) - op.create_index('team_role_id', 'team', ['role_id'], unique=False) - op.create_table('repository', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('namespace', sa.String(length=255), nullable=False), - sa.Column('name', sa.String(length=255), nullable=False), - sa.Column('visibility_id', sa.Integer(), nullable=False), - sa.Column('description', sa.Text(), nullable=True), - sa.Column('badge_token', sa.String(length=255), nullable=False), - sa.ForeignKeyConstraint(['visibility_id'], ['visibility.id'], ), - sa.PrimaryKeyConstraint('id') - ) - op.create_index('repository_namespace_name', 'repository', ['namespace', 'name'], unique=True) - op.create_index('repository_visibility_id', 'repository', ['visibility_id'], unique=False) - op.create_table('accesstoken', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('friendly_name', sa.String(length=255), nullable=True), - sa.Column('code', sa.String(length=255), nullable=False), - sa.Column('repository_id', sa.Integer(), nullable=False), - sa.Column('created', sa.DateTime(), nullable=False), - sa.Column('role_id', sa.Integer(), nullable=False), - sa.Column('temporary', sa.Boolean(), nullable=False), - sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], ), - sa.ForeignKeyConstraint(['role_id'], ['role.id'], ), - sa.PrimaryKeyConstraint('id') - ) - op.create_index('accesstoken_code', 'accesstoken', ['code'], unique=True) - op.create_index('accesstoken_repository_id', 'accesstoken', ['repository_id'], unique=False) - op.create_index('accesstoken_role_id', 'accesstoken', ['role_id'], unique=False) - op.create_table('repositorypermission', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('team_id', sa.Integer(), nullable=True), - sa.Column('user_id', sa.Integer(), nullable=True), - sa.Column('repository_id', sa.Integer(), nullable=False), - sa.Column('role_id', sa.Integer(), nullable=False), - sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], ), - sa.ForeignKeyConstraint(['role_id'], ['role.id'], ), - sa.ForeignKeyConstraint(['team_id'], ['team.id'], ), - sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), - sa.PrimaryKeyConstraint('id') - ) - op.create_index('repositorypermission_repository_id', 'repositorypermission', ['repository_id'], unique=False) - op.create_index('repositorypermission_role_id', 'repositorypermission', ['role_id'], unique=False) - op.create_index('repositorypermission_team_id', 'repositorypermission', ['team_id'], unique=False) - op.create_index('repositorypermission_team_id_repository_id', 'repositorypermission', ['team_id', 'repository_id'], unique=True) - op.create_index('repositorypermission_user_id', 'repositorypermission', ['user_id'], unique=False) - op.create_index('repositorypermission_user_id_repository_id', 'repositorypermission', ['user_id', 'repository_id'], unique=True) - op.create_table('oauthaccesstoken', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('uuid', sa.String(length=255), nullable=False), - sa.Column('application_id', sa.Integer(), nullable=False), - sa.Column('authorized_user_id', sa.Integer(), nullable=False), - 
sa.Column('scope', sa.String(length=255), nullable=False), - sa.Column('access_token', sa.String(length=255), nullable=False), - sa.Column('token_type', sa.String(length=255), nullable=False), - sa.Column('expires_at', sa.DateTime(), nullable=False), - sa.Column('refresh_token', sa.String(length=255), nullable=True), - sa.Column('data', sa.Text(), nullable=False), - sa.ForeignKeyConstraint(['application_id'], ['oauthapplication.id'], ), - sa.ForeignKeyConstraint(['authorized_user_id'], ['user.id'], ), - sa.PrimaryKeyConstraint('id') - ) - op.create_index('oauthaccesstoken_access_token', 'oauthaccesstoken', ['access_token'], unique=False) - op.create_index('oauthaccesstoken_application_id', 'oauthaccesstoken', ['application_id'], unique=False) - op.create_index('oauthaccesstoken_authorized_user_id', 'oauthaccesstoken', ['authorized_user_id'], unique=False) - op.create_index('oauthaccesstoken_refresh_token', 'oauthaccesstoken', ['refresh_token'], unique=False) - op.create_index('oauthaccesstoken_uuid', 'oauthaccesstoken', ['uuid'], unique=False) - op.create_table('teammember', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('user_id', sa.Integer(), nullable=False), - sa.Column('team_id', sa.Integer(), nullable=False), - sa.ForeignKeyConstraint(['team_id'], ['team.id'], ), - sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), - sa.PrimaryKeyConstraint('id') - ) - op.create_index('teammember_team_id', 'teammember', ['team_id'], unique=False) - op.create_index('teammember_user_id', 'teammember', ['user_id'], unique=False) - op.create_index('teammember_user_id_team_id', 'teammember', ['user_id', 'team_id'], unique=True) - op.create_table('webhook', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('public_id', sa.String(length=255), nullable=False), - sa.Column('repository_id', sa.Integer(), nullable=False), - sa.Column('parameters', sa.Text(), nullable=False), - sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], ), - sa.PrimaryKeyConstraint('id') - ) - op.create_index('webhook_public_id', 'webhook', ['public_id'], unique=True) - op.create_index('webhook_repository_id', 'webhook', ['repository_id'], unique=False) - op.create_table('oauthauthorizationcode', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('application_id', sa.Integer(), nullable=False), - sa.Column('code', sa.String(length=255), nullable=False), - sa.Column('scope', sa.String(length=255), nullable=False), - sa.Column('data', sa.Text(), nullable=False), - sa.ForeignKeyConstraint(['application_id'], ['oauthapplication.id'], ), - sa.PrimaryKeyConstraint('id') - ) - op.create_index('oauthauthorizationcode_application_id', 'oauthauthorizationcode', ['application_id'], unique=False) - op.create_index('oauthauthorizationcode_code', 'oauthauthorizationcode', ['code'], unique=False) - op.create_table('image', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('docker_image_id', sa.String(length=255), nullable=False), - sa.Column('checksum', sa.String(length=255), nullable=True), - sa.Column('created', sa.DateTime(), nullable=True), - sa.Column('comment', sa.Text(), nullable=True), - sa.Column('command', sa.Text(), nullable=True), - sa.Column('repository_id', sa.Integer(), nullable=False), - sa.Column('image_size', sa.BigInteger(), nullable=True), - sa.Column('ancestors', sa.String(length=60535), nullable=True), - sa.Column('storage_id', sa.Integer(), nullable=True), - sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], ), - sa.ForeignKeyConstraint(['storage_id'], 
['imagestorage.id'], ), - sa.PrimaryKeyConstraint('id') - ) - op.create_index('image_ancestors', 'image', ['ancestors'], unique=False, mysql_length=767) - op.create_index('image_repository_id', 'image', ['repository_id'], unique=False) - op.create_index('image_repository_id_docker_image_id', 'image', ['repository_id', 'docker_image_id'], unique=True) - op.create_index('image_storage_id', 'image', ['storage_id'], unique=False) - op.create_table('permissionprototype', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('org_id', sa.Integer(), nullable=False), - sa.Column('uuid', sa.String(length=255), nullable=False), - sa.Column('activating_user_id', sa.Integer(), nullable=True), - sa.Column('delegate_user_id', sa.Integer(), nullable=True), - sa.Column('delegate_team_id', sa.Integer(), nullable=True), - sa.Column('role_id', sa.Integer(), nullable=False), - sa.ForeignKeyConstraint(['activating_user_id'], ['user.id'], ), - sa.ForeignKeyConstraint(['delegate_team_id'], ['team.id'], ), - sa.ForeignKeyConstraint(['delegate_user_id'], ['user.id'], ), - sa.ForeignKeyConstraint(['org_id'], ['user.id'], ), - sa.ForeignKeyConstraint(['role_id'], ['role.id'], ), - sa.PrimaryKeyConstraint('id') - ) - op.create_index('permissionprototype_activating_user_id', 'permissionprototype', ['activating_user_id'], unique=False) - op.create_index('permissionprototype_delegate_team_id', 'permissionprototype', ['delegate_team_id'], unique=False) - op.create_index('permissionprototype_delegate_user_id', 'permissionprototype', ['delegate_user_id'], unique=False) - op.create_index('permissionprototype_org_id', 'permissionprototype', ['org_id'], unique=False) - op.create_index('permissionprototype_org_id_activating_user_id', 'permissionprototype', ['org_id', 'activating_user_id'], unique=False) - op.create_index('permissionprototype_role_id', 'permissionprototype', ['role_id'], unique=False) - op.create_table('repositorytag', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('name', sa.String(length=255), nullable=False), - sa.Column('image_id', sa.Integer(), nullable=False), - sa.Column('repository_id', sa.Integer(), nullable=False), - sa.ForeignKeyConstraint(['image_id'], ['image.id'], ), - sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], ), - sa.PrimaryKeyConstraint('id') - ) - op.create_index('repositorytag_image_id', 'repositorytag', ['image_id'], unique=False) - op.create_index('repositorytag_repository_id', 'repositorytag', ['repository_id'], unique=False) - op.create_index('repositorytag_repository_id_name', 'repositorytag', ['repository_id', 'name'], unique=True) - op.create_table('logentry', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('kind_id', sa.Integer(), nullable=False), - sa.Column('account_id', sa.Integer(), nullable=False), - sa.Column('performer_id', sa.Integer(), nullable=True), - sa.Column('repository_id', sa.Integer(), nullable=True), - sa.Column('access_token_id', sa.Integer(), nullable=True), - sa.Column('datetime', sa.DateTime(), nullable=False), - sa.Column('ip', sa.String(length=255), nullable=True), - sa.Column('metadata_json', sa.Text(), nullable=False), - sa.ForeignKeyConstraint(['access_token_id'], ['accesstoken.id'], ), - sa.ForeignKeyConstraint(['account_id'], ['user.id'], ), - sa.ForeignKeyConstraint(['kind_id'], ['logentrykind.id'], ), - sa.ForeignKeyConstraint(['performer_id'], ['user.id'], ), - sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], ), - sa.PrimaryKeyConstraint('id') - ) - 
op.create_index('logentry_access_token_id', 'logentry', ['access_token_id'], unique=False) - op.create_index('logentry_account_id', 'logentry', ['account_id'], unique=False) - op.create_index('logentry_datetime', 'logentry', ['datetime'], unique=False) - op.create_index('logentry_kind_id', 'logentry', ['kind_id'], unique=False) - op.create_index('logentry_performer_id', 'logentry', ['performer_id'], unique=False) - op.create_index('logentry_repository_id', 'logentry', ['repository_id'], unique=False) - op.create_table('repositorybuildtrigger', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('uuid', sa.String(length=255), nullable=False), - sa.Column('service_id', sa.Integer(), nullable=False), - sa.Column('repository_id', sa.Integer(), nullable=False), - sa.Column('connected_user_id', sa.Integer(), nullable=False), - sa.Column('auth_token', sa.String(length=255), nullable=False), - sa.Column('config', sa.Text(), nullable=False), - sa.Column('write_token_id', sa.Integer(), nullable=True), - sa.Column('pull_robot_id', sa.Integer(), nullable=True), - sa.ForeignKeyConstraint(['connected_user_id'], ['user.id'], ), - sa.ForeignKeyConstraint(['pull_robot_id'], ['user.id'], ), - sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], ), - sa.ForeignKeyConstraint(['service_id'], ['buildtriggerservice.id'], ), - sa.ForeignKeyConstraint(['write_token_id'], ['accesstoken.id'], ), - sa.PrimaryKeyConstraint('id') - ) - op.create_index('repositorybuildtrigger_connected_user_id', 'repositorybuildtrigger', ['connected_user_id'], unique=False) - op.create_index('repositorybuildtrigger_pull_robot_id', 'repositorybuildtrigger', ['pull_robot_id'], unique=False) - op.create_index('repositorybuildtrigger_repository_id', 'repositorybuildtrigger', ['repository_id'], unique=False) - op.create_index('repositorybuildtrigger_service_id', 'repositorybuildtrigger', ['service_id'], unique=False) - op.create_index('repositorybuildtrigger_write_token_id', 'repositorybuildtrigger', ['write_token_id'], unique=False) - op.create_table('repositorybuild', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('uuid', sa.String(length=255), nullable=False), - sa.Column('repository_id', sa.Integer(), nullable=False), - sa.Column('access_token_id', sa.Integer(), nullable=False), - sa.Column('resource_key', sa.String(length=255), nullable=False), - sa.Column('job_config', sa.Text(), nullable=False), - sa.Column('phase', sa.String(length=255), nullable=False), - sa.Column('started', sa.DateTime(), nullable=False), - sa.Column('display_name', sa.String(length=255), nullable=False), - sa.Column('trigger_id', sa.Integer(), nullable=True), - sa.Column('pull_robot_id', sa.Integer(), nullable=True), - sa.ForeignKeyConstraint(['access_token_id'], ['accesstoken.id'], ), - sa.ForeignKeyConstraint(['pull_robot_id'], ['user.id'], ), - sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], ), - sa.ForeignKeyConstraint(['trigger_id'], ['repositorybuildtrigger.id'], ), - sa.PrimaryKeyConstraint('id') - ) - op.create_index('repositorybuild_access_token_id', 'repositorybuild', ['access_token_id'], unique=False) - op.create_index('repositorybuild_pull_robot_id', 'repositorybuild', ['pull_robot_id'], unique=False) - op.create_index('repositorybuild_repository_id', 'repositorybuild', ['repository_id'], unique=False) - op.create_index('repositorybuild_resource_key', 'repositorybuild', ['resource_key'], unique=False) - op.create_index('repositorybuild_trigger_id', 'repositorybuild', ['trigger_id'], unique=False) - 
op.create_index('repositorybuild_uuid', 'repositorybuild', ['uuid'], unique=False) - ### end Alembic commands ### - - -def downgrade(tables): - ### commands auto generated by Alembic - please adjust! ### - op.drop_table('repositorybuild') - op.drop_table('repositorybuildtrigger') - op.drop_table('logentry') - op.drop_table('repositorytag') - op.drop_table('permissionprototype') - op.drop_table('image') - op.drop_table('oauthauthorizationcode') - op.drop_table('webhook') - op.drop_table('teammember') - op.drop_table('oauthaccesstoken') - op.drop_table('repositorypermission') - op.drop_table('accesstoken') - op.drop_table('repository') - op.drop_table('team') - op.drop_table('emailconfirmation') - op.drop_table('notification') - op.drop_table('oauthapplication') - op.drop_table('federatedlogin') - op.drop_table('buildtriggerservice') - op.drop_table('user') - op.drop_table('visibility') - op.drop_table('teamrole') - op.drop_table('notificationkind') - op.drop_table('logentrykind') - op.drop_table('role') - op.drop_table('queueitem') - op.drop_table('imagestorage') - op.drop_table('loginservice') - ### end Alembic commands ### diff --git a/data/migrations/versions/5a2445ffe21b_add_new_derivedstorageforimage_table.py b/data/migrations/versions/5a2445ffe21b_add_new_derivedstorageforimage_table.py deleted file mode 100644 index b998c2c2c..000000000 --- a/data/migrations/versions/5a2445ffe21b_add_new_derivedstorageforimage_table.py +++ /dev/null @@ -1,39 +0,0 @@ -"""Add new DerivedStorageForImage table - -Revision ID: 5a2445ffe21b -Revises: 1b2bb93ceb82 -Create Date: 2015-11-24 11:58:02.956687 - -""" - -# revision identifiers, used by Alembic. -revision = '5a2445ffe21b' -down_revision = '1b2bb93ceb82' - -from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects import mysql - -def upgrade(tables): - ### commands auto generated by Alembic - please adjust! ### - op.create_table('derivedstorageforimage', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('source_image_id', sa.Integer(), nullable=False), - sa.Column('derivative_id', sa.Integer(), nullable=False), - sa.Column('transformation_id', sa.Integer(), nullable=False), - sa.ForeignKeyConstraint(['derivative_id'], ['imagestorage.id'], name=op.f('fk_derivedstorageforimage_derivative_id_imagestorage')), - sa.ForeignKeyConstraint(['source_image_id'], ['image.id'], name=op.f('fk_derivedstorageforimage_source_image_id_image')), - sa.ForeignKeyConstraint(['transformation_id'], ['imagestoragetransformation.id'], name=op.f('fk_derivedstorageforimage_transformation_constraint')), - sa.PrimaryKeyConstraint('id', name=op.f('pk_derivedstorageforimage')) - ) - op.create_index('derivedstorageforimage_derivative_id', 'derivedstorageforimage', ['derivative_id'], unique=False) - op.create_index('derivedstorageforimage_source_image_id', 'derivedstorageforimage', ['source_image_id'], unique=False) - op.create_index('derivedstorageforimage_source_image_id_transformation_id', 'derivedstorageforimage', ['source_image_id', 'transformation_id'], unique=True) - op.create_index('derivedstorageforimage_transformation_id', 'derivedstorageforimage', ['transformation_id'], unique=False) - ### end Alembic commands ### - - -def downgrade(tables): - ### commands auto generated by Alembic - please adjust! 
### - op.drop_table('derivedstorageforimage') - ### end Alembic commands ### diff --git a/data/migrations/versions/5ad999136045_add_signature_storage.py b/data/migrations/versions/5ad999136045_add_signature_storage.py deleted file mode 100644 index 5369b3ec5..000000000 --- a/data/migrations/versions/5ad999136045_add_signature_storage.py +++ /dev/null @@ -1,54 +0,0 @@ -"""Add signature storage - -Revision ID: 5ad999136045 -Revises: 228d1af6af1c -Create Date: 2015-02-05 15:01:54.989573 - -""" - -# revision identifiers, used by Alembic. -revision = '5ad999136045' -down_revision = '228d1af6af1c' - -from alembic import op -import sqlalchemy as sa - -def upgrade(tables): - ### commands auto generated by Alembic - please adjust! ### - op.create_table('imagestoragesignaturekind', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('name', sa.String(length=255), nullable=False), - sa.PrimaryKeyConstraint('id', name=op.f('pk_imagestoragesignaturekind')) - ) - op.create_index('imagestoragesignaturekind_name', 'imagestoragesignaturekind', ['name'], unique=True) - op.create_table('imagestoragesignature', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('storage_id', sa.Integer(), nullable=False), - sa.Column('kind_id', sa.Integer(), nullable=False), - sa.Column('signature', sa.Text(), nullable=True), - sa.Column('uploading', sa.Boolean(), nullable=True), - sa.ForeignKeyConstraint(['kind_id'], ['imagestoragesignaturekind.id'], name=op.f('fk_imagestoragesignature_kind_id_imagestoragesignaturekind')), - sa.ForeignKeyConstraint(['storage_id'], ['imagestorage.id'], name=op.f('fk_imagestoragesignature_storage_id_imagestorage')), - sa.PrimaryKeyConstraint('id', name=op.f('pk_imagestoragesignature')) - ) - op.create_index('imagestoragesignature_kind_id', 'imagestoragesignature', ['kind_id'], unique=False) - op.create_index('imagestoragesignature_kind_id_storage_id', 'imagestoragesignature', ['kind_id', 'storage_id'], unique=True) - op.create_index('imagestoragesignature_storage_id', 'imagestoragesignature', ['storage_id'], unique=False) - ### end Alembic commands ### - - op.bulk_insert(tables.imagestoragetransformation, - [ - {'name':'aci'}, - ]) - - op.bulk_insert(tables.imagestoragesignaturekind, - [ - {'name':'gpg2'}, - ]) - - -def downgrade(tables): - ### commands auto generated by Alembic - please adjust! ### - op.drop_table('imagestoragesignature') - op.drop_table('imagestoragesignaturekind') - ### end Alembic commands ### diff --git a/data/migrations/versions/5b84373e5db_convert_slack_webhook_data.py b/data/migrations/versions/5b84373e5db_convert_slack_webhook_data.py deleted file mode 100644 index 52f431b74..000000000 --- a/data/migrations/versions/5b84373e5db_convert_slack_webhook_data.py +++ /dev/null @@ -1,24 +0,0 @@ -"""Convert slack webhook data - -Revision ID: 5b84373e5db -Revises: 1c5b738283a5 -Create Date: 2014-12-16 12:02:55.167744 - -""" - -# revision identifiers, used by Alembic. 
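The downgrade of 5ad999136045 above drops imagestoragesignature before imagestoragesignaturekind, i.e. the FK-holding table before the table it references; the reverse order would fail wherever foreign keys are enforced. The rule, sketched with hypothetical table names:

    from alembic import op

    def downgrade(tables):
        # Drop child (FK-holding) tables before the parents they reference.
        op.drop_table('examplesignature')      # holds FK -> examplesignaturekind
        op.drop_table('examplesignaturekind')  # safe once nothing references it
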
-revision = '5b84373e5db' -down_revision = '1c5b738283a5' - -from alembic import op -import sqlalchemy as sa - -from util.migrate.migrateslackwebhook import run_slackwebhook_migration - - -def upgrade(tables): - run_slackwebhook_migration() - - -def downgrade(tables): - pass diff --git a/data/migrations/versions/5cdc2d819c5_add_vulnerability_found_event.py b/data/migrations/versions/5cdc2d819c5_add_vulnerability_found_event.py deleted file mode 100644 index f9440acfb..000000000 --- a/data/migrations/versions/5cdc2d819c5_add_vulnerability_found_event.py +++ /dev/null @@ -1,41 +0,0 @@ -"""Add vulnerability_found event - -Revision ID: 5cdc2d819c5 -Revises: 50925110da8c -Create Date: 2015-10-13 18:05:32.157858 - -""" - -# revision identifiers, used by Alembic. -revision = '5cdc2d819c5' -down_revision = '50925110da8c' - -from alembic import op -import sqlalchemy as sa - - - -def upgrade(tables): - op.bulk_insert(tables.externalnotificationevent, - [ - {'name':'vulnerability_found'}, - ]) - - op.bulk_insert(tables.notificationkind, - [ - {'name':'vulnerability_found'}, - ]) - - -def downgrade(tables): - op.execute( - (tables.externalnotificationevent.delete() - .where(tables.externalnotificationevent.c.name == op.inline_literal('vulnerability_found'))) - - ) - - op.execute( - (tables.notificationkind.delete() - .where(tables.notificationkind.c.name == op.inline_literal('vulnerability_found'))) - - ) diff --git a/data/migrations/versions/67eb43c778b_add_index_for_repository_datetime_to_.py b/data/migrations/versions/67eb43c778b_add_index_for_repository_datetime_to_.py deleted file mode 100644 index 00ff374e4..000000000 --- a/data/migrations/versions/67eb43c778b_add_index_for_repository_datetime_to_.py +++ /dev/null @@ -1,26 +0,0 @@ -"""add index for repository+datetime to logentry - -Revision ID: 67eb43c778b -Revises: 1c3decf6b9c4 -Create Date: 2015-04-19 16:00:39.126289 - -""" - -# revision identifiers, used by Alembic. -revision = '67eb43c778b' -down_revision = '1c3decf6b9c4' - -from alembic import op -import sqlalchemy as sa - - -def upgrade(tables): - ### commands auto generated by Alembic - please adjust! ### - op.create_index('logentry_repository_id_datetime', 'logentry', ['repository_id', 'datetime'], unique=False) - ### end Alembic commands ### - - -def downgrade(tables): - ### commands auto generated by Alembic - please adjust! ### - op.drop_index('logentry_repository_id_datetime', table_name='logentry') - ### end Alembic commands ### diff --git a/data/migrations/versions/6f2ecf5afcf_add_the_uncompressed_size_to_image_.py b/data/migrations/versions/6f2ecf5afcf_add_the_uncompressed_size_to_image_.py deleted file mode 100644 index 0022ae128..000000000 --- a/data/migrations/versions/6f2ecf5afcf_add_the_uncompressed_size_to_image_.py +++ /dev/null @@ -1,25 +0,0 @@ -"""add the uncompressed size to image storage - -Revision ID: 6f2ecf5afcf -Revises: 13da56878560 -Create Date: 2014-09-22 14:39:13.470566 - -""" - -# revision identifiers, used by Alembic. -revision = '6f2ecf5afcf' -down_revision = '13da56878560' - -from alembic import op -import sqlalchemy as sa - - -def upgrade(tables): - ### commands auto generated by Alembic - please adjust! ### - op.add_column('imagestorage', sa.Column('uncompressed_size', sa.BigInteger(), nullable=True)) - ### end Alembic commands ### - -def downgrade(tables): - ### commands auto generated by Alembic - please adjust! 
### - op.drop_column('imagestorage', 'uncompressed_size') - ### end Alembic commands ### diff --git a/data/migrations/versions/707d5191eda_change_build_queue_reference_from_.py b/data/migrations/versions/707d5191eda_change_build_queue_reference_from_.py deleted file mode 100644 index dc8f88087..000000000 --- a/data/migrations/versions/707d5191eda_change_build_queue_reference_from_.py +++ /dev/null @@ -1,33 +0,0 @@ -"""Change build queue reference from foreign key to an id. - -Revision ID: 707d5191eda -Revises: 4ef04c61fcf9 -Create Date: 2015-02-23 12:36:33.814528 - -""" - -# revision identifiers, used by Alembic. -revision = '707d5191eda' -down_revision = '4ef04c61fcf9' - -from alembic import op -import sqlalchemy as sa - -def upgrade(tables): - ### commands auto generated by Alembic - please adjust! ### - op.add_column('repositorybuild', sa.Column('queue_id', sa.String(length=255), nullable=True)) - op.create_index('repositorybuild_queue_id', 'repositorybuild', ['queue_id'], unique=False) - op.drop_constraint(u'fk_repositorybuild_queue_item_id_queueitem', 'repositorybuild', type_='foreignkey') - op.drop_index('repositorybuild_queue_item_id', table_name='repositorybuild') - op.drop_column('repositorybuild', 'queue_item_id') - ### end Alembic commands ### - - -def downgrade(tables): - ### commands auto generated by Alembic - please adjust! ### - op.add_column('repositorybuild', sa.Column('queue_item_id', sa.Integer(), autoincrement=False, nullable=True)) - op.create_foreign_key(u'fk_repositorybuild_queue_item_id_queueitem', 'repositorybuild', 'queueitem', ['queue_item_id'], ['id']) - op.create_index('repositorybuild_queue_item_id', 'repositorybuild', ['queue_item_id'], unique=False) - op.drop_index('repositorybuild_queue_id', table_name='repositorybuild') - op.drop_column('repositorybuild', 'queue_id') - ### end Alembic commands ### diff --git a/data/migrations/versions/73669db7e12_remove_legacy_github_column.py b/data/migrations/versions/73669db7e12_remove_legacy_github_column.py deleted file mode 100644 index 38698c5eb..000000000 --- a/data/migrations/versions/73669db7e12_remove_legacy_github_column.py +++ /dev/null @@ -1,25 +0,0 @@ -"""Remove legacy github column - -Revision ID: 73669db7e12 -Revises: 35f538da62 -Create Date: 2015-11-04 16:18:18.107314 - -""" - -# revision identifiers, used by Alembic. -revision = '73669db7e12' -down_revision = '35f538da62' - -from alembic import op -import sqlalchemy as sa - -def upgrade(tables): - ### commands auto generated by Alembic - please adjust! ### - op.drop_column('repositorybuildtrigger', 'used_legacy_github') - ### end Alembic commands ### - - -def downgrade(tables): - ### commands auto generated by Alembic - please adjust! ### - op.add_column('repositorybuildtrigger', sa.Column('used_legacy_github', sa.Boolean(), nullable=True)) - ### end Alembic commands ### diff --git a/data/migrations/versions/790d91952fa8_add_missing_indexes.py b/data/migrations/versions/790d91952fa8_add_missing_indexes.py deleted file mode 100644 index d6f62078d..000000000 --- a/data/migrations/versions/790d91952fa8_add_missing_indexes.py +++ /dev/null @@ -1,29 +0,0 @@ -"""Add missing indexes - -Revision ID: 790d91952fa8 -Revises: 1093d8b212bb -Create Date: 2016-08-03 17:05:06.675520 - -""" - -# revision identifiers, used by Alembic. -revision = '790d91952fa8' -down_revision = '1093d8b212bb' - -from alembic import op -import sqlalchemy as sa - -def upgrade(tables): - ### commands auto generated by Alembic - please adjust! 
### - op.create_index('repositorybuild_repository_id_started_phase', 'repositorybuild', ['repository_id', 'started', 'phase'], unique=False) - op.create_index('repositorybuild_started', 'repositorybuild', ['started'], unique=False) - op.create_index('repositorybuild_started_logs_archived_phase', 'repositorybuild', ['started', 'logs_archived', 'phase'], unique=False) - ### end Alembic commands ### - - -def downgrade(tables): - ### commands auto generated by Alembic - please adjust! ### - op.drop_index('repositorybuild_started_logs_archived_phase', table_name='repositorybuild') - op.drop_index('repositorybuild_started', table_name='repositorybuild') - op.drop_index('repositorybuild_repository_id_started_phase', table_name='repositorybuild') - ### end Alembic commands ### diff --git a/data/migrations/versions/82297d834ad_add_us_west_location.py b/data/migrations/versions/82297d834ad_add_us_west_location.py deleted file mode 100644 index c564557a8..000000000 --- a/data/migrations/versions/82297d834ad_add_us_west_location.py +++ /dev/null @@ -1,28 +0,0 @@ -"""add US West location - -Revision ID: 82297d834ad -Revises: 47670cbeced -Create Date: 2014-08-15 13:35:23.834079 - -""" - -# revision identifiers, used by Alembic. -revision = '82297d834ad' -down_revision = '47670cbeced' - -from alembic import op -import sqlalchemy as sa - -def upgrade(tables): - op.bulk_insert(tables.imagestoragelocation, - [ - {'name':'s3_us_west_1'}, - ]) - - -def downgrade(tables): - op.execute( - (tables.imagestoragelocation.delete() - .where(tables.imagestoragelocation.c.name == op.inline_literal('s3_us_west_1'))) - - ) diff --git a/data/migrations/versions/88e0f440a2f_add_created_field_to_the_blobupload_.py b/data/migrations/versions/88e0f440a2f_add_created_field_to_the_blobupload_.py deleted file mode 100644 index 0fd70fc87..000000000 --- a/data/migrations/versions/88e0f440a2f_add_created_field_to_the_blobupload_.py +++ /dev/null @@ -1,29 +0,0 @@ -"""Add created field to the BlobUpload table - -Revision ID: 88e0f440a2f -Revises: 403d02fea323 -Create Date: 2015-12-14 15:19:11.825279 - -""" - -# revision identifiers, used by Alembic. -revision = '88e0f440a2f' -down_revision = '403d02fea323' - -from alembic import op -import sqlalchemy as sa -from datetime import datetime - -def upgrade(tables): - ### commands auto generated by Alembic - please adjust! ### - now = datetime.now().strftime("'%Y-%m-%d %H:%M:%S'") - op.add_column('blobupload', sa.Column('created', sa.DateTime(), nullable=False, server_default=sa.text(now))) - op.create_index('blobupload_created', 'blobupload', ['created'], unique=False) - ### end Alembic commands ### - - -def downgrade(tables): - ### commands auto generated by Alembic - please adjust! ### - op.drop_index('blobupload_created', table_name='blobupload') - op.drop_column('blobupload', 'created') - ### end Alembic commands ### diff --git a/data/migrations/versions/8981dabd329f_add_logentry_index_for_lookup_by_account.py b/data/migrations/versions/8981dabd329f_add_logentry_index_for_lookup_by_account.py deleted file mode 100644 index 4434982d1..000000000 --- a/data/migrations/versions/8981dabd329f_add_logentry_index_for_lookup_by_account.py +++ /dev/null @@ -1,26 +0,0 @@ -"""Add logentry index for lookup by account - -Revision ID: 8981dabd329f -Revises: 790d91952fa8 -Create Date: 2016-08-12 16:50:15.816120 - -""" - -# revision identifiers, used by Alembic. 
-revision = '8981dabd329f' -down_revision = '790d91952fa8' - -from alembic import op - -def upgrade(tables): - ### commands auto generated by Alembic - please adjust! ### - op.create_index('logentry_account_id_datetime', 'logentry', ['account_id', 'datetime'], unique=False) - op.create_index('logentry_performer_id_datetime', 'logentry', ['performer_id', 'datetime'], unique=False) - ### end Alembic commands ### - - -def downgrade(tables): - ### commands auto generated by Alembic - please adjust! ### - op.drop_index('logentry_account_id_datetime', table_name='logentry') - op.drop_index('logentry_performer_id_datetime', table_name='logentry') - ### end Alembic commands ### diff --git a/data/migrations/versions/9512773a4a2_add_userregion_table.py b/data/migrations/versions/9512773a4a2_add_userregion_table.py deleted file mode 100644 index 212110054..000000000 --- a/data/migrations/versions/9512773a4a2_add_userregion_table.py +++ /dev/null @@ -1,35 +0,0 @@ -"""Add UserRegion table - -Revision ID: 9512773a4a2 -Revises: 499f6f08de3 -Create Date: 2015-09-01 14:17:08.628052 - -""" - -# revision identifiers, used by Alembic. -revision = '9512773a4a2' -down_revision = '499f6f08de3' - -from alembic import op -import sqlalchemy as sa - - -def upgrade(tables): - ### commands auto generated by Alembic - please adjust! ### - op.create_table('userregion', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('user_id', sa.Integer(), nullable=False), - sa.Column('location_id', sa.Integer(), nullable=False), - sa.ForeignKeyConstraint(['location_id'], ['imagestoragelocation.id'], name=op.f('fk_userregion_location_id_imagestoragelocation')), - sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_userregion_user_id_user')), - sa.PrimaryKeyConstraint('id', name=op.f('pk_userregion')) - ) - op.create_index('userregion_location_id', 'userregion', ['location_id'], unique=False) - op.create_index('userregion_user_id', 'userregion', ['user_id'], unique=False) - ### end Alembic commands ### - - -def downgrade(tables): - ### commands auto generated by Alembic - please adjust! ### - op.drop_table('userregion') - ### end Alembic commands ### diff --git a/data/migrations/versions/956a0833223_add_backfill_for_torrent_shas_and_.py b/data/migrations/versions/956a0833223_add_backfill_for_torrent_shas_and_.py deleted file mode 100644 index fc2499bec..000000000 --- a/data/migrations/versions/956a0833223_add_backfill_for_torrent_shas_and_.py +++ /dev/null @@ -1,26 +0,0 @@ -"""Add backfill for torrent shas and checksums - -Revision ID: 956a0833223 -Revises: 23ca04d0bc8e -Create Date: 2016-01-08 17:11:07.261123 - -""" - -# revision identifiers, used by Alembic. -revision = '956a0833223' -down_revision = '23ca04d0bc8e' - -from app import app -from util.migrate.backfill_content_checksums_and_torrent_pieces import backfill_content_checksums_and_torrent_pieces - - -def upgrade(tables): - ### commands auto generated by Alembic - please adjust! ### - backfill_content_checksums_and_torrent_pieces(app.config['BITTORRENT_PIECE_SIZE']) - ### end Alembic commands ### - - -def downgrade(tables): - ### commands auto generated by Alembic - please adjust! 
### - pass - ### end Alembic commands ### diff --git a/data/migrations/versions/983247d75af3_rename_predictable.py b/data/migrations/versions/983247d75af3_rename_predictable.py deleted file mode 100644 index d99797955..000000000 --- a/data/migrations/versions/983247d75af3_rename_predictable.py +++ /dev/null @@ -1,141 +0,0 @@ -"""Rename all foreign key constraints to have predictable names - -Revision ID: 983247d75af3 -Revises: 8981dabd329f -Create Date: 2016-08-08 16:33:00.198527 - -""" - -# revision identifiers, used by Alembic. -revision = '983247d75af3' -down_revision = '8981dabd329f' - -from alembic import op -from functools import wraps -import sqlalchemy as sa -import logging - -logger = logging.getLogger(__name__) - -CORRECT_FKC_NAMES = { - # Names that existed according to the constraint in previous migrations - "fk_accesstoken_kind_id_accesstokenkind", - "fk_blobupload_location_id_imagestoragelocation", - "fk_blobupload_repository_id_repository", - "fk_derivedstorageforimage_derivative_id_imagestorage", - "fk_derivedstorageforimage_source_image_id_image", - "fk_imagestoragesignature_kind_id_imagestoragesignaturekind", - "fk_imagestoragesignature_storage_id_imagestorage", - "fk_quayrelease_region_id_quayregion", - "fk_quayrelease_service_id_quayservice", - "fk_repositoryactioncount_repository_id_repository", - "fk_servicekey_approval_id_servicekeyapproval", - "fk_star_repository_id_repository", - "fk_star_user_id_user", - "fk_tagmanifest_tag_id_repositorytag", - "fk_torrentinfo_storage_id_imagestorage", - "fk_userregion_location_id_imagestoragelocation", - "fk_userregion_user_id_user", - - # Names that had to be set manually as truncated versions - 'fk_derivedstorageforimage_transformation_constraint', -} - - -def _generate_all_foreign_key_constraint_names(): - conn = op.get_bind() - inspector = sa.inspect(conn.engine) - for table_name, fkc_list in inspector.get_sorted_table_and_fkc_names(): - for fkc_name in fkc_list: - yield table_name, fkc_name - - -def _wrap_create_op(create_foreign_key_func, existing_fk_tuple_set): - fk_name_set = set() - @wraps(create_foreign_key_func) - def wrapped(fk_name, table_name, *args, **kwargs): - fk_name_set.add(fk_name) - - if (table_name, fk_name) in existing_fk_tuple_set: - logger.debug('Skipping already correct fkc: %s', fk_name) - else: - logger.debug('Creating foreign key constraint: %s', fk_name) - return create_foreign_key_func(op.f(fk_name), table_name, *args, **kwargs) - return wrapped, fk_name_set - - -def _disable_constraints(): - conn = op.get_bind() - if conn.dialect.name == 'mysql': - logger.debug('Setting mysql foreign_key_checks=0') - op.execute('SET FOREIGN_KEY_CHECKS=0') - else: - logger.warning('Unable to disable foreign key checks for dialect: %s', conn.dialect.name) - - -def upgrade(tables): - existing_fk_tuples = set(_generate_all_foreign_key_constraint_names()) - create_fk, new_fk_name_set = _wrap_create_op(op.create_foreign_key, existing_fk_tuples) - - _disable_constraints() - - create_fk('fk_accesstoken_role_id_role', 'accesstoken', 'role', ['role_id'], ['id']) - create_fk('fk_accesstoken_repository_id_repository', 'accesstoken', 'repository', ['repository_id'], ['id']) - create_fk('fk_emailconfirmation_user_id_user', 'emailconfirmation', 'user', ['user_id'], ['id']) - create_fk('fk_federatedlogin_user_id_user', 'federatedlogin', 'user', ['user_id'], ['id']) - create_fk('fk_federatedlogin_service_id_loginservice', 'federatedlogin', 'loginservice', ['service_id'], ['id']) - create_fk('fk_image_repository_id_repository', 
'image', 'repository', ['repository_id'], ['id']) - create_fk('fk_image_storage_id_imagestorage', 'image', 'imagestorage', ['storage_id'], ['id']) - create_fk('fk_imagestorageplacement_location_id_imagestoragelocation', 'imagestorageplacement', 'imagestoragelocation', ['location_id'], ['id']) - create_fk('fk_imagestorageplacement_storage_id_imagestorage', 'imagestorageplacement', 'imagestorage', ['storage_id'], ['id']) - create_fk('fk_logentry_kind_id_logentrykind', 'logentry', 'logentrykind', ['kind_id'], ['id']) - create_fk('fk_notification_target_id_user', 'notification', 'user', ['target_id'], ['id']) - create_fk('fk_notification_kind_id_notificationkind', 'notification', 'notificationkind', ['kind_id'], ['id']) - create_fk('fk_oauthaccesstoken_authorized_user_id_user', 'oauthaccesstoken', 'user', ['authorized_user_id'], ['id']) - create_fk('fk_oauthaccesstoken_application_id_oauthapplication', 'oauthaccesstoken', 'oauthapplication', ['application_id'], ['id']) - create_fk('fk_oauthapplication_organization_id_user', 'oauthapplication', 'user', ['organization_id'], ['id']) - create_fk('fk_oauthauthorizationcode_application_id_oauthapplication', 'oauthauthorizationcode', 'oauthapplication', ['application_id'], ['id']) - create_fk('fk_permissionprototype_delegate_team_id_team', 'permissionprototype', 'team', ['delegate_team_id'], ['id']) - create_fk('fk_permissionprototype_role_id_role', 'permissionprototype', 'role', ['role_id'], ['id']) - create_fk('fk_permissionprototype_delegate_user_id_user', 'permissionprototype', 'user', ['delegate_user_id'], ['id']) - create_fk('fk_permissionprototype_activating_user_id_user', 'permissionprototype', 'user', ['activating_user_id'], ['id']) - create_fk('fk_permissionprototype_org_id_user', 'permissionprototype', 'user', ['org_id'], ['id']) - create_fk('fk_repository_namespace_user_id_user', 'repository', 'user', ['namespace_user_id'], ['id']) - create_fk('fk_repository_visibility_id_visibility', 'repository', 'visibility', ['visibility_id'], ['id']) - create_fk('fk_repositoryauthorizedemail_repository_id_repository', 'repositoryauthorizedemail', 'repository', ['repository_id'], ['id']) - create_fk('fk_repositorybuild_access_token_id_accesstoken', 'repositorybuild', 'accesstoken', ['access_token_id'], ['id']) - create_fk('fk_repositorybuild_pull_robot_id_user', 'repositorybuild', 'user', ['pull_robot_id'], ['id']) - create_fk('fk_repositorybuild_repository_id_repository', 'repositorybuild', 'repository', ['repository_id'], ['id']) - create_fk('fk_repositorybuild_trigger_id_repositorybuildtrigger', 'repositorybuild', 'repositorybuildtrigger', ['trigger_id'], ['id']) - create_fk('fk_repositorybuildtrigger_repository_id_repository', 'repositorybuildtrigger', 'repository', ['repository_id'], ['id']) - create_fk('fk_repositorybuildtrigger_connected_user_id_user', 'repositorybuildtrigger', 'user', ['connected_user_id'], ['id']) - create_fk('fk_repositorybuildtrigger_service_id_buildtriggerservice', 'repositorybuildtrigger', 'buildtriggerservice', ['service_id'], ['id']) - create_fk('fk_repositorybuildtrigger_pull_robot_id_user', 'repositorybuildtrigger', 'user', ['pull_robot_id'], ['id']) - create_fk('fk_repositorybuildtrigger_write_token_id_accesstoken', 'repositorybuildtrigger', 'accesstoken', ['write_token_id'], ['id']) - create_fk('fk_repositorynotification_method_id_externalnotificationmethod', 'repositorynotification', 'externalnotificationmethod', ['method_id'], ['id']) - create_fk('fk_repositorynotification_repository_id_repository', 
'repositorynotification', 'repository', ['repository_id'], ['id']) - create_fk('fk_repositorynotification_event_id_externalnotificationevent', 'repositorynotification', 'externalnotificationevent', ['event_id'], ['id']) - create_fk('fk_repositorypermission_role_id_role', 'repositorypermission', 'role', ['role_id'], ['id']) - create_fk('fk_repositorypermission_user_id_user', 'repositorypermission', 'user', ['user_id'], ['id']) - create_fk('fk_repositorypermission_repository_id_repository', 'repositorypermission', 'repository', ['repository_id'], ['id']) - create_fk('fk_repositorypermission_team_id_team', 'repositorypermission', 'team', ['team_id'], ['id']) - create_fk('fk_repositorytag_repository_id_repository', 'repositorytag', 'repository', ['repository_id'], ['id']) - create_fk('fk_repositorytag_image_id_image', 'repositorytag', 'image', ['image_id'], ['id']) - create_fk('fk_team_organization_id_user', 'team', 'user', ['organization_id'], ['id']) - create_fk('fk_team_role_id_teamrole', 'team', 'teamrole', ['role_id'], ['id']) - create_fk('fk_teammember_user_id_user', 'teammember', 'user', ['user_id'], ['id']) - create_fk('fk_teammember_team_id_team', 'teammember', 'team', ['team_id'], ['id']) - create_fk('fk_teammemberinvite_inviter_id_user', 'teammemberinvite', 'user', ['inviter_id'], ['id']) - create_fk('fk_teammemberinvite_team_id_team', 'teammemberinvite', 'team', ['team_id'], ['id']) - create_fk('fk_teammemberinvite_user_id_user', 'teammemberinvite', 'user', ['user_id'], ['id']) - - # Drop all of the fk names that aren't correct - final_correct_fk_names = new_fk_name_set | CORRECT_FKC_NAMES - for table_name, fk_name in existing_fk_tuples: - if fk_name not in final_correct_fk_names: - logger.debug("dropping: %s", fk_name) - op.drop_constraint(fk_name, table_name, type_='foreignkey') - -def downgrade(tables): - # nah - pass diff --git a/data/migrations/versions/9a1087b007d_allow_the_namespace_column_to_be_.py b/data/migrations/versions/9a1087b007d_allow_the_namespace_column_to_be_.py deleted file mode 100644 index a0726bf3b..000000000 --- a/data/migrations/versions/9a1087b007d_allow_the_namespace_column_to_be_.py +++ /dev/null @@ -1,29 +0,0 @@ -"""Allow the namespace column to be nullable. - -Revision ID: 9a1087b007d -Revises: 3f4fe1194671 -Create Date: 2014-10-01 16:11:21.277226 - -""" - -# revision identifiers, used by Alembic. 
-revision = '9a1087b007d' -down_revision = '3f4fe1194671' - -from alembic import op -import sqlalchemy as sa - - -def upgrade(tables): - op.drop_index('repository_namespace_name', table_name='repository') - op.alter_column('repository', 'namespace', nullable=True, existing_type=sa.String(length=255), - server_default=sa.text('NULL')) - - -def downgrade(tables): - conn = op.get_bind() - user_table_name_escaped = conn.dialect.identifier_preparer.format_table(tables['user']) - conn.execute('update repository set namespace = (select username from {0} where {0}.id = repository.namespace_user_id) where namespace is NULL'.format(user_table_name_escaped)) - - op.create_index('repository_namespace_name', 'repository', ['namespace', 'name'], unique=True) - op.alter_column('repository', 'namespace', nullable=False, existing_type=sa.String(length=255)) diff --git a/data/migrations/versions/a3002f7638d5_adding_in_messages_table.py b/data/migrations/versions/a3002f7638d5_adding_in_messages_table.py deleted file mode 100644 index c16ea7339..000000000 --- a/data/migrations/versions/a3002f7638d5_adding_in_messages_table.py +++ /dev/null @@ -1,26 +0,0 @@ -"""Adding in messages table - -Revision ID: a3002f7638d5 -Revises: c9b91bee7554 -Create Date: 2016-10-07 11:14:15.054546 - -""" - -# revision identifiers, used by Alembic. -revision = 'a3002f7638d5' -down_revision = 'c9b91bee7554' - -from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects import mysql - -def upgrade(tables): - op.create_table('messages', - sa.Column("id", sa.INTEGER, primary_key=True), - sa.Column("content", sa.UnicodeText, nullable=False), - sa.PrimaryKeyConstraint('id', name=op.f('pk_messages')) - ) - - -def downgrade(tables): - op.drop_table('messages') diff --git a/data/migrations/versions/a3ba52d02dec_initial_keyserver.py b/data/migrations/versions/a3ba52d02dec_initial_keyserver.py deleted file mode 100644 index 4e59ba1eb..000000000 --- a/data/migrations/versions/a3ba52d02dec_initial_keyserver.py +++ /dev/null @@ -1,91 +0,0 @@ -"""initial keyserver - -Revision ID: a3ba52d02dec -Revises: e4129c93e477 -Create Date: 2016-03-30 15:28:32.036753 - -""" - -# revision identifiers, used by Alembic. 
-revision = 'a3ba52d02dec' -down_revision = 'e4129c93e477' - -from alembic import op -import sqlalchemy as sa - -from util.migrate import UTF8LongText - -def upgrade(tables): - op.create_table( - 'servicekeyapproval', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('approver_id', sa.Integer(), nullable=True), - sa.Column('approval_type', sa.String(length=255), nullable=False), - sa.Column('approved_date', sa.DateTime(), nullable=False), - sa.Column('notes', UTF8LongText(), nullable=False), - sa.PrimaryKeyConstraint('id', name=op.f('pk_servicekeyapproval')), - ) - op.create_index('servicekeyapproval_approval_type', 'servicekeyapproval', ['approval_type'], unique=False) - op.create_index('servicekeyapproval_approver_id', 'servicekeyapproval', ['approver_id'], unique=False) - - - op.bulk_insert( - tables.notificationkind, - [{'name':'service_key_submitted'}], - ) - - - op.bulk_insert(tables.logentrykind, [ - {'name':'service_key_create'}, - {'name':'service_key_approve'}, - {'name':'service_key_delete'}, - {'name':'service_key_modify'}, - {'name':'service_key_extend'}, - {'name':'service_key_rotate'}, - ]) - - - op.create_table( - 'servicekey', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('name', sa.String(length=255), nullable=False), - sa.Column('kid', sa.String(length=255), nullable=False), - sa.Column('service', sa.String(length=255), nullable=False), - sa.Column('jwk', UTF8LongText(), nullable=False), - sa.Column('metadata', UTF8LongText(), nullable=False), - sa.Column('created_date', sa.DateTime(), nullable=False), - sa.Column('expiration_date', sa.DateTime(), nullable=True), - sa.Column('rotation_duration', sa.Integer(), nullable=True), - sa.Column('approval_id', sa.Integer(), nullable=True), - sa.ForeignKeyConstraint(['approval_id'], ['servicekeyapproval.id'], - name=op.f('fk_servicekey_approval_id_servicekeyapproval')), - sa.PrimaryKeyConstraint('id', name=op.f('pk_servicekey')), - ) - op.create_index('servicekey_approval_id', 'servicekey', ['approval_id'], unique=False) - op.create_index('servicekey_kid', 'servicekey', ['kid'], unique=True) - op.create_index('servicekey_service', 'servicekey', ['service'], unique=False) - - - op.add_column(u'notification', sa.Column('lookup_path', sa.String(length=255), nullable=True)) - op.create_index('notification_lookup_path', 'notification', ['lookup_path'], unique=False) - - -def downgrade(tables): - op.execute(tables.logentrykind.delete().where(tables.logentrykind.c.name == op.inline_literal('service_key_create'))) - op.execute(tables.logentrykind.delete().where(tables.logentrykind.c.name == op.inline_literal('service_key_approve'))) - op.execute(tables.logentrykind.delete().where(tables.logentrykind.c.name == op.inline_literal('service_key_delete'))) - op.execute(tables.logentrykind.delete().where(tables.logentrykind.c.name == op.inline_literal('service_key_modify'))) - op.execute(tables.logentrykind.delete().where(tables.logentrykind.c.name == op.inline_literal('service_key_extend'))) - op.execute(tables.logentrykind.delete().where(tables.logentrykind.c.name == op.inline_literal('service_key_rotate'))) - - - op.execute(tables.notificationkind.delete().where(tables.notificationkind.c.name == op.inline_literal('service_key_submitted'))) - - - op.drop_column(u'notification', 'lookup_path') - - - op.drop_table('servicekey') - - - op.drop_table('servicekeyapproval') diff --git a/data/migrations/versions/b1d41e2071b_add_an_index_to_the_uuid_in_the_image_.py 
b/data/migrations/versions/b1d41e2071b_add_an_index_to_the_uuid_in_the_image_.py deleted file mode 100644 index 71a9df794..000000000 --- a/data/migrations/versions/b1d41e2071b_add_an_index_to_the_uuid_in_the_image_.py +++ /dev/null @@ -1,22 +0,0 @@ -"""Add an index to the uuid in the image storage table. - -Revision ID: b1d41e2071b -Revises: 9a1087b007d -Create Date: 2014-10-06 18:42:10.021235 - -""" - -# revision identifiers, used by Alembic. -revision = 'b1d41e2071b' -down_revision = '9a1087b007d' - -from alembic import op -import sqlalchemy as sa - - -def upgrade(tables): - op.create_index('imagestorage_uuid', 'imagestorage', ['uuid'], unique=True) - - -def downgrade(tables): - op.drop_index('imagestorage_uuid', table_name='imagestorage') diff --git a/data/migrations/versions/bcdde200a1b_add_placements_and_locations_to_the_db.py b/data/migrations/versions/bcdde200a1b_add_placements_and_locations_to_the_db.py deleted file mode 100644 index 310f1dff2..000000000 --- a/data/migrations/versions/bcdde200a1b_add_placements_and_locations_to_the_db.py +++ /dev/null @@ -1,55 +0,0 @@ -"""Add placements and locations to the db. - -Revision ID: bcdde200a1b -Revises: 201d55b38649 -Create Date: 2014-06-18 13:32:42.907922 - -""" - -# revision identifiers, used by Alembic. -revision = 'bcdde200a1b' -down_revision = '201d55b38649' - -from alembic import op -import sqlalchemy as sa - - -def upgrade(tables): - ### commands auto generated by Alembic - please adjust! ### - op.create_table('imagestoragelocation', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('name', sa.String(length=255), nullable=False), - sa.PrimaryKeyConstraint('id') - ) - op.create_index('imagestoragelocation_name', 'imagestoragelocation', ['name'], unique=True) - - op.bulk_insert(tables.imagestoragelocation, - [ - {'name':'s3_us_east_1'}, - {'name':'s3_eu_west_1'}, - {'name':'s3_ap_southeast_1'}, - {'name':'s3_ap_southeast_2'}, - {'name':'s3_ap_northeast_1'}, - {'name':'s3_sa_east_1'}, - {'name':'local'}, - ]) - - op.create_table('imagestorageplacement', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('storage_id', sa.Integer(), nullable=False), - sa.Column('location_id', sa.Integer(), nullable=False), - sa.ForeignKeyConstraint(['location_id'], ['imagestoragelocation.id'], ), - sa.ForeignKeyConstraint(['storage_id'], ['imagestorage.id'], ), - sa.PrimaryKeyConstraint('id') - ) - op.create_index('imagestorageplacement_location_id', 'imagestorageplacement', ['location_id'], unique=False) - op.create_index('imagestorageplacement_storage_id', 'imagestorageplacement', ['storage_id'], unique=False) - op.create_index('imagestorageplacement_storage_id_location_id', 'imagestorageplacement', ['storage_id', 'location_id'], unique=True) - ### end Alembic commands ### - - -def downgrade(tables): - ### commands auto generated by Alembic - please adjust! ### - op.drop_table('imagestorageplacement') - op.drop_table('imagestoragelocation') - ### end Alembic commands ### diff --git a/data/migrations/versions/c156deb8845d_add_uuid_to_messages.py b/data/migrations/versions/c156deb8845d_add_uuid_to_messages.py deleted file mode 100644 index eae45341c..000000000 --- a/data/migrations/versions/c156deb8845d_add_uuid_to_messages.py +++ /dev/null @@ -1,23 +0,0 @@ -"""Add uuid to messages - -Revision ID: c156deb8845d -Revises: a3002f7638d5 -Create Date: 2016-10-11 15:44:29.450181 - -""" - -# revision identifiers, used by Alembic. 
-revision = 'c156deb8845d' -down_revision = 'a3002f7638d5' - -from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects import mysql - - -def upgrade(tables): - op.add_column('messages', sa.Column('uuid', sa.String(length=36), nullable=True)) - - -def downgrade(tables): - op.drop_column('messages', 'uuid') diff --git a/data/migrations/versions/c156deb8845d_reset_our_migrations_with_a_required_.py b/data/migrations/versions/c156deb8845d_reset_our_migrations_with_a_required_.py new file mode 100644 index 000000000..7e9f1743c --- /dev/null +++ b/data/migrations/versions/c156deb8845d_reset_our_migrations_with_a_required_.py @@ -0,0 +1,954 @@ +"""Reset our migrations with a required update + +Revision ID: c156deb8845d +Revises: None +Create Date: 2016-11-08 11:58:11.110762 + +""" + +# revision identifiers, used by Alembic. +revision = 'c156deb8845d' +down_revision = None + +from alembic import op +import sqlalchemy as sa +from util.migrate import UTF8LongText, UTF8CharField +from datetime import datetime + +def upgrade(tables): + now = datetime.now().strftime("'%Y-%m-%d %H:%M:%S'") + + op.create_table('accesstokenkind', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.PrimaryKeyConstraint('id', name=op.f('pk_accesstokenkind')) + ) + op.create_index('accesstokenkind_name', 'accesstokenkind', ['name'], unique=True) + op.create_table('buildtriggerservice', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.PrimaryKeyConstraint('id', name=op.f('pk_buildtriggerservice')) + ) + op.create_index('buildtriggerservice_name', 'buildtriggerservice', ['name'], unique=True) + op.create_table('externalnotificationevent', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.PrimaryKeyConstraint('id', name=op.f('pk_externalnotificationevent')) + ) + op.create_index('externalnotificationevent_name', 'externalnotificationevent', ['name'], unique=True) + op.create_table('externalnotificationmethod', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.PrimaryKeyConstraint('id', name=op.f('pk_externalnotificationmethod')) + ) + op.create_index('externalnotificationmethod_name', 'externalnotificationmethod', ['name'], unique=True) + op.create_table('imagestorage', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('uuid', sa.String(length=255), nullable=False), + sa.Column('checksum', sa.String(length=255), nullable=True), + sa.Column('image_size', sa.BigInteger(), nullable=True), + sa.Column('uncompressed_size', sa.BigInteger(), nullable=True), + sa.Column('uploading', sa.Boolean(), nullable=True), + sa.Column('cas_path', sa.Boolean(), nullable=False, server_default=sa.sql.expression.false()), + sa.Column('content_checksum', sa.String(length=255), nullable=True), + sa.PrimaryKeyConstraint('id', name=op.f('pk_imagestorage')) + ) + op.create_index('imagestorage_content_checksum', 'imagestorage', ['content_checksum'], unique=False) + op.create_index('imagestorage_uuid', 'imagestorage', ['uuid'], unique=True) + op.create_table('imagestoragelocation', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.PrimaryKeyConstraint('id', name=op.f('pk_imagestoragelocation')) + ) + op.create_index('imagestoragelocation_name', 'imagestoragelocation', ['name'], unique=True) + 
op.create_table('imagestoragesignaturekind', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.PrimaryKeyConstraint('id', name=op.f('pk_imagestoragesignaturekind')) + ) + op.create_index('imagestoragesignaturekind_name', 'imagestoragesignaturekind', ['name'], unique=True) + op.create_table('imagestoragetransformation', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.PrimaryKeyConstraint('id', name=op.f('pk_imagestoragetransformation')) + ) + op.create_index('imagestoragetransformation_name', 'imagestoragetransformation', ['name'], unique=True) + op.create_table('labelsourcetype', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('mutable', sa.Boolean(), nullable=False), + sa.PrimaryKeyConstraint('id', name=op.f('pk_labelsourcetype')) + ) + op.create_index('labelsourcetype_name', 'labelsourcetype', ['name'], unique=True) + op.create_table('logentrykind', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.PrimaryKeyConstraint('id', name=op.f('pk_logentrykind')) + ) + op.create_index('logentrykind_name', 'logentrykind', ['name'], unique=True) + op.create_table('loginservice', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.PrimaryKeyConstraint('id', name=op.f('pk_loginservice')) + ) + op.create_index('loginservice_name', 'loginservice', ['name'], unique=True) + op.create_table('mediatype', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.PrimaryKeyConstraint('id', name=op.f('pk_mediatype')) + ) + op.create_index('mediatype_name', 'mediatype', ['name'], unique=True) + op.create_table('messages', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('content', sa.Text(), nullable=False), + sa.Column('uuid', sa.String(length=36), nullable=True), + sa.PrimaryKeyConstraint('id', name=op.f('pk_messages')) + ) + op.create_table('notificationkind', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.PrimaryKeyConstraint('id', name=op.f('pk_notificationkind')) + ) + op.create_index('notificationkind_name', 'notificationkind', ['name'], unique=True) + op.create_table('quayregion', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.PrimaryKeyConstraint('id', name=op.f('pk_quayregion')) + ) + op.create_index('quayregion_name', 'quayregion', ['name'], unique=True) + op.create_table('quayservice', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.PrimaryKeyConstraint('id', name=op.f('pk_quayservice')) + ) + op.create_index('quayservice_name', 'quayservice', ['name'], unique=True) + op.create_table('queueitem', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('queue_name', sa.String(length=1024), nullable=False), + sa.Column('body', sa.Text(), nullable=False), + sa.Column('available_after', sa.DateTime(), nullable=False), + sa.Column('available', sa.Boolean(), nullable=False), + sa.Column('processing_expires', sa.DateTime(), nullable=True), + sa.Column('retries_remaining', sa.Integer(), nullable=False), + sa.PrimaryKeyConstraint('id', name=op.f('pk_queueitem')) + ) + op.create_index('queueitem_available', 
'queueitem', ['available'], unique=False) + op.create_index('queueitem_available_after', 'queueitem', ['available_after'], unique=False) + op.create_index('queueitem_processing_expires', 'queueitem', ['processing_expires'], unique=False) + op.create_index('queueitem_queue_name', 'queueitem', ['queue_name'], unique=False, mysql_length=767) + op.create_index('queueitem_retries_remaining', 'queueitem', ['retries_remaining'], unique=False) + op.create_table('role', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.PrimaryKeyConstraint('id', name=op.f('pk_role')) + ) + op.create_index('role_name', 'role', ['name'], unique=True) + op.create_table('servicekeyapproval', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('approver_id', sa.Integer(), nullable=True), + sa.Column('approval_type', sa.String(length=255), nullable=False), + sa.Column('approved_date', sa.DateTime(), nullable=False), + sa.Column('notes', UTF8LongText(), nullable=False), + sa.PrimaryKeyConstraint('id', name=op.f('pk_servicekeyapproval')) + ) + op.create_index('servicekeyapproval_approval_type', 'servicekeyapproval', ['approval_type'], unique=False) + op.create_index('servicekeyapproval_approver_id', 'servicekeyapproval', ['approver_id'], unique=False) + op.create_table('teamrole', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.PrimaryKeyConstraint('id', name=op.f('pk_teamrole')) + ) + op.create_index('teamrole_name', 'teamrole', ['name'], unique=False) + op.create_table('user', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('uuid', sa.String(length=36), nullable=True), + sa.Column('username', sa.String(length=255), nullable=False), + sa.Column('password_hash', sa.String(length=255), nullable=True), + sa.Column('email', sa.String(length=255), nullable=False), + sa.Column('verified', sa.Boolean(), nullable=False), + sa.Column('stripe_id', sa.String(length=255), nullable=True), + sa.Column('organization', sa.Boolean(), nullable=False), + sa.Column('robot', sa.Boolean(), nullable=False), + sa.Column('invoice_email', sa.Boolean(), nullable=False), + sa.Column('invalid_login_attempts', sa.Integer(), nullable=False, server_default='0'), + sa.Column('last_invalid_login', sa.DateTime(), nullable=False), + sa.Column('removed_tag_expiration_s', sa.Integer(), nullable=False, server_default='1209600'), + sa.Column('enabled', sa.Boolean(), nullable=False, server_default=sa.sql.expression.true()), + sa.Column('invoice_email_address', sa.String(length=255), nullable=True), + sa.PrimaryKeyConstraint('id', name=op.f('pk_user')) + ) + op.create_index('user_email', 'user', ['email'], unique=True) + op.create_index('user_invoice_email_address', 'user', ['invoice_email_address'], unique=False) + op.create_index('user_organization', 'user', ['organization'], unique=False) + op.create_index('user_robot', 'user', ['robot'], unique=False) + op.create_index('user_stripe_id', 'user', ['stripe_id'], unique=False) + op.create_index('user_username', 'user', ['username'], unique=True) + op.create_table('visibility', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.PrimaryKeyConstraint('id', name=op.f('pk_visibility')) + ) + op.create_index('visibility_name', 'visibility', ['name'], unique=True) + op.create_table('emailconfirmation', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('code', sa.String(length=255), nullable=False), 
+ sa.Column('user_id', sa.Integer(), nullable=False), + sa.Column('pw_reset', sa.Boolean(), nullable=False), + sa.Column('new_email', sa.String(length=255), nullable=True), + sa.Column('email_confirm', sa.Boolean(), nullable=False), + sa.Column('created', sa.DateTime(), nullable=False), + sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_emailconfirmation_user_id_user')), + sa.PrimaryKeyConstraint('id', name=op.f('pk_emailconfirmation')) + ) + op.create_index('emailconfirmation_code', 'emailconfirmation', ['code'], unique=True) + op.create_index('emailconfirmation_user_id', 'emailconfirmation', ['user_id'], unique=False) + op.create_table('federatedlogin', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('user_id', sa.Integer(), nullable=False), + sa.Column('service_id', sa.Integer(), nullable=False), + sa.Column('service_ident', sa.String(length=255), nullable=False), + sa.Column('metadata_json', sa.Text(), nullable=False), + sa.ForeignKeyConstraint(['service_id'], ['loginservice.id'], name=op.f('fk_federatedlogin_service_id_loginservice')), + sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_federatedlogin_user_id_user')), + sa.PrimaryKeyConstraint('id', name=op.f('pk_federatedlogin')) + ) + op.create_index('federatedlogin_service_id', 'federatedlogin', ['service_id'], unique=False) + op.create_index('federatedlogin_service_id_service_ident', 'federatedlogin', ['service_id', 'service_ident'], unique=True) + op.create_index('federatedlogin_service_id_user_id', 'federatedlogin', ['service_id', 'user_id'], unique=True) + op.create_index('federatedlogin_user_id', 'federatedlogin', ['user_id'], unique=False) + op.create_table('imagestorageplacement', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('storage_id', sa.Integer(), nullable=False), + sa.Column('location_id', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(['location_id'], ['imagestoragelocation.id'], name=op.f('fk_imagestorageplacement_location_id_imagestoragelocation')), + sa.ForeignKeyConstraint(['storage_id'], ['imagestorage.id'], name=op.f('fk_imagestorageplacement_storage_id_imagestorage')), + sa.PrimaryKeyConstraint('id', name=op.f('pk_imagestorageplacement')) + ) + op.create_index('imagestorageplacement_location_id', 'imagestorageplacement', ['location_id'], unique=False) + op.create_index('imagestorageplacement_storage_id', 'imagestorageplacement', ['storage_id'], unique=False) + op.create_index('imagestorageplacement_storage_id_location_id', 'imagestorageplacement', ['storage_id', 'location_id'], unique=True) + op.create_table('imagestoragesignature', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('storage_id', sa.Integer(), nullable=False), + sa.Column('kind_id', sa.Integer(), nullable=False), + sa.Column('signature', sa.Text(), nullable=True), + sa.Column('uploading', sa.Boolean(), nullable=True), + sa.ForeignKeyConstraint(['kind_id'], ['imagestoragesignaturekind.id'], name=op.f('fk_imagestoragesignature_kind_id_imagestoragesignaturekind')), + sa.ForeignKeyConstraint(['storage_id'], ['imagestorage.id'], name=op.f('fk_imagestoragesignature_storage_id_imagestorage')), + sa.PrimaryKeyConstraint('id', name=op.f('pk_imagestoragesignature')) + ) + op.create_index('imagestoragesignature_kind_id', 'imagestoragesignature', ['kind_id'], unique=False) + op.create_index('imagestoragesignature_kind_id_storage_id', 'imagestoragesignature', ['kind_id', 'storage_id'], unique=True) + op.create_index('imagestoragesignature_storage_id', 'imagestoragesignature', 
['storage_id'], unique=False) + op.create_table('label', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('uuid', sa.String(length=255), nullable=False), + sa.Column('key', UTF8CharField(length=255), nullable=False), + sa.Column('value', UTF8LongText(), nullable=False), + sa.Column('media_type_id', sa.Integer(), nullable=False), + sa.Column('source_type_id', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(['media_type_id'], ['mediatype.id'], name=op.f('fk_label_media_type_id_mediatype')), + sa.ForeignKeyConstraint(['source_type_id'], ['labelsourcetype.id'], name=op.f('fk_label_source_type_id_labelsourcetype')), + sa.PrimaryKeyConstraint('id', name=op.f('pk_label')) + ) + op.create_index('label_key', 'label', ['key'], unique=False) + op.create_index('label_media_type_id', 'label', ['media_type_id'], unique=False) + op.create_index('label_source_type_id', 'label', ['source_type_id'], unique=False) + op.create_index('label_uuid', 'label', ['uuid'], unique=True) + op.create_table('logentry', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('kind_id', sa.Integer(), nullable=False), + sa.Column('account_id', sa.Integer(), nullable=False), + sa.Column('performer_id', sa.Integer(), nullable=True), + sa.Column('repository_id', sa.Integer(), nullable=True), + sa.Column('datetime', sa.DateTime(), nullable=False), + sa.Column('ip', sa.String(length=255), nullable=True), + sa.Column('metadata_json', sa.Text(), nullable=False), + sa.ForeignKeyConstraint(['kind_id'], ['logentrykind.id'], name=op.f('fk_logentry_kind_id_logentrykind')), + sa.PrimaryKeyConstraint('id', name=op.f('pk_logentry')) + ) + op.create_index('logentry_account_id', 'logentry', ['account_id'], unique=False) + op.create_index('logentry_account_id_datetime', 'logentry', ['account_id', 'datetime'], unique=False) + op.create_index('logentry_datetime', 'logentry', ['datetime'], unique=False) + op.create_index('logentry_kind_id', 'logentry', ['kind_id'], unique=False) + op.create_index('logentry_performer_id', 'logentry', ['performer_id'], unique=False) + op.create_index('logentry_performer_id_datetime', 'logentry', ['performer_id', 'datetime'], unique=False) + op.create_index('logentry_repository_id', 'logentry', ['repository_id'], unique=False) + op.create_index('logentry_repository_id_datetime', 'logentry', ['repository_id', 'datetime'], unique=False) + op.create_index('logentry_repository_id_datetime_kind_id', 'logentry', ['repository_id', 'datetime', 'kind_id'], unique=False) + op.create_table('notification', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('uuid', sa.String(length=255), nullable=False), + sa.Column('kind_id', sa.Integer(), nullable=False), + sa.Column('target_id', sa.Integer(), nullable=False), + sa.Column('metadata_json', sa.Text(), nullable=False), + sa.Column('created', sa.DateTime(), nullable=False), + sa.Column('dismissed', sa.Boolean(), nullable=False), + sa.Column('lookup_path', sa.String(length=255), nullable=True), + sa.ForeignKeyConstraint(['kind_id'], ['notificationkind.id'], name=op.f('fk_notification_kind_id_notificationkind')), + sa.ForeignKeyConstraint(['target_id'], ['user.id'], name=op.f('fk_notification_target_id_user')), + sa.PrimaryKeyConstraint('id', name=op.f('pk_notification')) + ) + op.create_index('notification_created', 'notification', ['created'], unique=False) + op.create_index('notification_kind_id', 'notification', ['kind_id'], unique=False) + op.create_index('notification_lookup_path', 'notification', ['lookup_path'], unique=False) + 
op.create_index('notification_target_id', 'notification', ['target_id'], unique=False) + op.create_index('notification_uuid', 'notification', ['uuid'], unique=False) + op.create_table('oauthapplication', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('client_id', sa.String(length=255), nullable=False), + sa.Column('client_secret', sa.String(length=255), nullable=False), + sa.Column('redirect_uri', sa.String(length=255), nullable=False), + sa.Column('application_uri', sa.String(length=255), nullable=False), + sa.Column('organization_id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('description', sa.Text(), nullable=False), + sa.Column('gravatar_email', sa.String(length=255), nullable=True), + sa.ForeignKeyConstraint(['organization_id'], ['user.id'], name=op.f('fk_oauthapplication_organization_id_user')), + sa.PrimaryKeyConstraint('id', name=op.f('pk_oauthapplication')) + ) + op.create_index('oauthapplication_client_id', 'oauthapplication', ['client_id'], unique=False) + op.create_index('oauthapplication_organization_id', 'oauthapplication', ['organization_id'], unique=False) + op.create_table('quayrelease', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('service_id', sa.Integer(), nullable=False), + sa.Column('version', sa.String(length=255), nullable=False), + sa.Column('region_id', sa.Integer(), nullable=False), + sa.Column('reverted', sa.Boolean(), nullable=False), + sa.Column('created', sa.DateTime(), nullable=False), + sa.ForeignKeyConstraint(['region_id'], ['quayregion.id'], name=op.f('fk_quayrelease_region_id_quayregion')), + sa.ForeignKeyConstraint(['service_id'], ['quayservice.id'], name=op.f('fk_quayrelease_service_id_quayservice')), + sa.PrimaryKeyConstraint('id', name=op.f('pk_quayrelease')) + ) + op.create_index('quayrelease_created', 'quayrelease', ['created'], unique=False) + op.create_index('quayrelease_region_id', 'quayrelease', ['region_id'], unique=False) + op.create_index('quayrelease_service_id', 'quayrelease', ['service_id'], unique=False) + op.create_index('quayrelease_service_id_region_id_created', 'quayrelease', ['service_id', 'region_id', 'created'], unique=False) + op.create_index('quayrelease_service_id_version_region_id', 'quayrelease', ['service_id', 'version', 'region_id'], unique=True) + op.create_table('repository', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('namespace_user_id', sa.Integer(), nullable=True), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('visibility_id', sa.Integer(), nullable=False), + sa.Column('description', sa.Text(), nullable=True), + sa.Column('badge_token', sa.String(length=255), nullable=False), + sa.ForeignKeyConstraint(['namespace_user_id'], ['user.id'], name=op.f('fk_repository_namespace_user_id_user')), + sa.ForeignKeyConstraint(['visibility_id'], ['visibility.id'], name=op.f('fk_repository_visibility_id_visibility')), + sa.PrimaryKeyConstraint('id', name=op.f('pk_repository')) + ) + op.create_index('repository_namespace_user_id', 'repository', ['namespace_user_id'], unique=False) + op.create_index('repository_namespace_user_id_name', 'repository', ['namespace_user_id', 'name'], unique=True) + op.create_index('repository_visibility_id', 'repository', ['visibility_id'], unique=False) + op.create_table('servicekey', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('kid', sa.String(length=255), nullable=False), + sa.Column('service', 
sa.String(length=255), nullable=False), + sa.Column('jwk', UTF8LongText(), nullable=False), + sa.Column('metadata', UTF8LongText(), nullable=False), + sa.Column('created_date', sa.DateTime(), nullable=False), + sa.Column('expiration_date', sa.DateTime(), nullable=True), + sa.Column('rotation_duration', sa.Integer(), nullable=True), + sa.Column('approval_id', sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(['approval_id'], ['servicekeyapproval.id'], name=op.f('fk_servicekey_approval_id_servicekeyapproval')), + sa.PrimaryKeyConstraint('id', name=op.f('pk_servicekey')) + ) + op.create_index('servicekey_approval_id', 'servicekey', ['approval_id'], unique=False) + op.create_index('servicekey_kid', 'servicekey', ['kid'], unique=True) + op.create_index('servicekey_service', 'servicekey', ['service'], unique=False) + op.create_table('team', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('organization_id', sa.Integer(), nullable=False), + sa.Column('role_id', sa.Integer(), nullable=False), + sa.Column('description', sa.Text(), nullable=False), + sa.ForeignKeyConstraint(['organization_id'], ['user.id'], name=op.f('fk_team_organization_id_user')), + sa.ForeignKeyConstraint(['role_id'], ['teamrole.id'], name=op.f('fk_team_role_id_teamrole')), + sa.PrimaryKeyConstraint('id', name=op.f('pk_team')) + ) + op.create_index('team_name', 'team', ['name'], unique=False) + op.create_index('team_name_organization_id', 'team', ['name', 'organization_id'], unique=True) + op.create_index('team_organization_id', 'team', ['organization_id'], unique=False) + op.create_index('team_role_id', 'team', ['role_id'], unique=False) + op.create_table('torrentinfo', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('storage_id', sa.Integer(), nullable=False), + sa.Column('piece_length', sa.Integer(), nullable=False), + sa.Column('pieces', sa.Text(), nullable=False), + sa.ForeignKeyConstraint(['storage_id'], ['imagestorage.id'], name=op.f('fk_torrentinfo_storage_id_imagestorage')), + sa.PrimaryKeyConstraint('id', name=op.f('pk_torrentinfo')) + ) + op.create_index('torrentinfo_storage_id', 'torrentinfo', ['storage_id'], unique=False) + op.create_index('torrentinfo_storage_id_piece_length', 'torrentinfo', ['storage_id', 'piece_length'], unique=True) + op.create_table('userregion', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('user_id', sa.Integer(), nullable=False), + sa.Column('location_id', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(['location_id'], ['imagestoragelocation.id'], name=op.f('fk_userregion_location_id_imagestoragelocation')), + sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_userregion_user_id_user')), + sa.PrimaryKeyConstraint('id', name=op.f('pk_userregion')) + ) + op.create_index('userregion_location_id', 'userregion', ['location_id'], unique=False) + op.create_index('userregion_user_id', 'userregion', ['user_id'], unique=False) + op.create_table('accesstoken', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('friendly_name', sa.String(length=255), nullable=True), + sa.Column('code', sa.String(length=255), nullable=False), + sa.Column('repository_id', sa.Integer(), nullable=False), + sa.Column('created', sa.DateTime(), nullable=False), + sa.Column('role_id', sa.Integer(), nullable=False), + sa.Column('temporary', sa.Boolean(), nullable=False), + sa.Column('kind_id', sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(['kind_id'], ['accesstokenkind.id'], 
name=op.f('fk_accesstoken_kind_id_accesstokenkind')), + sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], name=op.f('fk_accesstoken_repository_id_repository')), + sa.ForeignKeyConstraint(['role_id'], ['role.id'], name=op.f('fk_accesstoken_role_id_role')), + sa.PrimaryKeyConstraint('id', name=op.f('pk_accesstoken')) + ) + op.create_index('accesstoken_code', 'accesstoken', ['code'], unique=True) + op.create_index('accesstoken_kind_id', 'accesstoken', ['kind_id'], unique=False) + op.create_index('accesstoken_repository_id', 'accesstoken', ['repository_id'], unique=False) + op.create_index('accesstoken_role_id', 'accesstoken', ['role_id'], unique=False) + op.create_table('blobupload', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('repository_id', sa.Integer(), nullable=False), + sa.Column('uuid', sa.String(length=255), nullable=False), + sa.Column('byte_count', sa.Integer(), nullable=False), + sa.Column('sha_state', sa.Text(), nullable=True), + sa.Column('location_id', sa.Integer(), nullable=False), + sa.Column('storage_metadata', sa.Text(), nullable=True), + sa.Column('chunk_count', sa.Integer(), nullable=False, server_default='0'), + sa.Column('uncompressed_byte_count', sa.Integer(), nullable=True), + sa.Column('created', sa.DateTime(), nullable=False, server_default=sa.text('now()')), + sa.Column('piece_sha_state', UTF8LongText(), nullable=True), + sa.Column('piece_hashes', UTF8LongText(), nullable=True), + sa.ForeignKeyConstraint(['location_id'], ['imagestoragelocation.id'], name=op.f('fk_blobupload_location_id_imagestoragelocation')), + sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], name=op.f('fk_blobupload_repository_id_repository')), + sa.PrimaryKeyConstraint('id', name=op.f('pk_blobupload')) + ) + op.create_index('blobupload_created', 'blobupload', ['created'], unique=False) + op.create_index('blobupload_location_id', 'blobupload', ['location_id'], unique=False) + op.create_index('blobupload_repository_id', 'blobupload', ['repository_id'], unique=False) + op.create_index('blobupload_repository_id_uuid', 'blobupload', ['repository_id', 'uuid'], unique=True) + op.create_index('blobupload_uuid', 'blobupload', ['uuid'], unique=True) + op.create_table('image', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('docker_image_id', sa.String(length=255), nullable=False), + sa.Column('repository_id', sa.Integer(), nullable=False), + sa.Column('ancestors', sa.String(length=60535), nullable=True), + sa.Column('storage_id', sa.Integer(), nullable=True), + sa.Column('created', sa.DateTime(), nullable=True), + sa.Column('comment', UTF8LongText(), nullable=True), + sa.Column('command', sa.Text(), nullable=True), + sa.Column('aggregate_size', sa.BigInteger(), nullable=True), + sa.Column('v1_json_metadata', UTF8LongText(), nullable=True), + sa.Column('v1_checksum', sa.String(length=255), nullable=True), + sa.Column('security_indexed', sa.Boolean(), nullable=False, server_default=sa.sql.expression.false()), + sa.Column('security_indexed_engine', sa.Integer(), nullable=False, server_default='-1'), + sa.Column('parent_id', sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], name=op.f('fk_image_repository_id_repository')), + sa.ForeignKeyConstraint(['storage_id'], ['imagestorage.id'], name=op.f('fk_image_storage_id_imagestorage')), + sa.PrimaryKeyConstraint('id', name=op.f('pk_image')) + ) + op.create_index('image_ancestors', 'image', ['ancestors'], unique=False, mysql_length=767) + 
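# The 'ancestors' column may hold tens of thousands of characters, which exceeds + # InnoDB's 767-byte index key prefix limit, so the mysql_length=767 on the index above + # limits MySQL to indexing a 767-byte prefix of the column; dialects other than MySQL + # ignore mysql_* options. + 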
op.create_index('image_docker_image_id', 'image', ['docker_image_id'], unique=False) + op.create_index('image_parent_id', 'image', ['parent_id'], unique=False) + op.create_index('image_repository_id', 'image', ['repository_id'], unique=False) + op.create_index('image_repository_id_docker_image_id', 'image', ['repository_id', 'docker_image_id'], unique=True) + op.create_index('image_security_indexed', 'image', ['security_indexed'], unique=False) + op.create_index('image_security_indexed_engine', 'image', ['security_indexed_engine'], unique=False) + op.create_index('image_security_indexed_engine_security_indexed', 'image', ['security_indexed_engine', 'security_indexed'], unique=False) + op.create_index('image_storage_id', 'image', ['storage_id'], unique=False) + op.create_table('oauthaccesstoken', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('uuid', sa.String(length=255), nullable=False), + sa.Column('application_id', sa.Integer(), nullable=False), + sa.Column('authorized_user_id', sa.Integer(), nullable=False), + sa.Column('scope', sa.String(length=255), nullable=False), + sa.Column('access_token', sa.String(length=255), nullable=False), + sa.Column('token_type', sa.String(length=255), nullable=False), + sa.Column('expires_at', sa.DateTime(), nullable=False), + sa.Column('refresh_token', sa.String(length=255), nullable=True), + sa.Column('data', sa.Text(), nullable=False), + sa.ForeignKeyConstraint(['application_id'], ['oauthapplication.id'], name=op.f('fk_oauthaccesstoken_application_id_oauthapplication')), + sa.ForeignKeyConstraint(['authorized_user_id'], ['user.id'], name=op.f('fk_oauthaccesstoken_authorized_user_id_user')), + sa.PrimaryKeyConstraint('id', name=op.f('pk_oauthaccesstoken')) + ) + op.create_index('oauthaccesstoken_access_token', 'oauthaccesstoken', ['access_token'], unique=False) + op.create_index('oauthaccesstoken_application_id', 'oauthaccesstoken', ['application_id'], unique=False) + op.create_index('oauthaccesstoken_authorized_user_id', 'oauthaccesstoken', ['authorized_user_id'], unique=False) + op.create_index('oauthaccesstoken_refresh_token', 'oauthaccesstoken', ['refresh_token'], unique=False) + op.create_index('oauthaccesstoken_uuid', 'oauthaccesstoken', ['uuid'], unique=False) + op.create_table('oauthauthorizationcode', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('application_id', sa.Integer(), nullable=False), + sa.Column('code', sa.String(length=255), nullable=False), + sa.Column('scope', sa.String(length=255), nullable=False), + sa.Column('data', sa.Text(), nullable=False), + sa.ForeignKeyConstraint(['application_id'], ['oauthapplication.id'], name=op.f('fk_oauthauthorizationcode_application_id_oauthapplication')), + sa.PrimaryKeyConstraint('id', name=op.f('pk_oauthauthorizationcode')) + ) + op.create_index('oauthauthorizationcode_application_id', 'oauthauthorizationcode', ['application_id'], unique=False) + op.create_index('oauthauthorizationcode_code', 'oauthauthorizationcode', ['code'], unique=False) + op.create_table('permissionprototype', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('org_id', sa.Integer(), nullable=False), + sa.Column('uuid', sa.String(length=255), nullable=False), + sa.Column('activating_user_id', sa.Integer(), nullable=True), + sa.Column('delegate_user_id', sa.Integer(), nullable=True), + sa.Column('delegate_team_id', sa.Integer(), nullable=True), + sa.Column('role_id', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(['activating_user_id'], ['user.id'], 
name=op.f('fk_permissionprototype_activating_user_id_user')), + sa.ForeignKeyConstraint(['delegate_team_id'], ['team.id'], name=op.f('fk_permissionprototype_delegate_team_id_team')), + sa.ForeignKeyConstraint(['delegate_user_id'], ['user.id'], name=op.f('fk_permissionprototype_delegate_user_id_user')), + sa.ForeignKeyConstraint(['org_id'], ['user.id'], name=op.f('fk_permissionprototype_org_id_user')), + sa.ForeignKeyConstraint(['role_id'], ['role.id'], name=op.f('fk_permissionprototype_role_id_role')), + sa.PrimaryKeyConstraint('id', name=op.f('pk_permissionprototype')) + ) + op.create_index('permissionprototype_activating_user_id', 'permissionprototype', ['activating_user_id'], unique=False) + op.create_index('permissionprototype_delegate_team_id', 'permissionprototype', ['delegate_team_id'], unique=False) + op.create_index('permissionprototype_delegate_user_id', 'permissionprototype', ['delegate_user_id'], unique=False) + op.create_index('permissionprototype_org_id', 'permissionprototype', ['org_id'], unique=False) + op.create_index('permissionprototype_org_id_activating_user_id', 'permissionprototype', ['org_id', 'activating_user_id'], unique=False) + op.create_index('permissionprototype_role_id', 'permissionprototype', ['role_id'], unique=False) + op.create_table('repositoryactioncount', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('repository_id', sa.Integer(), nullable=False), + sa.Column('count', sa.Integer(), nullable=False), + sa.Column('date', sa.Date(), nullable=False), + sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], name=op.f('fk_repositoryactioncount_repository_id_repository')), + sa.PrimaryKeyConstraint('id', name=op.f('pk_repositoryactioncount')) + ) + op.create_index('repositoryactioncount_date', 'repositoryactioncount', ['date'], unique=False) + op.create_index('repositoryactioncount_repository_id', 'repositoryactioncount', ['repository_id'], unique=False) + op.create_index('repositoryactioncount_repository_id_date', 'repositoryactioncount', ['repository_id', 'date'], unique=True) + op.create_table('repositoryauthorizedemail', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('repository_id', sa.Integer(), nullable=False), + sa.Column('email', sa.String(length=255), nullable=False), + sa.Column('code', sa.String(length=255), nullable=False), + sa.Column('confirmed', sa.Boolean(), nullable=False), + sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], name=op.f('fk_repositoryauthorizedemail_repository_id_repository')), + sa.PrimaryKeyConstraint('id', name=op.f('pk_repositoryauthorizedemail')) + ) + op.create_index('repositoryauthorizedemail_code', 'repositoryauthorizedemail', ['code'], unique=True) + op.create_index('repositoryauthorizedemail_email_repository_id', 'repositoryauthorizedemail', ['email', 'repository_id'], unique=True) + op.create_index('repositoryauthorizedemail_repository_id', 'repositoryauthorizedemail', ['repository_id'], unique=False) + op.create_table('repositorynotification', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('uuid', sa.String(length=255), nullable=False), + sa.Column('repository_id', sa.Integer(), nullable=False), + sa.Column('event_id', sa.Integer(), nullable=False), + sa.Column('method_id', sa.Integer(), nullable=False), + sa.Column('title', sa.String(length=255), nullable=True), + sa.Column('config_json', sa.Text(), nullable=False), + sa.Column('event_config_json', UTF8LongText(), nullable=False), + sa.ForeignKeyConstraint(['event_id'], ['externalnotificationevent.id'], 
name=op.f('fk_repositorynotification_event_id_externalnotificationevent')), + sa.ForeignKeyConstraint(['method_id'], ['externalnotificationmethod.id'], name=op.f('fk_repositorynotification_method_id_externalnotificationmethod')), + sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], name=op.f('fk_repositorynotification_repository_id_repository')), + sa.PrimaryKeyConstraint('id', name=op.f('pk_repositorynotification')) + ) + op.create_index('repositorynotification_event_id', 'repositorynotification', ['event_id'], unique=False) + op.create_index('repositorynotification_method_id', 'repositorynotification', ['method_id'], unique=False) + op.create_index('repositorynotification_repository_id', 'repositorynotification', ['repository_id'], unique=False) + op.create_index('repositorynotification_uuid', 'repositorynotification', ['uuid'], unique=False) + op.create_table('repositorypermission', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('team_id', sa.Integer(), nullable=True), + sa.Column('user_id', sa.Integer(), nullable=True), + sa.Column('repository_id', sa.Integer(), nullable=False), + sa.Column('role_id', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], name=op.f('fk_repositorypermission_repository_id_repository')), + sa.ForeignKeyConstraint(['role_id'], ['role.id'], name=op.f('fk_repositorypermission_role_id_role')), + sa.ForeignKeyConstraint(['team_id'], ['team.id'], name=op.f('fk_repositorypermission_team_id_team')), + sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_repositorypermission_user_id_user')), + sa.PrimaryKeyConstraint('id', name=op.f('pk_repositorypermission')) + ) + op.create_index('repositorypermission_repository_id', 'repositorypermission', ['repository_id'], unique=False) + op.create_index('repositorypermission_role_id', 'repositorypermission', ['role_id'], unique=False) + op.create_index('repositorypermission_team_id', 'repositorypermission', ['team_id'], unique=False) + op.create_index('repositorypermission_team_id_repository_id', 'repositorypermission', ['team_id', 'repository_id'], unique=True) + op.create_index('repositorypermission_user_id', 'repositorypermission', ['user_id'], unique=False) + op.create_index('repositorypermission_user_id_repository_id', 'repositorypermission', ['user_id', 'repository_id'], unique=True) + op.create_table('star', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('user_id', sa.Integer(), nullable=False), + sa.Column('repository_id', sa.Integer(), nullable=False), + sa.Column('created', sa.DateTime(), nullable=False), + sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], name=op.f('fk_star_repository_id_repository')), + sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_star_user_id_user')), + sa.PrimaryKeyConstraint('id', name=op.f('pk_star')) + ) + op.create_index('star_repository_id', 'star', ['repository_id'], unique=False) + op.create_index('star_user_id', 'star', ['user_id'], unique=False) + op.create_index('star_user_id_repository_id', 'star', ['user_id', 'repository_id'], unique=True) + op.create_table('teammember', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('user_id', sa.Integer(), nullable=False), + sa.Column('team_id', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(['team_id'], ['team.id'], name=op.f('fk_teammember_team_id_team')), + sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_teammember_user_id_user')), + sa.PrimaryKeyConstraint('id', 
name=op.f('pk_teammember')) + ) + op.create_index('teammember_team_id', 'teammember', ['team_id'], unique=False) + op.create_index('teammember_user_id', 'teammember', ['user_id'], unique=False) + op.create_index('teammember_user_id_team_id', 'teammember', ['user_id', 'team_id'], unique=True) + op.create_table('teammemberinvite', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('user_id', sa.Integer(), nullable=True), + sa.Column('email', sa.String(length=255), nullable=True), + sa.Column('team_id', sa.Integer(), nullable=False), + sa.Column('inviter_id', sa.Integer(), nullable=False), + sa.Column('invite_token', sa.String(length=255), nullable=False), + sa.ForeignKeyConstraint(['inviter_id'], ['user.id'], name=op.f('fk_teammemberinvite_inviter_id_user')), + sa.ForeignKeyConstraint(['team_id'], ['team.id'], name=op.f('fk_teammemberinvite_team_id_team')), + sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_teammemberinvite_user_id_user')), + sa.PrimaryKeyConstraint('id', name=op.f('pk_teammemberinvite')) + ) + op.create_index('teammemberinvite_inviter_id', 'teammemberinvite', ['inviter_id'], unique=False) + op.create_index('teammemberinvite_team_id', 'teammemberinvite', ['team_id'], unique=False) + op.create_index('teammemberinvite_user_id', 'teammemberinvite', ['user_id'], unique=False) + op.create_table('derivedstorageforimage', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('source_image_id', sa.Integer(), nullable=False), + sa.Column('derivative_id', sa.Integer(), nullable=False), + sa.Column('transformation_id', sa.Integer(), nullable=False), + sa.Column('uniqueness_hash', sa.String(length=255), nullable=True), + sa.ForeignKeyConstraint(['derivative_id'], ['imagestorage.id'], name=op.f('fk_derivedstorageforimage_derivative_id_imagestorage')), + sa.ForeignKeyConstraint(['source_image_id'], ['image.id'], name=op.f('fk_derivedstorageforimage_source_image_id_image')), + sa.ForeignKeyConstraint(['transformation_id'], ['imagestoragetransformation.id'], name=op.f('fk_derivedstorageforimage_transformation_constraint')), + sa.PrimaryKeyConstraint('id', name=op.f('pk_derivedstorageforimage')) + ) + op.create_index('derivedstorageforimage_derivative_id', 'derivedstorageforimage', ['derivative_id'], unique=False) + op.create_index('derivedstorageforimage_source_image_id', 'derivedstorageforimage', ['source_image_id'], unique=False) + op.create_index('uniqueness_index', 'derivedstorageforimage', ['source_image_id', 'transformation_id', 'uniqueness_hash'], unique=True) + op.create_index('derivedstorageforimage_transformation_id', 'derivedstorageforimage', ['transformation_id'], unique=False) + op.create_table('repositorybuildtrigger', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('uuid', sa.String(length=255), nullable=False), + sa.Column('service_id', sa.Integer(), nullable=False), + sa.Column('repository_id', sa.Integer(), nullable=False), + sa.Column('connected_user_id', sa.Integer(), nullable=False), + sa.Column('auth_token', sa.String(length=255), nullable=True), + sa.Column('private_key', sa.Text(), nullable=True), + sa.Column('config', sa.Text(), nullable=False), + sa.Column('write_token_id', sa.Integer(), nullable=True), + sa.Column('pull_robot_id', sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(['connected_user_id'], ['user.id'], name=op.f('fk_repositorybuildtrigger_connected_user_id_user')), + sa.ForeignKeyConstraint(['pull_robot_id'], ['user.id'], name=op.f('fk_repositorybuildtrigger_pull_robot_id_user')), + 
sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], name=op.f('fk_repositorybuildtrigger_repository_id_repository')), + sa.ForeignKeyConstraint(['service_id'], ['buildtriggerservice.id'], name=op.f('fk_repositorybuildtrigger_service_id_buildtriggerservice')), + sa.ForeignKeyConstraint(['write_token_id'], ['accesstoken.id'], name=op.f('fk_repositorybuildtrigger_write_token_id_accesstoken')), + sa.PrimaryKeyConstraint('id', name=op.f('pk_repositorybuildtrigger')) + ) + op.create_index('repositorybuildtrigger_connected_user_id', 'repositorybuildtrigger', ['connected_user_id'], unique=False) + op.create_index('repositorybuildtrigger_pull_robot_id', 'repositorybuildtrigger', ['pull_robot_id'], unique=False) + op.create_index('repositorybuildtrigger_repository_id', 'repositorybuildtrigger', ['repository_id'], unique=False) + op.create_index('repositorybuildtrigger_service_id', 'repositorybuildtrigger', ['service_id'], unique=False) + op.create_index('repositorybuildtrigger_write_token_id', 'repositorybuildtrigger', ['write_token_id'], unique=False) + op.create_table('repositorytag', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('image_id', sa.Integer(), nullable=False), + sa.Column('repository_id', sa.Integer(), nullable=False), + sa.Column('lifetime_start_ts', sa.Integer(), nullable=False, server_default='0'), + sa.Column('lifetime_end_ts', sa.Integer(), nullable=True), + sa.Column('hidden', sa.Boolean(), nullable=False, server_default=sa.sql.expression.false()), + sa.Column('reversion', sa.Boolean(), nullable=False, server_default=sa.sql.expression.false()), + sa.ForeignKeyConstraint(['image_id'], ['image.id'], name=op.f('fk_repositorytag_image_id_image')), + sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], name=op.f('fk_repositorytag_repository_id_repository')), + sa.PrimaryKeyConstraint('id', name=op.f('pk_repositorytag')) + ) + op.create_index('repositorytag_image_id', 'repositorytag', ['image_id'], unique=False) + op.create_index('repositorytag_lifetime_end_ts', 'repositorytag', ['lifetime_end_ts'], unique=False) + op.create_index('repositorytag_repository_id', 'repositorytag', ['repository_id'], unique=False) + op.create_index('repositorytag_repository_id_name', 'repositorytag', ['repository_id', 'name'], unique=False) + op.create_index('repositorytag_repository_id_name_lifetime_end_ts', 'repositorytag', ['repository_id', 'name', 'lifetime_end_ts'], unique=True) + op.create_table('repositorybuild', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('uuid', sa.String(length=255), nullable=False), + sa.Column('repository_id', sa.Integer(), nullable=False), + sa.Column('access_token_id', sa.Integer(), nullable=False), + sa.Column('resource_key', sa.String(length=255), nullable=True), + sa.Column('job_config', sa.Text(), nullable=False), + sa.Column('phase', sa.String(length=255), nullable=False), + sa.Column('started', sa.DateTime(), nullable=False), + sa.Column('display_name', sa.String(length=255), nullable=False), + sa.Column('trigger_id', sa.Integer(), nullable=True), + sa.Column('pull_robot_id', sa.Integer(), nullable=True), + sa.Column('logs_archived', sa.Boolean(), nullable=False, server_default=sa.sql.expression.false()), + sa.Column('queue_id', sa.String(length=255), nullable=True), + sa.ForeignKeyConstraint(['access_token_id'], ['accesstoken.id'], name=op.f('fk_repositorybuild_access_token_id_accesstoken')), + sa.ForeignKeyConstraint(['pull_robot_id'], ['user.id'], 
name=op.f('fk_repositorybuild_pull_robot_id_user')), + sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], name=op.f('fk_repositorybuild_repository_id_repository')), + sa.ForeignKeyConstraint(['trigger_id'], ['repositorybuildtrigger.id'], name=op.f('fk_repositorybuild_trigger_id_repositorybuildtrigger')), + sa.PrimaryKeyConstraint('id', name=op.f('pk_repositorybuild')) + ) + op.create_index('repositorybuild_access_token_id', 'repositorybuild', ['access_token_id'], unique=False) + op.create_index('repositorybuild_pull_robot_id', 'repositorybuild', ['pull_robot_id'], unique=False) + op.create_index('repositorybuild_queue_id', 'repositorybuild', ['queue_id'], unique=False) + op.create_index('repositorybuild_repository_id', 'repositorybuild', ['repository_id'], unique=False) + op.create_index('repositorybuild_repository_id_started_phase', 'repositorybuild', ['repository_id', 'started', 'phase'], unique=False) + op.create_index('repositorybuild_resource_key', 'repositorybuild', ['resource_key'], unique=False) + op.create_index('repositorybuild_started', 'repositorybuild', ['started'], unique=False) + op.create_index('repositorybuild_started_logs_archived_phase', 'repositorybuild', ['started', 'logs_archived', 'phase'], unique=False) + op.create_index('repositorybuild_trigger_id', 'repositorybuild', ['trigger_id'], unique=False) + op.create_index('repositorybuild_uuid', 'repositorybuild', ['uuid'], unique=False) + op.create_table('tagmanifest', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('tag_id', sa.Integer(), nullable=False), + sa.Column('digest', sa.String(length=255), nullable=False), + sa.Column('json_data', UTF8LongText(), nullable=False), + sa.ForeignKeyConstraint(['tag_id'], ['repositorytag.id'], name=op.f('fk_tagmanifest_tag_id_repositorytag')), + sa.PrimaryKeyConstraint('id', name=op.f('pk_tagmanifest')) + ) + op.create_index('tagmanifest_digest', 'tagmanifest', ['digest'], unique=False) + op.create_index('tagmanifest_tag_id', 'tagmanifest', ['tag_id'], unique=True) + op.create_table('tagmanifestlabel', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('repository_id', sa.Integer(), nullable=False), + sa.Column('annotated_id', sa.Integer(), nullable=False), + sa.Column('label_id', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(['annotated_id'], ['tagmanifest.id'], name=op.f('fk_tagmanifestlabel_annotated_id_tagmanifest')), + sa.ForeignKeyConstraint(['label_id'], ['label.id'], name=op.f('fk_tagmanifestlabel_label_id_label')), + sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], name=op.f('fk_tagmanifestlabel_repository_id_repository')), + sa.PrimaryKeyConstraint('id', name=op.f('pk_tagmanifestlabel')) + ) + op.create_index('tagmanifestlabel_annotated_id', 'tagmanifestlabel', ['annotated_id'], unique=False) + op.create_index('tagmanifestlabel_annotated_id_label_id', 'tagmanifestlabel', ['annotated_id', 'label_id'], unique=True) + op.create_index('tagmanifestlabel_label_id', 'tagmanifestlabel', ['label_id'], unique=False) + op.create_index('tagmanifestlabel_repository_id', 'tagmanifestlabel', ['repository_id'], unique=False) + + op.bulk_insert(tables.accesstokenkind, + [ + {'name':'build-worker'}, + {'name':'pushpull-token'}, + ]) + + op.bulk_insert(tables.buildtriggerservice, + [ + {'name':'github'}, + {'name':'gitlab'}, + {'name':'bitbucket'}, + {'name':'custom-git'}, + ]) + + op.bulk_insert(tables.externalnotificationevent, + [ + {'name':'build_failure'}, + {'name':'build_queued'}, + {'name':'build_start'}, + 
{'name':'build_success'}, + {'name':'repo_push'}, + {'name':'vulnerability_found'}, + ]) + + op.bulk_insert(tables.externalnotificationmethod, + [ + {'name':'email'}, + {'name':'flowdock'}, + {'name':'hipchat'}, + {'name':'quay_notification'}, + {'name':'slack'}, + {'name':'webhook'}, + ]) + + op.bulk_insert(tables.imagestoragelocation, + [ + {'name':'s3_us_east_1'}, + {'name':'s3_eu_west_1'}, + {'name':'s3_ap_southeast_1'}, + {'name':'s3_ap_southeast_2'}, + {'name':'s3_ap_northeast_1'}, + {'name':'s3_sa_east_1'}, + {'name':'local'}, + {'name':'s3_us_west_1'}, + ]) + + op.bulk_insert(tables.imagestoragesignaturekind, + [ + {'name':'gpg2'}, + ]) + + op.bulk_insert(tables.imagestoragetransformation, + [ + {'name':'squash'}, + {'name':'aci'}, + ]) + + op.bulk_insert(tables.labelsourcetype, + [ + {'name':'manifest', 'mutable': False}, + {'name':'api', 'mutable': True}, + {'name':'internal', 'mutable': False}, + ]) + + op.bulk_insert(tables.logentrykind, + [ + {'name':'account_change_cc'}, + {'name':'account_change_password'}, + {'name':'account_change_plan'}, + {'name':'account_convert'}, + {'name':'add_repo_accesstoken'}, + {'name':'add_repo_notification'}, + {'name':'add_repo_permission'}, + {'name':'add_repo_webhook'}, + {'name':'build_dockerfile'}, + {'name':'change_repo_permission'}, + {'name':'change_repo_visibility'}, + {'name':'create_application'}, + {'name':'create_prototype_permission'}, + {'name':'create_repo'}, + {'name':'create_robot'}, + {'name':'create_tag'}, + {'name':'delete_application'}, + {'name':'delete_prototype_permission'}, + {'name':'delete_repo'}, + {'name':'delete_repo_accesstoken'}, + {'name':'delete_repo_notification'}, + {'name':'delete_repo_permission'}, + {'name':'delete_repo_trigger'}, + {'name':'delete_repo_webhook'}, + {'name':'delete_robot'}, + {'name':'delete_tag'}, + {'name':'manifest_label_add'}, + {'name':'manifest_label_delete'}, + {'name':'modify_prototype_permission'}, + {'name':'move_tag'}, + {'name':'org_add_team_member'}, + {'name':'org_create_team'}, + {'name':'org_delete_team'}, + {'name':'org_delete_team_member_invite'}, + {'name':'org_invite_team_member'}, + {'name':'org_remove_team_member'}, + {'name':'org_set_team_description'}, + {'name':'org_set_team_role'}, + {'name':'org_team_member_invite_accepted'}, + {'name':'org_team_member_invite_declined'}, + {'name':'pull_repo'}, + {'name':'push_repo'}, + {'name':'regenerate_robot_token'}, + {'name':'repo_verb'}, + {'name':'reset_application_client_secret'}, + {'name':'revert_tag'}, + {'name':'service_key_approve'}, + {'name':'service_key_create'}, + {'name':'service_key_delete'}, + {'name':'service_key_extend'}, + {'name':'service_key_modify'}, + {'name':'service_key_rotate'}, + {'name':'setup_repo_trigger'}, + {'name':'set_repo_description'}, + {'name':'take_ownership'}, + {'name':'update_application'}, + ]) + + op.bulk_insert(tables.loginservice, + [ + {'name':'github'}, + {'name':'quayrobot'}, + {'name':'ldap'}, + {'name':'google'}, + {'name':'keystone'}, + {'name':'dex'}, + {'name':'jwtauthn'}, + ]) + + op.bulk_insert(tables.mediatype, + [ + {'name':'text/plain'}, + {'name':'application/json'}, + ]) + + op.bulk_insert(tables.notificationkind, + [ + {'name':'build_failure'}, + {'name':'build_queued'}, + {'name':'build_start'}, + {'name':'build_success'}, + {'name':'expiring_license'}, + {'name':'maintenance'}, + {'name':'org_team_invite'}, + {'name':'over_private_usage'}, + {'name':'password_required'}, + {'name':'repo_push'}, + {'name':'service_key_submitted'}, + 
{'name':'vulnerability_found'}, + ]) + + op.bulk_insert(tables.role, + [ + {'name':'admin'}, + {'name':'write'}, + {'name':'read'}, + ]) + + op.bulk_insert(tables.teamrole, + [ + {'name':'admin'}, + {'name':'creator'}, + {'name':'member'}, + ]) + + op.bulk_insert(tables.visibility, + [ + {'name':'public'}, + {'name':'private'}, + ]) + + +def downgrade(tables): + op.drop_table('tagmanifestlabel') + op.drop_table('tagmanifest') + op.drop_table('repositorybuild') + op.drop_table('repositorytag') + op.drop_table('repositorybuildtrigger') + op.drop_table('derivedstorageforimage') + op.drop_table('teammemberinvite') + op.drop_table('teammember') + op.drop_table('star') + op.drop_table('repositorypermission') + op.drop_table('repositorynotification') + op.drop_table('repositoryauthorizedemail') + op.drop_table('repositoryactioncount') + op.drop_table('permissionprototype') + op.drop_table('oauthauthorizationcode') + op.drop_table('oauthaccesstoken') + op.drop_table('image') + op.drop_table('blobupload') + op.drop_table('accesstoken') + op.drop_table('userregion') + op.drop_table('torrentinfo') + op.drop_table('team') + op.drop_table('servicekey') + op.drop_table('repository') + op.drop_table('quayrelease') + op.drop_table('oauthapplication') + op.drop_table('notification') + op.drop_table('logentry') + op.drop_table('label') + op.drop_table('imagestoragesignature') + op.drop_table('imagestorageplacement') + op.drop_table('federatedlogin') + op.drop_table('emailconfirmation') + op.drop_table('visibility') + op.drop_table('user') + op.drop_table('teamrole') + op.drop_table('servicekeyapproval') + op.drop_table('role') + op.drop_table('queueitem') + op.drop_table('quayservice') + op.drop_table('quayregion') + op.drop_table('notificationkind') + op.drop_table('messages') + op.drop_table('mediatype') + op.drop_table('loginservice') + op.drop_table('logentrykind') + op.drop_table('labelsourcetype') + op.drop_table('imagestoragetransformation') + op.drop_table('imagestoragesignaturekind') + op.drop_table('imagestoragelocation') + op.drop_table('imagestorage') + op.drop_table('externalnotificationmethod') + op.drop_table('externalnotificationevent') + op.drop_table('buildtriggerservice') + op.drop_table('accesstokenkind') diff --git a/data/migrations/versions/c9b91bee7554_add_labels_to_the_schema.py b/data/migrations/versions/c9b91bee7554_add_labels_to_the_schema.py deleted file mode 100644 index 21f1113ff..000000000 --- a/data/migrations/versions/c9b91bee7554_add_labels_to_the_schema.py +++ /dev/null @@ -1,89 +0,0 @@ -"""Add labels to the schema - -Revision ID: c9b91bee7554 -Revises: 983247d75af3 -Create Date: 2016-08-22 15:40:25.226541 - -""" - -# revision identifiers, used by Alembic. -revision = 'c9b91bee7554' -down_revision = '983247d75af3' - -from alembic import op -import sqlalchemy as sa -from util.migrate import UTF8LongText, UTF8CharField - -def upgrade(tables): - ### commands auto generated by Alembic - please adjust! 
### - op.create_table('labelsourcetype', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('name', sa.String(length=255), nullable=False), - sa.Column('mutable', sa.Boolean(), nullable=False), - sa.PrimaryKeyConstraint('id', name=op.f('pk_labelsourcetype')) - ) - op.create_index('labelsourcetype_name', 'labelsourcetype', ['name'], unique=True) - op.create_table('mediatype', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('name', sa.String(length=255), nullable=False), - sa.PrimaryKeyConstraint('id', name=op.f('pk_mediatype')) - ) - op.create_index('mediatype_name', 'mediatype', ['name'], unique=True) - op.create_table('label', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('uuid', sa.String(length=255), nullable=False), - sa.Column('key', UTF8CharField(length=255), nullable=False), - sa.Column('value', UTF8LongText(), nullable=False), - sa.Column('media_type_id', sa.Integer(), nullable=False), - sa.Column('source_type_id', sa.Integer(), nullable=False), - sa.ForeignKeyConstraint(['media_type_id'], ['mediatype.id'], name=op.f('fk_label_media_type_id_mediatype')), - sa.ForeignKeyConstraint(['source_type_id'], ['labelsourcetype.id'], name=op.f('fk_label_source_type_id_labelsourcetype')), - sa.PrimaryKeyConstraint('id', name=op.f('pk_label')) - ) - op.create_index('label_key', 'label', ['key'], unique=False) - op.create_index('label_media_type_id', 'label', ['media_type_id'], unique=False) - op.create_index('label_source_type_id', 'label', ['source_type_id'], unique=False) - op.create_index('label_uuid', 'label', ['uuid'], unique=True) - op.create_table('tagmanifestlabel', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('repository_id', sa.Integer(), nullable=False), - sa.Column('annotated_id', sa.Integer(), nullable=False), - sa.Column('label_id', sa.Integer(), nullable=False), - sa.ForeignKeyConstraint(['annotated_id'], ['tagmanifest.id'], name=op.f('fk_tagmanifestlabel_annotated_id_tagmanifest')), - sa.ForeignKeyConstraint(['label_id'], ['label.id'], name=op.f('fk_tagmanifestlabel_label_id_label')), - sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], name=op.f('fk_tagmanifestlabel_repository_id_repository')), - sa.PrimaryKeyConstraint('id', name=op.f('pk_tagmanifestlabel')) - ) - op.create_index('tagmanifestlabel_annotated_id', 'tagmanifestlabel', ['annotated_id'], unique=False) - op.create_index('tagmanifestlabel_annotated_id_label_id', 'tagmanifestlabel', ['annotated_id', 'label_id'], unique=True) - op.create_index('tagmanifestlabel_label_id', 'tagmanifestlabel', ['label_id'], unique=False) - op.create_index('tagmanifestlabel_repository_id', 'tagmanifestlabel', ['repository_id'], unique=False) - ### end Alembic commands ### - - op.bulk_insert(tables.logentrykind, [ - {'name':'manifest_label_add'}, - {'name':'manifest_label_delete'}, - ]) - - op.bulk_insert(tables.mediatype, [ - {'name':'text/plain'}, - {'name':'application/json'}, - ]) - - op.bulk_insert(tables.labelsourcetype, [ - {'name':'manifest', 'mutable': False}, - {'name':'api', 'mutable': True}, - {'name':'internal', 'mutable': False}, - ]) - - -def downgrade(tables): - op.execute(tables.logentrykind.delete().where(tables.logentrykind.c.name == op.inline_literal('manifest_label_add'))) - op.execute(tables.logentrykind.delete().where(tables.logentrykind.c.name == op.inline_literal('manifest_label_delete'))) - - ### commands auto generated by Alembic - please adjust! 
### - op.drop_table('tagmanifestlabel') - op.drop_table('label') - op.drop_table('mediatype') - op.drop_table('labelsourcetype') - ### end Alembic commands ### diff --git a/data/migrations/versions/e4129c93e477_remove_uniqueness_constraint_on_the_.py b/data/migrations/versions/e4129c93e477_remove_uniqueness_constraint_on_the_.py deleted file mode 100644 index 08cc02c7c..000000000 --- a/data/migrations/versions/e4129c93e477_remove_uniqueness_constraint_on_the_.py +++ /dev/null @@ -1,28 +0,0 @@ -"""Remove uniqueness constraint on the TagManifest digest column - -Revision ID: e4129c93e477 -Revises: 956a0833223 -Create Date: 2016-02-12 17:22:48.039791 - -""" - -# revision identifiers, used by Alembic. -revision = 'e4129c93e477' -down_revision = '956a0833223' - -from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects import mysql - -def upgrade(tables): - ### commands auto generated by Alembic - please adjust! ### - op.drop_index('tagmanifest_digest', table_name='tagmanifest') - op.create_index('tagmanifest_digest', 'tagmanifest', ['digest'], unique=False) - ### end Alembic commands ### - - -def downgrade(tables): - ### commands auto generated by Alembic - please adjust! ### - op.drop_index('tagmanifest_digest', table_name='tagmanifest') - op.create_index('tagmanifest_digest', 'tagmanifest', ['digest'], unique=True) - ### end Alembic commands ### diff --git a/data/migrations/versions/f42b0ea7a4d_remove_the_old_webhooks_table.py b/data/migrations/versions/f42b0ea7a4d_remove_the_old_webhooks_table.py deleted file mode 100644 index e36586a09..000000000 --- a/data/migrations/versions/f42b0ea7a4d_remove_the_old_webhooks_table.py +++ /dev/null @@ -1,32 +0,0 @@ -"""Remove the old webhooks table. - -Revision ID: f42b0ea7a4d -Revises: 4fdb65816b8d -Create Date: 2014-09-03 13:43:23.391464 - -""" - -# revision identifiers, used by Alembic. -revision = 'f42b0ea7a4d' -down_revision = '4fdb65816b8d' - -from alembic import op -import sqlalchemy as sa - -def upgrade(tables): - ### commands auto generated by Alembic - please adjust! ### - op.drop_table('webhook') - ### end Alembic commands ### - - -def downgrade(tables): - ### commands auto generated by Alembic - please adjust! ### - op.create_table('webhook', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('public_id', sa.String(length=255), nullable=False), - sa.Column('repository_id', sa.Integer(), nullable=False), - sa.Column('parameters', sa.Text(), nullable=False), - sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], ), - sa.PrimaryKeyConstraint('id') - ) - ### end Alembic commands ### diff --git a/util/migrate/backfill_aggregate_sizes.py b/util/migrate/backfill_aggregate_sizes.py deleted file mode 100644 index b19314e1a..000000000 --- a/util/migrate/backfill_aggregate_sizes.py +++ /dev/null @@ -1,55 +0,0 @@ -import logging - -from data.database import ImageStorage, Image, db, db_for_update -from app import app - - -logger = logging.getLogger(__name__) - - -def backfill_aggregate_sizes(): - """ Generates aggregate sizes for any image storage entries without them """ - logger.debug('Aggregate sizes backfill: Began execution') - while True: - batch_image_ids = list(Image - .select(Image.id) - .where(Image.aggregate_size >> None) - .limit(100)) - - if len(batch_image_ids) == 0: - # There are no storages left to backfill. We're done! 
- logger.debug('Aggregate sizes backfill: Backfill completed') - return - - logger.debug('Aggregate sizes backfill: Found %s records to update', len(batch_image_ids)) - for image_id in batch_image_ids: - logger.debug('Updating image : %s', image_id.id) - - with app.config['DB_TRANSACTION_FACTORY'](db): - try: - image = (Image - .select(Image, ImageStorage) - .join(ImageStorage) - .where(Image.id == image_id) - .get()) - - aggregate_size = image.storage.image_size - - image_ids = image.ancestor_id_list() - for image_id in image_ids: - to_add = db_for_update(Image - .select(Image, ImageStorage) - .join(ImageStorage) - .where(Image.id == image_id)).get() - aggregate_size += to_add.storage.image_size - - image.aggregate_size = aggregate_size - image.save() - except Image.DoesNotExist: - pass - - -if __name__ == "__main__": - logging.basicConfig(level=logging.DEBUG) - logging.getLogger('peewee').setLevel(logging.CRITICAL) - backfill_aggregate_sizes() diff --git a/util/migrate/backfill_content_checksums_and_torrent_pieces.py b/util/migrate/backfill_content_checksums_and_torrent_pieces.py deleted file mode 100644 index c20b96bad..000000000 --- a/util/migrate/backfill_content_checksums_and_torrent_pieces.py +++ /dev/null @@ -1,145 +0,0 @@ -import logging - -from peewee import (JOIN_LEFT_OUTER, CharField, BigIntegerField, BooleanField, ForeignKeyField, - IntegerField, IntegrityError, fn) - -from data.database import BaseModel, CloseForLongOperation -from data.fields import Base64BinaryField -from app import app, storage -from digest import checksums -from util.migrate.allocator import yield_random_entries -from util.registry.torrent import PieceHasher -from util.registry.filelike import wrap_with_handler - - -BATCH_SIZE = 1000 - - -logger = logging.getLogger(__name__) - - -# Vendor the information from tables we will be writing to at the time of this migration -class ImageStorage(BaseModel): - uuid = CharField(index=True, unique=True) - checksum = CharField(null=True) - image_size = BigIntegerField(null=True) - uncompressed_size = BigIntegerField(null=True) - uploading = BooleanField(default=True, null=True) - cas_path = BooleanField(default=True) - content_checksum = CharField(null=True, index=True) - - -class ImageStorageLocation(BaseModel): - name = CharField(unique=True, index=True) - - -class ImageStoragePlacement(BaseModel): - storage = ForeignKeyField(ImageStorage) - location = ForeignKeyField(ImageStorageLocation) - - -class TorrentInfo(BaseModel): - storage = ForeignKeyField(ImageStorage) - piece_length = IntegerField() - pieces = Base64BinaryField() - - -def _get_image_storage_locations(storage_id): - placements_query = (ImageStoragePlacement - .select(ImageStoragePlacement, ImageStorageLocation) - .join(ImageStorageLocation) - .switch(ImageStoragePlacement) - .join(ImageStorage, JOIN_LEFT_OUTER) - .where(ImageStorage.id == storage_id)) - - locations = set() - for placement in placements_query: - locations.add(placement.location.name) - - return locations - - -def _get_layer_path(storage_record): - """ Returns the path in the storage engine to the layer data referenced by the storage row. """ - if not storage_record.cas_path: - logger.debug('Serving layer from legacy v1 path: %s', storage_record.uuid) - return storage.v1_image_layer_path(storage_record.uuid) - return storage.blob_path(storage_record.content_checksum) - - -def backfill_content_checksums_and_torrent_pieces(piece_length): - """ Hashes the entire file for the content associated with an imagestorage. 
""" - logger.debug('Began execution') - logger.debug('This may be a long operation!') - - def batch_query(): - return (ImageStorage - .select(ImageStorage.id, ImageStorage.uuid, ImageStorage.content_checksum, - ImageStorage.cas_path) - .join(TorrentInfo, JOIN_LEFT_OUTER, on=((TorrentInfo.storage == ImageStorage.id) & - (TorrentInfo.piece_length == piece_length))) - .where((TorrentInfo.id >> None) | (ImageStorage.content_checksum >> None))) - - max_id = ImageStorage.select(fn.Max(ImageStorage.id)).scalar() - - checksums_written = 0 - pieces_written = 0 - for candidate_storage, abort in yield_random_entries(batch_query, ImageStorage.id, BATCH_SIZE, - max_id): - locations = _get_image_storage_locations(candidate_storage.id) - - checksum = candidate_storage.content_checksum - torrent_pieces = '' - with CloseForLongOperation(app.config): - try: - # Compute the checksum - layer_path = _get_layer_path(candidate_storage) - with storage.stream_read_file(locations, layer_path) as layer_data_handle: - hasher = PieceHasher(piece_length) - wrapped = wrap_with_handler(layer_data_handle, hasher.update) - checksum = 'sha256:{0}'.format(checksums.sha256_file(wrapped)) - torrent_pieces = hasher.final_piece_hashes() - except Exception as exc: - logger.exception('Unable to compute hashes for storage: %s', candidate_storage.uuid) - - # Create a fallback value for the checksum - if checksum is None: - checksum = 'unknown:{0}'.format(exc.__class__.__name__) - - torrent_collision = False - checksum_collision = False - - # Now update the ImageStorage with the checksum - num_updated = (ImageStorage - .update(content_checksum=checksum) - .where(ImageStorage.id == candidate_storage.id, - ImageStorage.content_checksum >> None)).execute() - checksums_written += num_updated - if num_updated == 0: - checksum_collision = True - - try: - TorrentInfo.create(storage=candidate_storage.id, piece_length=piece_length, - pieces=torrent_pieces) - pieces_written += 1 - except IntegrityError: - torrent_collision = True - - if torrent_collision and checksum_collision: - logger.info('Another worker pre-empted us for storage: %s', candidate_storage.uuid) - abort.set() - - if (pieces_written % BATCH_SIZE) == 0 or (checksums_written % BATCH_SIZE) == 0: - logger.debug('%s checksums written, %s torrent pieces written', checksums_written, - pieces_written) - - logger.debug('Completed, %s checksums written, %s torrent pieces written', checksums_written, - pieces_written) - - -if __name__ == '__main__': - logging.basicConfig(level=logging.DEBUG) - #logging.getLogger('peewee').setLevel(logging.WARNING) - logging.getLogger('boto').setLevel(logging.WARNING) - logging.getLogger('data.database').setLevel(logging.WARNING) - backfill_content_checksums_and_torrent_pieces(app.config['BITTORRENT_PIECE_SIZE']) diff --git a/util/migrate/backfill_image_fields.py b/util/migrate/backfill_image_fields.py deleted file mode 100644 index 184cc8a42..000000000 --- a/util/migrate/backfill_image_fields.py +++ /dev/null @@ -1,87 +0,0 @@ -import logging - -from peewee import (CharField, BigIntegerField, BooleanField, ForeignKeyField, DateTimeField, - TextField) -from data.database import BaseModel, db, db_for_update -from app import app - - -logger = logging.getLogger(__name__) - - -class Repository(BaseModel): - pass - - -# Vendor the information from tables we will be writing to at the time of this migration -class ImageStorage(BaseModel): - created = DateTimeField(null=True) - comment = TextField(null=True) - command = TextField(null=True) - aggregate_size = 
BigIntegerField(null=True) - uploading = BooleanField(default=True, null=True) - - -class Image(BaseModel): - # This class is intentionally denormalized. Even though images are supposed - # to be globally unique we can't treat them as such for permissions and - # security reasons. So rather than Repository <-> Image being many to many - # each image now belongs to exactly one repository. - docker_image_id = CharField(index=True) - repository = ForeignKeyField(Repository) - - # '/' separated list of ancestory ids, e.g. /1/2/6/7/10/ - ancestors = CharField(index=True, default='/', max_length=64535, null=True) - - storage = ForeignKeyField(ImageStorage, index=True, null=True) - - created = DateTimeField(null=True) - comment = TextField(null=True) - command = TextField(null=True) - aggregate_size = BigIntegerField(null=True) - v1_json_metadata = TextField(null=True) - - -def backfill_image_fields(): - """ Copies metadata from image storages to their images. """ - logger.debug('Image metadata backfill: Began execution') - while True: - batch_image_ids = list(Image - .select(Image.id) - .join(ImageStorage) - .where(Image.created >> None, Image.comment >> None, - Image.command >> None, Image.aggregate_size >> None, - ImageStorage.uploading == False, - ~((ImageStorage.created >> None) & - (ImageStorage.comment >> None) & - (ImageStorage.command >> None) & - (ImageStorage.aggregate_size >> None))) - .limit(100)) - - if len(batch_image_ids) == 0: - logger.debug('Image metadata backfill: Backfill completed') - return - - logger.debug('Image metadata backfill: Found %s records to update', len(batch_image_ids)) - for image_id in batch_image_ids: - logger.debug('Updating image: %s', image_id.id) - - with app.config['DB_TRANSACTION_FACTORY'](db): - try: - image = db_for_update(Image - .select(Image, ImageStorage) - .join(ImageStorage) - .where(Image.id == image_id.id)).get() - - image.created = image.storage.created - image.comment = image.storage.comment - image.command = image.storage.command - image.aggregate_size = image.storage.aggregate_size - image.save() - except Image.DoesNotExist: - pass - -if __name__ == "__main__": - logging.basicConfig(level=logging.DEBUG) - logging.getLogger('peewee').setLevel(logging.CRITICAL) - backfill_image_fields() diff --git a/util/migrate/backfill_parent_id.py b/util/migrate/backfill_parent_id.py deleted file mode 100644 index 0c1621775..000000000 --- a/util/migrate/backfill_parent_id.py +++ /dev/null @@ -1,82 +0,0 @@ -import logging - -from data.database import BaseModel -from peewee import (fn, CharField, BigIntegerField, ForeignKeyField, BooleanField, DateTimeField, - TextField, IntegerField) -from app import app -from util.migrate.allocator import yield_random_entries - - -BATCH_SIZE = 1000 - - -class Repository(BaseModel): - pass - - -# Vendor the information from tables we will be writing to at the time of this migration -class ImageStorage(BaseModel): - uuid = CharField(index=True, unique=True) - checksum = CharField(null=True) - image_size = BigIntegerField(null=True) - uncompressed_size = BigIntegerField(null=True) - uploading = BooleanField(default=True, null=True) - cas_path = BooleanField(default=True) - content_checksum = CharField(null=True, index=True) - - -class Image(BaseModel): - docker_image_id = CharField(index=True) - repository = ForeignKeyField(Repository) - ancestors = CharField(index=True, default='/', max_length=64535, null=True) - storage = ForeignKeyField(ImageStorage, index=True, null=True) - created = DateTimeField(null=True) - comment = 
TextField(null=True) - command = TextField(null=True) - aggregate_size = BigIntegerField(null=True) - v1_json_metadata = TextField(null=True) - v1_checksum = CharField(null=True) - - security_indexed = BooleanField(default=False) - security_indexed_engine = IntegerField(default=-1) - parent_id = IntegerField(index=True, null=True) - - -logger = logging.getLogger(__name__) - - -def backfill_parent_id(): - logger.setLevel(logging.DEBUG) - - logger.debug('backfill_parent_id: Starting') - logger.debug('backfill_parent_id: This can be a LONG RUNNING OPERATION. Please wait!') - - def fetch_batch(): - return (Image - .select(Image.id, Image.ancestors) - .join(ImageStorage) - .where(Image.parent_id >> None, Image.ancestors != '/', - ImageStorage.uploading == False)) - - max_id = Image.select(fn.Max(Image.id)).scalar() - - written = 0 - for to_backfill, abort in yield_random_entries(fetch_batch, Image.id, BATCH_SIZE, max_id): - computed_parent = int(to_backfill.ancestors.split('/')[-2]) - num_changed = (Image - .update(parent_id=computed_parent) - .where(Image.id == to_backfill.id, Image.parent_id >> None)).execute() - if num_changed == 0: - logger.info('Collision with another worker, aborting batch') - abort.set() - written += num_changed - if (written % BATCH_SIZE) == 0: - logger.debug('%s entries written', written) - - logger.debug('backfill_parent_id: Completed, updated %s entries', written) - -if __name__ == '__main__': - logging.basicConfig(level=logging.DEBUG) - logging.getLogger('peewee').setLevel(logging.CRITICAL) - - backfill_parent_id() diff --git a/util/migrate/backfill_user_uuids.py b/util/migrate/backfill_user_uuids.py deleted file mode 100644 index e71ec82a2..000000000 --- a/util/migrate/backfill_user_uuids.py +++ /dev/null @@ -1,54 +0,0 @@ -import logging -import uuid - -from data.database import User, db -from app import app - -LOGGER = logging.getLogger(__name__) - -def backfill_user_uuids(): - """ Generates UUIDs for any Users without them. """ - LOGGER.setLevel(logging.DEBUG) - LOGGER.debug('User UUID Backfill: Began execution') - - - # Check to see if any users are missing uuids. - has_missing_uuids = True - try: - User.select(User.id).where(User.uuid >> None).get() - except User.DoesNotExist: - has_missing_uuids = False - - if not has_missing_uuids: - LOGGER.debug('User UUID Backfill: No migration needed') - return - - LOGGER.debug('User UUID Backfill: Starting migration') - while True: - batch_user_ids = list(User - .select(User.id) - .where(User.uuid >> None) - .limit(100)) - - if len(batch_user_ids) == 0: - # There are no users left to backfill. We're done! 
- LOGGER.debug('User UUID Backfill: Backfill completed') - return - - LOGGER.debug('User UUID Backfill: Found %s records to update', len(batch_user_ids)) - for user_id in batch_user_ids: - with app.config['DB_TRANSACTION_FACTORY'](db): - try: - user = User.select(User.id, User.uuid).where(User.id == user_id).get() - user.uuid = str(uuid.uuid4()) - user.save(only=[User.uuid]) - except User.DoesNotExist: - pass - - -if __name__ == "__main__": - logging.basicConfig(level=logging.DEBUG) - logging.getLogger('boto').setLevel(logging.CRITICAL) - logging.getLogger('peewee').setLevel(logging.CRITICAL) - - backfill_user_uuids() diff --git a/util/migrate/backfill_v1_checksums.py b/util/migrate/backfill_v1_checksums.py deleted file mode 100644 index 0c4e190ae..000000000 --- a/util/migrate/backfill_v1_checksums.py +++ /dev/null @@ -1,75 +0,0 @@ -import logging - -from peewee import (CharField, BigIntegerField, BooleanField, ForeignKeyField, DateTimeField, - TextField, fn) -from data.database import BaseModel -from util.migrate.allocator import yield_random_entries -from app import app - - -BATCH_SIZE = 1000 - - -logger = logging.getLogger(__name__) - - -class Repository(BaseModel): - pass - - -# Vendor the information from tables we will be writing to at the time of this migration -class ImageStorage(BaseModel): - uuid = CharField(index=True, unique=True) - checksum = CharField(null=True) - image_size = BigIntegerField(null=True) - uncompressed_size = BigIntegerField(null=True) - uploading = BooleanField(default=True, null=True) - cas_path = BooleanField(default=True) - content_checksum = CharField(null=True, index=True) - - -class Image(BaseModel): - docker_image_id = CharField(index=True) - repository = ForeignKeyField(Repository) - ancestors = CharField(index=True, default='/', max_length=64535, null=True) - storage = ForeignKeyField(ImageStorage, index=True, null=True) - created = DateTimeField(null=True) - comment = TextField(null=True) - command = TextField(null=True) - aggregate_size = BigIntegerField(null=True) - v1_json_metadata = TextField(null=True) - v1_checksum = CharField(null=True) - - -def backfill_checksums(): - """ Copies checksums from image storages to their images. 
""" - logger.debug('Began execution') - logger.debug('This may be a long operation!') - def batch_query(): - return (Image - .select(Image, ImageStorage) - .join(ImageStorage) - .where(Image.v1_checksum >> None, ImageStorage.uploading == False, - ~(ImageStorage.checksum >> None))) - - max_id = Image.select(fn.Max(Image.id)).scalar() - - written = 0 - for candidate_image, abort in yield_random_entries(batch_query, Image.id, BATCH_SIZE, max_id): - num_changed = (Image - .update(v1_checksum=candidate_image.storage.checksum) - .where(Image.id == candidate_image.id, Image.v1_checksum >> None)).execute() - if num_changed == 0: - logger.info('Collision with another worker, aborting batch') - abort.set() - written += num_changed - if (written % BATCH_SIZE) == 0: - logger.debug('%s entries written', written) - - logger.debug('Completed, updated %s entries', written) - - -if __name__ == "__main__": - logging.basicConfig(level=logging.DEBUG) - logging.getLogger('peewee').setLevel(logging.CRITICAL) - backfill_checksums() diff --git a/util/migrate/backfill_v1_metadata.py b/util/migrate/backfill_v1_metadata.py deleted file mode 100644 index 20680f077..000000000 --- a/util/migrate/backfill_v1_metadata.py +++ /dev/null @@ -1,117 +0,0 @@ -import logging - -from peewee import JOIN_LEFT_OUTER - -from peewee import (CharField, BigIntegerField, BooleanField, ForeignKeyField, DateTimeField, - TextField) - -from data.database import BaseModel, db, db_for_update -from app import app, storage -from data import model - - -logger = logging.getLogger(__name__) - - -class Repository(BaseModel): - pass - - -# Vendor the information from tables we will be writing to at the time of this migration -class ImageStorage(BaseModel): - uuid = CharField(index=True, unique=True) - checksum = CharField(null=True) - image_size = BigIntegerField(null=True) - uncompressed_size = BigIntegerField(null=True) - uploading = BooleanField(default=True, null=True) - - -class Image(BaseModel): - # This class is intentionally denormalized. Even though images are supposed - # to be globally unique we can't treat them as such for permissions and - # security reasons. So rather than Repository <-> Image being many to many - # each image now belongs to exactly one repository. - docker_image_id = CharField(index=True) - repository = ForeignKeyField(Repository) - - # '/' separated list of ancestory ids, e.g. /1/2/6/7/10/ - ancestors = CharField(index=True, default='/', max_length=64535, null=True) - - storage = ForeignKeyField(ImageStorage, index=True, null=True) - - created = DateTimeField(null=True) - comment = TextField(null=True) - command = TextField(null=True) - aggregate_size = BigIntegerField(null=True) - v1_json_metadata = TextField(null=True) - - -class ImageStorageLocation(BaseModel): - name = CharField(unique=True, index=True) - - -class ImageStoragePlacement(BaseModel): - storage = ForeignKeyField(ImageStorage) - location = ForeignKeyField(ImageStorageLocation) - - -def image_json_path(storage_uuid): - base_path = storage._image_path(storage_uuid) - return '{0}json'.format(base_path) - - -def backfill_v1_metadata(): - """ Copies metadata from image storages to their images. 
""" - logger.debug('Image v1 metadata backfill: Began execution') - - while True: - batch_image_ids = list(Image - .select(Image.id) - .join(ImageStorage) - .where(Image.v1_json_metadata >> None, ImageStorage.uploading == False) - .limit(100)) - - if len(batch_image_ids) == 0: - logger.debug('Image v1 metadata backfill: Backfill completed') - return - - logger.debug('Image v1 metadata backfill: Found %s records to update', len(batch_image_ids)) - for one_id in batch_image_ids: - with app.config['DB_TRANSACTION_FACTORY'](db): - try: - logger.debug('Loading image: %s', one_id.id) - - raw_query = (ImageStoragePlacement - .select(ImageStoragePlacement, Image, ImageStorage, ImageStorageLocation) - .join(ImageStorageLocation) - .switch(ImageStoragePlacement) - .join(ImageStorage, JOIN_LEFT_OUTER) - .join(Image) - .where(Image.id == one_id.id)) - - placement_query = db_for_update(raw_query) - - repo_image_list = model.image.invert_placement_query_results(placement_query) - if len(repo_image_list) > 1: - logger.error('Found more images than we requested, something is wrong with the query') - return - - repo_image = repo_image_list[0] - uuid = repo_image.storage.uuid - json_path = image_json_path(uuid) - - logger.debug('Updating image: %s from: %s', repo_image.id, json_path) - try: - data = storage.get_content(repo_image.storage.locations, json_path) - except IOError: - data = "{}" - logger.warning('failed to find v1 metadata, defaulting to {}') - repo_image.v1_json_metadata = data - repo_image.save() - except ImageStoragePlacement.DoesNotExist: - pass - -if __name__ == "__main__": - logging.basicConfig(level=logging.DEBUG) - # logging.getLogger('peewee').setLevel(logging.CRITICAL) - backfill_v1_metadata() diff --git a/util/migrate/fixsequences.py b/util/migrate/fixsequences.py deleted file mode 100644 index ecfa03a14..000000000 --- a/util/migrate/fixsequences.py +++ /dev/null @@ -1,52 +0,0 @@ -import uuid -import logging - -from peewee import IntegrityError, CharField - -from app import app -from data.database import BaseModel - - -logger = logging.getLogger(__name__) - - -ENUM_CLASSES_WITH_SEQUENCES = [ - 'TeamRole', - 'LoginService', - 'Visibility', - 'Role', - 'AccessTokenKind', - 'BuildTriggerService', - 'ImageStorageTransformation', - 'ImageStorageSignatureKind', - 'ImageStorageLocation', - 'LogEntryKind', - 'NotificationKind', - 'ExternalNotificationEvent', - 'ExternalNotificationMethod', -] - - -def reset_enum_sequences(): - for class_name in ENUM_CLASSES_WITH_SEQUENCES: - reset_sequence(class_name) - - -def reset_sequence(class_name): - logger.info('Resetting sequence for table: %s', class_name.lower()) - unique_name = '%s' % uuid.uuid4() - - Model = type(class_name, (BaseModel,), {'name': CharField(index=True)}) - - for skips in xrange(50): - try: - Model.create(name=unique_name).delete_instance() - logger.info('Advanced sequence %s numbers', skips) - break - except IntegrityError: - pass - - -if __name__ == '__main__': - logging.basicConfig(level=logging.DEBUG) - reset_enum_sequences() diff --git a/util/migrate/migratebitbucketservices.py b/util/migrate/migratebitbucketservices.py deleted file mode 100644 index f20f98edd..000000000 --- a/util/migrate/migratebitbucketservices.py +++ /dev/null @@ -1,116 +0,0 @@ -import logging -import json - -from app import app -from data.database import configure, BaseModel, uuid_generator -from peewee import * -from bitbucket import BitBucket -from buildtrigger.bitbuckethandler import BitbucketBuildTrigger - -configure(app.config) - -logger = 
diff --git a/util/migrate/migratebitbucketservices.py b/util/migrate/migratebitbucketservices.py
deleted file mode 100644
index f20f98edd..000000000
--- a/util/migrate/migratebitbucketservices.py
+++ /dev/null
@@ -1,116 +0,0 @@
-import logging
-import json
-
-from app import app
-from data.database import configure, BaseModel, uuid_generator
-from peewee import *
-from bitbucket import BitBucket
-from buildtrigger.bitbuckethandler import BitbucketBuildTrigger
-
-configure(app.config)
-
-logger = logging.getLogger(__name__)
-
-# Note: We vendor the RepositoryBuildTrigger and its dependencies here
-class Repository(BaseModel):
-  pass
-
-class BuildTriggerService(BaseModel):
-  name = CharField(index=True, unique=True)
-
-class AccessToken(BaseModel):
-  pass
-
-class User(BaseModel):
-  pass
-
-class RepositoryBuildTrigger(BaseModel):
-  uuid = CharField(default=uuid_generator)
-  service = ForeignKeyField(BuildTriggerService, index=True)
-  repository = ForeignKeyField(Repository, index=True)
-  connected_user = ForeignKeyField(User)
-  auth_token = CharField(null=True)
-  private_key = TextField(null=True)
-  config = TextField(default='{}')
-  write_token = ForeignKeyField(AccessToken, null=True)
-  pull_robot = ForeignKeyField(User, related_name='triggerpullrobot')
-
-
-def run_bitbucket_migration():
-  bitbucket_trigger = BuildTriggerService.get(BuildTriggerService.name == "bitbucket")
-
-  encountered = set()
-  while True:
-    found = list(RepositoryBuildTrigger.select().where(
-        RepositoryBuildTrigger.service == bitbucket_trigger,
-        RepositoryBuildTrigger.config ** "%\"hook_id%"))
-
-    found = [f for f in found if f.uuid not in encountered]
-
-    if not found:
-      logger.debug('No additional records found')
-      return
-
-    logger.debug('Found %s records to be changed', len(found))
-    for trigger in found:
-      encountered.add(trigger.uuid)
-
-      try:
-        config = json.loads(trigger.config)
-      except ValueError:
-        logger.error("Cannot parse config for trigger %s", trigger.uuid)
-        continue
-
-      logger.debug("Checking trigger %s", trigger.uuid)
-      if 'hook_id' in config:
-        logger.debug("Updating trigger %s to a webhook", trigger.uuid)
-
-        trigger_handler = BitbucketBuildTrigger(trigger)
-        client = trigger_handler._get_repository_client()
-
-        hook_id = config['hook_id']
-
-        # Look up the old service hook.
-        logger.debug("Looking up old service URL for trigger %s", trigger.uuid)
-        (result, hook_data, err_msg) = client.services().get(hook_id)
-        if not result or not hook_data:
-          logger.error('Error when retrieving service hook for trigger %s: %s', trigger.uuid, err_msg)
-          continue
-
-        if 'webhook_id' not in config:
-          hook_data = hook_data[0]['service']
-          webhook_url = [f for f in hook_data['fields'] if f['name'] == 'URL'][0]['value']
-          logger.debug("Adding webhook for trigger %s: %s", trigger.uuid, webhook_url)
-
-          # Add the new webhook.
-          description = 'Webhook for invoking builds on %s' % app.config['REGISTRY_TITLE_SHORT']
-          webhook_events = ['repo:push']
-          (result, data, err_msg) = client.webhooks().create(description, webhook_url, webhook_events)
-          if not result:
-            logger.error('Error when adding webhook for trigger %s: %s', trigger.uuid, err_msg)
-            continue
-
-          config['webhook_id'] = data['uuid']
-          trigger.config = json.dumps(config)
-          trigger.save()
-
-        # Remove the old service hook.
-        logger.debug("Deleting old service URL for trigger %s", trigger.uuid)
-        (result, _, err_msg) = client.services().delete(hook_id)
-        if not result:
-          logger.error('Error when deleting service hook for trigger %s: %s', trigger.uuid, err_msg)
-          continue
-
-        del config['hook_id']
-
-        # Update the config.
-        trigger.config = json.dumps(config)
-        trigger.save()
-        logger.debug("Trigger %s updated to a webhook", trigger.uuid)
-
-
-if __name__ == "__main__":
-  logging.basicConfig(level=logging.DEBUG)
-  logging.getLogger('boto').setLevel(logging.CRITICAL)
-
-  run_bitbucket_migration()
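# NOTE: `config ** '%"hook_id%'` above is peewee's case-insensitive LIKE,
# used as a coarse SQL prefilter over a JSON-serialized TEXT column; the
# authoritative check only happens after json.loads(). A reduced sketch of
# that two-phase scan (the `Trigger` model and key name are illustrative):

import json

from peewee import Model, TextField


class Trigger(Model):
  config = TextField(default='{}')


def triggers_with_key(key):
  # Phase 1: LIKE narrows the scan cheaply. It may return false positives
  # (the key text appearing inside some value), but no false negatives.
  candidates = Trigger.select().where(Trigger.config ** ('%"' + key + '%'))
  for trigger in candidates:
    try:
      parsed = json.loads(trigger.config)
    except ValueError:
      continue
    # Phase 2: confirm against the parsed document before acting on it.
    if key in parsed:
      yield trigger, parsed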
diff --git a/util/migrate/migrategithubdeploykeys.py b/util/migrate/migrategithubdeploykeys.py
deleted file mode 100644
index b2c1903a1..000000000
--- a/util/migrate/migrategithubdeploykeys.py
+++ /dev/null
@@ -1,125 +0,0 @@
-import logging
-import logging.config
-import json
-
-from data.database import (db, db_for_update, BaseModel, CharField, ForeignKeyField,
-                           TextField, BooleanField)
-from app import app
-from buildtrigger.basehandler import BuildTriggerHandler
-from util.security.ssh import generate_ssh_keypair
-from github import GithubException
-
-logger = logging.getLogger(__name__)
-
-class BuildTriggerService(BaseModel):
-  name = CharField(index=True, unique=True)
-
-class Repository(BaseModel):
-  pass
-
-class User(BaseModel):
-  pass
-
-class AccessToken(BaseModel):
-  pass
-
-class RepositoryBuildTrigger(BaseModel):
-  uuid = CharField()
-  service = ForeignKeyField(BuildTriggerService, index=True)
-  repository = ForeignKeyField(Repository, index=True)
-  connected_user = ForeignKeyField(User)
-  auth_token = CharField(null=True)
-  private_key = TextField(null=True)
-  config = TextField(default='{}')
-  write_token = ForeignKeyField(AccessToken, null=True)
-  pull_robot = ForeignKeyField(User, null=True, related_name='triggerpullrobot')
-
-  used_legacy_github = BooleanField(null=True, default=False)
-
-
-def backfill_github_deploykeys():
-  """ Generates and saves private deploy keys for any GitHub build triggers still relying on
-      the old buildpack behavior. """
-  logger.setLevel(logging.DEBUG)
-  logger.debug('GitHub deploy key backfill: Began execution')
-
-  encountered = set()
-  github_service = BuildTriggerService.get(name='github')
-
-  while True:
-    build_trigger_ids = list(RepositoryBuildTrigger
-                             .select(RepositoryBuildTrigger.id)
-                             .where(RepositoryBuildTrigger.private_key >> None)
-                             .where(RepositoryBuildTrigger.service == github_service)
-                             .where(RepositoryBuildTrigger.used_legacy_github >> None)
-                             .limit(100))
-
-    filtered_ids = [trigger.id for trigger in build_trigger_ids if trigger.id not in encountered]
-    if len(filtered_ids) == 0:
-      # We're done!
-      logger.debug('GitHub deploy key backfill: Backfill completed')
-      return
-
-    logger.debug('GitHub deploy key backfill: Found %s records to update', len(filtered_ids))
-    for trigger_id in filtered_ids:
-      encountered.add(trigger_id)
-      logger.debug('Updating build trigger: %s', trigger_id)
-
-      with app.config['DB_TRANSACTION_FACTORY'](db):
-        try:
-          query = RepositoryBuildTrigger.select().where(RepositoryBuildTrigger.id == trigger_id)
-          trigger = db_for_update(query).get()
-        except RepositoryBuildTrigger.DoesNotExist:
-          logger.debug('Could not find build trigger %s', trigger_id)
-          continue
-
-        trigger.used_legacy_github = True
-        trigger.save()
-
-        handler = BuildTriggerHandler.get_handler(trigger)
-
-        config = handler.config
-        if 'build_source' not in config:
-          logger.debug('Could not find build source for trigger %s', trigger_id)
-          continue
-
-        build_source = config['build_source']
-        gh_client = handler._get_client()
-
-        # Find the GitHub repository.
-        try:
-          gh_repo = gh_client.get_repo(build_source)
-        except GithubException:
-          logger.exception('Cannot find repository %s for trigger %s', build_source, trigger.id)
-          continue
-
-        # Add a deploy key to the GitHub repository.
-        public_key, private_key = generate_ssh_keypair()
-        config['credentials'] = [
-          {
-            'name': 'SSH Public Key',
-            'value': public_key,
-          },
-        ]
-
-        logger.debug('Adding deploy key to build trigger %s', trigger.id)
-        try:
-          deploy_key = gh_repo.create_key('%s Builder' % app.config['REGISTRY_TITLE'], public_key)
-          config['deploy_key_id'] = deploy_key.id
-        except GithubException:
-          logger.exception('Cannot add deploy key to repository %s for trigger %s', build_source, trigger.id)
-          continue
-
-        logger.debug('Saving deploy key for trigger %s', trigger.id)
-        trigger.used_legacy_github = True
-        trigger.private_key = private_key
-        trigger.config = json.dumps(config)
-        trigger.save()
-
-
-if __name__ == "__main__":
-  logging.getLogger('boto').setLevel(logging.CRITICAL)
-  logging.getLogger('github').setLevel(logging.CRITICAL)
-
-  logging.config.fileConfig('conf/logging_debug.conf', disable_existing_loggers=False)
-  backfill_github_deploykeys()
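# NOTE: the deploy-key flow above is: generate a keypair, keep the private
# half on the trigger row, and register the public half with GitHub through
# PyGithub's Repository.create_key(title, key). A trimmed standalone sketch,
# with the token, repository name, and key title as placeholder values:

from github import Github, GithubException


def add_deploy_key(token, repo_name, public_key):
  gh_client = Github(token)
  try:
    gh_repo = gh_client.get_repo(repo_name)  # e.g. 'someorg/somerepo'
    deploy_key = gh_repo.create_key('CI Builder', public_key)
  except GithubException:
    return None
  # The key id is persisted so the deploy key can be found and revoked later.
  return deploy_key.id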
diff --git a/util/migrate/migrateslackwebhook.py b/util/migrate/migrateslackwebhook.py
deleted file mode 100644
index e480f6180..000000000
--- a/util/migrate/migrateslackwebhook.py
+++ /dev/null
@@ -1,53 +0,0 @@
-import logging
-import json
-
-from app import app
-from data.database import configure, RepositoryNotification, ExternalNotificationMethod
-
-configure(app.config)
-
-logger = logging.getLogger(__name__)
-
-def run_slackwebhook_migration():
-  slack_method = ExternalNotificationMethod.get(ExternalNotificationMethod.name == "slack")
-
-  encountered = set()
-  while True:
-    found = list(RepositoryNotification.select(RepositoryNotification.uuid,
-                                               RepositoryNotification.config_json)
-                 .where(RepositoryNotification.method == slack_method,
-                        RepositoryNotification.config_json ** "%subdomain%",
-                        ~(RepositoryNotification.config_json ** "%url%")))
-
-    found = [f for f in found if f.uuid not in encountered]
-
-    if not found:
-      logger.debug('No additional records found')
-      return
-
-    logger.debug('Found %s records to be changed', len(found))
-    for notification in found:
-      encountered.add(notification.uuid)
-
-      try:
-        config = json.loads(notification.config_json)
-      except ValueError:
-        logger.error("Cannot parse config for notification %s", notification.uuid)
-        continue
-
-      logger.debug("Checking notification %s", notification.uuid)
-      if 'subdomain' in config and 'token' in config:
-        subdomain = config['subdomain']
-        token = config['token']
-        new_url = 'https://%s.slack.com/services/hooks/incoming-webhook?token=%s' % (subdomain, token)
-        config['url'] = new_url
-
-        logger.debug("Updating notification %s to URL: %s", notification.uuid, new_url)
-        notification.config_json = json.dumps(config)
-        notification.save()
-
-if __name__ == "__main__":
-  logging.basicConfig(level=logging.DEBUG)
-  logging.getLogger('boto').setLevel(logging.CRITICAL)
-
-  run_slackwebhook_migration()
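# NOTE: every script in this batch shares the same terminating scan loop:
# re-query for rows that still look unmigrated, drop anything already seen,
# and stop once a pass turns up nothing new. The `encountered` set is what
# guarantees termination even when some rows are unparseable and therefore
# never leave the result set. The control flow, reduced to a skeleton
# (find_candidate_rows and fix_row are placeholders):

def run_scan_loop(find_candidate_rows, fix_row):
  encountered = set()
  while True:
    # Already-handled (or known-bad) rows are filtered out, so each pass
    # either shrinks the remaining frontier or exits.
    found = [row for row in find_candidate_rows() if row.uuid not in encountered]
    if not found:
      return
    for row in found:
      encountered.add(row.uuid)
      fix_row(row)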
diff --git a/util/migrate/uncompressedsize.py b/util/migrate/uncompressedsize.py
deleted file mode 100644
index 1a3ceb4a6..000000000
--- a/util/migrate/uncompressedsize.py
+++ /dev/null
@@ -1,105 +0,0 @@
-import logging
-import zlib
-
-from data import model
-from data.database import ImageStorage
-from app import app, storage as store
-from data.database import db, db_random_func
-from util.registry.gzipstream import ZLIB_GZIP_WINDOW
-
-
-logger = logging.getLogger(__name__)
-
-
-CHUNK_SIZE = 5 * 1024 * 1024
-
-def backfill_sizes_from_data():
-  logger.setLevel(logging.DEBUG)
-
-  logger.debug('Starting uncompressed image size backfill')
-  logger.debug('NOTE: This can be a LONG-RUNNING OPERATION. Please wait!')
-
-  # Check for any uncompressed images.
-  has_images = bool(list(ImageStorage
-                         .select(ImageStorage.uuid)
-                         .where(ImageStorage.uncompressed_size >> None,
-                                ImageStorage.image_size > 0,
-                                ImageStorage.uploading == False)
-                         .limit(1)))
-
-  if not has_images:
-    logger.debug('Uncompressed backfill: No migration needed')
-    return
-
-  logger.debug('Uncompressed backfill: Starting migration')
-  encountered = set()
-  while True:
-    # Load the records from the DB.
-    batch_ids = list(ImageStorage
-                     .select(ImageStorage.uuid)
-                     .where(ImageStorage.uncompressed_size >> None,
-                            ImageStorage.image_size > 0,
-                            ImageStorage.uploading == False)
-                     .limit(100)
-                     .order_by(db_random_func()))
-
-    batch_ids = set([s.uuid for s in batch_ids]) - encountered
-    logger.debug('Found %s images to process', len(batch_ids))
-    if len(batch_ids) == 0:
-      # We're done!
-      return
-
-    counter = 1
-    for uuid in batch_ids:
-      encountered.add(uuid)
-
-      logger.debug('Processing image ID %s (%s/%s)', uuid, counter, len(batch_ids))
-      counter = counter + 1
-
-      try:
-        with_locs = model.storage.get_storage_by_uuid(uuid)
-        if with_locs.uncompressed_size is not None:
-          logger.debug('Somebody else already filled this in for us: %s', uuid)
-          continue
-
-        # Read the layer from backing storage and calculate the uncompressed size.
-        logger.debug('Loading data: %s (%s bytes)', uuid, with_locs.image_size)
-        decompressor = zlib.decompressobj(ZLIB_GZIP_WINDOW)
-
-        uncompressed_size = 0
-        with store.stream_read_file(with_locs.locations, store.v1_image_layer_path(uuid)) as stream:
-          while True:
-            current_data = stream.read(CHUNK_SIZE)
-            if len(current_data) == 0:
-              break
-
-            while current_data:
-              uncompressed_size += len(decompressor.decompress(current_data, CHUNK_SIZE))
-              current_data = decompressor.unconsumed_tail
-
-        # Write the size to the image storage. We do so under a transaction AFTER checking to
-        # make sure the image storage still exists and has not changed.
-        logger.debug('Writing entry: %s. Size: %s', uuid, uncompressed_size)
-        with app.config['DB_TRANSACTION_FACTORY'](db):
-          current_record = model.storage.get_storage_by_uuid(uuid)
-
-          if not current_record.uploading and current_record.uncompressed_size is None:
-            current_record.uncompressed_size = uncompressed_size
-            current_record.save()
-          else:
-            logger.debug('Somebody else already filled this in for us, after we did the work: %s',
-                         uuid)
-
-      except model.InvalidImageException:
-        logger.warning('Storage with uuid no longer exists: %s', uuid)
-      except IOError:
-        logger.warning('IOError on %s', uuid)
-      except MemoryError:
-        logger.warning('MemoryError on %s', uuid)
-
-if __name__ == "__main__":
-  logging.basicConfig(level=logging.DEBUG)
-  logging.getLogger('boto').setLevel(logging.CRITICAL)
-  logging.getLogger('peewee').setLevel(logging.CRITICAL)
-
-  backfill_sizes_from_data()
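# NOTE: the decompression loop above never materializes a whole layer in
# memory: it feeds CHUNK_SIZE slices to a zlib decompressor, caps each
# decompress() call with max_length, and re-feeds unconsumed_tail until the
# chunk is drained. The same technique, standalone (16 + MAX_WBITS tells
# zlib to expect gzip framing; the original's ZLIB_GZIP_WINDOW constant is
# assumed to serve the same purpose):

import zlib


def uncompressed_length(stream, chunk_size=5 * 1024 * 1024):
  decompressor = zlib.decompressobj(16 + zlib.MAX_WBITS)  # gzip header + 15-bit window
  total = 0
  while True:
    chunk = stream.read(chunk_size)
    if not chunk:
      break
    while chunk:
      # max_length bounds the output produced per call; compressed input
      # that was not consumed comes back via unconsumed_tail.
      total += len(decompressor.decompress(chunk, chunk_size))
      chunk = decompressor.unconsumed_tail
  return total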