initial import for Open Source 🎉
This commit is contained in:
parent
1898c361f3
commit
9c0dd3b722
2048 changed files with 218743 additions and 0 deletions
0
data/migrations/__init__.py
Normal file
0
data/migrations/__init__.py
Normal file
154
data/migrations/env.py
Normal file
154
data/migrations/env.py
Normal file
|
@ -0,0 +1,154 @@
|
|||
import logging
|
||||
import os
|
||||
|
||||
from logging.config import fileConfig
|
||||
from urllib import unquote
|
||||
|
||||
from alembic import context
|
||||
from alembic.script.revision import ResolutionError
|
||||
from alembic.util import CommandError
|
||||
from sqlalchemy import engine_from_config, pool
|
||||
from peewee import SqliteDatabase
|
||||
|
||||
from data.database import all_models, db
|
||||
from data.migrations.tester import NoopTester, PopulateTestDataTester
|
||||
from data.model.sqlalchemybridge import gen_sqlalchemy_metadata
|
||||
from release import GIT_HEAD, REGION, SERVICE
|
||||
from util.morecollections import AttrDict
|
||||
from data.migrations.progress import PrometheusReporter, NullReporter
|
||||
|
||||
|
||||
# Alembic Config object, providing access to the values in alembic.ini.
config = context.config

# Default connection string; overridden below when the app config is loaded.
DB_URI = config.get_main_option('db_uri', 'sqlite:///test/data/test.db')

# Env vars with this prefix are turned into Prometheus grouping labels
# (see get_progress_reporter).
PROM_LABEL_PREFIX = 'DBA_OP_LABEL_'


# This option exists because alembic needs the db proxy to be configured in order
# to perform migrations. The app import does the init of the proxy, but we don't
# want that in the case of the config app, as we are explicitly connecting to a
# db that the user has passed in, and we can't have import dependency on app
if config.get_main_option('alembic_setup_app', 'True') == 'True':
    from app import app
    DB_URI = app.config['DB_URI']

# URL-decode the URI before handing it to SQLAlchemy; it is stored %-escaped
# (presumably to survive configparser interpolation — see test_db_config).
config.set_main_option('sqlalchemy.url', unquote(DB_URI))

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name:
    fileConfig(config.config_file_name)

logger = logging.getLogger(__name__)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = gen_sqlalchemy_metadata(all_models)
tables = AttrDict(target_metadata.tables)

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
|
||||
|
||||
def get_tester():
    """ Returns the tester to use. We only return the tester that populates data
        if the TEST_MIGRATE env var is set to `true` AND we make sure we're not
        connecting to a production database.
    """
    wants_test_data = os.environ.get('TEST_MIGRATE', '') == 'true'
    if wants_test_data and 'amazonaws.com' not in unquote(DB_URI):
        return PopulateTestDataTester()

    return NoopTester()
|
||||
|
||||
def get_progress_reporter():
    """ Builds the progress reporter for this migration run.

    When a Prometheus push gateway address is configured in the environment,
    returns a PrometheusReporter pushing under the configured job id, with all
    DBA_OP_LABEL_-prefixed env vars lowercased into grouping labels; otherwise
    returns a no-op reporter.
    """
    gateway_addr = os.environ.get('DBA_OP_PROMETHEUS_PUSH_GATEWAY_ADDR')
    if gateway_addr is None:
        return NullReporter()

    job_id = os.environ.get('DBA_OP_JOB_ID')

    labels = {}
    for env_key, env_value in os.environ.items():
        if env_key.startswith(PROM_LABEL_PREFIX):
            labels[env_key[len(PROM_LABEL_PREFIX):].lower()] = env_value

    return PrometheusReporter(gateway_addr, job_id, labels)
|
||||
|
||||
def report_success(ctx=None, step=None, heads=None, run_args=None):
    """ Alembic `on_version_apply` callback: marks a migration version as
        successfully applied on the configured progress reporter.

    Args:
        ctx, step, heads: supplied by alembic; unused here.
        run_args: the kwargs that were passed to context.run_migrations();
            expected to contain a 'progress_reporter' entry.
    """
    # Guard: the original unconditionally subscripted run_args, raising a
    # TypeError if the callback was ever invoked without run_args.
    if not run_args:
        return
    progress_reporter = run_args['progress_reporter']
    progress_reporter.report_version_complete(success=True)
|
||||
|
||||
def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    # URL-decode the URI (it is stored %-escaped for configparser safety).
    url = unquote(DB_URI)
    context.configure(url=url, target_metadata=target_metadata, transactional_ddl=True)

    with context.begin_transaction():
        # Offline runs emit SQL rather than executing it, so no progress is
        # pushed anywhere — hence the NullReporter.
        context.run_migrations(tables=tables, tester=get_tester(), progress_reporter=NullReporter())
|
||||
|
||||
def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """

    # Local-dev default: a SQLite-backed install has no migration story, so
    # bail out unless explicitly asked to generate or target a real database.
    if (isinstance(db.obj, SqliteDatabase) and
        not 'GENMIGRATE' in os.environ and
        not 'DB_URI' in os.environ):
        print 'Skipping Sqlite migration!'
        return

    progress_reporter = get_progress_reporter()
    engine = engine_from_config(config.get_section(config.config_ini_section),
                                prefix='sqlalchemy.',
                                poolclass=pool.NullPool)

    connection = engine.connect()
    context.configure(connection=connection,
                      target_metadata=target_metadata,
                      transactional_ddl=False,
                      on_version_apply=report_success)

    try:
        with context.begin_transaction():
            try:
                context.run_migrations(tables=tables, tester=get_tester(),
                                       progress_reporter=progress_reporter)
            except (CommandError, ResolutionError) as ex:
                # Only tolerate unknown-revision errors; anything else re-raises.
                if 'No such revision' not in str(ex):
                    raise

                # Without release metadata we cannot tell whether this is a
                # rolling-deploy race, so fail loudly.
                if not REGION or not GIT_HEAD:
                    raise

                from data.model.release import get_recent_releases

                # ignore revision error if we're running the previous release
                releases = list(get_recent_releases(SERVICE, REGION).offset(1).limit(1))
                if releases and releases[0].version == GIT_HEAD:
                    logger.warn('Skipping database migration because revision not found')
                else:
                    raise
    finally:
        connection.close()
|
||||
|
||||
# Entry point: alembic executes this module; pick the run mode based on how
# the alembic command was invoked (--sql => offline).
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
|
147
data/migrations/migration.sh
Executable file
147
data/migrations/migration.sh
Executable file
|
@ -0,0 +1,147 @@
|
|||
# Abort on the first failing command.
set -e

# Extract the host portion of DOCKER_HOST (tcp://HOST:PORT), falling back to
# localhost when unset (e.g. a local docker daemon).
PARSED_DOCKER_HOST=`echo $DOCKER_HOST | sed 's/tcp:\/\///' | sed 's/:.*//'`
DOCKER_IP="${PARSED_DOCKER_HOST:-127.0.0.1}"

# QUAY_OVERRIDE_CONFIG payloads pointing alembic at each test database.
MYSQL_CONFIG_OVERRIDE="{\"DB_URI\":\"mysql+pymysql://root:password@$DOCKER_IP/genschema\"}"
PERCONA_CONFIG_OVERRIDE="{\"DB_URI\":\"mysql+pymysql://root:password@$DOCKER_IP/genschema\"}"
PGSQL_CONFIG_OVERRIDE="{\"DB_URI\":\"postgresql://postgres@$DOCKER_IP/genschema\"}"
|
||||
|
||||
# Start a disposable MySQL 5.7 container and create the `genschema` database.
up_mysql() {
  # Run a SQL database on port 3306 inside of Docker.
  docker run --name mysql -p 3306:3306 -e MYSQL_ROOT_PASSWORD=password -d mysql:5.7

  # Give the server time to initialize before connecting.
  echo 'Sleeping for 25...'
  sleep 25

  # Add the database to mysql.
  docker run --rm --link mysql:mysql mysql:5.7 sh -c 'echo "create database genschema" | mysql -h"$MYSQL_PORT_3306_TCP_ADDR" -P"$MYSQL_PORT_3306_TCP_PORT" -uroot -ppassword'
}

# Tear down the MySQL container (|| true: tolerate it not running).
down_mysql() {
  docker kill mysql || true
  docker rm -v mysql || true
}
|
||||
|
||||
# Start a disposable MariaDB container and create the `genschema` database.
up_mariadb() {
  # Run a SQL database on port 3306 inside of Docker.
  docker run --name mariadb -p 3306:3306 -e MYSQL_ROOT_PASSWORD=password -d mariadb

  # Give the server time to initialize before connecting.
  echo 'Sleeping for 25...'
  sleep 25

  # Add the database to mysql.
  docker run --rm --link mariadb:mariadb mariadb sh -c 'echo "create database genschema" | mysql -h"$MARIADB_PORT_3306_TCP_ADDR" -P"$MARIADB_PORT_3306_TCP_PORT" -uroot -ppassword'
}

# Tear down the MariaDB container (|| true: tolerate it not running).
down_mariadb() {
  docker kill mariadb || true
  docker rm -v mariadb || true
}
|
||||
|
||||
# Start a disposable Percona container and create the `genschema` database.
up_percona() {
  # Run a SQL database on port 3306 inside of Docker.
  docker run --name percona -p 3306:3306 -e MYSQL_ROOT_PASSWORD=password -d percona

  # Give the server time to initialize before connecting.
  echo 'Sleeping for 25...'
  sleep 25

  # Add the database to mysql.
  docker run --rm --link percona:percona percona sh -c 'echo "create database genschema" | mysql -h $PERCONA_PORT_3306_TCP_ADDR -uroot -ppassword'
}

# Tear down the Percona container (|| true: tolerate it not running).
down_percona() {
  docker kill percona || true
  docker rm -v percona || true
}
|
||||
|
||||
# Start a disposable Postgres container, create the `genschema` database, and
# enable the pg_trgm extension required by the schema's trigram indexes.
up_postgres() {
  # Run a SQL database on port 5432 inside of Docker.
  docker run --name postgres -p 5432:5432 -d postgres

  # Sleep for 5s to get SQL get started.
  echo 'Sleeping for 5...'
  sleep 5

  # Add the database to postgres.
  docker run --rm --link postgres:postgres postgres sh -c 'echo "create database genschema" | psql -h "$POSTGRES_PORT_5432_TCP_ADDR" -p "$POSTGRES_PORT_5432_TCP_PORT" -U postgres'
  docker run --rm --link postgres:postgres postgres sh -c 'echo "CREATE EXTENSION IF NOT EXISTS pg_trgm;" | psql -h "$POSTGRES_PORT_5432_TCP_ADDR" -p "$POSTGRES_PORT_5432_TCP_PORT" -U postgres -d genschema'

}

# Tear down the Postgres container (|| true: tolerate it not running).
down_postgres() {
  docker kill postgres || true
  docker rm -v postgres || true
}
|
||||
|
||||
# Autogenerate a new migration revision against the database configured by $1,
# with $2 as the revision message.
gen_migrate() {
  # Generate a database with the schema as defined by the existing alembic model.
  QUAY_OVERRIDE_CONFIG=$1 PYTHONPATH=. alembic upgrade head


  # Generate the migration to the current model.
  QUAY_OVERRIDE_CONFIG=$1 PYTHONPATH=. alembic revision --autogenerate -m "$2"
}

# Run every migration up and then all the way back down against the database
# configured by $1 (TEST_MIGRATE=true enables the data-population testers).
test_migrate() {
  # Generate a database with the schema as defined by the existing alembic model.
  echo '> Running upgrade'
  TEST_MIGRATE=true QUAY_OVERRIDE_CONFIG=$1 PYTHONPATH=. alembic upgrade head

  # Downgrade to verify it works in both directions.
  echo '> Running downgrade'
  # One downgrade step per revision file under versions/.
  COUNT=`ls data/migrations/versions/*.py | wc -l | tr -d ' '`
  TEST_MIGRATE=true QUAY_OVERRIDE_CONFIG=$1 PYTHONPATH=. alembic downgrade "-$COUNT"
}
|
||||
|
||||
# Clean up any containers left over from a previous (possibly failed) run.
down_mysql
down_postgres
down_mariadb
down_percona

# Test (and generate, if requested) via MySQL.
echo '> Starting MySQL'
up_mysql

# Any arguments mean "autogenerate a new revision" with them as the message.
if [ ! -z "$@" ]
then
  set +e
  echo '> Generating Migration'
  gen_migrate $MYSQL_CONFIG_OVERRIDE "$@"
  set -e
fi

echo '> Testing Migration (mysql)'
set +e
test_migrate $MYSQL_CONFIG_OVERRIDE
set -e
down_mysql

# Test via Postgres.
echo '> Starting Postgres'
up_postgres

echo '> Testing Migration (postgres)'
set +e
test_migrate $PGSQL_CONFIG_OVERRIDE
set -e
down_postgres

# Test via MariaDB (MySQL-compatible: reuses the MySQL config override).
echo '> Starting MariaDB'
up_mariadb

echo '> Testing Migration (mariadb)'
set +e
test_migrate $MYSQL_CONFIG_OVERRIDE
set -e
down_mariadb

# Test via Percona.
echo '> Starting Percona'
up_percona

echo '> Testing Migration (percona)'
set +e
test_migrate $PERCONA_CONFIG_OVERRIDE
set -e
down_percona
|
101
data/migrations/progress.py
Normal file
101
data/migrations/progress.py
Normal file
|
@ -0,0 +1,101 @@
|
|||
from abc import ABCMeta, abstractmethod
|
||||
from six import add_metaclass
|
||||
from functools import partial, wraps
|
||||
|
||||
from prometheus_client import CollectorRegistry, Gauge, Counter, push_to_gateway
|
||||
|
||||
from util.abchelpers import nooper
|
||||
|
||||
|
||||
@add_metaclass(ABCMeta)
class ProgressReporter(object):
    """ Implements an interface for reporting progress with the migrations.
    """
    @abstractmethod
    def report_version_complete(self, success):
        """ Called when an entire migration is complete. """

    @abstractmethod
    def report_step_progress(self):
        """ Called when a single step in the migration has been completed. """
|
||||
|
||||
|
||||
# @nooper synthesizes no-op implementations for the abstract methods.
@nooper
class NullReporter(ProgressReporter):
    """ No-op version of the progress reporter, designed for use when no progress
        reporting endpoint is provided. """
|
||||
|
||||
|
||||
class PrometheusReporter(ProgressReporter):
    """ Progress reporter that pushes migration metrics to a Prometheus
        push gateway.

    Args:
        prom_pushgateway_addr: address of the Prometheus push gateway.
        prom_job: job name under which the metrics are pushed.
        labels: grouping-key labels attached to every push.
        total_steps_num: optional expected step count; when provided, a
            completion percentage is reported as steps complete.
    """
    def __init__(self, prom_pushgateway_addr, prom_job, labels, total_steps_num=None):
        self._total_steps_num = total_steps_num
        self._completed_steps = 0.0

        registry = CollectorRegistry()

        self._migration_completion_percent = Gauge(
            'migration_completion_percent',
            'Estimate of the completion percentage of the job',
            registry=registry,
        )
        self._migration_complete_total = Counter(
            'migration_complete_total',
            'Binary value of whether or not the job is complete',
            registry=registry,
        )
        self._migration_failed_total = Counter(
            'migration_failed_total',
            'Binary value of whether or not the job has failed',
            registry=registry,
        )
        self._migration_items_completed_total = Counter(
            'migration_items_completed_total',
            'Number of items this migration has completed',
            registry=registry,
        )

        # Bind the gateway parameters once so reporting is just self._push().
        self._push = partial(push_to_gateway,
                             prom_pushgateway_addr,
                             job=prom_job,
                             registry=registry,
                             grouping_key=labels,
                             )

    def report_version_complete(self, success=True):
        """ Record final success/failure, mark 100% complete, and push. """
        if success:
            self._migration_complete_total.inc()
        else:
            self._migration_failed_total.inc()
        self._migration_completion_percent.set(1.0)

        self._push()

    def report_step_progress(self):
        """ Record one completed step, update the completion gauge, and push. """
        self._migration_items_completed_total.inc()

        if self._total_steps_num is not None:
            self._completed_steps += 1
            # BUG FIX: the original assigned the raw float to the attribute,
            # clobbering the Gauge object after the first step; the gauge must
            # be updated in place via set().
            self._migration_completion_percent.set(self._completed_steps / self._total_steps_num)

        self._push()
|
||||
|
||||
|
||||
class ProgressWrapper(object):
    """ Proxy around a module (typically alembic's `op`) that reports one step
        of progress to the given monitor each time a proxied callable returns.
    """
    def __init__(self, delegate_module, progress_monitor):
        self._delegate_module = delegate_module
        self._progress_monitor = progress_monitor

    def __getattr__(self, attr_name):
        # Will raise proper attribute error
        delegate_attr = self._delegate_module.__dict__[attr_name]
        if not callable(delegate_attr):
            return delegate_attr

        # Wrap the callable so that each completed invocation counts as a
        # single finished migration step.
        @wraps(delegate_attr)
        def wrapped_method(*args, **kwargs):
            outcome = delegate_attr(*args, **kwargs)
            self._progress_monitor.report_step_progress()
            return outcome

        return wrapped_method
|
27
data/migrations/script.py.mako
Normal file
27
data/migrations/script.py.mako
Normal file
|
@ -0,0 +1,27 @@
|
|||
"""${message}
|
||||
|
||||
Revision ID: ${up_revision}
|
||||
Revises: ${down_revision}
|
||||
Create Date: ${create_date}
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = ${repr(up_revision)}
|
||||
down_revision = ${repr(down_revision)}
|
||||
|
||||
from alembic import op as original_op
|
||||
from progress import ProgressWrapper
|
||||
import sqlalchemy as sa
|
||||
${imports if imports else ""}
|
||||
|
||||
def upgrade(tables, tester, progress_reporter):
|
||||
op = ProgressWrapper(original_op, progress_reporter)
|
||||
|
||||
${upgrades if upgrades else "pass"}
|
||||
|
||||
|
||||
def downgrade(tables, tester, progress_reporter):
|
||||
op = ProgressWrapper(original_op, progress_reporter)
|
||||
|
||||
${downgrades if downgrades else "pass"}
|
21
data/migrations/test/test_db_config.py
Normal file
21
data/migrations/test/test_db_config.py
Normal file
|
@ -0,0 +1,21 @@
|
|||
import pytest
|
||||
from mock import patch
|
||||
|
||||
from data.runmigration import run_alembic_migration
|
||||
from alembic.script import ScriptDirectory
|
||||
from test.fixtures import *
|
||||
|
||||
@pytest.mark.parametrize('db_uri, is_valid', [
    ('postgresql://devtable:password@quay-postgres/registry_database', True),
    # A lone '%' acts as a configparser interpolation directive -> rejected.
    ('postgresql://devtable:password%25@quay-postgres/registry_database', False),
    # '%%' is the escaped form and survives interpolation -> accepted.
    ('postgresql://devtable:password%%25@quay-postgres/registry_database', True),
    ('postgresql://devtable@db:password@quay-postgres/registry_database', True),
])
def test_alembic_db_uri(db_uri, is_valid):
    """ Test if the given URI is escaped for string interpolation (Python's configparser). """
    # run_env is mocked out so no migration actually executes; only the URI
    # handling in run_alembic_migration is under test.
    with patch('alembic.script.ScriptDirectory.run_env') as m:
        if is_valid:
            run_alembic_migration(db_uri)
        else:
            with pytest.raises(ValueError):
                run_alembic_migration(db_uri)
|
132
data/migrations/tester.py
Normal file
132
data/migrations/tester.py
Normal file
|
@ -0,0 +1,132 @@
|
|||
import json
|
||||
import logging
|
||||
import uuid
|
||||
|
||||
from abc import ABCMeta, abstractmethod
|
||||
from datetime import datetime
|
||||
from six import add_metaclass
|
||||
|
||||
from alembic import op
|
||||
from sqlalchemy import text
|
||||
|
||||
from util.abchelpers import nooper
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
def escape_table_name(table_name):
    """ Returns the table name quoted as required by the active dialect.

    Postgres treats some table names (notably `user`) as reserved words, so
    they must be double-quoted; other dialects accept the bare name.
    """
    if op.get_bind().engine.name != 'postgresql':
        return table_name

    # Needed for the `user` table.
    return '"%s"' % table_name
|
||||
|
||||
|
||||
class DataTypes(object):
    """ Factory callables that produce placeholder values for each column
        type, used when inserting test data during migration testing.
    """

    @staticmethod
    def DateTime():
        return datetime.now()

    @staticmethod
    def Date():
        # NOTE(review): mirrors DateTime and returns a datetime, not a date.
        return datetime.now()

    @staticmethod
    def String():
        return 'somestringvalue'

    @staticmethod
    def Token():
        return '%s%s' % ('a' * 60, 'b' * 60)

    @staticmethod
    def UTF8Char():
        return 'some other value'

    @staticmethod
    def UUID():
        return str(uuid.uuid4())

    @staticmethod
    def JSON():
        return json.dumps(dict(foo='bar', baz='meh'))

    @staticmethod
    def Boolean():
        # Postgres has a real boolean type; MySQL-style dialects expect 0/1.
        if op.get_bind().engine.name == 'postgresql':
            return True

        return 1

    @staticmethod
    def BigInteger():
        return 21474836470

    @staticmethod
    def Integer():
        return 42

    @staticmethod
    def Constant(value):
        # Returns a zero-arg factory that always yields the given value.
        def get_value():
            return value
        return get_value

    @staticmethod
    def Foreign(table_name):
        # Returns a zero-arg factory yielding the id of some existing row in
        # the referenced table (the table must already contain data).
        def get_index():
            result = op.get_bind().execute("SELECT id FROM %s LIMIT 1" % escape_table_name(table_name))
            try:
                return list(result)[0][0]
            except IndexError:
                raise Exception('Could not find row for table %s' % table_name)
            finally:
                result.close()

        return get_index
|
||||
|
||||
|
||||
@add_metaclass(ABCMeta)
class MigrationTester(object):
    """ Implements an interface for adding testing capabilities to the
        data model migration system in Alembic.
    """
    # Exposes the test-value factories so migrations can declare column types
    # via tester.TestDataType.<Type>.
    TestDataType = DataTypes

    @abstractmethod
    def populate_table(self, table_name, fields):
        """ Called to populate a table with the given fields filled in with testing data. """

    @abstractmethod
    def populate_column(self, table_name, col_name, field_type):
        """ Called to populate a column in a table to be filled in with testing data. """
|
||||
|
||||
|
||||
# @nooper synthesizes no-op implementations for the abstract methods.
@nooper
class NoopTester(MigrationTester):
    """ No-op version of the tester, designed for production workloads. """
|
||||
|
||||
|
||||
class PopulateTestDataTester(MigrationTester):
    """ Tester implementation that inserts synthetic rows and values so that
        migrations are exercised against non-empty tables.
    """

    def populate_table(self, table_name, fields):
        # Materialize one concrete test value per (field_name, factory) pair.
        columns = {field_name: field_type() for field_name, field_type in fields}
        field_name_vars = [':' + field_name for field_name, _ in fields]

        # MySQL-style dialects need backtick-quoted column names (reserved
        # words); postgres accepts them bare.
        if op.get_bind().engine.name == 'postgresql':
            field_names = ["%s" % field_name for field_name, _ in fields]
        else:
            field_names = ["`%s`" % field_name for field_name, _ in fields]

        table_name = escape_table_name(table_name)
        # Table/column names come from migration source (trusted); the values
        # themselves are passed as bound parameters.
        query = text('INSERT INTO %s (%s) VALUES (%s)' % (table_name, ', '.join(field_names),
                                                          ', '.join(field_name_vars)))
        logger.info("Executing test query %s with values %s", query, columns.values())
        op.get_bind().execute(query, **columns)

    def populate_column(self, table_name, col_name, field_type):
        col_value = field_type()
        # Pick an arbitrary existing row in the table to receive the value.
        row_id = DataTypes.Foreign(table_name)()

        table_name = escape_table_name(table_name)
        update_text = text("UPDATE %s SET %s=:col_value where ID=:row_id" % (table_name, col_name))
        logger.info("Executing test query %s with value %s on row %s", update_text, col_value, row_id)
        op.get_bind().execute(update_text, col_value=col_value, row_id=row_id)
|
|
@ -0,0 +1,33 @@
|
|||
"""Add creation date to User table
|
||||
|
||||
Revision ID: 0cf50323c78b
|
||||
Revises: 87fbbc224f10
|
||||
Create Date: 2018-03-09 13:19:41.903196
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '0cf50323c78b'
|
||||
down_revision = '87fbbc224f10'
|
||||
|
||||
from alembic import op as original_op
|
||||
from data.migrations.progress import ProgressWrapper
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
def upgrade(tables, tester, progress_reporter):
    # Wrap alembic's op module so every DDL call reports step progress.
    op = ProgressWrapper(original_op, progress_reporter)
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('user', sa.Column('creation_date', sa.DateTime(), nullable=True))
    # ### end Alembic commands ###

    # ### population of test data ### #
    tester.populate_column('user', 'creation_date', tester.TestDataType.DateTime)
    # ### end population of test data ### #
|
||||
|
||||
|
||||
def downgrade(tables, tester, progress_reporter):
    # Wrap alembic's op module so every DDL call reports step progress.
    op = ProgressWrapper(original_op, progress_reporter)
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('user', 'creation_date')
    # ### end Alembic commands ###
|
|
@ -0,0 +1,100 @@
|
|||
"""Add Tag, TagKind and ManifestChild tables
|
||||
|
||||
Revision ID: 10f45ee2310b
|
||||
Revises: 13411de1c0ff
|
||||
Create Date: 2018-10-29 15:22:53.552216
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '10f45ee2310b'
|
||||
down_revision = '13411de1c0ff'
|
||||
|
||||
from alembic import op as original_op
|
||||
from data.migrations.progress import ProgressWrapper
|
||||
import sqlalchemy as sa
|
||||
from util.migrate import UTF8CharField
|
||||
|
||||
def upgrade(tables, tester, progress_reporter):
    # Wrap alembic's op module so every DDL call reports step progress.
    op = ProgressWrapper(original_op, progress_reporter)
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('tagkind',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('name', sa.String(length=255), nullable=False),
                    sa.PrimaryKeyConstraint('id', name=op.f('pk_tagkind'))
                    )
    op.create_index('tagkind_name', 'tagkind', ['name'], unique=True)
    op.create_table('manifestchild',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('repository_id', sa.Integer(), nullable=False),
                    sa.Column('manifest_id', sa.Integer(), nullable=False),
                    sa.Column('child_manifest_id', sa.Integer(), nullable=False),
                    sa.ForeignKeyConstraint(['child_manifest_id'], ['manifest.id'], name=op.f('fk_manifestchild_child_manifest_id_manifest')),
                    sa.ForeignKeyConstraint(['manifest_id'], ['manifest.id'], name=op.f('fk_manifestchild_manifest_id_manifest')),
                    sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], name=op.f('fk_manifestchild_repository_id_repository')),
                    sa.PrimaryKeyConstraint('id', name=op.f('pk_manifestchild'))
                    )
    op.create_index('manifestchild_child_manifest_id', 'manifestchild', ['child_manifest_id'], unique=False)
    op.create_index('manifestchild_manifest_id', 'manifestchild', ['manifest_id'], unique=False)
    op.create_index('manifestchild_manifest_id_child_manifest_id', 'manifestchild', ['manifest_id', 'child_manifest_id'], unique=True)
    op.create_index('manifestchild_repository_id', 'manifestchild', ['repository_id'], unique=False)
    op.create_index('manifestchild_repository_id_child_manifest_id', 'manifestchild', ['repository_id', 'child_manifest_id'], unique=False)
    op.create_index('manifestchild_repository_id_manifest_id', 'manifestchild', ['repository_id', 'manifest_id'], unique=False)
    op.create_index('manifestchild_repository_id_manifest_id_child_manifest_id', 'manifestchild', ['repository_id', 'manifest_id', 'child_manifest_id'], unique=False)
    op.create_table('tag',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('name', sa.String(length=255), nullable=False),
                    sa.Column('repository_id', sa.Integer(), nullable=False),
                    sa.Column('manifest_id', sa.Integer(), nullable=True),
                    sa.Column('lifetime_start_ms', sa.BigInteger(), nullable=False),
                    sa.Column('lifetime_end_ms', sa.BigInteger(), nullable=True),
                    sa.Column('hidden', sa.Boolean(), nullable=False, server_default=sa.sql.expression.false()),
                    sa.Column('reversion', sa.Boolean(), nullable=False, server_default=sa.sql.expression.false()),
                    sa.Column('tag_kind_id', sa.Integer(), nullable=False),
                    sa.Column('linked_tag_id', sa.Integer(), nullable=True),
                    sa.ForeignKeyConstraint(['linked_tag_id'], ['tag.id'], name=op.f('fk_tag_linked_tag_id_tag')),
                    sa.ForeignKeyConstraint(['manifest_id'], ['manifest.id'], name=op.f('fk_tag_manifest_id_manifest')),
                    sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], name=op.f('fk_tag_repository_id_repository')),
                    sa.ForeignKeyConstraint(['tag_kind_id'], ['tagkind.id'], name=op.f('fk_tag_tag_kind_id_tagkind')),
                    sa.PrimaryKeyConstraint('id', name=op.f('pk_tag'))
                    )
    op.create_index('tag_lifetime_end_ms', 'tag', ['lifetime_end_ms'], unique=False)
    op.create_index('tag_linked_tag_id', 'tag', ['linked_tag_id'], unique=False)
    op.create_index('tag_manifest_id', 'tag', ['manifest_id'], unique=False)
    op.create_index('tag_repository_id', 'tag', ['repository_id'], unique=False)
    op.create_index('tag_repository_id_name', 'tag', ['repository_id', 'name'], unique=False)
    op.create_index('tag_repository_id_name_hidden', 'tag', ['repository_id', 'name', 'hidden'], unique=False)
    op.create_index('tag_repository_id_name_lifetime_end_ms', 'tag', ['repository_id', 'name', 'lifetime_end_ms'], unique=True)
    op.create_index('tag_repository_id_name_tag_kind_id', 'tag', ['repository_id', 'name', 'tag_kind_id'], unique=False)
    op.create_index('tag_tag_kind_id', 'tag', ['tag_kind_id'], unique=False)
    # ### end Alembic commands ###

    # Seed the single known tag kind.
    op.bulk_insert(tables.tagkind,
                   [
                       {'name': 'tag'},
                   ])

    # ### population of test data ### #
    tester.populate_table('tag', [
        ('repository_id', tester.TestDataType.Foreign('repository')),
        ('tag_kind_id', tester.TestDataType.Foreign('tagkind')),
        ('name', tester.TestDataType.String),
        ('manifest_id', tester.TestDataType.Foreign('manifest')),
        ('lifetime_start_ms', tester.TestDataType.BigInteger),
    ])

    tester.populate_table('manifestchild', [
        ('repository_id', tester.TestDataType.Foreign('repository')),
        ('manifest_id', tester.TestDataType.Foreign('manifest')),
        ('child_manifest_id', tester.TestDataType.Foreign('manifest')),
    ])
    # ### end population of test data ### #
||||
|
||||
def downgrade(tables, tester, progress_reporter):
    # Wrap alembic's op module so every DDL call reports step progress.
    op = ProgressWrapper(original_op, progress_reporter)
    # ### commands auto generated by Alembic - please adjust! ###
    # Drop order respects the FK from tag/manifestchild to their targets.
    op.drop_table('tag')
    op.drop_table('manifestchild')
    op.drop_table('tagkind')
    # ### end Alembic commands ###
|
|
@ -0,0 +1,46 @@
|
|||
"""Remove unique from TagManifestToManifest
|
||||
|
||||
Revision ID: 13411de1c0ff
|
||||
Revises: 654e6df88b71
|
||||
Create Date: 2018-08-19 23:30:24.969549
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '13411de1c0ff'
|
||||
down_revision = '654e6df88b71'
|
||||
|
||||
from alembic import op as original_op
|
||||
from data.migrations.progress import ProgressWrapper
|
||||
import sqlalchemy as sa
|
||||
|
||||
def upgrade(tables, tester, progress_reporter):
    # Wrap alembic's op module so every DDL call reports step progress.
    op = ProgressWrapper(original_op, progress_reporter)
    # Note: Because of a restriction in MySQL, we cannot simply remove the index and re-add
    # it without the unique=False, nor can we simply alter the index. To make it work, we'd have to
    # remove the primary key on the field, so instead we simply drop the table entirely and
    # recreate it with the modified index. The backfill will re-fill this in.
    op.drop_table('tagmanifesttomanifest')

    op.create_table('tagmanifesttomanifest',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('tag_manifest_id', sa.Integer(), nullable=False),
                    sa.Column('manifest_id', sa.Integer(), nullable=False),
                    sa.Column('broken', sa.Boolean(), nullable=False, server_default=sa.sql.expression.false()),
                    sa.ForeignKeyConstraint(['manifest_id'], ['manifest.id'], name=op.f('fk_tagmanifesttomanifest_manifest_id_manifest')),
                    sa.ForeignKeyConstraint(['tag_manifest_id'], ['tagmanifest.id'], name=op.f('fk_tagmanifesttomanifest_tag_manifest_id_tagmanifest')),
                    sa.PrimaryKeyConstraint('id', name=op.f('pk_tagmanifesttomanifest'))
                    )
    op.create_index('tagmanifesttomanifest_broken', 'tagmanifesttomanifest', ['broken'], unique=False)
    # manifest_id is the index that lost its unique constraint in this revision.
    op.create_index('tagmanifesttomanifest_manifest_id', 'tagmanifesttomanifest', ['manifest_id'], unique=False)
    op.create_index('tagmanifesttomanifest_tag_manifest_id', 'tagmanifesttomanifest', ['tag_manifest_id'], unique=True)

    tester.populate_table('tagmanifesttomanifest', [
        ('manifest_id', tester.TestDataType.Foreign('manifest')),
        ('tag_manifest_id', tester.TestDataType.Foreign('tagmanifest')),
    ])
|
||||
|
||||
|
||||
def downgrade(tables, tester, progress_reporter):
    # Intentionally a no-op: re-adding the unique constraint would hit the
    # same MySQL restriction described in upgrade().
    op = ProgressWrapper(original_op, progress_reporter)
    pass
|
|
@ -0,0 +1,33 @@
|
|||
"""Add maximum build queue count setting to user table
|
||||
|
||||
Revision ID: 152bb29a1bb3
|
||||
Revises: cbc8177760d9
|
||||
Create Date: 2018-02-20 13:34:34.902415
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '152bb29a1bb3'
|
||||
down_revision = 'cbc8177760d9'
|
||||
|
||||
from alembic import op as original_op
|
||||
from data.migrations.progress import ProgressWrapper
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import mysql
|
||||
|
||||
def upgrade(tables, tester, progress_reporter):
    # Wrap alembic's op module so every DDL call reports step progress.
    op = ProgressWrapper(original_op, progress_reporter)
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('user', sa.Column('maximum_queued_builds_count', sa.Integer(), nullable=True))
    # ### end Alembic commands ###

    # ### population of test data ### #
    tester.populate_column('user', 'maximum_queued_builds_count', tester.TestDataType.Integer)
    # ### end population of test data ### #
|
||||
|
||||
|
||||
def downgrade(tables, tester, progress_reporter):
    # Wrap alembic's op module so every DDL call reports step progress.
    op = ProgressWrapper(original_op, progress_reporter)
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('user', 'maximum_queued_builds_count')
    # ### end Alembic commands ###
|
|
@ -0,0 +1,27 @@
|
|||
"""Make BlobUpload byte_count not nullable
|
||||
|
||||
Revision ID: 152edccba18c
|
||||
Revises: c91c564aad34
|
||||
Create Date: 2018-02-23 12:41:25.571835
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '152edccba18c'
|
||||
down_revision = 'c91c564aad34'
|
||||
|
||||
from alembic import op as original_op
|
||||
from data.migrations.progress import ProgressWrapper
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
def upgrade(tables, tester, progress_reporter):
    """Make `blobupload.byte_count` NOT NULL (type stays BigInteger)."""
    op = ProgressWrapper(original_op, progress_reporter)
    op.alter_column('blobupload', 'byte_count', existing_type=sa.BigInteger(),
                    nullable=False)
|
||||
|
||||
|
||||
def downgrade(tables, tester, progress_reporter):
    """Restore `blobupload.byte_count` to nullable."""
    op = ProgressWrapper(original_op, progress_reporter)
    op.alter_column('blobupload', 'byte_count', existing_type=sa.BigInteger(),
                    nullable=True)
|
|
@ -0,0 +1,49 @@
|
|||
"""Add LogEntry2 table - QUAY.IO ONLY
|
||||
|
||||
Revision ID: 1783530bee68
|
||||
Revises: 5b7503aada1b
|
||||
Create Date: 2018-05-17 16:32:28.532264
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '1783530bee68'
|
||||
down_revision = '5b7503aada1b'
|
||||
|
||||
from alembic import op as original_op
|
||||
from data.migrations.progress import ProgressWrapper
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
def upgrade(tables, tester, progress_reporter):
    """Create the `logentry2` table and its indexes.

    Note: only `kind_id` carries a foreign key; `account_id`, `performer_id`
    and `repository_id` are plain integer columns (indexed but unconstrained).
    """
    op = ProgressWrapper(original_op, progress_reporter)
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('logentry2',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('kind_id', sa.Integer(), nullable=False),
                    sa.Column('account_id', sa.Integer(), nullable=False),
                    sa.Column('performer_id', sa.Integer(), nullable=True),
                    sa.Column('repository_id', sa.Integer(), nullable=True),
                    sa.Column('datetime', sa.DateTime(), nullable=False),
                    sa.Column('ip', sa.String(length=255), nullable=True),
                    sa.Column('metadata_json', sa.Text(), nullable=False),
                    sa.ForeignKeyConstraint(['kind_id'], ['logentrykind.id'], name=op.f('fk_logentry2_kind_id_logentrykind')),
                    sa.PrimaryKeyConstraint('id', name=op.f('pk_logentry2'))
    )
    op.create_index('logentry2_account_id', 'logentry2', ['account_id'], unique=False)
    op.create_index('logentry2_account_id_datetime', 'logentry2', ['account_id', 'datetime'], unique=False)
    op.create_index('logentry2_datetime', 'logentry2', ['datetime'], unique=False)
    op.create_index('logentry2_kind_id', 'logentry2', ['kind_id'], unique=False)
    op.create_index('logentry2_performer_id', 'logentry2', ['performer_id'], unique=False)
    op.create_index('logentry2_performer_id_datetime', 'logentry2', ['performer_id', 'datetime'], unique=False)
    op.create_index('logentry2_repository_id', 'logentry2', ['repository_id'], unique=False)
    op.create_index('logentry2_repository_id_datetime', 'logentry2', ['repository_id', 'datetime'], unique=False)
    op.create_index('logentry2_repository_id_datetime_kind_id', 'logentry2', ['repository_id', 'datetime', 'kind_id'], unique=False)
    # ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade(tables, tester, progress_reporter):
    """Drop the `logentry2` table (indexes are dropped with it)."""
    op = ProgressWrapper(original_op, progress_reporter)
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('logentry2')
    # ### end Alembic commands ###
|
|
@ -0,0 +1,54 @@
|
|||
"""Add automatic disable of build triggers
|
||||
|
||||
Revision ID: 17aff2e1354e
|
||||
Revises: 61cadbacb9fc
|
||||
Create Date: 2017-10-18 15:58:03.971526
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '17aff2e1354e'
|
||||
down_revision = '61cadbacb9fc'
|
||||
|
||||
from alembic import op as original_op
|
||||
from data.migrations.progress import ProgressWrapper
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import mysql
|
||||
|
||||
def upgrade(tables, tester, progress_reporter):
    """Add successive-failure counters to build triggers and the matching
    disable reasons.

    Both counters default to 0 so existing rows remain valid.
    """
    op = ProgressWrapper(original_op, progress_reporter)
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('repositorybuildtrigger', sa.Column('successive_failure_count', sa.Integer(), server_default='0', nullable=False))
    op.add_column('repositorybuildtrigger', sa.Column('successive_internal_error_count', sa.Integer(), server_default='0', nullable=False))
    # ### end Alembic commands ###

    # Explicit ids so the enum-like rows are stable across deployments.
    op.bulk_insert(
        tables.disablereason,
        [
            {'id': 2, 'name': 'successive_build_failures'},
            {'id': 3, 'name': 'successive_build_internal_errors'},
        ],
    )

    # ### population of test data ### #
    tester.populate_column('repositorybuildtrigger', 'successive_failure_count', tester.TestDataType.Integer)
    tester.populate_column('repositorybuildtrigger', 'successive_internal_error_count', tester.TestDataType.Integer)
    # ### end population of test data ### #
|
||||
|
||||
|
||||
def downgrade(tables, tester, progress_reporter):
    """Remove the successive-failure counters and their disable reasons.

    Reverses the upgrade: drops the two counter columns from
    `repositorybuildtrigger` and deletes the `disablereason` rows the
    upgrade inserted.
    """
    op = ProgressWrapper(original_op, progress_reporter)
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('repositorybuildtrigger', 'successive_internal_error_count')
    op.drop_column('repositorybuildtrigger', 'successive_failure_count')
    # ### end Alembic commands ###

    # BUG FIX: the upgrade inserts rows named 'successive_build_internal_errors'
    # and 'successive_build_failures'; the previous code deleted rows named after
    # the *columns* ('successive_internal_error_count'/'successive_failure_count'),
    # which never exist, so the inserted reasons were left behind on downgrade.
    op.execute(tables
               .disablereason
               .delete()
               .where(tables.disablereason.c.name == op.inline_literal('successive_build_internal_errors')))

    op.execute(tables
               .disablereason
               .delete()
               .where(tables.disablereason.c.name == op.inline_literal('successive_build_failures')))
|
|
@ -0,0 +1,35 @@
|
|||
"""Add last_accessed field to User table
|
||||
|
||||
Revision ID: 224ce4c72c2f
|
||||
Revises: b547bc139ad8
|
||||
Create Date: 2018-03-12 22:44:07.070490
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '224ce4c72c2f'
|
||||
down_revision = 'b547bc139ad8'
|
||||
|
||||
from alembic import op as original_op
|
||||
from data.migrations.progress import ProgressWrapper
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
def upgrade(tables, tester, progress_reporter):
    """Add the nullable `last_accessed` timestamp to `user`, with an index."""
    op = ProgressWrapper(original_op, progress_reporter)
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('user', sa.Column('last_accessed', sa.DateTime(), nullable=True))
    op.create_index('user_last_accessed', 'user', ['last_accessed'], unique=False)
    # ### end Alembic commands ###

    # ### population of test data ### #
    tester.populate_column('user', 'last_accessed', tester.TestDataType.DateTime)
    # ### end population of test data ### #
|
||||
|
||||
|
||||
def downgrade(tables, tester, progress_reporter):
    """Drop the `last_accessed` column and its index from `user`."""
    op = ProgressWrapper(original_op, progress_reporter)
    # ### commands auto generated by Alembic - please adjust! ###
    # Drop the index first; the column drop would fail on some backends otherwise.
    op.drop_index('user_last_accessed', table_name='user')
    op.drop_column('user', 'last_accessed')
    # ### end Alembic commands ###
|
125
data/migrations/versions/34c8ef052ec9_repo_mirror_columns.py
Normal file
125
data/migrations/versions/34c8ef052ec9_repo_mirror_columns.py
Normal file
|
@ -0,0 +1,125 @@
|
|||
"""repo mirror columns
|
||||
|
||||
Revision ID: 34c8ef052ec9
|
||||
Revises: cc6778199cdb
|
||||
Create Date: 2019-10-07 13:11:20.424715
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '34c8ef052ec9'
|
||||
down_revision = 'cc6778199cdb'
|
||||
|
||||
from alembic import op
|
||||
from alembic import op as original_op
|
||||
from data.migrations.progress import ProgressWrapper
|
||||
from datetime import datetime
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import mysql
|
||||
from peewee import ForeignKeyField, DateTimeField, BooleanField
|
||||
from data.database import (BaseModel, RepoMirrorType, RepoMirrorStatus, RepoMirrorRule, uuid_generator,
|
||||
QuayUserField, Repository, IntegerField, JSONField)
|
||||
from data.fields import EnumField as ClientEnumField, CharField, EncryptedCharField
|
||||
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
BATCH_SIZE = 10
|
||||
|
||||
|
||||
# Original model
|
||||
class RepoMirrorConfig(BaseModel):
    """
    Represents a repository to be mirrored and any additional configuration
    required to perform the mirroring.
    """
    # NOTE(review): this appears to be a migration-local snapshot of the model
    # (see the "# Original model" marker above) — confirm before reusing elsewhere.
    repository = ForeignKeyField(Repository, index=True, unique=True, backref='mirror')
    creation_date = DateTimeField(default=datetime.utcnow)
    is_enabled = BooleanField(default=True)

    # Mirror Configuration
    mirror_type = ClientEnumField(RepoMirrorType, default=RepoMirrorType.PULL)
    internal_robot = QuayUserField(allows_robots=True, null=True, backref='mirrorpullrobot',
                                   robot_null_delete=True)
    external_reference = CharField()
    external_registry = CharField()
    external_namespace = CharField()
    external_repository = CharField()
    external_registry_username = EncryptedCharField(max_length=2048, null=True)
    external_registry_password = EncryptedCharField(max_length=2048, null=True)
    external_registry_config = JSONField(default={})

    # Worker Queuing
    sync_interval = IntegerField()  # seconds between syncs
    sync_start_date = DateTimeField(null=True)  # next start time
    sync_expiration_date = DateTimeField(null=True)  # max duration
    sync_retries_remaining = IntegerField(default=3)
    sync_status = ClientEnumField(RepoMirrorStatus, default=RepoMirrorStatus.NEVER_RUN)
    sync_transaction_id = CharField(default=uuid_generator, max_length=36)

    # Tag-Matching Rules
    root_rule = ForeignKeyField(RepoMirrorRule)
|
||||
|
||||
|
||||
def _iterate(model_class, clause):
    """Yield rows matching `clause` in batches of BATCH_SIZE.

    The same query is re-issued until it returns no rows, so the caller
    must mutate each yielded row so that it stops matching `clause`;
    otherwise this loops forever.
    """
    while True:
        batch = list(model_class.select().where(clause).limit(BATCH_SIZE))
        if not batch:
            return
        for row in batch:
            yield row
|
||||
|
||||
|
||||
def upgrade(tables, tester, progress_reporter):
    """Collapse registry/namespace/repository into one `external_reference` column."""
    op = ProgressWrapper(original_op, progress_reporter)

    logger.info('Migrating to external_reference from existing columns')

    # Added nullable first so existing rows can be backfilled below.
    op.add_column('repomirrorconfig', sa.Column('external_reference', sa.Text(), nullable=True))

    # Backfill: external_reference = "<registry>/<namespace>/<repository>".
    for repo_mirror in _iterate(RepoMirrorConfig, (RepoMirrorConfig.external_reference >> None)):
        repo = '%s/%s/%s' % (repo_mirror.external_registry, repo_mirror.external_namespace, repo_mirror.external_repository)
        logger.info('migrating %s' % repo)
        repo_mirror.external_reference = repo
        repo_mirror.save()

    op.drop_column('repomirrorconfig', 'external_registry')
    op.drop_column('repomirrorconfig', 'external_namespace')
    op.drop_column('repomirrorconfig', 'external_repository')

    # Now that every row is populated, enforce NOT NULL.
    op.alter_column('repomirrorconfig', 'external_reference', nullable=False, existing_type=sa.Text())


    tester.populate_column('repomirrorconfig', 'external_reference', tester.TestDataType.String)
|
||||
|
||||
|
||||
def downgrade(tables, tester, progress_reporter):
    """Split `external_reference` back into registry/namespace/repository columns."""
    op = ProgressWrapper(original_op, progress_reporter)

    '''
    This will downgrade existing data but may not exactly match previous data structure. If the
    external_reference does not have three parts (registry, namespace, repository) then a failed
    value is inserted.
    '''

    # Added nullable first so existing rows can be backfilled below.
    op.add_column('repomirrorconfig', sa.Column('external_registry', sa.String(length=255), nullable=True))
    op.add_column('repomirrorconfig', sa.Column('external_namespace', sa.String(length=255), nullable=True))
    op.add_column('repomirrorconfig', sa.Column('external_repository', sa.String(length=255), nullable=True))

    logger.info('Restoring columns from external_reference')
    for repo_mirror in _iterate(RepoMirrorConfig, (RepoMirrorConfig.external_registry >> None)):
        logger.info('Restoring %s' % repo_mirror.external_reference)
        # maxsplit=2 keeps any extra '/' inside the repository part.
        parts = repo_mirror.external_reference.split('/', 2)
        repo_mirror.external_registry = parts[0] if len(parts) >= 1 else 'DOWNGRADE-FAILED'
        repo_mirror.external_namespace = parts[1] if len(parts) >= 2 else 'DOWNGRADE-FAILED'
        repo_mirror.external_repository = parts[2] if len(parts) >= 3 else 'DOWNGRADE-FAILED'
        repo_mirror.save()

    op.drop_column('repomirrorconfig', 'external_reference')

    # Enforce NOT NULL once every row has been backfilled.
    op.alter_column('repomirrorconfig', 'external_registry', nullable=False, existing_type=sa.String(length=255))
    op.alter_column('repomirrorconfig', 'external_namespace', nullable=False, existing_type=sa.String(length=255))
    op.alter_column('repomirrorconfig', 'external_repository', nullable=False, existing_type=sa.String(length=255))
|
|
@ -0,0 +1,63 @@
|
|||
"""Add severity and media_type to global messages
|
||||
|
||||
Revision ID: 3e8cc74a1e7b
|
||||
Revises: fc47c1ec019f
|
||||
Create Date: 2017-01-17 16:22:28.584237
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '3e8cc74a1e7b'
|
||||
down_revision = 'fc47c1ec019f'
|
||||
|
||||
from alembic import op as original_op
|
||||
from data.migrations.progress import ProgressWrapper
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import mysql
|
||||
|
||||
def upgrade(tables, tester, progress_reporter):
    """Add `severity` and `media_type_id` to `messages`, plus the markdown media type."""
    op = ProgressWrapper(original_op, progress_reporter)
    # ### commands auto generated by Alembic - please adjust! ###
    # server_default='1' / 'info' keep existing rows valid under NOT NULL.
    op.add_column('messages', sa.Column('media_type_id', sa.Integer(), nullable=False, server_default='1'))
    op.add_column('messages', sa.Column('severity', sa.String(length=255), nullable=False, server_default='info'))
    op.alter_column('messages', 'uuid',
                    existing_type=mysql.VARCHAR(length=36),
                    server_default='',
                    nullable=False)
    op.create_index('messages_media_type_id', 'messages', ['media_type_id'], unique=False)
    op.create_index('messages_severity', 'messages', ['severity'], unique=False)
    op.create_index('messages_uuid', 'messages', ['uuid'], unique=False)
    op.create_foreign_key(op.f('fk_messages_media_type_id_mediatype'), 'messages', 'mediatype', ['media_type_id'], ['id'])
    # ### end Alembic commands ###

    op.bulk_insert(tables.mediatype,
                   [
                       {'name': 'text/markdown'},
                   ])

    # ### population of test data ### #
    tester.populate_column('messages', 'media_type_id', tester.TestDataType.Foreign('mediatype'))
    tester.populate_column('messages', 'severity', lambda: 'info')
    tester.populate_column('messages', 'uuid', tester.TestDataType.UUID)
    # ### end population of test data ### #
|
||||
|
||||
|
||||
def downgrade(tables, tester, progress_reporter):
    """Remove `severity`/`media_type_id` from `messages` and the markdown media type."""
    op = ProgressWrapper(original_op, progress_reporter)
    # ### commands auto generated by Alembic - please adjust! ###
    # FK must go before its index/column can be dropped.
    op.drop_constraint(op.f('fk_messages_media_type_id_mediatype'), 'messages', type_='foreignkey')
    op.drop_index('messages_uuid', table_name='messages')
    op.drop_index('messages_severity', table_name='messages')
    op.drop_index('messages_media_type_id', table_name='messages')
    op.alter_column('messages', 'uuid',
                    existing_type=mysql.VARCHAR(length=36),
                    nullable=True)
    op.drop_column('messages', 'severity')
    op.drop_column('messages', 'media_type_id')
    # ### end Alembic commands ###

    op.execute(tables
               .mediatype
               .delete()
               .where(tables.
                      mediatype.c.name == op.inline_literal('text/markdown')))
|
|
@ -0,0 +1,30 @@
|
|||
"""add_notification_type
|
||||
|
||||
Revision ID: 45fd8b9869d4
|
||||
Revises: 94836b099894
|
||||
Create Date: 2016-12-01 12:02:19.724528
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '45fd8b9869d4'
|
||||
down_revision = '94836b099894'
|
||||
|
||||
from alembic import op as original_op
|
||||
from data.migrations.progress import ProgressWrapper
|
||||
|
||||
def upgrade(tables, tester, progress_reporter):
    """Insert the 'build_cancelled' notification kind."""
    op = ProgressWrapper(original_op, progress_reporter)
    op.bulk_insert(tables.notificationkind,
                   [
                       {'name': 'build_cancelled'},
                   ])
|
||||
|
||||
|
||||
def downgrade(tables, tester, progress_reporter):
    """Delete the 'build_cancelled' notification kind."""
    op = ProgressWrapper(original_op, progress_reporter)
    op.execute(tables
               .notificationkind
               .delete()
               .where(tables.
                      notificationkind.c.name == op.inline_literal('build_cancelled')))
|
|
@ -0,0 +1,27 @@
|
|||
"""Add index on logs_archived on repositorybuild
|
||||
|
||||
Revision ID: 481623ba00ba
|
||||
Revises: b9045731c4de
|
||||
Create Date: 2019-02-15 16:09:47.326805
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '481623ba00ba'
|
||||
down_revision = 'b9045731c4de'
|
||||
|
||||
from alembic import op as original_op
|
||||
from data.migrations.progress import ProgressWrapper
|
||||
|
||||
def upgrade(tables, tester, progress_reporter):
    """Index `repositorybuild.logs_archived` (used to find unarchived builds)."""
    op = ProgressWrapper(original_op, progress_reporter)
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_index('repositorybuild_logs_archived', 'repositorybuild', ['logs_archived'], unique=False)
    # ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade(tables, tester, progress_reporter):
    """Drop the `logs_archived` index from `repositorybuild`."""
    op = ProgressWrapper(original_op, progress_reporter)
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index('repositorybuild_logs_archived', table_name='repositorybuild')
    # ### end Alembic commands ###
|
144
data/migrations/versions/5248ddf35167_repository_mirror.py
Normal file
144
data/migrations/versions/5248ddf35167_repository_mirror.py
Normal file
|
@ -0,0 +1,144 @@
|
|||
"""Repository Mirror
|
||||
|
||||
Revision ID: 5248ddf35167
|
||||
Revises: b918abdbee43
|
||||
Create Date: 2019-06-25 16:22:36.310532
|
||||
|
||||
"""
|
||||
|
||||
revision = '5248ddf35167'
|
||||
down_revision = 'b918abdbee43'
|
||||
|
||||
from alembic import op as original_op
|
||||
from data.migrations.progress import ProgressWrapper
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import mysql
|
||||
|
||||
def upgrade(tables, tester, progress_reporter):
    """Create the repository-mirror schema.

    Adds `repomirrorrule` and `repomirrorconfig`, a `state` column on
    `repository`, and the log-entry kinds emitted by mirroring.
    """
    op = ProgressWrapper(original_op, progress_reporter)
    # Rules form a binary tree: left/right children are self-references.
    op.create_table('repomirrorrule',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('uuid', sa.String(length=36), nullable=False),
                    sa.Column('repository_id', sa.Integer(), nullable=False),
                    sa.Column('creation_date', sa.DateTime(), nullable=False),
                    sa.Column('rule_type', sa.Integer(), nullable=False),
                    sa.Column('rule_value', sa.Text(), nullable=False),
                    sa.Column('left_child_id', sa.Integer(), nullable=True),
                    sa.Column('right_child_id', sa.Integer(), nullable=True),
                    sa.ForeignKeyConstraint(['left_child_id'], ['repomirrorrule.id'], name=op.f('fk_repomirrorrule_left_child_id_repomirrorrule')),
                    sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], name=op.f('fk_repomirrorrule_repository_id_repository')),
                    sa.ForeignKeyConstraint(['right_child_id'], ['repomirrorrule.id'], name=op.f('fk_repomirrorrule_right_child_id_repomirrorrule')),
                    sa.PrimaryKeyConstraint('id', name=op.f('pk_repomirrorrule')))
    op.create_index('repomirrorrule_left_child_id', 'repomirrorrule', ['left_child_id'], unique=False)
    op.create_index('repomirrorrule_repository_id', 'repomirrorrule', ['repository_id'], unique=False)
    op.create_index('repomirrorrule_right_child_id', 'repomirrorrule', ['right_child_id'], unique=False)
    op.create_index('repomirrorrule_rule_type', 'repomirrorrule', ['rule_type'], unique=False)
    op.create_index('repomirrorrule_uuid', 'repomirrorrule', ['uuid'], unique=True)

    # One mirror config per repository (repository_id index below is unique).
    op.create_table('repomirrorconfig',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('repository_id', sa.Integer(), nullable=False),
                    sa.Column('creation_date', sa.DateTime(), nullable=False),
                    sa.Column('is_enabled', sa.Boolean(), nullable=False),
                    sa.Column('mirror_type', sa.Integer(), nullable=False),
                    sa.Column('internal_robot_id', sa.Integer(), nullable=False),
                    sa.Column('external_registry', sa.String(length=255), nullable=False),
                    sa.Column('external_namespace', sa.String(length=255), nullable=False),
                    sa.Column('external_repository', sa.String(length=255), nullable=False),
                    sa.Column('external_registry_username', sa.String(length=2048), nullable=True),
                    sa.Column('external_registry_password', sa.String(length=2048), nullable=True),
                    sa.Column('external_registry_config', sa.Text(), nullable=False),
                    sa.Column('sync_interval', sa.Integer(), nullable=False, server_default='60'),
                    sa.Column('sync_start_date', sa.DateTime(), nullable=True),
                    sa.Column('sync_expiration_date', sa.DateTime(), nullable=True),
                    sa.Column('sync_retries_remaining', sa.Integer(), nullable=False, server_default='3'),
                    sa.Column('sync_status', sa.Integer(), nullable=False),
                    sa.Column('sync_transaction_id', sa.String(length=36), nullable=True),
                    sa.Column('root_rule_id', sa.Integer(), nullable=False),
                    sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], name=op.f('fk_repomirrorconfig_repository_id_repository')),
                    sa.ForeignKeyConstraint(['root_rule_id'], ['repomirrorrule.id'], name=op.f('fk_repomirrorconfig_root_rule_id_repomirrorrule')),
                    sa.ForeignKeyConstraint(['internal_robot_id'], ['user.id'], name=op.f('fk_repomirrorconfig_internal_robot_id_user')),
                    sa.PrimaryKeyConstraint('id', name=op.f('pk_repomirrorconfig'))
    )
    op.create_index('repomirrorconfig_mirror_type', 'repomirrorconfig', ['mirror_type'], unique=False)
    op.create_index('repomirrorconfig_repository_id', 'repomirrorconfig', ['repository_id'], unique=True)
    op.create_index('repomirrorconfig_root_rule_id', 'repomirrorconfig', ['root_rule_id'], unique=False)
    op.create_index('repomirrorconfig_sync_status', 'repomirrorconfig', ['sync_status'], unique=False)
    op.create_index('repomirrorconfig_sync_transaction_id', 'repomirrorconfig', ['sync_transaction_id'], unique=False)
    op.create_index('repomirrorconfig_internal_robot_id', 'repomirrorconfig', ['internal_robot_id'], unique=False)

    # server_default='0' keeps existing repositories in the default state.
    op.add_column(u'repository', sa.Column('state', sa.Integer(), nullable=False, server_default='0'))
    op.create_index('repository_state', 'repository', ['state'], unique=False)

    op.bulk_insert(tables.logentrykind,
                   [
                       {'name': 'repo_mirror_enabled'},
                       {'name': 'repo_mirror_disabled'},
                       {'name': 'repo_mirror_config_changed'},
                       {'name': 'repo_mirror_sync_started'},
                       {'name': 'repo_mirror_sync_failed'},
                       {'name': 'repo_mirror_sync_success'},
                       {'name': 'repo_mirror_sync_now_requested'},
                       {'name': 'repo_mirror_sync_tag_success'},
                       {'name': 'repo_mirror_sync_tag_failed'},
                       {'name': 'repo_mirror_sync_test_success'},
                       {'name': 'repo_mirror_sync_test_failed'},
                       {'name': 'repo_mirror_sync_test_started'},
                       {'name': 'change_repo_state'}
                   ])


    tester.populate_table('repomirrorrule', [
        ('uuid', tester.TestDataType.String),
        ('repository_id', tester.TestDataType.Foreign('repository')),
        ('creation_date', tester.TestDataType.DateTime),
        ('rule_type', tester.TestDataType.Integer),
        ('rule_value', tester.TestDataType.String),
    ])

    tester.populate_table('repomirrorconfig', [
        ('repository_id', tester.TestDataType.Foreign('repository')),
        ('creation_date', tester.TestDataType.DateTime),
        ('is_enabled', tester.TestDataType.Boolean),
        ('mirror_type', tester.TestDataType.Constant(1)),
        ('internal_robot_id', tester.TestDataType.Foreign('user')),
        ('external_registry', tester.TestDataType.String),
        ('external_namespace', tester.TestDataType.String),
        ('external_repository', tester.TestDataType.String),
        ('external_registry_username', tester.TestDataType.String),
        ('external_registry_password', tester.TestDataType.String),
        ('external_registry_config', tester.TestDataType.JSON),
        ('sync_start_date', tester.TestDataType.DateTime),
        ('sync_expiration_date', tester.TestDataType.DateTime),
        ('sync_retries_remaining', tester.TestDataType.Integer),
        ('sync_status', tester.TestDataType.Constant(0)),
        ('sync_transaction_id', tester.TestDataType.String),
        ('root_rule_id', tester.TestDataType.Foreign('repomirrorrule')),
    ])
|
||||
|
||||
|
||||
def downgrade(tables, tester, progress_reporter):
    """Remove the repository-mirror schema and its log-entry kinds."""
    op = ProgressWrapper(original_op, progress_reporter)
    op.drop_column(u'repository', 'state')

    # Config references rule, so it must be dropped first.
    op.drop_table('repomirrorconfig')

    op.drop_table('repomirrorrule')

    for logentrykind in [
            'repo_mirror_enabled',
            'repo_mirror_disabled',
            'repo_mirror_config_changed',
            'repo_mirror_sync_started',
            'repo_mirror_sync_failed',
            'repo_mirror_sync_success',
            'repo_mirror_sync_now_requested',
            'repo_mirror_sync_tag_success',
            'repo_mirror_sync_tag_failed',
            'repo_mirror_sync_test_success',
            'repo_mirror_sync_test_failed',
            'repo_mirror_sync_test_started',
            'change_repo_state'
    ]:
        op.execute(tables.logentrykind.delete()
                   .where(tables.logentrykind.c.name == op.inline_literal(logentrykind)))
|
|
@ -0,0 +1,63 @@
|
|||
"""Remove reference to subdir
|
||||
|
||||
Revision ID: 53e2ac668296
|
||||
Revises: ed01e313d3cb
|
||||
Create Date: 2017-03-28 15:01:31.073382
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
import json
|
||||
|
||||
import logging
|
||||
from alembic.script.revision import RevisionError
|
||||
from alembic.util import CommandError
|
||||
from alembic import op as original_op
|
||||
from data.migrations.progress import ProgressWrapper
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import mysql
|
||||
|
||||
revision = '53e2ac668296'
|
||||
down_revision = 'ed01e313d3cb'
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def run_migration(migrate_function, progress_reporter):
    """Apply `migrate_function` to every repositorybuildtrigger config blob.

    Loads each trigger's JSON `config`, transforms it with
    `migrate_function`, and writes the result back. Failures on individual
    rows are logged and skipped so one bad trigger does not abort the
    migration.
    """
    op = ProgressWrapper(original_op, progress_reporter)
    conn = op.get_bind()
    triggers = conn.execute("SELECT id, config FROM repositorybuildtrigger")
    for trigger in triggers:
        config = json.dumps(migrate_function(json.loads(trigger[1])))
        try:
            conn.execute("UPDATE repositorybuildtrigger SET config=%s WHERE id=%s", config, trigger[0])
        except (RevisionError, CommandError) as e:
            # BUG FIX: the format string previously had one placeholder but was
            # given two arguments, which raised a logging formatting error and
            # hid the exception instead of reporting it.
            log.warning("Failed to update build trigger %s with exception: %s", trigger[0], e)
|
||||
|
||||
|
||||
def upgrade(tables, tester, progress_reporter):
    """Strip the deprecated 'subdir' key from every build-trigger config."""
    run_migration(delete_subdir, progress_reporter)
|
||||
|
||||
|
||||
def downgrade(tables, tester, progress_reporter):
    """Re-add 'subdir' (copied from 'context') to every build-trigger config."""
    run_migration(add_subdir, progress_reporter)
|
||||
|
||||
|
||||
def delete_subdir(config):
    """Remove the legacy 'subdir' key from a trigger config, if present.

    Mutates and returns `config`; falsy configs (None, {}) pass through
    unchanged.
    """
    if not config:
        return config

    config.pop('subdir', None)
    return config
|
||||
|
||||
|
||||
def add_subdir(config):
    """Restore the legacy 'subdir' key by copying it from 'context'.

    Mutates and returns `config`; falsy configs and configs without a
    'context' key pass through unchanged.
    """
    if not config:
        return config

    try:
        config['subdir'] = config['context']
    except KeyError:
        pass
    return config
|
|
@ -0,0 +1,49 @@
|
|||
"""Add NamespaceGeoRestriction table
|
||||
|
||||
Revision ID: 54492a68a3cf
|
||||
Revises: c00a1f15968b
|
||||
Create Date: 2018-12-05 15:12:14.201116
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '54492a68a3cf'
|
||||
down_revision = 'c00a1f15968b'
|
||||
|
||||
from alembic import op as original_op
|
||||
from data.migrations.progress import ProgressWrapper
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import mysql
|
||||
|
||||
def upgrade(tables, tester, progress_reporter):
    """Create the `namespacegeorestriction` table and its indexes.

    A namespace may have many restrictions, but at most one per ISO region
    code (the composite index below is unique).
    """
    op = ProgressWrapper(original_op, progress_reporter)
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('namespacegeorestriction',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('namespace_id', sa.Integer(), nullable=False),
                    sa.Column('added', sa.DateTime(), nullable=False),
                    sa.Column('description', sa.String(length=255), nullable=False),
                    sa.Column('unstructured_json', sa.Text(), nullable=False),
                    sa.Column('restricted_region_iso_code', sa.String(length=255), nullable=False),
                    sa.ForeignKeyConstraint(['namespace_id'], ['user.id'], name=op.f('fk_namespacegeorestriction_namespace_id_user')),
                    sa.PrimaryKeyConstraint('id', name=op.f('pk_namespacegeorestriction'))
    )
    op.create_index('namespacegeorestriction_namespace_id', 'namespacegeorestriction', ['namespace_id'], unique=False)
    op.create_index('namespacegeorestriction_namespace_id_restricted_region_iso_code', 'namespacegeorestriction', ['namespace_id', 'restricted_region_iso_code'], unique=True)
    op.create_index('namespacegeorestriction_restricted_region_iso_code', 'namespacegeorestriction', ['restricted_region_iso_code'], unique=False)
    # ### end Alembic commands ###

    tester.populate_table('namespacegeorestriction', [
        ('namespace_id', tester.TestDataType.Foreign('user')),
        ('added', tester.TestDataType.DateTime),
        ('description', tester.TestDataType.String),
        ('unstructured_json', tester.TestDataType.JSON),
        ('restricted_region_iso_code', tester.TestDataType.String),
    ])
|
||||
|
||||
|
||||
def downgrade(tables, tester, progress_reporter):
    """Drop the `namespacegeorestriction` table (indexes go with it)."""
    op = ProgressWrapper(original_op, progress_reporter)
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('namespacegeorestriction')
    # ### end Alembic commands ###
|
26
data/migrations/versions/5b7503aada1b_cleanup_old_robots.py
Normal file
26
data/migrations/versions/5b7503aada1b_cleanup_old_robots.py
Normal file
|
@ -0,0 +1,26 @@
|
|||
"""Cleanup old robots
|
||||
|
||||
Revision ID: 5b7503aada1b
|
||||
Revises: 224ce4c72c2f
|
||||
Create Date: 2018-05-09 17:18:52.230504
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '5b7503aada1b'
|
||||
down_revision = '224ce4c72c2f'
|
||||
|
||||
from alembic import op as original_op
|
||||
from data.migrations.progress import ProgressWrapper
|
||||
import sqlalchemy as sa
|
||||
|
||||
from util.migrate.cleanup_old_robots import cleanup_old_robots
|
||||
|
||||
def upgrade(tables, tester, progress_reporter):
    """Run the one-off data cleanup of old robot accounts (no schema change)."""
    op = ProgressWrapper(original_op, progress_reporter)
    cleanup_old_robots()
|
||||
|
||||
def downgrade(tables, tester, progress_reporter):
    """Data cleanup cannot be reversed; intentionally a no-op."""
    op = ProgressWrapper(original_op, progress_reporter)
    # Nothing to do.
    pass
|
|
@ -0,0 +1,170 @@
|
|||
"""Remove 'oci' tables not used by CNR. The rest will be migrated and renamed.
|
||||
|
||||
Revision ID: 5cbbfc95bac7
|
||||
Revises: 1783530bee68
|
||||
Create Date: 2018-05-23 17:28:40.114433
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '5cbbfc95bac7'
|
||||
down_revision = '1783530bee68'
|
||||
|
||||
from alembic import op as original_op
|
||||
from data.migrations.progress import ProgressWrapper
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import mysql
|
||||
from util.migrate import UTF8LongText, UTF8CharField
|
||||
|
||||
def upgrade(tables, tester, progress_reporter):
    """Drop the 'oci'-era tables that are not used by CNR.

    The drop order is significant: referencing (child) tables are removed
    before the tables they point at.
    """
    wrapped = ProgressWrapper(original_op, progress_reporter)
    # ### commands auto generated by Alembic - please adjust! ###
    for obsolete_table in ('derivedimage',
                           'manifestlabel',
                           'blobplacementlocationpreference',
                           'blobuploading',
                           'bittorrentpieces',
                           'manifestlayerdockerv1',
                           'manifestlayerscan',
                           'manifestlayer'):
        wrapped.drop_table(obsolete_table)
    # ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade(tables, tester, progress_reporter):
    """Recreate the OCI-era tables removed by upgrade().

    Only the schema is restored (tables, FKs, indexes); the dropped data is
    not recoverable.  Creation order is parent-first so foreign keys resolve.
    """
    op = ProgressWrapper(original_op, progress_reporter)
    # ### commands auto generated by Alembic - please adjust! ###
    # Rows mapping a manifest to its layers, each backed by a blob.
    op.create_table(
        'manifestlayer',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('blob_id', sa.Integer(), nullable=False),
        sa.Column('manifest_id', sa.Integer(), nullable=False),
        sa.Column('manifest_index', sa.BigInteger(), nullable=False),
        sa.Column('metadata_json', UTF8LongText, nullable=False),
        sa.ForeignKeyConstraint(['blob_id'], ['blob.id'], name=op.f('fk_manifestlayer_blob_id_blob')),
        sa.ForeignKeyConstraint(['manifest_id'], ['manifest.id'], name=op.f('fk_manifestlayer_manifest_id_manifest')),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_manifestlayer'))
    )
    op.create_index('manifestlayer_manifest_index', 'manifestlayer', ['manifest_index'], unique=False)
    op.create_index('manifestlayer_manifest_id_manifest_index', 'manifestlayer', ['manifest_id', 'manifest_index'], unique=True)
    op.create_index('manifestlayer_manifest_id', 'manifestlayer', ['manifest_id'], unique=False)
    op.create_index('manifestlayer_blob_id', 'manifestlayer', ['blob_id'], unique=False)

    # Per-layer scan state; references manifestlayer created above.
    op.create_table(
        'manifestlayerscan',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('layer_id', sa.Integer(), nullable=False),
        sa.Column('scannable', sa.Boolean(), nullable=False),
        sa.Column('scanned_by', UTF8CharField(length=255), nullable=False),
        sa.ForeignKeyConstraint(['layer_id'], ['manifestlayer.id'], name=op.f('fk_manifestlayerscan_layer_id_manifestlayer')),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_manifestlayerscan'))
    )

    op.create_index('manifestlayerscan_layer_id', 'manifestlayerscan', ['layer_id'], unique=True)

    # BitTorrent piece hashes keyed by blob and piece length.
    op.create_table(
        'bittorrentpieces',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('blob_id', sa.Integer(), nullable=False),
        sa.Column('pieces', UTF8LongText, nullable=False),
        sa.Column('piece_length', sa.BigInteger(), nullable=False),
        sa.ForeignKeyConstraint(['blob_id'], ['blob.id'], name=op.f('fk_bittorrentpieces_blob_id_blob')),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_bittorrentpieces'))
    )

    op.create_index('bittorrentpieces_blob_id_piece_length', 'bittorrentpieces', ['blob_id', 'piece_length'], unique=True)
    op.create_index('bittorrentpieces_blob_id', 'bittorrentpieces', ['blob_id'], unique=False)

    # In-progress blob uploads, including chunk and hash state.
    op.create_table(
        'blobuploading',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('uuid', sa.String(length=255), nullable=False),
        sa.Column('created', sa.DateTime(), nullable=False),
        sa.Column('repository_id', sa.Integer(), nullable=False),
        sa.Column('location_id', sa.Integer(), nullable=False),
        sa.Column('byte_count', sa.BigInteger(), nullable=False),
        sa.Column('uncompressed_byte_count', sa.BigInteger(), nullable=True),
        sa.Column('chunk_count', sa.BigInteger(), nullable=False),
        sa.Column('storage_metadata', UTF8LongText, nullable=True),
        sa.Column('sha_state', UTF8LongText, nullable=True),
        sa.Column('piece_sha_state', UTF8LongText, nullable=True),
        sa.Column('piece_hashes', UTF8LongText, nullable=True),
        sa.ForeignKeyConstraint(['location_id'], ['blobplacementlocation.id'], name=op.f('fk_blobuploading_location_id_blobplacementlocation')),
        sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], name=op.f('fk_blobuploading_repository_id_repository')),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_blobuploading'))
    )

    op.create_index('blobuploading_uuid', 'blobuploading', ['uuid'], unique=True)
    op.create_index('blobuploading_repository_id_uuid', 'blobuploading', ['repository_id', 'uuid'], unique=True)
    op.create_index('blobuploading_repository_id', 'blobuploading', ['repository_id'], unique=False)
    op.create_index('blobuploading_location_id', 'blobuploading', ['location_id'], unique=False)
    op.create_index('blobuploading_created', 'blobuploading', ['created'], unique=False)

    # Docker v1 compatibility metadata attached to a manifest layer.
    op.create_table(
        'manifestlayerdockerv1',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('manifest_layer_id', sa.Integer(), nullable=False),
        sa.Column('image_id', UTF8CharField(length=255), nullable=False),
        sa.Column('checksum', UTF8CharField(length=255), nullable=False),
        sa.Column('compat_json', UTF8LongText, nullable=False),
        sa.ForeignKeyConstraint(['manifest_layer_id'], ['manifestlayer.id'], name=op.f('fk_manifestlayerdockerv1_manifest_layer_id_manifestlayer')),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_manifestlayerdockerv1'))
    )

    op.create_index('manifestlayerdockerv1_manifest_layer_id', 'manifestlayerdockerv1', ['manifest_layer_id'], unique=False)
    op.create_index('manifestlayerdockerv1_image_id', 'manifestlayerdockerv1', ['image_id'], unique=False)

    # Labels attached to a manifest within a repository.
    op.create_table(
        'manifestlabel',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('repository_id', sa.Integer(), nullable=False),
        sa.Column('annotated_id', sa.Integer(), nullable=False),
        sa.Column('label_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['annotated_id'], ['manifest.id'], name=op.f('fk_manifestlabel_annotated_id_manifest')),
        sa.ForeignKeyConstraint(['label_id'], ['label.id'], name=op.f('fk_manifestlabel_label_id_label')),
        sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], name=op.f('fk_manifestlabel_repository_id_repository')),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_manifestlabel'))
    )

    op.create_index('manifestlabel_repository_id_annotated_id_label_id', 'manifestlabel', ['repository_id', 'annotated_id', 'label_id'], unique=True)
    op.create_index('manifestlabel_repository_id', 'manifestlabel', ['repository_id'], unique=False)
    op.create_index('manifestlabel_label_id', 'manifestlabel', ['label_id'], unique=False)
    op.create_index('manifestlabel_annotated_id', 'manifestlabel', ['annotated_id'], unique=False)

    # Per-user blob placement location preferences.
    # NOTE(review): the FK name here is abbreviated ('fk_blobplacementlocpref_locid_...')
    # unlike the other constraints — presumably to fit an identifier-length limit; verify.
    op.create_table(
        'blobplacementlocationpreference',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('location_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['location_id'], ['blobplacementlocation.id'], name=op.f('fk_blobplacementlocpref_locid_blobplacementlocation')),
        sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_blobplacementlocationpreference_user_id_user')),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_blobplacementlocationpreference'))
    )
    op.create_index('blobplacementlocationpreference_user_id', 'blobplacementlocationpreference', ['user_id'], unique=False)
    op.create_index('blobplacementlocationpreference_location_id', 'blobplacementlocationpreference', ['location_id'], unique=False)

    # Images derived from a source manifest, stored as a blob plus optional signature blob.
    op.create_table(
        'derivedimage',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('uuid', sa.String(length=255), nullable=False),
        sa.Column('source_manifest_id', sa.Integer(), nullable=False),
        sa.Column('derived_manifest_json', UTF8LongText, nullable=False),
        sa.Column('media_type_id', sa.Integer(), nullable=False),
        sa.Column('blob_id', sa.Integer(), nullable=False),
        sa.Column('uniqueness_hash', sa.String(length=255), nullable=False),
        sa.Column('signature_blob_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['blob_id'], ['blob.id'], name=op.f('fk_derivedimage_blob_id_blob')),
        sa.ForeignKeyConstraint(['media_type_id'], ['mediatype.id'], name=op.f('fk_derivedimage_media_type_id_mediatype')),
        sa.ForeignKeyConstraint(['signature_blob_id'], ['blob.id'], name=op.f('fk_derivedimage_signature_blob_id_blob')),
        sa.ForeignKeyConstraint(['source_manifest_id'], ['manifest.id'], name=op.f('fk_derivedimage_source_manifest_id_manifest')),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_derivedimage'))
    )
    op.create_index('derivedimage_uuid', 'derivedimage', ['uuid'], unique=True)
    op.create_index('derivedimage_uniqueness_hash', 'derivedimage', ['uniqueness_hash'], unique=True)
    op.create_index('derivedimage_source_manifest_id_media_type_id_uniqueness_hash', 'derivedimage', ['source_manifest_id', 'media_type_id', 'uniqueness_hash'], unique=True)
    op.create_index('derivedimage_source_manifest_id_blob_id', 'derivedimage', ['source_manifest_id', 'blob_id'], unique=True)
    op.create_index('derivedimage_source_manifest_id', 'derivedimage', ['source_manifest_id'], unique=False)
    op.create_index('derivedimage_signature_blob_id', 'derivedimage', ['signature_blob_id'], unique=False)
    op.create_index('derivedimage_media_type_id', 'derivedimage', ['media_type_id'], unique=False)
    op.create_index('derivedimage_blob_id', 'derivedimage', ['blob_id'], unique=False)
    # ### end Alembic commands ###
|
|
@ -0,0 +1,32 @@
|
|||
"""Backfill new appr tables
|
||||
|
||||
Revision ID: 5d463ea1e8a8
|
||||
Revises: 610320e9dacf
|
||||
Create Date: 2018-07-08 10:01:19.756126
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '5d463ea1e8a8'
|
||||
down_revision = '610320e9dacf'
|
||||
|
||||
from alembic import op as original_op
|
||||
from data.migrations.progress import ProgressWrapper
|
||||
import sqlalchemy as sa
|
||||
from util.migrate.table_ops import copy_table_contents
|
||||
|
||||
def upgrade(tables, tester, progress_reporter):
    """Backfill the new appr* tables from the legacy package tables.

    The copy order is significant: referenced (parent) tables are copied
    before the tables that point at them.
    """
    wrapped = ProgressWrapper(original_op, progress_reporter)
    conn = wrapped.get_bind()

    table_pairs = [
        ('blob', 'apprblob'),
        ('manifest', 'apprmanifest'),
        ('manifestlist', 'apprmanifestlist'),
        ('blobplacement', 'apprblobplacement'),
        ('manifestblob', 'apprmanifestblob'),
        ('manifestlistmanifest', 'apprmanifestlistmanifest'),
        ('tag', 'apprtag'),
    ]
    for source_table, destination_table in table_pairs:
        copy_table_contents(source_table, destination_table, conn)
|
||||
|
||||
def downgrade(tables, tester, progress_reporter):
    """No-op: the backfilled rows are left in place on downgrade."""
    _ = ProgressWrapper(original_op, progress_reporter)
|
|
@ -0,0 +1,206 @@
|
|||
"""Add new Appr-specific tables
|
||||
|
||||
Revision ID: 610320e9dacf
|
||||
Revises: 5cbbfc95bac7
|
||||
Create Date: 2018-05-24 16:46:13.514562
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '610320e9dacf'
|
||||
down_revision = '5cbbfc95bac7'
|
||||
|
||||
from alembic import op as original_op
|
||||
from data.migrations.progress import ProgressWrapper
|
||||
import sqlalchemy as sa
|
||||
|
||||
from util.migrate.table_ops import copy_table_contents
|
||||
|
||||
def upgrade(tables, tester, progress_reporter):
    """Create the Appr-specific tables (appr* copies of the package schema),
    seed the two kind/location lookup tables from their legacy counterparts,
    and register test-data population for CI.

    Creation order is parent-first so foreign keys resolve.
    """
    op = ProgressWrapper(original_op, progress_reporter)
    # ### commands auto generated by Alembic - please adjust! ###
    # Lookup table of placement location names.
    op.create_table('apprblobplacementlocation',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_apprblobplacementlocation'))
    )
    op.create_index('apprblobplacementlocation_name', 'apprblobplacementlocation', ['name'], unique=True)
    # Lookup table of tag kind names.
    op.create_table('apprtagkind',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_apprtagkind'))
    )
    op.create_index('apprtagkind_name', 'apprtagkind', ['name'], unique=True)
    # Content-addressed blobs; digest is globally unique.
    op.create_table('apprblob',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('digest', sa.String(length=255), nullable=False),
        sa.Column('media_type_id', sa.Integer(), nullable=False),
        sa.Column('size', sa.BigInteger(), nullable=False),
        sa.Column('uncompressed_size', sa.BigInteger(), nullable=True),
        sa.ForeignKeyConstraint(['media_type_id'], ['mediatype.id'], name=op.f('fk_apprblob_media_type_id_mediatype')),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_apprblob'))
    )
    op.create_index('apprblob_digest', 'apprblob', ['digest'], unique=True)
    op.create_index('apprblob_media_type_id', 'apprblob', ['media_type_id'], unique=False)
    # Manifests, keyed by digest.
    op.create_table('apprmanifest',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('digest', sa.String(length=255), nullable=False),
        sa.Column('media_type_id', sa.Integer(), nullable=False),
        sa.Column('manifest_json', sa.Text(), nullable=False),
        sa.ForeignKeyConstraint(['media_type_id'], ['mediatype.id'], name=op.f('fk_apprmanifest_media_type_id_mediatype')),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_apprmanifest'))
    )
    op.create_index('apprmanifest_digest', 'apprmanifest', ['digest'], unique=True)
    op.create_index('apprmanifest_media_type_id', 'apprmanifest', ['media_type_id'], unique=False)
    # Manifest lists grouping manifests, keyed by digest.
    op.create_table('apprmanifestlist',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('digest', sa.String(length=255), nullable=False),
        sa.Column('manifest_list_json', sa.Text(), nullable=False),
        sa.Column('schema_version', sa.String(length=255), nullable=False),
        sa.Column('media_type_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['media_type_id'], ['mediatype.id'], name=op.f('fk_apprmanifestlist_media_type_id_mediatype')),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_apprmanifestlist'))
    )
    op.create_index('apprmanifestlist_digest', 'apprmanifestlist', ['digest'], unique=True)
    op.create_index('apprmanifestlist_media_type_id', 'apprmanifestlist', ['media_type_id'], unique=False)
    # Join table: where each blob is stored; (blob, location) pairs are unique.
    op.create_table('apprblobplacement',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('blob_id', sa.Integer(), nullable=False),
        sa.Column('location_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['blob_id'], ['apprblob.id'], name=op.f('fk_apprblobplacement_blob_id_apprblob')),
        sa.ForeignKeyConstraint(['location_id'], ['apprblobplacementlocation.id'], name=op.f('fk_apprblobplacement_location_id_apprblobplacementlocation')),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_apprblobplacement'))
    )
    op.create_index('apprblobplacement_blob_id', 'apprblobplacement', ['blob_id'], unique=False)
    op.create_index('apprblobplacement_blob_id_location_id', 'apprblobplacement', ['blob_id', 'location_id'], unique=True)
    op.create_index('apprblobplacement_location_id', 'apprblobplacement', ['location_id'], unique=False)
    # Join table: which blobs make up a manifest.
    op.create_table('apprmanifestblob',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('manifest_id', sa.Integer(), nullable=False),
        sa.Column('blob_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['blob_id'], ['apprblob.id'], name=op.f('fk_apprmanifestblob_blob_id_apprblob')),
        sa.ForeignKeyConstraint(['manifest_id'], ['apprmanifest.id'], name=op.f('fk_apprmanifestblob_manifest_id_apprmanifest')),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_apprmanifestblob'))
    )
    op.create_index('apprmanifestblob_blob_id', 'apprmanifestblob', ['blob_id'], unique=False)
    op.create_index('apprmanifestblob_manifest_id', 'apprmanifestblob', ['manifest_id'], unique=False)
    op.create_index('apprmanifestblob_manifest_id_blob_id', 'apprmanifestblob', ['manifest_id', 'blob_id'], unique=True)
    # Join table: manifests contained in a manifest list, with platform info.
    op.create_table('apprmanifestlistmanifest',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('manifest_list_id', sa.Integer(), nullable=False),
        sa.Column('manifest_id', sa.Integer(), nullable=False),
        sa.Column('operating_system', sa.String(length=255), nullable=True),
        sa.Column('architecture', sa.String(length=255), nullable=True),
        sa.Column('platform_json', sa.Text(), nullable=True),
        sa.Column('media_type_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['manifest_id'], ['apprmanifest.id'], name=op.f('fk_apprmanifestlistmanifest_manifest_id_apprmanifest')),
        sa.ForeignKeyConstraint(['manifest_list_id'], ['apprmanifestlist.id'], name=op.f('fk_apprmanifestlistmanifest_manifest_list_id_apprmanifestlist')),
        sa.ForeignKeyConstraint(['media_type_id'], ['mediatype.id'], name=op.f('fk_apprmanifestlistmanifest_media_type_id_mediatype')),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_apprmanifestlistmanifest'))
    )
    op.create_index('apprmanifestlistmanifest_manifest_id', 'apprmanifestlistmanifest', ['manifest_id'], unique=False)
    op.create_index('apprmanifestlistmanifest_manifest_list_id', 'apprmanifestlistmanifest', ['manifest_list_id'], unique=False)
    op.create_index('apprmanifestlistmanifest_manifest_list_id_media_type_id', 'apprmanifestlistmanifest', ['manifest_list_id', 'media_type_id'], unique=False)
    op.create_index('apprmanifestlistmanifest_manifest_list_id_operating_system_arch', 'apprmanifestlistmanifest', ['manifest_list_id', 'operating_system', 'architecture', 'media_type_id'], unique=False)
    op.create_index('apprmanifestlistmanifest_media_type_id', 'apprmanifestlistmanifest', ['media_type_id'], unique=False)
    # Tags pointing at manifest lists; lifetime_* columns bound tag validity,
    # and (repository, name, lifetime_end) is unique.
    op.create_table('apprtag',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('repository_id', sa.Integer(), nullable=False),
        sa.Column('manifest_list_id', sa.Integer(), nullable=True),
        sa.Column('lifetime_start', sa.BigInteger(), nullable=False),
        sa.Column('lifetime_end', sa.BigInteger(), nullable=True),
        sa.Column('hidden', sa.Boolean(), nullable=False),
        sa.Column('reverted', sa.Boolean(), nullable=False),
        sa.Column('protected', sa.Boolean(), nullable=False),
        sa.Column('tag_kind_id', sa.Integer(), nullable=False),
        sa.Column('linked_tag_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['linked_tag_id'], ['apprtag.id'], name=op.f('fk_apprtag_linked_tag_id_apprtag')),
        sa.ForeignKeyConstraint(['manifest_list_id'], ['apprmanifestlist.id'], name=op.f('fk_apprtag_manifest_list_id_apprmanifestlist')),
        sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], name=op.f('fk_apprtag_repository_id_repository')),
        sa.ForeignKeyConstraint(['tag_kind_id'], ['apprtagkind.id'], name=op.f('fk_apprtag_tag_kind_id_apprtagkind')),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_apprtag'))
    )
    op.create_index('apprtag_lifetime_end', 'apprtag', ['lifetime_end'], unique=False)
    op.create_index('apprtag_linked_tag_id', 'apprtag', ['linked_tag_id'], unique=False)
    op.create_index('apprtag_manifest_list_id', 'apprtag', ['manifest_list_id'], unique=False)
    op.create_index('apprtag_repository_id', 'apprtag', ['repository_id'], unique=False)
    op.create_index('apprtag_repository_id_name', 'apprtag', ['repository_id', 'name'], unique=False)
    op.create_index('apprtag_repository_id_name_hidden', 'apprtag', ['repository_id', 'name', 'hidden'], unique=False)
    op.create_index('apprtag_repository_id_name_lifetime_end', 'apprtag', ['repository_id', 'name', 'lifetime_end'], unique=True)
    op.create_index('apprtag_tag_kind_id', 'apprtag', ['tag_kind_id'], unique=False)
    # ### end Alembic commands ###

    # Seed the two lookup tables from their legacy counterparts so that FK
    # targets exist before the separate backfill migration copies data rows.
    conn = op.get_bind()
    copy_table_contents('blobplacementlocation', 'apprblobplacementlocation', conn)
    copy_table_contents('tagkind', 'apprtagkind', conn)

    # ### population of test data ### #

    tester.populate_table('apprmanifest', [
        ('digest', tester.TestDataType.String),
        ('media_type_id', tester.TestDataType.Foreign('mediatype')),
        ('manifest_json', tester.TestDataType.JSON),
    ])

    tester.populate_table('apprmanifestlist', [
        ('digest', tester.TestDataType.String),
        ('manifest_list_json', tester.TestDataType.JSON),
        ('schema_version', tester.TestDataType.String),
        ('media_type_id', tester.TestDataType.Foreign('mediatype')),
    ])

    tester.populate_table('apprmanifestlistmanifest', [
        ('manifest_list_id', tester.TestDataType.Foreign('apprmanifestlist')),
        ('manifest_id', tester.TestDataType.Foreign('apprmanifest')),
        ('operating_system', tester.TestDataType.String),
        ('architecture', tester.TestDataType.String),
        ('platform_json', tester.TestDataType.JSON),
        ('media_type_id', tester.TestDataType.Foreign('mediatype')),
    ])

    tester.populate_table('apprblob', [
        ('digest', tester.TestDataType.String),
        ('media_type_id', tester.TestDataType.Foreign('mediatype')),
        ('size', tester.TestDataType.BigInteger),
        ('uncompressed_size', tester.TestDataType.BigInteger),
    ])

    tester.populate_table('apprmanifestblob', [
        ('manifest_id', tester.TestDataType.Foreign('apprmanifest')),
        ('blob_id', tester.TestDataType.Foreign('apprblob')),
    ])

    tester.populate_table('apprtag', [
        ('name', tester.TestDataType.String),
        ('repository_id', tester.TestDataType.Foreign('repository')),
        ('manifest_list_id', tester.TestDataType.Foreign('apprmanifestlist')),
        ('lifetime_start', tester.TestDataType.Integer),
        ('hidden', tester.TestDataType.Boolean),
        ('reverted', tester.TestDataType.Boolean),
        ('protected', tester.TestDataType.Boolean),
        ('tag_kind_id', tester.TestDataType.Foreign('apprtagkind')),
    ])

    # NOTE(review): blob_id here is populated from 'apprmanifestblob', not
    # 'apprblob', although the FK targets apprblob.id — looks suspicious; verify.
    tester.populate_table('apprblobplacement', [
        ('blob_id', tester.TestDataType.Foreign('apprmanifestblob')),
        ('location_id', tester.TestDataType.Foreign('apprblobplacementlocation')),
    ])

    # ### end population of test data ### #
|
||||
|
||||
|
||||
|
||||
def downgrade(tables, tester, progress_reporter):
    """Remove all Appr-specific tables.

    Drop order is significant: referencing (child) tables go first so that
    no foreign key is left dangling.
    """
    wrapped = ProgressWrapper(original_op, progress_reporter)
    # ### commands auto generated by Alembic - please adjust! ###
    for appr_table in ('apprtag',
                       'apprmanifestlistmanifest',
                       'apprmanifestblob',
                       'apprblobplacement',
                       'apprmanifestlist',
                       'apprmanifest',
                       'apprblob',
                       'apprtagkind',
                       'apprblobplacementlocation'):
        wrapped.drop_table(appr_table)
    # ### end Alembic commands ###
|
|
@ -0,0 +1,64 @@
|
|||
"""Add ability for build triggers to be disabled
|
||||
|
||||
Revision ID: 61cadbacb9fc
|
||||
Revises: b4c2d45bc132
|
||||
Create Date: 2017-10-18 12:07:26.190901
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '61cadbacb9fc'
|
||||
down_revision = 'b4c2d45bc132'
|
||||
|
||||
from alembic import op as original_op
|
||||
from data.migrations.progress import ProgressWrapper
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import mysql
|
||||
|
||||
def upgrade(tables, tester, progress_reporter):
    """Add the ability to disable build triggers.

    Creates the 'disablereason' lookup table, seeds it with the single
    'user_toggled' reason, registers a new 'toggle_repo_trigger' log-entry
    kind, and adds 'enabled'/'disabled_reason_id' columns to
    repositorybuildtrigger (enabled defaults to TRUE for existing rows).
    """
    op = ProgressWrapper(original_op, progress_reporter)
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('disablereason',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_disablereason'))
    )
    op.create_index('disablereason_name', 'disablereason', ['name'], unique=True)

    # Explicit id=1 so code can reference this reason deterministically.
    op.bulk_insert(
        tables.disablereason,
        [
            {'id': 1, 'name': 'user_toggled'},
        ],
    )

    op.bulk_insert(tables.logentrykind, [
        {'name': 'toggle_repo_trigger'},
    ])

    # server_default keeps existing trigger rows enabled after the migration.
    op.add_column(u'repositorybuildtrigger', sa.Column('disabled_reason_id', sa.Integer(), nullable=True))
    op.add_column(u'repositorybuildtrigger', sa.Column('enabled', sa.Boolean(), nullable=False, server_default=sa.sql.expression.true()))
    op.create_index('repositorybuildtrigger_disabled_reason_id', 'repositorybuildtrigger', ['disabled_reason_id'], unique=False)
    op.create_foreign_key(op.f('fk_repositorybuildtrigger_disabled_reason_id_disablereason'), 'repositorybuildtrigger', 'disablereason', ['disabled_reason_id'], ['id'])
    # ### end Alembic commands ###

    # ### population of test data ### #
    tester.populate_column('repositorybuildtrigger', 'disabled_reason_id', tester.TestDataType.Foreign('disablereason'))
    tester.populate_column('repositorybuildtrigger', 'enabled', tester.TestDataType.Boolean)
    # ### end population of test data ### #
|
||||
|
||||
|
||||
def downgrade(tables, tester, progress_reporter):
    """Remove build-trigger disable support: FK, index, columns, lookup table,
    and the log-entry kind inserted by the upgrade."""
    wrapped = ProgressWrapper(original_op, progress_reporter)
    # ### commands auto generated by Alembic - please adjust! ###
    # The FK and its index must go before the column they cover.
    wrapped.drop_constraint(wrapped.f('fk_repositorybuildtrigger_disabled_reason_id_disablereason'),
                            'repositorybuildtrigger', type_='foreignkey')
    wrapped.drop_index('repositorybuildtrigger_disabled_reason_id',
                       table_name='repositorybuildtrigger')
    wrapped.drop_column(u'repositorybuildtrigger', 'enabled')
    wrapped.drop_column(u'repositorybuildtrigger', 'disabled_reason_id')
    wrapped.drop_table('disablereason')
    # ### end Alembic commands ###

    # Undo the log-entry kind registered by upgrade().
    kind_removal = (tables.logentrykind
                    .delete()
                    .where(tables.logentrykind.c.name == wrapped.inline_literal('toggle_repo_trigger')))
    wrapped.execute(kind_removal)
|
|
@ -0,0 +1,26 @@
|
|||
"""Change manifest_bytes to a UTF8 text field
|
||||
|
||||
Revision ID: 654e6df88b71
|
||||
Revises: eafdeadcebc7
|
||||
Create Date: 2018-08-15 09:58:46.109277
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '654e6df88b71'
|
||||
down_revision = 'eafdeadcebc7'
|
||||
|
||||
from alembic import op as original_op
|
||||
from data.migrations.progress import ProgressWrapper
|
||||
import sqlalchemy as sa
|
||||
|
||||
from util.migrate import UTF8LongText
|
||||
|
||||
|
||||
def upgrade(tables, tester, progress_reporter):
    """Widen manifest.manifest_bytes from plain TEXT to a UTF-8 long-text type."""
    wrapped = ProgressWrapper(original_op, progress_reporter)
    wrapped.alter_column('manifest', 'manifest_bytes',
                         existing_type=sa.Text(),
                         type_=UTF8LongText())
|
||||
|
||||
def downgrade(tables, tester, progress_reporter):
    """Restore manifest.manifest_bytes to its original plain TEXT type."""
    wrapped = ProgressWrapper(original_op, progress_reporter)
    wrapped.alter_column('manifest', 'manifest_bytes',
                         existing_type=UTF8LongText(),
                         type_=sa.Text())
|
|
@ -0,0 +1,47 @@
|
|||
"""Add TagToRepositoryTag table
|
||||
|
||||
Revision ID: 67f0abd172ae
|
||||
Revises: 10f45ee2310b
|
||||
Create Date: 2018-10-30 11:31:06.615488
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '67f0abd172ae'
|
||||
down_revision = '10f45ee2310b'
|
||||
|
||||
from alembic import op as original_op
|
||||
from data.migrations.progress import ProgressWrapper
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import mysql
|
||||
|
||||
def upgrade(tables, tester, progress_reporter):
    """Create the tagtorepositorytag mapping table.

    Maps new-style 'tag' rows to legacy 'repositorytag' rows within a
    repository; both sides of the mapping are unique (one-to-one).
    """
    op = ProgressWrapper(original_op, progress_reporter)
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('tagtorepositorytag',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('repository_id', sa.Integer(), nullable=False),
        sa.Column('tag_id', sa.Integer(), nullable=False),
        sa.Column('repository_tag_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], name=op.f('fk_tagtorepositorytag_repository_id_repository')),
        sa.ForeignKeyConstraint(['repository_tag_id'], ['repositorytag.id'], name=op.f('fk_tagtorepositorytag_repository_tag_id_repositorytag')),
        sa.ForeignKeyConstraint(['tag_id'], ['tag.id'], name=op.f('fk_tagtorepositorytag_tag_id_tag')),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_tagtorepositorytag'))
    )
    op.create_index('tagtorepositorytag_repository_id', 'tagtorepositorytag', ['repository_id'], unique=False)
    # Unique on both sides: each tag maps to at most one repositorytag and vice versa.
    op.create_index('tagtorepositorytag_repository_tag_id', 'tagtorepositorytag', ['repository_tag_id'], unique=True)
    op.create_index('tagtorepositorytag_tag_id', 'tagtorepositorytag', ['tag_id'], unique=True)
    # ### end Alembic commands ###

    tester.populate_table('tagtorepositorytag', [
        ('repository_id', tester.TestDataType.Foreign('repository')),
        ('tag_id', tester.TestDataType.Foreign('tag')),
        ('repository_tag_id', tester.TestDataType.Foreign('repositorytag')),
    ])
|
||||
|
||||
|
||||
def downgrade(tables, tester, progress_reporter):
    """Drop the Tag -> RepositoryTag mapping table."""
    wrapped = ProgressWrapper(original_op, progress_reporter)
    # ### commands auto generated by Alembic - please adjust! ###
    wrapped.drop_table('tagtorepositorytag')
    # ### end Alembic commands ###
|
|
@ -0,0 +1,36 @@
|
|||
"""Change LogEntry to use a BigInteger as its primary key
|
||||
|
||||
Revision ID: 6c21e2cfb8b6
|
||||
Revises: d17c695859ea
|
||||
Create Date: 2018-07-27 16:30:02.877346
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '6c21e2cfb8b6'
|
||||
down_revision = 'd17c695859ea'
|
||||
|
||||
from alembic import op as original_op
|
||||
from data.migrations.progress import ProgressWrapper
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
def upgrade(tables, tester, progress_reporter):
    """Promote logentry.id from a 32-bit to a 64-bit auto-incrementing key."""
    wrapped = ProgressWrapper(original_op, progress_reporter)
    wrapped.alter_column(
        table_name='logentry',
        column_name='id',
        type_=sa.BigInteger(),
        nullable=False,
        autoincrement=True,
    )
|
||||
|
||||
def downgrade(tables, tester, progress_reporter):
    """Shrink logentry.id back to a 32-bit auto-incrementing key."""
    wrapped = ProgressWrapper(original_op, progress_reporter)
    wrapped.alter_column(
        table_name='logentry',
        column_name='id',
        type_=sa.Integer(),
        nullable=False,
        autoincrement=True,
    )
|
|
@ -0,0 +1,56 @@
|
|||
"""Add user prompt support
|
||||
|
||||
Revision ID: 6c7014e84a5e
|
||||
Revises: c156deb8845d
|
||||
Create Date: 2016-10-31 16:26:31.447705
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '6c7014e84a5e'
|
||||
down_revision = 'c156deb8845d'
|
||||
|
||||
from alembic import op as original_op
|
||||
from data.migrations.progress import ProgressWrapper
|
||||
import sqlalchemy as sa
|
||||
|
||||
def upgrade(tables, tester, progress_reporter):
    """Create the user-prompt tables and seed the known prompt kind.

    userpromptkind is a lookup table of prompt types; userprompt links a
    user to a kind, unique per (user, kind) pair.
    """
    op = ProgressWrapper(original_op, progress_reporter)
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('userpromptkind',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_userpromptkind'))
    )
    op.create_index('userpromptkind_name', 'userpromptkind', ['name'], unique=False)
    # userprompt must be created after userpromptkind: it carries a FK to it.
    op.create_table('userprompt',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('kind_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['kind_id'], ['userpromptkind.id'], name=op.f('fk_userprompt_kind_id_userpromptkind')),
        sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_userprompt_user_id_user')),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_userprompt'))
    )
    op.create_index('userprompt_kind_id', 'userprompt', ['kind_id'], unique=False)
    op.create_index('userprompt_user_id', 'userprompt', ['user_id'], unique=False)
    # A user may have at most one outstanding prompt of a given kind.
    op.create_index('userprompt_user_id_kind_id', 'userprompt', ['user_id', 'kind_id'], unique=True)
    ### end Alembic commands ###

    # Seed the single prompt kind used at the time of this migration.
    op.bulk_insert(tables.userpromptkind,
                   [
                    {'name':'confirm_username'},
                   ])

    # ### population of test data ### #
    tester.populate_table('userprompt', [
        ('user_id', tester.TestDataType.Foreign('user')),
        ('kind_id', tester.TestDataType.Foreign('userpromptkind')),
    ])
    # ### end population of test data ### #
|
||||
|
||||
def downgrade(tables, tester, progress_reporter):
    """Drop the user-prompt tables; userprompt first, since it references userpromptkind."""
    wrapped = ProgressWrapper(original_op, progress_reporter)
    for table_name in ('userprompt', 'userpromptkind'):
        wrapped.drop_table(table_name)
|
43
data/migrations/versions/6ec8726c0ace_add_logentry3_table.py
Normal file
43
data/migrations/versions/6ec8726c0ace_add_logentry3_table.py
Normal file
|
@ -0,0 +1,43 @@
|
|||
"""Add LogEntry3 table
|
||||
|
||||
Revision ID: 6ec8726c0ace
|
||||
Revises: 54492a68a3cf
|
||||
Create Date: 2019-01-03 13:41:02.897957
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '6ec8726c0ace'
|
||||
down_revision = '54492a68a3cf'
|
||||
|
||||
from alembic import op as original_op
|
||||
from data.migrations.progress import ProgressWrapper
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import mysql
|
||||
|
||||
def upgrade(tables, tester, progress_reporter):
    """Create the logentry3 table.

    Same shape as logentry but with a BigInteger primary key, so ids cannot
    overflow 32 bits. Indexes cover the datetime-range queries used for log
    lookups by account, performer, and repository.
    """
    op = ProgressWrapper(original_op, progress_reporter)
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('logentry3',
        sa.Column('id', sa.BigInteger(), nullable=False),
        sa.Column('kind_id', sa.Integer(), nullable=False),
        sa.Column('account_id', sa.Integer(), nullable=False),
        # performer/repository are optional: not every log line has them.
        sa.Column('performer_id', sa.Integer(), nullable=True),
        sa.Column('repository_id', sa.Integer(), nullable=True),
        sa.Column('datetime', sa.DateTime(), nullable=False),
        sa.Column('ip', sa.String(length=255), nullable=True),
        sa.Column('metadata_json', sa.Text(), nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_logentry3'))
    )
    op.create_index('logentry3_account_id_datetime', 'logentry3', ['account_id', 'datetime'], unique=False)
    op.create_index('logentry3_datetime', 'logentry3', ['datetime'], unique=False)
    op.create_index('logentry3_performer_id_datetime', 'logentry3', ['performer_id', 'datetime'], unique=False)
    op.create_index('logentry3_repository_id_datetime_kind_id', 'logentry3', ['repository_id', 'datetime', 'kind_id'], unique=False)
    # ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade(tables, tester, progress_reporter):
    """Revert the migration by removing the logentry3 table."""
    wrapped = ProgressWrapper(original_op, progress_reporter)
    wrapped.drop_table('logentry3')
|
|
@ -0,0 +1,289 @@
|
|||
"""Backfill new encrypted fields
|
||||
|
||||
Revision ID: 703298a825c2
|
||||
Revises: c13c8052f7a6
|
||||
Create Date: 2019-08-19 16:07:48.109889
|
||||
|
||||
"""
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '703298a825c2'
|
||||
down_revision = 'c13c8052f7a6'
|
||||
|
||||
import logging
|
||||
import uuid
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
from peewee import (JOIN, IntegrityError, DateTimeField, CharField, ForeignKeyField,
|
||||
BooleanField, TextField, IntegerField)
|
||||
from alembic import op as original_op
|
||||
from data.migrations.progress import ProgressWrapper
|
||||
|
||||
import sqlalchemy as sa
|
||||
|
||||
from data.database import (BaseModel, User, Repository, AccessTokenKind, Role,
|
||||
random_string_generator, QuayUserField, BuildTriggerService,
|
||||
uuid_generator, DisableReason)
|
||||
from data.fields import Credential, DecryptedValue, EncryptedCharField, EncryptedTextField, EnumField, CredentialField
|
||||
from data.model.token import ACCESS_TOKEN_NAME_PREFIX_LENGTH
|
||||
from data.model.appspecifictoken import TOKEN_NAME_PREFIX_LENGTH as AST_TOKEN_NAME_PREFIX_LENGTH
|
||||
from data.model.oauth import ACCESS_TOKEN_PREFIX_LENGTH as OAUTH_ACCESS_TOKEN_PREFIX_LENGTH
|
||||
from data.model.oauth import AUTHORIZATION_CODE_PREFIX_LENGTH
|
||||
|
||||
BATCH_SIZE = 10
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
def _iterate(model_class, clause):
    """Yield rows of `model_class` matching `clause`, BATCH_SIZE at a time.

    The query is re-run after each batch, so iteration only terminates
    once the consumer has mutated rows such that they no longer match
    `clause` (i.e. the backfill for that batch has taken effect).
    """
    while True:
        batch = list(model_class.select().where(clause).limit(BATCH_SIZE))
        if not batch:
            return
        for row in batch:
            yield row
|
||||
|
||||
|
||||
def _decrypted(value):
    """Wrap plaintext `value` in a DecryptedValue; pass None through unchanged."""
    if value is not None:
        # Only string plaintext is expected here (Python 2 `basestring`).
        assert isinstance(value, basestring)
        return DecryptedValue(value)
    return None
|
||||
|
||||
|
||||
# NOTE: As per standard migrations involving Peewee models, we copy them here, as they will change
|
||||
# after this call.
|
||||
class AccessToken(BaseModel):
    # Frozen copy of the AccessToken model for this migration.
    # `code` is the legacy combined credential; `token_name`/`token_code`
    # hold the split-out name prefix and encrypted remainder that upgrade()
    # backfills from `code`.
    code = CharField(default=random_string_generator(length=64), unique=True, index=True)
    token_name = CharField(default=random_string_generator(length=32), unique=True, index=True)
    token_code = EncryptedCharField(default_token_length=32)
|
||||
|
||||
class RobotAccountToken(BaseModel):
    # Frozen copy for this migration: one encrypted token per robot account.
    # upgrade() creates rows with fully_migrated=False; presumably a later
    # step flips it once the legacy credential source is retired — TODO confirm.
    robot_account = QuayUserField(index=True, allows_robots=True, unique=True)
    token = EncryptedCharField(default_token_length=64)
    fully_migrated = BooleanField(default=False)
|
||||
|
||||
class RepositoryBuildTrigger(BaseModel):
    # Frozen copy for this migration.
    uuid = CharField(default=uuid_generator, index=True)
    # Legacy plaintext credentials (source of the backfill).
    auth_token = CharField(null=True)
    private_key = TextField(null=True)

    # Encrypted replacements populated by upgrade(); fully_migrated marks
    # rows whose encrypted columns have been written.
    secure_auth_token = EncryptedCharField(null=True)
    secure_private_key = EncryptedTextField(null=True)
    fully_migrated = BooleanField(default=False)
|
||||
|
||||
class AppSpecificAuthToken(BaseModel):
    # Frozen copy for this migration. `token_code` is the legacy combined
    # credential; upgrade() splits it into `token_name` (plain prefix) and
    # `token_secret` (encrypted remainder).
    token_name = CharField(index=True, unique=True, default=random_string_generator(60))
    token_secret = EncryptedCharField(default_token_length=60)
    token_code = CharField(default=random_string_generator(length=120), unique=True, index=True)
|
||||
|
||||
class OAuthAccessToken(BaseModel):
    # Frozen copy for this migration. `access_token` is the legacy value;
    # upgrade() derives `token_name` (plain prefix) and `token_code`
    # (hashed credential) from it.
    token_name = CharField(index=True, unique=True)
    token_code = CredentialField()
    access_token = CharField(index=True)
|
||||
|
||||
class OAuthAuthorizationCode(BaseModel):
    # Frozen copy for this migration. `code` is the legacy value (nullable);
    # upgrade() derives `code_name` (plain prefix) and `code_credential`
    # (hashed credential) from it, generating a fresh code when absent.
    code = CharField(index=True, unique=True, null=True)
    code_name = CharField(index=True, unique=True)
    code_credential = CredentialField()
|
||||
|
||||
class OAuthApplication(BaseModel):
    # Frozen copy for this migration. `client_secret` is the legacy plaintext;
    # upgrade() copies it into `secure_client_secret` (encrypted) and sets
    # fully_migrated=True.
    secure_client_secret = EncryptedCharField(default_token_length=40, null=True)
    fully_migrated = BooleanField(default=False)
    client_secret = CharField(default=random_string_generator(length=40))
|
||||
|
||||
|
||||
def upgrade(tables, tester, progress_reporter):
    """Backfill the new encrypted/hashed credential columns from legacy plaintext.

    For each credential-bearing model, iterate unmigrated rows in batches,
    split the legacy value into a plain name prefix plus an encrypted (or
    hashed) remainder, write both back, and finally assert no unmigrated
    rows remain before flipping column nullability. Order matters: the
    data backfill must complete before the NOT NULL constraints are added.
    """
    op = ProgressWrapper(original_op, progress_reporter)

    # Empty all access token names to fix the bug where we put the wrong name and code
    # in for some tokens.
    # NOTE(review): the filter selects rows where token_name IS NULL and then
    # sets token_name to None — as written this looks like a no-op. The stated
    # intent suggests the filter may have been meant to match non-null names;
    # verify against upstream history before changing.
    AccessToken.update(token_name=None).where(AccessToken.token_name >> None).execute()

    # AccessToken: split legacy `code` into name prefix + encrypted code.
    logger.info('Backfilling encrypted credentials for access tokens')
    for access_token in _iterate(AccessToken, ((AccessToken.token_name >> None) |
                                               (AccessToken.token_name == ''))):
        logger.info('Backfilling encrypted credentials for access token %s', access_token.id)
        assert access_token.code is not None
        assert access_token.code[:ACCESS_TOKEN_NAME_PREFIX_LENGTH]
        assert access_token.code[ACCESS_TOKEN_NAME_PREFIX_LENGTH:]

        token_name = access_token.code[:ACCESS_TOKEN_NAME_PREFIX_LENGTH]
        token_code = _decrypted(access_token.code[ACCESS_TOKEN_NAME_PREFIX_LENGTH:])

        # Guard on `code` as well so a concurrently-rotated token is skipped.
        (AccessToken
         .update(token_name=token_name, token_code=token_code)
         .where(AccessToken.id == access_token.id, AccessToken.code == access_token.code)
         .execute())

    assert AccessToken.select().where(AccessToken.token_name >> None).count() == 0

    # Robots: create a RobotAccountToken row for each robot user lacking one.
    logger.info('Backfilling encrypted credentials for robots')
    while True:
        has_row = False
        query = (User
                 .select()
                 .join(RobotAccountToken, JOIN.LEFT_OUTER)
                 .where(User.robot == True, RobotAccountToken.id >> None)
                 .limit(BATCH_SIZE))

        for robot_user in query:
            logger.info('Backfilling encrypted credentials for robot %s', robot_user.id)
            has_row = True
            try:
                # The robot's legacy credential is read from the email column
                # here — presumably where robot tokens were historically
                # stored; confirm against the pre-migration model.
                RobotAccountToken.create(robot_account=robot_user,
                                         token=_decrypted(robot_user.email),
                                         fully_migrated=False)
            except IntegrityError:
                # Another worker created the row first; restart the batch.
                break

        if not has_row:
            break

    # RepositoryBuildTrigger: encrypt auth_token/private_key in place.
    logger.info('Backfilling encrypted credentials for repo build triggers')
    for repo_build_trigger in _iterate(RepositoryBuildTrigger,
                                       (RepositoryBuildTrigger.fully_migrated == False)):
        logger.info('Backfilling encrypted credentials for repo build trigger %s',
                    repo_build_trigger.id)

        (RepositoryBuildTrigger
         .update(secure_auth_token=_decrypted(repo_build_trigger.auth_token),
                 secure_private_key=_decrypted(repo_build_trigger.private_key),
                 fully_migrated=True)
         .where(RepositoryBuildTrigger.id == repo_build_trigger.id,
                RepositoryBuildTrigger.uuid == repo_build_trigger.uuid)
         .execute())

    assert (RepositoryBuildTrigger
            .select()
            .where(RepositoryBuildTrigger.fully_migrated == False)
            .count()) == 0

    # AppSpecificAuthToken: split legacy token_code into name + encrypted secret.
    logger.info('Backfilling encrypted credentials for app specific auth tokens')
    for token in _iterate(AppSpecificAuthToken, ((AppSpecificAuthToken.token_name >> None) |
                                                 (AppSpecificAuthToken.token_name == '') |
                                                 (AppSpecificAuthToken.token_secret >> None))):
        logger.info('Backfilling encrypted credentials for app specific auth %s',
                    token.id)
        assert token.token_code[AST_TOKEN_NAME_PREFIX_LENGTH:]

        token_name = token.token_code[:AST_TOKEN_NAME_PREFIX_LENGTH]
        token_secret = _decrypted(token.token_code[AST_TOKEN_NAME_PREFIX_LENGTH:])
        assert token_name
        assert token_secret

        (AppSpecificAuthToken
         .update(token_name=token_name,
                 token_secret=token_secret)
         .where(AppSpecificAuthToken.id == token.id,
                AppSpecificAuthToken.token_code == token.token_code)
         .execute())

    assert (AppSpecificAuthToken
            .select()
            .where(AppSpecificAuthToken.token_name >> None)
            .count()) == 0

    # OAuthAccessToken: split access_token into name + hashed credential.
    logger.info('Backfilling credentials for OAuth access tokens')
    for token in _iterate(OAuthAccessToken, ((OAuthAccessToken.token_name >> None) |
                                             (OAuthAccessToken.token_name == ''))):
        logger.info('Backfilling credentials for OAuth access token %s', token.id)
        token_name = token.access_token[:OAUTH_ACCESS_TOKEN_PREFIX_LENGTH]
        token_code = Credential.from_string(token.access_token[OAUTH_ACCESS_TOKEN_PREFIX_LENGTH:])
        assert token_name
        assert token.access_token[OAUTH_ACCESS_TOKEN_PREFIX_LENGTH:]

        (OAuthAccessToken
         .update(token_name=token_name,
                 token_code=token_code)
         .where(OAuthAccessToken.id == token.id,
                OAuthAccessToken.access_token == token.access_token)
         .execute())

    assert (OAuthAccessToken
            .select()
            .where(OAuthAccessToken.token_name >> None)
            .count()) == 0

    # OAuthAuthorizationCode: split `code` into name + hashed credential,
    # inventing a random code for rows where it is NULL.
    logger.info('Backfilling credentials for OAuth auth code')
    for code in _iterate(OAuthAuthorizationCode, ((OAuthAuthorizationCode.code_name >> None) |
                                                  (OAuthAuthorizationCode.code_name == ''))):
        logger.info('Backfilling credentials for OAuth auth code %s', code.id)
        user_code = code.code or random_string_generator(AUTHORIZATION_CODE_PREFIX_LENGTH * 2)()
        code_name = user_code[:AUTHORIZATION_CODE_PREFIX_LENGTH]
        code_credential = Credential.from_string(user_code[AUTHORIZATION_CODE_PREFIX_LENGTH:])
        assert code_name
        assert user_code[AUTHORIZATION_CODE_PREFIX_LENGTH:]

        (OAuthAuthorizationCode
         .update(code_name=code_name, code_credential=code_credential)
         .where(OAuthAuthorizationCode.id == code.id)
         .execute())

    assert (OAuthAuthorizationCode
            .select()
            .where(OAuthAuthorizationCode.code_name >> None)
            .count()) == 0

    # OAuthApplication: encrypt client_secret, generating one if absent.
    logger.info('Backfilling secret for OAuth applications')
    for app in _iterate(OAuthApplication, OAuthApplication.fully_migrated == False):
        logger.info('Backfilling secret for OAuth application %s', app.id)
        client_secret = app.client_secret or str(uuid.uuid4())
        secure_client_secret = _decrypted(client_secret)

        (OAuthApplication
         .update(secure_client_secret=secure_client_secret, fully_migrated=True)
         .where(OAuthApplication.id == app.id, OAuthApplication.fully_migrated == False)
         .execute())

    assert (OAuthApplication
            .select()
            .where(OAuthApplication.fully_migrated == False)
            .count()) == 0

    # Adjust existing fields to be nullable.
    op.alter_column('accesstoken', 'code', nullable=True, existing_type=sa.String(length=255))
    op.alter_column('oauthaccesstoken', 'access_token', nullable=True, existing_type=sa.String(length=255))
    op.alter_column('oauthauthorizationcode', 'code', nullable=True, existing_type=sa.String(length=255))
    op.alter_column('appspecificauthtoken', 'token_code', nullable=True, existing_type=sa.String(length=255))

    # Adjust new fields to be non-nullable. Safe only because the asserts
    # above proved every row has been backfilled.
    op.alter_column('accesstoken', 'token_name', nullable=False, existing_type=sa.String(length=255))
    op.alter_column('accesstoken', 'token_code', nullable=False, existing_type=sa.String(length=255))

    op.alter_column('appspecificauthtoken', 'token_name', nullable=False, existing_type=sa.String(length=255))
    op.alter_column('appspecificauthtoken', 'token_secret', nullable=False, existing_type=sa.String(length=255))

    op.alter_column('oauthaccesstoken', 'token_name', nullable=False, existing_type=sa.String(length=255))
    op.alter_column('oauthaccesstoken', 'token_code', nullable=False, existing_type=sa.String(length=255))

    op.alter_column('oauthauthorizationcode', 'code_name', nullable=False, existing_type=sa.String(length=255))
    op.alter_column('oauthauthorizationcode', 'code_credential', nullable=False, existing_type=sa.String(length=255))
|
||||
|
||||
def downgrade(tables, tester, progress_reporter):
    """Revert the nullability changes of the encrypted-credentials backfill.

    Legacy plaintext columns become required again, and the new
    encrypted/hashed columns become optional. Column data itself is not
    reverted.
    """
    op = ProgressWrapper(original_op, progress_reporter)

    # Legacy plaintext columns go back to NOT NULL.
    for table_name, column_name in (('accesstoken', 'code'),
                                    ('oauthaccesstoken', 'access_token'),
                                    ('oauthauthorizationcode', 'code'),
                                    ('appspecificauthtoken', 'token_code')):
        op.alter_column(table_name, column_name, nullable=False,
                        existing_type=sa.String(length=255))

    # New credential columns become nullable again.
    for table_name, column_name in (('accesstoken', 'token_name'),
                                    ('accesstoken', 'token_code'),
                                    ('appspecificauthtoken', 'token_name'),
                                    ('appspecificauthtoken', 'token_secret'),
                                    ('oauthaccesstoken', 'token_name'),
                                    ('oauthaccesstoken', 'token_code'),
                                    ('oauthauthorizationcode', 'code_name'),
                                    ('oauthauthorizationcode', 'code_credential')):
        op.alter_column(table_name, column_name, nullable=True,
                        existing_type=sa.String(length=255))
|
|
@ -0,0 +1,74 @@
|
|||
"""Add support for app specific tokens
|
||||
|
||||
Revision ID: 7367229b38d9
|
||||
Revises: d8989249f8f6
|
||||
Create Date: 2017-12-12 13:15:42.419764
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '7367229b38d9'
|
||||
down_revision = 'd8989249f8f6'
|
||||
|
||||
from alembic import op as original_op
|
||||
from data.migrations.progress import ProgressWrapper
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import mysql
|
||||
from util.migrate import UTF8CharField
|
||||
|
||||
def upgrade(tables, tester, progress_reporter):
    """Create the appspecificauthtoken table and register its audit log kinds.

    An app-specific token belongs to a user, has a unique token_code
    credential, and may carry an expiration.
    """
    op = ProgressWrapper(original_op, progress_reporter)
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('appspecificauthtoken',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('uuid', sa.String(length=36), nullable=False),
        # title is user-supplied text, so it uses the UTF8-safe char type.
        sa.Column('title', UTF8CharField(length=255), nullable=False),
        sa.Column('token_code', sa.String(length=255), nullable=False),
        sa.Column('created', sa.DateTime(), nullable=False),
        sa.Column('expiration', sa.DateTime(), nullable=True),
        sa.Column('last_accessed', sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_appspecificauthtoken_user_id_user')),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_appspecificauthtoken'))
    )
    # token_code is the credential: it must be unique.
    op.create_index('appspecificauthtoken_token_code', 'appspecificauthtoken', ['token_code'], unique=True)
    op.create_index('appspecificauthtoken_user_id', 'appspecificauthtoken', ['user_id'], unique=False)
    op.create_index('appspecificauthtoken_user_id_expiration', 'appspecificauthtoken', ['user_id', 'expiration'], unique=False)
    op.create_index('appspecificauthtoken_uuid', 'appspecificauthtoken', ['uuid'], unique=False)
    # ### end Alembic commands ###

    # Audit log kinds for token lifecycle events.
    op.bulk_insert(tables.logentrykind, [
        {'name': 'create_app_specific_token'},
        {'name': 'revoke_app_specific_token'},
    ])

    # ### population of test data ### #
    tester.populate_table('appspecificauthtoken', [
        ('user_id', tester.TestDataType.Foreign('user')),
        ('uuid', tester.TestDataType.UUID),
        ('title', tester.TestDataType.UTF8Char),
        ('token_code', tester.TestDataType.String),
        ('created', tester.TestDataType.DateTime),
        ('expiration', tester.TestDataType.DateTime),
        ('last_accessed', tester.TestDataType.DateTime),
    ])
    # ### end population of test data ### #
|
||||
|
||||
|
||||
def downgrade(tables, tester, progress_reporter):
    """Drop the appspecificauthtoken table and remove its audit log kinds."""
    wrapped = ProgressWrapper(original_op, progress_reporter)
    wrapped.drop_table('appspecificauthtoken')

    logentrykind = tables.logentrykind
    for kind_name in ('create_app_specific_token', 'revoke_app_specific_token'):
        wrapped.execute(
            logentrykind
            .delete()
            .where(logentrykind.name == wrapped.inline_literal(kind_name)))
|
340
data/migrations/versions/7a525c68eb13_add_oci_app_models.py
Normal file
340
data/migrations/versions/7a525c68eb13_add_oci_app_models.py
Normal file
|
@ -0,0 +1,340 @@
|
|||
"""Add OCI/App models
|
||||
|
||||
Revision ID: 7a525c68eb13
|
||||
Revises: e2894a3a3c19
|
||||
Create Date: 2017-01-24 16:25:52.170277
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '7a525c68eb13'
|
||||
down_revision = 'e2894a3a3c19'
|
||||
|
||||
from alembic import op as original_op
|
||||
from data.migrations.progress import ProgressWrapper
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import mysql
|
||||
from sqlalchemy.sql import table, column
|
||||
from util.migrate import UTF8LongText, UTF8CharField
|
||||
|
||||
|
||||
def upgrade(tables, tester, progress_reporter):
|
||||
op = ProgressWrapper(original_op, progress_reporter)
|
||||
op.create_table(
|
||||
'tagkind',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('name', sa.String(length=255), nullable=False),
|
||||
sa.PrimaryKeyConstraint('id', name=op.f('pk_tagkind'))
|
||||
)
|
||||
op.create_index('tagkind_name', 'tagkind', ['name'], unique=True)
|
||||
|
||||
op.create_table(
|
||||
'blobplacementlocation',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('name', sa.String(length=255), nullable=False),
|
||||
sa.PrimaryKeyConstraint('id', name=op.f('pk_blobplacementlocation'))
|
||||
)
|
||||
op.create_index('blobplacementlocation_name', 'blobplacementlocation', ['name'], unique=True)
|
||||
|
||||
op.create_table(
|
||||
'blob',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('digest', sa.String(length=255), nullable=False),
|
||||
sa.Column('media_type_id', sa.Integer(), nullable=False),
|
||||
sa.Column('size', sa.BigInteger(), nullable=False),
|
||||
sa.Column('uncompressed_size', sa.BigInteger(), nullable=True),
|
||||
sa.ForeignKeyConstraint(['media_type_id'], ['mediatype.id'], name=op.f('fk_blob_media_type_id_mediatype')),
|
||||
sa.PrimaryKeyConstraint('id', name=op.f('pk_blob'))
|
||||
)
|
||||
op.create_index('blob_digest', 'blob', ['digest'], unique=True)
|
||||
op.create_index('blob_media_type_id', 'blob', ['media_type_id'], unique=False)
|
||||
|
||||
op.create_table(
|
||||
'blobplacementlocationpreference',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('user_id', sa.Integer(), nullable=False),
|
||||
sa.Column('location_id', sa.Integer(), nullable=False),
|
||||
sa.ForeignKeyConstraint(['location_id'], ['blobplacementlocation.id'], name=op.f('fk_blobplacementlocpref_locid_blobplacementlocation')),
|
||||
sa.ForeignKeyConstraint(['user_id'], ['user.id'], name=op.f('fk_blobplacementlocationpreference_user_id_user')),
|
||||
sa.PrimaryKeyConstraint('id', name=op.f('pk_blobplacementlocationpreference'))
|
||||
)
|
||||
op.create_index('blobplacementlocationpreference_location_id', 'blobplacementlocationpreference', ['location_id'], unique=False)
|
||||
op.create_index('blobplacementlocationpreference_user_id', 'blobplacementlocationpreference', ['user_id'], unique=False)
|
||||
|
||||
op.create_table(
|
||||
'manifest',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('digest', sa.String(length=255), nullable=False),
|
||||
sa.Column('media_type_id', sa.Integer(), nullable=False),
|
||||
sa.Column('manifest_json', UTF8LongText, nullable=False),
|
||||
sa.ForeignKeyConstraint(['media_type_id'], ['mediatype.id'], name=op.f('fk_manifest_media_type_id_mediatype')),
|
||||
sa.PrimaryKeyConstraint('id', name=op.f('pk_manifest'))
|
||||
)
|
||||
op.create_index('manifest_digest', 'manifest', ['digest'], unique=True)
|
||||
op.create_index('manifest_media_type_id', 'manifest', ['media_type_id'], unique=False)
|
||||
|
||||
op.create_table(
|
||||
'manifestlist',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('digest', sa.String(length=255), nullable=False),
|
||||
sa.Column('manifest_list_json', UTF8LongText, nullable=False),
|
||||
sa.Column('schema_version', UTF8CharField(length=255), nullable=False),
|
||||
sa.Column('media_type_id', sa.Integer(), nullable=False),
|
||||
sa.ForeignKeyConstraint(['media_type_id'], ['mediatype.id'], name=op.f('fk_manifestlist_media_type_id_mediatype')),
|
||||
sa.PrimaryKeyConstraint('id', name=op.f('pk_manifestlist'))
|
||||
)
|
||||
op.create_index('manifestlist_digest', 'manifestlist', ['digest'], unique=True)
|
||||
op.create_index('manifestlist_media_type_id', 'manifestlist', ['media_type_id'], unique=False)
|
||||
|
||||
op.create_table(
|
||||
'bittorrentpieces',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('blob_id', sa.Integer(), nullable=False),
|
||||
sa.Column('pieces', UTF8LongText, nullable=False),
|
||||
sa.Column('piece_length', sa.BigInteger(), nullable=False),
|
||||
sa.ForeignKeyConstraint(['blob_id'], ['blob.id'], name=op.f('fk_bittorrentpieces_blob_id_blob')),
|
||||
sa.PrimaryKeyConstraint('id', name=op.f('pk_bittorrentpieces'))
|
||||
)
|
||||
op.create_index('bittorrentpieces_blob_id', 'bittorrentpieces', ['blob_id'], unique=False)
|
||||
op.create_index('bittorrentpieces_blob_id_piece_length', 'bittorrentpieces', ['blob_id', 'piece_length'], unique=True)
|
||||
|
||||
op.create_table(
|
||||
'blobplacement',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('blob_id', sa.Integer(), nullable=False),
|
||||
sa.Column('location_id', sa.Integer(), nullable=False),
|
||||
sa.ForeignKeyConstraint(['blob_id'], ['blob.id'], name=op.f('fk_blobplacement_blob_id_blob')),
|
||||
sa.ForeignKeyConstraint(['location_id'], ['blobplacementlocation.id'], name=op.f('fk_blobplacement_location_id_blobplacementlocation')),
|
||||
sa.PrimaryKeyConstraint('id', name=op.f('pk_blobplacement'))
|
||||
)
|
||||
op.create_index('blobplacement_blob_id', 'blobplacement', ['blob_id'], unique=False)
|
||||
op.create_index('blobplacement_blob_id_location_id', 'blobplacement', ['blob_id', 'location_id'], unique=True)
|
||||
op.create_index('blobplacement_location_id', 'blobplacement', ['location_id'], unique=False)
|
||||
|
||||
op.create_table(
|
||||
'blobuploading',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('uuid', sa.String(length=255), nullable=False),
|
||||
sa.Column('created', sa.DateTime(), nullable=False),
|
||||
sa.Column('repository_id', sa.Integer(), nullable=False),
|
||||
sa.Column('location_id', sa.Integer(), nullable=False),
|
||||
sa.Column('byte_count', sa.BigInteger(), nullable=False),
|
||||
sa.Column('uncompressed_byte_count', sa.BigInteger(), nullable=True),
|
||||
sa.Column('chunk_count', sa.BigInteger(), nullable=False),
|
||||
sa.Column('storage_metadata', UTF8LongText, nullable=True),
|
||||
sa.Column('sha_state', UTF8LongText, nullable=True),
|
||||
sa.Column('piece_sha_state', UTF8LongText, nullable=True),
|
||||
sa.Column('piece_hashes', UTF8LongText, nullable=True),
|
||||
sa.ForeignKeyConstraint(['location_id'], ['blobplacementlocation.id'], name=op.f('fk_blobuploading_location_id_blobplacementlocation')),
|
||||
sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], name=op.f('fk_blobuploading_repository_id_repository')),
|
||||
sa.PrimaryKeyConstraint('id', name=op.f('pk_blobuploading'))
|
||||
)
|
||||
op.create_index('blobuploading_created', 'blobuploading', ['created'], unique=False)
|
||||
op.create_index('blobuploading_location_id', 'blobuploading', ['location_id'], unique=False)
|
||||
op.create_index('blobuploading_repository_id', 'blobuploading', ['repository_id'], unique=False)
|
||||
op.create_index('blobuploading_repository_id_uuid', 'blobuploading', ['repository_id', 'uuid'], unique=True)
|
||||
op.create_index('blobuploading_uuid', 'blobuploading', ['uuid'], unique=True)
|
||||
|
||||
op.create_table(
|
||||
'derivedimage',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('uuid', sa.String(length=255), nullable=False),
|
||||
sa.Column('source_manifest_id', sa.Integer(), nullable=False),
|
||||
sa.Column('derived_manifest_json', UTF8LongText, nullable=False),
|
||||
sa.Column('media_type_id', sa.Integer(), nullable=False),
|
||||
sa.Column('blob_id', sa.Integer(), nullable=False),
|
||||
sa.Column('uniqueness_hash', sa.String(length=255), nullable=False),
|
||||
sa.Column('signature_blob_id', sa.Integer(), nullable=True),
|
||||
sa.ForeignKeyConstraint(['blob_id'], ['blob.id'], name=op.f('fk_derivedimage_blob_id_blob')),
|
||||
sa.ForeignKeyConstraint(['media_type_id'], ['mediatype.id'], name=op.f('fk_derivedimage_media_type_id_mediatype')),
|
||||
sa.ForeignKeyConstraint(['signature_blob_id'], ['blob.id'], name=op.f('fk_derivedimage_signature_blob_id_blob')),
|
||||
sa.ForeignKeyConstraint(['source_manifest_id'], ['manifest.id'], name=op.f('fk_derivedimage_source_manifest_id_manifest')),
|
||||
sa.PrimaryKeyConstraint('id', name=op.f('pk_derivedimage'))
|
||||
)
|
||||
op.create_index('derivedimage_blob_id', 'derivedimage', ['blob_id'], unique=False)
|
||||
op.create_index('derivedimage_media_type_id', 'derivedimage', ['media_type_id'], unique=False)
|
||||
op.create_index('derivedimage_signature_blob_id', 'derivedimage', ['signature_blob_id'], unique=False)
|
||||
op.create_index('derivedimage_source_manifest_id', 'derivedimage', ['source_manifest_id'], unique=False)
|
||||
op.create_index('derivedimage_source_manifest_id_blob_id', 'derivedimage', ['source_manifest_id', 'blob_id'], unique=True)
|
||||
op.create_index('derivedimage_source_manifest_id_media_type_id_uniqueness_hash', 'derivedimage', ['source_manifest_id', 'media_type_id', 'uniqueness_hash'], unique=True)
|
||||
op.create_index('derivedimage_uniqueness_hash', 'derivedimage', ['uniqueness_hash'], unique=True)
|
||||
op.create_index('derivedimage_uuid', 'derivedimage', ['uuid'], unique=True)
|
||||
|
||||
op.create_table(
|
||||
'manifestblob',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('manifest_id', sa.Integer(), nullable=False),
|
||||
sa.Column('blob_id', sa.Integer(), nullable=False),
|
||||
sa.ForeignKeyConstraint(['blob_id'], ['blob.id'], name=op.f('fk_manifestblob_blob_id_blob')),
|
||||
sa.ForeignKeyConstraint(['manifest_id'], ['manifest.id'], name=op.f('fk_manifestblob_manifest_id_manifest')),
|
||||
sa.PrimaryKeyConstraint('id', name=op.f('pk_manifestblob'))
|
||||
)
|
||||
op.create_index('manifestblob_blob_id', 'manifestblob', ['blob_id'], unique=False)
|
||||
op.create_index('manifestblob_manifest_id', 'manifestblob', ['manifest_id'], unique=False)
|
||||
op.create_index('manifestblob_manifest_id_blob_id', 'manifestblob', ['manifest_id', 'blob_id'], unique=True)
|
||||
|
||||
op.create_table(
|
||||
'manifestlabel',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('repository_id', sa.Integer(), nullable=False),
|
||||
sa.Column('annotated_id', sa.Integer(), nullable=False),
|
||||
sa.Column('label_id', sa.Integer(), nullable=False),
|
||||
sa.ForeignKeyConstraint(['annotated_id'], ['manifest.id'], name=op.f('fk_manifestlabel_annotated_id_manifest')),
|
||||
sa.ForeignKeyConstraint(['label_id'], ['label.id'], name=op.f('fk_manifestlabel_label_id_label')),
|
||||
sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], name=op.f('fk_manifestlabel_repository_id_repository')),
|
||||
sa.PrimaryKeyConstraint('id', name=op.f('pk_manifestlabel'))
|
||||
)
|
||||
op.create_index('manifestlabel_annotated_id', 'manifestlabel', ['annotated_id'], unique=False)
|
||||
op.create_index('manifestlabel_label_id', 'manifestlabel', ['label_id'], unique=False)
|
||||
op.create_index('manifestlabel_repository_id', 'manifestlabel', ['repository_id'], unique=False)
|
||||
op.create_index('manifestlabel_repository_id_annotated_id_label_id', 'manifestlabel', ['repository_id', 'annotated_id', 'label_id'], unique=True)
|
||||
|
||||
op.create_table(
|
||||
'manifestlayer',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('blob_id', sa.Integer(), nullable=False),
|
||||
sa.Column('manifest_id', sa.Integer(), nullable=False),
|
||||
sa.Column('manifest_index', sa.BigInteger(), nullable=False),
|
||||
sa.Column('metadata_json', UTF8LongText, nullable=False),
|
||||
sa.ForeignKeyConstraint(['blob_id'], ['blob.id'], name=op.f('fk_manifestlayer_blob_id_blob')),
|
||||
sa.ForeignKeyConstraint(['manifest_id'], ['manifest.id'], name=op.f('fk_manifestlayer_manifest_id_manifest')),
|
||||
sa.PrimaryKeyConstraint('id', name=op.f('pk_manifestlayer'))
|
||||
)
|
||||
op.create_index('manifestlayer_blob_id', 'manifestlayer', ['blob_id'], unique=False)
|
||||
op.create_index('manifestlayer_manifest_id', 'manifestlayer', ['manifest_id'], unique=False)
|
||||
op.create_index('manifestlayer_manifest_id_manifest_index', 'manifestlayer', ['manifest_id', 'manifest_index'], unique=True)
|
||||
op.create_index('manifestlayer_manifest_index', 'manifestlayer', ['manifest_index'], unique=False)
|
||||
|
||||
op.create_table(
|
||||
'manifestlistmanifest',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('manifest_list_id', sa.Integer(), nullable=False),
|
||||
sa.Column('manifest_id', sa.Integer(), nullable=False),
|
||||
sa.Column('operating_system', UTF8CharField(length=255), nullable=True),
|
||||
sa.Column('architecture', UTF8CharField(length=255), nullable=True),
|
||||
sa.Column('platform_json', UTF8LongText, nullable=True),
|
||||
sa.Column('media_type_id', sa.Integer(), nullable=False),
|
||||
sa.ForeignKeyConstraint(['manifest_id'], ['manifest.id'], name=op.f('fk_manifestlistmanifest_manifest_id_manifest')),
|
||||
sa.ForeignKeyConstraint(['manifest_list_id'], ['manifestlist.id'], name=op.f('fk_manifestlistmanifest_manifest_list_id_manifestlist')),
|
||||
sa.ForeignKeyConstraint(['media_type_id'], ['mediatype.id'], name=op.f('fk_manifestlistmanifest_media_type_id_mediatype')),
|
||||
sa.PrimaryKeyConstraint('id', name=op.f('pk_manifestlistmanifest'))
|
||||
)
|
||||
op.create_index('manifestlistmanifest_manifest_id', 'manifestlistmanifest', ['manifest_id'], unique=False)
|
||||
op.create_index('manifestlistmanifest_manifest_list_id', 'manifestlistmanifest', ['manifest_list_id'], unique=False)
|
||||
op.create_index('manifestlistmanifest_manifest_listid_os_arch_mtid', 'manifestlistmanifest', ['manifest_list_id', 'operating_system', 'architecture', 'media_type_id'], unique=False)
|
||||
op.create_index('manifestlistmanifest_manifest_listid_mtid', 'manifestlistmanifest', ['manifest_list_id', 'media_type_id'], unique=False)
|
||||
op.create_index('manifestlistmanifest_media_type_id', 'manifestlistmanifest', ['media_type_id'], unique=False)
|
||||
|
||||
op.create_table(
|
||||
'tag',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('name', UTF8CharField(length=190), nullable=False),
|
||||
sa.Column('repository_id', sa.Integer(), nullable=False),
|
||||
sa.Column('manifest_list_id', sa.Integer(), nullable=True),
|
||||
sa.Column('lifetime_start', sa.BigInteger(), nullable=False),
|
||||
sa.Column('lifetime_end', sa.BigInteger(), nullable=True),
|
||||
sa.Column('hidden', sa.Boolean(), nullable=False),
|
||||
sa.Column('reverted', sa.Boolean(), nullable=False),
|
||||
sa.Column('protected', sa.Boolean(), nullable=False),
|
||||
sa.Column('tag_kind_id', sa.Integer(), nullable=False),
|
||||
sa.Column('linked_tag_id', sa.Integer(), nullable=True),
|
||||
sa.ForeignKeyConstraint(['linked_tag_id'], ['tag.id'], name=op.f('fk_tag_linked_tag_id_tag')),
|
||||
sa.ForeignKeyConstraint(['manifest_list_id'], ['manifestlist.id'], name=op.f('fk_tag_manifest_list_id_manifestlist')),
|
||||
sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], name=op.f('fk_tag_repository_id_repository')),
|
||||
sa.ForeignKeyConstraint(['tag_kind_id'], ['tagkind.id'], name=op.f('fk_tag_tag_kind_id_tagkind')),
|
||||
sa.PrimaryKeyConstraint('id', name=op.f('pk_tag'))
|
||||
)
|
||||
op.create_index('tag_lifetime_end', 'tag', ['lifetime_end'], unique=False)
|
||||
op.create_index('tag_linked_tag_id', 'tag', ['linked_tag_id'], unique=False)
|
||||
op.create_index('tag_manifest_list_id', 'tag', ['manifest_list_id'], unique=False)
|
||||
op.create_index('tag_repository_id', 'tag', ['repository_id'], unique=False)
|
||||
op.create_index('tag_repository_id_name_hidden', 'tag', ['repository_id', 'name', 'hidden'], unique=False)
|
||||
op.create_index('tag_repository_id_name_lifetime_end', 'tag', ['repository_id', 'name', 'lifetime_end'], unique=True)
|
||||
op.create_index('tag_repository_id_name', 'tag', ['repository_id', 'name'], unique=False)
|
||||
op.create_index('tag_tag_kind_id', 'tag', ['tag_kind_id'], unique=False)
|
||||
|
||||
op.create_table(
|
||||
'manifestlayerdockerv1',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('manifest_layer_id', sa.Integer(), nullable=False),
|
||||
sa.Column('image_id', UTF8CharField(length=255), nullable=False),
|
||||
sa.Column('checksum', UTF8CharField(length=255), nullable=False),
|
||||
sa.Column('compat_json', UTF8LongText, nullable=False),
|
||||
sa.ForeignKeyConstraint(['manifest_layer_id'], ['manifestlayer.id'], name=op.f('fk_manifestlayerdockerv1_manifest_layer_id_manifestlayer')),
|
||||
sa.PrimaryKeyConstraint('id', name=op.f('pk_manifestlayerdockerv1'))
|
||||
)
|
||||
op.create_index('manifestlayerdockerv1_image_id', 'manifestlayerdockerv1', ['image_id'], unique=False)
|
||||
op.create_index('manifestlayerdockerv1_manifest_layer_id', 'manifestlayerdockerv1', ['manifest_layer_id'], unique=False)
|
||||
|
||||
op.create_table(
|
||||
'manifestlayerscan',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('layer_id', sa.Integer(), nullable=False),
|
||||
sa.Column('scannable', sa.Boolean(), nullable=False),
|
||||
sa.Column('scanned_by', UTF8CharField(length=255), nullable=False),
|
||||
sa.ForeignKeyConstraint(['layer_id'], ['manifestlayer.id'], name=op.f('fk_manifestlayerscan_layer_id_manifestlayer')),
|
||||
sa.PrimaryKeyConstraint('id', name=op.f('pk_manifestlayerscan'))
|
||||
)
|
||||
op.create_index('manifestlayerscan_layer_id', 'manifestlayerscan', ['layer_id'], unique=True)
|
||||
|
||||
blobplacementlocation_table = table('blobplacementlocation',
|
||||
column('id', sa.Integer()),
|
||||
column('name', sa.String()),
|
||||
)
|
||||
|
||||
op.bulk_insert(
|
||||
blobplacementlocation_table,
|
||||
[
|
||||
{'name': 'local_eu'},
|
||||
{'name': 'local_us'},
|
||||
],
|
||||
)
|
||||
|
||||
op.bulk_insert(
|
||||
tables.mediatype,
|
||||
[
|
||||
{'name': 'application/vnd.cnr.blob.v0.tar+gzip'},
|
||||
{'name': 'application/vnd.cnr.package-manifest.helm.v0.json'},
|
||||
{'name': 'application/vnd.cnr.package-manifest.kpm.v0.json'},
|
||||
{'name': 'application/vnd.cnr.package-manifest.docker-compose.v0.json'},
|
||||
{'name': 'application/vnd.cnr.package.kpm.v0.tar+gzip'},
|
||||
{'name': 'application/vnd.cnr.package.helm.v0.tar+gzip'},
|
||||
{'name': 'application/vnd.cnr.package.docker-compose.v0.tar+gzip'},
|
||||
{'name': 'application/vnd.cnr.manifests.v0.json'},
|
||||
{'name': 'application/vnd.cnr.manifest.list.v0.json'},
|
||||
],
|
||||
)
|
||||
|
||||
tagkind_table = table('tagkind',
|
||||
column('id', sa.Integer()),
|
||||
column('name', sa.String()),
|
||||
)
|
||||
|
||||
op.bulk_insert(
|
||||
tagkind_table,
|
||||
[
|
||||
{'id': 1, 'name': 'tag'},
|
||||
{'id': 2, 'name': 'release'},
|
||||
{'id': 3, 'name': 'channel'},
|
||||
]
|
||||
)
|
||||
|
||||
def downgrade(tables, tester, progress_reporter):
    """Tear down the app-registry (CNR) schema created by this migration.

    Tables are dropped in reverse-dependency order: tables holding foreign
    keys go first, the tables they reference last.
    """
    op = ProgressWrapper(original_op, progress_reporter)
    tables_in_drop_order = (
        'manifestlayerscan',
        'manifestlayerdockerv1',
        'tag',
        'manifestlistmanifest',
        'manifestlayer',
        'manifestlabel',
        'manifestblob',
        'derivedimage',
        'blobuploading',
        'blobplacement',
        'bittorrentpieces',
        'manifestlist',
        'manifest',
        'blobplacementlocationpreference',
        'blob',
        'tagkind',
        'blobplacementlocation',
    )
    for table_name in tables_in_drop_order:
        op.drop_table(table_name)
|
|
@ -0,0 +1,35 @@
|
|||
"""Add disabled datetime to trigger
|
||||
|
||||
Revision ID: 87fbbc224f10
|
||||
Revises: 17aff2e1354e
|
||||
Create Date: 2017-10-24 14:06:37.658705
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '87fbbc224f10'
|
||||
down_revision = '17aff2e1354e'
|
||||
|
||||
from alembic import op as original_op
|
||||
from data.migrations.progress import ProgressWrapper
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import mysql
|
||||
|
||||
def upgrade(tables, tester, progress_reporter):
    """Add an indexed, nullable `disabled_datetime` column to
    `repositorybuildtrigger`."""
    migration_op = ProgressWrapper(original_op, progress_reporter)
    # ### commands auto generated by Alembic - please adjust! ###
    disabled_column = sa.Column('disabled_datetime', sa.DateTime(), nullable=True)
    migration_op.add_column('repositorybuildtrigger', disabled_column)
    migration_op.create_index('repositorybuildtrigger_disabled_datetime',
                              'repositorybuildtrigger',
                              ['disabled_datetime'],
                              unique=False)
    # ### end Alembic commands ###

    # ### population of test data ### #
    tester.populate_column('repositorybuildtrigger', 'disabled_datetime',
                           tester.TestDataType.DateTime)
    # ### end population of test data ### #
|
||||
|
||||
|
||||
def downgrade(tables, tester, progress_reporter):
    """Remove the `disabled_datetime` column from `repositorybuildtrigger`."""
    migration_op = ProgressWrapper(original_op, progress_reporter)
    # Drop the covering index first, then the column itself.
    migration_op.drop_index('repositorybuildtrigger_disabled_datetime',
                            table_name='repositorybuildtrigger')
    migration_op.drop_column('repositorybuildtrigger', 'disabled_datetime')
|
|
@ -0,0 +1,180 @@
|
|||
"""Add V2_2 data models for Manifest, ManifestBlob and ManifestLegacyImage
|
||||
|
||||
Revision ID: 9093adccc784
|
||||
Revises: 6c21e2cfb8b6
|
||||
Create Date: 2018-08-06 16:07:50.222749
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '9093adccc784'
|
||||
down_revision = '6c21e2cfb8b6'
|
||||
|
||||
from alembic import op as original_op
|
||||
from data.migrations.progress import ProgressWrapper
|
||||
import sqlalchemy as sa
|
||||
from image.docker.schema1 import DOCKER_SCHEMA1_CONTENT_TYPES
|
||||
|
||||
|
||||
def upgrade(tables, tester, progress_reporter):
    """Create the V2_2 manifest data model.

    Adds `manifest`, `manifestblob`, `manifestlabel` and
    `manifestlegacyimage`, plus two mapping tables
    (`tagmanifesttomanifest`, `tagmanifestlabelmap`) that link legacy
    `tagmanifest`/`tagmanifestlabel` rows to the new model.  Also ensures
    all Docker schema-1 content types exist in `mediatype`.
    """
    op = ProgressWrapper(original_op, progress_reporter)
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('manifest',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('repository_id', sa.Integer(), nullable=False),
                    sa.Column('digest', sa.String(length=255), nullable=False),
                    sa.Column('media_type_id', sa.Integer(), nullable=False),
                    sa.Column('manifest_bytes', sa.Text(), nullable=False),
                    sa.ForeignKeyConstraint(['media_type_id'], ['mediatype.id'], name=op.f('fk_manifest_media_type_id_mediatype')),
                    sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], name=op.f('fk_manifest_repository_id_repository')),
                    sa.PrimaryKeyConstraint('id', name=op.f('pk_manifest'))
    )
    op.create_index('manifest_digest', 'manifest', ['digest'], unique=False)
    op.create_index('manifest_media_type_id', 'manifest', ['media_type_id'], unique=False)
    op.create_index('manifest_repository_id', 'manifest', ['repository_id'], unique=False)
    # A digest is unique per-repository, not globally.
    op.create_index('manifest_repository_id_digest', 'manifest', ['repository_id', 'digest'], unique=True)
    op.create_index('manifest_repository_id_media_type_id', 'manifest', ['repository_id', 'media_type_id'], unique=False)

    # Blobs referenced by a manifest; `blob_id` points at the legacy
    # `imagestorage` table.
    op.create_table('manifestblob',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('repository_id', sa.Integer(), nullable=False),
                    sa.Column('manifest_id', sa.Integer(), nullable=False),
                    sa.Column('blob_id', sa.Integer(), nullable=False),
                    sa.Column('blob_index', sa.Integer(), nullable=False),
                    sa.ForeignKeyConstraint(['blob_id'], ['imagestorage.id'], name=op.f('fk_manifestblob_blob_id_imagestorage')),
                    sa.ForeignKeyConstraint(['manifest_id'], ['manifest.id'], name=op.f('fk_manifestblob_manifest_id_manifest')),
                    sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], name=op.f('fk_manifestblob_repository_id_repository')),
                    sa.PrimaryKeyConstraint('id', name=op.f('pk_manifestblob'))
    )
    op.create_index('manifestblob_blob_id', 'manifestblob', ['blob_id'], unique=False)
    op.create_index('manifestblob_manifest_id', 'manifestblob', ['manifest_id'], unique=False)
    op.create_index('manifestblob_manifest_id_blob_id', 'manifestblob', ['manifest_id', 'blob_id'], unique=True)
    op.create_index('manifestblob_manifest_id_blob_index', 'manifestblob', ['manifest_id', 'blob_index'], unique=True)
    op.create_index('manifestblob_repository_id', 'manifestblob', ['repository_id'], unique=False)

    # Labels attached directly to a manifest.
    op.create_table('manifestlabel',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('repository_id', sa.Integer(), nullable=False),
                    sa.Column('manifest_id', sa.Integer(), nullable=False),
                    sa.Column('label_id', sa.Integer(), nullable=False),
                    sa.ForeignKeyConstraint(['label_id'], ['label.id'], name=op.f('fk_manifestlabel_label_id_label')),
                    sa.ForeignKeyConstraint(['manifest_id'], ['manifest.id'], name=op.f('fk_manifestlabel_manifest_id_manifest')),
                    sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], name=op.f('fk_manifestlabel_repository_id_repository')),
                    sa.PrimaryKeyConstraint('id', name=op.f('pk_manifestlabel'))
    )
    op.create_index('manifestlabel_label_id', 'manifestlabel', ['label_id'], unique=False)
    op.create_index('manifestlabel_manifest_id', 'manifestlabel', ['manifest_id'], unique=False)
    op.create_index('manifestlabel_manifest_id_label_id', 'manifestlabel', ['manifest_id', 'label_id'], unique=True)
    op.create_index('manifestlabel_repository_id', 'manifestlabel', ['repository_id'], unique=False)

    # One-to-one link from a manifest to its legacy `image` row (the
    # `manifest_id` index below is unique).
    op.create_table('manifestlegacyimage',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('repository_id', sa.Integer(), nullable=False),
                    sa.Column('manifest_id', sa.Integer(), nullable=False),
                    sa.Column('image_id', sa.Integer(), nullable=False),
                    sa.ForeignKeyConstraint(['image_id'], ['image.id'], name=op.f('fk_manifestlegacyimage_image_id_image')),
                    sa.ForeignKeyConstraint(['manifest_id'], ['manifest.id'], name=op.f('fk_manifestlegacyimage_manifest_id_manifest')),
                    sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], name=op.f('fk_manifestlegacyimage_repository_id_repository')),
                    sa.PrimaryKeyConstraint('id', name=op.f('pk_manifestlegacyimage'))
    )
    op.create_index('manifestlegacyimage_image_id', 'manifestlegacyimage', ['image_id'], unique=False)
    op.create_index('manifestlegacyimage_manifest_id', 'manifestlegacyimage', ['manifest_id'], unique=True)
    op.create_index('manifestlegacyimage_repository_id', 'manifestlegacyimage', ['repository_id'], unique=False)

    # Mapping from a legacy `tagmanifest` row to its new `manifest` row;
    # both sides are uniquely indexed, so the mapping is one-to-one.
    # `broken` marks mappings that failed to backfill.
    op.create_table('tagmanifesttomanifest',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('tag_manifest_id', sa.Integer(), nullable=False),
                    sa.Column('manifest_id', sa.Integer(), nullable=False),
                    sa.Column('broken', sa.Boolean(), nullable=False, server_default=sa.sql.expression.false()),
                    sa.ForeignKeyConstraint(['manifest_id'], ['manifest.id'], name=op.f('fk_tagmanifesttomanifest_manifest_id_manifest')),
                    sa.ForeignKeyConstraint(['tag_manifest_id'], ['tagmanifest.id'], name=op.f('fk_tagmanifesttomanifest_tag_manifest_id_tagmanifest')),
                    sa.PrimaryKeyConstraint('id', name=op.f('pk_tagmanifesttomanifest'))
    )
    op.create_index('tagmanifesttomanifest_broken', 'tagmanifesttomanifest', ['broken'], unique=False)
    op.create_index('tagmanifesttomanifest_manifest_id', 'tagmanifesttomanifest', ['manifest_id'], unique=True)
    op.create_index('tagmanifesttomanifest_tag_manifest_id', 'tagmanifesttomanifest', ['tag_manifest_id'], unique=True)

    # Mapping from legacy `tagmanifestlabel` rows to new `manifestlabel`
    # rows.  The new-model columns are nullable so a row can be written
    # before its new-model counterpart exists; `broken_manifest` flags
    # failures.
    op.create_table('tagmanifestlabelmap',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('tag_manifest_id', sa.Integer(), nullable=False),
                    sa.Column('manifest_id', sa.Integer(), nullable=True),
                    sa.Column('label_id', sa.Integer(), nullable=False),
                    sa.Column('tag_manifest_label_id', sa.Integer(), nullable=False),
                    sa.Column('manifest_label_id', sa.Integer(), nullable=True),
                    sa.Column('broken_manifest', sa.Boolean(), nullable=False, server_default=sa.sql.expression.false()),
                    sa.ForeignKeyConstraint(['label_id'], ['label.id'], name=op.f('fk_tagmanifestlabelmap_label_id_label')),
                    sa.ForeignKeyConstraint(['manifest_id'], ['manifest.id'], name=op.f('fk_tagmanifestlabelmap_manifest_id_manifest')),
                    sa.ForeignKeyConstraint(['manifest_label_id'], ['manifestlabel.id'], name=op.f('fk_tagmanifestlabelmap_manifest_label_id_manifestlabel')),
                    sa.ForeignKeyConstraint(['tag_manifest_id'], ['tagmanifest.id'], name=op.f('fk_tagmanifestlabelmap_tag_manifest_id_tagmanifest')),
                    sa.ForeignKeyConstraint(['tag_manifest_label_id'], ['tagmanifestlabel.id'], name=op.f('fk_tagmanifestlabelmap_tag_manifest_label_id_tagmanifestlabel')),
                    sa.PrimaryKeyConstraint('id', name=op.f('pk_tagmanifestlabelmap'))
    )
    op.create_index('tagmanifestlabelmap_broken_manifest', 'tagmanifestlabelmap', ['broken_manifest'], unique=False)
    op.create_index('tagmanifestlabelmap_label_id', 'tagmanifestlabelmap', ['label_id'], unique=False)
    op.create_index('tagmanifestlabelmap_manifest_id', 'tagmanifestlabelmap', ['manifest_id'], unique=False)
    op.create_index('tagmanifestlabelmap_manifest_label_id', 'tagmanifestlabelmap', ['manifest_label_id'], unique=False)
    op.create_index('tagmanifestlabelmap_tag_manifest_id', 'tagmanifestlabelmap', ['tag_manifest_id'], unique=False)
    op.create_index('tagmanifestlabelmap_tag_manifest_label_id', 'tagmanifestlabelmap', ['tag_manifest_label_id'], unique=False)
    # ### end Alembic commands ###

    # Register every Docker schema-1 content type as a media type.
    for media_type in DOCKER_SCHEMA1_CONTENT_TYPES:
        op.bulk_insert(tables.mediatype,
                       [
                           {'name': media_type},
                       ])

    # ### population of test data ### #
    tester.populate_table('manifest', [
        ('digest', tester.TestDataType.String),
        ('manifest_bytes', tester.TestDataType.JSON),
        ('media_type_id', tester.TestDataType.Foreign('mediatype')),
        ('repository_id', tester.TestDataType.Foreign('repository')),
    ])

    tester.populate_table('manifestblob', [
        ('manifest_id', tester.TestDataType.Foreign('manifest')),
        ('repository_id', tester.TestDataType.Foreign('repository')),
        ('blob_id', tester.TestDataType.Foreign('imagestorage')),
        ('blob_index', tester.TestDataType.Integer),
    ])

    tester.populate_table('manifestlabel', [
        ('manifest_id', tester.TestDataType.Foreign('manifest')),
        ('label_id', tester.TestDataType.Foreign('label')),
        ('repository_id', tester.TestDataType.Foreign('repository')),
    ])

    tester.populate_table('manifestlegacyimage', [
        ('manifest_id', tester.TestDataType.Foreign('manifest')),
        ('image_id', tester.TestDataType.Foreign('image')),
        ('repository_id', tester.TestDataType.Foreign('repository')),
    ])

    tester.populate_table('tagmanifesttomanifest', [
        ('manifest_id', tester.TestDataType.Foreign('manifest')),
        ('tag_manifest_id', tester.TestDataType.Foreign('tagmanifest')),
    ])

    tester.populate_table('tagmanifestlabelmap', [
        ('manifest_id', tester.TestDataType.Foreign('manifest')),
        ('tag_manifest_id', tester.TestDataType.Foreign('tagmanifest')),
        ('tag_manifest_label_id', tester.TestDataType.Foreign('tagmanifestlabel')),
        ('manifest_label_id', tester.TestDataType.Foreign('manifestlabel')),
        ('label_id', tester.TestDataType.Foreign('label')),
    ])

    # ### end population of test data ### #
|
||||
|
||||
|
||||
def downgrade(tables, tester, progress_reporter):
    """Remove the V2_2 manifest tables and the schema-1 media type rows."""
    op = ProgressWrapper(original_op, progress_reporter)

    # Delete the media type rows that the upgrade inserted.
    mediatype = tables.mediatype
    for media_type in DOCKER_SCHEMA1_CONTENT_TYPES:
        name_matches = mediatype.c.name == op.inline_literal(media_type)
        op.execute(mediatype.delete().where(name_matches))

    # Drop the mapping/detail tables before the `manifest` table they
    # reference.
    for table_name in ('tagmanifestlabelmap',
                       'tagmanifesttomanifest',
                       'manifestlegacyimage',
                       'manifestlabel',
                       'manifestblob',
                       'manifest'):
        op.drop_table(table_name)
|
|
@ -0,0 +1,31 @@
|
|||
"""Create new notification type
|
||||
|
||||
Revision ID: 94836b099894
|
||||
Revises: faf752bd2e0a
|
||||
Create Date: 2016-11-30 10:29:51.519278
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '94836b099894'
|
||||
down_revision = 'faf752bd2e0a'
|
||||
|
||||
from alembic import op as original_op
|
||||
from data.migrations.progress import ProgressWrapper
|
||||
|
||||
|
||||
def upgrade(tables, tester, progress_reporter):
    """Insert the new `build_cancelled` external notification event type."""
    migration_op = ProgressWrapper(original_op, progress_reporter)
    new_events = [
        {'name': 'build_cancelled'},
    ]
    migration_op.bulk_insert(tables.externalnotificationevent, new_events)
|
||||
|
||||
|
||||
def downgrade(tables, tester, progress_reporter):
    """Delete the `build_cancelled` external notification event row."""
    migration_op = ProgressWrapper(original_op, progress_reporter)
    event_table = tables.externalnotificationevent
    name_matches = event_table.c.name == migration_op.inline_literal('build_cancelled')
    migration_op.execute(event_table.delete().where(name_matches))
|
|
@ -0,0 +1,101 @@
|
|||
"""back fill build expand_config
|
||||
|
||||
Revision ID: a6c463dfb9fe
|
||||
Revises: b4df55dea4b3
|
||||
Create Date: 2017-03-17 10:00:19.739858
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
import json
|
||||
import os
|
||||
|
||||
from app import app
|
||||
from peewee import *
|
||||
from data.database import BaseModel
|
||||
|
||||
revision = 'a6c463dfb9fe'
|
||||
down_revision = 'b4df55dea4b3'
|
||||
|
||||
from alembic import op as original_op
|
||||
from data.migrations.progress import ProgressWrapper
|
||||
|
||||
|
||||
class RepositoryBuildTrigger(BaseModel):
    # Minimal migration-local stand-in for the real RepositoryBuildTrigger
    # model: only the `config` JSON-text column is touched by this backfill.
    # NOTE(review): presumably peewee derives the table name from the class
    # name so this maps onto the existing `repositorybuildtrigger` table —
    # confirm against data.database.
    config = TextField(default='{}')
|
||||
|
||||
def upgrade(tables, tester, progress_reporter):
    """Backfill `context`/`dockerfile_path` into each build trigger's JSON
    config, derived from its legacy `subdir` value."""
    op = ProgressWrapper(original_op, progress_reporter)
    # Data backfills only run against a fully configured deployment.
    if not app.config.get('SETUP_COMPLETE', False):
        return

    for trigger in RepositoryBuildTrigger.select():
        expanded = get_config_expand(json.loads(trigger.config))
        trigger.config = json.dumps(expanded)
        trigger.save()
|
||||
|
||||
|
||||
def downgrade(tables, tester, progress_reporter):
    """Strip the expanded `context`/`dockerfile_path` keys back out of each
    build trigger's JSON config.

    Bug fix: this previously called get_config_expand() — the *upgrade*
    transformation — so downgrading re-applied the expansion instead of
    reversing it.  get_config_contract() (defined below in this module and
    previously unused) is the documented inverse that deletes the expanded
    keys, so it is what a downgrade must apply.
    """
    op = ProgressWrapper(original_op, progress_reporter)
    # Data backfills only run against a fully configured deployment.
    if not app.config.get('SETUP_COMPLETE', False):
        return

    for trigger in RepositoryBuildTrigger.select():
        config = json.loads(trigger.config)
        trigger.config = json.dumps(get_config_contract(config))
        trigger.save()
|
||||
|
||||
|
||||
def create_context(current_subdir):
    """Derive the build-context directory from a trigger's `subdir` value.

    An empty subdir maps to the path-separator root; otherwise the subdir
    is normalized to end with a separator and its parent component is
    returned via os.path.split.
    """
    normalized = current_subdir if current_subdir != "" else os.path.sep
    if not normalized.endswith(os.path.sep):
        normalized = normalized + os.path.sep
    head, _tail = os.path.split(normalized)
    return head
|
||||
|
||||
|
||||
def create_dockerfile_path(current_subdir):
    """Build the Dockerfile path for a trigger's `subdir` value.

    Empty subdir means the repository root; the subdir is normalized to
    end with a separator before "Dockerfile" is appended.
    """
    base = current_subdir if current_subdir != "" else os.path.sep
    if not base.endswith(os.path.sep):
        base += os.path.sep
    return base + "Dockerfile"
|
||||
|
||||
|
||||
def get_config_expand(config):
    """Transform an old trigger config into the new record shape.

    Adds `context` and `dockerfile_path` keys derived from `subdir`.
    Falsy configs and configs that already carry the new keys are
    returned unchanged; a config without `subdir` yields an empty dict.
    """
    if not config:
        return config

    # Skip records that have already been migrated.
    if "context" in config or "dockerfile_path" in config:
        return config

    if "subdir" not in config:
        return {}

    expanded = dict(config)
    expanded["context"] = create_context(config["subdir"])
    expanded["dockerfile_path"] = create_dockerfile_path(config["subdir"])
    return expanded
|
||||
|
||||
|
||||
def get_config_contract(config):
    """Remove the expanded `context` and `dockerfile_path` keys from a
    trigger config, mutating it in place and returning it.

    Falsy configs are returned unchanged.
    """
    if not config:
        return config

    for expanded_key in ("context", "dockerfile_path"):
        config.pop(expanded_key, None)

    return config
|
|
@ -0,0 +1,53 @@
|
|||
"""Add deleted namespace table
|
||||
|
||||
Revision ID: b4c2d45bc132
|
||||
Revises: 152edccba18c
|
||||
Create Date: 2018-02-27 11:43:02.329941
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'b4c2d45bc132'
|
||||
down_revision = '152edccba18c'
|
||||
|
||||
from alembic import op as original_op
|
||||
from data.migrations.progress import ProgressWrapper
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
def upgrade(tables, tester, progress_reporter):
    """Create the `deletednamespace` table.

    Records namespaces (rows of `user`) marked for deletion: when they
    were marked, the original username/email, and the queue item id (if
    any) tracking the deletion work.
    """
    op = ProgressWrapper(original_op, progress_reporter)
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('deletednamespace',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('namespace_id', sa.Integer(), nullable=False),
                    sa.Column('marked', sa.DateTime(), nullable=False),
                    sa.Column('original_username', sa.String(length=255), nullable=False),
                    sa.Column('original_email', sa.String(length=255), nullable=False),
                    # Nullable: a namespace can be marked before any queue
                    # item exists for it.
                    sa.Column('queue_id', sa.String(length=255), nullable=True),
                    sa.ForeignKeyConstraint(['namespace_id'], ['user.id'], name=op.f('fk_deletednamespace_namespace_id_user')),
                    sa.PrimaryKeyConstraint('id', name=op.f('pk_deletednamespace'))
    )
    # Unique: a namespace can be marked for deletion at most once.
    op.create_index('deletednamespace_namespace_id', 'deletednamespace', ['namespace_id'], unique=True)
    op.create_index('deletednamespace_original_email', 'deletednamespace', ['original_email'], unique=False)
    op.create_index('deletednamespace_original_username', 'deletednamespace', ['original_username'], unique=False)
    op.create_index('deletednamespace_queue_id', 'deletednamespace', ['queue_id'], unique=False)
    # ### end Alembic commands ###

    # ### population of test data ### #
    tester.populate_table('deletednamespace', [
        ('namespace_id', tester.TestDataType.Foreign('user')),
        ('marked', tester.TestDataType.DateTime),
        ('original_username', tester.TestDataType.UTF8Char),
        ('original_email', tester.TestDataType.String),
        ('queue_id', tester.TestDataType.Foreign('queueitem')),
    ])
    # ### end population of test data ### #
|
||||
|
||||
|
||||
def downgrade(tables, tester, progress_reporter):
    """Drop the `deletednamespace` table (indexes go with it)."""
    migration_op = ProgressWrapper(original_op, progress_reporter)
    migration_op.drop_table('deletednamespace')
|
51
data/migrations/versions/b4df55dea4b3_add_repository_kind.py
Normal file
51
data/migrations/versions/b4df55dea4b3_add_repository_kind.py
Normal file
|
@ -0,0 +1,51 @@
|
|||
"""add repository kind
|
||||
|
||||
Revision ID: b4df55dea4b3
|
||||
Revises: 7a525c68eb13
|
||||
Create Date: 2017-03-19 12:59:41.484430
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'b4df55dea4b3'
|
||||
down_revision = 'b8ae68ad3e52'
|
||||
|
||||
from alembic import op as original_op
|
||||
from data.migrations.progress import ProgressWrapper
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import mysql
|
||||
|
||||
|
||||
def upgrade(tables, tester, progress_reporter):
    """Add repository kinds and tag every repository with one.

    Creates the `repositorykind` lookup table, seeds it with the two
    kinds ('image', 'application'), and adds a `repository.kind_id`
    foreign-key column defaulting to the 'image' kind.
    """
    op = ProgressWrapper(original_op, progress_reporter)
    op.create_table(
        'repositorykind',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_repositorykind'))
    )
    op.create_index('repositorykind_name', 'repositorykind', ['name'], unique=True)

    # Fixed ids so code can depend on 1 == image, 2 == application.
    op.bulk_insert(
        tables.repositorykind,
        [
            {'id': 1, 'name': 'image'},
            {'id': 2, 'name': 'application'},
        ],
    )

    # server_default='1' backfills every existing repository as kind
    # 'image' without a separate UPDATE statement.
    op.add_column(u'repository', sa.Column('kind_id', sa.Integer(), nullable=False, server_default='1'))
    op.create_index('repository_kind_id', 'repository', ['kind_id'], unique=False)
    op.create_foreign_key(op.f('fk_repository_kind_id_repositorykind'), 'repository', 'repositorykind', ['kind_id'], ['id'])

    # ### population of test data ### #
    tester.populate_column('repository', 'kind_id', tester.TestDataType.Foreign('repositorykind'))
    # ### end population of test data ### #
|
||||
|
||||
|
||||
def downgrade(tables, tester, progress_reporter):
    """Remove `repository.kind_id` and the `repositorykind` table."""
    migration_op = ProgressWrapper(original_op, progress_reporter)
    # Drop the foreign key and index on the column before the column
    # itself, then the now-unreferenced lookup table.
    migration_op.drop_constraint(migration_op.f('fk_repository_kind_id_repositorykind'),
                                 'repository', type_='foreignkey')
    migration_op.drop_index('repository_kind_id', table_name='repository')
    migration_op.drop_column(u'repository', 'kind_id')
    migration_op.drop_table('repositorykind')
|
|
@ -0,0 +1,46 @@
|
|||
"""Add RobotAccountMetadata table
|
||||
|
||||
Revision ID: b547bc139ad8
|
||||
Revises: 0cf50323c78b
|
||||
Create Date: 2018-03-09 15:50:48.298880
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'b547bc139ad8'
|
||||
down_revision = '0cf50323c78b'
|
||||
|
||||
from alembic import op as original_op
|
||||
from data.migrations.progress import ProgressWrapper
|
||||
import sqlalchemy as sa
|
||||
from util.migrate import UTF8CharField
|
||||
|
||||
|
||||
def upgrade(tables, tester, progress_reporter):
  """Create the RobotAccountMetadata table keyed one-to-one to robot users.

  Args:
    tables: SQLAlchemy table objects for the current schema.
    tester: helper used to populate sample rows in test runs.
    progress_reporter: reporter wrapped around alembic ops for progress metrics.
  """
  op = ProgressWrapper(original_op, progress_reporter)
  # ### commands auto generated by Alembic - please adjust! ###
  op.create_table('robotaccountmetadata',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('robot_account_id', sa.Integer(), nullable=False),
    # UTF8CharField: project helper type for MySQL charset handling -- see util.migrate
    sa.Column('description', UTF8CharField(length=255), nullable=False),
    sa.Column('unstructured_json', sa.Text(), nullable=False),
    sa.ForeignKeyConstraint(['robot_account_id'], ['user.id'], name=op.f('fk_robotaccountmetadata_robot_account_id_user')),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_robotaccountmetadata'))
  )
  # unique=True: at most one metadata row per robot account.
  op.create_index('robotaccountmetadata_robot_account_id', 'robotaccountmetadata', ['robot_account_id'], unique=True)
  # ### end Alembic commands ###

  # ### population of test data ### #
  tester.populate_table('robotaccountmetadata', [
    ('robot_account_id', tester.TestDataType.Foreign('user')),
    ('description', tester.TestDataType.UTF8Char),
    ('unstructured_json', tester.TestDataType.JSON),
  ])
  # ### end population of test data ### #
|
||||
|
||||
|
||||
def downgrade(tables, tester, progress_reporter):
  """Drop the RobotAccountMetadata table created by upgrade()."""
  op = ProgressWrapper(original_op, progress_reporter)
  # ### commands auto generated by Alembic - please adjust! ###
  op.drop_table('robotaccountmetadata')
  # ### end Alembic commands ###
|
|
@ -0,0 +1,37 @@
|
|||
"""Change BlobUpload fields to BigIntegers to allow layers > 8GB
|
||||
|
||||
Revision ID: b8ae68ad3e52
|
||||
Revises: 7a525c68eb13
|
||||
Create Date: 2017-02-27 11:26:49.182349
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'b8ae68ad3e52'
|
||||
down_revision = '7a525c68eb13'
|
||||
|
||||
from alembic import op as original_op
|
||||
from data.migrations.progress import ProgressWrapper
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import mysql
|
||||
|
||||
def upgrade(tables, tester, progress_reporter):
  """Widen the BlobUpload byte counters to BigInteger so layers > 8GB fit."""
  op = ProgressWrapper(original_op, progress_reporter)

  for column_name in ('byte_count', 'uncompressed_byte_count'):
    op.alter_column('blobupload', column_name,
                    existing_type=sa.Integer(), type_=sa.BigInteger())

  # ### population of test data ### #
  for column_name in ('byte_count', 'uncompressed_byte_count'):
    tester.populate_column('blobupload', column_name, tester.TestDataType.BigInteger)
  # ### end population of test data ### #
|
||||
|
||||
|
||||
def downgrade(tables, tester, progress_reporter):
  """Shrink the BlobUpload byte counters back to plain Integer."""
  op = ProgressWrapper(original_op, progress_reporter)

  # ### population of test data ### #
  for column_name in ('byte_count', 'uncompressed_byte_count'):
    tester.populate_column('blobupload', column_name, tester.TestDataType.Integer)
  # ### end population of test data ### #

  for column_name in ('byte_count', 'uncompressed_byte_count'):
    op.alter_column('blobupload', column_name,
                    existing_type=sa.BigInteger(), type_=sa.Integer())
|
|
@ -0,0 +1,35 @@
|
|||
"""Add lifetime end indexes to tag tables
|
||||
|
||||
Revision ID: b9045731c4de
|
||||
Revises: e184af42242d
|
||||
Create Date: 2019-02-14 17:18:40.474310
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'b9045731c4de'
|
||||
down_revision = 'e184af42242d'
|
||||
|
||||
from alembic import op as original_op
|
||||
from data.migrations.progress import ProgressWrapper
|
||||
|
||||
def upgrade(tables, tester, progress_reporter):
  """Add (repository_id, lifetime start/end) indexes to both tag tables.

  Covers the legacy `repositorytag` (*_ts columns) and the new `tag`
  (*_ms columns) tables.
  """
  op = ProgressWrapper(original_op, progress_reporter)
  # ### commands auto generated by Alembic - please adjust! ###
  op.create_index('repositorytag_repository_id_lifetime_end_ts', 'repositorytag', ['repository_id', 'lifetime_end_ts'], unique=False)
  op.create_index('tag_repository_id_lifetime_end_ms', 'tag', ['repository_id', 'lifetime_end_ms'], unique=False)

  op.create_index('repositorytag_repository_id_lifetime_start_ts', 'repositorytag', ['repository_id', 'lifetime_start_ts'], unique=False)
  op.create_index('tag_repository_id_lifetime_start_ms', 'tag', ['repository_id', 'lifetime_start_ms'], unique=False)
  # ### end Alembic commands ###
|
||||
def downgrade(tables, tester, progress_reporter):
  """Drop the four lifetime start/end indexes added by upgrade()."""
  op = ProgressWrapper(original_op, progress_reporter)
  # ### commands auto generated by Alembic - please adjust! ###
  op.drop_index('tag_repository_id_lifetime_end_ms', table_name='tag')
  op.drop_index('repositorytag_repository_id_lifetime_end_ts', table_name='repositorytag')

  op.drop_index('tag_repository_id_lifetime_start_ms', table_name='tag')
  op.drop_index('repositorytag_repository_id_lifetime_start_ts', table_name='repositorytag')
  # ### end Alembic commands ###
|
|
@ -0,0 +1,71 @@
|
|||
"""Run full tag backfill
|
||||
|
||||
Revision ID: b918abdbee43
|
||||
Revises: 481623ba00ba
|
||||
Create Date: 2019-03-14 13:38:03.411609
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'b918abdbee43'
|
||||
down_revision = '481623ba00ba'
|
||||
|
||||
import logging.config
|
||||
|
||||
from app import app
|
||||
from peewee import JOIN, fn
|
||||
|
||||
from workers.tagbackfillworker import backfill_tag
|
||||
from data.database import RepositoryTag, Repository, User, TagToRepositoryTag
|
||||
from util.log import logfile_path
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def upgrade(tables, tester, progress_reporter):
  """Backfill new-style Tag rows for every legacy RepositoryTag not yet linked.

  Scans RepositoryTag in increasing-ID windows, selecting non-hidden tags with
  no TagToRepositoryTag link, and runs backfill_tag on each. The window doubles
  (capped at 100,000 ids) while batches come back sparse, so already-migrated
  databases are skimmed quickly.

  No-op until initial setup has completed (nothing to migrate beforehand).
  """
  if not app.config.get('SETUP_COMPLETE', False):
    return

  start_id = 0
  end_id = 1000
  size = 1000

  max_id = RepositoryTag.select(fn.Max(RepositoryTag.id)).scalar()
  if max_id is None:
    # Table is empty; nothing to backfill.
    return

  # Use lazy %-args rather than eager interpolation, consistent with the
  # other logger calls in this function.
  logger.info("Found maximum ID %s", max_id)

  while start_id <= max_id:
    logger.info('Checking tag range %s - %s', start_id, end_id)
    r = list(RepositoryTag
             .select()
             .join(Repository)
             .switch(RepositoryTag)
             .join(TagToRepositoryTag, JOIN.LEFT_OUTER)
             .where(TagToRepositoryTag.id >> None)
             .where(RepositoryTag.hidden == False,
                    RepositoryTag.id >= start_id,
                    RepositoryTag.id < end_id))

    # Grow the window while batches are sparse, up to a 100K-id ceiling.
    if len(r) < 1000 and size < 100000:
      size *= 2

    start_id = end_id
    end_id = start_id + size

    if not r:
      continue

    found = len(r)
    logger.info('Found %s tags to backfill', found)
    for index, tag_row in enumerate(r):
      logger.info("Backfilling tag %s of %s", index, found)
      backfill_tag(tag_row)
|
||||
|
||||
def downgrade(tables, tester, progress_reporter):
  """Deliberate no-op: the tag backfill cannot be reversed."""
  pass
|
52
data/migrations/versions/be8d1c402ce0_add_teamsync_table.py
Normal file
52
data/migrations/versions/be8d1c402ce0_add_teamsync_table.py
Normal file
|
@ -0,0 +1,52 @@
|
|||
"""Add TeamSync table
|
||||
|
||||
Revision ID: be8d1c402ce0
|
||||
Revises: a6c463dfb9fe
|
||||
Create Date: 2017-02-23 13:34:52.356812
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'be8d1c402ce0'
|
||||
down_revision = 'a6c463dfb9fe'
|
||||
|
||||
from alembic import op as original_op
|
||||
from data.migrations.progress import ProgressWrapper
|
||||
import sqlalchemy as sa
|
||||
from util.migrate import UTF8LongText
|
||||
|
||||
def upgrade(tables, tester, progress_reporter):
  """Create the TeamSync table linking a team to an external sync service."""
  op = ProgressWrapper(original_op, progress_reporter)
  ### commands auto generated by Alembic - please adjust! ###
  op.create_table('teamsync',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('team_id', sa.Integer(), nullable=False),
    sa.Column('transaction_id', sa.String(length=255), nullable=False),
    sa.Column('last_updated', sa.DateTime(), nullable=True),
    sa.Column('service_id', sa.Integer(), nullable=False),
    # UTF8LongText: project helper type for long text on MySQL -- see util.migrate
    sa.Column('config', UTF8LongText(), nullable=False),
    sa.ForeignKeyConstraint(['service_id'], ['loginservice.id'], name=op.f('fk_teamsync_service_id_loginservice')),
    sa.ForeignKeyConstraint(['team_id'], ['team.id'], name=op.f('fk_teamsync_team_id_team')),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_teamsync'))
  )
  op.create_index('teamsync_last_updated', 'teamsync', ['last_updated'], unique=False)
  op.create_index('teamsync_service_id', 'teamsync', ['service_id'], unique=False)
  # unique=True: a team can sync against at most one service.
  op.create_index('teamsync_team_id', 'teamsync', ['team_id'], unique=True)
  ### end Alembic commands ###

  # ### population of test data ### #
  tester.populate_table('teamsync', [
    ('team_id', tester.TestDataType.Foreign('team')),
    ('transaction_id', tester.TestDataType.String),
    ('last_updated', tester.TestDataType.DateTime),
    ('service_id', tester.TestDataType.Foreign('loginservice')),
    ('config', tester.TestDataType.JSON),
  ])
  # ### end population of test data ### #
|
||||
def downgrade(tables, tester, progress_reporter):
  """Drop the TeamSync table created by upgrade()."""
  op = ProgressWrapper(original_op, progress_reporter)
  ### commands auto generated by Alembic - please adjust! ###
  op.drop_table('teamsync')
  ### end Alembic commands ###
|
|
@ -0,0 +1,34 @@
|
|||
from image.docker.schema2 import DOCKER_SCHEMA2_CONTENT_TYPES
|
||||
|
||||
"""Add schema2 media types
|
||||
|
||||
Revision ID: c00a1f15968b
|
||||
Revises: 67f0abd172ae
|
||||
Create Date: 2018-11-13 09:20:21.968503
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'c00a1f15968b'
|
||||
down_revision = '67f0abd172ae'
|
||||
|
||||
from alembic import op as original_op
|
||||
from data.migrations.progress import ProgressWrapper
|
||||
|
||||
def upgrade(tables, tester, progress_reporter):
  """Register every Docker schema 2 content type as a media type row."""
  op = ProgressWrapper(original_op, progress_reporter)
  for content_type in DOCKER_SCHEMA2_CONTENT_TYPES:
    # One insert per type, mirroring how the rows are deleted on downgrade.
    op.bulk_insert(tables.mediatype, [{'name': content_type}])
||||
|
||||
def downgrade(tables, tester, progress_reporter):
  """Delete the schema 2 media type rows added by upgrade()."""
  op = ProgressWrapper(original_op, progress_reporter)
  for content_type in DOCKER_SCHEMA2_CONTENT_TYPES:
    mediatype = tables.mediatype
    op.execute(
        mediatype.delete().where(
            mediatype.c.name == op.inline_literal(content_type)))
|
|
@ -0,0 +1,82 @@
|
|||
"""Remove unencrypted fields and data
|
||||
|
||||
Revision ID: c059b952ed76
|
||||
Revises: 703298a825c2
|
||||
Create Date: 2019-08-19 16:31:00.952773
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'c059b952ed76'
|
||||
down_revision = '703298a825c2'
|
||||
|
||||
import uuid
|
||||
|
||||
from alembic import op as original_op
|
||||
from data.migrations.progress import ProgressWrapper
|
||||
import sqlalchemy as sa
|
||||
|
||||
from data.database import FederatedLogin, User, RobotAccountToken
|
||||
|
||||
|
||||
def upgrade(tables, tester, progress_reporter):
  """Drop the plaintext credential columns and scrub legacy robot credentials.

  Assumes the encrypted replacement columns were added and backfilled by an
  earlier migration (c13c8052f7a6) -- this step is destructive.
  """
  op = ProgressWrapper(original_op, progress_reporter)
  # ### commands auto generated by Alembic - please adjust! ###
  op.drop_index('oauthaccesstoken_refresh_token', table_name='oauthaccesstoken')
  op.drop_column(u'oauthaccesstoken', 'refresh_token')

  op.drop_column('accesstoken', 'code')

  op.drop_column('appspecificauthtoken', 'token_code')

  op.drop_column('oauthaccesstoken', 'access_token')
  op.drop_column('oauthapplication', 'client_secret')

  op.drop_column('oauthauthorizationcode', 'code')

  op.drop_column('repositorybuildtrigger', 'private_key')
  op.drop_column('repositorybuildtrigger', 'auth_token')
  # ### end Alembic commands ###

  # Overwrite all plaintext robot credentials.
  # EAFP loop: fetch one unmigrated token at a time until none remain.
  while True:
    try:
      robot_account_token = RobotAccountToken.get(fully_migrated=False)
      robot_account = robot_account_token.robot_account

      # Replace the robot's email with a random UUID string.
      robot_account.email = str(uuid.uuid4())
      robot_account.save()

      # Reset the federated login ident to a non-secret, id-derived value.
      federated_login = FederatedLogin.get(user=robot_account)
      federated_login.service_ident = 'robot:%s' % robot_account.id
      federated_login.save()

      # Mark done so the next .get() returns a different row.
      robot_account_token.fully_migrated = True
      robot_account_token.save()
    except RobotAccountToken.DoesNotExist:
      break
|
||||
def downgrade(tables, tester, progress_reporter):
  """Re-add the plaintext credential columns (data itself is unrecoverable)."""
  op = ProgressWrapper(original_op, progress_reporter)
  # ### commands auto generated by Alembic - please adjust! ###
  op.add_column(u'oauthaccesstoken', sa.Column('refresh_token', sa.String(length=255), nullable=True))
  op.create_index('oauthaccesstoken_refresh_token', 'oauthaccesstoken', ['refresh_token'], unique=False)

  op.add_column('repositorybuildtrigger', sa.Column('auth_token', sa.String(length=255), nullable=True))
  op.add_column('repositorybuildtrigger', sa.Column('private_key', sa.Text(), nullable=True))

  op.add_column('oauthauthorizationcode', sa.Column('code', sa.String(length=255), nullable=True))
  op.create_index('oauthauthorizationcode_code', 'oauthauthorizationcode', ['code'], unique=True)

  op.add_column('oauthapplication', sa.Column('client_secret', sa.String(length=255), nullable=True))
  op.add_column('oauthaccesstoken', sa.Column('access_token', sa.String(length=255), nullable=True))

  op.create_index('oauthaccesstoken_access_token', 'oauthaccesstoken', ['access_token'], unique=False)

  op.add_column('appspecificauthtoken', sa.Column('token_code', sa.String(length=255), nullable=True))
  op.create_index('appspecificauthtoken_token_code', 'appspecificauthtoken', ['token_code'], unique=True)

  op.add_column('accesstoken', sa.Column('code', sa.String(length=255), nullable=True))
  op.create_index('accesstoken_code', 'accesstoken', ['code'], unique=True)
  # ### end Alembic commands ###
|
|
@ -0,0 +1,104 @@
|
|||
"""Add new fields and tables for encrypted tokens
|
||||
|
||||
Revision ID: c13c8052f7a6
|
||||
Revises: 5248ddf35167
|
||||
Create Date: 2019-08-19 15:59:36.269155
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'c13c8052f7a6'
|
||||
down_revision = '5248ddf35167'
|
||||
|
||||
from alembic import op as original_op
|
||||
from data.migrations.progress import ProgressWrapper
|
||||
import sqlalchemy as sa
|
||||
|
||||
def upgrade(tables, tester, progress_reporter):
  """Add the encrypted-token columns/tables alongside the legacy plaintext ones.

  New columns are nullable so existing rows stay valid until backfilled; the
  plaintext columns are removed later by migration c059b952ed76.
  """
  op = ProgressWrapper(original_op, progress_reporter)
  # ### commands auto generated by Alembic - please adjust! ###
  op.create_table('robotaccounttoken',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('robot_account_id', sa.Integer(), nullable=False),
    sa.Column('token', sa.String(length=255), nullable=False),
    # fully_migrated tracks whether the plaintext credential was scrubbed yet.
    sa.Column('fully_migrated', sa.Boolean(), nullable=False, server_default='0'),
    sa.ForeignKeyConstraint(['robot_account_id'], ['user.id'], name=op.f('fk_robotaccounttoken_robot_account_id_user')),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_robotaccounttoken'))
  )
  op.create_index('robotaccounttoken_robot_account_id', 'robotaccounttoken', ['robot_account_id'], unique=True)

  op.add_column(u'accesstoken', sa.Column('token_code', sa.String(length=255), nullable=True))
  op.add_column(u'accesstoken', sa.Column('token_name', sa.String(length=255), nullable=True))
  op.create_index('accesstoken_token_name', 'accesstoken', ['token_name'], unique=True)

  op.add_column(u'appspecificauthtoken', sa.Column('token_name', sa.String(length=255), nullable=True))
  op.add_column(u'appspecificauthtoken', sa.Column('token_secret', sa.String(length=255), nullable=True))
  op.create_index('appspecificauthtoken_token_name', 'appspecificauthtoken', ['token_name'], unique=True)

  op.add_column(u'emailconfirmation', sa.Column('verification_code', sa.String(length=255), nullable=True))

  op.add_column(u'oauthaccesstoken', sa.Column('token_code', sa.String(length=255), nullable=True))
  op.add_column(u'oauthaccesstoken', sa.Column('token_name', sa.String(length=255), nullable=True))
  op.create_index('oauthaccesstoken_token_name', 'oauthaccesstoken', ['token_name'], unique=True)

  op.add_column(u'oauthapplication', sa.Column('secure_client_secret', sa.String(length=255), nullable=True))
  op.add_column(u'oauthapplication', sa.Column('fully_migrated', sa.Boolean(), server_default='0', nullable=False))

  op.add_column(u'oauthauthorizationcode', sa.Column('code_credential', sa.String(length=255), nullable=True))
  op.add_column(u'oauthauthorizationcode', sa.Column('code_name', sa.String(length=255), nullable=True))
  op.create_index('oauthauthorizationcode_code_name', 'oauthauthorizationcode', ['code_name'], unique=True)
  # Recreate the legacy 'code' index as unique (downgrade restores non-unique).
  op.drop_index('oauthauthorizationcode_code', table_name='oauthauthorizationcode')
  op.create_index('oauthauthorizationcode_code', 'oauthauthorizationcode', ['code'], unique=True)

  op.add_column(u'repositorybuildtrigger', sa.Column('secure_auth_token', sa.String(length=255), nullable=True))
  op.add_column(u'repositorybuildtrigger', sa.Column('secure_private_key', sa.Text(), nullable=True))
  op.add_column(u'repositorybuildtrigger', sa.Column('fully_migrated', sa.Boolean(), server_default='0', nullable=False))
  # ### end Alembic commands ###

  # ### population of test data ### #
  tester.populate_table('robotaccounttoken', [
    ('robot_account_id', tester.TestDataType.Foreign('user')),
    ('token', tester.TestDataType.Token),
    ('fully_migrated', tester.TestDataType.Boolean),
  ])

  # NOTE(review): populates the legacy 'code' column rather than the new
  # 'token_code' one -- verify this is intentional.
  tester.populate_column('accesstoken', 'code', tester.TestDataType.Token)

  tester.populate_column('appspecificauthtoken', 'token_code', tester.TestDataType.Token)

  tester.populate_column('emailconfirmation', 'verification_code', tester.TestDataType.Token)

  tester.populate_column('oauthaccesstoken', 'token_code', tester.TestDataType.Token)
  # ### end population of test data ### #
|
||||
def downgrade(tables, tester, progress_reporter):
  """Remove the encrypted-token columns/tables added by upgrade().

  NOTE(review): the 'fully_migrated' columns added to oauthapplication and
  repositorybuildtrigger in upgrade() are not dropped here -- confirm whether
  that is intentional.
  """
  op = ProgressWrapper(original_op, progress_reporter)
  # ### commands auto generated by Alembic - please adjust! ###
  op.drop_column(u'repositorybuildtrigger', 'secure_private_key')
  op.drop_column(u'repositorybuildtrigger', 'secure_auth_token')

  # Restore the legacy non-unique index on the 'code' column.
  op.drop_index('oauthauthorizationcode_code', table_name='oauthauthorizationcode')
  op.create_index('oauthauthorizationcode_code', 'oauthauthorizationcode', ['code'], unique=False)
  op.drop_index('oauthauthorizationcode_code_name', table_name='oauthauthorizationcode')
  op.drop_column(u'oauthauthorizationcode', 'code_name')
  op.drop_column(u'oauthauthorizationcode', 'code_credential')

  op.drop_column(u'oauthapplication', 'secure_client_secret')

  op.drop_index('oauthaccesstoken_token_name', table_name='oauthaccesstoken')
  op.drop_column(u'oauthaccesstoken', 'token_name')
  op.drop_column(u'oauthaccesstoken', 'token_code')

  op.drop_column(u'emailconfirmation', 'verification_code')

  op.drop_index('appspecificauthtoken_token_name', table_name='appspecificauthtoken')
  op.drop_column(u'appspecificauthtoken', 'token_secret')
  op.drop_column(u'appspecificauthtoken', 'token_name')

  op.drop_index('accesstoken_token_name', table_name='accesstoken')
  op.drop_column(u'accesstoken', 'token_name')
  op.drop_column(u'accesstoken', 'token_code')

  op.drop_table('robotaccounttoken')
  # ### end Alembic commands ###
|
File diff suppressed because it is too large
Load diff
|
@ -0,0 +1,26 @@
|
|||
"""Backfill RepositorySearchScore table
|
||||
|
||||
Revision ID: c3d4b7ebcdf7
|
||||
Revises: f30984525c86
|
||||
Create Date: 2017-04-13 12:01:59.572775
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'c3d4b7ebcdf7'
|
||||
down_revision = 'f30984525c86'
|
||||
|
||||
from alembic import op as original_op
|
||||
from data.migrations.progress import ProgressWrapper
|
||||
import sqlalchemy as sa
|
||||
|
||||
def upgrade(tables, tester, progress_reporter):
  """Insert a zero-score RepositorySearchScore row for every repo missing one."""
  op = ProgressWrapper(original_op, progress_reporter)
  # Add a 0 entry into the RepositorySearchScore table for each repository that isn't present
  # Raw SQL: a single INSERT..SELECT is far cheaper than per-row ORM inserts.
  conn = op.get_bind()
  conn.execute("insert into repositorysearchscore (repository_id, score) SELECT id, 0 FROM " +
               "repository WHERE id not in (select repository_id from repositorysearchscore)")
|
||||
def downgrade(tables, tester, progress_reporter):
  """No-op: the backfill only inserts missing rows and needs no reversal."""
  op = ProgressWrapper(original_op, progress_reporter)
  pass
|
|
@ -0,0 +1,25 @@
|
|||
"""Drop checksum on ImageStorage
|
||||
|
||||
Revision ID: c91c564aad34
|
||||
Revises: 152bb29a1bb3
|
||||
Create Date: 2018-02-21 12:17:52.405644
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'c91c564aad34'
|
||||
down_revision = '152bb29a1bb3'
|
||||
|
||||
from alembic import op as original_op
|
||||
from data.migrations.progress import ProgressWrapper
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
def upgrade(tables, tester, progress_reporter):
  """Drop the unused 'checksum' column from ImageStorage."""
  op = ProgressWrapper(original_op, progress_reporter)
  op.drop_column('imagestorage', 'checksum')
|
||||
def downgrade(tables, tester, progress_reporter):
  """Re-add the 'checksum' column (values are not recoverable)."""
  op = ProgressWrapper(original_op, progress_reporter)
  op.add_column('imagestorage', sa.Column('checksum', sa.String(length=255), nullable=True))
|
|
@ -0,0 +1,30 @@
|
|||
"""Add user location field
|
||||
|
||||
Revision ID: cbc8177760d9
|
||||
Revises: 7367229b38d9
|
||||
Create Date: 2018-02-02 17:39:16.589623
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'cbc8177760d9'
|
||||
down_revision = '7367229b38d9'
|
||||
|
||||
from alembic import op as original_op
|
||||
from data.migrations.progress import ProgressWrapper
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import mysql
|
||||
from util.migrate import UTF8CharField
|
||||
|
||||
def upgrade(tables, tester, progress_reporter):
  """Add the optional free-text 'location' column to the user table."""
  op = ProgressWrapper(original_op, progress_reporter)
  # UTF8CharField: project helper type for MySQL charset handling -- see util.migrate
  op.add_column('user', sa.Column('location', UTF8CharField(length=255), nullable=True))

  # ### population of test data ### #
  tester.populate_column('user', 'location', tester.TestDataType.UTF8Char)
  # ### end population of test data ### #
||||
def downgrade(tables, tester, progress_reporter):
  """Drop the 'location' column added by upgrade()."""
  op = ProgressWrapper(original_op, progress_reporter)
  op.drop_column('user', 'location')
|
|
@ -0,0 +1,68 @@
|
|||
"""repository mirror notification
|
||||
|
||||
Revision ID: cc6778199cdb
|
||||
Revises: c059b952ed76
|
||||
Create Date: 2019-10-03 17:41:23.316914
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'cc6778199cdb'
|
||||
down_revision = 'c059b952ed76'
|
||||
|
||||
from alembic import op as original_op
|
||||
from data.migrations.progress import ProgressWrapper
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import mysql
|
||||
|
||||
def upgrade(tables, tester, progress_reporter):
  """Register the repository-mirror sync notification kinds and events."""
  op = ProgressWrapper(original_op, progress_reporter)

  event_names = [
    'repo_mirror_sync_started',
    'repo_mirror_sync_success',
    'repo_mirror_sync_failed',
  ]

  # The same three names are registered both as notification kinds and as
  # external notification events.
  op.bulk_insert(tables.notificationkind,
                 [{'name': name} for name in event_names])
  op.bulk_insert(tables.externalnotificationevent,
                 [{'name': name} for name in event_names])
||||
def downgrade(tables, tester, progress_reporter):
  """Delete the repository-mirror sync notification kinds and events."""
  op = ProgressWrapper(original_op, progress_reporter)

  event_names = [
    'repo_mirror_sync_started',
    'repo_mirror_sync_success',
    'repo_mirror_sync_failed',
  ]

  # Remove the notification kinds first, then the external events -- the same
  # order the original per-name statements ran in.
  for name in event_names:
    kind = tables.notificationkind
    op.execute(kind.delete().where(kind.c.name == op.inline_literal(name)))

  for name in event_names:
    event = tables.externalnotificationevent
    op.execute(event.delete().where(event.c.name == op.inline_literal(name)))
|
192
data/migrations/versions/d17c695859ea_delete_old_appr_tables.py
Normal file
192
data/migrations/versions/d17c695859ea_delete_old_appr_tables.py
Normal file
|
@ -0,0 +1,192 @@
|
|||
"""Delete old Appr tables
|
||||
|
||||
Revision ID: d17c695859ea
|
||||
Revises: 5d463ea1e8a8
|
||||
Create Date: 2018-07-16 15:21:11.593040
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'd17c695859ea'
|
||||
down_revision = '5d463ea1e8a8'
|
||||
|
||||
from alembic import op as original_op
|
||||
from data.migrations.progress import ProgressWrapper
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.sql import table, column
|
||||
from util.migrate import UTF8LongText, UTF8CharField
|
||||
|
||||
def upgrade(tables, tester, progress_reporter):
  """Drop the old AppR tables; drop order respects foreign-key dependencies."""
  op = ProgressWrapper(original_op, progress_reporter)
  # ### commands auto generated by Alembic - please adjust! ###
  # Children first: each table is dropped before the tables it references.
  op.drop_table('tag')
  op.drop_table('manifestlistmanifest')
  op.drop_table('manifestlist')
  op.drop_table('manifestblob')
  op.drop_table('manifest')
  op.drop_table('blobplacement')
  op.drop_table('blob')
  op.drop_table('blobplacementlocation')
  op.drop_table('tagkind')
  # ### end Alembic commands ###
||||
|
||||
|
||||
def downgrade(tables, tester, progress_reporter):
  """Recreate the old AppR tables (schema only; dropped data is gone).

  Tables are created parents-first so foreign keys resolve, then the two
  lookup tables are re-seeded with their fixed rows.
  """
  op = ProgressWrapper(original_op, progress_reporter)
  # ### commands auto generated by Alembic - please adjust! ###
  # Lookup table: kinds of tags (seeded at the bottom of this function).
  op.create_table(
    'tagkind',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('name', sa.String(length=255), nullable=False),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_tagkind'))
  )
  op.create_index('tagkind_name', 'tagkind', ['name'], unique=True)

  # Lookup table: storage locations for blob placements (also seeded below).
  op.create_table(
    'blobplacementlocation',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('name', sa.String(length=255), nullable=False),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_blobplacementlocation'))
  )
  op.create_index('blobplacementlocation_name', 'blobplacementlocation', ['name'], unique=True)

  # Content-addressed blobs.
  op.create_table(
    'blob',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('digest', sa.String(length=255), nullable=False),
    sa.Column('media_type_id', sa.Integer(), nullable=False),
    sa.Column('size', sa.BigInteger(), nullable=False),
    sa.Column('uncompressed_size', sa.BigInteger(), nullable=True),
    sa.ForeignKeyConstraint(['media_type_id'], ['mediatype.id'], name=op.f('fk_blob_media_type_id_mediatype')),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_blob'))
  )
  op.create_index('blob_digest', 'blob', ['digest'], unique=True)
  op.create_index('blob_media_type_id', 'blob', ['media_type_id'], unique=False)

  # Manifests (JSON body stored inline).
  op.create_table(
    'manifest',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('digest', sa.String(length=255), nullable=False),
    sa.Column('media_type_id', sa.Integer(), nullable=False),
    sa.Column('manifest_json', UTF8LongText, nullable=False),
    sa.ForeignKeyConstraint(['media_type_id'], ['mediatype.id'], name=op.f('fk_manifest_media_type_id_mediatype')),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_manifest'))
  )
  op.create_index('manifest_digest', 'manifest', ['digest'], unique=True)
  op.create_index('manifest_media_type_id', 'manifest', ['media_type_id'], unique=False)

  # Manifest lists (multi-platform groupings of manifests).
  op.create_table(
    'manifestlist',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('digest', sa.String(length=255), nullable=False),
    sa.Column('manifest_list_json', UTF8LongText, nullable=False),
    sa.Column('schema_version', UTF8CharField(length=255), nullable=False),
    sa.Column('media_type_id', sa.Integer(), nullable=False),
    sa.ForeignKeyConstraint(['media_type_id'], ['mediatype.id'], name=op.f('fk_manifestlist_media_type_id_mediatype')),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_manifestlist'))
  )
  op.create_index('manifestlist_digest', 'manifestlist', ['digest'], unique=True)
  op.create_index('manifestlist_media_type_id', 'manifestlist', ['media_type_id'], unique=False)

  # Join table: blob -> storage location.
  op.create_table(
    'blobplacement',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('blob_id', sa.Integer(), nullable=False),
    sa.Column('location_id', sa.Integer(), nullable=False),
    sa.ForeignKeyConstraint(['blob_id'], ['blob.id'], name=op.f('fk_blobplacement_blob_id_blob')),
    sa.ForeignKeyConstraint(['location_id'], ['blobplacementlocation.id'], name=op.f('fk_blobplacement_location_id_blobplacementlocation')),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_blobplacement'))
  )
  op.create_index('blobplacement_blob_id', 'blobplacement', ['blob_id'], unique=False)
  op.create_index('blobplacement_blob_id_location_id', 'blobplacement', ['blob_id', 'location_id'], unique=True)
  op.create_index('blobplacement_location_id', 'blobplacement', ['location_id'], unique=False)

  # Join table: manifest -> blob.
  op.create_table(
    'manifestblob',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('manifest_id', sa.Integer(), nullable=False),
    sa.Column('blob_id', sa.Integer(), nullable=False),
    sa.ForeignKeyConstraint(['blob_id'], ['blob.id'], name=op.f('fk_manifestblob_blob_id_blob')),
    sa.ForeignKeyConstraint(['manifest_id'], ['manifest.id'], name=op.f('fk_manifestblob_manifest_id_manifest')),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_manifestblob'))
  )
  op.create_index('manifestblob_blob_id', 'manifestblob', ['blob_id'], unique=False)
  op.create_index('manifestblob_manifest_id', 'manifestblob', ['manifest_id'], unique=False)
  op.create_index('manifestblob_manifest_id_blob_id', 'manifestblob', ['manifest_id', 'blob_id'], unique=True)

  # Join table: manifest list -> member manifest, with platform metadata.
  op.create_table(
    'manifestlistmanifest',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('manifest_list_id', sa.Integer(), nullable=False),
    sa.Column('manifest_id', sa.Integer(), nullable=False),
    sa.Column('operating_system', UTF8CharField(length=255), nullable=True),
    sa.Column('architecture', UTF8CharField(length=255), nullable=True),
    sa.Column('platform_json', UTF8LongText, nullable=True),
    sa.Column('media_type_id', sa.Integer(), nullable=False),
    sa.ForeignKeyConstraint(['manifest_id'], ['manifest.id'], name=op.f('fk_manifestlistmanifest_manifest_id_manifest')),
    sa.ForeignKeyConstraint(['manifest_list_id'], ['manifestlist.id'], name=op.f('fk_manifestlistmanifest_manifest_list_id_manifestlist')),
    sa.ForeignKeyConstraint(['media_type_id'], ['mediatype.id'], name=op.f('fk_manifestlistmanifest_media_type_id_mediatype')),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_manifestlistmanifest'))
  )
  op.create_index('manifestlistmanifest_manifest_id', 'manifestlistmanifest', ['manifest_id'], unique=False)
  op.create_index('manifestlistmanifest_manifest_list_id', 'manifestlistmanifest', ['manifest_list_id'], unique=False)
  op.create_index('manifestlistmanifest_manifest_listid_os_arch_mtid', 'manifestlistmanifest', ['manifest_list_id', 'operating_system', 'architecture', 'media_type_id'], unique=False)
  op.create_index('manifestlistmanifest_manifest_listid_mtid', 'manifestlistmanifest', ['manifest_list_id', 'media_type_id'], unique=False)
  op.create_index('manifestlistmanifest_media_type_id', 'manifestlistmanifest', ['media_type_id'], unique=False)

  # AppR-style tags, created last since they reference most of the above.
  op.create_table(
    'tag',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('name', UTF8CharField(length=190), nullable=False),
    sa.Column('repository_id', sa.Integer(), nullable=False),
    sa.Column('manifest_list_id', sa.Integer(), nullable=True),
    sa.Column('lifetime_start', sa.BigInteger(), nullable=False),
    sa.Column('lifetime_end', sa.BigInteger(), nullable=True),
    sa.Column('hidden', sa.Boolean(), nullable=False),
    sa.Column('reverted', sa.Boolean(), nullable=False),
    sa.Column('protected', sa.Boolean(), nullable=False),
    sa.Column('tag_kind_id', sa.Integer(), nullable=False),
    sa.Column('linked_tag_id', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['linked_tag_id'], ['tag.id'], name=op.f('fk_tag_linked_tag_id_tag')),
    sa.ForeignKeyConstraint(['manifest_list_id'], ['manifestlist.id'], name=op.f('fk_tag_manifest_list_id_manifestlist')),
    sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], name=op.f('fk_tag_repository_id_repository')),
    sa.ForeignKeyConstraint(['tag_kind_id'], ['tagkind.id'], name=op.f('fk_tag_tag_kind_id_tagkind')),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_tag'))
  )
  op.create_index('tag_lifetime_end', 'tag', ['lifetime_end'], unique=False)
  op.create_index('tag_linked_tag_id', 'tag', ['linked_tag_id'], unique=False)
  op.create_index('tag_manifest_list_id', 'tag', ['manifest_list_id'], unique=False)
  op.create_index('tag_repository_id', 'tag', ['repository_id'], unique=False)
  op.create_index('tag_repository_id_name_hidden', 'tag', ['repository_id', 'name', 'hidden'], unique=False)
  op.create_index('tag_repository_id_name_lifetime_end', 'tag', ['repository_id', 'name', 'lifetime_end'], unique=True)
  op.create_index('tag_repository_id_name', 'tag', ['repository_id', 'name'], unique=False)
  op.create_index('tag_tag_kind_id', 'tag', ['tag_kind_id'], unique=False)

  # ### end Alembic commands ###

  # Re-seed the lookup tables with their fixed rows. Lightweight table()
  # constructs are used since only these two columns are needed for inserts.
  blobplacementlocation_table = table('blobplacementlocation',
    column('id', sa.Integer()),
    column('name', sa.String()),
  )

  op.bulk_insert(
    blobplacementlocation_table,
    [
      {'name': 'local_eu'},
      {'name': 'local_us'},
    ],
  )

  tagkind_table = table('tagkind',
    column('id', sa.Integer()),
    column('name', sa.String()),
  )

  op.bulk_insert(
    tagkind_table,
    [
      {'id': 1, 'name': 'tag'},
      {'id': 2, 'name': 'release'},
      {'id': 3, 'name': 'channel'},
    ]
  )
|
@ -0,0 +1,36 @@
|
|||
"""Backfill state_id and make it unique
|
||||
|
||||
Revision ID: d42c175b439a
|
||||
Revises: 3e8cc74a1e7b
|
||||
Create Date: 2017-01-18 15:11:01.635632
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'd42c175b439a'
|
||||
down_revision = '3e8cc74a1e7b'
|
||||
|
||||
from alembic import op as original_op
|
||||
from data.migrations.progress import ProgressWrapper
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import mysql
|
||||
|
||||
def upgrade(tables, tester, progress_reporter):
|
||||
op = ProgressWrapper(original_op, progress_reporter)
|
||||
# Backfill the queueitem table's state_id field with unique values for all entries which are
|
||||
# empty.
|
||||
conn = op.get_bind()
|
||||
conn.execute("update queueitem set state_id = id where state_id = ''")
|
||||
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_index('queueitem_state_id', table_name='queueitem')
|
||||
op.create_index('queueitem_state_id', 'queueitem', ['state_id'], unique=True)
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade(tables, tester, progress_reporter):
|
||||
op = ProgressWrapper(original_op, progress_reporter)
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_index('queueitem_state_id', table_name='queueitem')
|
||||
op.create_index('queueitem_state_id', 'queueitem', ['state_id'], unique=False)
|
||||
# ### end Alembic commands ###
|
|
@ -0,0 +1,28 @@
|
|||
"""Add change_tag_expiration log type
|
||||
|
||||
Revision ID: d8989249f8f6
|
||||
Revises: dc4af11a5f90
|
||||
Create Date: 2017-06-21 21:18:25.948689
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'd8989249f8f6'
|
||||
down_revision = 'dc4af11a5f90'
|
||||
|
||||
from alembic import op as original_op
|
||||
from data.migrations.progress import ProgressWrapper
|
||||
|
||||
def upgrade(tables, tester, progress_reporter):
|
||||
op = ProgressWrapper(original_op, progress_reporter)
|
||||
op.bulk_insert(tables.logentrykind, [
|
||||
{'name': 'change_tag_expiration'},
|
||||
])
|
||||
|
||||
|
||||
def downgrade(tables, tester, progress_reporter):
|
||||
op = ProgressWrapper(original_op, progress_reporter)
|
||||
op.execute(tables
|
||||
.logentrykind
|
||||
.delete()
|
||||
.where(tables.logentrykind.c.name == op.inline_literal('change_tag_expiration')))
|
|
@ -0,0 +1,39 @@
|
|||
"""add notification number of failures column
|
||||
|
||||
Revision ID: dc4af11a5f90
|
||||
Revises: 53e2ac668296
|
||||
Create Date: 2017-05-16 17:24:02.630365
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'dc4af11a5f90'
|
||||
down_revision = '53e2ac668296'
|
||||
|
||||
import sqlalchemy as sa
|
||||
from alembic import op as original_op
|
||||
from data.migrations.progress import ProgressWrapper
|
||||
|
||||
|
||||
def upgrade(tables, tester, progress_reporter):
|
||||
op = ProgressWrapper(original_op, progress_reporter)
|
||||
op.add_column('repositorynotification', sa.Column('number_of_failures',
|
||||
sa.Integer(),
|
||||
nullable=False,
|
||||
server_default='0'))
|
||||
op.bulk_insert(tables.logentrykind, [
|
||||
{'name': 'reset_repo_notification'},
|
||||
])
|
||||
|
||||
# ### population of test data ### #
|
||||
tester.populate_column('repositorynotification', 'number_of_failures', tester.TestDataType.Integer)
|
||||
# ### end population of test data ### #
|
||||
|
||||
|
||||
def downgrade(tables, tester, progress_reporter):
|
||||
op = ProgressWrapper(original_op, progress_reporter)
|
||||
op.drop_column('repositorynotification', 'number_of_failures')
|
||||
op.execute(tables
|
||||
.logentrykind
|
||||
.delete()
|
||||
.where(tables.logentrykind.c.name == op.inline_literal('reset_repo_notification')))
|
|
@ -0,0 +1,31 @@
|
|||
"""Add missing index on UUID fields
|
||||
|
||||
Revision ID: e184af42242d
|
||||
Revises: 6ec8726c0ace
|
||||
Create Date: 2019-02-14 16:35:47.768086
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'e184af42242d'
|
||||
down_revision = '6ec8726c0ace'
|
||||
|
||||
from alembic import op as original_op
|
||||
from data.migrations.progress import ProgressWrapper
|
||||
|
||||
def upgrade(tables, tester, progress_reporter):
|
||||
op = ProgressWrapper(original_op, progress_reporter)
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.create_index('permissionprototype_uuid', 'permissionprototype', ['uuid'], unique=False)
|
||||
op.create_index('repositorybuildtrigger_uuid', 'repositorybuildtrigger', ['uuid'], unique=False)
|
||||
op.create_index('user_uuid', 'user', ['uuid'], unique=False)
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade(tables, tester, progress_reporter):
|
||||
op = ProgressWrapper(original_op, progress_reporter)
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_index('user_uuid', table_name='user')
|
||||
op.drop_index('repositorybuildtrigger_uuid', table_name='repositorybuildtrigger')
|
||||
op.drop_index('permissionprototype_uuid', table_name='permissionprototype')
|
||||
# ### end Alembic commands ###
|
|
@ -0,0 +1,31 @@
|
|||
"""Add full text search indexing for repo name and description
|
||||
|
||||
Revision ID: e2894a3a3c19
|
||||
Revises: d42c175b439a
|
||||
Create Date: 2017-01-11 13:55:54.890774
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'e2894a3a3c19'
|
||||
down_revision = 'd42c175b439a'
|
||||
|
||||
from alembic import op as original_op
|
||||
from data.migrations.progress import ProgressWrapper
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import mysql
|
||||
|
||||
def upgrade(tables, tester, progress_reporter):
|
||||
op = ProgressWrapper(original_op, progress_reporter)
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.create_index('repository_description__fulltext', 'repository', ['description'], unique=False, postgresql_using='gin', postgresql_ops={'description': 'gin_trgm_ops'}, mysql_prefix='FULLTEXT')
|
||||
op.create_index('repository_name__fulltext', 'repository', ['name'], unique=False, postgresql_using='gin', postgresql_ops={'name': 'gin_trgm_ops'}, mysql_prefix='FULLTEXT')
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade(tables, tester, progress_reporter):
|
||||
op = ProgressWrapper(original_op, progress_reporter)
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_index('repository_name__fulltext', table_name='repository')
|
||||
op.drop_index('repository_description__fulltext', table_name='repository')
|
||||
# ### end Alembic commands ###
|
|
@ -0,0 +1,31 @@
|
|||
"""Remove blob_index from ManifestBlob table
|
||||
|
||||
Revision ID: eafdeadcebc7
|
||||
Revises: 9093adccc784
|
||||
Create Date: 2018-08-07 15:57:54.001225
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'eafdeadcebc7'
|
||||
down_revision = '9093adccc784'
|
||||
|
||||
from alembic import op as original_op
|
||||
from data.migrations.progress import ProgressWrapper
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import mysql
|
||||
|
||||
def upgrade(tables, tester, progress_reporter):
|
||||
op = ProgressWrapper(original_op, progress_reporter)
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_index('manifestblob_manifest_id_blob_index', table_name='manifestblob')
|
||||
op.drop_column('manifestblob', 'blob_index')
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade(tables, tester, progress_reporter):
|
||||
op = ProgressWrapper(original_op, progress_reporter)
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.add_column('manifestblob', sa.Column('blob_index', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True))
|
||||
op.create_index('manifestblob_manifest_id_blob_index', 'manifestblob', ['manifest_id', 'blob_index'], unique=True)
|
||||
# ### end Alembic commands ###
|
|
@ -0,0 +1,41 @@
|
|||
"""Add trust_enabled to repository
|
||||
|
||||
Revision ID: ed01e313d3cb
|
||||
Revises: c3d4b7ebcdf7
|
||||
Create Date: 2017-04-14 17:38:03.319695
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'ed01e313d3cb'
|
||||
down_revision = 'c3d4b7ebcdf7'
|
||||
|
||||
from alembic import op as original_op
|
||||
from data.migrations.progress import ProgressWrapper
|
||||
import sqlalchemy as sa
|
||||
|
||||
def upgrade(tables, tester, progress_reporter):
|
||||
op = ProgressWrapper(original_op, progress_reporter)
|
||||
### commands auto generated by Alembic - please adjust! ###
|
||||
op.add_column('repository', sa.Column('trust_enabled', sa.Boolean(), nullable=False, server_default=sa.sql.expression.false()))
|
||||
### end Alembic commands ###
|
||||
op.bulk_insert(tables.logentrykind, [
|
||||
{'name': 'change_repo_trust'},
|
||||
])
|
||||
|
||||
# ### population of test data ### #
|
||||
tester.populate_column('repository', 'trust_enabled', tester.TestDataType.Boolean)
|
||||
# ### end population of test data ### #
|
||||
|
||||
|
||||
def downgrade(tables, tester, progress_reporter):
|
||||
op = ProgressWrapper(original_op, progress_reporter)
|
||||
### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_column('repository', 'trust_enabled')
|
||||
### end Alembic commands ###
|
||||
|
||||
op.execute(tables
|
||||
.logentrykind
|
||||
.delete()
|
||||
.where(tables.
|
||||
logentrykind.name == op.inline_literal('change_repo_trust')))
|
|
@ -0,0 +1,46 @@
|
|||
"""Add RepositorySearchScore table
|
||||
|
||||
Revision ID: f30984525c86
|
||||
Revises: be8d1c402ce0
|
||||
Create Date: 2017-04-04 14:30:13.270728
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'f30984525c86'
|
||||
down_revision = 'be8d1c402ce0'
|
||||
|
||||
from alembic import op as original_op
|
||||
from data.migrations.progress import ProgressWrapper
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import mysql
|
||||
|
||||
def upgrade(tables, tester, progress_reporter):
|
||||
op = ProgressWrapper(original_op, progress_reporter)
|
||||
### commands auto generated by Alembic - please adjust! ###
|
||||
op.create_table('repositorysearchscore',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('repository_id', sa.Integer(), nullable=False),
|
||||
sa.Column('score', sa.BigInteger(), nullable=False),
|
||||
sa.Column('last_updated', sa.DateTime(), nullable=True),
|
||||
sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], name=op.f('fk_repositorysearchscore_repository_id_repository')),
|
||||
sa.PrimaryKeyConstraint('id', name=op.f('pk_repositorysearchscore'))
|
||||
)
|
||||
op.create_index('repositorysearchscore_repository_id', 'repositorysearchscore', ['repository_id'], unique=True)
|
||||
op.create_index('repositorysearchscore_score', 'repositorysearchscore', ['score'], unique=False)
|
||||
### end Alembic commands ###
|
||||
|
||||
# ### population of test data ### #
|
||||
tester.populate_table('repositorysearchscore', [
|
||||
('repository_id', tester.TestDataType.Foreign('repository')),
|
||||
('score', tester.TestDataType.BigInteger),
|
||||
('last_updated', tester.TestDataType.DateTime),
|
||||
])
|
||||
# ### end population of test data ### #
|
||||
|
||||
|
||||
def downgrade(tables, tester, progress_reporter):
|
||||
op = ProgressWrapper(original_op, progress_reporter)
|
||||
### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_table('repositorysearchscore')
|
||||
### end Alembic commands ###
|
|
@ -0,0 +1,43 @@
|
|||
"""update queue item table indices
|
||||
|
||||
Revision ID: f5167870dd66
|
||||
Revises: 45fd8b9869d4
|
||||
Create Date: 2016-12-08 17:26:20.333846
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'f5167870dd66'
|
||||
down_revision = '45fd8b9869d4'
|
||||
|
||||
from alembic import op as original_op
|
||||
from data.migrations.progress import ProgressWrapper
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import mysql
|
||||
|
||||
def upgrade(tables, tester, progress_reporter):
|
||||
op = ProgressWrapper(original_op, progress_reporter)
|
||||
### commands auto generated by Alembic - please adjust! ###
|
||||
op.create_index('queueitem_processing_expires_available', 'queueitem', ['processing_expires', 'available'], unique=False)
|
||||
op.create_index('queueitem_pe_aafter_qname_rremaining_available', 'queueitem', ['processing_expires', 'available_after', 'queue_name', 'retries_remaining', 'available'], unique=False)
|
||||
op.create_index('queueitem_pexpires_aafter_rremaining_available', 'queueitem', ['processing_expires', 'available_after', 'retries_remaining', 'available'], unique=False)
|
||||
op.create_index('queueitem_processing_expires_queue_name_available', 'queueitem', ['processing_expires', 'queue_name', 'available'], unique=False)
|
||||
op.drop_index('queueitem_available', table_name='queueitem')
|
||||
op.drop_index('queueitem_available_after', table_name='queueitem')
|
||||
op.drop_index('queueitem_processing_expires', table_name='queueitem')
|
||||
op.drop_index('queueitem_retries_remaining', table_name='queueitem')
|
||||
### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade(tables, tester, progress_reporter):
|
||||
op = ProgressWrapper(original_op, progress_reporter)
|
||||
### commands auto generated by Alembic - please adjust! ###
|
||||
op.create_index('queueitem_retries_remaining', 'queueitem', ['retries_remaining'], unique=False)
|
||||
op.create_index('queueitem_processing_expires', 'queueitem', ['processing_expires'], unique=False)
|
||||
op.create_index('queueitem_available_after', 'queueitem', ['available_after'], unique=False)
|
||||
op.create_index('queueitem_available', 'queueitem', ['available'], unique=False)
|
||||
op.drop_index('queueitem_processing_expires_queue_name_available', table_name='queueitem')
|
||||
op.drop_index('queueitem_pexpires_aafter_rremaining_available', table_name='queueitem')
|
||||
op.drop_index('queueitem_pe_aafter_qname_rremaining_available', table_name='queueitem')
|
||||
op.drop_index('queueitem_processing_expires_available', table_name='queueitem')
|
||||
### end Alembic commands ###
|
|
@ -0,0 +1,56 @@
|
|||
"""Add user metadata fields
|
||||
|
||||
Revision ID: faf752bd2e0a
|
||||
Revises: 6c7014e84a5e
|
||||
Create Date: 2016-11-14 17:29:03.984665
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'faf752bd2e0a'
|
||||
down_revision = '6c7014e84a5e'
|
||||
|
||||
from alembic import op as original_op
|
||||
from data.migrations.progress import ProgressWrapper
|
||||
import sqlalchemy as sa
|
||||
|
||||
from util.migrate import UTF8CharField
|
||||
|
||||
def upgrade(tables, tester, progress_reporter):
|
||||
op = ProgressWrapper(original_op, progress_reporter)
|
||||
### commands auto generated by Alembic - please adjust! ###
|
||||
op.add_column('user', sa.Column('company', UTF8CharField(length=255), nullable=True))
|
||||
op.add_column('user', sa.Column('family_name', UTF8CharField(length=255), nullable=True))
|
||||
op.add_column('user', sa.Column('given_name', UTF8CharField(length=255), nullable=True))
|
||||
### end Alembic commands ###
|
||||
|
||||
op.bulk_insert(tables.userpromptkind,
|
||||
[
|
||||
{'name':'enter_name'},
|
||||
{'name':'enter_company'},
|
||||
])
|
||||
|
||||
# ### population of test data ### #
|
||||
tester.populate_column('user', 'company', tester.TestDataType.UTF8Char)
|
||||
tester.populate_column('user', 'family_name', tester.TestDataType.UTF8Char)
|
||||
tester.populate_column('user', 'given_name', tester.TestDataType.UTF8Char)
|
||||
# ### end population of test data ### #
|
||||
|
||||
|
||||
def downgrade(tables, tester, progress_reporter):
|
||||
op = ProgressWrapper(original_op, progress_reporter)
|
||||
### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_column('user', 'given_name')
|
||||
op.drop_column('user', 'family_name')
|
||||
op.drop_column('user', 'company')
|
||||
### end Alembic commands ###
|
||||
|
||||
op.execute(
|
||||
(tables.userpromptkind.delete()
|
||||
.where(tables.userpromptkind.c.name == op.inline_literal('enter_name')))
|
||||
)
|
||||
|
||||
op.execute(
|
||||
(tables.userpromptkind.delete()
|
||||
.where(tables.userpromptkind.c.name == op.inline_literal('enter_company')))
|
||||
)
|
|
@ -0,0 +1,35 @@
|
|||
"""Add state_id field to QueueItem
|
||||
|
||||
Revision ID: fc47c1ec019f
|
||||
Revises: f5167870dd66
|
||||
Create Date: 2017-01-12 15:44:23.643016
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'fc47c1ec019f'
|
||||
down_revision = 'f5167870dd66'
|
||||
|
||||
from alembic import op as original_op
|
||||
from data.migrations.progress import ProgressWrapper
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import mysql
|
||||
|
||||
def upgrade(tables, tester, progress_reporter):
|
||||
op = ProgressWrapper(original_op, progress_reporter)
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.add_column('queueitem', sa.Column('state_id', sa.String(length=255), nullable=False, server_default=''))
|
||||
op.create_index('queueitem_state_id', 'queueitem', ['state_id'], unique=False)
|
||||
# ### end Alembic commands ###
|
||||
|
||||
# ### population of test data ### #
|
||||
tester.populate_column('queueitem', 'state_id', tester.TestDataType.String)
|
||||
# ### end population of test data ### #
|
||||
|
||||
|
||||
def downgrade(tables, tester, progress_reporter):
|
||||
op = ProgressWrapper(original_op, progress_reporter)
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_index('queueitem_state_id', table_name='queueitem')
|
||||
op.drop_column('queueitem', 'state_id')
|
||||
# ### end Alembic commands ###
|
Reference in a new issue