Merge pull request #1688 from jzelinskie/rmfk

MIGRATION: drop foreign keys on logentry table
This commit is contained in:
Jimmy Zelinskie 2016-08-08 14:23:20 -04:00 committed by GitHub
commit 2f140f1128
5 changed files with 48 additions and 44 deletions

View file

@ -46,32 +46,6 @@ def real_for_update(query):
def null_for_update(query):
    """No-op counterpart of real_for_update: hands the query back unchanged,
    for backends/paths where SELECT ... FOR UPDATE is not applied."""
    return query
def _clear_log_entries(instance, model_class, log_entry_field, *extra_fields):
    """ Temporary method for clearing out LogEntry records before the general deletion of a
        dependent record (e.g. Repository or User). Used because deleting all of the log entries
        under a transaction destroys the database for all other work.
    """
    # TODO: Remove this hack once LogEntry no longer has foreign key constraints to everything.
    # Build a predicate matching every LogEntry column that references the instance.
    predicate = (log_entry_field == instance)
    for extra_field in extra_fields:
        predicate = predicate | (extra_field == instance)

    # Delete matching rows in chunks of 1000 (via an aliased id-subquery) so no
    # single DELETE statement has to touch the whole table at once.
    while True:
        todelete = (LogEntry
                    .select(LogEntry.id)
                    .where(predicate)
                    .limit(1000)
                    .alias('todelete'))

        subquery = (LogEntry
                    .select(todelete.c.id)
                    .from_(todelete))

        removed = LogEntry.delete().where(LogEntry.id << subquery).execute()
        if removed < 1:
            break
def delete_instance_filtered(instance, model_class, delete_nullable, skip_transitive_deletes):
""" Deletes the DB instance recursively, skipping any models in the skip_transitive_deletes set.
@ -114,7 +88,6 @@ def delete_instance_filtered(instance, model_class, delete_nullable, skip_transi
with db_transaction():
for query, fk in filtered_ops:
model = fk.model_class
if fk.null and not delete_nullable:
model.update(**{fk.name: None}).where(query).execute()
else:
@ -347,7 +320,6 @@ class User(BaseModel):
# These models don't need to use transitive deletes, because the referenced objects
# are cleaned up directly
skip_transitive_deletes = {Image}
_clear_log_entries(self, User, LogEntry.performer, LogEntry.account)
delete_instance_filtered(self, User, delete_nullable, skip_transitive_deletes)
@ -444,7 +416,7 @@ class Repository(BaseModel):
# are cleaned up directly
skip_transitive_deletes = {RepositoryTag, RepositoryBuild, RepositoryBuildTrigger, BlobUpload,
Image, TagManifest, DerivedStorageForImage}
_clear_log_entries(self, User, LogEntry.repository)
delete_instance_filtered(self, Repository, delete_nullable, skip_transitive_deletes)
@ -739,10 +711,9 @@ class LogEntryKind(BaseModel):
class LogEntry(BaseModel):
    """ Audit-log row. The account/performer/repository columns are stored as
        bare integer ids (no foreign-key constraints), so deleting a user or
        repository never blocks on — or cascades through — the log table.
    """
    kind = ForeignKeyField(LogEntryKind)
    # NOTE: account/performer/repository were formerly QuayUserField/ForeignKeyField
    # references; the FK constraints were dropped (migration 6243159408b5) and the
    # stale duplicate field definitions left over from that change are removed here.
    account = IntegerField(index=True, db_column='account_id')
    performer = IntegerField(index=True, null=True, db_column='performer_id')
    repository = IntegerField(index=True, null=True, db_column='repository_id')
    datetime = DateTimeField(default=datetime.now, index=True)
    ip = CharField(null=True)
    metadata_json = TextField(default='{}')

View file

@ -1,6 +1,7 @@
set -e

# Derive the Docker host IP from $DOCKER_HOST (format tcp://HOST:PORT); fall
# back to localhost when DOCKER_HOST is unset (local Docker daemon).
# The old direct DOCKER_IP assignment was dead code (immediately overwritten)
# and has been removed.
PARSED_DOCKER_HOST=`echo $DOCKER_HOST | sed 's/tcp:\/\///' | sed 's/:.*//'`
DOCKER_IP="${PARSED_DOCKER_HOST:-127.0.0.1}"

MYSQL_CONFIG_OVERRIDE="{\"DB_URI\":\"mysql+pymysql://root:password@$DOCKER_IP/genschema\"}"
PERCONA_CONFIG_OVERRIDE="{\"DB_URI\":\"mysql+pymysql://root:password@$DOCKER_IP/genschema\"}"
PGSQL_CONFIG_OVERRIDE="{\"DB_URI\":\"postgresql://postgres@$DOCKER_IP/genschema\"}"
@ -74,18 +75,21 @@ down_postgres() {
}
gen_migrate() {
# Bring the database ($1 = JSON config override with the DB_URI) up to the
# schema defined by the existing alembic migration chain.
QUAY_OVERRIDE_CONFIG=$1 PYTHONPATH=. alembic upgrade head
# Autogenerate the migration from that schema to the current model
# ($2 = revision message).
QUAY_OVERRIDE_CONFIG=$1 PYTHONPATH=. alembic revision --autogenerate -m "$2"
}
test_migrate() {
# Bring the database ($1 = JSON config override with the DB_URI) up to the
# schema defined by the existing alembic migration chain.
echo '> Running upgrade'
QUAY_OVERRIDE_CONFIG=$1 PYTHONPATH=. alembic upgrade head
# Downgrade to verify migrations work in both directions: step back once per
# migration file so the whole chain unwinds.
echo '> Running downgrade'
COUNT=`ls data/migrations/versions/*.py | wc -l | tr -d ' '`
QUAY_OVERRIDE_CONFIG=$1 PYTHONPATH=. alembic downgrade "-$COUNT"
}

View file

@ -0,0 +1,26 @@
"""remove logentry fk constraint
Revision ID: 6243159408b5
Revises: 790d91952fa8
Create Date: 2016-08-05 13:23:46.219759
"""
# revision identifiers, used by Alembic.
revision = '6243159408b5'
down_revision = '790d91952fa8'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def upgrade(tables):
    """Drop the three foreign-key constraints from the logentry table so log rows
    no longer hard-reference user/repository rows (lets those rows be deleted
    without touching logentry)."""
    op.drop_constraint(u'fk_logentry_account_id_user', 'logentry', type_='foreignkey')
    op.drop_constraint(u'fk_logentry_repository_id_repository', 'logentry', type_='foreignkey')
    op.drop_constraint(u'fk_logentry_performer_id_user', 'logentry', type_='foreignkey')
def downgrade(tables):
    """Recreate the logentry foreign-key constraints dropped by upgrade()."""
    op.create_foreign_key(u'fk_logentry_performer_id_user', 'logentry', 'user', ['performer_id'], ['id'])
    op.create_foreign_key(u'fk_logentry_repository_id_repository', 'logentry', 'repository', ['repository_id'], ['id'])
    op.create_foreign_key(u'fk_logentry_account_id_user', 'logentry', 'user', ['account_id'], ['id'])

View file

@ -22,7 +22,7 @@ def _logs_query(selections, start_time, end_time, performer=None, repository=Non
joined = joined.where(LogEntry.performer == performer)
if namespace:
joined = joined.join(User).where(User.username == namespace)
joined = joined.join(User, on=(User.username == namespace))
if ignore:
kind_map = get_log_entry_kinds()
@ -88,6 +88,12 @@ def log_action(kind_name, user_or_organization_name, performer=None, repository=
if account is None:
account = User.select(fn.Min(User.id)).tuples().get()[0]
if performer is not None:
performer = performer.id
if repository is not None:
repository = repository.id
kind = _get_log_entry_kind(kind_name)
metadata_json = json.dumps(metadata, default=_json_serialize)
LogEntry.create(kind=kind, account=account, performer=performer,

View file

@ -1654,17 +1654,14 @@ class log_queries(object):
class check_transitive_deletes(log_queries):
    """ Context manager that raises if any transitive-deletion query (a DELETE
        driven by an IN (SELECT ...) subquery) was executed inside the block.

        Fixes diff residue that left two __init__ definitions: the surviving
        parameterless one never set self.allowed_query_check, yet __exit__ still
        read it, so exiting the context raised AttributeError instead of doing
        the intended check.
    """
    def __init__(self):
        super(check_transitive_deletes, self).__init__(query_filter=r'^DELETE.+IN \(SELECT.+$')

    def __exit__(self, exc_type, exc_val, exc_tb):
        super(check_transitive_deletes, self).__exit__(exc_type, exc_val, exc_tb)
        queries = self.get_queries()
        if queries:
            raise Exception('Detected transitive deletion in queries: %s' % queries)
class TestDeleteRepository(ApiTestCase):
@ -1750,7 +1747,7 @@ class TestDeleteRepository(ApiTestCase):
date=datetime.datetime.now() - datetime.timedelta(days=5), count=6)
# Delete the repository.
with check_transitive_deletes(allowed_query_check='todelete'):
with check_transitive_deletes():
self.deleteResponse(Repository, params=dict(repository=self.COMPLEX_REPO))
# Verify the repo was deleted.