From daa74b5132e8c70501440824f4032e83ed3eb117 Mon Sep 17 00:00:00 2001
From: Joseph Schorr <josephschorr@users.noreply.github.com>
Date: Mon, 9 Nov 2015 14:41:53 -0500
Subject: [PATCH 1/2] Fix test issue with initdb

Non-sqlite DBs don't like sys.maxsize as a value for Integer columns
---
 initdb.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/initdb.py b/initdb.py
index 80c9fa952..ce51c1749 100644
--- a/initdb.py
+++ b/initdb.py
@@ -5,7 +5,6 @@ import random
 import calendar
 import os
 
-from sys import maxsize
 from datetime import datetime, timedelta
 from peewee import (SqliteDatabase, create_model_tables, drop_model_tables, savepoint_sqlite,
                     savepoint)
@@ -97,7 +96,7 @@ def __create_subtree(repo, structure, creator_username, parent, tag_map):
         store.put_content('local_us', path, checksum)
 
     new_image.security_indexed = False
-    new_image.security_indexed_engine = maxsize
+    new_image.security_indexed_engine = -1
     new_image.save()
 
     creation_time = REFERENCE_DATE + timedelta(weeks=image_num) + timedelta(days=model_num)

From 2d2662f53fc5ada9aa77e19c7bdefe453445de55 Mon Sep 17 00:00:00 2001
From: Joseph Schorr <josephschorr@users.noreply.github.com>
Date: Mon, 9 Nov 2015 14:42:05 -0500
Subject: [PATCH 2/2] Fix deleting repos and images under MySQL

MySQL doesn't handle constraints at the end of transactions, so deleting images currently fails. This removes the constraint and just leaves parent_id as an int
---
 data/database.py                                              | 2 +-
 .../57dad559ff2d_add_support_for_quay_s_security_indexer.py   | 3 +--
 data/model/image.py                                           | 2 +-
 data/model/repository.py                                      | 4 ++--
 util/migrate/backfill_parent_id.py                            | 4 ++--
 workers/securityworker.py                                     | 4 ++--
 6 files changed, 9 insertions(+), 10 deletions(-)

diff --git a/data/database.py b/data/database.py
index c87ece328..67f5ba2ab 100644
--- a/data/database.py
+++ b/data/database.py
@@ -575,7 +575,7 @@ class Image(BaseModel):
 
   security_indexed = BooleanField(default=False)
   security_indexed_engine = IntegerField(default=-1)
-  parent = ForeignKeyField('self', index=True, null=True, related_name='children')
+  parent_id = IntegerField(index=True, null=True)
 
   class Meta:
     database = db
diff --git a/data/migrations/versions/57dad559ff2d_add_support_for_quay_s_security_indexer.py b/data/migrations/versions/57dad559ff2d_add_support_for_quay_s_security_indexer.py
index 9e4d0e6c2..078137c61 100644
--- a/data/migrations/versions/57dad559ff2d_add_support_for_quay_s_security_indexer.py
+++ b/data/migrations/versions/57dad559ff2d_add_support_for_quay_s_security_indexer.py
@@ -19,14 +19,13 @@ def upgrade(tables):
     op.add_column('image', sa.Column('security_indexed', sa.Boolean(), nullable=False, default=False, server_default=sa.sql.expression.false()))
     op.add_column('image', sa.Column('security_indexed_engine', sa.Integer(), nullable=False, default=-1, server_default="-1"))
     op.create_index('image_parent_id', 'image', ['parent_id'], unique=False)
-    op.create_foreign_key(op.f('fk_image_parent_id_image'), 'image', 'image', ['parent_id'], ['id'])
     ### end Alembic commands ###
+
     op.create_index('image_security_indexed_engine_security_indexed', 'image', ['security_indexed_engine', 'security_indexed'])
 
 def downgrade(tables):
    ### commands auto generated by Alembic - please adjust! ###
     op.drop_index('image_security_indexed_engine_security_indexed', 'image')
-    op.drop_constraint(op.f('fk_image_parent_id_image'), 'image', type_='foreignkey')
     op.drop_index('image_parent_id', table_name='image')
     op.drop_column('image', 'security_indexed')
     op.drop_column('image', 'security_indexed_engine')
diff --git a/data/model/image.py b/data/model/image.py
index 7b673ee2f..f668b8001 100644
--- a/data/model/image.py
+++ b/data/model/image.py
@@ -303,7 +303,7 @@ def set_image_metadata(docker_image_id, namespace_name, repository_name, created
 
     if parent:
       fetched.ancestors = '%s%s/' % (parent.ancestors, parent.id)
-      fetched.parent = parent
+      fetched.parent_id = parent.id
 
     fetched.save()
     fetched.storage.save()
diff --git a/data/model/repository.py b/data/model/repository.py
index f78b92b8b..8379a1f6d 100644
--- a/data/model/repository.py
+++ b/data/model/repository.py
@@ -131,12 +131,12 @@ def garbage_collect_repo(repo):
     # iterable of tuples containing [(k, v), (k, v), ...]
     all_repo_images = Image.select(Image.id, Image.storage).where(Image.repository == repo).tuples()
     images_to_storages = dict(all_repo_images)
-    to_remove = set(images_to_storages.keys()).difference(referenced_ancestors)
+    to_remove = list(set(images_to_storages.keys()).difference(referenced_ancestors))
 
     if len(to_remove) > 0:
       logger.info('Cleaning up unreferenced images: %s', to_remove)
       storage_id_whitelist = {images_to_storages[to_remove_id] for to_remove_id in to_remove}
-      Image.delete().where(Image.id << list(to_remove)).execute()
+      Image.delete().where(Image.id << to_remove).execute()
 
   if len(to_remove) > 0:
     logger.info('Garbage collecting storage for images: %s', to_remove)
diff --git a/util/migrate/backfill_parent_id.py b/util/migrate/backfill_parent_id.py
index 2a4e7b091..1d2cf4136 100644
--- a/util/migrate/backfill_parent_id.py
+++ b/util/migrate/backfill_parent_id.py
@@ -18,7 +18,7 @@ def backfill_parent_id():
     return (Image
             .select(Image.id, Image.ancestors)
             .join(ImageStorage)
-            .where(Image.parent >> None, Image.ancestors != '/',
+            .where(Image.parent_id >> None, Image.ancestors != '/',
                    ImageStorage.uploading == False))
 
   for to_backfill in yield_random_entries(fetch_batch, 10000, 0.3):
@@ -27,7 +27,7 @@ def backfill_parent_id():
         image = db_for_update(Image
                               .select()
                               .where(Image.id == to_backfill.id)).get()
-        image.parent = to_backfill.ancestors.split('/')[-2]
+        image.parent_id = int(to_backfill.ancestors.split('/')[-2])
         image.save()
       except Image.DoesNotExist:
         pass
diff --git a/workers/securityworker.py b/workers/securityworker.py
index 81402cd07..e6ccd369b 100644
--- a/workers/securityworker.py
+++ b/workers/securityworker.py
@@ -27,7 +27,7 @@ def _get_image_to_export(version):
   candidates = (Image
     .select(Image.docker_image_id, ImageStorage.uuid, ImageStorage.checksum)
     .join(ImageStorage)
-    .where(Image.security_indexed_engine < version, Image.parent >> None, ImageStorage.uploading == False, ImageStorage.checksum != '')
+    .where(Image.security_indexed_engine < version, Image.parent_id >> None, ImageStorage.uploading == False, ImageStorage.checksum != '')
     .limit(BATCH_SIZE*10)
     .alias('candidates'))
 
@@ -44,7 +44,7 @@ def _get_image_to_export(version):
   # With analyzed parent
   candidates = (Image
       .select(Image.docker_image_id, ImageStorage.uuid, ImageStorage.checksum, Parent.docker_image_id.alias('parent_docker_image_id'), ParentImageStorage.uuid.alias('parent_storage_uuid'))
-      .join(Parent, on=(Image.parent == Parent.id))
+      .join(Parent, on=(Image.parent_id == Parent.id))
       .join(ParentImageStorage, on=(ParentImageStorage.id == Parent.storage))
       .switch(Image)
       .join(ImageStorage)