Phase 2 of Appr migration

Backfills the new tables from the old ones, and switches all code to read from and write to the new tables.
This commit is contained in:
Joseph Schorr 2018-07-08 10:15:43 +03:00
parent d75e932d09
commit acb7d626a4
4 changed files with 38 additions and 9 deletions


@@ -0,0 +1,29 @@
"""Backfill new appr tables

Revision ID: 5d463ea1e8a8
Revises: 610320e9dacf
Create Date: 2018-07-08 10:01:19.756126

"""

# revision identifiers, used by Alembic.
revision = '5d463ea1e8a8'
down_revision = '610320e9dacf'

from alembic import op
import sqlalchemy as sa

from util.migrate.table_ops import copy_table_contents


def upgrade(tables, tester):
  conn = op.get_bind()
  copy_table_contents('blob', 'apprblob', conn)
  copy_table_contents('manifest', 'apprmanifest', conn)
  copy_table_contents('manifestlist', 'apprmanifestlist', conn)
  copy_table_contents('blobplacement', 'apprblobplacement', conn)
  copy_table_contents('manifestblob', 'apprmanifestblob', conn)
  copy_table_contents('manifestlistmanifest', 'apprmanifestlistmanifest', conn)
  copy_table_contents('tag', 'apprtag', conn)


def downgrade(tables, tester):
  pass
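
For reference, the backfill relies on the copy_table_contents helper imported from util.migrate.table_ops above. A minimal sketch of what such a helper might do, assuming each old table and its appr* counterpart share the same column layout (the real implementation in util.migrate.table_ops may differ):

def copy_table_contents(source_table, destination_table, conn):
  # Assumed sketch: bulk-copy every row from the source table into the
  # destination table over the given connection. This only works if both
  # tables have identical column names and ordering.
  conn.execute('INSERT INTO %s SELECT * FROM %s' % (destination_table, source_table))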


@@ -310,5 +310,5 @@ class CNRAppModel(AppRegistryDataInterface):
    return appr_model.blob.get_blob_locations(digest, self.models_ref)


-# Phase 1: Read from old tables, disallow writing.
-model = CNRAppModel(OLD_MODELS, features.READONLY_APP_REGISTRY)
+# Phase 2: Read and write from new tables.
+model = CNRAppModel(NEW_MODELS, features.READONLY_APP_REGISTRY)
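
The CNR data-access code resolves its tables through the models bundle it is handed, so swapping OLD_MODELS for NEW_MODELS here is enough to redirect all reads and writes to the backfilled appr* tables. A rough illustration of how the two bundles could be defined, assuming a namedtuple-style ModelsRef and the Appr* model classes added in Phase 1 (field and class names below are illustrative, not taken from this commit):

from collections import namedtuple

# Assumed: these model classes exist in data.database after the Phase 1
# schema migration; the real ModelsRef in data/appr_model/models.py likely
# carries more fields (blob placements, manifest links, tag kinds, etc.).
from data.database import (Tag, Blob, Manifest, ManifestList,
                           ApprTag, ApprBlob, ApprManifest, ApprManifestList)

ModelsRef = namedtuple('ModelsRef', ['Tag', 'Blob', 'Manifest', 'ManifestList'])

OLD_MODELS = ModelsRef(Tag, Blob, Manifest, ManifestList)                  # legacy tables
NEW_MODELS = ModelsRef(ApprTag, ApprBlob, ApprManifest, ApprManifestList)  # new appr* tables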


@@ -27,7 +27,7 @@ from app import app, config_provider, all_queues, dockerfile_build_queue, notifi
from buildtrigger.basehandler import BuildTriggerHandler
from initdb import setup_database_for_testing, finished_database_for_testing
from data import database, model, appr_model
-from data.appr_model.models import OLD_MODELS
+from data.appr_model.models import NEW_MODELS
from data.database import RepositoryActionCount, Repository as RepositoryTable
from test.helpers import assert_action_logged
from util.secscan.fake import fake_security_scanner
@@ -2155,10 +2155,10 @@ class TestDeleteRepository(ApiTestCase):
    repository = model.repository.get_repository(ADMIN_ACCESS_USER, 'complex')

    # Add some CNR tags and linked tags.
-    base_tag = appr_model.tag.create_or_update_tag(repository, 'somebasetag', OLD_MODELS)
-    base_tag2 = appr_model.tag.create_or_update_tag(repository, 'somebasetag2', OLD_MODELS)
-    appr_model.tag.create_or_update_tag(repository, 'somelinkedtag', OLD_MODELS, linked_tag=base_tag)
-    appr_model.tag.create_or_update_tag(repository, 'somelinkedtag2', OLD_MODELS, linked_tag=base_tag2)
+    base_tag = appr_model.tag.create_or_update_tag(repository, 'somebasetag', NEW_MODELS)
+    base_tag2 = appr_model.tag.create_or_update_tag(repository, 'somebasetag2', NEW_MODELS)
+    appr_model.tag.create_or_update_tag(repository, 'somelinkedtag', NEW_MODELS, linked_tag=base_tag)
+    appr_model.tag.create_or_update_tag(repository, 'somelinkedtag2', NEW_MODELS, linked_tag=base_tag2)

    # Create some access tokens.
    access_token = model.token.create_access_token(repository, 'read')


@@ -1,6 +1,6 @@
from data import model
from data.appr_model import blob
-from data.appr_model.models import OLD_MODELS
+from data.appr_model.models import NEW_MODELS


def sync_database_with_config(config):
@@ -9,4 +9,4 @@ def sync_database_with_config(config):
  location_names = config.get('DISTRIBUTED_STORAGE_CONFIG', {}).keys()
  if location_names:
    model.image.ensure_image_locations(*location_names)
-    blob.ensure_blob_locations(OLD_MODELS, *location_names)
+    blob.ensure_blob_locations(NEW_MODELS, *location_names)
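
The storage-location sync now passes the new bundle straight through, and ensure_blob_locations creates any missing placement-location rows against whichever tables the bundle points at. A hedged sketch of the idea, assuming a Peewee-style model exposed on the bundle (the attribute and helper names below are assumptions, not taken from this commit):

def ensure_blob_locations(models_ref, *names):
  # Assumed sketch: guarantee a placement-location row for every configured
  # storage location, using the table referenced by the models bundle.
  BlobPlacementLocation = models_ref.BlobPlacementLocation
  for name in names:
    BlobPlacementLocation.get_or_create(name=name)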