Merge pull request #2182 from coreos-inc/fix-full-db-tests

Fix full database test script to not fail randomly
This commit is contained in:
josephschorr 2016-12-01 14:33:22 -05:00 committed by GitHub
commit 64c954dc58
5 changed files with 41 additions and 19 deletions

View file

@ -222,8 +222,10 @@ def setup_database_for_testing(testcase, with_storage=False, force_rebuild=False
logger.debug('Setting up DB for testing.') logger.debug('Setting up DB for testing.')
# Setup the database. # Setup the database.
wipe_database() if os.environ.get('SKIP_DB_SCHEMA', '') != 'true':
initialize_database() wipe_database()
initialize_database()
populate_database(with_storage=with_storage) populate_database(with_storage=with_storage)
models_missing_data = find_models_missing_data() models_missing_data = find_models_missing_data()
@ -416,6 +418,20 @@ def wipe_database():
def populate_database(minimal=False, with_storage=False): def populate_database(minimal=False, with_storage=False):
logger.debug('Populating the DB with test data.') logger.debug('Populating the DB with test data.')
# Note: databases set up with "real" schema (via Alembic) will not have these types
# types, so we add them here if necessary.
try:
ImageStorageLocation.get(name='local_eu')
ImageStorageLocation.get(name='local_us')
except ImageStorageLocation.DoesNotExist:
ImageStorageLocation.create(name='local_eu')
ImageStorageLocation.create(name='local_us')
try:
NotificationKind.get(name='test_notification')
except NotificationKind.DoesNotExist:
NotificationKind.create(name='test_notification')
new_user_1 = model.user.create_user('devtable', 'password', 'jschorr@devtable.com') new_user_1 = model.user.create_user('devtable', 'password', 'jschorr@devtable.com')
new_user_1.verified = True new_user_1.verified = True
new_user_1.stripe_id = TEST_STRIPE_ID new_user_1.stripe_id = TEST_STRIPE_ID
@ -425,7 +441,7 @@ def populate_database(minimal=False, with_storage=False):
logger.debug('Skipping most db population because user requested mininal db') logger.debug('Skipping most db population because user requested mininal db')
return return
UserRegion.create(user=new_user_1, location=1) UserRegion.create(user=new_user_1, location=ImageStorageLocation.get(name='local_us'))
model.release.set_region_release('quay', 'us', 'v0.1.2') model.release.set_region_release('quay', 'us', 'v0.1.2')
model.user.create_confirm_email_code(new_user_1, new_email='typo@devtable.com') model.user.create_confirm_email_code(new_user_1, new_email='typo@devtable.com')

View file

@ -4,7 +4,7 @@ up_mysql() {
# Run a SQL database on port 3306 inside of Docker. # Run a SQL database on port 3306 inside of Docker.
docker run --name mysql -p 3306:3306 -e MYSQL_ROOT_PASSWORD=password -d mysql docker run --name mysql -p 3306:3306 -e MYSQL_ROOT_PASSWORD=password -d mysql
# Sleep for 5s to get MySQL get started. # Sleep for 10s to get MySQL get started.
echo 'Sleeping for 10...' echo 'Sleeping for 10...'
sleep 10 sleep 10
@ -21,9 +21,9 @@ up_postgres() {
# Run a SQL database on port 5432 inside of Docker. # Run a SQL database on port 5432 inside of Docker.
docker run --name postgres -p 5432:5432 -d postgres docker run --name postgres -p 5432:5432 -d postgres
# Sleep for 5s to get SQL get started. # Sleep for 10s to get SQL get started.
echo 'Sleeping for 5...' echo 'Sleeping for 10...'
sleep 5 sleep 10
# Add the database to postgres. # Add the database to postgres.
docker run --rm --link postgres:postgres postgres sh -c 'echo "create database genschema" | psql -h "$POSTGRES_PORT_5432_TCP_ADDR" -p "$POSTGRES_PORT_5432_TCP_PORT" -U postgres' docker run --rm --link postgres:postgres postgres sh -c 'echo "create database genschema" | psql -h "$POSTGRES_PORT_5432_TCP_ADDR" -p "$POSTGRES_PORT_5432_TCP_PORT" -U postgres'
@ -35,20 +35,23 @@ down_postgres() {
} }
run_tests() { run_tests() {
TEST_DATABASE_URI=$1 TEST=true python -m unittest discover -f # Initialize the database with schema.
TEST_DATABASE_URI=$1 TEST=true python -m test.queue_threads -f PYTHONPATH=. TEST_DATABASE_URI=$1 TEST=true alembic upgrade head
# Run the full test suite.
SKIP_DB_SCHEMA=true TEST_DATABASE_URI=$1 TEST=true python -m unittest discover -f
} }
# NOTE: MySQL is currently broken on setup. # NOTE: MySQL is currently broken on setup.
# Test (and generate, if requested) via MySQL. # Test (and generate, if requested) via MySQL.
#echo '> Starting MySQL' echo '> Starting MySQL'
#up_mysql up_mysql
#echo '> Running Full Test Suite (mysql)' echo '> Running Full Test Suite (mysql)'
#set +e set +e
#run_tests "mysql+pymysql://root:password@127.0.0.1/genschema" run_tests "mysql+pymysql://root:password@192.168.99.100/genschema"
#set -e set -e
#down_mysql down_mysql
# Test via Postgres. # Test via Postgres.
echo '> Starting Postgres' echo '> Starting Postgres'

View file

@ -3,6 +3,7 @@
import unittest import unittest
import datetime import datetime
import logging import logging
import time
import re import re
import json as py_json import json as py_json
@ -2041,8 +2042,8 @@ class TestDeleteRepository(ApiTestCase):
params=dict(repository=self.SIMPLE_REPO)) params=dict(repository=self.SIMPLE_REPO))
# Add a build queue item for the repo and another repo. # Add a build queue item for the repo and another repo.
dockerfile_build_queue.put([ADMIN_ACCESS_USER, 'simple'], '{}') dockerfile_build_queue.put([ADMIN_ACCESS_USER, 'simple'], '{}', available_after=-1)
dockerfile_build_queue.put([ADMIN_ACCESS_USER, 'anotherrepo'], '{}') dockerfile_build_queue.put([ADMIN_ACCESS_USER, 'anotherrepo'], '{}', available_after=-1)
# Delete the repository. # Delete the repository.
self.deleteResponse(Repository, params=dict(repository=self.SIMPLE_REPO)) self.deleteResponse(Repository, params=dict(repository=self.SIMPLE_REPO))
@ -2563,6 +2564,7 @@ class TestRepositoryNotifications(ApiTestCase):
params=dict(repository=ADMIN_ACCESS_USER + '/simple', uuid=uuid)) params=dict(repository=ADMIN_ACCESS_USER + '/simple', uuid=uuid))
# Ensure the item is in the queue. # Ensure the item is in the queue.
time.sleep(1) # Makes sure the queue get works on MySQL with its second-level precision.
found = notification_queue.get() found = notification_queue.get()
self.assertIsNotNone(found) self.assertIsNotNone(found)
self.assertTrue('notification_uuid' in found['body']) self.assertTrue('notification_uuid' in found['body'])

View file

@ -47,6 +47,7 @@ class TestStorageProxy(LiveServerTestCase):
self.test_app.config['DISTRIBUTED_STORAGE_PREFERENCE'] = ['test'] self.test_app.config['DISTRIBUTED_STORAGE_PREFERENCE'] = ['test']
return self.test_app return self.test_app
@unittest.skipIf(os.environ.get('TEST_DATABASE_URI'), "not supported for non SQLite testing")
def test_storage_proxy_auth_notinstalled(self): def test_storage_proxy_auth_notinstalled(self):
# Active direct download on the fake storage. # Active direct download on the fake storage.
self.storage.put_content(['test'], 'supports_direct_download', 'true') self.storage.put_content(['test'], 'supports_direct_download', 'true')
@ -65,6 +66,7 @@ class TestStorageProxy(LiveServerTestCase):
self.assertEquals(404, resp.status_code) self.assertEquals(404, resp.status_code)
@unittest.skipIf(os.environ.get('TEST_DATABASE_URI'), "not supported for non SQLite testing")
def test_storage_proxy_auth(self): def test_storage_proxy_auth(self):
# Active direct download on the fake storage. # Active direct download on the fake storage.
self.storage.put_content(['test'], 'supports_direct_download', 'true') self.storage.put_content(['test'], 'supports_direct_download', 'true')

View file

@ -5,7 +5,6 @@ import time
from app import app, storage, image_replication_queue from app import app, storage, image_replication_queue
from data.database import CloseForLongOperation from data.database import CloseForLongOperation
from data import model from data import model
from storage.basestorage import StoragePaths
from workers.queueworker import QueueWorker, WorkerUnhealthyException from workers.queueworker import QueueWorker, WorkerUnhealthyException
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)