Merge pull request #2182 from coreos-inc/fix-full-db-tests
Fix full database test script to not fail randomly
commit 64c954dc58

5 changed files with 41 additions and 19 deletions

initdb.py (22 changes)
@@ -222,8 +222,10 @@ def setup_database_for_testing(testcase, with_storage=False, force_rebuild=False
   logger.debug('Setting up DB for testing.')
 
   # Setup the database.
-  wipe_database()
-  initialize_database()
+  if os.environ.get('SKIP_DB_SCHEMA', '') != 'true':
+    wipe_database()
+    initialize_database()
+
   populate_database(with_storage=with_storage)
 
   models_missing_data = find_models_missing_data()
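For orientation, a minimal sketch of how a test case reaches this path; the test class below is illustrative, and only setup_database_for_testing() comes from the hunk above:

    import unittest

    from initdb import setup_database_for_testing

    class ExampleDatabaseTestCase(unittest.TestCase):
      def setUp(self):
        # With SKIP_DB_SCHEMA=true in the environment this call now only repopulates
        # test data; without it, the schema is still wiped and rebuilt as before.
        setup_database_for_testing(self)

      def test_example(self):
        pass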
@@ -416,6 +418,20 @@ def wipe_database():
 def populate_database(minimal=False, with_storage=False):
   logger.debug('Populating the DB with test data.')
 
+  # Note: databases set up with "real" schema (via Alembic) will not have these types
+  # defined, so we add them here if necessary.
+  try:
+    ImageStorageLocation.get(name='local_eu')
+    ImageStorageLocation.get(name='local_us')
+  except ImageStorageLocation.DoesNotExist:
+    ImageStorageLocation.create(name='local_eu')
+    ImageStorageLocation.create(name='local_us')
+
+  try:
+    NotificationKind.get(name='test_notification')
+  except NotificationKind.DoesNotExist:
+    NotificationKind.create(name='test_notification')
+
   new_user_1 = model.user.create_user('devtable', 'password', 'jschorr@devtable.com')
   new_user_1.verified = True
   new_user_1.stripe_id = TEST_STRIPE_ID
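The try/except blocks above make the fixture setup idempotent: the non-Alembic path presumably pre-creates these rows, while an Alembic-built schema does not, so both paths end up with the same data. As a side note, a more compact sketch of the same idea using peewee's Model.get_or_create, assuming the models are importable from data.database and the installed peewee returns an (instance, created) pair:

    from data.database import ImageStorageLocation, NotificationKind

    # Create each row only if it is not already present.
    for name in ('local_eu', 'local_us'):
      ImageStorageLocation.get_or_create(name=name)

    NotificationKind.get_or_create(name='test_notification')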
@@ -425,7 +441,7 @@ def populate_database(minimal=False, with_storage=False):
     logger.debug('Skipping most db population because user requested mininal db')
     return
 
-  UserRegion.create(user=new_user_1, location=1)
+  UserRegion.create(user=new_user_1, location=ImageStorageLocation.get(name='local_us'))
   model.release.set_region_release('quay', 'us', 'v0.1.2')
 
   model.user.create_confirm_email_code(new_user_1, new_email='typo@devtable.com')
@@ -4,7 +4,7 @@ up_mysql() {
   # Run a SQL database on port 3306 inside of Docker.
   docker run --name mysql -p 3306:3306 -e MYSQL_ROOT_PASSWORD=password -d mysql
 
-  # Sleep for 5s to get MySQL get started.
+  # Sleep for 10s to get MySQL get started.
+  echo 'Sleeping for 10...'
+  sleep 10
 
@@ -21,9 +21,9 @@ up_postgres() {
   # Run a SQL database on port 5432 inside of Docker.
   docker run --name postgres -p 5432:5432 -d postgres
 
-  # Sleep for 5s to get SQL get started.
-  echo 'Sleeping for 5...'
-  sleep 5
+  # Sleep for 10s to get SQL get started.
+  echo 'Sleeping for 10...'
+  sleep 10
 
   # Add the database to postgres.
   docker run --rm --link postgres:postgres postgres sh -c 'echo "create database genschema" | psql -h "$POSTGRES_PORT_5432_TCP_ADDR" -p "$POSTGRES_PORT_5432_TCP_PORT" -U postgres'
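The longer sleeps simply give the containers more time before the script connects. An alternative, not part of this change, is to poll until the server actually accepts connections; a rough Python sketch for the MySQL case, assuming the pymysql driver named in the test database URI is installed on the host (the Postgres case would be analogous):

    import time

    import pymysql

    def wait_for_mysql(host='127.0.0.1', user='root', password='password', timeout=60):
      # Poll until MySQL accepts a connection, instead of sleeping a fixed 10s.
      deadline = time.time() + timeout
      while True:
        try:
          pymysql.connect(host=host, user=user, password=password).close()
          return
        except pymysql.OperationalError:
          if time.time() >= deadline:
            raise
          time.sleep(1)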
@@ -35,20 +35,23 @@ down_postgres() {
 }
 
 run_tests() {
-  TEST_DATABASE_URI=$1 TEST=true python -m unittest discover -f
-  TEST_DATABASE_URI=$1 TEST=true python -m test.queue_threads -f
+  # Initialize the database with schema.
+  PYTHONPATH=. TEST_DATABASE_URI=$1 TEST=true alembic upgrade head
+
+  # Run the full test suite.
+  SKIP_DB_SCHEMA=true TEST_DATABASE_URI=$1 TEST=true python -m unittest discover -f
 }
 
-# NOTE: MySQL is currently broken on setup.
 # Test (and generate, if requested) via MySQL.
-#echo '> Starting MySQL'
-#up_mysql
+echo '> Starting MySQL'
+up_mysql
 
-#echo '> Running Full Test Suite (mysql)'
-#set +e
-#run_tests "mysql+pymysql://root:password@127.0.0.1/genschema"
-#set -e
-#down_mysql
+echo '> Running Full Test Suite (mysql)'
+set +e
+run_tests "mysql+pymysql://root:password@192.168.99.100/genschema"
+set -e
+down_mysql
 
 # Test via Postgres.
 echo '> Starting Postgres'
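run_tests() now builds the schema once with Alembic migrations and then sets SKIP_DB_SCHEMA=true so setup_database_for_testing() (see initdb.py above) leaves the schema alone and only repopulates data. A rough Python equivalent of the updated function, shown only to make the environment-variable handshake explicit (the helper name is illustrative):

    import os
    import subprocess

    def run_full_db_tests(database_uri):
      base = dict(os.environ, TEST='true', TEST_DATABASE_URI=database_uri)
      # Initialize the database with schema.
      subprocess.check_call(['alembic', 'upgrade', 'head'], env=dict(base, PYTHONPATH='.'))
      # Run the full test suite without wiping/rebuilding the schema per test.
      subprocess.check_call(['python', '-m', 'unittest', 'discover', '-f'],
                            env=dict(base, SKIP_DB_SCHEMA='true'))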
@@ -3,6 +3,7 @@
 import unittest
 import datetime
 import logging
+import time
 import re
 import json as py_json
 
@@ -2041,8 +2042,8 @@ class TestDeleteRepository(ApiTestCase):
                       params=dict(repository=self.SIMPLE_REPO))
 
     # Add a build queue item for the repo and another repo.
-    dockerfile_build_queue.put([ADMIN_ACCESS_USER, 'simple'], '{}')
-    dockerfile_build_queue.put([ADMIN_ACCESS_USER, 'anotherrepo'], '{}')
+    dockerfile_build_queue.put([ADMIN_ACCESS_USER, 'simple'], '{}', available_after=-1)
+    dockerfile_build_queue.put([ADMIN_ACCESS_USER, 'anotherrepo'], '{}', available_after=-1)
 
     # Delete the repository.
     self.deleteResponse(Repository, params=dict(repository=self.SIMPLE_REPO))
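A hedged illustration of why available_after=-1 makes this test deterministic: if the queue stores an item's availability time as roughly "now" plus the offset, at MySQL's whole-second DATETIME precision, an offset of 0 leaves the comparison against "now" dependent on sub-second timing, whereas -1 puts the item unambiguously in the past. The model below is illustrative, not the actual WorkQueue implementation:

    from datetime import datetime, timedelta

    def available_at(offset_seconds, now):
      # Illustrative model: availability stored at whole-second precision.
      return (now + timedelta(seconds=offset_seconds)).replace(microsecond=0)

    now = datetime(2016, 12, 1, 12, 0, 0, 600000)
    print(available_at(0, now))                                 # same second as "now"
    assert available_at(-1, now) < now.replace(microsecond=0)   # strictly in the past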
@@ -2563,6 +2564,7 @@ class TestRepositoryNotifications(ApiTestCase):
                       params=dict(repository=ADMIN_ACCESS_USER + '/simple', uuid=uuid))
 
     # Ensure the item is in the queue.
+    time.sleep(1) # Makes sure the queue get works on MySQL with its second-level precision.
     found = notification_queue.get()
     self.assertIsNotNone(found)
     self.assertTrue('notification_uuid' in found['body'])
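The one-second sleep exists for the same reason: with MySQL's second-level precision, two timestamps taken within the same second become indistinguishable once stored, so the test waits out the ambiguity before calling get(). A tiny sketch of the truncation effect:

    from datetime import datetime

    queued  = datetime(2016, 12, 1, 12, 0, 0, 100000)  # notification enqueued
    checked = datetime(2016, 12, 1, 12, 0, 0, 900000)  # test calls get()

    as_stored = lambda dt: dt.replace(microsecond=0)   # what a MySQL DATETIME keeps
    assert as_stored(queued) == as_stored(checked)     # ordering within the second is lost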
@@ -47,6 +47,7 @@ class TestStorageProxy(LiveServerTestCase):
     self.test_app.config['DISTRIBUTED_STORAGE_PREFERENCE'] = ['test']
     return self.test_app
 
+  @unittest.skipIf(os.environ.get('TEST_DATABASE_URI'), "not supported for non SQLite testing")
   def test_storage_proxy_auth_notinstalled(self):
     # Active direct download on the fake storage.
     self.storage.put_content(['test'], 'supports_direct_download', 'true')
@@ -65,6 +66,7 @@ class TestStorageProxy(LiveServerTestCase):
     self.assertEquals(404, resp.status_code)
 
 
+  @unittest.skipIf(os.environ.get('TEST_DATABASE_URI'), "not supported for non SQLite testing")
   def test_storage_proxy_auth(self):
     # Active direct download on the fake storage.
     self.storage.put_content(['test'], 'supports_direct_download', 'true')
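The new decorators rely on standard unittest behaviour: skipIf(condition, reason) skips the test whenever the condition is truthy, so these storage-proxy tests run under the default SQLite setup (TEST_DATABASE_URI unset) and are skipped during the MySQL/Postgres runs driven by the script above. A minimal self-contained example:

    import os
    import unittest

    class SkipExample(unittest.TestCase):
      @unittest.skipIf(os.environ.get('TEST_DATABASE_URI'), "not supported for non SQLite testing")
      def test_sqlite_only(self):
        # Runs only when TEST_DATABASE_URI is not set in the environment.
        self.assertTrue(True)

    if __name__ == '__main__':
      unittest.main()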
@@ -5,7 +5,6 @@ import time
 from app import app, storage, image_replication_queue
-from data.database import CloseForLongOperation
 from data import model
 from storage.basestorage import StoragePaths
 from workers.queueworker import QueueWorker, WorkerUnhealthyException
 
 logger = logging.getLogger(__name__)