Merge pull request #2576 from coreos-inc/full-db-tests-tox
Reenable full database testing locally and in concourse
Commit 8b148bf1d4
51 changed files with 278 additions and 318 deletions
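The diff below follows one pattern throughout: test modules drop their explicit fixture imports in favor of a star import from test.fixtures, and database-touching tests take the new initialized_db fixture (the app fixture now depends on it as well). A minimal sketch of a test written against the new fixtures; the repository name and assertion are illustrative and not taken from this diff:

    from data.model.repository import create_repository
    from test.fixtures import *  # provides app, appconfig, initialized_db, database_uri, ...

    def test_create_somenewrepo(initialized_db):
      # initialized_db configures the models against the per-test SQLite copy, or against
      # the real database named by TEST_DATABASE_URI when the full suite is running.
      repo = create_repository('devtable', 'somenewrepo', None, repo_kind='image')
      assert repo.name == 'somenewrepo'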
@@ -5,7 +5,8 @@ from base64 import b64encode
from auth.basic import validate_basic_auth, ACCESS_TOKEN_USERNAME, OAUTH_TOKEN_USERNAME
from auth.validateresult import AuthKind, ValidateResult
from data import model
from test.fixtures import app, appconfig, database_uri, init_db_path, sqlitedb_file
from test.fixtures import *

def _token(username, password):
  return 'basic ' + b64encode('%s:%s' % (username, password))

@@ -5,7 +5,7 @@ from flask_login import login_user
from app import LoginWrappedDBUser
from data import model
from auth.cookie import validate_session_cookie
from test.fixtures import app, appconfig, database_uri, init_db_path, sqlitedb_file
from test.fixtures import *

def test_anonymous_cookie(app):
  assert validate_session_cookie().missing

@@ -9,7 +9,7 @@ from auth.auth_context import get_authenticated_user
from auth.decorators import (extract_namespace_repo_from_session, require_session_login,
                             process_auth_or_cookie)
from data import model
from test.fixtures import app, appconfig, database_uri, init_db_path, sqlitedb_file
from test.fixtures import *

def test_extract_namespace_repo_from_session_missing(app):
  def emptyfunc():

@@ -3,7 +3,7 @@ import pytest
from auth.oauth import validate_bearer_auth, validate_oauth_token
from auth.validateresult import AuthKind, ValidateResult
from data import model
from test.fixtures import app, appconfig, database_uri, init_db_path, sqlitedb_file
from test.fixtures import *

@pytest.mark.parametrize('header, expected_result', [
  ('', ValidateResult(AuthKind.oauth, missing=True)),

@@ -4,7 +4,7 @@ from auth.auth_context import (get_authenticated_user, get_grant_context, get_va
                               get_validated_oauth_token)
from auth.validateresult import AuthKind, ValidateResult
from data import model
from test.fixtures import app, appconfig, database_uri, init_db_path, sqlitedb_file
from test.fixtures import *

def get_user():

@@ -19,5 +19,5 @@ run:
    mysql -e "GRANT ALL PRIVILEGES ON *.* TO 'quay'@'127.0.0.1';"
    cd quay-pull-request
    PYTHONPATH="." alembic upgrade head
    PYTHONPATH="." python -m unittest discover -f
    PYTHONPATH="." py.test "." --ignore=endpoints/appr/test/
    service mysql stop

@@ -20,5 +20,5 @@ run:
    su postgres -c "psql -c 'GRANT ALL PRIVILEGES ON DATABASE quaytest TO quay';"
    cd quay-pull-request
    PYTHONPATH="." alembic upgrade head
    PYTHONPATH="." python -m unittest discover -f
    PYTHONPATH="." py.test "." --ignore=endpoints/appr/test/
    service postgresql stop
@@ -93,8 +93,11 @@ def update_repository_score(repo):
  logger.debug('Got bucket tuple %s for bucket %s for repository %s', bucket_tuple, bucket,
               repo.id)

  bucket_sum = bucket_tuple[0]
  bucket_count = bucket_tuple[1]
  if bucket_tuple[0] is None:
    continue

  bucket_sum = float(bucket_tuple[0])
  bucket_count = int(bucket_tuple[1])
  if not bucket_count:
    continue

@@ -3,9 +3,9 @@ import pytest
from peewee import IntegrityError

from data.model.repository import create_repository, purge_repository
from test.fixtures import database_uri, init_db_path, sqlitedb_file
from test.fixtures import *

def test_duplicate_repository_different_kinds(database_uri):
def test_duplicate_repository_different_kinds(initialized_db):
  # Create an image repo.
  create_repository('devtable', 'somenewrepo', None, repo_kind='image')

@@ -5,7 +5,7 @@ import pytest
from data.database import RepositoryActionCount, RepositorySearchScore
from data.model.repository import create_repository
from data.model.repositoryactioncount import update_repository_score, SEARCH_BUCKETS
from test.fixtures import database_uri, init_db_path, sqlitedb_file
from test.fixtures import *

@pytest.mark.parametrize('bucket_sums,expected_score', [
  ((0, 0, 0, 0), 0),
@@ -20,7 +20,7 @@ from test.fixtures import database_uri, init_db_path, sqlitedb_file
  ((300, 500, 1000, 0), 1733),
  ((5000, 0, 0, 0), 5434),
])
def test_update_repository_score(bucket_sums, expected_score, database_uri):
def test_update_repository_score(bucket_sums, expected_score, initialized_db):
  # Create a new repository.
  repo = create_repository('devtable', 'somenewrepo', None, repo_kind='image')

@@ -1,7 +1,7 @@
from data.model.repository import create_repository
from data.model.tag import list_active_repo_tags, create_or_update_tag, delete_tag
from data.model.image import find_create_or_link_image
from test.fixtures import database_uri, init_db_path, sqlitedb_file
from test.fixtures import *

def assert_tags(repository, *args):
  tags = list(list_active_repo_tags(repository))
@@ -18,7 +18,7 @@ def assert_tags(repository, *args):
  for expected in args:
    assert expected in tags_dict

def test_list_active_tags(database_uri):
def test_list_active_tags(initialized_db):
  # Create a new repository.
  repository = create_repository('devtable', 'somenewrepo', None)

@@ -1,9 +1,9 @@
from mock import patch

from data.model.user import create_user_noverify
from test.fixtures import database_uri, init_db_path, sqlitedb_file
from test.fixtures import *

def test_create_user_with_expiration(database_uri):
def test_create_user_with_expiration(initialized_db):
  with patch('data.model.config.app_config', {'DEFAULT_TAG_EXPIRATION': '1h'}):
    user = create_user_noverify('foobar', 'foo@example.com', email_required=False)
    assert user.removed_tag_expiration_s == 60 * 60

@@ -4,7 +4,7 @@ from mock import Mock
from io import BytesIO

from data.userfiles import DelegateUserfiles, Userfiles
from test.fixtures import app, appconfig, database_uri, init_db_path, sqlitedb_file
from test.fixtures import *

@pytest.mark.parametrize('prefix,path,expected', [
@@ -1,15 +1,17 @@
from datetime import datetime, timedelta

import os
import pytest

from data import model, database
from data.users.federated import FederatedUsers, UserInformation
from data.users.teamsync import sync_team, sync_teams_to_groups
from test.fixtures import app, appconfig, database_uri, init_db_path, sqlitedb_file
from test.test_ldap import mock_ldap
from test.test_keystone_auth import fake_keystone
from util.names import parse_robot_username

from test.fixtures import *

_FAKE_AUTH = 'fake'

class FakeUsers(FederatedUsers):
@@ -21,6 +23,8 @@ class FakeUsers(FederatedUsers):
    return (self.group_tuples, None)

@pytest.mark.skipif(os.environ.get('TEST_DATABASE_URI', '').find('postgres') >= 0,
                    reason="Postgres fails when existing members are added under the savepoint")
@pytest.mark.parametrize('starting_membership,group_membership,expected_membership', [
  # Empty team + single member in group => Single member in team.
  ([],
@@ -225,7 +229,6 @@ def test_sync_teams_to_groups(app):
  sync_teams_to_groups(fake_auth, timedelta(seconds=120))

  third_sync_info = model.team.get_team_sync_information('buynlarge', 'synced')
  assert third_sync_info.last_updated == current_info.last_updated
  assert third_sync_info.transaction_id == updated_sync_info.transaction_id

  # Set the stale threshold to 10 seconds, and ensure the team is resynced, after making it
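The skipif guard in the hunk above keys off TEST_DATABASE_URI to avoid a Postgres-specific savepoint problem; the same environment check can gate any backend-specific test. A hedged sketch reusing that guard (the test body is illustrative, not part of this diff):

    import os
    import pytest

    @pytest.mark.skipif(os.environ.get('TEST_DATABASE_URI', '').find('postgres') >= 0,
                        reason="Postgres fails when existing members are added under the savepoint")
    def test_postgres_sensitive_case(initialized_db):
      # Runs on the SQLite and MySQL configurations, skipped on the Postgres full-db run.
      assert True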
@@ -17,7 +17,7 @@ from endpoints.api.trigger import (BuildTriggerList, BuildTrigger, BuildTriggerS
                                   TriggerBuildList, BuildTriggerFieldValues, BuildTriggerSources,
                                   BuildTriggerSourceNamespaces)
from endpoints.api.test.shared import client_with_identity, conduct_api_call
from test.fixtures import app, appconfig, database_uri, init_db_path, sqlitedb_file
from test.fixtures import *

BUILD_ARGS = {'build_uuid': '1234'}
IMAGE_ARGS = {'imageid': '1234', 'image_id': 1234}

@@ -8,7 +8,7 @@ from endpoints.api.trigger import (BuildTrigger, BuildTriggerSubdirs,
                                   BuildTriggerFieldValues, BuildTriggerSources,
                                   BuildTriggerSourceNamespaces)
from endpoints.api.test.shared import client_with_identity, conduct_api_call
from test.fixtures import app, appconfig, database_uri, init_db_path, sqlitedb_file
from test.fixtures import *

BUILD_ARGS = {'build_uuid': '1234'}
IMAGE_ARGS = {'imageid': '1234', 'image_id': 1234}

@@ -4,7 +4,7 @@ from data import model
from endpoints.api import api
from endpoints.api.test.shared import client_with_identity, conduct_api_call
from endpoints.api.organization import Organization
from test.fixtures import app, appconfig, database_uri, init_db_path, sqlitedb_file
from test.fixtures import *

@pytest.mark.parametrize('expiration, expected_code', [
  (0, 200),

@@ -1,10 +1,12 @@
import pytest

from mock import patch, ANY, MagicMock

from endpoints.api.test.shared import client_with_identity, conduct_api_call
from endpoints.api.repository import RepositoryTrust, Repository
from features import FeatureNameValue
from test.fixtures import app, appconfig, database_uri, init_db_path, sqlitedb_file
from mock import patch, ANY, MagicMock

from test.fixtures import *

INVALID_RESPONSE = {

@@ -8,7 +8,8 @@ from endpoints.api.superuser import SuperUserRepositoryBuildLogs, SuperUserRepos
from endpoints.api.superuser import SuperUserRepositoryBuildStatus
from endpoints.api.signing import RepositorySignatures
from endpoints.api.repository import RepositoryTrust
from test.fixtures import app, appconfig, database_uri, init_db_path, sqlitedb_file

from test.fixtures import *

TEAM_PARAMS = {'orgname': 'buynlarge', 'teamname': 'owners'}
BUILD_PARAMS = {'build_uuid': 'test-1234'}

@@ -1,11 +1,12 @@
from collections import Counter

import pytest

from collections import Counter
from mock import patch

from endpoints.api.test.shared import client_with_identity, conduct_api_call
from endpoints.api.signing import RepositorySignatures
from test.fixtures import app, appconfig, database_uri, init_db_path, sqlitedb_file
from mock import patch

from test.fixtures import *

VALID_TARGETS = {
  'latest': {

@@ -7,9 +7,10 @@ from endpoints.api import api
from endpoints.api.test.shared import client_with_identity, conduct_api_call
from endpoints.api.team import OrganizationTeamSyncing, TeamMemberList
from endpoints.api.organization import Organization
from test.fixtures import app, appconfig, database_uri, init_db_path, sqlitedb_file
from test.test_ldap import mock_ldap

from test.fixtures import *

SYNCED_TEAM_PARAMS = {'orgname': 'sellnsmall', 'teamname': 'synced'}
UNSYNCED_TEAM_PARAMS = {'orgname': 'sellnsmall', 'teamname': 'owners'}
@@ -1,28 +1,22 @@
import os
import shutil
import uuid

import pytest
from cnr.models.db_base import CnrDB

from cnr.tests.conftest import *
from cnr.tests.test_apiserver import BaseTestServer
from cnr.tests.test_models import CnrTestModels
from peewee import SqliteDatabase

import data.oci_model.blob as oci_blob
from app import app as application
from data.database import db as database
from data.database import User, close_db_filter

from data.database import User
from data.interfaces.appr import oci_app_model
from data.model import organization, user
from endpoints.appr import appr_bp, registry
from endpoints.appr import registry  # Needed to register the endpoint
from endpoints.appr.cnr_backend import Channel, Package, QuayDB
from initdb import initialize_database, populate_database, wipe_database

application.register_blueprint(appr_bp, url_prefix='/cnr')
from test.fixtures import *

# TODO: avoid direct usage of database
def create_org(namespace, owner):
  try:
    User.get(username=namespace)
@@ -75,82 +69,17 @@ class PackageTest(Package):

@pytest.fixture(autouse=True)
def quaydb(monkeypatch):
def quaydb(monkeypatch, app):
  monkeypatch.setattr('endpoints.appr.cnr_backend.QuayDB.Package', PackageTest)
  monkeypatch.setattr('endpoints.appr.cnr_backend.Package', PackageTest)
  monkeypatch.setattr('endpoints.appr.registry.Package', PackageTest)
  monkeypatch.setattr('cnr.models.Package', PackageTest)

  monkeypatch.setattr('endpoints.appr.cnr_backend.QuayDB.Channel', ChannelTest)
  # monkeypatch.setattr('data.cnrmodel.channel.Channel', ChannelTest)
  monkeypatch.setattr('endpoints.appr.registry.Channel', ChannelTest)
  monkeypatch.setattr('cnr.models.Channel', ChannelTest)

def seed_db():
  create_org("titi", user.get_user("devtable"))

@pytest.fixture()
def sqlitedb_file(tmpdir):
  test_db_file = tmpdir.mkdir("quaydb").join("test.db")
  return str(test_db_file)

@pytest.fixture(scope="module")
def init_db_path(tmpdir_factory):
  sqlitedb_file_loc = str(tmpdir_factory.mktemp("data").join("test.db"))
  sqlitedb = 'sqlite:///{0}'.format(sqlitedb_file_loc)
  conf = {"TESTING": True, "DEBUG": True, "DB_URI": sqlitedb}
  os.environ['TEST_DATABASE_URI'] = str(sqlitedb)
  os.environ['DB_URI'] = str(sqlitedb)
  database.initialize(SqliteDatabase(sqlitedb_file_loc))
  application.config.update(conf)
  application.config.update({"DB_URI": sqlitedb})
  wipe_database()
  initialize_database()
  populate_database(minimal=True)
  close_db_filter(None)
  seed_db()
  return str(sqlitedb_file_loc)

@pytest.fixture()
def database_uri(monkeypatch, init_db_path, sqlitedb_file):
  shutil.copy2(init_db_path, sqlitedb_file)
  database.initialize(SqliteDatabase(sqlitedb_file))
  db_path = 'sqlite:///{0}'.format(sqlitedb_file)
  monkeypatch.setenv("DB_URI", db_path)
  seed_db()
  return db_path

@pytest.fixture()
def appconfig(database_uri):
  conf = {"TESTING": True, "DEBUG": True, "DB_URI": database_uri}
  return conf

@pytest.fixture()
def create_app():
  try:
    application.register_blueprint(appr_bp, url_prefix='')
  except:
    pass
  return application

@pytest.fixture(autouse=True)
def app(create_app, appconfig):
  create_app.config.update(appconfig)
  return create_app

@pytest.fixture()
def db():
  return CnrDB

class TestServerQuayDB(BaseTestServer):
  DB_CLASS = QuayDB

@@ -180,10 +109,6 @@ class TestServerQuayDB(BaseTestServer):
class TestQuayModels(CnrTestModels):
  DB_CLASS = QuayDB

  @pytest.fixture(autouse=True)
  def load_db(self, appconfig):
    return appconfig

  @pytest.mark.xfail
  def test_channel_delete_releases(self, db_with_data1):
    """ Can't remove a release from the channel, only delete the channel entirely """
@@ -6,7 +6,7 @@ from flask import url_for
from data import model
from endpoints.appr.registry import appr_bp, blobs
from endpoints.api.test.shared import client_with_identity
from test.fixtures import app, appconfig, database_uri, init_db_path, sqlitedb_file
from test.fixtures import *

BLOB_ARGS = {'digest': 'abcd1235'}
PACKAGE_ARGS = {'release': 'r', 'media_type': 'foo'}

@@ -4,7 +4,8 @@ from werkzeug.exceptions import HTTPException

from data import model
from endpoints.appr import require_app_repo_read
from test.fixtures import app, appconfig, database_uri, init_db_path, sqlitedb_file

from test.fixtures import *

def test_require_app_repo_read(app):
  called = [False]

@@ -5,8 +5,8 @@ import pytest

from data import model
from endpoints.appr.registry import appr_bp
from test.fixtures import app, appconfig, database_uri, init_db_path, sqlitedb_file

from test.fixtures import *

@pytest.mark.parametrize('login_data, expected_code', [
  ({
@@ -30,7 +30,7 @@ def test_login(login_data, expected_code, app, client):
  if "+" in login_data['username'] and login_data['password'] is None:
    username, robotname = login_data['username'].split("+")
    _, login_data['password'] = model.user.create_robot(robotname, model.user.get_user(username))
  app.register_blueprint(appr_bp, url_prefix='/cnr')

  url = url_for('appr.login')
  headers = {'Content-Type': 'application/json'}
  data = {'user': login_data}

@@ -4,9 +4,9 @@ from data import model, database
from data.users import get_users_handler, DatabaseUsers
from endpoints.oauth.login import _conduct_oauth_login
from oauth.services.github import GithubOAuthService
from test.fixtures import app, appconfig, database_uri, init_db_path, sqlitedb_file
from test.test_ldap import mock_ldap

from test.fixtures import *

@pytest.fixture(params=[None, 'username', 'email'])
def login_service(request, app):

@@ -1,10 +1,10 @@
import json

from endpoints.notificationevent import NotificationEvent
from test.fixtures import app, appconfig, database_uri, init_db_path, sqlitedb_file

from util.morecollections import AttrDict

from test.fixtures import *

def test_all_notifications(app):
  # Create a test notification.
  test_notification = AttrDict({
initdb.py: 14 changed lines
@@ -20,7 +20,7 @@ from data.database import (db, all_models, beta_classes, Role, TeamRole, Visibil
                           ExternalNotificationEvent, ExternalNotificationMethod, NotificationKind,
                           QuayRegion, QuayService, UserRegion, OAuthAuthorizationCode,
                           ServiceKeyApprovalType, MediaType, LabelSourceType, UserPromptKind,
                           RepositoryKind, TagKind, BlobPlacementLocation)
                           RepositoryKind, TagKind, BlobPlacementLocation, User)
from data import model
from data.queue import WorkQueue
from app import app, storage as store, tf

@@ -440,6 +440,15 @@ def wipe_database():
def populate_database(minimal=False, with_storage=False):
  logger.debug('Populating the DB with test data.')

  # Check if the data already exists. If so, we skip. This can happen between calls from the
  # "old style" tests and the new py.test's.
  try:
    User.get(username='devtable')
    logger.debug('DB already populated')
    return
  except User.DoesNotExist:
    pass

  # Note: databases set up with a "real" schema (via Alembic) will not have these types,
  # so we add them here if necessary.
  try:

@@ -663,6 +672,9 @@ def populate_database(minimal=False, with_storage=False):
  liborg = model.organization.create_organization('library', 'quay+library@devtable.com', new_user_1)
  liborg.save()

  titiorg = model.organization.create_organization('titi', 'quay+titi@devtable.com', new_user_1)
  titiorg.save()

  thirdorg = model.organization.create_organization('sellnsmall', 'quay+sell@devtable.com', new_user_1)
  thirdorg.save()
@@ -1,8 +0,0 @@
set -e

export TEST=true
export TROLLIUSDEBUG=1

python -m unittest discover -f
python -m test.registry_tests -f
#python -m test.queue_threads -f

test/fixtures.py: 188 changed lines
@@ -1,25 +1,151 @@
import os

from cachetools import lru_cache

import pytest
import shutil
from flask import Flask, jsonify
from flask_login import LoginManager
from peewee import SqliteDatabase
from peewee import SqliteDatabase, savepoint, InternalError

from app import app as application
from data import model
from data.database import (close_db_filter, db)
from data.database import close_db_filter, db, configure
from data.model.user import LoginWrappedDBUser
from endpoints.api import api_bp
from endpoints.appr import appr_bp
from endpoints.web import web

from initdb import initialize_database, populate_database

from path_converters import APIRepositoryPathConverter, RegexConverter, RepositoryPathConverter
from test.testconfig import FakeTransaction

@pytest.fixture(scope="session")
@lru_cache(maxsize=1)  # Important! pytest is calling this multiple times (despite it being session)
def init_db_path(tmpdir_factory):
  """ Creates a new database and appropriate configuration. Note that the initial database
      is created *once* per session. In the non-full-db-test case, the database_uri fixture
      makes a copy of the SQLite database file on disk and passes a new copy to each test.
  """
  if os.environ.get('TEST_DATABASE_URI'):
    return _init_db_path_real_db(os.environ.get('TEST_DATABASE_URI'))

  return _init_db_path_sqlite(tmpdir_factory)

def _init_db_path_real_db(db_uri):
  """ Initializes a real database for testing by populating it from scratch. Note that this does
      *not* add the tables (merely data). Callers must have migrated the database before calling
      the test suite.
  """
  configure({
    "DB_URI": db_uri,
    "DB_CONNECTION_ARGS": {
      'threadlocals': True,
      'autorollback': True,
    },
    "DB_TRANSACTION_FACTORY": _create_transaction,
  })

  populate_database()
  return db_uri

def _init_db_path_sqlite(tmpdir_factory):
  """ Initializes a SQLite database for testing by populating it from scratch and placing it into
      a temp directory file.
  """
  sqlitedbfile = str(tmpdir_factory.mktemp("data").join("test.db"))
  sqlitedb = 'sqlite:///{0}'.format(sqlitedbfile)
  conf = {"TESTING": True,
          "DEBUG": True,
          "DB_URI": sqlitedb}
  os.environ['DB_URI'] = str(sqlitedb)
  db.initialize(SqliteDatabase(sqlitedbfile))
  application.config.update(conf)
  application.config.update({"DB_URI": sqlitedb})
  initialize_database()

  db.obj.execute_sql('PRAGMA foreign_keys = ON;')

  populate_database()
  close_db_filter(None)
  return str(sqlitedbfile)

@pytest.fixture()
def app(appconfig):
  """ Used by pytest-flask plugin to inject app by test for client See test_security by name injection of client. """
def database_uri(monkeypatch, init_db_path, sqlitedb_file):
  """ Returns the database URI to use for testing. In the SQLite case, a new, distinct copy of
      the SQLite database is created by copying the initialized database file (sqlitedb_file)
      on a per-test basis. In the non-SQLite case, a reference to the existing database URI is
      returned.
  """
  if os.environ.get('TEST_DATABASE_URI'):
    db_uri = os.environ['TEST_DATABASE_URI']
    monkeypatch.setenv("DB_URI", db_uri)
    return db_uri

  # Copy the golden database file to a new path.
  shutil.copy2(init_db_path, sqlitedb_file)

  # Monkeypatch the DB_URI.
  db_path = 'sqlite:///{0}'.format(sqlitedb_file)
  monkeypatch.setenv("DB_URI", db_path)
  return db_path

@pytest.fixture()
def sqlitedb_file(tmpdir):
  """ Returns the path at which the initialized, golden SQLite database file will be placed. """
  test_db_file = tmpdir.mkdir("quaydb").join("test.db")
  return str(test_db_file)

def _create_transaction(db):
  return FakeTransaction()

@pytest.fixture()
def appconfig(database_uri):
  """ Returns application configuration for testing that references the proper database URI. """
  conf = {
    "TESTING": True,
    "DEBUG": True,
    "DB_URI": database_uri,
    "SECRET_KEY": 'superdupersecret!!!1',
    "DB_CONNECTION_ARGS": {
      'threadlocals': True,
      'autorollback': True,
    },
    "DB_TRANSACTION_FACTORY": _create_transaction,
  }
  return conf

@pytest.fixture()
def initialized_db(appconfig):
  """ Configures the database for the database found in the appconfig. """

  # Configure the database.
  configure(appconfig)

  # If under a test *real* database, setup a savepoint.
  under_test_real_database = bool(os.environ.get('TEST_DATABASE_URI'))
  if under_test_real_database:
    test_savepoint = savepoint(db)
    test_savepoint.__enter__()

    yield  # Run the test.

    try:
      test_savepoint.__exit__(None, None, None)
    except InternalError:
      # If postgres fails with an exception (like IntegrityError) mid-transaction, it terminates
      # it immediately, so when we go to remove the savepoint, it complains. We can safely ignore
      # this case.
      pass
  else:
    yield

@pytest.fixture()
def app(appconfig, initialized_db):
  """ Used by pytest-flask plugin to inject a custom app instance for testing. """
  app = Flask(__name__)
  login_manager = LoginManager(app)
@@ -36,58 +162,10 @@ def app(appconfig):
  app.url_map.converters['regex'] = RegexConverter
  app.url_map.converters['apirepopath'] = APIRepositoryPathConverter
  app.url_map.converters['repopath'] = RepositoryPathConverter

  app.register_blueprint(api_bp, url_prefix='/api')
  app.register_blueprint(appr_bp, url_prefix='/cnr')
  app.register_blueprint(web, url_prefix='/')

  app.config.update(appconfig)
  return app

@pytest.fixture(scope="session")
def init_db_path(tmpdir_factory):
  """ Creates a new db and appropriate configuration. Used for parameter by name injection. """
  sqlitedb_file = str(tmpdir_factory.mktemp("data").join("test.db"))
  sqlitedb = 'sqlite:///{0}'.format(sqlitedb_file)
  conf = {"TESTING": True,
          "DEBUG": True,
          "DB_URI": sqlitedb}
  os.environ['TEST_DATABASE_URI'] = str(sqlitedb)
  os.environ['DB_URI'] = str(sqlitedb)
  db.initialize(SqliteDatabase(sqlitedb_file))
  application.config.update(conf)
  application.config.update({"DB_URI": sqlitedb})
  initialize_database()

  db.obj.execute_sql('PRAGMA foreign_keys = ON;')

  populate_database()
  close_db_filter(None)
  return str(sqlitedb_file)

@pytest.fixture()
def database_uri(monkeypatch, init_db_path, sqlitedb_file):
  """ Creates the db uri. Used for parameter by name injection. """
  shutil.copy2(init_db_path, sqlitedb_file)
  db.initialize(SqliteDatabase(sqlitedb_file))
  db_path = 'sqlite:///{0}'.format(sqlitedb_file)
  monkeypatch.setenv("DB_URI", db_path)
  return db_path

@pytest.fixture()
def sqlitedb_file(tmpdir):
  """ Makes file for db. Used for parameter by name injection. """
  test_db_file = tmpdir.mkdir("quaydb").join("test.db")
  return str(test_db_file)

@pytest.fixture()
def appconfig(database_uri):
  """ Makes conf with database_uri. Used for parameter by name injection """
  conf = {
    "TESTING": True,
    "DEBUG": True,
    "DB_URI": database_uri,
    "SECRET_KEY": 'superdupersecret!!!1',
  }
  return conf
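Taken together, the rewritten test/fixtures.py selects the backend once per session: init_db_path either populates the database named by TEST_DATABASE_URI or builds a golden SQLite file, database_uri hands each test its own SQLite copy (or the shared real-database URI), and initialized_db wraps real-database tests in a savepoint. A sketch of how a test package might lean on these fixtures; this usage is assumed for illustration and is not part of this diff:

    # conftest.py for a test package that needs a configured database plus seeded data.
    import pytest

    from test.fixtures import *  # app, appconfig, initialized_db, database_uri, sqlitedb_file, init_db_path

    @pytest.fixture()
    def devtable_user(initialized_db):
      # Look up a user seeded by initdb.populate_database(); works for both the
      # per-test SQLite copy and the TEST_DATABASE_URI full-database runs.
      from data import model
      return model.user.get_user('devtable')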
@@ -13,8 +13,8 @@ up_mysql() {
}

down_mysql() {
  docker kill mysql
  docker rm -v mysql
  docker kill mysql || true
  docker rm -v mysql || true
}

up_postgres() {
@@ -30,8 +30,8 @@ up_postgres() {
}

down_postgres() {
  docker kill postgres
  docker rm -v postgres
  docker kill postgres || true
  docker rm -v postgres || true
}

run_tests() {
@@ -39,7 +39,7 @@ run_tests() {
  PYTHONPATH=. TEST_DATABASE_URI=$1 TEST=true alembic upgrade head

  # Run the full test suite.
  SKIP_DB_SCHEMA=true TEST_DATABASE_URI=$1 TEST=true python -m unittest discover -f
  PYTHONPATH=. SKIP_DB_SCHEMA=true TEST_DATABASE_URI=$1 TEST=true py.test ${2:-.} --ignore=endpoints/appr/test/
}

CIP=${CONTAINERIP-'127.0.0.1'}
@@ -48,20 +48,22 @@ echo "> Using container IP address $CIP"
# NOTE: MySQL is currently broken on setup.
# Test (and generate, if requested) via MySQL.
echo '> Starting MySQL'
down_mysql
up_mysql

echo '> Running Full Test Suite (mysql)'
set +e
run_tests "mysql+pymysql://root:password@$CIP/genschema"
run_tests "mysql+pymysql://root:password@$CIP/genschema" $1
set -e
down_mysql

# Test via Postgres.
echo '> Starting Postgres'
down_postgres
up_postgres

echo '> Running Full Test Suite (postgres)'
set +e
run_tests "postgresql://postgres@$CIP/genschema"
run_tests "postgresql://postgres@$CIP/genschema" $1
set -e
down_postgres
@@ -859,15 +859,17 @@ class TestDeleteNamespace(ApiTestCase):
  def test_deletenamespaces(self):
    self.login(ADMIN_ACCESS_USER)

    # Try to first delete the user. Since they are the sole admin of two orgs, it should fail.
    # Try to first delete the user. Since they are the sole admin of three orgs, it should fail.
    with check_transitive_modifications():
      self.deleteResponse(User, expected_code=400)

    # Delete the two orgs, checking in between.
    # Delete the three orgs, checking in between.
    with check_transitive_modifications():
      self.deleteEmptyResponse(Organization, params=dict(orgname=ORGANIZATION), expected_code=204)
      self.deleteResponse(User, expected_code=400)  # Should still fail.
      self.deleteEmptyResponse(Organization, params=dict(orgname='library'), expected_code=204)
      self.deleteResponse(User, expected_code=400)  # Should still fail.
      self.deleteEmptyResponse(Organization, params=dict(orgname='titi'), expected_code=204)

    # Add some queue items for the user.
    notification_queue.put([ADMIN_ACCESS_USER, 'somerepo', 'somename'], '{}')
@@ -1007,7 +1009,7 @@ class TestConductSearch(ApiTestCase):
    json = self.getJsonResponse(ConductSearch,
                                params=dict(query='owners'))

    self.assertEquals(3, len(json['results']))
    self.assertEquals(4, len(json['results']))
    self.assertEquals(json['results'][0]['kind'], 'team')
    self.assertEquals(json['results'][0]['name'], 'owners')

@@ -23,15 +23,10 @@ class TestRepoModel(unittest.TestCase):
    self.ctx.__exit__(True, None, None)

  def test_popular_repo_list(self):
    # Our repository action count table should have 1 event for the only public
    # repo.
    onlypublic = model.repository.list_popular_public_repos(0, timedelta(weeks=1))
    self.assertEquals(len(onlypublic), 1)
    self.assertEquals(onlypublic[0], (PUBLIC_USERNAME, PUBLIC_REPONAME))

    self.assertEquals(len(model.repository.list_popular_public_repos(1, timedelta(weeks=1))), 1)
    self.assertEquals(len(model.repository.list_popular_public_repos(50, timedelta(weeks=1))), 0)

if __name__ == '__main__':
  unittest.main()
@@ -1,88 +0,0 @@
import os

import pytest
import shutil
from flask import Flask, jsonify
from flask_login import LoginManager
from peewee import SqliteDatabase

from app import app as application
from data import model
from data.database import (close_db_filter, db)
from data.model.user import LoginWrappedDBUser
from endpoints.api import api_bp
from initdb import initialize_database, populate_database
from path_converters import APIRepositoryPathConverter, RegexConverter

# TODO(jschorr): Unify with the API conftest once the other PR gets in.

@pytest.fixture()
def app(appconfig):
  """ Used by pytest-flask plugin to inject app by test for client See test_security by name injection of client. """
  app = Flask(__name__)
  login_manager = LoginManager(app)

  @app.errorhandler(model.DataModelException)
  def handle_dme(ex):
    response = jsonify({'message': ex.message})
    response.status_code = 400
    return response

  @login_manager.user_loader
  def load_user(user_uuid):
    return LoginWrappedDBUser(user_uuid)

  app.url_map.converters['regex'] = RegexConverter
  app.url_map.converters['apirepopath'] = APIRepositoryPathConverter
  app.register_blueprint(api_bp, url_prefix='/api')
  app.config.update(appconfig)
  return app

@pytest.fixture(scope="session")
def init_db_path(tmpdir_factory):
  """ Creates a new db and appropriate configuration. Used for parameter by name injection. """
  sqlitedb_file = str(tmpdir_factory.mktemp("data").join("test.db"))
  sqlitedb = 'sqlite:///{0}'.format(sqlitedb_file)
  conf = {"TESTING": True,
          "DEBUG": True,
          "DB_URI": sqlitedb}
  os.environ['TEST_DATABASE_URI'] = str(sqlitedb)
  os.environ['DB_URI'] = str(sqlitedb)
  db.initialize(SqliteDatabase(sqlitedb_file))
  application.config.update(conf)
  application.config.update({"DB_URI": sqlitedb})
  initialize_database()
  populate_database()
  close_db_filter(None)
  return str(sqlitedb_file)

@pytest.fixture()
def database_uri(monkeypatch, init_db_path, sqlitedb_file):
  """ Creates the db uri. Used for parameter by name injection. """
  shutil.copy2(init_db_path, sqlitedb_file)
  db.initialize(SqliteDatabase(sqlitedb_file))
  db_path = 'sqlite:///{0}'.format(sqlitedb_file)
  monkeypatch.setenv("DB_URI", db_path)
  return db_path

@pytest.fixture()
def sqlitedb_file(tmpdir):
  """ Makes file for db. Used for parameter by name injection. """
  test_db_file = tmpdir.mkdir("quaydb").join("test.db")
  return str(test_db_file)

@pytest.fixture()
def appconfig(database_uri):
  """ Makes conf with database_uri. Used for parameter by name injection """
  conf = {
    "TESTING": True,
    "DEBUG": True,
    "DB_URI": database_uri,
    "SECRET_KEY": 'superdupersecret!!!1',
  }
  return conf
@@ -5,19 +5,21 @@ from httmock import urlmatch, HTTMock
from util.config.validators import ConfigValidationException
from util.config.validators.validate_bitbucket_trigger import BitbucketTriggerValidator

from test.fixtures import *

@pytest.mark.parametrize('unvalidated_config', [
  ({}),
  ({'BITBUCKET_TRIGGER_CONFIG': {}}),
  ({'BITBUCKET_TRIGGER_CONFIG': {'CONSUMER_KEY': 'foo'}}),
  ({'BITBUCKET_TRIGGER_CONFIG': {'CONSUMER_SECRET': 'foo'}}),
])
def test_validate_invalid_bitbucket_trigger_config(unvalidated_config):
def test_validate_invalid_bitbucket_trigger_config(unvalidated_config, app):
  validator = BitbucketTriggerValidator()

  with pytest.raises(ConfigValidationException):
    validator.validate(unvalidated_config, None, None)

def test_validate_bitbucket_trigger():
def test_validate_bitbucket_trigger(app):
  url_hit = [False]

  @urlmatch(netloc=r'bitbucket.org')

@@ -3,6 +3,8 @@ import pytest
from util.config.validators import ConfigValidationException
from util.config.validators.validate_database import DatabaseValidator

from test.fixtures import *

@pytest.mark.parametrize('unvalidated_config,user,user_password,expected', [
  (None, None, None, TypeError),
  ({}, None, None, KeyError),
@@ -11,7 +13,7 @@ from util.config.validators.validate_database import DatabaseValidator
  ({'DB_NOTURI': 'sqlite:///:memory:'}, None, None, KeyError),
  ({'DB_URI': 'mysql:///someinvalid'}, None, None, ConfigValidationException),
])
def test_validate_database(unvalidated_config, user, user_password, expected):
def test_validate_database(unvalidated_config, user, user_password, expected, app):
  validator = DatabaseValidator()

  if expected is not None:

@@ -5,6 +5,8 @@ from httmock import urlmatch, HTTMock
from util.config.validators import ConfigValidationException
from util.config.validators.validate_github import GitHubLoginValidator, GitHubTriggerValidator

from test.fixtures import *

@pytest.fixture(params=[GitHubLoginValidator, GitHubTriggerValidator])
def github_validator(request):
  return request.param
@@ -30,13 +32,13 @@ def github_validator(request):
  'ALLOWED_ORGANIZATIONS': [],
  }),
])
def test_validate_invalid_github_config(github_config, github_validator):
def test_validate_invalid_github_config(github_config, github_validator, app):
  with pytest.raises(ConfigValidationException):
    unvalidated_config = {}
    unvalidated_config[github_validator.config_key] = github_config
    github_validator.validate(unvalidated_config, None, None)

def test_validate_github(github_validator):
def test_validate_github(github_validator, app):
  url_hit = [False, False]

  @urlmatch(netloc=r'somehost')

@@ -6,19 +6,21 @@ from httmock import urlmatch, HTTMock
from util.config.validators import ConfigValidationException
from util.config.validators.validate_gitlab_trigger import GitLabTriggerValidator

from test.fixtures import *

@pytest.mark.parametrize('unvalidated_config', [
  ({}),
  ({'GITLAB_TRIGGER_CONFIG': {'GITLAB_ENDPOINT': 'foo'}}),
  ({'GITLAB_TRIGGER_CONFIG': {'GITLAB_ENDPOINT': 'http://someendpoint', 'CLIENT_ID': 'foo'}}),
  ({'GITLAB_TRIGGER_CONFIG': {'GITLAB_ENDPOINT': 'http://someendpoint', 'CLIENT_SECRET': 'foo'}}),
])
def test_validate_invalid_gitlab_trigger_config(unvalidated_config):
def test_validate_invalid_gitlab_trigger_config(unvalidated_config, app):
  validator = GitLabTriggerValidator()

  with pytest.raises(ConfigValidationException):
    validator.validate(unvalidated_config, None, None)

def test_validate_gitlab_enterprise_trigger():
def test_validate_gitlab_enterprise_trigger(app):
  url_hit = [False]

  @urlmatch(netloc=r'somegitlab', path='/oauth/token')

@@ -5,19 +5,21 @@ from httmock import urlmatch, HTTMock
from util.config.validators import ConfigValidationException
from util.config.validators.validate_google_login import GoogleLoginValidator

from test.fixtures import *

@pytest.mark.parametrize('unvalidated_config', [
  ({}),
  ({'GOOGLE_LOGIN_CONFIG': {}}),
  ({'GOOGLE_LOGIN_CONFIG': {'CLIENT_ID': 'foo'}}),
  ({'GOOGLE_LOGIN_CONFIG': {'CLIENT_SECRET': 'foo'}}),
])
def test_validate_invalid_google_login_config(unvalidated_config):
def test_validate_invalid_google_login_config(unvalidated_config, app):
  validator = GoogleLoginValidator()

  with pytest.raises(ConfigValidationException):
    validator.validate(unvalidated_config, None, None)

def test_validate_google_login():
def test_validate_google_login(app):
  url_hit = [False]
  @urlmatch(netloc=r'www.googleapis.com', path='/oauth2/v3/token')
  def handler(_, __):
@@ -6,12 +6,14 @@ from util.morecollections import AttrDict

from test.test_external_jwt_authn import fake_jwt

from test.fixtures import *

@pytest.mark.parametrize('unvalidated_config', [
  ({}),
  ({'AUTHENTICATION_TYPE': 'Database'}),
])
def test_validate_noop(unvalidated_config):
def test_validate_noop(unvalidated_config, app):
  JWTAuthValidator.validate(unvalidated_config, None, None)

@@ -20,7 +22,7 @@ def test_validate_noop(unvalidated_config):
  ({'AUTHENTICATION_TYPE': 'JWT', 'JWT_AUTH_ISSUER': 'foo'}),
  ({'AUTHENTICATION_TYPE': 'JWT', 'JWT_VERIFY_ENDPOINT': 'foo'}),
])
def test_invalid_config(unvalidated_config):
def test_invalid_config(unvalidated_config, app):
  with pytest.raises(ConfigValidationException):
    JWTAuthValidator.validate(unvalidated_config, None, None)

@@ -31,7 +33,7 @@ def test_invalid_config(unvalidated_config):
  ('invaliduser', 'somepass', ConfigValidationException),
  ('cool.user', 'password', None),
])
def test_validated_jwt(username, password, expected_exception):
def test_validated_jwt(username, password, expected_exception, app):
  with fake_jwt() as jwt_auth:
    config = {}
    config['AUTHENTICATION_TYPE'] = 'JWT'

@@ -6,12 +6,13 @@ from util.morecollections import AttrDict

from test.test_keystone_auth import fake_keystone

from test.fixtures import *

@pytest.mark.parametrize('unvalidated_config', [
  ({}),
  ({'AUTHENTICATION_TYPE': 'Database'}),
])
def test_validate_noop(unvalidated_config):
def test_validate_noop(unvalidated_config, app):
  KeystoneValidator.validate(unvalidated_config, None, None)

@pytest.mark.parametrize('unvalidated_config', [
@@ -22,7 +23,7 @@ def test_validate_noop(unvalidated_config):
  ({'AUTHENTICATION_TYPE': 'Keystone', 'KEYSTONE_AUTH_URL': 'foo',
    'KEYSTONE_ADMIN_USERNAME': 'bar', 'KEYSTONE_ADMIN_PASSWORD': 'baz'}),
])
def test_invalid_config(unvalidated_config):
def test_invalid_config(unvalidated_config, app):
  with pytest.raises(ConfigValidationException):
    KeystoneValidator.validate(unvalidated_config, None, None)

@@ -33,7 +34,7 @@ def test_invalid_config(unvalidated_config):
  ('invaliduser', 'somepass', ConfigValidationException),
  ('cool.user', 'password', None),
])
def test_validated_keystone(username, password, expected_exception):
def test_validated_keystone(username, password, expected_exception, app):
  with fake_keystone(2) as keystone_auth:
    auth_url = keystone_auth.auth_url

@@ -6,19 +6,20 @@ from util.morecollections import AttrDict

from test.test_ldap import mock_ldap

from test.fixtures import *

@pytest.mark.parametrize('unvalidated_config', [
  ({}),
  ({'AUTHENTICATION_TYPE': 'Database'}),
])
def test_validate_noop(unvalidated_config):
def test_validate_noop(unvalidated_config, app):
  LDAPValidator.validate(unvalidated_config, None, None)

@pytest.mark.parametrize('unvalidated_config', [
  ({'AUTHENTICATION_TYPE': 'LDAP'}),
  ({'AUTHENTICATION_TYPE': 'LDAP', 'LDAP_ADMIN_DN': 'foo'}),
])
def test_invalid_config(unvalidated_config):
def test_invalid_config(unvalidated_config, app):
  with pytest.raises(ConfigValidationException):
    LDAPValidator.validate(unvalidated_config, None, None)

@@ -28,7 +29,7 @@ def test_invalid_config(unvalidated_config):
  'http://foo',
  'ldap:foo',
])
def test_invalid_uri(uri):
def test_invalid_uri(uri, app):
  config = {}
  config['AUTHENTICATION_TYPE'] = 'LDAP'
  config['LDAP_BASE_DN'] = ['dc=quay', 'dc=io']
@@ -47,7 +48,7 @@ def test_invalid_uri(uri):
  ('invaliduser', 'somepass', ConfigValidationException),
  ('someuser', 'somepass', None),
])
def test_validated_ldap(username, password, expected_exception):
def test_validated_ldap(username, password, expected_exception, app):
  config = {}
  config['AUTHENTICATION_TYPE'] = 'LDAP'
  config['LDAP_BASE_DN'] = ['dc=quay', 'dc=io']
@@ -7,19 +7,21 @@ from oauth.oidc import OIDC_WELLKNOWN
from util.config.validators import ConfigValidationException
from util.config.validators.validate_oidc import OIDCLoginValidator

from test.fixtures import *

@pytest.mark.parametrize('unvalidated_config', [
  ({'SOMETHING_LOGIN_CONFIG': {}}),
  ({'SOMETHING_LOGIN_CONFIG': {'OIDC_SERVER': 'foo'}}),
  ({'SOMETHING_LOGIN_CONFIG': {'OIDC_SERVER': 'foo', 'CLIENT_ID': 'foobar'}}),
  ({'SOMETHING_LOGIN_CONFIG': {'OIDC_SERVER': 'foo', 'CLIENT_SECRET': 'foobar'}}),
])
def test_validate_invalid_oidc_login_config(unvalidated_config):
def test_validate_invalid_oidc_login_config(unvalidated_config, app):
  validator = OIDCLoginValidator()

  with pytest.raises(ConfigValidationException):
    validator.validate(unvalidated_config, None, None)

def test_validate_oidc_login():
def test_validate_oidc_login(app):
  url_hit = [False]
  @urlmatch(netloc=r'someserver', path=r'/\.well-known/openid-configuration')
  def handler(_, __):

@@ -8,13 +8,15 @@ from mockredis import mock_strict_redis_client
from util.config.validators import ConfigValidationException
from util.config.validators.validate_redis import RedisValidator

from test.fixtures import *

@pytest.mark.parametrize('unvalidated_config,user,user_password,use_mock,expected', [
  ({}, None, None, False, ConfigValidationException),
  ({'BUILDLOGS_REDIS': {}}, None, None, False, ConfigValidationException),
  ({'BUILDLOGS_REDIS': {'host': 'somehost'}}, None, None, False, redis.ConnectionError),
  ({'BUILDLOGS_REDIS': {'host': 'localhost'}}, None, None, True, None),
])
def test_validate_redis(unvalidated_config, user, user_password, use_mock, expected):
def test_validate_redis(unvalidated_config, user, user_password, use_mock, expected, app):
  with patch('redis.StrictRedis' if use_mock else 'redis.None', mock_strict_redis_client):
    validator = RedisValidator()
    if expected is not None:

@@ -4,6 +4,8 @@ from util.config.validators import ConfigValidationException
from util.config.validators.validate_secscan import SecurityScannerValidator
from util.secscan.fake import fake_security_scanner

from test.fixtures import *

@pytest.mark.parametrize('unvalidated_config', [
  ({'DISTRIBUTED_STORAGE_PREFERENCE': []}),
])

@@ -3,12 +3,14 @@ import pytest
from util.config.validators import ConfigValidationException
from util.config.validators.validate_signer import SignerValidator

from test.fixtures import *

@pytest.mark.parametrize('unvalidated_config,expected', [
  ({}, None),
  ({'SIGNING_ENGINE': 'foobar'}, ConfigValidationException),
  ({'SIGNING_ENGINE': 'gpg2'}, Exception),
])
def test_validate_signer(unvalidated_config,expected):
def test_validate_signer(unvalidated_config, expected, app):
  validator = SignerValidator()
  if expected is not None:
    with pytest.raises(expected):

@@ -7,12 +7,14 @@ from util.config.validators import ConfigValidationException
from util.config.validators.validate_ssl import SSLValidator, SSL_FILENAMES
from test.test_ssl_util import generate_test_cert

from test.fixtures import *

@pytest.mark.parametrize('unvalidated_config', [
  ({}),
  ({'PREFERRED_URL_SCHEME': 'http'}),
  ({'PREFERRED_URL_SCHEME': 'https', 'EXTERNAL_TLS_TERMINATION': True}),
])
def test_skip_validate_ssl(unvalidated_config):
def test_skip_validate_ssl(unvalidated_config, app):
  validator = SSLValidator()
  validator.validate(unvalidated_config, None, None)

@@ -23,7 +25,7 @@ def test_skip_validate_ssl(unvalidated_config):
  (generate_test_cert(hostname='invalidserver'), ConfigValidationException,
   'Supported names "invalidserver" in SSL cert do not match server hostname "someserver"'),
])
def test_validate_ssl(cert, expected_error, error_message):
def test_validate_ssl(cert, expected_error, error_message, app):
  with NamedTemporaryFile(delete=False) as cert_file:
    cert_file.write(cert[0])
    cert_file.seek(0)

@@ -4,13 +4,15 @@ import pytest
from util.config.validators import ConfigValidationException
from util.config.validators.validate_storage import StorageValidator

from test.fixtures import *

@pytest.mark.parametrize('unvalidated_config, expected', [
  ({}, ConfigValidationException),
  ({'DISTRIBUTED_STORAGE_CONFIG': {}}, ConfigValidationException),
  ({'DISTRIBUTED_STORAGE_CONFIG': {'local': None}}, ConfigValidationException),
  ({'DISTRIBUTED_STORAGE_CONFIG': {'local': ['FakeStorage', {}]}}, None),
])
def test_validate_storage(unvalidated_config, expected):
def test_validate_storage(unvalidated_config, expected, app):
  validator = StorageValidator()
  if expected is not None:
    with pytest.raises(expected):
@@ -18,7 +20,7 @@ def test_validate_storage(unvalidated_config, expected):
  else:
    validator.validate(unvalidated_config, None, None)

def test_validate_s3_storage():
def test_validate_s3_storage(app):
  validator = StorageValidator()
  with moto.mock_s3():
    with pytest.raises(ConfigValidationException) as ipe:
@@ -10,13 +10,15 @@ def test_validate_noop(unvalidated_config):
  TimeMachineValidator.validate(unvalidated_config, None, None)

from test.fixtures import *

@pytest.mark.parametrize('default_exp,options,expected_exception', [
  ('2d', ['1w', '2d'], None),

  ('2d', ['1w'], 'Default expiration must be in expiration options set'),
  ('2d', ['2d', '1M'], 'Invalid tag expiration option: 1M'),
])
def test_validate(default_exp, options, expected_exception):
def test_validate(default_exp, options, expected_exception, app):
  config = {}
  config['DEFAULT_TAG_EXPIRATION'] = default_exp
  config['TAG_EXPIRATION_OPTIONS'] = options

@@ -5,11 +5,13 @@ from httmock import urlmatch, HTTMock
from util.config.validators import ConfigValidationException
from util.config.validators.validate_torrent import BittorrentValidator

from test.fixtures import *

@pytest.mark.parametrize('unvalidated_config,expected', [
  ({}, ConfigValidationException),
  ({'BITTORRENT_ANNOUNCE_URL': 'http://faketorrent/announce'}, None),
])
def test_validate_torrent(unvalidated_config,expected):
def test_validate_torrent(unvalidated_config, expected, app):
  announcer_hit = [False]

  @urlmatch(netloc=r'faketorrent', path='/announce')

@@ -1,7 +1,8 @@
from data import model, database
from test.fixtures import app, appconfig, database_uri, init_db_path, sqlitedb_file
from workers.repositoryactioncounter import RepositoryActionCountWorker

from test.fixtures import *

def test_repositoryactioncount(app):
  database.RepositoryActionCount.delete().execute()
  database.RepositorySearchScore.delete().execute()
@@ -9,5 +10,3 @@ def test_repositoryactioncount(app):
  rac = RepositoryActionCountWorker()
  while rac._count_repository_actions():
    continue