Merge pull request #3402 from KeyboardNerd/QUAY-1358
Change pg_trgm to be a precondition of pgsql database

Commit ac51954e1f
7 changed files with 79 additions and 9 deletions
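
Editorial summary (not part of the commit): rather than having the full-text-search migration create the pg_trgm extension itself, the extension becomes a precondition. Config validation now connects to the database and fails with an actionable error unless pg_trgm is already installed, and the CI script installs it into the test Postgres container before the suite runs. Operators are expected to create the extension ahead of time as a database superuser. A minimal, hypothetical one-off script for doing that with psycopg2; the connection parameters are placeholders, not values from this repository:

import psycopg2

# Placeholder connection details; substitute your own superuser credentials.
conn = psycopg2.connect(host="localhost", dbname="quay", user="postgres", password="postgres")
with conn, conn.cursor() as cur:
  # Idempotent: a no-op if the extension is already installed.
  cur.execute("CREATE EXTENSION IF NOT EXISTS pg_trgm")
conn.close()

The CI equivalent is the new postgres_init function added to scripts/ci below.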

@@ -102,7 +102,7 @@ class SuperUserRegistryStatus(ApiResource):
       }
 
     config = config_provider.get_config()
-    if config and config['SETUP_COMPLETE']:
+    if config and config.get('SETUP_COMPLETE'):
       return {
         'status': 'config'
       }
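
Editorial note on the hunk above: a stored config that predates setup completion simply has no SETUP_COMPLETE key, so the old bracket access would raise KeyError instead of letting the endpoint fall through to the remaining status checks. A tiny illustration, not taken from the codebase:

config = {'key': 'value'}                    # a saved config without the SETUP_COMPLETE flag
assert config.get('SETUP_COMPLETE') is None  # new behaviour: falsy, so the endpoint keeps checking
try:
  config['SETUP_COMPLETE']                   # old behaviour: raises instead of falling through
except KeyError:
  pass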

@@ -1,4 +1,5 @@
 import unittest
+import mock
 
 from data.database import User
 from data import model

@@ -28,11 +29,40 @@ class FreshConfigProvider(object):
 
 
 class TestSuperUserRegistryStatus(ApiTestCase):
-  def test_registry_status(self):
+  def test_registry_status_no_config(self):
     with FreshConfigProvider():
       json = self.getJsonResponse(SuperUserRegistryStatus)
       self.assertEquals('config-db', json['status'])
 
+  @mock.patch("config_app.config_endpoints.api.suconfig.database_is_valid", mock.Mock(return_value=False))
+  def test_registry_status_no_database(self):
+    with FreshConfigProvider():
+      config_provider.save_config({'key': 'value'})
+      json = self.getJsonResponse(SuperUserRegistryStatus)
+      self.assertEquals('setup-db', json['status'])
+
+  @mock.patch("config_app.config_endpoints.api.suconfig.database_is_valid", mock.Mock(return_value=True))
+  def test_registry_status_db_has_superuser(self):
+    with FreshConfigProvider():
+      config_provider.save_config({'key': 'value'})
+      json = self.getJsonResponse(SuperUserRegistryStatus)
+      self.assertEquals('config', json['status'])
+
+  @mock.patch("config_app.config_endpoints.api.suconfig.database_is_valid", mock.Mock(return_value=True))
+  @mock.patch("config_app.config_endpoints.api.suconfig.database_has_users", mock.Mock(return_value=False))
+  def test_registry_status_db_no_superuser(self):
+    with FreshConfigProvider():
+      config_provider.save_config({'key': 'value'})
+      json = self.getJsonResponse(SuperUserRegistryStatus)
+      self.assertEquals('create-superuser', json['status'])
+
+  @mock.patch("config_app.config_endpoints.api.suconfig.database_is_valid", mock.Mock(return_value=True))
+  @mock.patch("config_app.config_endpoints.api.suconfig.database_has_users", mock.Mock(return_value=True))
+  def test_registry_status_setup_complete(self):
+    with FreshConfigProvider():
+      config_provider.save_config({'key': 'value', 'SETUP_COMPLETE': True})
+      json = self.getJsonResponse(SuperUserRegistryStatus)
+      self.assertEquals('config', json['status'])
 
 class TestSuperUserConfigFile(ApiTestCase):
   def test_get_superuser_invalid_filename(self):
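
Aside on the tests above: the mock.patch decorators use string targets so that the suconfig module's own references to database_is_valid and database_has_users are what get replaced. A self-contained sketch of the same pattern; the function below is a placeholder, not Quay code:

import mock

def database_is_valid():
  return False  # stand-in for the real check

@mock.patch(__name__ + ".database_is_valid", mock.Mock(return_value=True))
def status_with_patch():
  return database_is_valid()

assert status_with_patch() is True
assert database_is_valid() is False  # the patch only applies inside the decorated call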

@@ -30,6 +30,7 @@ from data.fields import (ResumableSHA256Field, ResumableSHA1Field, JSONField, Ba
 from data.text import match_mysql, match_like
 from data.read_slave import ReadSlaveModel
 from util.names import urn_generator
+from util.validation import validate_postgres_precondition
 
 
 logger = logging.getLogger(__name__)

@@ -70,6 +71,12 @@ SCHEME_RANDOM_FUNCTION = {
 }
 
 
+PRECONDITION_VALIDATION = {
+  'postgresql': validate_postgres_precondition,
+  'postgresql+psycopg2': validate_postgres_precondition,
+}
+
+
 _EXTRA_ARGS = {
   'mysql': dict(charset='utf8mb4'),
   'mysql+pymysql': dict(charset='utf8mb4'),

@@ -284,6 +291,25 @@ def validate_database_url(url, db_kwargs, connect_timeout=5):
     pass
 
 
+def validate_database_precondition(url, db_kwargs, connect_timeout=5):
+  """ Validates that we can connect to the given database URL and the database meets our
+      precondition. Raises an exception if the validation fails. """
+  db_kwargs = db_kwargs.copy()
+  try:
+    driver = _db_from_url(url, db_kwargs, connect_timeout=connect_timeout, allow_retry=False,
+                          allow_pooling=False)
+    driver.connect()
+    pre_condition_check = PRECONDITION_VALIDATION.get(make_url(url).drivername)
+    if pre_condition_check:
+      pre_condition_check(driver)
+
+  finally:
+    try:
+      driver.close()
+    except:
+      pass
+
+
 def _wrap_for_retry(driver):
   return type('Retrying' + driver.__class__.__name__, (RetryOperationalError, driver), {})
 
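
Side note, not from the diff: the make_url(url).drivername lookup is what keeps the precondition check PostgreSQL-only, since other schemes find no entry in PRECONDITION_VALIDATION. A standalone illustration with throwaway URLs:

from sqlalchemy.engine.url import make_url

assert make_url('postgresql+psycopg2://quay:pw@db/quay').drivername == 'postgresql+psycopg2'
assert make_url('mysql+pymysql://quay:pw@db/quay').drivername == 'mysql+pymysql'

checks = {'postgresql': 'check', 'postgresql+psycopg2': 'check'}  # mirrors PRECONDITION_VALIDATION's keys
assert checks.get(make_url('mysql+pymysql://quay:pw@db/quay').drivername) is None  # MySQL skips the check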

@@ -15,9 +15,6 @@ import sqlalchemy as sa
 from sqlalchemy.dialects import mysql
 
 def upgrade(tables, tester):
-  if op.get_bind().engine.name == 'postgresql':
-    op.execute('CREATE EXTENSION IF NOT EXISTS pg_trgm')
-
   # ### commands auto generated by Alembic - please adjust! ###
   op.create_index('repository_description__fulltext', 'repository', ['description'], unique=False, postgresql_using='gin', postgresql_ops={'description': 'gin_trgm_ops'}, mysql_prefix='FULLTEXT')
   op.create_index('repository_name__fulltext', 'repository', ['name'], unique=False, postgresql_using='gin', postgresql_ops={'name': 'gin_trgm_ops'}, mysql_prefix='FULLTEXT')

scripts/ci

@@ -12,6 +12,7 @@ IMAGE_TAR="${CACHE_DIR}/${IMAGE}-${IMAGE_TAG}.tar.gz"
 
 MYSQL_IMAGE="mysql:5.7"
 POSTGRES_IMAGE="postgres:9.6"
+POSTGRES_CONTAINER="test_postgres"
 
 export MYSQL_ROOT_PASSWORD="quay"
 export MYSQL_USER="quay"

@@ -110,13 +111,18 @@ postgres_ping() {
 
 
 postgres_start() {
-  docker run --net=host -d -e POSTGRES_USER -e POSTGRES_PASSWORD \
+  docker run --name="${POSTGRES_CONTAINER}" --net=host -d -e POSTGRES_USER -e POSTGRES_PASSWORD \
     -e POSTGRES_DB "${POSTGRES_IMAGE}"
 
   if ! (sleep 10 && postgres_ping); then
     echo "PostgreSQL failed to respond in time."
     exit 1
   fi
+}
+
+
+postgres_init() {
+  docker exec "${POSTGRES_CONTAINER}" psql -U "${POSTGRES_USER}" -d "${POSTGRES_DB}" -c 'CREATE EXTENSION IF NOT EXISTS pg_trgm;'
 }
 
 

@@ -129,6 +135,7 @@ postgres() {
 
   load_image
   postgres_start
+  postgres_init
   quay_run make full-db-test
 }
 

@@ -1,6 +1,6 @@
 from peewee import OperationalError
 
-from data.database import validate_database_url
+from data.database import validate_database_precondition
 from util.config.validators import BaseValidator, ConfigValidationException
 
 class DatabaseValidator(BaseValidator):

@@ -12,7 +12,7 @@ class DatabaseValidator(BaseValidator):
     config = validator_context.config
 
     try:
-      validate_database_url(config['DB_URI'], config.get('DB_CONNECTION_ARGS', {}))
+      validate_database_precondition(config['DB_URI'], config.get('DB_CONNECTION_ARGS', {}))
     except OperationalError as ex:
       if ex.args and len(ex.args) > 1:
         raise ConfigValidationException(ex.args[1])
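
Note on the except branch above, illustrative rather than from the diff: peewee's OperationalError generally carries the driver's (code, message) pair in args, and the validator surfaces the readable part. With made-up arguments:

from peewee import OperationalError

try:
  raise OperationalError(1045, "Access denied for user 'quay'")  # hypothetical error args
except OperationalError as ex:
  if ex.args and len(ex.args) > 1:
    print(ex.args[1])  # the human-readable message that becomes the ConfigValidationException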

@@ -4,6 +4,7 @@ import json
 
 import anunidecode # Don't listen to pylint's lies. This import is required.
 
+from peewee import OperationalError
 
 INVALID_PASSWORD_MESSAGE = 'Invalid password, password must be at least ' + \
                            '8 characters and contain no whitespace.'

@@ -89,3 +90,12 @@ def is_json(value):
   except (TypeError, ValueError):
     return False
   return False
+
+
+def validate_postgres_precondition(driver):
+  cursor = driver.execute_sql("SELECT extname FROM pg_extension", ("public",))
+  if 'pg_trgm' not in [extname for extname, in cursor.fetchall()]:
+    raise OperationalError("""
+      "pg_trgm" extension does not exist in the database.
+      Please run `CREATE EXTENSION IF NOT EXISTS pg_trgm;` as superuser on this database.
+      """)
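
One last aside: the comprehension above relies on each row from pg_extension arriving as a 1-tuple, which the trailing comma in `for extname,` unpacks. A standalone illustration with fake rows:

rows = [('plpgsql',), ('pg_trgm',)]      # shaped like rows from "SELECT extname FROM pg_extension"
names = [extname for extname, in rows]   # the trailing comma unpacks each 1-tuple
assert names == ['plpgsql', 'pg_trgm']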