diff --git a/config_app/config_endpoints/api/suconfig.py b/config_app/config_endpoints/api/suconfig.py
index ec4a84105..f7dc232b1 100644
--- a/config_app/config_endpoints/api/suconfig.py
+++ b/config_app/config_endpoints/api/suconfig.py
@@ -16,6 +16,7 @@ from data.runmigration import run_alembic_migration
 from util.config.configutil import add_enterprise_config_defaults
 from util.config.database import sync_database_with_config
 from util.config.validator import validate_service_for_config, ValidatorContext, is_valid_config_upload_filename
+from util.config.validators import LocalStorageConfigValidationException
 
 logger = logging.getLogger(__name__)
 
@@ -293,16 +294,14 @@ class SuperUserConfigValidate(ApiResource):
     # Note: This method is called to validate the database configuration before super users exists,
     # so we also allow it to be called if there is no valid registry configuration setup. Note that
     # this is also safe since this method does not access any information not given in the request.
-    if not config_provider.config_exists():
-      config = request.get_json()['config']
-      validator_context = ValidatorContext.from_app(app, config, request.get_json().get('password', ''),
-                                                    instance_keys=instance_keys,
-                                                    ip_resolver=ip_resolver,
-                                                    config_provider=config_provider)
-      return validate_service_for_config(service, validator_context)
+    config = request.get_json()['config']
+    validator_context = ValidatorContext.from_app(app, config, request.get_json().get('password', ''),
+                                                  instance_keys=instance_keys,
+                                                  ip_resolver=ip_resolver,
+                                                  config_provider=config_provider)
+    return validate_service_for_config(service, validator_context)
 
-    abort(403)
 
 
 @resource('/v1/superuser/config/file/<filename>')
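For illustration, a minimal sketch of exercising the now-unconditional validation endpoint from a client. Only the request body shape ({"config": ..., "password": ...}) is taken from the handler above; the base URL and API path are illustrative assumptions, not confirmed by this diff.

```python
# Hedged sketch: POST a candidate config to the config app for validation.
# The path below is an assumption for illustration only.
import requests


def validate_service(base_url, service, config, password=''):
    """Send the candidate config and return the validation result as JSON."""
    resp = requests.post(
        '{}/api/v1/superuser/config/validate/{}'.format(base_url, service),  # hypothetical path
        json={'config': config, 'password': password},
    )
    resp.raise_for_status()
    return resp.json()


if __name__ == '__main__':
    candidate = {
        'DISTRIBUTED_STORAGE_CONFIG': {
            'default': ['LocalStorage', {'storage_path': '/datastorage'}],
        },
    }
    print(validate_service('http://localhost:5000', 'registry-storage', candidate))
```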
diff --git a/config_app/config_endpoints/api/tar_config_loader.py b/config_app/config_endpoints/api/tar_config_loader.py
index 63b57d214..e16b8a092 100644
--- a/config_app/config_endpoints/api/tar_config_loader.py
+++ b/config_app/config_endpoints/api/tar_config_loader.py
@@ -15,9 +15,9 @@ class TarConfigLoader(ApiResource):
   def put(self):
     """ Loads tarball config into the config provider """
     input_stream = request.stream
-    tar_stream = tarfile.open(mode="r|gz", fileobj=input_stream)
-
-    config_provider.load_from_tar_stream(tar_stream)
+    with tarfile.open(mode="r|gz", fileobj=input_stream) as tar_stream:
+      # TODO: find a way to remove the contents of the directory on shutdown?
+      tar_stream.extractall(config_provider.config_volume)
 
     # now try to connect to the db provided in their config
     combined = dict(**app.config)
diff --git a/config_app/config_util/config/__init__.py b/config_app/config_util/config/__init__.py
index c344bb415..3735e4f66 100644
--- a/config_app/config_util/config/__init__.py
+++ b/config_app/config_util/config/__init__.py
@@ -1,6 +1,5 @@
 from config_app.config_util.config.fileprovider import FileConfigProvider
 from config_app.config_util.config.testprovider import TestConfigProvider
-from config_app.config_util.config.inmemoryprovider import InMemoryProvider
 
 
 def get_config_provider(config_volume, yaml_filename, py_filename, testing=False):
@@ -9,4 +8,4 @@ def get_config_provider(config_volume, yaml_filename, py_filename, testing=False):
   if testing:
     return TestConfigProvider()
 
-  return InMemoryProvider()
+  return FileConfigProvider(config_volume, yaml_filename, py_filename)
diff --git a/config_app/config_util/config/inmemoryprovider.py b/config_app/config_util/config/inmemoryprovider.py
deleted file mode 100644
index 7db5d1a89..000000000
--- a/config_app/config_util/config/inmemoryprovider.py
+++ /dev/null
@@ -1,84 +0,0 @@
-import logging
-import yaml
-import io
-import os
-
-from config_app.config_util.config.baseprovider import BaseProvider
-
-
-logger = logging.getLogger(__name__)
-
-CONFIG_FILENAME = 'config.yaml'
-
-class InMemoryProvider(BaseProvider):
-  def __init__(self):
-    self.files = {}
-    self.config = {}
-    self.was_loaded = False
-
-  @property
-  def provider_id(self):
-    return 'memory'
-
-  def update_app_config(self, app_config):
-    self.config = app_config
-
-  def get_config(self):
-    return self.config
-
-  def save_config(self, config_object):
-    self.config = config_object
-    self.was_loaded = True
-
-  def config_exists(self):
-    return self.was_loaded
-
-  def volume_exists(self):
-    return True
-
-  def volume_file_exists(self, filename):
-    return any([name.startswith(filename) for name in self.files])
-
-  def get_volume_file(self, filename, mode='r'):
-    return io.BytesIO(self.files[filename])
-
-  def write_volume_file(self, filename, contents):
-    raise Exception('Not implemented yet')
-
-  def remove_volume_file(self, filename):
-    raise Exception('Not implemented yet')
-
-  def list_volume_directory(self, path):
-    def strip_directory(string):
-      if '/' in string:
-        return string[string.rfind('/') + 1:]
-      return string
-
-    return [strip_directory(name) for name in self.files if name.startswith(path)]
-
-  def save_volume_file(self, filename, flask_file):
-    self.files[filename] = flask_file.read()
-
-  def requires_restart(self, app_config):
-    raise Exception('Not implemented yet')
-
-  def get_volume_path(self, directory, filename):
-    return os.path.join(directory, filename)
-
-  def load_from_tarball(self, tarfile):
-    for tarinfo in tarfile.getmembers():
-      if tarinfo.isfile():
-        self.files[tarinfo.name] = tarfile.extractfile(tarinfo.name).read()
-
-    if self.files.has_key(CONFIG_FILENAME):
-      self.config = yaml.load(self.files.get(CONFIG_FILENAME))
-      self.was_loaded = True
-
-  def load_from_tar_stream(self, tarfile):
-    for tarinfo in tarfile:
-      if tarinfo.isfile():
-        self.files[tarinfo.name] = tarfile.extractfile(tarinfo).read()
-
-    if self.files.has_key(CONFIG_FILENAME):
-      self.config = yaml.load(self.files.get(CONFIG_FILENAME))
-      self.was_loaded = True
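Since the loader now streams the uploaded tarball straight into the config volume instead of keeping files in memory, here is a self-contained sketch of that round trip: it builds a gzipped config tarball in memory and extracts it with the same streaming "r|gz" mode used above. The temporary directory merely stands in for config_provider.config_volume.

```python
# Hedged sketch of the tarball round trip, assuming a single config.yaml member.
import io
import tarfile
import tempfile


def build_config_tarball(config_yaml):
    """Return a BytesIO holding a gzipped tarball with one config.yaml member."""
    buf = io.BytesIO()
    with tarfile.open(mode='w|gz', fileobj=buf) as tar:
        data = config_yaml.encode('utf-8')
        info = tarfile.TarInfo(name='config.yaml')
        info.size = len(data)
        tar.addfile(info, io.BytesIO(data))
    buf.seek(0)
    return buf


if __name__ == '__main__':
    upload = build_config_tarball('SERVER_HOSTNAME: quay.example.com\n')
    config_volume = tempfile.mkdtemp()  # stands in for the real config volume
    with tarfile.open(mode='r|gz', fileobj=upload) as tar_stream:
        tar_stream.extractall(config_volume)
    print('extracted config.yaml into', config_volume)
```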
diff --git a/config_app/js/config-field-templates/config-setup-tool.html b/config_app/js/config-field-templates/config-setup-tool.html
index ad86d3107..9c4dde631 100644
--- a/config_app/js/config-field-templates/config-setup-tool.html
+++ b/config_app/js/config-field-templates/config-setup-tool.html
@@ -1454,7 +1454,7 @@
-          BitBucket Build Triggers
+          BitBucket Build Triggers
@@ -1496,7 +1496,7 @@
-          GitLab Build Triggers
+          GitLab Build Triggers
diff --git a/config_app/js/core-config-setup/config-setup-tool.html b/config_app/js/core-config-setup/config-setup-tool.html
index 9bec737fa..eed17afce 100644
--- a/config_app/js/core-config-setup/config-setup-tool.html
+++ b/config_app/js/core-config-setup/config-setup-tool.html
@@ -1455,7 +1455,7 @@
-          BitBucket Build Triggers
+          BitBucket Build Triggers
@@ -1497,7 +1497,7 @@
-          GitLab Build Triggers
+          GitLab Build Triggers
diff --git a/config_app/js/core-config-setup/core-config-setup.js b/config_app/js/core-config-setup/core-config-setup.js
index 30126e43b..e55fb45e2 100644
--- a/config_app/js/core-config-setup/core-config-setup.js
+++ b/config_app/js/core-config-setup/core-config-setup.js
@@ -38,7 +38,13 @@ angular.module("quay-config")
     $scope.SERVICES = [
       {'id': 'redis', 'title': 'Redis'},
-      {'id': 'registry-storage', 'title': 'Registry Storage'},
+      {'id': 'registry-storage', 'title': 'Registry Storage', 'condition': (config) => {
+        // We can skip validation when all of the storage locations are local, since we can't
+        // guarantee that the config app runs on the same machine Quay Enterprise will run on.
+        // Instead, we only warn the user that Quay Enterprise won't start if the locations don't match.
+        return Object.values(config.DISTRIBUTED_STORAGE_CONFIG)
+          .some(storageTuple => storageTuple[0] !== 'LocalStorage')
+      }},
       {'id': 'time-machine', 'title': 'Time Machine'},
diff --git a/util/config/validator.py b/util/config/validator.py
index f8f296a58..a0924c9e2 100644
--- a/util/config/validator.py
+++ b/util/config/validator.py
@@ -135,15 +135,15 @@ class ValidatorContext(object):
     url_scheme_and_hostname = URLSchemeAndHostname.from_app_config(app.config)
 
     return cls(config,
-               user_password,
-               client or app.config['HTTPCLIENT'],
-               app.app_context,
-               url_scheme_and_hostname,
-               app.config.get('JWT_AUTH_MAX_FRESH_S', 300),
-               app.config['REGISTRY_TITLE'],
-               ip_resolver,
-               instance_keys,
-               app.config.get('FEATURE_SECURITY_SCANNER', False),
-               app.config.get('TESTING', False),
-               get_blob_download_uri_getter(app.test_request_context('/'), url_scheme_and_hostname),
-               config_provider)
+               user_password=user_password,
+               http_client=client or app.config['HTTPCLIENT'],
+               context=app.app_context,
+               url_scheme_and_hostname=url_scheme_and_hostname,
+               jwt_auth_max=app.config.get('JWT_AUTH_MAX_FRESH_S', 300),
+               registry_title=app.config['REGISTRY_TITLE'],
+               ip_resolver=ip_resolver,
+               feature_sec_scanner=app.config.get('FEATURE_SECURITY_SCANNER', False),
+               is_testing=app.config.get('TESTING', False),
+               uri_creator=get_blob_download_uri_getter(app.test_request_context('/'), url_scheme_and_hostname),
+               config_provider=config_provider,
+               instance_keys=instance_keys)
diff --git a/util/config/validators/validate_storage.py b/util/config/validators/validate_storage.py
index 3e3de74ee..4aec66aae 100644
--- a/util/config/validators/validate_storage.py
+++ b/util/config/validators/validate_storage.py
@@ -1,5 +1,5 @@
 from storage import get_storage_driver
-from util.config.validators import BaseValidator, ConfigValidationException
+from util.config.validators import BaseValidator, ConfigValidationException, LocalStorageConfigValidationException
 
 
 class StorageValidator(BaseValidator):
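The last hunk stops before StorageValidator's body, so the following is only a guess at how the newly imported LocalStorageConfigValidationException might come into play; the check mirrors the JS 'condition' above (a config whose every engine is LocalStorage can't be meaningfully validated from the config app). The exception class is stubbed locally so the sketch runs standalone; it is not the actual StorageValidator implementation.

```python
# Hedged sketch only: illustrative logic, not the real StorageValidator.
class LocalStorageConfigValidationException(Exception):
    """Local stand-in for util.config.validators.LocalStorageConfigValidationException."""


def check_for_local_only_storage(config):
    """Raise if every configured storage engine is LocalStorage (assumed behavior)."""
    storage_config = config.get('DISTRIBUTED_STORAGE_CONFIG') or {}
    engines = [engine for engine, _ in storage_config.values()]
    if engines and all(engine == 'LocalStorage' for engine in engines):
        raise LocalStorageConfigValidationException(
            'Only LocalStorage is configured; storage cannot be validated from the config app')


if __name__ == '__main__':
    check_for_local_only_storage({
        'DISTRIBUTED_STORAGE_CONFIG': {'default': ['S3Storage', {'s3_bucket': 'quay'}]},
    })
    print('config with a non-local storage engine passes the check')
```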