Revert inmemoryprov, skip local storage validation
parent f32bbf1fdc
commit 7619ab44e5
9 changed files with 35 additions and 115 deletions
@@ -16,6 +16,7 @@ from data.runmigration import run_alembic_migration
from util.config.configutil import add_enterprise_config_defaults
from util.config.database import sync_database_with_config
from util.config.validator import validate_service_for_config, ValidatorContext, is_valid_config_upload_filename
from util.config.validators import LocalStorageConfigValidationException

logger = logging.getLogger(__name__)

@@ -293,17 +294,15 @@ class SuperUserConfigValidate(ApiResource):
    # Note: This method is called to validate the database configuration before super users exists,
    # so we also allow it to be called if there is no valid registry configuration setup. Note that
    # this is also safe since this method does not access any information not given in the request.
    if not config_provider.config_exists():
      config = request.get_json()['config']
      validator_context = ValidatorContext.from_app(app, config, request.get_json().get('password', ''),
                                                    instance_keys=instance_keys,
                                                    ip_resolver=ip_resolver,
                                                    config_provider=config_provider)

      return validate_service_for_config(service, validator_context)

    abort(403)


@resource('/v1/superuser/config/file/<filename>')
class SuperUserConfigFile(ApiResource):

@@ -15,9 +15,9 @@ class TarConfigLoader(ApiResource):
  def put(self):
    """ Loads tarball config into the config provider """
    input_stream = request.stream
    tar_stream = tarfile.open(mode="r|gz", fileobj=input_stream)

    config_provider.load_from_tar_stream(tar_stream)
    with tarfile.open(mode="r|gz", fileobj=input_stream) as tar_stream:
      # TODO: find a way to remove the contents of the directory on shutdown?
      tar_stream.extractall(config_provider.config_volume)

    # now try to connect to the db provided in their config
    combined = dict(**app.config)

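The hunk above swaps the in-memory tarball loading for a direct extraction onto the config volume. A standalone sketch of that pattern follows; the Flask app, route, and CONFIG_VOLUME path below are placeholders, not the commit's actual wiring:

import tarfile

from flask import Flask, request

app = Flask(__name__)

# Placeholder standing in for config_provider.config_volume in the real code.
CONFIG_VOLUME = '/conf/stack'


@app.route('/v1/configapp/tarconfig', methods=['PUT'])
def load_tarball_config():
  # "r|gz" treats the request body as a non-seekable gzip stream, so the
  # upload is never buffered in full before extraction.
  with tarfile.open(mode='r|gz', fileobj=request.stream) as tar_stream:
    # Unpack every member straight into the on-disk directory that the
    # file-backed provider reads from afterwards. Real code should sanity-check
    # member paths before extracting an untrusted archive.
    tar_stream.extractall(CONFIG_VOLUME)

  return '', 204
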
@@ -1,6 +1,5 @@
from config_app.config_util.config.fileprovider import FileConfigProvider
from config_app.config_util.config.testprovider import TestConfigProvider
from config_app.config_util.config.inmemoryprovider import InMemoryProvider


def get_config_provider(config_volume, yaml_filename, py_filename, testing=False):
@@ -9,4 +8,4 @@ def get_config_provider(config_volume, yaml_filename, py_filename, testing=False
  if testing:
    return TestConfigProvider()

  return InMemoryProvider()
  return FileConfigProvider(config_volume, yaml_filename, py_filename)

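Taken together, the two hunks above restore the original factory behaviour: outside of tests, configuration is read from and written to files under config_volume instead of being held in memory. A minimal sketch of the resulting function, assuming the imports shown in the diff:

from config_app.config_util.config.fileprovider import FileConfigProvider
from config_app.config_util.config.testprovider import TestConfigProvider


def get_config_provider(config_volume, yaml_filename, py_filename, testing=False):
  """ Returns the config provider appropriate for the current environment. """
  if testing:
    # Unit tests get a stub provider with canned, in-process behaviour.
    return TestConfigProvider()

  # Everyone else reads and writes the YAML/Python config files on disk.
  return FileConfigProvider(config_volume, yaml_filename, py_filename)
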
@@ -1,84 +0,0 @@
import logging
import yaml
import io
import os

from config_app.config_util.config.baseprovider import BaseProvider


logger = logging.getLogger(__name__)

CONFIG_FILENAME = 'config.yaml'


class InMemoryProvider(BaseProvider):
  def __init__(self):
    self.files = {}
    self.config = {}
    self.was_loaded = False

  @property
  def provider_id(self):
    return 'memory'

  def update_app_config(self, app_config):
    self.config = app_config

  def get_config(self):
    return self.config

  def save_config(self, config_object):
    self.config = config_object
    self.was_loaded = True

  def config_exists(self):
    return self.was_loaded

  def volume_exists(self):
    return True

  def volume_file_exists(self, filename):
    return any([name.startswith(filename) for name in self.files])

  def get_volume_file(self, filename, mode='r'):
    return io.BytesIO(self.files[filename])

  def write_volume_file(self, filename, contents):
    raise Exception('Not implemented yet')

  def remove_volume_file(self, filename):
    raise Exception('Not implemented yet')

  def list_volume_directory(self, path):
    def strip_directory(string):
      if '/' in string:
        return string[string.rfind('/') + 1:]
      return string

    return [strip_directory(name) for name in self.files if name.startswith(path)]

  def save_volume_file(self, filename, flask_file):
    self.files[filename] = flask_file.read()

  def requires_restart(self, app_config):
    raise Exception('Not implemented yet')

  def get_volume_path(self, directory, filename):
    return os.path.join(directory, filename)

  def load_from_tarball(self, tarfile):
    for tarinfo in tarfile.getmembers():
      if tarinfo.isfile():
        self.files[tarinfo.name] = tarfile.extractfile(tarinfo.name).read()

    if self.files.has_key(CONFIG_FILENAME):
      self.config = yaml.load(self.files.get(CONFIG_FILENAME))
      self.was_loaded = True

  def load_from_tar_stream(self, tarfile):
    for tarinfo in tarfile:
      if tarinfo.isfile():
        self.files[tarinfo.name] = tarfile.extractfile(tarinfo).read()

    if self.files.has_key(CONFIG_FILENAME):
      self.config = yaml.load(self.files.get(CONFIG_FILENAME))
      self.was_loaded = True

@@ -1454,7 +1454,7 @@
        <!-- BitBucket Trigger -->
        <div class="co-panel" ng-if="config.FEATURE_BUILD_SUPPORT" style="margin-top: 20px;">
          <div class="co-panel-heading">
            <i class="fa fa-bitbucket"></i> BitBucket Build Triggers
            <i class="fab fa-bitbucket"></i> BitBucket Build Triggers
          </div>
          <div class="co-panel-body">
            <div class="description">
@@ -1496,7 +1496,7 @@
        <!-- GitLab Trigger -->
        <div class="co-panel" ng-if="config.FEATURE_BUILD_SUPPORT" style="margin-top: 20px;">
          <div class="co-panel-heading">
            <i class="fa fa-gitlab"></i> GitLab Build Triggers
            <i class="fab fa-gitlab"></i> GitLab Build Triggers
          </div>
          <div class="co-panel-body">
            <div class="description">

@@ -1455,7 +1455,7 @@
        <!-- BitBucket Trigger -->
        <div class="co-panel" ng-if="config.FEATURE_BUILD_SUPPORT" style="margin-top: 20px;">
          <div class="co-panel-heading">
            <i class="fa fa-bitbucket"></i> BitBucket Build Triggers
            <i class="fab fa-bitbucket"></i> BitBucket Build Triggers
          </div>
          <div class="co-panel-body">
            <div class="description">
@@ -1497,7 +1497,7 @@
        <!-- GitLab Trigger -->
        <div class="co-panel" ng-if="config.FEATURE_BUILD_SUPPORT" style="margin-top: 20px;">
          <div class="co-panel-heading">
            <i class="fa fa-gitlab"></i> GitLab Build Triggers
            <i class="fab fa-gitlab"></i> GitLab Build Triggers
          </div>
          <div class="co-panel-body">
            <div class="description">

@@ -38,7 +38,13 @@ angular.module("quay-config")
      $scope.SERVICES = [
        {'id': 'redis', 'title': 'Redis'},

        {'id': 'registry-storage', 'title': 'Registry Storage'},
        {'id': 'registry-storage', 'title': 'Registry Storage', 'condition': (config) => {
          // We can skip validation if all of the storage locations are local, as we can't
          // guarantee that this will be the same machine Q.E. will run under. Therefore,
          // we just have a warning to the user that Q.E. won't start if the locations don't match
          return Object.values(config.DISTRIBUTED_STORAGE_CONFIG)
            .some(storageTuple => storageTuple[0] !== 'LocalStorage')
        }},

        {'id': 'time-machine', 'title': 'Time Machine'},

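The condition added above runs client-side, but the same "is every storage engine local?" question can be asked of the config dictionary anywhere. A hedged Python sketch of that check, reusing the LocalStorageConfigValidationException imported elsewhere in this commit (the helper name and message are illustrative, not taken from the commit):

from util.config.validators import LocalStorageConfigValidationException


def check_storage_is_validatable(config):
  """ Raises when every configured storage engine is LocalStorage, since such
      locations cannot be verified from the config tool's own machine. """
  # DISTRIBUTED_STORAGE_CONFIG maps each location name to a
  # (engine_class_name, engine_parameters) tuple, as consumed above.
  storage_config = config.get('DISTRIBUTED_STORAGE_CONFIG', {})

  if storage_config and all(engine == 'LocalStorage'
                            for engine, _ in storage_config.values()):
    raise LocalStorageConfigValidationException(
        'All storage locations are local; validation is skipped. Quay Enterprise '
        'will only start if these paths exist on the machine it runs on.')
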
@@ -135,15 +135,15 @@ class ValidatorContext(object):
    url_scheme_and_hostname = URLSchemeAndHostname.from_app_config(app.config)

    return cls(config,
               user_password,
               client or app.config['HTTPCLIENT'],
               app.app_context,
               url_scheme_and_hostname,
               app.config.get('JWT_AUTH_MAX_FRESH_S', 300),
               app.config['REGISTRY_TITLE'],
               ip_resolver,
               instance_keys,
               app.config.get('FEATURE_SECURITY_SCANNER', False),
               app.config.get('TESTING', False),
               get_blob_download_uri_getter(app.test_request_context('/'), url_scheme_and_hostname),
               config_provider)
               user_password=user_password,
               http_client=client or app.config['HTTPCLIENT'],
               context=app.app_context,
               url_scheme_and_hostname=url_scheme_and_hostname,
               jwt_auth_max=app.config.get('JWT_AUTH_MAX_FRESH_S', 300),
               registry_title=app.config['REGISTRY_TITLE'],
               ip_resolver=ip_resolver,
               feature_sec_scanner=app.config.get('FEATURE_SECURITY_SCANNER', False),
               is_testing=app.config.get('TESTING', False),
               uri_creator=get_blob_download_uri_getter(app.test_request_context('/'), url_scheme_and_hostname),
               config_provider=config_provider,
               instance_keys=instance_keys)

@@ -1,5 +1,5 @@
from storage import get_storage_driver
from util.config.validators import BaseValidator, ConfigValidationException
from util.config.validators import BaseValidator, ConfigValidationException, LocalStorageConfigValidationException


class StorageValidator(BaseValidator):