Change spacing from 4 spaces to 2 spaces

parent ec14007268
commit efa66d84e4

28 changed files with 936 additions and 913 deletions
@@ -2,7 +2,6 @@ import os
 import re
 import subprocess
 
-
 # Note: this currently points to the directory above, since we're in the quay config_app dir
 # TODO(config_extract): revert to root directory rather than the one above
 ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
@@ -15,7 +14,6 @@ TEMPLATE_DIR = os.path.join(ROOT_DIR, 'templates/')
 IS_KUBERNETES = 'KUBERNETES_SERVICE_HOST' in os.environ
 
 
-
 def _get_version_number_changelog():
   try:
     with open(os.path.join(ROOT_DIR, 'CHANGELOG.md')) as f:
@@ -27,14 +27,16 @@ config_provider = get_config_provider(OVERRIDE_CONFIG_DIRECTORY, 'config.yaml',
                                       testing=is_testing)
 
 if is_testing:
   from test.testconfig import TestConfig
+
   logger.debug('Loading test config.')
   app.config.from_object(TestConfig())
 else:
   from config import DefaultConfig
+
   logger.debug('Loading default config.')
   app.config.from_object(DefaultConfig())
+
 app.teardown_request(database.close_db_filter)
 
 # Load the override config via the provider.
 config_provider.update_app_config(app.config)
@@ -3,7 +3,6 @@ from config_app.c_app import app as application
 # Bind all of the blueprints
 import config_web
 
-
 if __name__ == '__main__':
   logging.config.fileConfig(logfile_path(debug=True), disable_existing_loggers=False)
   application.run(port=5000, debug=True, threaded=True, host='0.0.0.0')
@@ -31,13 +31,13 @@ api = ApiExceptionHandlingApi()
 api.init_app(api_bp)
 
 def log_action(kind, user_or_orgname, metadata=None, repo=None, repo_name=None):
   if not metadata:
     metadata = {}
 
   if repo:
     repo_name = repo.name
 
   model.log.log_action(kind, user_or_orgname, repo_name, user_or_orgname, request.remote_addr, metadata)
 
 def format_date(date):
   """ Output an RFC822 date format. """
@@ -7,247 +7,248 @@ from config_app.c_app import app
 from config_app.config_endpoints.api import method_metadata
 from config_app.config_endpoints.common import fully_qualified_name, PARAM_REGEX, TYPE_CONVERTER
 
 
 logger = logging.getLogger(__name__)
 
 
 def generate_route_data():
   include_internal = True
   compact = True
 
   def swagger_parameter(name, description, kind='path', param_type='string', required=True,
                         enum=None, schema=None):
     # https://github.com/swagger-api/swagger-spec/blob/master/versions/2.0.md#parameterObject
     parameter_info = {
       'name': name,
       'in': kind,
       'required': required
     }
 
     if schema:
       parameter_info['schema'] = {
         '$ref': '#/definitions/%s' % schema
       }
     else:
       parameter_info['type'] = param_type
 
     if enum is not None and len(list(enum)) > 0:
       parameter_info['enum'] = list(enum)
 
     return parameter_info
 
   paths = {}
   models = {}
   tags = []
   tags_added = set()
   operation_ids = set()
 
   for rule in app.url_map.iter_rules():
     endpoint_method = app.view_functions[rule.endpoint]
 
     # Verify that we have a view class for this API method.
     if not 'view_class' in dir(endpoint_method):
       continue
 
     view_class = endpoint_method.view_class
 
     # Hide the class if it is internal.
     internal = method_metadata(view_class, 'internal')
     if not include_internal and internal:
       continue
 
     # Build the tag.
     parts = fully_qualified_name(view_class).split('.')
     tag_name = parts[-2]
     if not tag_name in tags_added:
       tags_added.add(tag_name)
       tags.append({
         'name': tag_name,
         'description': (sys.modules[view_class.__module__].__doc__ or '').strip()
       })
 
     # Build the Swagger data for the path.
     swagger_path = PARAM_REGEX.sub(r'{\2}', rule.rule)
     full_name = fully_qualified_name(view_class)
     path_swagger = {
       'x-name': full_name,
       'x-path': swagger_path,
       'x-tag': tag_name
     }
 
     related_user_res = method_metadata(view_class, 'related_user_resource')
     if related_user_res is not None:
       path_swagger['x-user-related'] = fully_qualified_name(related_user_res)
 
     paths[swagger_path] = path_swagger
 
     # Add any global path parameters.
-    param_data_map = view_class.__api_path_params if '__api_path_params' in dir(view_class) else {}
+    param_data_map = view_class.__api_path_params if '__api_path_params' in dir(
+      view_class) else {}
     if param_data_map:
       path_parameters_swagger = []
       for path_parameter in param_data_map:
         description = param_data_map[path_parameter].get('description')
         path_parameters_swagger.append(swagger_parameter(path_parameter, description))
 
       path_swagger['parameters'] = path_parameters_swagger
 
     # Add the individual HTTP operations.
     method_names = list(rule.methods.difference(['HEAD', 'OPTIONS']))
     for method_name in method_names:
       # https://github.com/swagger-api/swagger-spec/blob/master/versions/2.0.md#operation-object
       method = getattr(view_class, method_name.lower(), None)
       if method is None:
         logger.debug('Unable to find method for %s in class %s', method_name, view_class)
         continue
 
       operationId = method_metadata(method, 'nickname')
       operation_swagger = {
         'operationId': operationId,
         'parameters': [],
       }
 
       if operationId is None:
         continue
 
       if operationId in operation_ids:
         raise Exception('Duplicate operation Id: %s' % operationId)
 
       operation_ids.add(operationId)
 
       # Mark the method as internal.
       internal = method_metadata(method, 'internal')
       if internal is not None:
         operation_swagger['x-internal'] = True
 
       if include_internal:
         requires_fresh_login = method_metadata(method, 'requires_fresh_login')
         if requires_fresh_login is not None:
           operation_swagger['x-requires-fresh-login'] = True
 
       # Add the path parameters.
       if rule.arguments:
         for path_parameter in rule.arguments:
           description = param_data_map.get(path_parameter, {}).get('description')
-          operation_swagger['parameters'].append(swagger_parameter(path_parameter, description))
+          operation_swagger['parameters'].append(
+            swagger_parameter(path_parameter, description))
 
       # Add the query parameters.
       if '__api_query_params' in dir(method):
         for query_parameter_info in method.__api_query_params:
           name = query_parameter_info['name']
           description = query_parameter_info['help']
           param_type = TYPE_CONVERTER[query_parameter_info['type']]
           required = query_parameter_info['required']
 
           operation_swagger['parameters'].append(
             swagger_parameter(name, description, kind='query',
                               param_type=param_type,
                               required=required,
                               enum=query_parameter_info['choices']))
 
       # Add the OAuth security block.
       # https://github.com/swagger-api/swagger-spec/blob/master/versions/2.0.md#securityRequirementObject
       scope = method_metadata(method, 'oauth2_scope')
       if scope and not compact:
         operation_swagger['security'] = [{'oauth2_implicit': [scope.scope]}]
 
       # Add the responses block.
       # https://github.com/swagger-api/swagger-spec/blob/master/versions/2.0.md#responsesObject
       response_schema_name = method_metadata(method, 'response_schema')
       if not compact:
         if response_schema_name:
           models[response_schema_name] = view_class.schemas[response_schema_name]
 
         models['ApiError'] = {
           'type': 'object',
           'properties': {
             'status': {
               'type': 'integer',
               'description': 'Status code of the response.'
             },
             'type': {
               'type': 'string',
               'description': 'Reference to the type of the error.'
             },
             'detail': {
               'type': 'string',
               'description': 'Details about the specific instance of the error.'
             },
             'title': {
               'type': 'string',
               'description': 'Unique error code to identify the type of error.'
             },
             'error_message': {
               'type': 'string',
               'description': 'Deprecated; alias for detail'
             },
             'error_type': {
               'type': 'string',
               'description': 'Deprecated; alias for detail'
             }
           },
           'required': [
             'status',
             'type',
             'title',
           ]
         }
 
         responses = {
           '400': {
             'description': 'Bad Request',
           },
 
           '401': {
             'description': 'Session required',
           },
 
           '403': {
             'description': 'Unauthorized access',
           },
 
           '404': {
             'description': 'Not found',
           },
         }
 
         for _, body in responses.items():
           body['schema'] = {'$ref': '#/definitions/ApiError'}
 
         if method_name == 'DELETE':
           responses['204'] = {
             'description': 'Deleted'
           }
         elif method_name == 'POST':
           responses['201'] = {
             'description': 'Successful creation'
           }
         else:
           responses['200'] = {
             'description': 'Successful invocation'
           }
 
           if response_schema_name:
             responses['200']['schema'] = {
               '$ref': '#/definitions/%s' % response_schema_name
             }
 
         operation_swagger['responses'] = responses
 
       # Add the request block.
       request_schema_name = method_metadata(method, 'request_schema')
       if request_schema_name and not compact:
         models[request_schema_name] = view_class.schemas[request_schema_name]
 
         operation_swagger['parameters'].append(
           swagger_parameter('body', 'Request body contents.', kind='body',
                             schema=request_schema_name))
 
       # Add the operation to the parent path.
       if not internal or (internal and include_internal):
         path_swagger[method_name.lower()] = operation_swagger
 
   tags.sort(key=lambda t: t['name'])
   paths = OrderedDict(sorted(paths.items(), key=lambda p: p[1]['x-tag']))
 
   if compact:
     return {'paths': paths}
@@ -3,7 +3,8 @@ import logging
 from flask import abort, request
 
 from config_app.config_endpoints.api.suconfig_models_pre_oci import pre_oci_model as model
-from config_app.config_endpoints.api import resource, ApiResource, nickname, validate_json_request, kubernetes_only
+from config_app.config_endpoints.api import resource, ApiResource, nickname, validate_json_request, \
+  kubernetes_only
 from config_app.c_app import (app, config_provider, superusers, ip_resolver,
                               instance_keys, INIT_SCRIPTS_LOCATION)
 from config_app.config_util.k8saccessor import KubernetesAccessorSingleton
@@ -11,7 +12,8 @@ from config_app.config_util.k8saccessor import KubernetesAccessorSingleton
 from data.database import configure
 from data.runmigration import run_alembic_migration
 from util.config.configutil import add_enterprise_config_defaults
-from util.config.validator import validate_service_for_config, ValidatorContext, is_valid_config_upload_filename
+from util.config.validator import validate_service_for_config, ValidatorContext, \
+  is_valid_config_upload_filename
 
 logger = logging.getLogger(__name__)
 
@@ -85,6 +87,7 @@ class SuperUserRegistryStatus(ApiResource):
   """ Resource for determining the status of the registry, such as if config exists,
       if a database is configured, and if it has any defined users.
   """
+
   @nickname('scRegistryStatus')
   def get(self):
     """ Returns the status of the registry. """
@@ -121,6 +124,7 @@ class _AlembicLogHandler(logging.Handler):
 @resource('/v1/superuser/setupdb')
 class SuperUserSetupDatabase(ApiResource):
   """ Resource for invoking alembic to setup the database. """
+
   @nickname('scSetupDatabase')
   def get(self):
     """ Invokes the alembic upgrade process. """
@@ -251,7 +255,8 @@ class SuperUserConfigValidate(ApiResource):
     # so we also allow it to be called if there is no valid registry configuration setup. Note that
     # this is also safe since this method does not access any information not given in the request.
     config = request.get_json()['config']
-    validator_context = ValidatorContext.from_app(app, config, request.get_json().get('password', ''),
+    validator_context = ValidatorContext.from_app(app, config,
+                                                  request.get_json().get('password', ''),
                                                   instance_keys=instance_keys,
                                                   ip_resolver=ip_resolver,
                                                   config_provider=config_provider,
@@ -294,6 +299,7 @@ class SuperUserKubernetesDeployment(ApiResource):
 @resource('/v1/superuser/config/kubernetes')
 class SuperUserKubernetesConfiguration(ApiResource):
   """ Resource for saving the config files to kubernetes secrets. """
+
   @kubernetes_only
   @nickname('scDeployConfiguration')
   def post(self):
@@ -303,6 +309,7 @@ class SuperUserKubernetesConfiguration(ApiResource):
 @resource('/v1/superuser/config/file/<filename>')
 class SuperUserConfigFile(ApiResource):
   """ Resource for fetching the status of config files and overriding them. """
+
   @nickname('scConfigFileExists')
   def get(self, filename):
     """ Returns whether the configuration file with the given name exists. """
@@ -313,7 +320,6 @@ class SuperUserConfigFile(ApiResource):
       'exists': config_provider.volume_file_exists(filename)
     }
 
-
   @nickname('scUpdateConfigFile')
   def post(self, filename):
     """ Updates the configuration file with the given name. """
@@ -4,36 +4,36 @@ from six import add_metaclass
 
 @add_metaclass(ABCMeta)
 class SuperuserConfigDataInterface(object):
   """
   Interface that represents all data store interactions required by the superuser config API.
   """
 
   @abstractmethod
   def is_valid(self):
     """
     Returns true if the configured database is valid.
     """
 
   @abstractmethod
   def has_users(self):
     """
     Returns true if there are any users defined.
     """
 
   @abstractmethod
   def create_superuser(self, username, password, email):
     """
     Creates a new superuser with the given username, password and email. Returns the user's UUID.
     """
 
   @abstractmethod
   def has_federated_login(self, username, service_name):
     """
     Returns true if the matching user has a federated login under the matching service.
     """
 
   @abstractmethod
   def attach_federated_login(self, username, service_name, federated_username):
     """
     Attaches a federatated login to the matching user, under the given service.
     """
@@ -4,34 +4,34 @@ from config_app.config_endpoints.api.suconfig_models_interface import SuperuserConfigDataInterface
 
 
 class PreOCIModel(SuperuserConfigDataInterface):
   # Note: this method is different than has_users: the user select will throw if the user
   # table does not exist, whereas has_users assumes the table is valid
   def is_valid(self):
     try:
       list(User.select().limit(1))
       return True
     except:
       return False
 
   def has_users(self):
     return bool(list(User.select().limit(1)))
 
   def create_superuser(self, username, password, email):
     return model.user.create_user(username, password, email, auto_verify=True).uuid
 
   def has_federated_login(self, username, service_name):
     user = model.user.get_user(username)
     if user is None:
       return False
 
     return bool(model.user.lookup_federated_login(user, service_name))
 
   def attach_federated_login(self, username, service_name, federated_username):
     user = model.user.get_user(username)
     if user is None:
       return False
 
     model.user.attach_federated_login(user, service_name, federated_username)
 
 
 pre_oci_model = PreOCIModel()
@@ -6,165 +6,168 @@ from config_app.config_endpoints.api import format_date
 
 
 def user_view(user):
   return {
     'name': user.username,
     'kind': 'user',
     'is_robot': user.robot,
   }
 
 
 class RepositoryBuild(namedtuple('RepositoryBuild',
-                                 ['uuid', 'logs_archived', 'repository_namespace_user_username', 'repository_name',
-                                  'can_write', 'can_read', 'pull_robot', 'resource_key', 'trigger', 'display_name',
-                                  'started', 'job_config', 'phase', 'status', 'error', 'archive_url'])):
+                                 ['uuid', 'logs_archived', 'repository_namespace_user_username',
+                                  'repository_name',
+                                  'can_write', 'can_read', 'pull_robot', 'resource_key', 'trigger',
+                                  'display_name',
+                                  'started', 'job_config', 'phase', 'status', 'error',
+                                  'archive_url'])):
   """
   RepositoryBuild represents a build associated with a repostiory
   :type uuid: string
   :type logs_archived: boolean
   :type repository_namespace_user_username: string
   :type repository_name: string
   :type can_write: boolean
   :type can_write: boolean
   :type pull_robot: User
   :type resource_key: string
   :type trigger: Trigger
   :type display_name: string
   :type started: boolean
   :type job_config: {Any -> Any}
   :type phase: string
   :type status: string
   :type error: string
   :type archive_url: string
   """
 
   def to_dict(self):
 
     resp = {
       'id': self.uuid,
       'phase': self.phase,
       'started': format_date(self.started),
       'display_name': self.display_name,
       'status': self.status or {},
       'subdirectory': self.job_config.get('build_subdir', ''),
       'dockerfile_path': self.job_config.get('build_subdir', ''),
       'context': self.job_config.get('context', ''),
       'tags': self.job_config.get('docker_tags', []),
       'manual_user': self.job_config.get('manual_user', None),
       'is_writer': self.can_write,
       'trigger': self.trigger.to_dict(),
       'trigger_metadata': self.job_config.get('trigger_metadata', None) if self.can_read else None,
       'resource_key': self.resource_key,
       'pull_robot': user_view(self.pull_robot) if self.pull_robot else None,
       'repository': {
         'namespace': self.repository_namespace_user_username,
         'name': self.repository_name
       },
       'error': self.error,
     }
 
     if self.can_write:
       if self.resource_key is not None:
         resp['archive_url'] = self.archive_url
       elif self.job_config.get('archive_url', None):
         resp['archive_url'] = self.job_config['archive_url']
 
     return resp
 
 
 class Approval(namedtuple('Approval', ['approver', 'approval_type', 'approved_date', 'notes'])):
   """
   Approval represents whether a key has been approved or not
   :type approver: User
   :type approval_type: string
   :type approved_date: Date
   :type notes: string
   """
 
   def to_dict(self):
     return {
       'approver': self.approver.to_dict() if self.approver else None,
       'approval_type': self.approval_type,
       'approved_date': self.approved_date,
       'notes': self.notes,
     }
 
 
-class ServiceKey(namedtuple('ServiceKey', ['name', 'kid', 'service', 'jwk', 'metadata', 'created_date',
-                                           'expiration_date', 'rotation_duration', 'approval'])):
+class ServiceKey(
+    namedtuple('ServiceKey', ['name', 'kid', 'service', 'jwk', 'metadata', 'created_date',
+                              'expiration_date', 'rotation_duration', 'approval'])):
   """
   ServiceKey is an apostille signing key
   :type name: string
   :type kid: int
   :type service: string
   :type jwk: string
   :type metadata: string
   :type created_date: Date
   :type expiration_date: Date
   :type rotation_duration: Date
   :type approval: Approval
 
   """
 
   def to_dict(self):
     return {
       'name': self.name,
       'kid': self.kid,
       'service': self.service,
       'jwk': self.jwk,
       'metadata': self.metadata,
       'created_date': self.created_date,
       'expiration_date': self.expiration_date,
       'rotation_duration': self.rotation_duration,
       'approval': self.approval.to_dict() if self.approval is not None else None,
     }
 
 
 class User(namedtuple('User', ['username', 'email', 'verified', 'enabled', 'robot'])):
   """
   User represents a single user.
   :type username: string
   :type email: string
   :type verified: boolean
   :type enabled: boolean
   :type robot: User
   """
 
   def to_dict(self):
     user_data = {
       'kind': 'user',
       'name': self.username,
       'username': self.username,
       'email': self.email,
       'verified': self.verified,
       'enabled': self.enabled,
     }
 
     return user_data
 
 
 class Organization(namedtuple('Organization', ['username', 'email'])):
   """
   Organization represents a single org.
   :type username: string
   :type email: string
   """
 
   def to_dict(self):
     return {
       'name': self.username,
       'email': self.email,
     }
 
 
 @add_metaclass(ABCMeta)
 class SuperuserDataInterface(object):
   """
   Interface that represents all data store interactions required by a superuser api.
   """
+
   @abstractmethod
   def list_all_service_keys(self):
     """
     Returns a list of service keys
     """
@@ -1,6 +1,8 @@
 from data import model
 
-from config_app.config_endpoints.api.superuser_models_interface import SuperuserDataInterface, User, ServiceKey, Approval
+from config_app.config_endpoints.api.superuser_models_interface import (SuperuserDataInterface, User, ServiceKey,
+                                                                        Approval)
+
 
 def _create_user(user):
   if user is None:
@@ -11,12 +13,15 @@ def _create_user(user):
 def _create_key(key):
   approval = None
   if key.approval is not None:
-    approval = Approval(_create_user(key.approval.approver), key.approval.approval_type, key.approval.approved_date,
+    approval = Approval(_create_user(key.approval.approver), key.approval.approval_type,
+                        key.approval.approved_date,
                         key.approval.notes)
 
-  return ServiceKey(key.name, key.kid, key.service, key.jwk, key.metadata, key.created_date, key.expiration_date,
+  return ServiceKey(key.name, key.kid, key.service, key.jwk, key.metadata, key.created_date,
+                    key.expiration_date,
                     key.rotation_duration, approval)
 
 
 class ServiceKeyDoesNotExist(Exception):
   pass
@@ -30,6 +35,7 @@ class PreOCIModel(SuperuserDataInterface):
   PreOCIModel implements the data model for the SuperUser using a database schema
   before it was changed to support the OCI specification.
   """
+
   def list_all_service_keys(self):
     keys = model.service_keys.list_all_keys()
     return [_create_key(key) for key in keys]
@@ -43,8 +49,10 @@ class PreOCIModel(SuperuserDataInterface):
     except model.ServiceKeyAlreadyApproved:
       raise ServiceKeyAlreadyApproved
 
-  def generate_service_key(self, service, expiration_date, kid=None, name='', metadata=None, rotation_duration=None):
-    (private_key, key) = model.service_keys.generate_service_key(service, expiration_date, metadata=metadata, name=name)
+  def generate_service_key(self, service, expiration_date, kid=None, name='', metadata=None,
+                           rotation_duration=None):
+    (private_key, key) = model.service_keys.generate_service_key(service, expiration_date,
+                                                                 metadata=metadata, name=name)
 
     return private_key, key.kid
 
@@ -10,50 +10,51 @@ from config_app.c_app import app, config_provider
 from config_app.config_endpoints.api import resource, ApiResource, nickname
 from config_app.config_util.tar import tarinfo_filter_partial, strip_absolute_path_and_add_trailing_dir
 
 
 @resource('/v1/configapp/initialization')
 class ConfigInitialization(ApiResource):
   """
   Resource for dealing with any initialization logic for the config app
   """
 
   @nickname('scStartNewConfig')
   def post(self):
     config_provider.new_config_dir()
     return make_response('OK')
 
 
 @resource('/v1/configapp/tarconfig')
 class TarConfigLoader(ApiResource):
   """
   Resource for dealing with configuration as a tarball,
   including loading and generating functions
   """
 
   @nickname('scGetConfigTarball')
   def get(self):
     config_path = config_provider.get_config_dir_path()
     tar_dir_prefix = strip_absolute_path_and_add_trailing_dir(config_path)
     temp = tempfile.NamedTemporaryFile()
 
     tar = tarfile.open(temp.name, mode="w|gz")
     for name in os.listdir(config_path):
       tar.add(os.path.join(config_path, name), filter=tarinfo_filter_partial(tar_dir_prefix))
 
     tar.close()
     return send_file(temp.name, mimetype='application/gzip')
 
   @nickname('scUploadTarballConfig')
   def put(self):
     """ Loads tarball config into the config provider """
     # Generate a new empty dir to load the config into
     config_provider.new_config_dir()
     input_stream = request.stream
     with tarfile.open(mode="r|gz", fileobj=input_stream) as tar_stream:
       tar_stream.extractall(config_provider.get_config_dir_path())
 
     # now try to connect to the db provided in their config to validate it works
     combined = dict(**app.config)
     combined.update(config_provider.get_config())
     configure(combined)
 
     return make_response('OK')
@@ -5,15 +5,14 @@ from config_app.config_endpoints.api.superuser_models_interface import user_view
 
 @resource('/v1/user/')
 class User(ApiResource):
   """ Operations related to users. """
 
   @nickname('getLoggedInUser')
   def get(self):
     """ Get user information for the authenticated user. """
     user = get_authenticated_user()
     # TODO(config): figure out if we need user validation
     # if user is None or user.organization or not UserReadPermission(user.username).can():
     #   raise InvalidToken("Requires authentication", payload={'session_required': False})
 
-
     return user_view(user)
@@ -13,52 +13,50 @@ from config_app.config_util.k8sconfig import get_k8s_namespace
 
 
 def truthy_bool(param):
   return param not in {False, 'false', 'False', '0', 'FALSE', '', 'null'}
 
 
 DEFAULT_JS_BUNDLE_NAME = 'configapp'
 PARAM_REGEX = re.compile(r'<([^:>]+:)*([\w]+)>')
 logger = logging.getLogger(__name__)
 TYPE_CONVERTER = {
   truthy_bool: 'boolean',
   str: 'string',
   basestring: 'string',
   reqparse.text_type: 'string',
   int: 'integer',
 }
 
 
 def _list_files(path, extension, contains=""):
   """ Returns a list of all the files with the given extension found under the given path. """
 
   def matches(f):
     return os.path.splitext(f)[1] == '.' + extension and contains in os.path.splitext(f)[0]
 
   def join_path(dp, f):
     # Remove the static/ prefix. It is added in the template.
     return os.path.join(dp, f)[len(ROOT_DIR) + 1 + len('config_app/static/'):]
 
   filepath = os.path.join(os.path.join(ROOT_DIR, 'config_app/static/'), path)
   return [join_path(dp, f) for dp, _, files in os.walk(filepath) for f in files if matches(f)]
 
 
 def render_page_template(name, route_data=None, js_bundle_name=DEFAULT_JS_BUNDLE_NAME, **kwargs):
   """ Renders the page template with the given name as the response and returns its contents. """
   main_scripts = _list_files('build', 'js', js_bundle_name)
 
   contents = render_template(name,
                              route_data=route_data,
                              main_scripts=main_scripts,
                              config_set=frontend_visible_config(app.config),
                              kubernetes_namespace=IS_KUBERNETES and get_k8s_namespace(),
                              **kwargs)
 
   resp = make_response(contents)
   resp.headers['X-FRAME-OPTIONS'] = 'DENY'
   return resp
 
 
 def fully_qualified_name(method_view_class):
   return '%s.%s' % (method_view_class.__module__, method_view_class.__name__)
-
-
@ -5,63 +5,62 @@ from werkzeug.exceptions import HTTPException
class ApiErrorType(Enum):
  invalid_request = 'invalid_request'


class ApiException(HTTPException):
  """
  Represents an error in the application/problem+json format.

  See: https://tools.ietf.org/html/rfc7807

   - "type" (string) - A URI reference that identifies the
     problem type.

   - "title" (string) - A short, human-readable summary of the problem
     type. It SHOULD NOT change from occurrence to occurrence of the
     problem, except for purposes of localization

   - "status" (number) - The HTTP status code

   - "detail" (string) - A human-readable explanation specific to this
     occurrence of the problem.

   - "instance" (string) - A URI reference that identifies the specific
     occurrence of the problem. It may or may not yield further
     information if dereferenced.
  """

  def __init__(self, error_type, status_code, error_description, payload=None):
    Exception.__init__(self)
    self.error_description = error_description
    self.code = status_code
    self.payload = payload
    self.error_type = error_type
    self.data = self.to_dict()

    super(ApiException, self).__init__(error_description, None)

  def to_dict(self):
    rv = dict(self.payload or ())

    if self.error_description is not None:
      rv['detail'] = self.error_description
      rv['error_message'] = self.error_description  # TODO: deprecate

    rv['error_type'] = self.error_type.value  # TODO: deprecate
    rv['title'] = self.error_type.value
    rv['type'] = url_for('api.error', error_type=self.error_type.value, _external=True)
    rv['status'] = self.code

    return rv


class InvalidRequest(ApiException):
  def __init__(self, error_description, payload=None):
    ApiException.__init__(self, ApiErrorType.invalid_request, 400, error_description, payload)


class InvalidResponse(ApiException):
  def __init__(self, error_description, payload=None):
    ApiException.__init__(self, ApiErrorType.invalid_response, 400, error_description, payload)
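For illustration (not part of this commit), the RFC 7807 body that InvalidRequest('Missing required field: name').to_dict() would produce looks roughly like the dict below; the "type" URL comes from url_for, so the host shown here is invented:

problem = {
  'type': 'https://quay.example/api/v1/error/invalid_request',  # assumed host
  'title': 'invalid_request',
  'status': 400,
  'detail': 'Missing required field: name',
  'error_message': 'Missing required field: name',  # legacy duplicate of "detail", to be deprecated
  'error_type': 'invalid_request',                  # legacy duplicate of "title", to be deprecated
}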
@ -5,22 +5,19 @@ from config_app.config_endpoints.common import render_page_template
from config_app.config_endpoints.api.discovery import generate_route_data
from config_app.config_endpoints.api import no_cache


setup_web = Blueprint('setup_web', __name__, template_folder='templates')


@lru_cache(maxsize=1)
def _get_route_data():
  return generate_route_data()


def render_page_template_with_routedata(name, *args, **kwargs):
  return render_page_template(name, _get_route_data(), *args, **kwargs)


@no_cache
@setup_web.route('/', methods=['GET'], defaults={'path': ''})
def index(path, **kwargs):
  return render_page_template_with_routedata('index.html', js_bundle_name='configapp', **kwargs)
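Because _get_route_data is wrapped in lru_cache(maxsize=1), route discovery runs once per process and every later render reuses the result. A small sketch of that property, assuming it runs inside the config app where generate_route_data can execute:

first = _get_route_data()
second = _get_route_data()
assert first is second                         # same cached object, no recomputation
assert _get_route_data.cache_info().hits >= 1  # second call was a cache hit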
@ -6,41 +6,42 @@ from config_app.config_util.k8saccessor import KubernetesAccessorSingleton
class TransientDirectoryProvider(FileConfigProvider):
  """ Implementation of the config provider that reads and writes the data
  from/to the file system, only using temporary directories,
  deleting old dirs and creating new ones as requested.
  """

  def __init__(self, config_volume, yaml_filename, py_filename):
    # Create a temp directory that will be cleaned up when we change the config path
    # This should ensure we have no "pollution" of different configs:
    # no uploaded config should ever affect subsequent config modifications/creations
    temp_dir = TemporaryDirectory()
    self.temp_dir = temp_dir
    super(TransientDirectoryProvider, self).__init__(temp_dir.name, yaml_filename, py_filename)

  @property
  def provider_id(self):
    return 'transient'

  def new_config_dir(self):
    """
    Update the path with a new temporary directory, deleting the old one in the process
    """
    self.temp_dir.cleanup()
    temp_dir = TemporaryDirectory()

    self.config_volume = temp_dir.name
    self.temp_dir = temp_dir
    self.yaml_path = os.path.join(temp_dir.name, self.yaml_filename)

  def get_config_dir_path(self):
    return self.config_volume

  def save_configuration_to_kubernetes(self):
    config_path = self.get_config_dir_path()

    for name in os.listdir(config_path):
      file_path = os.path.join(self.config_volume, name)
      KubernetesAccessorSingleton.get_instance().save_file_as_secret(name, file_path)

    return 200
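A toy illustration (not Quay code) of the cleanup-and-replace pattern used by new_config_dir above; tempfile.TemporaryDirectory is the Python 3 API, and the config app may rely on a backport of it, so treat this as a sketch of the pattern rather than the exact import:

from tempfile import TemporaryDirectory

tmp = TemporaryDirectory()
old_path = tmp.name

tmp.cleanup()               # the old directory and everything in it is gone
tmp = TemporaryDirectory()  # a fresh, empty directory at a new path

assert tmp.name != old_path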
@ -4,9 +4,9 @@ from config_app.config_util.config.TransientDirectoryProvider import TransientDi
def get_config_provider(config_volume, yaml_filename, py_filename, testing=False):
  """ Loads and returns the config provider for the current environment. """

  if testing:
    return TestConfigProvider()

  return TransientDirectoryProvider(config_volume, yaml_filename, py_filename)
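A hypothetical call site (the paths and filenames here are illustrative, not from this commit):

provider = get_config_provider('/conf/stack', 'config.yaml', 'config.py', testing=False)
assert provider.provider_id == 'transient'  # outside tests, the transient provider is returned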
@ -8,64 +8,65 @@ logger = logging.getLogger(__name__)
class BaseFileProvider(BaseProvider):
  """ Base implementation of the config provider that reads the data from the file system. """

  def __init__(self, config_volume, yaml_filename, py_filename):
    self.config_volume = config_volume
    self.yaml_filename = yaml_filename
    self.py_filename = py_filename

    self.yaml_path = os.path.join(config_volume, yaml_filename)
    self.py_path = os.path.join(config_volume, py_filename)

  def update_app_config(self, app_config):
    if os.path.exists(self.py_path):
      logger.debug('Applying config file: %s', self.py_path)
      app_config.from_pyfile(self.py_path)

    if os.path.exists(self.yaml_path):
      logger.debug('Applying config file: %s', self.yaml_path)
      import_yaml(app_config, self.yaml_path)

  def get_config(self):
    if not self.config_exists():
      return None

    config_obj = {}
    import_yaml(config_obj, self.yaml_path)
    return config_obj

  def config_exists(self):
    return self.volume_file_exists(self.yaml_filename)

  def volume_exists(self):
    return os.path.exists(self.config_volume)

  def volume_file_exists(self, filename):
    return os.path.exists(os.path.join(self.config_volume, filename))

  def get_volume_file(self, filename, mode='r'):
    return open(os.path.join(self.config_volume, filename), mode=mode)

  def get_volume_path(self, directory, filename):
    return os.path.join(directory, filename)

  def list_volume_directory(self, path):
    dirpath = os.path.join(self.config_volume, path)
    if not os.path.exists(dirpath):
      return None

    if not os.path.isdir(dirpath):
      return None

    return os.listdir(dirpath)

  def requires_restart(self, app_config):
    file_config = self.get_config()
    if not file_config:
      return False

    for key in file_config:
      if app_config.get(key) != file_config[key]:
        return True

    return False
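Note that requires_restart compares only the keys present in the on-disk config; keys that exist solely in the running app config are ignored. A toy equivalent of that comparison:

def differs(app_config, file_config):
  # True when any on-disk key disagrees with the running config.
  return any(app_config.get(key) != value for key, value in file_config.items())

assert differs({'A': 1, 'B': 2}, {'B': 3}) is True           # changed on disk -> restart
assert differs({'A': 1, 'B': 2, 'C': 9}, {'A': 1}) is False  # extra app-only keys don't matter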
@ -4,57 +4,57 @@ import logging
from config_app.config_util.config.baseprovider import export_yaml, CannotWriteConfigException
from config_app.config_util.config.basefileprovider import BaseFileProvider


logger = logging.getLogger(__name__)


def _ensure_parent_dir(filepath):
  """ Ensures that the parent directory of the given file path exists. """
  try:
    parentpath = os.path.abspath(os.path.join(filepath, os.pardir))
    if not os.path.isdir(parentpath):
      os.makedirs(parentpath)
  except IOError as ioe:
    raise CannotWriteConfigException(str(ioe))


class FileConfigProvider(BaseFileProvider):
  """ Implementation of the config provider that reads and writes the data
  from/to the file system. """

  def __init__(self, config_volume, yaml_filename, py_filename):
    super(FileConfigProvider, self).__init__(config_volume, yaml_filename, py_filename)

  @property
  def provider_id(self):
    return 'file'

  def save_config(self, config_obj):
    export_yaml(config_obj, self.yaml_path)

  def write_volume_file(self, filename, contents):
    filepath = os.path.join(self.config_volume, filename)
    _ensure_parent_dir(filepath)

    try:
      with open(filepath, mode='w') as f:
        f.write(contents)
    except IOError as ioe:
      raise CannotWriteConfigException(str(ioe))

    return filepath

  def remove_volume_file(self, filename):
    filepath = os.path.join(self.config_volume, filename)
    os.remove(filepath)

  def save_volume_file(self, filename, flask_file):
    filepath = os.path.join(self.config_volume, filename)
    _ensure_parent_dir(filepath)

    # Write the file.
    try:
      flask_file.save(filepath)
    except IOError as ioe:
      raise CannotWriteConfigException(str(ioe))

    return filepath
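A hypothetical usage, writing an extra CA certificate under the config volume; the directory, filename, and PEM contents below are invented for illustration:

cert_pem = '-----BEGIN CERTIFICATE-----\n...'  # placeholder PEM contents
provider = FileConfigProvider('/conf/stack', 'config.yaml', 'config.py')
path = provider.write_volume_file('extra_ca_certs/internal.crt', cert_pem)
assert path == '/conf/stack/extra_ca_certs/internal.crt'  # parent dir is created on demand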
@ -1,7 +1,6 @@
import json
import io
import os

from config_app.config_util.config.baseprovider import BaseProvider
@ -9,73 +8,73 @@ REAL_FILES = ['test/data/signing-private.gpg', 'test/data/signing-public.gpg', '
class TestConfigProvider(BaseProvider):
  """ Implementation of the config provider for testing. Everything is kept in-memory instead of
  the real file system. """

  def __init__(self):
    self.clear()

  def clear(self):
    self.files = {}
    self._config = {}

  @property
  def provider_id(self):
    return 'test'

  def update_app_config(self, app_config):
    self._config = app_config

  def get_config(self):
    if not 'config.yaml' in self.files:
      return None

    return json.loads(self.files.get('config.yaml', '{}'))

  def save_config(self, config_obj):
    self.files['config.yaml'] = json.dumps(config_obj)

  def config_exists(self):
    return 'config.yaml' in self.files

  def volume_exists(self):
    return True

  def volume_file_exists(self, filename):
    if filename in REAL_FILES:
      return True

    return filename in self.files

  def save_volume_file(self, filename, flask_file):
    self.files[filename] = flask_file.read()

  def write_volume_file(self, filename, contents):
    self.files[filename] = contents

  def get_volume_file(self, filename, mode='r'):
    if filename in REAL_FILES:
      return open(filename, mode=mode)

    return io.BytesIO(self.files[filename])

  def remove_volume_file(self, filename):
    self.files.pop(filename, None)

  def list_volume_directory(self, path):
    paths = []
    for filename in self.files:
      if filename.startswith(path):
        paths.append(filename[len(path) + 1:])

    return paths

  def requires_restart(self, app_config):
    return False

  def reset_for_test(self):
    self._config['SUPER_USERS'] = ['devtable']
    self.files = {}

  def get_volume_path(self, directory, filename):
    return os.path.join(directory, filename)
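A minimal in-memory roundtrip through this provider, usable as-is in a test session:

provider = TestConfigProvider()
provider.save_config({'SUPER_USERS': ['devtable']})

assert provider.config_exists()
assert provider.get_config() == {'SUPER_USERS': ['devtable']}  # stored as a JSON string in memory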
@ -11,135 +11,135 @@ logger = logging.getLogger(__name__)
QE_DEPLOYMENT_LABEL = 'quay-enterprise-component'


class KubernetesAccessorSingleton(object):
  """ Singleton allowing access to kubernetes operations """
  _instance = None

  def __init__(self, kube_config=None):
    self.kube_config = kube_config
    if kube_config is None:
      self.kube_config = KubernetesConfig.from_env()

    KubernetesAccessorSingleton._instance = self

  @classmethod
  def get_instance(cls, kube_config=None):
    """
    Singleton getter implementation, returns the instance if one exists, otherwise creates the
    instance and ties it to the class.
    :return: KubernetesAccessorSingleton
    """
    if cls._instance is None:
      return cls(kube_config)

    return cls._instance

  def save_file_as_secret(self, name, file_path):
    with open(file_path) as f:
      value = f.read()
      self._update_secret_file(name, value)

  def get_qe_deployments(self):
    """
    Returns all deployments matching the label selector provided in the KubeConfig
    """
    deployment_selector_url = 'namespaces/%s/deployments?labelSelector=%s%%3D%s' % (
      self.kube_config.qe_namespace, QE_DEPLOYMENT_LABEL, self.kube_config.qe_deployment_selector
    )

    response = self._execute_k8s_api('GET', deployment_selector_url, api_prefix='apis/extensions/v1beta1')
    if response.status_code != 200:
      return None
    return json.loads(response.text)

  def cycle_qe_deployments(self, deployment_names):
    """
    Triggers a rollout of all desired deployments in the qe namespace
    """

    for name in deployment_names:
      logger.debug('Cycling deployment %s', name)
      deployment_url = 'namespaces/%s/deployments/%s' % (self.kube_config.qe_namespace, name)

      # There is currently no command to simply rolling restart all the pods: https://github.com/kubernetes/kubernetes/issues/13488
      # Instead, we modify the template of the deployment with a dummy env variable to trigger a cycle of the pods
      # (based off this comment: https://github.com/kubernetes/kubernetes/issues/13488#issuecomment-240393845)
      self._assert_success(self._execute_k8s_api('PATCH', deployment_url, {
        'spec': {
          'template': {
            'spec': {
              'containers': [{
                'name': 'quay-enterprise-app', 'env': [{
                  'name': 'RESTART_TIME',
                  'value': str(datetime.datetime.now())
                }]
              }]
            }
          }
        }
      }, api_prefix='apis/extensions/v1beta1', content_type='application/strategic-merge-patch+json'))

  def _assert_success(self, response):
    if response.status_code != 200:
      logger.error('Kubernetes API call failed with response: %s => %s', response.status_code,
                   response.text)
      raise Exception('Kubernetes API call failed: %s' % response.text)

  def _update_secret_file(self, relative_file_path, value=None):
    if '/' in relative_file_path:
      raise Exception('Expected path from get_volume_path, but found slashes')

    # Check first that the namespace for Quay Enterprise exists. If it does not, report that
    # as an error, as it seems to be a common issue.
    namespace_url = 'namespaces/%s' % (self.kube_config.qe_namespace)
    response = self._execute_k8s_api('GET', namespace_url)
    if response.status_code // 100 != 2:
      msg = 'A Kubernetes namespace with name `%s` must be created to save config' % self.kube_config.qe_namespace
      raise Exception(msg)

    # Check if the secret exists. If not, then we create an empty secret and then update the file
    # inside.
    secret_url = 'namespaces/%s/secrets/%s' % (self.kube_config.qe_namespace, self.kube_config.qe_config_secret)
    secret = self._lookup_secret()
    if secret is None:
      self._assert_success(self._execute_k8s_api('POST', secret_url, {
        "kind": "Secret",
        "apiVersion": "v1",
        "metadata": {
          "name": self.kube_config.qe_config_secret
        },
        "data": {}
      }))

    # Update the secret to reflect the file change.
    secret['data'] = secret.get('data', {})

    if value is not None:
      secret['data'][relative_file_path] = base64.b64encode(value)
    else:
      secret['data'].pop(relative_file_path)

    self._assert_success(self._execute_k8s_api('PUT', secret_url, secret))

  def _lookup_secret(self):
    secret_url = 'namespaces/%s/secrets/%s' % (self.kube_config.qe_namespace, self.kube_config.qe_config_secret)
    response = self._execute_k8s_api('GET', secret_url)
    if response.status_code != 200:
      return None
    return json.loads(response.text)

  def _execute_k8s_api(self, method, relative_url, data=None, api_prefix='api/v1', content_type='application/json'):
    headers = {
      'Authorization': 'Bearer ' + self.kube_config.service_account_token
    }

    if data:
      headers['Content-Type'] = content_type

    data = json.dumps(data) if data else None
    session = Session()
    url = 'https://%s/%s/%s' % (self.kube_config.api_host, api_prefix, relative_url)

    request = Request(method, url, data=data, headers=headers)
    return session.send(request.prepare(), verify=False, timeout=2)
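The rolling-restart trick in cycle_qe_deployments works because any change under spec.template makes Kubernetes re-create the pods. For illustration, the strategic-merge-patch body shown as plain data, with an invented timestamp:

patch = {
  'spec': {
    'template': {
      'spec': {
        'containers': [{
          'name': 'quay-enterprise-app',
          # Bumping this dummy env var changes the pod template, which triggers a rollout.
          'env': [{'name': 'RESTART_TIME', 'value': '2018-06-21 12:00:00'}],
        }],
      },
    },
  },
}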
@ -8,9 +8,11 @@ DEFAULT_QE_CONFIG_SECRET = 'quay-enterprise-config-secret'
# The name of the quay enterprise deployment (not config app) that is used to query & rollout
DEFAULT_QE_DEPLOYMENT_SELECTOR = 'app'


def get_k8s_namespace():
  return os.environ.get('QE_K8S_NAMESPACE', DEFAULT_QE_NAMESPACE)


class KubernetesConfig(object):
  def __init__(self, api_host='', service_account_token=SERVICE_ACCOUNT_TOKEN_PATH,
               qe_namespace=DEFAULT_QE_NAMESPACE,
@ -31,7 +33,7 @@ class KubernetesConfig(object):
    with open(SERVICE_ACCOUNT_TOKEN_PATH, 'r') as f:
      service_token = f.read()

    api_host = os.environ.get('KUBERNETES_SERVICE_HOST', '')
    port = os.environ.get('KUBERNETES_SERVICE_PORT')
    if port:
      api_host += ':' + port
@ -42,6 +44,3 @@ class KubernetesConfig(object):
    return cls(api_host=api_host, service_account_token=service_token, qe_namespace=qe_namespace,
               qe_config_secret=qe_config_secret, qe_deployment_selector=qe_deployment_selector)
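Since get_k8s_namespace and from_env read everything from the environment, a deployment can retarget the config app without code changes. A small sketch, with a hypothetical namespace name:

import os

os.environ['QE_K8S_NAMESPACE'] = 'quay-staging'  # hypothetical override
assert get_k8s_namespace() == 'quay-staging'     # the env var wins over DEFAULT_QE_NAMESPACE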
@ -3,45 +3,45 @@ from config_app._init_config import CONF_DIR
def logfile_path(jsonfmt=False, debug=False):
  """
  Returns a logfile conf path following these rules:
  - conf/logging_debug_json.conf  # jsonfmt=true,  debug=true
  - conf/logging_json.conf        # jsonfmt=true,  debug=false
  - conf/logging_debug.conf       # jsonfmt=false, debug=true
  - conf/logging.conf             # jsonfmt=false, debug=false
  Can be parametrized via envvars: JSONLOG=true, DEBUGLOG=true
  """
  _json = ""
  _debug = ""

  if jsonfmt or os.getenv('JSONLOG', 'false').lower() == 'true':
    _json = "_json"

  if debug or os.getenv('DEBUGLOG', 'false').lower() == 'true':
    _debug = "_debug"

  return os.path.join(CONF_DIR, "logging%s%s.conf" % (_debug, _json))


def filter_logs(values, filtered_fields):
  """
  Takes a dict and a list of keys to filter.
  eg:
    with filtered_fields:
      [{'key': ['k1', 'k2'], 'fn': lambda x: 'filtered'}]
    and values:
      {'k1': {'k2': 'some-secret'}, 'k3': 'some-value'}
    the returned dict is:
      {'k1': {'k2': 'filtered'}, 'k3': 'some-value'}
  """
  for field in filtered_fields:
    cdict = values

    for key in field['key'][:-1]:
      if key in cdict:
        cdict = cdict[key]

    last_key = field['key'][-1]

    if last_key in cdict and cdict[last_key]:
      cdict[last_key] = field['fn'](cdict[last_key])
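The docstring example, executed; note that filter_logs mutates the dict in place and returns None:

values = {'k1': {'k2': 'some-secret'}, 'k3': 'some-value'}
filter_logs(values, [{'key': ['k1', 'k2'], 'fn': lambda x: 'filtered'}])
assert values == {'k1': {'k2': 'filtered'}, 'k3': 'some-value'}  # secret redacted in place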
@ -2,10 +2,12 @@ from fnmatch import fnmatch
import OpenSSL


class CertInvalidException(Exception):
  """ Exception raised when a certificate could not be parsed/loaded. """
  pass


class KeyInvalidException(Exception):
  """ Exception raised when a key could not be parsed/loaded or successfully applied to a cert. """
  pass
@ -24,8 +26,10 @@ def load_certificate(cert_contents):
_SUBJECT_ALT_NAME = 'subjectAltName'


class SSLCertificate(object):
  """ Helper class for easier working with SSL certificates. """

  def __init__(self, openssl_cert):
    self.openssl_cert = openssl_cert
@ -1,20 +1,22 @@
from util.config.validator import EXTRA_CA_DIRECTORY


def strip_absolute_path_and_add_trailing_dir(path):
  """
  Removes the leading / from the prefix path and adds a trailing /
  """
  return path[1:] + '/'


def tarinfo_filter_partial(prefix):
  def tarinfo_filter(tarinfo):
    # remove leading directory info
    tarinfo.name = tarinfo.name.replace(prefix, '')

    # ignore any directory that isn't the specified extra ca one:
    if tarinfo.isdir() and not tarinfo.name == EXTRA_CA_DIRECTORY:
      return None

    return tarinfo

  return tarinfo_filter
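A hypothetical usage with the standard tarfile module, showing how the two helpers compose; the config directory and archive path below are invented:

import tarfile

config_dir = '/conf/stack'
prefix = strip_absolute_path_and_add_trailing_dir(config_dir)  # 'conf/stack/'

with tarfile.open('/tmp/quay-config.tar.gz', 'w:gz') as tar:
  # Each member's name is stripped of the prefix; non-CA directories are dropped.
  tar.add(config_dir, filter=tarinfo_filter_partial(prefix))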
@ -1,23 +1,25 @@
import pytest

from httmock import urlmatch, HTTMock, response

from config_app.config_util.k8saccessor import KubernetesAccessorSingleton
from config_app.config_util.k8sconfig import KubernetesConfig


@pytest.mark.parametrize('kube_config, expected_api, expected_query', [
  ({'api_host': 'www.customhost.com'},
   '/apis/extensions/v1beta1/namespaces/quay-enterprise/deployments', 'labelSelector=quay-enterprise-component%3Dapp'),

  ({'api_host': 'www.customhost.com', 'qe_deployment_selector': 'custom-selector'},
   '/apis/extensions/v1beta1/namespaces/quay-enterprise/deployments',
   'labelSelector=quay-enterprise-component%3Dcustom-selector'),

  ({'api_host': 'www.customhost.com', 'qe_namespace': 'custom-namespace'},
   '/apis/extensions/v1beta1/namespaces/custom-namespace/deployments', 'labelSelector=quay-enterprise-component%3Dapp'),

  ({'api_host': 'www.customhost.com', 'qe_namespace': 'custom-namespace', 'qe_deployment_selector': 'custom-selector'},
   '/apis/extensions/v1beta1/namespaces/custom-namespace/deployments',
   'labelSelector=quay-enterprise-component%3Dcustom-selector'),
])
def test_get_qe_deployments(kube_config, expected_api, expected_query):
  config = KubernetesConfig(**kube_config)
@ -36,12 +38,15 @@ def test_get_qe_deployments(kube_config, expected_api, expected_query):
  assert url_hit[0]


@pytest.mark.parametrize('kube_config, deployment_names, expected_api_hits', [
  ({'api_host': 'www.customhost.com'}, [], []),
  ({'api_host': 'www.customhost.com'}, ['myDeployment'],
   ['/apis/extensions/v1beta1/namespaces/quay-enterprise/deployments/myDeployment']),
  ({'api_host': 'www.customhost.com', 'qe_namespace': 'custom-namespace'},
   ['myDeployment', 'otherDeployment'],
   ['/apis/extensions/v1beta1/namespaces/custom-namespace/deployments/myDeployment',
    '/apis/extensions/v1beta1/namespaces/custom-namespace/deployments/otherDeployment']),
])
def test_cycle_qe_deployments(kube_config, deployment_names, expected_api_hits):
  KubernetesAccessorSingleton._instance = None
@ -6,24 +6,27 @@ from util.config.validator import EXTRA_CA_DIRECTORY
from test.fixtures import *


class MockTarInfo:
  def __init__(self, name, isdir):
    self.name = name
    self.isdir = lambda: isdir

  def __eq__(self, other):
    return other is not None and self.name == other.name


@pytest.mark.parametrize('prefix,tarinfo,expected', [
  # It should handle simple files
  ('Users/sam/', MockTarInfo('Users/sam/config.yaml', False), MockTarInfo('config.yaml', False)),
  # It should allow the extra CA dir
  ('Users/sam/', MockTarInfo('Users/sam/%s' % EXTRA_CA_DIRECTORY, True), MockTarInfo('%s' % EXTRA_CA_DIRECTORY, True)),
  # it should allow a file in that extra dir
  ('Users/sam/', MockTarInfo('Users/sam/%s/cert.crt' % EXTRA_CA_DIRECTORY, False),
   MockTarInfo('%s/cert.crt' % EXTRA_CA_DIRECTORY, False)),
  # it should not allow a directory that isn't the CA dir
  ('Users/sam/', MockTarInfo('Users/sam/dirignore', True), None),
])
def test_tarinfo_filter(prefix, tarinfo, expected):
  partial = tarinfo_filter_partial(prefix)
  assert partial(tarinfo) == expected
@ -2,7 +2,5 @@ from config_app.c_app import app as application
from config_app.config_endpoints.api import api_bp
from config_app.config_endpoints.setup_web import setup_web

application.register_blueprint(setup_web)
application.register_blueprint(api_bp, url_prefix='/api')