initial import for Open Source 🎉

Jimmy Zelinskie 2019-11-12 11:09:47 -05:00
parent 1898c361f3
commit 9c0dd3b722
2048 changed files with 218743 additions and 0 deletions

config_app/config_endpoints/api/__init__.py
@@ -0,0 +1,158 @@
import logging
from flask import Blueprint, request, abort
from flask_restful import Resource, Api
from flask_restful.utils.cors import crossdomain
from data import model
from email.utils import formatdate
from calendar import timegm
from functools import partial, wraps
from jsonschema import validate, ValidationError
from config_app.c_app import app, IS_KUBERNETES
from config_app.config_endpoints.exception import InvalidResponse, InvalidRequest
logger = logging.getLogger(__name__)
api_bp = Blueprint('api', __name__)
CROSS_DOMAIN_HEADERS = ['Authorization', 'Content-Type', 'X-Requested-With']
class ApiExceptionHandlingApi(Api):
@crossdomain(origin='*', headers=CROSS_DOMAIN_HEADERS)
def handle_error(self, error):
return super(ApiExceptionHandlingApi, self).handle_error(error)
api = ApiExceptionHandlingApi()
api.init_app(api_bp)
def log_action(kind, user_or_orgname, metadata=None, repo=None, repo_name=None):
if not metadata:
metadata = {}
if repo:
repo_name = repo.name
model.log.log_action(kind, user_or_orgname, repo_name, user_or_orgname, request.remote_addr, metadata)
def format_date(date):
""" Output an RFC822 date format. """
if date is None:
return None
return formatdate(timegm(date.utctimetuple()))
def resource(*urls, **kwargs):
def wrapper(api_resource):
if not api_resource:
return None
api_resource.registered = True
api.add_resource(api_resource, *urls, **kwargs)
return api_resource
return wrapper
class ApiResource(Resource):
registered = False
method_decorators = []
def options(self):
return None, 200
def add_method_metadata(name, value):
def modifier(func):
if func is None:
return None
if '__api_metadata' not in dir(func):
func.__api_metadata = {}
func.__api_metadata[name] = value
return func
return modifier
def method_metadata(func, name):
if func is None:
return None
if '__api_metadata' in dir(func):
return func.__api_metadata.get(name, None)
return None
def no_cache(f):
@wraps(f)
def add_no_cache(*args, **kwargs):
response = f(*args, **kwargs)
if response is not None:
response.headers['Cache-Control'] = 'no-cache, no-store, must-revalidate'
return response
return add_no_cache
def define_json_response(schema_name):
def wrapper(func):
@add_method_metadata('response_schema', schema_name)
@wraps(func)
def wrapped(self, *args, **kwargs):
schema = self.schemas[schema_name]
resp = func(self, *args, **kwargs)
if app.config['TESTING']:
try:
validate(resp, schema)
except ValidationError as ex:
raise InvalidResponse(ex.message)
return resp
return wrapped
return wrapper
def validate_json_request(schema_name, optional=False):
def wrapper(func):
@add_method_metadata('request_schema', schema_name)
@wraps(func)
def wrapped(self, *args, **kwargs):
schema = self.schemas[schema_name]
try:
json_data = request.get_json()
if json_data is None:
if not optional:
raise InvalidRequest('Missing JSON body')
else:
validate(json_data, schema)
return func(self, *args, **kwargs)
except ValidationError as ex:
raise InvalidRequest(ex.message)
return wrapped
return wrapper
def kubernetes_only(f):
""" Aborts the request with a 400 if the app is not running on kubernetes """
@wraps(f)
def abort_if_not_kube(*args, **kwargs):
if not IS_KUBERNETES:
abort(400)
return f(*args, **kwargs)
return abort_if_not_kube
nickname = partial(add_method_metadata, 'nickname')
import config_app.config_endpoints.api.discovery
import config_app.config_endpoints.api.kube_endpoints
import config_app.config_endpoints.api.suconfig
import config_app.config_endpoints.api.superuser
import config_app.config_endpoints.api.tar_config_loader
import config_app.config_endpoints.api.user
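
Taken together, resource, nickname, validate_json_request, and define_json_response are the whole registration pipeline for the config app's endpoints. A minimal sketch of how they compose on a resource class (the Hello endpoint and its schema are illustrative, not part of this commit):

@resource('/v1/hello')
class Hello(ApiResource):
  """ Hypothetical endpoint built from the helpers above. """
  schemas = {
    'SayHello': {
      'type': 'object',
      'required': ['name'],
      'properties': {
        'name': {'type': 'string'},
      },
    },
  }

  @nickname('sayHello')               # recorded as the Swagger operationId
  @validate_json_request('SayHello')  # rejects bodies failing the schema with a 400
  def post(self):
    return {'greeting': 'Hello, %s' % request.get_json()['name']}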

config_app/config_endpoints/api/discovery.py
@@ -0,0 +1,254 @@
# TODO: extract the discovery logic into a top-level util and use it both here and in the old discovery.py
import logging
import sys
from collections import OrderedDict
from config_app.c_app import app
from config_app.config_endpoints.api import method_metadata
from config_app.config_endpoints.common import fully_qualified_name, PARAM_REGEX, TYPE_CONVERTER
logger = logging.getLogger(__name__)
def generate_route_data():
include_internal = True
compact = True
def swagger_parameter(name, description, kind='path', param_type='string', required=True,
enum=None, schema=None):
# https://github.com/swagger-api/swagger-spec/blob/master/versions/2.0.md#parameterObject
parameter_info = {
'name': name,
'in': kind,
'required': required
}
if schema:
parameter_info['schema'] = {
'$ref': '#/definitions/%s' % schema
}
else:
parameter_info['type'] = param_type
if enum is not None and len(list(enum)) > 0:
parameter_info['enum'] = list(enum)
return parameter_info
paths = {}
models = {}
tags = []
tags_added = set()
operation_ids = set()
for rule in app.url_map.iter_rules():
endpoint_method = app.view_functions[rule.endpoint]
# Verify that we have a view class for this API method.
if 'view_class' not in dir(endpoint_method):
continue
view_class = endpoint_method.view_class
# Hide the class if it is internal.
internal = method_metadata(view_class, 'internal')
if not include_internal and internal:
continue
# Build the tag.
parts = fully_qualified_name(view_class).split('.')
tag_name = parts[-2]
if tag_name not in tags_added:
tags_added.add(tag_name)
tags.append({
'name': tag_name,
'description': (sys.modules[view_class.__module__].__doc__ or '').strip()
})
# Build the Swagger data for the path.
swagger_path = PARAM_REGEX.sub(r'{\2}', rule.rule)
full_name = fully_qualified_name(view_class)
path_swagger = {
'x-name': full_name,
'x-path': swagger_path,
'x-tag': tag_name
}
related_user_res = method_metadata(view_class, 'related_user_resource')
if related_user_res is not None:
path_swagger['x-user-related'] = fully_qualified_name(related_user_res)
paths[swagger_path] = path_swagger
# Add any global path parameters.
param_data_map = (view_class.__api_path_params
                  if '__api_path_params' in dir(view_class) else {})
if param_data_map:
path_parameters_swagger = []
for path_parameter in param_data_map:
description = param_data_map[path_parameter].get('description')
path_parameters_swagger.append(swagger_parameter(path_parameter, description))
path_swagger['parameters'] = path_parameters_swagger
# Add the individual HTTP operations.
method_names = list(rule.methods.difference(['HEAD', 'OPTIONS']))
for method_name in method_names:
# https://github.com/swagger-api/swagger-spec/blob/master/versions/2.0.md#operation-object
method = getattr(view_class, method_name.lower(), None)
if method is None:
logger.debug('Unable to find method for %s in class %s', method_name, view_class)
continue
operationId = method_metadata(method, 'nickname')
operation_swagger = {
'operationId': operationId,
'parameters': [],
}
if operationId is None:
continue
if operationId in operation_ids:
raise Exception('Duplicate operation Id: %s' % operationId)
operation_ids.add(operationId)
# Mark the method as internal.
internal = method_metadata(method, 'internal')
if internal is not None:
operation_swagger['x-internal'] = True
if include_internal:
requires_fresh_login = method_metadata(method, 'requires_fresh_login')
if requires_fresh_login is not None:
operation_swagger['x-requires-fresh-login'] = True
# Add the path parameters.
if rule.arguments:
for path_parameter in rule.arguments:
description = param_data_map.get(path_parameter, {}).get('description')
operation_swagger['parameters'].append(
swagger_parameter(path_parameter, description))
# Add the query parameters.
if '__api_query_params' in dir(method):
for query_parameter_info in method.__api_query_params:
name = query_parameter_info['name']
description = query_parameter_info['help']
param_type = TYPE_CONVERTER[query_parameter_info['type']]
required = query_parameter_info['required']
operation_swagger['parameters'].append(
swagger_parameter(name, description, kind='query',
param_type=param_type,
required=required,
enum=query_parameter_info['choices']))
# Add the OAuth security block.
# https://github.com/swagger-api/swagger-spec/blob/master/versions/2.0.md#securityRequirementObject
scope = method_metadata(method, 'oauth2_scope')
if scope and not compact:
operation_swagger['security'] = [{'oauth2_implicit': [scope.scope]}]
# Add the responses block.
# https://github.com/swagger-api/swagger-spec/blob/master/versions/2.0.md#responsesObject
response_schema_name = method_metadata(method, 'response_schema')
if not compact:
if response_schema_name:
models[response_schema_name] = view_class.schemas[response_schema_name]
models['ApiError'] = {
'type': 'object',
'properties': {
'status': {
'type': 'integer',
'description': 'Status code of the response.'
},
'type': {
'type': 'string',
'description': 'Reference to the type of the error.'
},
'detail': {
'type': 'string',
'description': 'Details about the specific instance of the error.'
},
'title': {
'type': 'string',
'description': 'Unique error code to identify the type of error.'
},
'error_message': {
'type': 'string',
'description': 'Deprecated; alias for detail'
},
'error_type': {
'type': 'string',
'description': 'Deprecated; alias for detail'
}
},
'required': [
'status',
'type',
'title',
]
}
responses = {
'400': {
'description': 'Bad Request',
},
'401': {
'description': 'Session required',
},
'403': {
'description': 'Unauthorized access',
},
'404': {
'description': 'Not found',
},
}
for _, body in responses.items():
body['schema'] = {'$ref': '#/definitions/ApiError'}
if method_name == 'DELETE':
responses['204'] = {
'description': 'Deleted'
}
elif method_name == 'POST':
responses['201'] = {
'description': 'Successful creation'
}
else:
responses['200'] = {
'description': 'Successful invocation'
}
if response_schema_name:
responses['200']['schema'] = {
'$ref': '#/definitions/%s' % response_schema_name
}
operation_swagger['responses'] = responses
# Add the request block.
request_schema_name = method_metadata(method, 'request_schema')
if request_schema_name and not compact:
models[request_schema_name] = view_class.schemas[request_schema_name]
operation_swagger['parameters'].append(
swagger_parameter('body', 'Request body contents.', kind='body',
schema=request_schema_name))
# Add the operation to the parent path.
if not internal or (internal and include_internal):
path_swagger[method_name.lower()] = operation_swagger
tags.sort(key=lambda t: t['name'])
paths = OrderedDict(sorted(paths.items(), key=lambda p: p[1]['x-tag']))
if compact:
return {'paths': paths}
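
With compact=True, generate_route_data() returns only the paths mapping: one entry per templated URL, one operation object per HTTP method, and ApiError schema refs on the error responses. Roughly what a single entry looks like, using the rollout-status route from this commit (reconstructed by hand, so treat the exact shape as an approximation):

example_path_entry = {
  '/v1/kubernetes/deployment/{deployment}/status': {
    'x-name': 'config_app.config_endpoints.api.kube_endpoints.QEDeploymentRolloutStatus',
    'x-path': '/v1/kubernetes/deployment/{deployment}/status',
    'x-tag': 'kube_endpoints',
    'get': {
      'operationId': 'scGetDeploymentRolloutStatus',
      'parameters': [
        {'name': 'deployment', 'in': 'path', 'required': True, 'type': 'string'},
      ],
      'responses': {
        '200': {'description': 'Successful invocation'},
        '400': {'description': 'Bad Request',
                'schema': {'$ref': '#/definitions/ApiError'}},
        # 401, 403 and 404 carry the same ApiError reference
      },
    },
  },
}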

config_app/config_endpoints/api/kube_endpoints.py
@@ -0,0 +1,143 @@
import logging
from flask import request, make_response
from config_app.config_util.config import get_config_as_kube_secret
from data.database import configure
from config_app.c_app import app, config_provider
from config_app.config_endpoints.api import resource, ApiResource, nickname, kubernetes_only, validate_json_request
from config_app.config_util.k8saccessor import KubernetesAccessorSingleton, K8sApiException
logger = logging.getLogger(__name__)
@resource('/v1/kubernetes/deployments/')
class SuperUserKubernetesDeployment(ApiResource):
""" Resource for the getting the status of Red Hat Quay deployments and cycling them """
schemas = {
'ValidateDeploymentNames': {
'type': 'object',
'description': 'Validates deployment names for cycling',
'required': [
'deploymentNames'
],
'properties': {
'deploymentNames': {
'type': 'array',
'description': 'The names of the deployments to cycle'
},
},
}
}
@kubernetes_only
@nickname('scGetNumDeployments')
def get(self):
return KubernetesAccessorSingleton.get_instance().get_qe_deployments()
@kubernetes_only
@validate_json_request('ValidateDeploymentNames')
@nickname('scCycleQEDeployments')
def put(self):
deployment_names = request.get_json()['deploymentNames']
return KubernetesAccessorSingleton.get_instance().cycle_qe_deployments(deployment_names)
@resource('/v1/kubernetes/deployment/<deployment>/status')
class QEDeploymentRolloutStatus(ApiResource):
@kubernetes_only
@nickname('scGetDeploymentRolloutStatus')
def get(self, deployment):
deployment_rollout_status = KubernetesAccessorSingleton.get_instance().get_deployment_rollout_status(deployment)
return {
'status': deployment_rollout_status.status,
'message': deployment_rollout_status.message,
}
@resource('/v1/kubernetes/deployments/rollback')
class QEDeploymentRollback(ApiResource):
""" Resource for rolling back deployments """
schemas = {
'ValidateDeploymentNames': {
'type': 'object',
'description': 'Validates deployment names for rolling back',
'required': [
'deploymentNames'
],
'properties': {
'deploymentNames': {
'type': 'array',
'description': 'The names of the deployments to roll back'
},
},
}
}
@kubernetes_only
@nickname('scRollbackDeployments')
@validate_json_request('ValidateDeploymentNames')
def post(self):
"""
Returns the config to its original state and rolls back deployments
:return:
"""
deployment_names = request.get_json()['deploymentNames']
# To roll back a deployment, we must do 2 things:
# 1. Roll back the config secret to its old value (discarding changes we made in this session)
# 2. Trigger a rollback to the previous revision, so that the pods will be restarted with
# the old config
old_secret = get_config_as_kube_secret(config_provider.get_old_config_dir())
kube_accessor = KubernetesAccessorSingleton.get_instance()
kube_accessor.replace_qe_secret(old_secret)
try:
for name in deployment_names:
kube_accessor.rollback_deployment(name)
except K8sApiException as e:
logger.exception('Failed to rollback deployment.')
return make_response(e.message, 503)
return make_response('Ok', 204)
@resource('/v1/kubernetes/config')
class SuperUserKubernetesConfiguration(ApiResource):
""" Resource for saving the config files to kubernetes secrets. """
@kubernetes_only
@nickname('scDeployConfiguration')
def post(self):
try:
new_secret = get_config_as_kube_secret(config_provider.get_config_dir_path())
KubernetesAccessorSingleton.get_instance().replace_qe_secret(new_secret)
except K8sApiException as e:
logger.exception('Failed to deploy qe config secret to kubernetes.')
return make_response(e.message, 503)
return make_response('Ok', 201)
@resource('/v1/kubernetes/config/populate')
class KubernetesConfigurationPopulator(ApiResource):
""" Resource for populating the local configuration from the cluster's kubernetes secrets. """
@kubernetes_only
@nickname('scKubePopulateConfig')
def post(self):
# Get a clean transient directory to write the config into
config_provider.new_config_dir()
kube_accessor = KubernetesAccessorSingleton.get_instance()
kube_accessor.save_secret_to_directory(config_provider.get_config_dir_path())
config_provider.create_copy_of_config_dir()
# We update the db configuration to connect to their specified one
# (Note: even if this DB isn't valid, it won't affect much in the config app, since we'll
# report an error and every option creates a new clean dir, so we never pollute configs.)
combined = dict(**app.config)
combined.update(config_provider.get_config())
configure(combined)
return 200
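
These are the endpoints the config tool's UI drives when it deploys to a cluster: push the secret, then cycle the deployments so pods restart with it. A hedged client-side sketch (the host, the use of requests, and the shape of the deployments response are assumptions, not part of this commit):

import requests

base = 'http://localhost:8080'

# Push the edited config into the cluster as the Quay config secret.
resp = requests.post(base + '/v1/kubernetes/config')
assert resp.status_code == 201

# List the Quay deployments, then cycle them to pick up the new secret.
# (The response shape of get_qe_deployments() is assumed here.)
deployments = requests.get(base + '/v1/kubernetes/deployments/').json()
names = [d['name'] for d in deployments.get('items', [])]
requests.put(base + '/v1/kubernetes/deployments/', json={'deploymentNames': names})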

config_app/config_endpoints/api/suconfig.py
@@ -0,0 +1,302 @@
import logging
from flask import abort, request
from config_app.config_endpoints.api.suconfig_models_pre_oci import pre_oci_model as model
from config_app.config_endpoints.api import resource, ApiResource, nickname, validate_json_request
from config_app.c_app import (app, config_provider, superusers, ip_resolver,
instance_keys, INIT_SCRIPTS_LOCATION)
from data.database import configure
from data.runmigration import run_alembic_migration
from util.config.configutil import add_enterprise_config_defaults
from util.config.validator import validate_service_for_config, ValidatorContext, \
is_valid_config_upload_filename
logger = logging.getLogger(__name__)
def database_is_valid():
""" Returns whether the database, as configured, is valid. """
return model.is_valid()
def database_has_users():
""" Returns whether the database has any users defined. """
return model.has_users()
@resource('/v1/superuser/config')
class SuperUserConfig(ApiResource):
""" Resource for fetching and updating the current configuration, if any. """
schemas = {
'UpdateConfig': {
'type': 'object',
'description': 'Updates the YAML config file',
'required': [
'config',
],
'properties': {
'config': {
'type': 'object'
},
'password': {
'type': 'string'
},
},
},
}
@nickname('scGetConfig')
def get(self):
""" Returns the currently defined configuration, if any. """
config_object = config_provider.get_config()
return {
'config': config_object
}
@nickname('scUpdateConfig')
@validate_json_request('UpdateConfig')
def put(self):
""" Updates the config override file. """
# Note: This method is called to set the database configuration before any super user exists,
# so we also allow it to be called if there is no valid registry configuration set up.
config_object = request.get_json()['config']
# Add any enterprise defaults missing from the config.
add_enterprise_config_defaults(config_object, app.config['SECRET_KEY'])
# Write the configuration changes to the config override file.
config_provider.save_config(config_object)
# now try to connect to the db provided in their config to validate it works
combined = dict(**app.config)
combined.update(config_provider.get_config())
configure(combined, testing=app.config['TESTING'])
return {
'exists': True,
'config': config_object
}
@resource('/v1/superuser/registrystatus')
class SuperUserRegistryStatus(ApiResource):
""" Resource for determining the status of the registry, such as if config exists,
if a database is configured, and if it has any defined users.
"""
@nickname('scRegistryStatus')
def get(self):
""" Returns the status of the registry. """
# If there is no config file, we need to setup the database.
if not config_provider.config_exists():
return {
'status': 'config-db'
}
# If the database isn't yet valid, then we need to set it up.
if not database_is_valid():
return {
'status': 'setup-db'
}
config = config_provider.get_config()
if config and config.get('SETUP_COMPLETE'):
return {
'status': 'config'
}
return {
'status': 'create-superuser' if not database_has_users() else 'config'
}
class _AlembicLogHandler(logging.Handler):
def __init__(self):
super(_AlembicLogHandler, self).__init__()
self.records = []
def emit(self, record):
self.records.append({
'level': record.levelname,
'message': record.getMessage()
})
def _reload_config():
combined = dict(**app.config)
combined.update(config_provider.get_config())
configure(combined)
return combined
@resource('/v1/superuser/setupdb')
class SuperUserSetupDatabase(ApiResource):
""" Resource for invoking alembic to setup the database. """
@nickname('scSetupDatabase')
def get(self):
""" Invokes the alembic upgrade process. """
# Note: This method is called after the database config is saved, but before the
# database has any tables. Therefore, we only allow it to be run in that unique case.
if config_provider.config_exists() and not database_is_valid():
combined = _reload_config()
app.config['DB_URI'] = combined['DB_URI']
db_uri = app.config['DB_URI']
escaped_db_uri = db_uri.replace('%', '%%')
log_handler = _AlembicLogHandler()
try:
run_alembic_migration(escaped_db_uri, log_handler, setup_app=False)
except Exception as ex:
return {
'error': str(ex)
}
return {
'logs': log_handler.records
}
abort(403)
@resource('/v1/superuser/config/createsuperuser')
class SuperUserCreateInitialSuperUser(ApiResource):
""" Resource for creating the initial super user. """
schemas = {
'CreateSuperUser': {
'type': 'object',
'description': 'Information for creating the initial super user',
'required': [
'username',
'password',
'email'
],
'properties': {
'username': {
'type': 'string',
'description': 'The username for the superuser'
},
'password': {
'type': 'string',
'description': 'The password for the superuser'
},
'email': {
'type': 'string',
'description': 'The e-mail address for the superuser'
},
},
},
}
@nickname('scCreateInitialSuperuser')
@validate_json_request('CreateSuperUser')
def post(self):
""" Creates the initial super user, updates the underlying configuration and
sets the current session to have that super user. """
_reload_config()
# Special security check: This method is only accessible when:
# - There is a valid config YAML file.
# - There are currently no users in the database (clean install)
#
# We do this special security check because at the point this method is called, the database
# is clean but does not (yet) have any super users for our permissions code to check against.
if config_provider.config_exists() and not database_has_users():
data = request.get_json()
username = data['username']
password = data['password']
email = data['email']
# Create the user in the database.
superuser_uuid = model.create_superuser(username, password, email)
# Add the user to the config.
config_object = config_provider.get_config()
config_object['SUPER_USERS'] = [username]
config_provider.save_config(config_object)
# Update the in-memory config for the new superuser.
superusers.register_superuser(username)
return {
'status': True
}
abort(403)
@resource('/v1/superuser/config/validate/<service>')
class SuperUserConfigValidate(ApiResource):
""" Resource for validating a block of configuration against an external service. """
schemas = {
'ValidateConfig': {
'type': 'object',
'description': 'Validates configuration',
'required': [
'config'
],
'properties': {
'config': {
'type': 'object'
},
'password': {
'type': 'string',
'description': 'The user\'s password, used for auth validation'
}
},
},
}
@nickname('scValidateConfig')
@validate_json_request('ValidateConfig')
def post(self, service):
""" Validates the given config for the given service. """
# Note: This method is called to validate the database configuration before any super user
# exists, so we also allow it to be called if there is no valid registry configuration set
# up. Note that this is also safe since this method does not access any information not
# given in the request.
config = request.get_json()['config']
validator_context = ValidatorContext.from_app(app, config,
request.get_json().get('password', ''),
instance_keys=instance_keys,
ip_resolver=ip_resolver,
config_provider=config_provider,
init_scripts_location=INIT_SCRIPTS_LOCATION)
return validate_service_for_config(service, validator_context)
@resource('/v1/superuser/config/file/<filename>')
class SuperUserConfigFile(ApiResource):
""" Resource for fetching the status of config files and overriding them. """
@nickname('scConfigFileExists')
def get(self, filename):
""" Returns whether the configuration file with the given name exists. """
if not is_valid_config_upload_filename(filename):
abort(404)
return {
'exists': config_provider.volume_file_exists(filename)
}
@nickname('scUpdateConfigFile')
def post(self, filename):
""" Updates the configuration file with the given name. """
if not is_valid_config_upload_filename(filename):
abort(404)
# Note: This method can be called before the configuration exists
# to upload the database SSL cert.
uploaded_file = request.files['file']
if not uploaded_file:
abort(400)
config_provider.save_volume_file(filename, uploaded_file)
return {
'status': True
}
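
scRegistryStatus encodes the setup wizard's state machine: no config file yields config-db, an invalid database yields setup-db, a valid but user-less database yields create-superuser, and anything else yields config. A hedged sketch of a client walking that flow, with payloads taken from the schemas above (host and example values are assumptions; a real client would re-poll registrystatus after every step):

import requests

base = 'http://localhost:8080'
status = requests.get(base + '/v1/superuser/registrystatus').json()['status']

if status == 'config-db':
  # Save an initial config carrying the database URI (UpdateConfig schema).
  requests.put(base + '/v1/superuser/config',
               json={'config': {'DB_URI': 'postgresql://quay@db/quay'}})

elif status == 'setup-db':
  # Run the alembic migrations against the freshly configured database.
  requests.get(base + '/v1/superuser/setupdb')

elif status == 'create-superuser':
  requests.post(base + '/v1/superuser/config/createsuperuser',
                json={'username': 'admin', 'password': 'hunter22',
                      'email': 'admin@example.com'})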

config_app/config_endpoints/api/suconfig_models_interface.py
@@ -0,0 +1,39 @@
from abc import ABCMeta, abstractmethod
from six import add_metaclass
@add_metaclass(ABCMeta)
class SuperuserConfigDataInterface(object):
"""
Interface that represents all data store interactions required by the superuser config API.
"""
@abstractmethod
def is_valid(self):
"""
Returns true if the configured database is valid.
"""
@abstractmethod
def has_users(self):
"""
Returns true if there are any users defined.
"""
@abstractmethod
def create_superuser(self, username, password, email):
"""
Creates a new superuser with the given username, password and email. Returns the user's UUID.
"""
@abstractmethod
def has_federated_login(self, username, service_name):
"""
Returns true if the matching user has a federated login under the matching service.
"""
@abstractmethod
def attach_federated_login(self, username, service_name, federated_username):
"""
Attaches a federated login to the matching user, under the given service.
"""

config_app/config_endpoints/api/suconfig_models_pre_oci.py
@@ -0,0 +1,37 @@
from data import model
from data.database import User
from config_app.config_endpoints.api.suconfig_models_interface import SuperuserConfigDataInterface
class PreOCIModel(SuperuserConfigDataInterface):
# Note: this method is different from has_users: the user select will throw if the user
# table does not exist, whereas has_users assumes the table is valid
def is_valid(self):
try:
list(User.select().limit(1))
return True
except:
return False
def has_users(self):
return bool(list(User.select().limit(1)))
def create_superuser(self, username, password, email):
return model.user.create_user(username, password, email, auto_verify=True).uuid
def has_federated_login(self, username, service_name):
user = model.user.get_user(username)
if user is None:
return False
return bool(model.user.lookup_federated_login(user, service_name))
def attach_federated_login(self, username, service_name, federated_username):
user = model.user.get_user(username)
if user is None:
return False
model.user.attach_federated_login(user, service_name, federated_username)
pre_oci_model = PreOCIModel()

config_app/config_endpoints/api/superuser.py
@@ -0,0 +1,248 @@
import logging
import pathvalidate
import os
import subprocess
from datetime import datetime
from flask import request, jsonify, make_response
from endpoints.exception import NotFound
from data.database import ServiceKeyApprovalType
from util.config.validator import EXTRA_CA_DIRECTORY
from config_app.config_endpoints.exception import InvalidRequest
from config_app.config_endpoints.api import resource, ApiResource, nickname, log_action, validate_json_request
from config_app.config_endpoints.api.superuser_models_pre_oci import (pre_oci_model,
                                                                      ServiceKeyDoesNotExist,
                                                                      ServiceKeyAlreadyApproved)
from config_app.config_util.ssl import load_certificate, CertInvalidException
from config_app.c_app import app, config_provider, INIT_SCRIPTS_LOCATION
logger = logging.getLogger(__name__)
@resource('/v1/superuser/customcerts/<certpath>')
class SuperUserCustomCertificate(ApiResource):
""" Resource for managing a custom certificate. """
@nickname('uploadCustomCertificate')
def post(self, certpath):
uploaded_file = request.files['file']
if not uploaded_file:
raise InvalidRequest('Missing certificate file')
# Save the certificate.
certpath = pathvalidate.sanitize_filename(certpath)
if not certpath.endswith('.crt'):
raise InvalidRequest('Invalid certificate file: must have suffix `.crt`')
logger.debug('Saving custom certificate %s', certpath)
cert_full_path = config_provider.get_volume_path(EXTRA_CA_DIRECTORY, certpath)
config_provider.save_volume_file(cert_full_path, uploaded_file)
logger.debug('Saved custom certificate %s', certpath)
# Validate the certificate.
try:
logger.debug('Loading custom certificate %s', certpath)
with config_provider.get_volume_file(cert_full_path) as f:
load_certificate(f.read())
except CertInvalidException:
logger.exception('Got certificate invalid error for cert %s', certpath)
return '', 204
except IOError:
logger.exception('Got IO error for cert %s', certpath)
return '', 204
# Call the update script with config dir location to install the certificate immediately.
if not app.config['TESTING']:
cert_dir = os.path.join(config_provider.get_config_dir_path(), EXTRA_CA_DIRECTORY)
if subprocess.call([os.path.join(INIT_SCRIPTS_LOCATION, 'certs_install.sh')], env={ 'CERTDIR': cert_dir }) != 0:
raise Exception('Could not install certificates')
return '', 204
@nickname('deleteCustomCertificate')
def delete(self, certpath):
cert_full_path = config_provider.get_volume_path(EXTRA_CA_DIRECTORY, certpath)
config_provider.remove_volume_file(cert_full_path)
return '', 204
@resource('/v1/superuser/customcerts')
class SuperUserCustomCertificates(ApiResource):
""" Resource for managing custom certificates. """
@nickname('getCustomCertificates')
def get(self):
has_extra_certs_path = config_provider.volume_file_exists(EXTRA_CA_DIRECTORY)
extra_certs_found = config_provider.list_volume_directory(EXTRA_CA_DIRECTORY)
if extra_certs_found is None:
return {
'status': 'file' if has_extra_certs_path else 'none',
}
cert_views = []
for extra_cert_path in extra_certs_found:
try:
cert_full_path = config_provider.get_volume_path(EXTRA_CA_DIRECTORY, extra_cert_path)
with config_provider.get_volume_file(cert_full_path) as f:
certificate = load_certificate(f.read())
cert_views.append({
'path': extra_cert_path,
'names': list(certificate.names),
'expired': certificate.expired,
})
except CertInvalidException as cie:
cert_views.append({
'path': extra_cert_path,
'error': cie.message,
})
except IOError as ioe:
cert_views.append({
'path': extra_cert_path,
'error': ioe.message,
})
return {
'status': 'directory',
'certs': cert_views,
}
@resource('/v1/superuser/keys')
class SuperUserServiceKeyManagement(ApiResource):
""" Resource for managing service keys."""
schemas = {
'CreateServiceKey': {
'id': 'CreateServiceKey',
'type': 'object',
'description': 'Description of creation of a service key',
'required': ['service', 'expiration'],
'properties': {
'service': {
'type': 'string',
'description': 'The service authenticating with this key',
},
'name': {
'type': 'string',
'description': 'The friendly name of a service key',
},
'metadata': {
'type': 'object',
'description': 'The key/value pairs of this key\'s metadata',
},
'notes': {
'type': 'string',
'description': 'If specified, the extra notes for the key',
},
'expiration': {
'description': 'The expiration date as a unix timestamp',
'anyOf': [{'type': 'number'}, {'type': 'null'}],
},
},
},
}
@nickname('listServiceKeys')
def get(self):
keys = pre_oci_model.list_all_service_keys()
return jsonify({
'keys': [key.to_dict() for key in keys],
})
@nickname('createServiceKey')
@validate_json_request('CreateServiceKey')
def post(self):
body = request.get_json()
# Ensure we have a valid expiration date if specified.
expiration_date = body.get('expiration', None)
if expiration_date is not None:
try:
expiration_date = datetime.utcfromtimestamp(float(expiration_date))
except ValueError as ve:
raise InvalidRequest('Invalid expiration date: %s' % ve)
if expiration_date <= datetime.now():
raise InvalidRequest('Expiration date cannot be in the past')
# Create the metadata for the key.
metadata = body.get('metadata', {})
metadata.update({
'created_by': 'Quay Superuser Panel',
'ip': request.remote_addr,
})
# Generate a key with a private key that we *never save*.
(private_key, key_id) = pre_oci_model.generate_service_key(body['service'], expiration_date,
metadata=metadata,
name=body.get('name', ''))
# Auto-approve the service key.
pre_oci_model.approve_service_key(key_id, ServiceKeyApprovalType.SUPERUSER,
notes=body.get('notes', ''))
# Log the creation and auto-approval of the service key.
key_log_metadata = {
'kid': key_id,
'preshared': True,
'service': body['service'],
'name': body.get('name', ''),
'expiration_date': expiration_date,
'auto_approved': True,
}
log_action('service_key_create', None, key_log_metadata)
log_action('service_key_approve', None, key_log_metadata)
return jsonify({
'kid': key_id,
'name': body.get('name', ''),
'service': body['service'],
'public_key': private_key.publickey().exportKey('PEM'),
'private_key': private_key.exportKey('PEM'),
})
@resource('/v1/superuser/approvedkeys/<kid>')
class SuperUserServiceKeyApproval(ApiResource):
""" Resource for approving service keys. """
schemas = {
'ApproveServiceKey': {
'id': 'ApproveServiceKey',
'type': 'object',
'description': 'Information for approving service keys',
'properties': {
'notes': {
'type': 'string',
'description': 'Optional approval notes',
},
},
},
}
@nickname('approveServiceKey')
@validate_json_request('ApproveServiceKey')
def post(self, kid):
notes = request.get_json().get('notes', '')
try:
key = pre_oci_model.approve_service_key(kid, ServiceKeyApprovalType.SUPERUSER, notes=notes)
# Log the approval of the service key.
key_log_metadata = {
'kid': kid,
'service': key.service,
'name': key.name,
'expiration_date': key.expiration_date,
}
# Note: this may not actually be the current person modifying the config, but if they're in the config tool,
# they have full access to the DB and could pretend to be any user, so pulling any superuser is likely fine
super_user = app.config.get('SUPER_USERS', [None])[0]
log_action('service_key_approve', super_user, key_log_metadata)
except ServiceKeyDoesNotExist:
raise NotFound()
except ServiceKeyAlreadyApproved:
pass
return make_response('', 201)
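
Per the CreateServiceKey schema and handler above, POSTing to /v1/superuser/keys both mints and auto-approves a key, and the response is the only place the private key is ever surfaced. An illustrative request/response pair (all values made up):

create_service_key_request = {
  'service': 'quay',
  'name': 'local-key',
  'expiration': 1609459200,  # unix timestamp; null/None means no expiry
  'notes': 'created from the config tool',
}

create_service_key_response = {
  'kid': 'abc123',  # generated key id; also logged in service_key_create
  'name': 'local-key',
  'service': 'quay',
  'public_key': '-----BEGIN PUBLIC KEY-----...',
  'private_key': '-----BEGIN RSA PRIVATE KEY-----...',  # never stored server-side
}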

config_app/config_endpoints/api/superuser_models_interface.py
@@ -0,0 +1,173 @@
from abc import ABCMeta, abstractmethod
from collections import namedtuple
from six import add_metaclass
from config_app.config_endpoints.api import format_date
def user_view(user):
return {
'name': user.username,
'kind': 'user',
'is_robot': user.robot,
}
class RepositoryBuild(namedtuple('RepositoryBuild',
['uuid', 'logs_archived', 'repository_namespace_user_username',
'repository_name',
'can_write', 'can_read', 'pull_robot', 'resource_key', 'trigger',
'display_name',
'started', 'job_config', 'phase', 'status', 'error',
'archive_url'])):
"""
RepositoryBuild represents a build associated with a repository
:type uuid: string
:type logs_archived: boolean
:type repository_namespace_user_username: string
:type repository_name: string
:type can_write: boolean
:type can_read: boolean
:type pull_robot: User
:type resource_key: string
:type trigger: Trigger
:type display_name: string
:type started: Date
:type job_config: {Any -> Any}
:type phase: string
:type status: string
:type error: string
:type archive_url: string
"""
def to_dict(self):
resp = {
'id': self.uuid,
'phase': self.phase,
'started': format_date(self.started),
'display_name': self.display_name,
'status': self.status or {},
'subdirectory': self.job_config.get('build_subdir', ''),
'dockerfile_path': self.job_config.get('build_subdir', ''),
'context': self.job_config.get('context', ''),
'tags': self.job_config.get('docker_tags', []),
'manual_user': self.job_config.get('manual_user', None),
'is_writer': self.can_write,
'trigger': self.trigger.to_dict(),
'trigger_metadata': self.job_config.get('trigger_metadata', None) if self.can_read else None,
'resource_key': self.resource_key,
'pull_robot': user_view(self.pull_robot) if self.pull_robot else None,
'repository': {
'namespace': self.repository_namespace_user_username,
'name': self.repository_name
},
'error': self.error,
}
if self.can_write:
if self.resource_key is not None:
resp['archive_url'] = self.archive_url
elif self.job_config.get('archive_url', None):
resp['archive_url'] = self.job_config['archive_url']
return resp
class Approval(namedtuple('Approval', ['approver', 'approval_type', 'approved_date', 'notes'])):
"""
Approval represents whether a key has been approved or not
:type approver: User
:type approval_type: string
:type approved_date: Date
:type notes: string
"""
def to_dict(self):
return {
'approver': self.approver.to_dict() if self.approver else None,
'approval_type': self.approval_type,
'approved_date': self.approved_date,
'notes': self.notes,
}
class ServiceKey(
namedtuple('ServiceKey', ['name', 'kid', 'service', 'jwk', 'metadata', 'created_date',
'expiration_date', 'rotation_duration', 'approval'])):
"""
ServiceKey is an apostille signing key
:type name: string
:type kid: string
:type service: string
:type jwk: string
:type metadata: string
:type created_date: Date
:type expiration_date: Date
:type rotation_duration: Date
:type approval: Approval
"""
def to_dict(self):
return {
'name': self.name,
'kid': self.kid,
'service': self.service,
'jwk': self.jwk,
'metadata': self.metadata,
'created_date': self.created_date,
'expiration_date': self.expiration_date,
'rotation_duration': self.rotation_duration,
'approval': self.approval.to_dict() if self.approval is not None else None,
}
class User(namedtuple('User', ['username', 'email', 'verified', 'enabled', 'robot'])):
"""
User represents a single user.
:type username: string
:type email: string
:type verified: boolean
:type enabled: boolean
:type robot: boolean
"""
def to_dict(self):
user_data = {
'kind': 'user',
'name': self.username,
'username': self.username,
'email': self.email,
'verified': self.verified,
'enabled': self.enabled,
}
return user_data
class Organization(namedtuple('Organization', ['username', 'email'])):
"""
Organization represents a single org.
:type username: string
:type email: string
"""
def to_dict(self):
return {
'name': self.username,
'email': self.email,
}
@add_metaclass(ABCMeta)
class SuperuserDataInterface(object):
"""
Interface that represents all data store interactions required by a superuser api.
"""
@abstractmethod
def list_all_service_keys(self):
"""
Returns a list of service keys
"""

config_app/config_endpoints/api/superuser_models_pre_oci.py
@@ -0,0 +1,60 @@
from data import model
from config_app.config_endpoints.api.superuser_models_interface import (SuperuserDataInterface, User, ServiceKey,
Approval)
def _create_user(user):
if user is None:
return None
return User(user.username, user.email, user.verified, user.enabled, user.robot)
def _create_key(key):
approval = None
if key.approval is not None:
approval = Approval(_create_user(key.approval.approver), key.approval.approval_type,
key.approval.approved_date,
key.approval.notes)
return ServiceKey(key.name, key.kid, key.service, key.jwk, key.metadata, key.created_date,
key.expiration_date,
key.rotation_duration, approval)
class ServiceKeyDoesNotExist(Exception):
pass
class ServiceKeyAlreadyApproved(Exception):
pass
class PreOCIModel(SuperuserDataInterface):
"""
PreOCIModel implements the data model for the SuperUser using a database schema
before it was changed to support the OCI specification.
"""
def list_all_service_keys(self):
keys = model.service_keys.list_all_keys()
return [_create_key(key) for key in keys]
def approve_service_key(self, kid, approval_type, notes=''):
try:
key = model.service_keys.approve_service_key(kid, approval_type, notes=notes)
return _create_key(key)
except model.ServiceKeyDoesNotExist:
raise ServiceKeyDoesNotExist
except model.ServiceKeyAlreadyApproved:
raise ServiceKeyAlreadyApproved
def generate_service_key(self, service, expiration_date, kid=None, name='', metadata=None,
rotation_duration=None):
(private_key, key) = model.service_keys.generate_service_key(service, expiration_date,
metadata=metadata, name=name)
return private_key, key.kid
pre_oci_model = PreOCIModel()

config_app/config_endpoints/api/tar_config_loader.py
@@ -0,0 +1,62 @@
import os
import tempfile
import tarfile
from contextlib import closing
from flask import request, make_response, send_file
from data.database import configure
from config_app.c_app import app, config_provider
from config_app.config_endpoints.api import resource, ApiResource, nickname
from config_app.config_util.tar import tarinfo_filter_partial, strip_absolute_path_and_add_trailing_dir
@resource('/v1/configapp/initialization')
class ConfigInitialization(ApiResource):
"""
Resource for dealing with any initialization logic for the config app
"""
@nickname('scStartNewConfig')
def post(self):
config_provider.new_config_dir()
return make_response('OK')
@resource('/v1/configapp/tarconfig')
class TarConfigLoader(ApiResource):
"""
Resource for dealing with configuration as a tarball,
including loading and generating functions
"""
@nickname('scGetConfigTarball')
def get(self):
config_path = config_provider.get_config_dir_path()
tar_dir_prefix = strip_absolute_path_and_add_trailing_dir(config_path)
temp = tempfile.NamedTemporaryFile()
with closing(tarfile.open(temp.name, mode="w|gz")) as tar:
for name in os.listdir(config_path):
tar.add(os.path.join(config_path, name), filter=tarinfo_filter_partial(tar_dir_prefix))
return send_file(temp.name, mimetype='application/gzip')
@nickname('scUploadTarballConfig')
def put(self):
""" Loads tarball config into the config provider """
# Generate a new empty dir to load the config into
config_provider.new_config_dir()
input_stream = request.stream
with tarfile.open(mode="r|gz", fileobj=input_stream) as tar_stream:
tar_stream.extractall(config_provider.get_config_dir_path())
config_provider.create_copy_of_config_dir()
# now try to connect to the db provided in their config to validate it works
combined = dict(**app.config)
combined.update(config_provider.get_config())
configure(combined)
return make_response('OK')
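
The tarball endpoints are the config app's import/export story: GET streams the live config dir as a gzipped tar, PUT replaces it and re-runs configure() against whatever database the uploaded config names. A hedged round-trip sketch (host and filename are assumptions):

import requests

base = 'http://localhost:8080'

# Download the current configuration bundle.
with open('quay-config.tar.gz', 'wb') as f:
  f.write(requests.get(base + '/v1/configapp/tarconfig').content)

# ... edit config.yaml inside the tarball offline ...

# Upload it back; the server unpacks it into a fresh config dir.
with open('quay-config.tar.gz', 'rb') as f:
  requests.put(base + '/v1/configapp/tarconfig', data=f)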

config_app/config_endpoints/api/user.py
@@ -0,0 +1,14 @@
from auth.auth_context import get_authenticated_user
from config_app.config_endpoints.api import resource, ApiResource, nickname
from config_app.config_endpoints.api.superuser_models_interface import user_view
@resource('/v1/user/')
class User(ApiResource):
""" Operations related to users. """
@nickname('getLoggedInUser')
def get(self):
""" Get user information for the authenticated user. """
user = get_authenticated_user()
return user_view(user)