Change spacing from 4 spaces to 2 spaces

Sam Chow 2018-08-15 15:32:24 -04:00
parent ec14007268
commit efa66d84e4
28 changed files with 936 additions and 913 deletions
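
This is a mechanical re-indentation: each level of 4-space indentation becomes 2 spaces (a few long lines are also re-wrapped to fit the line length), with no change in behavior. A before/after sketch of the pattern, using the log_action helper from the first hunk below; model and request are module-level imports in that file, and the 4-space/2-space widths are taken from the commit title.

# Before: 4-space indents
def log_action(kind, user_or_orgname, metadata=None, repo=None, repo_name=None):
    if not metadata:
        metadata = {}

    if repo:
        repo_name = repo.name

    model.log.log_action(kind, user_or_orgname, repo_name, user_or_orgname, request.remote_addr, metadata)

# After: 2-space indents, identical logic
def log_action(kind, user_or_orgname, metadata=None, repo=None, repo_name=None):
  if not metadata:
    metadata = {}

  if repo:
    repo_name = repo.name

  model.log.log_action(kind, user_or_orgname, repo_name, user_or_orgname, request.remote_addr, metadata)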

@@ -31,13 +31,13 @@ api = ApiExceptionHandlingApi()
api.init_app(api_bp)
def log_action(kind, user_or_orgname, metadata=None, repo=None, repo_name=None):
if not metadata:
metadata = {}
if not metadata:
metadata = {}
if repo:
repo_name = repo.name
if repo:
repo_name = repo.name
model.log.log_action(kind, user_or_orgname, repo_name, user_or_orgname, request.remote_addr, metadata)
model.log.log_action(kind, user_or_orgname, repo_name, user_or_orgname, request.remote_addr, metadata)
def format_date(date):
""" Output an RFC822 date format. """

@@ -7,247 +7,248 @@ from config_app.c_app import app
from config_app.config_endpoints.api import method_metadata
from config_app.config_endpoints.common import fully_qualified_name, PARAM_REGEX, TYPE_CONVERTER
logger = logging.getLogger(__name__)
def generate_route_data():
include_internal = True
compact = True
include_internal = True
compact = True
def swagger_parameter(name, description, kind='path', param_type='string', required=True,
enum=None, schema=None):
# https://github.com/swagger-api/swagger-spec/blob/master/versions/2.0.md#parameterObject
parameter_info = {
'name': name,
'in': kind,
'required': required
def swagger_parameter(name, description, kind='path', param_type='string', required=True,
enum=None, schema=None):
# https://github.com/swagger-api/swagger-spec/blob/master/versions/2.0.md#parameterObject
parameter_info = {
'name': name,
'in': kind,
'required': required
}
if schema:
parameter_info['schema'] = {
'$ref': '#/definitions/%s' % schema
}
else:
parameter_info['type'] = param_type
if enum is not None and len(list(enum)) > 0:
parameter_info['enum'] = list(enum)
return parameter_info
paths = {}
models = {}
tags = []
tags_added = set()
operation_ids = set()
for rule in app.url_map.iter_rules():
endpoint_method = app.view_functions[rule.endpoint]
# Verify that we have a view class for this API method.
if not 'view_class' in dir(endpoint_method):
continue
view_class = endpoint_method.view_class
# Hide the class if it is internal.
internal = method_metadata(view_class, 'internal')
if not include_internal and internal:
continue
# Build the tag.
parts = fully_qualified_name(view_class).split('.')
tag_name = parts[-2]
if not tag_name in tags_added:
tags_added.add(tag_name)
tags.append({
'name': tag_name,
'description': (sys.modules[view_class.__module__].__doc__ or '').strip()
})
# Build the Swagger data for the path.
swagger_path = PARAM_REGEX.sub(r'{\2}', rule.rule)
full_name = fully_qualified_name(view_class)
path_swagger = {
'x-name': full_name,
'x-path': swagger_path,
'x-tag': tag_name
}
related_user_res = method_metadata(view_class, 'related_user_resource')
if related_user_res is not None:
path_swagger['x-user-related'] = fully_qualified_name(related_user_res)
paths[swagger_path] = path_swagger
# Add any global path parameters.
param_data_map = view_class.__api_path_params if '__api_path_params' in dir(
view_class) else {}
if param_data_map:
path_parameters_swagger = []
for path_parameter in param_data_map:
description = param_data_map[path_parameter].get('description')
path_parameters_swagger.append(swagger_parameter(path_parameter, description))
path_swagger['parameters'] = path_parameters_swagger
# Add the individual HTTP operations.
method_names = list(rule.methods.difference(['HEAD', 'OPTIONS']))
for method_name in method_names:
# https://github.com/swagger-api/swagger-spec/blob/master/versions/2.0.md#operation-object
method = getattr(view_class, method_name.lower(), None)
if method is None:
logger.debug('Unable to find method for %s in class %s', method_name, view_class)
continue
operationId = method_metadata(method, 'nickname')
operation_swagger = {
'operationId': operationId,
'parameters': [],
}
if operationId is None:
continue
if operationId in operation_ids:
raise Exception('Duplicate operation Id: %s' % operationId)
operation_ids.add(operationId)
# Mark the method as internal.
internal = method_metadata(method, 'internal')
if internal is not None:
operation_swagger['x-internal'] = True
if include_internal:
requires_fresh_login = method_metadata(method, 'requires_fresh_login')
if requires_fresh_login is not None:
operation_swagger['x-requires-fresh-login'] = True
# Add the path parameters.
if rule.arguments:
for path_parameter in rule.arguments:
description = param_data_map.get(path_parameter, {}).get('description')
operation_swagger['parameters'].append(
swagger_parameter(path_parameter, description))
# Add the query parameters.
if '__api_query_params' in dir(method):
for query_parameter_info in method.__api_query_params:
name = query_parameter_info['name']
description = query_parameter_info['help']
param_type = TYPE_CONVERTER[query_parameter_info['type']]
required = query_parameter_info['required']
operation_swagger['parameters'].append(
swagger_parameter(name, description, kind='query',
param_type=param_type,
required=required,
enum=query_parameter_info['choices']))
# Add the OAuth security block.
# https://github.com/swagger-api/swagger-spec/blob/master/versions/2.0.md#securityRequirementObject
scope = method_metadata(method, 'oauth2_scope')
if scope and not compact:
operation_swagger['security'] = [{'oauth2_implicit': [scope.scope]}]
# Add the responses block.
# https://github.com/swagger-api/swagger-spec/blob/master/versions/2.0.md#responsesObject
response_schema_name = method_metadata(method, 'response_schema')
if not compact:
if response_schema_name:
models[response_schema_name] = view_class.schemas[response_schema_name]
models['ApiError'] = {
'type': 'object',
'properties': {
'status': {
'type': 'integer',
'description': 'Status code of the response.'
},
'type': {
'type': 'string',
'description': 'Reference to the type of the error.'
},
'detail': {
'type': 'string',
'description': 'Details about the specific instance of the error.'
},
'title': {
'type': 'string',
'description': 'Unique error code to identify the type of error.'
},
'error_message': {
'type': 'string',
'description': 'Deprecated; alias for detail'
},
'error_type': {
'type': 'string',
'description': 'Deprecated; alias for detail'
}
},
'required': [
'status',
'type',
'title',
]
}
if schema:
parameter_info['schema'] = {
'$ref': '#/definitions/%s' % schema
}
responses = {
'400': {
'description': 'Bad Request',
},
'401': {
'description': 'Session required',
},
'403': {
'description': 'Unauthorized access',
},
'404': {
'description': 'Not found',
},
}
for _, body in responses.items():
body['schema'] = {'$ref': '#/definitions/ApiError'}
if method_name == 'DELETE':
responses['204'] = {
'description': 'Deleted'
}
elif method_name == 'POST':
responses['201'] = {
'description': 'Successful creation'
}
else:
parameter_info['type'] = param_type
responses['200'] = {
'description': 'Successful invocation'
}
if enum is not None and len(list(enum)) > 0:
parameter_info['enum'] = list(enum)
return parameter_info
paths = {}
models = {}
tags = []
tags_added = set()
operation_ids = set()
for rule in app.url_map.iter_rules():
endpoint_method = app.view_functions[rule.endpoint]
# Verify that we have a view class for this API method.
if not 'view_class' in dir(endpoint_method):
continue
view_class = endpoint_method.view_class
# Hide the class if it is internal.
internal = method_metadata(view_class, 'internal')
if not include_internal and internal:
continue
# Build the tag.
parts = fully_qualified_name(view_class).split('.')
tag_name = parts[-2]
if not tag_name in tags_added:
tags_added.add(tag_name)
tags.append({
'name': tag_name,
'description': (sys.modules[view_class.__module__].__doc__ or '').strip()
})
# Build the Swagger data for the path.
swagger_path = PARAM_REGEX.sub(r'{\2}', rule.rule)
full_name = fully_qualified_name(view_class)
path_swagger = {
'x-name': full_name,
'x-path': swagger_path,
'x-tag': tag_name
}
related_user_res = method_metadata(view_class, 'related_user_resource')
if related_user_res is not None:
path_swagger['x-user-related'] = fully_qualified_name(related_user_res)
paths[swagger_path] = path_swagger
# Add any global path parameters.
param_data_map = view_class.__api_path_params if '__api_path_params' in dir(view_class) else {}
if param_data_map:
path_parameters_swagger = []
for path_parameter in param_data_map:
description = param_data_map[path_parameter].get('description')
path_parameters_swagger.append(swagger_parameter(path_parameter, description))
path_swagger['parameters'] = path_parameters_swagger
# Add the individual HTTP operations.
method_names = list(rule.methods.difference(['HEAD', 'OPTIONS']))
for method_name in method_names:
# https://github.com/swagger-api/swagger-spec/blob/master/versions/2.0.md#operation-object
method = getattr(view_class, method_name.lower(), None)
if method is None:
logger.debug('Unable to find method for %s in class %s', method_name, view_class)
continue
operationId = method_metadata(method, 'nickname')
operation_swagger = {
'operationId': operationId,
'parameters': [],
if response_schema_name:
responses['200']['schema'] = {
'$ref': '#/definitions/%s' % response_schema_name
}
if operationId is None:
continue
operation_swagger['responses'] = responses
if operationId in operation_ids:
raise Exception('Duplicate operation Id: %s' % operationId)
# Add the request block.
request_schema_name = method_metadata(method, 'request_schema')
if request_schema_name and not compact:
models[request_schema_name] = view_class.schemas[request_schema_name]
operation_ids.add(operationId)
operation_swagger['parameters'].append(
swagger_parameter('body', 'Request body contents.', kind='body',
schema=request_schema_name))
# Mark the method as internal.
internal = method_metadata(method, 'internal')
if internal is not None:
operation_swagger['x-internal'] = True
# Add the operation to the parent path.
if not internal or (internal and include_internal):
path_swagger[method_name.lower()] = operation_swagger
if include_internal:
requires_fresh_login = method_metadata(method, 'requires_fresh_login')
if requires_fresh_login is not None:
operation_swagger['x-requires-fresh-login'] = True
tags.sort(key=lambda t: t['name'])
paths = OrderedDict(sorted(paths.items(), key=lambda p: p[1]['x-tag']))
# Add the path parameters.
if rule.arguments:
for path_parameter in rule.arguments:
description = param_data_map.get(path_parameter, {}).get('description')
operation_swagger['parameters'].append(swagger_parameter(path_parameter, description))
# Add the query parameters.
if '__api_query_params' in dir(method):
for query_parameter_info in method.__api_query_params:
name = query_parameter_info['name']
description = query_parameter_info['help']
param_type = TYPE_CONVERTER[query_parameter_info['type']]
required = query_parameter_info['required']
operation_swagger['parameters'].append(
swagger_parameter(name, description, kind='query',
param_type=param_type,
required=required,
enum=query_parameter_info['choices']))
# Add the OAuth security block.
# https://github.com/swagger-api/swagger-spec/blob/master/versions/2.0.md#securityRequirementObject
scope = method_metadata(method, 'oauth2_scope')
if scope and not compact:
operation_swagger['security'] = [{'oauth2_implicit': [scope.scope]}]
# Add the responses block.
# https://github.com/swagger-api/swagger-spec/blob/master/versions/2.0.md#responsesObject
response_schema_name = method_metadata(method, 'response_schema')
if not compact:
if response_schema_name:
models[response_schema_name] = view_class.schemas[response_schema_name]
models['ApiError'] = {
'type': 'object',
'properties': {
'status': {
'type': 'integer',
'description': 'Status code of the response.'
},
'type': {
'type': 'string',
'description': 'Reference to the type of the error.'
},
'detail': {
'type': 'string',
'description': 'Details about the specific instance of the error.'
},
'title': {
'type': 'string',
'description': 'Unique error code to identify the type of error.'
},
'error_message': {
'type': 'string',
'description': 'Deprecated; alias for detail'
},
'error_type': {
'type': 'string',
'description': 'Deprecated; alias for detail'
}
},
'required': [
'status',
'type',
'title',
]
}
responses = {
'400': {
'description': 'Bad Request',
},
'401': {
'description': 'Session required',
},
'403': {
'description': 'Unauthorized access',
},
'404': {
'description': 'Not found',
},
}
for _, body in responses.items():
body['schema'] = {'$ref': '#/definitions/ApiError'}
if method_name == 'DELETE':
responses['204'] = {
'description': 'Deleted'
}
elif method_name == 'POST':
responses['201'] = {
'description': 'Successful creation'
}
else:
responses['200'] = {
'description': 'Successful invocation'
}
if response_schema_name:
responses['200']['schema'] = {
'$ref': '#/definitions/%s' % response_schema_name
}
operation_swagger['responses'] = responses
# Add the request block.
request_schema_name = method_metadata(method, 'request_schema')
if request_schema_name and not compact:
models[request_schema_name] = view_class.schemas[request_schema_name]
operation_swagger['parameters'].append(
swagger_parameter('body', 'Request body contents.', kind='body',
schema=request_schema_name))
# Add the operation to the parent path.
if not internal or (internal and include_internal):
path_swagger[method_name.lower()] = operation_swagger
tags.sort(key=lambda t: t['name'])
paths = OrderedDict(sorted(paths.items(), key=lambda p: p[1]['x-tag']))
if compact:
return {'paths': paths}
if compact:
return {'paths': paths}

@@ -3,7 +3,8 @@ import logging
from flask import abort, request
from config_app.config_endpoints.api.suconfig_models_pre_oci import pre_oci_model as model
from config_app.config_endpoints.api import resource, ApiResource, nickname, validate_json_request, kubernetes_only
from config_app.config_endpoints.api import resource, ApiResource, nickname, validate_json_request, \
kubernetes_only
from config_app.c_app import (app, config_provider, superusers, ip_resolver,
instance_keys, INIT_SCRIPTS_LOCATION)
from config_app.config_util.k8saccessor import KubernetesAccessorSingleton
@@ -11,7 +12,8 @@ from config_app.config_util.k8saccessor import KubernetesAccessorSingleton
from data.database import configure
from data.runmigration import run_alembic_migration
from util.config.configutil import add_enterprise_config_defaults
from util.config.validator import validate_service_for_config, ValidatorContext, is_valid_config_upload_filename
from util.config.validator import validate_service_for_config, ValidatorContext, \
is_valid_config_upload_filename
logger = logging.getLogger(__name__)
@@ -85,6 +87,7 @@ class SuperUserRegistryStatus(ApiResource):
""" Resource for determining the status of the registry, such as if config exists,
if a database is configured, and if it has any defined users.
"""
@nickname('scRegistryStatus')
def get(self):
""" Returns the status of the registry. """
@@ -121,6 +124,7 @@ class _AlembicLogHandler(logging.Handler):
@resource('/v1/superuser/setupdb')
class SuperUserSetupDatabase(ApiResource):
""" Resource for invoking alembic to setup the database. """
@nickname('scSetupDatabase')
def get(self):
""" Invokes the alembic upgrade process. """
@@ -251,7 +255,8 @@ class SuperUserConfigValidate(ApiResource):
# so we also allow it to be called if there is no valid registry configuration setup. Note that
# this is also safe since this method does not access any information not given in the request.
config = request.get_json()['config']
validator_context = ValidatorContext.from_app(app, config, request.get_json().get('password', ''),
validator_context = ValidatorContext.from_app(app, config,
request.get_json().get('password', ''),
instance_keys=instance_keys,
ip_resolver=ip_resolver,
config_provider=config_provider,
@@ -294,6 +299,7 @@ class SuperUserKubernetesDeployment(ApiResource):
@resource('/v1/superuser/config/kubernetes')
class SuperUserKubernetesConfiguration(ApiResource):
""" Resource for saving the config files to kubernetes secrets. """
@kubernetes_only
@nickname('scDeployConfiguration')
def post(self):
@@ -303,6 +309,7 @@ class SuperUserKubernetesConfiguration(ApiResource):
@resource('/v1/superuser/config/file/<filename>')
class SuperUserConfigFile(ApiResource):
""" Resource for fetching the status of config files and overriding them. """
@nickname('scConfigFileExists')
def get(self, filename):
""" Returns whether the configuration file with the given name exists. """
@@ -313,7 +320,6 @@ class SuperUserConfigFile(ApiResource):
'exists': config_provider.volume_file_exists(filename)
}
@nickname('scUpdateConfigFile')
def post(self, filename):
""" Updates the configuration file with the given name. """

@@ -4,36 +4,36 @@ from six import add_metaclass
@add_metaclass(ABCMeta)
class SuperuserConfigDataInterface(object):
"""
Interface that represents all data store interactions required by the superuser config API.
"""
@abstractmethod
def is_valid(self):
"""
Interface that represents all data store interactions required by the superuser config API.
Returns true if the configured database is valid.
"""
@abstractmethod
def is_valid(self):
"""
Returns true if the configured database is valid.
"""
@abstractmethod
def has_users(self):
"""
Returns true if there are any users defined.
"""
@abstractmethod
def has_users(self):
"""
Returns true if there are any users defined.
"""
@abstractmethod
def create_superuser(self, username, password, email):
"""
Creates a new superuser with the given username, password and email. Returns the user's UUID.
"""
@abstractmethod
def create_superuser(self, username, password, email):
"""
Creates a new superuser with the given username, password and email. Returns the user's UUID.
"""
@abstractmethod
def has_federated_login(self, username, service_name):
"""
Returns true if the matching user has a federated login under the matching service.
"""
@abstractmethod
def has_federated_login(self, username, service_name):
"""
Returns true if the matching user has a federated login under the matching service.
"""
@abstractmethod
def attach_federated_login(self, username, service_name, federated_username):
"""
Attaches a federatated login to the matching user, under the given service.
"""
@abstractmethod
def attach_federated_login(self, username, service_name, federated_username):
"""
Attaches a federatated login to the matching user, under the given service.
"""

@@ -4,34 +4,34 @@ from config_app.config_endpoints.api.suconfig_models_interface import SuperuserC
class PreOCIModel(SuperuserConfigDataInterface):
# Note: this method is different than has_users: the user select will throw if the user
# table does not exist, whereas has_users assumes the table is valid
def is_valid(self):
try:
list(User.select().limit(1))
return True
except:
return False
# Note: this method is different than has_users: the user select will throw if the user
# table does not exist, whereas has_users assumes the table is valid
def is_valid(self):
try:
list(User.select().limit(1))
return True
except:
return False
def has_users(self):
return bool(list(User.select().limit(1)))
def has_users(self):
return bool(list(User.select().limit(1)))
def create_superuser(self, username, password, email):
return model.user.create_user(username, password, email, auto_verify=True).uuid
def create_superuser(self, username, password, email):
return model.user.create_user(username, password, email, auto_verify=True).uuid
def has_federated_login(self, username, service_name):
user = model.user.get_user(username)
if user is None:
return False
def has_federated_login(self, username, service_name):
user = model.user.get_user(username)
if user is None:
return False
return bool(model.user.lookup_federated_login(user, service_name))
return bool(model.user.lookup_federated_login(user, service_name))
def attach_federated_login(self, username, service_name, federated_username):
user = model.user.get_user(username)
if user is None:
return False
def attach_federated_login(self, username, service_name, federated_username):
user = model.user.get_user(username)
if user is None:
return False
model.user.attach_federated_login(user, service_name, federated_username)
model.user.attach_federated_login(user, service_name, federated_username)
pre_oci_model = PreOCIModel()

@@ -6,165 +6,168 @@ from config_app.config_endpoints.api import format_date
def user_view(user):
return {
'name': user.username,
'kind': 'user',
'is_robot': user.robot,
}
return {
'name': user.username,
'kind': 'user',
'is_robot': user.robot,
}
class RepositoryBuild(namedtuple('RepositoryBuild',
['uuid', 'logs_archived', 'repository_namespace_user_username', 'repository_name',
'can_write', 'can_read', 'pull_robot', 'resource_key', 'trigger', 'display_name',
'started', 'job_config', 'phase', 'status', 'error', 'archive_url'])):
"""
RepositoryBuild represents a build associated with a repostiory
:type uuid: string
:type logs_archived: boolean
:type repository_namespace_user_username: string
:type repository_name: string
:type can_write: boolean
:type can_write: boolean
:type pull_robot: User
:type resource_key: string
:type trigger: Trigger
:type display_name: string
:type started: boolean
:type job_config: {Any -> Any}
:type phase: string
:type status: string
:type error: string
:type archive_url: string
"""
['uuid', 'logs_archived', 'repository_namespace_user_username',
'repository_name',
'can_write', 'can_read', 'pull_robot', 'resource_key', 'trigger',
'display_name',
'started', 'job_config', 'phase', 'status', 'error',
'archive_url'])):
"""
RepositoryBuild represents a build associated with a repostiory
:type uuid: string
:type logs_archived: boolean
:type repository_namespace_user_username: string
:type repository_name: string
:type can_write: boolean
:type can_write: boolean
:type pull_robot: User
:type resource_key: string
:type trigger: Trigger
:type display_name: string
:type started: boolean
:type job_config: {Any -> Any}
:type phase: string
:type status: string
:type error: string
:type archive_url: string
"""
def to_dict(self):
def to_dict(self):
resp = {
'id': self.uuid,
'phase': self.phase,
'started': format_date(self.started),
'display_name': self.display_name,
'status': self.status or {},
'subdirectory': self.job_config.get('build_subdir', ''),
'dockerfile_path': self.job_config.get('build_subdir', ''),
'context': self.job_config.get('context', ''),
'tags': self.job_config.get('docker_tags', []),
'manual_user': self.job_config.get('manual_user', None),
'is_writer': self.can_write,
'trigger': self.trigger.to_dict(),
'trigger_metadata': self.job_config.get('trigger_metadata', None) if self.can_read else None,
'resource_key': self.resource_key,
'pull_robot': user_view(self.pull_robot) if self.pull_robot else None,
'repository': {
'namespace': self.repository_namespace_user_username,
'name': self.repository_name
},
'error': self.error,
}
resp = {
'id': self.uuid,
'phase': self.phase,
'started': format_date(self.started),
'display_name': self.display_name,
'status': self.status or {},
'subdirectory': self.job_config.get('build_subdir', ''),
'dockerfile_path': self.job_config.get('build_subdir', ''),
'context': self.job_config.get('context', ''),
'tags': self.job_config.get('docker_tags', []),
'manual_user': self.job_config.get('manual_user', None),
'is_writer': self.can_write,
'trigger': self.trigger.to_dict(),
'trigger_metadata': self.job_config.get('trigger_metadata', None) if self.can_read else None,
'resource_key': self.resource_key,
'pull_robot': user_view(self.pull_robot) if self.pull_robot else None,
'repository': {
'namespace': self.repository_namespace_user_username,
'name': self.repository_name
},
'error': self.error,
}
if self.can_write:
if self.resource_key is not None:
resp['archive_url'] = self.archive_url
elif self.job_config.get('archive_url', None):
resp['archive_url'] = self.job_config['archive_url']
if self.can_write:
if self.resource_key is not None:
resp['archive_url'] = self.archive_url
elif self.job_config.get('archive_url', None):
resp['archive_url'] = self.job_config['archive_url']
return resp
return resp
class Approval(namedtuple('Approval', ['approver', 'approval_type', 'approved_date', 'notes'])):
"""
Approval represents whether a key has been approved or not
:type approver: User
:type approval_type: string
:type approved_date: Date
:type notes: string
"""
"""
Approval represents whether a key has been approved or not
:type approver: User
:type approval_type: string
:type approved_date: Date
:type notes: string
"""
def to_dict(self):
return {
'approver': self.approver.to_dict() if self.approver else None,
'approval_type': self.approval_type,
'approved_date': self.approved_date,
'notes': self.notes,
}
def to_dict(self):
return {
'approver': self.approver.to_dict() if self.approver else None,
'approval_type': self.approval_type,
'approved_date': self.approved_date,
'notes': self.notes,
}
class ServiceKey(namedtuple('ServiceKey', ['name', 'kid', 'service', 'jwk', 'metadata', 'created_date',
'expiration_date', 'rotation_duration', 'approval'])):
"""
ServiceKey is an apostille signing key
:type name: string
:type kid: int
:type service: string
:type jwk: string
:type metadata: string
:type created_date: Date
:type expiration_date: Date
:type rotation_duration: Date
:type approval: Approval
class ServiceKey(
namedtuple('ServiceKey', ['name', 'kid', 'service', 'jwk', 'metadata', 'created_date',
'expiration_date', 'rotation_duration', 'approval'])):
"""
ServiceKey is an apostille signing key
:type name: string
:type kid: int
:type service: string
:type jwk: string
:type metadata: string
:type created_date: Date
:type expiration_date: Date
:type rotation_duration: Date
:type approval: Approval
"""
"""
def to_dict(self):
return {
'name': self.name,
'kid': self.kid,
'service': self.service,
'jwk': self.jwk,
'metadata': self.metadata,
'created_date': self.created_date,
'expiration_date': self.expiration_date,
'rotation_duration': self.rotation_duration,
'approval': self.approval.to_dict() if self.approval is not None else None,
}
def to_dict(self):
return {
'name': self.name,
'kid': self.kid,
'service': self.service,
'jwk': self.jwk,
'metadata': self.metadata,
'created_date': self.created_date,
'expiration_date': self.expiration_date,
'rotation_duration': self.rotation_duration,
'approval': self.approval.to_dict() if self.approval is not None else None,
}
class User(namedtuple('User', ['username', 'email', 'verified', 'enabled', 'robot'])):
"""
User represents a single user.
:type username: string
:type email: string
:type verified: boolean
:type enabled: boolean
:type robot: User
"""
"""
User represents a single user.
:type username: string
:type email: string
:type verified: boolean
:type enabled: boolean
:type robot: User
"""
def to_dict(self):
user_data = {
'kind': 'user',
'name': self.username,
'username': self.username,
'email': self.email,
'verified': self.verified,
'enabled': self.enabled,
}
def to_dict(self):
user_data = {
'kind': 'user',
'name': self.username,
'username': self.username,
'email': self.email,
'verified': self.verified,
'enabled': self.enabled,
}
return user_data
return user_data
class Organization(namedtuple('Organization', ['username', 'email'])):
"""
Organization represents a single org.
:type username: string
:type email: string
"""
def to_dict(self):
return {
'name': self.username,
'email': self.email,
}
"""
Organization represents a single org.
:type username: string
:type email: string
"""
def to_dict(self):
return {
'name': self.username,
'email': self.email,
}
@add_metaclass(ABCMeta)
class SuperuserDataInterface(object):
"""
Interface that represents all data store interactions required by a superuser api.
"""
@abstractmethod
def list_all_service_keys(self):
"""
Interface that represents all data store interactions required by a superuser api.
Returns a list of service keys
"""
@abstractmethod
def list_all_service_keys(self):
"""
Returns a list of service keys
"""

@@ -1,6 +1,8 @@
from data import model
from config_app.config_endpoints.api.superuser_models_interface import SuperuserDataInterface, User, ServiceKey, Approval
from config_app.config_endpoints.api.superuser_models_interface import (SuperuserDataInterface, User, ServiceKey,
Approval)
def _create_user(user):
if user is None:
@@ -11,12 +13,15 @@ def _create_user(user):
def _create_key(key):
approval = None
if key.approval is not None:
approval = Approval(_create_user(key.approval.approver), key.approval.approval_type, key.approval.approved_date,
approval = Approval(_create_user(key.approval.approver), key.approval.approval_type,
key.approval.approved_date,
key.approval.notes)
return ServiceKey(key.name, key.kid, key.service, key.jwk, key.metadata, key.created_date, key.expiration_date,
return ServiceKey(key.name, key.kid, key.service, key.jwk, key.metadata, key.created_date,
key.expiration_date,
key.rotation_duration, approval)
class ServiceKeyDoesNotExist(Exception):
pass
@@ -30,6 +35,7 @@ class PreOCIModel(SuperuserDataInterface):
PreOCIModel implements the data model for the SuperUser using a database schema
before it was changed to support the OCI specification.
"""
def list_all_service_keys(self):
keys = model.service_keys.list_all_keys()
return [_create_key(key) for key in keys]
@@ -43,8 +49,10 @@ class PreOCIModel(SuperuserDataInterface):
except model.ServiceKeyAlreadyApproved:
raise ServiceKeyAlreadyApproved
def generate_service_key(self, service, expiration_date, kid=None, name='', metadata=None, rotation_duration=None):
(private_key, key) = model.service_keys.generate_service_key(service, expiration_date, metadata=metadata, name=name)
def generate_service_key(self, service, expiration_date, kid=None, name='', metadata=None,
rotation_duration=None):
(private_key, key) = model.service_keys.generate_service_key(service, expiration_date,
metadata=metadata, name=name)
return private_key, key.kid

@@ -10,50 +10,51 @@ from config_app.c_app import app, config_provider
from config_app.config_endpoints.api import resource, ApiResource, nickname
from config_app.config_util.tar import tarinfo_filter_partial, strip_absolute_path_and_add_trailing_dir
@resource('/v1/configapp/initialization')
class ConfigInitialization(ApiResource):
"""
Resource for dealing with any initialization logic for the config app
"""
"""
Resource for dealing with any initialization logic for the config app
"""
@nickname('scStartNewConfig')
def post(self):
config_provider.new_config_dir()
return make_response('OK')
@nickname('scStartNewConfig')
def post(self):
config_provider.new_config_dir()
return make_response('OK')
@resource('/v1/configapp/tarconfig')
class TarConfigLoader(ApiResource):
"""
Resource for dealing with configuration as a tarball,
including loading and generating functions
"""
"""
Resource for dealing with configuration as a tarball,
including loading and generating functions
"""
@nickname('scGetConfigTarball')
def get(self):
config_path = config_provider.get_config_dir_path()
tar_dir_prefix = strip_absolute_path_and_add_trailing_dir(config_path)
temp = tempfile.NamedTemporaryFile()
@nickname('scGetConfigTarball')
def get(self):
config_path = config_provider.get_config_dir_path()
tar_dir_prefix = strip_absolute_path_and_add_trailing_dir(config_path)
temp = tempfile.NamedTemporaryFile()
tar = tarfile.open(temp.name, mode="w|gz")
for name in os.listdir(config_path):
tar.add(os.path.join(config_path, name), filter=tarinfo_filter_partial(tar_dir_prefix))
tar = tarfile.open(temp.name, mode="w|gz")
for name in os.listdir(config_path):
tar.add(os.path.join(config_path, name), filter=tarinfo_filter_partial(tar_dir_prefix))
tar.close()
return send_file(temp.name, mimetype='application/gzip')
tar.close()
return send_file(temp.name, mimetype='application/gzip')
@nickname('scUploadTarballConfig')
def put(self):
""" Loads tarball config into the config provider """
# Generate a new empty dir to load the config into
config_provider.new_config_dir()
input_stream = request.stream
with tarfile.open(mode="r|gz", fileobj=input_stream) as tar_stream:
tar_stream.extractall(config_provider.get_config_dir_path())
@nickname('scUploadTarballConfig')
def put(self):
""" Loads tarball config into the config provider """
# Generate a new empty dir to load the config into
config_provider.new_config_dir()
input_stream = request.stream
with tarfile.open(mode="r|gz", fileobj=input_stream) as tar_stream:
tar_stream.extractall(config_provider.get_config_dir_path())
# now try to connect to the db provided in their config to validate it works
combined = dict(**app.config)
combined.update(config_provider.get_config())
configure(combined)
# now try to connect to the db provided in their config to validate it works
combined = dict(**app.config)
combined.update(config_provider.get_config())
configure(combined)
return make_response('OK')
return make_response('OK')

@@ -5,15 +5,14 @@ from config_app.config_endpoints.api.superuser_models_interface import user_view
@resource('/v1/user/')
class User(ApiResource):
""" Operations related to users. """
""" Operations related to users. """
@nickname('getLoggedInUser')
def get(self):
""" Get user information for the authenticated user. """
user = get_authenticated_user()
# TODO(config): figure out if we need user validation
# if user is None or user.organization or not UserReadPermission(user.username).can():
# raise InvalidToken("Requires authentication", payload={'session_required': False})
return user_view(user)
@nickname('getLoggedInUser')
def get(self):
""" Get user information for the authenticated user. """
user = get_authenticated_user()
# TODO(config): figure out if we need user validation
# if user is None or user.organization or not UserReadPermission(user.username).can():
# raise InvalidToken("Requires authentication", payload={'session_required': False})
return user_view(user)