Add some basic endpoints to the config app backend
rename files to avoid overlap with quay app
This commit is contained in:
parent d080ca2cc6
commit c378e408ef
39 changed files with 3095 additions and 384 deletions
@@ -1,14 +1,12 @@
import logging

from config_app import app
from config_app.util.config import config_provider

from flask import Blueprint, request, session
from flask_restful import Resource, abort, Api, reqparse
from flask import Blueprint
from flask_restful import Resource, Api
from flask_restful.utils.cors import crossdomain

from config_app import app
from functools import partial, wraps

from jsonschema import validate, ValidationError
from config_endpoints.exception import InvalidResponse

logger = logging.getLogger(__name__)
api_bp = Blueprint('api', __name__)

@@ -17,6 +15,8 @@ CROSS_DOMAIN_HEADERS = ['Authorization', 'Content-Type', 'X-Requested-With']


class ApiExceptionHandlingApi(Api):
  pass

  @crossdomain(origin='*', headers=CROSS_DOMAIN_HEADERS)
  def handle_error(self, error):
    print('HANDLING ERROR IN API')

@@ -25,19 +25,12 @@ class ApiExceptionHandlingApi(Api):

api = ApiExceptionHandlingApi()


class HelloWorld(Resource):
  def get(self):
    print("hit the dummy endpoint")
    return {'hello': 'world'}


api.add_resource(HelloWorld, '/')

api.init_app(api_bp)


def verify_not_prod(func):
  @add_method_metadata('enterprise_only', True)
  @wraps(func)
  def wrapped(*args, **kwargs):
    # Verify that we are not running on a production (i.e. hosted) stack. If so, we fail.
    # This should never happen (because of the feature-flag on SUPER_USERS), but we want to be

@@ -58,6 +51,7 @@ def resource(*urls, **kwargs):
  if not api_resource:
    return None

  print('registering resource: ', urls)
  api_resource.registered = True
  api.add_resource(api_resource, *urls, **kwargs)
  return api_resource

@@ -105,47 +99,31 @@ def no_cache(f):
  return add_no_cache


def define_json_response(schema_name):
  def wrapper(func):
    @add_method_metadata('response_schema', schema_name)
    @wraps(func)
    def wrapped(self, *args, **kwargs):
      schema = self.schemas[schema_name]
      resp = func(self, *args, **kwargs)

      if app.config['TESTING']:
        try:
          validate(resp, schema)
        except ValidationError as ex:
          raise InvalidResponse(ex.message)

      return resp
    return wrapped
  return wrapper


nickname = partial(add_method_metadata, 'nickname')

api.init_app(api_bp)
# api.decorators = [csrf_protect(),
#                   crossdomain(origin='*', headers=CROSS_DOMAIN_HEADERS),
#                   process_oauth, time_decorator(api_bp.name, metric_queue),
#                   require_xhr_from_browser]

import config_endpoints.api
import config_endpoints.api.discovery
import config_endpoints.api.suconfig
import config_endpoints.api.superuser
import config_endpoints.api.user


@resource('/v1/superuser/config')
class SuperUserConfig(ApiResource):
  """ Resource for fetching and updating the current configuration, if any. """
  schemas = {
    'UpdateConfig': {
      'type': 'object',
      'description': 'Updates the YAML config file',
      'required': [
        'config',
        'hostname'
      ],
      'properties': {
        'config': {
          'type': 'object'
        },
        'hostname': {
          'type': 'string'
        },
        'password': {
          'type': 'string'
        },
      },
    },
  }

  @verify_not_prod
  @nickname('scGetConfig')
  def get(self):
    """ Returns the currently defined configuration, if any. """
    config_object = config_provider.get_config()
    return {
      'config': config_object
    }
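Not part of this commit, but the blueprint above can be smoke-tested end to end. A minimal sketch using Flask's test client, assuming config_endpoints.api is importable inside the config_app environment and that api_bp is registered without a URL prefix (the real registration and any prefix live outside this diff):

# Sketch only: exercises the dummy HelloWorld resource registered at '/'.
# The absence of a blueprint prefix is an assumption; adjust the path if
# config_app mounts api_bp under something like /api.
from flask import Flask
from config_endpoints.api import api_bp

sketch_app = Flask(__name__)
sketch_app.register_blueprint(api_bp)

with sketch_app.test_client() as client:
  resp = client.get('/')                        # handled by HelloWorld.get()
  assert resp.status_code == 200
  assert resp.get_json() == {'hello': 'world'}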
252  config_app/config_endpoints/api/discovery.py  Normal file
@@ -0,0 +1,252 @@
import logging
import sys
from collections import OrderedDict

from config_app import app
from config_endpoints.api import method_metadata
from config_endpoints.common import fully_qualified_name, PARAM_REGEX, TYPE_CONVERTER


logger = logging.getLogger(__name__)


def generate_route_data():
  include_internal = True
  compact = True

  def swagger_parameter(name, description, kind='path', param_type='string', required=True,
                        enum=None, schema=None):
    # https://github.com/swagger-api/swagger-spec/blob/master/versions/2.0.md#parameterObject
    parameter_info = {
      'name': name,
      'in': kind,
      'required': required
    }

    if schema:
      parameter_info['schema'] = {
        '$ref': '#/definitions/%s' % schema
      }
    else:
      parameter_info['type'] = param_type

    if enum is not None and len(list(enum)) > 0:
      parameter_info['enum'] = list(enum)

    return parameter_info

  paths = {}
  models = {}
  tags = []
  tags_added = set()
  operation_ids = set()

  for rule in app.url_map.iter_rules():
    endpoint_method = app.view_functions[rule.endpoint]

    # Verify that we have a view class for this API method.
    if not 'view_class' in dir(endpoint_method):
      continue

    view_class = endpoint_method.view_class

    # Hide the class if it is internal.
    internal = method_metadata(view_class, 'internal')
    if not include_internal and internal:
      continue

    # Build the tag.
    parts = fully_qualified_name(view_class).split('.')
    tag_name = parts[-2]
    if not tag_name in tags_added:
      tags_added.add(tag_name)
      tags.append({
        'name': tag_name,
        'description': (sys.modules[view_class.__module__].__doc__ or '').strip()
      })

    # Build the Swagger data for the path.
    swagger_path = PARAM_REGEX.sub(r'{\2}', rule.rule)
    full_name = fully_qualified_name(view_class)
    path_swagger = {
      'x-name': full_name,
      'x-path': swagger_path,
      'x-tag': tag_name
    }

    related_user_res = method_metadata(view_class, 'related_user_resource')
    if related_user_res is not None:
      path_swagger['x-user-related'] = fully_qualified_name(related_user_res)

    paths[swagger_path] = path_swagger

    # Add any global path parameters.
    param_data_map = view_class.__api_path_params if '__api_path_params' in dir(view_class) else {}
    if param_data_map:
      path_parameters_swagger = []
      for path_parameter in param_data_map:
        description = param_data_map[path_parameter].get('description')
        path_parameters_swagger.append(swagger_parameter(path_parameter, description))

      path_swagger['parameters'] = path_parameters_swagger

    # Add the individual HTTP operations.
    method_names = list(rule.methods.difference(['HEAD', 'OPTIONS']))
    for method_name in method_names:
      # https://github.com/swagger-api/swagger-spec/blob/master/versions/2.0.md#operation-object
      method = getattr(view_class, method_name.lower(), None)
      if method is None:
        logger.debug('Unable to find method for %s in class %s', method_name, view_class)
        continue

      operationId = method_metadata(method, 'nickname')
      operation_swagger = {
        'operationId': operationId,
        'parameters': [],
      }

      if operationId is None:
        continue

      if operationId in operation_ids:
        raise Exception('Duplicate operation Id: %s' % operationId)

      operation_ids.add(operationId)

      # Mark the method as internal.
      internal = method_metadata(method, 'internal')
      if internal is not None:
        operation_swagger['x-internal'] = True

      if include_internal:
        requires_fresh_login = method_metadata(method, 'requires_fresh_login')
        if requires_fresh_login is not None:
          operation_swagger['x-requires-fresh-login'] = True

      # Add the path parameters.
      if rule.arguments:
        for path_parameter in rule.arguments:
          description = param_data_map.get(path_parameter, {}).get('description')
          operation_swagger['parameters'].append(swagger_parameter(path_parameter, description))

      # Add the query parameters.
      if '__api_query_params' in dir(method):
        for query_parameter_info in method.__api_query_params:
          name = query_parameter_info['name']
          description = query_parameter_info['help']
          param_type = TYPE_CONVERTER[query_parameter_info['type']]
          required = query_parameter_info['required']

          operation_swagger['parameters'].append(
            swagger_parameter(name, description, kind='query',
                              param_type=param_type,
                              required=required,
                              enum=query_parameter_info['choices']))

      # Add the OAuth security block.
      # https://github.com/swagger-api/swagger-spec/blob/master/versions/2.0.md#securityRequirementObject
      scope = method_metadata(method, 'oauth2_scope')
      if scope and not compact:
        operation_swagger['security'] = [{'oauth2_implicit': [scope.scope]}]

      # Add the responses block.
      # https://github.com/swagger-api/swagger-spec/blob/master/versions/2.0.md#responsesObject
      response_schema_name = method_metadata(method, 'response_schema')
      if not compact:
        if response_schema_name:
          models[response_schema_name] = view_class.schemas[response_schema_name]

        models['ApiError'] = {
          'type': 'object',
          'properties': {
            'status': {
              'type': 'integer',
              'description': 'Status code of the response.'
            },
            'type': {
              'type': 'string',
              'description': 'Reference to the type of the error.'
            },
            'detail': {
              'type': 'string',
              'description': 'Details about the specific instance of the error.'
            },
            'title': {
              'type': 'string',
              'description': 'Unique error code to identify the type of error.'
            },
            'error_message': {
              'type': 'string',
              'description': 'Deprecated; alias for detail'
            },
            'error_type': {
              'type': 'string',
              'description': 'Deprecated; alias for detail'
            }
          },
          'required': [
            'status',
            'type',
            'title',
          ]
        }

      responses = {
        '400': {
          'description': 'Bad Request',
        },

        '401': {
          'description': 'Session required',
        },

        '403': {
          'description': 'Unauthorized access',
        },

        '404': {
          'description': 'Not found',
        },
      }

      for _, body in responses.items():
        body['schema'] = {'$ref': '#/definitions/ApiError'}

      if method_name == 'DELETE':
        responses['204'] = {
          'description': 'Deleted'
        }
      elif method_name == 'POST':
        responses['201'] = {
          'description': 'Successful creation'
        }
      else:
        responses['200'] = {
          'description': 'Successful invocation'
        }

        if response_schema_name:
          responses['200']['schema'] = {
            '$ref': '#/definitions/%s' % response_schema_name
          }

      operation_swagger['responses'] = responses

      # Add the request block.
      request_schema_name = method_metadata(method, 'request_schema')
      if request_schema_name and not compact:
        models[request_schema_name] = view_class.schemas[request_schema_name]

        operation_swagger['parameters'].append(
          swagger_parameter('body', 'Request body contents.', kind='body',
                            schema=request_schema_name))

      # Add the operation to the parent path.
      if not internal or (internal and include_internal):
        path_swagger[method_name.lower()] = operation_swagger

  tags.sort(key=lambda t: t['name'])
  paths = OrderedDict(sorted(paths.items(), key=lambda p: p[1]['x-tag']))

  if compact:
    return {'paths': paths}
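generate_route_data() above walks app.url_map and emits a Swagger-style description of every registered resource; with compact=True only the paths block survives. A hypothetical way to expose it to the config UI (this wiring is not part of the commit) would be another resource built from the same decorators:

# Hypothetical, for illustration only: serve the discovery data so a client
# could enumerate the endpoints the config app exposes.
from config_endpoints.api import resource, ApiResource, nickname
from config_endpoints.api.discovery import generate_route_data

@resource('/v1/discovery')
class DiscoveryResource(ApiResource):
  """ Lists the API endpoints known to the config app. """

  @nickname('discovery')
  def get(self):
    # In compact mode this is just {'paths': {...}} keyed by swagger path.
    return generate_route_data()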
87  config_app/config_endpoints/api/suconfig.py  Normal file
@@ -0,0 +1,87 @@
import logging

from config_endpoints.api import resource, ApiResource, verify_not_prod, nickname
from config_app import app, config_provider

logger = logging.getLogger(__name__)


@resource('/v1/superuser/config')
class SuperUserConfig(ApiResource):
  """ Resource for fetching and updating the current configuration, if any. """
  schemas = {
    'UpdateConfig': {
      'type': 'object',
      'description': 'Updates the YAML config file',
      'required': [
        'config',
        'hostname'
      ],
      'properties': {
        'config': {
          'type': 'object'
        },
        'hostname': {
          'type': 'string'
        },
        'password': {
          'type': 'string'
        },
      },
    },
  }

  @verify_not_prod
  @nickname('scGetConfig')
  def get(self):
    """ Returns the currently defined configuration, if any. """
    config_object = config_provider.get_config()
    logger.debug(config_object)
    logger.debug(config_provider)
    # Todo: do we even need this endpoint? Since we'll be loading the config in browser
    return {
      'config': config_object
    }


@resource('/v1/superuser/registrystatus')
class SuperUserRegistryStatus(ApiResource):
  """ Resource for determining the status of the registry, such as if config exists,
      if a database is configured, and if it has any defined users.
  """
  @nickname('scRegistryStatus')
  @verify_not_prod
  def get(self):
    """ Returns the status of the registry. """

    # If we have SETUP_COMPLETE, then we're ready to go!
    if app.config.get('SETUP_COMPLETE', False):
      return {
        'provider_id': config_provider.provider_id,
        'requires_restart': config_provider.requires_restart(app.config),
        'status': 'ready'
      }

    # If there is no conf/stack volume, then report that status.
    if not config_provider.volume_exists():
      return {
        'status': 'missing-config-dir'
      }

    # If there is no config file, we need to setup the database.
    if not config_provider.config_exists():
      return {
        'status': 'config-db'
      }

    # If the database isn't yet valid, then we need to set it up.
    # if not database_is_valid():
    #   return {
    #     'status': 'setup-db'
    #   }
    #
    # return {
    #   'status': 'create-superuser' if not database_has_users() else 'config'
    # }

    return {}
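For readability, the status ladder in SuperUserRegistryStatus.get boils down to the following order of checks; this is a condensed restatement of the logic above, not additional behavior:

# Condensed restatement of the checks above, in the order they run:
#   SETUP_COMPLETE set     -> {'status': 'ready', ...}
#   config volume missing  -> {'status': 'missing-config-dir'}
#   config file missing    -> {'status': 'config-db'}
#   database checks        -> still commented out in this commit
#   otherwise              -> {}
def registry_status(app_config, provider):
  if app_config.get('SETUP_COMPLETE', False):
    return {'provider_id': provider.provider_id,
            'requires_restart': provider.requires_restart(app_config),
            'status': 'ready'}
  if not provider.volume_exists():
    return {'status': 'missing-config-dir'}
  if not provider.config_exists():
    return {'status': 'config-db'}
  return {}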
151  config_app/config_endpoints/api/superuser.py  Normal file
@@ -0,0 +1,151 @@
import os
import logging
import pathvalidate
from flask import request

from config_endpoints.exception import InvalidRequest
from config_endpoints.api import resource, ApiResource, verify_not_prod, nickname
from config_util.ssl import load_certificate, CertInvalidException
from config_app import app, config_provider

logger = logging.getLogger(__name__)
EXTRA_CA_DIRECTORY = 'extra_ca_certs'


@resource('/v1/superuser/customcerts/<certpath>')
class SuperUserCustomCertificate(ApiResource):
  """ Resource for managing a custom certificate. """

  @nickname('uploadCustomCertificate')
  @verify_not_prod
  def post(self, certpath):
    uploaded_file = request.files['file']
    if not uploaded_file:
      raise InvalidRequest('Missing certificate file')

    # Save the certificate.
    certpath = pathvalidate.sanitize_filename(certpath)
    if not certpath.endswith('.crt'):
      raise InvalidRequest('Invalid certificate file: must have suffix `.crt`')

    logger.debug('Saving custom certificate %s', certpath)
    cert_full_path = config_provider.get_volume_path(EXTRA_CA_DIRECTORY, certpath)
    config_provider.save_volume_file(cert_full_path, uploaded_file)
    logger.debug('Saved custom certificate %s', certpath)

    # Validate the certificate.
    try:
      logger.debug('Loading custom certificate %s', certpath)
      with config_provider.get_volume_file(cert_full_path) as f:
        load_certificate(f.read())
    except CertInvalidException:
      logger.exception('Got certificate invalid error for cert %s', certpath)
      return '', 204
    except IOError:
      logger.exception('Got IO error for cert %s', certpath)
      return '', 204

    # Call the update script to install the certificate immediately.
    if not app.config['TESTING']:
      logger.debug('Calling certs_install.sh')
      if os.system('/conf/init/certs_install.sh') != 0:
        raise Exception('Could not install certificates')

      logger.debug('certs_install.sh completed')

    return '', 204

  @nickname('deleteCustomCertificate')
  @verify_not_prod
  def delete(self, certpath):
    cert_full_path = config_provider.get_volume_path(EXTRA_CA_DIRECTORY, certpath)
    config_provider.remove_volume_file(cert_full_path)
    return '', 204


@resource('/v1/superuser/customcerts')
class SuperUserCustomCertificates(ApiResource):
  """ Resource for managing custom certificates. """

  @nickname('getCustomCertificates')
  @verify_not_prod
  def get(self):
    has_extra_certs_path = config_provider.volume_file_exists(EXTRA_CA_DIRECTORY)
    extra_certs_found = config_provider.list_volume_directory(EXTRA_CA_DIRECTORY)
    if extra_certs_found is None:
      return {
        'status': 'file' if has_extra_certs_path else 'none',
      }

    cert_views = []
    for extra_cert_path in extra_certs_found:
      try:
        cert_full_path = config_provider.get_volume_path(EXTRA_CA_DIRECTORY, extra_cert_path)
        with config_provider.get_volume_file(cert_full_path) as f:
          certificate = load_certificate(f.read())
          cert_views.append({
            'path': extra_cert_path,
            'names': list(certificate.names),
            'expired': certificate.expired,
          })
      except CertInvalidException as cie:
        cert_views.append({
          'path': extra_cert_path,
          'error': cie.message,
        })
      except IOError as ioe:
        cert_views.append({
          'path': extra_cert_path,
          'error': ioe.message,
        })

    return {
      'status': 'directory',
      'certs': cert_views,
    }

# TODO(config) port this endpoint when (https://github.com/quay/quay/pull/3055) merged to ensure no conflicts
# @resource('/v1/superuser/keys')
# class SuperUserServiceKeyManagement(ApiResource):
#   """ Resource for managing service keys."""
#   schemas = {
#     'CreateServiceKey': {
#       'id': 'CreateServiceKey',
#       'type': 'object',
#       'description': 'Description of creation of a service key',
#       'required': ['service', 'expiration'],
#       'properties': {
#         'service': {
#           'type': 'string',
#           'description': 'The service authenticating with this key',
#         },
#         'name': {
#           'type': 'string',
#           'description': 'The friendly name of a service key',
#         },
#         'metadata': {
#           'type': 'object',
#           'description': 'The key/value pairs of this key\'s metadata',
#         },
#         'notes': {
#           'type': 'string',
#           'description': 'If specified, the extra notes for the key',
#         },
#         'expiration': {
#           'description': 'The expiration date as a unix timestamp',
#           'anyOf': [{'type': 'number'}, {'type': 'null'}],
#         },
#       },
#     },
#   }
#
#   @verify_not_prod
#   @nickname('listServiceKeys')
#   def get(self):
#     keys = pre_oci_model.list_all_service_keys()
#
#     return jsonify({
#       'keys': [key.to_dict() for key in keys],
#     })
#
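A sketch of driving the new certificate endpoints with Flask's test client, assuming api_bp is registered without a URL prefix and that the config app itself runs with TESTING set so certs_install.sh is skipped; the fixture path is hypothetical:

# Sketch only: upload a cert, then list what the config volume now holds.
import io
from flask import Flask
from config_endpoints.api import api_bp

cert_app = Flask(__name__)
cert_app.register_blueprint(api_bp)

with cert_app.test_client() as client:
  pem_bytes = open('testdata/selfsigned.crt', 'rb').read()  # hypothetical fixture
  resp = client.post('/v1/superuser/customcerts/selfsigned.crt',
                     data={'file': (io.BytesIO(pem_bytes), 'selfsigned.crt')},
                     content_type='multipart/form-data')
  assert resp.status_code == 204             # post() returns '', 204 on success

  listing = client.get('/v1/superuser/customcerts').get_json()
  # e.g. {'status': 'directory',
  #       'certs': [{'path': 'selfsigned.crt', 'names': [...], 'expired': False}]}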
18  config_app/config_endpoints/api/user.py  Normal file
@@ -0,0 +1,18 @@
from config_endpoints.api import resource, ApiResource, nickname


@resource('/v1/user/')
class User(ApiResource):
  """ Operations related to users. """

  @nickname('getLoggedInUser')
  def get(self):
    """ Get user information for the authenticated user. """
    # user = get_authenticated_user()

    # return user_view(user)
    return {
      'anonymous': False,
      # 'username': user.username,
    }
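The stubbed user endpoint can be smoke-tested the same way; a minimal sketch, again assuming api_bp is registered with no URL prefix:

# Sketch only: the User resource currently returns a fixed payload until real
# authentication is ported over to the config app.
from flask import Flask
from config_endpoints.api import api_bp

user_app = Flask(__name__)
user_app.register_blueprint(api_bp)

with user_app.test_client() as client:
  assert client.get('/v1/user/').get_json() == {'anonymous': False}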