diff --git a/config_app/Procfile b/config_app/Procfile
index ac20de89b..22dd965c1 100644
--- a/config_app/Procfile
+++ b/config_app/Procfile
@@ -1,3 +1,3 @@
-app: PYTHONPATH="./" gunicorn -c conf/gunicorn_local.py config_application:application
+app: PYTHONPATH="../" gunicorn -c conf/gunicorn_local.py config_application:application
 
 # webpack: npm run watch-config-app
diff --git a/config_app/config_app.py b/config_app/c_app.py
similarity index 81%
rename from config_app/config_app.py
rename to config_app/c_app.py
index 616ecc157..4fbf1d1a2 100644
--- a/config_app/config_app.py
+++ b/config_app/c_app.py
@@ -2,7 +2,7 @@ import os
 import logging
 from flask import Flask
 from _init_config import CONF_DIR
-from config_util.config import get_config_provider
+from config_app.config_util.config import get_config_provider
 
 app = Flask(__name__)
 
@@ -18,11 +18,11 @@ config_provider = get_config_provider(OVERRIDE_CONFIG_DIRECTORY, 'config.yaml',
                                       testing=is_testing, kubernetes=is_kubernetes)
 
 if is_testing:
-  from config_test.testconfig import TestConfig
+  from config_app.config_test.testconfig import TestConfig
   logger.debug('Loading test config.')
   app.config.from_object(TestConfig())
 else:
-  from config_app_config import DefaultConfig
+  from config_app.config_app_config import DefaultConfig
   logger.debug('Loading default config.')
   app.config.from_object(DefaultConfig())
   # app.teardown_request(database.close_db_filter)
diff --git a/config_app/conf/gunicorn_local.py b/config_app/conf/gunicorn_local.py
index 96cd7d19c..7fcd83a9c 100644
--- a/config_app/conf/gunicorn_local.py
+++ b/config_app/conf/gunicorn_local.py
@@ -5,8 +5,8 @@ sys.path.append(os.path.join(os.path.dirname(__file__), "../"))
 
 import logging
 from Crypto import Random
-from config_util.log import logfile_path
-from config_util.workers import get_worker_count
+from config_app.config_util.log import logfile_path
+from config_app.config_util.workers import get_worker_count
 
 logconfig = logfile_path(debug=True)
 
diff --git a/config_app/config_application.py b/config_app/config_application.py
index b408a7984..5c8835b66 100644
--- a/config_app/config_application.py
+++ b/config_app/config_application.py
@@ -1,4 +1,4 @@
-from config_app import app as application
+from config_app.c_app import app as application
 
 # Bind all of the blueprints
 import config_web
diff --git a/config_app/config_endpoints/api/__init__.py b/config_app/config_endpoints/api/__init__.py
index e220e27ed..b5df7b405 100644
--- a/config_app/config_endpoints/api/__init__.py
+++ b/config_app/config_endpoints/api/__init__.py
@@ -3,10 +3,13 @@ import logging
 from flask import Blueprint
 from flask_restful import Resource, Api
 from flask_restful.utils.cors import crossdomain
-from config_app import app
+from email.utils import formatdate
+from calendar import timegm
 from functools import partial, wraps
 from jsonschema import validate, ValidationError
-from config_endpoints.exception import InvalidResponse
+
+from config_app.c_app import app
+from config_app.config_endpoints.exception import InvalidResponse
 
 logger = logging.getLogger(__name__)
 api_bp = Blueprint('api', __name__)
@@ -28,6 +31,13 @@ api = ApiExceptionHandlingApi()
 api.init_app(api_bp)
 
 
+def format_date(date):
+  """ Output an RFC822 date format. """
+  if date is None:
+    return None
+  return formatdate(timegm(date.utctimetuple()))
+
+
 def verify_not_prod(func):
   @add_method_metadata('enterprise_only', True)
   @wraps(func)
diff --git a/config_app/config_endpoints/api/discovery.py b/config_app/config_endpoints/api/discovery.py
index dda178c62..70246847c 100644
--- a/config_app/config_endpoints/api/discovery.py
+++ b/config_app/config_endpoints/api/discovery.py
@@ -2,9 +2,9 @@ import logging
 import sys
 from collections import OrderedDict
 
-from config_app import app
-from config_endpoints.api import method_metadata
-from config_endpoints.common import fully_qualified_name, PARAM_REGEX, TYPE_CONVERTER
+from config_app.c_app import app
+from config_app.config_endpoints.api import method_metadata
+from config_app.config_endpoints.common import fully_qualified_name, PARAM_REGEX, TYPE_CONVERTER
 
 logger = logging.getLogger(__name__)
 
diff --git a/config_app/config_endpoints/api/suconfig.py b/config_app/config_endpoints/api/suconfig.py
index 02a3cb2d4..bc17ce3af 100644
--- a/config_app/config_endpoints/api/suconfig.py
+++ b/config_app/config_endpoints/api/suconfig.py
@@ -1,7 +1,7 @@
 import logging
 
-from config_endpoints.api import resource, ApiResource, verify_not_prod, nickname
-from config_app import app, config_provider
+from config_app.config_endpoints.api import resource, ApiResource, verify_not_prod, nickname
+from config_app.c_app import app, config_provider
 
 logger = logging.getLogger(__name__)
 
diff --git a/config_app/config_endpoints/api/superuser.py b/config_app/config_endpoints/api/superuser.py
index 227257a56..5cb26fc9d 100644
--- a/config_app/config_endpoints/api/superuser.py
+++ b/config_app/config_endpoints/api/superuser.py
@@ -1,12 +1,14 @@
 import os
 import logging
 import pathvalidate
-from flask import request
+from flask import request, jsonify
 
-from config_endpoints.exception import InvalidRequest
-from config_endpoints.api import resource, ApiResource, verify_not_prod, nickname
-from config_util.ssl import load_certificate, CertInvalidException
-from config_app import app, config_provider
+from config_app.config_endpoints.exception import InvalidRequest
+from config_app.config_endpoints.api import resource, ApiResource, verify_not_prod, nickname
+from config_app.config_util.ssl import load_certificate, CertInvalidException
+from config_app.c_app import app, config_provider
+
+from config_app.config_endpoints.api.superuser_models_pre_oci import pre_oci_model
 
 logger = logging.getLogger(__name__)
 EXTRA_CA_DIRECTORY = 'extra_ca_certs'
@@ -104,48 +106,49 @@ class SuperUserCustomCertificates(ApiResource):
       'certs': cert_views,
     }
 
 
-# TODO(config) port this endpoint when (https://github.com/quay/quay/pull/3055) merged to ensure no conflicts
-# @resource('/v1/superuser/keys')
-# class SuperUserServiceKeyManagement(ApiResource):
-#   """ Resource for managing service keys."""
-#   schemas = {
-#     'CreateServiceKey': {
-#       'id': 'CreateServiceKey',
-#       'type': 'object',
-#       'description': 'Description of creation of a service key',
-#       'required': ['service', 'expiration'],
-#       'properties': {
-#         'service': {
-#           'type': 'string',
-#           'description': 'The service authenticating with this key',
-#         },
-#         'name': {
-#           'type': 'string',
-#           'description': 'The friendly name of a service key',
-#         },
-#         'metadata': {
-#           'type': 'object',
-#           'description': 'The key/value pairs of this key\'s metadata',
-#         },
-#         'notes': {
-#           'type': 'string',
-#           'description': 'If specified, the extra notes for the key',
-#         },
-#         'expiration': {
-#           'description': 'The expiration date as a unix timestamp',
-#           'anyOf': [{'type': 'number'}, {'type': 'null'}],
-#         },
-#       },
-#     },
-#   }
-#
-#   @verify_not_prod
-#   @nickname('listServiceKeys')
-#   def get(self):
-#     keys = pre_oci_model.list_all_service_keys()
-#
-#     return jsonify({
-#       'keys': [key.to_dict() for key in keys],
-#     })
-#
+
+# TODO(config) port this endpoint when (https://github.com/quay/quay/pull/3055) merged to ensure no conflicts
+@resource('/v1/superuser/keys')
+class SuperUserServiceKeyManagement(ApiResource):
+  """ Resource for managing service keys."""
+  schemas = {
+    'CreateServiceKey': {
+      'id': 'CreateServiceKey',
+      'type': 'object',
+      'description': 'Description of creation of a service key',
+      'required': ['service', 'expiration'],
+      'properties': {
+        'service': {
+          'type': 'string',
+          'description': 'The service authenticating with this key',
+        },
+        'name': {
+          'type': 'string',
+          'description': 'The friendly name of a service key',
+        },
+        'metadata': {
+          'type': 'object',
+          'description': 'The key/value pairs of this key\'s metadata',
+        },
+        'notes': {
+          'type': 'string',
+          'description': 'If specified, the extra notes for the key',
+        },
+        'expiration': {
+          'description': 'The expiration date as a unix timestamp',
+          'anyOf': [{'type': 'number'}, {'type': 'null'}],
+        },
+      },
+    },
+  }
+
+  @verify_not_prod
+  @nickname('listServiceKeys')
+  def get(self):
+    keys = pre_oci_model.list_all_service_keys()
+
+    return jsonify({
+      'keys': [key.to_dict() for key in keys],
+    })
+
diff --git a/config_app/config_endpoints/api/superuser_models_interface.py b/config_app/config_endpoints/api/superuser_models_interface.py
new file mode 100644
index 000000000..23d672f2b
--- /dev/null
+++ b/config_app/config_endpoints/api/superuser_models_interface.py
@@ -0,0 +1,448 @@
+import json
+from abc import ABCMeta, abstractmethod
+from collections import namedtuple
+from datetime import datetime
+
+from dateutil.relativedelta import relativedelta
+from six import add_metaclass
+from tzlocal import get_localzone
+
+# from app import avatar, superusers
+# from buildtrigger.basehandler import BuildTriggerHandler
+from data import model
+from config_app.config_endpoints.api import format_date
+from util.morecollections import AttrDict
+
+
+def user_view(user):
+  return {
+    'name': user.username,
+    'kind': 'user',
+    'is_robot': user.robot,
+  }
+
+
+# class BuildTrigger(
+#   namedtuple('BuildTrigger', ['uuid', 'service_name', 'pull_robot', 'can_read', 'can_admin', 'for_build'])):
+#   """
+#   BuildTrigger represents a trigger that is associated with a build
+#   :type uuid: string
+#   :type service_name: string
+#   :type pull_robot: User
+#   :type can_read: boolean
+#   :type can_admin: boolean
+#   :type for_build: boolean
+#   """
+#
+#   def to_dict(self):
+#     if not self.uuid:
+#       return None
+#
+#     build_trigger = BuildTriggerHandler.get_handler(self)
+#     build_source = build_trigger.config.get('build_source')
+#
+#     repo_url = build_trigger.get_repository_url() if build_source else None
+#     can_read = self.can_read or self.can_admin
+#
+#     trigger_data = {
+#       'id': self.uuid,
+#       'service': self.service_name,
+#       'is_active': build_trigger.is_active(),
+#
+#       'build_source': build_source if can_read else None,
+#       'repository_url': repo_url if can_read else None,
+#
+#       'config': build_trigger.config if self.can_admin else {},
+#       'can_invoke': self.can_admin,
+#     }
+#
+#     if not self.for_build and self.can_admin and self.pull_robot:
+#       trigger_data['pull_robot'] = user_view(self.pull_robot)
+#
+#     return trigger_data
+
+
+class RepositoryBuild(namedtuple('RepositoryBuild',
+                                 ['uuid', 'logs_archived', 'repository_namespace_user_username', 'repository_name',
+                                  'can_write', 'can_read', 'pull_robot', 'resource_key', 'trigger', 'display_name',
+                                  'started', 'job_config', 'phase', 'status', 'error', 'archive_url'])):
+  """
+  RepositoryBuild represents a build associated with a repository
+  :type uuid: string
+  :type logs_archived: boolean
+  :type repository_namespace_user_username: string
+  :type repository_name: string
+  :type can_write: boolean
+  :type can_read: boolean
+  :type pull_robot: User
+  :type resource_key: string
+  :type trigger: Trigger
+  :type display_name: string
+  :type started: boolean
+  :type job_config: {Any -> Any}
+  :type phase: string
+  :type status: string
+  :type error: string
+  :type archive_url: string
+  """
+
+  def to_dict(self):
+
+    resp = {
+      'id': self.uuid,
+      'phase': self.phase,
+      'started': format_date(self.started),
+      'display_name': self.display_name,
+      'status': self.status or {},
+      'subdirectory': self.job_config.get('build_subdir', ''),
+      'dockerfile_path': self.job_config.get('build_subdir', ''),
+      'context': self.job_config.get('context', ''),
+      'tags': self.job_config.get('docker_tags', []),
+      'manual_user': self.job_config.get('manual_user', None),
+      'is_writer': self.can_write,
+      'trigger': self.trigger.to_dict(),
+      'trigger_metadata': self.job_config.get('trigger_metadata', None) if self.can_read else None,
+      'resource_key': self.resource_key,
+      'pull_robot': user_view(self.pull_robot) if self.pull_robot else None,
+      'repository': {
+        'namespace': self.repository_namespace_user_username,
+        'name': self.repository_name
+      },
+      'error': self.error,
+    }
+
+    if self.can_write:
+      if self.resource_key is not None:
+        resp['archive_url'] = self.archive_url
+      elif self.job_config.get('archive_url', None):
+        resp['archive_url'] = self.job_config['archive_url']
+
+    return resp
+
+
+class Approval(namedtuple('Approval', ['approver', 'approval_type', 'approved_date', 'notes'])):
+  """
+  Approval represents whether a key has been approved or not
+  :type approver: User
+  :type approval_type: string
+  :type approved_date: Date
+  :type notes: string
+  """
+
+  def to_dict(self):
+    return {
+      'approver': self.approver.to_dict() if self.approver else None,
+      'approval_type': self.approval_type,
+      'approved_date': self.approved_date,
+      'notes': self.notes,
+    }
+
+
+class ServiceKey(namedtuple('ServiceKey', ['name', 'kid', 'service', 'jwk', 'metadata', 'created_date',
+                                           'expiration_date', 'rotation_duration', 'approval'])):
+  """
+  ServiceKey is an apostille signing key
+  :type name: string
+  :type kid: int
+  :type service: string
+  :type jwk: string
+  :type metadata: string
+  :type created_date: Date
+  :type expiration_date: Date
+  :type rotation_duration: Date
+  :type approval: Approval
+
+  """
+
+  def to_dict(self):
+    return {
+      'name': self.name,
+      'kid': self.kid,
+      'service': self.service,
+      'jwk': self.jwk,
+      'metadata': self.metadata,
+      'created_date': self.created_date,
+      'expiration_date': self.expiration_date,
+      'rotation_duration': self.rotation_duration,
+      'approval': self.approval.to_dict() if self.approval is not None else None,
+    }
+
+
+class User(namedtuple('User', ['username', 'email', 'verified', 'enabled', 'robot'])):
+  """
+  User represents a single user.
+  :type username: string
+  :type email: string
+  :type verified: boolean
+  :type enabled: boolean
+  :type robot: User
+  """
+
+  def to_dict(self):
+    user_data = {
+      'kind': 'user',
+      'name': self.username,
+      'username': self.username,
+      'email': self.email,
+      'verified': self.verified,
+      # todo(config) remove or add these lines from app
+      # 'avatar': avatar.get_data_for_user(self),
+      # 'super_user': superusers.is_superuser(self.username),
+      'enabled': self.enabled,
+    }
+
+    return user_data
+
+
+class Organization(namedtuple('Organization', ['username', 'email'])):
+  """
+  Organization represents a single org.
+  :type username: string
+  :type email: string
+  """
+
+  def to_dict(self):
+    return {
+      'name': self.username,
+      'email': self.email,
+      # todo(config) remove or add these lines from app
+      # 'avatar': avatar.get_data_for_org(self),
+    }
+
+
+class LogEntry(
+  namedtuple('LogEntry', [
+    'metadata_json', 'ip', 'datetime', 'performer_email', 'performer_username', 'performer_robot',
+    'account_organization', 'account_username', 'account_email', 'account_robot', 'kind',
+  ])):
+  """
+  LogEntry represents a single log entry.
+  :type metadata_json: string
+  :type ip: string
+  :type datetime: string
+  :type performer_email: int
+  :type performer_username: string
+  :type performer_robot: boolean
+  :type account_organization: boolean
+  :type account_username: string
+  :type account_email: string
+  :type account_robot: boolean
+  :type kind_id: int
+  """
+
+  def to_dict(self):
+    view = {
+      'kind': self.kind,
+      'metadata': json.loads(self.metadata_json),
+      'ip': self.ip,
+      'datetime': format_date(self.datetime),
+    }
+
+    if self.performer_username:
+      performer = AttrDict({'username': self.performer_username, 'email': self.performer_email})
+      performer.robot = None
+      if self.performer_robot:
+        performer.robot = self.performer_robot
+
+      view['performer'] = {
+        'kind': 'user',
+        'name': self.performer_username,
+        'is_robot': self.performer_robot,
+        # todo(config) remove or add these lines from app
+        # 'avatar': avatar.get_data_for_user(performer),
+      }
+
+    if self.account_username:
+      account = AttrDict({'username': self.account_username, 'email': self.account_email})
+      if self.account_organization:
+
+        view['namespace'] = {
+          'kind': 'org',
+          'name': self.account_username,
+          # todo(config) remove or add these lines from app
+          # 'avatar': avatar.get_data_for_org(account),
+        }
+      else:
+        account.robot = None
+        if self.account_robot:
+          account.robot = self.account_robot
+        view['namespace'] = {
+          'kind': 'user',
+          'name': self.account_username,
+          # todo(config) remove or add these lines from app
+          # 'avatar': avatar.get_data_for_user(account),
+        }
+
+    return view
+
+
+class LogEntryPage(
+  namedtuple('LogEntryPage', ['logs', 'next_page_token'])):
+  """
+  LogEntryPage represents a single page of logs.
+  :type logs: [LogEntry]
+  :type next_page_token: {any -> any}
+  """
+
+
+class AggregatedLogEntry(
+  namedtuple('AggregatedLogEntry', ['count', 'kind_id', 'day', 'start_time'])):
+  """
+  AggregatedLogEntry represents an aggregated view of logs.
+  :type count: int
+  :type kind_id: int
+  :type day: string
+  :type start_time: Date
+  """
+
+  def to_dict(self):
+    synthetic_date = datetime(self.start_time.year, self.start_time.month, int(self.day), tzinfo=get_localzone())
+    if synthetic_date.day < self.start_time.day:
+      synthetic_date = synthetic_date + relativedelta(months=1)
+    kinds = model.log.get_log_entry_kinds()
+    view = {
+      'kind': kinds[self.kind_id],
+      'count': self.count,
+      'datetime': format_date(synthetic_date),
+    }
+
+    return view
+
+
+@add_metaclass(ABCMeta)
+class SuperuserDataInterface(object):
+  """
+  Interface that represents all data store interactions required by a superuser api.
+  """
+
+  @abstractmethod
+  def get_logs_query(self, start_time, end_time, page_token=None):
+    """
+    Returns a LogEntryPage.
+    """
+
+  @abstractmethod
+  def get_aggregated_logs(self, start_time, end_time):
+    """
+    Returns a list of AggregatedLogEntry
+    """
+
+  @abstractmethod
+  def get_organizations(self):
+    """
+    Returns a list of Organization
+    """
+
+  @abstractmethod
+  def get_active_users(self):
+    """
+    Returns a list of User
+    """
+
+  @abstractmethod
+  def create_install_user(self, username, password, email):
+    """
+    Returns the created user and confirmation code for email confirmation
+    """
+
+  @abstractmethod
+  def get_nonrobot_user(self, username):
+    """
+    Returns a User
+    """
+
+  @abstractmethod
+  def create_reset_password_email_code(self, email):
+    """
+    Returns a recover password code
+    """
+
+  @abstractmethod
+  def mark_user_for_deletion(self, username):
+    """
+    Returns None
+    """
+
+  @abstractmethod
+  def change_password(self, username, password):
+    """
+    Returns None
+    """
+
+  @abstractmethod
+  def update_email(self, username, email, auto_verify):
+    """
+    Returns None
+    """
+
+  @abstractmethod
+  def update_enabled(self, username, enabled):
+    """
+    Returns None
+    """
+
+  @abstractmethod
+  def take_ownership(self, namespace, authed_user):
+    """
+    Returns id of entity and whether the entity was a user
+    """
+
+  @abstractmethod
+  def mark_organization_for_deletion(self, name):
+    """
+    Returns None
+    """
+
+  @abstractmethod
+  def change_organization_name(self, old_org_name, new_org_name):
+    """
+    Returns updated Organization
+    """
+
+  @abstractmethod
+  def list_all_service_keys(self):
+    """
+    Returns a list of service keys
+    """
+
+  @abstractmethod
+  def generate_service_key(self, service, expiration_date, kid=None, name='', metadata=None, rotation_duration=None):
+    """
+    Returns a tuple of private key and public key id
+    """
+
+  @abstractmethod
+  def approve_service_key(self, kid, approver, approval_type, notes=''):
+    """
+    Returns the approved Key
+    """
+
+  @abstractmethod
+  def get_service_key(self, kid, service=None, alive_only=True, approved_only=True):
+    """
+    Returns ServiceKey
+    """
+
+  @abstractmethod
+  def set_key_expiration(self, kid, expiration_date):
+    """
+    Returns None
+    """
+
+  @abstractmethod
+  def update_service_key(self, kid, name=None, metadata=None):
+    """
+    Returns None
+    """
+
+  @abstractmethod
+  def delete_service_key(self, kid):
+    """
+    Returns deleted ServiceKey
+    """
+
+  @abstractmethod
+  def get_repository_build(self, uuid):
+    """
+    Returns RepositoryBuild
+    """
diff --git a/config_app/config_endpoints/api/superuser_models_pre_oci.py b/config_app/config_endpoints/api/superuser_models_pre_oci.py
new file mode 100644
index 000000000..352c8f38d
--- /dev/null
+++ b/config_app/config_endpoints/api/superuser_models_pre_oci.py
@@ -0,0 +1,274 @@
+from data import model
+
+from config_app.config_endpoints.api.superuser_models_interface import SuperuserDataInterface, User, ServiceKey, Approval
+
+#
+# def _create_log(log, log_kind):
+#   account_organization = None
+#   account_username = None
+#   account_email = None
+#   account_robot = None
+#   try:
+#     account_organization = log.account.organization
+#     account_username = log.account.username
+#     account_email = log.account.email
+#     account_robot = log.account.robot
+#   except AttributeError:
+#     pass
+#
+#   performer_robot = None
+#   performer_username = None
+#   performer_email = None
+#
+#   try:
+#     performer_robot = log.performer.robot
+#     performer_username = log.performer.username
+#     performer_email = log.performer.email
+#   except AttributeError:
+#     pass
+#
+#   return LogEntry(log.metadata_json, log.ip, log.datetime, performer_email, performer_username,
+#                   performer_robot, account_organization, account_username,
+#                   account_email, account_robot, log_kind[log.kind_id])
+
+
+def _create_user(user):
+  if user is None:
+    return None
+  return User(user.username, user.email, user.verified, user.enabled, user.robot)
+
+
+def _create_key(key):
+  approval = None
+  if key.approval is not None:
+    approval = Approval(_create_user(key.approval.approver), key.approval.approval_type, key.approval.approved_date,
+                        key.approval.notes)
+
+  return ServiceKey(key.name, key.kid, key.service, key.jwk, key.metadata, key.created_date, key.expiration_date,
+                    key.rotation_duration, approval)
+#
+#
+# class ServiceKeyDoesNotExist(Exception):
+#   pass
+#
+#
+# class ServiceKeyAlreadyApproved(Exception):
+#   pass
+#
+#
+# class InvalidRepositoryBuildException(Exception):
+#   pass
+
+
+class PreOCIModel(SuperuserDataInterface):
+  """
+  PreOCIModel implements the data model for the SuperUser using a database schema
+  before it was changed to support the OCI specification.
+  """
+  def get_logs_query(self, start_time, end_time, page_token=None):
+    pass
+
+  def get_aggregated_logs(self, start_time, end_time):
+    pass
+
+  def get_organizations(self):
+    pass
+
+  def get_active_users(self):
+    pass
+
+  def create_install_user(self, username, password, email):
+    pass
+
+  def get_nonrobot_user(self, username):
+    pass
+
+  def create_reset_password_email_code(self, email):
+    pass
+
+  def mark_user_for_deletion(self, username):
+    pass
+
+  def change_password(self, username, password):
+    pass
+
+  def update_email(self, username, email, auto_verify):
+    pass
+
+  def update_enabled(self, username, enabled):
+    pass
+
+  def take_ownership(self, namespace, authed_user):
+    pass
+
+  def mark_organization_for_deletion(self, name):
+    pass
+
+  def change_organization_name(self, old_org_name, new_org_name):
+    pass
+
+  def generate_service_key(self, service, expiration_date, kid=None, name='', metadata=None, rotation_duration=None):
+    pass
+
+  def approve_service_key(self, kid, approver, approval_type, notes=''):
+    pass
+
+  def get_service_key(self, kid, service=None, alive_only=True, approved_only=True):
+    pass
+
+  def set_key_expiration(self, kid, expiration_date):
+    pass
+
+  def update_service_key(self, kid, name=None, metadata=None):
+    pass
+
+  def delete_service_key(self, kid):
+    pass
+
+  def get_repository_build(self, uuid):
+    pass
+
+  # def get_repository_build(self, uuid):
+  #   try:
+  #     build = model.build.get_repository_build(uuid)
+  #   except model.InvalidRepositoryBuildException as e:
+  #     raise InvalidRepositoryBuildException(e.message)
+  #
+  #   repo_namespace = build.repository_namespace_user_username
+  #   repo_name = build.repository_name
+  #
+  #   can_read = ReadRepositoryPermission(repo_namespace, repo_name).can()
+  #   can_write = ModifyRepositoryPermission(repo_namespace, repo_name).can()
+  #   can_admin = AdministerRepositoryPermission(repo_namespace, repo_name).can()
+  #   job_config = get_job_config(build.job_config)
+  #   phase, status, error = _get_build_status(build)
+  #   url = userfiles.get_file_url(self.resource_key, request.remote_addr, requires_cors=True)
+  #
+  #   return RepositoryBuild(build.uuid, build.logs_archived, repo_namespace, repo_name, can_write, can_read,
+  #                          _create_user(build.pull_robot), build.resource_key,
+  #                          BuildTrigger(build.trigger.uuid, build.trigger.service.name,
+  #                                       _create_user(build.trigger.pull_robot), can_read, can_admin, True),
+  #                          build.display_name, build.display_name, build.started, job_config, phase, status, error, url)
+  #
+  # def delete_service_key(self, kid):
+  #   try:
+  #     key = model.service_keys.delete_service_key(kid)
+  #   except model.ServiceKeyDoesNotExist:
+  #     raise ServiceKeyDoesNotExist
+  #   return _create_key(key)
+  #
+  # def update_service_key(self, kid, name=None, metadata=None):
+  #   model.service_keys.update_service_key(kid, name, metadata)
+  #
+  # def set_key_expiration(self, kid, expiration_date):
+  #   model.service_keys.set_key_expiration(kid, expiration_date)
+  #
+  # def get_service_key(self, kid, service=None, alive_only=True, approved_only=True):
+  #   try:
+  #     key = model.service_keys.get_service_key(kid, approved_only=approved_only, alive_only=alive_only)
+  #     return _create_key(key)
+  #   except model.ServiceKeyDoesNotExist:
+  #     raise ServiceKeyDoesNotExist
+  #
+  # def approve_service_key(self, kid, approver, approval_type, notes=''):
+  #   try:
+  #     key = model.service_keys.approve_service_key(kid, approver, approval_type, notes=notes)
+  #     return _create_key(key)
+  #   except model.ServiceKeyDoesNotExist:
+  #     raise ServiceKeyDoesNotExist
+  #   except model.ServiceKeyAlreadyApproved:
+  #     raise ServiceKeyAlreadyApproved
+  #
+  # def generate_service_key(self, service, expiration_date, kid=None, name='', metadata=None, rotation_duration=None):
+  #   (private_key, key) = model.service_keys.generate_service_key(service, expiration_date, metadata=metadata, name=name)
+  #
+  #   return private_key, key.kid
+
+  def list_all_service_keys(self):
+    keys = model.service_keys.list_all_keys()
+    return [_create_key(key) for key in keys]
+
+  # def change_organization_name(self, old_org_name, new_org_name):
+  #   org = model.organization.get_organization(old_org_name)
+  #   if new_org_name is not None:
+  #     org = model.user.change_username(org.id, new_org_name)
+  #
+  #   return Organization(org.username, org.email)
+  #
+  # def mark_organization_for_deletion(self, name):
+  #   org = model.organization.get_organization(name)
+  #   model.user.mark_namespace_for_deletion(org, all_queues, namespace_gc_queue, force=True)
+  #
+  # def take_ownership(self, namespace, authed_user):
+  #   entity = model.user.get_user_or_org(namespace)
+  #   if entity is None:
+  #     return None, False
+  #
+  #   was_user = not entity.organization
+  #   if entity.organization:
+  #     # Add the superuser as an admin to the owners team of the org.
+  #     model.organization.add_user_as_admin(authed_user, entity)
+  #   else:
+  #     # If the entity is a user, convert it to an organization and add the current superuser
+  #     # as the admin.
+  #     model.organization.convert_user_to_organization(entity, authed_user)
+  #   return entity.id, was_user
+  #
+  # def update_enabled(self, username, enabled):
+  #   user = model.user.get_nonrobot_user(username)
+  #   model.user.update_enabled(user, bool(enabled))
+  #
+  # def update_email(self, username, email, auto_verify):
+  #   user = model.user.get_nonrobot_user(username)
+  #   model.user.update_email(user, email, auto_verify)
+  #
+  # def change_password(self, username, password):
+  #   user = model.user.get_nonrobot_user(username)
+  #   model.user.change_password(user, password)
+  #
+  # def mark_user_for_deletion(self, username):
+  #   user = model.user.get_nonrobot_user(username)
+  #   model.user.mark_namespace_for_deletion(user, all_queues, namespace_gc_queue, force=True)
+  #
+  # def create_reset_password_email_code(self, email):
+  #   code = model.user.create_reset_password_email_code(email)
+  #   return code.code
+  #
+  # def get_nonrobot_user(self, username):
+  #   user = model.user.get_nonrobot_user(username)
+  #   if user is None:
+  #     return None
+  #   return _create_user(user)
+  #
+  # def create_install_user(self, username, password, email):
+  #   prompts = model.user.get_default_user_prompts(features)
+  #   user = model.user.create_user(username, password, email, auto_verify=not features.MAILING,
+  #                                 email_required=features.MAILING, prompts=prompts)
+  #
+  #   return_user = _create_user(user)
+  #   # If mailing is turned on, send the user a verification email.
+  #   if features.MAILING:
+  #     confirmation = model.user.create_confirm_email_code(user)
+  #     return return_user, confirmation.code
+  #   return return_user, ''
+  #
+  # def get_active_users(self, disabled=True):
+  #   users = model.user.get_active_users(disabled=disabled)
+  #   return [_create_user(user) for user in users]
+  #
+  # def get_organizations(self):
+  #   return [Organization(org.username, org.email) for org in model.organization.get_organizations()]
+  #
+  # def get_aggregated_logs(self, start_time, end_time):
+  #   aggregated_logs = model.log.get_aggregated_logs(start_time, end_time)
+  #   return [AggregatedLogEntry(log.count, log.kind_id, log.day, start_time) for log in aggregated_logs]
+  #
+  # def get_logs_query(self, start_time, end_time, page_token=None):
+  #   logs_query = model.log.get_logs_query(start_time, end_time)
+  #   logs, next_page_token = model.modelutil.paginate(logs_query, database.LogEntry, descending=True,
+  #                                                    page_token=page_token, limit=20)
+  #   kinds = model.log.get_log_entry_kinds()
+  #   return LogEntryPage([_create_log(log, kinds) for log in logs], next_page_token)
+
+
+pre_oci_model = PreOCIModel()
diff --git a/config_app/config_endpoints/api/user.py b/config_app/config_endpoints/api/user.py
index b7ff870cf..d8a6449c3 100644
--- a/config_app/config_endpoints/api/user.py
+++ b/config_app/config_endpoints/api/user.py
@@ -1,4 +1,4 @@
-from config_endpoints.api import resource, ApiResource, nickname
+from config_app.config_endpoints.api import resource, ApiResource, nickname
 
 
 @resource('/v1/user/')
diff --git a/config_app/config_endpoints/setup_web.py b/config_app/config_endpoints/setup_web.py
index 44a8f5cbd..541aa3df3 100644
--- a/config_app/config_endpoints/setup_web.py
+++ b/config_app/config_endpoints/setup_web.py
@@ -1,6 +1,6 @@
 from flask import Blueprint
-from config_endpoints.common import render_page_template
-from config_endpoints.api.discovery import generate_route_data
+from config_app.config_endpoints.common import render_page_template
+from config_app.config_endpoints.api.discovery import generate_route_data
 # from config_util.cache import no_cache
 
 
diff --git a/config_app/config_util/config/__init__.py b/config_app/config_util/config/__init__.py
index b7b940d4d..16b3c0ffe 100644
--- a/config_app/config_util/config/__init__.py
+++ b/config_app/config_util/config/__init__.py
@@ -1,6 +1,6 @@
-from config_util.config.fileprovider import FileConfigProvider
-from config_util.config.testprovider import TestConfigProvider
-from config_util.config.k8sprovider import KubernetesConfigProvider
+from config_app.config_util.config.fileprovider import FileConfigProvider
+from config_app.config_util.config.testprovider import TestConfigProvider
+from config_app.config_util.config.k8sprovider import KubernetesConfigProvider
 
 
 def get_config_provider(config_volume, yaml_filename, py_filename, testing=False, kubernetes=False):
diff --git a/config_app/config_util/config/basefileprovider.py b/config_app/config_util/config/basefileprovider.py
index 1bcf497a2..0ed1e9d35 100644
--- a/config_app/config_util/config/basefileprovider.py
+++ b/config_app/config_util/config/basefileprovider.py
@@ -1,7 +1,7 @@
 import os
 import logging
 
-from config_util.config.baseprovider import (BaseProvider, import_yaml, export_yaml,
+from config_app.config_util.config.baseprovider import (BaseProvider, import_yaml, export_yaml,
                                              CannotWriteConfigException)
 
 logger = logging.getLogger(__name__)
diff --git a/config_app/config_util/config/baseprovider.py b/config_app/config_util/config/baseprovider.py
index ce6c6589c..6fef3b870 100644
--- a/config_app/config_util/config/baseprovider.py
+++ b/config_app/config_util/config/baseprovider.py
@@ -6,7 +6,7 @@ from six import add_metaclass
 
 from jsonschema import validate, ValidationError
 
-from config_util.config.schema import CONFIG_SCHEMA
+from config_app.config_util.config.schema import CONFIG_SCHEMA
 
 logger = logging.getLogger(__name__)
 
diff --git a/config_app/config_util/config/fileprovider.py b/config_app/config_util/config/fileprovider.py
index 95da64330..385fe501f 100644
--- a/config_app/config_util/config/fileprovider.py
+++ b/config_app/config_util/config/fileprovider.py
@@ -1,8 +1,8 @@
 import os
 import logging
 
-from config_util.config.baseprovider import export_yaml, CannotWriteConfigException
-from config_util.config.basefileprovider import BaseFileProvider
+from config_app.config_util.config.baseprovider import export_yaml, CannotWriteConfigException
+from config_app.config_util.config.basefileprovider import BaseFileProvider
 
 logger = logging.getLogger(__name__)
 
diff --git a/config_app/config_util/config/k8sprovider.py b/config_app/config_util/config/k8sprovider.py
index 5d65af70b..57d0a5f8c 100644
--- a/config_app/config_util/config/k8sprovider.py
+++ b/config_app/config_util/config/k8sprovider.py
@@ -6,8 +6,8 @@ import time
 
 from requests import Request, Session
 
-from config_util.config.baseprovider import CannotWriteConfigException, get_yaml
-from config_util.config.basefileprovider import BaseFileProvider
+from config_app.config_util.config.baseprovider import CannotWriteConfigException, get_yaml
+from config_app.config_util.config.basefileprovider import BaseFileProvider
 
 logger = logging.getLogger(__name__)
 
diff --git a/config_app/config_util/config/testprovider.py b/config_app/config_util/config/testprovider.py
index 87f0309c3..32e0127c8 100644
--- a/config_app/config_util/config/testprovider.py
+++ b/config_app/config_util/config/testprovider.py
@@ -3,7 +3,7 @@ import io
 import os
 from datetime import datetime, timedelta
 
-from config_util.config.baseprovider import BaseProvider
+from config_app.config_util.config.baseprovider import BaseProvider
 
 REAL_FILES = ['test/data/signing-private.gpg', 'test/data/signing-public.gpg', 'test/data/test.pem']
 
diff --git a/config_app/config_web.py b/config_app/config_web.py
index 29339541f..487f8b78e 100644
--- a/config_app/config_web.py
+++ b/config_app/config_web.py
@@ -1,6 +1,6 @@
-from config_app import app as application
-from config_endpoints.api import api_bp
-from config_endpoints.setup_web import setup_web
+from config_app.c_app import app as application
+from config_app.config_endpoints.api import api_bp
+from config_app.config_endpoints.setup_web import setup_web
 
 
 application.register_blueprint(setup_web)