diff --git a/Dockerfile b/Dockerfile index 96c556549..cb20b2746 100644 --- a/Dockerfile +++ b/Dockerfile @@ -21,6 +21,23 @@ RUN venv/bin/pip freeze ADD binary_dependencies binary_dependencies RUN gdebi --n binary_dependencies/*.deb +# Install cfssl +RUN mkdir /gocode +ENV GOPATH /gocode +RUN curl -O https://storage.googleapis.com/golang/go1.6.linux-amd64.tar.gz && \ + tar -xvf go1.6.linux-amd64.tar.gz && \ + sudo mv go /usr/local && \ + rm -rf go1.6.linux-amd64.tar.gz && \ + /usr/local/go/bin/go get -u github.com/cloudflare/cfssl/cmd/cfssl && \ + /usr/local/go/bin/go get -u github.com/cloudflare/cfssl/cmd/cfssljson && \ + sudo cp /gocode/bin/cfssljson /bin/cfssljson && \ + sudo cp /gocode/bin/cfssl /bin/cfssl && \ + sudo rm -rf /gocode && sudo rm -rf /usr/local/go + +# Install jwtproxy +RUN curl -L -o /usr/local/bin/jwtproxy https://github.com/coreos/jwtproxy/releases/download/v0.0.1/jwtproxy-linux-x64 +RUN chmod +x /usr/local/bin/jwtproxy + # Install Grunt RUN ln -s /usr/bin/nodejs /usr/bin/node RUN npm install -g grunt-cli @@ -29,10 +46,8 @@ RUN npm install -g grunt-cli ADD grunt grunt RUN cd grunt && npm install -# Add all of the files! -ADD . . - # Run grunt +ADD static static RUN cd grunt && grunt RUN apt-get remove -y --auto-remove python-dev g++ libjpeg62-dev libevent-dev libldap2-dev libsasl2-dev libpq-dev libffi-dev libgpgme11-dev nodejs npm @@ -43,6 +58,7 @@ RUN rm -rf grunt ADD conf/init/copy_config_files.sh /etc/my_init.d/ ADD conf/init/doupdatelimits.sh /etc/my_init.d/ ADD conf/init/copy_syslog_config.sh /etc/my_init.d/ +ADD conf/init/create_certs.sh /etc/my_init.d/ ADD conf/init/runmigration.sh /etc/my_init.d/ ADD conf/init/syslog-ng.conf /etc/syslog-ng/ ADD conf/init/zz_boot.sh /etc/my_init.d/ @@ -53,16 +69,26 @@ RUN rm -rf /etc/service/syslog-forwarder # Download any external libs. 
RUN mkdir static/fonts static/ldn +ADD external_libraries.py external_libraries.py RUN venv/bin/python -m external_libraries RUN mkdir /usr/local/nginx/logs/ # TODO(ssewell): only works on a detached head, make work with ref -RUN cat .git/HEAD > GIT_HEAD +ADD .git/HEAD GIT_HEAD + +# Add all of the files! +ADD . . # Run the tests -RUN TEST=true venv/bin/python -m unittest discover -f -RUN TEST=true venv/bin/python -m test.registry_tests -f +ARG RUN_TESTS=true +ENV RUN_TESTS ${RUN_TESTS} +RUN if [ "$RUN_TESTS" = true ]; then \ + TEST=true venv/bin/python -m unittest discover -f; \ + fi +RUN if [ "$RUN_TESTS" = true ]; then \ + TEST=true venv/bin/python -m test.registry_tests -f; \ + fi RUN PYTHONPATH=. venv/bin/alembic heads | grep -E '^[0-9a-f]+ \(head\)$' > ALEMBIC_HEAD VOLUME ["/conf/stack", "/var/log", "/datastorage", "/tmp", "/conf/etcd"] diff --git a/boot.py b/boot.py index f78ad6ab8..a195b3ef6 100644 --- a/boot.py +++ b/boot.py @@ -1,15 +1,85 @@ #!/usr/bin/env python +from datetime import datetime, timedelta +from urlparse import urlunparse + +from jinja2 import Template +from cachetools import lru_cache import release + from app import app from data.model.release import set_region_release from util.config.database import sync_database_with_config +from util.generatepresharedkey import generate_key + + +@lru_cache(maxsize=1) +def get_audience(): + audience = app.config.get('JWTPROXY_AUDIENCE') + + if audience: + return audience + + scheme = app.config.get('PREFERRED_URL_SCHEME') + hostname = app.config.get('SERVER_HOSTNAME') + + # hostname includes port, use that + if ':' in hostname: + return urlunparse((scheme, hostname, '', '', '', '')) + + # no port, guess based on scheme + if scheme == 'https': + port = '443' + else: + port = '80' + + return urlunparse((scheme, hostname + ':' + port, '', '', '', '')) + + +def create_quay_service_key(): + """ + Creates a service key for quay to use in the jwtproxy + """ + minutes_until_expiration = 
app.config.get('QUAY_SERVICE_KEY_EXPIRATION', 120) + expiration = datetime.now() + timedelta(minutes=minutes_until_expiration) + quay_key, key_id = generate_key('quay', get_audience(), expiration_date=expiration) + + with open('/conf/quay.kid', mode='w') as f: + f.truncate(0) + f.write(key_id) + + with open('/conf/quay.pem', mode='w') as f: + f.truncate(0) + f.write(quay_key.exportKey()) + + return key_id + + +def create_jwtproxy_conf(quay_key_id): + """ + Generates the jwtproxy conf from the jinja template + """ + audience = get_audience() + registry = audience + '/keys' + + with open("/conf/jwtproxy_conf.yaml.jnj") as f: + template = Template(f.read()) + rendered = template.render( + audience=audience, + registry=registry, + key_id=quay_key_id + ) + + with open('/conf/jwtproxy_conf.yaml', 'w') as f: + f.write(rendered) def main(): if app.config.get('SETUP_COMPLETE', False): sync_database_with_config(app.config) + quay_key_id = create_quay_service_key() + create_jwtproxy_conf(quay_key_id) # Record deploy if release.REGION and release.GIT_HEAD: diff --git a/conf/http-base.conf b/conf/http-base.conf index 492ce40e4..22f01aa52 100644 --- a/conf/http-base.conf +++ b/conf/http-base.conf @@ -37,6 +37,9 @@ map $http_x_forwarded_proto $proper_scheme { upstream web_app_server { server unix:/tmp/gunicorn_web.sock fail_timeout=0; } +upstream jwtproxy_secscan { + server unix:/tmp/jwtproxy_secscan.sock fail_timeout=0; +} upstream verbs_app_server { server unix:/tmp/gunicorn_verbs.sock fail_timeout=0; } diff --git a/conf/init/create_certs.sh b/conf/init/create_certs.sh new file mode 100755 index 000000000..6cbb9c748 --- /dev/null +++ b/conf/init/create_certs.sh @@ -0,0 +1,10 @@ +#! 
/bin/bash +set -e + +# Create certs for jwtproxy to mitm outgoing TLS connections +echo '{"CN":"CA","key":{"algo":"rsa","size":2048}}' | cfssl gencert -initca - | cfssljson -bare mitm +cp mitm-key.pem /conf/mitm.key +cp mitm.pem /conf/mitm.cert +cp mitm.pem /usr/local/share/ca-certificates/mitm.crt + +update-ca-certificates diff --git a/conf/init/service/jwtproxy/log/run b/conf/init/service/jwtproxy/log/run new file mode 100755 index 000000000..0fd684fe2 --- /dev/null +++ b/conf/init/service/jwtproxy/log/run @@ -0,0 +1,2 @@ +#!/bin/sh +exec logger -i -t jwtproxy diff --git a/conf/init/service/jwtproxy/run b/conf/init/service/jwtproxy/run new file mode 100755 index 000000000..5e23ea36c --- /dev/null +++ b/conf/init/service/jwtproxy/run @@ -0,0 +1,9 @@ +#! /bin/bash + +echo 'Starting jwtproxy' + +cd / +/usr/local/bin/jwtproxy --config conf/jwtproxy_conf.yaml +rm /tmp/jwtproxy_secscan.sock + +echo 'Jwtproxy exited' diff --git a/conf/init/service/service_key_worker/log/run b/conf/init/service/service_key_worker/log/run new file mode 100755 index 000000000..410fabb1a --- /dev/null +++ b/conf/init/service/service_key_worker/log/run @@ -0,0 +1,2 @@ +#!/bin/sh +exec logger -i -t service_key_worker diff --git a/conf/init/service/service_key_worker/run b/conf/init/service/service_key_worker/run new file mode 100755 index 000000000..20b578c24 --- /dev/null +++ b/conf/init/service/service_key_worker/run @@ -0,0 +1,8 @@ +#! 
/bin/bash + +echo 'Starting service key worker' + +cd / +venv/bin/python -m workers.service_key_worker 2>&1 + +echo 'Service key worker exited' diff --git a/conf/jwtproxy_conf.yaml.jnj b/conf/jwtproxy_conf.yaml.jnj new file mode 100644 index 000000000..e1cbd7136 --- /dev/null +++ b/conf/jwtproxy_conf.yaml.jnj @@ -0,0 +1,27 @@ +jwtproxy: + signer_proxy: + enabled: true + listen_addr: :8080 + ca_key_file: /conf/mitm.key + ca_crt_file: /conf/mitm.cert + + signer: + issuer: quay + expiration_time: 5m + max_skew: 1m + private_key: + type: preshared + options: + key_id: {{ key_id }} + private_key_path: /conf/quay.pem + verifier_proxies: + - enabled: true + listen_addr: unix:/tmp/jwtproxy_secscan.sock + verifier: + upstream: unix:/tmp/gunicorn_web.sock + audience: {{ audience }} + key_server: + type: keyregistry + options: + issuer: clair + registry: {{ registry }} diff --git a/conf/server-base.conf b/conf/server-base.conf index a2b8d1511..04513c122 100644 --- a/conf/server-base.conf +++ b/conf/server-base.conf @@ -49,6 +49,10 @@ location ~ ^/(v1/repositories|v2/auth)/ { limit_req zone=repositories burst=10; } +location /secscan/ { + proxy_pass http://jwtproxy_secscan; +} + location ~ ^/v2 { # If we're being accessed via v1.quay.io, pretend we don't support v2. 
if ($host = "v1.quay.io") { diff --git a/config.py b/config.py index 06407c7b1..ea9fada91 100644 --- a/config.py +++ b/config.py @@ -1,8 +1,6 @@ -import requests import os.path -from data.buildlogs import BuildLogs -from data.userevent import UserEventBuilder +import requests def build_requests_session(): @@ -292,6 +290,14 @@ class DefaultConfig(object): 'API_TIMEOUT_POST_SECONDS': 480, } + # JWTProxy Settings + # The address (sans schema) to proxy outgoing requests through the jwtproxy + # to be signed + JWTPROXY_SIGNER = 'localhost:8080' + # The audience that jwtproxy should verify on incoming requests + # If None, will be calculated off of the SERVER_HOSTNAME (default) + JWTPROXY_AUDIENCE = None + # Torrent management flags FEATURE_BITTORRENT = False BITTORRENT_PIECE_SIZE = 512 * 1024 @@ -303,3 +309,18 @@ class DefaultConfig(object): # hide the ID range for production (in which this value is overridden). Should *not* # be relied upon for secure encryption otherwise. PAGE_TOKEN_KEY = 'um=/?Kqgp)2yQaS/A6C{NL=dXE&>C:}(' + + # The timeout for service key approval. + UNAPPROVED_SERVICE_KEY_TTL_SEC = 60 * 60 * 24 # One day + + # How long to wait before GCing an expired service key. + EXPIRED_SERVICE_KEY_TTL_SEC = 60 * 60 * 24 * 7 # One week + + # The ID of the user account in the database to be used for service audit logs. If none, the + # lowest user in the database will be used. 
+ SERVICE_LOG_ACCOUNT_ID = None + + # Quay's service key expiration in minutes + QUAY_SERVICE_KEY_EXPIRATION = 120 + # Number of minutes between expiration refresh in minutes + QUAY_SERVICE_KEY_REFRESH = 60 diff --git a/data/database.py b/data/database.py index e7f090de2..fea50c69a 100644 --- a/data/database.py +++ b/data/database.py @@ -1,20 +1,23 @@ -import string -import logging -import uuid -import time -import toposort -import resumablehashlib -import sys import inspect +import logging +import string +import sys +import time +import uuid -from random import SystemRandom -from datetime import datetime -from peewee import * -from data.read_slave import ReadSlaveModel -from data.fields import ResumableSHA256Field, ResumableSHA1Field, JSONField, Base64BinaryField -from sqlalchemy.engine.url import make_url from collections import defaultdict +from datetime import datetime +from random import SystemRandom +import resumablehashlib +import toposort + +from enum import Enum +from peewee import * +from sqlalchemy.engine.url import make_url + +from data.fields import ResumableSHA256Field, ResumableSHA1Field, JSONField, Base64BinaryField +from data.read_slave import ReadSlaveModel from util.names import urn_generator @@ -769,6 +772,7 @@ class Notification(BaseModel): metadata_json = TextField(default='{}') created = DateTimeField(default=datetime.now, index=True) dismissed = BooleanField(default=False) + lookup_path = CharField(null=True, index=True) class ExternalNotificationEvent(BaseModel): @@ -866,5 +870,34 @@ class TorrentInfo(BaseModel): (('storage', 'piece_length'), True), ) + +class ServiceKeyApprovalType(Enum): + SUPERUSER = 'Super User API' + KEY_ROTATION = 'Key Rotation' + AUTOMATIC = 'Automatic' + + +_ServiceKeyApproverProxy = Proxy() +class ServiceKeyApproval(BaseModel): + approver = ForeignKeyField(_ServiceKeyApproverProxy, null=True) + approval_type = CharField(index=True) + approved_date = DateTimeField(default=datetime.utcnow) + notes = 
TextField(default='') + +_ServiceKeyApproverProxy.initialize(User) + + +class ServiceKey(BaseModel): + name = CharField() + kid = CharField(unique=True, index=True) + service = CharField(index=True) + jwk = JSONField() + metadata = JSONField() + created_date = DateTimeField(default=datetime.utcnow) + expiration_date = DateTimeField(null=True) + rotation_duration = IntegerField(null=True) + approval = ForeignKeyField(ServiceKeyApproval, index=True, null=True) + + is_model = lambda x: inspect.isclass(x) and issubclass(x, BaseModel) and x is not BaseModel all_models = [model[1] for model in inspect.getmembers(sys.modules[__name__], is_model)] diff --git a/data/migrations/migration.sh b/data/migrations/migration.sh index 65521f6a6..e78547dfb 100755 --- a/data/migrations/migration.sh +++ b/data/migrations/migration.sh @@ -26,9 +26,9 @@ up_mariadb() { # Run a SQL database on port 3306 inside of Docker. docker run --name mariadb -p 3306:3306 -e MYSQL_ROOT_PASSWORD=password -d mariadb - # Sleep for 10s to get MySQL get started. - echo 'Sleeping for 10...' - sleep 10 + # Sleep for 20s to get MySQL get started. + echo 'Sleeping for 20...' + sleep 20 # Add the database to mysql. docker run --rm --link mariadb:mariadb mariadb sh -c 'echo "create database genschema" | mysql -h"$MARIADB_PORT_3306_TCP_ADDR" -P"$MARIADB_PORT_3306_TCP_PORT" -uroot -ppassword' @@ -43,9 +43,9 @@ up_percona() { # Run a SQL database on port 3306 inside of Docker. docker run --name percona -p 3306:3306 -e MYSQL_ROOT_PASSWORD=password -d percona - # Sleep for 10s - echo 'Sleeping for 10...' - sleep 10 + # Sleep for 20s + echo 'Sleeping for 20...' + sleep 20 # Add the daabase to mysql. 
docker run --rm --link percona:percona percona sh -c 'echo "create database genschema" | mysql -h $PERCONA_PORT_3306_TCP_ADDR -uroot -ppassword' diff --git a/data/migrations/versions/a3ba52d02dec_initial_keyserver.py b/data/migrations/versions/a3ba52d02dec_initial_keyserver.py new file mode 100644 index 000000000..4e59ba1eb --- /dev/null +++ b/data/migrations/versions/a3ba52d02dec_initial_keyserver.py @@ -0,0 +1,91 @@ +"""initial keyserver + +Revision ID: a3ba52d02dec +Revises: e4129c93e477 +Create Date: 2016-03-30 15:28:32.036753 + +""" + +# revision identifiers, used by Alembic. +revision = 'a3ba52d02dec' +down_revision = 'e4129c93e477' + +from alembic import op +import sqlalchemy as sa + +from util.migrate import UTF8LongText + +def upgrade(tables): + op.create_table( + 'servicekeyapproval', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('approver_id', sa.Integer(), nullable=True), + sa.Column('approval_type', sa.String(length=255), nullable=False), + sa.Column('approved_date', sa.DateTime(), nullable=False), + sa.Column('notes', UTF8LongText(), nullable=False), + sa.PrimaryKeyConstraint('id', name=op.f('pk_servicekeyapproval')), + ) + op.create_index('servicekeyapproval_approval_type', 'servicekeyapproval', ['approval_type'], unique=False) + op.create_index('servicekeyapproval_approver_id', 'servicekeyapproval', ['approver_id'], unique=False) + + + op.bulk_insert( + tables.notificationkind, + [{'name':'service_key_submitted'}], + ) + + + op.bulk_insert(tables.logentrykind, [ + {'name':'service_key_create'}, + {'name':'service_key_approve'}, + {'name':'service_key_delete'}, + {'name':'service_key_modify'}, + {'name':'service_key_extend'}, + {'name':'service_key_rotate'}, + ]) + + + op.create_table( + 'servicekey', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('kid', sa.String(length=255), nullable=False), + sa.Column('service', sa.String(length=255), nullable=False), + 
sa.Column('jwk', UTF8LongText(), nullable=False), + sa.Column('metadata', UTF8LongText(), nullable=False), + sa.Column('created_date', sa.DateTime(), nullable=False), + sa.Column('expiration_date', sa.DateTime(), nullable=True), + sa.Column('rotation_duration', sa.Integer(), nullable=True), + sa.Column('approval_id', sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(['approval_id'], ['servicekeyapproval.id'], + name=op.f('fk_servicekey_approval_id_servicekeyapproval')), + sa.PrimaryKeyConstraint('id', name=op.f('pk_servicekey')), + ) + op.create_index('servicekey_approval_id', 'servicekey', ['approval_id'], unique=False) + op.create_index('servicekey_kid', 'servicekey', ['kid'], unique=True) + op.create_index('servicekey_service', 'servicekey', ['service'], unique=False) + + + op.add_column(u'notification', sa.Column('lookup_path', sa.String(length=255), nullable=True)) + op.create_index('notification_lookup_path', 'notification', ['lookup_path'], unique=False) + + +def downgrade(tables): + op.execute(tables.logentrykind.delete().where(tables.logentrykind.c.name == op.inline_literal('service_key_create'))) + op.execute(tables.logentrykind.delete().where(tables.logentrykind.c.name == op.inline_literal('service_key_approve'))) + op.execute(tables.logentrykind.delete().where(tables.logentrykind.c.name == op.inline_literal('service_key_delete'))) + op.execute(tables.logentrykind.delete().where(tables.logentrykind.c.name == op.inline_literal('service_key_modify'))) + op.execute(tables.logentrykind.delete().where(tables.logentrykind.c.name == op.inline_literal('service_key_extend'))) + op.execute(tables.logentrykind.delete().where(tables.logentrykind.c.name == op.inline_literal('service_key_rotate'))) + + + op.execute(tables.notificationkind.delete().where(tables.notificationkind.c.name == op.inline_literal('service_key_submitted'))) + + + op.drop_column(u'notification', 'lookup_path') + + + op.drop_table('servicekey') + + + op.drop_table('servicekeyapproval') diff 
--git a/data/model/__init__.py b/data/model/__init__.py index b6f4cd975..b138bcf35 100644 --- a/data/model/__init__.py +++ b/data/model/__init__.py @@ -76,6 +76,18 @@ class InvalidManifestException(DataModelException): pass +class ServiceKeyDoesNotExist(DataModelException): + pass + + +class ServiceKeyAlreadyApproved(DataModelException): + pass + + +class ServiceNameInvalid(DataModelException): + pass + + class TooManyLoginAttemptsException(Exception): def __init__(self, message, retry_after): super(TooManyLoginAttemptsException, self).__init__(message) @@ -95,4 +107,5 @@ config = Config() # moving the minimal number of things to _basequery # TODO document the methods and modules for each one of the submodules below. from data.model import (blob, build, image, log, notification, oauth, organization, permission, - repository, storage, tag, team, token, user, release, modelutil) + repository, service_keys, storage, tag, team, token, user, release, + modelutil) diff --git a/data/model/log.py b/data/model/log.py index 56c029a1e..0dbf53b87 100644 --- a/data/model/log.py +++ b/data/model/log.py @@ -1,13 +1,15 @@ import json +from calendar import timegm from peewee import JOIN_LEFT_OUTER, SQL, fn from datetime import datetime, timedelta, date from cachetools import lru_cache from data.database import LogEntry, LogEntryKind, User, db +from data.model import config -# TODO: Find a way to get logs without slowing down pagination significantly. 
-def _logs_query(selections, start_time, end_time, performer=None, repository=None, namespace=None): +def _logs_query(selections, start_time, end_time, performer=None, repository=None, namespace=None, + ignore=None): joined = (LogEntry .select(*selections) .switch(LogEntry) @@ -22,6 +24,11 @@ def _logs_query(selections, start_time, end_time, performer=None, repository=Non if namespace: joined = joined.join(User).where(User.username == namespace) + if ignore: + kind_map = get_log_entry_kinds() + ignore_ids = [kind_map[kind_name] for kind_name in ignore] + joined = joined.where(~(LogEntry.kind << ignore_ids)) + return joined @@ -30,22 +37,25 @@ def get_log_entry_kinds(): kind_map = {} for kind in LogEntryKind.select(): kind_map[kind.id] = kind.name + kind_map[kind.name] = kind.id return kind_map -def get_aggregated_logs(start_time, end_time, performer=None, repository=None, namespace=None): +def get_aggregated_logs(start_time, end_time, performer=None, repository=None, namespace=None, + ignore=None): date = db.extract_date('day', LogEntry.datetime) selections = [LogEntry.kind, date.alias('day'), fn.Count(LogEntry.id).alias('count')] - query = _logs_query(selections, start_time, end_time, performer, repository, namespace) + query = _logs_query(selections, start_time, end_time, performer, repository, namespace, ignore) return query.group_by(date, LogEntry.kind) -def get_logs_query(start_time, end_time, performer=None, repository=None, namespace=None): +def get_logs_query(start_time, end_time, performer=None, repository=None, namespace=None, + ignore=None): Performer = User.alias() selections = [LogEntry, Performer] - query = _logs_query(selections, start_time, end_time, performer, repository, namespace) + query = _logs_query(selections, start_time, end_time, performer, repository, namespace, ignore) query = (query.switch(LogEntry) .join(Performer, JOIN_LEFT_OUTER, on=(LogEntry.performer == Performer.id).alias('performer'))) @@ -53,15 +63,30 @@ def 
get_logs_query(start_time, end_time, performer=None, repository=None, namesp return query +def _json_serialize(obj): + if isinstance(obj, datetime): + return timegm(obj.utctimetuple()) + + return obj + + def log_action(kind_name, user_or_organization_name, performer=None, repository=None, ip=None, metadata={}, timestamp=None): if not timestamp: timestamp = datetime.today() + account = None + if user_or_organization_name is not None: + account = User.get(User.username == user_or_organization_name).id + else: + account = config.app_config.get('SERVICE_LOG_ACCOUNT_ID') + if account is None: + account = User.select(fn.Min(User.id)).tuples().get()[0] + kind = LogEntryKind.get(LogEntryKind.name == kind_name) - account = User.get(User.username == user_or_organization_name) + metadata_json = json.dumps(metadata, default=_json_serialize) LogEntry.create(kind=kind, account=account, performer=performer, - repository=repository, ip=ip, metadata_json=json.dumps(metadata), + repository=repository, ip=ip, metadata_json=metadata_json, datetime=timestamp) diff --git a/data/model/notification.py b/data/model/notification.py index c42ddd07d..194e2975b 100644 --- a/data/model/notification.py +++ b/data/model/notification.py @@ -6,10 +6,11 @@ from data.database import (Notification, NotificationKind, User, Team, TeamMembe ExternalNotificationMethod, Namespace) -def create_notification(kind_name, target, metadata={}): +def create_notification(kind_name, target, metadata={}, lookup_path=None): kind_ref = NotificationKind.get(name=kind_name) notification = Notification.create(kind=kind_ref, target=target, - metadata_json=json.dumps(metadata)) + metadata_json=json.dumps(metadata), + lookup_path=lookup_path) return notification @@ -27,6 +28,12 @@ def lookup_notification(user, uuid): return results[0] +def lookup_notifications_by_path_prefix(prefix): + return list((Notification + .select() + .where(Notification.lookup_path % prefix))) + + def list_notifications(user, kind_name=None, 
id_filter=None, include_dismissed=False, page=None, limit=None): @@ -69,6 +76,13 @@ def list_notifications(user, kind_name=None, id_filter=None, include_dismissed=F return query.order_by(base_query.c.created.desc()) +def delete_all_notifications_by_path_prefix(prefix): + (Notification + .delete() + .where(Notification.lookup_path ** (prefix + '%')) + .execute()) + + def delete_all_notifications_by_kind(kind_name): kind_ref = NotificationKind.get(name=kind_name) (Notification @@ -87,9 +101,10 @@ def delete_matching_notifications(target, kind_name, **kwargs): kind_ref = NotificationKind.get(name=kind_name) # Load all notifications for the user with the given kind. - notifications = Notification.select().where( - Notification.target == target, - Notification.kind == kind_ref) + notifications = (Notification + .select() + .where(Notification.target == target, + Notification.kind == kind_ref)) # For each, match the metadata to the specified values. for notification in notifications: diff --git a/data/model/service_keys.py b/data/model/service_keys.py new file mode 100644 index 000000000..382433da3 --- /dev/null +++ b/data/model/service_keys.py @@ -0,0 +1,199 @@ +import re + +from calendar import timegm +from datetime import datetime, timedelta +from peewee import JOIN_LEFT_OUTER + +from Crypto.PublicKey import RSA +from jwkest.jwk import RSAKey + +from data.database import db_for_update, User, ServiceKey, ServiceKeyApproval +from data.model import (ServiceKeyDoesNotExist, ServiceKeyAlreadyApproved, ServiceNameInvalid, + db_transaction, config) +from data.model.notification import create_notification, delete_all_notifications_by_path_prefix +from util.security.fingerprint import canonical_kid + + +_SERVICE_NAME_REGEX = re.compile(r'^[a-z0-9_]+$') + +def _expired_keys_clause(service): + return ((ServiceKey.service == service) & + (ServiceKey.expiration_date <= datetime.utcnow())) + + +def _stale_expired_keys_service_clause(service): + return ((ServiceKey.service == 
service) & _stale_expired_keys_clause()) + + +def _stale_expired_keys_clause(): + expired_ttl = timedelta(seconds=config.app_config['EXPIRED_SERVICE_KEY_TTL_SEC']) + return (ServiceKey.expiration_date <= (datetime.utcnow() - expired_ttl)) + + +def _stale_unapproved_keys_clause(service): + unapproved_ttl = timedelta(seconds=config.app_config['UNAPPROVED_SERVICE_KEY_TTL_SEC']) + return ((ServiceKey.service == service) & + (ServiceKey.approval >> None) & + (ServiceKey.created_date <= (datetime.utcnow() - unapproved_ttl))) + + +def _gc_expired(service): + ServiceKey.delete().where(_stale_expired_keys_service_clause(service) | + _stale_unapproved_keys_clause(service)).execute() + + +def _verify_service_name(service_name): + if not _SERVICE_NAME_REGEX.match(service_name): + raise ServiceNameInvalid + + +def _notify_superusers(key): + notification_metadata = { + 'name': key.name, + 'kid': key.kid, + 'service': key.service, + 'jwk': key.jwk, + 'metadata': key.metadata, + 'created_date': timegm(key.created_date.utctimetuple()), + } + + if key.expiration_date is not None: + notification_metadata['expiration_date'] = timegm(key.expiration_date.utctimetuple()) + + if len(config.app_config['SUPER_USERS']) > 0: + superusers = User.select().where(User.username << config.app_config['SUPER_USERS']) + for superuser in superusers: + create_notification('service_key_submitted', superuser, metadata=notification_metadata, + lookup_path='/service_key_approval/{0}/{1}'.format(key.kid, superuser.id)) + + +def create_service_key(name, kid, service, jwk, metadata, expiration_date, rotation_duration=None): + _verify_service_name(service) + _gc_expired(service) + + key = ServiceKey.create(name=name, kid=kid, service=service, jwk=jwk, metadata=metadata, + expiration_date=expiration_date, rotation_duration=rotation_duration) + + _notify_superusers(key) + return key + + +def generate_service_key(service, expiration_date, kid=None, name='', metadata=None, + rotation_duration=None): + private_key = 
RSA.generate(2048) + jwk = RSAKey(key=private_key.publickey()).serialize() + if kid is None: + kid = canonical_kid(jwk) + + key = create_service_key(name, kid, service, jwk, metadata or {}, expiration_date, + rotation_duration=rotation_duration) + return (private_key, key) + + +def replace_service_key(old_kid, kid, jwk, metadata, expiration_date): + try: + with db_transaction(): + key = db_for_update(ServiceKey.select().where(ServiceKey.kid == old_kid)).get() + key.metadata.update(metadata) + + ServiceKey.create(name=key.name, kid=kid, service=key.service, jwk=jwk, + metadata=key.metadata, expiration_date=expiration_date, + rotation_duration=key.rotation_duration, approval=key.approval) + key.delete_instance() + except ServiceKey.DoesNotExist: + raise ServiceKeyDoesNotExist + + _notify_superusers(key) + delete_all_notifications_by_path_prefix('/service_key_approval/{0}'.format(old_kid)) + _gc_expired(key.service) + + +def update_service_key(kid, name=None, metadata=None): + try: + with db_transaction(): + key = db_for_update(ServiceKey.select().where(ServiceKey.kid == kid)).get() + if name is not None: + key.name = name + + if metadata is not None: + key.metadata.update(metadata) + + key.save() + except ServiceKey.DoesNotExist: + raise ServiceKeyDoesNotExist + + +def delete_service_key(kid): + try: + key = ServiceKey.get(kid=kid) + ServiceKey.delete().where(ServiceKey.kid == kid).execute() + except ServiceKey.DoesNotExist: + raise ServiceKeyDoesNotExist + + delete_all_notifications_by_path_prefix('/service_key_approval/{0}'.format(kid)) + _gc_expired(key.service) + return key + + +def set_key_expiration(kid, expiration_date): + try: + service_key = get_service_key(kid) + except ServiceKey.DoesNotExist: + raise ServiceKeyDoesNotExist + + service_key.expiration_date = expiration_date + service_key.save() + + +def approve_service_key(kid, approver, approval_type, notes=''): + try: + with db_transaction(): + key = db_for_update(ServiceKey.select().where(ServiceKey.kid 
== kid)).get() + if key.approval is not None: + raise ServiceKeyAlreadyApproved + + approval = ServiceKeyApproval.create(approver=approver, approval_type=approval_type, + notes=notes) + key.approval = approval + key.save() + except ServiceKey.DoesNotExist: + raise ServiceKeyDoesNotExist + + delete_all_notifications_by_path_prefix('/service_key_approval/{0}'.format(kid)) + return key + + +def _list_service_keys_query(kid=None, service=None, approved_only=False, approval_type=None): + query = ServiceKey.select().join(ServiceKeyApproval, JOIN_LEFT_OUTER) + + if approved_only: + query = query.where(~(ServiceKey.approval >> None)) + + if approval_type is not None: + query = query.where(ServiceKeyApproval.approval_type == approval_type) + + if service is not None: + query = query.where(ServiceKey.service == service) + query = query.where(~(_expired_keys_clause(service)) | + ~(_stale_unapproved_keys_clause(service))) + + if kid is not None: + query = query.where(ServiceKey.kid == kid) + + query = query.where(~(_stale_expired_keys_clause()) | (ServiceKey.expiration_date >> None)) + return query + + +def list_all_keys(): + return list(_list_service_keys_query()) + + +def list_service_keys(service): + return list(_list_service_keys_query(service=service, approved_only=True)) + + +def get_service_key(kid, service=None): + try: + return _list_service_keys_query(kid=kid, service=service).get() + except ServiceKey.DoesNotExist: + raise ServiceKeyDoesNotExist diff --git a/endpoints/api/logs.py b/endpoints/api/logs.py index c3c65ef55..3602bbace 100644 --- a/endpoints/api/logs.py +++ b/endpoints/api/logs.py @@ -16,6 +16,8 @@ from auth import scopes from app import avatar LOGS_PER_PAGE = 20 +SERVICE_LEVEL_LOG_KINDS = set(['service_key_create', 'service_key_approve', 'service_key_delete', + 'service_key_modify', 'service_key_extend', 'service_key_rotate']) def log_view(log, kinds): view = { @@ -79,11 +81,12 @@ def _validate_logs_arguments(start_time, end_time, performer_name): def 
get_logs(start_time, end_time, performer_name=None, repository=None, namespace=None, - page_token=None): + page_token=None, ignore=None): (start_time, end_time, performer) = _validate_logs_arguments(start_time, end_time, performer_name) kinds = model.log.get_log_entry_kinds() logs_query = model.log.get_logs_query(start_time, end_time, performer=performer, - repository=repository, namespace=namespace) + repository=repository, namespace=namespace, + ignore=ignore) logs, next_page_token = model.modelutil.paginate(logs_query, database.LogEntry, descending=True, page_token=page_token, limit=LOGS_PER_PAGE) @@ -95,12 +98,14 @@ def get_logs(start_time, end_time, performer_name=None, repository=None, namespa }, next_page_token -def get_aggregate_logs(start_time, end_time, performer_name=None, repository=None, namespace=None): +def get_aggregate_logs(start_time, end_time, performer_name=None, repository=None, namespace=None, + ignore=None): (start_time, end_time, performer) = _validate_logs_arguments(start_time, end_time, performer_name) kinds = model.log.get_log_entry_kinds() aggregated_logs = model.log.get_aggregated_logs(start_time, end_time, performer=performer, - repository=repository, namespace=namespace) + repository=repository, namespace=namespace, + ignore=ignore) return { 'aggregated': [aggregated_log_view(log, kinds, start_time) for log in aggregated_logs] @@ -126,7 +131,8 @@ class RepositoryLogs(RepositoryParamResource): start_time = parsed_args['starttime'] end_time = parsed_args['endtime'] - return get_logs(start_time, end_time, repository=repo, page_token=page_token) + return get_logs(start_time, end_time, repository=repo, page_token=page_token, + ignore=SERVICE_LEVEL_LOG_KINDS) @resource('/v1/user/logs') @@ -147,7 +153,7 @@ class UserLogs(ApiResource): user = get_authenticated_user() return get_logs(start_time, end_time, performer_name=performer_name, namespace=user.username, - page_token=page_token) + page_token=page_token, ignore=SERVICE_LEVEL_LOG_KINDS) 
@resource('/v1/organization//logs') @@ -172,7 +178,7 @@ class OrgLogs(ApiResource): end_time = parsed_args['endtime'] return get_logs(start_time, end_time, namespace=orgname, performer_name=performer_name, - page_token=page_token) + page_token=page_token, ignore=SERVICE_LEVEL_LOG_KINDS) raise Unauthorized() @@ -194,7 +200,8 @@ class RepositoryAggregateLogs(RepositoryParamResource): start_time = parsed_args['starttime'] end_time = parsed_args['endtime'] - return get_aggregate_logs(start_time, end_time, repository=repo) + return get_aggregate_logs(start_time, end_time, repository=repo, + ignore=SERVICE_LEVEL_LOG_KINDS) @resource('/v1/user/aggregatelogs') @@ -237,6 +244,6 @@ class OrgAggregateLogs(ApiResource): end_time = parsed_args['endtime'] return get_aggregate_logs(start_time, end_time, namespace=orgname, - performer_name=performer_name) + performer_name=performer_name, ignore=SERVICE_LEVEL_LOG_KINDS) raise Unauthorized() diff --git a/endpoints/api/superuser.py b/endpoints/api/superuser.py index 3279c9e9f..fc7ef8c8e 100644 --- a/endpoints/api/superuser.py +++ b/endpoints/api/superuser.py @@ -1,23 +1,27 @@ """ Superuser API. 
""" -import string import logging import os +import string +from datetime import datetime from random import SystemRandom -from flask import request + +from flask import request, make_response, jsonify import features from app import app, avatar, superusers, authentication, config_provider +from auth import scopes +from auth.auth_context import get_authenticated_user +from auth.permissions import SuperUserPermission from endpoints.api import (ApiResource, nickname, resource, validate_json_request, internal_only, require_scope, show_if, parse_args, query_param, abort, require_fresh_login, path_param, verify_not_prod, - page_support) + page_support, log_action) from endpoints.api.logs import get_logs, get_aggregate_logs from data import model -from auth.permissions import SuperUserPermission -from auth import scopes +from data.database import ServiceKeyApprovalType from util.useremails import send_confirmation_email, send_recovery_email @@ -139,6 +143,8 @@ def org_view(org): def user_view(user, password=None): user_data = { + 'kind': 'user', + 'name': user.username, 'username': user.username, 'email': user.email, 'verified': user.verified, @@ -467,3 +473,299 @@ class SuperUserOrganizationManagement(ApiResource): return org_view(org) abort(403) + + +def key_view(key): + return { + 'name': key.name, + 'kid': key.kid, + 'service': key.service, + 'jwk': key.jwk, + 'metadata': key.metadata, + 'created_date': key.created_date, + 'expiration_date': key.expiration_date, + 'rotation_duration': key.rotation_duration, + 'approval': approval_view(key.approval) if key.approval is not None else None, + } + + +def approval_view(approval): + return { + 'approver': user_view(approval.approver) if approval.approver else None, + 'approval_type': approval.approval_type, + 'approved_date': approval.approved_date, + 'notes': approval.notes, + } + + +@resource('/v1/superuser/keys') +@show_if(features.SUPER_USERS) +class SuperUserServiceKeyManagement(ApiResource): + """ Resource for 
managing service keys.""" + schemas = { + 'CreateServiceKey': { + 'id': 'CreateServiceKey', + 'type': 'object', + 'description': 'Description of creation of a service key', + 'required': ['service', 'expiration'], + 'properties': { + 'service': { + 'type': 'string', + 'description': 'The service authenticating with this key', + }, + 'name': { + 'type': 'string', + 'description': 'The friendly name of a service key', + }, + 'metadata': { + 'type': 'object', + 'description': 'The key/value pairs of this key\'s metadata', + }, + 'notes': { + 'type': 'string', + 'description': 'If specified, the extra notes for the key', + }, + 'expiration': { + 'description': 'The expiration date as a unix timestamp', + 'anyOf': [{'type': 'number'}, {'type': 'null'}], + }, + }, + }, + } + + @verify_not_prod + @nickname('listServiceKeys') + @require_scope(scopes.SUPERUSER) + def get(self): + if SuperUserPermission().can(): + keys = model.service_keys.list_all_keys() + + return jsonify({ + 'keys': [key_view(key) for key in keys], + }) + + abort(403) + + @require_fresh_login + @verify_not_prod + @nickname('createServiceKey') + @require_scope(scopes.SUPERUSER) + @validate_json_request('CreateServiceKey') + def post(self): + if SuperUserPermission().can(): + body = request.get_json() + + # Ensure we have a valid expiration date if specified. + expiration_date = body.get('expiration', None) + if expiration_date is not None: + try: + expiration_date = datetime.utcfromtimestamp(float(expiration_date)) + except ValueError: + abort(400) + + if expiration_date <= datetime.now(): + abort(400) + + # Create the metadata for the key. + user = get_authenticated_user() + metadata = body.get('metadata', {}) + metadata.update({ + 'created_by': 'Quay Superuser Panel', + 'creator': user.username, + 'ip': request.remote_addr, + }) + + # Generate a key with a private key that we *never save*. 
+ (private_key, key) = model.service_keys.generate_service_key(body['service'], expiration_date, + metadata=metadata, + name=body.get('name', '')) + # Auto-approve the service key. + model.service_keys.approve_service_key(key.kid, user, ServiceKeyApprovalType.SUPERUSER, + notes=body.get('notes', '')) + + # Log the creation and auto-approval of the service key. + key_log_metadata = { + 'kid': key.kid, + 'preshared': True, + 'service': body['service'], + 'name': body.get('name', ''), + 'expiration_date': expiration_date, + 'auto_approved': True, + } + + log_action('service_key_create', None, key_log_metadata) + log_action('service_key_approve', None, key_log_metadata) + + return jsonify({ + 'kid': key.kid, + 'name': body.get('name', ''), + 'public_key': private_key.publickey().exportKey('PEM'), + 'private_key': private_key.exportKey('PEM'), + }) + + abort(403) + + +@resource('/v1/superuser/keys/') +@path_param('kid', 'The unique identifier for a service key') +@show_if(features.SUPER_USERS) +class SuperUserServiceKey(ApiResource): + """ Resource for managing service keys. 
""" + schemas = { + 'PutServiceKey': { + 'id': 'PutServiceKey', + 'type': 'object', + 'description': 'Description of updates for a service key', + 'properties': { + 'name': { + 'type': 'string', + 'description': 'The friendly name of a service key', + }, + 'metadata': { + 'type': 'object', + 'description': 'The key/value pairs of this key\'s metadata', + }, + 'expiration': { + 'description': 'The expiration date as a unix timestamp', + 'anyOf': [{'type': 'number'}, {'type': 'null'}], + }, + }, + }, + } + + @verify_not_prod + @nickname('getServiceKey') + @require_scope(scopes.SUPERUSER) + def get(self, kid): + if SuperUserPermission().can(): + try: + key = model.service_keys.get_service_key(kid) + return jsonify(key_view(key)) + except model.service_keys.ServiceKeyDoesNotExist: + abort(404) + + abort(403) + + @require_fresh_login + @verify_not_prod + @nickname('updateServiceKey') + @require_scope(scopes.SUPERUSER) + @validate_json_request('PutServiceKey') + def put(self, kid): + if SuperUserPermission().can(): + body = request.get_json() + try: + key = model.service_keys.get_service_key(kid) + except model.service_keys.ServiceKeyDoesNotExist: + abort(404) + + key_log_metadata = { + 'kid': key.kid, + 'service': key.service, + 'name': body.get('name', key.name), + 'expiration_date': key.expiration_date, + } + + if 'expiration' in body: + expiration_date = body['expiration'] + if expiration_date is not None and expiration_date != '': + try: + expiration_date = datetime.utcfromtimestamp(float(expiration_date)) + except ValueError: + abort(400) + + if expiration_date <= datetime.now(): + abort(400) + + key_log_metadata.update({ + 'old_expiration_date': key.expiration_date, + 'expiration_date': expiration_date, + }) + + log_action('service_key_extend', None, key_log_metadata) + model.service_keys.set_key_expiration(kid, expiration_date) + + + if 'name' in body or 'metadata' in body: + model.service_keys.update_service_key(kid, body.get('name'), body.get('metadata')) + 
log_action('service_key_modify', None, key_log_metadata) + + return jsonify(key_view(model.service_keys.get_service_key(kid))) + + abort(403) + + @require_fresh_login + @verify_not_prod + @nickname('deleteServiceKey') + @require_scope(scopes.SUPERUSER) + def delete(self, kid): + if SuperUserPermission().can(): + try: + key = model.service_keys.delete_service_key(kid) + except model.service_keys.ServiceKeyDoesNotExist: + abort(404) + + key_log_metadata = { + 'kid': kid, + 'service': key.service, + 'name': key.name, + 'created_date': key.created_date, + 'expiration_date': key.expiration_date, + } + + log_action('service_key_delete', None, key_log_metadata) + return make_response('', 204) + + abort(403) + + +@resource('/v1/superuser/approvedkeys/') +@path_param('kid', 'The unique identifier for a service key') +@show_if(features.SUPER_USERS) +class SuperUserServiceKeyApproval(ApiResource): + """ Resource for approving service keys. """ + + schemas = { + 'ApproveServiceKey': { + 'id': 'ApproveServiceKey', + 'type': 'object', + 'description': 'Information for approving service keys', + 'properties': { + 'notes': { + 'type': 'string', + 'description': 'Optional approval notes', + }, + }, + }, + } + + @require_fresh_login + @verify_not_prod + @nickname('approveServiceKey') + @require_scope(scopes.SUPERUSER) + @validate_json_request('ApproveServiceKey') + def post(self, kid): + if SuperUserPermission().can(): + notes = request.get_json().get('notes', '') + approver = get_authenticated_user() + try: + key = model.service_keys.approve_service_key(kid, approver, ServiceKeyApprovalType.SUPERUSER, + notes=notes) + + # Log the approval of the service key. 
+ key_log_metadata = { + 'kid': kid, + 'service': key.service, + 'name': key.name, + 'expiration_date': key.expiration_date, + } + + log_action('service_key_approve', None, key_log_metadata) + except model.ServiceKeyDoesNotExist: + abort(404) + except model.ServiceKeyAlreadyApproved: + pass + + return make_response('', 201) + + abort(403) diff --git a/endpoints/key_server.py b/endpoints/key_server.py new file mode 100644 index 000000000..3839ffc5e --- /dev/null +++ b/endpoints/key_server.py @@ -0,0 +1,212 @@ +import logging + +from datetime import datetime, timedelta + +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives.asymmetric.ec import EllipticCurvePublicNumbers +from cryptography.hazmat.primitives.asymmetric.rsa import RSAPublicNumbers +from flask import Blueprint, jsonify, abort, request, make_response +from jwkest.jwk import keyrep, RSAKey, ECKey +from jwt import get_unverified_header + +import data.model +import data.model.service_keys +from data.model.log import log_action + +from app import app +from auth.registry_jwt_auth import TOKEN_REGEX +from util.security import strictjwt + + +logger = logging.getLogger(__name__) +key_server = Blueprint('key_server', __name__) + +JWT_HEADER_NAME = 'Authorization' +JWT_AUDIENCE = app.config['PREFERRED_URL_SCHEME'] + '://' + app.config['SERVER_HOSTNAME'] + + +def _validate_jwk(jwk): + if 'kty' not in jwk: + abort(400) + + if jwk['kty'] == 'EC': + if 'x' not in jwk or 'y' not in jwk: + abort(400) + elif jwk['kty'] == 'RSA': + if 'e' not in jwk or 'n' not in jwk: + abort(400) + else: + abort(400) + + +def _jwk_dict_to_public_key(jwk): + jwkest_key = keyrep(jwk) + if isinstance(jwkest_key, RSAKey): + pycrypto_key = jwkest_key.key + return RSAPublicNumbers(e=pycrypto_key.e, n=pycrypto_key.n).public_key(default_backend()) + elif isinstance(jwkest_key, ECKey): + x, y = jwkest_key.get_key() + return EllipticCurvePublicNumbers(x, y, jwkest_key.curve).public_key(default_backend()) + 
+ +def _validate_jwt(encoded_jwt, jwk, service): + public_key = _jwk_dict_to_public_key(jwk) + + try: + strictjwt.decode(encoded_jwt, public_key, algorithms=['RS256'], + audience=JWT_AUDIENCE, issuer=service) + except strictjwt.InvalidTokenError: + logger.exception('JWT validation failure') + abort(400) + + +def _signer_kid(encoded_jwt): + headers = get_unverified_header(encoded_jwt) + return headers.get('kid', None) + + +def _signer_key(service, signer_kid): + try: + return data.model.service_keys.get_service_key(signer_kid, service=service) + except data.model.ServiceKeyDoesNotExist: + abort(403) + + +@key_server.route('/services//keys', methods=['GET']) +def list_service_keys(service): + keys = data.model.service_keys.list_service_keys(service) + return jsonify({'keys': [key.jwk for key in keys]}) + + +@key_server.route('/services//keys/', methods=['GET']) +def get_service_key(service, kid): + try: + key = data.model.service_keys.get_service_key(kid) + except data.model.ServiceKeyDoesNotExist: + abort(404) + + if key.approval is None: + abort(409) + + if key.expiration_date is not None and key.expiration_date <= datetime.utcnow(): + abort(403) + + resp = jsonify(key.jwk) + lifetime = min(timedelta(days=1), ((key.expiration_date or datetime.max) - datetime.utcnow())) + resp.cache_control.max_age = max(0, lifetime.total_seconds()) + return resp + + +@key_server.route('/services//keys/', methods=['PUT']) +def put_service_key(service, kid): + metadata = {'ip': request.remote_addr} + + rotation_duration = request.args.get('rotation', None) + expiration_date = request.args.get('expiration', None) + if expiration_date is not None: + try: + expiration_date = datetime.utcfromtimestamp(float(expiration_date)) + except ValueError: + logger.exception('Error parsing expiration date on key') + abort(400) + + try: + jwk = request.get_json() + except ValueError: + logger.exception('Error parsing JWK') + abort(400) + + jwt_header = request.headers.get(JWT_HEADER_NAME, '') + 
match = TOKEN_REGEX.match(jwt_header) + if match is None: + logger.error('Could not find matching bearer token') + abort(400) + + encoded_jwt = match.group(1) + + _validate_jwk(jwk) + + signer_kid = _signer_kid(encoded_jwt) + + if kid == signer_kid or signer_kid is None: + # The key is self-signed. Create a new instance and await approval. + _validate_jwt(encoded_jwt, jwk, service) + data.model.service_keys.create_service_key('', kid, service, jwk, metadata, expiration_date, + rotation_duration=rotation_duration) + + key_log_metadata = { + 'kid': kid, + 'preshared': False, + 'service': service, + 'name': '', + 'expiration_date': expiration_date, + 'user_agent': request.headers.get('User-Agent'), + 'ip': request.remote_addr, + } + + log_action('service_key_create', None, metadata=key_log_metadata, ip=request.remote_addr) + return make_response('', 202) + + metadata.update({'created_by': 'Key Rotation'}) + signer_key = _signer_key(service, signer_kid) + signer_jwk = signer_key.jwk + if signer_key.service != service: + abort(403) + + _validate_jwt(encoded_jwt, signer_jwk, service) + + try: + data.model.service_keys.replace_service_key(signer_key.kid, kid, jwk, metadata, expiration_date) + except data.model.ServiceKeyDoesNotExist: + abort(404) + + key_log_metadata = { + 'kid': kid, + 'signer_kid': signer_key.kid, + 'service': service, + 'name': signer_key.name, + 'expiration_date': expiration_date, + 'user_agent': request.headers.get('User-Agent'), + 'ip': request.remote_addr, + } + + log_action('service_key_rotate', None, metadata=key_log_metadata, ip=request.remote_addr) + return make_response('', 200) + + +@key_server.route('/services//keys/', methods=['DELETE']) +def delete_service_key(service, kid): + jwt_header = request.headers.get(JWT_HEADER_NAME, '') + match = TOKEN_REGEX.match(jwt_header) + if match is None: + abort(400) + + encoded_jwt = match.group(1) + + signer_kid = _signer_kid(encoded_jwt) + signer_key = _signer_key(service, signer_kid) + + self_signed = 
kid == signer_kid or signer_kid == '' + approved_key_for_service = signer_key.approval is not None + + if self_signed or approved_key_for_service: + _validate_jwt(encoded_jwt, signer_key.jwk, service) + + try: + data.model.service_keys.delete_service_key(kid) + except data.model.ServiceKeyDoesNotExist: + abort(404) + + key_log_metadata = { + 'kid': kid, + 'signer_kid': signer_key.kid, + 'service': service, + 'name': signer_key.name, + 'user_agent': request.headers.get('User-Agent'), + 'ip': request.remote_addr, + } + + log_action('service_key_delete', None, metadata=key_log_metadata, ip=request.remote_addr) + return make_response('', 204) + + abort(403) diff --git a/endpoints/web.py b/endpoints/web.py index cc12565c8..803cd7954 100644 --- a/endpoints/web.py +++ b/endpoints/web.py @@ -1,14 +1,12 @@ import json import logging -from jwkest import long_to_base64 +from urlparse import urlparse + from cachetools import lru_cache -from cryptography.x509 import load_pem_x509_certificate -from cryptography.hazmat.backends import default_backend from flask import (abort, redirect, request, url_for, make_response, Response, Blueprint, send_from_directory, jsonify, send_file) from flask.ext.login import current_user -from urlparse import urlparse import features @@ -30,7 +28,7 @@ from endpoints.common import (common_login, render_page_template, route_show_if, from endpoints.csrf import csrf_protect, generate_csrf_token, verify_csrf from endpoints.decorators import anon_protect, anon_allowed from health.healthcheck import get_healthchecker -from util.cache import no_cache, cache_control +from util.cache import no_cache from util.headers import parse_basic_auth from util.invoice import renderInvoiceToPdf from util.seo import render_snapshot @@ -688,24 +686,3 @@ def redirect_to_namespace(namespace): return redirect(url_for('web.org_view', path=namespace)) else: return redirect(url_for('web.user_view', path=namespace)) - -@lru_cache(maxsize=1) -def 
_load_certificate_bytes(certificate_file_path): - with open(certificate_file_path) as cert_file: - return load_pem_x509_certificate(cert_file.read(), default_backend()).public_key() - -@route_show_if(features.BITTORRENT) -@cache_control(max_age=300) -@web.route('/keys', methods=['GET']) -def jwk_set_uri(): - certificate = _load_certificate_bytes(app.config['JWT_AUTH_CERTIFICATE_PATH']) - return jsonify({ - 'keys': [{ - 'kty': 'RSA', - 'alg': 'RS256', - 'use': 'sig', - 'n': long_to_base64(certificate.public_numbers().n), - 'e': long_to_base64(certificate.public_numbers().e), - }], - 'issuer': JWT_ISSUER, - }) diff --git a/external_libraries.py b/external_libraries.py index 5cbf53f99..45d7f7059 100644 --- a/external_libraries.py +++ b/external_libraries.py @@ -11,16 +11,19 @@ EXTERNAL_JS = [ 'ajax.googleapis.com/ajax/libs/angularjs/1.2.9/angular-route.min.js', 'ajax.googleapis.com/ajax/libs/angularjs/1.2.9/angular-sanitize.min.js', 'ajax.googleapis.com/ajax/libs/angularjs/1.2.9/angular-animate.min.js', + 'cdn.jsdelivr.net/g/momentjs', 'cdnjs.cloudflare.com/ajax/libs/bootstrap-datepicker/1.2.0/js/bootstrap-datepicker.min.js', - 'cdn.jsdelivr.net/g/bootbox@4.1.0,underscorejs@1.5.2,restangular@1.2.0,d3js@3.3.3,momentjs', + 'cdnjs.cloudflare.com/ajax/libs/bootstrap-datetimepicker/4.17.37/js/bootstrap-datetimepicker.min.js', + 'cdn.jsdelivr.net/g/bootbox@4.1.0,underscorejs@1.5.2,restangular@1.2.0,d3js@3.3.3', 'cdn.ravenjs.com/1.1.14/jquery,native/raven.min.js', ] EXTERNAL_CSS = [ 'netdna.bootstrapcdn.com/font-awesome/4.5.0/css/font-awesome.css', 'netdna.bootstrapcdn.com/bootstrap/3.3.2/css/bootstrap.min.css', - 'fonts.googleapis.com/css?family=Source+Sans+Pro:300,400,600,700', - 's3.amazonaws.com/cdn.core-os.net/icons/core-icons.css' + 'fonts.googleapis.com/css?family=Source+Sans+Pro:300,400,700', + 's3.amazonaws.com/cdn.core-os.net/icons/core-icons.css', + 'cdnjs.cloudflare.com/ajax/libs/bootstrap-datetimepicker/4.17.37/css/bootstrap-datetimepicker.min.css', ] 
EXTERNAL_FONTS = [ diff --git a/initdb.py b/initdb.py index 63cebbeb4..871ef22e5 100644 --- a/initdb.py +++ b/initdb.py @@ -12,19 +12,24 @@ from peewee import (SqliteDatabase, create_model_tables, drop_model_tables, save from itertools import count from uuid import UUID, uuid4 from threading import Event +from hashlib import sha256 +from Crypto.PublicKey import RSA +from jwkest.jwk import RSAKey from email.utils import formatdate from data.database import (db, all_models, Role, TeamRole, Visibility, LoginService, BuildTriggerService, AccessTokenKind, LogEntryKind, ImageStorageLocation, ImageStorageTransformation, ImageStorageSignatureKind, ExternalNotificationEvent, ExternalNotificationMethod, NotificationKind, - QuayRegion, QuayService, UserRegion, OAuthAuthorizationCode) + QuayRegion, QuayService, UserRegion, OAuthAuthorizationCode, + ServiceKeyApprovalType) from data import model from data.queue import WorkQueue from app import app, storage as store, tf from storage.basestorage import StoragePaths from endpoints.v2.manifest import _generate_and_store_manifest + from workers import repositoryactioncounter @@ -150,6 +155,32 @@ def __create_subtree(with_storage, repo, structure, creator_username, parent, ta __create_subtree(with_storage, repo, subtree, creator_username, new_image, tag_map) +def __generate_service_key(kid, name, user, timestamp, approval_type, expiration=None, + metadata=None, service='sample_service', rotation_duration=None): + _, key = model.service_keys.generate_service_key(service, expiration, kid=kid, + name=name, metadata=metadata, + rotation_duration=rotation_duration) + + if approval_type is not None: + model.service_keys.approve_service_key(key.kid, user, approval_type, + notes='The **test** approval') + + key_metadata = { + 'kid': kid, + 'preshared': True, + 'service': service, + 'name': name, + 'expiration_date': expiration, + 'auto_approved': True + } + + model.log.log_action('service_key_approve', None, performer=user, + 
timestamp=timestamp, metadata=key_metadata) + + model.log.log_action('service_key_create', None, performer=user, + timestamp=timestamp, metadata=key_metadata) + + def __generate_repository(with_storage, user_obj, name, description, is_public, permissions, structure): repo = model.repository.create_repository(user_obj.username, name, user_obj) @@ -305,6 +336,13 @@ def initialize_database(): LogEntryKind.create(name='repo_verb') + LogEntryKind.create(name='service_key_create') + LogEntryKind.create(name='service_key_approve') + LogEntryKind.create(name='service_key_delete') + LogEntryKind.create(name='service_key_modify') + LogEntryKind.create(name='service_key_extend') + LogEntryKind.create(name='service_key_rotate') + ImageStorageLocation.create(name='local_eu') ImageStorageLocation.create(name='local_us') @@ -336,6 +374,7 @@ def initialize_database(): NotificationKind.create(name='build_success') NotificationKind.create(name='build_failure') NotificationKind.create(name='vulnerability_found') + NotificationKind.create(name='service_key_submitted') NotificationKind.create(name='password_required') NotificationKind.create(name='over_private_usage') @@ -613,6 +652,27 @@ def populate_database(minimal=False, with_storage=False): six_ago = today - timedelta(5) four_ago = today - timedelta(4) + __generate_service_key('kid1', 'somesamplekey', new_user_1, today, + ServiceKeyApprovalType.SUPERUSER) + __generate_service_key('kid2', 'someexpiringkey', new_user_1, week_ago, + ServiceKeyApprovalType.SUPERUSER, today + timedelta(days=14)) + + __generate_service_key('kid3', 'unapprovedkey', new_user_1, today, None) + + __generate_service_key('kid4', 'autorotatingkey', new_user_1, six_ago, + ServiceKeyApprovalType.KEY_ROTATION, today + timedelta(days=1), + rotation_duration=timedelta(hours=12).total_seconds()) + + __generate_service_key('kid5', 'key for another service', new_user_1, today, + ServiceKeyApprovalType.SUPERUSER, today + timedelta(days=14), + 
service='different_sample_service') + + __generate_service_key('kid6', 'someexpiredkey', new_user_1, week_ago, + ServiceKeyApprovalType.SUPERUSER, today - timedelta(days=1)) + + __generate_service_key('kid7', 'somewayexpiredkey', new_user_1, week_ago, + ServiceKeyApprovalType.SUPERUSER, today - timedelta(days=30)) + model.log.log_action('org_create_team', org.username, performer=new_user_1, timestamp=week_ago, metadata={'team': 'readers'}) diff --git a/requirements-nover.txt b/requirements-nover.txt index a73a6768d..d19715a0a 100644 --- a/requirements-nover.txt +++ b/requirements-nover.txt @@ -63,3 +63,4 @@ bencode cryptography httmock moto +timeparse diff --git a/requirements.txt b/requirements.txt index ecd12d9fd..ce982e43e 100644 --- a/requirements.txt +++ b/requirements.txt @@ -108,6 +108,7 @@ SQLAlchemy==1.0.12 stevedore==1.12.0 stringscore==0.1.0 stripe==1.32.0 +timeparse==0.5.5 toposort==1.4 trollius==2.1 tzlocal==1.2.2 diff --git a/static/css/core-ui.css b/static/css/core-ui.css index 3fd80f895..601b79981 100644 --- a/static/css/core-ui.css +++ b/static/css/core-ui.css @@ -55,6 +55,30 @@ a:focus { outline: none !important; } +.co-form-table label { + white-space: nowrap; +} + +.co-form-table td { + padding: 8px; +} + +.co-form-table td:first-child { + vertical-align: top; + padding-top: 14px; +} + +.co-form-table td .co-help-text { + margin-top: 10px; + margin-bottom: 4px; +} + +.co-help-text { + margin-top: 6px; + color: #aaa; + display: inline-block; +} + .co-options-menu .fa-gear { color: #999; cursor: pointer; @@ -1184,9 +1208,9 @@ a:focus { .co-checkable-menu-state.some:after { content: "-"; - font-size: 19px; - top: -6px; - left: 3px; + font-size: 24px; + top: -10px; + left: 4px; } @media (min-width: 768px) { diff --git a/static/css/directives/ui/markdown-editor.css b/static/css/directives/ui/markdown-editor.css new file mode 100644 index 000000000..bf5602db7 --- /dev/null +++ b/static/css/directives/ui/markdown-editor.css @@ -0,0 +1,31 @@ 
+.markdown-editor-element .wmd-panel .btn { + background-color: #ddd; +} + +.markdown-editor-element .wmd-panel .btn:hover { + background-color: #eee; +} + +.markdown-editor-element .wmd-panel .btn:active { + background-color: #ccc; +} + +.markdown-editor-element .preview-btn { + float: right; +} + +.markdown-editor-element .preview-btn.active { + box-shadow: inset 0 3px 5px rgba(0,0,0,.125); +} + +.markdown-editor-element .preview-panel .markdown-view { + border: 1px solid #eee; + padding: 4px; + min-height: 150px; +} + +.markdown-editor-element .preview-top-bar { + height: 43px; + line-height: 43px; + color: #ddd; +} \ No newline at end of file diff --git a/static/css/directives/ui/service-keys-manager.css b/static/css/directives/ui/service-keys-manager.css new file mode 100644 index 000000000..c7b1e54ae --- /dev/null +++ b/static/css/directives/ui/service-keys-manager.css @@ -0,0 +1,121 @@ +.service-keys-manager-element .co-filter-box { + float: right; +} + +.service-keys-manager-element .manager-header { + margin-bottom: 20px; +} + +@media (max-width: 767px) { + .service-keys-manager-element .co-filter-box { + float: none; + display: block; + } +} + +.service-keys-manager-element .approval-user .pretext { + vertical-align: middle; + margin-right: 4px; + font-size: 12px; + color: #777; +} + +.service-keys-manager-element .expired a { + color: #D64456; +} + +.service-keys-manager-element .critical a { + color: #F77454; +} + +.service-keys-manager-element .warning a { + color: #FCA657; +} + +.service-keys-manager-element .info a { + color: #2FC98E; +} + +.service-keys-manager-element .rotation { + color: #777; +} + +.service-keys-manager-element .no-expiration { + color: #128E72; +} + +.service-keys-manager-element .approval-automatic { + font-size: 12px; + color: #777; +} + +.service-keys-manager-element i.fa { + margin-right: 4px; +} + +.service-keys-manager-element .approval-rotation { + font-size: 12px; + color: #777; +} + +.service-keys-manager-element 
.approval-rotation i.fa { + margin-right: 6px; +} + +.service-keys-manager-element .subtitle { + color: #999; + font-size: 90%; + text-transform: uppercase; + font-weight: 300; + padding-top: 0!important; + text-align: left; + margin-bottom: 6px; + margin-top: 10px; +} + +.service-keys-manager-element .approval-required i.fa { + margin-right: 4px; +} + +.service-keys-manager-element .approval-required a { + margin-left: 10px; +} + +.service-keys-manager-element .unnamed { + color: #ddd; +} + +.service-keys-manager-element .key-display { + margin-top: 10px; + font-size: 12px; + font-family: Menlo,Monaco,Consolas,"Courier New",monospace; + background: white; + min-height: 500px; +} + +.service-keys-manager-element .max-text { + display: inline-block; + max-width: 400px; + overflow: hidden; + text-overflow: ellipsis; + white-space: nowrap; + vertical-align: middle; +} + + +.service-keys-manager-element .keys-list { + list-style: circle; + padding: 10px; + padding-left: 40px; +} + +.service-keys-manager-element .keys-list li { + padding: 4px; + font-family: Consolas, "Lucida Console", Monaco, monospace; +} + +.service-keys-manager-element .expiration-form .datetime-picker { + margin-top: 4px; + display: block; + margin-bottom: 2px; +} diff --git a/static/css/quay.css b/static/css/quay.css index f2095b9e7..d7172b6b8 100644 --- a/static/css/quay.css +++ b/static/css/quay.css @@ -1382,7 +1382,6 @@ p.editable:hover i { .modal-body textarea { width: 100%; height: 150px; - border: 0px; } .tag-specific-images-view .image-listings { @@ -4034,7 +4033,7 @@ i.rocket-icon { text-align: center; } - .section-description-header { +.section-description-header { position: relative; margin-bottom: 10px; min-height: 50px; diff --git a/static/directives/datetime-picker.html b/static/directives/datetime-picker.html new file mode 100644 index 000000000..25f57d701 --- /dev/null +++ b/static/directives/datetime-picker.html @@ -0,0 +1,3 @@ + + + \ No newline at end of file diff --git 
a/static/directives/markdown-editor.html b/static/directives/markdown-editor.html new file mode 100644 index 000000000..4f838dffe --- /dev/null +++ b/static/directives/markdown-editor.html @@ -0,0 +1,11 @@ +
+ Preview +
+
+ +
+
+
Viewing preview
+
+
+
\ No newline at end of file diff --git a/static/directives/repo-view/repo-panel-tags.html b/static/directives/repo-view/repo-panel-tags.html index 91b3931da..360082a45 100644 --- a/static/directives/repo-view/repo-panel-tags.html +++ b/static/directives/repo-view/repo-panel-tags.html @@ -27,17 +27,17 @@
-
+
All Tags
-
+
No Tags
-
+
Commit SHAs
-
{{ it.image_id.substr(0, 12) }}
diff --git a/static/directives/service-keys-manager.html b/static/directives/service-keys-manager.html new file mode 100644 index 000000000..964523dab --- /dev/null +++ b/static/directives/service-keys-manager.html @@ -0,0 +1,365 @@ +
+
+
+ +
+ +
+ Service keys provide a recognized means of authentication between Quay Enterprise and external services, as well as between external services.
Example services include Quay Security Scanner speaking to a Clair cluster, or Quay Enterprise speaking to its + build workers. +
+ +
+ +
+ All Keys +
+
+ No Keys +
+
+ Unapproved Keys +
+
+ Expired Keys +
+
+ + + + + + + + + + + + + + Showing {{ orderedKeys.entries.length }} of {{ keys.length }} keys + + + +
+ + +
+
No service keys defined
+
There are no keys defined for working with external services
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ Name + + Service Name + + Created + + Expires + + Approval Status +
+ + + + + + + (Unnamed) + + + + + + + Automatically rotated + + + + + + Expired + + + + + Does not expire + + + + Generated Automatically + + + Approved by + + + Approved via key rotation + + + Awaiting Approval Approve Now + + + + + Set Friendly Name + + + Change Expiration Time + + + Approve Key + + + Delete Key + + +
+
Full Key ID
+ + +
+
Approval notes
+
+
+
+ +
+
No matching keys found.
+
Try expanding your filtering terms.
+
+
+ + +
+
+ Please choose the new expiration date and time (if any) for the following keys: +
    +
  • {{ getKeyTitle(key) }}
  • +
+ + + + If specified, the date and time at which the keys expire. It is highly recommended to have an expiration date. + +
+
+ + +
+
+ + + + If specified, the date and time that the key expires. It is highly recommended to have an expiration date. + +
+
+ + +
+ Are you sure you want to delete the follopwing service keys?
+ All external services that use these keys for authentication will fail. +
    +
  • {{ getKeyTitle(key) }}
  • +
+
+ + +
+ Are you sure you want to delete service key {{ getKeyTitle(deleteKeyInfo.key) }}?

+ All external services that use this key for authentication will fail. +
+ + +
+
+
+ Approve the following service keys? +
    +
  • {{ getKeyTitle(key) }}
  • +
+
+
+ + Enter optional notes for additional human-readable information about why the keys were approved. + +
+
+ + +
+
+
+ Approve service key {{ getKeyTitle(approvalKeyInfo.key) }}? +
+
+ + Enter optional notes for additional human-readable information about why the key was approved. + +
+
+ + + + + + +
diff --git a/static/js/core-ui.js b/static/js/core-ui.js index ef0e651b8..047988d71 100644 --- a/static/js/core-ui.js +++ b/static/js/core-ui.js @@ -674,8 +674,8 @@ angular.module("core-ui", []) }; this.checkByFilter = function(filter) { - $scope.controller.checkByFilter(function(tag) { - return filter({'tag': tag}); + $scope.controller.checkByFilter(function(item) { + return filter({'item': item}); }); }; } diff --git a/static/js/directives/ui/datetime-picker.js b/static/js/directives/ui/datetime-picker.js new file mode 100644 index 000000000..7f5e2c8d9 --- /dev/null +++ b/static/js/directives/ui/datetime-picker.js @@ -0,0 +1,52 @@ +/** + * An element which displays a datetime picker. + */ +angular.module('quay').directive('datetimePicker', function () { + var directiveDefinitionObject = { + priority: 0, + templateUrl: '/static/directives/datetime-picker.html', + replace: false, + transclude: true, + restrict: 'C', + scope: { + 'datetime': '=datetime', + }, + controller: function($scope, $element) { + $scope.entered_datetime = null; + + $(function() { + $element.find('input').datetimepicker({ + 'format': 'LLL', + 'sideBySide': true, + 'showClear': true, + 'minDate': new Date() + }); + + $element.find('input').on("dp.change", function (e) { + $scope.datetime = e.date ? 
e.date.unix() : null; + }); + }); + + $scope.$watch('entered_datetime', function(value) { + if (!value) { + if ($scope.datetime) { + $scope.datetime = null; + } + return; + } + + $scope.datetime = (new Date(value)).getTime()/1000; + }); + + $scope.$watch('datetime', function(value) { + if (!value) { + $scope.entered_datetime = null; + return; + } + + $scope.entered_datetime = moment.unix(value).format('LLL'); + }); + } + }; + return directiveDefinitionObject; +}); \ No newline at end of file diff --git a/static/js/directives/ui/logs-view.js b/static/js/directives/ui/logs-view.js index 3bae2d67e..d791c1584 100644 --- a/static/js/directives/ui/logs-view.js +++ b/static/js/directives/ui/logs-view.js @@ -37,6 +37,14 @@ angular.module('quay').directive('logsView', function () { return ''; }; + var getServiceKeyTitle = function(metadata) { + if (metadata.name) { + return metadata.name; + } + + return metadata.kind.substr(0, 12); + }; + var logDescriptions = { 'account_change_plan': 'Change plan', 'account_change_cc': 'Update credit card', @@ -195,6 +203,20 @@ angular.module('quay').directive('logsView', function () { 'regenerate_robot_token': 'Regenerated token for robot {robot}', + 'service_key_create': function(metadata) { + if (metadata.preshared) { + return 'Manual creation of preshared service key {kid} for service {service}'; + } else { + return 'Creation of service key {kid} for service {service} by {user_agent}'; + } + }, + + 'service_key_approve': 'Approval of service key {kid}', + 'service_key_modify': 'Modification of service key {kid}', + 'service_key_delete': 'Deletion of service key {kid}', + 'service_key_extend': 'Change of expiration of service key {kid} from {old_expiration_date} to {expiration_date}', + 'service_key_rotate': 'Automatic rotation of service key {kid} by {user_agent}', + // Note: These are deprecated. 
'add_repo_webhook': 'Add webhook in repository {repo}', 'delete_repo_webhook': 'Delete webhook in repository {repo}' @@ -245,6 +267,12 @@ angular.module('quay').directive('logsView', function () { 'add_repo_notification': 'Add repository notification', 'delete_repo_notification': 'Delete repository notification', 'regenerate_robot_token': 'Regenerate Robot Token', + 'service_key_create': 'Create Service Key', + 'service_key_approve': 'Approve Service Key', + 'service_key_modify': 'Modify Service Key', + 'service_key_delete': 'Delete Service Key', + 'service_key_extend': 'Extend Service Key Expiration', + 'service_key_rotate': 'Automatic rotation of Service Key', // Note: these are deprecated. 'add_repo_webhook': 'Add webhook', diff --git a/static/js/directives/ui/markdown-editor.js b/static/js/directives/ui/markdown-editor.js new file mode 100644 index 000000000..e68b36a3a --- /dev/null +++ b/static/js/directives/ui/markdown-editor.js @@ -0,0 +1,32 @@ +/** + * An element which display an inline editor for writing and previewing markdown text. 
+ */ +angular.module('quay').directive('markdownEditor', function () { + var counter = 0; + + var directiveDefinitionObject = { + priority: 0, + templateUrl: '/static/directives/markdown-editor.html', + replace: false, + transclude: false, + restrict: 'C', + scope: { + 'content': '=content', + }, + controller: function($scope, $element, $timeout) { + $scope.id = (counter++); + $scope.previewing = false; + + $timeout(function() { + var converter = Markdown.getSanitizingConverter(); + var editor = new Markdown.Editor(converter, '-' + $scope.id); + editor.run(); + }); + + $scope.togglePreview = function() { + $scope.previewing = !$scope.previewing; + }; + } + }; + return directiveDefinitionObject; +}); diff --git a/static/js/directives/ui/service-keys-manager.js b/static/js/directives/ui/service-keys-manager.js new file mode 100644 index 000000000..ab33545cb --- /dev/null +++ b/static/js/directives/ui/service-keys-manager.js @@ -0,0 +1,383 @@ +/** + * An element which displays a panel for managing keys for external services. 
+ */ +angular.module('quay').directive('serviceKeysManager', function () { + var directiveDefinitionObject = { + priority: 0, + templateUrl: '/static/directives/service-keys-manager.html', + replace: false, + transclude: true, + restrict: 'C', + scope: { + 'isEnabled': '=isEnabled' + }, + controller: function($scope, $element, ApiService, TableService, UIService) { + $scope.options = { + 'filter': null, + 'predicate': 'expiration_datetime', + 'reverse': false, + }; + + $scope.deleteKeysInfo = null; + $scope.approveKeysInfo = null; + $scope.changeKeysInfo = null; + + $scope.checkedKeys = UIService.createCheckStateController([], 'kid'); + + $scope.TableService = TableService; + $scope.newKey = null; + $scope.creatingKey = false; + $scope.context = { + 'expirationChangeInfo': null + }; + + var buildOrderedKeys = function() { + if (!$scope.keys) { + return; + } + + var keys = $scope.keys.map(function(key) { + var expiration_datetime = -Number.MAX_VALUE; + if (key.rotation_duration) { + expiration_datetime = -(Number.MAX_VALUE/2); + } else if (key.expiration_date) { + expiration_datetime = new Date(key.expiration_date).valueOf() * (-1); + } + + return $.extend(key, { + 'creation_datetime': new Date(key.creation_date).valueOf() * (-1), + 'expiration_datetime': expiration_datetime, + 'expanded': false + }); + }); + + $scope.orderedKeys = TableService.buildOrderedItems(keys, $scope.options, + ['name', 'kid', 'service'], + ['creation_datetime', 'expiration_datetime']) + + $scope.checkedKeys = UIService.createCheckStateController($scope.orderedKeys.visibleEntries, 'kid'); + }; + + var loadServiceKeys = function() { + $scope.options.filter = null; + $scope.now = new Date(); + $scope.keysResource = ApiService.listServiceKeysAsResource().get(function(resp) { + $scope.keys = resp['keys']; + buildOrderedKeys(); + }); + }; + + $scope.getKeyTitle = function(key) { + if (!key) { return ''; } + return key.name || key.kid.substr(0, 12); + }; + + $scope.toggleDetails = function(key) { 
+ key.expanded = !key.expanded; + }; + + $scope.getRotationDate = function(key) { + return moment(key.created_date).add(key.rotation_duration, 's').format('LLL'); + }; + + $scope.getExpirationInfo = function(key) { + if (!key.expiration_date) { + return ''; + } + + if (key.rotation_duration) { + var rotate_date = moment(key.created_date).add(key.rotation_duration, 's') + if (moment().isBefore(rotate_date)) { + return {'className': 'rotation', 'icon': 'fa-refresh', 'willRotate': true}; + } + } + + expiration_date = moment(key.expiration_date); + if (moment().isAfter(expiration_date)) { + return {'className': 'expired', 'icon': 'fa-warning'}; + } + + if (moment().add(1, 'week').isAfter(expiration_date)) { + return {'className': 'critical', 'icon': 'fa-warning'}; + } + + if (moment().add(1, 'month').isAfter(expiration_date)) { + return {'className': 'warning', 'icon': 'fa-warning'}; + } + + return {'className': 'info', 'icon': 'fa-check'}; + }; + + $scope.showChangeName = function(key) { + bootbox.prompt({ + 'size': 'small', + 'title': 'Enter a friendly name for key ' + $scope.getKeyTitle(key), + 'value': key.name || '', + 'callback': function(value) { + if (value != null) { + var data = { + 'name': value + }; + + var params = { + 'kid': key.kid + }; + + ApiService.updateServiceKey(data, params).then(function(resp) { + loadServiceKeys(); + }, ApiService.errorDisplay('Could not update service key')); + } + } + }); + }; + + $scope.showChangeExpiration = function(key) { + $scope.context.expirationChangeInfo = { + 'key': key, + 'expiration_date': key.expiration_date ? 
(new Date(key.expiration_date).getTime() / 1000) : null + }; + }; + + $scope.changeKeyExpiration = function(changeInfo, callback) { + var errorHandler = ApiService.errorDisplay('Could not change expiration on service key', function() { + loadServiceKeys(); + callback(false); + }); + + var data = { + 'expiration': changeInfo.expiration_date + }; + + var params = { + 'kid': changeInfo.key.kid + }; + + ApiService.updateServiceKey(data, params).then(function(resp) { + loadServiceKeys(); + callback(true); + }, errorHandler); + }; + + $scope.createServiceKey = function() { + $scope.creatingKey = true; + ApiService.createServiceKey($scope.newKey).then(function(resp) { + $scope.creatingKey = false; + $('#createKeyModal').modal('hide'); + $scope.createdKey = resp; + $('#createdKeyModal').modal('show'); + loadServiceKeys(); + }, ApiService.errorDisplay('Could not create service key')); + }; + + $scope.showApproveKey = function(key) { + $scope.approvalKeyInfo = { + 'key': key, + 'notes': '' + }; + }; + + $scope.approveKey = function(approvalKeyInfo, callback) { + var errorHandler = ApiService.errorDisplay('Could not approve service key', function() { + loadServiceKeys(); + callback(false); + }); + + var data = { + 'notes': approvalKeyInfo.notes + }; + + var params = { + 'kid': approvalKeyInfo.key.kid + }; + + ApiService.approveServiceKey(data, params).then(function(resp) { + loadServiceKeys(); + callback(true); + }, errorHandler); + }; + + $scope.showCreateKey = function() { + $scope.newKey = { + 'expiration': null + }; + + $('#createKeyModal').modal('show'); + }; + + $scope.showDeleteKey = function(key) { + $scope.deleteKeyInfo = { + 'key': key + }; + }; + + $scope.deleteKey = function(deleteKeyInfo, callback) { + var errorHandler = ApiService.errorDisplay('Could not delete service key', function() { + loadServiceKeys(); + callback(false); + }); + + var params = { + 'kid': deleteKeyInfo.key.kid + }; + + ApiService.deleteServiceKey(null, params).then(function(resp) { + 
loadServiceKeys(); + callback(true); + }, errorHandler); + }; + + $scope.isDownloadSupported = function() { + var isSafari = /^((?!chrome).)*safari/i.test(navigator.userAgent); + if (isSafari) { + // Doesn't work properly in Safari, sadly. + return false; + } + + try { return !!new Blob(); } catch(e) {} + return false; + }; + + $scope.downloadPrivateKey = function(key) { + var blob = new Blob([key.private_key]); + saveAs(blob, $scope.getKeyTitle(key) + '.pem'); + }; + + $scope.askDeleteMultipleKeys = function(keys) { + $scope.deleteKeysInfo = { + 'keys': keys + }; + }; + + $scope.askApproveMultipleKeys = function(keys) { + $scope.approveKeysInfo = { + 'keys': keys + }; + }; + + $scope.askChangeExpirationMultipleKeys = function(keys) { + $scope.changeKeysInfo = { + 'keys': keys + }; + }; + + $scope.allKeyFilter = function(key) { + return true; + }; + + $scope.noKeyFilter = function(key) { + return false; + }; + + $scope.unapprovedKeyFilter = function(key) { + return !key.approval; + }; + + $scope.expiredKeyFilter = function(key) { + return $scope.getExpirationInfo(key)['className'] == 'expired'; + }; + + $scope.allRequireApproval = function(keys) { + for (var i = 0; i < keys.length; ++i) { + if (keys[i].approval) { + return false; + } + } + + return true; + }; + + $scope.allExpired = function(keys) { + for (var i = 0; i < keys.length; ++i) { + if (!$scope.expiredKeyFilter(keys[i])) { + return false; + } + } + + return true; + }; + + var forAllKeys = function(keys, error_msg, performer, callback) { + var counter = 0; + var performAction = function() { + if (counter >= keys.length) { + loadServiceKeys(); + callback(true); + return; + } + + var key = keys[counter]; + var errorHandler = function(resp) { + if (resp.status != 404) { + bootbox.alert(error_msg); + loadServiceKeys(); + callback(false); + return; + } + + performAction(); + }; + + counter++; + performer(key).then(performAction, errorHandler); + }; + + performAction(); + }; + + $scope.deleteKeys = 
function(info, callback) { + var performer = function(key) { + var params = { + 'kid': key.kid + }; + + return ApiService.deleteServiceKey(null, params); + }; + + forAllKeys(info.keys, 'Could not delete service key', performer, callback); + }; + + $scope.approveKeys = function(info, callback) { + var performer = function(key) { + var params = { + 'kid': key.kid + }; + + var data = { + 'notes': $scope.approveKeysInfo.notes + }; + + return ApiService.approveServiceKey(data, params); + }; + + forAllKeys(info.keys, 'Could not approve service key', performer, callback); + }; + + $scope.changeKeysExpiration = function(info, callback) { + var performer = function(key) { + var data = { + 'expiration': info.expiration_date || null + }; + + var params = { + 'kid': key.kid + }; + + return ApiService.updateServiceKey(data, params); + }; + + forAllKeys(info.keys, 'Could not update service key', performer, callback); + }; + + $scope.$watch('options.filter', buildOrderedKeys); + $scope.$watch('options.predicate', buildOrderedKeys); + $scope.$watch('options.reverse', buildOrderedKeys); + + $scope.$watch('isEnabled', function(value) { + if (value) { + loadServiceKeys(); + } + }); + } + }; + return directiveDefinitionObject; +}); \ No newline at end of file diff --git a/static/js/pages/superuser.js b/static/js/pages/superuser.js index 740481b81..933c384ac 100644 --- a/static/js/pages/superuser.js +++ b/static/js/pages/superuser.js @@ -31,6 +31,7 @@ $scope.csrf_token = encodeURIComponent(window.__token); $scope.dashboardActive = false; $scope.currentConfig = null; + $scope.serviceKeysActive = false; $scope.setDashboardActive = function(active) { $scope.dashboardActive = active; @@ -46,6 +47,10 @@ $('#createUserModal').modal('show'); }; + $scope.loadServiceKeys = function() { + $scope.serviceKeysActive = true; + }; + $scope.viewSystemLogs = function(service) { if ($scope.pollChannel) { $scope.pollChannel.stop(); diff --git a/static/js/services/notification-service.js 
b/static/js/services/notification-service.js index 17f0262d5..875fed7dd 100644 --- a/static/js/services/notification-service.js +++ b/static/js/services/notification-service.js @@ -131,6 +131,42 @@ function($rootScope, $interval, UserService, ApiService, StringBuilderService, P return '/repository/' + metadata.repository + '?tab=tags'; }, 'dismissable': true + }, + 'service_key_submitted': { + 'level': 'primary', + 'message': 'Service key {kid} for service {service} requests approval

Key was created on {created_date}', + 'actions': [ + { + 'title': 'Approve Key', + 'kind': 'primary', + 'handler': function(notification) { + var params = { + 'kid': notification.metadata.kid + }; + + ApiService.approveServiceKey({}, params).then(function(resp) { + notificationService.update(); + window.location = '/superuser/?tab=servicekeys'; + }, ApiService.errorDisplay('Could not approve service key')); + } + }, + { + 'title': 'Delete Key', + 'kind': 'default', + 'handler': function(notification) { + var params = { + 'kid': notification.metadata.kid + }; + + ApiService.deleteServiceKey(null, params).then(function(resp) { + notificationService.update(); + }, ApiService.errorDisplay('Could not delete service key')); + } + } + ], + 'page': function(metadata) { + return '/superuser/?tab=servicekeys'; + }, } }; diff --git a/static/js/services/string-builder-service.js b/static/js/services/string-builder-service.js index 44251185e..87fe2cd66 100644 --- a/static/js/services/string-builder-service.js +++ b/static/js/services/string-builder-service.js @@ -25,6 +25,38 @@ angular.module('quay').factory('StringBuilderService', ['$sce', 'UtilService', f 'client_id': 'chain' }; + var filters = { + 'obj': function(value) { + if (!value) { return []; } + return Object.getOwnPropertyNames(value); + }, + + 'updated_tags': function(value) { + if (!value) { return []; } + return Object.getOwnPropertyNames(value); + }, + + 'kid': function(kid, metadata) { + if (metadata.name) { + return metadata.name; + } + + return metadata.kid.substr(0, 12); + }, + + 'created_date': function(value) { + return moment.unix(value).format('LLL'); + }, + + 'expiration_date': function(value) { + return moment.unix(value).format('LLL'); + }, + + 'old_expiration_date': function(value) { + return moment.unix(value).format('LLL'); + } + }; + stringBuilderService.buildUrl = function(value_or_func, metadata) { var url = value_or_func; if (typeof url != 'string') { @@ -105,18 +137,6 @@ 
angular.module('quay').factory('StringBuilderService', ['$sce', 'UtilService', f } stringBuilderService.buildString = function(value_or_func, metadata, opt_codetag) { - var filters = { - 'obj': function(value) { - if (!value) { return []; } - return Object.getOwnPropertyNames(value); - }, - - 'updated_tags': function(value) { - if (!value) { return []; } - return Object.getOwnPropertyNames(value); - } - }; - var description = value_or_func; if (typeof description != 'string') { description = description(metadata); @@ -126,7 +146,7 @@ angular.module('quay').factory('StringBuilderService', ['$sce', 'UtilService', f if (metadata.hasOwnProperty(key)) { var value = metadata[key] != null ? metadata[key] : '(Unknown)'; if (filters[key]) { - value = filters[key](value); + value = filters[key](value, metadata); } description = stringBuilderService.replaceField(description, '', key, value, opt_codetag); diff --git a/static/partials/super-user.html b/static/partials/super-user.html index d124222b7..09c58dcee 100644 --- a/static/partials/super-user.html +++ b/static/partials/super-user.html @@ -24,6 +24,10 @@ tab-target="#organizations" tab-init="loadOrganizations()"> + + + @@ -50,6 +54,11 @@ configuration-saved="configurationSaved(config)">
+ +
+
+
+
diff --git a/test/data/test.db b/test/data/test.db index c65ad6987..d9741741c 100644 Binary files a/test/data/test.db and b/test/data/test.db differ diff --git a/test/helpers.py b/test/helpers.py new file mode 100644 index 000000000..1ebb94fdb --- /dev/null +++ b/test/helpers.py @@ -0,0 +1,21 @@ +from data.database import LogEntryKind, LogEntry + +class assert_action_logged(object): + """ Specialized assertion for ensuring that a log entry of a particular kind was added under the + context of this call. + """ + def __init__(self, log_kind): + self.log_kind = log_kind + self.existing_count = 0 + + def _get_log_count(self): + return LogEntry.select(LogEntry.kind == LogEntryKind.get(name=self.log_kind)).count() + + def __enter__(self): + self.existing_count = self._get_log_count() + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + updated_count = self._get_log_count() + error_msg = 'Missing new log entry of kind %s' % self.log_kind + assert self.existing_count == (updated_count - 1), error_msg diff --git a/test/test_api_security.py b/test/test_api_security.py index c785aaee4..b73c27bf5 100644 --- a/test/test_api_security.py +++ b/test/test_api_security.py @@ -48,7 +48,8 @@ from endpoints.api.permission import (RepositoryUserPermission, RepositoryTeamPe from endpoints.api.superuser import (SuperUserLogs, SuperUserList, SuperUserManagement, SuperUserSendRecoveryEmail, ChangeLog, SuperUserOrganizationManagement, SuperUserOrganizationList, - SuperUserAggregateLogs) + SuperUserAggregateLogs, SuperUserServiceKeyManagement, + SuperUserServiceKey, SuperUserServiceKeyApproval) from endpoints.api.secscan import RepositoryImageSecurity @@ -3911,6 +3912,97 @@ class TestSuperUserSendRecoveryEmail(ApiTestCase): self._run_test('POST', 404, 'devtable', None) +class TestSuperUserServiceKeyApproval(ApiTestCase): + def setUp(self): + ApiTestCase.setUp(self) + self._set_url(SuperUserServiceKeyApproval, kid=1234) + + def test_post_anonymous(self): + self._run_test('POST', 
401, None, {}) + + def test_post_freshuser(self): + self._run_test('POST', 403, 'freshuser', {}) + + def test_post_reader(self): + self._run_test('POST', 403, 'reader', {}) + + def test_post_devtable(self): + self._run_test('POST', 404, 'devtable', {}) + + + +class TestSuperUserServiceKeyManagement(ApiTestCase): + def setUp(self): + ApiTestCase.setUp(self) + self._set_url(SuperUserServiceKeyManagement) + + def test_get_anonymous(self): + self._run_test('GET', 403, None, None) + + def test_get_freshuser(self): + self._run_test('GET', 403, 'freshuser', None) + + def test_get_reader(self): + self._run_test('GET', 403, 'reader', None) + + def test_get_devtable(self): + self._run_test('GET', 200, 'devtable', None) + + def test_post_anonymous(self): + self._run_test('POST', 401, None, dict(service='someservice', expiration=None)) + + def test_post_freshuser(self): + self._run_test('POST', 403, 'freshuser', dict(service='someservice', expiration=None)) + + def test_post_reader(self): + self._run_test('POST', 403, 'reader', dict(service='someservice', expiration=None)) + + def test_post_devtable(self): + self._run_test('POST', 200, 'devtable', dict(service='someservice', expiration=None)) + + +class TestSuperUserServiceKey(ApiTestCase): + def setUp(self): + ApiTestCase.setUp(self) + self._set_url(SuperUserServiceKey, kid=1234) + + def test_get_anonymous(self): + self._run_test('GET', 403, None, None) + + def test_get_freshuser(self): + self._run_test('GET', 403, 'freshuser', None) + + def test_get_reader(self): + self._run_test('GET', 403, 'reader', None) + + def test_get_devtable(self): + self._run_test('GET', 404, 'devtable', None) + + def test_delete_anonymous(self): + self._run_test('DELETE', 401, None, None) + + def test_delete_freshuser(self): + self._run_test('DELETE', 403, 'freshuser', None) + + def test_delete_reader(self): + self._run_test('DELETE', 403, 'reader', None) + + def test_delete_devtable(self): + self._run_test('DELETE', 404, 'devtable', None) + + def 
test_put_anonymous(self): + self._run_test('PUT', 401, None, {}) + + def test_put_freshuser(self): + self._run_test('PUT', 403, 'freshuser', {}) + + def test_put_reader(self): + self._run_test('PUT', 403, 'reader', {}) + + def test_put_devtable(self): + self._run_test('PUT', 404, 'devtable', {}) + + class TestTeamMemberInvite(ApiTestCase): def setUp(self): ApiTestCase.setUp(self) diff --git a/test/test_api_usage.py b/test/test_api_usage.py index 7f6e69c7c..0579cbec0 100644 --- a/test/test_api_usage.py +++ b/test/test_api_usage.py @@ -6,12 +6,15 @@ import logging import re import json as py_json +from calendar import timegm from StringIO import StringIO from urllib import urlencode from urlparse import urlparse, urlunparse, parse_qs from playhouse.test_utils import assert_query_count, _QueryLogHandler from httmock import urlmatch, HTTMock +from cryptography.hazmat.primitives import serialization +from cryptography.hazmat.backends import default_backend from endpoints.api import api_bp, api from endpoints.building import PreparedBuild @@ -20,7 +23,7 @@ from app import app, config_provider from buildtrigger.basehandler import BuildTriggerHandler from initdb import setup_database_for_testing, finished_database_for_testing from data import database, model -from data.database import RepositoryActionCount +from data.database import RepositoryActionCount, LogEntry, LogEntryKind from endpoints.api.team import TeamMember, TeamMemberList, TeamMemberInvite, OrganizationTeam from endpoints.api.tag import RepositoryTagImages, RepositoryTag, RevertTag, ListRepositoryTags @@ -53,7 +56,9 @@ from endpoints.api.organization import (OrganizationList, OrganizationMember, from endpoints.api.repository import RepositoryList, RepositoryVisibility, Repository from endpoints.api.permission import (RepositoryUserPermission, RepositoryTeamPermission, RepositoryTeamPermissionList, RepositoryUserPermissionList) -from endpoints.api.superuser import SuperUserLogs, SuperUserList, 
SuperUserManagement +from endpoints.api.superuser import (SuperUserLogs, SuperUserList, SuperUserManagement, + SuperUserServiceKeyManagement, SuperUserServiceKey, + SuperUserServiceKeyApproval) from endpoints.api.secscan import RepositoryImageSecurity from endpoints.api.suconfig import (SuperUserRegistryStatus, SuperUserConfig, SuperUserConfigFile, SuperUserCreateInitialSuperUser) @@ -3554,6 +3559,165 @@ class TestRepositoryImageSecurity(ApiTestCase): self.assertEquals(1, response['data']['Layer']['IndexedByVersion']) +class TestSuperUserKeyManagement(ApiTestCase): + def test_get_update_keys(self): + self.login(ADMIN_ACCESS_USER) + + kind = LogEntryKind.get(LogEntryKind.name == 'service_key_modify') + existing_modify = model.log.LogEntry.select().where(LogEntry.kind == kind).count() + + json = self.getJsonResponse(SuperUserServiceKeyManagement) + key_count = len(json['keys']) + + key = json['keys'][0] + self.assertTrue('name' in key) + self.assertTrue('service' in key) + self.assertTrue('kid' in key) + self.assertTrue('created_date' in key) + self.assertTrue('expiration_date' in key) + self.assertTrue('jwk' in key) + self.assertTrue('approval' in key) + self.assertTrue('metadata' in key) + + # Update the key's name. + self.putJsonResponse(SuperUserServiceKey, params=dict(kid=key['kid']), + data=dict(name='somenewname')) + + # Ensure the key's name has been changed. + json = self.getJsonResponse(SuperUserServiceKey, params=dict(kid=key['kid'])) + self.assertEquals('somenewname', json['name']) + + # Ensure a log was added for the modification. + kind = LogEntryKind.get(LogEntryKind.name == 'service_key_modify') + self.assertEquals(existing_modify + 1, model.log.LogEntry.select().where(LogEntry.kind == kind).count()) + + # Update the key's metadata. + self.putJsonResponse(SuperUserServiceKey, params=dict(kid=key['kid']), + data=dict(metadata=dict(foo='bar'))) + + # Ensure the key's metadata has been changed. 
+ json = self.getJsonResponse(SuperUserServiceKey, params=dict(kid=key['kid'])) + self.assertEquals('bar', json['metadata']['foo']) + + # Ensure a log was added for the modification. + kind = LogEntryKind.get(LogEntryKind.name == 'service_key_modify') + self.assertEquals(existing_modify + 2, model.log.LogEntry.select().where(LogEntry.kind == kind).count()) + + # Change the key's expiration. + self.putJsonResponse(SuperUserServiceKey, params=dict(kid=key['kid']), + data=dict(expiration=None)) + + # Ensure the key's expiration has been changed. + json = self.getJsonResponse(SuperUserServiceKey, params=dict(kid=key['kid'])) + self.assertIsNone(json['expiration_date']) + + # Ensure a log was added for the modification. + kind = LogEntryKind.get(LogEntryKind.name == 'service_key_extend') + self.assertEquals(1, model.log.LogEntry.select().where(LogEntry.kind == kind).count()) + + # Delete the key. + self.deleteResponse(SuperUserServiceKey, params=dict(kid=key['kid'])) + + # Ensure the key no longer exists. + self.getResponse(SuperUserServiceKey, params=dict(kid=key['kid']), expected_code=404) + + json = self.getJsonResponse(SuperUserServiceKeyManagement) + self.assertEquals(key_count - 1, len(json['keys'])) + + # Ensure a log was added for the deletion. + kind = LogEntryKind.get(LogEntryKind.name == 'service_key_delete') + self.assertEquals(1, model.log.LogEntry.select().where(LogEntry.kind == kind).count()) + + def test_approve_key(self): + self.login(ADMIN_ACCESS_USER) + + kind = LogEntryKind.get(LogEntryKind.name == 'service_key_approve') + existing_log_count = model.log.LogEntry.select().where(LogEntry.kind == kind).count() + + # Ensure the key is not yet approved. + json = self.getJsonResponse(SuperUserServiceKey, params=dict(kid='kid3')) + self.assertEquals('unapprovedkey', json['name']) + self.assertIsNone(json['approval']) + + # Approve the key. 
+ self.postResponse(SuperUserServiceKeyApproval, params=dict(kid='kid3'), + data=dict(notes='testapprove'), expected_code=201) + + # Ensure the key is approved. + json = self.getJsonResponse(SuperUserServiceKey, params=dict(kid='kid3')) + self.assertEquals('unapprovedkey', json['name']) + self.assertIsNotNone(json['approval']) + self.assertEquals('ServiceKeyApprovalType.SUPERUSER', json['approval']['approval_type']) + self.assertEquals(ADMIN_ACCESS_USER, json['approval']['approver']['username']) + self.assertEquals('testapprove', json['approval']['notes']) + + # Ensure the approval was logged. + kind = LogEntryKind.get(LogEntryKind.name == 'service_key_approve') + self.assertEquals(existing_log_count + 1, model.log.LogEntry.select().where(LogEntry.kind == kind).count()) + + def test_approve_preapproved(self): + self.login(ADMIN_ACCESS_USER) + + new_key = { + 'service': 'coolservice', + 'name': 'mynewkey', + 'metadata': dict(foo='baz'), + 'notes': 'whazzup!?', + 'expiration': timegm((datetime.datetime.now() + datetime.timedelta(days=1)).utctimetuple()), + } + + # Create the key (preapproved automatically) + json = self.postJsonResponse(SuperUserServiceKeyManagement, data=new_key) + + # Try to approve again. + self.postResponse(SuperUserServiceKeyApproval, params=dict(kid=json['kid']), expected_code=201) + + def test_create_key(self): + self.login(ADMIN_ACCESS_USER) + + kind = LogEntryKind.get(LogEntryKind.name == 'service_key_create') + existing_log_count = model.log.LogEntry.select().where(LogEntry.kind == kind).count() + + new_key = { + 'service': 'coolservice', + 'name': 'mynewkey', + 'metadata': dict(foo='baz'), + 'notes': 'whazzup!?', + 'expiration': timegm((datetime.datetime.now() + datetime.timedelta(days=1)).utctimetuple()), + } + + # Create the key. 
+ json = self.postJsonResponse(SuperUserServiceKeyManagement, data=new_key) + self.assertEquals('mynewkey', json['name']) + self.assertTrue('kid' in json) + self.assertTrue('public_key' in json) + self.assertTrue('private_key' in json) + + # Verify the private key is a valid PEM. + serialization.load_pem_private_key(json['private_key'].encode('utf-8'), None, default_backend()) + + # Verify the key. + kid = json['kid'] + + json = self.getJsonResponse(SuperUserServiceKey, params=dict(kid=kid)) + self.assertEquals('mynewkey', json['name']) + self.assertEquals('coolservice', json['service']) + self.assertEquals('baz', json['metadata']['foo']) + self.assertEquals(kid, json['kid']) + + self.assertIsNotNone(json['approval']) + self.assertEquals('ServiceKeyApprovalType.SUPERUSER', json['approval']['approval_type']) + self.assertEquals(ADMIN_ACCESS_USER, json['approval']['approver']['username']) + self.assertEquals('whazzup!?', json['approval']['notes']) + + # Ensure that there are logs for the creation and auto-approval. 
+ kind = LogEntryKind.get(LogEntryKind.name == 'service_key_create') + self.assertEquals(existing_log_count + 1, model.log.LogEntry.select().where(LogEntry.kind == kind).count()) + + kind = LogEntryKind.get(LogEntryKind.name == 'service_key_approve') + self.assertEquals(existing_log_count + 1, model.log.LogEntry.select().where(LogEntry.kind == kind).count()) + + class TestSuperUserManagement(ApiTestCase): def test_get_user(self): self.login(ADMIN_ACCESS_USER) diff --git a/test/test_endpoints.py b/test/test_endpoints.py index 8dd200d9b..45c381587 100644 --- a/test/test_endpoints.py +++ b/test/test_endpoints.py @@ -1,21 +1,32 @@ # coding=utf-8 -import unittest import json as py_json - -from data import model -from flask import url_for -from app import app -from endpoints.web import web as web_bp -from endpoints.api import api, api_bp -from endpoints.api.user import Signin -from initdb import setup_database_for_testing, finished_database_for_testing +import time +import unittest from urllib import urlencode from urlparse import urlparse, urlunparse, parse_qs +import jwt + +from Crypto.PublicKey import RSA +from flask import url_for +from jwkest.jwk import RSAKey + +from app import app +from data import model +from data.database import ServiceKeyApprovalType +from endpoints import key_server +from endpoints.api import api, api_bp +from endpoints.api.user import Signin +from endpoints.web import web as web_bp +from initdb import setup_database_for_testing, finished_database_for_testing +from test.helpers import assert_action_logged + + try: app.register_blueprint(web_bp, url_prefix='') + app.register_blueprint(key_server.key_server, url_prefix='') except ValueError: # This blueprint was already registered pass @@ -30,6 +41,7 @@ except ValueError: CSRF_TOKEN_KEY = '_csrf_token' CSRF_TOKEN = '123csrfforme' + class EndpointTestCase(unittest.TestCase): maxDiff = None @@ -60,6 +72,19 @@ class EndpointTestCase(unittest.TestCase): self.assertEquals(rv.status_code, 
expected_code) return rv.data + def deleteResponse(self, resource_name, headers=None, expected_code=204, **kwargs): + headers = headers or {} + rv = self.app.delete(url_for(resource_name, **kwargs), headers=headers) + self.assertEquals(rv.status_code, expected_code) + return rv.data + + def putResponse(self, resource_name, headers=None, data=None, expected_code=204, **kwargs): + headers = headers or {} + data = data or {} + rv = self.app.put(url_for(resource_name, **kwargs), headers=headers, data=py_json.dumps(data)) + self.assertEquals(rv.status_code, expected_code) + return rv.data + def login(self, username, password): rv = self.app.post(EndpointTestCase._add_csrf(api.url_for(Signin)), data=py_json.dumps(dict(username=username, password=password)), @@ -164,8 +189,139 @@ class WebEndpointTestCase(EndpointTestCase): self.getResponse('web.redirect_to_namespace', namespace='devtable', expected_code=302) self.getResponse('web.redirect_to_namespace', namespace='buynlarge', expected_code=302) - def test_jwk_set_uri(self): - self.getResponse('web.jwk_set_uri') + +class KeyServerTestCase(EndpointTestCase): + def _get_test_jwt_payload(self): + return { + 'iss': 'sample_service', + 'aud': key_server.JWT_AUDIENCE, + 'exp': int(time.time()) + 60, + 'iat': int(time.time()), + 'nbf': int(time.time()), + } + + def test_list_service_keys(self): + unapproved_key = model.service_keys.get_service_key(kid='kid3') + expired_key = model.service_keys.get_service_key(kid='kid6') + + rv = self.getResponse('key_server.list_service_keys', service='sample_service') + jwkset = py_json.loads(rv) + + # Make sure the hidden keys are not returned and the visible ones are returned. 
+ self.assertTrue(len(jwkset['keys']) > 0)
+ expired_key_found = False
+ for jwk in jwkset['keys']:
+ self.assertNotEquals(jwk, unapproved_key.jwk)
+
+ if expired_key.jwk == jwk:
+ expired_key_found = True
+
+ self.assertTrue(expired_key_found)
+
+
+ def test_get_service_key(self):
+ # 200 for an approved key
+ self.getResponse('key_server.get_service_key', service='sample_service', kid='kid1')
+
+ # 409 for an unapproved key
+ self.getResponse('key_server.get_service_key', service='sample_service', kid='kid3',
+ expected_code=409)
+
+ # 404 for a non-existent key
+ self.getResponse('key_server.get_service_key', service='sample_service', kid='kid9999',
+ expected_code=404)
+
+ # 403 for an approved but expired key that is inside of the 2 week window.
+ self.getResponse('key_server.get_service_key', service='sample_service', kid='kid6',
+ expected_code=403)
+
+ # 404 for an approved, expired key that is outside of the 2 week window.
+ self.getResponse('key_server.get_service_key', service='sample_service', kid='kid7',
+ expected_code=404)
+
+ def test_put_service_key(self):
+ # No Authorization header should yield a 400
+ self.putResponse('key_server.put_service_key', service='sample_service', kid='kid420',
+ expected_code=400)
+
+ # Mint a JWT with our test payload
+ private_key = RSA.generate(2048)
+ jwk = RSAKey(key=private_key.publickey()).serialize()
+ payload = self._get_test_jwt_payload()
+ token = jwt.encode(payload, private_key.exportKey('PEM'), 'RS256')
+
+ # Invalid service name should yield a 400. 
+ self.putResponse('key_server.put_service_key', service='sample service', kid='kid420', + headers={ + 'Authorization': 'Bearer %s' % token, + 'Content-Type': 'application/json', + }, data=jwk, expected_code=400) + + # Publish a new key + with assert_action_logged('service_key_create'): + self.putResponse('key_server.put_service_key', service='sample_service', kid='kid420', + headers={ + 'Authorization': 'Bearer %s' % token, + 'Content-Type': 'application/json', + }, data=jwk, expected_code=202) + + # Ensure that the key exists but is unapproved. + self.getResponse('key_server.get_service_key', service='sample_service', kid='kid420', + expected_code=409) + + # Rotate that new key + with assert_action_logged('service_key_rotate'): + token = jwt.encode(payload, private_key.exportKey('PEM'), 'RS256', headers={'kid': 'kid420'}) + self.putResponse('key_server.put_service_key', service='sample_service', kid='kid6969', + headers={ + 'Authorization': 'Bearer %s' % token, + 'Content-Type': 'application/json', + }, data=jwk, expected_code=200) + + # Rotation should only work when signed by the previous key + private_key = RSA.generate(2048) + jwk = RSAKey(key=private_key.publickey()).serialize() + token = jwt.encode(payload, private_key.exportKey('PEM'), 'RS256', headers={'kid': 'kid420'}) + self.putResponse('key_server.put_service_key', service='sample_service', kid='kid6969', + headers={ + 'Authorization': 'Bearer %s' % token, + 'Content-Type': 'application/json', + }, data=jwk, expected_code=403) + + + def test_delete_service_key(self): + # No Authorization header should yield a 400 + self.deleteResponse('key_server.delete_service_key', expected_code=400, + service='sample_service', kid='kid1') + + # Generate two keys and approve one + private_key, _ = model.service_keys.generate_service_key('sample_service', None, kid='kid123') + model.service_keys.generate_service_key('sample_service', None, kid='kid321') + model.service_keys.approve_service_key('kid123', 1, 
ServiceKeyApprovalType.SUPERUSER) + + # Mint a JWT with our test payload + token = jwt.encode(self._get_test_jwt_payload(), private_key.exportKey('PEM'), 'RS256', + headers={'kid': 'kid123'}) + + # Using the credentials of our approved key, delete our unapproved key + with assert_action_logged('service_key_delete'): + self.deleteResponse('key_server.delete_service_key', + headers={'Authorization': 'Bearer %s' % token}, + expected_code=204, service='sample_service', kid='kid321') + + # Attempt to delete a key signed by a key from a different service + bad_token = jwt.encode(self._get_test_jwt_payload(), private_key.exportKey('PEM'), 'RS256', + headers={'kid': 'kid5'}) + self.deleteResponse('key_server.delete_service_key', + headers={'Authorization': 'Bearer %s' % bad_token}, + expected_code=403, service='sample_service', kid='kid123') + + # Delete a self-signed, approved key + with assert_action_logged('service_key_delete'): + self.deleteResponse('key_server.delete_service_key', + headers={'Authorization': 'Bearer %s' % token}, + expected_code=204, service='sample_service', kid='kid123') + if __name__ == '__main__': unittest.main() diff --git a/util/generatepresharedkey.py b/util/generatepresharedkey.py new file mode 100644 index 000000000..4343deb51 --- /dev/null +++ b/util/generatepresharedkey.py @@ -0,0 +1,47 @@ +from app import app +from data import model +from data.database import ServiceKeyApprovalType +from data.model.log import log_action +from timeparse import ParseDatetime + +import argparse + +def generate_key(service, name, expiration_date=None, notes=None): + metadata = { + 'created_by': 'CLI tool', + } + + # Generate a key with a private key that we *never save*. + (private_key, key) = model.service_keys.generate_service_key(service, expiration_date, + metadata=metadata, + name=name) + # Auto-approve the service key. 
+ model.service_keys.approve_service_key(key.kid, None, ServiceKeyApprovalType.AUTOMATIC, + notes=notes or '') + + # Log the creation and auto-approval of the service key. + key_log_metadata = { + 'kid': key.kid, + 'preshared': True, + 'service': service, + 'name': name, + 'expiration_date': expiration_date, + 'auto_approved': True, + } + + log_action('service_key_create', None, metadata=key_log_metadata) + log_action('service_key_approve', None, metadata=key_log_metadata) + return private_key, key.kid + + +if __name__ == '__main__': + parser = argparse.ArgumentParser(description='Generates a preshared key') + parser.add_argument('service', help='The service name for which the key is being generated') + parser.add_argument('name', help='The friendly name for the key') + parser.add_argument('--expiration', help='The optional expiration date/time for the key', + default=None, action=ParseDatetime) + parser.add_argument('--notes', help='Optional notes about the key', default=None) + + args = parser.parse_args() + generated, _ = generate_key(args.service, args.name, args.expiration, args.notes) + print generated.exportKey('PEM') diff --git a/util/secscan/api.py b/util/secscan/api.py index 65ff8b37a..344865e9c 100644 --- a/util/secscan/api.py +++ b/util/secscan/api.py @@ -42,9 +42,6 @@ class SecurityScannerAPI(object): self._security_config = config.get('SECURITY_SCANNER') self._target_version = self._security_config['ENGINE_VERSION_TARGET'] - self._certificate = config_validator.cert() - self._keys = config_validator.keypair() - def _get_image_url(self, image): """ Gets the download URL for an image and if the storage doesn't exist, @@ -253,8 +250,14 @@ class SecurityScannerAPI(object): api_url = urljoin(endpoint, '/' + security_config['API_VERSION']) + '/' url = urljoin(api_url, relative_url) + signer_proxy_url = self.config.get('JWTPROXY_SIGNER', 'localhost:8080') + with CloseForLongOperation(self.config): logger.debug('%sing security URL %s', method.upper(), url) 
return client.request(method, url, json=body, params=params, timeout=timeout, - cert=self._keys, verify=self._certificate, headers=headers) + verify='/conf/mitm.cert', headers=headers, + proxies={ + 'https': 'https://' + signer_proxy_url, + 'http': 'http://' + signer_proxy_url + }) diff --git a/util/secscan/validator.py b/util/secscan/validator.py index 44739a825..bd82cec26 100644 --- a/util/secscan/validator.py +++ b/util/secscan/validator.py @@ -57,9 +57,5 @@ class SecurityConfigValidator(object): logger.debug('ENDPOINT field in SECURITY_SCANNER configuration must start with http or https') return False - if endpoint.startswith('https://') and (self._certificate is False or self._keys is None): - logger.debug('Certificate and key pair required for talking to security worker over HTTPS') - return False - return True diff --git a/util/security/fingerprint.py b/util/security/fingerprint.py new file mode 100644 index 000000000..1341d5780 --- /dev/null +++ b/util/security/fingerprint.py @@ -0,0 +1,35 @@ +import collections +import json + +from hashlib import sha256 + + +def canonicalize(json_obj): + """This function canonicalizes a Python object that will be serialized as JSON. + + Args: + json_obj (object): the Python object that will later be serialized as JSON. + + Returns: + object: json_obj now sorted to its canonical form. + + """ + if isinstance(json_obj, collections.MutableMapping): + sorted_obj = sorted({key: canonicalize(val) for key, val in json_obj.items()}.items()) + return collections.OrderedDict(sorted_obj) + elif isinstance(json_obj, (list, tuple)): + return [canonicalize(val) for val in json_obj] + return json_obj + + +def canonical_kid(jwk): + """This function returns the SHA256 hash of a canonical JWK. + + Args: + jwk (object): the JWK for which a kid will be generated. + + Returns: + string: the unique kid for the given JWK. 
+ + """ + return sha256(json.dumps(canonicalize(jwk), separators=(',', ':'))).hexdigest() diff --git a/web.py b/web.py index 445c2fa5b..4c1a4f4c0 100644 --- a/web.py +++ b/web.py @@ -4,14 +4,15 @@ import logging.config from app import app as application from endpoints.api import api_bp -from endpoints.web import web -from endpoints.webhooks import webhooks -from endpoints.realtime import realtime -from endpoints.oauthlogin import oauthlogin +from endpoints.bitbuckettrigger import bitbuckettrigger from endpoints.githubtrigger import githubtrigger from endpoints.gitlabtrigger import gitlabtrigger -from endpoints.bitbuckettrigger import bitbuckettrigger +from endpoints.key_server import key_server +from endpoints.oauthlogin import oauthlogin +from endpoints.realtime import realtime from endpoints.secscan import secscan +from endpoints.web import web +from endpoints.webhooks import webhooks if os.environ.get('DEBUGLOG') == 'true': logging.config.fileConfig('conf/logging_debug.conf', disable_existing_loggers=False) @@ -25,3 +26,4 @@ application.register_blueprint(api_bp, url_prefix='/api') application.register_blueprint(webhooks, url_prefix='/webhooks') application.register_blueprint(realtime, url_prefix='/realtime') application.register_blueprint(secscan, url_prefix='/secscan') +application.register_blueprint(key_server, url_prefix='/keys') diff --git a/workers/service_key_worker.py b/workers/service_key_worker.py new file mode 100644 index 000000000..59a07ef62 --- /dev/null +++ b/workers/service_key_worker.py @@ -0,0 +1,32 @@ +import logging +from datetime import datetime, timedelta + +from app import app +from data.model.service_keys import set_key_expiration +from workers.worker import Worker + +logger = logging.getLogger(__name__) + +class ServiceKeyWorker(Worker): + def __init__(self): + super(ServiceKeyWorker, self).__init__() + self.add_operation(self._refresh_service_keys, + app.config.get('QUAY_SERVICE_KEY_REFRESH', 60)*60) + + def _refresh_service_keys(self): 
+ """ + Refreshes active service keys so they don't get garbage collected. + """ + with open("/conf/quay.kid") as f: + kid = f.read() + + minutes_until_expiration = app.config.get('QUAY_SERVICE_KEY_EXPIRATION', 120) + expiration = timedelta(minutes=minutes_until_expiration) + + logger.debug('Starting refresh of automatic service keys') + set_key_expiration(kid, datetime.now() + expiration) + logger.debug('Finished refresh of automatic service keys') + +if __name__ == "__main__": + worker = ServiceKeyWorker() + worker.start()