Add signing to the ACI converter

Joseph Schorr 2015-02-04 15:29:24 -05:00
parent 81b5b8d1dc
commit bfb0784abc
14 changed files with 311 additions and 90 deletions


@@ -20,7 +20,7 @@ ADD requirements.txt requirements.txt
 RUN virtualenv --distribute venv
 RUN venv/bin/pip install -r requirements.txt
-RUN apt-get remove -y --auto-remove python-dev g++ libjpeg62-dev libevent-dev libldap2-dev libsasl2-dev libpq-dev libffi-dev
+RUN apt-get remove -y --auto-remove python-dev g++ libjpeg62-dev libevent-dev libldap2-dev libsasl2-dev libpq-dev libffi-dev gpgme
 ###############################
 # END COMMON SECION

app.py

@@ -26,6 +26,7 @@ from data.archivedlogs import LogArchive
 from data.queue import WorkQueue
 from data.userevent import UserEventsBuilderModule
 from avatars.avatars import Avatar
+from util.signing import Signer

 # pylint: disable=invalid-name,too-many-public-methods,too-few-public-methods,too-many-ancestors
@@ -55,6 +56,7 @@ class Flask(BaseFlask):
     return Config(root_path, self.default_config)


+OVERRIDE_CONFIG_DIRECTORY = 'conf/stack/'
 OVERRIDE_CONFIG_YAML_FILENAME = 'conf/stack/config.yaml'
 OVERRIDE_CONFIG_PY_FILENAME = 'conf/stack/config.py'
@@ -135,6 +137,7 @@ build_logs = BuildLogs(app)
 queue_metrics = QueueMetrics(app)
 authentication = UserAuthentication(app)
 userevents = UserEventsBuilderModule(app)
+signer = Signer(app, OVERRIDE_CONFIG_DIRECTORY)
 github_login = GithubOAuthConfig(app, 'GITHUB_LOGIN_CONFIG')
 github_trigger = GithubOAuthConfig(app, 'GITHUB_TRIGGER_CONFIG')
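For reference, a minimal sketch (not part of this commit) of the configuration the new signer expects. The key names come from the checks in util/signing.py further down; the values and file names here are illustrative only.

# Hypothetical entries for conf/stack/config.py (or the config.yaml equivalent).
# Key names are taken from util/signing.py; values are placeholders.
SIGNING_ENGINE = 'gpg2'                            # selects GPG2Signer from SIGNING_ENGINES
GPG2_PRIVATE_KEY_NAME = 'ACI Signing Key'          # key id/uid passed to ctx.get_key()
GPG2_PRIVATE_KEY_FILENAME = 'signing-private.gpg'  # resolved under OVERRIDE_CONFIG_DIRECTORY
GPG2_PUBLIC_KEY_FILENAME = 'signing-public.gpg'    # served back from /aci-signing-key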


@@ -44,7 +44,7 @@ class DefaultConfig(object):
   SEND_FILE_MAX_AGE_DEFAULT = 0
   POPULATE_DB_TEST_DATA = True
   PREFERRED_URL_SCHEME = 'http'
-  SERVER_HOSTNAME = 'localhost:5000'
+  SERVER_HOSTNAME = '10.0.2.2'
   AVATAR_KIND = 'local'


@@ -352,6 +352,24 @@ class ImageStorageTransformation(BaseModel):
   name = CharField(index=True, unique=True)


+class ImageStorageSignatureKind(BaseModel):
+  name = CharField(index=True, unique=True)
+
+
+class ImageStorageSignature(BaseModel):
+  storage = ForeignKeyField(ImageStorage, index=True)
+  kind = ForeignKeyField(ImageStorageSignatureKind)
+  signature = TextField(null=True)
+  uploading = BooleanField(default=True, null=True)
+
+  class Meta:
+    database = db
+    read_slaves = (read_slave,)
+    indexes = (
+      (('kind', 'storage'), True),
+    )
+
+
 class DerivedImageStorage(BaseModel):
   source = ForeignKeyField(ImageStorage, null=True, related_name='source')
   derivative = ForeignKeyField(ImageStorage, related_name='derivative')
@@ -550,4 +568,4 @@ all_models = [User, Repository, Image, AccessToken, Role, RepositoryPermission,
               Notification, ImageStorageLocation, ImageStoragePlacement,
               ExternalNotificationEvent, ExternalNotificationMethod, RepositoryNotification,
               RepositoryAuthorizedEmail, ImageStorageTransformation, DerivedImageStorage,
-              TeamMemberInvite]
+              TeamMemberInvite, ImageStorageSignature, ImageStorageSignatureKind]
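These hunks add the models but no schema migration. A minimal sketch, not part of this commit, of provisioning the two new tables in a development database with peewee, assuming the usual db initialization has already run:

# Sketch only: create the new signature tables and seed the 'gpg2' kind
# (initdb.py below does the seeding for freshly created databases).
from data.database import db, ImageStorageSignature, ImageStorageSignatureKind

db.connect()
db.create_tables([ImageStorageSignatureKind, ImageStorageSignature], safe=True)
ImageStorageSignatureKind.create(name='gpg2')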


@@ -14,7 +14,8 @@ from data.database import (User, Repository, Image, AccessToken, Role, Repositor
                            ExternalNotificationEvent, ExternalNotificationMethod,
                            RepositoryNotification, RepositoryAuthorizedEmail, TeamMemberInvite,
                            DerivedImageStorage, ImageStorageTransformation, random_string_generator,
-                           db, BUILD_PHASE, QuayUserField)
+                           db, BUILD_PHASE, QuayUserField, ImageStorageSignature,
+                           ImageStorageSignatureKind)
 from peewee import JOIN_LEFT_OUTER, fn
 from util.validation import (validate_username, validate_email, validate_password,
                              INVALID_PASSWORD_MESSAGE)
@@ -1317,7 +1318,28 @@ def find_create_or_link_image(docker_image_id, repository, username, translation
                              ancestors='/')


-def find_or_create_derived_storage(source, transformation_name, preferred_location):
+def find_or_create_storage_signature(storage, signature_kind):
+  found = lookup_storage_signature(storage, signature_kind)
+  if found is None:
+    kind = ImageStorageSignatureKind.get(name=signature_kind)
+    found = ImageStorageSignature.create(storage=storage, kind=kind)
+
+  return found
+
+
+def lookup_storage_signature(storage, signature_kind):
+  kind = ImageStorageSignatureKind.get(name=signature_kind)
+  try:
+    return (ImageStorageSignature
+            .select()
+            .where(ImageStorageSignature.storage == storage,
+                   ImageStorageSignature.kind == kind)
+            .get())
+  except ImageStorageSignature.DoesNotExist:
+    return None
+
+
+def find_derived_storage(source, transformation_name):
   try:
     found = (ImageStorage
              .select(ImageStorage, DerivedImageStorage)
@@ -1330,6 +1352,14 @@ def find_or_create_derived_storage(source, transformation_name, preferred_locati
     found.locations = {placement.location.name for placement in found.imagestorageplacement_set}
     return found
   except ImageStorage.DoesNotExist:
+    return None
+
+
+def find_or_create_derived_storage(source, transformation_name, preferred_location):
+  existing = find_derived_storage(source, transformation_name)
+  if existing is not None:
+    return existing
+
   logger.debug('Creating storage dervied from source: %s', source.uuid)
   trans = ImageStorageTransformation.get(name=transformation_name)
   new_storage = _create_storage(preferred_location)
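Taken together, the new helpers split lookup from creation. An illustrative flow (the verbs endpoints below do exactly this via _sign_sythentic_image and _repo_verb_signature); 'storage' and 'signature_data' are placeholders:

# Illustrative only: 'storage' is an existing ImageStorage row and
# 'signature_data' an ASCII-armored detached signature produced elsewhere.
from data import model

derived = model.find_derived_storage(storage, 'aci')   # None if the ACI was never generated
if derived is not None and not derived.uploading:
  entry = model.find_or_create_storage_signature(derived, 'gpg2')
  entry.signature = signature_data
  entry.uploading = False
  entry.save()

  # Read path, as used by _repo_verb_signature: None when no signature of this kind exists.
  assert model.lookup_storage_signature(derived, 'gpg2') is not None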


@@ -2,9 +2,9 @@ import logging
 import json
 import hashlib

-from flask import redirect, Blueprint, abort, send_file
+from flask import redirect, Blueprint, abort, send_file, make_response

-from app import app
+from app import app, signer
 from auth.auth import process_auth
 from auth.permissions import ReadRepositoryPermission
 from data import model
@@ -53,6 +53,26 @@ def _open_stream(formatter, namespace, repository, tag, synthetic_image_id, imag
   return stream.read


+def _sign_sythentic_image(verb, linked_storage_uuid, queue_file):
+  signature = None
+  try:
+    signature = signer.detached_sign(queue_file)
+  except:
+    logger.exception('Exception when signing %s image %s', verb, linked_storage_uuid)
+    return
+
+  with database.UseThenDisconnect(app.config):
+    try:
+      derived = model.get_storage_by_uuid(linked_storage_uuid)
+    except model.InvalidImageException:
+      return
+
+    signature_entry = model.find_or_create_storage_signature(derived, signer.name)
+    signature_entry.signature = signature
+    signature_entry.uploading = False
+    signature_entry.save()
+
+
 def _write_synthetic_image_to_storage(verb, linked_storage_uuid, linked_locations, queue_file):
   store = Storage(app)
@@ -76,10 +96,13 @@ def _write_synthetic_image_to_storage(verb, linked_storage_uuid, linked_location

 # pylint: disable=too-many-locals
-def _repo_verb(namespace, repository, tag, verb, formatter, checker=None, **kwargs):
+def _verify_repo_verb(store, namespace, repository, tag, verb, checker=None):
   permission = ReadRepositoryPermission(namespace, repository)

   # pylint: disable=no-member
-  if permission.can() or model.repository_is_public(namespace, repository):
+  if not permission.can() and not model.repository_is_public(namespace, repository):
+    abort(403)
+
   # Lookup the requested tag.
   try:
     tag_image = model.get_tag_image(namespace, repository, tag)
@@ -92,7 +115,6 @@ def _repo_verb(namespace, repository, tag, verb, formatter, checker=None, **kwar
     abort(404)

   # If there is a data checker, call it first.
-  store = Storage(app)
   uuid = repo_image.storage.uuid
   image_json = None
@@ -104,9 +126,45 @@ def _repo_verb(namespace, repository, tag, verb, formatter, checker=None, **kwar
     logger.debug('Check mismatch on %s/%s:%s, verb %s', namespace, repository, tag, verb)
     abort(404)

+  return (repo_image, tag_image, image_json)
+
+
+# pylint: disable=too-many-locals
+def _repo_verb_signature(namespace, repository, tag, verb, checker=None, **kwargs):
+  # Verify that the image exists and that we have access to it.
+  store = Storage(app)
+  result = _verify_repo_verb(store, namespace, repository, tag, verb, checker)
+  (repo_image, tag_image, image_json) = result
+
+  # Lookup the derived image storage for the verb.
+  derived = model.find_derived_storage(repo_image.storage, verb)
+  if derived is None or derived.uploading:
+    abort(404)
+
+  # Check if we have a valid signer configured.
+  if not signer.name:
+    abort(404)
+
+  # Lookup the signature for the verb.
+  signature_entry = model.lookup_storage_signature(derived, signer.name)
+  if signature_entry is None:
+    abort(404)
+
+  # Return the signature.
+  return make_response(signature_entry.signature)
+
+
+# pylint: disable=too-many-locals
+def _repo_verb(namespace, repository, tag, verb, formatter, sign=False, checker=None, **kwargs):
+  # Verify that the image exists and that we have access to it.
+  store = Storage(app)
+  result = _verify_repo_verb(store, namespace, repository, tag, verb, checker)
+  (repo_image, tag_image, image_json) = result
+
   # Log the action.
   track_and_log('repo_verb', repo_image.repository, tag=tag, verb=verb, **kwargs)

+  # Lookup/create the derived image storage for the verb.
   derived = model.find_or_create_derived_storage(repo_image.storage, verb,
                                                  store.preferred_locations[0])
@@ -125,6 +183,8 @@ def _repo_verb(namespace, repository, tag, verb, formatter, checker=None, **kwar
     return send_file(store.stream_read_file(derived.locations, derived_layer_path))

   # Load the ancestry for the image.
+  uuid = repo_image.storage.uuid
+
   logger.debug('Building and returning derived %s image %s', verb, derived.uuid)
   ancestry_data = store.get_content(repo_image.storage.locations, store.image_ancestry_path(uuid))
   full_image_list = json.loads(ancestry_data)
@@ -137,12 +197,12 @@ def _repo_verb(namespace, repository, tag, verb, formatter, checker=None, **kwar
   # Calculate a synthetic image ID.
   synthetic_image_id = hashlib.sha256(tag_image.docker_image_id + ':' + verb).hexdigest()

-  # Create a queue process to generate the data. The queue files will read from the process
-  # and send the results to the client and storage.
   def _cleanup():
     # Close any existing DB connection once the process has exited.
     database.close_db_filter(None)

+  # Create a queue process to generate the data. The queue files will read from the process
+  # and send the results to the client and storage.
   args = (formatter, namespace, repository, tag, synthetic_image_id, image_json, full_image_list)
   queue_process = QueueProcess(_open_stream,
                                8 * 1024, 10 * 1024 * 1024,  # 8K/10M chunk/max
@@ -151,6 +211,11 @@ def _repo_verb(namespace, repository, tag, verb, formatter, checker=None, **kwar
   client_queue_file = QueueFile(queue_process.create_queue(), 'client')
   storage_queue_file = QueueFile(queue_process.create_queue(), 'storage')

+  # If signing is required, add a QueueFile for signing the image as we stream it out.
+  signing_queue_file = None
+  if sign and signer.name:
+    signing_queue_file = QueueFile(queue_process.create_queue(), 'signing')
+
   # Start building.
   queue_process.run()
@@ -158,19 +223,18 @@ def _repo_verb(namespace, repository, tag, verb, formatter, checker=None, **kwar
   storage_args = (verb, derived.uuid, derived.locations, storage_queue_file)
   QueueProcess.run_process(_write_synthetic_image_to_storage, storage_args, finished=_cleanup)

+  if sign and signer.name:
+    signing_args = (verb, derived.uuid, signing_queue_file)
+    QueueProcess.run_process(_sign_sythentic_image, signing_args, finished=_cleanup)
+
   # Close the database handle here for this process before we send the long download.
   database.close_db_filter(None)

   # Return the client's data.
   return send_file(client_queue_file)

-  abort(403)
-
-@verbs.route('/aci/<server>/<namespace>/<repository>/<tag>/aci/<os>/<arch>/', methods=['GET'])
-@process_auth
-# pylint: disable=unused-argument
-def get_aci_image(server, namespace, repository, tag, os, arch):
+
+def os_arch_checker(os, arch):
   def checker(image_json):
     # Verify the architecture and os.
     operating_system = image_json.get('os', 'linux')
@@ -183,8 +247,23 @@ def get_aci_image(server, namespace, repository, tag, os, arch):
     return True

+  return checker
+
+
+@verbs.route('/aci/<server>/<namespace>/<repository>/<tag>/sig/<os>/<arch>/', methods=['GET'])
+@process_auth
+# pylint: disable=unused-argument
+def get_aci_signature(server, namespace, repository, tag, os, arch):
+  return _repo_verb_signature(namespace, repository, tag, 'aci', checker=os_arch_checker(os, arch),
+                              os=os, arch=arch)
+
+
+@verbs.route('/aci/<server>/<namespace>/<repository>/<tag>/aci/<os>/<arch>/', methods=['GET'])
+@process_auth
+# pylint: disable=unused-argument
+def get_aci_image(server, namespace, repository, tag, os, arch):
   return _repo_verb(namespace, repository, tag, 'aci', ACIImage(),
-                    checker=checker, os=os, arch=arch)
+                    sign=True, checker=os_arch_checker(os, arch), os=os, arch=arch)


 @verbs.route('/squash/<namespace>/<repository>/<tag>', methods=['GET'])
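A client-side sketch, not part of this commit, of how the two ACI routes and the signing-key endpoint fit together, using the requests library. The hostname, namespace and repository are placeholders, and the /c1 prefix follows the ac-discovery template further down.

# Placeholder host/repo; URL shapes follow the /aci/... and /sig/... routes above.
import requests

host = 'quay.example.com'
base = 'https://%s/c1/aci/%s/myorg/myrepo/latest' % (host, host)

aci = requests.get(base + '/aci/linux/amd64/')            # streamed ACI from _repo_verb(..., sign=True)
sig = requests.get(base + '/sig/linux/amd64/')            # detached signature from _repo_verb_signature
key = requests.get('https://%s/aci-signing-key' % host)   # public key from the new web route

# Verification itself would then be done with gpg, e.g.:
#   gpg --import signing-key.gpg
#   gpg --verify myrepo.asc myrepo.aci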


@@ -1,7 +1,7 @@
 import logging

 from flask import (abort, redirect, request, url_for, make_response, Response,
-                   Blueprint, send_from_directory, jsonify)
+                   Blueprint, send_from_directory, jsonify, send_file)

 from avatar_generator import Avatar
 from flask.ext.login import current_user
@@ -10,7 +10,7 @@ from health.healthcheck import HealthCheck
 from data import model
 from data.model.oauth import DatabaseAuthorizationProvider
-from app import app, billing as stripe, build_logs, avatar
+from app import app, billing as stripe, build_logs, avatar, signer
 from auth.auth import require_session_login, process_oauth
 from auth.permissions import AdministerOrganizationPermission, ReadRepositoryPermission
 from util.invoice import renderInvoiceToPdf
@@ -57,6 +57,14 @@ def snapshot(path = ''):
   abort(404)


+@web.route('/aci-signing-key')
+@no_cache
+def aci_signing_key():
+  if not signer.name:
+    abort(404)
+
+  return send_file(signer.public_key_path)
+
+
 @web.route('/plans/')
 @no_cache
 @route_show_if(features.BILLING)


@@ -180,8 +180,11 @@ class ACIImage(TarImageFormatter):
       "group": config.get('Group', '') or 'root',
       "eventHandlers": [],
       "workingDirectory": config.get('WorkingDir', '') or '/',
-      "environment": [{"name": key, "value": value}
-                      for (key, value) in [e.split('=') for e in config.get('Env')]],
+      # TODO(jschorr): Use the commented version once rocket has upgraded to 0.3.0.
+      #"environment": [{"name": key, "value": value}
+      #                for (key, value) in [e.split('=') for e in config.get('Env')]],
+      "environment": {key: value
+                      for (key, value) in [e.split('=') for e in config.get('Env')]},
       "isolators": ACIImage._build_isolators(config),
       "mountPoints": ACIImage._build_volumes(config),
       "ports": ACIImage._build_ports(config),


@@ -257,6 +257,8 @@ def initialize_database():
   ImageStorageTransformation.create(name='squash')
   ImageStorageTransformation.create(name='aci')

+  ImageStorageSignatureKind.create(name='gpg2')
+
   # NOTE: These MUST be copied over to NotificationKind, since every external
   # notification can also generate a Quay.io notification.
   ExternalNotificationEvent.create(name='repo_push')


@@ -41,3 +41,4 @@ git+https://github.com/DevTable/anunidecode.git
 git+https://github.com/DevTable/avatar-generator.git
 git+https://github.com/DevTable/pygithub.git
 gipc
+pygpgme


@@ -48,6 +48,7 @@ python-dateutil==2.2
 python-ldap==2.4.18
 python-magic==0.4.6
 pytz==2014.9
+pygpgme==0.3
 raven==5.1.1
 redis==2.10.3
 reportlab==2.7


@@ -11,6 +11,8 @@
 <meta name="google-site-verification" content="GalDznToijTsHYmLjJvE4QaB9uk_IP16aaGDz5D75T4" />
 <meta name="fragment" content="!" />
 <meta name="ac-discovery" content="{{ hostname }} {{ preferred_scheme }}://{{ hostname }}/c1/aci/{name}/{version}/{ext}/{os}/{arch}/">
+<meta name="ac-discovery-pubkeys" content="{{ hostname }} {{ preferred_scheme }}://{{ hostname }}/aci-signing-key">
 {% endblock %}

util/signing.py (new file)

@@ -0,0 +1,69 @@
+import gpgme
+import os
+
+from StringIO import StringIO
+
+
+class GPG2Signer(object):
+  """ Helper class for signing data using GPG2. """
+  def __init__(self, app, key_directory):
+    if not app.config.get('GPG2_PRIVATE_KEY_NAME'):
+      raise Exception('Missing configuration key GPG2_PRIVATE_KEY_NAME')
+
+    if not app.config.get('GPG2_PRIVATE_KEY_FILENAME'):
+      raise Exception('Missing configuration key GPG2_PRIVATE_KEY_FILENAME')
+
+    if not app.config.get('GPG2_PUBLIC_KEY_FILENAME'):
+      raise Exception('Missing configuration key GPG2_PUBLIC_KEY_FILENAME')
+
+    self._ctx = gpgme.Context()
+    self._ctx.armor = True
+    self._private_key_name = app.config['GPG2_PRIVATE_KEY_NAME']
+    self._public_key_path = os.path.join(key_directory, app.config['GPG2_PUBLIC_KEY_FILENAME'])
+
+    key_file = os.path.join(key_directory, app.config['GPG2_PRIVATE_KEY_FILENAME'])
+    if not os.path.exists(key_file):
+      raise Exception('Missing key file %s' % key_file)
+
+    with open(key_file, 'rb') as fp:
+      self._ctx.import_(fp)
+
+  @property
+  def name(self):
+    return 'gpg2'
+
+  @property
+  def public_key_path(self):
+    return self._public_key_path
+
+  def detached_sign(self, stream):
+    """ Signs the given stream, returning the signature. """
+    ctx = self._ctx
+    ctx.signers = [ctx.get_key(self._private_key_name)]
+
+    signature = StringIO()
+    new_sigs = ctx.sign(stream, signature, gpgme.SIG_MODE_DETACH)
+
+    signature.seek(0)
+    return signature.getvalue()
+
+
+class Signer(object):
+  def __init__(self, app=None, key_directory=None):
+    self.app = app
+    if app is not None:
+      self.state = self.init_app(app, key_directory)
+    else:
+      self.state = None
+
+  def init_app(self, app, key_directory):
+    preference = app.config.get('SIGNING_ENGINE', None)
+    if preference is None:
+      return None
+
+    return SIGNING_ENGINES[preference](app, key_directory)
+
+  def __getattr__(self, name):
+    return getattr(self.state, name, None)
+
+
+SIGNING_ENGINES = {
+  'gpg2': GPG2Signer
+}
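A minimal usage sketch for the new module, not part of this commit, assuming an app whose config carries the GPG2_* keys checked above:

# Sketch: sign an in-memory payload the same way _sign_sythentic_image signs the
# streamed ACI. Uses the 'signer' instance wired up in app.py above.
from StringIO import StringIO
from app import signer

# signer.name is None when SIGNING_ENGINE is unset, because __getattr__ proxies
# every attribute to self.state (and self.state is None in that case).
if signer.name:
  signature = signer.detached_sign(StringIO('payload to sign'))
  print signature   # ASCII-armored detached signature (ctx.armor = True)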


@@ -44,6 +44,11 @@ class TarLayerFormat(object):
         # properly handle large filenames.
         clone = copy.deepcopy(tar_info)
         clone.name = os.path.join(self.path_prefix, clone.name)
+
+        # If the entry is a link of some kind, and it is not relative, then prefix it as well.
+        if clone.linkname and clone.type == tarfile.LNKTYPE:
+          clone.linkname = os.path.join(self.path_prefix, clone.linkname)
+
         yield clone.tobuf()
       else:
         yield tar_info.tobuf()
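Why linkname needs the prefix too: for a hard link, tarfile stores the link target in linkname, so once member names move under path_prefix the target has to move with it. An illustrative example with made-up paths, assuming the ACI formatter uses 'rootfs' as its prefix:

import tarfile

# A hard link inside a layer tar, before prefixing.
info = tarfile.TarInfo('usr/bin/python2.7')
info.type = tarfile.LNKTYPE
info.linkname = 'usr/bin/python2'

# After the logic above with path_prefix = 'rootfs':
#   info.name     -> 'rootfs/usr/bin/python2.7'
#   info.linkname -> 'rootfs/usr/bin/python2'
# Without the fix the link target would still point outside the rewritten tree.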