diff --git a/app.py b/app.py
index 0fcda573c..01c4ae6ab 100644
--- a/app.py
+++ b/app.py
@@ -35,6 +35,7 @@ from util.saas.metricqueue import MetricQueue
 from util.config.provider import get_config_provider
 from util.config.configutil import generate_secret_key
 from util.config.superusermanager import SuperUserManager
+from util.secscan.secscanendpoint import SecurityScanEndpoint
 
 OVERRIDE_CONFIG_DIRECTORY = 'conf/stack/'
 OVERRIDE_CONFIG_YAML_FILENAME = 'conf/stack/config.yaml'
@@ -147,6 +148,7 @@ image_replication_queue = WorkQueue(app.config['REPLICATION_QUEUE_NAME'], tf)
 dockerfile_build_queue = WorkQueue(app.config['DOCKERFILE_BUILD_QUEUE_NAME'], tf,
                                    reporter=MetricQueueReporter(metric_queue))
 notification_queue = WorkQueue(app.config['NOTIFICATION_QUEUE_NAME'], tf)
+secscan_endpoint = SecurityScanEndpoint(app, config_provider)
 
 database.configure(app.config)
 model.config.app_config = app.config
diff --git a/conf/init/service/securityworker/log/run b/conf/init/service/securityworker/log/run
new file mode 100644
index 000000000..8de3dfdec
--- /dev/null
+++ b/conf/init/service/securityworker/log/run
@@ -0,0 +1,2 @@
+#!/bin/sh
+exec logger -i -t securityworker
diff --git a/conf/init/service/securityworker/run b/conf/init/service/securityworker/run
new file mode 100644
index 000000000..c40f9aa4b
--- /dev/null
+++ b/conf/init/service/securityworker/run
@@ -0,0 +1,8 @@
+#! /bin/bash
+
+echo 'Starting security scanner worker'
+
+cd /
+venv/bin/python -m workers.securityworker 2>&1
+
+echo 'Security scanner worker exited'
diff --git a/config.py b/config.py
index fd7813c67..40d685706 100644
--- a/config.py
+++ b/config.py
@@ -250,3 +250,12 @@ class DefaultConfig(object):
 
   # Experiment: Async garbage collection
   EXP_ASYNC_GARBAGE_COLLECTION = []
+
+  # Security scanner
+  FEATURE_SECURITY_SCANNER = False
+  SECURITY_SCANNER = {
+    'ENDPOINT': 'http://192.168.99.100:6060',
+    'ENGINE_VERSION_TARGET': 1,
+    'API_VERSION': 'v1',
+    'API_TIMEOUT_SECONDS': 10,
+  }
diff --git a/data/database.py b/data/database.py
index cc0beafa6..c87ece328 100644
--- a/data/database.py
+++ b/data/database.py
@@ -472,9 +472,6 @@ class RepositoryBuildTrigger(BaseModel):
   pull_robot = QuayUserField(allows_robots=True, null=True, related_name='triggerpullrobot',
                              robot_null_delete=True)
 
-  # TODO(jschorr): Remove this column once we verify the backfill has succeeded.
-  used_legacy_github = BooleanField(null=True, default=False)
-
 
 class EmailConfirmation(BaseModel):
   code = CharField(default=random_string_generator(), unique=True, index=True)
@@ -487,11 +484,12 @@ class EmailConfirmation(BaseModel):
 
 class ImageStorage(BaseModel):
   uuid = CharField(default=uuid_generator, index=True, unique=True)
-  checksum = CharField(null=True)
+  checksum = CharField(null=True) # TODO remove when all checksums have been moved back to Image
   image_size = BigIntegerField(null=True)
   uncompressed_size = BigIntegerField(null=True)
   uploading = BooleanField(default=True, null=True)
   cas_path = BooleanField(default=True)
+  content_checksum = CharField(null=True, index=True)
 
 
 class ImageStorageTransformation(BaseModel):
@@ -573,6 +571,11 @@ class Image(BaseModel):
   command = TextField(null=True)
   aggregate_size = BigIntegerField(null=True)
   v1_json_metadata = TextField(null=True)
+  v1_checksum = CharField(null=True)
+
+  security_indexed = BooleanField(default=False)
+  security_indexed_engine = IntegerField(default=-1)
+  parent = ForeignKeyField('self', index=True, null=True, related_name='children')
 
   class Meta:
     database = db
@@ -580,6 +583,8 @@ class Image(BaseModel):
     indexes = (
       # we don't really want duplicates
       (('repository', 'docker_image_id'), True),
+
+      (('security_indexed_engine', 'security_indexed'), False),
     )
 
 
@@ -746,6 +751,7 @@ class RepositoryNotification(BaseModel):
   method = ForeignKeyField(ExternalNotificationMethod)
   title = CharField(null=True)
   config_json = TextField()
+  event_config_json = TextField(default='{}')
 
 
 class RepositoryAuthorizedEmail(BaseModel):
diff --git a/data/migrations/versions/2827d36939e4_separate_v1_and_v2_checksums.py b/data/migrations/versions/2827d36939e4_separate_v1_and_v2_checksums.py
new file mode 100644
index 000000000..f3ee69d0e
--- /dev/null
+++ b/data/migrations/versions/2827d36939e4_separate_v1_and_v2_checksums.py
@@ -0,0 +1,30 @@
+"""Separate v1 and v2 checksums.
+
+Revision ID: 2827d36939e4
+Revises: 5cdc2d819c5
+Create Date: 2015-11-04 16:29:48.905775
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '2827d36939e4'
+down_revision = '5cdc2d819c5'
+
+from alembic import op
+import sqlalchemy as sa
+
+
+def upgrade(tables):
+  ### commands auto generated by Alembic - please adjust! ###
+  op.add_column('image', sa.Column('v1_checksum', sa.String(length=255), nullable=True))
+  op.add_column('imagestorage', sa.Column('content_checksum', sa.String(length=255), nullable=True))
+  op.create_index('imagestorage_content_checksum', 'imagestorage', ['content_checksum'], unique=False)
+  ### end Alembic commands ###
+
+
+def downgrade(tables):
+  ### commands auto generated by Alembic - please adjust! ###
+  op.drop_index('imagestorage_content_checksum', table_name='imagestorage')
+  op.drop_column('imagestorage', 'content_checksum')
+  op.drop_column('image', 'v1_checksum')
+  ### end Alembic commands ###
diff --git a/data/migrations/versions/50925110da8c_add_event_specific_config.py b/data/migrations/versions/50925110da8c_add_event_specific_config.py
new file mode 100644
index 000000000..4a7672b70
--- /dev/null
+++ b/data/migrations/versions/50925110da8c_add_event_specific_config.py
@@ -0,0 +1,27 @@
+"""Add event-specific config
+
+Revision ID: 50925110da8c
+Revises: 57dad559ff2d
+Create Date: 2015-10-13 18:03:14.859839
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '50925110da8c'
+down_revision = '57dad559ff2d'
+
+from alembic import op
+import sqlalchemy as sa
+from util.migrate import UTF8LongText
+
+
+def upgrade(tables):
+  ### commands auto generated by Alembic - please adjust! ###
+  op.add_column('repositorynotification', sa.Column('event_config_json', UTF8LongText, nullable=False))
+  ### end Alembic commands ###
+
+
+def downgrade(tables):
+  ### commands auto generated by Alembic - please adjust! ###
+  op.drop_column('repositorynotification', 'event_config_json')
+  ### end Alembic commands ###
diff --git a/data/migrations/versions/57dad559ff2d_add_support_for_quay_s_security_indexer.py b/data/migrations/versions/57dad559ff2d_add_support_for_quay_s_security_indexer.py
new file mode 100644
index 000000000..ed317f327
--- /dev/null
+++ b/data/migrations/versions/57dad559ff2d_add_support_for_quay_s_security_indexer.py
@@ -0,0 +1,32 @@
+"""add support for quay's security indexer
+Revision ID: 57dad559ff2d
+Revises: 73669db7e12
+Create Date: 2015-07-13 16:51:41.669249
+"""
+
+# revision identifiers, used by Alembic.
+revision = '57dad559ff2d'
+down_revision = '73669db7e12'
+
+from alembic import op
+import sqlalchemy as sa
+
+def upgrade(tables):
+  ### commands auto generated by Alembic - please adjust! ###
+  op.add_column('image', sa.Column('parent_id', sa.Integer(), nullable=True))
+  op.add_column('image', sa.Column('security_indexed', sa.Boolean(), nullable=False, default=False, server_default=sa.sql.expression.false()))
+  op.add_column('image', sa.Column('security_indexed_engine', sa.Integer(), nullable=False, default=-1, server_default="-1"))
+  op.create_index('image_parent_id', 'image', ['parent_id'], unique=False)
+  op.create_foreign_key(op.f('fk_image_parent_id_image'), 'image', 'image', ['parent_id'], ['id'])
+  ### end Alembic commands ###
+  op.create_index('image_security_indexed_engine_security_indexed', 'image', ['security_indexed_engine', 'security_indexed'])
+
+def downgrade(tables):
+  ### commands auto generated by Alembic - please adjust! ###
+  op.drop_index('image_security_indexed_engine_security_indexed', 'image')
+  op.drop_constraint(op.f('fk_image_parent_id_image'), 'image', type_='foreignkey')
+  op.drop_index('image_parent_id', table_name='image')
+  op.drop_column('image', 'security_indexed')
+  op.drop_column('image', 'security_indexed_engine')
+  op.drop_column('image', 'parent_id')
+  ### end Alembic commands ###
diff --git a/data/migrations/versions/5cdc2d819c5_add_vulnerability_found_event.py b/data/migrations/versions/5cdc2d819c5_add_vulnerability_found_event.py
new file mode 100644
index 000000000..76051323a
--- /dev/null
+++ b/data/migrations/versions/5cdc2d819c5_add_vulnerability_found_event.py
@@ -0,0 +1,41 @@
+"""Add vulnerability_found event
+
+Revision ID: 5cdc2d819c5
+Revises: 50925110da8c
+Create Date: 2015-10-13 18:05:32.157858
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '5cdc2d819c5'
+down_revision = '50925110da8c'
+
+from alembic import op
+import sqlalchemy as sa
+
+
+
+def upgrade(tables):
+  op.bulk_insert(tables.externalnotificationevent,
+                 [
+                   {'id':6, 'name':'vulnerability_found'},
+                 ])
+
+  op.bulk_insert(tables.notificationkind,
+                 [
+                   {'id':11, 'name':'vulnerability_found'},
+                 ])
+
+
+def downgrade(tables):
+  op.execute(
+    (tables.externalnotificationevent.delete()
+     .where(tables.externalnotificationevent.c.name == op.inline_literal('vulnerability_found')))
+
+  )
+
+  op.execute(
+    (tables.notificationkind.delete()
+     .where(tables.notificationkind.c.name == op.inline_literal('vulnerability_found')))
+
+  )
\ No newline at end of file
diff --git a/data/migrations/versions/73669db7e12_remove_legacy_github_column.py b/data/migrations/versions/73669db7e12_remove_legacy_github_column.py
new file mode 100644
index 000000000..38698c5eb
--- /dev/null
+++ b/data/migrations/versions/73669db7e12_remove_legacy_github_column.py
@@ -0,0 +1,25 @@
+"""Remove legacy github column
+
+Revision ID: 73669db7e12
+Revises: 35f538da62
+Create Date: 2015-11-04 16:18:18.107314
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '73669db7e12'
+down_revision = '35f538da62'
+
+from alembic import op
+import sqlalchemy as sa
+
+def upgrade(tables):
+  ### commands auto generated by Alembic - please adjust! ###
+  op.drop_column('repositorybuildtrigger', 'used_legacy_github')
+  ### end Alembic commands ###
+
+
+def downgrade(tables):
+  ### commands auto generated by Alembic - please adjust! ###
+  op.add_column('repositorybuildtrigger', sa.Column('used_legacy_github', sa.Boolean(), nullable=True))
+  ### end Alembic commands ###
diff --git a/data/model/blob.py b/data/model/blob.py
index 4bad62584..5547c7646 100644
--- a/data/model/blob.py
+++ b/data/model/blob.py
@@ -17,7 +17,7 @@ def get_repo_blob_by_digest(namespace, repo_name, blob_digest):
                     .join(Repository)
                     .join(Namespace)
                     .where(Repository.name == repo_name, Namespace.username == namespace,
-                           ImageStorage.checksum == blob_digest))
+                           ImageStorage.content_checksum == blob_digest))
 
   if not placements:
     raise BlobDoesNotExist('Blob does not exist with digest: {0}'.format(blob_digest))
@@ -35,11 +35,11 @@ def store_blob_record_and_temp_link(namespace, repo_name, blob_digest, location_
   repo = _basequery.get_existing_repository(namespace, repo_name)
 
   try:
-    storage = ImageStorage.get(checksum=blob_digest)
+    storage = ImageStorage.get(content_checksum=blob_digest)
     location = ImageStorageLocation.get(name=location_name)
     ImageStoragePlacement.get(storage=storage, location=location)
   except ImageStorage.DoesNotExist:
-    storage = ImageStorage.create(checksum=blob_digest)
+    storage = ImageStorage.create(content_checksum=blob_digest)
   except ImageStoragePlacement.DoesNotExist:
     ImageStoragePlacement.create(storage=storage, location=location)
 
diff --git a/data/model/image.py b/data/model/image.py
index 078875417..7b673ee2f 100644
--- a/data/model/image.py
+++ b/data/model/image.py
@@ -284,10 +284,7 @@ def set_image_metadata(docker_image_id, namespace_name, repository_name, created
   except Image.DoesNotExist:
     raise DataModelException('No image with specified id and repository')
 
-  # We cleanup any old checksum in case it's a retry after a fail
-  fetched.storage.checksum = None
   fetched.created = datetime.now()
-
   if created_date_str is not None:
     try:
       fetched.created = dateutil.parser.parse(created_date_str).replace(tzinfo=None)
@@ -295,12 +292,18 @@
       # parse raises different exceptions, so we cannot use a specific kind of handler here.
       pass
 
+  # We cleanup any old checksum in case it's a retry after a fail
+  fetched.v1_checksum = None
+  fetched.storage.checksum = None # TODO remove when storage checksums are no longer read
+  fetched.storage.content_checksum = None
+
   fetched.comment = comment
   fetched.command = command
   fetched.v1_json_metadata = v1_json_metadata
 
   if parent:
     fetched.ancestors = '%s%s/' % (parent.ancestors, parent.id)
+    fetched.parent = parent
 
   fetched.save()
   fetched.storage.save()
diff --git a/data/model/notification.py b/data/model/notification.py
index 87ae7f7ca..409d6c000 100644
--- a/data/model/notification.py
+++ b/data/model/notification.py
@@ -113,12 +113,13 @@ def delete_matching_notifications(target, kind_name, **kwargs):
     notification.delete_instance()
 
 
-def create_repo_notification(repo, event_name, method_name, config, title=None):
+def create_repo_notification(repo, event_name, method_name, method_config, event_config, title=None):
   event = ExternalNotificationEvent.get(ExternalNotificationEvent.name == event_name)
   method = ExternalNotificationMethod.get(ExternalNotificationMethod.name == method_name)
 
   return RepositoryNotification.create(repository=repo, event=event, method=method,
-                                       config_json=json.dumps(config), title=title)
+                                       config_json=json.dumps(method_config), title=title,
+                                       event_config_json=json.dumps(event_config))
 
 
 def get_repo_notification(uuid):
diff --git a/data/model/user.py b/data/model/user.py
index 1079f8e0d..d05ea1693 100644
--- a/data/model/user.py
+++ b/data/model/user.py
@@ -651,9 +651,12 @@ def detach_external_login(user, service_name):
 
 
 def delete_user(user):
-  user.delete_instance(recursive=True, delete_nullable=True)
+  # Delete any repositories under the user's namespace.
+  for repo in list(Repository.select().where(Repository.namespace_user == user)):
+    repository.purge_repository(user.username, repo.name)
 
-  # TODO: also delete any repository data associated
+  # Delete the user itself.
+  user.delete_instance(recursive=True, delete_nullable=True)
 
 
 def get_pull_credentials(robotname):
diff --git a/digest/checksums.py b/digest/checksums.py
index ea30e4dc1..95a39ce96 100644
--- a/digest/checksums.py
+++ b/digest/checksums.py
@@ -75,6 +75,14 @@ def simple_checksum_handler(json_data):
   return h, fn
 
 
+def content_checksum_handler():
+  h = hashlib.sha256()
+
+  def fn(buf):
+    h.update(buf)
+  return h, fn
+
+
 def compute_simple(fp, json_data):
   data = json_data + '\n'
   return 'sha256:{0}'.format(sha256_file(fp, data))
diff --git a/endpoints/api/__init__.py b/endpoints/api/__init__.py
index 70b76f7e4..a83a7ac58 100644
--- a/endpoints/api/__init__.py
+++ b/endpoints/api/__init__.py
@@ -93,6 +93,11 @@ class NotFound(ApiException):
     ApiException.__init__(self, None, 404, 'Not Found', payload)
 
 
+class DownstreamIssue(ApiException):
+  def __init__(self, payload=None):
+    ApiException.__init__(self, None, 520, 'Downstream Issue', payload)
+
+
 @api_bp.app_errorhandler(ApiException)
 @crossdomain(origin='*', headers=['Authorization', 'Content-Type'])
 def handle_api_error(error):
@@ -418,4 +423,5 @@ import endpoints.api.tag
 import endpoints.api.team
 import endpoints.api.trigger
 import endpoints.api.user
+import endpoints.api.secscan
 
diff --git a/endpoints/api/repositorynotification.py b/endpoints/api/repositorynotification.py
index 832328cbe..30c71cf54 100644
--- a/endpoints/api/repositorynotification.py
+++ b/endpoints/api/repositorynotification.py
@@ -57,6 +57,10 @@ class RepositoryNotificationList(RepositoryParamResource):
           'type': 'object',
           'description': 'JSON config information for the specific method of notification'
         },
+        'eventConfig': {
+          'type': 'object',
+          'description': 'JSON config information for the specific event of notification',
+        },
         'title': {
           'type': 'string',
           'description': 'The human-readable title of the notification',
@@ -84,6 +88,7 @@ class RepositoryNotificationList(RepositoryParamResource):
       new_notification = model.notification.create_repo_notification(repo, parsed['event'],
                                                                      parsed['method'],
                                                                      parsed['config'],
+                                                                     parsed['eventConfig'],
                                                                      parsed.get('title', None))
 
       resp = notification_view(new_notification)
diff --git a/endpoints/api/secscan.py b/endpoints/api/secscan.py
new file mode 100644
index 000000000..ab3f73051
--- /dev/null
+++ b/endpoints/api/secscan.py
@@ -0,0 +1,103 @@
+""" List and manage repository vulnerabilities and other security information. """
+
+import logging
+import features
+import json
+import requests
+
+from app import secscan_endpoint
+from data import model
+from endpoints.api import (require_repo_read, NotFound, DownstreamIssue, path_param,
+                           RepositoryParamResource, resource, nickname, show_if, parse_args,
+                           query_param)
+
+
+logger = logging.getLogger(__name__)
+
+
+def _call_security_api(relative_url, *args, **kwargs):
+  """ Issues an HTTP call to the security API at the given relative URL. """
""" + try: + response = secscan_endpoint.call_api(relative_url, *args, **kwargs) + except requests.exceptions.Timeout: + raise DownstreamIssue(payload=dict(message='API call timed out')) + except requests.exceptions.ConnectionError: + raise DownstreamIssue(payload=dict(message='Could not connect to downstream service')) + + if response.status_code == 404: + raise NotFound() + + try: + response_data = json.loads(response.text) + except ValueError: + raise DownstreamIssue(payload=dict(message='Non-json response from downstream service')) + + if response.status_code / 100 != 2: + logger.warning('Got %s status code to call: %s', response.status_code, response.text) + raise DownstreamIssue(payload=dict(message=response_data['Message'])) + + return response_data + + +@show_if(features.SECURITY_SCANNER) +@resource('/v1/repository//tag//vulnerabilities') +@path_param('repository', 'The full path of the repository. e.g. namespace/name') +@path_param('tag', 'The name of the tag') +class RepositoryTagVulnerabilities(RepositoryParamResource): + """ Operations for managing the vulnerabilities in a repository tag. """ + + @require_repo_read + @nickname('getRepoTagVulnerabilities') + @parse_args + @query_param('minimumPriority', 'Minimum vulnerability priority', type=str, + default='Low') + def get(self, args, namespace, repository, tag): + """ Fetches the vulnerabilities (if any) for a repository tag. """ + try: + tag_image = model.tag.get_tag_image(namespace, repository, tag) + except model.DataModelException: + raise NotFound() + + if not tag_image.security_indexed: + logger.debug('Image %s for tag %s under repository %s/%s not security indexed', + tag_image.docker_image_id, tag, namespace, repository) + return { + 'security_indexed': False + } + + data = _call_security_api('layers/%s/vulnerabilities', tag_image.docker_image_id, + minimumPriority=args.minimumPriority) + + return { + 'security_indexed': True, + 'data': data, + } + + +@show_if(features.SECURITY_SCANNER) +@resource('/v1/repository//image//packages') +@path_param('repository', 'The full path of the repository. e.g. namespace/name') +@path_param('imageid', 'The image ID') +class RepositoryImagePackages(RepositoryParamResource): + """ Operations for listing the packages added/removed in an image. """ + + @require_repo_read + @nickname('getRepoImagePackages') + def get(self, namespace, repository, imageid): + """ Fetches the packages added/removed in the given repo image. 
""" + repo_image = model.image.get_repo_image(namespace, repository, imageid) + if repo_image is None: + raise NotFound() + + if not repo_image.security_indexed: + return { + 'security_indexed': False + } + + data = _call_security_api('layers/%s/packages/diff', repo_image.docker_image_id) + + return { + 'security_indexed': True, + 'data': data, + } + diff --git a/endpoints/notificationevent.py b/endpoints/notificationevent.py index b1d319a1f..ebd7e10b6 100644 --- a/endpoints/notificationevent.py +++ b/endpoints/notificationevent.py @@ -84,6 +84,40 @@ def _build_summary(event_data): return summary +class VulnerabilityFoundEvent(NotificationEvent): + @classmethod + def event_name(cls): + return 'vulnerability_found' + + def get_level(self, event_data, notification_data): + priority = event_data['vulnerability']['priority'] + if priority == 'Defcon1' or priority == 'Critical': + return 'error' + + if priority == 'Medium' or priority == 'High': + return 'warning' + + return 'info' + + def get_sample_data(self, repository): + return build_event_data(repository, { + 'tags': ['latest', 'prod'], + 'image': 'some-image-id', + 'vulnerability': { + 'id': 'CVE-FAKE-CVE', + 'description': 'A futurist vulnerability', + 'link': 'https://security-tracker.debian.org/tracker/CVE-FAKE-CVE', + 'priority': 'Critical', + }, + }) + + def get_summary(self, event_data, notification_data): + msg = '%s vulnerability detected in repository %s in tags %s' + return msg % (event_data['vulnerability']['priority'], + event_data['repository'], + ', '.join(event_data['tags'])) + + class BuildQueueEvent(NotificationEvent): @classmethod def event_name(cls): diff --git a/endpoints/v1/registry.py b/endpoints/v1/registry.py index 3d049c757..19915363c 100644 --- a/endpoints/v1/registry.py +++ b/endpoints/v1/registry.py @@ -249,6 +249,10 @@ def put_image_layer(namespace, repository, image_id): h, sum_hndlr = checksums.simple_checksum_handler(json_data) sr.add_handler(sum_hndlr) + # Add a handler which computes the content checksum only + ch, content_sum_hndlr = checksums.content_checksum_handler() + sr.add_handler(content_sum_hndlr) + # Stream write the data to storage. with database.CloseForLongOperation(app.config): try: @@ -278,6 +282,7 @@ def put_image_layer(namespace, repository, image_id): # We don't have a checksum stored yet, that's fine skipping the check. # Not removing the mark though, image is not downloadable yet. 
     session['checksum'] = csums
+    session['content_checksum'] = 'sha256:{0}'.format(ch.hexdigest())
     return make_response('true', 200)
 
   checksum = repo_image.storage.checksum
@@ -339,8 +344,9 @@ def put_image_checksum(namespace, repository, image_id):
     abort(409, 'Cannot set checksum for image %(image_id)s',
           issue='image-write-error', image_id=image_id)
 
-  logger.debug('Storing image checksum')
-  err = store_checksum(repo_image.storage, checksum)
+  logger.debug('Storing image and content checksums')
+  content_checksum = session.get('content_checksum', None)
+  err = store_checksum(repo_image, checksum, content_checksum)
   if err:
     abort(400, err)
 
@@ -429,14 +435,18 @@ def generate_ancestry(image_id, uuid, locations, parent_id=None, parent_uuid=Non
   store.put_content(locations, store.image_ancestry_path(uuid), json.dumps(data))
 
 
-def store_checksum(image_storage, checksum):
+def store_checksum(image_with_storage, checksum, content_checksum):
   checksum_parts = checksum.split(':')
   if len(checksum_parts) != 2:
    return 'Invalid checksum format'
 
   # We store the checksum
-  image_storage.checksum = checksum
-  image_storage.save()
+  image_with_storage.storage.checksum = checksum # TODO remove when v1 checksums are on image only
+  image_with_storage.storage.content_checksum = content_checksum
+  image_with_storage.storage.save()
+
+  image_with_storage.v1_checksum = checksum
+  image_with_storage.save()
 
 
 @v1_bp.route('/images/<image_id>/json', methods=['PUT'])
diff --git a/events/vulnerability_found.html b/events/vulnerability_found.html
new file mode 100644
index 000000000..f20f4053b
--- /dev/null
+++ b/events/vulnerability_found.html
@@ -0,0 +1,4 @@
+A {{ event_data.vulnerability.priority }} vulnerability ({{ event_data.vulnerability.id }}) was detected in tags
+ {{ 'tags' | icon_image }}
+{% for tag in event_data.tags %}{%if loop.index > 1 %}, {% endif %}{{ (event_data.repository, tag) | repository_tag_reference }}{% endfor %} in
+ repository {{ event_data.repository | repository_reference }}
\ No newline at end of file
diff --git a/initdb.py b/initdb.py
index 33b8e2b5a..80c9fa952 100644
--- a/initdb.py
+++ b/initdb.py
@@ -5,6 +5,7 @@ import random
 import calendar
 import os
 
+from sys import maxsize
 from datetime import datetime, timedelta
 from peewee import (SqliteDatabase, create_model_tables, drop_model_tables, savepoint_sqlite,
                     savepoint)
@@ -82,7 +83,7 @@ def __create_subtree(repo, structure, creator_username, parent, tag_map):
     new_image_locations = new_image.storage.locations
     new_image.storage.uuid = __gen_image_uuid(repo, image_num)
     new_image.storage.uploading = False
-    new_image.storage.checksum = checksum
+    new_image.storage.content_checksum = checksum
     new_image.storage.save()
 
     # Write some data for the storage.
@@ -95,6 +96,10 @@ def __create_subtree(repo, structure, creator_username, parent, tag_map):
         path = path_builder(new_image.storage.uuid)
         store.put_content('local_us', path, checksum)
 
+    new_image.security_indexed = False
+    new_image.security_indexed_engine = maxsize
+    new_image.save()
+
     creation_time = REFERENCE_DATE + timedelta(weeks=image_num) + timedelta(days=model_num)
     command_list = SAMPLE_CMDS[image_num % len(SAMPLE_CMDS)]
     command = json.dumps(command_list) if command_list else None
@@ -309,6 +314,7 @@ def initialize_database():
   ExternalNotificationEvent.create(name='build_start')
   ExternalNotificationEvent.create(name='build_success')
   ExternalNotificationEvent.create(name='build_failure')
+  ExternalNotificationEvent.create(name='vulnerability_found')
 
   ExternalNotificationMethod.create(name='quay_notification')
   ExternalNotificationMethod.create(name='email')
@@ -323,6 +329,7 @@ def initialize_database():
   NotificationKind.create(name='build_start')
   NotificationKind.create(name='build_success')
   NotificationKind.create(name='build_failure')
+  NotificationKind.create(name='vulnerability_found')
 
   NotificationKind.create(name='password_required')
   NotificationKind.create(name='over_private_usage')
diff --git a/static/css/directives/ui/create-external-notification-dialog.css b/static/css/directives/ui/create-external-notification-dialog.css
new file mode 100644
index 000000000..12394955c
--- /dev/null
+++ b/static/css/directives/ui/create-external-notification-dialog.css
@@ -0,0 +1,20 @@
+#createNotificationModal .dropdown-select {
+  margin: 0px;
+}
+
+#createNotificationModal .options-table {
+  width: 100%;
+  margin-bottom: 10px;
+}
+
+#createNotificationModal .options-table td {
+  padding-bottom: 6px;
+}
+
+#createNotificationModal .options-table td.name {
+  width: 160px;
+}
+
+#createNotificationModal .options-table-wrapper {
+  padding: 10px;
+}
\ No newline at end of file
diff --git a/static/directives/create-external-notification-dialog.html b/static/directives/create-external-notification-dialog.html
index 592efc322..b249e819f 100644
--- a/static/directives/create-external-notification-dialog.html
+++ b/static/directives/create-external-notification-dialog.html
@@ -1,12 +1,12 @@
-