diff --git a/data/model/image.py b/data/model/image.py index f8a699273..b865c048b 100644 --- a/data/model/image.py +++ b/data/model/image.py @@ -429,57 +429,16 @@ def ensure_image_locations(*names): data = [{'name': name} for name in insert_names] ImageStorageLocation.insert_many(data).execute() -def get_secscan_candidates(engine_version, batch_size): +def get_image_with_storage_and_parent_base(): Parent = Image.alias() ParentImageStorage = ImageStorage.alias() - rimages = [] - - # Collect the images without parents. - candidates = list(Image - .select(Image.id) - .join(ImageStorage) - .where(Image.security_indexed_engine < engine_version, - Image.parent >> None, - ImageStorage.uploading == False) - .limit(batch_size*10)) - - if len(candidates) > 0: - images = (Image - .select(Image, ImageStorage) - .join(ImageStorage) - .where(Image.id << candidates) - .order_by(db_random_func()) - .limit(batch_size)) - rimages.extend(images) - - # Collect the images with analyzed parents. - candidates = list(Image - .select(Image.id) - .join(Parent, on=(Image.parent == Parent.id)) - .switch(Image) - .join(ImageStorage) - .where(Image.security_indexed_engine < engine_version, - Parent.security_indexed_engine == engine_version, - ImageStorage.uploading == False) - .limit(batch_size*10)) - - if len(candidates) > 0: - images = (Image - .select(Image, ImageStorage, Parent, ParentImageStorage) - .join(Parent, on=(Image.parent == Parent.id)) - .join(ParentImageStorage, on=(ParentImageStorage.id == Parent.storage)) - .switch(Image) - .join(ImageStorage) - .where(Image.id << candidates) - .order_by(db_random_func()) - .limit(batch_size)) - rimages.extend(images) - - # Shuffle the images, otherwise the images without parents will always be on the top - random.shuffle(rimages) - - return rimages + return (Image + .select(Image, ImageStorage, Parent, ParentImageStorage) + .join(ImageStorage) + .switch(Image) + .join(Parent, JOIN_LEFT_OUTER, on=(Image.parent == Parent.id)) + .join(ParentImageStorage, JOIN_LEFT_OUTER, on=(ParentImageStorage.id == Parent.storage))) def set_secscan_status(image, indexed, version): query = (Image @@ -490,12 +449,13 @@ def set_secscan_status(image, indexed, version): ids_to_update = [row.id for row in query] if not ids_to_update: - return + return False - (Image - .update(security_indexed=indexed, security_indexed_engine=version) - .where(Image.id << ids_to_update) - .execute()) + return (Image + .update(security_indexed=indexed, security_indexed_engine=version) + .where(Image.id << ids_to_update) + .where((Image.security_indexed_engine != version) | (Image.security_indexed != indexed)) + .execute()) != 0 def find_or_create_derived_storage(source_image, transformation_name, preferred_location): @@ -539,5 +499,3 @@ def delete_derived_storage_by_uuid(storage_uuid): return image_storage.delete_instance(recursive=True) - - diff --git a/endpoints/api/secscan.py b/endpoints/api/secscan.py index 91a5ee891..146227a76 100644 --- a/endpoints/api/secscan.py +++ b/endpoints/api/secscan.py @@ -1,4 +1,4 @@ -""" List and manage repository vulnerabilities and other sec information. """ +""" List and manage repository vulnerabilities and other security information. 
""" import logging import features @@ -9,7 +9,7 @@ from app import secscan_api from data import model from endpoints.api import (require_repo_read, NotFound, DownstreamIssue, path_param, RepositoryParamResource, resource, nickname, show_if, parse_args, - query_param) + query_param, truthy_bool) logger = logging.getLogger(__name__) @@ -54,19 +54,19 @@ def _get_status(repo_image): @show_if(features.SECURITY_SCANNER) -@resource('/v1/repository//image//vulnerabilities') +@resource('/v1/repository//image//security') @path_param('repository', 'The full path of the repository. e.g. namespace/name') @path_param('imageid', 'The image ID') -class RepositoryImageVulnerabilities(RepositoryParamResource): +class RepositoryImageSecurity(RepositoryParamResource): """ Operations for managing the vulnerabilities in a repository image. """ @require_repo_read - @nickname('getRepoImageVulnerabilities') + @nickname('getRepoImageSecurity') @parse_args() - @query_param('minimumPriority', 'Minimum vulnerability priority', type=str, - default='Low') + @query_param('vulnerabilities', 'Include vulnerabilities informations', type=truthy_bool, + default=False) def get(self, namespace, repository, imageid, parsed_args): - """ Fetches the vulnerabilities (if any) for a repository tag. """ + """ Fetches the features and vulnerabilities (if any) for a repository tag. """ repo_image = model.image.get_repo_image(namespace, repository, imageid) if repo_image is None: raise NotFound() @@ -79,40 +79,12 @@ class RepositoryImageVulnerabilities(RepositoryParamResource): } layer_id = '%s.%s' % (repo_image.docker_image_id, repo_image.storage.uuid) - data = _call_security_api('layers/%s/vulnerabilities', layer_id, - minimumPriority=parsed_args.minimumPriority) + if parsed_args.vulnerabilities: + data = _call_security_api('layers/%s?vulnerabilities', layer_id) + else: + data = _call_security_api('layers/%s?features', layer_id) return { 'status': _get_status(repo_image), 'data': data, } - - -@show_if(features.SECURITY_SCANNER) -@resource('/v1/repository//image//packages') -@path_param('repository', 'The full path of the repository. e.g. namespace/name') -@path_param('imageid', 'The image ID') -class RepositoryImagePackages(RepositoryParamResource): - """ Operations for listing the packages added/removed in an image. """ - - @require_repo_read - @nickname('getRepoImagePackages') - def get(self, namespace, repository, imageid): - """ Fetches the packages added/removed in the given repo image. 
""" - repo_image = model.image.get_repo_image(namespace, repository, imageid) - if repo_image is None: - raise NotFound() - - if not repo_image.security_indexed: - return { - 'status': _get_status(repo_image), - } - - layer_id = '%s.%s' % (repo_image.docker_image_id, repo_image.storage.uuid) - data = _call_security_api('layers/%s/packages', layer_id) - - return { - 'status': _get_status(repo_image), - 'data': data, - } - diff --git a/static/js/directives/repo-view/repo-panel-tags.js b/static/js/directives/repo-view/repo-panel-tags.js index ddce687bf..21e061826 100644 --- a/static/js/directives/repo-view/repo-panel-tags.js +++ b/static/js/directives/repo-view/repo-panel-tags.js @@ -156,41 +156,48 @@ angular.module('quay').directive('repoPanelTags', function () { var params = { 'imageid': image_id, 'repository': $scope.repository.namespace + '/' + $scope.repository.name, + 'vulnerabilities': true, }; - ApiService.getRepoImageVulnerabilities(null, params).then(function(resp) { + ApiService.getRepoImageSecurity(null, params).then(function(resp) { imageData.loading = false; imageData.status = resp['status']; if (imageData.status == 'scanned') { - var vulnerabilities = resp.data.Vulnerabilities; - - imageData.hasVulnerabilities = !!vulnerabilities.length; - imageData.vulnerabilities = vulnerabilities; - + var vulnerabilities = []; var highest = { - 'Priority': 'Unknown', + 'Severity': 'Unknown', 'Count': 0, 'index': 100000 }; - resp.data.Vulnerabilities.forEach(function(v) { - if (VulnerabilityService.LEVELS[v.Priority].index == 0) { - $scope.defcon1[v.ID] = v; - $scope.hasDefcon1 = true; - } + if (resp.data && resp.data.Layer && resp.data.Layer.Features) { + resp.data.Layer.Features.forEach(function(feature) { + if (feature.Vulnerabilities) { + feature.Vulnerabilities.forEach(function(vuln) { + if (VulnerabilityService.LEVELS[vuln.Severity].index == 0) { + $scope.defcon1[vuln.ID] = v; + $scope.hasDefcon1 = true; + } - if (VulnerabilityService.LEVELS[v.Priority].index < highest.index) { - highest = { - 'Priority': v.Priority, - 'Count': 1, - 'index': VulnerabilityService.LEVELS[v.Priority].index + if (VulnerabilityService.LEVELS[vuln.Severity].index < highest.index) { + highest = { + 'Priority': vuln.Severity, + 'Count': 1, + 'index': VulnerabilityService.LEVELS[vuln.Severity].index + } + } else if (VulnerabilityService.LEVELS[vuln.Severity].index == highest.index) { + highest['Count']++; + } + + vulnerabilities.push(vuln); + }); } - } else if (VulnerabilityService.LEVELS[v.Priority].index == highest.index) { - highest['Count']++; - } - }); + }); + } + imageData.hasVulnerabilities = !!vulnerabilities.length; + imageData.vulnerabilities = vulnerabilities; imageData.highestVulnerability = highest; } }, function() { @@ -355,4 +362,3 @@ angular.module('quay').directive('repoPanelTags', function () { }; return directiveDefinitionObject; }); - diff --git a/static/js/pages/image-view.js b/static/js/pages/image-view.js index 253c389b2..5e377eac0 100644 --- a/static/js/pages/image-view.js +++ b/static/js/pages/image-view.js @@ -45,39 +45,53 @@ loadImage(); loadRepository(); - $scope.downloadPackages = function() { - if (!Features.SECURITY_SCANNER || $scope.packagesResource) { return; } - - var params = { - 'repository': namespace + '/' + name, - 'imageid': imageid - }; - - $scope.packagesResource = ApiService.getRepoImagePackagesAsResource(params).get(function(packages) { - $scope.packages = packages; - return packages; - }); - }; - - $scope.loadImageVulnerabilities = function() { - if 
(!Features.SECURITY_SCANNER || $scope.vulnerabilitiesResource) { return; } + $scope.loadImageSecurity = function() { + if (!Features.SECURITY_SCANNER || $scope.securityResource) { return; } $scope.VulnerabilityLevels = VulnerabilityService.getLevels(); var params = { 'repository': namespace + '/' + name, - 'imageid': imageid + 'imageid': imageid, + 'vulnerabilities': true, }; - $scope.vulnerabilitiesResource = ApiService.getRepoImageVulnerabilitiesAsResource(params).get(function(resp) { - $scope.vulnerabilityInfo = resp; - $scope.vulnerabilities = []; + $scope.securityResource = ApiService.getRepoImageSecurityAsResource(params).get(function(resp) { + $scope.securityStatus = resp.status; + $scope.securityFeatures = []; + $scope.securityVulnerabilities = []; - if (resp.data && resp.data.Vulnerabilities) { - resp.data.Vulnerabilities.forEach(function(vuln) { - vuln_copy = jQuery.extend({}, vuln); - vuln_copy['index'] = VulnerabilityService.LEVELS[vuln['Priority']]['index']; - $scope.vulnerabilities.push(vuln_copy); + if (resp.data && resp.data.Layer && resp.data.Layer.Features) { + resp.data.Layer.Features.forEach(function(feature) { + feature_obj = { + 'name': feature.Name, + 'namespace': feature.Namespace, + 'version': feature.Version, + 'addedBy': feature.AddedBy, + } + feature_vulnerabilities = [] + + if (feature.Vulnerabilities) { + feature.Vulnerabilities.forEach(function(vuln) { + vuln_obj = { + 'name': vuln.Name, + 'namespace': vuln.Namespace, + 'description': vuln.Description, + 'link': vuln.Link, + 'severity': vuln.Severity, + 'metadata': vuln.Metadata, + 'feature': jQuery.extend({}, feature_obj), + 'fixedBy': vuln.FixedBy, + 'index': VulnerabilityService.LEVELS[vuln['Severity']]['index'], + } + + feature_vulnerabilities.push(vuln_obj) + $scope.securityVulnerabilities.push(vuln_obj); + }); + } + + feature_obj['vulnerabilities'] = feature_vulnerabilities + $scope.securityFeatures.push(feature_obj); }); } @@ -94,4 +108,4 @@ }, 100); }; } -})(); \ No newline at end of file +})(); diff --git a/static/partials/image-view.html b/static/partials/image-view.html index 3cd8627a3..b3343802f 100644 --- a/static/partials/image-view.html +++ b/static/partials/image-view.html @@ -21,13 +21,13 @@ - @@ -42,51 +42,58 @@ ng-repeat="parent in reversedHistory"> - -
[static/partials/image-view.html hunks: the HTML markup was lost in extraction; only the template text is recoverable. What survives of these hunks:
 - "Image Security" panel text: "This image has not been indexed yet / Please try again in a few minutes.", "This image could not be indexed / Our security scanner was unable to index this image.", "This image contains no recognized security vulnerabilities"; the hint changes from "Quay currently indexes Debian, Red Hat and Ubuntu packages." to "Quay currently indexes Debian, Red Hat and Ubuntu based images."
 - Vulnerability table: header cells now read Vulnerability, Priority, Introduced by, Fixed by, Description; row bindings change from {{ vulnerability.ID }} / {{ vulnerability.Description }} to {{ vulnerability.name }}, {{ vulnerability.feature.name }} {{ vulnerability.feature.version }}, {{ vulnerability.fixedBy }}, {{ vulnerability.description }}; empty state: "No matching vulnerabilities found".
 - "Image Packages" panel (hunk @@ -110,41 +117,43 @@): the same indexing/status messages; the table header gains "Package OS" and "Number of vulnerabilities" alongside Package Name and Package Version, replacing "OS"; row bindings change from {{ package.Name }} / {{ package.Version }} / {{ package.OS }} to {{ feature.name }}, {{ feature.version }}, {{ feature.namespace }}, {{ feature.vulnerabilities.length }}; empty state: "No matching packages found".]
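To make the new response shape concrete: the templates and directives above now consume a Clair-style payload of Layer.Features[*].Vulnerabilities[*] rather than a flat Vulnerabilities list. The following is a rough, self-contained Python sketch of that flattening and of tracking the highest severity, in the spirit of what repo-panel-tags.js does; the severity ordering and the sample payload are assumptions for illustration, not values taken from this diff.

# Rough sketch (not code from this PR): flatten a Clair-style layer payload into
# per-vulnerability rows and track the highest severity seen.
# The severity ordering and the sample payload below are assumptions.

SEVERITY_ORDER = ['Defcon1', 'Critical', 'High', 'Medium', 'Low', 'Negligible', 'Unknown']

def severity_rank(severity):
    """ Lower rank means more severe; unrecognized values sort as Unknown. """
    try:
        return SEVERITY_ORDER.index(severity)
    except ValueError:
        return SEVERITY_ORDER.index('Unknown')

def summarize_layer(layer):
    """ Return (flattened vulnerability rows, highest severity seen). """
    rows = []
    highest = 'Unknown'
    for feature in layer.get('Features', []):
        for vuln in feature.get('Vulnerabilities', []):
            rows.append({
                'name': vuln.get('Name'),
                'severity': vuln.get('Severity', 'Unknown'),
                'feature': feature.get('Name'),
                'fixedBy': vuln.get('FixedBy'),
                'description': vuln.get('Description'),
            })
            if severity_rank(vuln.get('Severity', 'Unknown')) < severity_rank(highest):
                highest = vuln.get('Severity', 'Unknown')
    return rows, highest

if __name__ == '__main__':
    sample = {'Features': [{'Name': 'openssl', 'Version': '1.0.1k',
                            'Vulnerabilities': [{'Name': 'CVE-0000-0000', 'Severity': 'High'}]}]}
    print(summarize_layer(sample))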
diff --git a/util/migrate/allocator.py b/util/migrate/allocator.py index c0502a843..bf998e1dd 100644 --- a/util/migrate/allocator.py +++ b/util/migrate/allocator.py @@ -14,9 +14,9 @@ class NoAvailableKeysError(ValueError): class CompletedKeys(object): - def __init__(self, max_index): + def __init__(self, min_index, max_index): self._max_index = max_index - self._min_index = 0 + self._min_index = min_index self._slabs = RBTree() def _get_previous_or_none(self, index): @@ -118,7 +118,7 @@ class CompletedKeys(object): return random.randint(hole_start, rand_max_bound) -def yield_random_entries(batch_query, primary_key_field, batch_size, max_id): +def yield_random_entries(batch_query, primary_key_field, batch_size, max_id, min_id=0): """ This method will yield items from random blocks in the database. We will track metadata about which keys are available for work, and we will complete the backfill when there is no more work to be done. The method yields tupes of (candidate, Event), and if the work was @@ -126,8 +126,9 @@ def yield_random_entries(batch_query, primary_key_field, batch_size, max_id): an "id" field which can be inspected. """ + min_id = max(min_id, 0) max_id = max(max_id, 1) - allocator = CompletedKeys(max_id + 1) + allocator = CompletedKeys(min_id, max_id + 1) try: while True: diff --git a/workers/securityworker.py b/workers/securityworker.py index e6fe492a1..51998e068 100644 --- a/workers/securityworker.py +++ b/workers/securityworker.py @@ -5,24 +5,26 @@ import requests import features import time -from endpoints.notificationhelper import spawn_notification +from peewee import fn from collections import defaultdict + from app import app, config_provider, storage, secscan_api +from endpoints.notificationhelper import spawn_notification from workers.worker import Worker from data import model +from data.database import (Image, UseThenDisconnect, ExternalNotificationEvent) from data.model.tag import filter_tags_have_repository_event, get_tags_for_image -from data.model.image import get_secscan_candidates, set_secscan_status +from data.model.image import set_secscan_status, get_image_with_storage_and_parent_base from data.model.storage import get_storage_locations -from data.database import ExternalNotificationEvent from util.secscan.api import SecurityConfigValidator - -logger = logging.getLogger(__name__) +from util.migrate.allocator import yield_random_entries BATCH_SIZE = 50 INDEXING_INTERVAL = 30 API_METHOD_INSERT = '/v1/layers' -API_METHOD_VERSION = '/v1/versions/engine' +API_METHOD_GET_WITH_VULNERABILITIES = '/v1/layers/%s?vulnerabilities' +logger = logging.getLogger(__name__) class SecurityWorker(Worker): def __init__(self): @@ -40,6 +42,26 @@ class SecurityWorker(Worker): else: logger.warning('Failed to validate security scan configuration') + def _new_request(self, image): + """ Create the request body to submit the given image for analysis. """ + url = self._get_image_url(image) + if url is None: + return None + + request = { + 'Layer': { + 'Name': '%s.%s' % (image.docker_image_id, image.storage.uuid), + 'Path': url, + 'Format': 'Docker' + } + } + + if image.parent.docker_image_id and image.parent.storage.uuid: + request['Layer']['ParentName'] = '%s.%s' % (image.parent.docker_image_id, + image.parent.storage.uuid) + + return request + def _get_image_url(self, image): """ Gets the download URL for an image and if the storage doesn't exist, marks the image as unindexed. 
""" @@ -71,147 +93,157 @@ class SecurityWorker(Worker): return uri - def _new_request(self, image): - url = self._get_image_url(image) - if url is None: - return None + def _index_images(self): + def batch_query(): + base_query = get_image_with_storage_and_parent_base() + return base_query.where(Image.security_indexed_engine < self._target_version) - request = { - 'ID': '%s.%s' % (image.docker_image_id, image.storage.uuid), - 'Path': url, - } + min_id = (Image + .select(fn.Min(Image.id)) + .where(Image.security_indexed_engine < self._target_version) + .scalar()) + max_id = Image.select(fn.Max(Image.id)).scalar() - if image.parent is not None: - request['ParentID'] = '%s.%s' % (image.parent.docker_image_id, - image.parent.storage.uuid) + with UseThenDisconnect(app.config): + for candidate, abt in yield_random_entries(batch_query, Image.id, BATCH_SIZE, max_id, min_id): + _, continue_batch = self._analyze_recursively(candidate) + if not continue_batch: + logger.info('Another worker pre-empted us for layer: %s', candidate.id) + abt.set() - return request + def _analyze_recursively(self, layer): + """ Analyzes a layer and all its parents """ + if layer.parent_id and layer.parent.security_indexed_engine < self._target_version: + # The image has a parent that is not analyzed yet with this engine. + # Get the parent to get it's own parent and recurse. + try: + base_query = get_image_with_storage_and_parent_base() + parent_layer = base_query.where(Image.id == layer.parent_id).get() + except Image.DoesNotExist: + logger.warning("Image %s has Image %s as parent but doesn't exist.", layer.id, + layer.parent_id) - def _analyze_image(self, image): - """ Analyzes an image by passing it to Clair. """ - request = self._new_request(image) + return False, set_secscan_status(layer, False, self._target_version) + + cont, _ = self._analyze_recursively(parent_layer) + if not cont: + # The analysis failed for some reason and did not mark the layer as failed, + # thus we should not try to analyze the children of that layer. + # Interrupt the recursive analysis and return as no-one pre-empted us. + return False, True + + # Now we know all parents are analyzed. + return self._analyze(layer) + + def _analyze(self, layer): + """ Analyzes a single layer. + Return two bools, the first one tells us if we should evaluate its children, the second + one is set to False when another worker pre-empted the candidate's analysis for us. """ + + # If the parent couldn't be analyzed with the target version or higher, we can't analyze + # this image. Mark it as failed with the current target version. + if (layer.parent_id and not layer.parent.security_indexed and + layer.parent.security_indexed_engine >= self._target_version): + return True, set_secscan_status(layer, False, self._target_version) + + request = self._new_request(layer) if request is None: - return False + return False, True # Analyze the image. 
try: - logger.info('Analyzing %s', request['ID']) + logger.info('Analyzing layer %s', request['Layer']['Name']) # Using invalid certificates doesn't return proper errors because of # https://github.com/shazow/urllib3/issues/556 - httpResponse = requests.post(self._api + API_METHOD_INSERT, json=request, - cert=self._keys, verify=self._cert) - jsonResponse = httpResponse.json() + http_response = requests.post(self._api + API_METHOD_INSERT, json=request, + cert=self._keys, verify=self._cert) + json_response = http_response.json() except (requests.exceptions.RequestException, ValueError): - logger.exception('An exception occurred when analyzing layer ID %s', request['ID']) - return False + logger.exception('An exception occurred when analyzing layer %s', request['Layer']['Name']) + return False, True # Handle any errors from the security scanner. - if httpResponse.status_code != 201: - message = jsonResponse.get('Message', '') - if 'OS and/or package manager are not supported' in message or 'could not extract' in message: - # The current engine could not index this layer or we tried to index a manifest. - logger.warning('A warning event occurred when analyzing layer ID %s : %s', - request['ID'], jsonResponse['Message']) + if http_response.status_code != 201: + message = json_response.get('Error').get('Message', '') + logger.warning('A warning event occurred when analyzing layer %s (status code %s): %s', + request['Layer']['Name'], http_response.status_code, message) - # Hopefully, there is no version lower than the target one running - set_secscan_status(image, False, self._target_version) - - return False + # 422 means that the layer could not be analyzed: + # - the layer could not be extracted (manifest?) + # - the layer operating system / package manager is unsupported + # Set the layer as failed. + if http_response.status_code == 422: + return True, set_secscan_status(layer, False, self._target_version) else: - logger.warning('Got non-201 when analyzing layer ID %s: %s', request['ID'], jsonResponse) - return False + return False, True # Verify that the version matches. - api_version = jsonResponse['Version'] + api_version = json_response['Layer']['IndexedByVersion'] if api_version < self._target_version: - logger.warning('An engine runs on version %d but the target version is %d') + logger.warning('An engine runs on version %d but the target version is %d', api_version, + self._target_version) # Mark the image as analyzed. - logger.debug('Layer %s analyzed successfully', image.id) - set_secscan_status(image, True, api_version) + logger.info('Analyzed layer %s successfully', request['Layer']['Name']) + set_status = set_secscan_status(layer, True, api_version) - return True + # If we are the one who've done the job successfully first, get the vulnerabilities and + # send notifications to the repos that have a tag on that layer. + # TODO(josephschorr): Adapt this depending on the new notification format we adopt. + # if set_status: + # # Get the tags of the layer we analyzed. + # repository_map = defaultdict(list) + # event = ExternalNotificationEvent.get(name='vulnerability_found') + # matching = list(filter_tags_have_repository_event(get_tags_for_image(layer.id), event)) + # + # for tag in matching: + # repository_map[tag.repository_id].append(tag) + # + # # If there is at least one tag, + # # Lookup the vulnerabilities for the image, now that it is analyzed. 
+ # if len(repository_map) > 0: + # logger.debug('Loading vulnerabilities for layer %s', layer.id) + # sec_data = self._get_vulnerabilities(layer) + # + # if sec_data is not None: + # # Dispatch events for any detected vulnerabilities + # logger.debug('Got vulnerabilities for layer %s: %s', layer.id, sec_data) + # + # for repository_id in repository_map: + # tags = repository_map[repository_id] + # + # for vuln in sec_data['Vulnerabilities']: + # event_data = { + # 'tags': [tag.name for tag in tags], + # 'vulnerability': { + # 'id': vuln['Name'], + # 'description': vuln['Description'], + # 'link': vuln['Link'], + # 'priority': vuln['Priority'], + # }, + # } + # + # spawn_notification(tags[0].repository, 'vulnerability_found', event_data) - def _get_vulnerabilities(self, image): + return True, set_status + + def _get_vulnerabilities(self, layer): """ Returns the vulnerabilities detected (if any) or None on error. """ try: - response = secscan_api.call('layers/%s/vulnerabilities', None, - '%s.%s' % (image.docker_image_id, image.storage.uuid)) + response = secscan_api.call(self._api + API_METHOD_GET_WITH_VULNERABILITIES, None, + '%s.%s' % (layer.docker_image_id, layer.storage.uuid)) + logger.debug('Got response %s for vulnerabilities for layer %s', - response.status_code, image.id) + response.status_code, layer.id) + if response.status_code == 404: return None except (requests.exceptions.RequestException, ValueError): - logger.exception('Failed to get vulnerability response for %s', image.id) + logger.exception('Failed to get vulnerability response for %s', layer.id) return None return response.json() - def _index_images(self): - logger.debug('Started indexing') - event = ExternalNotificationEvent.get(name='vulnerability_found') - - while True: - # Lookup the images to index. - images = [] - logger.debug('Looking up images to index') - images = get_secscan_candidates(self._target_version, BATCH_SIZE) - - if not images: - logger.debug('No more images left to analyze') - return - - logger.debug('Found %d images to index', len(images)) - for image in images: - # If we couldn't analyze the parent, we can't analyze this image. - if (image.parent and not image.parent.security_indexed and - image.parent.security_indexed_engine >= self._target_version): - set_secscan_status(image, False, self._target_version) - continue - - # Analyze the image. - analyzed = self._analyze_image(image) - if not analyzed: - continue - - # Get the tags of the image we analyzed - matching = list(filter_tags_have_repository_event(get_tags_for_image(image.id), event)) - - repository_map = defaultdict(list) - - for tag in matching: - repository_map[tag.repository_id].append(tag) - - # If there is at least one tag, - # Lookup the vulnerabilities for the image, now that it is analyzed. 
- if len(repository_map) > 0: - logger.debug('Loading vulnerabilities for layer %s', image.id) - sec_data = self._get_vulnerabilities(image) - - if sec_data is None: - continue - - if not sec_data.get('Vulnerabilities'): - continue - - # Dispatch events for any detected vulnerabilities - logger.debug('Got vulnerabilities for layer %s: %s', image.id, sec_data) - - for repository_id in repository_map: - tags = repository_map[repository_id] - - for vuln in sec_data['Vulnerabilities']: - event_data = { - 'tags': [tag.name for tag in tags], - 'vulnerability': { - 'id': vuln['ID'], - 'description': vuln['Description'], - 'link': vuln['Link'], - 'priority': vuln['Priority'], - }, - } - - spawn_notification(tags[0].repository, 'vulnerability_found', event_data) if __name__ == '__main__': if not features.SECURITY_SCANNER:
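For readers of the worker changes above: _index_images now iterates candidates via yield_random_entries, and _analyze_recursively analyzes a layer only after all of its ancestors have been analyzed with the target engine version. Below is a minimal, self-contained sketch of that parent-first ordering; Layer, analyze and TARGET_VERSION are hypothetical stand-ins for illustration, not the Quay models or the real scanner call.

# Minimal sketch of parent-first analysis ordering (hypothetical stand-ins, not Quay code).

TARGET_VERSION = 2

class Layer(object):
    def __init__(self, layer_id, parent=None):
        self.id = layer_id
        self.parent = parent
        self.security_indexed = False
        self.security_indexed_engine = -1  # never analyzed

def analyze(layer):
    """ Stand-in for the call to the security scanner; always succeeds here. """
    layer.security_indexed = True
    layer.security_indexed_engine = TARGET_VERSION
    return True

def analyze_recursively(layer):
    """ Ensure every ancestor is analyzed with the target engine before the layer itself. """
    parent = layer.parent
    if parent is not None and parent.security_indexed_engine < TARGET_VERSION:
        if not analyze_recursively(parent):
            return False  # parent analysis failed; do not attempt the children
    if parent is not None and not parent.security_indexed:
        return False  # parent failed under this engine version, so this layer cannot succeed
    return analyze(layer)

if __name__ == '__main__':
    base = Layer('base')
    middle = Layer('middle', parent=base)
    top = Layer('top', parent=middle)
    print(analyze_recursively(top))
    print([(layer.id, layer.security_indexed_engine) for layer in (base, middle, top)])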