Implement against new Clair paginated notification system
parent b34314a584, commit f498e92d58
10 changed files with 447 additions and 101 deletions
@@ -1,80 +1,45 @@
 import logging
 import time
+import json
 
-from collections import defaultdict
-
-import json
 
 import features
 
 from app import secscan_notification_queue, secscan_api
-from data import model
-from data.model.tag import filter_tags_have_repository_event, get_matching_tags
-from data.database import (Image, ImageStorage, ExternalNotificationEvent,
-                           Repository, RepositoryNotification, RepositoryTag)
-from endpoints.notificationhelper import spawn_notification
-from workers.queueworker import QueueWorker
+from workers.queueworker import QueueWorker, JobException
+from util.secscan.notifier import process_notification_data
 
 logger = logging.getLogger(__name__)
 
+_EXTENDED_SECONDS = 600
+
 
 class SecurityNotificationWorker(QueueWorker):
   def process_queue_item(self, data):
-    cve_id = data['Name']
-    vulnerability = data['Content']['Vulnerability']
-    priority = vulnerability['Priority']
+    notification_name = data['Name']
+    current_page = data.get('page', None)
 
-    # Lookup the external event for when we have vulnerabilities.
-    event = ExternalNotificationEvent.get(name='vulnerability_found')
+    while True:
+      (response_data, should_retry) = secscan_api.get_notification(notification_name)
+      if response_data is None:
+        if should_retry:
+          raise JobException()
+        else:
+          # Return to mark the job as "complete", as we'll never be able to finish it.
+          logger.error('Failed to handle security notification %s', notification_name)
+          return
 
-    # For each layer, retrieve the matching tags and join with the repository to determine which
-    # require new notifications.
-    tag_map = defaultdict(set)
-    repository_map = {}
+      notification_data = response_data['Notification']
+      if not process_notification_data(notification_data):
+        raise JobException()
 
-    # Find all tags that contain the layer(s) introducing the vulnerability,
-    # in repositories that have the event setup.
-    content = data['Content']
-    layer_ids = content.get('NewIntroducingLayersIDs', content.get('IntroducingLayersIDs', []))
-    for layer_id in layer_ids:
-      (docker_image_id, storage_uuid) = layer_id.split('.', 2)
+      # Check for a next page of results. If none, we're done.
+      if 'NextPage' not in notification_data:
+        return
 
-      matching = get_matching_tags(docker_image_id, storage_uuid, RepositoryTag, Repository,
-                                   Image, ImageStorage)
-      tags = list(filter_tags_have_repository_event(matching, event))
-
-      check_map = {}
-      for tag in tags:
-        # Verify that the tag's root image has the vulnerability.
-        tag_layer_id = '%s.%s' % (tag.image.docker_image_id, tag.image.storage.uuid)
-        logger.debug('Checking if layer %s is vulnerable to %s', tag_layer_id, cve_id)
-
-        if not tag_layer_id in check_map:
-          is_vulnerable = secscan_api.check_layer_vulnerable(tag_layer_id, cve_id)
-          check_map[tag_layer_id] = is_vulnerable
-
-        logger.debug('Result of layer %s is vulnerable to %s check: %s', tag_layer_id, cve_id,
-                     check_map[tag_layer_id])
-
-        if check_map[tag_layer_id]:
-          # Add the vulnerable tag to the list.
-          tag_map[tag.repository_id].add(tag.name)
-          repository_map[tag.repository_id] = tag.repository
-
-    # For each of the tags found, issue a notification.
-    for repository_id in tag_map:
-      tags = tag_map[repository_id]
-      event_data = {
-        'tags': list(tags),
-        'vulnerability': {
-          'id': data['Name'],
-          'description': vulnerability['Description'],
-          'link': vulnerability['Link'],
-          'priority': priority,
-        },
-      }
-
-      spawn_notification(repository_map[repository_id], 'vulnerability_found', event_data)
+      # Otherwise, save the next page token into the queue item (so we can pick up from here if
+      # something goes wrong in the next loop iteration), and continue.
+      current_page = notification_data['NextPage']
+      data['page'] = current_page
+      self.extend_processing(_EXTENDED_SECONDS, json.dumps(data))
 
 
 if __name__ == '__main__':
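A note on the new flow, for review purposes: the worker now drains a Clair notification one page at a time. Each pass fetches the named notification, hands the page to process_notification_data, and then either finishes (no NextPage token), raises JobException so the queue retries later, or writes the NextPage token back into the queue item and extends the reservation before looping. The sketch below restates that pattern in isolation, under stated assumptions: fetch_page, process_page, and checkpoint are hypothetical stand-ins rather than actual Quay or Clair APIs, and the sketch passes the saved page token to the fetch, which the real call presumably does as well.

import json

class RetryLater(Exception):
  """Raised to leave the queue item unfinished so the queue retries it later."""

def drain_notification(queue_item, fetch_page, process_page, checkpoint):
  # queue_item: dict with at least 'Name' and, optionally, a previously saved 'page' token.
  # fetch_page(name, page) -> (notification_dict_or_None, should_retry)   (hypothetical)
  # process_page(notification_dict) -> bool; False means the page could not be handled yet
  # checkpoint(queue_item_json) -> None; persists progress and extends the reservation
  name = queue_item['Name']
  page = queue_item.get('page')

  while True:
    notification, should_retry = fetch_page(name, page)
    if notification is None:
      if should_retry:
        raise RetryLater()
      return False  # unrecoverable: report the item as done so it is not retried forever

    if not process_page(notification):
      raise RetryLater()

    # No NextPage token means the final page has been handled.
    if 'NextPage' not in notification:
      return True

    # Save the page token so a crash or requeue resumes from this page,
    # then extend the reservation before fetching the next page.
    page = notification['NextPage']
    queue_item['page'] = page
    checkpoint(json.dumps(queue_item))

The useful property is that progress is checkpointed per page, so a crash or requeue replays at most one page instead of the whole notification.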
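For contrast with what was removed: the old code did the tag fan-out inline. It split each introducing-layer ID into its docker_image_id and storage UUID halves, looked up the matching tags, confirmed each candidate layer against the scanner, and grouped vulnerable tag names per repository before spawning one vulnerability_found notification per repository. A rough sketch of just that grouping step, with hypothetical callables (tags_for_layer, layer_is_vulnerable) standing in for Quay's get_matching_tags and secscan_api.check_layer_vulnerable:

from collections import defaultdict

def group_vulnerable_tags(layer_ids, tags_for_layer, layer_is_vulnerable, cve_id):
  # layer_ids: iterable of '<docker_image_id>.<storage_uuid>' composite IDs.
  # tags_for_layer(docker_image_id, storage_uuid) -> iterable of tag dicts with
  #   'docker_image_id', 'storage_uuid', 'repository_id' and 'name' keys (hypothetical shape).
  # layer_is_vulnerable(layer_id, cve_id) -> bool (stand-in for the scanner check).
  tag_map = defaultdict(set)   # repository_id -> set of vulnerable tag names
  check_cache = {}             # avoid re-checking the same layer twice

  for layer_id in layer_ids:
    docker_image_id, storage_uuid = layer_id.split('.', 1)
    for tag in tags_for_layer(docker_image_id, storage_uuid):
      tag_layer_id = '%s.%s' % (tag['docker_image_id'], tag['storage_uuid'])
      if tag_layer_id not in check_cache:
        check_cache[tag_layer_id] = layer_is_vulnerable(tag_layer_id, cve_id)
      if check_cache[tag_layer_id]:
        tag_map[tag['repository_id']].add(tag['name'])

  return tag_map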