Implement against new Clair paginated notification system
parent b34314a584
commit f498e92d58
10 changed files with 447 additions and 101 deletions
@@ -3,7 +3,7 @@
PRIORITY_LEVELS = {
  'Unknown': {
    'title': 'Unknown',
-   'index': '6',
+   'index': 6,
    'level': 'info',

    'description': 'Unknown is either a security problem that has not been assigned ' +
@@ -13,7 +13,7 @@ PRIORITY_LEVELS = {

  'Negligible': {
    'title': 'Negligible',
-   'index': '5',
+   'index': 5,
    'level': 'info',

    'description': 'Negligible is technically a security problem, but is only theoretical ' +
@@ -24,7 +24,7 @@ PRIORITY_LEVELS = {

  'Low': {
    'title': 'Low',
-   'index': '4',
+   'index': 4,
    'level': 'warning',

    'description': 'Low is a security problem, but is hard to exploit due to environment, ' +
@@ -36,7 +36,7 @@ PRIORITY_LEVELS = {
  'Medium': {
    'title': 'Medium',
    'value': 'Medium',
-   'index': '3',
+   'index': 3,
    'level': 'warning',

    'description': 'Medium is a real security problem, and is exploitable for many people. ' +
@@ -48,7 +48,7 @@ PRIORITY_LEVELS = {
  'High': {
    'title': 'High',
    'value': 'High',
-   'index': '2',
+   'index': 2,
    'level': 'warning',

    'description': 'High is a real problem, exploitable for many people in a default installation. ' +
@@ -60,7 +60,7 @@ PRIORITY_LEVELS = {
  'Critical': {
    'title': 'Critical',
    'value': 'Critical',
-   'index': '1',
+   'index': 1,
    'level': 'error',

    'description': 'Critical is a world-burning problem, exploitable for nearly all people in ' +
@@ -72,7 +72,7 @@ PRIORITY_LEVELS = {
  'Defcon1': {
    'title': 'Defcon 1',
    'value': 'Defcon1',
-   'index': '0',
+   'index': 0,
    'level': 'error',

    'description': 'Defcon1 is a Critical problem which has been manually highlighted ' +
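The 'index' values above change from strings to integers, which lets the new notifier (util/secscan/notifier.py, added below) rank severities by comparing indexes directly against its integer sys.maxint fallback. A minimal illustration of that comparison, using the PRIORITY_LEVELS table as defined above; the 'High'/'Medium' inputs are made-up examples:

    import sys

    from util.secscan import PRIORITY_LEVELS

    # Look up the severity entries the way the notifier does, falling back to a
    # least-severe index when the severity name is not recognized.
    new_severity = PRIORITY_LEVELS.get('High', {'index': sys.maxint})
    old_severity = PRIORITY_LEVELS.get('Medium', {'index': sys.maxint})

    # A lower index means a more severe level, so this detects that a
    # vulnerability's severity has increased.
    severity_increased = new_severity['index'] < old_severity['index']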
@@ -19,8 +19,8 @@ class APIRequestFailure(Exception):

_API_METHOD_INSERT = 'layers'
_API_METHOD_GET_LAYER = 'layers/%s'
-_API_METHOD_GET_WITH_VULNERABILITIES_FLAG = '?vulnerabilities'
-_API_METHOD_GET_WITH_FEATURES_FLAG = '?features'
+_API_METHOD_MARK_NOTIFICATION_READ = 'notifications/%s'
+_API_METHOD_GET_NOTIFICATION = 'notifications/%s'


class SecurityScannerAPI(object):
@@ -113,7 +113,7 @@ class SecurityScannerAPI(object):

    logger.info('Analyzing layer %s', request['Layer']['Name'])
    try:
-     response = self._call(_API_METHOD_INSERT, request)
+     response = self._call('POST', _API_METHOD_INSERT, request)
      json_response = response.json()
    except requests.exceptions.Timeout:
      logger.exception('Timeout when trying to post layer data response for %s', layer.id)
@@ -146,35 +146,94 @@ class SecurityScannerAPI(object):
    return api_version, False


  def check_layer_vulnerable(self, layer_id, cve_name):
    """ Checks to see if the layer with the given ID is vulnerable to the specified CVE. """
    layer_data = self._get_layer_data(layer_id, include_vulnerabilities=True)
    if layer_data is None or 'Layer' not in layer_data or 'Features' not in layer_data['Layer']:
      return False

    for feature in layer_data['Layer']['Features']:
      for vuln in feature.get('Vulnerabilities', []):
        if vuln['Name'] == cve_name:
          return True

    return False


  def get_notification(self, notification_name, layer_limit=10, page=None):
    """ Gets the data for a specific notification, with optional page token.
        Returns a tuple of the data (None on failure) and whether to retry.
    """
    try:
      params = {
        'limit': layer_limit
      }

      if page is not None:
        params['page'] = page

      response = self._call('GET', _API_METHOD_GET_NOTIFICATION % notification_name, params=params)
      json_response = response.json()
    except requests.exceptions.Timeout:
      logger.exception('Timeout when trying to get notification for %s', notification_name)
      return None, True
    except requests.exceptions.ConnectionError:
      logger.exception('Connection error when trying to get notification for %s', notification_name)
      return None, True
    except (requests.exceptions.RequestException, ValueError):
      logger.exception('Failed to get notification for %s', notification_name)
      return None, False

    if response.status_code != 200:
      return None, response.status_code != 404 and response.status_code != 400

    return json_response, False


  def mark_notification_read(self, notification_name):
    """ Marks a security scanner notification as read. """
    try:
      response = self._call('DELETE', _API_METHOD_MARK_NOTIFICATION_READ % notification_name)
      return response.status_code == 200
    except requests.exceptions.RequestException:
      logger.exception('Failed to mark notification as read: %s', notification_name)
      return False


  def get_layer_data(self, layer, include_features=False, include_vulnerabilities=False):
    """ Returns the layer data for the specified layer. On error, returns None. """
    layer_id = '%s.%s' % (layer.docker_image_id, layer.storage.uuid)
    return self._get_layer_data(layer_id, include_features, include_vulnerabilities)


  def _get_layer_data(self, layer_id, include_features=False, include_vulnerabilities=False):
    try:
-     flag = ''
+     params = {}
      if include_features:
-       flag = _API_METHOD_GET_WITH_FEATURES_FLAG
+       params = {'features': True}

      if include_vulnerabilities:
-       flag = _API_METHOD_GET_WITH_VULNERABILITIES_FLAG
+       params = {'vulnerabilities': True}

-     response = self._call(_API_METHOD_GET_LAYER + flag, None, layer_id)
+     response = self._call('GET', _API_METHOD_GET_LAYER % layer_id, params=params)
      logger.debug('Got response %s for vulnerabilities for layer %s',
                   response.status_code, layer_id)
      json_response = response.json()
    except requests.exceptions.Timeout:
      raise APIRequestFailure('API call timed out')
    except requests.exceptions.ConnectionError:
      raise APIRequestFailure('Could not connect to security service')
    except (requests.exceptions.RequestException, ValueError):
-     logger.exception('Failed to get layer data response for %s', layer.id)
+     logger.exception('Failed to get layer data response for %s', layer_id)
      raise APIRequestFailure()

    if response.status_code == 404:
      return None

-   return response.json()
+   return json_response


- def _call(self, relative_url, body=None, *args, **kwargs):
+ def _call(self, method, relative_url, params=None, body=None):
    """ Issues an HTTP call to the sec API at the given relative URL.
        This function disconnects from the database while awaiting a response
        from the API server.
@@ -184,18 +243,21 @@ class SecurityScannerAPI(object):
      raise Exception('Cannot call unconfigured security system')

    api_url = urljoin(security_config['ENDPOINT'], '/' + security_config['API_VERSION']) + '/'
-   url = urljoin(api_url, relative_url % args)
+   url = urljoin(api_url, relative_url)

    client = self.config['HTTPCLIENT']
    timeout = security_config.get('API_TIMEOUT_SECONDS', 1)
    logger.debug('Looking up sec information: %s', url)

    with CloseForLongOperation(self.config):
-     if body is not None:
+     if method == 'POST':
        logger.debug('POSTing security URL %s', url)
-       return client.post(url, json=body, params=kwargs, timeout=timeout, cert=self._keys,
+       return client.post(url, json=body, params=params, timeout=timeout, cert=self._keys,
                           verify=self._certificate)
      elif method == 'DELETE':
        logger.debug('DELETEing security URL %s', url)
        return client.delete(url, params=params, timeout=timeout, cert=self._keys,
                             verify=self._certificate)
      else:
        logger.debug('GETing security URL %s', url)
-       return client.get(url, params=kwargs, timeout=timeout, cert=self._keys,
+       return client.get(url, params=params, timeout=timeout, cert=self._keys,
                          verify=self._certificate)
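The two notification methods above are the building blocks for the paginated flow: get_notification fetches one page of a named notification and returns the parsed JSON plus a retry hint, while mark_notification_read tells Clair the notification has been handled. A rough usage sketch, assuming the Clair response wraps each page in a 'Notification' object carrying an optional 'NextPage' token (those two field names are assumptions, not shown in this diff):

    from app import secscan_api

    # Hypothetical helper, not part of this commit: fetch every page of a Clair
    # notification and only mark it read once all pages have been retrieved.
    def fetch_notification_pages(notification_name):
      pages = []
      page_token = None
      while True:
        data, should_retry = secscan_api.get_notification(notification_name, layer_limit=100,
                                                          page=page_token)
        if data is None:
          # A transient failure (timeout, connection error) sets should_retry=True,
          # so the caller can requeue the notification and try again later.
          return None, should_retry

        notification = data.get('Notification', {})  # assumed response wrapper
        pages.append(notification)

        page_token = notification.get('NextPage')  # assumed paging token field
        if page_token is None:
          secscan_api.mark_notification_read(notification_name)
          return pages, False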
util/secscan/notifier.py (new file, 103 lines)
@@ -0,0 +1,103 @@
import logging
import sys

from collections import defaultdict

from app import secscan_api
from data.model.tag import filter_tags_have_repository_event, get_matching_tags
from data.database import (Image, ImageStorage, ExternalNotificationEvent, Repository,
                           RepositoryTag)
from endpoints.notificationhelper import spawn_notification
from util.secscan import PRIORITY_LEVELS
from util.secscan.api import APIRequestFailure

logger = logging.getLogger(__name__)


def process_notification_data(notification_data):
  """ Processes the given notification data to spawn vulnerability notifications as necessary.
      Returns whether the processing succeeded.
  """
  new_data = notification_data['New']
  old_data = notification_data.get('Old', {})

  new_vuln = new_data['Vulnerability']
  old_vuln = old_data.get('Vulnerability', {})

  new_layer_ids = set(new_data.get('LayersIntroducingVulnerability', []))
  old_layer_ids = set(old_data.get('LayersIntroducingVulnerability', []))

  new_severity = PRIORITY_LEVELS.get(new_vuln.get('Severity', 'Unknown'), {'index': sys.maxint})
  old_severity = PRIORITY_LEVELS.get(old_vuln.get('Severity', 'Unknown'), {'index': sys.maxint})

  # By default we only notify the new layers that are affected by the vulnerability. If, however,
  # the severity of the vulnerability has increased, we need to notify *all* layers, as we might
  # need to send new notifications for older layers.
  notify_layers = new_layer_ids - old_layer_ids
  if new_severity['index'] < old_severity['index']:
    notify_layers = new_layer_ids | old_layer_ids

  if not notify_layers:
    # Nothing more to do.
    return True

  # Lookup the external event for when we have vulnerabilities.
  event = ExternalNotificationEvent.get(name='vulnerability_found')

  # For each layer, retrieve the matching tags and join with the repository to determine which
  # require new notifications.
  tag_map = defaultdict(set)
  repository_map = {}
  cve_id = new_vuln['Name']

  # Find all tags that contain the layer(s) introducing the vulnerability,
  # in repositories that have the event setup.
  for layer_id in notify_layers:
    # Split the layer ID into its Docker Image ID and storage ID.
    (docker_image_id, storage_uuid) = layer_id.split('.', 2)

    # Find the matching tags.
    matching = get_matching_tags(docker_image_id, storage_uuid, RepositoryTag, Repository,
                                 Image, ImageStorage)
    tags = list(filter_tags_have_repository_event(matching, event))

    check_map = {}
    for tag in tags:
      # Verify that the tag's root image has the vulnerability.
      tag_layer_id = '%s.%s' % (tag.image.docker_image_id, tag.image.storage.uuid)
      logger.debug('Checking if layer %s is vulnerable to %s', tag_layer_id, cve_id)

      if tag_layer_id not in check_map:
        try:
          is_vulnerable = secscan_api.check_layer_vulnerable(tag_layer_id, cve_id)
        except APIRequestFailure:
          return False

        check_map[tag_layer_id] = is_vulnerable

      logger.debug('Result of layer %s is vulnerable to %s check: %s', tag_layer_id, cve_id,
                   check_map[tag_layer_id])

      if check_map[tag_layer_id]:
        # Add the vulnerable tag to the list.
        tag_map[tag.repository_id].add(tag.name)
        repository_map[tag.repository_id] = tag.repository

  # For each of the tags found, issue a notification.
  for repository_id in tag_map:
    tags = tag_map[repository_id]
    event_data = {
      'tags': list(tags),
      'vulnerability': {
        'id': cve_id,
        'description': new_vuln.get('Description', None),
        'link': new_vuln.get('Link', None),
        'priority': new_severity['title'],
        'has_fix': 'FixedIn' in new_vuln,
      },
    }

    spawn_notification(repository_map[repository_id], 'vulnerability_found', event_data)

  return True
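process_notification_data only depends on the fields read above: the 'New' and 'Old' blocks, each with a 'Vulnerability' and a 'LayersIntroducingVulnerability' list. A hand-built payload in that shape (all concrete values are made up) showing the minimal structure the function expects; running it for real also requires the database models and secscan_api it imports:

    from util.secscan.notifier import process_notification_data

    # Illustrative payload only; layer IDs follow the '<docker_image_id>.<storage_uuid>'
    # convention that the function splits on.
    notification_data = {
      'New': {
        'Vulnerability': {
          'Name': 'CVE-2016-0001',
          'Severity': 'High',
          'Description': 'An example vulnerability.',
          'Link': 'https://example.com/CVE-2016-0001',
        },
        'LayersIntroducingVulnerability': ['somedockerimageid.somestorageuuid'],
      },
      'Old': {
        'Vulnerability': {
          'Severity': 'Medium',
        },
        'LayersIntroducingVulnerability': [],
      },
    }

    # Severity went from Medium to High, so all listed layers (new and old) are checked.
    handled = process_notification_data(notification_data)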