2017-07-11 18:12:57 +00:00
|
|
|
import os
|
2015-10-26 19:13:58 +00:00
|
|
|
import logging
|
2016-12-22 19:55:26 +00:00
|
|
|
|
2017-04-19 17:51:13 +00:00
|
|
|
from abc import ABCMeta, abstractmethod
|
|
|
|
from six import add_metaclass
|
2016-12-22 19:55:26 +00:00
|
|
|
from urlparse import urljoin
|
|
|
|
|
2015-10-26 19:13:58 +00:00
|
|
|
import requests
|
|
|
|
|
2016-02-24 21:01:27 +00:00
|
|
|
from data import model
|
2018-12-04 16:56:06 +00:00
|
|
|
from data.database import CloseForLongOperation, TagManifest, Image, Manifest, ManifestLegacyImage
|
2016-02-24 21:01:27 +00:00
|
|
|
from data.model.storage import get_storage_locations
|
2018-12-04 16:56:06 +00:00
|
|
|
from data.registry_model.datatypes import Manifest as ManifestDataType, LegacyImage
|
2017-04-19 17:51:13 +00:00
|
|
|
from util.abchelpers import nooper
|
2017-01-23 19:36:19 +00:00
|
|
|
from util.failover import failover, FailoverException
|
2016-02-24 21:01:27 +00:00
|
|
|
from util.secscan.validator import SecurityConfigValidator
|
2016-05-31 20:48:19 +00:00
|
|
|
from util.security.registry_jwt import generate_bearer_token, build_context_and_subject
|
2016-05-04 21:40:09 +00:00
|
|
|
|
2017-02-01 23:17:25 +00:00
|
|
|
from _init import CONF_DIR
|
2016-05-04 21:40:09 +00:00
|
|
|
# How long (seconds) the security scanner's bearer token remains valid; the
# scanner must fetch the layer URL within this window.
TOKEN_VALIDITY_LIFETIME_S = 60 # Amount of time the security scanner has to call the layer URL

# Exact error string returned by the scanner when a layer's parent has not yet
# been indexed; matched verbatim in analyze_layer to detect this case.
UNKNOWN_PARENT_LAYER_ERROR_MSG = 'worker: parent layer is unknown, it must be processed first'

# Certificate used to verify the TLS connection proxied through the jwtproxy
# signer (see _request).
MITM_CERT_PATH = os.path.join(CONF_DIR, 'mitm.cert')

# Sent on every scanner request; 'Connection: close' avoids keeping sockets open.
DEFAULT_HTTP_HEADERS = {'Connection': 'close'}

logger = logging.getLogger(__name__)
|
|
|
|
|
2016-05-04 21:40:09 +00:00
|
|
|
|
2016-02-24 21:01:27 +00:00
|
|
|
class AnalyzeLayerException(Exception):
  """ Raised when submitting a layer for analysis fails due to a request issue. """
|
|
|
|
|
|
|
|
class AnalyzeLayerRetryException(Exception):
  """ Raised when analyzing a layer fails due to a request issue that is
      transient, so the request should be retried.
  """
|
|
|
|
|
|
|
|
class MissingParentLayerException(AnalyzeLayerException):
  """ Raised when the security scanner does not yet know the layer's parent. """
|
|
|
|
|
|
|
|
class InvalidLayerException(AnalyzeLayerException):
  """ Raised when the security scanner cannot process the layer itself. """
|
2015-10-26 19:13:58 +00:00
|
|
|
|
2016-02-24 21:01:27 +00:00
|
|
|
class APIRequestFailure(Exception):
  """ Raised when an API request to the security scanner could not be completed. """
|
2015-10-26 19:13:58 +00:00
|
|
|
|
2017-03-01 03:12:44 +00:00
|
|
|
class Non200ResponseException(Exception):
  """ Raised when the upstream API replies with a non-200 HTTP status code.

      The offending response object is kept on ``self.response`` so callers
      can inspect its status code and body.
  """

  def __init__(self, response):
    super(Non200ResponseException, self).__init__()
    # Preserve the full HTTP response for the caller.
    self.response = response
|
|
|
|
|
2015-10-26 19:13:58 +00:00
|
|
|
|
2016-02-24 21:01:27 +00:00
|
|
|
# Relative API paths (appended under the configured API version by _join_api_url)
# for each scanner operation. '%s' slots take a layer ID or notification name.
_API_METHOD_INSERT = 'layers'
_API_METHOD_GET_LAYER = 'layers/%s'
_API_METHOD_DELETE_LAYER = 'layers/%s'
_API_METHOD_MARK_NOTIFICATION_READ = 'notifications/%s'
_API_METHOD_GET_NOTIFICATION = 'notifications/%s'
_API_METHOD_PING = 'metrics'
|
2015-10-26 19:13:58 +00:00
|
|
|
|
|
|
|
|
2016-12-22 19:55:26 +00:00
|
|
|
def compute_layer_id(layer):
  """ Returns the ID for the layer in the security scanner.

      Accepts either a Manifest datatype or a LegacyImage datatype; in both
      cases the layer is resolved down to its backing Image row. Returns None
      when a manifest has no associated legacy image.
  """
  # NOTE: this is temporary until we switch to Clair V3.
  if isinstance(layer, ManifestDataType):
    if layer._is_tag_manifest:
      # Pre-OCI tag manifest: resolve via the tag's backing image.
      layer = TagManifest.get(id=layer._db_id).tag.image
    else:
      # New-style manifest: look up its associated legacy image; without one
      # there is nothing the (V2) scanner can address, so return None.
      manifest = Manifest.get(id=layer._db_id)
      try:
        layer = ManifestLegacyImage.get(manifest=manifest).image
      except ManifestLegacyImage.DoesNotExist:
        return None
  elif isinstance(layer, LegacyImage):
    layer = Image.get(id=layer._db_id)

  # Scanner-side layer IDs combine the Docker image ID with the storage UUID.
  return '%s.%s' % (layer.docker_image_id, layer.storage.uuid)
|
|
|
|
|
|
|
|
|
2016-02-24 21:01:27 +00:00
|
|
|
class SecurityScannerAPI(object):
  """ Facade for talking to the Security Scan service (usually Clair).

      Chooses between the real implementation and a no-op implementation based
      on whether the feature is enabled and the configuration validates, then
      delegates all attribute access to the chosen implementation.
  """

  def __init__(self, config, storage, server_hostname=None, client=None, skip_validation=False, uri_creator=None, instance_keys=None):
    feature_enabled = config.get('FEATURE_SECURITY_SCANNER', False)

    # Determine whether we have a usable configuration.
    if skip_validation:
      has_valid_config = True
    elif feature_enabled:
      config_validator = SecurityConfigValidator(feature_enabled, config.get('SECURITY_SCANNER_ENDPOINT'))
      has_valid_config = config_validator.valid()
    else:
      has_valid_config = False

    # Fall back to the no-op implementation unless the feature is both enabled
    # and correctly configured.
    if feature_enabled and has_valid_config:
      self.state = ImplementedSecurityScannerAPI(config, storage, server_hostname, client=client, uri_creator=uri_creator, instance_keys=instance_keys)
    else:
      self.state = NoopSecurityScannerAPI()

  def __getattr__(self, name):
    # Delegate to the selected implementation; unknown attributes yield None.
    return getattr(self.state, name, None)
|
|
|
|
|
2016-05-02 19:29:31 +00:00
|
|
|
|
2017-04-19 17:51:13 +00:00
|
|
|
# Abstract interface implemented by both the real and the no-op scanner APIs.
@add_metaclass(ABCMeta)
class SecurityScannerAPIInterface(object):
  """ Helper class for talking to the Security Scan service (usually Clair). """

  @abstractmethod
  def cleanup_layers(self, layers):
    """ Callback invoked by garbage collection to cleanup any layers that no longer
        need to be stored in the security scanner.
    """
    pass

  @abstractmethod
  def ping(self):
    """ Calls GET on the metrics endpoint of the security scanner to ensure it is running
        and properly configured. Returns the HTTP response.
    """
    pass

  @abstractmethod
  def delete_layer(self, layer):
    """ Calls DELETE on the given layer in the security scanner, removing it from
        its database.
    """
    pass

  @abstractmethod
  def analyze_layer(self, layer):
    """ Posts the given layer to the security scanner for analysis, blocking until complete.
        Returns the analysis version on success or raises an exception deriving from
        AnalyzeLayerException on failure. Callers should handle all cases of AnalyzeLayerException.
    """
    pass

  @abstractmethod
  def check_layer_vulnerable(self, layer_id, cve_name):
    """ Checks to see if the layer with the given ID is vulnerable to the specified CVE. """
    pass

  @abstractmethod
  def get_notification(self, notification_name, layer_limit=100, page=None):
    """ Gets the data for a specific notification, with optional page token.
        Returns a tuple of the data (None on failure) and whether to retry.
    """
    pass

  @abstractmethod
  def mark_notification_read(self, notification_name):
    """ Marks a security scanner notification as read. """
    pass

  @abstractmethod
  def get_layer_data(self, layer, include_features=False, include_vulnerabilities=False):
    """ Returns the layer data for the specified layer. On error, returns None. """
    pass
|
|
|
|
|
|
|
|
|
|
|
|
# NOTE(review): @nooper (util.abchelpers) presumably fills in do-nothing
# implementations for the abstract interface methods — confirm in abchelpers.
@nooper
class NoopSecurityScannerAPI(SecurityScannerAPIInterface):
  """ No-op version of the security scanner API. """
  pass
|
|
|
|
|
|
|
|
|
|
|
|
class ImplementedSecurityScannerAPI(SecurityScannerAPIInterface):
  """ Helper class for talking to the Security Scan service (Clair). """

  # TODO(sam) refactor this to not take an app config, and instead just the things it needs as a config object
  def __init__(self, config, storage, server_hostname, client=None, uri_creator=None, instance_keys=None):
    self._config = config
    # Instance keys used to sign registry bearer tokens (see _get_image_url_and_auth).
    self._instance_keys = instance_keys
    # Optional HTTP client used to issue requests (see _request).
    self._client = client
    self._storage = storage
    # Used as the JWT audience when generating bearer tokens.
    self._server_hostname = server_hostname
    self._default_storage_locations = config['DISTRIBUTED_STORAGE_PREFERENCE']
    self._target_version = config.get('SECURITY_SCANNER_ENGINE_VERSION_TARGET', 2)
    # Callable(repository_path, content_checksum) -> registry URL for a blob.
    self._uri_creator = uri_creator
|
2015-11-12 22:47:19 +00:00
|
|
|
|
2016-05-04 21:40:09 +00:00
|
|
|
  def _get_image_url_and_auth(self, image):
    """ Returns a tuple of the url and the auth header value that must be used
        to fetch the layer data itself. If the image can't be addressed, we return
        None.

        Raises Exception if no instance keys were provided at construction.
    """
    if self._instance_keys is None:
      raise Exception('No Instance keys provided to Security Scanner API')

    path = model.storage.get_layer_path(image.storage)
    locations = self._default_storage_locations

    if not self._storage.exists(locations, path):
      # Not in the preferred locations; fall back to wherever the storage
      # record says the layer lives.
      locations = get_storage_locations(image.storage.uuid)
      if not locations or not self._storage.exists(locations, path):
        logger.warning('Could not find a valid location to download layer %s out of %s',
                       compute_layer_id(image), locations)
        return None, None

    uri = self._storage.get_direct_download_url(locations, path)
    auth_header = None
    if uri is None:
      # Use the registry API instead, with a signed JWT giving access
      repo_name = image.repository.name
      namespace_name = image.repository.namespace_user.username
      repository_and_namespace = '/'.join([namespace_name, repo_name])

      # Generate the JWT which will authorize this
      audience = self._server_hostname
      context, subject = build_context_and_subject()
      access = [{
        'type': 'repository',
        'name': repository_and_namespace,
        'actions': ['pull'],
      }]

      # Token is only valid for TOKEN_VALIDITY_LIFETIME_S seconds.
      auth_token = generate_bearer_token(audience, subject, context, access,
                                         TOKEN_VALIDITY_LIFETIME_S, self._instance_keys)
      auth_header = 'Bearer ' + auth_token

      uri = self._uri_creator(repository_and_namespace, image.storage.content_checksum)

    return uri, auth_header
|
2015-11-12 20:46:31 +00:00
|
|
|
|
2016-12-22 19:55:26 +00:00
|
|
|
  def _new_analyze_request(self, layer):
    """ Create the request body to submit the given layer for analysis. If the layer's URL cannot
        be found, returns None.
    """
    layer_id = compute_layer_id(layer)
    if layer_id is None:
      return None

    url, auth_header = self._get_image_url_and_auth(layer)
    if url is None:
      return None

    layer_request = {
      'Name': layer_id,
      'Path': url,
      'Format': 'Docker',
    }

    # Only attach an Authorization header when the URL requires one (i.e. when
    # it points at the registry API rather than a direct-download URL).
    if auth_header is not None:
      layer_request['Headers'] = {
        'Authorization': auth_header,
      }

    # Link to the parent layer so the scanner can build the layer chain.
    if layer.parent is not None:
      if layer.parent.docker_image_id and layer.parent.storage.uuid:
        layer_request['ParentName'] = compute_layer_id(layer.parent)

    return {
      'Layer': layer_request,
    }
|
2015-11-12 20:46:31 +00:00
|
|
|
|
2016-12-22 19:55:26 +00:00
|
|
|
def cleanup_layers(self, layers):
|
|
|
|
""" Callback invoked by garbage collection to cleanup any layers that no longer
|
|
|
|
need to be stored in the security scanner.
|
|
|
|
"""
|
|
|
|
for layer in layers:
|
|
|
|
self.delete_layer(layer)
|
2015-11-12 20:46:31 +00:00
|
|
|
|
2016-05-02 19:29:31 +00:00
|
|
|
def ping(self):
|
|
|
|
""" Calls GET on the metrics endpoint of the security scanner to ensure it is running
|
|
|
|
and properly configured. Returns the HTTP response.
|
|
|
|
"""
|
|
|
|
try:
|
|
|
|
return self._call('GET', _API_METHOD_PING)
|
2017-03-24 21:28:16 +00:00
|
|
|
except requests.exceptions.Timeout as tie:
|
2016-05-02 19:29:31 +00:00
|
|
|
logger.exception('Timeout when trying to connect to security scanner endpoint')
|
2017-03-24 21:28:16 +00:00
|
|
|
msg = 'Timeout when trying to connect to security scanner endpoint: %s' % tie.message
|
|
|
|
raise Exception(msg)
|
|
|
|
except requests.exceptions.ConnectionError as ce:
|
2016-05-02 19:29:31 +00:00
|
|
|
logger.exception('Connection error when trying to connect to security scanner endpoint')
|
2017-03-24 21:28:16 +00:00
|
|
|
msg = 'Connection error when trying to connect to security scanner endpoint: %s' % ce.message
|
|
|
|
raise Exception(msg)
|
|
|
|
except (requests.exceptions.RequestException, ValueError) as ve:
|
2016-05-02 19:29:31 +00:00
|
|
|
logger.exception('Exception when trying to connect to security scanner endpoint')
|
2017-03-24 21:28:16 +00:00
|
|
|
msg = 'Exception when trying to connect to security scanner endpoint: %s' % ve
|
|
|
|
raise Exception(msg)
|
2016-05-02 19:29:31 +00:00
|
|
|
|
2016-12-22 19:55:26 +00:00
|
|
|
def delete_layer(self, layer):
|
|
|
|
""" Calls DELETE on the given layer in the security scanner, removing it from
|
|
|
|
its database.
|
|
|
|
"""
|
|
|
|
layer_id = compute_layer_id(layer)
|
2018-12-04 16:56:06 +00:00
|
|
|
if layer_id is None:
|
|
|
|
return None
|
|
|
|
|
2016-12-22 19:55:26 +00:00
|
|
|
try:
|
2017-03-01 03:12:44 +00:00
|
|
|
self._call('DELETE', _API_METHOD_DELETE_LAYER % layer_id)
|
|
|
|
return True
|
|
|
|
except Non200ResponseException:
|
|
|
|
return False
|
2016-12-22 19:55:26 +00:00
|
|
|
except requests.exceptions.RequestException:
|
|
|
|
logger.exception('Failed to delete layer: %s', layer_id)
|
|
|
|
return False
|
2016-05-02 19:29:31 +00:00
|
|
|
|
2016-02-24 21:01:27 +00:00
|
|
|
def analyze_layer(self, layer):
|
|
|
|
""" Posts the given layer to the security scanner for analysis, blocking until complete.
|
2016-12-15 21:27:24 +00:00
|
|
|
Returns the analysis version on success or raises an exception deriving from
|
|
|
|
AnalyzeLayerException on failure. Callers should handle all cases of AnalyzeLayerException.
|
2016-02-24 21:01:27 +00:00
|
|
|
"""
|
2017-03-01 03:12:44 +00:00
|
|
|
def _response_json(request, response):
|
|
|
|
try:
|
|
|
|
return response.json()
|
|
|
|
except ValueError:
|
|
|
|
logger.exception('Failed to decode JSON when analyzing layer %s', request['Layer']['Name'])
|
|
|
|
raise AnalyzeLayerException
|
|
|
|
|
2016-02-24 21:01:27 +00:00
|
|
|
request = self._new_analyze_request(layer)
|
|
|
|
if not request:
|
2017-04-21 20:52:47 +00:00
|
|
|
logger.error('Could not build analyze request for layer %s', layer.id)
|
2016-12-15 21:27:24 +00:00
|
|
|
raise AnalyzeLayerException
|
2016-02-24 21:01:27 +00:00
|
|
|
|
|
|
|
logger.info('Analyzing layer %s', request['Layer']['Name'])
|
2015-11-09 22:12:22 +00:00
|
|
|
try:
|
2016-03-02 21:05:11 +00:00
|
|
|
response = self._call('POST', _API_METHOD_INSERT, body=request)
|
2016-02-24 21:01:27 +00:00
|
|
|
except requests.exceptions.Timeout:
|
|
|
|
logger.exception('Timeout when trying to post layer data response for %s', layer.id)
|
2016-12-15 21:27:24 +00:00
|
|
|
raise AnalyzeLayerRetryException
|
2016-02-24 21:01:27 +00:00
|
|
|
except requests.exceptions.ConnectionError:
|
|
|
|
logger.exception('Connection error when trying to post layer data response for %s', layer.id)
|
2016-12-15 21:27:24 +00:00
|
|
|
raise AnalyzeLayerRetryException
|
2017-03-01 03:12:44 +00:00
|
|
|
except (requests.exceptions.RequestException) as re:
|
2016-12-15 21:27:24 +00:00
|
|
|
logger.exception('Failed to post layer data response for %s: %s', layer.id, re)
|
|
|
|
raise AnalyzeLayerException
|
2017-03-01 03:12:44 +00:00
|
|
|
except Non200ResponseException as ex:
|
|
|
|
message = _response_json(request, ex.response).get('Error').get('Message', '')
|
2016-02-24 21:01:27 +00:00
|
|
|
logger.warning('A warning event occurred when analyzing layer %s (status code %s): %s',
|
2017-03-01 03:12:44 +00:00
|
|
|
request['Layer']['Name'], ex.response.status_code, message)
|
2016-02-24 21:01:27 +00:00
|
|
|
# 400 means the layer could not be analyzed due to a bad request.
|
2017-03-01 03:12:44 +00:00
|
|
|
if ex.response.status_code == 400:
|
2016-12-15 21:27:24 +00:00
|
|
|
if message == UNKNOWN_PARENT_LAYER_ERROR_MSG:
|
|
|
|
raise MissingParentLayerException('Bad request to security scanner: %s' % message)
|
|
|
|
else:
|
2017-04-21 20:52:47 +00:00
|
|
|
logger.exception('Got non-200 response for analyze of layer %s', layer.id)
|
2016-12-15 21:27:24 +00:00
|
|
|
raise AnalyzeLayerException('Bad request to security scanner: %s' % message)
|
2016-02-24 21:01:27 +00:00
|
|
|
# 422 means that the layer could not be analyzed:
|
2016-12-15 21:27:24 +00:00
|
|
|
# - the layer could not be extracted (might be a manifest or an invalid .tar.gz)
|
2016-02-24 21:01:27 +00:00
|
|
|
# - the layer operating system / package manager is unsupported
|
2017-03-01 03:12:44 +00:00
|
|
|
elif ex.response.status_code == 422:
|
2016-12-15 21:27:24 +00:00
|
|
|
raise InvalidLayerException
|
2017-04-21 20:52:47 +00:00
|
|
|
|
2016-12-15 21:27:24 +00:00
|
|
|
# Otherwise, it is some other error and we should retry.
|
2017-03-01 03:12:44 +00:00
|
|
|
raise AnalyzeLayerRetryException
|
2016-02-24 21:01:27 +00:00
|
|
|
|
2016-12-15 21:27:24 +00:00
|
|
|
# Return the parsed API version.
|
2017-03-01 03:12:44 +00:00
|
|
|
return _response_json(request, response)['Layer']['IndexedByVersion']
|
2016-02-24 21:01:27 +00:00
|
|
|
|
2016-02-25 20:58:42 +00:00
|
|
|
def check_layer_vulnerable(self, layer_id, cve_name):
|
|
|
|
""" Checks to see if the layer with the given ID is vulnerable to the specified CVE. """
|
|
|
|
layer_data = self._get_layer_data(layer_id, include_vulnerabilities=True)
|
|
|
|
if layer_data is None or 'Layer' not in layer_data or 'Features' not in layer_data['Layer']:
|
|
|
|
return False
|
|
|
|
|
|
|
|
for feature in layer_data['Layer']['Features']:
|
|
|
|
for vuln in feature.get('Vulnerabilities', []):
|
|
|
|
if vuln['Name'] == cve_name:
|
|
|
|
return True
|
|
|
|
|
|
|
|
return False
|
|
|
|
|
2016-03-19 00:28:06 +00:00
|
|
|
  def get_notification(self, notification_name, layer_limit=100, page=None):
    """ Gets the data for a specific notification, with optional page token.
        Returns a tuple of the data (None on failure) and whether to retry.
    """
    try:
      params = {
        'limit': layer_limit
      }

      if page is not None:
        params['page'] = page

      response = self._call('GET', _API_METHOD_GET_NOTIFICATION % notification_name, params=params)
      json_response = response.json()
    except requests.exceptions.Timeout:
      # Transient: ask the caller to retry.
      logger.exception('Timeout when trying to get notification for %s', notification_name)
      return None, True
    except requests.exceptions.ConnectionError:
      # Transient: ask the caller to retry.
      logger.exception('Connection error when trying to get notification for %s', notification_name)
      return None, True
    except (requests.exceptions.RequestException, ValueError):
      # Permanent request or decode failure: do not retry.
      logger.exception('Failed to get notification for %s', notification_name)
      return None, False
    except Non200ResponseException as ex:
      # Retry unless the scanner said the notification doesn't exist (404) or
      # the request was malformed (400).
      return None, ex.response.status_code != 404 and ex.response.status_code != 400

    return json_response, False
|
|
|
|
|
|
|
|
def mark_notification_read(self, notification_name):
|
|
|
|
""" Marks a security scanner notification as read. """
|
|
|
|
try:
|
2017-03-01 03:12:44 +00:00
|
|
|
self._call('DELETE', _API_METHOD_MARK_NOTIFICATION_READ % notification_name)
|
|
|
|
return True
|
|
|
|
except Non200ResponseException:
|
|
|
|
return False
|
2016-02-25 20:58:42 +00:00
|
|
|
except requests.exceptions.RequestException:
|
|
|
|
logger.exception('Failed to mark notification as read: %s', notification_name)
|
|
|
|
return False
|
|
|
|
|
2016-02-24 21:01:27 +00:00
|
|
|
def get_layer_data(self, layer, include_features=False, include_vulnerabilities=False):
|
|
|
|
""" Returns the layer data for the specified layer. On error, returns None. """
|
2016-12-22 19:55:26 +00:00
|
|
|
layer_id = compute_layer_id(layer)
|
2018-12-04 16:56:06 +00:00
|
|
|
if layer_id is None:
|
|
|
|
return None
|
|
|
|
|
2016-02-25 20:58:42 +00:00
|
|
|
return self._get_layer_data(layer_id, include_features, include_vulnerabilities)
|
|
|
|
|
|
|
|
  def _get_layer_data(self, layer_id, include_features=False, include_vulnerabilities=False):
    """ Fetches and returns the layer data for the given scanner layer ID.

        Returns None when the layer is unknown (404) or the response cannot be
        decoded; raises APIRequestFailure on any other request failure.
    """
    params = {}
    if include_features:
      params = {'features': True}

    # NOTE(review): when both flags are set, this replaces the 'features'
    # param instead of sending both — presumably intentional because the
    # vulnerabilities view also carries features; confirm against the API.
    if include_vulnerabilities:
      params = {'vulnerabilities': True}

    try:
      response = self._call('GET', _API_METHOD_GET_LAYER % layer_id, params=params)
      logger.debug('Got response %s for vulnerabilities for layer %s',
                   response.status_code, layer_id)
      try:
        return response.json()
      except ValueError:
        logger.exception('Failed to decode response JSON')
        return None

    except Non200ResponseException as ex:
      logger.debug('Got failed response %s for vulnerabilities for layer %s',
                   ex.response.status_code, layer_id)
      if ex.response.status_code == 404:
        # Unknown layer: not an error, just no data.
        return None
      else:
        logger.error(
          'downstream security service failure: status %d, text: %s',
          ex.response.status_code,
          ex.response.text,
        )
        if ex.response.status_code // 100 == 5:
          raise APIRequestFailure('Downstream service returned 5xx')
        else:
          raise APIRequestFailure('Downstream service returned non-200')
    except requests.exceptions.Timeout:
      raise APIRequestFailure('API call timed out')
    except requests.exceptions.ConnectionError:
      raise APIRequestFailure('Could not connect to security service')
    except requests.exceptions.RequestException:
      logger.exception('Failed to get layer data response for %s', layer_id)
      raise APIRequestFailure()
|
|
|
|
|
2015-11-09 22:12:22 +00:00
|
|
|
|
2017-01-23 19:36:19 +00:00
|
|
|
  def _request(self, method, endpoint, path, body, params, timeout):
    """ Issues an HTTP request to the security endpoint.

        The request is routed through the configured JWTPROXY_SIGNER proxy and
        verified against the MITM certificate. Raises Non200ResponseException
        for any non-2xx status; otherwise returns the response.
    """
    url = _join_api_url(endpoint, self._config.get('SECURITY_SCANNER_API_VERSION', 'v1'), path)
    signer_proxy_url = self._config.get('JWTPROXY_SIGNER', 'localhost:8080')

    logger.debug('%sing security URL %s', method.upper(), url)
    resp = self._client.request(method, url, json=body, params=params, timeout=timeout,
                                verify=MITM_CERT_PATH, headers=DEFAULT_HTTP_HEADERS,
                                proxies={'https': 'https://' + signer_proxy_url,
                                         'http': 'http://' + signer_proxy_url})
    # Treat every non-2xx status as an exceptional response.
    if resp.status_code // 100 != 2:
      raise Non200ResponseException(resp)
    return resp
|
2016-03-14 16:07:51 +00:00
|
|
|
|
2017-01-23 19:36:19 +00:00
|
|
|
  def _call(self, method, path, params=None, body=None):
    """ Issues an HTTP request to the security endpoint handling the logic of using an alternative
        BATCH endpoint for non-GET requests and failover for GET requests.
    """
    timeout = self._config.get('SECURITY_SCANNER_API_TIMEOUT_SECONDS', 1)
    endpoint = self._config['SECURITY_SCANNER_ENDPOINT']

    # Release the DB connection while the (potentially slow) HTTP call runs.
    with CloseForLongOperation(self._config):
      # If the request isn't a read, attempt to use a batch stack and do not fail over.
      if method != 'GET':
        if self._config.get('SECURITY_SCANNER_ENDPOINT_BATCH') is not None:
          endpoint = self._config['SECURITY_SCANNER_ENDPOINT_BATCH']
          timeout = self._config.get('SECURITY_SCANNER_API_BATCH_TIMEOUT_SECONDS') or timeout
        return self._request(method, endpoint, path, body, params, timeout)

      # The request is read-only and can failover.
      all_endpoints = [endpoint] + self._config.get('SECURITY_SCANNER_READONLY_FAILOVER_ENDPOINTS', [])
      # Build one (args, kwargs) pair per endpoint for the failover helper.
      return _failover_read_request(*[((self._request, endpoint, path, body, params, timeout), {})
                                      for endpoint in all_endpoints])
|
2017-01-23 19:36:19 +00:00
|
|
|
|
|
|
|
|
|
|
|
def _join_api_url(endpoint, api_version, path):
|
|
|
|
pathless_url = urljoin(endpoint, '/' + api_version) + '/'
|
|
|
|
return urljoin(pathless_url, path)
|
|
|
|
|
|
|
|
|
|
|
|
@failover
def _failover_read_request(request_fn, endpoint, path, body, params, timeout):
  """ This function auto-retries read-only requests until they return a 2xx status code. """
  try:
    return request_fn('GET', endpoint, path, body, params, timeout)
  except (requests.exceptions.RequestException, Non200ResponseException) as ex:
    # Wrap the failure so the @failover decorator advances to the next endpoint.
    raise FailoverException(ex)
|