Merge remote-tracking branch 'upstream/master' into python-registry-v2
commit 0459c3bc54
55 changed files with 1480 additions and 360 deletions
@@ -22,12 +22,19 @@ def notification_view(note):
   except:
     config = {}
 
+  event_config = {}
+  try:
+    event_config = json.loads(note.event_config_json)
+  except:
+    event_config = {}
+
   return {
     'uuid': note.uuid,
     'event': note.event.name,
     'method': note.method.name,
     'config': config,
     'title': note.title,
+    'event_config': event_config,
   }
@@ -160,7 +167,7 @@ class TestRepositoryNotification(RepositoryParamResource):
       raise NotFound()
 
     event_info = NotificationEvent.get_event(test_note.event.name)
-    sample_data = event_info.get_sample_data(repository=test_note.repository)
+    sample_data = event_info.get_sample_data(test_note)
     notification_data = build_notification_data(test_note, sample_data)
     notification_queue.put([test_note.repository.namespace_user.username, repository,
                             test_note.event.name], json.dumps(notification_data))
@@ -5,7 +5,7 @@ import features
 import json
 import requests
 
-from app import secscan_endpoint
+from app import secscan_api
 from data import model
 from endpoints.api import (require_repo_read, NotFound, DownstreamIssue, path_param,
                            RepositoryParamResource, resource, nickname, show_if, parse_args,
@@ -15,10 +15,17 @@ from endpoints.api import (require_repo_read, NotFound, DownstreamIssue, path_pa
 logger = logging.getLogger(__name__)
 
 
+class SCAN_STATUS(object):
+  """ Security scan status enum """
+  SCANNED = 'scanned'
+  FAILED = 'failed'
+  QUEUED = 'queued'
+
+
 def _call_security_api(relative_url, *args, **kwargs):
   """ Issues an HTTP call to the sec API at the given relative URL. """
   try:
-    response = secscan_endpoint.call_api(relative_url, *args, **kwargs)
+    response = secscan_api.call(relative_url, None, *args, **kwargs)
   except requests.exceptions.Timeout:
     raise DownstreamIssue(payload=dict(message='API call timed out'))
   except requests.exceptions.ConnectionError:
@@ -39,37 +46,44 @@ def _call_security_api(relative_url, *args, **kwargs):
   return response_data
 
 
+def _get_status(repo_image):
+  if repo_image.security_indexed_engine is not None and repo_image.security_indexed_engine >= 0:
+    return SCAN_STATUS.SCANNED if repo_image.security_indexed else SCAN_STATUS.FAILED
+
+  return SCAN_STATUS.QUEUED
+
+
 @show_if(features.SECURITY_SCANNER)
-@resource('/v1/repository/<repopath:repository>/tag/<tag>/vulnerabilities')
+@resource('/v1/repository/<repopath:repository>/image/<imageid>/vulnerabilities')
 @path_param('repository', 'The full path of the repository. e.g. namespace/name')
-@path_param('tag', 'The name of the tag')
-class RepositoryTagVulnerabilities(RepositoryParamResource):
-  """ Operations for managing the vulnerabilities in a repository tag. """
+@path_param('imageid', 'The image ID')
+class RepositoryImageVulnerabilities(RepositoryParamResource):
+  """ Operations for managing the vulnerabilities in a repository image. """
 
   @require_repo_read
-  @nickname('getRepoTagVulnerabilities')
+  @nickname('getRepoImageVulnerabilities')
   @parse_args
   @query_param('minimumPriority', 'Minimum vulnerability priority', type=str,
                default='Low')
-  def get(self, args, namespace, repository, tag):
+  def get(self, args, namespace, repository, imageid):
     """ Fetches the vulnerabilities (if any) for a repository tag. """
-    try:
-      tag_image = model.tag.get_tag_image(namespace, repository, tag)
-    except model.DataModelException:
+    repo_image = model.image.get_repo_image(namespace, repository, imageid)
+    if repo_image is None:
       raise NotFound()
 
-    if not tag_image.security_indexed:
-      logger.debug('Image %s for tag %s under repository %s/%s not security indexed',
-                   tag_image.docker_image_id, tag, namespace, repository)
+    if not repo_image.security_indexed:
+      logger.debug('Image %s under repository %s/%s not security indexed',
+                   repo_image.docker_image_id, namespace, repository)
       return {
-        'security_indexed': False
+        'status': _get_status(repo_image),
       }
 
-    data = _call_security_api('layers/%s/vulnerabilities', tag_image.docker_image_id,
+    layer_id = '%s.%s' % (repo_image.docker_image_id, repo_image.storage.uuid)
+    data = _call_security_api('layers/%s/vulnerabilities', layer_id,
                               minimumPriority=args.minimumPriority)
 
     return {
       'security_indexed': True,
+      'status': _get_status(repo_image),
       'data': data,
     }
@@ -91,13 +105,14 @@ class RepositoryImagePackages(RepositoryParamResource):
 
     if not repo_image.security_indexed:
       return {
-        'security_indexed': False
+        'status': _get_status(repo_image),
       }
 
-    data = _call_security_api('layers/%s/packages/diff', repo_image.docker_image_id)
+    layer_id = '%s.%s' % (repo_image.docker_image_id, repo_image.storage.uuid)
+    data = _call_security_api('layers/%s/packages', layer_id)
 
     return {
       'security_indexed': True,
+      'status': _get_status(repo_image),
       'data': data,
     }
@@ -22,6 +22,7 @@ from werkzeug.routing import BaseConverter
 from functools import wraps
 from config import frontend_visible_config
 from external_libraries import get_external_javascript, get_external_css
+from util.secscan.api import PRIORITY_LEVELS
 
 import features
 
@@ -183,6 +184,7 @@ def render_page_template(name, **kwargs):
                           config_set=json.dumps(frontend_visible_config(app.config)),
                           oauth_set=json.dumps(get_oauth_config()),
                           scope_set=json.dumps(scopes.app_scopes(app.config)),
+                          vuln_priority_set=json.dumps(PRIORITY_LEVELS),
                           mixpanel_key=app.config.get('MIXPANEL_KEY', ''),
                           google_analytics_key=app.config.get('GOOGLE_ANALYTICS_KEY', ''),
                           sentry_public_dsn=app.config.get('SENTRY_PUBLIC_DSN', ''),
@@ -1,9 +1,11 @@
 import logging
 import time
+import json
 
 from datetime import datetime
 from notificationhelper import build_event_data
 from util.jinjautil import get_template_env
+from util.secscan.api import PRIORITY_LEVELS, get_priority_for_index
 
 template_env = get_template_env("events")
 logger = logging.getLogger(__name__)
@@ -37,13 +39,18 @@ class NotificationEvent(object):
       'notification_data': notification_data
     })
 
-  def get_sample_data(self, repository=None):
+  def get_sample_data(self, notification):
     """
-    Returns sample data for testing the raising of this notification, with an optional
-    repository.
+    Returns sample data for testing the raising of this notification, with an example notification.
     """
     raise NotImplementedError
 
+  def should_perform(self, event_data, notification_data):
+    """
+    Whether a notification for this event should be performed. By default returns True.
+    """
+    return True
+
   @classmethod
   def event_name(cls):
     """
@@ -71,8 +78,8 @@ class RepoPushEvent(NotificationEvent):
   def get_summary(self, event_data, notification_data):
     return 'Repository %s updated' % (event_data['repository'])
 
-  def get_sample_data(self, repository):
-    return build_event_data(repository, {
+  def get_sample_data(self, notification):
+    return build_event_data(notification.repository, {
       'updated_tags': {'latest': 'someimageid', 'foo': 'anotherimage'},
       'pruned_image_count': 3
     })
@@ -99,18 +106,27 @@ class VulnerabilityFoundEvent(NotificationEvent):
 
     return 'info'
 
-  def get_sample_data(self, repository):
-    return build_event_data(repository, {
+  def get_sample_data(self, notification):
+    event_config = json.loads(notification.event_config_json)
+
+    return build_event_data(notification.repository, {
       'tags': ['latest', 'prod'],
       'image': 'some-image-id',
       'vulnerability': {
         'id': 'CVE-FAKE-CVE',
         'description': 'A futurist vulnerability',
         'link': 'https://security-tracker.debian.org/tracker/CVE-FAKE-CVE',
-        'priority': 'Critical',
+        'priority': get_priority_for_index(event_config['level'])
       },
     })
 
+  def should_perform(self, event_data, notification_data):
+    event_config = json.loads(notification_data.event_config_json)
+    expected_level_index = event_config['level']
+    priority = PRIORITY_LEVELS[event_data['vulnerability']['priority']]
+    actual_level_index = priority['index']
+    return expected_level_index <= actual_level_index
+
   def get_summary(self, event_data, notification_data):
     msg = '%s vulnerability detected in repository %s in tags %s'
     return msg % (event_data['vulnerability']['priority'],
@@ -126,10 +142,10 @@ class BuildQueueEvent(NotificationEvent):
   def get_level(self, event_data, notification_data):
     return 'info'
 
-  def get_sample_data(self, repository):
+  def get_sample_data(self, notification):
     build_uuid = 'fake-build-id'
 
-    return build_event_data(repository, {
+    return build_event_data(notification.repository, {
       'is_manual': False,
       'build_id': build_uuid,
       'build_name': 'some-fake-build',
@@ -165,10 +181,10 @@ class BuildStartEvent(NotificationEvent):
   def get_level(self, event_data, notification_data):
     return 'info'
 
-  def get_sample_data(self, repository):
+  def get_sample_data(self, notification):
     build_uuid = 'fake-build-id'
 
-    return build_event_data(repository, {
+    return build_event_data(notification.repository, {
       'build_id': build_uuid,
       'build_name': 'some-fake-build',
       'docker_tags': ['latest', 'foo', 'bar'],
@@ -193,10 +209,10 @@ class BuildSuccessEvent(NotificationEvent):
   def get_level(self, event_data, notification_data):
     return 'success'
 
-  def get_sample_data(self, repository):
+  def get_sample_data(self, notification):
     build_uuid = 'fake-build-id'
 
-    return build_event_data(repository, {
+    return build_event_data(notification.repository, {
       'build_id': build_uuid,
       'build_name': 'some-fake-build',
       'docker_tags': ['latest', 'foo', 'bar'],
@@ -222,10 +238,10 @@ class BuildFailureEvent(NotificationEvent):
   def get_level(self, event_data, notification_data):
     return 'error'
 
-  def get_sample_data(self, repository):
+  def get_sample_data(self, notification):
     build_uuid = 'fake-build-id'
 
-    return build_event_data(repository, {
+    return build_event_data(notification.repository, {
       'build_id': build_uuid,
       'build_name': 'some-fake-build',
       'docker_tags': ['latest', 'foo', 'bar'],
endpoints/secscan.py (new file, 25 lines added)
@@ -0,0 +1,25 @@
+import logging
+import json
+
+import features
+
+from app import secscan_notification_queue
+from flask import request, make_response, Blueprint
+from endpoints.common import route_show_if
+
+logger = logging.getLogger(__name__)
+secscan = Blueprint('secscan', __name__)
+
+@route_show_if(features.SECURITY_SCANNER)
+@secscan.route('/notification', methods=['POST'])
+def secscan_notification():
+  data = request.get_json()
+  logger.debug('Got notification from Clair: %s', data)
+
+  content = data['Content']
+  layer_ids = content.get('NewIntroducingLayersIDs', content.get('IntroducingLayersIDs', []))
+  if not layer_ids:
+    return make_response('Okay')
+
+  secscan_notification_queue.put(['notification', data['Name']], json.dumps(data))
+  return make_response('Okay')
@@ -7,7 +7,7 @@ from functools import wraps
 from datetime import datetime
 from time import time
 
-from app import storage as store, image_diff_queue, image_replication_queue, app
+from app import storage as store, image_replication_queue, app
 from auth.auth import process_auth, extract_namespace_repo_from_session
 from auth.auth_context import get_authenticated_user, get_grant_user_context
 from digest import checksums
@@ -41,20 +41,9 @@ def _finish_image(namespace, repository, repo_image):
   # Checksum is ok, we remove the marker
   set_uploading_flag(repo_image, False)
 
-  image_id = repo_image.docker_image_id
-
-  # The layer is ready for download, send a job to the work queue to
-  # process it.
-  logger.debug('Adding layer to diff queue')
-  repo = model.repository.get_repository(namespace, repository)
-  image_diff_queue.put([repo.namespace_user.username, repository, image_id], json.dumps({
-    'namespace_user_id': repo.namespace_user.id,
-    'repository': repository,
-    'image_id': image_id,
-  }))
-
   # Send a job to the work queue to replicate the image layer.
   if features.STORAGE_REPLICATION:
+    repo = model.repository.get_repository(namespace, repository)
     image_replication_queue.put([repo_image.storage.uuid], json.dumps({
       'namespace_user_id': repo.namespace_user.id,
       'storage_id': repo_image.storage.uuid,