Merge pull request #2183 from charltonaustin/metrics_for_unscanned_images
Add metrics around the Clair security scan.
commit 7b3d8e3977

4 changed files with 56 additions and 16 deletions
@@ -3,7 +3,7 @@ import dateutil.parser
 import hashlib
 import json
 
-from peewee import JOIN_LEFT_OUTER, IntegrityError
+from peewee import JOIN_LEFT_OUTER, IntegrityError, fn
 from datetime import datetime
 
 from data.model import (DataModelException, db_transaction, _basequery, storage,
@@ -471,6 +471,40 @@ def ensure_image_locations(*names):
     data = [{'name': name} for name in insert_names]
     ImageStorageLocation.insert_many(data).execute()
 
 
+def get_max_id_for_sec_scan():
+  """ Gets the maximum id for a clair sec scan """
+  return Image.select(fn.Max(Image.id)).scalar()
+
+
+def get_min_id_for_sec_scan(version):
+  """ Gets the minimum id for a clair sec scan """
+  return (Image
+          .select(fn.Min(Image.id))
+          .where(Image.security_indexed_engine < version)
+          .scalar())
+
+
+def total_image_count():
+  """ Returns the total number of images in the DB """
+  return Image.select().count()
+
+
+def get_image_id():
+  """ Returns the primary key field for the Image DB model """
+  return Image.id
+
+
+def get_images_eligible_for_scan(clair_version):
+  """ Returns a query that gives all images eligible for a clair scan """
+  return get_image_with_storage_and_parent_base().where(Image.security_indexed_engine < clair_version)
+
+
+def get_count_of_images_eligible_for_scan(clair_version):
+  """ Returns the number of images eligible for a clair scan """
+  return get_images_eligible_for_scan(clair_version).count()
+
+
 def get_image_with_storage_and_parent_base():
   Parent = Image.alias()
   ParentImageStorage = ImageStorage.alias()

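The helpers added to data.model.image above wrap the peewee aggregates (fn.Max, fn.Min, .count()) that the workers below previously built inline. The following is a minimal sketch of how they compose, assuming a configured Quay database connection; the hard-coded version 2 is a stand-in for app.config['SECURITY_SCANNER_ENGINE_VERSION_TARGET'] and is not part of the PR:

# Sketch only: assumes a configured Quay database connection.
from data.model.image import (total_image_count, get_count_of_images_eligible_for_scan,
                              get_min_id_for_sec_scan, get_max_id_for_sec_scan)

CLAIR_VERSION = 2  # assumed stand-in for SECURITY_SCANNER_ENGINE_VERSION_TARGET

backlog = get_count_of_images_eligible_for_scan(CLAIR_VERSION)
total = total_image_count()
print('%s of %s images still need a clair scan (id range: %s - %s)' % (
    backlog, total,
    get_min_id_for_sec_scan(CLAIR_VERSION),
    get_max_id_for_sec_scan(CLAIR_VERSION)))
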
@@ -20,7 +20,7 @@ BUILDER_START_TIME_BUCKETS = [.5, 1.0, 5.0, 10.0, 30.0, 60.0, 120.0, 180.0, 240.
 
 class MetricQueue(object):
   """ Object to which various metrics are written, for distribution to metrics collection
-      system(s) such Prometheus.
+      system(s) such as Prometheus.
   """
   def __init__(self, prom):
     # Define the various exported metrics.

@@ -1,10 +1,13 @@
 import logging
 import time
+import features
 
 from app import app, metric_queue
 from data.database import UseThenDisconnect
 from data import model
+from data.model.image import total_image_count, get_count_of_images_eligible_for_scan
 from util.locking import GlobalLock, LockNotAcquiredException
+from workers.securityworker import unscanned_images_gauge, images_gauge
 from workers.worker import Worker
 
 logger = logging.getLogger(__name__)
@@ -40,6 +43,13 @@ class GlobalPrometheusStatsWorker(Worker):
       metric_queue.org_count.Set(model.organization.get_active_org_count())
       metric_queue.robot_count.Set(model.user.get_robot_count())
 
+      if features.SECURITY_SCANNER:
+        # Clair repo counts.
+        unscanned_images_gauge.set(
+          get_count_of_images_eligible_for_scan(app.config.get('SECURITY_SCANNER_ENGINE_VERSION_TARGET', 2))
+        )
+        images_gauge.set(total_image_count())
+
 
 def main():
   logging.config.fileConfig('conf/logging_debug.conf', disable_existing_loggers=False)

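The stats worker above pushes the two new gauge values on each run, gated on features.SECURITY_SCANNER. The gauges themselves come from Quay's internal prometheus.create_gauge wrapper (see the workers.securityworker hunk below). For the gauge pattern in isolation, here is a stand-alone sketch using the open-source prometheus_client library; that library and the hard-coded values are illustrative assumptions, not what the PR ships:

# Illustration only: uses prometheus_client, not Quay's prometheus wrapper.
import time
from prometheus_client import Gauge, start_http_server

unscanned_images = Gauge('unscanned_images', 'Number of images that clair needs to scan.')
all_images = Gauge('all_images', 'Total number of images that clair can scan.')

start_http_server(9090)  # serve /metrics for Prometheus to scrape
while True:
  # In the PR these values come from get_count_of_images_eligible_for_scan()
  # and total_image_count(); constants keep this sketch self-contained.
  unscanned_images.set(12)
  all_images.set(340)
  time.sleep(30)
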
@@ -1,15 +1,13 @@
-import logging
 import logging.config
 import time
 
 import features
 
-from peewee import fn
-
-from app import app, secscan_api
+from app import app, secscan_api, prometheus
 from workers.worker import Worker
-from data.database import Image, UseThenDisconnect
-from data.model.image import get_image_with_storage_and_parent_base
+from data.database import UseThenDisconnect
+from data.model.image import (get_images_eligible_for_scan, get_max_id_for_sec_scan,
+                              get_min_id_for_sec_scan, get_image_id)
 from util.secscan.api import SecurityConfigValidator
 from util.secscan.analyzer import LayerAnalyzer
 from util.migrate.allocator import yield_random_entries
@@ -19,6 +17,8 @@ BATCH_SIZE = 50
 INDEXING_INTERVAL = 30
 
 logger = logging.getLogger(__name__)
+unscanned_images_gauge = prometheus.create_gauge('unscanned_images', 'Number of images that clair needs to scan.')
+images_gauge = prometheus.create_gauge('all_images', 'Total number of images that clair can scan.')
 
 class SecurityWorker(Worker):
   def __init__(self):
@@ -29,10 +29,7 @@ class SecurityWorker(Worker):
       self._analyzer = LayerAnalyzer(app.config, secscan_api)
 
       # Get the ID of the first image we want to analyze.
-      self._min_id = (Image
-                      .select(fn.Min(Image.id))
-                      .where(Image.security_indexed_engine < self._target_version)
-                      .scalar())
+      self._min_id = get_min_id_for_sec_scan(self._target_version)
 
       self.add_operation(self._index_images, INDEXING_INTERVAL)
     else:
@@ -40,17 +37,16 @@ class SecurityWorker(Worker):
 
   def _index_images(self):
     def batch_query():
-      base_query = get_image_with_storage_and_parent_base()
-      return base_query.where(Image.security_indexed_engine < self._target_version)
+      return get_images_eligible_for_scan(self._target_version)
 
     # Get the ID of the last image we can analyze. Will be None if there are no images in the
     # database.
-    max_id = Image.select(fn.Max(Image.id)).scalar()
+    max_id = get_max_id_for_sec_scan()
     if max_id is None:
       return
 
     with UseThenDisconnect(app.config):
-      for candidate, abt in yield_random_entries(batch_query, Image.id, BATCH_SIZE, max_id,
+      for candidate, abt in yield_random_entries(batch_query, get_image_id(), BATCH_SIZE, max_id,
                                                  self._min_id):
         _, continue_batch = self._analyzer.analyze_recursively(candidate)
         if not continue_batch:
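yield_random_entries (from util.migrate.allocator, untouched by this PR) is why get_image_id() is needed: the allocator takes the primary-key field plus the [min_id, max_id] window and hands back candidate rows in batches, along with an abort handle (abt). Below is a deliberately simplified, hypothetical sketch of that windowing idea; it is not the real allocator, which randomizes block selection:

# Hypothetical simplification; not the util.migrate.allocator implementation.
def iter_id_windows(batch_query, id_field, batch_size, max_id, min_id):
  """ Walk [min_id, max_id] in fixed-size id windows, yielding matching rows. """
  start = min_id or 0
  while start <= max_id:
    # batch_query() returns a peewee query; restrict it to the current window.
    window = batch_query().where((id_field >= start) & (id_field < start + batch_size))
    for row in window:
      yield row
    start += batch_size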