Add a reporter for verbs to track the number of storage streams needed

Joseph Schorr 2018-03-23 14:39:38 -04:00
parent 110366f656
commit dd470bdc9d
7 changed files with 43 additions and 14 deletions


@@ -21,6 +21,7 @@ from util.http import exact_abort
 from util.registry.filelike import wrap_with_handler
 from util.registry.queuefile import QueueFile
 from util.registry.queueprocess import QueueProcess
+from util.registry.tarlayerformat import TarLayerFormatterReporter
 from util.registry.torrent import (
   make_torrent, per_user_torrent_filename, public_torrent_filename, PieceHasher)
 
@@ -32,7 +33,15 @@ license_validator.enforce_license_before_request(verbs)
 LAYER_MIMETYPE = 'binary/octet-stream'
 
 
-def _open_stream(formatter, repo_image, tag, derived_image_id, handlers):
+class VerbReporter(TarLayerFormatterReporter):
+  def __init__(self, kind):
+    self.kind = kind
+
+  def report_pass(self, pass_count):
+    metric_queue.verb_action_passes.Inc(labelvalues=[self.kind, pass_count])
+
+
+def _open_stream(formatter, repo_image, tag, derived_image_id, handlers, reporter):
   """
   This method generates a stream of data which will be replicated and read from the queue files.
   This method runs in a separate process.
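
VerbReporter subclasses TarLayerFormatterReporter, imported above from util/registry/tarlayerformat.py. That file is among the seven changed in this commit, but its hunks are not shown in this excerpt; the sketch below is an assumed shape of the base interface, not the commit's actual code:

# Hypothetical no-op base class assumed to live in util/registry/tarlayerformat.py.
class TarLayerFormatterReporter(object):
  def report_pass(self, pass_count):
    """Report how many storage-stream passes the tar formatter needed.

    The base implementation is a no-op; subclasses such as VerbReporter
    override it to emit a metric.
    """
    pass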
@@ -47,14 +56,14 @@ def _open_stream(formatter, repo_image, tag, derived_image_id, handlers):
     yield current_image
 
   def image_stream_getter(store, current_image):
-    def func():
+    def get_stream_for_storage():
       current_image_path = model.get_blob_path(current_image.blob)
       current_image_stream = store.stream_read_file(current_image.blob.locations,
                                                     current_image_path)
       logger.debug('Returning image layer %s: %s', current_image.image_id, current_image_path)
       return current_image_stream
-    return func
+    return get_stream_for_storage
 
   def tar_stream_getter_iterator():
     # Re-Initialize the storage engine because some may not respond well to forking (e.g. S3)
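
The rename from func to get_stream_for_storage is purely descriptive, but the image_stream_getter factory itself is what keeps each getter bound to its own store and current_image; a closure defined directly inside the loop in tar_stream_getter_iterator would late-bind and every getter would read the last image. A self-contained illustration of that pitfall (generic Python, nothing from the Quay codebase):

# Late binding: all three lambdas share the loop variable 'i'.
getters_bad = [lambda: i for i in range(3)]
print([g() for g in getters_bad])    # [2, 2, 2]

# Factory function: each call binds its own 'i', as image_stream_getter does
# with 'store' and 'current_image'.
def make_getter(i):
  def get():
    return i
  return get

getters_good = [make_getter(i) for i in range(3)]
print([g() for g in getters_good])   # [0, 1, 2]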
@@ -63,7 +72,7 @@ def _open_stream(formatter, repo_image, tag, derived_image_id, handlers):
       yield image_stream_getter(store, current_image)
 
   stream = formatter.build_stream(repo_image, tag, derived_image_id, get_next_image,
-                                  tar_stream_getter_iterator)
+                                  tar_stream_getter_iterator, reporter=reporter)
 
   for handler_fn in handlers:
     stream = wrap_with_handler(stream, handler_fn)
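
build_stream now threads the reporter through to the tar layer formatter. The formatter side of this change lives in tarlayerformat.py and is not shown in this excerpt; a hedged sketch of how such a call site might drive the reporter (hypothetical, assuming each getter yields one storage stream per pass):

# Hypothetical formatter loop: consume each storage stream, then report
# how many were needed via reporter.report_pass (the metric label above).
def build_stream_sketch(tar_stream_getter_iterator, reporter, chunk_size=64 * 1024):
  pass_count = 0
  for stream_getter in tar_stream_getter_iterator():
    pass_count += 1
    stream = stream_getter()
    for chunk in iter(lambda: stream.read(chunk_size), b''):
      yield chunk
  reporter.report_pass(pass_count)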
@@ -286,7 +295,8 @@ def _repo_verb(namespace, repository, tag, verb, formatter, sign=False, checker=
   # Create a queue process to generate the data. The queue files will read from the process
   # and send the results to the client and storage.
   handlers = [hasher.update]
-  args = (formatter, repo_image, tag, derived_image_id, handlers)
+  reporter = VerbReporter(verb)
+  args = (formatter, repo_image, tag, derived_image_id, handlers, reporter)
   queue_process = QueueProcess(
     _open_stream,
     8 * 1024,
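
End to end: _repo_verb builds a VerbReporter labeled with the verb (e.g. 'squash', 'aci', 'torrent'), passes it through the QueueProcess args into _open_stream in the child process, and the formatter reports pass counts into metric_queue.verb_action_passes. A quick self-contained check of the reporter's metric call, using stand-in fakes for Quay's metric queue:

# Hypothetical test doubles standing in for Quay's metric queue.
class FakeCounter(object):
  def __init__(self):
    self.calls = []

  def Inc(self, labelvalues=None):
    self.calls.append(labelvalues)

class FakeMetricQueue(object):
  def __init__(self):
    self.verb_action_passes = FakeCounter()

metric_queue = FakeMetricQueue()

class VerbReporter(object):  # mirrors the subclass added in this commit
  def __init__(self, kind):
    self.kind = kind

  def report_pass(self, pass_count):
    metric_queue.verb_action_passes.Inc(labelvalues=[self.kind, pass_count])

reporter = VerbReporter('squash')
reporter.report_pass(2)
assert metric_queue.verb_action_passes.calls == [['squash', 2]]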