Merge pull request #3032 from coreos-inc/joseph.schorr/QUAY-885/squashed-sym
Retarget broken hard links in squashed images
commit 323eb63747
9 changed files with 368 additions and 254 deletions
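In a squashed image the layers are flattened into a single tar, so a hard link recorded in one layer can end up pointing at a path that does not survive the flattening, leaving a broken link in the output. This change has the tar formatter make additional passes over the layer data to retarget such links, and threads a reporter through so each pass is counted in metrics. Below is a minimal, self-contained sketch of one way to repair a broken hard link while squashing; it is illustrative only and is not Quay's actual TarLayerFormat code (squash_with_link_fixup and kept_paths are made-up names).

import io
import tarfile

def squash_with_link_fixup(src_path, dst_path, kept_paths):
  # `kept_paths`: the set of member names that survive the squash (hypothetical).
  with tarfile.open(src_path) as src, tarfile.open(dst_path, 'w') as dst:
    for member in src.getmembers():
      if member.name not in kept_paths:
        continue  # shadowed or deleted by a later layer
      if member.islnk() and member.linkname not in kept_paths:
        # Broken hard link: its target was squashed away. Rewrite the entry
        # as a regular file carrying the target's bytes instead.
        data = src.extractfile(member).read()  # tarfile resolves the link in-archive
        fixed = tarfile.TarInfo(member.name)
        fixed.size = len(data)
        dst.addfile(fixed, io.BytesIO(data))
      else:
        fileobj = src.extractfile(member) if member.isreg() else None
        dst.addfile(member, fileobj)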
@@ -21,6 +21,7 @@ from util.http import exact_abort
 from util.registry.filelike import wrap_with_handler
 from util.registry.queuefile import QueueFile
 from util.registry.queueprocess import QueueProcess
+from util.registry.tarlayerformat import TarLayerFormatterReporter
 from util.registry.torrent import (
   make_torrent, per_user_torrent_filename, public_torrent_filename, PieceHasher)

@@ -31,7 +32,15 @@ verbs = Blueprint('verbs', __name__)
 LAYER_MIMETYPE = 'binary/octet-stream'


-def _open_stream(formatter, repo_image, tag, derived_image_id, handlers):
+class VerbReporter(TarLayerFormatterReporter):
+  def __init__(self, kind):
+    self.kind = kind
+
+  def report_pass(self, pass_count):
+    metric_queue.verb_action_passes.Inc(labelvalues=[self.kind, pass_count])
+
+
+def _open_stream(formatter, repo_image, tag, derived_image_id, handlers, reporter):
   """
   This method generates a stream of data which will be replicated and read from the queue files.
   This method runs in a separate process.
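The VerbReporter above plugs into the tar formatter's new reporting hook, imported at the top of the file from util.registry.tarlayerformat. Judging from how VerbReporter overrides it, the base class presumably looks roughly like the sketch below (an assumption; the real definition lives in util/registry/tarlayerformat.py). report_pass is evidently invoked with the number of passes the formatter has made over the layer tars, so the verb_action_passes metric records how often a given verb needed extra passes.

# Assumed shape of the hook, inferred from the import and the override above;
# this is a guess, not the real definition.
class TarLayerFormatterReporter(object):
  def report_pass(self, pass_count):
    """Called by the formatter for each pass it makes over the layer tars."""
    pass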
@@ -45,19 +54,24 @@ def _open_stream(formatter, repo_image, tag, derived_image_id, handlers):
     for current_image in image_list:
       yield current_image

-  def get_next_layer():
-    # Re-Initialize the storage engine because some may not respond well to forking (e.g. S3)
-    store = Storage(app, metric_queue, config_provider=config_provider, ip_resolver=ip_resolver)
-    for current_image in image_list:
+  def image_stream_getter(store, current_image):
+    def get_stream_for_storage():
       current_image_path = model.get_blob_path(current_image.blob)
       current_image_stream = store.stream_read_file(current_image.blob.locations,
                                                     current_image_path)

       logger.debug('Returning image layer %s: %s', current_image.image_id, current_image_path)
-      yield current_image_stream
+      return current_image_stream
+    return get_stream_for_storage
+
+  def tar_stream_getter_iterator():
+    # Re-Initialize the storage engine because some may not respond well to forking (e.g. S3)
+    store = Storage(app, metric_queue, config_provider=config_provider, ip_resolver=ip_resolver)
+    for current_image in image_list:
+      yield image_stream_getter(store, current_image)

   stream = formatter.build_stream(repo_image, tag, derived_image_id, get_next_image,
-                                  get_next_layer)
+                                  tar_stream_getter_iterator, reporter=reporter)

   for handler_fn in handlers:
     stream = wrap_with_handler(stream, handler_fn)
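The heart of this hunk: get_next_layer was a generator that yielded already-open storage streams, which can be consumed only once. tar_stream_getter_iterator instead yields zero-argument callables; each call opens a fresh stream from storage, so the formatter can walk the layers again whenever a later pass (such as retargeting a broken hard link) must re-read earlier data. A hypothetical consumer of that contract, just to make the difference concrete (run_passes and process are made-up names):

# Illustrative only -- not build_stream itself. Because each getter re-opens
# its layer's stream, every pass sees the layer data from the beginning.
def run_passes(tar_stream_getter_iterator, num_passes, process):
  getters = list(tar_stream_getter_iterator())
  for pass_count in range(num_passes):
    for getter in getters:
      stream = getter()  # opens a fresh stream from storage
      try:
        process(stream, pass_count)  # `process` stands in for real per-pass work
      finally:
        stream.close()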
@@ -280,7 +294,8 @@ def _repo_verb(namespace, repository, tag, verb, formatter, sign=False, checker=
   # Create a queue process to generate the data. The queue files will read from the process
   # and send the results to the client and storage.
   handlers = [hasher.update]
-  args = (formatter, repo_image, tag, derived_image_id, handlers)
+  reporter = VerbReporter(verb)
+  args = (formatter, repo_image, tag, derived_image_id, handlers, reporter)
   queue_process = QueueProcess(
     _open_stream,
     8 * 1024,
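Per the comment at the top of this final hunk, QueueProcess runs _open_stream in a separate process and queue files fan its output out to both the client and storage. The sketch below illustrates that producer/fan-out idea with plain multiprocessing; it is not Quay's QueueProcess implementation, and the chunk values are stand-ins.

import multiprocessing

def producer(queues):
  # Stand-in for _open_stream's output: every consumer sees every chunk.
  for chunk in (b'layer-1', b'layer-2'):
    for q in queues:
      q.put(chunk)
  for q in queues:
    q.put(None)  # sentinel: end of stream

if __name__ == '__main__':
  client_q, storage_q = multiprocessing.Queue(), multiprocessing.Queue()
  proc = multiprocessing.Process(target=producer, args=([client_q, storage_q],))
  proc.start()
  while True:
    chunk = client_q.get()  # client-side reader; storage_q would be drained elsewhere
    if chunk is None:
      break
  proc.join()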