import copy
import json
import math
import calendar

from app import app
from image.common import TarImageFormatter
from util.registry.gzipwrap import GZIP_BUFFER_SIZE
from util.registry.streamlayerformat import StreamLayerMerger


class FileEstimationException(Exception):
  """
  Exception raised by stream_generator if the estimated size of the layer tar was lower
  than the actual size. This means the sent tar header is wrong, and we have to fail.
  """
  pass


class SquashedDockerImageFormatter(TarImageFormatter):
  """
  Image formatter which produces a squashed image compatible with the `docker load` command.
  """

  # Multiplier against the image size reported by Docker to account for the tar metadata.
  # Note: This multiplier was not formally calculated in any way and should be adjusted over
  # time if/when we encounter issues with it. Unfortunately, we cannot make it too large or the
  # Docker daemon dies when trying to load the entire tar into memory.
  SIZE_MULTIPLIER = 1.2
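  # e.g. when only the Docker-reported Size is available, a layer reporting 1000 bytes is
  # estimated at 1.2 * 1000 = 1200 bytes of tar data.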

  def stream_generator(self, repo_image, tag, synthetic_image_id, get_image_iterator,
                       tar_stream_getter_iterator, reporter=None):
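    # Yields a squashed image as a Docker load (V1) tar stream: the repositories index file,
    # the synthetic image's json and VERSION entries, and a single merged layer.tar.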
    image_mtime = 0
    created = next(get_image_iterator()).v1_metadata.created
    if created is not None:
      image_mtime = calendar.timegm(created.utctimetuple())

    # Docker import V1 Format (.tar):
    #  repositories - JSON file containing a repo -> tag -> image map
    #  {image ID folder}:
    #    json - The layer JSON
    #    layer.tar - The tarballed contents of the layer
    #    VERSION - The docker import version: '1.0'
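    # StreamLayerMerger combines the per-layer tar streams from tar_stream_getter_iterator into
    # the single squashed layer stream that is emitted as layer.tar below.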
    layer_merger = StreamLayerMerger(tar_stream_getter_iterator, reporter=reporter)

    # Yield the repositories file:
    synthetic_layer_info = {}
    synthetic_layer_info[tag + '.squash'] = synthetic_image_id

    hostname = app.config['SERVER_HOSTNAME']
    repositories = {}
    namespace = repo_image.repository.namespace_name
    repository = repo_image.repository.name
    repositories[hostname + '/' + namespace + '/' + repository] = synthetic_layer_info
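    # e.g. {"<hostname>/<namespace>/<repository>": {"<tag>.squash": "<synthetic_image_id>"}}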

    yield self.tar_file('repositories', json.dumps(repositories), mtime=image_mtime)

    # Yield the image ID folder.
    yield self.tar_folder(synthetic_image_id, mtime=image_mtime)

    # Yield the JSON layer data.
    layer_json = SquashedDockerImageFormatter._build_layer_json(repo_image, synthetic_image_id)
    yield self.tar_file(synthetic_image_id + '/json', json.dumps(layer_json), mtime=image_mtime)

    # Yield the VERSION file.
    yield self.tar_file(synthetic_image_id + '/VERSION', '1.0', mtime=image_mtime)

    # Yield the merged layer data's header.
    estimated_file_size = 0
    for image in get_image_iterator():
      # In V1 we have the actual uncompressed size, which is needed for back compat with
      # older versions of Docker.
      # In V2, we use the size given in the image JSON.
      if image.blob.uncompressed_size:
        estimated_file_size += image.blob.uncompressed_size
      else:
        image_json = image.compat_metadata
        estimated_file_size += (image_json.get('Size', 0) *
                                SquashedDockerImageFormatter.SIZE_MULTIPLIER)

    # Make sure the estimated file size is an integer number of bytes.
    estimated_file_size = int(math.ceil(estimated_file_size))
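
    # The tar header for layer.tar must be written before the merged contents are streamed, so
    # its length field uses this estimate; the actual streamed size is reconciled against it
    # below.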
    yield self.tar_file_header(synthetic_image_id + '/layer.tar', estimated_file_size,
                               mtime=image_mtime)

    # Yield the contents of the merged layer.
    yielded_size = 0
    for entry in layer_merger.get_generator():
      yield entry
      yielded_size += len(entry)

    # If the yielded size is more than the estimated size (which is unlikely but possible), then
    # raise an exception since the tar header will be wrong.
    if yielded_size > estimated_file_size:
      message = "Expected %s bytes, found %s bytes" % (estimated_file_size, yielded_size)
      raise FileEstimationException(message)

    # If the yielded size is less than the estimated size (which is likely), fill the rest with
    # zeros: the header above already declared the entry as estimated_file_size bytes, so the
    # contents must be padded out to exactly that length.
    if yielded_size < estimated_file_size:
      to_yield = estimated_file_size - yielded_size
      while to_yield > 0:
        yielded = min(to_yield, GZIP_BUFFER_SIZE)
        yield '\0' * yielded
        to_yield -= yielded

    # Yield any file padding to 512 bytes that is necessary.
    yield self.tar_file_padding(estimated_file_size)

    # Last two records are empty in tar spec.
    yield '\0' * 512
    yield '\0' * 512

  @staticmethod
  def _build_layer_json(repo_image, synthetic_image_id):
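    # Builds the V1 'json' entry for the synthetic image: copies repo_image's compat metadata,
    # rewrites its id to the synthetic image id, drops the parent pointer, and points any
    # config/container_config 'Image' references at the synthetic image id.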
    layer_json = repo_image.compat_metadata
    updated_json = copy.deepcopy(layer_json)
    updated_json['id'] = synthetic_image_id

    if 'parent' in updated_json:
      del updated_json['parent']

    if 'config' in updated_json and 'Image' in updated_json['config']:
      updated_json['config']['Image'] = synthetic_image_id

    if 'container_config' in updated_json and 'Image' in updated_json['container_config']:
      updated_json['container_config']['Image'] = synthetic_image_id

    return updated_json