Hash v1 uploads for torrent chunks
parent 44fcc7e44b
commit 8f80d7064b
6 changed files with 98 additions and 69 deletions
@@ -18,6 +18,7 @@ from auth.permissions import (ReadRepositoryPermission,
                                ModifyRepositoryPermission)
 from data import model, database
 from util.registry import gzipstream
+from util.registry.torrent import PieceHasher
 from endpoints.v1 import v1_bp
 from endpoints.decorators import anon_protect
 
@@ -214,6 +215,10 @@ def put_image_layer(namespace, repository, image_id):
   size_info, size_hndlr = gzipstream.calculate_size_handler()
   sr.add_handler(size_hndlr)
 
+  # Add a handler to hash the chunks of the upload for torrenting
+  piece_hasher = PieceHasher(app.config['TORRENT_PIECE_SIZE'])
+  sr.add_handler(piece_hasher.update)
+
   # Add a handler which computes the checksum.
   h, sum_hndlr = checksums.simple_checksum_handler(json_data)
   sr.add_handler(sum_hndlr)
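Note: a minimal sketch of the handler-fanout pattern this hunk relies on. Only sr.add_handler appears in the diff; the MinimalStreamReader class below is hypothetical and stands in for whatever stream reader the endpoint actually uses. The point is that every registered handler observes each chunk exactly once, so piece hashing runs alongside size accounting and checksumming without re-reading the upload.

class MinimalStreamReader(object):
  """Hypothetical stand-in for the stream reader referenced as `sr`."""

  def __init__(self, source):
    self._source = source
    self._handlers = []

  def add_handler(self, handler):
    # Each handler is a callable invoked with every chunk that is read.
    self._handlers.append(handler)

  def read(self, size=-1):
    buf = self._source.read(size)
    for handler in self._handlers:
      handler(buf)
    return buf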
@@ -231,8 +236,11 @@ def put_image_layer(namespace, repository, image_id):
     abort(520, 'Image %(image_id)s could not be written. Please try again.', image_id=image_id)
 
   # Save the size of the image.
-  model.image.set_image_size(image_id, namespace, repository, size_info.compressed_size,
-                             size_info.uncompressed_size)
+  updated_storage = model.storage.set_image_storage_metadata(image_id, namespace, repository,
+                                                             size_info.compressed_size,
+                                                             size_info.uncompressed_size)
+  pieces_bytes = piece_hasher.piece_hashes + piece_hasher.hash_fragment.digest()
+  model.storage.save_torrent_info(updated_storage, app.config['TORRENT_PIECE_SIZE'], pieces_bytes)
 
   # Append the computed checksum.
   csums = []
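Note: a sketch of what a PieceHasher-like handler could do; this is not the actual util.registry.torrent implementation, only an assumption inferred from the attribute names used above (update, piece_hashes, hash_fragment). BitTorrent-style, it records one SHA-1 digest per fixed-size piece of the streamed upload.

import hashlib

class SketchPieceHasher(object):
  """Hypothetical illustration of a fixed-size piece hasher."""

  def __init__(self, piece_size):
    self._piece_size = piece_size
    self._piece_offset = 0               # bytes hashed into the current piece
    self.piece_hashes = b''              # concatenated digests of completed pieces
    self.hash_fragment = hashlib.sha1()  # hash of the in-progress (partial) piece

  def update(self, buf):
    # Registered via sr.add_handler(piece_hasher.update); called with each chunk.
    while buf:
      remaining = self._piece_size - self._piece_offset
      to_hash, buf = buf[:remaining], buf[remaining:]
      self.hash_fragment.update(to_hash)
      self._piece_offset += len(to_hash)
      if self._piece_offset == self._piece_size:
        # Piece boundary reached: record the digest and start a new piece.
        self.piece_hashes += self.hash_fragment.digest()
        self.hash_fragment = hashlib.sha1()
        self._piece_offset = 0

Under this reading, piece_hasher.piece_hashes + piece_hasher.hash_fragment.digest() in the hunk above appends the digest of the trailing partial piece, and save_torrent_info persists the piece size together with the concatenated piece hashes so torrent metadata can be generated later.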