Merge pull request #1142 from coreos-inc/fixtorrentupload

Fix piece hash calculation
This commit is contained in:
josephschorr 2016-01-12 17:47:16 -05:00
commit b515548f11
3 changed files with 16 additions and 6 deletions

View file

@@ -1,6 +1,6 @@
 import logging
-from peewee import JOIN_LEFT_OUTER, fn, SQL
+from peewee import JOIN_LEFT_OUTER, fn, SQL, IntegrityError
 from data.model import (config, db_transaction, InvalidImageException, TorrentInfoDoesNotExist,
                         DataModelException, _basequery)
@@ -255,7 +255,11 @@ def get_storage_locations(uuid):
 def save_torrent_info(storage_object, piece_length, pieces):
-  TorrentInfo.create(storage=storage_object, piece_length=piece_length, pieces=pieces)
+  try:
+    TorrentInfo.create(storage=storage_object, piece_length=piece_length, pieces=pieces)
+  except IntegrityError:
+    # TorrentInfo already exists for this storage.
+    pass
 
 def get_torrent_info(blob):
   try:

View file

@@ -224,11 +224,17 @@ def _upload_chunk(namespace, repo_name, upload_uuid):
   input_fp = wrap_with_handler(input_fp, found.sha_state.update)
 
+  # Add a hasher for calculating SHA1s for torrents if this is the first chunk and/or we have
+  # already calculated hash data for the previous chunk(s).
   piece_hasher = None
-  # TODO remove this when all in-progress blob uploads reliably contain piece hashes
-  if start_offset == 0:
-    piece_hasher = PieceHasher(app.config['TORRENT_PIECE_SIZE'], start_offset, '',
-                               resumablehashlib.sha1())
+  if found.chunk_count == 0 or found.piece_sha_state:
+    initial_sha1_value = found.piece_sha_state or resumablehashlib.sha1()
+    initial_sha1_pieces_value = found.piece_hashes or ''
+
+    piece_hasher = PieceHasher(app.config['TORRENT_PIECE_SIZE'], start_offset,
+                               initial_sha1_pieces_value,
+                               initial_sha1_value)
     input_fp = wrap_with_handler(input_fp, piece_hasher.update)
 
   # If this is the first chunk and we're starting at the 0 offset, add a handler to gunzip the

Binary file not shown.