2016-05-02 22:10:16 +00:00
|
|
|
import hashlib
import numbers
import time
import urllib

import bencode
import jwt
import resumablehashlib

from cachetools import lru_cache

from app import app, instance_keys
|
2015-12-31 19:10:36 +00:00
|
|
|
|
|
|
|
|
2016-05-31 20:48:19 +00:00
|
|
|
# Tracker announce URL embedded into every generated torrent and used as the
# JWT audience claim.
ANNOUNCE_URL = app.config['BITTORRENT_ANNOUNCE_URL']

# Secret pepper mixed into per-user torrent filenames so they cannot be
# derived from the blob UUID alone.
FILENAME_PEPPER = app.config['BITTORRENT_FILENAME_PEPPER']

# Human-readable registry title recorded in the torrent's 'created by' field.
REGISTRY_TITLE = app.config['REGISTRY_TITLE']
|
2015-12-31 19:27:38 +00:00
|
|
|
|
2016-08-03 17:54:14 +00:00
|
|
|
|
2016-01-07 19:15:32 +00:00
|
|
|
@lru_cache(maxsize=1)
|
|
|
|
def _load_private_key(private_key_file_path):
|
|
|
|
with open(private_key_file_path) as private_key_file:
|
|
|
|
return private_key_file.read()
|
|
|
|
|
|
|
|
def _torrent_jwt(info_dict):
  """ Returns a signed JWT authorizing tracker announces for the torrent whose
      bencoded info dictionary is info_dict.
  """
  claims = dict(
    iss=instance_keys.service_name,
    aud=ANNOUNCE_URL,
    infohash=_infohash(info_dict),
  )
  return jwt.encode(claims, instance_keys.local_private_key, 'RS256')
|
2016-01-07 19:15:32 +00:00
|
|
|
|
|
|
|
def _infohash(infodict):
  """ Returns the URL-quoted SHA-1 digest of the bencoded info dictionary. """
  encoded_info = bencode.bencode(infodict)
  return urllib.quote(hashlib.sha1(encoded_info).digest())
|
2015-12-31 19:10:36 +00:00
|
|
|
|
|
|
|
def make_torrent(name, webseed, length, piece_length, pieces):
  """ Returns the bencoded contents of a private torrent file for the described
      blob, with a JWT-authenticated announce URL and the given webseed.
  """
  info_dict = {
    'name': name,
    'length': length,
    'piece length': piece_length,
    'pieces': pieces,
    'private': 1,
  }

  # The tracker validates announces via the JWT appended to the announce URL.
  announce_with_jwt = ANNOUNCE_URL + "?jwt=" + _torrent_jwt(info_dict)

  torrent_dict = {
    'announce': announce_with_jwt,
    'url-list': webseed,
    'encoding': 'UTF-8',
    'created by': REGISTRY_TITLE,
    'creation date': int(time.time()),
    'info': info_dict,
  }
  return bencode.bencode(torrent_dict)
|
|
|
|
|
2016-01-07 19:15:32 +00:00
|
|
|
def public_torrent_filename(blob_uuid):
  """ Returns the filename for the public torrent of the given blob UUID. """
  digest = hashlib.sha256(blob_uuid)
  return digest.hexdigest()
|
|
|
|
|
|
|
|
def per_user_torrent_filename(user_uuid, blob_uuid):
  """ Returns the per-user torrent filename for the given user and blob UUIDs,
      peppered so the name cannot be derived from the blob UUID alone.
  """
  seed = FILENAME_PEPPER + "||" + blob_uuid + "||" + user_uuid
  digest = hashlib.sha256(seed)
  return digest.hexdigest()
|
2016-01-07 19:15:32 +00:00
|
|
|
|
2015-12-30 22:19:19 +00:00
|
|
|
|
|
|
|
class PieceHasher(object):
  """ Utility for computing torrent piece hashes as the data flows through the update
      method of this class. Users should get the final value by calling final_piece_hashes
      since new chunks are allocated lazily.
  """
  def __init__(self, piece_size, starting_offset=0, starting_piece_hash_bytes=b'',
               hash_fragment_to_resume=None):
    """ piece_size: byte length of each torrent piece (must be an integer).
        starting_offset: absolute byte offset at which hashing resumes.
        starting_piece_hash_bytes: concatenated SHA-1 digests of pieces already
          completed before this hasher was constructed.
        hash_fragment_to_resume: an in-progress SHA-1 hasher for the current
          (partial) piece; a fresh one is created when None.
        Raises TypeError when piece_size or starting_offset is not an integer.
    """
    # numbers.Integral accepts both int and long on Python 2 (as the original
    # (int, long) check did) and plain int on Python 3.
    if not isinstance(starting_offset, numbers.Integral):
      raise TypeError('starting_offset must be an integer')
    elif not isinstance(piece_size, numbers.Integral):
      raise TypeError('piece_size must be an integer')

    self._current_offset = starting_offset
    self._piece_size = piece_size
    self._piece_hashes = bytearray(starting_piece_hash_bytes)

    if hash_fragment_to_resume is None:
      self._hash_fragment = resumablehashlib.sha1()
    else:
      self._hash_fragment = hash_fragment_to_resume

  def update(self, buf):
    """ Feeds the given buffer into the hasher, splitting it across piece
        boundaries as needed.
    """
    buf_offset = 0
    while buf_offset < len(buf):
      # Take only the bytes remaining in the current piece, starting at our
      # current position within the buffer. BUGFIX: this previously sliced
      # buf[0:...], which re-hashed the head of the buffer whenever a single
      # update call crossed a piece boundary.
      buf_bytes_to_hash = buf[buf_offset:buf_offset + self._piece_length_remaining()]
      to_hash_len = len(buf_bytes_to_hash)

      if self._piece_offset() == 0 and to_hash_len > 0 and self._current_offset > 0:
        # We are opening a new piece: flush the completed piece's digest.
        self._piece_hashes.extend(self._hash_fragment.digest())
        self._hash_fragment = resumablehashlib.sha1()

      self._hash_fragment.update(buf_bytes_to_hash)
      self._current_offset += to_hash_len
      buf_offset += to_hash_len

  @property
  def hashed_bytes(self):
    """ Total number of bytes hashed so far (including the starting offset). """
    return self._current_offset

  def _piece_length_remaining(self):
    """ Returns the number of bytes left before the current piece is full. """
    return self._piece_size - (self._current_offset % self._piece_size)

  def _piece_offset(self):
    """ Returns the offset of the current position within the current piece. """
    return self._current_offset % self._piece_size

  @property
  def piece_hashes(self):
    """ Concatenated digests of all fully-completed pieces, as a bytearray. """
    return self._piece_hashes

  @property
  def hash_fragment(self):
    """ The in-progress hasher for the current (partial) piece. """
    return self._hash_fragment

  def final_piece_hashes(self):
    """ Returns all piece digests, including that of the current partial piece. """
    return self._piece_hashes + self._hash_fragment.digest()
|