This repository has been archived on 2020-03-24. You can view files and clone it, but cannot push or open issues or pull requests.
quay/util/registry/torrent.py
2016-01-20 18:15:32 -05:00

120 lines
3.4 KiB
Python

import time
import hashlib
import urllib
import bencode
import resumablehashlib
import jwt
from cachetools import lru_cache
from app import app
# Base URL of the BitTorrent tracker; used as the announce URL and as the
# JWT audience ('aud') claim for announce tokens.
ANNOUNCE_URL = app.config.get('TORRENT_ANNOUNCE_URL')
# Secret salt mixed into per-user torrent filenames so a filename cannot be
# derived from the blob UUID alone.
NAMING_SALT = app.config.get('TORRENT_NAMING_SALT')
# Human-readable registry name, embedded in the torrent's 'created by' field.
REGISTRY_TITLE = app.config.get('REGISTRY_TITLE')
# Issuer ('iss') claim placed in the announce JWT.
JWT_ISSUER = app.config.get('JWT_AUTH_TOKEN_ISSUER')
@lru_cache(maxsize=1)
def _load_private_key(private_key_file_path):
with open(private_key_file_path) as private_key_file:
return private_key_file.read()
def _torrent_jwt(info_dict):
  """ Build an RS256-signed JWT authorizing tracker announces for the torrent
      described by the given info dict.
  """
  signing_key = _load_private_key(app.config['JWT_AUTH_PRIVATE_KEY_PATH'])
  claims = {
    'iss': JWT_ISSUER,
    'aud': ANNOUNCE_URL,
    'infohash': _infohash(info_dict),
  }
  return jwt.encode(claims, signing_key, 'RS256')
def _infohash(infodict):
  """ Return the URL-quoted SHA-1 infohash of the bencoded info dict. """
  encoded_info = bencode.bencode(infodict)
  return urllib.quote(hashlib.sha1(encoded_info).digest())
def make_torrent(name, webseed, length, piece_length, pieces):
  """ Build and return a bencoded .torrent file body for a single file.

  name/length/piece_length/pieces populate the torrent's info dict; webseed
  becomes the 'url-list' entry. The announce URL carries a signed JWT
  authorizing announces for this torrent's infohash.
  """
  info = {
    'name': name,
    'length': length,
    'piece length': piece_length,
    'pieces': pieces,
  }
  announce_with_token = ANNOUNCE_URL + "?jwt=" + _torrent_jwt(info)
  torrent_dict = {
    'announce': announce_with_token,
    'url-list': webseed,
    'encoding': 'UTF-8',
    'created by': REGISTRY_TITLE,
    'creation date': int(time.time()),
    'info': info,
  }
  return bencode.bencode(torrent_dict)
def public_torrent_filename(blob_uuid):
  """ Return the deterministic, publicly shareable torrent filename for a blob:
      the hex SHA-256 of its UUID.
  """
  hasher = hashlib.sha256(blob_uuid)
  return hasher.hexdigest()
def per_user_torrent_filename(user_uuid, blob_uuid):
  """ Return the torrent filename for a blob as seen by one user: the hex
      SHA-256 of blob UUID + user UUID + the secret naming salt, so names
      differ per user and cannot be guessed without the salt.
  """
  salted_name = blob_uuid + user_uuid + NAMING_SALT
  return hashlib.sha256(salted_name).hexdigest()
class PieceHasher(object):
  """ Utility for computing torrent piece hashes as the data flows through the update
      method of this class. Users should get the final value by calling final_piece_hashes
      since new chunks are allocated lazily.
  """
  def __init__(self, piece_size, starting_offset=0, starting_piece_hash_bytes='',
               hash_fragment_to_resume=None):
    """ Args:
          piece_size: size in bytes of each torrent piece (must be an integer).
          starting_offset: absolute byte offset already hashed, for resuming.
          starting_piece_hash_bytes: concatenated SHA-1 digests of pieces
            completed before this resume point.
          hash_fragment_to_resume: in-progress resumable SHA-1 state for the
            current partial piece; a fresh one is created when None.

        Raises:
          TypeError: if piece_size or starting_offset is not an integer.
    """
    if not isinstance(starting_offset, (int, long)):
      raise TypeError('starting_offset must be an integer')
    elif not isinstance(piece_size, (int, long)):
      raise TypeError('piece_size must be an integer')

    self._current_offset = starting_offset
    self._piece_size = piece_size
    self._piece_hashes = bytearray(starting_piece_hash_bytes)

    if hash_fragment_to_resume is None:
      self._hash_fragment = resumablehashlib.sha1()
    else:
      self._hash_fragment = hash_fragment_to_resume

  def update(self, buf):
    """ Hash the bytes of buf, splitting them across piece boundaries and
        appending each completed piece's digest to the running piece hashes.
    """
    buf_offset = 0
    while buf_offset < len(buf):
      # BUGFIX: slice from the current position within buf, not from its
      # start. Previously this read buf[0:remaining] on every iteration, so a
      # buffer spanning a piece boundary re-hashed its head instead of
      # advancing through the remaining bytes.
      buf_bytes_to_hash = buf[buf_offset:buf_offset + self._piece_length_remaining()]
      to_hash_len = len(buf_bytes_to_hash)

      if self._piece_offset() == 0 and to_hash_len > 0 and self._current_offset > 0:
        # We are opening a new piece: flush the just-completed piece's digest.
        # NOTE(review): assumes resumablehashlib's digest() is a
        # non-destructive snapshot of the running state -- confirm.
        self._piece_hashes.extend(self._hash_fragment.digest())
        self._hash_fragment = resumablehashlib.sha1()

      self._hash_fragment.update(buf_bytes_to_hash)
      self._current_offset += to_hash_len
      buf_offset += to_hash_len

  @property
  def hashed_bytes(self):
    """ Total number of bytes hashed so far (including any starting offset). """
    return self._current_offset

  def _piece_length_remaining(self):
    # Bytes still needed to fill the current piece.
    return self._piece_size - (self._current_offset % self._piece_size)

  def _piece_offset(self):
    # Byte offset within the current (partial) piece.
    return self._current_offset % self._piece_size

  @property
  def piece_hashes(self):
    """ Concatenated digests of all fully-completed pieces (excludes the
        in-progress piece; see final_piece_hashes).
    """
    return self._piece_hashes

  @property
  def hash_fragment(self):
    """ The resumable hash state of the current partial piece, for persisting
        and later resuming via hash_fragment_to_resume.
    """
    return self._hash_fragment

  def final_piece_hashes(self):
    """ Return all piece digests, including the final (possibly partial) piece. """
    return self._piece_hashes + self._hash_fragment.digest()