quay/util/registry/torrent.py

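"""
Utilities for building BitTorrent metadata for registry blobs: bencoded
.torrent generation with a JWT-protected announce URL, public and per-user
torrent filename derivation, and resumable SHA-1 piece hashing.
"""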
import time
import hashlib
import urllib

from cryptography.x509 import load_pem_x509_certificate
from cryptography.hazmat.backends import default_backend

import bencode
import resumablehashlib
import jwt

from cachetools import lru_cache

from app import app


ANNOUNCE_URL = app.config.get('TORRENT_ANNOUNCE_URL')
NAMING_SALT = app.config.get('TORRENT_NAMING_SALT')
REGISTRY_TITLE = app.config.get('REGISTRY_TITLE')
JWT_ISSUER = app.config.get('JWT_AUTH_TOKEN_ISSUER')

@lru_cache(maxsize=1)
def _load_certificate_bytes(certificate_file_path):
  # Parses the PEM certificate and returns its public key, which is embedded
  # in the JWT 'x5c' header below.
  with open(certificate_file_path) as cert_file:
    return load_pem_x509_certificate(cert_file.read(), default_backend()).public_key()


@lru_cache(maxsize=1)
def _load_private_key(private_key_file_path):
  with open(private_key_file_path) as private_key_file:
    return private_key_file.read()


def _torrent_jwt(info_dict):
  """ Returns a signed JWT binding the torrent's infohash to the announce service. """
  token_data = {
    'iss': JWT_ISSUER,
    'aud': ANNOUNCE_URL,
    'infohash': _infohash(info_dict),
  }

  certificate = _load_certificate_bytes(app.config['JWT_AUTH_CERTIFICATE_PATH'])
  token_headers = {
    'x5c': [certificate],
  }

  private_key = _load_private_key(app.config['JWT_AUTH_PRIVATE_KEY_PATH'])
  return jwt.encode(token_data, private_key, 'RS256', headers=token_headers)


def _infohash(infodict):
  """ Returns the URL-quoted SHA-1 digest of the bencoded info dictionary. """
  digest = hashlib.sha1()
  digest.update(bencode.bencode(infodict))
  return urllib.quote(digest.digest())

def make_torrent(name, webseed, length, piece_length, pieces):
  """ Builds and returns the bencoded contents of a single-file torrent for the given blob. """
  info_dict = {
    'name': name,
    'length': length,
    'piece length': piece_length,
    'pieces': pieces,
  }

  return bencode.bencode({
    'announce': ANNOUNCE_URL + "?jwt=" + _torrent_jwt(info_dict),
    'url-list': webseed,
    'encoding': 'UTF-8',
    'created by': REGISTRY_TITLE,
    'creation date': int(time.time()),
    'info': info_dict,
  })
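
# Illustrative usage sketch (the blob name, webseed URL, and piece size below are
# made up, not part of this module):
#
#   hasher = PieceHasher(piece_size=4 * 1024 * 1024)
#   hasher.update(blob_bytes)
#   torrent_contents = make_torrent('repo-blob', 'https://registry.example/webseed/blob',
#                                   len(blob_bytes), 4 * 1024 * 1024,
#                                   str(hasher.final_piece_hashes()))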

def public_torrent_filename(blob_uuid):
  return hashlib.sha256(blob_uuid).hexdigest()


def per_user_torrent_filename(user_uuid, blob_uuid):
  return hashlib.sha256(blob_uuid + user_uuid + NAMING_SALT).hexdigest()

class PieceHasher(object):
  """ Incrementally computes the concatenated SHA-1 piece hashes for the torrent
      'pieces' field; hashing can be suspended and resumed via the starting_*
      and hash_fragment_to_resume arguments. """

  def __init__(self, piece_size, starting_offset=0, starting_piece_hash_bytes='',
               hash_fragment_to_resume=None):
    if not isinstance(starting_offset, (int, long)):
      raise TypeError('starting_offset must be an integer')
    elif not isinstance(piece_size, (int, long)):
      raise TypeError('piece_size must be an integer')

    self._current_offset = starting_offset
    self._piece_size = piece_size
    self._piece_hashes = bytearray(starting_piece_hash_bytes)

    if hash_fragment_to_resume is None:
      self._hash_fragment = resumablehashlib.sha1()
    else:
      self._hash_fragment = hash_fragment_to_resume

  def update(self, buf):
    buf_offset = 0
    while buf_offset < len(buf):
      # Hash at most up to the end of the current piece, starting from our
      # position within the buffer.
      buf_bytes_to_hash = buf[buf_offset:buf_offset + self._piece_length_remaining()]
      to_hash_len = len(buf_bytes_to_hash)

      if self._piece_offset() == 0 and to_hash_len > 0 and self._current_offset > 0:
        # We are opening a new piece, so record the digest of the completed one.
        self._piece_hashes.extend(self._hash_fragment.digest())
        self._hash_fragment = resumablehashlib.sha1()

      self._hash_fragment.update(buf_bytes_to_hash)
      self._current_offset += to_hash_len
      buf_offset += to_hash_len

  def _piece_length_remaining(self):
    return self._piece_size - (self._current_offset % self._piece_size)

  def _piece_offset(self):
    return self._current_offset % self._piece_size

  @property
  def piece_hashes(self):
    return self._piece_hashes

  @property
  def hash_fragment(self):
    return self._hash_fragment

  def final_piece_hashes(self):
    return self._piece_hashes + self._hash_fragment.digest()
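
# Illustrative resumable-hashing sketch (chunk names, saved state variables, and
# piece size are made up):
#
#   hasher = PieceHasher(piece_size=1024 * 1024)
#   hasher.update(first_chunk)
#   # ... persist hasher.piece_hashes and hasher.hash_fragment between requests ...
#   hasher = PieceHasher(1024 * 1024, starting_offset=len(first_chunk),
#                        starting_piece_hash_bytes=str(saved_piece_hashes),
#                        hash_fragment_to_resume=saved_hash_fragment)
#   hasher.update(second_chunk)
#   pieces = str(hasher.final_piece_hashes())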