# quay/data/userfiles.py

import hashlib
import logging
import os
from uuid import uuid4

import boto
from boto.s3.key import Key
from flask import url_for

logger = logging.getLogger(__name__)


class FakeUserfiles(object):
  """ No-op implementation of the userfiles API, for tests and development. """

  def prepare_for_drop(self, mime_type):
    return ('http://fake/url', str(uuid4()))

  def store_file(self, file_like_obj, content_type):
    raise NotImplementedError()

  def get_file_url(self, file_id, expires_in=300):
    return 'http://fake/url'

  def get_file_checksum(self, file_id):
    return 'abcdefg'


class S3FileWriteException(Exception):
  pass


class S3Userfiles(object):
  def __init__(self, path, s3_access_key, s3_secret_key, bucket_name):
    self._initialized = False
    self._bucket_name = bucket_name
    self._access_key = s3_access_key
    self._secret_key = s3_secret_key
    self._prefix = path
    self._s3_conn = None
    self._bucket = None

  def _initialize_s3(self):
    # Connect lazily, so the object can be constructed at config-load time
    # before the credentials are needed.
    if not self._initialized:
      self._s3_conn = boto.connect_s3(self._access_key, self._secret_key)
      self._bucket = self._s3_conn.get_bucket(self._bucket_name)
      self._initialized = True

  def prepare_for_drop(self, mime_type):
    """ Returns a signed URL to upload a file to our bucket. """
    self._initialize_s3()
    logger.debug('Requested upload url with content type: %s', mime_type)
    file_id = str(uuid4())
    full_key = os.path.join(self._prefix, file_id)
    k = Key(self._bucket, full_key)
    url = k.generate_url(300, 'PUT', headers={'Content-Type': mime_type},
                         encrypt_key=True)
    return (url, file_id)

  def store_file(self, file_like_obj, content_type):
    self._initialize_s3()
    file_id = str(uuid4())
    full_key = os.path.join(self._prefix, file_id)
    k = Key(self._bucket, full_key)
    logger.debug('Setting s3 content type to: %s', content_type)
    k.set_metadata('Content-Type', content_type)
    bytes_written = k.set_contents_from_file(file_like_obj, encrypt_key=True,
                                             rewind=True)
    if bytes_written == 0:
      raise S3FileWriteException('Unable to write file to S3')
    return file_id

  def get_file_url(self, file_id, expires_in=300, mime_type=None):
    self._initialize_s3()
    full_key = os.path.join(self._prefix, file_id)
    k = Key(self._bucket, full_key)
    headers = None
    if mime_type:
      headers = {'Content-Type': mime_type}
    return k.generate_url(expires_in, headers=headers)

  def get_file_checksum(self, file_id):
    self._initialize_s3()
    full_key = os.path.join(self._prefix, file_id)
    k = self._bucket.lookup(full_key)
    # The ETag comes back quoted; strip the quotes and use the first seven
    # characters as a short checksum.
    return k.etag[1:-1][:7]
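

# Usage sketch (not part of the original module): how a caller might exercise
# the presigned-upload flow above. The `requests` call and the bucket/key
# values are illustrative assumptions, not Quay's actual client code.
#
#   userfiles = S3Userfiles('userfiles', access_key, secret_key, 'my-bucket')
#   url, file_id = userfiles.prepare_for_drop('text/plain')
#   # The client then PUTs directly to S3, using the same Content-Type the
#   # URL was signed for:
#   requests.put(url, data=b'file contents',
#                headers={'Content-Type': 'text/plain'})
#   checksum = userfiles.get_file_checksum(file_id)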


def upload_userfile_endpoint(file_id):
  # Placeholder views, registered by Userfiles.init_app for local storage.
  raise NotImplementedError()


def download_userfile_endpoint(file_id):
  raise NotImplementedError()


class LocalUserfiles(object):
  def __init__(self, path):
    self._root_path = path
    self._buffer_size = 64 * 1024  # 64 KB

  def prepare_for_drop(self, mime_type):
    file_id = str(uuid4())
    return (url_for('upload_userfile_endpoint', file_id=file_id), file_id)

  def store_file(self, file_like_obj, content_type):
    file_id = str(uuid4())
    path = os.path.join(self._root_path, file_id)
    # Write in binary mode; the incoming stream may not be text.
    with open(path, 'wb') as to_write:
      while True:
        try:
          buf = file_like_obj.read(self._buffer_size)
          if not buf:
            break
          to_write.write(buf)
        except IOError:
          break
    return file_id

  def get_file_url(self, file_id, expires_in=300):
    return url_for('download_userfile_endpoint', file_id=file_id)

  def get_file_checksum(self, file_id):
    path = os.path.join(self._root_path, file_id)
    sha_hash = hashlib.sha256()
    with open(path, 'rb') as to_hash:
      while True:
        buf = to_hash.read(self._buffer_size)
        if not buf:
          break
        sha_hash.update(buf)
    # Match the S3 backend: a short, seven-character checksum.
    return sha_hash.hexdigest()[:7]
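

# Illustrative sketch (an assumption, not original code): round-tripping a
# small file through LocalUserfiles. StringIO stands in for any file-like
# object, such as a Flask request stream.
#
#   from StringIO import StringIO  # io.BytesIO on Python 3
#   uf = LocalUserfiles('/tmp/userfiles')
#   file_id = uf.store_file(StringIO('hello world'), 'text/plain')
#   uf.get_file_checksum(file_id)  # first 7 hex chars of the SHA-256 digest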


class Userfiles(object):
  def __init__(self, app=None):
    self.app = app
    if app is not None:
      self.state = self.init_app(app)
    else:
      self.state = None

  def init_app(self, app):
    storage_type = app.config.get('USERFILES_TYPE', 'LocalUserfiles')
    path = app.config.get('USERFILES_PATH', '')

    if storage_type == 'LocalUserfiles':
      app.add_url_rule('/userfiles/<file_id>', 'upload_userfile_endpoint',
                       upload_userfile_endpoint, methods=['PUT'])
      app.add_url_rule('/userfiles/<file_id>', 'download_userfile_endpoint',
                       download_userfile_endpoint, methods=['GET'])
      userfiles = LocalUserfiles(path)

    elif storage_type == 'S3Userfiles':
      access_key = app.config.get('USERFILES_AWS_ACCESS_KEY', '')
      secret_key = app.config.get('USERFILES_AWS_SECRET_KEY', '')
      bucket = app.config.get('USERFILES_S3_BUCKET', '')
      userfiles = S3Userfiles(path, access_key, secret_key, bucket)

    elif storage_type == 'FakeUserfiles':
      userfiles = FakeUserfiles()

    else:
      raise RuntimeError('Unknown userfiles type: %s' % storage_type)

    # register extension with app
    app.extensions = getattr(app, 'extensions', {})
    app.extensions['userfiles'] = userfiles
    return userfiles

  def __getattr__(self, name):
    # Proxy attribute access to the configured backend.
    return getattr(self.state, name, None)
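

# A minimal wiring sketch (assumed, not part of the original file): the
# Userfiles extension initialized against a Flask app with local storage.
# The config keys mirror the ones read in init_app above; the demo app and
# path are hypothetical.
if __name__ == '__main__':
  from flask import Flask

  demo_app = Flask(__name__)
  demo_app.config['USERFILES_TYPE'] = 'LocalUserfiles'
  demo_app.config['USERFILES_PATH'] = '/tmp/userfiles'

  userfiles = Userfiles(demo_app)

  # URL generation needs a request context, since LocalUserfiles relies on
  # Flask's url_for.
  with demo_app.test_request_context():
    print(userfiles.get_file_url('some-file-id'))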