Finish the implementation of local userfiles. Strip charsets from mimetypes in the build worker. Add canonical name ordering to the build queue. Port all queues to the canonical naming version.
This commit is contained in:
parent 808026dc00
commit 61a6db236f

13 changed files with 112 additions and 41 deletions
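The mimetype change called out in the message ("strip charsets from mimetypes in the build worker") lives in one of the other changed files and is not part of the hunks shown below. As a rough illustration of the general technique only: a Content-Type value such as 'application/json; charset=utf-8' is reduced to its bare mimetype by dropping everything after the first ';'. The helper below is hypothetical and not code from this commit:

# Hypothetical sketch, not taken from this commit: strip the charset (and any
# other parameters) from a Content-Type header value before storing it.
def clean_mimetype(content_type):
  if not content_type:
    return content_type
  return content_type.split(';')[0].strip()

print(clean_mimetype('application/json; charset=utf-8'))  # -> 'application/json'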
@@ -2,10 +2,12 @@ import boto
 import os
 import logging
 import hashlib
+import magic
 
 from boto.s3.key import Key
 from uuid import uuid4
-from flask import url_for
+from flask import url_for, request, send_file
+from flask.views import View
 
 
 logger = logging.getLogger(__name__)
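The newly imported magic module is python-magic, which the handler added below uses to sniff a stored file's MIME type before serving it with send_file. A minimal usage sketch, assuming the python-magic bindings and libmagic are installed; the path is a placeholder:

import magic

# mime=True makes libmagic report MIME types ('text/plain') rather than
# human-readable descriptions ('ASCII text').
mime_checker = magic.Magic(mime=True)
print(mime_checker.from_file('/etc/hosts'))  # e.g. 'text/plain'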
@@ -88,43 +90,84 @@ class S3Userfiles(object)
     return k.etag[1:-1][:7]
 
 
-def upload_userfile_endpoint(file_id):
-  raise NotImplementedError()
+class UserfilesHandlers(View):
+  methods = ['GET', 'PUT']
+
+  def __init__(self, local_userfiles):
+    self._userfiles = local_userfiles
+    self._magic = magic.Magic(mime=True)
+
-def download_userfile_endpoint(file_id):
-  raise NotImplementedError()
+  def get(self, file_id):
+    path = self._userfiles.file_path(file_id)
+    logger.debug('Sending path: %s' % path)
+    return send_file(path, mimetype=self._magic.from_file(path))
+
+  def put(self, file_id):
+    input_stream = request.stream
+    if request.headers.get('transfer-encoding') == 'chunked':
+      # Careful, might work only with WSGI servers supporting chunked
+      # encoding (Gunicorn)
+      input_stream = request.environ['wsgi.input']
+
+    self._userfiles.store_stream(input_stream, file_id)
 
+  def dispatch_request(self, file_id):
+    if request.method == 'GET':
+      return self.get(file_id)
+    elif request.method == 'PUT':
+      return self.put(file_id)
 
 
 class LocalUserfiles(object):
-  def __init__(self, path):
+  def __init__(self, app, path):
     self._root_path = path
     self._buffer_size = 64 * 1024 # 64 KB
+    self._app = app
 
+  def _build_url_adapter(self):
+    return self._app.url_map.bind(self._app.config['SERVER_HOSTNAME'],
+                                  script_name=self._app.config['APPLICATION_ROOT'] or '/',
+                                  url_scheme=self._app.config['PREFERRED_URL_SCHEME'])
 
   def prepare_for_drop(self, mime_type):
     file_id = str(uuid4())
-    return (url_for('upload_userfile_endpoint', file_id=file_id), file_id)
+    with self._app.app_context() as ctx:
+      ctx.url_adapter = self._build_url_adapter()
+      return (url_for('userfiles_handlers', file_id=file_id, _external=True), file_id)
 
+  def file_path(self, file_id):
+    if '..' in file_id or file_id.startswith('/'):
+      raise RuntimeError('Invalid Filename')
+    return os.path.join(self._root_path, file_id)
 
+  def store_stream(self, stream, file_id):
+    path = self.file_path(file_id)
+    dirname = os.path.dirname(path)
+    if not os.path.exists(dirname):
+      os.makedirs(dirname)
 
-  def store_file(self, file_like_obj, content_type):
-    file_id = str(uuid4())
-    path = os.path.join(self._root_path, file_id)
     with open(path, 'w') as to_write:
       while True:
         try:
-          buf = file_like_obj.read(self._buffer_size)
+          buf = stream.read(self._buffer_size)
           if not buf:
             break
           to_write.write(buf)
         except IOError:
           break
 
+  def store_file(self, file_like_obj, content_type):
+    file_id = str(uuid4())
+    self.store_stream(file_like_obj, content_type)
+    return file_id
 
   def get_file_url(self, file_id, expires_in=300):
-    return url_for('download_userfile_endpoint', file_id=file_id)
+    with self._app.app_context() as ctx:
+      ctx.url_adapter = self._build_url_adapter()
+      return url_for('userfiles_handlers', file_id=file_id, _external=True)
 
   def get_file_checksum(self, file_id):
-    path = os.path.join(self._root_path, file_id)
+    path = self.file_path(file_id)
     sha_hash = hashlib.sha256()
     with open(path, 'r') as to_hash:
       while True:
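The put handler above reads the request body as a stream and, when the client sends Transfer-Encoding: chunked, falls back to the raw wsgi.input (which, per the in-code comment, only works on WSGI servers such as Gunicorn that support chunked requests). A hypothetical client-side sketch of exercising that endpoint with the requests library; the host and file id are placeholders, and passing a generator forces chunked encoding because no Content-Length can be computed up front:

import requests

def chunked_reader(fp, chunk_size=64 * 1024):
  # Yield the file in 64 KB pieces; requests sends a generator body with
  # Transfer-Encoding: chunked since the total length is unknown.
  while True:
    buf = fp.read(chunk_size)
    if not buf:
      break
    yield buf

with open('Dockerfile', 'rb') as fp:
  resp = requests.put('http://localhost:5000/userfiles/some-file-id',
                      data=chunked_reader(fp))
resp.raise_for_status()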
@@ -148,11 +191,10 @@ class Userfiles(object):
     path = app.config.get('USERFILES_PATH', '')
 
     if storage_type == 'LocalUserfiles':
-      app.add_url_rule('/userfiles/<file_id>', 'upload_userfile_endpoint',
-                       upload_userfile_endpoint, methods=['PUT'])
-      app.add_url_rule('/userfiles/<file_id>', 'download_userfile_endpoint',
-                       download_userfile_endpoint, methods=['GET'])
-      userfiles = LocalUserfiles(path)
+      userfiles = LocalUserfiles(app, path)
+      app.add_url_rule('/userfiles/<file_id>',
+                       view_func=UserfilesHandlers.as_view('userfiles_handlers',
+                                                           local_userfiles=userfiles))
 
     elif storage_type == 'S3Userfiles':
       access_key = app.config.get('USERFILES_AWS_ACCESS_KEY', '')
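prepare_for_drop and get_file_url now build absolute URLs without an active request, by binding the app's url_map to the configured hostname and attaching that adapter to an application context. A minimal standalone sketch of the same idea using Flask's stock SERVER_NAME mechanism instead of the custom SERVER_HOSTNAME binding above; the app and route here are placeholders, not the project's code:

from flask import Flask, url_for

app = Flask(__name__)
app.config['SERVER_NAME'] = 'localhost:5000'  # enables external URLs outside a request

@app.route('/userfiles/<file_id>')
def userfiles_handlers(file_id):
  return 'stub'

# No request is active here (think: a background worker); the app context
# supplies the URL adapter that url_for needs for _external=True.
with app.app_context():
  print(url_for('userfiles_handlers', file_id='abc', _external=True))
  # -> 'http://localhost:5000/userfiles/abc'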