Merge remote-tracking branch 'origin/master' into nomenclature

Conflicts:
	endpoints/common.py
	endpoints/notificationhelper.py
	test/data/test.db
	workers/dockerfilebuild.py
Jake Moshenko 2014-10-23 13:25:37 -04:00
commit 1461310ab8
200 changed files with 240935 additions and 798 deletions

View file

@@ -317,7 +317,7 @@ class BuildTriggerAnalyze(RepositoryParamResource):
if not found_repository:
return {
'status': 'error',
'message': 'Repository "%s" was not found' % (base_image)
'message': 'Repository "%s" referenced by the Dockerfile was not found' % (base_image)
}
# If the repository is private and the user cannot see that repo, then
@@ -326,7 +326,7 @@ class BuildTriggerAnalyze(RepositoryParamResource):
if found_repository.visibility.name != 'public' and not can_read:
return {
'status': 'error',
'message': 'Repository "%s" was not found' % (base_image)
'message': 'Repository "%s" referenced by the Dockerfile was not found' % (base_image)
}
# Check to see if the repository is public. If not, we suggest the
@@ -450,18 +450,18 @@ class BuildTriggerFieldValues(RepositoryParamResource):
""" Custom verb to fetch a values list for a particular field name. """
@require_repo_admin
@nickname('listTriggerFieldValues')
def get(self, namespace, repository, trigger_uuid, field_name):
def post(self, namespace, repository, trigger_uuid, field_name):
""" List the field values for a custom run field. """
try:
trigger = model.get_build_trigger(namespace, repository, trigger_uuid)
except model.InvalidBuildTriggerException:
raise NotFound()
config = request.get_json() or json.loads(trigger.config)
user_permission = UserAdminPermission(trigger.connected_user.username)
if user_permission.can():
trigger_handler = BuildTriggerBase.get_trigger_for_service(trigger.service.name)
values = trigger_handler.list_field_values(trigger.auth_token, json.loads(trigger.config),
field_name)
values = trigger_handler.list_field_values(trigger.auth_token, config, field_name)
if values is None:
raise NotFound()
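
The listTriggerFieldValues endpoint above switches from GET to POST so the caller can send an in-progress (not yet saved) trigger configuration in the request body; the stored trigger config is only used as a fallback. A minimal sketch of that fallback, with resolve_trigger_config as a made-up helper name:

import json

def resolve_trigger_config(request_json, stored_config_json):
    # Prefer the config posted by the client (e.g. a trigger still being
    # edited); otherwise fall back to the configuration saved on the trigger.
    return request_json or json.loads(stored_config_json)

assert resolve_trigger_config({'branch_regex': 'master'}, '{"branch_regex": ".*"}') == {'branch_regex': 'master'}
assert resolve_trigger_config(None, '{"branch_regex": ".*"}') == {'branch_regex': '.*'}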

View file

@@ -106,7 +106,15 @@ def conduct_oauth_login(service_name, user_id, username, email, metadata={}):
logger.debug('Aliasing with state: %s' % state)
analytics.alias(to_login.username, state)
except model.DataModelException, ex:
except model.InvalidEmailAddressException as ieex:
message = "The e-mail address %s is already associated " % (email, )
message = message + "with an existing %s account." % (app.config['REGISTRY_TITLE_SHORT'], )
message = message + "\nPlease log in with your username and password and "
message = message + "associate your %s account to use it in the future." % (service_name, )
return render_ologin_error(service_name, message)
except model.DataModelException as ex:
return render_ologin_error(service_name, ex.message)
if common_login(to_login):
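
The new except branch above catches InvalidEmailAddressException and explains that the e-mail address from the OAuth provider already belongs to an existing account. The same message expressed with a single template rather than repeated concatenation, purely as an illustration (the registry title and service name come from whatever the deployment configures):

DUPLICATE_EMAIL_MESSAGE = (
    'The e-mail address %(email)s is already associated with an existing '
    '%(registry)s account.\n'
    'Please log in with your username and password and associate your '
    '%(service)s account to use it in the future.'
)

def duplicate_email_message(email, registry_title, service_name):
    # Same text as the concatenated message in conduct_oauth_login above.
    return DUPLICATE_EMAIL_MESSAGE % {
        'email': email,
        'registry': registry_title,
        'service': service_name,
    }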

View file

@@ -10,6 +10,7 @@ from flask.ext.principal import identity_changed
from random import SystemRandom
from data import model
from data.database import db
from app import app, login_manager, dockerfile_build_queue, notification_queue
from auth.permissions import QuayDeferredPermissionUser
from auth import scopes
@@ -170,6 +171,10 @@ def render_page_template(name, **kwargs):
external_styles = get_external_css(local=not app.config.get('USE_CDN', True))
external_scripts = get_external_javascript(local=not app.config.get('USE_CDN', True))
contact_href = None
if len(app.config.get('CONTACT_INFO', [])) == 1:
contact_href = app.config['CONTACT_INFO'][0]
resp = make_response(render_template(name, route_data=json.dumps(get_route_data()),
external_styles=external_styles,
external_scripts=external_scripts,
@@ -186,6 +191,7 @@ def render_page_template(name, **kwargs):
show_chat=features.OLARK_CHAT,
cache_buster=cache_buster,
has_billing=features.BILLING,
contact_href=contact_href,
**kwargs))
resp.headers['X-FRAME-OPTIONS'] = 'DENY'
@@ -217,14 +223,15 @@ def start_build(repository, dockerfile_id, tags, build_name, subdir, manual,
'build_subdir': subdir
}
build_request = model.create_repository_build(repository, token, job_config,
dockerfile_id, build_name,
trigger, pull_robot_name=pull_robot_name)
with app.config['DB_TRANSACTION_FACTORY'](db):
build_request = model.create_repository_build(repository, token, job_config,
dockerfile_id, build_name,
trigger, pull_robot_name=pull_robot_name)
dockerfile_build_queue.put([str(repository.namespace_user.id), repository.name], json.dumps({
'build_uuid': build_request.uuid,
'pull_credentials': model.get_pull_credentials(pull_robot_name) if pull_robot_name else None
}), retries_remaining=1)
dockerfile_build_queue.put([str(repository.namespace_user.id), repository.name], json.dumps({
'build_uuid': build_request.uuid,
'pull_credentials': model.get_pull_credentials(pull_robot_name) if pull_robot_name else None
}), retries_remaining=1)
# Add the build to the repo's log.
metadata = {
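
Two things change in this hunk set: render_page_template exposes a contact_href when exactly one CONTACT_INFO entry is configured, and start_build now creates the build record under a database transaction before the job is queued. A small sketch of the contact_href rule (the config values are invented):

def get_contact_href(config):
    # Mirrors the render_page_template change: with exactly one configured
    # contact method, pass it straight to the template; otherwise leave it
    # unset (presumably so the UI falls back to a generic contact page).
    contact_info = config.get('CONTACT_INFO', [])
    if len(contact_info) == 1:
        return contact_info[0]
    return None

assert get_contact_href({'CONTACT_INFO': ['mailto:support@example.com']}) == 'mailto:support@example.com'
assert get_contact_href({'CONTACT_INFO': ['mailto:a@example.com', 'irc://chat.example.com']}) is None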

View file

@@ -70,7 +70,7 @@ def create_user():
abort(400, 'User creation is disabled. Please speak to your administrator.')
user_data = request.get_json()
if not 'username' in user_data:
if not user_data or not 'username' in user_data:
abort(400, 'Missing username')
username = user_data['username']
@@ -299,13 +299,6 @@ def update_images(namespace, repository):
# Make sure the repo actually exists.
abort(404, message='Unknown repository', issue='unknown-repo')
profile.debug('Parsing image data')
image_with_checksums = json.loads(request.data.decode('utf8'))
updated_tags = {}
for image in image_with_checksums:
updated_tags[image['Tag']] = image['id']
if get_authenticated_user():
profile.debug('Publishing push event')
username = get_authenticated_user().username
@@ -326,12 +319,11 @@ def update_images(namespace, repository):
# Generate a job for each notification that has been added to this repo
profile.debug('Adding notifications for repository')
updated_tags = session.get('pushed_tags', {})
event_data = {
'updated_tags': updated_tags,
'pushed_image_count': len(image_with_checksums),
'pruned_image_count': num_removed
}
spawn_notification(repo, 'repo_push', event_data)
return make_response('Updated', 204)
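
update_images now takes the tag list from session['pushed_tags'] (populated by put_tag in the tag endpoint, further down) instead of re-deriving it, and hands it to the repo_push notification. Based on the event_data built above and the sample data in the notification events file, the queued event payload looks roughly like this (values invented):

event_data = {
    'updated_tags': {'latest': 'someimageid', 'foo': 'anotherimage'},  # tag -> docker image id
    'pushed_image_count': 10,
    'pruned_image_count': 3,
}
# spawn_notification(repo, 'repo_push', event_data) then fans this payload out
# to every notification configured for the repository.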

View file

@@ -1,7 +1,9 @@
import logging
from notificationhelper import build_event_data
from util.jinjautil import get_template_env
template_env = get_template_env("events")
logger = logging.getLogger(__name__)
class InvalidNotificationEventException(Exception):
@@ -14,7 +16,7 @@ class NotificationEvent(object):
def get_level(self, event_data, notification_data):
"""
Returns a 'level' representing the severity of the event.
Valid values are: 'info', 'warning', 'error', 'primary'
Valid values are: 'info', 'warning', 'error', 'primary', 'success'
"""
raise NotImplementedError
@@ -28,7 +30,10 @@ class NotificationEvent(object):
"""
Returns a human readable HTML message for the given notification data.
"""
raise NotImplementedError
return template_env.get_template(self.event_name() + '.html').render({
'event_data': event_data,
'notification_data': notification_data
})
def get_sample_data(self, repository=None):
"""
@@ -59,32 +64,14 @@ class RepoPushEvent(NotificationEvent):
return 'repo_push'
def get_level(self, event_data, notification_data):
return 'info'
return 'primary'
def get_summary(self, event_data, notification_data):
return 'Repository %s updated' % (event_data['repository'])
def get_message(self, event_data, notification_data):
if not event_data.get('updated_tags', {}).keys():
html = """
Repository <a href="%s">%s</a> has been updated via a push.
""" % (event_data['homepage'],
event_data['repository'])
else:
html = """
Repository <a href="%s">%s</a> has been updated via a push.
<br><br>
Tags Updated: %s
""" % (event_data['homepage'],
event_data['repository'],
', '.join(event_data['updated_tags'].keys()))
return html
def get_sample_data(self, repository):
return build_event_data(repository, {
'updated_tags': {'latest': 'someimageid', 'foo': 'anotherimage'},
'pushed_image_count': 10,
'pruned_image_count': 3
})
@@ -109,26 +96,7 @@ class BuildQueueEvent(NotificationEvent):
}, subpage='/build?current=%s' % build_uuid)
def get_summary(self, event_data, notification_data):
return 'Build queued for repository %s' % (event_data['repository'])
def get_message(self, event_data, notification_data):
is_manual = event_data['is_manual']
if is_manual:
html = """
A <a href="%s">new build</a> has been manually queued to start on repository %s.
<br><br>
Build ID: %s
""" % (event_data['homepage'], event_data['repository'], event_data['build_id'])
else:
html = """
A <a href="%s">new build</a> has been queued via a %s trigger to start on repository %s.
<br><br>
Build ID: %s
""" % (event_data['homepage'], event_data['trigger_kind'],
event_data['repository'], event_data['build_id'])
return html
return 'Build queued for repository %s' % (event_data['repository'])
class BuildStartEvent(NotificationEvent):
@@ -152,15 +120,6 @@ class BuildStartEvent(NotificationEvent):
def get_summary(self, event_data, notification_data):
return 'Build started for repository %s' % (event_data['repository'])
def get_message(self, event_data, notification_data):
html = """
A <a href="%s">new build</a> has started on repository %s.
<br><br>
Build ID: %s
""" % (event_data['homepage'], event_data['repository'], event_data['build_id'])
return html
class BuildSuccessEvent(NotificationEvent):
@classmethod
@@ -168,7 +127,7 @@ class BuildSuccessEvent(NotificationEvent):
return 'build_success'
def get_level(self, event_data, notification_data):
return 'primary'
return 'success'
def get_sample_data(self, repository):
build_uuid = 'fake-build-id'
@@ -183,15 +142,6 @@ class BuildSuccessEvent(NotificationEvent):
def get_summary(self, event_data, notification_data):
return 'Build succeeded for repository %s' % (event_data['repository'])
def get_message(self, event_data, notification_data):
html = """
A <a href="%s">build</a> has finished on repository %s.
<br><br>
Build ID: %s
""" % (event_data['homepage'], event_data['repository'], event_data['build_id'])
return html
class BuildFailureEvent(NotificationEvent):
@classmethod
@@ -215,13 +165,3 @@ class BuildFailureEvent(NotificationEvent):
def get_summary(self, event_data, notification_data):
return 'Build failure for repository %s' % (event_data['repository'])
def get_message(self, event_data, notification_data):
html = """
A <a href="%s">build</a> has failed on repository %s.
<br><br>
Reason: %s<br>
Build ID: %s<br>
""" % (event_data['homepage'], event_data['repository'],
event_data['error_message'], event_data['build_id'])
return html
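
All of the per-event get_message bodies above are deleted in favour of the base-class implementation, which renders a Jinja template named after the event from the new 'events' template directory. A minimal illustration of that lookup using a plain jinja2 DictLoader in place of util.jinjautil.get_template_env (whose implementation is not part of this diff):

from jinja2 import Environment, DictLoader

# Stand-in for the 'events' template directory; the real code would load
# repo_push.html, build_success.html, and so on from disk.
template_env = Environment(loader=DictLoader({
    'repo_push.html': ('Repository <a href="{{ event_data.homepage }}">'
                       '{{ event_data.repository }}</a> has been updated via a push.'),
}))

def get_message(event_name, event_data, notification_data):
    # Same shape as NotificationEvent.get_message above: pick the template
    # named after the event and render it with the event payload.
    return template_env.get_template(event_name + '.html').render({
        'event_data': event_data,
        'notification_data': notification_data,
    })

print(get_message('repo_push',
                  {'homepage': 'https://example.test/org/repo', 'repository': 'org/repo'},
                  {}))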

View file

@@ -1,5 +1,6 @@
from app import app, notification_queue
from data import model
from auth.auth_context import get_authenticated_user, get_validated_oauth_token
import json
@@ -27,19 +28,35 @@ def build_event_data(repo, extra_data={}, subpage=None):
event_data.update(extra_data)
return event_data
def build_notification_data(notification, event_data):
def build_notification_data(notification, event_data, performer_data=None):
if not performer_data:
performer_data = {}
oauth_token = get_validated_oauth_token()
if oauth_token:
performer_data['oauth_token_id'] = oauth_token.id
performer_data['oauth_token_application_id'] = oauth_token.application.client_id
performer_data['oauth_token_application'] = oauth_token.application.name
performer_user = get_authenticated_user()
if performer_user:
performer_data['entity_id'] = performer_user.id
performer_data['entity_name'] = performer_user.username
return {
'notification_uuid': notification.uuid,
'event_data': event_data
'event_data': event_data,
'performer_data': performer_data,
}
def spawn_notification(repo, event_name, extra_data={}, subpage=None, pathargs=[]):
def spawn_notification(repo, event_name, extra_data={}, subpage=None, pathargs=[],
performer_data=None):
event_data = build_event_data(repo, extra_data=extra_data, subpage=subpage)
notifications = model.list_repo_notifications(repo.namespace_user.username, repo.name,
event_name=event_name)
for notification in notifications:
notification_data = build_notification_data(notification, event_data)
for notification in list(notifications):
notification_data = build_notification_data(notification, event_data, performer_data)
path = [str(repo.namespace_user.id), repo.name, event_name] + pathargs
notification_queue.put(path, json.dumps(notification_data))
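
build_notification_data now records who caused the event: OAuth token details when the request came through an OAuth application, plus the authenticated user, merged with any performer_data the caller passes to spawn_notification. With those changes, a queued notification body looks something like this (ids and names invented):

notification_data = {
    'notification_uuid': 'some-notification-uuid',
    'event_data': {
        'repository': 'org/repo',
        'updated_tags': {'latest': 'someimageid'},
    },
    'performer_data': {
        # From get_validated_oauth_token(), when the request used OAuth:
        'oauth_token_id': 42,
        'oauth_token_application_id': 'some-client-id',
        'oauth_token_application': 'Example CI',
        # From get_authenticated_user():
        'entity_id': 7,
        'entity_name': 'someuser',
    },
}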

View file

@@ -211,7 +211,7 @@ class FlowdockMethod(NotificationMethod):
if not token:
return
owner = model.get_user(notification.repository.namespace_user.username)
owner = model.get_user_or_org(notification.repository.namespace_user.username)
if not owner:
# Something went wrong.
return
@@ -267,7 +267,7 @@ class HipchatMethod(NotificationMethod):
if not token or not room_id:
return
owner = model.get_user(notification.repository.namespace_user.username)
owner = model.get_user_or_org(notification.repository.namespace_user.username)
if not owner:
# Something went wrong.
return
@@ -279,6 +279,7 @@ class HipchatMethod(NotificationMethod):
'info': 'gray',
'warning': 'yellow',
'error': 'red',
'success': 'green',
'primary': 'purple'
}.get(level, 'gray')
@@ -303,6 +304,56 @@ class HipchatMethod(NotificationMethod):
raise NotificationMethodPerformException(ex.message)
from HTMLParser import HTMLParser
class SlackAdjuster(HTMLParser):
def __init__(self):
self.reset()
self.result = []
def handle_data(self, d):
self.result.append(d)
def get_attr(self, attrs, name):
for attr in attrs:
if attr[0] == name:
return attr[1]
return ''
def handle_starttag(self, tag, attrs):
if tag == 'a':
self.result.append('<%s|' % (self.get_attr(attrs, 'href'), ))
if tag == 'i':
self.result.append('_')
if tag == 'b' or tag == 'strong':
self.result.append('*')
if tag == 'img':
self.result.append(self.get_attr(attrs, 'alt'))
self.result.append(' ')
def handle_endtag(self, tag):
if tag == 'a':
self.result.append('>')
if tag == 'b' or tag == 'strong':
self.result.append('*')
if tag == 'i':
self.result.append('_')
def get_data(self):
return ''.join(self.result)
def adjust_tags(html):
s = SlackAdjuster()
s.feed(html)
return s.get_data()
class SlackMethod(NotificationMethod):
""" Method for sending notifications to Slack via the API:
https://api.slack.com/docs/attachments
@@ -318,12 +369,11 @@ class SlackMethod(NotificationMethod):
if not config_data.get('subdomain', '').isalnum():
raise CannotValidateNotificationMethodException('Missing Slack Subdomain Name')
def formatForSlack(self, message):
def format_for_slack(self, message):
message = message.replace('\n', '')
message = re.sub(r'\s+', ' ', message)
message = message.replace('<br>', '\n')
message = re.sub(r'<a href="(.+)">(.+)</a>', '<\\1|\\2>', message)
return message
return adjust_tags(message)
def perform(self, notification, event_handler, notification_data):
config_data = json.loads(notification.config_json)
@@ -334,7 +384,7 @@ class SlackMethod(NotificationMethod):
if not token or not subdomain:
return
owner = model.get_user(notification.repository.namespace_user.username)
owner = model.get_user_or_org(notification.repository.namespace_user.username)
if not owner:
# Something went wrong.
return
@@ -346,6 +396,7 @@ class SlackMethod(NotificationMethod):
'info': '#ffffff',
'warning': 'warning',
'error': 'danger',
'success': 'good',
'primary': 'good'
}.get(level, '#ffffff')
@@ -359,8 +410,9 @@ class SlackMethod(NotificationMethod):
'attachments': [
{
'fallback': summary,
'text': self.formatForSlack(message),
'color': color
'text': self.format_for_slack(message),
'color': color,
'mrkdwn_in': ["text"]
}
]
}
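
The regex-based formatForSlack is replaced by an HTMLParser subclass that rewrites the notification HTML into Slack message markup: anchors become <url|text> links, <b>/<strong> become *bold*, <i> becomes _italic_, and the attachment opts into that markup via 'mrkdwn_in'. Assuming the adjust_tags helper above is in scope, a typical build message (text and URL invented) converts like so:

html = ('A <a href="https://quay.example.com/repository/org/repo/build?current=abc">build</a> '
        'has finished on repository <b>org/repo</b>.')

# SlackAdjuster walks the HTML and emits Slack mrkdwn instead:
expected = ('A <https://quay.example.com/repository/org/repo/build?current=abc|build> '
            'has finished on repository *org/repo*.')

assert adjust_tags(html) == expected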

View file

@@ -197,12 +197,15 @@ def put_image_layer(namespace, repository, image_id):
# Create a socket reader to read the input stream containing the layer data.
sr = SocketReader(input_stream)
# Add a handler that store the data in storage.
tmp, store_hndlr = store.temp_store_handler()
sr.add_handler(store_hndlr)
# Add a handler that copies the data into a temp file. This is used to calculate the tarsum,
# which is only needed for older versions of Docker.
requires_tarsum = session.get('checksum_format') == 'tarsum'
if requires_tarsum:
tmp, tmp_hndlr = store.temp_store_handler()
sr.add_handler(tmp_hndlr)
# Add a handler to compute the uncompressed size of the layer.
uncompressed_size_info, size_hndlr = gzipstream.calculate_size_handler()
# Add a handler to compute the compressed and uncompressed sizes of the layer.
size_info, size_hndlr = gzipstream.calculate_size_handler()
sr.add_handler(size_hndlr)
# Add a handler which computes the checksum.
@@ -217,14 +220,15 @@ def put_image_layer(namespace, repository, image_id):
csums.append('sha256:{0}'.format(h.hexdigest()))
try:
image_size = tmp.tell()
# Save the size of the image.
model.set_image_size(image_id, namespace, repository, image_size, uncompressed_size_info.size)
model.set_image_size(image_id, namespace, repository, size_info.compressed_size,
size_info.uncompressed_size)
if requires_tarsum:
tmp.seek(0)
csums.append(checksums.compute_tarsum(tmp, json_data))
tmp.close()
tmp.seek(0)
csums.append(checksums.compute_tarsum(tmp, json_data))
tmp.close()
except (IOError, checksums.TarError) as e:
logger.debug('put_image_layer: Error when computing tarsum '
'{0}'.format(e))
@@ -268,7 +272,19 @@ def put_image_checksum(namespace, repository, image_id):
if not permission.can():
abort(403)
checksum = request.headers.get('X-Docker-Checksum')
# Docker Version < 0.10 (tarsum+sha):
old_checksum = request.headers.get('X-Docker-Checksum')
# Docker Version >= 0.10 (sha):
new_checksum = request.headers.get('X-Docker-Checksum-Payload')
# Store whether we need to calculate the tarsum.
if new_checksum:
session['checksum_format'] = 'sha256'
else:
session['checksum_format'] = 'tarsum'
checksum = new_checksum or old_checksum
if not checksum:
abort(400, "Missing checksum for image %(image_id)s", issue='missing-checksum',
image_id=image_id)
@@ -279,6 +295,9 @@ def put_image_checksum(namespace, repository, image_id):
profile.debug('Looking up repo image')
repo_image = model.get_repo_image(namespace, repository, image_id)
if not repo_image or not repo_image.storage:
abort(404, 'Image not found: %(image_id)s', issue='unknown-image', image_id=image_id)
uuid = repo_image.storage.uuid
profile.debug('Looking up repo layer data')
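
put_image_checksum now looks at which checksum header the client sent and remembers the answer in the session, so put_image_layer only spools the layer to a temporary file (needed for the expensive tarsum) when an older Docker client requires it; it also stores both the compressed and uncompressed layer sizes. A condensed sketch of the header negotiation (header values invented):

def checksum_format(headers):
    # Docker < 0.10 sends X-Docker-Checksum (a tarsum); Docker >= 0.10 sends
    # X-Docker-Checksum-Payload (a plain sha256). Prefer the newer header.
    old_checksum = headers.get('X-Docker-Checksum')
    new_checksum = headers.get('X-Docker-Checksum-Payload')
    if new_checksum:
        return 'sha256', new_checksum
    return 'tarsum', old_checksum

assert checksum_format({'X-Docker-Checksum-Payload': 'sha256:abcd'}) == ('sha256', 'sha256:abcd')
assert checksum_format({'X-Docker-Checksum': 'tarsum+sha256:ef01'}) == ('tarsum', 'tarsum+sha256:ef01')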

View file

@@ -2,7 +2,7 @@
import logging
import json
from flask import abort, request, jsonify, make_response, Blueprint
from flask import abort, request, jsonify, make_response, Blueprint, session
from app import app
from util.names import parse_repository_name
@@ -59,6 +59,12 @@ def put_tag(namespace, repository, tag):
docker_image_id = json.loads(request.data)
model.create_or_update_tag(namespace, repository, tag, docker_image_id)
# Store the updated tag.
if not 'pushed_tags' in session:
session['pushed_tags'] = {}
session['pushed_tags'][tag] = docker_image_id
return make_response('Created', 200)
abort(403)
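
put_tag now accumulates every tag pushed during the session, which is what update_images reads back (see the index endpoint changes earlier) when it builds the repo_push notification. The bookkeeping, with a plain dict standing in for Flask's session:

def record_pushed_tag(session, tag, docker_image_id):
    # Same logic as the put_tag change above: collect tag -> image id
    # mappings across the requests that make up a single push.
    if 'pushed_tags' not in session:
        session['pushed_tags'] = {}
    session['pushed_tags'][tag] = docker_image_id

session = {}
record_pushed_tag(session, 'latest', 'someimageid')
record_pushed_tag(session, 'v1.0', 'anotherimage')
assert session['pushed_tags'] == {'latest': 'someimageid', 'v1.0': 'anotherimage'}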

View file

@@ -3,11 +3,13 @@ import io
import os.path
import tarfile
import base64
import re
from github import Github, UnknownObjectException, GithubException
from tempfile import SpooledTemporaryFile
from app import app, userfiles as user_files
from util.tarfileappender import TarfileAppender
client = app.config['HTTPCLIENT']
@@ -229,13 +231,35 @@ class GithubBuildTrigger(BuildTrigger):
return repos_by_org
def matches_branch(self, branch_name, regex):
if not regex:
return False
m = regex.match(branch_name)
if not m:
return False
return len(m.group(0)) == len(branch_name)
def list_build_subdirs(self, auth_token, config):
gh_client = self._get_client(auth_token)
source = config['build_source']
try:
try:
repo = gh_client.get_repo(source)
default_commit = repo.get_branch(repo.default_branch or 'master').commit
# Find the first matching branch.
branches = None
if 'branch_regex' in config:
try:
regex = re.compile(config['branch_regex'])
branches = [branch.name for branch in repo.get_branches()
if self.matches_branch(branch.name, regex)]
except:
pass
branches = branches or [repo.default_branch or 'master']
default_commit = repo.get_branch(branches[0]).commit
commit_tree = repo.get_git_tree(default_commit.sha, recursive=True)
return [os.path.dirname(elem.path) for elem in commit_tree.tree
@@ -301,10 +325,17 @@ class GithubBuildTrigger(BuildTrigger):
with tarfile.open(fileobj=tarball) as archive:
tarball_subdir = archive.getnames()[0]
# Seek to position 0 to make boto multipart happy
# Seek to position 0 to make tarfile happy.
tarball.seek(0)
dockerfile_id = user_files.store_file(tarball, TARBALL_MIME)
entries = {
tarball_subdir + '/.git/HEAD': commit_sha,
tarball_subdir + '/.git/objects/': None,
tarball_subdir + '/.git/refs/': None
}
appender = TarfileAppender(tarball, entries).get_stream()
dockerfile_id = user_files.store_file(appender, TARBALL_MIME)
logger.debug('Successfully prepared job')
@@ -330,7 +361,7 @@ class GithubBuildTrigger(BuildTrigger):
payload = request.get_json()
if not payload or payload.get('head_commit') is None:
raise SkipRequestException()
if 'zen' in payload:
raise ValidationRequestException()
@@ -339,6 +370,16 @@ class GithubBuildTrigger(BuildTrigger):
commit_sha = payload['head_commit']['id']
commit_message = payload['head_commit'].get('message', '')
if 'branch_regex' in config:
try:
regex = re.compile(config['branch_regex'])
except:
regex = re.compile('.*')
branch = ref.split('/')[-1]
if not self.matches_branch(branch, regex):
raise SkipRequestException()
if should_skip_commit(commit_message):
raise SkipRequestException()
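
The GitHub trigger gains branch filtering: matches_branch only accepts a branch when the regex matches the entire name, list_build_subdirs prefers the first matching branch, and handle_trigger_request skips pushes to branches that do not match (falling back to match-everything if the stored pattern will not compile). A standalone version of the full-match check, with an invented example pattern:

import re

def matches_branch(branch_name, regex):
    # Full-match semantics: a pattern that only covers a prefix of the
    # branch name (e.g. 'master' vs 'master-wip') is rejected.
    if not regex:
        return False
    m = regex.match(branch_name)
    if not m:
        return False
    return len(m.group(0)) == len(branch_name)

regex = re.compile('master|releases/.+')
assert matches_branch('master', regex)
assert matches_branch('releases/1.4', regex)
assert not matches_branch('master-wip', regex)
assert not matches_branch('feature/foo', regex)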

View file

@@ -4,11 +4,12 @@ import hashlib
from flask import redirect, Blueprint, abort, send_file
from app import storage as store, app
from app import app
from auth.auth import process_auth
from auth.permissions import ReadRepositoryPermission
from data import model
from data import database
from storage import Storage
from util.queuefile import QueueFile
from util.queueprocess import QueueProcess
@@ -19,8 +20,9 @@ from util.dockerloadformat import build_docker_load_stream
verbs = Blueprint('verbs', __name__)
logger = logging.getLogger(__name__)
def _open_stream(namespace, repository, tag, synthetic_image_id, image_json, image_list):
store = Storage(app)
def get_next_image():
for current_image_id in image_list:
yield model.get_repo_image(namespace, repository, current_image_id)
@@ -43,14 +45,23 @@ def _open_stream(namespace, repository, tag, synthetic_image_id, image_json, ima
def _write_synthetic_image_to_storage(linked_storage_uuid, linked_locations, queue_file):
database.configure(app.config)
store = Storage(app)
def handle_exception(ex):
logger.debug('Exception when building squashed image %s: %s', linked_storage_uuid, ex)
model.delete_derived_storage_by_uuid(linked_storage_uuid)
queue_file.add_exception_handler(handle_exception)
image_path = store.image_layer_path(linked_storage_uuid)
store.stream_write(linked_locations, image_path, queue_file)
queue_file.close()
database.configure(app.config)
done_uploading = model.get_storage_by_uuid(linked_storage_uuid)
done_uploading.uploading = False
done_uploading.save()
if not queue_file.raised_exception:
done_uploading = model.get_storage_by_uuid(linked_storage_uuid)
done_uploading.uploading = False
done_uploading.save()
@verbs.route('/squash/<namespace>/<repository>/<tag>', methods=['GET'])
@@ -59,8 +70,9 @@ def get_squashed_tag(namespace, repository, tag):
permission = ReadRepositoryPermission(namespace, repository)
if permission.can() or model.repository_is_public(namespace, repository):
# Lookup the requested tag.
tag_image = model.get_tag_image(namespace, repository, tag)
if not tag_image:
try:
tag_image = model.get_tag_image(namespace, repository, tag)
except model.DataModelException:
abort(404)
# Lookup the tag's image and storage.
@@ -68,6 +80,7 @@ def get_squashed_tag(namespace, repository, tag):
if not repo_image:
abort(404)
store = Storage(app)
derived = model.find_or_create_derived_storage(repo_image.storage, 'squash',
store.preferred_locations[0])
if not derived.uploading:
@@ -96,8 +109,14 @@ def get_squashed_tag(namespace, repository, tag):
# Create a queue process to generate the data. The queue files will read from the process
# and send the results to the client and storage.
def _cleanup():
# Close any existing DB connection once the process has exited.
database.close_db_filter(None)
args = (namespace, repository, tag, synthetic_image_id, image_json, full_image_list)
queue_process = QueueProcess(_open_stream, 8 * 1024, 10 * 1024 * 1024, args) # 8K/10M chunk/max
queue_process = QueueProcess(_open_stream,
8 * 1024, 10 * 1024 * 1024, # 8K/10M chunk/max
args, finished=_cleanup)
client_queue_file = QueueFile(queue_process.create_queue(), 'client')
storage_queue_file = QueueFile(queue_process.create_queue(), 'storage')
@@ -107,7 +126,7 @@ def get_squashed_tag(namespace, repository, tag):
# Start the storage saving.
storage_args = (derived.uuid, derived.locations, storage_queue_file)
QueueProcess.run_process(_write_synthetic_image_to_storage, storage_args)
QueueProcess.run_process(_write_synthetic_image_to_storage, storage_args, finished=_cleanup)
# Return the client's data.
return send_file(client_queue_file)
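
The squashing verb now runs the image generator in a child QueueProcess, so each process manages its own database state: the finished=_cleanup hook closes connections when a process exits, the storage writer re-runs database.configure, and the 'uploading' flag on the derived storage is only cleared when the stream finished without an exception. A condensed sketch of that last guard; delete_derived_image and mark_uploaded are placeholders for the model calls used in the diff:

def persist_squashed_image(store, locations, image_path, queue_file,
                           delete_derived_image, mark_uploaded):
    def handle_exception(ex):
        # The producer process failed part-way through; drop the derived
        # storage record so nothing ever points at a truncated image.
        delete_derived_image()

    queue_file.add_exception_handler(handle_exception)
    store.stream_write(locations, image_path, queue_file)
    queue_file.close()

    # Only mark the upload complete when the producer finished cleanly.
    if not queue_file.raised_exception:
        mark_uploaded()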