""" Create, list, cancel and get status/logs of repository builds. """

import logging
import json
import datetime
import hashlib

from flask import request
from rfc3987 import parse as uri_parse

from app import app, userfiles as user_files, build_logs, log_archive, dockerfile_build_queue
from buildtrigger.basehandler import BuildTriggerHandler
from endpoints.api import (RepositoryParamResource, parse_args, query_param, nickname, resource,
                           require_repo_read, require_repo_write, validate_json_request,
                           ApiResource, internal_only, format_date, api, Unauthorized, NotFound,
                           path_param, InvalidRequest, require_repo_admin)
from endpoints.building import start_build, PreparedBuild
from data import database
from data import model
from auth.auth_context import get_authenticated_user
from auth.permissions import (ReadRepositoryPermission, ModifyRepositoryPermission,
                              AdministerRepositoryPermission, AdministerOrganizationPermission)
from data.buildlogs import BuildStatusRetrievalError
from util.names import parse_robot_username


logger = logging.getLogger(__name__)


def get_trigger_config(trigger):
  try:
    return json.loads(trigger.config)
  except (TypeError, ValueError):
    # The config is either unset or not valid JSON; treat it as empty.
    return {}


def get_job_config(build_obj):
  try:
    return json.loads(build_obj.job_config)
  except (TypeError, ValueError):
    # The job config is either unset or not valid JSON; treat it as empty.
    return {}


def user_view(user):
  return {
    'name': user.username,
    'kind': 'user',
    'is_robot': user.robot,
  }


def trigger_view(trigger, can_read=False, can_admin=False, for_build=False):
  """ Returns the API view of the given build trigger, or None if no trigger is configured. """
  if trigger and trigger.uuid:
    build_trigger = BuildTriggerHandler.get_handler(trigger)
    build_source = build_trigger.config.get('build_source')
    repo_url = build_trigger.get_repository_url() if build_source else None

    if can_admin:
      can_read = True

    is_connected_user = False
    if (can_admin and get_authenticated_user() and
        trigger.connected_user_id == get_authenticated_user().id):
      is_connected_user = True
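
    # The fields below are filtered by the caller's permissions: the build source and
    # repository URL are only exposed to readers, and the raw trigger config only to admins.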
    trigger_data = {
      'id': trigger.uuid,
      'service': trigger.service.name,
      'is_active': build_trigger.is_active(),
      'build_source': build_source if can_read else None,
      'repository_url': repo_url if can_read else None,
      'config': build_trigger.config if can_admin else {},
      'is_connected_user': is_connected_user,
    }

    if not for_build and can_admin and trigger.pull_robot:
      trigger_data['pull_robot'] = user_view(trigger.pull_robot)

    return trigger_data

  return None


def build_status_view(build_obj):
  """ Returns the API view of the given repository build. """
  phase = build_obj.phase
  try:
    status = build_logs.get_status(build_obj.uuid)
  except BuildStatusRetrievalError:
    status = {}
    phase = 'cannot_load'

  # If the status contains a heartbeat, check whether it has been written recently.
  # If not, the build is considered to have timed out.
  if phase != database.BUILD_PHASE.COMPLETE and phase != database.BUILD_PHASE.ERROR:
    if status is not None and 'heartbeat' in status and status['heartbeat']:
      heartbeat = datetime.datetime.utcfromtimestamp(status['heartbeat'])
      if datetime.datetime.utcnow() - heartbeat > datetime.timedelta(minutes=1):
        phase = database.BUILD_PHASE.INTERNAL_ERROR

  # If the phase is internal error, report 'error' instead when the queue item has
  # no retries remaining.
  if phase == database.BUILD_PHASE.INTERNAL_ERROR:
    retry = build_obj.queue_id and dockerfile_build_queue.has_retries_remaining(build_obj.queue_id)
    if not retry:
      phase = database.BUILD_PHASE.ERROR

  repo_namespace = build_obj.repository.namespace_user.username
  repo_name = build_obj.repository.name

  can_read = ReadRepositoryPermission(repo_namespace, repo_name).can()
  can_write = ModifyRepositoryPermission(repo_namespace, repo_name).can()
  can_admin = AdministerRepositoryPermission(repo_namespace, repo_name).can()

  job_config = get_job_config(build_obj)

  resp = {
    'id': build_obj.uuid,
    'phase': phase,
    'started': format_date(build_obj.started),
    'display_name': build_obj.display_name,
    'status': status or {},
    'subdirectory': job_config.get('build_subdir', ''),
    'tags': job_config.get('docker_tags', []),
    'manual_user': job_config.get('manual_user', None),
    'is_writer': can_write,
    'trigger': trigger_view(build_obj.trigger, can_read, can_admin, for_build=True),
    'trigger_metadata': job_config.get('trigger_metadata', None) if can_read else None,
    'resource_key': build_obj.resource_key,
    'pull_robot': user_view(build_obj.pull_robot) if build_obj.pull_robot else None,
    'repository': {
      'namespace': repo_namespace,
      'name': repo_name
    }
  }
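
  # Only expose the location of the build archive to users who can write to the repository.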
  if can_write:
    if build_obj.resource_key is not None:
      resp['archive_url'] = user_files.get_file_url(build_obj.resource_key, requires_cors=True)
    elif job_config.get('archive_url', None):
      resp['archive_url'] = job_config['archive_url']

  return resp


@resource('/v1/repository/<apirepopath:repository>/build/')
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
class RepositoryBuildList(RepositoryParamResource):
  """ Resource related to creating and listing repository builds. """
  schemas = {
    'RepositoryBuildRequest': {
      'type': 'object',
      'description': 'Description of a new repository build.',
      'properties': {
        'file_id': {
          'type': 'string',
          'description': 'The file id that was generated when the build spec was uploaded',
        },
        'archive_url': {
          'type': 'string',
          'description': 'The URL of the .tar.gz to build. Must start with "http" or "https".',
        },
        'subdirectory': {
          'type': 'string',
          'description': 'Subdirectory in which the Dockerfile can be found',
        },
        'pull_robot': {
          'type': 'string',
          'description': 'Username of a Quay robot account to use as pull credentials',
        },
        'docker_tags': {
          'type': 'array',
          'description': 'The tags to which the built images will be pushed. ' +
                         'If none specified, "latest" is used.',
          'items': {
            'type': 'string'
          },
          'minItems': 1,
          'uniqueItems': True
        }
      },
    },
  }

  @require_repo_read
  @parse_args()
  @query_param('limit', 'The maximum number of builds to return', type=int, default=5)
  @query_param('since', 'Returns all builds since the given unix timestamp', type=int, default=None)
  @nickname('getRepoBuilds')
  def get(self, namespace, repository, parsed_args):
    """ Get the list of repository builds. """
    limit = parsed_args.get('limit', 5)
    since = parsed_args.get('since', None)

    if since is not None:
      since = datetime.datetime.utcfromtimestamp(since)

    builds = model.build.list_repository_builds(namespace, repository, limit, since=since)
    return {
      'builds': [build_status_view(build) for build in builds]
    }

  @require_repo_write
  @nickname('requestRepoBuild')
  @validate_json_request('RepositoryBuildRequest')
  def post(self, namespace, repository):
    """ Request that a repository be built and pushed from the specified input. """
    logger.debug('User requested repository initialization.')
    request_json = request.get_json()

    dockerfile_id = request_json.get('file_id', None)
    archive_url = request_json.get('archive_url', None)

    if not dockerfile_id and not archive_url:
      raise InvalidRequest('file_id or archive_url required')

    if archive_url:
      archive_match = None
      try:
        archive_match = uri_parse(archive_url, 'URI')
      except ValueError:
        pass

      if not archive_match:
        raise InvalidRequest('Invalid Archive URL: Must be a valid URI')

      scheme = archive_match.get('scheme', None)
      if scheme != 'http' and scheme != 'https':
        raise InvalidRequest('Invalid Archive URL: Must be http or https')

    subdir = request_json.get('subdirectory', '')
    tags = request_json.get('docker_tags', ['latest'])
    pull_robot_name = request_json.get('pull_robot', None)

    # Verify that the requesting user is allowed to use the named pull robot.
    if pull_robot_name:
      result = parse_robot_username(pull_robot_name)
      if result:
        try:
          model.user.lookup_robot(pull_robot_name)
        except model.InvalidRobotException:
          raise NotFound()

        # Make sure the user has administer permissions for the robot's namespace.
        (robot_namespace, _) = result
        if not AdministerOrganizationPermission(robot_namespace).can():
          raise Unauthorized()
      else:
        raise Unauthorized()

    # Check if the dockerfile resource has already been used. If so, then it
    # can only be reused if the user has access to the repository in which the
    # dockerfile was previously built.
    if dockerfile_id:
      associated_repository = model.build.get_repository_for_resource(dockerfile_id)
      if associated_repository:
        if not ModifyRepositoryPermission(associated_repository.namespace_user.username,
                                          associated_repository.name).can():
          raise Unauthorized()

    # Start the build.
    repo = model.repository.get_repository(namespace, repository)
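
    # The build name is a short identifier derived from the input: the checksum of the
    # uploaded build spec when a file_id was given, otherwise the first seven hex characters
    # of the SHA-224 of the archive URL.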
    build_name = (user_files.get_file_checksum(dockerfile_id)
                  if dockerfile_id
                  else hashlib.sha224(archive_url).hexdigest()[0:7])

    prepared = PreparedBuild()
    prepared.build_name = build_name
    prepared.dockerfile_id = dockerfile_id
    prepared.archive_url = archive_url
    prepared.tags = tags
    prepared.subdirectory = subdir
    prepared.is_manual = True
    prepared.metadata = {}

    build_request = start_build(repo, prepared, pull_robot_name=pull_robot_name)
    resp = build_status_view(build_request)
    repo_string = '%s/%s' % (namespace, repository)
    headers = {
      'Location': api.url_for(RepositoryBuildStatus, repository=repo_string,
                              build_uuid=build_request.uuid),
    }
    return resp, 201, headers


@resource('/v1/repository/<apirepopath:repository>/build/<build_uuid>')
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
@path_param('build_uuid', 'The UUID of the build')
class RepositoryBuildResource(RepositoryParamResource):
  """ Resource for dealing with repository builds. """
  @require_repo_read
  @nickname('getRepoBuild')
  def get(self, namespace, repository, build_uuid):
    """ Returns information about a build. """
    try:
      build = model.build.get_repository_build(build_uuid)
    except model.build.InvalidRepositoryBuildException:
      raise NotFound()

    return build_status_view(build)

  @require_repo_admin
  @nickname('cancelRepoBuild')
  def delete(self, namespace, repository, build_uuid):
    """ Cancels a repository build if it has not yet been picked up by a build worker. """
    try:
      build = model.build.get_repository_build(build_uuid)
    except model.build.InvalidRepositoryBuildException:
      raise NotFound()
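
    # Make sure the build actually belongs to the repository named in the request path.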
    if build.repository.name != repository or build.repository.namespace_user.username != namespace:
      raise NotFound()

    if model.build.cancel_repository_build(build, dockerfile_build_queue):
      return 'Okay', 201
    else:
      raise InvalidRequest('Build is currently running or has finished')


@resource('/v1/repository/<apirepopath:repository>/build/<build_uuid>/status')
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
@path_param('build_uuid', 'The UUID of the build')
class RepositoryBuildStatus(RepositoryParamResource):
  """ Resource for dealing with repository build status. """
  @require_repo_read
  @nickname('getRepoBuildStatus')
  def get(self, namespace, repository, build_uuid):
    """ Return the status for the build specified by the build uuid. """
    build = model.build.get_repository_build(build_uuid)
    if (not build or build.repository.name != repository or
        build.repository.namespace_user.username != namespace):
      raise NotFound()

    return build_status_view(build)


@resource('/v1/repository/<apirepopath:repository>/build/<build_uuid>/logs')
@path_param('repository', 'The full path of the repository. e.g. namespace/name')
@path_param('build_uuid', 'The UUID of the build')
class RepositoryBuildLogs(RepositoryParamResource):
  """ Resource for loading repository build logs. """
  @require_repo_write
  @nickname('getRepoBuildLogs')
  def get(self, namespace, repository, build_uuid):
    """ Return the build logs for the build specified by the build uuid. """
    response_obj = {}

    build = model.build.get_repository_build(build_uuid)
    if (not build or build.repository.name != repository or
        build.repository.namespace_user.username != namespace):
      raise NotFound()

    # If the logs have been archived, just return a URL to the completed archive.
    if build.logs_archived:
      return {
        'logs_url': log_archive.get_file_url(build.uuid)
      }
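
    # 'start' is the offset of the first log entry to return; it defaults to the beginning
    # of the log.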
    start = int(request.args.get('start', 0))

    try:
      count, logs = build_logs.get_log_entries(build.uuid, start)
    except BuildStatusRetrievalError:
      count, logs = (0, [])

    response_obj.update({
      'start': start,
      'total': count,
      'logs': [log for log in logs],
    })

    return response_obj


@resource('/v1/filedrop/')
@internal_only
class FileDropResource(ApiResource):
  """ Custom verb for setting up a client side file transfer. """
  schemas = {
    'FileDropRequest': {
      'type': 'object',
      'description': 'Description of the file that the user wishes to upload.',
      'required': [
        'mimeType',
      ],
      'properties': {
        'mimeType': {
          'type': 'string',
          'description': 'Type of the file which is about to be uploaded',
        },
      },
    },
  }

  @nickname('getFiledropUrl')
  @validate_json_request('FileDropRequest')
  def post(self):
    """ Request a URL to which a file may be uploaded. """
    mime_type = request.get_json()['mimeType']
    (url, file_id) = user_files.prepare_for_drop(mime_type, requires_cors=True)
    return {
      'url': url,
      'file_id': str(file_id),
    }