Build job does not have a request context when calling get_file_url
We therefore need to pass an explicit IP, or get_file_url will try to read it from the request context.
parent 3bef21253d
commit 2ce4e49711
6 changed files with 12 additions and 8 deletions
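For context on the commit message above: Flask's request proxy only resolves while a request is bound, so a background build job that touches request.remote_addr raises a RuntimeError. Below is a minimal, hypothetical sketch of that failure mode, assuming only Flask; the function names are illustrative and not taken from the repository.

from flask import Flask, request

app = Flask(__name__)

def client_ip_from_request():
  # Works only while a request is bound; Flask resolves the proxy to the live request.
  return request.remote_addr

def client_ip_for_build_job():
  # A build worker runs with no request bound, so the proxy raises
  # RuntimeError("Working outside of request context."); the commit therefore
  # passes an explicit address ('127.0.0.1') instead of consulting the proxy.
  try:
    return request.remote_addr
  except RuntimeError:
    return '127.0.0.1'

with app.test_request_context('/', environ_base={'REMOTE_ADDR': '10.0.0.5'}):
  print(client_ip_from_request())   # 10.0.0.5

print(client_ip_for_build_job())    # 127.0.0.1 -- no request context here

The diff below applies the same idea: the build job passes a literal '127.0.0.1', while request handlers pass request.remote_addr explicitly.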
@@ -77,7 +77,7 @@ class BuildJob(object):
     if not self.repo_build.resource_key:
       return ''

-    return user_files.get_file_url(self.repo_build.resource_key, requires_cors=False)
+    return user_files.get_file_url(self.repo_build.resource_key, '127.0.0.1', requires_cors=False)

   @property
   def pull_credentials(self):
@@ -105,9 +105,9 @@ class DelegateUserfiles(object):
                                            content_encoding)
     return file_id

-  def get_file_url(self, file_id, expires_in=300, requires_cors=False):
+  def get_file_url(self, file_id, remote_ip, expires_in=300, requires_cors=False):
     path = self.get_file_id_path(file_id)
-    url = self._storage.get_direct_download_url(self._locations, path, request.remote_addr, expires_in,
+    url = self._storage.get_direct_download_url(self._locations, path, remote_ip, expires_in,
                                                 requires_cors)

     if url is None:
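To make the direction of this change concrete, here is a small, self-contained sketch of the reshaped call path; FakeStorage and UserfilesSketch are stand-ins invented for illustration, not classes from the repository. The point is that the caller now owns the choice of remote address, and the userfiles/storage layer never reads Flask state.

class FakeStorage(object):
  def get_direct_download_url(self, locations, path, remote_ip, expires_in, requires_cors):
    # Stand-in for the real storage engine: just echo the arguments into a URL.
    return 'https://cdn.example.invalid/%s?ip=%s&expires=%d&cors=%s' % (
        path, remote_ip, expires_in, requires_cors)

class UserfilesSketch(object):
  def __init__(self, storage, locations):
    self._storage = storage
    self._locations = locations

  def get_file_id_path(self, file_id):
    return 'userfiles/%s' % file_id

  def get_file_url(self, file_id, remote_ip, expires_in=300, requires_cors=False):
    # remote_ip is now an argument; nothing here depends on a request context.
    path = self.get_file_id_path(file_id)
    return self._storage.get_direct_download_url(self._locations, path, remote_ip,
                                                 expires_in, requires_cors)

files = UserfilesSketch(FakeStorage(), ['local_us'])
print(files.get_file_url('some-resource-key', '127.0.0.1'))   # build job: literal IP
# Inside a Flask view the caller would pass request.remote_addr instead.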
@@ -151,7 +151,8 @@ def build_status_view(build_obj):

   if can_write or features.READER_BUILD_LOGS:
     if build_obj.resource_key is not None:
-      resp['archive_url'] = user_files.get_file_url(build_obj.resource_key, requires_cors=True)
+      resp['archive_url'] = user_files.get_file_url(build_obj.resource_key,
+                                                    request.remote_addr, requires_cors=True)
     elif job_config.get('archive_url', None):
       resp['archive_url'] = job_config['archive_url']

@@ -402,7 +403,7 @@ def get_logs_or_log_url(build):
   # If the logs have been archived, just return a URL of the completed archive
   if build.logs_archived:
     return {
-      'logs_url': log_archive.get_file_url(build.uuid, requires_cors=True)
+      'logs_url': log_archive.get_file_url(build.uuid, request.remote_addr, requires_cors=True)
     }
   start = int(request.args.get('start', 0))

@@ -1,4 +1,7 @@
 import features
+
+from flask import request
+
 from app import all_queues, userfiles
 from auth.permissions import ReadRepositoryPermission, ModifyRepositoryPermission, AdministerRepositoryPermission
 from data import model, database
@@ -85,7 +88,7 @@ class PreOCIModel(SuperuserDataInterface):
     can_admin = AdministerRepositoryPermission(repo_namespace, repo_name).can()
     job_config = get_job_config(build.job_config)
     phase, status, error = _get_build_status(build)
-    url = userfiles.get_file_url(self.resource_key, requires_cors=True)
+    url = userfiles.get_file_url(self.resource_key, request.remote_addr, requires_cors=True)

     return RepositoryBuild(build.uuid, build.logs_archived, repo_namespace, repo_name, can_write, can_read,
                            _create_user(build.pull_robot), build.resource_key,
@@ -363,7 +363,7 @@ def buildlogs(build_uuid):
  # If the logs have been archived, just return a URL of the completed archive
  if found_build.logs_archived:
-    return redirect(log_archive.get_file_url(found_build.uuid))
+    return redirect(log_archive.get_file_url(found_build.uuid, request.remote_addr))

  _, logs = build_logs.get_log_entries(found_build.uuid, 0)
  response = jsonify({
@@ -6,7 +6,7 @@ import requests
 w = workers.dockerfilebuild.DockerfileBuildWorker(100, None)

 resource_key = '5c0a985c-405d-4161-b0ac-603c3757b5f9'
-resource_url = user_files.get_file_url(resource_key, requires_cors=False)
+resource_url = user_files.get_file_url(resource_key, '127.0.0.1', requires_cors=False)
 print resource_url

 docker_resource = requests.get(resource_url, stream=True)