Fix the subprocesses to also use their own storage classes; this fixes a socket error when talking to S3
This commit is contained in:
parent
7ef3be5842
commit
adc915a5eb
1 changed file with 6 additions and 2 deletions
|
@ -4,11 +4,12 @@ import hashlib
|
|||
|
||||
from flask import redirect, Blueprint, abort, send_file
|
||||
|
||||
from app import storage as store, app
|
||||
from app import app
|
||||
from auth.auth import process_auth
|
||||
from auth.permissions import ReadRepositoryPermission
|
||||
from data import model
|
||||
from data import database
|
||||
from storage import Storage
|
||||
|
||||
from util.queuefile import QueueFile
|
||||
from util.queueprocess import QueueProcess
|
||||
|
@ -19,8 +20,9 @@ from util.dockerloadformat import build_docker_load_stream
|
|||
verbs = Blueprint('verbs', __name__)
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _open_stream(namespace, repository, tag, synthetic_image_id, image_json, image_list):
|
||||
store = Storage(app)
|
||||
|
||||
def get_next_image():
|
||||
for current_image_id in image_list:
|
||||
yield model.get_repo_image(namespace, repository, current_image_id)
|
||||
|
@ -44,6 +46,7 @@ def _open_stream(namespace, repository, tag, synthetic_image_id, image_json, ima
|
|||
|
||||
def _write_synthetic_image_to_storage(linked_storage_uuid, linked_locations, queue_file):
|
||||
database.configure(app.config)
|
||||
store = Storage(app)
|
||||
|
||||
def handle_exception(ex):
|
||||
model.delete_derived_storage_by_uuid(linked_storage_uuid)
|
||||
|
@ -75,6 +78,7 @@ def get_squashed_tag(namespace, repository, tag):
|
|||
if not repo_image:
|
||||
abort(404)
|
||||
|
||||
store = Storage(app)
|
||||
derived = model.find_or_create_derived_storage(repo_image.storage, 'squash',
|
||||
store.preferred_locations[0])
|
||||
if not derived.uploading:
|
||||
|
|
Reference in a new issue