Change verbs to use short lived database connections
parent c569299e5c
commit 23d9bd2b42
3 changed files with 37 additions and 16 deletions
@@ -35,6 +35,19 @@ class CallableProxy(Proxy):
       raise AttributeError('Cannot use uninitialized Proxy.')
     return self.obj(*args, **kwargs)
 
+
+class UseThenDisconnect(object):
+  """ Helper object for conducting work with a database and then tearing it down. """
+
+  def __init__(self, config_object):
+    self.config_object = config_object
+
+  def __enter__(self):
+    configure(self.config_object)
+
+  def __exit__(self, type, value, traceback):
+    close_db_filter(None)
+
 db = Proxy()
 read_slave = Proxy()
 db_random_func = CallableProxy()
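Editorial note (not part of the commit): UseThenDisconnect is a plain context manager, so the lifecycle it enforces is easy to reproduce in isolation. A minimal standalone sketch of the same use-then-disconnect pattern against a throwaway peewee database, with every name here hypothetical:

# Standalone sketch of the use-then-disconnect pattern (illustrative only).
from peewee import SqliteDatabase, Model, CharField

db = SqliteDatabase(':memory:')

class Repository(Model):
  name = CharField()

  class Meta:
    database = db

class UseThenDisconnect(object):
  """ Open a connection on enter, always close it on exit. """
  def __init__(self, database):
    self.database = database

  def __enter__(self):
    self.database.connect()
    return self.database

  def __exit__(self, type, value, traceback):
    self.database.close()

with UseThenDisconnect(db):
  db.create_tables([Repository])
  Repository.create(name='example')
# No connection is held past this point, so long-running work can proceed without one.

The helper in the commit differs only in that it takes the app config and calls the module's own configure() and close_db_filter() rather than a database object directly.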
@@ -1423,6 +1423,12 @@ def lookup_repository_images(namespace_name, repository_name, docker_image_ids):
   return Image.select().where(Image.repository == repo, Image.docker_image_id << docker_image_ids)
 
 
+def get_matching_repository_images(namespace_name, repository_name, docker_image_ids):
+  def modify_query(q):
+    return q.where(Image.docker_image_id << docker_image_ids)
+
+  return _get_repository_images_base(namespace_name, repository_name, modify_query)
+
 def get_repository_images(namespace_name, repository_name):
   return _get_repository_images_base(namespace_name, repository_name, lambda q: q)
 
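Editorial note (not part of the commit): get_matching_repository_images reuses _get_repository_images_base by handing it a query-modifier callback. Stripped of the surrounding schema, the shape of that pattern is roughly the following, with every name hypothetical:

# Hypothetical, self-contained sketch of the modify_query callback pattern.
from peewee import SqliteDatabase, Model, CharField

db = SqliteDatabase(':memory:')

class Image(Model):
  docker_image_id = CharField()

  class Meta:
    database = db

def _get_images_base(modify_query):
  # Build the query shared by all callers, then let each caller narrow it.
  return modify_query(Image.select())

def get_matching_images(docker_image_ids):
  def modify_query(q):
    return q.where(Image.docker_image_id << docker_image_ids)  # << is peewee's IN operator
  return _get_images_base(modify_query)

def get_all_images():
  return _get_images_base(lambda q: q)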
@@ -21,26 +21,28 @@ from util.dockerloadformat import build_docker_load_stream
 verbs = Blueprint('verbs', __name__)
 logger = logging.getLogger(__name__)
 
-def _open_stream(namespace, repository, tag, synthetic_image_id, image_json, image_list):
+def _open_stream(namespace, repository, tag, synthetic_image_id, image_json, image_id_list):
   store = Storage(app)
 
-  # TODO: PERFORMANCE: configure DB, load the images, cache them, then disconnect.
+  # For performance reasons, we load the full image list here, cache it, then disconnect from
+  # the database.
+  with database.UseThenDisconnect(app.config):
+    image_list = model.get_matching_repository_images(namespace, repository, image_id_list)
 
   def get_next_image():
-    for current_image_id in image_list:
-      yield model.get_repo_image(namespace, repository, current_image_id)
+    for current_image in image_list:
+      yield current_image
 
   def get_next_layer():
-    for current_image_id in image_list:
-      current_image_entry = model.get_repo_image(namespace, repository, current_image_id)
+    for current_image_entry in image_list:
       current_image_path = store.image_layer_path(current_image_entry.storage.uuid)
       current_image_stream = store.stream_read_file(current_image_entry.storage.locations,
                                                     current_image_path)
 
+      current_image_id = current_image_entry.id
       logger.debug('Returning image layer %s: %s' % (current_image_id, current_image_path))
       yield current_image_stream
 
-  database.configure(app.config)
   stream = build_docker_load_stream(namespace, repository, tag, synthetic_image_id, image_json,
                                     get_next_image, get_next_layer)
 
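Editorial note: the key move above is that the images are fetched once, inside a short-lived connection, and cached in a list; the generators handed to build_docker_load_stream then only walk that in-memory list instead of issuing one query per image. A tiny sketch of that shape, with hypothetical names and assuming a context manager like the one sketched earlier:

# Hypothetical sketch: query while connected, cache the rows, generate later.
def load_rows_while_connected(db, run_query):
  with UseThenDisconnect(db):
    # list() forces the query so no lazy cursor outlives the connection.
    return list(run_query())

def iter_cached(rows):
  # Safe to run after the connection is closed; touches only cached objects.
  for row in rows:
    yield row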
@@ -48,25 +50,25 @@ def _open_stream(namespace, repository, tag, synthetic_image_id, image_json, ima
 
 
 def _write_synthetic_image_to_storage(linked_storage_uuid, linked_locations, queue_file):
-  database.configure(app.config)
   store = Storage(app)
 
   def handle_exception(ex):
     logger.debug('Exception when building squashed image %s: %s', linked_storage_uuid, ex)
-    model.delete_derived_storage_by_uuid(linked_storage_uuid)
+
+    with database.UseThenDisconnect(app.config):
+      model.delete_derived_storage_by_uuid(linked_storage_uuid)
 
   queue_file.add_exception_handler(handle_exception)
 
-  # TODO: PERFORMANCE: disconnect from the DB and reconnect once the stream write finishes or on
-  # error.
   image_path = store.image_layer_path(linked_storage_uuid)
   store.stream_write(linked_locations, image_path, queue_file)
   queue_file.close()
 
   if not queue_file.raised_exception:
-    done_uploading = model.get_storage_by_uuid(linked_storage_uuid)
-    done_uploading.uploading = False
-    done_uploading.save()
+    with database.UseThenDisconnect(app.config):
+      done_uploading = model.get_storage_by_uuid(linked_storage_uuid)
+      done_uploading.uploading = False
+      done_uploading.save()
 
 
 @verbs.route('/squash/<namespace>/<repository>/<tag>', methods=['GET'])
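Editorial note: the worker now keeps the expensive stream_write free of any open connection and only opens short-lived connections for the bookkeeping writes (cleanup on failure, marking the upload finished on success). Reduced to its shape, with hypothetical names and the same assumed context manager:

# Hypothetical sketch: no connection during the long write; brief connections
# for the database writes around it.
def write_then_record(db, do_long_write, mark_done, cleanup):
  try:
    do_long_write()                # long, database-free work
  except Exception:
    with UseThenDisconnect(db):
      cleanup()                    # short-lived connection for cleanup
    raise
  with UseThenDisconnect(db):
    mark_done()                    # short-lived connection for the final status write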
@@ -136,8 +138,8 @@ def get_squashed_tag(namespace, repository, tag):
   storage_args = (derived.uuid, derived.locations, storage_queue_file)
   QueueProcess.run_process(_write_synthetic_image_to_storage, storage_args, finished=_cleanup)
 
-  # TODO: PERFORMANCE: close the database handle here for this process before we send the long
-  # download.
+  # Close the database handle here for this process before we send the long download.
+  database.close_db_filter(None)
 
   # Return the client's data.
   return send_file(client_queue_file)
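Editorial note: the handler closes its database connection right before streaming, since the remainder of the request is pure I/O. A self-contained Flask sketch of that ordering (all helpers hypothetical; the real code calls database.close_db_filter(None)):

# Hypothetical Flask sketch: finish database work, drop the connection, then stream.
from flask import Flask, Response

app = Flask(__name__)

def fetch_chunks():
  # Stand-in for work that would normally need an open database connection.
  return [b'chunk-1', b'chunk-2']

@app.route('/download')
def download():
  chunks = fetch_chunks()
  # The real handler closes its database connection at this point so nothing
  # is held open for the duration of the long download.
  def generate():
    for chunk in chunks:
      yield chunk
  return Response(generate(), mimetype='application/octet-stream')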