Change registry code to disconnect from the DB before long I/O operations

Joseph Schorr 2014-11-06 18:00:52 -05:00
parent 23d9bd2b42
commit d5bbb57481
3 changed files with 26 additions and 7 deletions


@@ -35,6 +35,22 @@ class CallableProxy(Proxy):
       raise AttributeError('Cannot use uninitialized Proxy.')
     return self.obj(*args, **kwargs)
 
+
+class CloseForLongOperation(object):
+  """ Helper object which disconnects the database then reconnects after the nested operation
+      completes.
+  """
+  def __init__(self, config_object):
+    self.config_object = config_object
+
+  def __enter__(self):
+    close_db_filter(None)
+
+  def __exit__(self, type, value, traceback):
+    configure(self.config_object)
+
+
 class UseThenDisconnect(object):
   """ Helper object for conducting work with a database and then tearing it down. """
@@ -69,6 +85,7 @@ def _db_from_url(url, db_kwargs):
 def configure(config_object):
+  logger.debug('Configuring database')
   db_kwargs = dict(config_object['DB_CONNECTION_ARGS'])
   write_db_uri = config_object['DB_URI']
   db.initialize(_db_from_url(write_db_uri, db_kwargs))
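Taken together, close_db_filter(None) drops the current connection and configure() re-initializes it from the app config, which is how CloseForLongOperation reconnects on exit. A minimal usage sketch, assuming the data.database module patched above; the app import and the do_long_io helper are illustrative stand-ins, not part of this commit:

from app import app            # hypothetical: wherever the Flask app/config lives
from data import database

def do_long_io(handler):
  """ Illustrative stand-in for a slow storage upload/download (no DB access inside). """
  pass

def upload_blob(handler):
  # Disconnect for the duration of the slow transfer; __exit__ re-runs
  # configure(app.config) so code after the block gets a fresh connection.
  with database.CloseForLongOperation(app.config):
    do_long_io(handler)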


@@ -13,7 +13,7 @@ from util import checksums, changes
 from util.http import abort, exact_abort
 from auth.permissions import (ReadRepositoryPermission,
                               ModifyRepositoryPermission)
-from data import model
+from data import model, database
 from util import gzipstream
@@ -152,8 +152,9 @@ def get_image_layer(namespace, repository, image_id, headers):
       profile.debug('Streaming layer data')
 
-      # TODO: DATABASE: We should disconnect from the database here, so that
-      # we're not holding the DB handle during this long download.
+      # Close the database handle here for this process before we send the long download.
+      database.close_db_filter(None)
+
       return Response(store.stream_read(repo_image.storage.locations, path), headers=headers)
     except (IOError, AttributeError):
       profile.debug('Image not found')
@@ -215,10 +216,8 @@ def put_image_layer(namespace, repository, image_id):
   h, sum_hndlr = checksums.simple_checksum_handler(json_data)
   sr.add_handler(sum_hndlr)
 
-  # TODO: DATABASE: We should disconnect from the database here and reconnect AFTER, so that
-  # we're not holding the DB handle during this long upload.
-
   # Stream write the data to storage.
-  store.stream_write(repo_image.storage.locations, layer_path, sr)
+  with database.CloseForLongOperation(app.config):
+    store.stream_write(repo_image.storage.locations, layer_path, sr)
 
   # Append the computed checksum.
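The two registry endpoints above apply the same idea in two different shapes. A hedged side-by-side sketch of the distinction, with store, locations, path, layer_path and sr standing in for the objects already in scope in those views, and the app import again hypothetical:

from flask import Response
from app import app            # hypothetical: wherever the Flask app/config lives
from data import database

def stream_download(store, locations, path, headers):
  # Nothing touches the DB after this point, and the Response body is streamed
  # after the view returns, so a one-way close of the handle is enough.
  database.close_db_filter(None)
  return Response(store.stream_read(locations, path), headers=headers)

def stream_upload(store, locations, layer_path, sr):
  # DB work (checksum bookkeeping) still follows the long write, so disconnect
  # and reconnect around it with the new context manager.
  with database.CloseForLongOperation(app.config):
    store.stream_write(locations, layer_path, sr)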


@@ -101,6 +101,9 @@ def get_squashed_tag(namespace, repository, tag):
       logger.debug('Redirecting to download URL for derived image %s', derived.uuid)
       return redirect(download_url)
 
+    # Close the database handle here for this process before we send the long download.
+    database.close_db_filter(None)
+
     logger.debug('Sending cached derived image %s', derived.uuid)
     return send_file(store.stream_read_file(derived.locations, derived_layer_path))