diff --git a/data/database.py b/data/database.py
index 96e85a7d2..2b7b3c6ad 100644
--- a/data/database.py
+++ b/data/database.py
@@ -21,8 +21,24 @@ SCHEME_DRIVERS = {
   'postgresql+psycopg2': PostgresqlDatabase,
 }
 
+SCHEME_RANDOM_FUNCTION = {
+  'mysql': fn.Rand,
+  'mysql+pymysql': fn.Rand,
+  'sqlite': fn.Random,
+  'postgresql': fn.Random,
+  'postgresql+psycopg2': fn.Random,
+}
+
+class CallableProxy(Proxy):
+  def __call__(self, *args, **kwargs):
+    if self.obj is None:
+      raise AttributeError('Cannot use uninitialized Proxy.')
+    return self.obj(*args, **kwargs)
+
 db = Proxy()
 read_slave = Proxy()
+db_random_func = CallableProxy()
+
 
 def _db_from_url(url, db_kwargs):
   parsed_url = make_url(url)
@@ -38,11 +54,15 @@ def _db_from_url(url, db_kwargs):
 
   return SCHEME_DRIVERS[parsed_url.drivername](parsed_url.database, **db_kwargs)
 
+
 def configure(config_object):
   db_kwargs = dict(config_object['DB_CONNECTION_ARGS'])
   write_db_uri = config_object['DB_URI']
   db.initialize(_db_from_url(write_db_uri, db_kwargs))
 
+  parsed_write_uri = make_url(write_db_uri)
+  db_random_func.initialize(SCHEME_RANDOM_FUNCTION[parsed_write_uri.drivername])
+
   read_slave_uri = config_object.get('DB_READ_SLAVE_URI', None)
   if read_slave_uri is not None:
     read_slave.initialize(_db_from_url(read_slave_uri, db_kwargs))
diff --git a/test/data/test.db b/test/data/test.db
index 2c16ee353..29f6e1444 100644
Binary files a/test/data/test.db and b/test/data/test.db differ
diff --git a/workers/buildlogsarchiver.py b/workers/buildlogsarchiver.py
index 213481eed..9b621eb39 100644
--- a/workers/buildlogsarchiver.py
+++ b/workers/buildlogsarchiver.py
@@ -7,7 +7,7 @@ from gzip import GzipFile
 
 from data import model
 from data.archivedlogs import JSON_MIMETYPE
-from data.database import RepositoryBuild
+from data.database import RepositoryBuild, db_random_func
 from app import build_logs, log_archive
 from util.streamingjsonencoder import StreamingJSONEncoder
 
@@ -22,7 +22,7 @@ def archive_redis_buildlogs():
       avoid needing two-phase commit. """
   try:
     # Get a random build to archive
-    to_archive = model.archivable_buildlogs_query().order_by(fn.Random()).get()
+    to_archive = model.archivable_buildlogs_query().order_by(db_random_func()).get()
     logger.debug('Archiving: %s', to_archive.uuid)
 
     length, entries = build_logs.get_log_entries(to_archive.uuid, 0)