Archived logs commit 1. Squash me.

Jake Moshenko 2014-09-08 16:43:17 -04:00
parent 54fbb2a4c0
commit 451e034ca1
9 changed files with 402 additions and 22 deletions


@@ -0,0 +1,42 @@
import logging

from apscheduler.schedulers.blocking import BlockingScheduler
from peewee import fn
from tempfile import SpooledTemporaryFile

from data import model
from data.database import configure, RepositoryBuild
from app import app, build_logs, log_archive
from util.streamingjsonencoder import StreamingJSONEncoder

POLL_PERIOD_SECONDS = 30

logger = logging.getLogger(__name__)
sched = BlockingScheduler()

@sched.scheduled_job(trigger='interval', seconds=5)
def archive_redis_buildlogs():
  """ Archive a single build, choosing a candidate at random. This process must be idempotent to
      avoid needing two-phase commit. """
  try:
    # Get a random build to archive
    to_archive = model.archivable_buildlogs_query().order_by(fn.Random()).get()
    logger.debug('Archiving: %s', to_archive.uuid)

    length, entries = build_logs.get_log_entries(to_archive.uuid, 0)
    to_encode = {
      'start': 0,
      'total': length,
      'logs': entries,
    }

    for chunk in StreamingJSONEncoder().iterencode(to_encode):
      print chunk

  except RepositoryBuild.DoesNotExist:
    logger.debug('No more builds to archive')


if __name__ == "__main__":
  logging.basicConfig(level=logging.DEBUG)
  sched.start()
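
Note: per the commit message ("Squash me"), this is work in progress; the loop only prints the encoded chunks, and SpooledTemporaryFile, log_archive, and POLL_PERIOD_SECONDS are imported or defined but not yet used. A minimal sketch of what the persistence step might look like, spooling the streamed JSON to a temporary file and handing it to the archive. The log_archive.store_file signature and the logs_archived flag are assumptions for illustration, not confirmed by this commit:

  # Sketch only: replace the "print chunk" loop with something like this.
  with SpooledTemporaryFile(max_size=2 * 1024 * 1024) as tempfile:
    for chunk in StreamingJSONEncoder().iterencode(to_encode):
      tempfile.write(chunk)

    tempfile.seek(0)
    # Assumed API: store the spooled file under the build's UUID so re-running
    # the job for the same build simply overwrites the previous upload,
    # keeping the process idempotent as the docstring requires.
    log_archive.store_file(tempfile, 'application/json', file_id=to_archive.uuid)

  # Assumed column on RepositoryBuild: mark the build so the archivable query
  # stops selecting it.
  to_archive.logs_archived = True
  to_archive.save()

A follow-up commit would presumably also switch the job interval from the hard-coded seconds=5 to POLL_PERIOD_SECONDS.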