Finish the build logs archiver; add cloud and local handlers that handle gzip-encoded archived content.

Jake Moshenko 2014-09-11 15:33:10 -04:00
parent 2455c17f96
commit 8b3a3178b0
10 changed files with 82 additions and 18 deletions
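
The hunks below cover only the archiver worker; the cloud and local handlers mentioned in the commit message live in the other changed files. As a rough illustration of what such a handler has to deal with, here is a minimal sketch of the read side, assuming a hypothetical storage object with a get_content(file_id) method (these names are stand-ins, not code from this commit):

from gzip import GzipFile
from io import BytesIO

def read_archived_logs(storage, file_id):
  """ Return the decoded JSON bytes for a gzip-encoded log archive.

      `storage` and `get_content` are hypothetical stand-ins for a real
      backend: local storage hands back the raw gzip bytes, while a cloud
      backend served over HTTP may already have had its Content-Encoding
      layer decoded by the client.
  """
  raw = storage.get_content(file_id)
  try:
    # Archives are stored with content_encoding='gzip'; decompress before use.
    return GzipFile(fileobj=BytesIO(raw)).read()
  except IOError:
    # The transport already decoded the gzip layer; the bytes are plain JSON.
    return raw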

@@ -3,10 +3,12 @@ import logging
 from apscheduler.schedulers.blocking import BlockingScheduler
 from peewee import fn
+from tempfile import SpooledTemporaryFile
+from gzip import GzipFile
 
 from data import model
-from data.database import configure, RepositoryBuild
-from app import app, build_logs, log_archive
+from data.archivedlogs import JSON_MIMETYPE
+from data.database import RepositoryBuild
+from app import build_logs, log_archive
 from util.streamingjsonencoder import StreamingJSONEncoder
 
 POLL_PERIOD_SECONDS = 30
@@ -14,7 +16,7 @@ POLL_PERIOD_SECONDS = 30
 logger = logging.getLogger(__name__)
 sched = BlockingScheduler()
 
-@sched.scheduled_job(trigger='interval', seconds=5)
+@sched.scheduled_job(trigger='interval', seconds=1)
 def archive_redis_buildlogs():
   """ Archive a single build, choosing a candidate at random. This process must be idempotent to
       avoid needing two-phase commit. """
@@ -30,8 +32,19 @@ def archive_redis_buildlogs():
       'logs': entries,
     }
 
-    for chunk in StreamingJSONEncoder().iterencode(to_encode):
-      print chunk
+    with SpooledTemporaryFile() as tempfile:
+      with GzipFile('testarchive', fileobj=tempfile) as zipstream:
+        for chunk in StreamingJSONEncoder().iterencode(to_encode):
+          zipstream.write(chunk)
+
+      tempfile.seek(0)
+      log_archive.store_file(tempfile, JSON_MIMETYPE, content_encoding='gzip',
+                             file_id=to_archive.uuid)
+
+    to_archive.logs_archived = True
+    to_archive.save()
+
+    build_logs.delete_log_entries(to_archive.uuid)
 
   except RepositoryBuild.DoesNotExist:
     logger.debug('No more builds to archive')
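
For reference outside the codebase, the write path in the last hunk can be exercised standalone. This sketch swaps Quay's StreamingJSONEncoder for the stdlib's incremental encoder, but is otherwise the same gzip-into-spooled-tempfile pattern:

import json
from gzip import GzipFile
from tempfile import SpooledTemporaryFile

to_encode = {'start': 0, 'total': 2,
             'logs': [{'message': 'step 1'}, {'message': 'step 2'}]}

with SpooledTemporaryFile() as tempfile:
  # Stream encoded chunks through gzip; SpooledTemporaryFile keeps small
  # payloads in memory and spills large ones to disk automatically.
  with GzipFile('archive', mode='wb', fileobj=tempfile) as zipstream:
    for chunk in json.JSONEncoder().iterencode(to_encode):
      zipstream.write(chunk.encode('utf-8'))

  # Rewind and verify the round trip before handing the file to storage.
  tempfile.seek(0)
  decoded = json.loads(GzipFile(fileobj=tempfile, mode='rb').read().decode('utf-8'))
  assert decoded == to_encode

The ordering in that hunk is also what backs up the docstring's idempotency claim: the archive is stored first, the row is flagged with logs_archived second, and the Redis entries are deleted last, so a crash between any two steps leaves a state the next run can safely redo, at worst overwriting the same file_id.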