Resolve race condition between multiple log archivers
parent 8499612c4c
commit a159bd3e77
2 changed files with 26 additions and 14 deletions
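Previously each archiver did a blind read-modify-write (`to_update.logs_archived = True` followed by `save()`), so two workers that picked up the same build could both archive it and both delete its log entries. The new `mark_build_archived` helper turns this into a compare-and-set: a single conditional UPDATE whose row count tells the caller whether it was the one that flipped the flag. Below is a minimal, self-contained sketch of that idiom in peewee; the `RepositoryBuild` field list and the in-memory SQLite database are assumptions made only so the example runs on its own, not the application's real model.

# A minimal sketch of the compare-and-set idiom introduced by this commit.
# The field list on RepositoryBuild and the in-memory SQLite database are
# assumptions made so the example is self-contained; the real model lives
# in the application's data layer.
from peewee import SqliteDatabase, Model, CharField, BooleanField

db = SqliteDatabase(':memory:')


class RepositoryBuild(Model):
  uuid = CharField(unique=True)
  logs_archived = BooleanField(default=False)

  class Meta:
    database = db


def mark_build_archived(build_uuid):
  """ Flip logs_archived to True only if it is still False; the number of
      rows changed tells us whether this caller won the race. """
  return (RepositoryBuild
          .update(logs_archived=True)
          .where(RepositoryBuild.uuid == build_uuid,
                 RepositoryBuild.logs_archived == False)
          .execute()) > 0


if __name__ == '__main__':
  db.create_tables([RepositoryBuild])
  RepositoryBuild.create(uuid='abc-123')
  print(mark_build_archived('abc-123'))  # True: this caller archived the build
  print(mark_build_archived('abc-123'))  # False: someone already archived it

Because the WHERE clause re-checks `logs_archived == False` inside the same statement, the database serializes concurrent updates and at most one caller sees a non-zero row count, which is the property the worker relies on before expiring and deleting the build logs.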
@@ -248,3 +248,13 @@ def get_archivable_build():
     return RepositoryBuild.get(id=found_id)
   except RepositoryBuild.DoesNotExist:
     return None
+
+
+def mark_build_archived(build_uuid):
+  """ Mark a build as archived, and return True if we were the ones who actually
+      updated the row. """
+  return (RepositoryBuild
+          .update(logs_archived=True)
+          .where(RepositoryBuild.uuid == build_uuid,
+                 RepositoryBuild.logs_archived == False)
+          .execute()) > 0
@@ -15,6 +15,7 @@ MEMORY_TEMPFILE_SIZE = 64 * 1024 # Large enough to handle approximately 99% of
 
 logger = logging.getLogger(__name__)
 
 
 class ArchiveBuildLogsWorker(Worker):
   def __init__(self):
     super(ArchiveBuildLogsWorker, self).__init__()
@@ -38,22 +39,23 @@ class ArchiveBuildLogsWorker(Worker):
       'logs': entries,
     }
 
-    with CloseForLongOperation(app.config):
-      with SpooledTemporaryFile(MEMORY_TEMPFILE_SIZE) as tempfile:
-        with GzipFile('testarchive', fileobj=tempfile) as zipstream:
-          for chunk in StreamingJSONEncoder().iterencode(to_encode):
-            zipstream.write(chunk)
+    if length > 0:
+      with CloseForLongOperation(app.config):
+        with SpooledTemporaryFile(MEMORY_TEMPFILE_SIZE) as tempfile:
+          with GzipFile('testarchive', fileobj=tempfile) as zipstream:
+            for chunk in StreamingJSONEncoder().iterencode(to_encode):
+              zipstream.write(chunk)
 
-        tempfile.seek(0)
-        log_archive.store_file(tempfile, JSON_MIMETYPE, content_encoding='gzip',
-                               file_id=to_archive.uuid)
+          tempfile.seek(0)
+          log_archive.store_file(tempfile, JSON_MIMETYPE, content_encoding='gzip',
+                                 file_id=to_archive.uuid)
 
-    to_update = model.build.get_repository_build(to_archive.uuid)
-    to_update.logs_archived = True
-    to_update.save()
-
-    build_logs.expire_status(to_update.uuid)
-    build_logs.delete_log_entries(to_update.uuid)
+    we_updated = model.build.mark_build_archived(to_archive.uuid)
+    if we_updated:
+      build_logs.expire_status(to_archive.uuid)
+      build_logs.delete_log_entries(to_archive.uuid)
+    else:
+      logger.debug('Another worker pre-empted us when archiving: %s', to_archive.uuid)
 
 
 if __name__ == "__main__":