quay/workers/diffsworker.py

import logging
import json
import daemon
import time
import argparse

from apscheduler.scheduler import Scheduler

from data.queue import image_diff_queue
from endpoints.registry import process_image_changes


root_logger = logging.getLogger('')
root_logger.setLevel(logging.DEBUG)

FORMAT = '%(asctime)-15s - %(levelname)s - %(pathname)s - %(funcName)s - %(message)s'
formatter = logging.Formatter(FORMAT)

logger = logging.getLogger(__name__)
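
# For reference, FORMAT above yields log lines shaped roughly like the
# following (timestamp, path, and message here are illustrative, not real
# output from this worker):
#
#   2013-10-18 21:52:55,123 - DEBUG - workers/diffsworker.py - process_work_items - Getting work item from queue.
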
def process_work_items():
    logger.debug('Getting work item from queue.')

    item = image_diff_queue.get()

    while item:
        logger.debug('Queue gave us some work: %s' % item.body)

        # Each queue item carries a JSON body naming the image whose diff
        # needs to be computed.
        request = json.loads(item.body)
        process_image_changes(request['namespace'], request['repository'],
                              request['image_id'])

        image_diff_queue.complete(item)
        item = image_diff_queue.get()

    logger.debug('No more work.')
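
# A hedged sketch of the message shape this loop consumes. The real producer
# lives elsewhere in the codebase; the field values below, and the assumption
# that the queue exposes a put() counterpart to get()/complete(), are
# hypothetical:
#
#   image_diff_queue.put(json.dumps({
#       'namespace': 'some-user',
#       'repository': 'some-repo',
#       'image_id': 'abcdef123456',
#   }))
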
def start_worker():
    logger.debug("Scheduling worker.")

    sched = Scheduler()
    sched.start()

    sched.add_interval_job(process_work_items, seconds=30)

    # APScheduler 2.x runs interval jobs on its own background thread, so the
    # main thread only needs to stay alive.
    while True:
        time.sleep(60 * 60 * 24)  # sleep one day, basically forever


parser = argparse.ArgumentParser(description='Worker daemon to compute diffs')
parser.add_argument('-D', action='store_true', default=False,
                    help='Run the worker in daemon mode.')
parser.add_argument('--log', default='diffsworker.log',
                    help='Specify the log file for the worker as a daemon.')
args = parser.parse_args()
if args.D:
    handler = logging.FileHandler(args.log)
    handler.setFormatter(formatter)
    root_logger.addHandler(handler)

    # files_preserve keeps the log file descriptor open across daemonization;
    # DaemonContext closes every other inherited file.
    with daemon.DaemonContext(files_preserve=[handler.stream]):
        start_worker()
else:
    handler = logging.StreamHandler()
    handler.setFormatter(formatter)
    root_logger.addHandler(handler)
    start_worker()
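
# Example invocations (module path assumed from the file location above, not
# confirmed by this file): run in the foreground with logs on stderr, or
# daemonized with logs written to a file.
#
#   python workers/diffsworker.py
#   python workers/diffsworker.py -D --log /var/log/diffsworker.log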