import argparse
import json
import logging
import time

import daemon
from apscheduler.scheduler import Scheduler

from data.database import db as db_connection
from data.queue import image_diff_queue
from endpoints.registry import process_image_changes
# Log line layout shared by every handler this worker attaches.
FORMAT = '%(asctime)-15s - %(levelname)s - %(pathname)s - %(funcName)s - %(message)s'
formatter = logging.Formatter(FORMAT)

# Configure the root logger at DEBUG; concrete handlers (file or stream)
# are attached at startup, depending on daemon mode.
root_logger = logging.getLogger('')
root_logger.setLevel(logging.DEBUG)

# Module-level logger used by the worker functions below.
logger = logging.getLogger(__name__)
def process_work_items():
    """Drain the image diff queue, processing every available work item.

    Each item's body is a JSON document with 'namespace', 'repository' and
    'image_id' keys identifying the image whose changes must be computed.
    When the queue is empty, the thread-local database connection is closed
    so scheduler threads do not leak open connections.
    """
    logger.debug('Getting work item from queue.')

    item = image_diff_queue.get()
    while item:
        # Lazy %-style logger args: the body is only formatted when DEBUG
        # logging is actually enabled (the original used eager '%' here).
        logger.debug('Queue gave us some work: %s', item.body)

        request = json.loads(item.body)
        process_image_changes(request['namespace'], request['repository'],
                              request['image_id'])

        # Mark the item done only after processing succeeded.
        image_diff_queue.complete(item)
        item = image_diff_queue.get()

    logger.debug('No more work.')

    # Presumably db_connection is a thread-local peewee handle; close it so
    # the scheduler thread does not hold it between runs — TODO confirm.
    if not db_connection.is_closed():
        logger.debug('Closing thread db connection.')
        db_connection.close()
def start_worker():
    """Schedule periodic diff processing and block the calling thread forever."""
    logger.debug("Scheduling worker.")

    scheduler = Scheduler()
    scheduler.start()
    # Poll the diff queue for new work every 30 seconds.
    scheduler.add_interval_job(process_work_items, seconds=30)

    # APScheduler runs its jobs on background threads; keep this process
    # alive by parking the main thread indefinitely.
    while True:
        time.sleep(60 * 60 * 24)  # sleep one day, basically forever
parser = argparse.ArgumentParser(description='Worker daemon to compute diffs')
parser.add_argument('-D', action='store_true', default=False,
                    help='Run the worker in daemon mode.')
parser.add_argument('--log', default='diffsworker.log',
                    help='Specify the log file for the worker as a daemon.')


if __name__ == '__main__':
    # Guarding the entry point means importing this module no longer starts
    # the worker as a side effect (the original ran unconditionally).
    args = parser.parse_args()

    # Daemon mode logs to a file; foreground mode logs to the console.
    # Handler wiring is shared instead of duplicated in both branches.
    handler = logging.FileHandler(args.log) if args.D else logging.StreamHandler()
    handler.setFormatter(formatter)
    root_logger.addHandler(handler)

    if args.D:
        # Preserve the open log file descriptor across daemonization;
        # DaemonContext closes all other files when it forks.
        with daemon.DaemonContext(files_preserve=[handler.stream]):
            start_worker()
    else:
        start_worker()