import logging
import json
import daemon
import time
import argparse

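# APScheduler 2.x API: Scheduler and add_interval_job were reorganized in 3.x
# (apscheduler.schedulers.* and add_job), so this import assumes the 2.x series.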
from apscheduler.scheduler import Scheduler

from data.queue import image_diff_queue
from endpoints.registry import process_image_changes



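# Log everything at DEBUG on the root logger; a file or stream handler is
# attached below depending on whether we run as a daemon (-D).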
root_logger = logging.getLogger('')
root_logger.setLevel(logging.DEBUG)

FORMAT = '%(asctime)-15s - %(levelname)s - %(pathname)s - %(funcName)s - %(message)s'
formatter = logging.Formatter(FORMAT)

logger = logging.getLogger(__name__)


def process_work_items():
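  """Drain the image diff queue, processing each item until the queue is empty."""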
  logger.debug('Getting work item from queue.')

  item = image_diff_queue.get()

  while item:
    logger.debug('Queue gave us some work: %s', item.body)

    request = json.loads(item.body)
    process_image_changes(request['namespace'], request['repository'],
                          request['image_id'])

    image_diff_queue.complete(item)

    item = image_diff_queue.get()

  logger.debug('No more work.')


def start_worker():
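  """Run process_work_items every 30 seconds and block the main thread."""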
  logger.debug('Scheduling worker.')

  sched = Scheduler()
  sched.start()

  sched.add_interval_job(process_work_items, seconds=30)

  while True:
    time.sleep(60 * 60 * 24)  # sleep one day, basically forever


parser = argparse.ArgumentParser(description='Worker daemon to compute diffs')
parser.add_argument('-D', action='store_true', default=False,
                    help='Run the worker in daemon mode.')
parser.add_argument('--log', default='diffsworker.log',
                    help='Specify the log file for the worker as a daemon.')
args = parser.parse_args()


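# Daemon mode logs to a file and detaches from the terminal;
# otherwise log to stderr and run in the foreground.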
if args.D:
  handler = logging.FileHandler(args.log)
  handler.setFormatter(formatter)
  root_logger.addHandler(handler)
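  # files_preserve keeps the log file descriptor open across daemonization,
  # since DaemonContext closes all other open files when it forks.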
  with daemon.DaemonContext(files_preserve=[handler.stream]):
    start_worker()

else:
  handler = logging.StreamHandler()
  handler.setFormatter(formatter)
  root_logger.addHandler(handler)
  start_worker()