import logging
import json
import daemon
import time
import argparse

from apscheduler.scheduler import Scheduler

from data.queue import image_diff_queue
from data.database import db as db_connection
from data.model import DataModelException
from endpoints.registry import process_image_changes


# Configure the root logger to capture everything; the concrete handler
# (file vs. stream) is attached later based on the -D daemon flag.
root_logger = logging.getLogger('')
root_logger.setLevel(logging.DEBUG)

# Shared log-line format used by both the daemon and foreground handlers.
FORMAT = '%(asctime)-15s - %(levelname)s - %(pathname)s - %(funcName)s - %(message)s'
formatter = logging.Formatter(FORMAT)

# Module-level logger for this worker.
logger = logging.getLogger(__name__)


def process_work_items():
  """Drain the image diff queue, computing image changes for each item.

  Pulls items until the queue is empty. Every item is marked complete even
  when the referenced image no longer exists (a DataModelException), so a
  bad item is never retried forever. Closes this thread's database
  connection when done.
  """
  logger.debug('Getting work item from queue.')

  item = image_diff_queue.get()

  while item:
    # Lazy %-args: formatting only happens if DEBUG logging is enabled.
    logger.debug('Queue gave us some work: %s', item.body)

    request = json.loads(item.body)
    try:
      image_id = request['image_id']
      namespace = request['namespace']
      repository = request['repository']

      process_image_changes(namespace, repository, image_id)
    except DataModelException:
      # This exception is unrecoverable, and the item should continue and be
      # marked as complete. (Fixed the format string: it previously emitted a
      # stray quote inside the repo path, e.g. "repo 'ns/'repo'".)
      logger.warning('Image does not exist in database \'%s\' for repo \'%s/%s\'',
                     image_id, namespace, repository)

    image_diff_queue.complete(item)

    item = image_diff_queue.get()

  logger.debug('No more work.')

  if not db_connection.is_closed():
    logger.debug('Closing thread db connection.')
    db_connection.close()


def start_worker():
  """Run the periodic diff worker until the process is killed.

  Registers process_work_items on a 30-second interval with the scheduler,
  whose jobs run on background threads; the calling thread then just sleeps
  so the process stays alive.
  """
  logger.debug("Scheduling worker.")

  job_scheduler = Scheduler()
  job_scheduler.start()
  job_scheduler.add_interval_job(process_work_items, seconds=30)

  # Park the main thread; the scheduler does all the real work.
  one_day_in_seconds = 60 * 60 * 24
  while True:
    time.sleep(one_day_in_seconds)  # sleep one day, basically forever


# Command-line interface: -D toggles daemon mode, --log picks the daemon's
# log file. Parsed eagerly at import time since this module is a script.
arg_parser = argparse.ArgumentParser(description='Worker daemon to compute diffs')
arg_parser.add_argument('-D', action='store_true', default=False,
                        help='Run the worker in daemon mode.')
arg_parser.add_argument('--log', default='diffsworker.log',
                        help='Specify the log file for the worker as a daemon.')
args = arg_parser.parse_args()


# Pick the log destination for the chosen run mode, wire it to the root
# logger, then launch the worker (daemonized or in the foreground).
if args.D:
  log_handler = logging.FileHandler(args.log)
else:
  log_handler = logging.StreamHandler()

log_handler.setFormatter(formatter)
root_logger.addHandler(log_handler)

if args.D:
  # Keep the log file descriptor open across the daemonization fork.
  with daemon.DaemonContext(files_preserve=[log_handler.stream]):
    start_worker()
else:
  start_worker()