Refactor the util directory to use subpackages.

This commit is contained in:
Jake Moshenko 2015-08-03 15:49:10 -04:00
parent 974ccaa2e7
commit 18100be481
46 changed files with 36 additions and 39 deletions

0 util/saas/__init__.py Normal file

74 util/saas/analytics.py Normal file

@@ -0,0 +1,74 @@
import json
import logging

from Queue import Queue
from threading import Thread

from mixpanel import BufferedConsumer, Mixpanel

logger = logging.getLogger(__name__)


class MixpanelQueuingConsumer(object):
  def __init__(self, request_queue):
    self._mp_queue = request_queue

  def send(self, endpoint, json_message):
    logger.debug('Queuing mixpanel request.')
    self._mp_queue.put(json.dumps([endpoint, json_message]))


class SendToMixpanel(Thread):
  def __init__(self, request_queue):
    Thread.__init__(self)
    self.daemon = True

    self._mp_queue = request_queue
    self._consumer = BufferedConsumer()

  def run(self):
    logger.debug('Starting mixpanel sender process.')
    while True:
      mp_request = self._mp_queue.get()
      logger.debug('Got queued mixpanel request.')

      try:
        self._consumer.send(*json.loads(mp_request))
      except:
        logger.exception('Failed to send Mixpanel request.')


class FakeMixpanel(object):
  def track(self, *args, **kwargs):
    pass


class Analytics(object):
  def __init__(self, app=None):
    self.app = app
    if app is not None:
      self.state = self.init_app(app)
    else:
      self.state = None

  def init_app(self, app):
    analytics_type = app.config.get('ANALYTICS_TYPE', 'FakeAnalytics')

    if analytics_type == 'Mixpanel':
      mixpanel_key = app.config.get('MIXPANEL_KEY', '')
      logger.debug('Initializing mixpanel with key: %s', mixpanel_key)

      request_queue = Queue()
      analytics = Mixpanel(mixpanel_key, MixpanelQueuingConsumer(request_queue))
      SendToMixpanel(request_queue).start()
    else:
      analytics = FakeMixpanel()

    # Register the extension with the app.
    app.extensions = getattr(app, 'extensions', {})
    app.extensions['analytics'] = analytics
    return analytics

  def __getattr__(self, name):
    return getattr(self.state, name, None)
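For context, a minimal usage sketch of the Analytics extension above, assuming a Flask app and a made-up MIXPANEL_KEY; any other ANALYTICS_TYPE falls back to FakeMixpanel. This is illustrative only and not part of the commit.

# Usage sketch (hypothetical config values).
from flask import Flask
from util.saas.analytics import Analytics

app = Flask(__name__)
app.config['ANALYTICS_TYPE'] = 'Mixpanel'
app.config['MIXPANEL_KEY'] = 'example-key'  # placeholder, not a real key

analytics = Analytics(app)
# Proxied through Analytics.__getattr__ to Mixpanel.track (or FakeMixpanel.track).
analytics.track('some-user-id', 'repo_push')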

47 util/saas/cloudwatch.py Normal file

@@ -0,0 +1,47 @@
import logging

import boto

from Queue import Queue
from threading import Thread

logger = logging.getLogger(__name__)


def get_queue(app):
  """ Returns a queue to a new CloudWatchSender. """
  access_key = app.config['CLOUDWATCH_AWS_ACCESS_KEY']
  secret_key = app.config['CLOUDWATCH_AWS_SECRET_KEY']

  queue = Queue()
  sender = CloudWatchSender(queue, access_key, secret_key)
  sender.start()
  return queue


class CloudWatchSender(Thread):
  """ CloudWatchSender loops indefinitely, pulling metrics off of a queue and sending them to
      CloudWatch.
  """
  def __init__(self, request_queue, aws_access_key, aws_secret_key):
    Thread.__init__(self)
    self.daemon = True

    self._aws_access_key = aws_access_key
    self._aws_secret_key = aws_secret_key
    self._put_metrics_queue = request_queue

  def run(self):
    try:
      logger.debug('Starting CloudWatch sender process.')
      connection = boto.connect_cloudwatch(self._aws_access_key, self._aws_secret_key)
    except:
      logger.exception('Failed to connect to CloudWatch.')
      return

    while True:
      put_metric_args, kwargs = self._put_metrics_queue.get()
      logger.debug('Got queued put metrics request.')

      try:
        connection.put_metric_data(*put_metric_args, **kwargs)
      except:
        logger.exception('Failed to write to CloudWatch')
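As an illustration of the queue contract above, each item placed on the queue is an (args, kwargs) pair that the sender passes straight to boto's put_metric_data; the namespace and metric name here are placeholders and this snippet is not part of the commit.

# Illustrative only; assumes an app with the CLOUDWATCH_AWS_* keys set.
metrics_queue = get_queue(app)
metrics_queue.put((('ExampleNamespace', 'ExampleMetric', 42), {'unit': 'Count'}))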

36 util/saas/exceptionlog.py Normal file

@@ -0,0 +1,36 @@
from raven.contrib.flask import Sentry as FlaskSentry


class FakeSentryClient(object):
  def captureException(self, *args, **kwargs):
    pass

  def user_context(self, *args, **kwargs):
    pass


class FakeSentry(object):
  def __init__(self):
    self.client = FakeSentryClient()


class Sentry(object):
  def __init__(self, app=None):
    self.app = app
    if app is not None:
      self.state = self.init_app(app)
    else:
      self.state = None

  def init_app(self, app):
    sentry_type = app.config.get('EXCEPTION_LOG_TYPE', 'FakeSentry')

    if sentry_type == 'Sentry':
      sentry = FlaskSentry(app)
    else:
      sentry = FakeSentry()

    # Register the extension with the app.
    app.extensions = getattr(app, 'extensions', {})
    app.extensions['sentry'] = sentry
    return sentry

  def __getattr__(self, name):
    return getattr(self.state, name, None)
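A brief usage sketch of the Sentry wrapper, with an assumed EXCEPTION_LOG_TYPE and a placeholder DSN; any value other than 'Sentry' yields the no-op FakeSentry. Again, this is illustrative and not part of the commit.

# Usage sketch (hypothetical config values).
from flask import Flask
from util.saas.exceptionlog import Sentry

app = Flask(__name__)
app.config['EXCEPTION_LOG_TYPE'] = 'Sentry'
app.config['SENTRY_DSN'] = 'https://key@sentry.example.com/1'  # placeholder DSN

sentry = Sentry(app)
try:
  raise RuntimeError('boom')
except RuntimeError:
  # Forwarded via Sentry.__getattr__ to the raven client (or FakeSentryClient).
  sentry.client.captureException()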

56 util/saas/queuemetrics.py Normal file

@@ -0,0 +1,56 @@
import logging

from util.saas.cloudwatch import get_queue

logger = logging.getLogger(__name__)


class NullReporter(object):
  def report(self, *args):
    pass


class CloudWatchReporter(object):
  """ CloudWatchReporter reports work queue metrics to CloudWatch. """
  def __init__(self, request_queue, namespace, need_capacity_name, build_percent_name):
    self._namespace = namespace
    self._need_capacity_name = need_capacity_name
    self._build_percent_name = build_percent_name
    self._put_metrics_queue = request_queue

  def _send_to_queue(self, *args, **kwargs):
    self._put_metrics_queue.put((args, kwargs))

  def report(self, currently_processing, running_count, total_count):
    logger.debug('Worker indicated %s running count and %s total count', running_count,
                 total_count)

    need_capacity_count = total_count - running_count
    self._send_to_queue(self._namespace, self._need_capacity_name, need_capacity_count,
                        unit='Count')

    building_percent = 100 if currently_processing else 0
    self._send_to_queue(self._namespace, self._build_percent_name, building_percent,
                        unit='Percent')


class QueueMetrics(object):
  """ QueueMetrics initializes a reporter for recording metrics of work queues. """
  def __init__(self, app=None):
    self._app = app
    self._reporter = NullReporter()
    if app is not None:
      reporter_type = app.config.get('QUEUE_METRICS_TYPE', 'Null')
      if reporter_type == 'CloudWatch':
        namespace = app.config['QUEUE_METRICS_NAMESPACE']
        req_capacity_name = app.config['QUEUE_METRICS_CAPACITY_SHORTAGE_NAME']
        build_percent_name = app.config['QUEUE_METRICS_BUILD_PERCENT_NAME']

        request_queue = get_queue(app)
        self._reporter = CloudWatchReporter(request_queue, namespace, req_capacity_name,
                                            build_percent_name)

  def __getattr__(self, name):
    return getattr(self._reporter, name, None)
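For illustration, QueueMetrics could be wired up and called as below; every config value is a placeholder, and with QUEUE_METRICS_TYPE left unset the calls simply go to NullReporter. Not part of the commit.

# Usage sketch (hypothetical config values).
from flask import Flask
from util.saas.queuemetrics import QueueMetrics

app = Flask(__name__)
app.config['QUEUE_METRICS_TYPE'] = 'CloudWatch'
app.config['QUEUE_METRICS_NAMESPACE'] = 'ExampleNamespace'
app.config['QUEUE_METRICS_CAPACITY_SHORTAGE_NAME'] = 'NeedCapacityCount'
app.config['QUEUE_METRICS_BUILD_PERCENT_NAME'] = 'BuildPercent'
app.config['CLOUDWATCH_AWS_ACCESS_KEY'] = 'access-key'  # placeholder
app.config['CLOUDWATCH_AWS_SECRET_KEY'] = 'secret-key'  # placeholder

metrics = QueueMetrics(app)
# 3 of 10 items running while a build is in progress -> queues a Count and a Percent metric.
metrics.report(True, 3, 10)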