initial import for Open Source 🎉

Jimmy Zelinskie 2019-11-12 11:09:47 -05:00
parent 1898c361f3
commit 9c0dd3b722
2048 changed files with 218743 additions and 0 deletions

util/saas/__init__.py (Normal file, 0 lines)

util/saas/analytics.py (Normal file, 73 lines)

@@ -0,0 +1,73 @@
import json
import logging

from Queue import Queue
from threading import Thread

from mixpanel import BufferedConsumer, Mixpanel


logger = logging.getLogger(__name__)


class MixpanelQueuingConsumer(object):
  def __init__(self, request_queue):
    self._mp_queue = request_queue

  def send(self, endpoint, json_message):
    logger.debug('Queuing mixpanel request.')
    self._mp_queue.put(json.dumps([endpoint, json_message]))


class SendToMixpanel(Thread):
  def __init__(self, request_queue):
    Thread.__init__(self)
    self.daemon = True

    self._mp_queue = request_queue
    self._consumer = BufferedConsumer()

  def run(self):
    logger.debug('Starting mixpanel sender process.')
    while True:
      mp_request = self._mp_queue.get()
      logger.debug('Got queued mixpanel request.')

      try:
        self._consumer.send(*json.loads(mp_request))
      except:
        logger.exception('Failed to send Mixpanel request.')


class _FakeMixpanel(object):
  def track(*args, **kwargs):
    pass


class Analytics(object):
  def __init__(self, app=None):
    self.app = app
    if app is not None:
      self.state = self.init_app(app)
    else:
      self.state = None

  def init_app(self, app):
    analytics_type = app.config.get('ANALYTICS_TYPE', 'FakeAnalytics')

    if analytics_type == 'Mixpanel':
      mixpanel_key = app.config.get('MIXPANEL_KEY', '')
      logger.debug('Initializing mixpanel with key: %s', app.config['MIXPANEL_KEY'])

      request_queue = Queue()
      analytics = Mixpanel(mixpanel_key, MixpanelQueuingConsumer(request_queue))
      SendToMixpanel(request_queue).start()
    else:
      analytics = _FakeMixpanel()

    # register extension with app
    app.extensions = getattr(app, 'extensions', {})
    app.extensions['analytics'] = analytics
    return analytics

  def __getattr__(self, name):
    return getattr(self.state, name, None)
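
The module above follows the usual Flask extension pattern: Mixpanel requests are serialized onto an in-process Queue by MixpanelQueuingConsumer and drained by the SendToMixpanel daemon thread, so web requests never block on the Mixpanel API. A minimal usage sketch, assuming a Flask app and a made-up project token (the config keys are the ones read in init_app; track() is the standard mixpanel-python call):

from flask import Flask
from util.saas.analytics import Analytics

app = Flask(__name__)
app.config['ANALYTICS_TYPE'] = 'Mixpanel'
app.config['MIXPANEL_KEY'] = 'example-project-token'  # hypothetical token

analytics = Analytics(app)

# Delegated via Analytics.__getattr__ to the Mixpanel client; the request is
# queued and sent asynchronously by the SendToMixpanel daemon thread.
analytics.track('user-id-123', 'repo_push', {'repository': 'example/repo'})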

util/saas/cloudwatch.py (Normal file, 96 lines)

@@ -0,0 +1,96 @@
import logging
import boto
import time
import random

from Queue import Empty
from threading import Thread


logger = logging.getLogger(__name__)


MAX_BATCH_METRICS = 20

# Sleep for this much time between failed send requests.
# This prevents hammering cloudwatch when it's not available.
FAILED_SEND_SLEEP_SECS = 15


def start_cloudwatch_sender(metrics, app):
  """
  Starts sending from metrics to a new CloudWatchSender.
  """
  access_key = app.config.get('CLOUDWATCH_AWS_ACCESS_KEY')
  secret_key = app.config.get('CLOUDWATCH_AWS_SECRET_KEY')
  namespace = app.config.get('CLOUDWATCH_NAMESPACE')

  if not namespace:
    logger.debug('CloudWatch not configured')
    return

  sender = CloudWatchSender(metrics, access_key, secret_key, namespace)
  sender.start()


class CloudWatchSender(Thread):
  """
  CloudWatchSender loops indefinitely, pulling metrics off of a queue and sending them to CloudWatch.
  """
  def __init__(self, metrics, aws_access_key, aws_secret_key, namespace):
    Thread.__init__(self)
    self.daemon = True

    self._aws_access_key = aws_access_key
    self._aws_secret_key = aws_secret_key
    self._metrics = metrics
    self._namespace = namespace

  def run(self):
    try:
      logger.debug('Starting CloudWatch sender process.')
      connection = boto.connect_cloudwatch(self._aws_access_key, self._aws_secret_key)
    except:
      logger.exception('Failed to connect to CloudWatch.')

    self._metrics.enable_deprecated()

    while True:
      metrics = {
        'name': [],
        'value': [],
        'unit': [],
        'timestamp': [],
        'dimensions': [],
      }

      # Block until at least one metric is available, then drain up to a full batch.
      metric = self._metrics.get_deprecated()
      append_metric(metrics, metric)

      while len(metrics['name']) < MAX_BATCH_METRICS:
        try:
          metric = self._metrics.get_nowait_deprecated()
          append_metric(metrics, metric)
        except Empty:
          break

      try:
        connection.put_metric_data(self._namespace, **metrics)
        logger.debug('Sent %d CloudWatch metrics', len(metrics['name']))
      except:
        # Requeue the batch so the metrics are not lost, then back off before retrying.
        for i in range(len(metrics['name'])):
          self._metrics.put_deprecated(metrics['name'][i], metrics['value'][i],
                                       unit=metrics['unit'][i],
                                       dimensions=metrics['dimensions'][i],
                                       timestamp=metrics['timestamp'][i])

        logger.exception('Failed to write to CloudWatch: %s', metrics)
        logger.debug('Attempted to requeue %d metrics.', len(metrics['name']))

        # random int between 1/2 and 1 1/2 of FAILED_SEND_SLEEP duration
        sleep_secs = random.randint(FAILED_SEND_SLEEP_SECS/2, 3*FAILED_SEND_SLEEP_SECS/2)
        time.sleep(sleep_secs)


def append_metric(metrics, m):
  name, value, kwargs = m
  metrics['name'].append(name)
  metrics['value'].append(value)
  metrics['unit'].append(kwargs.get('unit'))
  metrics['dimensions'].append(kwargs.get('dimensions'))
  metrics['timestamp'].append(kwargs.get('timestamp'))
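
CloudWatchSender relies on only a small interface on the metrics object it is handed: enable_deprecated(), a blocking get_deprecated(), a non-blocking get_nowait_deprecated() that raises Queue.Empty, and put_deprecated(name, value, **kwargs) for requeueing, with each queued item shaped as (name, value, kwargs) exactly as append_metric unpacks it. A minimal in-memory sketch of that contract, for illustration only (it is not the metrics implementation used elsewhere in this commit):

from Queue import Queue

class SimpleMetricQueue(object):
  """ Hypothetical stand-in satisfying the interface CloudWatchSender expects. """
  def __init__(self):
    self._queue = None  # queueing is off until the sender enables it

  def enable_deprecated(self, maxsize=10000):
    self._queue = Queue(maxsize)

  def put_deprecated(self, name, value, **kwargs):
    if self._queue is not None:
      self._queue.put((name, value, kwargs))

  def get_deprecated(self):
    return self._queue.get()

  def get_nowait_deprecated(self):
    return self._queue.get_nowait()

With CLOUDWATCH_NAMESPACE and the AWS keys set in app.config, start_cloudwatch_sender(SimpleMetricQueue(), app) would then batch up to MAX_BATCH_METRICS (20) data points per put_metric_data call.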

util/saas/exceptionlog.py (Normal file, 36 lines)

@@ -0,0 +1,36 @@
from raven.contrib.flask import Sentry as FlaskSentry


class FakeSentryClient(object):
  def captureException(self, *args, **kwargs):
    pass

  def user_context(self, *args, **kwargs):
    pass


class FakeSentry(object):
  def __init__(self):
    self.client = FakeSentryClient()


class Sentry(object):
  def __init__(self, app=None):
    self.app = app
    if app is not None:
      self.state = self.init_app(app)
    else:
      self.state = None

  def init_app(self, app):
    sentry_type = app.config.get('EXCEPTION_LOG_TYPE', 'FakeSentry')

    if sentry_type == 'Sentry':
      sentry = FlaskSentry(app, register_signal=False)
    else:
      sentry = FakeSentry()

    # register extension with app
    app.extensions = getattr(app, 'extensions', {})
    app.extensions['sentry'] = sentry
    return sentry

  def __getattr__(self, name):
    return getattr(self.state, name, None)
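
FakeSentryClient mirrors the two raven client calls the application uses (captureException and user_context), so call sites never have to check which exception-log backend is configured. A brief usage sketch, assuming a Flask app; with EXCEPTION_LOG_TYPE set to 'Sentry', raven's Flask integration would also need its usual SENTRY_DSN config:

from flask import Flask
from util.saas.exceptionlog import Sentry

app = Flask(__name__)
app.config['EXCEPTION_LOG_TYPE'] = 'FakeSentry'  # swap to 'Sentry' in production

sentry = Sentry(app)

try:
  raise ValueError('example failure')
except ValueError:
  # A no-op against FakeSentry; reported upstream when raven is configured.
  sentry.client.captureException()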

util/saas/useranalytics.py (Normal file, 183 lines)

@@ -0,0 +1,183 @@
import logging

from hashlib import sha1
from concurrent.futures import ThreadPoolExecutor

from marketorestpython.client import MarketoClient

from util.asyncwrapper import AsyncExecutorWrapper, NullExecutor, NullExecutorCancelled


logger = logging.getLogger(__name__)


class LeadNotFoundException(Exception):
  pass


def build_error_callback(message_when_exception):
  def maybe_log_error(response_future):
    try:
      response_future.result()
    except NullExecutorCancelled:
      pass
    except Exception:
      logger.exception('User analytics: %s', message_when_exception)
  return maybe_log_error


class _MarketoAnalyticsClient(object):
  """ User analytics implementation which will report user changes to the
      Marketo API.
  """
  def __init__(self, marketo_client, munchkin_private_key, lead_source):
    """ Instantiate with the given marketorestpython.client, the Marketo
        Munchkin Private Key, and the Lead Source that we want to set when we
        create new lead records in Marketo.
    """
    self._marketo = marketo_client
    self._munchkin_private_key = munchkin_private_key
    self._lead_source = lead_source

  def _get_lead_metadata(self, given_name, family_name, company, location):
    metadata = {}
    if given_name:
      metadata['firstName'] = given_name
    if family_name:
      metadata['lastName'] = family_name
    if company:
      metadata['company'] = company
    if location:
      metadata['location'] = location
    return metadata

  def create_lead(self, email, username, given_name, family_name, company, location):
    lead_data = dict(
      email=email,
      Quay_Username__c=username,
      leadSource='Web - Product Trial',
      Lead_Source_Detail__c=self._lead_source,
    )
    lead_data.update(self._get_lead_metadata(given_name, family_name, company, location))

    self._marketo.create_update_leads(
      action='createOrUpdate',
      leads=[lead_data],
      asyncProcessing=True,
      lookupField='email',
    )

  def _find_leads_by_email(self, email):
    # Fetch the existing lead from Marketo by email.
    found = self._marketo.get_multiple_leads_by_filter_type(
      filterType='email',
      filterValues=[email],
    )

    if not found:
      raise LeadNotFoundException('No lead found with email: {}'.format(email))

    return found

  def change_email(self, old_email, new_email):
    found = self._find_leads_by_email(old_email)

    # Update using their lead id.
    updated = [dict(id=lead['id'], email=new_email) for lead in found]
    self._marketo.create_update_leads(
      action='updateOnly',
      leads=updated,
      asyncProcessing=True,
      lookupField='id',
    )

  def change_metadata(self, email, given_name=None, family_name=None, company=None, location=None):
    lead_data = self._get_lead_metadata(given_name, family_name, company, location)
    if not lead_data:
      return

    # Update using their email address.
    lead_data['email'] = email
    self._marketo.create_update_leads(
      action='updateOnly',
      leads=[lead_data],
      asyncProcessing=True,
      lookupField='email',
    )

  def change_username(self, email, new_username):
    # Update using their email.
    self._marketo.create_update_leads(
      action='updateOnly',
      leads=[{
        'email': email,
        'Quay_Username__c': new_username,
      }],
      asyncProcessing=True,
      lookupField='email',
    )

  @AsyncExecutorWrapper.sync
  def get_user_analytics_metadata(self, user_obj):
    """ Return a dict of properties that should be added to the user object to allow
        analytics associations.
    """
    if not self._munchkin_private_key:
      return dict()

    marketo_user_hash = sha1(self._munchkin_private_key)
    marketo_user_hash.update(user_obj.email)
    return dict(
      marketo_user_hash=marketo_user_hash.hexdigest(),
    )


class UserAnalytics(object):
  def __init__(self, app=None):
    self.app = app
    if app is not None:
      self.state = self.init_app(app)
    else:
      self.state = None

  def init_app(self, app):
    analytics_type = app.config.get('USER_ANALYTICS_TYPE', 'FakeAnalytics')

    marketo_munchkin_id = ''
    marketo_munchkin_private_key = ''
    marketo_client_id = ''
    marketo_client_secret = ''
    marketo_lead_source = ''
    executor = NullExecutor()

    if analytics_type == 'Marketo':
      marketo_munchkin_id = app.config['MARKETO_MUNCHKIN_ID']
      marketo_munchkin_private_key = app.config['MARKETO_MUNCHKIN_PRIVATE_KEY']
      marketo_client_id = app.config['MARKETO_CLIENT_ID']
      marketo_client_secret = app.config['MARKETO_CLIENT_SECRET']
      marketo_lead_source = app.config['MARKETO_LEAD_SOURCE']
      logger.debug('Initializing marketo with keys: %s %s %s', marketo_munchkin_id,
                   marketo_client_id, marketo_client_secret)
      executor = ThreadPoolExecutor(max_workers=1)

    marketo_client = MarketoClient(marketo_munchkin_id, marketo_client_id, marketo_client_secret)
    client_wrapper = _MarketoAnalyticsClient(marketo_client, marketo_munchkin_private_key,
                                             marketo_lead_source)
    user_analytics = AsyncExecutorWrapper(client_wrapper, executor)

    # register extension with app
    app.extensions = getattr(app, 'extensions', {})
    app.extensions['user_analytics'] = user_analytics
    return user_analytics

  def __getattr__(self, name):
    return getattr(self.state, name, None)
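
get_user_analytics_metadata derives the per-user Marketo association hash by seeding SHA-1 with the Munchkin private key and then feeding in the user's email address; the hex digest is the single property returned for the user object. A standalone illustration of the same computation (the key and email are made up):

from hashlib import sha1

munchkin_private_key = 'example-munchkin-private-key'  # hypothetical
email = 'user@example.com'                             # hypothetical

marketo_user_hash = sha1(munchkin_private_key)
marketo_user_hash.update(email)
print(marketo_user_hash.hexdigest())  # returned as 'marketo_user_hash'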