initial import for Open Source 🎉
This commit is contained in:
parent
1898c361f3
commit
9c0dd3b722
2048 changed files with 218743 additions and 0 deletions
0
tools/__init__.py
Normal file
0
tools/__init__.py
Normal file
86
tools/deleteinvalidlayers.py
Normal file
86
tools/deleteinvalidlayers.py
Normal file
|
@ -0,0 +1,86 @@
|
|||
from data.database import ImageStorage, Image, ImageStoragePlacement, ImageStorageLocation, RepositoryTag
|
||||
from data import model
|
||||
from app import storage as storage_system
|
||||
from tqdm import tqdm
|
||||
|
||||
def find_broken_storages():
    """Scan every ImageStoragePlacement and return the ids of storages whose
    backing layer file is missing from the storage engine.

    Returns:
        list of ImageStorage ids for which the computed layer path does not
        exist in the location the placement claims.
    """
    broken_storages = set()

    # Parenthesized print works identically under Python 2 and 3 for a
    # single argument (the file otherwise still requires Python 2).
    print("Checking storages...")
    placement_count = ImageStoragePlacement.select().count()
    placements = (ImageStoragePlacement
                  .select()
                  .join(ImageStorage)
                  .switch(ImageStoragePlacement)
                  .join(ImageStorageLocation))

    for placement in tqdm(placements, total=placement_count):
        path = model.storage.get_layer_path(placement.storage)
        if not storage_system.exists([placement.location.name], path):
            broken_storages.add(placement.storage.id)

    return list(broken_storages)
|
||||
|
||||
def delete_broken_layers():
    """Interactively find storages whose layer files are missing, then expire
    the tags referencing them and garbage collect the affected repositories.

    Destructive: prompts twice before making changes, and requires the
    registry to be stopped while it runs.
    """
    result = raw_input('Please make sure your registry is not running and enter "GO" to continue: ')
    if result != 'GO':
        print("Declined to run")
        return

    broken_storages = find_broken_storages()
    if not broken_storages:
        print("No broken layers found")
        return

    # Find all the images referencing the broken layers, batched to keep the
    # SQL `IN` clause bounded.
    print("Finding broken images...")
    IMAGE_BATCH_SIZE = 100

    all_images = []
    # BUG FIX: the previous version iterated range(0, len(broken_storages) /
    # IMAGE_BATCH_SIZE), which silently skipped the final partial batch and
    # processed nothing at all when fewer than IMAGE_BATCH_SIZE storages were
    # broken. Stepping by the batch size covers every storage.
    for start in tqdm(range(0, len(broken_storages), IMAGE_BATCH_SIZE)):
        batch = broken_storages[start:start + IMAGE_BATCH_SIZE]
        images = Image.select().join(ImageStorage).where(Image.storage << batch)
        all_images.extend(images)

    if not all_images:
        print("No broken layers found")
        return

    # Find all the tags containing the images.
    print("Finding associated tags for %s images..." % len(all_images))
    all_tags = {}
    for image in tqdm(all_images):
        query = model.tag.get_matching_tags(image.docker_image_id, image.storage.uuid, RepositoryTag)
        for tag in query:
            all_tags[tag.id] = tag

    # Ask to delete them.
    print("")
    print("The following tags were found to reference invalid images:")
    for tag in all_tags.values():
        print("%s/%s: %s" % (tag.repository.namespace_user.username, tag.repository.name, tag.name))

    if not all_tags:
        print("(Tags in time machine)")

    print("")
    result = raw_input('Enter "DELETENOW" to delete these tags and ALL associated images (THIS IS PERMANENT): ')
    if result != 'DELETENOW':
        print("Declined to delete")
        return

    print("")
    print("Marking tags to be GCed...")
    for tag in tqdm(all_tags.values()):
        # A lifetime end of 0 expires the tag, making it eligible for GC.
        tag.lifetime_end_ts = 0
        tag.save()

    print("GCing all repositories...")
    for tag in tqdm(all_tags.values()):
        model.repository.garbage_collect_repo(tag.repository)

    print("All done! You may now restart your registry.")


delete_broken_layers()
|
89
tools/email-viewer/emails.py
Normal file
89
tools/email-viewer/emails.py
Normal file
|
@ -0,0 +1,89 @@
|
|||
from flask import Flask, render_template
import datetime
import os

# Serve the raw email templates straight out of the repo's emails/ directory.
tmpl_dir = '../../emails'

app = Flask(__name__, template_folder=tmpl_dir)


# The email templates use several custom Jinja filters; for local preview
# purposes each filter simply passes its value through unchanged.
@app.template_filter()
def user_reference_filter(value):
    return value


@app.template_filter()
def admin_reference(value):
    return value


@app.template_filter()
def repository_reference(value):
    return value


@app.template_filter()
def team_reference(value):
    return value


app.jinja_env.filters['user_reference'] = user_reference_filter
app.jinja_env.filters['admin_reference'] = admin_reference
app.jinja_env.filters['repository_reference'] = repository_reference
app.jinja_env.filters['team_reference'] = team_reference

app_title = 'Quay.io (local)'


def app_link_handler(url=None, title=None):
    """ Stub for the app_link callable referenced by the email templates. """
    return 'http://example.com/example'


def render_with_options(template=None):
    """ Render a template with a canned set of common context variables. """
    return render_template(template, username="exampleuser", user_reference="testing",
                           app_logo="https://quay.io/static/img/quay-horizontal-color.svg", token="sdf8SdfKGRME9dse_dfdf",
                           app_link=app_link_handler, namespace="booboo", repository="foobar", organization="buynlarge",
                           admin_usernames=["lazercat", "booboocoreos"], teamname="creators", inviter="devtable",
                           hosted=False, app_title=app_title, app_url="https://quay.io")


def get_templates():
    """ List the available template names, without their .html extension. """
    return [name.replace('.html', '') for name in os.listdir('../../emails')]


@app.route("/")
def template_test():
    return render_template('email-template-viewer.html', templates=get_templates())


def _make_preview(email_name):
    """ Build a zero-argument view rendering the named email template. """
    def _view():
        return render_with_options(email_name + '.html')
    _view.__name__ = str(email_name)
    return _view


# One preview route per email template; endpoint and URL both match the
# template's base name (e.g. /changeemail renders changeemail.html), exactly
# as the previous one-function-per-route version did.
for _email_name in ('changeemail', 'confirmemail', 'emailchanged', 'orgrecovery',
                    'passwordchanged', 'paymentfailure', 'recovery',
                    'repoauthorizeemail', 'teaminvite'):
    app.add_url_rule('/' + _email_name, _email_name, _make_preview(_email_name))


if __name__ == '__main__':
    app.run(debug=True)
|
6
tools/email-viewer/requirements.txt
Normal file
6
tools/email-viewer/requirements.txt
Normal file
|
@ -0,0 +1,6 @@
|
|||
Flask==0.10.1
|
||||
Jinja2==2.7.2
|
||||
MarkupSafe==0.18
|
||||
Werkzeug==0.9.4
|
||||
itsdangerous==0.23
|
||||
wsgiref==0.1.2
|
35
tools/emailinvoice.py
Normal file
35
tools/emailinvoice.py
Normal file
|
@ -0,0 +1,35 @@
|
|||
import stripe
|
||||
from app import app
|
||||
|
||||
from util.invoice import renderInvoiceToHtml
|
||||
from util.useremails import send_invoice_email
|
||||
|
||||
from data import model
|
||||
|
||||
import argparse
|
||||
|
||||
from flask import Flask, current_app
|
||||
from flask_mail import Mail
|
||||
|
||||
def sendInvoice(invoice_id):
    """Fetch a Stripe invoice, render it to HTML and email it to the owning
    user's invoice email address (falling back to the account email).

    invoice_id: the Stripe invoice identifier to send.
    """
    invoice = stripe.Invoice.retrieve(invoice_id)
    if not invoice['customer']:
        print('No customer found')
        return

    customer_id = invoice['customer']
    user = model.user.get_user_or_org_by_customer_id(customer_id)
    if not user:
        print('No user found for customer %s' % (customer_id))
        return

    # Rendering and mail sending need an active Flask application context.
    with app.app_context():
        invoice_html = renderInvoiceToHtml(invoice, user)
        send_invoice_email(user.invoice_email_address or user.email, invoice_html)
        print('Invoice sent to %s' % (user.invoice_email_address or user.email))


parser = argparse.ArgumentParser(description='Email an invoice')
parser.add_argument('invoice_id', help='The invoice ID')
args = parser.parse_args()
sendInvoice(args.invoice_id)
|
25
tools/freeloaders.py
Normal file
25
tools/freeloaders.py
Normal file
|
@ -0,0 +1,25 @@
|
|||
from data import model
|
||||
from data.database import User
|
||||
from app import billing as stripe
|
||||
from data.plans import get_plan
|
||||
|
||||
def get_private_allowed(customer):
    """Return how many private repositories the customer's current Stripe
    subscription plan allows (0 when unsubscribed or on an unknown plan)."""
    if not customer.stripe_id:
        return 0

    subscription = stripe.Customer.retrieve(customer.stripe_id).get('subscription', None)
    if subscription is None:
        return 0

    plan = get_plan(subscription.plan.id)
    # An unrecognized plan id now yields no allowance instead of crashing the
    # whole scan with a TypeError on the subscript below.
    if plan is None:
        return 0
    return plan['privateRepos']


# Find customers who have more private repositories than their plans allow
users = User.select()

usage = [(user.username, model.user.get_private_repo_count(user.username),
          get_private_allowed(user)) for user in users]

for username, used, allowed in usage:
    if used > allowed:
        print('Violation: %s %s > %s' % (username, used, allowed))
|
33
tools/generatekeypair.py
Normal file
33
tools/generatekeypair.py
Normal file
|
@ -0,0 +1,33 @@
|
|||
import argparse
|
||||
import json
|
||||
|
||||
from Crypto.PublicKey import RSA
|
||||
from jwkest.jwk import RSAKey
|
||||
from util.security.fingerprint import canonical_kid
|
||||
|
||||
def generate_key_pair(filename, kid=None):
    """Generate a 2048-bit RSA key pair and write three files:

    <filename>.jwk  -- the public key in JWK (JSON) form
    <filename>.kid  -- the key id (derived canonically unless supplied)
    <filename>.pem  -- the private key in PEM form
    """
    private_key = RSA.generate(2048)
    jwk = RSAKey(key=private_key.publickey()).serialize()
    if kid is None:
        kid = canonical_kid(jwk)

    # Opening with mode='w' already truncates, so the explicit truncate(0)
    # calls the original carried were redundant and have been dropped.
    print("Writing public key to %s.jwk" % filename)
    with open('%s.jwk' % filename, mode='w') as f:
        f.write(json.dumps(jwk))

    print("Writing key ID to %s.kid" % filename)
    with open('%s.kid' % filename, mode='w') as f:
        f.write(kid)

    print("Writing private key to %s.pem" % filename)
    with open('%s.pem' % filename, mode='w') as f:
        f.write(private_key.exportKey())


parser = argparse.ArgumentParser(description='Generates a key pair into files')
parser.add_argument('filename', help='The filename prefix for the generated key files')
args = parser.parse_args()
generate_key_pair(args.filename)
|
294
tools/invoices.py
Normal file
294
tools/invoices.py
Normal file
|
@ -0,0 +1,294 @@
|
|||
import stripe as _stripe
|
||||
_stripe.api_version = '2016-06-15'
|
||||
|
||||
import logging
|
||||
import time
|
||||
import sys
|
||||
import csv
|
||||
import codecs
|
||||
|
||||
from itertools import groupby
|
||||
from datetime import datetime, timedelta, date
|
||||
from cStringIO import StringIO
|
||||
|
||||
from app import billing as stripe
|
||||
|
||||
|
||||
def _format_timestamp(stripe_timestamp):
|
||||
if stripe_timestamp is None:
|
||||
return None
|
||||
date_obj = date.fromtimestamp(stripe_timestamp)
|
||||
return date_obj.strftime('%m/%d/%Y')
|
||||
|
||||
|
||||
def _format_money(stripe_money):
|
||||
return stripe_money/100.0
|
||||
|
||||
|
||||
def _paginate_list(stripe_klass, num_days, **incoming_kwargs):
|
||||
now = datetime.utcnow()
|
||||
starting_from = now - timedelta(days=num_days)
|
||||
starting_timestamp = str(int(time.mktime(starting_from.timetuple())))
|
||||
created = {'gte': starting_timestamp}
|
||||
|
||||
list_req_kwargs = dict(incoming_kwargs)
|
||||
has_more = True
|
||||
|
||||
while has_more:
|
||||
list_response = stripe_klass.list(limit=100, created=created, **list_req_kwargs)
|
||||
|
||||
for list_response_item in list_response.data:
|
||||
yield list_response_item
|
||||
|
||||
has_more = list_response.has_more
|
||||
|
||||
list_req_kwargs['starting_after'] = list_response_item.id
|
||||
|
||||
|
||||
def list_charges(num_days):
    """Yield all charges created in the past num_days days, with each
    charge's invoice expanded inline.
    """
    for one_charge in _paginate_list(stripe.Charge, num_days, expand=['data.invoice']):
        yield one_charge
|
||||
|
||||
|
||||
def list_refunds(num_days):
    """Yield all refunds created in the past num_days days, expanding each
    refund's charge and that charge's invoice.
    """
    wanted_expansions = ['data.charge', 'data.charge.invoice']
    for one_refund in _paginate_list(stripe.Refund, num_days, expand=wanted_expansions):
        yield one_refund
|
||||
|
||||
|
||||
def format_refund(refund):
    """Generator yielding one (created_ts, row) line item for the refund.

    The refund's reporting period is the union of the billing periods
    covered by the refunded charge's invoice line items.
    """
    refund_period_start = None
    refund_period_end = None
    invoice_iterable = expand_invoice(refund.charge.invoice, refund.charge.amount)
    for _, period_start, period_end, _ in invoice_iterable:
        # Test for the None sentinel before ordering comparisons: the old
        # code compared against None first, which relied on Python 2's
        # arbitrary None ordering and is a TypeError under Python 3. Results
        # are identical on Python 2.
        if period_start is not None and (refund_period_start is None or
                                         period_start < refund_period_start):
            refund_period_start = period_start
        if period_end is not None and (refund_period_end is None or
                                       period_end > refund_period_end):
            refund_period_end = period_end

    card = refund.charge.source
    yield (refund.created, [
        _format_timestamp(refund.created),
        _format_timestamp(refund_period_start),
        _format_timestamp(refund_period_end),
        _format_money(-1 * refund.amount),  # refunds are negative amounts
        'credit_card',
        'Refunded',
        None,
        refund.id,
        refund.charge.customer,
        card.address_city,
        card.address_state,
        card.address_country,
        card.address_zip,
        card.country,
    ])
|
||||
|
||||
def _date_key(line_item):
|
||||
return line_item.start, line_item.end
|
||||
|
||||
|
||||
def expand_invoice(invoice, total_amount):
    """Yield (amount, period_start, period_end, plan) tuples for an invoice.

    With no invoice, the whole amount is reported as a single period-less
    line. Otherwise consecutive line items sharing the same (start, end)
    period are summed, reporting the plan of the last item in each group.
    """
    if invoice is None:
        yield total_amount, None, None, None
        return

    grouped = groupby(invoice.lines.data,
                      lambda item: (item.period.start, item.period.end))
    for (period_start, period_end), items_iter in grouped:
        items = list(items_iter)
        period_amount = sum(item.amount for item in items)
        yield period_amount, period_start, period_end, items[-1].plan
|
||||
|
||||
|
||||
def format_charge(charge):
  """ Generator which will return one or more line items corresponding to the
      line items for this charge.

      Yields (charge.created, [csv row values]) tuples: one row per invoice
      billing period, plus an extra negative row for any percent-off
      discount applying to a period.
  """
  ch_status = 'Paid'
  if charge.failure_code is not None:
    ch_status = 'Failed'

  card = charge.source

  # Amount remaining to be accounted for
  remaining_charge_amount = charge.amount

  # Active window and rate of any percent-off discount on the charge's
  # invoice; sys.maxint sentinels mean "no discount". (Python 2 only.)
  discount_start = sys.maxint
  discount_end = sys.maxint
  discount_percent = 0
  try:
    if charge.invoice and charge.invoice.discount:
      discount_obj = charge.invoice.discount
      # Only percent-off coupons are handled; an amount_off coupon would
      # break the per-period proration below.
      assert discount_obj.coupon.amount_off is None

      discount_start = discount_obj.start
      discount_end = sys.maxint if not discount_obj.end else discount_obj.end
      discount_percent = discount_obj.coupon.percent_off/100.0
      assert discount_percent > 0
  except AssertionError:
    logging.exception('Discount of strange variety: %s', discount_obj)
    raise

  invoice_iterable = expand_invoice(charge.invoice, charge.amount)
  for line_amount, period_start, period_end, plan in invoice_iterable:
    # One output row per billing period of the charge's invoice.
    yield (charge.created, [
      _format_timestamp(charge.created),
      _format_timestamp(period_start),
      _format_timestamp(period_end),
      _format_money(line_amount),
      'credit_card',
      ch_status,
      plan.name if plan is not None else None,
      charge.id,
      charge.customer,
      card.address_city,
      card.address_state,
      card.address_country,
      card.address_zip,
      card.country,
    ])

    remaining_charge_amount -= line_amount

    # Assumption: a discount applies if the beginning of a subscription
    # billing period is in the window when the discount is active.
    # Assumption the second: A discount is inclusive at the start second
    # and exclusive on the end second.
    #
    # I can't find docs or examples to prove or disprove either assumption.
    if period_start >= discount_start and period_start < discount_end:
      discount_amount = -1 * line_amount * discount_percent

      # The boundary-second assertions guard the two assumptions above: if a
      # period ever lands exactly on a discount boundary, fail loudly.
      try:
        assert period_start != discount_start
      except AssertionError:
        logging.exception('We found a line item which matches the discount start: %s',
                          charge.id)
        raise

      try:
        assert period_start != discount_end
      except AssertionError:
        logging.exception('We found a line item which matches the discount end: %s',
                          charge.id)
        raise

      discount_name = 'Discount' if plan is None else '{} Discount'.format(plan.name)

      # Emit a negative row for the discounted portion of this period.
      yield (charge.created, [
        _format_timestamp(charge.created),
        _format_timestamp(period_start) if period_start is not None else None,
        _format_timestamp(period_end) if period_end is not None else None,
        _format_money(discount_amount),
        'credit_card',
        ch_status,
        discount_name,
        charge.id,
        charge.customer,
        card.address_city,
        card.address_state,
        card.address_country,
        card.address_zip,
        card.country,
      ])

      remaining_charge_amount -= discount_amount

  # Make sure our line items added up to the actual charge amount
  if remaining_charge_amount != 0:
    logging.warning('Unable to fully account (%s) for charge amount (%s): %s',
                    remaining_charge_amount, charge.amount, charge.id)
|
||||
|
||||
|
||||
class _UnicodeWriter(object):
  """
  A CSV writer which will write rows to CSV file "f",
  which is encoded in the given encoding.

  Python 2's csv module cannot write unicode directly, hence the
  encode-to-UTF-8 / decode / re-encode round trip through an in-memory
  queue below.
  """
  def __init__(self, f, dialect=csv.excel, encoding='utf-8', **kwds):
    # Redirect output to a queue
    self.queue = StringIO()
    self.writer = csv.writer(self.queue, dialect=dialect, **kwds)
    self.stream = f
    self.encoder = codecs.getincrementalencoder(encoding)()

  @staticmethod
  def _encode_cell(cell):
    # None passes through so csv renders it as an empty field.
    if cell is None:
      return cell
    return unicode(cell).encode('utf-8')

  def writerow(self, row):
    """Write one row to the target stream in the configured encoding."""
    self.writer.writerow([self._encode_cell(s) for s in row])
    # Fetch UTF-8 output from the queue ...
    data = self.queue.getvalue()
    data = data.decode('utf-8')
    # ... and reencode it into the target encoding
    data = self.encoder.encode(data)
    # write to the target stream
    self.stream.write(data)
    # empty queue
    self.queue.truncate(0)
|
||||
|
||||
|
||||
def _merge_row_streams(*row_generators):
|
||||
""" Descending merge sort of multiple row streams in the form of (tx_date, [row data]).
|
||||
Works recursively on an arbitrary number of row streams.
|
||||
"""
|
||||
if len(row_generators) == 1:
|
||||
for only_candidate in row_generators[0]:
|
||||
yield only_candidate
|
||||
|
||||
else:
|
||||
my_generator = row_generators[0]
|
||||
other_generator = _merge_row_streams(*row_generators[1:])
|
||||
|
||||
other_done = False
|
||||
|
||||
try:
|
||||
other_next = next(other_generator)
|
||||
except StopIteration:
|
||||
other_done = True
|
||||
|
||||
for my_next in my_generator:
|
||||
while not other_done and other_next[0] > my_next[0]:
|
||||
yield other_next
|
||||
|
||||
try:
|
||||
other_next = next(other_generator)
|
||||
except StopIteration:
|
||||
other_done = True
|
||||
yield my_next
|
||||
|
||||
for other_next in other_generator:
|
||||
yield other_next
|
||||
|
||||
|
||||
if __name__ == '__main__':
  logging.basicConfig(level=logging.WARN)

  # Look-back window in days; optionally overridden by the first CLI arg.
  days = 30
  if len(sys.argv) > 1:
    days = int(sys.argv[1])

  # Lazily expand every refund and every charge into CSV line items.
  refund_rows = (refund_line_item
                 for one_refund in list_refunds(days)
                 for refund_line_item in format_refund(one_refund))

  rows = (line_item
          for one_charge in list_charges(days)
          for line_item in format_charge(one_charge))

  # Merge the two descending-by-date streams and emit CSV to stdout.
  transaction_writer = _UnicodeWriter(sys.stdout)
  for _, row in _merge_row_streams(refund_rows, rows):
    transaction_writer.writerow(row)
  sys.stdout.flush()
|
50
tools/migratebranchregex.py
Normal file
50
tools/migratebranchregex.py
Normal file
|
@ -0,0 +1,50 @@
|
|||
import argparse
|
||||
import logging
|
||||
import json
|
||||
|
||||
from app import app
|
||||
from data import model
|
||||
from data.database import RepositoryBuildTrigger, configure
|
||||
from data.model.build import update_build_trigger
|
||||
|
||||
configure(app.config)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
def run_branchregex_migration():
    """Migrate build triggers' legacy 'branch_regex' config key to the newer
    'branchtag_regex' key by prefixing each regex alternative with 'heads/'.

    Loops until the query finds no unmigrated triggers. Triggers whose
    config cannot be parsed are skipped, and all seen uuids are remembered
    so a failing trigger is never retried in an infinite loop.
    """
    encountered = set()
    while True:
        found = list(RepositoryBuildTrigger.select().where(RepositoryBuildTrigger.config ** "%branch_regex%",
                                                           ~(RepositoryBuildTrigger.config ** "%branchtag_regex%")))
        found = [f for f in found if f.uuid not in encountered]

        if not found:
            logger.debug('No additional records found')
            return

        logger.debug('Found %s records to be changed', len(found))
        for trigger in found:
            encountered.add(trigger.uuid)

            try:
                config = json.loads(trigger.config)
            except ValueError:
                # Narrowed from a bare `except:` (which also swallowed
                # KeyboardInterrupt/SystemExit); json.loads signals malformed
                # JSON with ValueError. Also use the module logger instead of
                # the root `logging` for consistency with the rest of the file.
                logger.error("Cannot parse config for trigger %s", trigger.uuid)
                continue

            logger.debug("Checking trigger %s", trigger.uuid)
            existing_regex = config['branch_regex']
            logger.debug("Found branch regex '%s'", existing_regex)

            # 'a|b' -> 'heads/a|heads/b': each alternative now matches the
            # fully qualified ref name.
            sub_regex = existing_regex.split('|')
            new_regex = '|'.join(['heads/' + sub for sub in sub_regex])
            config['branchtag_regex'] = new_regex

            logger.debug("Updating to branchtag regex '%s'", new_regex)
            update_build_trigger(trigger, config)


if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG)
    logging.getLogger('boto').setLevel(logging.CRITICAL)

    run_branchregex_migration()
|
45
tools/monthlyrevenue.py
Normal file
45
tools/monthlyrevenue.py
Normal file
|
@ -0,0 +1,45 @@
|
|||
from app import billing
|
||||
from collections import defaultdict
|
||||
|
||||
# Paging offset into Stripe's customer list.
offset = 0

def empty_tuple():
  # Default (subscriber_count, revenue_cents) for a plan not yet seen.
  return (0, 0)

# plan id -> (subscriber count, monthly revenue in cents)
plan_revenue = defaultdict(empty_tuple)

batch = billing.Customer.all(count=100, offset=offset)
while batch.data:
  for cust in batch.data:
    if cust.subscription:
      sub = cust.subscription
      total_customer_revenue = sub.plan.amount * sub.quantity

      # Apply any active coupon: a percentage discount scales the revenue,
      # a fixed discount subtracts from it.
      if cust.discount and cust.discount.coupon:
        coupon = cust.discount.coupon

        if coupon.percent_off:
          total_customer_revenue *= (1 - coupon.percent_off/100.0)

        if coupon.amount_off:
          total_customer_revenue -= coupon.amount_off

      subscribers, revenue = plan_revenue[sub.plan.id]
      plan_revenue[sub.plan.id] = (subscribers + 1,
                                   revenue + total_customer_revenue)
  # Advance to the next page of customers.
  offset += len(batch.data)
  batch = billing.Customer.all(count=100, offset=offset)
|
||||
|
||||
def format_money(total_cents):
    """Split an integer cent amount into a (dollars, cents) tuple.

    Uses explicit floor division so the result is identical under Python 2
    and 3 (the original `/` floored only because this file is Python 2).
    """
    dollars = total_cents // 100
    cents = total_cents % 100
    return dollars, cents
|
||||
|
||||
# Print per-plan subscriber counts and revenue, then the grand total.
# Amounts accumulated above are in cents.
total_monthly_revenue = 0
for plan_id, (subs, rev) in plan_revenue.items():
  total_monthly_revenue += rev
  d, c = format_money(rev)
  print '%s: $%d.%02d(%s)' % (plan_id, d, c, subs)

d, c = format_money(total_monthly_revenue)
print 'Monthly revenue: $%d.%02d' % (d, c)
|
15
tools/orphans.py
Normal file
15
tools/orphans.py
Normal file
|
@ -0,0 +1,15 @@
|
|||
from data.database import Image, ImageStorage
from peewee import JOIN, fn
from app import app

# Storages that finished uploading but are referenced by zero images.
orphaned = (ImageStorage
            .select()
            .where(ImageStorage.uploading == False)
            .join(Image, JOIN.LEFT_OUTER)
            .group_by(ImageStorage)
            .having(fn.Count(Image.id) == 0))

# Print each orphan's UUID; counter tracks how many were found.
counter = 0
for orphan in orphaned:
  counter += 1
  print orphan.uuid
|
19
tools/parsebuildpack.py
Normal file
19
tools/parsebuildpack.py
Normal file
|
@ -0,0 +1,19 @@
|
|||
from app import userfiles as user_files

import workers.dockerfilebuild
import requests

# Manual smoke test: download an uploaded build pack and run it through the
# build worker's MIME processors to produce a build directory.
w = workers.dockerfilebuild.DockerfileBuildWorker(100, None)

# Hard-coded userfiles resource to test against, served from the local host.
resource_key = '5c0a985c-405d-4161-b0ac-603c3757b5f9'
resource_url = user_files.get_file_url(resource_key, '127.0.0.1', requires_cors=False)
print resource_url

docker_resource = requests.get(resource_url, stream=True)
c_type = docker_resource.headers['content-type']

# Strip any parameters (e.g. '; charset=...') from the content type before
# using it to pick a processor.
if ';' in c_type:
  c_type = c_type.split(';')[0]

build_dir = w._mime_processors[c_type](docker_resource)
print build_dir
|
31
tools/phpmyadmin/Dockerfile
Normal file
31
tools/phpmyadmin/Dockerfile
Normal file
|
@ -0,0 +1,31 @@
|
|||
FROM phusion/baseimage:0.9.9

ENV DEBIAN_FRONTEND noninteractive
ENV HOME /root
# Bump this value to invalidate the apt cache layer below.
ENV UPDATE_APT 2

RUN apt-get update

# Install LAMP
RUN apt-get install -y lamp-server^

# Install phpMyAdmin
# The package's installer expects a running MySQL and Apache: start both,
# feed the interactive prompts via printf, then shut MySQL down cleanly.
RUN mysqld & \
	service apache2 start; \
	sleep 5; \
	printf y\\n\\n\\n1\\n | apt-get install -y phpmyadmin; \
	sleep 15; \
	mysqladmin -u root shutdown


# Setup phpmyadmin to run
RUN echo "Include /etc/phpmyadmin/apache.conf" >> /etc/apache2/apache2.conf
RUN rm /etc/phpmyadmin/config.inc.php

# Replace the generated config with our own (see config.inc.php alongside).
ADD config.inc.php /etc/phpmyadmin/config.inc.php

# Register the Apache/phpMyAdmin runner as a my_init-managed service.
ADD run-admin.sh /etc/service/phpadmin/run

EXPOSE 80

CMD ["/sbin/my_init"]
|
59
tools/phpmyadmin/config.inc.php
Normal file
59
tools/phpmyadmin/config.inc.php
Normal file
|
@ -0,0 +1,59 @@
|
|||
<?php
|
||||
/**
|
||||
* Debian local configuration file
|
||||
*
|
||||
* This file overrides the settings made by phpMyAdmin interactive setup
|
||||
* utility.
|
||||
*
|
||||
* For example configuration see
|
||||
* /usr/share/doc/phpmyadmin/examples/config.sample.inc.php
|
||||
* or
|
||||
* /usr/share/doc/phpmyadmin/examples/config.manyhosts.inc.php
|
||||
*
|
||||
* NOTE: do not add security sensitive data to this file (like passwords)
|
||||
* unless you really know what you're doing. If you do, any user that can
|
||||
* run PHP or CGI on your webserver will be able to read them. If you still
|
||||
* want to do this, make sure to properly secure the access to this file
|
||||
* (also on the filesystem level).
|
||||
*/
|
||||
|
||||
// Load secret generated on postinst
|
||||
include('/var/lib/phpmyadmin/blowfish_secret.inc.php');
|
||||
|
||||
// Load autoconf local config
|
||||
include('/var/lib/phpmyadmin/config.inc.php');
|
||||
|
||||
/**
|
||||
* Server(s) configuration
|
||||
*/
|
||||
$i = 0;
|
||||
// The $cfg['Servers'] array starts with $cfg['Servers'][1]. Do not use $cfg['Servers'][0].
|
||||
// You can disable a server config entry by setting host to ''.
|
||||
$i++;
|
||||
|
||||
/**
|
||||
* Read configuration from dbconfig-common
|
||||
* You can regenerate it using: dpkg-reconfigure -plow phpmyadmin
|
||||
*/
|
||||
if (is_readable('/etc/phpmyadmin/config-db.php')) {
|
||||
require('/etc/phpmyadmin/config-db.php');
|
||||
} else {
|
||||
error_log('phpmyadmin: Failed to load /etc/phpmyadmin/config-db.php.'
|
||||
. ' Check group www-data has read access.');
|
||||
}
|
||||
|
||||
$cfg['Servers'][$i]['auth_type'] = 'HTTP';
|
||||
$cfg['Servers'][$i]['hide_db'] = '(mysql|information_schema|phpmyadmin)';
|
||||
/* Server parameters */
|
||||
$cfg['Servers'][$i]['host'] = 'db1.quay.io';
|
||||
$cfg['Servers'][$i]['ssl'] = true;
|
||||
|
||||
/*
|
||||
* End of servers configuration
|
||||
*/
|
||||
|
||||
/*
|
||||
* Directories for saving/loading files from server
|
||||
*/
|
||||
$cfg['UploadDir'] = '';
|
||||
$cfg['SaveDir'] = '';
|
4
tools/phpmyadmin/run-admin.sh
Executable file
4
tools/phpmyadmin/run-admin.sh
Executable file
|
@ -0,0 +1,4 @@
|
|||
#! /bin/bash

# Service entrypoint (installed at /etc/service/phpadmin/run): start Apache
# for the phpMyAdmin frontend, then run mysqld as the foreground process.
service apache2 start
mysqld
|
44
tools/relationships.py
Normal file
44
tools/relationships.py
Normal file
|
@ -0,0 +1,44 @@
|
|||
from data.database import User, Repository, TeamMember
|
||||
|
||||
|
||||
def fix_ident(ident):
    """Sanitize an identifier for use as a Graphviz node name by stripping
    '-', '/' and '.' characters.

    Rewritten with chained str.replace: the previous two-argument
    str.translate(None, '-/.') form is Python 2 only; this behaves
    identically and also runs under Python 3.
    """
    return str(ident).replace('-', '').replace('/', '').replace('.', '')
|
||||
|
||||
|
||||
# Emit a Graphviz dot file relating repositories, namespaces, users and teams.
with open('outfile.dot', 'w') as outfile:
  outfile.write('digraph relationships {\n')

  # Edge from each repository node to its owning namespace.
  for repo in Repository.select():
    ns = fix_ident(repo.namespace_user.username)
    outfile.write('%s_%s -> %s\n' % (ns, fix_ident(repo.name), ns))

  teams_in_orgs = set()

  for member in TeamMember.select():
    # Skip robot accounts (their usernames contain '+').
    if '+' in member.user.username:
      continue

    org_name = fix_ident(member.team.organization.username)

    # Emit the team -> organization edge only once per team.
    team_to_org = (member.team.name, member.team.organization.username)
    if not team_to_org in teams_in_orgs:
      teams_in_orgs.add(team_to_org)
      outfile.write('%s_%s -> %s\n' % (org_name, fix_ident(member.team.name),
                                       org_name))

    team_name = fix_ident(member.team.name)

    # Edge from the member to the team node; teams are drawn as boxes.
    outfile.write('%s -> %s_%s\n' % (fix_ident(member.user.username), org_name,
                                     team_name))
    outfile.write('%s_%s [shape=box]\n' % (org_name, team_name))

  # Shape user nodes: circles for organizations, triangles for plain users.
  for user in User.select():
    if '+' in user.username:
      continue

    if user.organization:
      outfile.write('%s [shape=circle]\n' % fix_ident(user.username))
    else:
      outfile.write('%s [shape=triangle]\n' % fix_ident(user.username))

  outfile.write('}')
|
27
tools/renameuser.py
Normal file
27
tools/renameuser.py
Normal file
|
@ -0,0 +1,27 @@
|
|||
from app import app
|
||||
from data import model
|
||||
|
||||
import argparse
|
||||
|
||||
def renameUser(username, new_name):
    """Rename the user or organization `username` to `new_name`.

    Raises:
        Exception: if the new name equals the old name, is already taken, or
        the original username does not exist.
    """
    if username == new_name:
        raise Exception('Must give a new username')

    check = model.user.get_user_or_org(new_name)
    if check is not None:
        raise Exception('New username %s already exists' % new_name)

    existing = model.user.get_user_or_org(username)
    if existing is None:
        raise Exception('Username %s does not exist' % username)

    # Parenthesized single-argument prints behave identically on Python 2.
    print('Renaming user...')
    model.user.change_username(existing.id, new_name)
    print('Rename complete')
|
||||
|
||||
|
||||
parser = argparse.ArgumentParser(description='Rename a user')
|
||||
parser.add_argument('username', help='The current username')
|
||||
parser.add_argument('new_name', help='The new username')
|
||||
args = parser.parse_args()
|
||||
renameUser(args.username, args.new_name)
|
33
tools/renderinvoice.py
Normal file
33
tools/renderinvoice.py
Normal file
|
@ -0,0 +1,33 @@
|
|||
import stripe
|
||||
from app import app
|
||||
|
||||
from util.invoice import renderInvoiceToPdf
|
||||
|
||||
from data import model
|
||||
|
||||
import argparse
|
||||
|
||||
def sendInvoice(invoice_id):
    """Render the given Stripe invoice to PDF and write it to invoice.pdf
    in the current directory.

    invoice_id: the Stripe invoice identifier to render.
    """
    invoice = stripe.Invoice.retrieve(invoice_id)
    if not invoice['customer']:
        print('No customer found')
        return

    customer_id = invoice['customer']
    user = model.user.get_user_or_org_by_customer_id(customer_id)
    if not user:
        print('No user found for customer %s' % (customer_id))
        return

    # Rendering relies on Flask helpers, so run inside an app context.
    with app.app_context():
        file_data = renderInvoiceToPdf(invoice, user)
        with open('invoice.pdf', 'wb') as f:
            f.write(file_data)

        print('Invoice output as invoice.pdf')
|
||||
|
||||
# CLI entry point: render the invoice given on the command line to PDF.
parser = argparse.ArgumentParser(description='Generate an invoice')
parser.add_argument('invoice_id', help='The invoice ID')
args = parser.parse_args()
sendInvoice(args.invoice_id)
|
11
tools/screenshots/README.md
Normal file
11
tools/screenshots/README.md
Normal file
|
@ -0,0 +1,11 @@
|
|||
Run with:
|
||||
|
||||
```
|
||||
casperjs screenshots.js
|
||||
```
|
||||
|
||||
Debug run (i.e. hits localhost instead of production):
|
||||
|
||||
```
|
||||
casperjs screenshots.js --d
|
||||
```
|
181
tools/screenshots/screenshots.js
Normal file
181
tools/screenshots/screenshots.js
Normal file
|
@ -0,0 +1,181 @@
|
|||
// Viewport dimensions used for every capture.
var width = 1060;
var height = 768;

var casper = require('casper').create({
  viewportSize: {
    width: width,
    height: height
  },
  verbose: true,
  logLevel: "debug"
});

// Pass --d on the command line to run against a local dev server
// (with fixture data) instead of production quay.io.
var options = casper.cli.options;
var isDebug = !!options['d'];

var rootUrl = isDebug ? 'http://localhost:5000/' : 'https://quay.io/';
var repo = isDebug ? 'complex' : 'r0';
var org = isDebug ? 'buynlarge' : 'devtable'; // fixed: statement was missing its semicolon
var orgrepo = isDebug ? 'buynlarge/orgrepo' : 'quay/testconnect2';
var buildrepo = isDebug ? 'devtable/building' : 'quay/testconnect2';

// Directory all screenshots are written into.
var outputDir = "screenshots/";
|
||||
|
||||
// Mirror in-page console messages into the CasperJS log.
casper.on("remote.message", function(msg, trace) {
  this.echo("Message: " + msg, "DEBUG");
});

// Report page-side JS errors, including every stack frame.
casper.on("page.error", function(msg, trace) {
  this.echo("Page error: " + msg, "ERROR");
  trace.forEach(function(frame) {
    this.echo(JSON.stringify(frame), "ERROR");
  }, this);
});
|
||||
|
||||
// Sign in before taking any screenshots.
// NOTE(review): a production password is committed here in plain text;
// it should be rotated and supplied via the environment instead.
var credentials = {
  'username': isDebug ? 'devtable' : 'quaydemo',
  'password': isDebug ? 'password': 'C>K98%y"_=54x"<',
};

casper.start(rootUrl + 'signin', function () {
  this.wait(1000);
});

casper.thenClick('.accordion-toggle[data-target="#collapseSignin"]', function() {
  this.wait(1000);
});

casper.then(function () {
  this.fill('.form-signin', credentials, false);
});

casper.thenClick('.form-signin button[type=submit]', function() {
  this.waitForText('Repositories');
});
|
||||
|
||||
// --- Repository screenshots ---

// Register a step that logs a progress message.
function logStep(message) {
  casper.then(function() {
    this.log(message);
  });
}

// Register a step that opens |url| and then pauses for |ms| milliseconds.
function openAndWait(url, ms) {
  casper.thenOpen(url, function() {
    this.wait(ms);
  });
}

// Register a step that captures the current page into the output directory.
function snap(filename) {
  casper.then(function() {
    this.capture(outputDir + filename);
  });
}

casper.then(function() {
  this.waitForSelector('.repo-list');
  this.log('Generating repositories screenshot.');
});
snap('user-home.png');

logStep('Generating repository view screenshot.');
openAndWait(rootUrl + 'repository/devtable/' + repo + '?tag=v2.0', 1000);
snap('repo-view.png');

logStep('Generating repository tags screenshot.');
openAndWait(rootUrl + 'repository/devtable/' + repo + '?tab=tags&tag=v2.0', 1000);
snap('repo-tags.png');

logStep('Generating repository tree screenshot.');
// The changes view renders a tree; the longer wait lets it finish drawing.
openAndWait(rootUrl + 'repository/devtable/' + repo + '?tab=changes&tag=v2.0,prod,staging', 5000);
snap('repo-tree.png');

logStep('Generating repository settings screenshot.');
openAndWait(rootUrl + 'repository/devtable/' + repo + '?tab=settings', 1000);
snap('repo-settings.png');

// NOTE(review): this extra navigation to the tags tab is never captured —
// it looks like a leftover step, kept here to preserve behavior.
openAndWait(rootUrl + 'repository/devtable/' + repo + '?tab=tags', 1000);
|
||||
|
||||
// --- Organization screenshots ---

casper.then(function() {
  this.log('Generating organization view screenshot.');
});

casper.thenOpen(rootUrl + 'organization/' + org, function() {
  this.waitForSelector('.organization-name');
});

casper.then(function() {
  this.capture(outputDir + 'org-view.png');
});

casper.then(function() {
  this.log('Generating organization teams screenshot.');
});

casper.thenOpen(rootUrl + 'organization/' + org + '?tab=teams', function() {
  // NOTE(review): presumably every org has an 'Owners' team, so its name
  // signals the tab has loaded — verify against fixtures.
  this.waitForText('Owners');
});

casper.then(function() {
  this.capture(outputDir + 'org-teams.png');
});

casper.then(function() {
  this.log('Generating organization settings screenshot.');
});

casper.thenOpen(rootUrl + 'organization/' + org + '?tab=usage', function() {
  this.wait(1000); // fixed: statement was missing its semicolon
});

casper.then(function() {
  this.capture(outputDir + 'org-settings.png');
});

casper.then(function() {
  this.log('Generating organization logs screenshot.');
});

casper.thenClick('a[data-target="#logs"]', function() {
  // Wait for the chart SVG to render, then pause briefly so it settles
  // before capturing a region taller than the viewport.
  this.waitForSelector('svg > g', function() {
    this.wait(1000, function() {
      this.capture(outputDir + 'org-logs.png', {
        top: 0,
        left: 0,
        width: width,
        height: height + 200
      });
    });
  });
});
|
||||
|
||||
// --- Build history screenshot, then execute the whole suite ---

casper.then(function() {
  this.log('Generating build history screenshot.');
});

var buildsUrl = rootUrl + 'repository/' + buildrepo + '?tab=builds';
casper.thenOpen(buildsUrl, function() {
  // Long fixed wait plus a text check so build rows have time to appear.
  this.wait(10000);
  this.waitForText('Triggered By');
});

casper.then(function() {
  this.capture(outputDir + 'build-history.png');
});

// All steps are registered; run them.
casper.run();
|
27
tools/sendconfirmemail.py
Normal file
27
tools/sendconfirmemail.py
Normal file
|
@ -0,0 +1,27 @@
|
|||
from app import app
|
||||
|
||||
from util.useremails import send_confirmation_email
|
||||
|
||||
from data import model
|
||||
|
||||
import argparse
|
||||
|
||||
from flask import Flask, current_app
|
||||
from flask_mail import Mail
|
||||
|
||||
def sendConfirmation(username):
|
||||
user = model.user.get_nonrobot_user(username)
|
||||
if not user:
|
||||
print 'No user found'
|
||||
return
|
||||
|
||||
|
||||
with app.app_context():
|
||||
confirmation_code = model.user.create_confirm_email_code(user)
|
||||
send_confirmation_email(user.username, user.email, confirmation_code)
|
||||
print 'Email sent to %s' % (user.email)
|
||||
|
||||
# CLI entry point.
cli = argparse.ArgumentParser(description='Sends a confirmation email')
cli.add_argument('username', help='The username')
sendConfirmation(cli.parse_args().username)
|
27
tools/sendresetemail.py
Normal file
27
tools/sendresetemail.py
Normal file
|
@ -0,0 +1,27 @@
|
|||
from app import app
|
||||
|
||||
from util.useremails import send_recovery_email
|
||||
|
||||
from data import model
|
||||
|
||||
import argparse
|
||||
|
||||
from flask import Flask, current_app
|
||||
from flask_mail import Mail
|
||||
|
||||
def sendReset(username):
|
||||
user = model.user.get_nonrobot_user(username)
|
||||
if not user:
|
||||
print 'No user found'
|
||||
return
|
||||
|
||||
|
||||
with app.app_context():
|
||||
confirmation_code = model.user.create_reset_password_email_code(user.email)
|
||||
send_recovery_email(user.email, confirmation_code)
|
||||
print 'Email sent to %s' % (user.email)
|
||||
|
||||
# CLI entry point.
cli = argparse.ArgumentParser(description='Sends a reset email')
cli.add_argument('username', help='The username')
sendReset(cli.parse_args().username)
|
14
tools/sharedimagestorage.py
Normal file
14
tools/sharedimagestorage.py
Normal file
|
@ -0,0 +1,14 @@
|
|||
from data.database import Image, ImageStorage
|
||||
|
||||
query = ImageStorage.select().annotate(Image)
|
||||
|
||||
saved_bytes = 0
|
||||
total_bytes = 0
|
||||
|
||||
for storage in query:
|
||||
if storage.image_size is not None:
|
||||
saved_bytes += (storage.count - 1) * storage.image_size
|
||||
total_bytes += storage.count * storage.image_size
|
||||
|
||||
print "Saved: %s" % saved_bytes
|
||||
print "Total: %s" % total_bytes
|
Reference in a new issue