Flesh out the create API and wire everything up together. Next up, testing.

yackob03 2013-10-25 01:14:38 -04:00
parent 2afb8c85b1
commit 9b9a29c310
10 changed files with 156 additions and 15 deletions


@@ -3,6 +3,7 @@ import logging
 import shutil
 import os
 import re
+import requests
 from flask import Flask, request, send_file, jsonify, redirect, url_for, abort
 from zipfile import ZipFile
@@ -39,7 +40,7 @@ def prepare_zip(request_file):
   # Save the zip file to temp somewhere
   with TemporaryFile() as zip_file:
-    request_file.save(zip_file)
+    zip_file.write(request_file.content)
     to_extract = ZipFile(zip_file)
     to_extract.extractall(build_dir)
@@ -49,7 +50,8 @@ def prepare_zip(request_file):
 def prepare_dockerfile(request_file):
   build_dir = mkdtemp(prefix='docker-build-')
   dockerfile_path = os.path.join(build_dir, "Dockerfile")
-  request_file.save(dockerfile_path)
+  with open(dockerfile_path, 'w') as dockerfile:
+    dockerfile.write(request_file.content)
   return build_dir
@@ -141,10 +143,12 @@ pool = ThreadPool(1)
 @app.route('/build/', methods=['POST'])
 def start_build():
   docker_input = request.files['dockerfile']
   c_type = docker_input.content_type
+  resource_url = request.values['resource_url']
   tag_name = request.values['tag']
+  download_resource = requests.get(resource_url)
+  download_resource.raise_for_status()  # assumed fix: fail fast if the download failed
   logger.info('Request to build file of type: %s with tag: %s' %
               (c_type, tag_name))
@@ -175,7 +179,9 @@ def start_build():
   pool.apply_async(build_image, [build_dir, tag_name, num_steps,
                                  result_object])
-  return redirect(url_for('get_status', job_id=job_id))
+  resp = make_response('Created', 201)  # make_response must also be added to the flask imports above
+  resp.headers['Location'] = url_for('get_status', job_id=job_id)
+  return resp
@app.route('/build/<job_id>')

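Taken together, the build server now exposes a small REST contract: POST the form fields tag and resource_url to /build/, receive a 201 with a Location header, then poll that URL for job status. A minimal client sketch of that contract (host and URLs are placeholders; port 5002 matches the worker code further down):

import requests
import time

BUILD_ENDPOINT = 'http://localhost:5002/build/'  # placeholder host

resp = requests.post(BUILD_ENDPOINT,
                     data={
                       'tag': 'quay.io/example/repo',
                       'resource_url': 'https://example.com/build-context.zip',
                     },
                     # start_build still reads a 'dockerfile' file field
                     files={'dockerfile': open('Dockerfile', 'rb')})
status_url = resp.headers['Location']

# Poll until the job reaches a terminal phase, mirroring babysit_builder below.
status = requests.get(status_url).json()['status']
while status not in ('error', 'completed'):
  time.sleep(5)
  status = requests.get(status_url).json()['status']
print('Build finished with status: %s' % status)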

@@ -1,2 +1,3 @@
 flask
+requests
 -e git+git://github.com/DevTable/docker-py.git#egg=docker-py


@@ -42,10 +42,13 @@ class RDSMySQL(object):
   DB_DRIVER = MySQLDatabase

-class S3Storage(object):
+class AWSCredentials(object):
+  AWS_ACCESS_KEY = 'AKIAJWZWUIS24TWSMWRA'
+  AWS_SECRET_KEY = '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'
+  REGISTRY_S3_BUCKET = 'quay-registry'
+
+class S3Storage(AWSCredentials):
   STORAGE_KIND = 's3'
@@ -89,11 +92,12 @@ class DigitalOceanConfig():
   DO_CLIENT_ID = 'LJ44y2wwYj1MD0BRxS6qHA'
   DO_CLIENT_SECRET = 'b9357a6f6ff45a33bb03f6dbbad135f9'
   DO_SSH_KEY_ID = '46986'
+  DO_SSH_PRIVATE_KEY_FILENAME = 'certs/digital_ocean'

 class DebugConfig(FlaskConfig, MailConfig, LocalStorage, SQLiteDB,
                   StripeTestConfig, MixpanelTestConfig, GitHubTestConfig,
-                  DigitalOceanConfig):
+                  DigitalOceanConfig, AWSCredentials):
   REGISTRY_SERVER = 'localhost:5000'
   LOGGING_CONFIG = {
     'level': logging.DEBUG,

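Since DebugConfig now mixes in AWSCredentials, every uppercase attribute on any base class becomes a Flask config key. A quick sketch of how the mixin composition resolves (assumes the classes above live in a module named config):

from flask import Flask

app = Flask(__name__)
app.config.from_object('config.DebugConfig')

# Supplied by the AWSCredentials mixin:
print(app.config['AWS_ACCESS_KEY'])
print(app.config['REGISTRY_S3_BUCKET'])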

@@ -151,10 +151,11 @@ class RepositoryTag(BaseModel):
 class RepositoryBuild(BaseModel):
+  repository = ForeignKeyField(Repository)
   resource_key = CharField()
+  digitalocean_build_node_id = IntegerField(null=True)
   phase = CharField(default='waiting')
   status_url = CharField(null=True)
-  repository = ForeignKeyField(Repository)

 class QueueItem(BaseModel):


@@ -287,8 +287,8 @@ def set_repository_visibility(repo, visibility):
   repo.save()

-def create_repository(namespace, name, owner):
-  private = Visibility.get(name='private')
+def create_repository(namespace, name, owner, visibility='private'):
+  private = Visibility.get(name=visibility)
   repo = Repository.create(namespace=namespace, name=name,
                            visibility=private)
   admin = Role.get(name='admin')
@@ -560,3 +560,7 @@ def get_repository_build(request_dbid):
   except RepositoryBuild.DoesNotExist:
     msg = 'Unable to locate a build by id: %s' % request_dbid
     raise InvalidRepositoryBuildException(msg)
+
+def create_repository_build(repo, resource_key):
+  return RepositoryBuild.create(repository=repo, resource_key=resource_key)

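For illustration, here is how the two model helpers might be combined by a caller (the user lookup and the resource key are hypothetical):

from data import model

owner = model.get_user('devtable')  # assumes a user-lookup helper exists
repo = model.create_repository('devtable', 'testrepo', owner,
                               visibility='public')

# resource_key is the S3 file id handed back by UserRequestFiles.store_file
build = model.create_repository_build(repo, 'some-s3-file-id')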
data/userfiles.py (new file)

@@ -0,0 +1,34 @@
+import boto.s3.connection
+import os
+
+from boto.s3.key import Key
+from uuid import uuid4
+
+
+class S3FileWriteException(Exception):
+  pass
+
+
+class UserRequestFiles(object):
+  def __init__(self, s3_access_key, s3_secret_key, bucket_name):
+    self._s3_conn = boto.s3.connection.S3Connection(s3_access_key,
+                                                    s3_secret_key,
+                                                    is_secure=False)
+    self._bucket = self._s3_conn.get_bucket(bucket_name)
+    self._prefix = 'userfiles'
+
+  def store_file(self, flask_file):
+    file_id = str(uuid4())
+    full_key = os.path.join(self._prefix, file_id)
+    k = Key(self._bucket, full_key)  # Key takes the bucket as its first argument
+    bytes_written = k.set_contents_from_file(flask_file)
+    if bytes_written == 0:
+      raise S3FileWriteException('Unable to write file to S3')
+    return file_id
+
+  def get_file_url(self, file_id, expires_in=300):
+    full_key = os.path.join(self._prefix, file_id)
+    k = Key(self._bucket, full_key)  # Key takes the bucket as its first argument
+    return k.generate_url(expires_in)

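For reference, a minimal sketch of the round trip through UserRequestFiles (credentials and bucket are placeholders):

from data.userfiles import UserRequestFiles

user_files = UserRequestFiles('my-access-key', 'my-secret-key', 'my-bucket')

with open('Dockerfile') as dockerfile:
  file_id = user_files.store_file(dockerfile)

# The signed URL is valid for five minutes by default.
print(user_files.get_file_url(file_id))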

@@ -2,7 +2,7 @@ import logging
 import stripe
 import re

-from flask import request, make_response, jsonify, abort
+from flask import request, make_response, jsonify, abort, url_for
 from flask.ext.login import login_required, current_user, logout_user
 from flask.ext.principal import identity_changed, AnonymousIdentity
 from functools import wraps
@@ -11,6 +11,8 @@ from collections import defaultdict
 import storage

 from data import model
+from data.userfiles import UserRequestFiles
+from data.queue import dockerfile_build_queue
 from app import app
 from util.email import send_confirmation_email, send_recovery_email
 from util.names import parse_repository_name
@@ -170,10 +172,34 @@ def get_matching_users(prefix):
   })

+user_files = UserRequestFiles(app.config['AWS_ACCESS_KEY'],
+                              app.config['AWS_SECRET_KEY'],
+                              app.config['REGISTRY_S3_BUCKET'])

 @app.route('/api/repository/', methods=['POST'])
 @api_login_required
 def create_repo_api():
-  pass
+  namespace_name = request.values['namespace']
+  repository_name = request.values['repository']
+  visibility = request.values['visibility']
+
+  owner = current_user.db_user()
+  repo = model.create_repository(namespace_name, repository_name, owner,
+                                 visibility)
+
+  if request.values['initialize']:  # note: any non-empty string, even 'false', is truthy here
+    logger.debug('User requested repository initialization.')
+    dockerfile_source = request.files['initializedata']
+    dockerfile_id = user_files.store_file(dockerfile_source)
+
+    build_request = model.create_repository_build(repo, dockerfile_id)
+    dockerfile_build_queue.put(json.dumps({'request_id': build_request.id}))
+
+  resp = make_response('Created', 201)
+  resp.headers['Location'] = url_for('get_repo_api', namespace=namespace_name,
+                                     repository=repository_name)
+  return resp
@app.route('/api/find/repository', methods=['GET'])

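A sketch of how a client might drive the new create endpoint (host, session handling, and field values are assumptions):

import requests

session = requests.Session()  # assumes an already-authenticated session

resp = session.post('http://localhost:5000/api/repository/',
                    data={
                      'namespace': 'devtable',
                      'repository': 'testrepo',
                      'visibility': 'private',
                      'initialize': 'true',
                    },
                    files={'initializedata': open('Dockerfile', 'rb')})

assert resp.status_code == 201
print(resp.headers['Location'])  # points at the new repository resource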

@@ -14,4 +14,5 @@ mixpanel-py
 beautifulsoup4
 marisa-trie
 apscheduler
-python-daemon
+python-daemon
+paramiko

Binary file not shown.


@@ -4,11 +4,13 @@ import daemon
 import time
 import argparse
 import digitalocean
+import requests
+import paramiko  # assumed addition: the ssh code below needs it
 from apscheduler.scheduler import Scheduler
 from multiprocessing.pool import ThreadPool

 from data.queue import dockerfile_build_queue
+from data.userfiles import UserRequestFiles
 from data import model
 from app import app
@@ -22,6 +24,21 @@ formatter = logging.Formatter(FORMAT)
 logger = logging.getLogger(__name__)

+def try_connection(url, retries=5, period=5):
+  try:
+    return requests.get(url)
+  except requests.exceptions.ConnectionError as ex:
+    if retries:
+      logger.debug('Retrying connection to url: %s after %ss' % (url, period))
+      time.sleep(period)
+      return try_connection(url, retries-1, period)
+    raise ex
+
+def get_status(url):
+  return requests.get(url).json()['status']
+
 def babysit_builder(request):
   manager = digitalocean.Manager(client_id=app.config['DO_CLIENT_ID'],
                                  api_key=app.config['DO_CLIENT_SECRET'])
@@ -60,16 +77,62 @@ def babysit_builder(request):
   repository_build.phase = 'initializing'
   repository_build.save()

+  # Connect to the new build node over SSH (root on port 22 assumed)
+  ssh_client = paramiko.SSHClient()
+  ssh_client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
+  ssh_client.connect(droplet.ip_address, 22, 'root',
+                     look_for_keys=False,
+                     key_filename=app.config['DO_SSH_PRIVATE_KEY_FILENAME'])

-  # tell it to pull and run the buildserver
+  # Pull and run the buildserver
+  pull_cmd = 'docker pull quay.io/quay/buildserver'
+  _, stdout, _ = ssh_client.exec_command(pull_cmd)
+  start_cmd = 'sudo docker run -d -privileged quay.io/quay/buildserver'
+  _, stdout, _ = ssh_client.exec_command(start_cmd)

+  # wait for the server to be ready
+  logger.debug('Waiting for buildserver to be ready')
+  build_endpoint = 'http://%s:5002/build/' % droplet.ip_address
+  try:
+    try_connection(build_endpoint)
+  except requests.exceptions.ConnectionError:
+    #TODO cleanup
+    pass

+  # send it the job
+  logger.debug('Sending build server request')
+  user_files = UserRequestFiles(app.config['AWS_ACCESS_KEY'],
+                                app.config['AWS_SECRET_KEY'],
+                                app.config['REGISTRY_S3_BUCKET'])
+  repo = repository_build.repository
+  payload = {
+    'tag': 'quay.io/%s/%s' % (repo.namespace, repo.name),
+    'resource_url': user_files.get_file_url(repository_build.resource_key),
+  }
+  start_build = requests.post(build_endpoint, data=payload)

+  # wait for the job to be complete
+  status_url = start_build.headers['Location']
+  logger.debug('Waiting for job to be complete')
+  status = get_status(status_url)
+  while status != 'error' and status != 'completed':
+    logger.debug('Job status is: %s' % status)
+    time.sleep(5)
+    status = get_status(status_url)

+  logger.debug('Job complete with status: %s' % status)
+  if status == 'error':
+    repository_build.phase = 'error'
+  else:
+    repository_build.phase = 'completed'
+  repository_build.save()

+  # clean up the DO node
+  logger.debug('Cleaning up DO node.')
+  droplet.destroy()

   return True
@@ -91,7 +154,8 @@ def process_work_items(pool):
       dockerfile_build_queue.complete(local_item)
     return complete_callback

-  pool.apply_async(babysit_builder, [request], callback=build_callback(item))
+  pool.apply_async(babysit_builder, [request],
+                   callback=build_callback(item))

   item = dockerfile_build_queue.get()