Flesh out the create API and wire everything together. Next up, testing.

yackob03 2013-10-25 01:14:38 -04:00
parent 2afb8c85b1
commit 9b9a29c310
10 changed files with 156 additions and 15 deletions


@@ -4,11 +4,13 @@ import daemon
import time
import argparse
import digitalocean
import requests
from requests.exceptions import ConnectionError
from apscheduler.scheduler import Scheduler
from multiprocessing.pool import ThreadPool
from data.queue import dockerfile_build_queue
from data.userfiles import UserRequestFiles
from data import model
from app import app
@@ -22,6 +24,21 @@ formatter = logging.Formatter(FORMAT)
logger = logging.getLogger(__name__)
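# Fetch a URL, retrying on connection errors while the remote service comes up.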
def try_connection(url, retries=5, period=5):
try:
return requests.get(url)
except ConnectionError as ex:
if retries:
logger.debug('Retrying connection to url: %s after %ss' % (url, period))
time.sleep(period)
return try_connection(url, retries-1, period)
raise ex
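# The buildserver's status endpoint is assumed to return JSON of the form
# {"status": "..."}.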
def get_status(url):
return requests.get(url).json()['status']
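# Drive a single build end-to-end: bring up the buildserver on a DigitalOcean
# droplet over SSH, submit the job, poll it to completion, record the result,
# and destroy the droplet.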
def babysit_builder(request):
manager = digitalocean.Manager(client_id=app.config['DO_CLIENT_ID'],
api_key=app.config['DO_CLIENT_SECRET'])
@@ -60,16 +77,62 @@ def babysit_builder(request):
repository_build.phase = 'initializing'
repository_build.save()
ssh_client = paramiko.SSHClient()
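# A freshly provisioned droplet has an unknown host key; accept it automatically.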
ssh_client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh_client.connect(droplet.ip_address, username='root',
look_for_keys=False,
key_filename=app.config['DO_SSH_PRIVATE_KEY_FILENAME'])
# Pull and run the buildserver
pull_cmd = 'docker pull quay.io/quay/buildserver'
_, stdout, _ = ssh_client.exec_command(pull_cmd)
stdout.read()  # exec_command returns immediately; reading stdout blocks until the pull finishes
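# Privileged mode presumably lets the buildserver run Docker builds inside
# the container (docker-in-docker).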
start_cmd = 'sudo docker run -d -privileged quay.io/quay/buildserver'
_, stdout, _ = ssh_client.exec_command(start_cmd)
# wait for the server to be ready
logger.debug('Waiting for buildserver to be ready')
build_endpoint = 'http://%s:5002/build/' % droplet.ip_address
try:
try_connection(build_endpoint)
except ConnectionError:
# TODO cleanup
pass
# send it the job
logger.debug('Sending build server request')
user_files = UserRequestFiles(app.config['AWS_ACCESS_KEY'],
app.config['AWS_SECRET_KEY'],
app.config['REGISTRY_S3_BUCKET'])
repo = repository_build.repository
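# tag is the image name the buildserver should build and push; resource_url is
# assumed to be a (signed) URL from which it can fetch the uploaded Dockerfile.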
payload = {
'tag': 'quay.io/%s/%s' % (repo.namespace, repo.name),
'resource_url': user_files.get_file_url(repository_build.resource_key),
}
start_build = requests.post(build_endpoint, data=payload)
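# The buildserver is assumed to return the new job's status URL in the
# Location response header.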
# wait for the job to be complete
status_url = start_build.headers['Location']
logger.debug('Waiting for job to be complete')
status = get_status(status_url)
while status not in ('error', 'completed'):
logger.debug('Job status is: %s' % status)
time.sleep(5)
status = get_status(status_url)
logger.debug('Job complete with status: %s' % status)
if status == 'error':
repository_build.phase = 'error'
else:
repository_build.phase = 'completed'
repository_build.save()
# clean up the DO node
logger.debug('Cleaning up DO node.')
droplet.destroy()
return True
@@ -91,7 +154,8 @@ def process_work_items(pool):
dockerfile_build_queue.complete(local_item)
return complete_callback
pool.apply_async(babysit_builder, [request],
callback=build_callback(item))
item = dockerfile_build_queue.get()