Merge branch 'dockerbuild' of https://bitbucket.org/yackob03/quay into dockerbuild

Joseph Schorr 2013-10-29 18:57:07 -04:00
commit 8a9b696200
9 changed files with 40 additions and 22 deletions

View file

@@ -1,6 +1,7 @@
 to prepare a new host:
 ```
 sudo apt-get install software-properties-common
+sudo apt-add-repository -y ppa:nginx/stable
 sudo apt-get update
 sudo apt-get install -y git python-virtualenv python-dev phantomjs

View file

@@ -124,7 +124,7 @@ def build_image(build_dir, tag_name, num_steps, result_object):
   except Exception as e:
     logger.exception('Exception when processing request.')
     result_object['status'] = 'error'
-    result_object['message'] = e.message
+    result_object['message'] = str(e.message)

 MIME_PROCESSORS = {
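The `str()` wrapper matters because in Python 2 `e.message` is whatever object the exception was constructed with, and a non-string value can blow up later when the result dict is serialized for the status endpoint. A minimal sketch of the failure mode (the exception value here is illustrative, not from the repo):

```python
import json

try:
    raise Exception(IOError('connection reset'))  # message is an IOError object
except Exception as e:
    # json.dumps({'message': e.message}) would raise TypeError: not JSON serializable
    result = {'status': 'error', 'message': str(e.message)}
    print json.dumps(result)  # {"status": "error", "message": "connection reset"}
```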
@@ -146,7 +146,15 @@ build = {
 pool = ThreadPool(1)

-def start_build(resource_url, tag_name, access_token):
+@app.before_first_request
+def start_build():
+  resource_url = os.environ['RESOURCE_URL']
+  tag_name = os.environ['TAG']
+  access_token = os.environ['TOKEN']
+
+  logger.debug('Starting job with resource url: %s tag: %s and token: %s' %
+               (resource_url, tag_name, access_token))
+
   # Save the token
   host = re.match(r'([a-z0-9.:]+)/.+/.+$', tag_name)
   if host:
@@ -201,13 +209,4 @@ def health_check():
 if __name__ == '__main__':
   logging.basicConfig(level=logging.DEBUG, format=LOG_FORMAT)

-  resource_url = os.environ['RESOURCE_URL']
-  tag_name = os.environ['TAG']
-  access_token = os.environ['TOKEN']
-
-  logger.debug('Starting job with resource url: %s tag: %s and token: %s' %
-               (resource_url, tag_name, access_token))
-
-  start_build(resource_url, tag_name, access_token)
   app.run(host='0.0.0.0', port=5002)
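Moving the job parameters out of `__main__` and into environment variables lets the same worker image run unmodified under `docker run -e RESOURCE_URL=... -e TAG=... -e TOKEN=...`, with Flask's `before_first_request` hook kicking off the build once the server receives its first request. A stripped-down sketch of the pattern (the env var names match the diff; the rest is illustrative):

```python
import os
import logging
from flask import Flask

app = Flask(__name__)
logger = logging.getLogger(__name__)

@app.before_first_request
def start_build():
    # Parameters arrive via the container environment instead of argv.
    resource_url = os.environ['RESOURCE_URL']
    tag_name = os.environ['TAG']
    access_token = os.environ['TOKEN']
    logger.debug('Starting job for tag: %s', tag_name)
    # ...hand the job off to the worker pool here...

if __name__ == '__main__':
    app.run(host='0.0.0.0', port=5002)
```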

View file

@@ -103,7 +103,6 @@ class BuildNodeConfig(object):
 class DebugConfig(FlaskConfig, MailConfig, LocalStorage, SQLiteDB,
                   StripeTestConfig, MixpanelTestConfig, GitHubTestConfig,
                   DigitalOceanConfig, AWSCredentials, BuildNodeConfig):
-  REGISTRY_SERVER = 'localhost:5000'
   LOGGING_CONFIG = {
     'level': logging.DEBUG,
     'format': LOG_FORMAT
@@ -117,7 +116,6 @@ class LocalHostedConfig(FlaskConfig, MailConfig, S3Storage, RDSMySQL,
                         StripeLiveConfig, MixpanelTestConfig,
                         GitHubProdConfig, DigitalOceanConfig,
                         BuildNodeConfig):
-  REGISTRY_SERVER = 'localhost:5000'
   LOGGING_CONFIG = {
     'level': logging.DEBUG,
     'format': LOG_FORMAT
@@ -128,7 +126,6 @@ class LocalHostedConfig(FlaskConfig, MailConfig, S3Storage, RDSMySQL,
 class ProductionConfig(FlaskConfig, MailConfig, S3Storage, RDSMySQL,
                        StripeLiveConfig, MixpanelProdConfig,
                        GitHubProdConfig, DigitalOceanConfig, BuildNodeConfig):
-  REGISTRY_SERVER = 'quay.io'
   LOGGING_CONFIG = {
     'stream': sys.stderr,
     'level': logging.DEBUG,
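Dropping `REGISTRY_SERVER` from all three config classes is consistent with the index change further down: the endpoint is now derived from the incoming request, so the per-environment values (`localhost:5000` vs `quay.io`) no longer need to be hard-coded.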

View file

@@ -54,6 +54,10 @@ class WorkQueue(object):
   def complete(self, completed_item):
     completed_item.delete_instance()

+  def incomplete(self, incomplete_item):
+    incomplete_item.available = True
+    incomplete_item.save()
+

 image_diff_queue = WorkQueue('imagediff')
 dockerfile_build_queue = WorkQueue('dockerfilebuild')
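The new `incomplete()` method is the retry half of the queue contract: `complete()` deletes a finished item for good, while `incomplete()` flips it back to available so another worker can claim it. A sketch of how a consumer is expected to use the pair (the `get()` call, `do_work`, and `TransientError` are hypothetical stand-ins):

```python
def process_one(queue):
    item = queue.get()              # claim the next available item (hypothetical API)
    if item is None:
        return
    try:
        do_work(item)
    except TransientError:
        queue.incomplete(item)      # retryable failure: make it claimable again
    else:
        queue.complete(item)        # success: delete the row
```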

View file

@@ -2,6 +2,7 @@ import json
 import urllib
 import json
 import logging
+import urlparse

 from flask import request, make_response, jsonify, abort
 from functools import wraps
@@ -25,7 +26,9 @@ def generate_headers(role='read'):
   def wrapper(namespace, repository, *args, **kwargs):
     response = f(namespace, repository, *args, **kwargs)

-    response.headers['X-Docker-Endpoints'] = app.config['REGISTRY_SERVER']
+    # We run our index and registry on the same hosts for now
+    registry_server = urlparse.urlparse(request.url).netloc
+    response.headers['X-Docker-Endpoints'] = registry_server

     has_token_request = request.headers.get('X-Docker-Token', '')
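Deriving `X-Docker-Endpoints` from the request URL keeps the header correct on any deployment without configuration: `urlparse` (the Python 2 stdlib module) extracts the `host[:port]` the client actually hit. For example:

```python
import urlparse

print urlparse.urlparse('https://quay.io/v1/repositories/foo/bar/images').netloc
# -> 'quay.io'
print urlparse.urlparse('http://localhost:5000/v1/_ping').netloc
# -> 'localhost:5000'
```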

View file

@@ -10,6 +10,7 @@ events {
 }

 http {
+  types_hash_max_size 2048;
   include /etc/nginx/mime.types;
   default_type application/octet-stream;
@@ -36,8 +37,8 @@ http {
   keepalive_timeout 5;

   ssl on;
-  ssl_certificate /home/ubuntu/quay/certs/unified.cert;
-  ssl_certificate_key /home/ubuntu/quay/certs/quay.key;
+  ssl_certificate ./certs/unified.cert;
+  ssl_certificate_key ./certs/quay.key;
   ssl_session_timeout 5m;
   ssl_protocols SSLv3 TLSv1;
   ssl_ciphers ALL:!ADH:!EXPORT56:RC4+RSA:+HIGH:+MEDIUM:+LOW:+SSLv3:+EXP;
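Note that nginx resolves relative paths like `./certs/unified.cert` against its prefix directory, not the location of the config file, so this change assumes nginx is launched from (or with `-p` pointing at) the repo checkout rather than as the stock system service.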

View file

@@ -15,4 +15,5 @@ beautifulsoup4
 marisa-trie
 apscheduler
 python-daemon
-paramiko
+paramiko
+python-digitalocean

View file

@@ -12,6 +12,7 @@ beautifulsoup4==4.3.2
 blinker==1.3
 boto==2.15.0
 distribute==0.6.34
+ecdsa==0.10
 eventlet==0.14.0
 greenlet==0.4.1
 gunicorn==18.0
@@ -19,10 +20,13 @@ itsdangerous==0.23
 lockfile==0.9.1
 marisa-trie==0.5.1
 mixpanel-py==3.0.0
+paramiko==1.12.0
 peewee==2.1.4
 py-bcrypt==0.4
+pycrypto==2.6.1
 python-daemon==1.6
 python-dateutil==2.1
+python-digitalocean==0.5
 requests==2.0.0
 six==1.4.1
 stripe==1.9.8
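The `ecdsa` and `pycrypto` pins appear to ride along as transitive dependencies of `paramiko` 1.12; freezing them alongside it keeps the fully-pinned list installable without resolver surprises.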

View file

@@ -5,7 +5,6 @@ import time
 import argparse
 import digitalocean
 import requests
-import paramiko

 from apscheduler.scheduler import Scheduler
 from multiprocessing.pool import ThreadPool
@@ -43,7 +42,7 @@ def retry_command(to_call, args=[], kwargs={}, retries=5, period=5):
 def get_status(url):
-  return requests.get(url).json()['status']
+  return retry_command(requests.get, [url]).json()['status']

 def babysit_builder(request):
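`get_status` now tolerates transient HTTP failures by routing the GET through `retry_command`. The helper's body isn't shown in this diff, but a plausible implementation matching the signature in the hunk header would be (a sketch, not the repo's actual code):

```python
import time
import logging

logger = logging.getLogger(__name__)

def retry_command(to_call, args=[], kwargs={}, retries=5, period=5):
    for _ in range(retries - 1):
        try:
            return to_call(*args, **kwargs)
        except Exception as e:
            logger.debug('Retrying after failure: %s', e)
            time.sleep(period)
    return to_call(*args, **kwargs)  # last attempt propagates any exception
```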
@@ -109,11 +108,16 @@ def babysit_builder(request):
     repository_build.phase = 'initializing'
     repository_build.save()

+    # We wait until here to import paramiko because otherwise it doesn't work
+    # under the daemon context.
+    import paramiko
     ssh_client = paramiko.SSHClient()
     ssh_client.set_missing_host_key_policy(paramiko.AutoAddPolicy())

     logger.debug('Connecting to droplet through ssh at ip: %s' %
                  droplet.ip_address)
     retry_command(ssh_client.connect, [droplet.ip_address, 22, 'root'],
-                  {'look_for_keys': False,
+                  {'look_for_keys': False, 'timeout': 10.0,
                    'key_filename': app.config['DO_SSH_PRIVATE_KEY_FILENAME']})

     # Load the node with the pull token
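The deferred `import paramiko` is likely working around python-daemon closing inherited file descriptors when it detaches: PyCrypto's random pool, which paramiko initializes at import time, holds an fd on `/dev/urandom`, so importing before daemonization leaves paramiko with a dead RNG. Importing inside the function, after the daemon context is established, sidesteps that.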
@@ -183,7 +187,7 @@ def babysit_builder(request):
     # clean up the DO node
     logger.debug('Cleaning up DO node.')
-    # retry_command(droplet.destroy)
+    retry_command(droplet.destroy)

     repository_build.status_url = None
     repository_build.build_node_id = None
@@ -212,6 +216,10 @@ def process_work_items(pool):
     def complete_callback(completed):
       if completed:
         dockerfile_build_queue.complete(local_item)
+      else:
+        # We have a retryable error, add the job back to the queue
+        dockerfile_build_queue.incomplete(local_item)

     return complete_callback

   logger.debug('Sending work item to thread pool: %s' % pool)
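The callback factory closes over `local_item` so each async job completes or re-queues exactly its own work item; `ThreadPool.apply_async` invokes the callback with the worker function's return value, so `babysit_builder` returning falsy routes the item back through `incomplete()`. Condensed, the wiring looks like this (the factory name is illustrative; the diff builds the callback inline):

```python
from multiprocessing.pool import ThreadPool

pool = ThreadPool(1)

def make_callback(local_item):
    def complete_callback(completed):
        if completed:
            dockerfile_build_queue.complete(local_item)
        else:
            dockerfile_build_queue.incomplete(local_item)
    return complete_callback

# apply_async calls the callback with babysit_builder's return value.
pool.apply_async(babysit_builder, [item], callback=make_callback(item))
```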