Merge branch 'dockerbuild' of https://bitbucket.org/yackob03/quay into dockerbuild
commit 8a9b696200
9 changed files with 40 additions and 22 deletions
@@ -1,6 +1,7 @@
 to prepare a new host:
 
 ```
+sudo apt-get install software-properties-common
 sudo apt-add-repository -y ppa:nginx/stable
 sudo apt-get update
 sudo apt-get install -y git python-virtualenv python-dev phantomjs
@@ -124,7 +124,7 @@ def build_image(build_dir, tag_name, num_steps, result_object):
   except Exception as e:
     logger.exception('Exception when processing request.')
     result_object['status'] = 'error'
-    result_object['message'] = e.message
+    result_object['message'] = str(e.message)
 
 
 MIME_PROCESSORS = {
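The `str()` coercion matters because the result dict is handed to consumers that expect text. A minimal illustration with a hypothetical exception class (the real call sites are not shown in this diff):

```python
import json

# Without str(), a non-string e.message (e.g. a dict payload) would be
# stored as-is and could surprise anything that expects plain text.
class BuildError(Exception):
    def __init__(self, details):
        super(BuildError, self).__init__(details)
        self.message = details

result_object = {}
try:
    raise BuildError({'step': 4, 'reason': 'COPY failed'})
except Exception as e:
    result_object['message'] = str(e.message)  # always a plain string

print(json.dumps(result_object))
```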
@@ -146,7 +146,15 @@ build = {
 pool = ThreadPool(1)
 
 
-def start_build(resource_url, tag_name, acccess_token):
+@app.before_first_request
+def start_build():
+  resource_url = os.environ['RESOURCE_URL']
+  tag_name = os.environ['TAG']
+  acccess_token = os.environ['TOKEN']
+
+  logger.debug('Starting job with resource url: %s tag: %s and token: %s' %
+               (resource_url, tag_name, acccess_token))
+
   # Save the token
   host = re.match(r'([a-z0-9.:]+)/.+/.+$', tag_name)
   if host:
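The job parameters move from function arguments to environment variables, and the build kicks off via Flask's `before_first_request` hook instead of an explicit call. A minimal sketch of the pattern, with a stubbed-out job body:

```python
import os
import logging

from flask import Flask

logger = logging.getLogger(__name__)
app = Flask(__name__)

@app.before_first_request
def start_build():
    # Parameters come from the environment, so the same worker image can be
    # launched for any build without changing its entry point.
    resource_url = os.environ['RESOURCE_URL']
    tag_name = os.environ['TAG']
    logger.debug('Starting job for %s -> %s', resource_url, tag_name)

# The hook fires exactly once, before the first request is served:
with app.test_client() as client:
    client.get('/')  # triggers start_build(), then handles the request
```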
@@ -201,13 +209,4 @@ def health_check():
 
 if __name__ == '__main__':
   logging.basicConfig(level=logging.DEBUG, format=LOG_FORMAT)
-  resource_url = os.environ['RESOURCE_URL']
-  tag_name = os.environ['TAG']
-  acccess_token = os.environ['TOKEN']
-
-  logger.debug('Starting job with resource url: %s tag: %s and token: %s' %
-               (resource_url, tag_name, acccess_token))
-
-  start_build(resource_url, tag_name, acccess_token)
-
   app.run(host='0.0.0.0', port=5002)
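Since the worker now reads everything from the environment at startup, launching a job reduces to setting those variables when the process is spawned. A hedged sketch (script name and values are hypothetical placeholders):

```python
import os
import subprocess

env = dict(os.environ,
           RESOURCE_URL='https://quay.io/some/resource',   # hypothetical value
           TAG='quay.io/namespace/repo:latest',             # hypothetical value
           TOKEN='pull-token')                              # hypothetical value
# 'buildserver.py' stands in for the worker's actual entry point.
subprocess.check_call(['python', 'buildserver.py'], env=env)
```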
@@ -103,7 +103,6 @@ class BuildNodeConfig(object):
 class DebugConfig(FlaskConfig, MailConfig, LocalStorage, SQLiteDB,
                   StripeTestConfig, MixpanelTestConfig, GitHubTestConfig,
                   DigitalOceanConfig, AWSCredentials, BuildNodeConfig):
-  REGISTRY_SERVER = 'localhost:5000'
   LOGGING_CONFIG = {
     'level': logging.DEBUG,
     'format': LOG_FORMAT
@@ -117,7 +116,6 @@ class LocalHostedConfig(FlaskConfig, MailConfig, S3Storage, RDSMySQL,
                         StripeLiveConfig, MixpanelTestConfig,
                         GitHubProdConfig, DigitalOceanConfig,
                         BuildNodeConfig):
-  REGISTRY_SERVER = 'localhost:5000'
   LOGGING_CONFIG = {
     'level': logging.DEBUG,
     'format': LOG_FORMAT
@@ -128,7 +126,6 @@ class LocalHostedConfig(FlaskConfig, MailConfig, S3Storage, RDSMySQL,
 class ProductionConfig(FlaskConfig, MailConfig, S3Storage, RDSMySQL,
                        StripeLiveConfig, MixpanelProdConfig,
                        GitHubProdConfig, DigitalOceanConfig, BuildNodeConfig):
-  REGISTRY_SERVER = 'quay.io'
   LOGGING_CONFIG = {
     'stream': sys.stderr,
     'level': logging.DEBUG,
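With `REGISTRY_SERVER` removed from every environment class, the endpoint is derived from the request instead (see the index change below). For readers unfamiliar with the layout: these config classes compose settings through multiple inheritance. A minimal sketch with hypothetical mixin bodies standing in for the real ones:

```python
# Hypothetical, trimmed-down mixins; only the composition pattern is real.
class FlaskConfig(object):
    SECRET_KEY = 'dev-key'

class BuildNodeConfig(object):
    BUILD_NODE_PULL_TOKEN = 'sometoken'

class DebugConfig(FlaskConfig, BuildNodeConfig):
    TESTING = True

# Attributes from every mixin are visible on the composed class:
assert DebugConfig.SECRET_KEY == 'dev-key'
assert DebugConfig.BUILD_NODE_PULL_TOKEN == 'sometoken'
```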
@@ -54,6 +54,10 @@ class WorkQueue(object):
   def complete(self, completed_item):
     completed_item.delete_instance()
 
+  def incomplete(self, incomplete_item):
+    incomplete_item.available = True
+    incomplete_item.save()
+
 
 image_diff_queue = WorkQueue('imagediff')
 dockerfile_build_queue = WorkQueue('dockerfilebuild')
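The new `incomplete()` gives consumers a requeue path for retryable failures: instead of deleting the row, it flips it back to available. A sketch of the intended usage, assuming the queue also exposes a `get()` for claiming the next item (not shown in this diff) and hypothetical `process`/`RetryableError` names:

```python
item = dockerfile_build_queue.get()      # claim the next available item (assumed API)
try:
    process(item)                        # hypothetical job handler
except RetryableError:                   # hypothetical error class
    # Flip the row back to available so another worker can pick it up.
    dockerfile_build_queue.incomplete(item)
else:
    # Success: delete the row for good.
    dockerfile_build_queue.complete(item)
```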
@@ -2,6 +2,7 @@ import json
 import urllib
 import json
 import logging
+import urlparse
 
 from flask import request, make_response, jsonify, abort
 from functools import wraps
@@ -25,7 +26,9 @@ def generate_headers(role='read'):
   def wrapper(namespace, repository, *args, **kwargs):
     response = f(namespace, repository, *args, **kwargs)
 
-    response.headers['X-Docker-Endpoints'] = app.config['REGISTRY_SERVER']
+    # We run our index and registry on the same hosts for now
+    registry_server = urlparse.urlparse(request.url).netloc
+    response.headers['X-Docker-Endpoints'] = registry_server
 
     has_token_request = request.headers.get('X-Docker-Token', '')
 
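The endpoints header now mirrors whichever host actually served the request, rather than a static `REGISTRY_SERVER` setting. A quick illustration of the `urlparse` call (Python 2 stdlib; on Python 3 the module is `urllib.parse`):

```python
import urlparse  # Python 2; on Python 3: from urllib import parse as urlparse

# netloc is the host, plus the port if one was given in the URL.
print(urlparse.urlparse('https://quay.io/v1/repositories/foo/bar/images').netloc)
# -> quay.io
print(urlparse.urlparse('http://localhost:5000/v1/_ping').netloc)
# -> localhost:5000
```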
@@ -10,6 +10,7 @@ events {
 }
 
 http {
+    types_hash_max_size 2048;
     include /etc/nginx/mime.types;
 
     default_type application/octet-stream;
@@ -36,8 +37,8 @@ http {
     keepalive_timeout 5;
 
     ssl on;
-    ssl_certificate /home/ubuntu/quay/certs/unified.cert;
-    ssl_certificate_key /home/ubuntu/quay/certs/quay.key;
+    ssl_certificate ./certs/unified.cert;
+    ssl_certificate_key ./certs/quay.key;
     ssl_session_timeout 5m;
     ssl_protocols SSLv3 TLSv1;
     ssl_ciphers ALL:!ADH:!EXPORT56:RC4+RSA:+HIGH:+MEDIUM:+LOW:+SSLv3:+EXP;
@@ -16,3 +16,4 @@ marisa-trie
 apscheduler
 python-daemon
 paramiko
+python-digitalocean
@@ -12,6 +12,7 @@ beautifulsoup4==4.3.2
 blinker==1.3
 boto==2.15.0
 distribute==0.6.34
+ecdsa==0.10
 eventlet==0.14.0
 greenlet==0.4.1
 gunicorn==18.0
@@ -19,10 +20,13 @@ itsdangerous==0.23
 lockfile==0.9.1
 marisa-trie==0.5.1
 mixpanel-py==3.0.0
+paramiko==1.12.0
 peewee==2.1.4
 py-bcrypt==0.4
+pycrypto==2.6.1
 python-daemon==1.6
 python-dateutil==2.1
+python-digitalocean==0.5
 requests==2.0.0
 six==1.4.1
 stripe==1.9.8
@@ -5,7 +5,6 @@ import time
 import argparse
 import digitalocean
 import requests
-import paramiko
 
 from apscheduler.scheduler import Scheduler
 from multiprocessing.pool import ThreadPool
@@ -43,7 +42,7 @@ def retry_command(to_call, args=[], kwargs={}, retries=5, period=5):
 
 
 def get_status(url):
-  return requests.get(url).json()['status']
+  return retry_command(requests.get, [url]).json()['status']
 
 
 def babysit_builder(request):
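`get_status` now goes through `retry_command`, so a transient HTTP failure no longer kills the babysitter. Only the helper's signature appears in this diff; below is a sketch of what a helper with that signature plausibly does (the body is an assumption, not the repo's code):

```python
import time
import logging

logger = logging.getLogger(__name__)

def retry_command(to_call, args=[], kwargs={}, retries=5, period=5):
    # Call to_call(*args, **kwargs); on any exception, wait `period`
    # seconds and try again, up to `retries` attempts in total.
    for attempt in range(retries):
        try:
            return to_call(*args, **kwargs)
        except Exception:
            logger.exception('Attempt %s/%s failed.', attempt + 1, retries)
            if attempt == retries - 1:
                raise
            time.sleep(period)
```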
@@ -109,11 +108,16 @@ def babysit_builder(request):
   repository_build.phase = 'initializing'
   repository_build.save()
 
+  # We wait until here to import paramiko because otherwise it doesn't work
+  # under the daemon context.
+  import paramiko
   ssh_client = paramiko.SSHClient()
   ssh_client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
 
+  logger.debug('Connecting to droplet through ssh at ip: %s' %
+               droplet.ip_address)
   retry_command(ssh_client.connect, [droplet.ip_address, 22, 'root'],
-                {'look_for_keys': False,
+                {'look_for_keys': False, 'timeout': 10.0,
                 'key_filename': app.config['DO_SSH_PRIVATE_KEY_FILENAME']})
 
   # Load the node with the pull token
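For reference, the retried call above unwraps to the direct paramiko invocation below (`droplet` comes from the DigitalOcean API earlier in this function); the new `timeout` keeps a dead droplet from hanging the worker indefinitely:

```python
# Equivalent single attempt of the call that retry_command wraps:
ssh_client.connect(droplet.ip_address, 22, 'root',
                   look_for_keys=False, timeout=10.0,
                   key_filename=app.config['DO_SSH_PRIVATE_KEY_FILENAME'])
```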
@@ -183,7 +187,7 @@ def babysit_builder(request):
 
   # clean up the DO node
   logger.debug('Cleaning up DO node.')
-  # retry_command(droplet.destroy)
+  retry_command(droplet.destroy)
 
   repository_build.status_url = None
   repository_build.build_node_id = None;
@@ -212,6 +216,10 @@ def process_work_items(pool):
     def complete_callback(completed):
       if completed:
         dockerfile_build_queue.complete(local_item)
+      else:
+        # We have a retryable error, add the job back to the queue
+        dockerfile_build_queue.incomplete(local_item)
+
     return complete_callback
 
   logger.debug('Sending work item to thread pool: %s' % pool)
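The callback plugs into the standard `multiprocessing.pool` callback hook. A sketch of the wiring, where the factory wrapping `complete_callback` (here called `make_callback`) is assumed from the surrounding code rather than shown in this hunk:

```python
from multiprocessing.pool import ThreadPool

pool = ThreadPool(1)
# babysit_builder returns True on success and False on a retryable failure;
# the callback then either completes or requeues the claimed item.
pool.apply_async(babysit_builder, [request], callback=make_callback(local_item))
```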