Add the first part of the build worker that runs on quay.
parent 9dc9e0c940
commit 847e91b696

10 changed files with 196 additions and 11 deletions
27 certs/digital_ocean (new file)
@@ -0,0 +1,27 @@
+-----BEGIN RSA PRIVATE KEY-----
+MIIEowIBAAKCAQEAwjlIK0HodmDNrZAmaALtr9RLriRSeeLh76gV8KHmjRweeT7v
+dmhKeGP1nOAs17caZkcwsW0tiDbCeIv2MisV405sScjPOxFivWpY8tL72sgVuOAl
+ReZauOGZ4M1ZcSa/YbT7tnFCIayYE9pde4ih5LmYZqKsBsaNq3ErcMnAzqG77D95
+8swuVwhz/INioBWwe4FjO76/0DqS357hT5yHDWthJD6UUH12VajPKBtXEvGNUtNL
+vdq+drm9omt2y0seMn47fZXiNIulLv7ojsWKwtRMTsGcjnv6VMZAVAuX11u4cJd+
+oPTbDl0D+02B7XYcxABqdMZcOc1/7VTUlFFd4wIDAQABAoIBAAs4V+z3z8AW84rV
+SwKzOJvxvbV/r6wO6VJ4+Vt/XtxEBZanhhnnCHZP//5iDPUhRMsnza5SSlEWKMHi
+BAT97DPHcgYJLb+Rz4x1ulG80oPfDzIw8LZLCm6nycXs1v/sZx3z4J63iER9vgNX
+mBLs371g42b6esmhasm+re3EGflV0LeY1IX0MY40pqGndmW8Fly1QH179TrMzVUJ
+btu3i2JrwWmKk5zO5YGm0SYY5QQGCdjPj6SL+idDniAefEvbjJYz2qOaPOF3wj/7
+r8dAnmyaP10Q3JojT01Et5ltMfr0oF2/pic9tWYGrgn/aIuoXUXj0SF3Pfgrb/4L
+Et1kzFECgYEA8Tb/9bYzQgtaQTQfzFU/KnsIKKnrxh73rZwnIxG59WvN0Ws41Byf
+rv8fEbXWU8Yj0drxRSud9fADr99lZGWFxle8rSW5+qqoUxG8n/fkktzHxyPE/9Mh
+pZW7un7a5/glKgUpHpjaOCZj9rhdF1AwdUXLSo1sFc7VBsKvKiKJAT0CgYEAziDt
+A9h5lOgiLGf1xdBq3qmLIlARz7fivAcZ5acSGN5k6MFFxjHNqhcXRusqs7g+hvCN
+eRupdwfgSdLwrTfvxuY4pCcddfYIZO3uUZYs/glvYRtIxaP2kMBkZTs9KzI02Bjv
+zT3NPReR/46SqW0zvYTlRFSY7VZ0eRED/5xnjZ8CgYAZdlrSjyceA6DFXUE2CpGe
+ZFpaIIW45i/y7ZbcBtUAaR7SymS3T0Yz7M5UykMTmMjTMC9jw9Tqzyk0eXp0fJsA
+cuaByIe3RCh8jFTC9iH0tsWH6eizsI/OsN2eNCHbdsBFjUHn7u6qGrNWqeN5wIc8
++d8ZwY/1RV4LVqWy5u5baQKBgHLFvJMWluQFuPl2zU9etBLU3ma1pKU/I11EqvPH
+afk044UCEKLBml1pzAkt6jH1lcM2798OOvbPCOCyNlaMvdLG36TvLqU+3/+qx7bf
+4p90i3LLaWK64BBLP9tp9640n13vzJ5AGiY5GI7uSNVTu6p789hvLlOAfwvmII7T
+/IjLAoGBAO6iU8i6pAOaKa7+/uExXx6xwk3vqQtovxByo1/m7NpyUtT+ElDSq+t9
+7f+3TzzPB6ggdMl8d+PSyHR3o7KjVPgOSe7zld7eePhUrLjwZ4lh5ohcvhvYfaRL
+0EgRTaTb+zLtCAvJS/ilNnJoIcxUmD8u5uSXpY7vAleSOiQTJRTh
+-----END RSA PRIVATE KEY-----
1 certs/digital_ocean.pub (new file)
@@ -0,0 +1 @@
+ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDCOUgrQeh2YM2tkCZoAu2v1EuuJFJ54uHvqBXwoeaNHB55Pu92aEp4Y/Wc4CzXtxpmRzCxbS2INsJ4i/YyKxXjTmxJyM87EWK9aljy0vvayBW44CVF5lq44ZngzVlxJr9htPu2cUIhrJgT2l17iKHkuZhmoqwGxo2rcStwycDOobvsP3nyzC5XCHP8g2KgFbB7gWM7vr/QOpLfnuFPnIcNa2EkPpRQfXZVqM8oG1cS8Y1S00u92r52ub2ia3bLSx4yfjt9leI0i6Uu/uiOxYrC1ExOwZyOe/pUxkBUC5fXW7hwl36g9NsOXQP7TYHtdhzEAGp0xlw5zX/tVNSUUV3j jake@coreserver
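This private key presumably pairs with the DO_SSH_KEY_ID registered in config.py below, and the worker logs into new droplets as root over SSH via fabric. The commit never points fabric at the key file, so something like the following would also be needed; env.key_filename is fabric's standard setting, but its use here is an assumption, not part of this diff:

    from fabric.api import env

    # Assumption: fabric must be told which private key to present when the
    # worker SSHes into a freshly created droplet as root.
    env.key_filename = 'certs/digital_ocean'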
13 config.py
@@ -85,8 +85,15 @@ class GitHubProdConfig(GitHubTestConfig):
   GITHUB_CLIENT_SECRET = 'f89d8bb28ea3bd4e1c68808500d185a816be53b1'
 
 
+class DigitalOceanConfig():
+  DO_CLIENT_ID = 'LJ44y2wwYj1MD0BRxS6qHA'
+  DO_CLIENT_SECRET = 'b9357a6f6ff45a33bb03f6dbbad135f9'
+  DO_SSH_KEY_ID = '46986'
+
+
 class DebugConfig(FlaskConfig, MailConfig, LocalStorage, SQLiteDB,
-                  StripeTestConfig, MixpanelTestConfig, GitHubTestConfig):
+                  StripeTestConfig, MixpanelTestConfig, GitHubTestConfig,
+                  DigitalOceanConfig):
   REGISTRY_SERVER = 'localhost:5000'
   LOGGING_CONFIG = {
     'level': logging.DEBUG,
@@ -98,7 +105,7 @@ class DebugConfig(FlaskConfig, MailConfig, LocalStorage, SQLiteDB,
 
 class LocalHostedConfig(FlaskConfig, MailConfig, S3Storage, RDSMySQL,
                         StripeLiveConfig, MixpanelTestConfig,
-                        GitHubProdConfig):
+                        GitHubProdConfig, DigitalOceanConfig):
   REGISTRY_SERVER = 'localhost:5000'
   LOGGING_CONFIG = {
     'level': logging.DEBUG,
@@ -109,7 +116,7 @@ class LocalHostedConfig(FlaskConfig, MailConfig, S3Storage, RDSMySQL,
 
 class ProductionConfig(FlaskConfig, MailConfig, S3Storage, RDSMySQL,
                        StripeLiveConfig, MixpanelProdConfig,
-                       GitHubProdConfig):
+                       GitHubProdConfig, DigitalOceanConfig):
   REGISTRY_SERVER = 'quay.io'
   LOGGING_CONFIG = {
     'stream': sys.stderr,
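Each environment class now mixes in DigitalOceanConfig, so its attributes land on app.config like any other Flask setting. A minimal sketch of how a worker reads them back out, mirroring the babysit_builder code added below (the digitalocean.Manager call matches the one this commit itself makes):

    import digitalocean
    from app import app  # Flask app configured with one of the classes above

    # Mixin attributes surface as plain keys on the Flask config object.
    manager = digitalocean.Manager(client_id=app.config['DO_CLIENT_ID'],
                                   api_key=app.config['DO_CLIENT_SECRET'])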
9 data/database.py
@@ -150,6 +150,13 @@ class RepositoryTag(BaseModel):
   )
 
 
+class RepositoryBuild(BaseModel):
+  digitalocean_build_node_id = IntegerField(null=True)
+  phase = CharField(default='waiting')
+  status_url = CharField(null=True)
+  repository = ForeignKeyField(Repository)
+
+
 class QueueItem(BaseModel):
   queue_name = CharField(index=True)
   body = TextField()
@@ -162,7 +169,7 @@ def initialize_db():
   create_model_tables([User, Repository, Image, AccessToken, Role,
                        RepositoryPermission, Visibility, RepositoryTag,
                        EmailConfirmation, FederatedLogin, LoginService,
-                       QueueItem])
+                       QueueItem, RepositoryBuild])
   Role.create(name='admin')
   Role.create(name='write')
   Role.create(name='read')
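A RepositoryBuild row therefore starts life in the default 'waiting' phase and records which droplet is servicing it. A small sketch of creating one with peewee's standard create API; the module path and the repo variable are assumptions, since this commit does not show the enqueueing side:

    # Path assumed; the hunks above do not name the models file.
    from data.database import RepositoryBuild

    # phase defaults to 'waiting'; the worker moves it to 'starting' and then
    # 'initializing' as the droplet comes up.
    build = RepositoryBuild.create(repository=repo)  # repo: hypothetical Repository row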
12 data/model.py
@@ -30,6 +30,10 @@ class InvalidTokenException(DataModelException):
   pass
 
 
+class InvalidRepositoryBuildException(DataModelException):
+  pass
+
+
 def create_user(username, password, email):
   if not validate_email(email):
     raise InvalidEmailAddressException('Invalid email address: %s' % email)
@@ -548,3 +552,11 @@ def load_token_data(code):
     return fetched[0]
   else:
     raise InvalidTokenException('Invalid delegate token code: %s' % code)
+
+
+def get_repository_build(request_dbid):
+  try:
+    return RepositoryBuild.get(RepositoryBuild.id == request_dbid)
+  except RepositoryBuild.DoesNotExist:
+    msg = 'Unable to locate a build by id: %s' % request_dbid
+    raise InvalidRepositoryBuildException(msg)
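Callers are expected to catch the new data-layer exception rather than peewee's DoesNotExist. A short usage sketch (the build id here is hypothetical):

    import logging
    from data import model

    logger = logging.getLogger(__name__)

    try:
      build = model.get_repository_build(42)  # hypothetical build id
    except model.InvalidRepositoryBuildException as ex:
      logger.warning(ex)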
1 data/queue.py
@@ -56,3 +56,4 @@ class WorkQueue(object):
 
 
 image_diff_queue = WorkQueue('imagediff')
+dockerfile_build_queue = WorkQueue('dockerfilebuild')

Binary file not shown.
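The new queue defines the contract between the (not yet written) enqueueing endpoint and the worker below: each item body is JSON carrying a build_id. A hypothetical producer sketch; WorkQueue's enqueue method is not shown in this diff, so the put name is an assumption:

    import json
    from data.queue import dockerfile_build_queue

    # Assumption: WorkQueue exposes a put() counterpart to get()/complete().
    # The body shape matches what process_work_items decodes below.
    dockerfile_build_queue.put(json.dumps({'build_id': repository_build.id}))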
6 workers/diffsworker.py
@@ -10,7 +10,6 @@ from data.queue import image_diff_queue
 from endpoints.registry import process_image_changes
 
 
-
 root_logger = logging.getLogger('')
 root_logger.setLevel(logging.DEBUG)
 
@@ -59,11 +58,6 @@ parser.add_argument('--log', default='diffsworker.log',
 args = parser.parse_args()
 
 
-# if not args.D:
-# else:
-#   logging.basicConfig(format=FORMAT, level=logging.DEBUG)
-#   start_worker(args)
-
 if args.D:
   handler = logging.FileHandler(args.log)
   handler.setFormatter(formatter)
136 workers/dockerfilebuild.py (new file)
@@ -0,0 +1,136 @@
+import logging
+import json
+import daemon
+import time
+import argparse
+import digitalocean
+
+from apscheduler.scheduler import Scheduler
+from multiprocessing.pool import ThreadPool
+from fabric.api import env
+
+from data.queue import dockerfile_build_queue
+from data import model
+from app import app
+
+
+root_logger = logging.getLogger('')
+root_logger.setLevel(logging.DEBUG)
+
+FORMAT = '%(asctime)-15s - %(levelname)s - %(pathname)s - %(funcName)s - %(message)s'
+formatter = logging.Formatter(FORMAT)
+
+logger = logging.getLogger(__name__)
+
+
+def babysit_builder(request):
+  manager = digitalocean.Manager(client_id=app.config['DO_CLIENT_ID'],
+                                 api_key=app.config['DO_CLIENT_SECRET'])
+  repository_build = model.get_repository_build(request['build_id'])
+
+  # check if there is already a DO node for this build job, if so clean it up
+  old_id = repository_build.digitalocean_build_node_id
+  if old_id:
+    old_droplet = digitalocean.Droplet(old_id)
+    old_droplet.destroy()
+
+  # start the DO node
+  name = 'dockerfile-build-%s' % repository_build.id
+  droplet = digitalocean.Droplet(client_id=app.config['DO_CLIENT_ID'],
+                                 api_key=app.config['DO_CLIENT_SECRET'],
+                                 name=name,
+                                 region_id=1,  # New York
+                                 image_id=1004145,  # Docker on 13.04
+                                 size_id=66,  # 512MB
+                                 backup_active=False)
+  droplet.create(ssh_key_ids=[app.config['DO_SSH_KEY_ID']])
+  repository_build.digitalocean_build_node_id = droplet.id
+  repository_build.phase = 'starting'
+  repository_build.save()
+
+  startup = droplet.get_events()[0]
+  while int(startup.percentage) != 100:
+    logger.debug('Droplet startup percentage: %s' % startup.percentage)
+    time.sleep(5)
+    startup.load()
+
+  droplet.load()
+  logger.debug('Droplet started at ip address: %s' % droplet.ip_address)
+
+  # connect to it with ssh
+  repository_build.phase = 'initializing'
+  repository_build.save()
+
+  env.host_string = 'root@%s' % droplet.ip_address
+
+  # tell it to pull and run the buildserver
+
+  # wait for the server to be ready
+
+  # send it the job
+
+  # wait for the job to be complete
+
+  # clean up the DO node
+
+  return True
+
+
+def process_work_items(pool):
+  logger.debug('Getting work item from queue.')
+
+  item = dockerfile_build_queue.get()
+
+  while item:
+    logger.debug('Queue gave us some work: %s' % item.body)
+
+    request = json.loads(item.body)
+
+    def build_callback(item):
+      local_item = item
+      def complete_callback(completed):
+        if completed:
+          dockerfile_build_queue.complete(local_item)
+      return complete_callback
+
+    pool.apply_async(babysit_builder, [request], callback=build_callback(item))
+
+    item = dockerfile_build_queue.get()
+
+  logger.debug('No more work.')
+
+
+def start_worker():
+  pool = ThreadPool(3)
+  logger.debug("Scheduling worker.")
+
+  sched = Scheduler()
+  sched.start()
+
+  sched.add_interval_job(process_work_items, args=[pool], seconds=30)
+
+  while True:
+    time.sleep(60 * 60 * 24)  # sleep one day, basically forever
+
+
+desc = 'Worker daemon to monitor dockerfile build'
+parser = argparse.ArgumentParser(description=desc)
+parser.add_argument('-D', action='store_true', default=False,
+                    help='Run the worker in daemon mode.')
+parser.add_argument('--log', default='dockerfilebuild.log',
+                    help='Specify the log file for the worker as a daemon.')
+args = parser.parse_args()
+
+
+if args.D:
+  handler = logging.FileHandler(args.log)
+  handler.setFormatter(formatter)
+  root_logger.addHandler(handler)
+  with daemon.DaemonContext(files_preserve=[handler.stream]):
+    start_worker()
+
+else:
+  handler = logging.StreamHandler()
+  handler.setFormatter(formatter)
+  root_logger.addHandler(handler)
+  start_worker()
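Since the argparse block runs at module top level (there is no __main__ guard), the worker is meant to be invoked directly as a script. Daemonized usage would look something like this; the working directory and log path are assumptions:

    python workers/dockerfilebuild.py -D --log dockerfilebuild.log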