Merge branch 'bobthe'

Conflicts:
	test/data/test.db
Committed by yackob03 on 2014-02-13 12:47:59 -05:00
commit 8f6cdabde3
41 changed files with 1519 additions and 744 deletions


@@ -1,5 +1,4 @@
 import logging
-import os

 from app import app as application
 from data.model import db as model_db
@@ -20,10 +19,6 @@ from endpoints.webhooks import webhooks

 logger = logging.getLogger(__name__)

-if application.config.get('INCLUDE_TEST_ENDPOINTS', False):
-  logger.debug('Loading test endpoints.')
-  import endpoints.test
-
 application.register_blueprint(web)
 application.register_blueprint(index, url_prefix='/v1')
 application.register_blueprint(tags, url_prefix='/v1')


@@ -1,26 +0,0 @@
FROM lopter/raring-base
MAINTAINER jake@devtable.com
RUN echo deb http://archive.ubuntu.com/ubuntu precise universe > /etc/apt/sources.list.d/universe.list
RUN apt-get update -qq
RUN apt-get install -qqy iptables ca-certificates lxc python-virtualenv git python-dev xz-utils aufs-tools
# This will use the latest public release. To use your own, comment it out...
ADD https://get.docker.io/builds/Linux/x86_64/docker-latest /usr/local/bin/docker
# ...then uncomment the following line, and copy your docker binary to current dir.
#ADD ./docker /usr/local/bin/docker
# Install the files
ADD ./startserver /usr/local/bin/startserver
ADD ./buildserver.py ./buildserver.py
ADD ./requirements.txt ./requirements.txt
RUN chmod +x /usr/local/bin/docker /usr/local/bin/startserver
RUN virtualenv --distribute venv
RUN venv/bin/pip install -r requirements.txt
VOLUME /var/lib/docker
EXPOSE 5002
CMD startserver


@@ -1,13 +0,0 @@
To build:
```
sudo docker build -t quay.io/quay/buildserver .
sudo docker push quay.io/quay/buildserver
```
To run:
```
sudo docker pull quay.io/quay/buildserver
sudo docker run -d -privileged -lxc-conf="lxc.aa_profile=unconfined" quay.io/quay/buildserver
```


@@ -1,214 +0,0 @@
import docker
import logging
import shutil
import os
import re
import requests
import json

from flask import Flask, jsonify, abort, make_response
from zipfile import ZipFile
from tempfile import TemporaryFile, mkdtemp
from multiprocessing.pool import ThreadPool
from base64 import b64encode

BUFFER_SIZE = 8 * 1024
LOG_FORMAT = '%(asctime)-15s - %(levelname)s - %(pathname)s - ' + \
             '%(funcName)s - %(message)s'

app = Flask(__name__)
logger = logging.getLogger(__name__)


def count_steps(dockerfile_path):
  with open(dockerfile_path, 'r') as dockerfileobj:
    steps = 0
    for line in dockerfileobj.readlines():
      stripped = line.strip()
      if stripped and stripped[0] is not '#':
        steps += 1
    return steps


def prepare_zip(request_file):
  build_dir = mkdtemp(prefix='docker-build-')

  # Save the zip file to temp somewhere
  with TemporaryFile() as zip_file:
    zip_file.write(request_file.content)
    to_extract = ZipFile(zip_file)
    to_extract.extractall(build_dir)

  return build_dir


def prepare_dockerfile(request_file):
  build_dir = mkdtemp(prefix='docker-build-')
  dockerfile_path = os.path.join(build_dir, "Dockerfile")
  with open(dockerfile_path, 'w') as dockerfile:
    dockerfile.write(request_file.content)

  return build_dir


def total_completion(statuses, total_images):
  percentage_with_sizes = float(len(statuses.values()))/total_images
  sent_bytes = sum([status[u'current'] for status in statuses.values()])
  total_bytes = sum([status[u'total'] for status in statuses.values()])
  return float(sent_bytes)/total_bytes*percentage_with_sizes


def build_image(build_dir, tag_name, num_steps, result_object):
  try:
    logger.debug('Starting build.')
    docker_cl = docker.Client(timeout=1200)

    result_object['status'] = 'building'
    build_status = docker_cl.build(path=build_dir, tag=tag_name, stream=True)

    current_step = 0
    built_image = None
    for status in build_status:
      # logger.debug('Status: %s', str(status))
      step_increment = re.search(r'Step ([0-9]+) :', status)
      if step_increment:
        current_step = int(step_increment.group(1))
        logger.debug('Step now: %s/%s' % (current_step, num_steps))
        result_object['current_command'] = current_step
        continue

      complete = re.match(r'Successfully built ([a-z0-9]+)$', status)
      if complete:
        built_image = complete.group(1)
        logger.debug('Final image ID is: %s' % built_image)
        continue

    shutil.rmtree(build_dir)

    # Get the image count
    if not built_image:
      result_object['status'] = 'error'
      result_object['message'] = 'Unable to build dockerfile.'
      return

    history = json.loads(docker_cl.history(built_image))
    num_images = len(history)
    result_object['total_images'] = num_images

    result_object['status'] = 'pushing'
    logger.debug('Pushing to tag name: %s' % tag_name)
    resp = docker_cl.push(tag_name, stream=True)

    for status_str in resp:
      status = json.loads(status_str)
      logger.debug('Status: %s', status_str)
      if u'status' in status:
        status_msg = status[u'status']

        if status_msg == 'Pushing':
          if u'progressDetail' in status and u'id' in status:
            image_id = status[u'id']
            detail = status[u'progressDetail']

            if u'current' in detail and 'total' in detail:
              images = result_object['image_completion']
              images[image_id] = detail
              result_object['push_completion'] = total_completion(images,
                                                                  num_images)

      elif u'errorDetail' in status:
        result_object['status'] = 'error'
        if u'message' in status[u'errorDetail']:
          result_object['message'] = str(status[u'errorDetail'][u'message'])
        return

    result_object['status'] = 'complete'
  except Exception as e:
    logger.exception('Exception when processing request.')
    result_object['status'] = 'error'
    result_object['message'] = str(e.message)


MIME_PROCESSORS = {
  'application/zip': prepare_zip,
  'text/plain': prepare_dockerfile,
  'application/octet-stream': prepare_dockerfile,
}

# If this format changes, it should also be changed in the api method
# get_repo_builds
build = {
  'total_commands': None,
  'current_command': None,
  'push_completion': 0.0,
  'status': 'waiting',
  'message': None,
  'image_completion': {},
}
pool = ThreadPool(1)


@app.before_first_request
def start_build():
  resource_url = os.environ['RESOURCE_URL']
  tag_name = os.environ['TAG']
  acccess_token = os.environ['TOKEN']

  logger.debug('Starting job with resource url: %s tag: %s and token: %s' %
               (resource_url, tag_name, acccess_token))

  # Save the token
  host = re.match(r'([a-z0-9.:]+)/.+/.+$', tag_name)
  if host:
    docker_endpoint = 'http://%s/v1/' % host.group(1)
    dockercfg_path = os.path.join(os.environ.get('HOME', '.'), '.dockercfg')
    token = b64encode('$token:%s' % acccess_token)
    with open(dockercfg_path, 'w') as dockercfg:
      payload = {
        docker_endpoint: {
          'auth': token,
          'email': '',
        }
      }
      dockercfg.write(json.dumps(payload))

  else:
    raise Exception('Invalid tag name: %s' % tag_name)

  docker_resource = requests.get(resource_url)
  c_type = docker_resource.headers['content-type']

  logger.info('Request to build file of type: %s with tag: %s' %
              (c_type, tag_name))

  if c_type not in MIME_PROCESSORS:
    raise Exception('Invalid dockerfile content type: %s' % c_type)

  build_dir = MIME_PROCESSORS[c_type](docker_resource)

  dockerfile_path = os.path.join(build_dir, "Dockerfile")
  num_steps = count_steps(dockerfile_path)
  logger.debug('Dockerfile had %s steps' % num_steps)

  logger.info('Sending job to builder pool.')
  build['total_commands'] = num_steps

  pool.apply_async(build_image, [build_dir, tag_name, num_steps,
                                 build])


@app.route('/build/', methods=['GET'])
def get_status():
  if build:
    return jsonify(build)

  abort(404)


@app.route('/status/', methods=['GET'])
def health_check():
  return make_response('Running')


if __name__ == '__main__':
  logging.basicConfig(level=logging.DEBUG, format=LOG_FORMAT)
  app.run(host='0.0.0.0', port=5002, threaded=True)


@@ -1,5 +0,0 @@
mock==1.0.1
requests==1.2.3
six==1.3.0
flask==0.10.1
-e git+git://github.com/DevTable/docker-py.git#egg=docker-py


@@ -1,48 +0,0 @@
#!/bin/bash

# First, make sure that cgroups are mounted correctly.
CGROUP=/sys/fs/cgroup

[ -d $CGROUP ] ||
  mkdir $CGROUP

mountpoint -q $CGROUP ||
  mount -n -t tmpfs -o uid=0,gid=0,mode=0755 cgroup $CGROUP || {
    echo "Could not make a tmpfs mount. Did you use -privileged?"
    exit 1
  }

# Mount the cgroup hierarchies exactly as they are in the parent system.
for SUBSYS in $(cut -d: -f2 /proc/1/cgroup)
do
  [ -d $CGROUP/$SUBSYS ] || mkdir $CGROUP/$SUBSYS
  mountpoint -q $CGROUP/$SUBSYS ||
    mount -n -t cgroup -o $SUBSYS cgroup $CGROUP/$SUBSYS
done

# Note: as I write those lines, the LXC userland tools cannot set up
# a "sub-container" properly if the "devices" cgroup is not in its
# own hierarchy. Let's detect this and issue a warning.
grep -q :devices: /proc/1/cgroup ||
  echo "WARNING: the 'devices' cgroup should be in its own hierarchy."
grep -qw devices /proc/1/cgroup ||
  echo "WARNING: it looks like the 'devices' cgroup is not mounted."

# Now, close extraneous file descriptors.
pushd /proc/self/fd
for FD in *
do
  case "$FD" in
  # Keep stdin/stdout/stderr
  [012])
    ;;
  # Nuke everything else
  *)
    eval exec "$FD>&-"
    ;;
  esac
done
popd

docker -d &

exec venv/bin/python buildserver.py


@@ -1,15 +1,16 @@
 import logging
-import os
 import logstash_formatter

 from peewee import MySQLDatabase, SqliteDatabase
 from storage.s3 import S3Storage
 from storage.local import LocalStorage
 from data.userfiles import UserRequestFiles
+from data.buildlogs import BuildLogs
 from util import analytics

 from test.teststorage import FakeStorage, FakeUserfiles
 from test import analytics as fake_analytics
+from test.testlogs import TestBuildLogs


 class FlaskConfig(object):
@@ -86,6 +87,15 @@ class S3Userfiles(AWSCredentials):
                                    AWSCredentials.REGISTRY_S3_BUCKET)


+class RedisBuildLogs(object):
+  BUILDLOGS = BuildLogs('logs.quay.io')
+
+
+class TestBuildLogs(object):
+  BUILDLOGS = TestBuildLogs('logs.quay.io', 'devtable', 'building',
+                            'deadbeef-dead-beef-dead-beefdeadbeef')
+
+
 class StripeTestConfig(object):
   STRIPE_SECRET_KEY = 'sk_test_PEbmJCYrLXPW0VRLSnWUiZ7Y'
   STRIPE_PUBLISHABLE_KEY = 'pk_test_uEDHANKm9CHCvVa2DLcipGRh'
@@ -135,13 +145,13 @@ class BuildNodeConfig(object):
   BUILD_NODE_PULL_TOKEN = 'F02O2E86CQLKZUQ0O81J8XDHQ6F0N1V36L9JTOEEK6GKKMT1GI8PTJQT4OU88Y6G'


-def logs_init_builder(level=logging.DEBUG):
+def logs_init_builder(level=logging.DEBUG,
+                      formatter=logstash_formatter.LogstashFormatter()):
   @staticmethod
   def init_logs():
     handler = logging.StreamHandler()
     root_logger = logging.getLogger('')
     root_logger.setLevel(level)
-    formatter = logstash_formatter.LogstashFormatter()
     handler.setFormatter(formatter)
     root_logger.addHandler(handler)
@@ -149,26 +159,25 @@ def logs_init_builder(level=logging.DEBUG):
 class TestConfig(FlaskConfig, FakeStorage, EphemeralDB, FakeUserfiles,
-                 FakeAnalytics, StripeTestConfig):
+                 FakeAnalytics, StripeTestConfig, RedisBuildLogs):
   LOGGING_CONFIG = logs_init_builder(logging.WARN)
   POPULATE_DB_TEST_DATA = True
   TESTING = True
-  INCLUDE_TEST_ENDPOINTS = True


 class DebugConfig(FlaskConfig, MailConfig, LocalStorage, SQLiteDB,
                   StripeTestConfig, MixpanelTestConfig, GitHubTestConfig,
-                  DigitalOceanConfig, BuildNodeConfig, S3Userfiles):
-  LOGGING_CONFIG = logs_init_builder()
+                  DigitalOceanConfig, BuildNodeConfig, S3Userfiles,
+                  TestBuildLogs):
+  LOGGING_CONFIG = logs_init_builder(formatter=logging.Formatter())
   SEND_FILE_MAX_AGE_DEFAULT = 0
   POPULATE_DB_TEST_DATA = True
-  INCLUDE_TEST_ENDPOINTS = True


 class LocalHostedConfig(FlaskConfig, MailConfig, S3Storage, RDSMySQL,
                         StripeLiveConfig, MixpanelTestConfig,
                         GitHubProdConfig, DigitalOceanConfig,
-                        BuildNodeConfig, S3Userfiles):
+                        BuildNodeConfig, S3Userfiles, RedisBuildLogs):
   LOGGING_CONFIG = logs_init_builder()
   SEND_FILE_MAX_AGE_DEFAULT = 0
@@ -176,7 +185,7 @@ class LocalHostedConfig(FlaskConfig, MailConfig, S3Storage, RDSMySQL,
 class ProductionConfig(FlaskProdConfig, MailConfig, S3Storage, RDSMySQL,
                        StripeLiveConfig, MixpanelProdConfig,
                        GitHubProdConfig, DigitalOceanConfig, BuildNodeConfig,
-                       S3Userfiles):
+                       S3Userfiles, RedisBuildLogs):
   LOGGING_CONFIG = logs_init_builder()
   SEND_FILE_MAX_AGE_DEFAULT = 0

data/buildlogs.py Normal file

@@ -0,0 +1,63 @@
import redis
import json


class BuildLogs(object):
  ERROR = 'error'
  COMMAND = 'command'
  PHASE = 'phase'

  def __init__(self, redis_host):
    self._redis = redis.StrictRedis(host=redis_host)

  @staticmethod
  def _logs_key(build_id):
    return 'builds/%s/logs' % build_id

  def append_log_entry(self, build_id, log_obj):
    """
    Appends the serialized form of log_obj to the end of the log entry list
    and returns the new length of the list.
    """
    return self._redis.rpush(self._logs_key(build_id), json.dumps(log_obj))

  def append_log_message(self, build_id, log_message, log_type=None):
    """
    Wraps the message in an envelope, pushes it to the end of the log entry
    list, and returns the index at which it was inserted.
    """
    log_obj = {
      'message': log_message
    }

    if log_type:
      log_obj['type'] = log_type

    return self._redis.rpush(self._logs_key(build_id), json.dumps(log_obj)) - 1

  def get_log_entries(self, build_id, start_index):
    """
    Returns a tuple of the current length of the list and an iterable of the
    requested log entries.
    """
    llen = self._redis.llen(self._logs_key(build_id))
    log_entries = self._redis.lrange(self._logs_key(build_id), start_index, -1)
    return (llen, (json.loads(entry) for entry in log_entries))

  @staticmethod
  def _status_key(build_id):
    return 'builds/%s/status' % build_id

  def set_status(self, build_id, status_obj):
    """
    Sets the status key for this build to the JSON serialized form of the
    supplied obj.
    """
    self._redis.set(self._status_key(build_id), json.dumps(status_obj))

  def get_status(self, build_id):
    """
    Loads the status information for the specified build id.
    """
    fetched = self._redis.get(self._status_key(build_id))
    return json.loads(fetched) if fetched else None
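
For orientation, a minimal usage sketch of the new BuildLogs API; the Redis host, build id, and log messages below are illustrative placeholders, not part of this commit:

```
# A minimal sketch, assuming a reachable Redis server; the build id and
# messages are placeholders.
from data.buildlogs import BuildLogs

logs = BuildLogs('localhost')
build_id = 'example-build-uuid'

# Writer side: record the current phase, then append typed and plain entries.
logs.set_status(build_id, {'phase': 'building'})
logs.append_log_message(build_id, 'Step 1 : FROM ubuntu', BuildLogs.COMMAND)
logs.append_log_entry(build_id, {'message': 'Pulling repository ubuntu'})

# Reader side: fetch everything from index 0; the returned length becomes
# the next poll's start offset.
total, entries = logs.get_log_entries(build_id, 0)
for entry in entries:
  print(entry['message'])
```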


@@ -1,5 +1,6 @@
 import string
 import logging
+import uuid

 from random import SystemRandom
 from datetime import datetime
@@ -20,6 +21,10 @@ def random_string_generator(length=16):
   return random_string


+def uuid_generator():
+  return str(uuid.uuid4())
+
+
 class BaseModel(Model):
   class Meta:
     database = db
@@ -125,7 +130,7 @@ class RepositoryPermission(BaseModel):
 class PermissionPrototype(BaseModel):
   org = ForeignKeyField(User, index=True, related_name='orgpermissionproto')
-  uuid = CharField()
+  uuid = CharField(default=uuid_generator)
   activating_user = ForeignKeyField(User, index=True, null=True,
                                     related_name='userpermissionproto')
   delegate_user = ForeignKeyField(User, related_name='receivingpermission',
@@ -204,13 +209,14 @@ class RepositoryTag(BaseModel):
 class RepositoryBuild(BaseModel):
-  repository = ForeignKeyField(Repository)
+  uuid = CharField(default=uuid_generator, index=True)
+  repository = ForeignKeyField(Repository, index=True)
   access_token = ForeignKeyField(AccessToken)
   resource_key = CharField()
   tag = CharField()
-  build_node_id = IntegerField(null=True)
   phase = CharField(default='waiting')
-  status_url = CharField(null=True)
+  started = DateTimeField(default=datetime.now)
+  display_name = CharField()


 class QueueItem(BaseModel):
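
Note that `default=uuid_generator` passes the function itself, so peewee invokes it once per inserted row; writing `default=uuid_generator()` would compute a single UUID at class-definition time and share it across all rows. A self-contained sketch of that behavior, assuming an illustrative model and an in-memory SQLite database (neither is part of this commit):

```
# Sketch of the default=callable behavior used above.
import uuid

from peewee import CharField, Model, SqliteDatabase

db = SqliteDatabase(':memory:')

def uuid_generator():
  return str(uuid.uuid4())

class Build(Model):
  # The callable runs per created row; uuid_generator() here would run once.
  uuid = CharField(default=uuid_generator, index=True)

  class Meta:
    database = db

db.connect()
Build.create_table()
first, second = Build.create(), Build.create()
assert first.uuid != second.uuid
```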


@@ -4,9 +4,7 @@ import datetime
 import dateutil.parser
 import operator
 import json
-import uuid

-from datetime import timedelta

 from database import *
 from util.validation import *
@@ -731,8 +729,7 @@ def update_prototype_permission(org, uid, role_name):
 def add_prototype_permission(org, role_name, activating_user,
                              delegate_user=None, delegate_team=None):
   new_role = Role.get(Role.name == role_name)
-  uid = str(uuid.uuid4())
-  return PermissionPrototype.create(org=org, uuid=uid, role=new_role,
+  return PermissionPrototype.create(org=org, role=new_role,
                                     activating_user=activating_user,
                                     delegate_user=delegate_user, delegate_team=delegate_team)
@@ -1287,13 +1284,18 @@ def load_token_data(code):
   raise InvalidTokenException('Invalid delegate token code: %s' % code)


-def get_repository_build(request_dbid):
-  try:
-    return RepositoryBuild.get(RepositoryBuild.id == request_dbid)
-  except RepositoryBuild.DoesNotExist:
-    msg = 'Unable to locate a build by id: %s' % request_dbid
+def get_repository_build(namespace_name, repository_name, build_uuid):
+  joined = RepositoryBuild.select().join(Repository)
+  fetched = list(joined.where(Repository.name == repository_name,
+                              Repository.namespace == namespace_name,
+                              RepositoryBuild.uuid == build_uuid))
+
+  if not fetched:
+    msg = 'Unable to locate a build by id: %s' % build_uuid
     raise InvalidRepositoryBuildException(msg)

+  return fetched[0]
+

 def list_repository_builds(namespace_name, repository_name,
                            include_inactive=True):
@@ -1307,9 +1309,11 @@ def list_repository_builds(namespace_name, repository_name,
   return fetched


-def create_repository_build(repo, access_token, resource_key, tag):
+def create_repository_build(repo, access_token, resource_key, tag,
+                            display_name):
   return RepositoryBuild.create(repository=repo, access_token=access_token,
-                                resource_key=resource_key, tag=tag)
+                                resource_key=resource_key, tag=tag,
+                                display_name=display_name)


 def create_webhook(repo, params_obj):


@@ -64,5 +64,5 @@ class WorkQueue(object):

 image_diff_queue = WorkQueue('imagediff')
-dockerfile_build_queue = WorkQueue('dockerfilebuild')
+dockerfile_build_queue = WorkQueue('dockerfilebuild2')
 webhook_queue = WorkQueue('webhook')


@@ -59,3 +59,9 @@ class UserRequestFiles(object):
     full_key = os.path.join(self._prefix, file_id)
     k = Key(self._bucket, full_key)
     return k.generate_url(expires_in)
+
+  def get_file_checksum(self, file_id):
+    self._initialize_s3()
+    full_key = os.path.join(self._prefix, file_id)
+    k = self._bucket.lookup(full_key)
+    return k.etag[1:-1][:7]
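
The new `get_file_checksum` derives a short display name from the S3 ETag, which boto returns wrapped in literal double quotes; `[1:-1]` strips the quotes and `[:7]` keeps a short, git-style prefix. A quick illustration using a sample ETag value rather than a live S3 lookup:

```
# Illustration of the ETag slicing above (sample value, no real S3 call).
etag = '"d41d8cd98f00b204e9800998ecf8427e"'  # boto keeps the quotes
checksum = etag[1:-1][:7]
assert checksum == 'd41d8cd'
```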


@@ -31,6 +31,7 @@ from datetime import datetime, timedelta
 store = app.config['STORAGE']
 user_files = app.config['USERFILES']
+build_logs = app.config['BUILDLOGS']

 logger = logging.getLogger(__name__)

 route_data = None
@@ -69,7 +70,7 @@ def get_route_data():
   routes = []
   for rule in app.url_map.iter_rules():
     if rule.endpoint.startswith('api.'):
-      endpoint_method = globals()[rule.endpoint[4:]]  # Remove api.
+      endpoint_method = app.view_functions[rule.endpoint]
       is_internal = '__internal_call' in dir(endpoint_method)
       is_org_api = '__user_call' in dir(endpoint_method)
       methods = list(rule.methods.difference(['HEAD', 'OPTIONS']))
@@ -1148,40 +1149,69 @@ def get_repo(namespace, repository):
   abort(403)  # Permission denied


+def build_status_view(build_obj):
+  status = build_logs.get_status(build_obj.uuid)
+  return {
+    'id': build_obj.uuid,
+    'phase': build_obj.phase,
+    'started': build_obj.started,
+    'display_name': build_obj.display_name,
+    'status': status,
+  }
+
+
 @api.route('/repository/<path:repository>/build/', methods=['GET'])
 @parse_repository_name
 def get_repo_builds(namespace, repository):
   permission = ReadRepositoryPermission(namespace, repository)
   is_public = model.repository_is_public(namespace, repository)
   if permission.can() or is_public:
-    def build_view(build_obj):
-      # TODO(jake): Filter these logs if the current user can only *read* the repo.
-      if build_obj.status_url:
-        # Delegate the status to the build node
-        node_status = requests.get(build_obj.status_url).json()
-        node_status['id'] = build_obj.id
-        return node_status
-
-      # If there was no status url, do the best we can
-      # The format of this block should mirror that of the buildserver.
-      return {
-        'id': build_obj.id,
-        'total_commands': None,
-        'current_command': None,
-        'push_completion': 0.0,
-        'status': build_obj.phase,
-        'message': None,
-        'image_completion': {},
-      }
-
     builds = model.list_repository_builds(namespace, repository)
     return jsonify({
-      'builds': [build_view(build) for build in builds]
+      'builds': [build_status_view(build) for build in builds]
     })

   abort(403)  # Permission denied


+@api.route('/repository/<path:repository>/build/<build_uuid>/status',
+           methods=['GET'])
+@parse_repository_name
+def get_repo_build_status(namespace, repository, build_uuid):
+  permission = ReadRepositoryPermission(namespace, repository)
+  is_public = model.repository_is_public(namespace, repository)
+  if permission.can() or is_public:
+    build = model.get_repository_build(namespace, repository, build_uuid)
+    return jsonify(build_status_view(build))
+
+  abort(403)  # Permission denied
+
+
+@api.route('/repository/<path:repository>/build/<build_uuid>/logs',
+           methods=['GET'])
+@parse_repository_name
+def get_repo_build_logs(namespace, repository, build_uuid):
+  permission = ModifyRepositoryPermission(namespace, repository)
+  if permission.can():
+    response_obj = {}
+    build = model.get_repository_build(namespace, repository, build_uuid)
+
+    start = int(request.args.get('start', 0))
+    count, logs = build_logs.get_log_entries(build.uuid, start)
+
+    response_obj.update({
+      'start': start,
+      'total': count,
+      'logs': [log for log in logs],
+    })
+
+    return jsonify(response_obj)
+
+  abort(403)  # Permission denied
+
+
 @api.route('/repository/<path:repository>/build/', methods=['POST'])
 @api_login_required
 @parse_repository_name
@@ -1193,20 +1223,28 @@ def request_repo_build(namespace, repository):
   repo = model.get_repository(namespace, repository)
   token = model.create_access_token(repo, 'write')

+  display_name = user_files.get_file_checksum(dockerfile_id)
+  logger.debug('**********Md5: %s' % display_name)
+
   host = urlparse.urlparse(request.url).netloc
   tag = '%s/%s/%s' % (host, repo.namespace, repo.name)
   build_request = model.create_repository_build(repo, token, dockerfile_id,
-                                                tag)
-  dockerfile_build_queue.put(json.dumps({'build_id': build_request.id}))
+                                                tag, display_name)
+  dockerfile_build_queue.put(json.dumps({
+    'build_uuid': build_request.uuid,
+    'namespace': namespace,
+    'repository': repository,
+  }))

   log_action('build_dockerfile', namespace,
              {'repo': repository, 'namespace': namespace,
               'fileid': dockerfile_id}, repo=repo)

-  resp = jsonify({
-    'started': True
-  })
+  resp = jsonify(build_status_view(build_request))
+  repo_string = '%s/%s' % (namespace, repository)
+  resp.headers['Location'] = url_for('api.get_repo_build_status',
+                                     repository=repo_string,
+                                     build_uuid=build_request.uuid)
   resp.status_code = 201
   return resp
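
Together, the new routes let a client poll a build by uuid and page through its logs using the returned `total` as the next `start` offset. A hedged client-side sketch; the host, URL prefix, repository, and build uuid are placeholders, and authentication is omitted:

```
# A minimal polling sketch against the new endpoints (placeholder URL).
import time

import requests

base = 'http://localhost:5000/api/repository/devtable/building/build/some-build-uuid'

start = 0
while True:
  status = requests.get(base + '/status').json()
  logs = requests.get(base + '/logs', params={'start': start}).json()
  for entry in logs['logs']:
    print(entry.get('message', ''))
  start = logs['total']

  if status['phase'] in ('complete', 'error'):
    break
  time.sleep(2)
```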


@@ -1,61 +0,0 @@
import math

from random import SystemRandom
from flask import jsonify
from app import app


def generate_image_completion(rand_func):
  images = {}
  for image_id in range(rand_func.randint(1, 11)):
    total = int(math.pow(abs(rand_func.gauss(0, 1000)), 2))
    current = rand_func.randint(0, total)
    image_id = 'image_id_%s' % image_id
    images[image_id] = {
      'total': total,
      'current': current,
    }
  return images


@app.route('/test/build/status', methods=['GET'])
def generate_random_build_status():
  response = {
    'id': 1,
    'total_commands': None,
    'current_command': None,
    'push_completion': 0.0,
    'status': None,
    'message': None,
    'image_completion': {},
  }

  random = SystemRandom()
  phases = {
    'waiting': {},
    'starting': {
      'total_commands': 7,
      'current_command': 0,
    },
    'initializing': {},
    'error': {
      'message': 'Oops!'
    },
    'complete': {},
    'building': {
      'total_commands': 7,
      'current_command': random.randint(1, 7),
    },
    'pushing': {
      'total_commands': 7,
      'current_command': 7,
      'push_completion': random.random(),
      'image_completion': generate_image_completion(random),
    },
  }

  phase = random.choice(phases.keys())
  response['status'] = phase
  response.update(phases[phase])

  return jsonify(response)


@@ -275,6 +275,13 @@ def populate_database():
                         'Empty repository which is building.',
                         False, [], (0, [], None))

+  token = model.create_access_token(building, 'write')
+  tag = 'ci.devtable.com:5000/%s/%s' % (building.namespace, building.name)
+  build = model.create_repository_build(building, token, '123-45-6789', tag,
+                                        'build-name')
+  build.uuid = 'deadbeef-dead-beef-dead-beefdeadbeef'
+  build.save()
+
   org = model.create_organization('buynlarge', 'quay@devtable.com',
                                   new_user_1)
   org.stripe_id = TEST_STRIPE_ID
@@ -298,19 +305,11 @@ def populate_database():
   model.add_user_to_team(new_user_2, reader_team)
   model.add_user_to_team(reader, reader_team)

-  token = model.create_access_token(building, 'write')
-  tag = 'ci.devtable.com:5000/%s/%s' % (building.namespace, building.name)
-  build = model.create_repository_build(building, token, '123-45-6789', tag)
-  build.build_node_id = 1
-  build.phase = 'building'
-  build.status_url = 'http://localhost:5000/test/build/status'
-  build.save()
-
   __generate_repository(new_user_1, 'superwide', None, False, [],
                         [(10, [], 'latest2'),
                          (2, [], 'latest3'),
-                         (2, [(1, [], 'latest11'), (2, [], 'latest12')], 'latest4'),
+                         (2, [(1, [], 'latest11'), (2, [], 'latest12')],
+                          'latest4'),
                          (2, [], 'latest5'),
                          (2, [], 'latest6'),
                          (2, [], 'latest7'),


@@ -19,3 +19,7 @@ paramiko
 python-digitalocean
 xhtml2pdf
 logstash_formatter
+redis
+hiredis
+git+https://github.com/dotcloud/docker-py.git
+loremipsum


@@ -13,16 +13,20 @@ beautifulsoup4==4.3.2
 blinker==1.3
 boto==2.24.0
 distribute==0.6.34
+git+https://github.com/dotcloud/docker-py.git
 ecdsa==0.10
 gevent==1.0
 greenlet==0.4.2
 gunicorn==18.0
+hiredis==0.1.2
 html5lib==1.0b3
 itsdangerous==0.23
 lockfile==0.9.1
 logstash-formatter==0.5.8
+loremipsum==1.0.2
 marisa-trie==0.5.1
 mixpanel-py==3.1.1
+mock==1.0.1
 paramiko==1.12.1
 peewee==2.2.0
 py-bcrypt==0.4
@@ -31,9 +35,11 @@ pycrypto==2.6.1
 python-daemon==1.6
 python-dateutil==2.2
 python-digitalocean==0.6
+redis==2.9.1
 reportlab==2.7
 requests==2.2.1
 six==1.5.2
 stripe==1.12.0
+websocket-client==0.11.0
 wsgiref==0.1.2
 xhtml2pdf==0.0.5


@@ -507,35 +507,103 @@ i.toggle-icon:hover {
   color: #428bca;
 }

-.status-boxes .popover {
-  margin-right: 20px;
+.status-box a {
+  padding: 6px;
+  color: black;
 }

-.status-boxes .popover-content {
-  width: 260px;
+.status-box a b {
+  margin-right: 10px;
 }

-.build-statuses {
+.build-info {
+  margin: 4px;
+  padding: 4px;
+  margin-left: 6px;
+  margin-right: 6px;
+  border-bottom: 1px solid #eee;
+}
+
+.build-info.clickable:hover {
+  background: rgba(66, 139, 202, 0.2);
+  cursor: pointer;
+  border-radius: 4px;
+}
+
+.build-info:last-child {
+  border-bottom: 0px;
+}
+
+.phase-icon {
+  border-radius: 50%;
+  display: inline-block;
+  width: 12px;
+  height: 12px;
+  margin-right: 6px;
+}
+
+.active .build-tab-link .phase-icon {
+  box-shadow: 0px 0px 10px #FFFFFF, 0px 0px 10px #FFFFFF;
+}
+
+.build-status .phase-icon {
+  margin-top: 4px;
+  float: left;
+}
+
+.phase-icon.error {
+  background-color: red;
+}
+
+.phase-icon.waiting, .phase-icon.starting, .phase-icon.initializing {
+  background-color: #ddd;
+}
+
+.phase-icon.building {
+  background-color: #f0ad4e;
+}
+
+.phase-icon.pushing {
+  background-color: #5cb85c;
+}
+
+.phase-icon.complete {
+  background-color: #428bca;
+}
+
+.build-status {
   display: inline-block;
 }

 .build-status-container {
   padding: 4px;
   margin-bottom: 10px;
   border-bottom: 1px solid #eee;
-  width: 230px;
+  width: 350px;
 }

 .build-status-container .build-message {
   display: block;
   white-space: nowrap;
-  font-size: 12px;
+  font-size: 14px;
+  margin-bottom: 10px;
+  padding-bottom: 10px;
+  border-bottom: 1px solid #eee;
+  margin-left: 20px;
 }

 .build-status-container .progress {
-  height: 12px;
+  height: 10px;
   margin: 0px;
   margin-top: 10px;
-  width: 230px;
+  margin-left: 20px;
+  width: 310px;
+}
+
+.build-status-container .timing {
+  margin-left: 20px;
+  margin-top: 6px;
 }

 .build-status-container:last-child {
@@ -1633,6 +1701,185 @@ p.editable:hover i {
   padding-left: 44px;
 }

+.repo-build .build-id:before {
+  content: "Build ID: "
+}
+
+.repo-build .build-id {
+  float: right;
+  font-size: 12px;
+  color: #aaa;
+  padding: 10px;
+}
+
+.repo-build .build-pane .timing {
+  float: right;
+}
+
+.repo-build .build-tab-link {
+  white-space: nowrap;
+}
+
+.repo-build .build-pane .build-header {
+  padding-top: 10px;
+  border-bottom: 1px solid #eee;
+  padding-bottom: 10px;
+}
+
+.repo-build .build-pane .build-progress {
+  margin-top: 16px;
+  margin-bottom: 10px;
+}
+
+.repo-build .build-pane .build-progress .progress {
+  height: 14px;
+  margin-bottom: 0px;
+}
+
+.repo-build .build-pane .quay-spinner {
+  margin-top: 4px;
+  display: inline-block;
+}
+
+.repo-build .build-pane .build-logs {
+  background: #222;
+  color: white;
+  padding: 10px;
+  overflow: auto;
+}
+
+.repo-build .build-pane .build-logs .container-header {
+  padding: 2px;
+}
+
+.repo-build .build-pane .build-logs .container-logs {
+  margin: 4px;
+  padding-bottom: 4px;
+}
+
+.repo-build .build-pane .build-logs .command-title,
+.repo-build .build-pane .build-logs .log-entry .message {
+  font-family: Consolas, "Lucida Console", Monaco, monospace;
+  font-size: 13px;
+}
+
+.repo-build .build-pane .build-logs .container-header {
+  cursor: pointer;
+  position: relative;
+}
+
+.repo-build .build-pane .build-logs .container-header i.fa.chevron {
+  color: #666;
+  margin-right: 4px;
+  width: 14px;
+  text-align: center;
+  position: absolute;
+  top: 6px;
+  left: 0px;
+}
+
+.repo-build .build-pane .build-logs .log-container.command {
+  margin-left: 42px;
+}
+
+.repo-build .build-pane .build-logs .container-header.building {
+  margin-bottom: 10px;
+}
+
+.repo-build .build-pane .build-logs .container-header.pushing {
+  margin-top: 10px;
+}
+
+.repo-build .build-log-error-element {
+  position: relative;
+  display: inline-block;
+  margin: 10px;
+  padding: 10px;
+  background: rgba(255, 0, 0, 0.17);
+  border-radius: 10px;
+  margin-left: 22px;
+}
+
+.repo-build .build-log-error-element i.fa {
+  color: red;
+  position: absolute;
+  top: 13px;
+  left: 11px;
+}
+
+.repo-build .build-log-error-element .error-message {
+  display: inline-block;
+  margin-left: 25px;
+}
+
+.repo-build .build-pane .build-logs .container-header .label {
+  padding-top: 4px;
+  text-align: right;
+  margin-right: 4px;
+  width: 86px;
+  display: inline-block;
+  border-right: 4px solid #aaa;
+  background-color: #444;
+  position: absolute;
+  top: 4px;
+  left: 24px;
+}
+
+.repo-build .build-pane .build-logs .container-header .container-content {
+  display: block;
+  padding-left: 20px;
+}
+
+.repo-build .build-pane .build-logs .container-header .container-content.build-log-command {
+  padding-left: 120px;
+}
+
+.label.FROM {
+  border-color: #5bc0de !important;
+}
+
+.label.CMD, .label.EXPOSE, .label.ENTRYPOINT {
+  border-color: #428bca !important;
+}
+
+.label.RUN, .label.ADD {
+  border-color: #5cb85c !important;
+}
+
+.label.ENV, .label.VOLUME, .label.USER, .label.WORKDIR {
+  border-color: #f0ad4e !important;
+}
+
+.label.MAINTAINER {
+  border-color: #aaa !important;
+}
+
+.repo-build .build-pane .build-logs .log-entry {
+  position: relative;
+}
+
+.repo-build .build-pane .build-logs .log-entry .message {
+  display: inline-block;
+  margin-left: 46px;
+}
+
+.repo-build .build-pane .build-logs .log-entry .id {
+  color: #aaa;
+  padding-right: 6px;
+  margin-right: 6px;
+  text-align: right;
+  font-size: 12px;
+  width: 40px;
+  position: absolute;
+  top: 4px;
+  left: 4px;
+}
+
 .repo-admin .right-info {
   font-size: 11px;
   margin-top: 10px;
@@ -1676,16 +1923,6 @@ p.editable:hover i {
   cursor: pointer;
 }

-.repo .build-info {
-  padding: 10px;
-  margin: 0px;
-}
-
-.repo .build-info .progress {
-  margin: 0px;
-  margin-top: 10px;
-}
-
 .repo .section {
   display: block;
   margin-bottom: 20px;


@@ -0,0 +1,6 @@
<span class="command" bindonce>
  <span class="label" bo-class="getCommandKind(command.message)" bo-show="getCommandKind(command.message)"
        bo-text="getCommandKind(command.message)">
  </span>
  <span class="command-title" bo-html="getCommandTitleHtml(command.message)"></span>
</span>


@@ -0,0 +1,4 @@
<span bindonce class="build-log-error-element">
  <i class="fa fa-exclamation-triangle"></i>
  <span class="error-message" bo-text="error.message"></span>
</span>


@@ -0,0 +1,4 @@
<span bindonce class="build-log-phase-element">
  <span class="phase-icon" ng-class="phase.message"></span>
  <span class="build-message" phase="phase.message"></span>
</span>


@@ -0,0 +1 @@
<span class="build-message-element">{{ getBuildMessage(phase) }}</span>


@@ -0,0 +1,6 @@
<div class="build-progress-element">
  <div class="progress" ng-class="getPercentage(build) < 100 ? 'active progress-striped' : ''">
    <div class="progress-bar" role="progressbar" aria-valuenow="{{ getPercentage(build) }}" aria-valuemin="0" aria-valuemax="100" style="{{ 'width: ' + getPercentage(build) + '%' }}">
    </div>
  </div>
</div>


@@ -1,8 +1,11 @@
 <div id="build-status-container" class="build-status-container">
-  <span class="build-message">{{ getBuildMessage(build) }}</span>
-  <div class="progress" ng-class="getBuildProgress(build) < 100 ? 'active progress-striped' : ''" ng-show="getBuildProgress(build) >= 0">
-    <div class="progress-bar" role="progressbar" aria-valuenow="{{ getBuildProgress(build) }}" aria-valuemin="0" aria-valuemax="100" style="{{ 'width: ' + getBuildProgress(build) + '%' }}">
-    </div>
+  <div>
+    <span class="phase-icon" ng-class="build.phase"></span>
+    <span class="build-message" phase="build.phase"></span>
   </div>
+  <div class="timing">
+    <i class="fa fa-clock-o"></i>
+    Started: <span am-time-ago="build.started || 0"></span>
+  </div>
+  <div class="build-progress" build="build"></div>
 </div>


@@ -103,7 +103,7 @@ function getMarkedDown(string) {
 }

 // Start the application code itself.
-quayApp = angular.module('quay', ['ngRoute', 'chieffancypants.loadingBar', 'restangular', 'angularMoment', 'angulartics', /*'angulartics.google.analytics',*/ 'angulartics.mixpanel', '$strap.directives', 'ngCookies', 'ngSanitize', 'angular-md5'], function($provide, cfpLoadingBarProvider) {
+quayApp = angular.module('quay', ['ngRoute', 'chieffancypants.loadingBar', 'restangular', 'angularMoment', 'angulartics', /*'angulartics.google.analytics',*/ 'angulartics.mixpanel', '$strap.directives', 'ngCookies', 'ngSanitize', 'angular-md5', 'pasvaz.bindonce'], function($provide, cfpLoadingBarProvider) {
   cfpLoadingBarProvider.includeSpinner = false;

   $provide.factory('UtilService', ['$sanitize', function($sanitize) {
@@ -151,7 +151,7 @@ quayApp = angular.module('quay', ['ngRoute', 'chieffancypants.loadingBar', 'rest
   $provide.factory('ApiService', ['Restangular', function(Restangular) {
     var apiService = {};

-    var getResource = function(path) {
+    var getResource = function(path, opt_background) {
       var resource = {};
       resource.url = path;
       resource.withOptions = function(options) {
@@ -169,6 +169,12 @@ quayApp = angular.module('quay', ['ngRoute', 'chieffancypants.loadingBar', 'rest
         'hasError': false
       };

+      if (opt_background) {
+        performer.withHttpConfig({
+          'ignoreLoadingBar': true
+        });
+      }
+
       performer.get(options).then(function(resp) {
         result.value = processor(resp);
         result.loading = false;
@@ -240,27 +246,33 @@ quayApp = angular.module('quay', ['ngRoute', 'chieffancypants.loadingBar', 'rest
     var buildMethodsForEndpoint = function(endpoint) {
       var method = endpoint.methods[0].toLowerCase();
       var methodName = formatMethodName(endpoint['name']);
-      apiService[methodName] = function(opt_options, opt_parameters) {
-        return Restangular.one(buildUrl(endpoint['path'], opt_parameters))['custom' + method.toUpperCase()](opt_options);
+      apiService[methodName] = function(opt_options, opt_parameters, opt_background) {
+        var one = Restangular.one(buildUrl(endpoint['path'], opt_parameters));
+        if (opt_background) {
+          one.withHttpConfig({
+            'ignoreLoadingBar': true
+          });
+        }
+        return one['custom' + method.toUpperCase()](opt_options);
       };

       if (method == 'get') {
-        apiService[methodName + 'AsResource'] = function(opt_parameters) {
-          return getResource(buildUrl(endpoint['path'], opt_parameters));
+        apiService[methodName + 'AsResource'] = function(opt_parameters, opt_background) {
+          return getResource(buildUrl(endpoint['path'], opt_parameters), opt_background);
         };
       }

       if (endpoint['user_method']) {
-        apiService[getGenericMethodName(endpoint['user_method'])] = function(orgname, opt_options, opt_parameters) {
+        apiService[getGenericMethodName(endpoint['user_method'])] = function(orgname, opt_options, opt_parameters, opt_background) {
           if (orgname) {
             if (orgname.name) {
               orgname = orgname.name;
             }
-            var params = jQuery.extend({'orgname' : orgname}, opt_parameters || {});
+            var params = jQuery.extend({'orgname' : orgname}, opt_parameters || {}, opt_background);
             return apiService[methodName](opt_options, params);
           } else {
-            return apiService[formatMethodName(endpoint['user_method'])](opt_options, opt_parameters);
+            return apiService[formatMethodName(endpoint['user_method'])](opt_options, opt_parameters, opt_background);
           }
         };
       }
@@ -779,6 +791,7 @@ quayApp = angular.module('quay', ['ngRoute', 'chieffancypants.loadingBar', 'rest
                       fixFooter: false}).
        when('/repository/:namespace/:name/image/:image', {templateUrl: '/static/partials/image-view.html', controller: ImageViewCtrl, reloadOnSearch: false}).
        when('/repository/:namespace/:name/admin', {templateUrl: '/static/partials/repo-admin.html', controller:RepoAdminCtrl, reloadOnSearch: false}).
+       when('/repository/:namespace/:name/build', {templateUrl: '/static/partials/repo-build.html', controller:RepoBuildCtrl, reloadOnSearch: false}).
        when('/repository/', {title: 'Repositories', description: 'Public and private docker repositories list',
                              templateUrl: '/static/partials/repo-list.html', controller: RepoListCtrl}).
        when('/user/', {title: 'Account Settings', description:'Account settings for Quay.io', templateUrl: '/static/partials/user-admin.html',
@@ -2467,6 +2480,119 @@ quayApp.directive('namespaceSelector', function () {
 });


+quayApp.directive('buildLogPhase', function () {
+  var directiveDefinitionObject = {
+    priority: 0,
+    templateUrl: '/static/directives/build-log-phase.html',
+    replace: false,
+    transclude: false,
+    restrict: 'C',
+    scope: {
+      'phase': '=phase'
+    },
+    controller: function($scope, $element) {
+    }
+  };
+  return directiveDefinitionObject;
+});
+
+
+quayApp.directive('buildLogError', function () {
+  var directiveDefinitionObject = {
+    priority: 0,
+    templateUrl: '/static/directives/build-log-error.html',
+    replace: false,
+    transclude: false,
+    restrict: 'C',
+    scope: {
+      'error': '=error'
+    },
+    controller: function($scope, $element) {
+    }
+  };
+  return directiveDefinitionObject;
+});
+
+
+quayApp.directive('buildLogCommand', function () {
+  var directiveDefinitionObject = {
+    priority: 0,
+    templateUrl: '/static/directives/build-log-command.html',
+    replace: false,
+    transclude: false,
+    restrict: 'C',
+    scope: {
+      'command': '=command'
+    },
+    controller: function($scope, $element, $sanitize) {
+      var registryHandlers = {
+        'quay.io': function(pieces) {
+          var rnamespace = pieces[pieces.length - 2];
+          var rname = pieces[pieces.length - 1];
+          return '/repository/' + rnamespace + '/' + rname + '/';
+        },
+
+        '': function(pieces) {
+          var rnamespace = pieces.length == 1 ? '_' : pieces[0];
+          var rname = pieces[pieces.length - 1];
+          return 'https://index.docker.io/u/' + rnamespace + '/' + rname + '/';
+        }
+      };
+
+      var kindHandlers = {
+        'FROM': function(title) {
+          var pieces = title.split('/');
+          var registry = pieces.length < 3 ? '' : pieces[0];
+          if (!registryHandlers[registry]) {
+            return title;
+          }
+
+          return '<i class="fa fa-hdd-o"></i> <a href="' + registryHandlers[registry](pieces) + '">' + title + '</a>';
+        }
+      };
+
+      $scope.getCommandKind = function(fullTitle) {
+        var colon = fullTitle.indexOf(':');
+        var title = getTitleWithoutStep(fullTitle);
+        if (!title) {
+          return null;
+        }
+
+        var space = title.indexOf(' ');
+        return title.substring(0, space);
+      };
+
+      $scope.getCommandTitleHtml = function(fullTitle) {
+        var title = getTitleWithoutStep(fullTitle) || fullTitle;
+        var space = title.indexOf(' ');
+        if (space <= 0) {
+          return $sanitize(title);
+        }
+
+        var kind = $scope.getCommandKind(fullTitle);
+        var sanitized = $sanitize(title.substring(space + 1));
+
+        var handler = kindHandlers[kind || ''];
+        if (handler) {
+          return handler(sanitized);
+        } else {
+          return sanitized;
+        }
+      };
+
+      var getTitleWithoutStep = function(fullTitle) {
+        var colon = fullTitle.indexOf(':');
+        if (colon <= 0) {
+          return null;
+        }
+
+        return $.trim(fullTitle.substring(colon + 1));
+      };
+    }
+  };
+  return directiveDefinitionObject;
+});
+
+
 quayApp.directive('buildStatus', function () {
   var directiveDefinitionObject = {
     priority: 0,
@@ -2478,55 +2604,85 @@ quayApp.directive('buildStatus', function () {
       'build': '=build'
     },
     controller: function($scope, $element) {
-      $scope.getBuildProgress = function(buildInfo) {
-        switch (buildInfo.status) {
-          case 'building':
-            return (buildInfo.current_command / buildInfo.total_commands) * 100;
-            break;
-
-          case 'pushing':
-            return buildInfo.push_completion * 100;
-            break;
-
-          case 'complete':
-            return 100;
-            break;
-
-          case 'initializing':
-          case 'starting':
-          case 'waiting':
-            return 0;
-            break;
-        }
-
-        return -1;
-      };
-
-      $scope.getBuildMessage = function(buildInfo) {
-        switch (buildInfo.status) {
-          case 'initializing':
-            return 'Starting Dockerfile build';
-            break;
-
-          case 'starting':
-          case 'waiting':
-          case 'building':
-            return 'Building image from Dockerfile';
-            break;
-
-          case 'pushing':
-            return 'Pushing image built from Dockerfile';
-            break;
-
-          case 'complete':
-            return 'Dockerfile build completed and pushed';
-            break;
-
-          case 'error':
-            return 'Dockerfile build failed: ' + buildInfo.message;
-            break;
-        }
-      };
+    }
+  };
+  return directiveDefinitionObject;
+});
+
+
+quayApp.directive('buildMessage', function () {
+  var directiveDefinitionObject = {
+    priority: 0,
+    templateUrl: '/static/directives/build-message.html',
+    replace: false,
+    transclude: false,
+    restrict: 'C',
+    scope: {
+      'phase': '=phase'
+    },
+    controller: function($scope, $element) {
+      $scope.getBuildMessage = function (phase) {
+        switch (phase) {
+          case 'starting':
+          case 'initializing':
+            return 'Starting Dockerfile build';
+
+          case 'waiting':
+            return 'Waiting for available build worker';
+
+          case 'building':
+            return 'Building image from Dockerfile';
+
+          case 'pushing':
+            return 'Pushing image built from Dockerfile';
+
+          case 'complete':
+            return 'Dockerfile build completed and pushed';
+
+          case 'error':
+            return 'Dockerfile build failed';
+        }
+      };
+    }
+  };
+  return directiveDefinitionObject;
+});
+
+
+quayApp.directive('buildProgress', function () {
+  var directiveDefinitionObject = {
+    priority: 0,
+    templateUrl: '/static/directives/build-progress.html',
+    replace: false,
+    transclude: false,
+    restrict: 'C',
+    scope: {
+      'build': '=build'
+    },
+    controller: function($scope, $element) {
+      $scope.getPercentage = function(buildInfo) {
+        switch (buildInfo.phase) {
+          case 'building':
+            return (buildInfo.status.current_command / buildInfo.status.total_commands) * 100;
+            break;
+
+          case 'pushing':
+            return buildInfo.status.push_completion * 100;
+            break;
+
+          case 'complete':
+            return 100;
+            break;
+
+          case 'initializing':
+          case 'starting':
+          case 'waiting':
+            return 0;
+            break;
+        }
+
+        return -1;
+      };
     }
   };
   return directiveDefinitionObject;
 });
@@ -2541,6 +2697,14 @@ quayApp.directive('ngBlur', function() {
   };
 });

+quayApp.directive('ngVisible', function () {
+  return function (scope, element, attr) {
+    scope.$watch(attr.ngVisible, function (visible) {
+      element.css('visibility', visible ? 'visible' : 'hidden');
+    });
+  };
+});
+
 quayApp.run(['$location', '$rootScope', 'Restangular', 'UserService', 'PlanService', '$http', '$timeout',
   function($location, $rootScope, Restangular, UserService, PlanService, $http, $timeout) {


@@ -185,6 +185,11 @@ function RepoCtrl($scope, $sanitize, Restangular, ImageMetadataService, ApiServi
   $scope.getFormattedCommand = ImageMetadataService.getFormattedCommand;

+  $scope.showBuild = function(buildInfo) {
+    $location.path('/repository/' + namespace + '/' + name + '/build');
+    $location.search('current', buildInfo.id);
+  };
+
   $scope.getTooltipCommand = function(image) {
     var sanitized = ImageMetadataService.getEscapedFormattedCommand(image);
     return '<span class=\'codetooltip\'>' + sanitized + '</span>';
@@ -512,13 +517,11 @@ function RepoCtrl($scope, $sanitize, Restangular, ImageMetadataService, ApiServi
   };

   var getBuildInfo = function(repo) {
-    // Note: We use restangular manually here because we need to turn off the loading bar.
-    var buildInfo = Restangular.one('repository/' + repo.namespace + '/' + repo.name + '/build/');
-    buildInfo.withHttpConfig({
-      'ignoreLoadingBar': true
-    });
-
-    buildInfo.get().then(function(resp) {
+    var params = {
+      'repository': repo.namespace + '/' + repo.name
+    };
+
+    ApiService.getRepoBuilds(null, params, true).then(function(resp) {
       var runningBuilds = [];
       for (var i = 0; i < resp.builds.length; ++i) {
         var build = resp.builds[i];
@@ -604,6 +607,197 @@ function RepoCtrl($scope, $sanitize, Restangular, ImageMetadataService, ApiServi
   loadViewInfo();
 }

+function RepoBuildCtrl($scope, Restangular, ApiService, $routeParams, $rootScope, $location, $interval, $sanitize) {
+  var namespace = $routeParams.namespace;
+  var name = $routeParams.name;
+  var pollTimerHandle = null;
+
+  $scope.$on('$destroy', function() {
+    stopPollTimer();
+  });
+
+  // Watch for changes to the current parameter.
+  $scope.$on('$routeUpdate', function(){
+    if ($location.search().current) {
+      $scope.setCurrentBuild($location.search().current, false);
+    }
+  });
+
+  $scope.builds = [];
+  $scope.polling = false;
+
+  $scope.adjustLogHeight = function() {
+    $('.build-logs').height($(window).height() - 365);
+  };
+
+  $scope.hasLogs = function(container) {
+    return ((container.logs && container.logs.length) || (container._logs && container._logs.length));
+  };
+
+  $scope.toggleLogs = function(container) {
+    if (container._logs) {
+      container.logs = container._logs;
+      container._logs = null;
+    } else {
+      container._logs = container.logs;
+      container.logs = null;
+    }
+  };
+
+  $scope.setCurrentBuild = function(buildId, opt_updateURL) {
+    // Find the build.
+    for (var i = 0; i < $scope.builds.length; ++i) {
+      if ($scope.builds[i].id == buildId) {
+        $scope.setCurrentBuildInternal($scope.builds[i], opt_updateURL);
+        return;
+      }
+    }
+  };
+
+  $scope.setCurrentBuildInternal = function(build, opt_updateURL) {
+    if (build == $scope.currentBuild) { return; }
+
+    stopPollTimer();
+
+    $scope.logEntries = null;
+    $scope.logStartIndex = null;
+    $scope.currentParentEntry = null;
+
+    $scope.currentBuild = build;
+
+    if (opt_updateURL) {
+      if (build) {
+        $location.search('current', build.id);
+      } else {
+        $location.search('current', null);
+      }
+    }
+
+    // Timeout needed to ensure the log element has been created
+    // before its height is adjusted.
+    setTimeout(function() {
+      $scope.adjustLogHeight();
+    }, 1);
+
+    // Load the first set of logs.
+    getBuildStatusAndLogs();
+
+    // If the build is currently processing, start the build timer.
+    checkPollTimer();
+  };
+
+  var checkPollTimer = function() {
+    var build = $scope.currentBuild;
+    if (!build) {
+      stopPollTimer();
+      return;
+    }
+
+    if (build['phase'] != 'complete' && build['phase'] != 'error') {
+      startPollTimer();
+      return true;
+    } else {
+      stopPollTimer();
+      return false;
+    }
+  };
+
+  var stopPollTimer = function() {
+    $interval.cancel(pollTimerHandle);
+  };
+
+  var startPollTimer = function() {
+    stopPollTimer();
+    pollTimerHandle = $interval(getBuildStatusAndLogs, 2000);
+  };
+
+  var processLogs = function(logs, startIndex) {
+    if (!$scope.logEntries) { $scope.logEntries = []; }
+
+    for (var i = 0; i < logs.length; ++i) {
+      var entry = logs[i];
+      var type = entry['type'] || 'entry';
+      if (type == 'command' || type == 'phase' || type == 'error') {
+        entry['_logs'] = [];
+        entry['index'] = startIndex + i;
+        $scope.logEntries.push(entry);
+        $scope.currentParentEntry = entry;
+      } else if ($scope.currentParentEntry) {
+        if ($scope.currentParentEntry['logs']) {
+          $scope.currentParentEntry['logs'].push(entry);
+        } else {
+          $scope.currentParentEntry['_logs'].push(entry);
+        }
+      }
+    }
+  };
+
+  var getBuildStatusAndLogs = function() {
+    if (!$scope.currentBuild || $scope.polling) { return; }
+    $scope.polling = true;
+
+    var params = {
+      'repository': namespace + '/' + name,
+      'build_uuid': $scope.currentBuild.id
+    };
+
+    ApiService.getRepoBuildStatus(null, params, true).then(function(resp) {
+      // Note: We use extend here rather than replacing as Angular is depending on the
+      // root build object to remain the same object.
+      $.extend(true, $scope.currentBuild, resp);
+      checkPollTimer();
+
+      // Load the updated logs for the build.
+      var options = {
+        'start': $scope.logStartIndex
+      };
+
+      ApiService.getRepoBuildLogsAsResource(params, true).withOptions(options).get(function(resp) {
+        processLogs(resp['logs'], resp['start']);
+        $scope.logStartIndex = resp['total'];
+        $scope.polling = false;
+      });
+    });
+  };
+
+  var fetchRepository = function() {
+    var params = {'repository': namespace + '/' + name};
+    $rootScope.title = 'Loading Repository...';
+    $scope.repository = ApiService.getRepoAsResource(params).get(function(repo) {
+      if (!repo.can_write) {
+        $rootScope.title = 'Unknown builds';
+        $scope.accessDenied = true;
+        return;
+      }
+
+      $rootScope.title = 'Repository Builds';
+      $scope.repo = repo;
+
+      getBuildInfo();
+    });
+  };
+
+  var getBuildInfo = function(repo) {
+    var params = {
+      'repository': namespace + '/' + name
+    };
+
+    ApiService.getRepoBuilds(null, params).then(function(resp) {
+      $scope.builds = resp.builds;
+
+      if ($location.search().current) {
+        $scope.setCurrentBuild($location.search().current, false);
+      } else if ($scope.builds.length > 0) {
+        $scope.setCurrentBuild($scope.builds[0].id, true);
+      }
+    });
+  };
+
+  fetchRepository();
+}
+
 function RepoAdminCtrl($scope, Restangular, ApiService, $routeParams, $rootScope) {
   var namespace = $routeParams.namespace;
   var name = $routeParams.name;
@@ -854,8 +1048,13 @@ function RepoAdminCtrl($scope, Restangular, ApiService, $routeParams, $rootScope
   };

   $scope.repository = ApiService.getRepoAsResource(params).get(function(repo) {
-    $scope.repo = repo;
+    if (!repo.can_admin) {
+      $rootScope.title = 'Forbidden';
+      $scope.accessDenied = true;
+      return;
+    }
+
+    $scope.repo = repo;
     $rootScope.title = 'Settings - ' + namespace + '/' + name;
     $rootScope.description = 'Administrator settings for ' + namespace + '/' + name +
                              ': Permissions, webhooks and other settings';
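
The processLogs helper in RepoBuildCtrl groups the flat log stream from Redis into parent containers: each command, phase, or error entry starts a new container, and untyped entries nest under the most recent one (the real controller also keeps a hidden _logs list so containers start collapsed). The same grouping, restated as a small Python sketch with illustrative data:

```
# Python restatement of processLogs' grouping (simplified: no collapsed
# _logs handling); the sample entries are illustrative.
def group_logs(logs, start_index):
  containers = []
  current = None
  for i, entry in enumerate(logs):
    if entry.get('type') in ('command', 'phase', 'error'):
      entry['logs'] = []
      entry['index'] = start_index + i
      containers.append(entry)
      current = entry
    elif current is not None:
      current['logs'].append(entry)
  return containers

grouped = group_logs([
  {'type': 'phase', 'message': 'building'},
  {'type': 'command', 'message': 'Step 1 : FROM ubuntu'},
  {'message': 'Pulling repository ubuntu'},
], 0)
assert len(grouped) == 2
assert grouped[1]['logs'][0]['message'] == 'Pulling repository ubuntu'
```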

static/lib/bindonce.min.js vendored Normal file

File diff suppressed because one or more lines are too long


@@ -1,5 +1,8 @@
 <div class="container" ng-show="deleting"><div class="quay-spinner"></div></div>
 <div class="resource-view" resource="repository" error-message="'No repository found'"></div>
+<div class="container repo repo-admin" ng-show="accessDenied">
+  You do not have permission to view this page
+</div>
+
 <div class="container repo repo-admin" ng-show="repo && !deleting">
   <div class="header row">
     <a href="{{ '/repository/' + repo.namespace + '/' + repo.name }}" class="back"><i class="fa fa-chevron-left"></i></a>

View file

@ -0,0 +1,83 @@
<div class="resource-view" resource="repository" error-message="'No repository found'"></div>

<div class="container repo repo-build" ng-show="accessDenied">
  You do not have permission to view this page
</div>

<div class="container repo repo-build" ng-show="repo">
  <div class="header row">
    <a href="{{ '/repository/' + repo.namespace + '/' + repo.name }}" class="back"><i class="fa fa-chevron-left"></i></a>
    <h3>
      <span class="repo-circle no-background" repo="repo"></span>
      <span class="repo-breadcrumb" repo="repo"></span>
    </h3>
  </div>

  <div class="row" ng-show="!builds.length">
    There are no builds for this repository
  </div>

  <div class="row" ng-show="builds.length">
    <!-- Side tabs -->
    <div class="col-sm-2">
      <ul class="nav nav-pills nav-stacked">
        <li ng-class="currentBuild == build ? 'active' : ''" ng-repeat="build in builds">
          <a class="build-tab-link" href="javascript:void(0)" ng-click="setCurrentBuild(build.id, true)">
            <span class="phase-icon" ng-class="build.phase"></span>
            <span>{{ build.display_name }}</span>
          </a>
        </li>
      </ul>
    </div>

    <!-- Content -->
    <div class="col-sm-10">
      <div class="tab-content" onresize="adjustLogHeight()">
        <div ng-repeat="build in builds" class="tab-pane build-pane" ng-class="currentBuild == build ? 'active' : ''">
          <div class="build-header">
            <div class="timing">
              <i class="fa fa-clock-o"></i>
              Started: <span am-time-ago="build.started || 0"></span>
            </div>
            <span class="phase-icon" ng-class="build.phase"></span>
            <span class="build-message" phase="build.phase"></span>
            <div class="build-progress" build="build"></div>
          </div>

          <div class="build-logs">
            <div ng-show="!logEntries">
              <span class="quay-spinner"></span>
            </div>
            <div class="log-container" ng-class="container.type" ng-repeat="container in logEntries">
              <div class="container-header" ng-class="container.type == 'phase' ? container.message : ''"
                   ng-switch on="container.type" ng-click="toggleLogs(container)">
                <i class="fa chevron"
                   ng-class="container.logs ? 'fa-chevron-down' : 'fa-chevron-right'" ng-show="hasLogs(container)"></i>
                <div ng-switch-when="phase">
                  <span class="container-content build-log-phase" phase="container"></span>
                </div>
                <div ng-switch-when="error">
                  <span class="container-content build-log-error" error="container"></span>
                </div>
                <div ng-switch-when="command">
                  <span class="container-content build-log-command" command="container"></span>
                </div>
              </div>

              <!-- Display the entries for the container -->
              <div class="container-logs" ng-show="container.logs">
                <div class="log-entry" bindonce ng-repeat="entry in container.logs">
                  <span class="id" bo-text="$index + container.index + 1"></span>
                  <span class="message" bo-text="entry.message"></span>
                </div>
              </div>
            </div>
          </div>

          <div>
            <span class="quay-spinner" ng-show="polling"></span>
            <span class="build-id">{{ build.id }}</span>
          </div>
        </div>
      </div>
    </div>
  </div>
</div>
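The `bindonce` / `bo-text` attributes on the log entries come from the bindonce library vendored above: individual log lines never change once rendered, so one-time bindings keep Angular from accumulating watchers across thousands of lines.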

@@ -38,13 +38,18 @@
<!-- Status boxes -->
<div class="status-boxes">
-  <div id="buildInfoBox" class="status-box" ng-show="repo.is_building"
-       bs-popover="'static/partials/build-status-item.html'" data-placement="bottom">
-    <span class="title">
-      <span class="quay-spinner"></span>
-      <b>Building Images</b>
-    </span>
-    <span class="count" ng-class="buildsInfo ? 'visible' : ''"><span>{{ buildsInfo ? buildsInfo.length : '-' }}</span></span>
+  <div id="buildInfoBox" class="status-box" ng-show="repo.is_building">
+    <div class="dropdown" data-placement="top">
+      <span class="count" ng-class="buildsInfo ? 'visible' : ''"><span>{{ buildsInfo ? buildsInfo.length : '-' }}</span></span>
+      <a href="javascript:void(0)" class="dropdown-toggle" data-toggle="dropdown">Building Dockerfile<span ng-show="buildsInfo.length > 1">s</span> <b class="caret"></b></a>
+      <ul class="dropdown-menu pull-right">
+        <li ng-repeat="buildInfo in buildsInfo">
+          <div class="build-info" ng-class="repo.can_write ? 'clickable' : ''" ng-click="showBuild(buildInfo)">
+            <span class="build-status" build="buildInfo"></span>
+          </div>
+        </li>
+      </ul>
+    </div>
  </div>
</div>

@@ -51,6 +51,7 @@
<script src="static/lib/angulartics-mixpanel.js"></script>
<script src="static/lib/angulartics-google-analytics.js"></script>
<script src="static/lib/angular-md5.js"></script>
+<script src="static/lib/bindonce.min.js"></script>
<script src="static/lib/angular-moment.min.js"></script>
<script src="static/lib/angular-cookies.min.js"></script>

Binary file not shown.

test/testlogs.py (new file, 189 lines)
@ -0,0 +1,189 @@
import logging
from random import SystemRandom
from loremipsum import get_sentence
from functools import wraps
from copy import deepcopy
from data.buildlogs import BuildLogs
logger = logging.getLogger(__name__)
random = SystemRandom()
def maybe_advance_script(is_get_status=False):
def inner_advance(func):
@wraps(func)
def wrapper(self, *args, **kwargs):
advance_units = random.randint(1, 500)
logger.debug('Advancing script %s units', advance_units)
while advance_units > 0 and self.remaining_script:
units = self.remaining_script[0][0]
if advance_units > units:
advance_units -= units
self.advance_script(is_get_status)
else:
break
return func(self, *args, **kwargs)
return wrapper
return inner_advance
class TestBuildLogs(BuildLogs):
COMMAND_TYPES = ['FROM', 'MAINTAINER', 'RUN', 'CMD', 'EXPOSE', 'ENV', 'ADD',
'ENTRYPOINT', 'VOLUME', 'USER', 'WORKDIR']
STATUS_TEMPLATE = {
'total_commands': None,
'current_command': None,
'push_completion': 0.0,
'image_completion': {},
}
def __init__(self, redis_host, namespace, repository, test_build_id):
super(TestBuildLogs, self).__init__(redis_host)
self.namespace = namespace
self.repository = repository
self.test_build_id = test_build_id
self.remaining_script = self._generate_script()
logger.debug('Total script size: %s', len(self.remaining_script))
self._logs = []
self._status = {}
self._last_status = {}
def advance_script(self, is_get_status):
(_, log, status_wrapper) = self.remaining_script.pop(0)
if log is not None:
self._logs.append(log)
if status_wrapper is not None:
(phase, status) = status_wrapper
from data import model
build_obj = model.get_repository_build(self.namespace, self.repository,
self.test_build_id)
build_obj.phase = phase
build_obj.save()
self._status = status
if not is_get_status:
self._last_status = status
def _generate_script(self):
script = []
# generate the init phase
script.append(self._generate_phase(400, 'initializing'))
script.extend(self._generate_logs(random.randint(1, 3)))
# move to the building phase
script.append(self._generate_phase(400, 'building'))
total_commands = random.randint(5, 20)
for command_num in range(1, total_commands + 1):
command_weight = random.randint(50, 100)
script.append(self._generate_command(command_num, total_commands,
command_weight))
# we want 0 logs some percent of the time
num_logs = max(0, random.randint(-50, 400))
script.extend(self._generate_logs(num_logs))
# move to the pushing phase
script.append(self._generate_phase(400, 'pushing'))
script.extend(self._generate_push_statuses(total_commands))
# move to the error or complete phase
if random.randint(0, 1) == 0:
script.append(self._generate_phase(400, 'complete'))
else:
script.append(self._generate_phase(400, 'error'))
script.append((1, {'message': 'Something bad happened! Oh noes!',
'type': self.ERROR}, None))
return script
def _generate_phase(self, start_weight, phase_name):
return (start_weight, {'message': phase_name, 'type': self.PHASE},
(phase_name, deepcopy(self.STATUS_TEMPLATE)))
def _generate_command(self, command_num, total_commands, command_weight):
sentence = get_sentence()
command = random.choice(self.COMMAND_TYPES)
if command == 'FROM':
sentence = random.choice(['ubuntu', 'lopter/raring-base',
'quay.io/devtable/simple',
'quay.io/buynlarge/orgrepo',
'stackbrew/ubuntu:precise'])
msg = {
'message': 'Step %s: %s %s' % (command_num, command, sentence),
'type': self.COMMAND,
}
status = deepcopy(self.STATUS_TEMPLATE)
status['total_commands'] = total_commands
status['current_command'] = command_num
return (command_weight, msg, ('building', status))
@staticmethod
def _generate_logs(count):
return [(1, {'message': get_sentence()}, None) for _ in range(count)]
@staticmethod
def _compute_total_completion(statuses, total_images):
percentage_with_sizes = float(len(statuses.values()))/total_images
sent_bytes = sum([status[u'current'] for status in statuses.values()])
total_bytes = sum([status[u'total'] for status in statuses.values()])
return float(sent_bytes)/total_bytes*percentage_with_sizes
@staticmethod
def _generate_push_statuses(total_commands):
push_status_template = deepcopy(TestBuildLogs.STATUS_TEMPLATE)
push_status_template['current_command'] = total_commands
push_status_template['total_commands'] = total_commands
push_statuses = []
one_mb = 1 * 1024 * 1024
num_images = random.randint(2, 7)
sizes = [random.randint(one_mb, one_mb * 5) for _ in range(num_images)]
image_completion = {}
for image_num, image_size in enumerate(sizes):
image_id = 'image_id_%s' % image_num
image_completion[image_id] = {
'current': 0,
'total': image_size,
}
for i in range(one_mb, image_size, one_mb):
image_completion[image_id]['current'] = i
new_status = deepcopy(push_status_template)
new_status['image_completion'] = deepcopy(image_completion)
completion = TestBuildLogs._compute_total_completion(image_completion,
num_images)
new_status['push_completion'] = completion
push_statuses.append((250, None, ('pushing', new_status)))
return push_statuses
@maybe_advance_script()
def get_log_entries(self, build_id, start_index):
if build_id == self.test_build_id:
return (len(self._logs), self._logs[start_index:])
else:
return super(TestBuildLogs, self).get_log_entries(build_id, start_index)
@maybe_advance_script(True)
def get_status(self, build_id):
if build_id == self.test_build_id:
returnable_status = self._last_status
self._last_status = self._status
return returnable_status
else:
return super(TestBuildLogs, self).get_status(build_id)
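TestBuildLogs substitutes a scripted, randomized build for the real thing by intercepting calls for one specific build id. A plausible way to wire it in for local development is to point the app's `BUILDLOGS` config (the key read by workers/dockerfilebuild.py below) at an instance; the host, namespace, repository, and build id here are illustrative placeholders, not values from this commit:

```python
# Hypothetical test wiring -- all four argument values are placeholders.
from test.testlogs import TestBuildLogs

app.config['BUILDLOGS'] = TestBuildLogs('localhost',   # redis host
                                        'devtable',    # namespace
                                        'building',    # repository
                                        'some-test-build-uuid')
```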

workers/README.md (new file, 39 lines)
@@ -0,0 +1,39 @@
To prepare a new build node host:
```
sudo apt-get update
sudo apt-get install -y git python-virtualenv python-dev phantomjs libjpeg8 libjpeg62-dev libfreetype6 libfreetype6-dev libevent-dev gdebi-core
```
Check out the code, install the custom kernel, custom docker, and nsexec packages, and reboot:
```
git clone https://bitbucket.org/yackob03/quay.git
cd quay
sudo gdebi --n binary_dependencies/builder/linux-headers-3.11.0-17_3.11.0-17.28_all.deb
sudo gdebi --n binary_dependencies/builder/linux-headers-3.11.0-17-generic_3.11.0-17.28_amd64.deb
sudo gdebi --n binary_dependencies/builder/linux-image-3.11.0-17-generic_3.11.0-17.28_amd64.deb
sudo gdebi --n binary_dependencies/builder/linux-image-extra-3.11.0-17-generic_3.11.0-17.28_amd64.deb
sudo gdebi --n binary_dependencies/builder/nsexec_1.22ubuntu1trusty1_amd64.deb
sudo gdebi --n binary_dependencies/builder/lxc-docker-0.8.0-tutum_0.8.0-tutum-20140212002736-afad5c0-dirty_amd64.deb
sudo chown -R 100000:100000 /var/lib/docker
sudo shutdown -r now
```
Pull some base images if you want (optional):
```
sudo docker pull ubuntu
sudo docker pull stackbrew/ubuntu
sudo docker pull busybox
sudo docker pull lopter/raring-base
```
Start the worker:
```
cd quay
virtualenv --distribute venv
source venv/bin/activate
pip install -r requirements.txt
sudo STACK=prod venv/bin/python -m workers.dockerfilebuild -D
```
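The `-D` flag daemonizes the worker; per the argument handling at the bottom of workers/dockerfilebuild.py below, log output then goes to the file named by `--log` (default `dockerfilebuild.log`) rather than to the console.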

@@ -1,20 +1,20 @@
import logging
-import json
import daemon
-import time
import argparse
-import digitalocean
-import requests
import os
+import requests
+import re
+import json
+import shutil

-from apscheduler.scheduler import Scheduler
-from multiprocessing.pool import ThreadPool
-from base64 import b64encode
-from requests.exceptions import ConnectionError
+from docker import Client, APIError
+from tempfile import TemporaryFile, mkdtemp
+from zipfile import ZipFile
+from functools import partial

from data.queue import dockerfile_build_queue
from data import model
-from data.database import db as db_connection
+from workers.worker import Worker
from app import app
@@ -26,234 +26,300 @@ formatter = logging.Formatter(FORMAT)
logger = logging.getLogger(__name__)

-BUILD_SERVER_CMD = ('docker run -d -p 5002:5002 ' +
-                    '-lxc-conf="lxc.aa_profile=unconfined" ' +
-                    '-privileged -e \'RESOURCE_URL=%s\' -e \'TAG=%s\' ' +
-                    '-e \'TOKEN=%s\' quay.io/quay/buildserver')
-
-
-def retry_command(to_call, args=[], kwargs={}, retries=5, period=5):
-  try:
-    return to_call(*args, **kwargs)
-  except Exception as ex:
-    if retries:
-      logger.debug('Retrying command after %ss' % period)
-      time.sleep(period)
-      return retry_command(to_call, args, kwargs, retries-1, period)
-    raise ex
-
-
-def get_status(url):
-  return retry_command(requests.get, [url]).json()['status']
-
-
-def babysit_builder(request):
-  """ Spin up a build node and ask it to build our job. Retryable errors
-      should return False, while fatal errors should return True.
-  """
-  try:
-    logger.debug('Starting work item: %s' % request)
-    repository_build = model.get_repository_build(request['build_id'])
-    logger.debug('Request details: %s' % repository_build)
-
-    # Initialize digital ocean API
-    do_client_id = app.config['DO_CLIENT_ID']
-    do_api_key = app.config['DO_CLIENT_SECRET']
-    manager = digitalocean.Manager(client_id=do_client_id, api_key=do_api_key)
-
-    # check if there is already a DO node for this build, if so clean it up
-    old_id = repository_build.build_node_id
-    if old_id:
-      logger.debug('Cleaning up old DO node: %s' % old_id)
-      old_droplet = digitalocean.Droplet(id=old_id, client_id=do_client_id,
-                                         api_key=do_api_key)
-      retry_command(old_droplet.destroy)
-
-    # Pick the region for the new droplet
-    allowed_regions = app.config['DO_ALLOWED_REGIONS']
-    regions = retry_command(manager.get_all_regions)
-    available_regions = {region.id for region in regions}
-    regions = available_regions.intersection(allowed_regions)
-    if not regions:
-      logger.error('No droplets in our allowed regions, available: %s' %
-                   available_regions)
-      return False
-
-    # start the DO node
-    name = 'dockerfile-build-%s' % repository_build.id
-    logger.debug('Starting DO node: %s' % name)
-    droplet = digitalocean.Droplet(client_id=do_client_id,
-                                   api_key=do_api_key,
-                                   name=name,
-                                   region_id=regions.pop(),
-                                   image_id=app.config['DO_DOCKER_IMAGE'],
-                                   size_id=66,  # 512MB
-                                   backup_active=False)
-    retry_command(droplet.create, [],
-                  {'ssh_key_ids': [app.config['DO_SSH_KEY_ID']]})
-    repository_build.build_node_id = droplet.id
-    repository_build.phase = 'starting'
-    repository_build.save()
-
-    logger.debug('Waiting for DO node to be available.')
-    startup = retry_command(droplet.get_events)[0]
-    while not startup.percentage or int(startup.percentage) != 100:
-      logger.debug('Droplet startup percentage: %s' % startup.percentage)
-      time.sleep(5)
-      retry_command(startup.load)
-
-    retry_command(droplet.load)
-    logger.debug('Droplet started at ip address: %s' % droplet.ip_address)
-
-    # connect to it with ssh
-    repository_build.phase = 'initializing'
-    repository_build.save()
-
-    # We wait until here to import paramiko because otherwise it doesn't work
-    # under the daemon context.
-    import paramiko
-    ssh_client = paramiko.SSHClient()
-    ssh_client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
-    logger.debug('Connecting to droplet through ssh at ip: %s' %
-                 droplet.ip_address)
-    retry_command(ssh_client.connect, [droplet.ip_address, 22, 'root'],
-                  {'look_for_keys': False, 'timeout': 10.0,
-                   'key_filename': app.config['DO_SSH_PRIVATE_KEY_FILENAME']})
-
-    # Load the node with the pull token
-    token = app.config['BUILD_NODE_PULL_TOKEN']
-    basicauth = b64encode('%s:%s' % ('$token', token))
-    auth_object = {
-      'https://quay.io/v1/': {
-        'auth': basicauth,
-        'email': '',
-      },
-    }
-
-    create_auth_cmd = 'echo \'%s\' > .dockercfg' % json.dumps(auth_object)
-    ssh_client.exec_command(create_auth_cmd)
-
-    # Pull and run the buildserver
-    pull_cmd = 'docker pull quay.io/quay/buildserver'
-    _, stdout, _ = ssh_client.exec_command(pull_cmd)
-    pull_status = stdout.channel.recv_exit_status()
-
-    if pull_status != 0:
-      logger.error('Pull command failed for host: %s' % droplet.ip_address)
-      return False
-    else:
-      logger.debug('Pull status was: %s' % pull_status)
-
-    # Remove the credentials we used to pull so crafty users can't steal them
-    remove_auth_cmd = 'rm .dockercfg'
-    ssh_client.exec_command(remove_auth_cmd)
-
-    # Prepare the signed resource url the build node can fetch the job from
-    user_files = app.config['USERFILES']
+user_files = app.config['USERFILES']
+build_logs = app.config['BUILDLOGS']
+
+
+class StatusWrapper(object):
+  def __init__(self, build_uuid):
+    self._uuid = build_uuid
+    self._status = {
+      'total_commands': None,
+      'current_command': None,
+      'push_completion': 0.0,
+      'image_completion': {},
+    }
+
+    self.__exit__(None, None, None)
+
+  def __enter__(self):
+    return self._status
+
+  def __exit__(self, exc_type, value, traceback):
+    build_logs.set_status(self._uuid, self._status)
+
+
+class DockerfileBuildContext(object):
+  def __init__(self, build_context_dir, tag_name, push_token, build_uuid):
+    self._build_dir = build_context_dir
+    self._tag_name = tag_name
+    self._push_token = push_token
+    self._cl = Client(timeout=1200, version='1.7')
+    self._status = StatusWrapper(build_uuid)
+    self._build_logger = partial(build_logs.append_log_message, build_uuid)
+
+    dockerfile_path = os.path.join(self._build_dir, "Dockerfile")
+    self._num_steps = DockerfileBuildContext.__count_steps(dockerfile_path)
+
+    logger.debug('Will build and push to tag named: %s' % self._tag_name)
+
+  def __enter__(self):
+    return self
+
+  def __exit__(self, exc_type, value, traceback):
+    self.__cleanup()
+    shutil.rmtree(self._build_dir)
+
+  @staticmethod
+  def __count_steps(dockerfile_path):
+    with open(dockerfile_path, 'r') as dockerfileobj:
+      steps = 0
+      for line in dockerfileobj.readlines():
+        stripped = line.strip()
+        if stripped and stripped[0] != '#':
+          steps += 1
+    return steps
+
+  @staticmethod
+  def __total_completion(statuses, total_images):
+    percentage_with_sizes = float(len(statuses.values()))/total_images
+    sent_bytes = sum([status[u'current'] for status in statuses.values()])
+    total_bytes = sum([status[u'total'] for status in statuses.values()])
+    return float(sent_bytes)/total_bytes*percentage_with_sizes
+
+  def build(self):
+    logger.debug('Starting build.')
+
+    with self._status as status:
+      status['total_commands'] = self._num_steps
+
+    logger.debug('Building to tag named: %s' % self._tag_name)
+    build_status = self._cl.build(path=self._build_dir, tag=self._tag_name,
+                                  stream=True)
+
+    current_step = 0
+    built_image = None
+    for status in build_status:
+      status_str = str(status.encode('utf-8'))
+      logger.debug('Status: %s', status_str)
+      step_increment = re.search(r'Step ([0-9]+) :', status)
+      if step_increment:
+        self._build_logger(status_str, build_logs.COMMAND)
+        current_step = int(step_increment.group(1))
+        logger.debug('Step now: %s/%s' % (current_step, self._num_steps))
+        with self._status as status:
+          status['current_command'] = current_step
+        continue
+      else:
+        self._build_logger(status_str)
+
+      complete = re.match(r'Successfully built ([a-z0-9]+)$', status)
+      if complete:
+        built_image = complete.group(1)
+        logger.debug('Final image ID is: %s' % built_image)
+        continue
+
+    # Get the image count
+    if not built_image:
+      return
+
+    return built_image
+
+  def push(self, built_image):
+    # Login to the registry
+    host = re.match(r'([a-z0-9.:]+)/.+/.+$', self._tag_name)
+    if not host:
+      raise RuntimeError('Invalid tag name: %s' % self._tag_name)
+
+    for protocol in ['https', 'http']:
+      registry_endpoint = '%s://%s/v1/' % (protocol, host.group(1))
+      logger.debug('Attempting login to registry: %s' % registry_endpoint)
+
+      try:
+        self._cl.login('$token', self._push_token, registry=registry_endpoint)
+        break
+      except APIError:
+        pass  # Probably the wrong protocol
+
+    history = json.loads(self._cl.history(built_image))
+    num_images = len(history)
+    with self._status as status:
+      status['total_images'] = num_images
+
+    logger.debug('Pushing to tag name: %s' % self._tag_name)
+    resp = self._cl.push(self._tag_name, stream=True)
+
+    for status_str in resp:
+      status = json.loads(status_str)
+      logger.debug('Status: %s', status_str)
+      if u'status' in status:
+        status_msg = status[u'status']
+
+        if status_msg == 'Pushing':
+          if u'progressDetail' in status and u'id' in status:
+            image_id = status[u'id']
+            detail = status[u'progressDetail']
+
+            if u'current' in detail and 'total' in detail:
+              with self._status as status:
+                images = status['image_completion']
+
+                images[image_id] = detail
+                status['push_completion'] = \
+                  DockerfileBuildContext.__total_completion(images, num_images)
+
+      elif u'errorDetail' in status:
+        message = 'Error pushing image.'
+        if u'message' in status[u'errorDetail']:
+          message = str(status[u'errorDetail'][u'message'])
+        raise RuntimeError(message)
+
+  def __cleanup(self):
+    # First clean up any containers that might be holding the images
+    for running in self._cl.containers(quiet=True):
+      logger.debug('Killing container: %s' % running['Id'])
+      self._cl.kill(running['Id'])
+
+    # Next, remove all of the containers (which should all now be killed)
+    for container in self._cl.containers(all=True, quiet=True):
+      logger.debug('Removing container: %s' % container['Id'])
+      self._cl.remove_container(container['Id'])
+
+    # Iterate all of the images and remove the ones that the public registry
+    # doesn't know about, this should preserve base images.
+    images_to_remove = set()
+    repos = set()
+    for image in self._cl.images():
+      images_to_remove.add(image['Id'])
+
+      for tag in image['RepoTags']:
+        tag_repo = tag.split(':')[0]
+        if tag_repo != '<none>':
+          repos.add(tag_repo)
+
+    for repo in repos:
+      repo_url = 'https://index.docker.io/v1/repositories/%s/images' % repo
+      repo_info = requests.get(repo_url)
+      if repo_info.status_code / 100 == 2:
+        for repo_image in repo_info.json():
+          if repo_image['id'] in images_to_remove:
+            logger.debug('Image was deemed public: %s' % repo_image['id'])
+            images_to_remove.remove(repo_image['id'])
+
+    for to_remove in images_to_remove:
+      logger.debug('Removing private image: %s' % to_remove)
+      try:
+        self._cl.remove_image(to_remove)
+      except APIError:
+        # Sometimes an upstream image removed this one
+        pass
+
+    # Verify that our images were actually removed
+    for image in self._cl.images():
+      if image['Id'] in images_to_remove:
+        raise RuntimeError('Image was not removed: %s' % image['Id'])
+
+
+class DockerfileBuildWorker(Worker):
+  def __init__(self, *vargs, **kwargs):
+    super(DockerfileBuildWorker, self).__init__(*vargs, **kwargs)
+
+    self._mime_processors = {
+      'application/zip': DockerfileBuildWorker.__prepare_zip,
+      'text/plain': DockerfileBuildWorker.__prepare_dockerfile,
+      'application/octet-stream': DockerfileBuildWorker.__prepare_dockerfile,
+    }
+
+  @staticmethod
+  def __prepare_zip(request_file):
+    build_dir = mkdtemp(prefix='docker-build-')
+
+    # Save the zip file to temp somewhere
+    with TemporaryFile() as zip_file:
+      zip_file.write(request_file.content)
+      to_extract = ZipFile(zip_file)
+      to_extract.extractall(build_dir)
+
+    return build_dir
+
+  @staticmethod
+  def __prepare_dockerfile(request_file):
+    build_dir = mkdtemp(prefix='docker-build-')
+    dockerfile_path = os.path.join(build_dir, "Dockerfile")
+    with open(dockerfile_path, 'w') as dockerfile:
+      dockerfile.write(request_file.content)
+
+    return build_dir
+
+  def process_queue_item(self, job_details):
+    repository_build = model.get_repository_build(job_details['namespace'],
+                                                  job_details['repository'],
+                                                  job_details['build_uuid'])
    resource_url = user_files.get_file_url(repository_build.resource_key)
+    tag_name = repository_build.tag
+    access_token = repository_build.access_token.code

-    # Start the build server
-    start_cmd = BUILD_SERVER_CMD % (resource_url, repository_build.tag,
-                                    repository_build.access_token.code)
-    logger.debug('Sending build server request with command: %s' % start_cmd)
-    ssh_client.exec_command(start_cmd)
-
-    status_endpoint = 'http://%s:5002/build/' % droplet.ip_address
-    # wait for the server to be ready
-    logger.debug('Waiting for buildserver to be ready')
-    retry_command(requests.get, [status_endpoint])
-
-    # wait for the job to be complete
+    log_appender = partial(build_logs.append_log_message,
+                           repository_build.uuid)
+
+    log_appender('initializing', build_logs.PHASE)
+
+    start_msg = ('Starting job with resource url: %s tag: %s' % (resource_url,
+                                                                 tag_name))
+    logger.debug(start_msg)
+    log_appender(start_msg)
+
+    docker_resource = requests.get(resource_url)
+    c_type = docker_resource.headers['content-type']
+
+    filetype_msg = ('Request to build file of type: %s with tag: %s' %
+                    (c_type, tag_name))
+    logger.info(filetype_msg)
+    log_appender(filetype_msg)
+
+    if c_type not in self._mime_processors:
+      raise RuntimeError('Invalid dockerfile content type: %s' % c_type)
+
+    build_dir = self._mime_processors[c_type](docker_resource)
+
+    log_appender('building', build_logs.PHASE)
    repository_build.phase = 'building'
-    repository_build.status_url = status_endpoint
    repository_build.save()

-    logger.debug('Waiting for job to be complete')
-    status = get_status(status_endpoint)
-    while status != 'error' and status != 'complete':
-      logger.debug('Job status is: %s' % status)
-      time.sleep(5)
-      status = get_status(status_endpoint)
-
-    logger.debug('Job complete with status: %s' % status)
-    if status == 'error':
-      error_message = requests.get(status_endpoint).json()['message']
-      logger.warning('Job error: %s' % error_message)
-      repository_build.phase = 'error'
-    else:
-      repository_build.phase = 'complete'
-
-    # clean up the DO node
-    logger.debug('Cleaning up DO node.')
-    retry_command(droplet.destroy)
-
-    repository_build.status_url = None
-    repository_build.build_node_id = None
-    repository_build.save()
-
+    with DockerfileBuildContext(build_dir, tag_name, access_token,
+                                repository_build.uuid) as build_ctxt:
+      try:
+        built_image = build_ctxt.build()
+
+        if not built_image:
+          log_appender('error', build_logs.PHASE)
+          repository_build.phase = 'error'
+          repository_build.save()
+          log_appender('Unable to build dockerfile.', build_logs.ERROR)
+          return False
+
+        log_appender('pushing', build_logs.PHASE)
+        repository_build.phase = 'pushing'
+        repository_build.save()
+
+        build_ctxt.push(built_image)
+
+        log_appender('complete', build_logs.PHASE)
+        repository_build.phase = 'complete'
+        repository_build.save()
+      except Exception as exc:
+        log_appender('error', build_logs.PHASE)
+        logger.exception('Exception when processing request.')
+        repository_build.phase = 'error'
+        repository_build.save()
+        log_appender(str(exc), build_logs.ERROR)
+        return False
+
    return True
-
-  except Exception as outer_ex:
-    # We don't really know what these are, but they are probably retryable
-    logger.exception('Exception processing job: %s' % outer_ex.message)
-    return False
-
-  finally:
-    if not db_connection.is_closed():
-      logger.debug('Closing thread db connection.')
-      db_connection.close()
-
-
-def process_work_items(pool):
-  logger.debug('Getting work item from queue.')
-
-  item = dockerfile_build_queue.get(processing_time=60*60)  # allow 1 hr
-
-  while item:
-    logger.debug('Queue gave us some work: %s' % item.body)
-
-    request = json.loads(item.body)
-
-    def build_callback(item):
-      local_item = item
-      def complete_callback(completed):
-        if completed:
-          logger.debug('Queue item completed successfully, will be removed.')
-          dockerfile_build_queue.complete(local_item)
-        else:
-          # We have a retryable error, add the job back to the queue
-          logger.debug('Queue item incomplete, will be retried.')
-          dockerfile_build_queue.incomplete(local_item)
-
-      return complete_callback
-
-    logger.debug('Sending work item to thread pool: %s' % pool)
-    pool.apply_async(babysit_builder, [request],
-                     callback=build_callback(item))
-
-    item = dockerfile_build_queue.get()
-
-  logger.debug('No more work.')
-
-  if not db_connection.is_closed():
-    logger.debug('Closing thread db connection.')
-    db_connection.close()
-
-
-def start_worker():
-  pool = ThreadPool(3)
-  logger.debug('Scheduling worker.')
-
-  sched = Scheduler()
-  sched.start()
-
-  sched.add_interval_job(process_work_items, args=[pool], seconds=30)
-
-  while True:
-    time.sleep(60 * 60 * 24)  # sleep one day, basically forever

desc = 'Worker daemon to monitor dockerfile build'
parser = argparse.ArgumentParser(description=desc)

@@ -264,16 +330,17 @@ parser.add_argument('--log', default='dockerfilebuild.log',
args = parser.parse_args()

+worker = DockerfileBuildWorker(dockerfile_build_queue)
+
if args.D:
  handler = logging.FileHandler(args.log)
  handler.setFormatter(formatter)
  root_logger.addHandler(handler)
-  with daemon.DaemonContext(files_preserve=[handler.stream],
-                            working_directory=os.getcwd()):
-    start_worker()
+  with daemon.DaemonContext(files_preserve=[handler.stream]):
+    worker.start()
else:
  handler = logging.StreamHandler()
  handler.setFormatter(formatter)
  root_logger.addHandler(handler)
-  start_worker()
+  worker.start()
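One design note on the new worker: `StatusWrapper` is a small context manager whose `__exit__` persists the status dict, and whose `__init__` invokes `__exit__` directly so an initial, empty status is published the moment a build starts. A minimal standalone sketch of the same pattern, with a stand-in `publish` callback in place of the real `build_logs.set_status`:

```python
class StatusPublisher(object):
  """Context manager that flushes a mutable status dict every time a
     'with' block exits (sketch of the StatusWrapper pattern above)."""

  def __init__(self, build_uuid, publish):
    self._uuid = build_uuid
    self._publish = publish  # stand-in for build_logs.set_status
    self._status = {'total_commands': None, 'current_command': None}
    self.__exit__(None, None, None)  # publish the initial, empty status

  def __enter__(self):
    return self._status

  def __exit__(self, exc_type, value, traceback):
    self._publish(self._uuid, self._status)


# Every mutation made inside a 'with' block is persisted on exit.
published = []
status = StatusPublisher('some-uuid', lambda uuid, s: published.append(dict(s)))
with status as s:
  s['current_command'] = 3
assert published[-1]['current_command'] == 3
```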