diff --git a/application.py b/application.py
index 0923f5d40..2d6660866 100644
--- a/application.py
+++ b/application.py
@@ -1,5 +1,4 @@
 import logging
-import os
 
 from app import app as application
 from data.model import db as model_db
@@ -21,10 +20,6 @@ from endpoints.realtime import realtime
 
 logger = logging.getLogger(__name__)
 
-if application.config.get('INCLUDE_TEST_ENDPOINTS', False):
-  logger.debug('Loading test endpoints.')
-  import endpoints.test
-
 application.register_blueprint(web)
 application.register_blueprint(index, url_prefix='/v1')
 application.register_blueprint(tags, url_prefix='/v1')
diff --git a/binary_dependencies/builder/linux-headers-3.11.0-17-generic_3.11.0-17.28_amd64.deb b/binary_dependencies/builder/linux-headers-3.11.0-17-generic_3.11.0-17.28_amd64.deb
new file mode 100644
index 000000000..b69f98a44
Binary files /dev/null and b/binary_dependencies/builder/linux-headers-3.11.0-17-generic_3.11.0-17.28_amd64.deb differ
diff --git a/binary_dependencies/builder/linux-headers-3.11.0-17_3.11.0-17.28_all.deb b/binary_dependencies/builder/linux-headers-3.11.0-17_3.11.0-17.28_all.deb
new file mode 100644
index 000000000..c9e530479
Binary files /dev/null and b/binary_dependencies/builder/linux-headers-3.11.0-17_3.11.0-17.28_all.deb differ
diff --git a/binary_dependencies/builder/linux-image-3.11.0-17-generic_3.11.0-17.28_amd64.deb b/binary_dependencies/builder/linux-image-3.11.0-17-generic_3.11.0-17.28_amd64.deb
new file mode 100644
index 000000000..cf0dce064
Binary files /dev/null and b/binary_dependencies/builder/linux-image-3.11.0-17-generic_3.11.0-17.28_amd64.deb differ
diff --git a/binary_dependencies/builder/linux-image-extra-3.11.0-17-generic_3.11.0-17.28_amd64.deb b/binary_dependencies/builder/linux-image-extra-3.11.0-17-generic_3.11.0-17.28_amd64.deb
new file mode 100644
index 000000000..bdee3f6af
Binary files /dev/null and b/binary_dependencies/builder/linux-image-extra-3.11.0-17-generic_3.11.0-17.28_amd64.deb differ
diff --git a/binary_dependencies/builder/lxc-docker-0.8.0-tutum_0.8.0-tutum-20140212002736-afad5c0-dirty_amd64.deb b/binary_dependencies/builder/lxc-docker-0.8.0-tutum_0.8.0-tutum-20140212002736-afad5c0-dirty_amd64.deb
new file mode 100644
index 000000000..e8db59fd5
Binary files /dev/null and b/binary_dependencies/builder/lxc-docker-0.8.0-tutum_0.8.0-tutum-20140212002736-afad5c0-dirty_amd64.deb differ
diff --git a/binary_dependencies/builder/nsexec_1.22ubuntu1trusty1_amd64.deb b/binary_dependencies/builder/nsexec_1.22ubuntu1trusty1_amd64.deb
new file mode 100644
index 000000000..e78b16986
Binary files /dev/null and b/binary_dependencies/builder/nsexec_1.22ubuntu1trusty1_amd64.deb differ
diff --git a/config.py b/config.py
index 8c1844821..74be8b664 100644
--- a/config.py
+++ b/config.py
@@ -1,5 +1,4 @@
 import logging
-import os
 import logstash_formatter
 
 from peewee import MySQLDatabase, SqliteDatabase
@@ -12,6 +11,7 @@ from util import analytics
 
 from test.teststorage import FakeStorage, FakeUserfiles
 from test import analytics as fake_analytics
+from test.testlogs import TestBuildLogs
 
 
 class FlaskConfig(object):
@@ -96,6 +96,11 @@ class UserEventConfig(object):
   USER_EVENTS = UserEventBuilder('logs.quay.io')
 
 
+class TestBuildLogs(object):
+  BUILDLOGS = TestBuildLogs('logs.quay.io', 'devtable', 'building',
+                            'deadbeef-dead-beef-dead-beefdeadbeef')
+
+
 class StripeTestConfig(object):
   STRIPE_SECRET_KEY = 'sk_test_PEbmJCYrLXPW0VRLSnWUiZ7Y'
   STRIPE_PUBLISHABLE_KEY = 'pk_test_uEDHANKm9CHCvVa2DLcipGRh'
@@ -145,13 +150,13 @@ class BuildNodeConfig(object):
   BUILD_NODE_PULL_TOKEN = 'F02O2E86CQLKZUQ0O81J8XDHQ6F0N1V36L9JTOEEK6GKKMT1GI8PTJQT4OU88Y6G'
 
 
-def logs_init_builder(level=logging.DEBUG):
+def logs_init_builder(level=logging.DEBUG,
+                      formatter=logstash_formatter.LogstashFormatter()):
   @staticmethod
   def init_logs():
     handler = logging.StreamHandler()
     root_logger = logging.getLogger('')
     root_logger.setLevel(level)
-    formatter = logstash_formatter.LogstashFormatter()
     handler.setFormatter(formatter)
     root_logger.addHandler(handler)
 
@@ -164,17 +169,15 @@ class TestConfig(FlaskConfig, FakeStorage, EphemeralDB, FakeUserfiles,
   LOGGING_CONFIG = logs_init_builder(logging.WARN)
   POPULATE_DB_TEST_DATA = True
   TESTING = True
-  INCLUDE_TEST_ENDPOINTS = True
 
 
 class DebugConfig(FlaskConfig, MailConfig, LocalStorage, SQLiteDB,
                   StripeTestConfig, MixpanelTestConfig, GitHubTestConfig,
                   DigitalOceanConfig, BuildNodeConfig, S3Userfiles,
-                  RedisBuildLogs, UserEventConfig):
-  LOGGING_CONFIG = logs_init_builder()
+                  RedisBuildLogs, UserEventConfig, TestBuildLogs):
+  LOGGING_CONFIG = logs_init_builder(formatter=logging.Formatter())
   SEND_FILE_MAX_AGE_DEFAULT = 0
   POPULATE_DB_TEST_DATA = True
-  INCLUDE_TEST_ENDPOINTS = True
 
 
 class LocalHostedConfig(FlaskConfig, MailConfig, S3Storage, RDSMySQL,
diff --git a/data/buildlogs.py b/data/buildlogs.py
index ff09934f7..bb96ac7dc 100644
--- a/data/buildlogs.py
+++ b/data/buildlogs.py
@@ -3,6 +3,10 @@ import json
 
 
 class BuildLogs(object):
+  ERROR = 'error'
+  COMMAND = 'command'
+  PHASE = 'phase'
+
   def __init__(self, redis_host):
     self._redis = redis.StrictRedis(host=redis_host)
 
@@ -17,24 +21,27 @@ class BuildLogs(object):
     """
     return self._redis.rpush(self._logs_key(build_id), json.dumps(log_obj))
 
-  def append_log_message(self, build_id, log_message):
+  def append_log_message(self, build_id, log_message, log_type=None):
     """
     Wraps the message in an envelope and push it to the end of the log entry
-    list and returns the new length of the list.
+    list and returns the index at which it was inserted.
     """
     log_obj = {
      'message': log_message
     }
-    return self._redis.rpush(self._logs_key(build_id), json.dumps(log_obj))
 
-  def get_log_entries(self, build_id, start_index, end_index):
+    if log_type:
+      log_obj['type'] = log_type
+
+    return self._redis.rpush(self._logs_key(build_id), json.dumps(log_obj)) - 1
+
+  def get_log_entries(self, build_id, start_index):
     """
     Returns a tuple of the current length of the list and an iterable of the
-    requested log entries. End index is inclusive.
+    requested log entries.
""" llen = self._redis.llen(self._logs_key(build_id)) - log_entries = self._redis.lrange(self._logs_key(build_id), start_index, - end_index) + log_entries = self._redis.lrange(self._logs_key(build_id), start_index, -1) return (llen, (json.loads(entry) for entry in log_entries)) @staticmethod diff --git a/data/database.py b/data/database.py index 3c5fcf422..86a797bed 100644 --- a/data/database.py +++ b/data/database.py @@ -215,6 +215,8 @@ class RepositoryBuild(BaseModel): resource_key = CharField() tag = CharField() phase = CharField(default='waiting') + started = DateTimeField(default=datetime.now) + display_name = CharField() class QueueItem(BaseModel): diff --git a/data/model.py b/data/model.py index a27036d3a..f6dd75965 100644 --- a/data/model.py +++ b/data/model.py @@ -1309,9 +1309,11 @@ def list_repository_builds(namespace_name, repository_name, return fetched -def create_repository_build(repo, access_token, resource_key, tag): +def create_repository_build(repo, access_token, resource_key, tag, + display_name): return RepositoryBuild.create(repository=repo, access_token=access_token, - resource_key=resource_key, tag=tag) + resource_key=resource_key, tag=tag, + display_name=display_name) def create_webhook(repo, params_obj): diff --git a/data/queue.py b/data/queue.py index ef0026e52..cf0acd898 100644 --- a/data/queue.py +++ b/data/queue.py @@ -64,5 +64,5 @@ class WorkQueue(object): image_diff_queue = WorkQueue('imagediff') -dockerfile_build_queue = WorkQueue('dockerfilebuild') +dockerfile_build_queue = WorkQueue('dockerfilebuild2') webhook_queue = WorkQueue('webhook') diff --git a/data/userfiles.py b/data/userfiles.py index 86ddb62a2..c2a8bc63c 100644 --- a/data/userfiles.py +++ b/data/userfiles.py @@ -59,3 +59,9 @@ class UserRequestFiles(object): full_key = os.path.join(self._prefix, file_id) k = Key(self._bucket, full_key) return k.generate_url(expires_in) + + def get_file_checksum(self, file_id): + self._initialize_s3() + full_key = os.path.join(self._prefix, file_id) + k = self._bucket.lookup(full_key) + return k.etag[1:-1][:7] diff --git a/endpoints/api.py b/endpoints/api.py index 7022a97dd..92f0f5472 100644 --- a/endpoints/api.py +++ b/endpoints/api.py @@ -70,7 +70,7 @@ def get_route_data(): routes = [] for rule in app.url_map.iter_rules(): if rule.endpoint.startswith('api.'): - endpoint_method = globals()[rule.endpoint[4:]] # Remove api. 
+      endpoint_method = app.view_functions[rule.endpoint]
       is_internal = '__internal_call' in dir(endpoint_method)
       is_org_api = '__user_call' in dir(endpoint_method)
       methods = list(rule.methods.difference(['HEAD', 'OPTIONS']))
@@ -1154,6 +1154,8 @@ def build_status_view(build_obj):
   return {
     'id': build_obj.uuid,
     'phase': build_obj.phase,
+    'started': build_obj.started,
+    'display_name': build_obj.display_name,
     'status': status,
   }
 
@@ -1191,25 +1193,22 @@ def get_repo_build_status(namespace, repository, build_uuid):
 def get_repo_build_logs(namespace, repository, build_uuid):
   permission = ModifyRepositoryPermission(namespace, repository)
   if permission.can():
+    response_obj = {}
+
     build = model.get_repository_build(namespace, repository, build_uuid)
 
-    start = int(request.args.get('start', -1000))
-    end = int(request.args.get('end', -1))
-    count, logs = build_logs.get_log_entries(build.uuid, start, end)
+    start = int(request.args.get('start', 0))
 
-    if start < 0:
-      start = max(0, count + start)
+    count, logs = build_logs.get_log_entries(build.uuid, start)
 
-    if end < 0:
-      end = count + end
-
-    return jsonify({
+    response_obj.update({
      'start': start,
-      'end': end,
      'total': count,
      'logs': [log for log in logs],
     })
 
+    return jsonify(response_obj)
+
   abort(403) # Permission denied
 
 
@@ -1224,11 +1223,13 @@ def request_repo_build(namespace, repository):
     repo = model.get_repository(namespace, repository)
     token = model.create_access_token(repo, 'write')
 
+    display_name = user_files.get_file_checksum(dockerfile_id)
+    logger.debug('**********Md5: %s' % display_name)
     host = urlparse.urlparse(request.url).netloc
     tag = '%s/%s/%s' % (host, repo.namespace, repo.name)
     build_request = model.create_repository_build(repo, token, dockerfile_id,
-                                                  tag)
+                                                  tag, display_name)
 
     dockerfile_build_queue.put(json.dumps({
      'build_uuid': build_request.uuid,
      'namespace': namespace,
diff --git a/endpoints/test.py b/endpoints/test.py
deleted file mode 100644
index bbd9e286a..000000000
--- a/endpoints/test.py
+++ /dev/null
@@ -1,61 +0,0 @@
-import math
-
-from random import SystemRandom
-from flask import jsonify
-from app import app
-
-
-def generate_image_completion(rand_func):
-  images = {}
-  for image_id in range(rand_func.randint(1, 11)):
-    total = int(math.pow(abs(rand_func.gauss(0, 1000)), 2))
-    current = rand_func.randint(0, total)
-    image_id = 'image_id_%s' % image_id
-    images[image_id] = {
-      'total': total,
-      'current': current,
-    }
-  return images
-
-
-@app.route('/test/build/status', methods=['GET'])
-def generate_random_build_status():
-  response = {
-    'id': 1,
-    'total_commands': None,
-    'current_command': None,
-    'push_completion': 0.0,
-    'status': None,
-    'message': None,
-    'image_completion': {},
-  }
-
-  random = SystemRandom()
-  phases = {
-    'waiting': {},
-    'starting': {
-      'total_commands': 7,
-      'current_command': 0,
-    },
-    'initializing': {},
-    'error': {
-      'message': 'Oops!'
-    },
-    'complete': {},
-    'building': {
-      'total_commands': 7,
-      'current_command': random.randint(1, 7),
-    },
-    'pushing': {
-      'total_commands': 7,
-      'current_command': 7,
-      'push_completion': random.random(),
-      'image_completion': generate_image_completion(random),
-    },
-  }
-
-  phase = random.choice(phases.keys())
-  response['status'] = phase
-  response.update(phases[phase])
-
-  return jsonify(response)
diff --git a/initdb.py b/initdb.py
index 0cdb5f91a..cb29d5246 100644
--- a/initdb.py
+++ b/initdb.py
@@ -275,6 +275,13 @@ def populate_database():
                         'Empty repository which is building.',
                         False, [], (0, [], None))
 
+  token = model.create_access_token(building, 'write')
+  tag = 'ci.devtable.com:5000/%s/%s' % (building.namespace, building.name)
+  build = model.create_repository_build(building, token, '123-45-6789', tag,
+                                        'build-name')
+  build.uuid = 'deadbeef-dead-beef-dead-beefdeadbeef'
+  build.save()
+
   org = model.create_organization('buynlarge', 'quay@devtable.com',
                                   new_user_1)
   org.stripe_id = TEST_STRIPE_ID
@@ -298,19 +305,11 @@ def populate_database():
   model.add_user_to_team(new_user_2, reader_team)
   model.add_user_to_team(reader, reader_team)
 
-  token = model.create_access_token(building, 'write')
-  tag = 'ci.devtable.com:5000/%s/%s' % (building.namespace, building.name)
-  build = model.create_repository_build(building, token, '123-45-6789', tag)
-
-  build.build_node_id = 1
-  build.phase = 'building'
-  build.status_url = 'http://localhost:5000/test/build/status'
-  build.save()
-
   __generate_repository(new_user_1, 'superwide', None, False, [],
                         [(10, [], 'latest2'),
                          (2, [], 'latest3'),
-                         (2, [(1, [], 'latest11'), (2, [], 'latest12')], 'latest4'),
+                         (2, [(1, [], 'latest11'), (2, [], 'latest12')],
+                          'latest4'),
                          (2, [], 'latest5'),
                          (2, [], 'latest6'),
                          (2, [], 'latest7'),
diff --git a/requirements-nover.txt b/requirements-nover.txt
index d582eae27..5b1ef8841 100644
--- a/requirements-nover.txt
+++ b/requirements-nover.txt
@@ -21,4 +21,5 @@ xhtml2pdf
 logstash_formatter
 redis
 hiredis
-git+https://github.com/dotcloud/docker-py.git
\ No newline at end of file
+git+https://github.com/dotcloud/docker-py.git
+loremipsum
\ No newline at end of file
diff --git a/requirements.txt b/requirements.txt
index b14c07bd6..8d9670b67 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -23,6 +23,7 @@ html5lib==1.0b3
 itsdangerous==0.23
 lockfile==0.9.1
 logstash-formatter==0.5.8
+loremipsum==1.0.2
 marisa-trie==0.5.1
 mixpanel-py==3.1.1
 mock==1.0.1
diff --git a/static/css/quay.css b/static/css/quay.css
index d8a5898e5..42be7e72d 100644
--- a/static/css/quay.css
+++ b/static/css/quay.css
@@ -507,35 +507,103 @@ i.toggle-icon:hover {
   color: #428bca;
 }
 
-.status-boxes .popover {
-  margin-right: 20px;
+.status-box a {
+  padding: 6px;
+  color: black;
 }
 
-.status-boxes .popover-content {
-  width: 260px;
+.status-box a b {
+  margin-right: 10px;
 }
 
-.build-statuses {
+.build-info {
+  margin: 4px;
+  padding: 4px;
+  margin-left: 6px;
+  margin-right: 6px;
+  border-bottom: 1px solid #eee;
+}
+
+.build-info.clickable:hover {
+  background: rgba(66, 139, 202, 0.2);
+  cursor: pointer;
+  border-radius: 4px;
+}
+
+
+.build-info:last-child {
+  border-bottom: 0px;
+}
+
+.phase-icon {
+  border-radius: 50%;
+  display: inline-block;
+  width: 12px;
+  height: 12px;
+  margin-right: 6px;
+}
+
+.active .build-tab-link .phase-icon {
+  box-shadow: 0px 0px 10px #FFFFFF, 0px 0px 10px #FFFFFF;
+}
+
+.build-status .phase-icon {
+  margin-top: 4px;
+  float: left;
+}
+
+.phase-icon.error {
+  background-color: red;
+}
+
+.phase-icon.waiting,
.phase-icon.starting, .phase-icon.initializing { + background-color: #ddd; +} + +.phase-icon.building { + background-color: #f0ad4e; +} + +.phase-icon.pushing { + background-color: #5cb85c; +} + +.phase-icon.complete { + background-color: #428bca; +} + +.build-status { + display: inline-block; } .build-status-container { padding: 4px; margin-bottom: 10px; border-bottom: 1px solid #eee; - width: 230px; + width: 350px; } .build-status-container .build-message { display: block; white-space: nowrap; - font-size: 12px; + font-size: 14px; + margin-bottom: 10px; + padding-bottom: 10px; + border-bottom: 1px solid #eee; + margin-left: 20px; } .build-status-container .progress { - height: 12px; + height: 10px; margin: 0px; margin-top: 10px; - width: 230px; + margin-left: 20px; + width: 310px; +} + +.build-status-container .timing { + margin-left: 20px; + margin-top: 6px; } .build-status-container:last-child { @@ -1633,6 +1701,185 @@ p.editable:hover i { padding-left: 44px; } + +.repo-build .build-id:before { + content: "Build ID: " +} + +.repo-build .build-id { + float: right; + font-size: 12px; + color: #aaa; + padding: 10px; +} + +.repo-build .build-pane .timing { + float: right; +} + +.repo-build .build-tab-link { + white-space: nowrap; +} + +.repo-build .build-pane .build-header { + padding-top: 10px; + border-bottom: 1px solid #eee; + padding-bottom: 10px; +} + +.repo-build .build-pane .build-progress { + margin-top: 16px; + margin-bottom: 10px; +} + +.repo-build .build-pane .build-progress .progress { + height: 14px; + margin-bottom: 0px; +} + +.repo-build .build-pane .quay-spinner { + margin-top: 4px; + display: inline-block; +} + +.repo-build .build-pane .build-logs { + background: #222; + color: white; + padding: 10px; + overflow: auto; +} + +.repo-build .build-pane .build-logs .container-header { + padding: 2px; +} + +.repo-build .build-pane .build-logs .container-logs { + margin: 4px; + padding-bottom: 4px; +} + +.repo-build .build-pane .build-logs .command-title, +.repo-build .build-pane .build-logs .log-entry .message { + font-family: Consolas, "Lucida Console", Monaco, monospace; + font-size: 13px; +} + +.repo-build .build-pane .build-logs .container-header { + cursor: pointer; + position: relative; +} + +.repo-build .build-pane .build-logs .container-header i.fa.chevron { + color: #666; + margin-right: 4px; + width: 14px; + text-align: center; + + position: absolute; + top: 6px; + left: 0px; +} + +.repo-build .build-pane .build-logs .log-container.command { + margin-left: 42px; +} + +.repo-build .build-pane .build-logs .container-header.building { + margin-bottom: 10px; +} + +.repo-build .build-pane .build-logs .container-header.pushing { + margin-top: 10px; +} + +.repo-build .build-log-error-element { + position: relative; + display: inline-block; + margin: 10px; + padding: 10px; + background: rgba(255, 0, 0, 0.17); + border-radius: 10px; + margin-left: 22px; +} + +.repo-build .build-log-error-element i.fa { + color: red; + position: absolute; + top: 13px; + left: 11px; +} + +.repo-build .build-log-error-element .error-message { + display: inline-block; + margin-left: 25px; +} + +.repo-build .build-pane .build-logs .container-header .label { + padding-top: 4px; + text-align: right; + margin-right: 4px; + width: 86px; + display: inline-block; + + border-right: 4px solid #aaa; + background-color: #444; + + position: absolute; + top: 4px; + left: 24px; +} + +.repo-build .build-pane .build-logs .container-header .container-content { + display: block; + padding-left: 20px; +} + 
+.repo-build .build-pane .build-logs .container-header .container-content.build-log-command { + padding-left: 120px; +} + +.label.FROM { + border-color: #5bc0de !important; +} + +.label.CMD, .label.EXPOSE, .label.ENTRYPOINT { + border-color: #428bca !important; +} + +.label.RUN, .label.ADD { + border-color: #5cb85c !important; +} + +.label.ENV, .label.VOLUME, .label.USER, .label.WORKDIR { + border-color: #f0ad4e !important; +} + +.label.MAINTAINER { + border-color: #aaa !important; +} + +.repo-build .build-pane .build-logs .log-entry { + position: relative; +} + +.repo-build .build-pane .build-logs .log-entry .message { + display: inline-block; + margin-left: 46px; +} + +.repo-build .build-pane .build-logs .log-entry .id { + color: #aaa; + padding-right: 6px; + margin-right: 6px; + text-align: right; + font-size: 12px; + width: 40px; + + position: absolute; + top: 4px; + left: 4px; +} + .repo-admin .right-info { font-size: 11px; margin-top: 10px; @@ -1676,16 +1923,6 @@ p.editable:hover i { cursor: pointer; } -.repo .build-info { - padding: 10px; - margin: 0px; -} - -.repo .build-info .progress { - margin: 0px; - margin-top: 10px; -} - .repo .section { display: block; margin-bottom: 20px; @@ -2935,4 +3172,4 @@ pre.command:before { .about-basic-text { display: inline-block; -} \ No newline at end of file +} diff --git a/static/directives/build-log-command.html b/static/directives/build-log-command.html new file mode 100644 index 000000000..211667ee4 --- /dev/null +++ b/static/directives/build-log-command.html @@ -0,0 +1,6 @@ + + + + + diff --git a/static/directives/build-log-error.html b/static/directives/build-log-error.html new file mode 100644 index 000000000..095f8edd0 --- /dev/null +++ b/static/directives/build-log-error.html @@ -0,0 +1,4 @@ + + + + diff --git a/static/directives/build-log-phase.html b/static/directives/build-log-phase.html new file mode 100644 index 000000000..503593923 --- /dev/null +++ b/static/directives/build-log-phase.html @@ -0,0 +1,4 @@ + + + + diff --git a/static/directives/build-message.html b/static/directives/build-message.html new file mode 100644 index 000000000..17895dd28 --- /dev/null +++ b/static/directives/build-message.html @@ -0,0 +1 @@ +{{ getBuildMessage(phase) }} diff --git a/static/directives/build-progress.html b/static/directives/build-progress.html new file mode 100644 index 000000000..ac719d449 --- /dev/null +++ b/static/directives/build-progress.html @@ -0,0 +1,6 @@ +
+
+
+
+
+
diff --git a/static/directives/build-status.html b/static/directives/build-status.html index 8c27dba53..cf5ded997 100644 --- a/static/directives/build-status.html +++ b/static/directives/build-status.html @@ -1,8 +1,11 @@
- {{ getBuildMessage(build) }} -
-
-
+
+ +
- +
+ + Started: +
+
diff --git a/static/js/app.js b/static/js/app.js index 7cfe13e24..f014660a2 100644 --- a/static/js/app.js +++ b/static/js/app.js @@ -102,9 +102,8 @@ function getMarkedDown(string) { return Markdown.getSanitizingConverter().makeHtml(string || ''); } -// Start the application code itself. -quayApp = angular.module('quay', ['ngRoute', 'chieffancypants.loadingBar', 'angular-tour', 'restangular', 'angularMoment', 'angulartics', /*'angulartics.google.analytics',*/ 'angulartics.mixpanel', '$strap.directives', 'ngCookies', 'ngSanitize', 'angular-md5'], function($provide, cfpLoadingBarProvider) { - cfpLoadingBarProvider.includeSpinner = false; +quayApp = angular.module('quay', ['ngRoute', 'chieffancypants.loadingBar', 'angular-tour', 'restangular', 'angularMoment', 'angulartics', /*'angulartics.google.analytics',*/ 'angulartics.mixpanel', '$strap.directives', 'ngCookies', 'ngSanitize', 'angular-md5', 'pasvaz.bindonce'], function($provide, cfpLoadingBarProvider) { + cfpLoadingBarProvider.includeSpinner = false; $provide.factory('UtilService', ['$sanitize', function($sanitize) { var utilService = {}; @@ -151,7 +150,7 @@ quayApp = angular.module('quay', ['ngRoute', 'chieffancypants.loadingBar', 'angu $provide.factory('ApiService', ['Restangular', function(Restangular) { var apiService = {}; - var getResource = function(path) { + var getResource = function(path, opt_background) { var resource = {}; resource.url = path; resource.withOptions = function(options) { @@ -169,6 +168,12 @@ quayApp = angular.module('quay', ['ngRoute', 'chieffancypants.loadingBar', 'angu 'hasError': false }; + if (opt_background) { + performer.withHttpConfig({ + 'ignoreLoadingBar': true + }); + } + performer.get(options).then(function(resp) { result.value = processor(resp); result.loading = false; @@ -240,27 +245,33 @@ quayApp = angular.module('quay', ['ngRoute', 'chieffancypants.loadingBar', 'angu var buildMethodsForEndpoint = function(endpoint) { var method = endpoint.methods[0].toLowerCase(); var methodName = formatMethodName(endpoint['name']); - apiService[methodName] = function(opt_options, opt_parameters) { - return Restangular.one(buildUrl(endpoint['path'], opt_parameters))['custom' + method.toUpperCase()](opt_options); + apiService[methodName] = function(opt_options, opt_parameters, opt_background) { + var one = Restangular.one(buildUrl(endpoint['path'], opt_parameters)); + if (opt_background) { + one.withHttpConfig({ + 'ignoreLoadingBar': true + }); + } + return one['custom' + method.toUpperCase()](opt_options); }; if (method == 'get') { - apiService[methodName + 'AsResource'] = function(opt_parameters) { - return getResource(buildUrl(endpoint['path'], opt_parameters)); + apiService[methodName + 'AsResource'] = function(opt_parameters, opt_background) { + return getResource(buildUrl(endpoint['path'], opt_parameters), opt_background); }; } if (endpoint['user_method']) { - apiService[getGenericMethodName(endpoint['user_method'])] = function(orgname, opt_options, opt_parameters) { + apiService[getGenericMethodName(endpoint['user_method'])] = function(orgname, opt_options, opt_parameters, opt_background) { if (orgname) { if (orgname.name) { orgname = orgname.name; } - var params = jQuery.extend({'orgname' : orgname}, opt_parameters || {}); + var params = jQuery.extend({'orgname' : orgname}, opt_parameters || {}, opt_background); return apiService[methodName](opt_options, params); } else { - return apiService[formatMethodName(endpoint['user_method'])](opt_options, opt_parameters); + return 
apiService[formatMethodName(endpoint['user_method'])](opt_options, opt_parameters, opt_background); } }; } @@ -779,6 +790,7 @@ quayApp = angular.module('quay', ['ngRoute', 'chieffancypants.loadingBar', 'angu fixFooter: false}). when('/repository/:namespace/:name/image/:image', {templateUrl: '/static/partials/image-view.html', controller: ImageViewCtrl, reloadOnSearch: false}). when('/repository/:namespace/:name/admin', {templateUrl: '/static/partials/repo-admin.html', controller:RepoAdminCtrl, reloadOnSearch: false}). + when('/repository/:namespace/:name/build', {templateUrl: '/static/partials/repo-build.html', controller:RepoBuildCtrl, reloadOnSearch: false}). when('/repository/', {title: 'Repositories', description: 'Public and private docker repositories list', templateUrl: '/static/partials/repo-list.html', controller: RepoListCtrl}). when('/user/', {title: 'Account Settings', description:'Account settings for Quay.io', templateUrl: '/static/partials/user-admin.html', @@ -2471,6 +2483,119 @@ quayApp.directive('namespaceSelector', function () { }); +quayApp.directive('buildLogPhase', function () { + var directiveDefinitionObject = { + priority: 0, + templateUrl: '/static/directives/build-log-phase.html', + replace: false, + transclude: false, + restrict: 'C', + scope: { + 'phase': '=phase' + }, + controller: function($scope, $element) { + } + }; + return directiveDefinitionObject; +}); + + +quayApp.directive('buildLogError', function () { + var directiveDefinitionObject = { + priority: 0, + templateUrl: '/static/directives/build-log-error.html', + replace: false, + transclude: false, + restrict: 'C', + scope: { + 'error': '=error' + }, + controller: function($scope, $element) { + } + }; + return directiveDefinitionObject; +}); + + +quayApp.directive('buildLogCommand', function () { + var directiveDefinitionObject = { + priority: 0, + templateUrl: '/static/directives/build-log-command.html', + replace: false, + transclude: false, + restrict: 'C', + scope: { + 'command': '=command' + }, + controller: function($scope, $element, $sanitize) { + var registryHandlers = { + 'quay.io': function(pieces) { + var rnamespace = pieces[pieces.length - 2]; + var rname = pieces[pieces.length - 1]; + return '/repository/' + rnamespace + '/' + rname + '/'; + }, + + '': function(pieces) { + var rnamespace = pieces.length == 1 ? '_' : pieces[0]; + var rname = pieces[pieces.length - 1]; + return 'https://index.docker.io/u/' + rnamespace + '/' + rname + '/'; + } + }; + + var kindHandlers = { + 'FROM': function(title) { + var pieces = title.split('/'); + var registry = pieces.length < 3 ? 
'' : pieces[0]; + if (!registryHandlers[registry]) { + return title; + } + + return ' ' + title + ''; + } + }; + + $scope.getCommandKind = function(fullTitle) { + var colon = fullTitle.indexOf(':'); + var title = getTitleWithoutStep(fullTitle); + if (!title) { + return null; + } + + var space = title.indexOf(' '); + return title.substring(0, space); + }; + + $scope.getCommandTitleHtml = function(fullTitle) { + var title = getTitleWithoutStep(fullTitle) || fullTitle; + var space = title.indexOf(' '); + if (space <= 0) { + return $sanitize(title); + } + + var kind = $scope.getCommandKind(fullTitle); + var sanitized = $sanitize(title.substring(space + 1)); + + var handler = kindHandlers[kind || '']; + if (handler) { + return handler(sanitized); + } else { + return sanitized; + } + }; + + var getTitleWithoutStep = function(fullTitle) { + var colon = fullTitle.indexOf(':'); + if (colon <= 0) { + return null; + } + + return $.trim(fullTitle.substring(colon + 1)); + }; + } + }; + return directiveDefinitionObject; +}); + quayApp.directive('buildStatus', function () { var directiveDefinitionObject = { priority: 0, @@ -2482,55 +2607,85 @@ quayApp.directive('buildStatus', function () { 'build': '=build' }, controller: function($scope, $element) { - $scope.getBuildProgress = function(buildInfo) { - switch (buildInfo.phase) { - case 'building': - return (buildInfo.status.current_command / buildInfo.status.total_commands) * 100; - break; - - case 'pushing': - return buildInfo.status.push_completion * 100; - break; + } + }; + return directiveDefinitionObject; +}); - case 'complete': - return 100; - break; - case 'initializing': - case 'starting': - case 'waiting': - return 0; - break; - } +quayApp.directive('buildMessage', function () { + var directiveDefinitionObject = { + priority: 0, + templateUrl: '/static/directives/build-message.html', + replace: false, + transclude: false, + restrict: 'C', + scope: { + 'phase': '=phase' + }, + controller: function($scope, $element) { + $scope.getBuildMessage = function (phase) { + switch (phase) { + case 'starting': + case 'initializing': + return 'Starting Dockerfile build'; + + case 'waiting': + return 'Waiting for available build worker'; + + case 'building': + return 'Building image from Dockerfile'; + + case 'pushing': + return 'Pushing image built from Dockerfile'; - return -1; - }; + case 'complete': + return 'Dockerfile build completed and pushed'; + + case 'error': + return 'Dockerfile build failed'; + } + }; + } + }; + return directiveDefinitionObject; +}); - $scope.getBuildMessage = function(buildInfo) { - switch (buildInfo.phase) { - case 'initializing': - return 'Starting Dockerfile build'; - break; - case 'starting': - case 'waiting': - case 'building': - return 'Building image from Dockerfile'; - break; +quayApp.directive('buildProgress', function () { + var directiveDefinitionObject = { + priority: 0, + templateUrl: '/static/directives/build-progress.html', + replace: false, + transclude: false, + restrict: 'C', + scope: { + 'build': '=build' + }, + controller: function($scope, $element) { + $scope.getPercentage = function(buildInfo) { + switch (buildInfo.phase) { + case 'building': + return (buildInfo.status.current_command / buildInfo.status.total_commands) * 100; + break; + + case 'pushing': + return buildInfo.status.push_completion * 100; + break; - case 'pushing': - return 'Pushing image built from Dockerfile'; - break; + case 'complete': + return 100; + break; - case 'complete': - return 'Dockerfile build completed and pushed'; - break; - - 
case 'error': - return 'Dockerfile build failed.'; - break; - } - }; + case 'initializing': + case 'starting': + case 'waiting': + return 0; + break; + } + + return -1; + }; } }; return directiveDefinitionObject; @@ -2545,6 +2700,13 @@ quayApp.directive('ngBlur', function() { }; }); +quayApp.directive('ngVisible', function () { + return function (scope, element, attr) { + scope.$watch(attr.ngVisible, function (visible) { + element.css('visibility', visible ? 'visible' : 'hidden'); + }); + }; +}); quayApp.run(['$location', '$rootScope', 'Restangular', 'UserService', 'PlanService', '$http', '$timeout', function($location, $rootScope, Restangular, UserService, PlanService, $http, $timeout) { diff --git a/static/js/controllers.js b/static/js/controllers.js index 9df7bb774..679935a3e 100644 --- a/static/js/controllers.js +++ b/static/js/controllers.js @@ -326,6 +326,11 @@ function RepoCtrl($scope, $sanitize, Restangular, ImageMetadataService, ApiServi $scope.getFormattedCommand = ImageMetadataService.getFormattedCommand; + $scope.showBuild = function(buildInfo) { + $location.path('/repository/' + namespace + '/' + name + '/build'); + $location.search('current', buildInfo.id); + }; + $scope.getTooltipCommand = function(image) { var sanitized = ImageMetadataService.getEscapedFormattedCommand(image); return '' + sanitized + ''; @@ -653,13 +658,11 @@ function RepoCtrl($scope, $sanitize, Restangular, ImageMetadataService, ApiServi }; var getBuildInfo = function(repo) { - // Note: We use restangular manually here because we need to turn off the loading bar. - var buildInfo = Restangular.one('repository/' + repo.namespace + '/' + repo.name + '/build/'); - buildInfo.withHttpConfig({ - 'ignoreLoadingBar': true - }); + var params = { + 'repository': repo.namespace + '/' + repo.name + }; - buildInfo.get().then(function(resp) { + ApiService.getRepoBuilds(null, params, true).then(function(resp) { var runningBuilds = []; for (var i = 0; i < resp.builds.length; ++i) { var build = resp.builds[i]; @@ -745,6 +748,197 @@ function RepoCtrl($scope, $sanitize, Restangular, ImageMetadataService, ApiServi loadViewInfo(); } +function RepoBuildCtrl($scope, Restangular, ApiService, $routeParams, $rootScope, $location, $interval, $sanitize) { + var namespace = $routeParams.namespace; + var name = $routeParams.name; + var pollTimerHandle = null; + + $scope.$on('$destroy', function() { + stopPollTimer(); + }); + + // Watch for changes to the current parameter. + $scope.$on('$routeUpdate', function(){ + if ($location.search().current) { + $scope.setCurrentBuild($location.search().current, false); + } + }); + + $scope.builds = []; + $scope.polling = false; + + $scope.adjustLogHeight = function() { + $('.build-logs').height($(window).height() - 365); + }; + + $scope.hasLogs = function(container) { + return ((container.logs && container.logs.length) || (container._logs && container._logs.length)); + }; + + $scope.toggleLogs = function(container) { + if (container._logs) { + container.logs = container._logs; + container._logs = null; + } else { + container._logs = container.logs; + container.logs = null; + } + }; + + $scope.setCurrentBuild = function(buildId, opt_updateURL) { + // Find the build. 
+ for (var i = 0; i < $scope.builds.length; ++i) { + if ($scope.builds[i].id == buildId) { + $scope.setCurrentBuildInternal($scope.builds[i], opt_updateURL); + return; + } + } + }; + + $scope.setCurrentBuildInternal = function(build, opt_updateURL) { + if (build == $scope.currentBuild) { return; } + + stopPollTimer(); + + $scope.logEntries = null; + $scope.logStartIndex = null; + $scope.currentParentEntry = null; + + $scope.currentBuild = build; + + if (opt_updateURL) { + if (build) { + $location.search('current', build.id); + } else { + $location.search('current', null); + } + } + + // Timeout needed to ensure the log element has been created + // before its height is adjusted. + setTimeout(function() { + $scope.adjustLogHeight(); + }, 1); + + // Load the first set of logs. + getBuildStatusAndLogs(); + + // If the build is currently processing, start the build timer. + checkPollTimer(); + }; + + var checkPollTimer = function() { + var build = $scope.currentBuild; + if (!build) { + stopPollTimer(); + return; + } + + if (build['phase'] != 'complete' && build['phase'] != 'error') { + startPollTimer(); + return true; + } else { + stopPollTimer(); + return false; + } + }; + + var stopPollTimer = function() { + $interval.cancel(pollTimerHandle); + }; + + var startPollTimer = function() { + stopPollTimer(); + pollTimerHandle = $interval(getBuildStatusAndLogs, 2000); + }; + + var processLogs = function(logs, startIndex) { + if (!$scope.logEntries) { $scope.logEntries = []; } + + for (var i = 0; i < logs.length; ++i) { + var entry = logs[i]; + var type = entry['type'] || 'entry'; + if (type == 'command' || type == 'phase' || type == 'error') { + entry['_logs'] = []; + entry['index'] = startIndex + i; + + $scope.logEntries.push(entry); + $scope.currentParentEntry = entry; + } else if ($scope.currentParentEntry) { + if ($scope.currentParentEntry['logs']) { + $scope.currentParentEntry['logs'].push(entry); + } else { + $scope.currentParentEntry['_logs'].push(entry); + } + } + } + }; + + var getBuildStatusAndLogs = function() { + if (!$scope.currentBuild || $scope.polling) { return; } + + $scope.polling = true; + + var params = { + 'repository': namespace + '/' + name, + 'build_uuid': $scope.currentBuild.id + }; + + ApiService.getRepoBuildStatus(null, params, true).then(function(resp) { + // Note: We use extend here rather than replacing as Angular is depending on the + // root build object to remain the same object. + $.extend(true, $scope.currentBuild, resp); + checkPollTimer(); + + // Load the updated logs for the build. 
+ var options = { + 'start': $scope.logStartIndex + }; + + ApiService.getRepoBuildLogsAsResource(params, true).withOptions(options).get(function(resp) { + processLogs(resp['logs'], resp['start']); + $scope.logStartIndex = resp['total']; + $scope.polling = false; + }); + }); + }; + + var fetchRepository = function() { + var params = {'repository': namespace + '/' + name}; + $rootScope.title = 'Loading Repository...'; + $scope.repository = ApiService.getRepoAsResource(params).get(function(repo) { + if (!repo.can_write) { + $rootScope.title = 'Unknown builds'; + $scope.accessDenied = true; + return; + } + + $rootScope.title = 'Repository Builds'; + $scope.repo = repo; + + getBuildInfo(); + }); + }; + + var getBuildInfo = function(repo) { + var params = { + 'repository': namespace + '/' + name + }; + + ApiService.getRepoBuilds(null, params).then(function(resp) { + $scope.builds = resp.builds; + + if ($location.search().current) { + $scope.setCurrentBuild($location.search().current, false); + } else if ($scope.builds.length > 0) { + $scope.setCurrentBuild($scope.builds[0].id, true); + } + }); + }; + + fetchRepository(); +} + function RepoAdminCtrl($scope, Restangular, ApiService, $routeParams, $rootScope) { var namespace = $routeParams.namespace; var name = $routeParams.name; @@ -1002,8 +1196,13 @@ function RepoAdminCtrl($scope, Restangular, ApiService, $routeParams, $rootScope }; $scope.repository = ApiService.getRepoAsResource(params).get(function(repo) { - $scope.repo = repo; + if (!repo.can_admin) { + $rootScope.title = 'Forbidden'; + $scope.accessDenied = true; + return; + } + $scope.repo = repo; $rootScope.title = 'Settings - ' + namespace + '/' + name; $rootScope.description = 'Administrator settings for ' + namespace + '/' + name + ': Permissions, webhooks and other settings'; diff --git a/static/lib/bindonce.min.js b/static/lib/bindonce.min.js new file mode 100644 index 000000000..2c26c0cf0 --- /dev/null +++ b/static/lib/bindonce.min.js @@ -0,0 +1 @@ +(function(){"use strict";var bindonceModule=angular.module("pasvaz.bindonce",[]);bindonceModule.directive("bindonce",function(){var toBoolean=function(value){if(value&&value.length!==0){var v=angular.lowercase(""+value);value=!(v==="f"||v==="0"||v==="false"||v==="no"||v==="n"||v==="[]")}else{value=false}return value};var msie=parseInt((/msie (\d+)/.exec(angular.lowercase(navigator.userAgent))||[])[1],10);if(isNaN(msie)){msie=parseInt((/trident\/.*; rv:(\d+)/.exec(angular.lowercase(navigator.userAgent))||[])[1],10)}var bindonceDirective={restrict:"AM",controller:["$scope","$element","$attrs","$interpolate",function($scope,$element,$attrs,$interpolate){var showHideBinder=function(elm,attr,value){var show=attr==="show"?"":"none";var hide=attr==="hide"?"":"none";elm.css("display",toBoolean(value)?show:hide)};var classBinder=function(elm,value){if(angular.isObject(value)&&!angular.isArray(value)){var results=[];angular.forEach(value,function(value,index){if(value)results.push(index)});value=results}if(value){elm.addClass(angular.isArray(value)?value.join(" "):value)}};var ctrl={watcherRemover:undefined,binders:[],group:$attrs.boName,element:$element,ran:false,addBinder:function(binder){this.binders.push(binder);if(this.ran){this.runBinders()}},setupWatcher:function(bindonceValue){var 
that=this;this.watcherRemover=$scope.$watch(bindonceValue,function(newValue){if(newValue===undefined)return;that.removeWatcher();that.runBinders()},true)},removeWatcher:function(){if(this.watcherRemover!==undefined){this.watcherRemover();this.watcherRemover=undefined}},runBinders:function(){while(this.binders.length>0){var binder=this.binders.shift();if(this.group&&this.group!=binder.group)continue;var value=binder.scope.$eval(binder.interpolate?$interpolate(binder.value):binder.value);switch(binder.attr){case"boIf":if(toBoolean(value)){binder.transclude(binder.scope.$new(),function(clone){var parent=binder.element.parent();var afterNode=binder.element&&binder.element[binder.element.length-1];var parentNode=parent&&parent[0]||afterNode&&afterNode.parentNode;var afterNextSibling=afterNode&&afterNode.nextSibling||null;angular.forEach(clone,function(node){parentNode.insertBefore(node,afterNextSibling)})})}break;case"boSwitch":var selectedTranscludes,switchCtrl=binder.controller[0];if(selectedTranscludes=switchCtrl.cases["!"+value]||switchCtrl.cases["?"]){binder.scope.$eval(binder.attrs.change);angular.forEach(selectedTranscludes,function(selectedTransclude){selectedTransclude.transclude(binder.scope.$new(),function(clone){var parent=selectedTransclude.element.parent();var afterNode=selectedTransclude.element&&selectedTransclude.element[selectedTransclude.element.length-1];var parentNode=parent&&parent[0]||afterNode&&afterNode.parentNode;var afterNextSibling=afterNode&&afterNode.nextSibling||null;angular.forEach(clone,function(node){parentNode.insertBefore(node,afterNextSibling)})})})}break;case"boSwitchWhen":var ctrl=binder.controller[0];ctrl.cases["!"+binder.attrs.boSwitchWhen]=ctrl.cases["!"+binder.attrs.boSwitchWhen]||[];ctrl.cases["!"+binder.attrs.boSwitchWhen].push({transclude:binder.transclude,element:binder.element});break;case"boSwitchDefault":var ctrl=binder.controller[0];ctrl.cases["?"]=ctrl.cases["?"]||[];ctrl.cases["?"].push({transclude:binder.transclude,element:binder.element});break;case"hide":case"show":showHideBinder(binder.element,binder.attr,value);break;case"class":classBinder(binder.element,value);break;case"text":binder.element.text(value);break;case"html":binder.element.html(value);break;case"style":binder.element.css(value);break;case"src":binder.element.attr(binder.attr,value);if(msie)binder.element.prop("src",value);break;case"attr":angular.forEach(binder.attrs,function(attrValue,attrKey){var newAttr,newValue;if(attrKey.match(/^boAttr./)&&binder.attrs[attrKey]){newAttr=attrKey.replace(/^boAttr/,"").replace(/([a-z])([A-Z])/g,"$1-$2").toLowerCase();newValue=binder.scope.$eval(binder.attrs[attrKey]);binder.element.attr(newAttr,newValue)}});break;case"href":case"alt":case"title":case"id":case"value":binder.element.attr(binder.attr,value);break}}this.ran=true}};return ctrl}],link:function(scope,elm,attrs,bindonceController){var value=attrs.bindonce?scope.$eval(attrs.bindonce):true;if(value!==undefined){bindonceController.runBinders()}else{bindonceController.setupWatcher(attrs.bindonce);elm.bind("$destroy",bindonceController.removeWatcher)}}};return 
bindonceDirective});angular.forEach([{directiveName:"boShow",attribute:"show"},{directiveName:"boHide",attribute:"hide"},{directiveName:"boClass",attribute:"class"},{directiveName:"boText",attribute:"text"},{directiveName:"boHtml",attribute:"html"},{directiveName:"boSrcI",attribute:"src",interpolate:true},{directiveName:"boSrc",attribute:"src"},{directiveName:"boHrefI",attribute:"href",interpolate:true},{directiveName:"boHref",attribute:"href"},{directiveName:"boAlt",attribute:"alt"},{directiveName:"boTitle",attribute:"title"},{directiveName:"boId",attribute:"id"},{directiveName:"boStyle",attribute:"style"},{directiveName:"boValue",attribute:"value"},{directiveName:"boAttr",attribute:"attr"},{directiveName:"boIf",transclude:"element",terminal:true,priority:1e3},{directiveName:"boSwitch",require:"boSwitch",controller:function(){this.cases={}}},{directiveName:"boSwitchWhen",transclude:"element",priority:800,require:"^boSwitch"},{directiveName:"boSwitchDefault",transclude:"element",priority:800,require:"^boSwitch"}],function(boDirective){var childPriority=200;return bindonceModule.directive(boDirective.directiveName,function(){var bindonceDirective={priority:boDirective.priority||childPriority,transclude:boDirective.transclude||false,terminal:boDirective.terminal||false,require:["^bindonce"].concat(boDirective.require||[]),controller:boDirective.controller,compile:function(tElement,tAttrs,transclude){return function(scope,elm,attrs,controllers){var bindonceController=controllers[0];var name=attrs.boParent;if(name&&bindonceController.group!==name){var element=bindonceController.element.parent();bindonceController=undefined;var parentValue;while(element[0].nodeType!==9&&element.length){if((parentValue=element.data("$bindonceController"))&&parentValue.group===name){bindonceController=parentValue;break}element=element.parent()}if(!bindonceController){throw new Error("No bindonce controller: "+name)}}bindonceController.addBinder({element:elm,attr:boDirective.attribute||boDirective.directiveName,attrs:attrs,value:attrs[boDirective.directiveName],interpolate:boDirective.interpolate,group:name,transclude:transclude,controller:controllers.slice(1),scope:scope})}}};return bindonceDirective})})})(); \ No newline at end of file diff --git a/static/partials/repo-admin.html b/static/partials/repo-admin.html index 4e567b381..3053f26fc 100644 --- a/static/partials/repo-admin.html +++ b/static/partials/repo-admin.html @@ -1,5 +1,8 @@
+
+ You do not have permission to view this page +
diff --git a/static/partials/repo-build.html b/static/partials/repo-build.html new file mode 100644 index 000000000..defa1b636 --- /dev/null +++ b/static/partials/repo-build.html @@ -0,0 +1,83 @@ +
+
+ You do not have permission to view this page +
+
+
+ +

+ + +

+
+ +
+ There are no builds for this repository +
+ +
+ + + + +
+
+
+
+
+ + Started: +
+ + +
+
+ +
+
+ +
+ +
+
+ +
+ +
+
+ +
+
+ +
+
+ + +
+
+ + +
+
+
+
+
+ + {{ build.id }} +
+
+
+
+
+
diff --git a/static/partials/view-repo.html b/static/partials/view-repo.html index ae8cb94f9..c46eaf16e 100644 --- a/static/partials/view-repo.html +++ b/static/partials/view-repo.html @@ -38,13 +38,18 @@
-
- - - Building Images - - {{ buildsInfo ? buildsInfo.length : '-' }} +
+
diff --git a/templates/base.html b/templates/base.html index e240d737e..b47a2d8ae 100644 --- a/templates/base.html +++ b/templates/base.html @@ -38,14 +38,12 @@ - - - + @@ -53,6 +51,7 @@ + diff --git a/test/data/test.db b/test/data/test.db index aa74e339f..775f2c82d 100644 Binary files a/test/data/test.db and b/test/data/test.db differ diff --git a/test/test_api_usage.py b/test/test_api_usage.py index fca9f2eed..efbe5849f 100644 --- a/test/test_api_usage.py +++ b/test/test_api_usage.py @@ -856,7 +856,6 @@ class TestGetRepoBuilds(ApiTestCase): assert 'id' in build assert 'status' in build - assert 'message' in build class TestRequearRepoBuild(ApiTestCase): diff --git a/test/testlogs.py b/test/testlogs.py new file mode 100644 index 000000000..76a68b1bf --- /dev/null +++ b/test/testlogs.py @@ -0,0 +1,189 @@ +import logging + +from random import SystemRandom +from loremipsum import get_sentence +from functools import wraps +from copy import deepcopy + +from data.buildlogs import BuildLogs + + +logger = logging.getLogger(__name__) +random = SystemRandom() + + +def maybe_advance_script(is_get_status=False): + def inner_advance(func): + @wraps(func) + def wrapper(self, *args, **kwargs): + advance_units = random.randint(1, 500) + logger.debug('Advancing script %s units', advance_units) + while advance_units > 0 and self.remaining_script: + units = self.remaining_script[0][0] + + if advance_units > units: + advance_units -= units + self.advance_script(is_get_status) + else: + break + + return func(self, *args, **kwargs) + return wrapper + return inner_advance + + +class TestBuildLogs(BuildLogs): + COMMAND_TYPES = ['FROM', 'MAINTAINER', 'RUN', 'CMD', 'EXPOSE', 'ENV', 'ADD', + 'ENTRYPOINT', 'VOLUME', 'USER', 'WORKDIR'] + STATUS_TEMPLATE = { + 'total_commands': None, + 'current_command': None, + 'push_completion': 0.0, + 'image_completion': {}, + } + + def __init__(self, redis_host, namespace, repository, test_build_id): + super(TestBuildLogs, self).__init__(redis_host) + self.namespace = namespace + self.repository = repository + self.test_build_id = test_build_id + self.remaining_script = self._generate_script() + logger.debug('Total script size: %s', len(self.remaining_script)) + self._logs = [] + + self._status = {} + self._last_status = {} + + def advance_script(self, is_get_status): + (_, log, status_wrapper) = self.remaining_script.pop(0) + if log is not None: + self._logs.append(log) + + if status_wrapper is not None: + (phase, status) = status_wrapper + + from data import model + build_obj = model.get_repository_build(self.namespace, self.repository, + self.test_build_id) + build_obj.phase = phase + build_obj.save() + + self._status = status + if not is_get_status: + self._last_status = status + + def _generate_script(self): + script = [] + + # generate the init phase + script.append(self._generate_phase(400, 'initializing')) + script.extend(self._generate_logs(random.randint(1, 3))) + + # move to the building phase + script.append(self._generate_phase(400, 'building')) + total_commands = random.randint(5, 20) + for command_num in range(1, total_commands + 1): + command_weight = random.randint(50, 100) + script.append(self._generate_command(command_num, total_commands, + command_weight)) + + # we want 0 logs some percent of the time + num_logs = max(0, random.randint(-50, 400)) + script.extend(self._generate_logs(num_logs)) + + # move to the pushing phase + script.append(self._generate_phase(400, 'pushing')) + script.extend(self._generate_push_statuses(total_commands)) + + # move to the error 
or complete phase
+    if random.randint(0, 1) == 0:
+      script.append(self._generate_phase(400, 'complete'))
+    else:
+      script.append(self._generate_phase(400, 'error'))
+      script.append((1, {'message': 'Something bad happened! Oh noes!',
+                         'type': self.ERROR}, None))
+
+    return script
+
+  def _generate_phase(self, start_weight, phase_name):
+    return (start_weight, {'message': phase_name, 'type': self.PHASE},
+            (phase_name, deepcopy(self.STATUS_TEMPLATE)))
+
+  def _generate_command(self, command_num, total_commands, command_weight):
+    sentence = get_sentence()
+    command = random.choice(self.COMMAND_TYPES)
+    if command == 'FROM':
+      sentence = random.choice(['ubuntu', 'lopter/raring-base',
+                                'quay.io/devtable/simple',
+                                'quay.io/buynlarge/orgrepo',
+                                'stackbrew/ubuntu:precise'])
+
+    msg = {
+      'message': 'Step %s: %s %s' % (command_num, command, sentence),
+      'type': self.COMMAND,
+    }
+    status = deepcopy(self.STATUS_TEMPLATE)
+    status['total_commands'] = total_commands
+    status['current_command'] = command_num
+    return (command_weight, msg, ('building', status))
+
+  @staticmethod
+  def _generate_logs(count):
+    return [(1, {'message': get_sentence()}, None) for _ in range(count)]
+
+  @staticmethod
+  def _compute_total_completion(statuses, total_images):
+    percentage_with_sizes = float(len(statuses.values()))/total_images
+    sent_bytes = sum([status[u'current'] for status in statuses.values()])
+    total_bytes = sum([status[u'total'] for status in statuses.values()])
+    return float(sent_bytes)/total_bytes*percentage_with_sizes
+
+  @staticmethod
+  def _generate_push_statuses(total_commands):
+    push_status_template = deepcopy(TestBuildLogs.STATUS_TEMPLATE)
+    push_status_template['current_command'] = total_commands
+    push_status_template['total_commands'] = total_commands
+
+    push_statuses = []
+
+    one_mb = 1 * 1024 * 1024
+
+    num_images = random.randint(2, 7)
+    sizes = [random.randint(one_mb, one_mb * 5) for _ in range(num_images)]
+
+    image_completion = {}
+    for image_num, image_size in enumerate(sizes):
+      image_id = 'image_id_%s' % image_num
+
+      image_completion[image_id] = {
+        'current': 0,
+        'total': image_size,
+      }
+
+      for i in range(one_mb, image_size, one_mb):
+        image_completion[image_id]['current'] = i
+        new_status = deepcopy(push_status_template)
+        new_status['image_completion'] = deepcopy(image_completion)
+
+        completion = TestBuildLogs._compute_total_completion(image_completion,
+                                                             num_images)
+        new_status['push_completion'] = completion
+        push_statuses.append((250, None, ('pushing', new_status)))
+
+    return push_statuses
+
+  @maybe_advance_script()
+  def get_log_entries(self, build_id, start_index):
+    if build_id == self.test_build_id:
+      return (len(self._logs), self._logs[start_index:])
+    else:
+      return super(TestBuildLogs, self).get_log_entries(build_id, start_index)
+
+  @maybe_advance_script(True)
+  def get_status(self, build_id):
+    if build_id == self.test_build_id:
+      returnable_status = self._last_status
+      self._last_status = self._status
+      return returnable_status
+    else:
+      return super(TestBuildLogs, self).get_status(build_id)
diff --git a/test/teststorage.py b/test/teststorage.py
index 48634075b..41768e09d 100644
--- a/test/teststorage.py
+++ b/test/teststorage.py
@@ -35,3 +35,6 @@ class FakeUserfiles(object):
 
   def get_file_url(self, file_id, expires_in=300):
     return ('http://fake/url')
+
+  def get_file_checksum(self, file_id):
+    return 'abcdefg'
diff --git a/workers/README.md b/workers/README.md
new file mode 100644
index 000000000..8e356181f
--- /dev/null
+++ b/workers/README.md
@@ -0,0 +1,39 @@
+to prepare a new build node host:
+
+```
+sudo apt-get update
+sudo apt-get install -y git python-virtualenv python-dev phantomjs libjpeg8 libjpeg62-dev libfreetype6 libfreetype6-dev libevent-dev gdebi-core
+```
+
+check out the code, install the kernel, custom docker, nsexec, and reboot:
+
+```
+git clone https://bitbucket.org/yackob03/quay.git
+cd quay
+sudo gdebi --n binary_dependencies/builder/linux-headers-3.11.0-17_3.11.0-17.28_all.deb
+sudo gdebi --n binary_dependencies/builder/linux-headers-3.11.0-17-generic_3.11.0-17.28_amd64.deb
+sudo gdebi --n binary_dependencies/builder/linux-image-3.11.0-17-generic_3.11.0-17.28_amd64.deb
+sudo gdebi --n binary_dependencies/builder/linux-image-extra-3.11.0-17-generic_3.11.0-17.28_amd64.deb
+sudo gdebi --n binary_dependencies/builder/nsexec_1.22ubuntu1trusty1_amd64.deb
+sudo gdebi --n binary_dependencies/builder/lxc-docker-0.8.0-tutum_0.8.0-tutum-20140212002736-afad5c0-dirty_amd64.deb
+sudo chown -R 100000:100000 /var/lib/docker
+sudo shutdown -r now
+```
+
+pull some base images if you want (optional)
+```
+sudo docker pull ubuntu
+sudo docker pull stackbrew/ubuntu
+sudo docker pull busybox
+sudo docker pull lopter/raring-base
+```
+
+start the worker
+
+```
+cd quay
+virtualenv --distribute venv
+source venv/bin/activate
+pip install -r requirements.txt
+sudo STACK=prod venv/bin/python -m workers.dockerfilebuild -D
+```
diff --git a/workers/dockerfilebuild.py b/workers/dockerfilebuild.py
index 7cd801d9a..6d50601da 100644
--- a/workers/dockerfilebuild.py
+++ b/workers/dockerfilebuild.py
@@ -10,6 +10,7 @@ import shutil
 from docker import Client, APIError
 from tempfile import TemporaryFile, mkdtemp
 from zipfile import ZipFile
+from functools import partial
 
 from data.queue import dockerfile_build_queue
 from data import model
@@ -53,9 +54,9 @@ class DockerfileBuildContext(object):
     self._build_dir = build_context_dir
     self._tag_name = tag_name
     self._push_token = push_token
-    self._build_uuid = build_uuid
-    self._cl = Client(timeout=1200)
-    self._status = StatusWrapper(self._build_uuid)
+    self._cl = Client(timeout=1200, version='1.7')
+    self._status = StatusWrapper(build_uuid)
+    self._build_logger = partial(build_logs.append_log_message, build_uuid)
 
     dockerfile_path = os.path.join(self._build_dir, "Dockerfile")
     self._num_steps = DockerfileBuildContext.__count_steps(dockerfile_path)
@@ -93,22 +94,25 @@ class DockerfileBuildContext(object):
     with self._status as status:
       status['total_commands'] = self._num_steps
 
-    logger.debug('Building to tag names: %s' % self._tag_name)
+    logger.debug('Building to tag named: %s' % self._tag_name)
     build_status = self._cl.build(path=self._build_dir, tag=self._tag_name,
                                   stream=True)
 
     current_step = 0
     built_image = None
     for status in build_status:
-      logger.debug('Status: %s', str(status))
-      build_logs.append_log_message(self._build_uuid, str(status))
+      status_str = str(status.encode('utf-8'))
+      logger.debug('Status: %s', status_str)
       step_increment = re.search(r'Step ([0-9]+) :', status)
       if step_increment:
+        self._build_logger(status_str, build_logs.COMMAND)
         current_step = int(step_increment.group(1))
         logger.debug('Step now: %s/%s' % (current_step, self._num_steps))
         with self._status as status:
           status['current_command'] = current_step
         continue
+      else:
+        self._build_logger(status_str)
 
       complete = re.match(r'Successfully built ([a-z0-9]+)$', status)
       if complete:
@@ -189,7 +193,11 @@ class DockerfileBuildContext(object):
     repos = set()
     for image in self._cl.images():
       images_to_remove.add(image['Id'])
-      repos.add(image['Repository'])
+
+      for tag in image['RepoTags']:
+        tag_repo = tag.split(':')[0]
+        if tag_repo != '':
+          repos.add(tag_repo)
 
     for repo in repos:
       repo_url = 'https://index.docker.io/v1/repositories/%s/images' % repo
@@ -254,10 +262,15 @@ class DockerfileBuildWorker(Worker):
     tag_name = repository_build.tag
     access_token = repository_build.access_token.code
 
-    start_msg = ('Starting job with resource url: %s tag: %s and token: %s' %
-                 (resource_url, tag_name, access_token))
+    log_appender = partial(build_logs.append_log_message,
+                           repository_build.uuid)
+
+    log_appender('initializing', build_logs.PHASE)
+
+    start_msg = ('Starting job with resource url: %s tag: %s' % (resource_url,
+                                                                 tag_name))
     logger.debug(start_msg)
-    build_logs.append_log_message(repository_build.uuid, start_msg)
+    log_appender(start_msg)
 
     docker_resource = requests.get(resource_url)
     c_type = docker_resource.headers['content-type']
@@ -265,41 +278,45 @@ class DockerfileBuildWorker(Worker):
     filetype_msg = ('Request to build file of type: %s with tag: %s' %
                     (c_type, tag_name))
     logger.info(filetype_msg)
-    build_logs.append_log_message(repository_build.uuid, filetype_msg)
+    log_appender(filetype_msg)
 
     if c_type not in self._mime_processors:
       raise RuntimeError('Invalid dockerfile content type: %s' % c_type)
 
     build_dir = self._mime_processors[c_type](docker_resource)
 
-    uuid = repository_build.uuid
+    log_appender('building', build_logs.PHASE)
     repository_build.phase = 'building'
     repository_build.save()
 
-    try:
-      with DockerfileBuildContext(build_dir, tag_name, access_token,
-                                  repository_build.uuid) as build_ctxt:
+    with DockerfileBuildContext(build_dir, tag_name, access_token,
+                                repository_build.uuid) as build_ctxt:
+      try:
         built_image = build_ctxt.build()
         if not built_image:
+          log_appender('error', build_logs.PHASE)
          repository_build.phase = 'error'
          repository_build.save()
-          build_logs.append_log_message(uuid, 'Unable to build dockerfile.')
+          log_appender('Unable to build dockerfile.', build_logs.ERROR)
          return False
 
+        log_appender('pushing', build_logs.PHASE)
        repository_build.phase = 'pushing'
        repository_build.save()
        build_ctxt.push(built_image)
 
+        log_appender('complete', build_logs.PHASE)
        repository_build.phase = 'complete'
        repository_build.save()
-    except Exception as exc:
-      logger.exception('Exception when processing request.')
-      repository_build.phase = 'error'
-      repository_build.save()
-      build_logs.append_log_message(uuid, exc.message)
-      return False
+      except Exception as exc:
+        log_appender('error', build_logs.PHASE)
+        logger.exception('Exception when processing request.')
+        repository_build.phase = 'error'
+        repository_build.save()
+        log_appender(str(exc), build_logs.ERROR)
+        return False
 
     return True
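
The diff replaces the bounded start/end log window with an append-only list that clients read incrementally: `append_log_message` now returns the index of the entry it just wrote, and `get_log_entries(build_id, start_index)` returns the list length plus every entry from `start_index` onward, so the `total` from one poll becomes the `start` of the next (this is exactly what `RepoBuildCtrl` does with `logStartIndex`). A minimal sketch of that polling loop against `data.buildlogs.BuildLogs` follows; it is not part of the commit, and the Redis host, build id, and poll interval are illustrative assumptions.

```
# Illustrative sketch only -- not part of the diff above. It drives the new
# incremental API in data/buildlogs.py the same way the Angular controller
# does: feed the previous call's length back in as the next start index.
import time

from data.buildlogs import BuildLogs

logs = BuildLogs('localhost')  # assumed Redis host
build_id = 'deadbeef-dead-beef-dead-beefdeadbeef'  # test build seeded by initdb.py

start = 0
while True:
  total, entries = logs.get_log_entries(build_id, start)
  for entry in entries:
    # 'type' is only present on phase/command/error entries.
    print '%s: %s' % (entry.get('type', 'entry'), entry['message'])
  start = total   # the next poll resumes where this one ended
  time.sleep(2)   # the web UI polls on a 2-second interval
```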