Merge branch 'master' into nimbus

commit 9393dc27e5
17 changed files with 614 additions and 271 deletions

@@ -333,6 +333,10 @@ class PermissionPrototype(BaseModel):
   )


+class AccessTokenKind(BaseModel):
+  name = CharField(unique=True, index=True)
+
+
 class AccessToken(BaseModel):
   friendly_name = CharField(null=True)
   code = CharField(default=random_string_generator(length=64), unique=True,

@@ -341,6 +345,7 @@ class AccessToken(BaseModel):
   created = DateTimeField(default=datetime.now)
   role = ForeignKeyField(Role)
   temporary = BooleanField(default=True)
+  kind = ForeignKeyField(AccessTokenKind, null=True)


 class BuildTriggerService(BaseModel):

@@ -473,6 +478,7 @@ class BUILD_PHASE(object):
   PULLING = 'pulling'
   BUILDING = 'building'
   PUSHING = 'pushing'
+  WAITING = 'waiting'
   COMPLETE = 'complete'



@@ -491,7 +497,7 @@ class RepositoryBuild(BaseModel):
   access_token = ForeignKeyField(AccessToken)
   resource_key = CharField(index=True)
   job_config = TextField()
-  phase = CharField(default='waiting')
+  phase = CharField(default=BUILD_PHASE.WAITING)
   started = DateTimeField(default=datetime.now)
   display_name = CharField()
   trigger = ForeignKeyField(RepositoryBuildTrigger, null=True, index=True)

@@ -599,4 +605,5 @@ all_models = [User, Repository, Image, AccessToken, Role, RepositoryPermission,
               Notification, ImageStorageLocation, ImageStoragePlacement,
               ExternalNotificationEvent, ExternalNotificationMethod, RepositoryNotification,
               RepositoryAuthorizedEmail, ImageStorageTransformation, DerivedImageStorage,
-              TeamMemberInvite, ImageStorageSignature, ImageStorageSignatureKind]
+              TeamMemberInvite, ImageStorageSignature, ImageStorageSignatureKind,
+              AccessTokenKind]

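The schema hunks above add one table (accesstokenkind) and a single nullable foreign key on AccessToken, so existing token rows keep working without a backfill. A minimal peewee sketch, not part of the commit, of how the new link can be queried once the models are loaded; the kind name comes from the rows seeded by the migration and initdb hunks below:

    from data.database import AccessToken, AccessTokenKind

    # Fetch the seeded kind row and count the tokens tagged with it.
    build_worker = AccessTokenKind.get(AccessTokenKind.name == 'build-worker')
    worker_tokens = AccessToken.select().where(AccessToken.kind == build_worker)
    print(worker_tokens.count())
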
@@ -0,0 +1,37 @@
+"""Actually remove the column access_token_id
+
+Revision ID: 1d2d86d09fcd
+Revises: 14fe12ade3df
+Create Date: 2015-02-12 16:27:30.260797
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '1d2d86d09fcd'
+down_revision = '14fe12ade3df'
+
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import mysql
+from sqlalchemy.exc import InternalError
+
+def upgrade(tables):
+  ### commands auto generated by Alembic - please adjust! ###
+  try:
+    op.drop_constraint(u'fk_logentry_access_token_id_accesstoken', 'logentry', type_='foreignkey')
+    op.drop_index('logentry_access_token_id', table_name='logentry')
+    op.drop_column('logentry', 'access_token_id')
+  except InternalError:
+    pass
+  ### end Alembic commands ###
+
+
+def downgrade(tables):
+  ### commands auto generated by Alembic - please adjust! ###
+  try:
+    op.add_column('logentry', sa.Column('access_token_id', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True))
+    op.create_foreign_key(u'fk_logentry_access_token_id_accesstoken', 'logentry', 'accesstoken', ['access_token_id'], ['id'])
+    op.create_index('logentry_access_token_id', 'logentry', ['access_token_id'], unique=False)
+  except InternalError:
+    pass
+  ### end Alembic commands ###

@@ -0,0 +1,44 @@
+"""Add access token kinds type
+
+Revision ID: 3e2d38b52a75
+Revises: 1d2d86d09fcd
+Create Date: 2015-02-17 12:03:26.422485
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '3e2d38b52a75'
+down_revision = '1d2d86d09fcd'
+
+from alembic import op
+import sqlalchemy as sa
+
+
+def upgrade(tables):
+  ### commands auto generated by Alembic - please adjust! ###
+  op.create_table('accesstokenkind',
+    sa.Column('id', sa.Integer(), nullable=False),
+    sa.Column('name', sa.String(length=255), nullable=False),
+    sa.PrimaryKeyConstraint('id', name=op.f('pk_accesstokenkind'))
+  )
+  op.create_index('accesstokenkind_name', 'accesstokenkind', ['name'], unique=True)
+  op.add_column(u'accesstoken', sa.Column('kind_id', sa.Integer(), nullable=True))
+  op.create_index('accesstoken_kind_id', 'accesstoken', ['kind_id'], unique=False)
+  op.create_foreign_key(op.f('fk_accesstoken_kind_id_accesstokenkind'), 'accesstoken', 'accesstokenkind', ['kind_id'], ['id'])
+  ### end Alembic commands ###
+
+  op.bulk_insert(tables.accesstokenkind,
+                 [
+                   {'id': 1, 'name':'build-worker'},
+                   {'id': 2, 'name':'pushpull-token'},
+                 ])
+
+
+def downgrade(tables):
+  ### commands auto generated by Alembic - please adjust! ###
+  op.drop_constraint(op.f('fk_accesstoken_kind_id_accesstokenkind'), 'accesstoken', type_='foreignkey')
+  op.drop_index('accesstoken_kind_id', table_name='accesstoken')
+  op.drop_column(u'accesstoken', 'kind_id')
+  op.drop_index('accesstokenkind_name', table_name='accesstokenkind')
+  op.drop_table('accesstokenkind')
+  ### end Alembic commands ###

@@ -14,8 +14,9 @@ from data.database import (User, Repository, Image, AccessToken, Role, Repositor
                            ExternalNotificationEvent, ExternalNotificationMethod,
                            RepositoryNotification, RepositoryAuthorizedEmail, TeamMemberInvite,
                            DerivedImageStorage, ImageStorageTransformation, random_string_generator,
-                           db, BUILD_PHASE, QuayUserField, ImageStorageSignature,
-                           ImageStorageSignatureKind, validate_database_url, db_for_update)
+                           db, BUILD_PHASE, QuayUserField, ImageStorageSignature, QueueItem,
+                           ImageStorageSignatureKind, validate_database_url, db_for_update,
+                           AccessTokenKind)
 from peewee import JOIN_LEFT_OUTER, fn
 from util.validation import (validate_username, validate_email, validate_password,
                              INVALID_PASSWORD_MESSAGE)

@@ -1902,10 +1903,14 @@ def get_private_repo_count(username):
           .count())


-def create_access_token(repository, role):
+def create_access_token(repository, role, kind=None, friendly_name=None):
   role = Role.get(Role.name == role)
+  kind_ref = None
+  if kind is not None:
+    kind_ref = AccessTokenKind.get(AccessTokenKind.name == kind)
+
   new_token = AccessToken.create(repository=repository, temporary=True,
-                                 role=role)
+                                 role=role, kind=kind_ref, friendly_name=friendly_name)
   return new_token


@@ -2024,10 +2029,10 @@ def create_repository_build(repo, access_token, job_config_obj, dockerfile_id,
     pull_robot = lookup_robot(pull_robot_name)

   return RepositoryBuild.create(repository=repo, access_token=access_token,
                                 job_config=json.dumps(job_config_obj),
                                 display_name=display_name, trigger=trigger,
                                 resource_key=dockerfile_id,
                                 pull_robot=pull_robot)


 def get_pull_robot_name(trigger):

@@ -2431,6 +2436,32 @@ def confirm_team_invite(code, user):
   found.delete_instance()
   return (team, inviter)

+def cancel_repository_build(build):
+  with config.app_config['DB_TRANSACTION_FACTORY'](db):
+    # Reload the build for update.
+    try:
+      build = db_for_update(RepositoryBuild.select().where(RepositoryBuild.id == build.id)).get()
+    except RepositoryBuild.DoesNotExist:
+      return False
+
+    if build.phase != BUILD_PHASE.WAITING or not build.queue_item:
+      return False
+
+    # Load the build queue item for update.
+    try:
+      queue_item = db_for_update(QueueItem.select()
+                                          .where(QueueItem.id == build.queue_item.id)).get()
+    except QueueItem.DoesNotExist:
+      return False
+
+    # Check the queue item.
+    if not queue_item.available or queue_item.retries_remaining == 0:
+      return False
+
+    # Delete the queue item and build.
+    queue_item.delete_instance(recursive=True)
+    build.delete_instance()
+    return True
+

 def get_repository_usage():
   one_month_ago = date.today() - timedelta(weeks=4)

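Taken together, the model hunks above give callers two new hooks: create_access_token() accepts an optional kind and friendly_name, and cancel_repository_build() deletes a still-queued build inside a transaction and returns False (rather than raising) once a worker has picked the build up. A short illustrative sketch of driving both helpers; the repository name and build UUID are placeholders, and the call shapes mirror the endpoint hunks later in this diff:

    from data import model

    # Placeholders: any existing repository and build UUID would do.
    repo = model.get_repository('devtable', 'simple')
    build = model.get_repository_build('some-build-uuid')

    # Build tokens are now tagged so the usage logs can label them
    # (see the track_and_log and logsView hunks further down).
    token = model.create_access_token(repo, 'write', kind='build-worker',
                                      friendly_name='Repository Build Token')

    # Only a build still waiting in the queue can be cancelled.
    if not model.cancel_repository_build(build):
      print('build is already running or has finished')
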
@@ -9,7 +9,7 @@ from app import app, userfiles as user_files, build_logs, log_archive
 from endpoints.api import (RepositoryParamResource, parse_args, query_param, nickname, resource,
                            require_repo_read, require_repo_write, validate_json_request,
                            ApiResource, internal_only, format_date, api, Unauthorized, NotFound,
-                           path_param)
+                           path_param, InvalidRequest, require_repo_admin)
 from endpoints.common import start_build
 from endpoints.trigger import BuildTrigger
 from data import model, database

@@ -207,6 +207,31 @@ class RepositoryBuildList(RepositoryParamResource):
     return resp, 201, headers


+
+
+@resource('/v1/repository/<repopath:repository>/build/<build_uuid>')
+@path_param('repository', 'The full path of the repository. e.g. namespace/name')
+@path_param('build_uuid', 'The UUID of the build')
+class RepositoryBuildResource(RepositoryParamResource):
+  """ Resource for dealing with repository builds. """
+  @require_repo_admin
+  @nickname('cancelRepoBuild')
+  def delete(self, namespace, repository, build_uuid):
+    """ Cancels a repository build if it has not yet been picked up by a build worker. """
+    try:
+      build = model.get_repository_build(build_uuid)
+    except model.InvalidRepositoryBuildException:
+      raise NotFound()
+
+    if build.repository.name != repository or build.repository.namespace_user.username != namespace:
+      raise NotFound()
+
+    if model.cancel_repository_build(build):
+      return 'Okay', 201
+    else:
+      raise InvalidRequest('Build is currently running or has finished')
+
+
 @resource('/v1/repository/<repopath:repository>/build/<build_uuid>/status')
 @path_param('repository', 'The full path of the repository. e.g. namespace/name')
 @path_param('build_uuid', 'The UUID of the build')

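The new RepositoryBuildResource above is the HTTP surface for cancellation: DELETE returns 404 when the build is unknown or belongs to a different repository, 201 when a queued build was cancelled, and 400 (InvalidRequest) when the build is already running or finished. A hedged sketch of exercising it over HTTP; the /api/v1 mount point, the hostname and the bearer token are assumptions about a particular deployment, not something this commit defines:

    import requests

    API_TOKEN = 'oauth-token-with-repo-admin-scope'   # placeholder
    BUILD_UUID = 'some-build-uuid'                    # placeholder

    resp = requests.delete(
        'https://quay.example.com/api/v1/repository/devtable/simple/build/' + BUILD_UUID,
        headers={'Authorization': 'Bearer ' + API_TOKEN})

    # 201: cancelled, 400: already running or finished, 404: unknown build.
    print(resp.status_code, resp.text)
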
@@ -215,7 +215,8 @@ def start_build(repository, dockerfile_id, tags, build_name, subdir, manual,
   host = urlparse.urlparse(request.url).netloc
   repo_path = '%s/%s/%s' % (host, repository.namespace_user.username, repository.name)

-  token = model.create_access_token(repository, 'write')
+  token = model.create_access_token(repository, 'write', kind='build-worker',
+                                    friendly_name='Repository Build Token')
   logger.debug('Creating build %s with repo %s tags %s and dockerfile_id %s',
                build_name, repo_path, tags, dockerfile_id)

@@ -50,7 +50,7 @@ def generate_headers(role='read'):
   if has_token_request:
     repo = model.get_repository(namespace, repository)
     if repo:
-      token = model.create_access_token(repo, role)
+      token = model.create_access_token(repo, role, 'pushpull-token')
       token_str = 'signature=%s' % token.code
       response.headers['WWW-Authenticate'] = token_str
       response.headers['X-Docker-Token'] = token_str

@@ -34,6 +34,10 @@ def track_and_log(event_name, repo, **kwargs):
   elif authenticated_token:
     metadata['token'] = authenticated_token.friendly_name
     metadata['token_code'] = authenticated_token.code
+
+    if authenticated_token.kind:
+      metadata['token_type'] = authenticated_token.kind.name
+
     analytics_id = 'token:' + authenticated_token.code
   else:
     metadata['public'] = True

@@ -192,6 +192,9 @@ def initialize_database():

   BuildTriggerService.create(name='github')

+  AccessTokenKind.create(name='build-worker')
+  AccessTokenKind.create(name='pushpull-token')
+
   LogEntryKind.create(name='account_change_plan')
   LogEntryKind.create(name='account_change_cc')
   LogEntryKind.create(name='account_change_password')

@@ -393,7 +396,7 @@ def populate_database():
                                   'Empty repository which is building.',
                                   False, [], (0, [], None))

-  token = model.create_access_token(building, 'write')
+  token = model.create_access_token(building, 'write', 'build-worker')

   trigger = model.create_build_trigger(building, 'github', '123authtoken',
                                        new_user_1, pull_robot=dtrobot[0])

@@ -56,7 +56,8 @@
       <td>
         <span class="log-performer" ng-if="log.metadata.oauth_token_application">
           <div>
-            <span class="application-reference" data-title="log.metadata.oauth_token_application"
+            <span class="application-reference"
+                  data-title="log.metadata.oauth_token_application"
                   client-id="log.metadata.oauth_token_application_id"></span>
           </div>
           <div style="text-align: center; font-size: 12px; color: #aaa; padding: 4px;">on behalf of</div>

@@ -3314,7 +3314,11 @@ quayApp.directive('logsView', function () {
        }

        if (metadata.token) {
-         prefix += ' via token {token}';
+         if (metadata.token_type == 'build-worker') {
+           prefix += ' by <b>build worker</b>';
+         } else {
+           prefix += ' via token';
+         }
        } else if (metadata.username) {
          prefix += ' by {username}';
        } else {

@@ -3325,7 +3329,13 @@ quayApp.directive('logsView', function () {
      },
      'pull_repo': function(metadata) {
        if (metadata.token) {
-         return 'Pull repository {repo} via token {token}';
+         var prefix = 'Pull of repository'
+         if (metadata.token_type == 'build-worker') {
+           prefix += ' by <b>build worker</b>';
+         } else {
+           prefix += ' via token';
+         }
+         return prefix;
        } else if (metadata.username) {
          return 'Pull repository {repo} by {username}';
        } else {

@@ -1072,257 +1072,6 @@ function BuildPackageCtrl($scope, Restangular, ApiService, DataFileService, $rou
     getBuildInfo();
 }

-function RepoBuildCtrl($scope, Restangular, ApiService, $routeParams, $rootScope, $location, $interval, $sanitize,
-                       ansi2html, AngularViewArray, AngularPollChannel) {
-  var namespace = $routeParams.namespace;
-  var name = $routeParams.name;
-
-  // Watch for changes to the current parameter.
-  $scope.$on('$routeUpdate', function(){
-    if ($location.search().current) {
-      $scope.setCurrentBuild($location.search().current, false);
-    }
-  });
-
-  $scope.builds = null;
-  $scope.pollChannel = null;
-  $scope.buildDialogShowCounter = 0;
-
-  $scope.showNewBuildDialog = function() {
-    $scope.buildDialogShowCounter++;
-  };
-
-  $scope.handleBuildStarted = function(newBuild) {
-    if (!$scope.builds) { return; }
-
-    $scope.builds.unshift(newBuild);
-    $scope.setCurrentBuild(newBuild['id'], true);
-  };
-
-  $scope.adjustLogHeight = function() {
-    var triggerOffset = 0;
-    if ($scope.currentBuild && $scope.currentBuild.trigger) {
-      triggerOffset = 85;
-    }
-    $('.build-logs').height($(window).height() - 415 - triggerOffset);
-  };
-
-  $scope.askRestartBuild = function(build) {
-    $('#confirmRestartBuildModal').modal({});
-  };
-
-  $scope.restartBuild = function(build) {
-    $('#confirmRestartBuildModal').modal('hide');
-
-    var subdirectory = '';
-    if (build['job_config']) {
-      subdirectory = build['job_config']['build_subdir'] || '';
-    }
-
-    var data = {
-      'file_id': build['resource_key'],
-      'subdirectory': subdirectory,
-      'docker_tags': build['job_config']['docker_tags']
-    };
-
-    if (build['pull_robot']) {
-      data['pull_robot'] = build['pull_robot']['name'];
-    }
-
-    var params = {
-      'repository': namespace + '/' + name
-    };
-
-    ApiService.requestRepoBuild(data, params).then(function(newBuild) {
-      if (!$scope.builds) { return; }
-
-      $scope.builds.unshift(newBuild);
-      $scope.setCurrentBuild(newBuild['id'], true);
-    });
-  };
-
-  $scope.hasLogs = function(container) {
-    return container.logs.hasEntries;
-  };
-
-  $scope.setCurrentBuild = function(buildId, opt_updateURL) {
-    if (!$scope.builds) { return; }
-
-    // Find the build.
-    for (var i = 0; i < $scope.builds.length; ++i) {
-      if ($scope.builds[i].id == buildId) {
-        $scope.setCurrentBuildInternal(i, $scope.builds[i], opt_updateURL);
-        return;
-      }
-    }
-  };
-
-  $scope.processANSI = function(message, container) {
-    var filter = container.logs._filter = (container.logs._filter || ansi2html.create());
-
-    // Note: order is important here.
-    var setup = filter.getSetupHtml();
-    var stream = filter.addInputToStream(message);
-    var teardown = filter.getTeardownHtml();
-    return setup + stream + teardown;
-  };
-
-  $scope.setCurrentBuildInternal = function(index, build, opt_updateURL) {
-    if (build == $scope.currentBuild) { return; }
-
-    $scope.logEntries = null;
-    $scope.logStartIndex = null;
-    $scope.currentParentEntry = null;
-
-    $scope.currentBuild = build;
-
-    if (opt_updateURL) {
-      if (build) {
-        $location.search('current', build.id);
-      } else {
-        $location.search('current', null);
-      }
-    }
-
-    // Timeout needed to ensure the log element has been created
-    // before its height is adjusted.
-    setTimeout(function() {
-      $scope.adjustLogHeight();
-    }, 1);
-
-    // Stop any existing polling.
-    if ($scope.pollChannel) {
-      $scope.pollChannel.stop();
-    }
-
-    // Create a new channel for polling the build status and logs.
-    var conductStatusAndLogRequest = function(callback) {
-      getBuildStatusAndLogs(build, callback);
-    };
-
-    $scope.pollChannel = AngularPollChannel.create($scope, conductStatusAndLogRequest, 5 * 1000 /* 5s */);
-    $scope.pollChannel.start();
-  };
-
-  var processLogs = function(logs, startIndex, endIndex) {
-    if (!$scope.logEntries) { $scope.logEntries = []; }
-
-    // If the start index given is less than that requested, then we've received a larger
-    // pool of logs, and we need to only consider the new ones.
-    if (startIndex < $scope.logStartIndex) {
-      logs = logs.slice($scope.logStartIndex - startIndex);
-    }
-
-    for (var i = 0; i < logs.length; ++i) {
-      var entry = logs[i];
-      var type = entry['type'] || 'entry';
-      if (type == 'command' || type == 'phase' || type == 'error') {
-        entry['logs'] = AngularViewArray.create();
-        entry['index'] = $scope.logStartIndex + i;
-
-        $scope.logEntries.push(entry);
-        $scope.currentParentEntry = entry;
-      } else if ($scope.currentParentEntry) {
-        $scope.currentParentEntry['logs'].push(entry);
-      }
-    }
-
-    return endIndex;
-  };
-
-  var getBuildStatusAndLogs = function(build, callback) {
-    var params = {
-      'repository': namespace + '/' + name,
-      'build_uuid': build.id
-    };
-
-    ApiService.getRepoBuildStatus(null, params, true).then(function(resp) {
-      if (build != $scope.currentBuild) { callback(false); return; }
-
-      // Note: We use extend here rather than replacing as Angular is depending on the
-      // root build object to remain the same object.
-      var matchingBuilds = $.grep($scope.builds, function(elem) {
-        return elem['id'] == resp['id']
-      });
-
-      var currentBuild = matchingBuilds.length > 0 ? matchingBuilds[0] : null;
-      if (currentBuild) {
-        currentBuild = $.extend(true, currentBuild, resp);
-      } else {
-        currentBuild = resp;
-        $scope.builds.push(currentBuild);
-      }
-
-      // Load the updated logs for the build.
-      var options = {
-        'start': $scope.logStartIndex
-      };
-
-      ApiService.getRepoBuildLogsAsResource(params, true).withOptions(options).get(function(resp) {
-        if (build != $scope.currentBuild) { callback(false); return; }
-
-        // Process the logs we've received.
-        $scope.logStartIndex = processLogs(resp['logs'], resp['start'], resp['total']);
-
-        // If the build status is an error, open the last two log entries.
-        if (currentBuild['phase'] == 'error' && $scope.logEntries.length > 1) {
-          var openLogEntries = function(entry) {
-            if (entry.logs) {
-              entry.logs.setVisible(true);
-            }
-          };
-
-          openLogEntries($scope.logEntries[$scope.logEntries.length - 2]);
-          openLogEntries($scope.logEntries[$scope.logEntries.length - 1]);
-        }
-
-        // If the build phase is an error or a complete, then we mark the channel
-        // as closed.
-        callback(currentBuild['phase'] != 'error' && currentBuild['phase'] != 'complete');
-      }, function() {
-        callback(false);
-      });
-    }, function() {
-      callback(false);
-    });
-  };
-
-  var fetchRepository = function() {
-    var params = {'repository': namespace + '/' + name};
-    $rootScope.title = 'Loading Repository...';
-    $scope.repository = ApiService.getRepoAsResource(params).get(function(repo) {
-      if (!repo.can_write) {
-        $rootScope.title = 'Unknown builds';
-        $scope.accessDenied = true;
-        return;
-      }
-
-      $rootScope.title = 'Repository Builds';
-      $scope.repo = repo;
-
-      getBuildInfo();
-    });
-  };
-
-  var getBuildInfo = function(repo) {
-    var params = {
-      'repository': namespace + '/' + name
-    };
-
-    ApiService.getRepoBuilds(null, params).then(function(resp) {
-      $scope.builds = resp.builds;
-
-      if ($location.search().current) {
-        $scope.setCurrentBuild($location.search().current, false);
-      } else if ($scope.builds.length > 0) {
-        $scope.setCurrentBuild($scope.builds[0].id, true);
-      }
-    });
-  };
-
-  fetchRepository();
-}
-

 function RepoAdminCtrl($scope, Restangular, ApiService, KeyService, TriggerService, $routeParams,
                        $rootScope, $location, UserService, Config, Features, ExternalNotificationData) {

static/js/controllers/repo-build.js (new file, 272 lines)
@@ -0,0 +1,272 @@
+function RepoBuildCtrl($scope, Restangular, ApiService, $routeParams, $rootScope, $location, $interval, $sanitize,
+                       ansi2html, AngularViewArray, AngularPollChannel) {
+  var namespace = $routeParams.namespace;
+  var name = $routeParams.name;
+
+  // Watch for changes to the current parameter.
+  $scope.$on('$routeUpdate', function(){
+    if ($location.search().current) {
+      $scope.setCurrentBuild($location.search().current, false);
+    }
+  });
+
+  $scope.builds = null;
+  $scope.pollChannel = null;
+  $scope.buildDialogShowCounter = 0;
+
+  $scope.showNewBuildDialog = function() {
+    $scope.buildDialogShowCounter++;
+  };
+
+  $scope.handleBuildStarted = function(newBuild) {
+    if (!$scope.builds) { return; }
+
+    $scope.builds.unshift(newBuild);
+    $scope.setCurrentBuild(newBuild['id'], true);
+  };
+
+  $scope.adjustLogHeight = function() {
+    var triggerOffset = 0;
+    if ($scope.currentBuild && $scope.currentBuild.trigger) {
+      triggerOffset = 85;
+    }
+    $('.build-logs').height($(window).height() - 415 - triggerOffset);
+  };
+
+  $scope.askRestartBuild = function(build) {
+    $('#confirmRestartBuildModal').modal({});
+  };
+
+  $scope.askCancelBuild = function(build) {
+    bootbox.confirm('Are you sure you want to cancel this build?', function(r) {
+      if (r) {
+        var params = {
+          'repository': namespace + '/' + name,
+          'build_uuid': build.id
+        };
+
+        ApiService.cancelRepoBuild(null, params).then(function() {
+          if (!$scope.builds) { return; }
+          $scope.builds.splice($.inArray(build, $scope.builds), 1);
+
+          if ($scope.builds.length) {
+            $scope.currentBuild = $scope.builds[0];
+          } else {
+            $scope.currentBuild = null;
+          }
+        }, ApiService.errorDisplay('Cannot cancel build'));
+      }
+    });
+  };
+
+  $scope.restartBuild = function(build) {
+    $('#confirmRestartBuildModal').modal('hide');
+
+    var subdirectory = '';
+    if (build['job_config']) {
+      subdirectory = build['job_config']['build_subdir'] || '';
+    }
+
+    var data = {
+      'file_id': build['resource_key'],
+      'subdirectory': subdirectory,
+      'docker_tags': build['job_config']['docker_tags']
+    };
+
+    if (build['pull_robot']) {
+      data['pull_robot'] = build['pull_robot']['name'];
+    }
+
+    var params = {
+      'repository': namespace + '/' + name
+    };
+
+    ApiService.requestRepoBuild(data, params).then(function(newBuild) {
+      if (!$scope.builds) { return; }
+
+      $scope.builds.unshift(newBuild);
+      $scope.setCurrentBuild(newBuild['id'], true);
+    });
+  };
+
+  $scope.hasLogs = function(container) {
+    return container.logs.hasEntries;
+  };
+
+  $scope.setCurrentBuild = function(buildId, opt_updateURL) {
+    if (!$scope.builds) { return; }
+
+    // Find the build.
+    for (var i = 0; i < $scope.builds.length; ++i) {
+      if ($scope.builds[i].id == buildId) {
+        $scope.setCurrentBuildInternal(i, $scope.builds[i], opt_updateURL);
+        return;
+      }
+    }
+  };
+
+  $scope.processANSI = function(message, container) {
+    var filter = container.logs._filter = (container.logs._filter || ansi2html.create());
+
+    // Note: order is important here.
+    var setup = filter.getSetupHtml();
+    var stream = filter.addInputToStream(message);
+    var teardown = filter.getTeardownHtml();
+    return setup + stream + teardown;
+  };
+
+  $scope.setCurrentBuildInternal = function(index, build, opt_updateURL) {
+    if (build == $scope.currentBuild) { return; }
+
+    $scope.logEntries = null;
+    $scope.logStartIndex = null;
+    $scope.currentParentEntry = null;
+
+    $scope.currentBuild = build;
+
+    if (opt_updateURL) {
+      if (build) {
+        $location.search('current', build.id);
+      } else {
+        $location.search('current', null);
+      }
+    }
+
+    // Timeout needed to ensure the log element has been created
+    // before its height is adjusted.
+    setTimeout(function() {
+      $scope.adjustLogHeight();
+    }, 1);
+
+    // Stop any existing polling.
+    if ($scope.pollChannel) {
+      $scope.pollChannel.stop();
+    }
+
+    // Create a new channel for polling the build status and logs.
+    var conductStatusAndLogRequest = function(callback) {
+      getBuildStatusAndLogs(build, callback);
+    };
+
+    $scope.pollChannel = AngularPollChannel.create($scope, conductStatusAndLogRequest, 5 * 1000 /* 5s */);
+    $scope.pollChannel.start();
+  };
+
+  var processLogs = function(logs, startIndex, endIndex) {
+    if (!$scope.logEntries) { $scope.logEntries = []; }
+
+    // If the start index given is less than that requested, then we've received a larger
+    // pool of logs, and we need to only consider the new ones.
+    if (startIndex < $scope.logStartIndex) {
+      logs = logs.slice($scope.logStartIndex - startIndex);
+    }
+
+    for (var i = 0; i < logs.length; ++i) {
+      var entry = logs[i];
+      var type = entry['type'] || 'entry';
+      if (type == 'command' || type == 'phase' || type == 'error') {
+        entry['logs'] = AngularViewArray.create();
+        entry['index'] = $scope.logStartIndex + i;
+
+        $scope.logEntries.push(entry);
+        $scope.currentParentEntry = entry;
+      } else if ($scope.currentParentEntry) {
+        $scope.currentParentEntry['logs'].push(entry);
+      }
+    }
+
+    return endIndex;
+  };
+
+  var getBuildStatusAndLogs = function(build, callback) {
+    var params = {
+      'repository': namespace + '/' + name,
+      'build_uuid': build.id
+    };
+
+    ApiService.getRepoBuildStatus(null, params, true).then(function(resp) {
+      if (build != $scope.currentBuild) { callback(false); return; }
+
+      // Note: We use extend here rather than replacing as Angular is depending on the
+      // root build object to remain the same object.
+      var matchingBuilds = $.grep($scope.builds, function(elem) {
+        return elem['id'] == resp['id']
+      });
+
+      var currentBuild = matchingBuilds.length > 0 ? matchingBuilds[0] : null;
+      if (currentBuild) {
+        currentBuild = $.extend(true, currentBuild, resp);
+      } else {
+        currentBuild = resp;
+        $scope.builds.push(currentBuild);
+      }
+
+      // Load the updated logs for the build.
+      var options = {
+        'start': $scope.logStartIndex
+      };
+
+      ApiService.getRepoBuildLogsAsResource(params, true).withOptions(options).get(function(resp) {
+        if (build != $scope.currentBuild) { callback(false); return; }
+
+        // Process the logs we've received.
+        $scope.logStartIndex = processLogs(resp['logs'], resp['start'], resp['total']);
+
+        // If the build status is an error, open the last two log entries.
+        if (currentBuild['phase'] == 'error' && $scope.logEntries.length > 1) {
+          var openLogEntries = function(entry) {
+            if (entry.logs) {
+              entry.logs.setVisible(true);
+            }
+          };
+
+          openLogEntries($scope.logEntries[$scope.logEntries.length - 2]);
+          openLogEntries($scope.logEntries[$scope.logEntries.length - 1]);
+        }
+
+        // If the build phase is an error or a complete, then we mark the channel
+        // as closed.
+        callback(currentBuild['phase'] != 'error' && currentBuild['phase'] != 'complete');
+      }, function() {
+        callback(false);
+      });
+    }, function() {
+      callback(false);
+    });
+  };
+
+  var fetchRepository = function() {
+    var params = {'repository': namespace + '/' + name};
+    $rootScope.title = 'Loading Repository...';
+    $scope.repository = ApiService.getRepoAsResource(params).get(function(repo) {
+      if (!repo.can_write) {
+        $rootScope.title = 'Unknown builds';
+        $scope.accessDenied = true;
+        return;
+      }
+
+      $rootScope.title = 'Repository Builds';
+      $scope.repo = repo;
+
+      getBuildInfo();
+    });
+  };
+
+  var getBuildInfo = function(repo) {
+    var params = {
+      'repository': namespace + '/' + name
+    };
+
+    ApiService.getRepoBuilds(null, params).then(function(resp) {
+      $scope.builds = resp.builds;
+
+      if ($location.search().current) {
+        $scope.setCurrentBuild($location.search().current, false);
+      } else if ($scope.builds.length > 0) {
+        $scope.setCurrentBuild($scope.builds[0].id, true);
+      }
+    });
+  };
+
+  fetchRepository();
+}

@@ -94,13 +94,20 @@
       </div>
     </div>
     <div style="margin-top: 10px">
-      <span class="quay-spinner" ng-show="pollChannel.working"></span>
+      <button class="btn btn-default"
+              ng-show="build.phase == 'waiting' && build.resource_key"
+              ng-click="askCancelBuild(build)">
+        <i class="fa fa-times-circle" style="margin-right: 6px; display: inline-block;"></i>
+        Cancel Build
+      </button>
+
       <button class="btn" ng-show="(build.phase == 'error' || build.phase == 'complete') && build.resource_key"
               ng-class="build.phase == 'error' ? 'btn-success' : 'btn-default'"
              ng-click="askRestartBuild(build)">
        <i class="fa fa-refresh"></i>
        Run Build Again
      </button>
+      <span class="quay-spinner" ng-show="pollChannel.working"></span>
      <span class="build-id">{{ build.id }}</span>
    </div>
  </div>

Binary file not shown.

@@ -15,7 +15,7 @@ from endpoints.api.tag import RepositoryTagImages, RepositoryTag
 from endpoints.api.search import FindRepositories, EntitySearch
 from endpoints.api.image import RepositoryImageChanges, RepositoryImage, RepositoryImageList
 from endpoints.api.build import (FileDropResource, RepositoryBuildStatus, RepositoryBuildLogs,
-                                 RepositoryBuildList)
+                                 RepositoryBuildList, RepositoryBuildResource)
 from endpoints.api.robot import (UserRobotList, OrgRobot, OrgRobotList, UserRobot,
                                  RegenerateOrgRobot, RegenerateUserRobot)

@@ -1571,6 +1571,60 @@ class TestRepositoryBuildStatusFg86BuynlargeOrgrepo(ApiTestCase):
     self._run_test('GET', 400, 'devtable', None)


+class TestRepositoryBuildResourceFg86PublicPublicrepo(ApiTestCase):
+  def setUp(self):
+    ApiTestCase.setUp(self)
+    self._set_url(RepositoryBuildResource, build_uuid="FG86", repository="public/publicrepo")
+
+  def test_delete_anonymous(self):
+    self._run_test('DELETE', 401, None, None)
+
+  def test_delete_freshuser(self):
+    self._run_test('DELETE', 403, 'freshuser', None)
+
+  def test_delete_reader(self):
+    self._run_test('DELETE', 403, 'reader', None)
+
+  def test_delete_devtable(self):
+    self._run_test('DELETE', 403, 'devtable', None)
+
+
+class TestRepositoryBuildResourceFg86DevtableShared(ApiTestCase):
+  def setUp(self):
+    ApiTestCase.setUp(self)
+    self._set_url(RepositoryBuildResource, build_uuid="FG86", repository="devtable/shared")
+
+  def test_delete_anonymous(self):
+    self._run_test('DELETE', 401, None, None)
+
+  def test_delete_freshuser(self):
+    self._run_test('DELETE', 403, 'freshuser', None)
+
+  def test_delete_reader(self):
+    self._run_test('DELETE', 403, 'reader', None)
+
+  def test_delete_devtable(self):
+    self._run_test('DELETE', 404, 'devtable', None)
+
+
+class TestRepositoryBuildResourceFg86BuynlargeOrgrepo(ApiTestCase):
+  def setUp(self):
+    ApiTestCase.setUp(self)
+    self._set_url(RepositoryBuildResource, build_uuid="FG86", repository="buynlarge/orgrepo")
+
+  def test_delete_anonymous(self):
+    self._run_test('DELETE', 401, None, None)
+
+  def test_delete_freshuser(self):
+    self._run_test('DELETE', 403, 'freshuser', None)
+
+  def test_delete_reader(self):
+    self._run_test('DELETE', 403, 'reader', None)
+
+  def test_delete_devtable(self):
+    self._run_test('DELETE', 404, 'devtable', None)
+
+
 class TestRepositoryBuildLogsS5j8PublicPublicrepo(ApiTestCase):
   def setUp(self):
     ApiTestCase.setUp(self)

@@ -17,7 +17,8 @@ from endpoints.api.team import TeamMember, TeamMemberList, TeamMemberInvite, Org
 from endpoints.api.tag import RepositoryTagImages, RepositoryTag
 from endpoints.api.search import FindRepositories, EntitySearch
 from endpoints.api.image import RepositoryImage, RepositoryImageList
-from endpoints.api.build import RepositoryBuildStatus, RepositoryBuildLogs, RepositoryBuildList
+from endpoints.api.build import (RepositoryBuildStatus, RepositoryBuildLogs, RepositoryBuildList,
+                                 RepositoryBuildResource)
 from endpoints.api.robot import (UserRobotList, OrgRobot, OrgRobotList, UserRobot,
                                  RegenerateUserRobot, RegenerateOrgRobot)
 from endpoints.api.trigger import (BuildTriggerActivate, BuildTriggerSources, BuildTriggerSubdirs,

@@ -1303,6 +1304,103 @@ class TestGetRepository(ApiTestCase):
     self.assertEquals(True, json['is_organization'])


+class TestRepositoryBuildResource(ApiTestCase):
+  def test_cancel_invalidbuild(self):
+    self.login(ADMIN_ACCESS_USER)
+
+    self.deleteResponse(RepositoryBuildResource,
+                        params=dict(repository=ADMIN_ACCESS_USER + '/simple', build_uuid='invalid'),
+                        expected_code=404)
+
+  def test_cancel_waitingbuild(self):
+    self.login(ADMIN_ACCESS_USER)
+
+    # Request a (fake) build.
+    json = self.postJsonResponse(RepositoryBuildList,
+                                 params=dict(repository=ADMIN_ACCESS_USER + '/simple'),
+                                 data=dict(file_id='foobarbaz'),
+                                 expected_code=201)
+
+    uuid = json['id']
+
+    # Check for the build.
+    json = self.getJsonResponse(RepositoryBuildList,
+                                params=dict(repository=ADMIN_ACCESS_USER + '/simple'))
+
+    self.assertEquals(1, len(json['builds']))
+    self.assertEquals(uuid, json['builds'][0]['id'])
+
+    # Cancel the build.
+    self.deleteResponse(RepositoryBuildResource,
+                        params=dict(repository=ADMIN_ACCESS_USER + '/simple', build_uuid=uuid),
+                        expected_code=201)
+
+    # Check for the build.
+    json = self.getJsonResponse(RepositoryBuildList,
+                                params=dict(repository=ADMIN_ACCESS_USER + '/simple'))
+
+    self.assertEquals(0, len(json['builds']))
+
+
+  def test_attemptcancel_scheduledbuild(self):
+    self.login(ADMIN_ACCESS_USER)
+
+    # Request a (fake) build.
+    json = self.postJsonResponse(RepositoryBuildList,
+                                 params=dict(repository=ADMIN_ACCESS_USER + '/simple'),
+                                 data=dict(file_id='foobarbaz'),
+                                 expected_code=201)
+
+    uuid = json['id']
+
+    # Check for the build.
+    json = self.getJsonResponse(RepositoryBuildList,
+                                params=dict(repository=ADMIN_ACCESS_USER + '/simple'))
+
+    self.assertEquals(1, len(json['builds']))
+    self.assertEquals(uuid, json['builds'][0]['id'])
+
+    # Set queue item to be picked up.
+    qi = database.QueueItem.get(id=1)
+    qi.available = False
+    qi.save()
+
+    # Try to cancel the build.
+    self.deleteResponse(RepositoryBuildResource,
+                        params=dict(repository=ADMIN_ACCESS_USER + '/simple', build_uuid=uuid),
+                        expected_code=400)
+
+
+  def test_attemptcancel_workingbuild(self):
+    self.login(ADMIN_ACCESS_USER)
+
+    # Request a (fake) build.
+    json = self.postJsonResponse(RepositoryBuildList,
+                                 params=dict(repository=ADMIN_ACCESS_USER + '/simple'),
+                                 data=dict(file_id='foobarbaz'),
+                                 expected_code=201)
+
+    uuid = json['id']
+
+    # Check for the build.
+    json = self.getJsonResponse(RepositoryBuildList,
+                                params=dict(repository=ADMIN_ACCESS_USER + '/simple'))
+
+    self.assertEquals(1, len(json['builds']))
+    self.assertEquals(uuid, json['builds'][0]['id'])
+
+    # Set the build to a different phase.
+    rb = database.RepositoryBuild.get(uuid=uuid)
+    rb.phase = database.BUILD_PHASE.BUILDING
+    rb.save()
+
+    # Try to cancel the build.
+    self.deleteResponse(RepositoryBuildResource,
+                        params=dict(repository=ADMIN_ACCESS_USER + '/simple', build_uuid=uuid),
+                        expected_code=400)
+
+
 class TestRepoBuilds(ApiTestCase):
   def test_getrepo_nobuilds(self):
     self.login(ADMIN_ACCESS_USER)