Phase 4 of the namespace-to-user migration: remove the namespace column from the database and remove the dependence on serialized namespace names in the workers and queues
This commit is contained in:
parent
2c5cc7990f
commit
e8b3d1cc4a
17 changed files with 273 additions and 123 deletions
|
@ -0,0 +1,61 @@
|
|||
"""Translate the queue names to reference namespace by id, remove the namespace column.
|
||||
|
||||
Revision ID: 2fb36d4be80d
|
||||
Revises: 3f4fe1194671
|
||||
Create Date: 2014-09-30 17:31:33.308490
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '2fb36d4be80d'
|
||||
down_revision = '3f4fe1194671'
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
import re
|
||||
from app import app
|
||||
from data.database import QueueItem, User, db
|
||||
|
||||
|
||||
# Matches queue names of the form '<prefix>/<namespace>/<remainder>', capturing
# (1) the prefix including its trailing slash, (2) the namespace component, and
# (3) the remainder including its leading slash.
NAMESPACE_EXTRACTOR = re.compile(r'^([a-z]+/)([a-z0-9_]+)(/.*$)')
|
||||
|
||||
|
||||
def upgrade(tables):
  """Rewrite queue names to reference namespaces by user id, then drop the
  denormalized 'namespace' column (and its index) from the repository table.
  """
  # Rename the namespace component of the existing queue items to reference user ids
  with app.config['DB_TRANSACTION_FACTORY'](db):
    for item in QueueItem.select():
      namespace_match = NAMESPACE_EXTRACTOR.match(item.queue_name)
      if namespace_match is not None:
        # Replace the namespace-name segment with the owning user's id.
        namespace_name = namespace_match.group(2)
        namespace_user = User.get(User.username == namespace_name)
        item.queue_name = '%s%s%s' % (namespace_match.group(1), str(namespace_user.id),
                                      namespace_match.group(3))
        item.save()
      else:
        # Every queue item is expected to match; abort the migration otherwise
        # so the transaction rolls back.
        raise RuntimeError('Invalid queue name: %s' % item.queue_name)

  op.drop_index('repository_namespace_name', table_name='repository')
  op.drop_column('repository', 'namespace')
|
||||
|
||||
|
||||
def downgrade(tables):
  """Restore the repository 'namespace' column (backfilled from the user table)
  and rewrite queue names back to namespace names instead of user ids.
  """
  # Add the namespace column back in and fill it in
  op.add_column('repository', sa.Column('namespace', sa.String(length=255)))
  conn = op.get_bind()
  # Backfill from the owning user's username; only touches rows still NULL.
  conn.execute('update repository set namespace = (select username from user where user.id = repository.namespace_user_id) where namespace is NULL')
  op.create_index('repository_namespace_name', 'repository', ['namespace', 'name'], unique=True)

  # Rename the namespace component of existing queue items to reference namespace strings
  with app.config['DB_TRANSACTION_FACTORY'](db):
    for item in QueueItem.select():
      namespace_match = NAMESPACE_EXTRACTOR.match(item.queue_name)
      if namespace_match is not None:
        # The middle segment currently holds a user id; map it back to a username.
        namespace_id = namespace_match.group(2)
        namespace_user = User.get(User.id == namespace_id)
        item.queue_name = '%s%s%s' % (namespace_match.group(1),
                                      str(namespace_user.username),
                                      namespace_match.group(3))
        item.save()
      else:
        # Unmatched names indicate corrupt queue data; fail so the transaction
        # rolls back.
        raise RuntimeError('Invalid queue name: %s' % item.queue_name)
|
|
@ -592,6 +592,13 @@ def get_user_by_id(user_db_id):
|
|||
return None
|
||||
|
||||
|
||||
def get_namespace_by_user_id(namespace_user_db_id):
  """Return the username for the non-robot user with the given database id.

  Raises InvalidUsernameException when no such non-robot user exists.
  """
  try:
    namespace_user = User.get(User.id == namespace_user_db_id, User.robot == False)
  except User.DoesNotExist:
    raise InvalidUsernameException('User with id does not exist: %s' % namespace_user_db_id)

  return namespace_user.username
|
||||
|
||||
|
||||
def get_user_or_org_by_customer_id(customer_id):
|
||||
try:
|
||||
return User.get(User.stripe_id == customer_id)
|
||||
|
@ -858,6 +865,15 @@ def change_password(user, new_password):
|
|||
delete_notifications_by_kind(user, 'password_required')
|
||||
|
||||
|
||||
def change_username(user, new_username):
  """Validate and persist a new username for the given user.

  Raises InvalidUsernameException when the proposed name fails validation.
  """
  is_valid, issue = validate_username(new_username)
  if not is_valid:
    raise InvalidUsernameException('Invalid username %s: %s' % (new_username, issue))

  user.username = new_username
  user.save()
|
||||
|
||||
|
||||
def change_invoice_email(user, invoice_email):
|
||||
user.invoice_email = invoice_email
|
||||
user.save()
|
||||
|
@ -1676,10 +1692,21 @@ def load_token_data(code):
|
|||
raise InvalidTokenException('Invalid delegate token code: %s' % code)
|
||||
|
||||
|
||||
def get_repository_build(namespace_name, repository_name, build_uuid):
|
||||
def _get_build_base_query():
  # Base query for repository builds: eagerly selects the build's trigger and
  # trigger service (left joins, since untriggered builds exist) plus the owning
  # repository and namespace, ordered newest-first by start time.
  return (RepositoryBuild
          .select(RepositoryBuild, RepositoryBuildTrigger, BuildTriggerService, Repository,
                  Namespace)
          .join(Repository)
          .join(Namespace, on=(Repository.namespace_user == Namespace.id))
          .switch(RepositoryBuild)
          .join(RepositoryBuildTrigger, JOIN_LEFT_OUTER)
          .join(BuildTriggerService, JOIN_LEFT_OUTER)
          .order_by(RepositoryBuild.started.desc()))
|
||||
|
||||
|
||||
def get_repository_build(build_uuid):
|
||||
try:
|
||||
query = list_repository_builds(namespace_name, repository_name, 1)
|
||||
return query.where(RepositoryBuild.uuid == build_uuid).get()
|
||||
return _get_build_base_query().where(RepositoryBuild.uuid == build_uuid).get()
|
||||
|
||||
except RepositoryBuild.DoesNotExist:
|
||||
msg = 'Unable to locate a build by id: %s' % build_uuid
|
||||
|
@ -1688,15 +1715,8 @@ def get_repository_build(namespace_name, repository_name, build_uuid):
|
|||
|
||||
def list_repository_builds(namespace_name, repository_name, limit,
|
||||
include_inactive=True):
|
||||
query = (RepositoryBuild
|
||||
.select(RepositoryBuild, RepositoryBuildTrigger, BuildTriggerService)
|
||||
.join(Repository)
|
||||
.join(Namespace, on=(Repository.namespace_user == Namespace.id))
|
||||
.switch(RepositoryBuild)
|
||||
.join(RepositoryBuildTrigger, JOIN_LEFT_OUTER)
|
||||
.join(BuildTriggerService, JOIN_LEFT_OUTER)
|
||||
query = (_get_build_base_query()
|
||||
.where(Repository.name == repository_name, Namespace.username == namespace_name)
|
||||
.order_by(RepositoryBuild.started.desc())
|
||||
.limit(limit))
|
||||
|
||||
if not include_inactive:
|
||||
|
@ -1760,21 +1780,23 @@ def create_repo_notification(repo, event_name, method_name, config):
|
|||
config_json=json.dumps(config))
|
||||
|
||||
|
||||
def get_repo_notification(uuid):
  # Look up a repository notification by uuid alone; callers that care about
  # ownership must verify the joined repository/namespace themselves.
  try:
    return (RepositoryNotification
            .select(RepositoryNotification, Repository, Namespace)
            .join(Repository)
            .join(Namespace, on=(Repository.namespace_user == Namespace.id))
            .where(RepositoryNotification.uuid == uuid)
            .get())
  except RepositoryNotification.DoesNotExist:
    raise InvalidNotificationException('No repository notification found with id: %s' % uuid)
|
||||
|
||||
|
||||
def delete_repo_notification(namespace_name, repository_name, uuid):
  """Delete the notification with the given uuid and return it.

  Since notifications are now looked up by uuid alone, verify that the found
  notification actually belongs to the named repository before deleting it.

  Raises InvalidNotificationException when the uuid is unknown (propagated from
  get_repo_notification) or belongs to a different repository.
  """
  found = get_repo_notification(uuid)
  if (found.repository.namespace_user.username != namespace_name or
      found.repository.name != repository_name):
    # Fixed typo ('notifiation') so the message matches get_repo_notification's.
    raise InvalidNotificationException('No repository notification found with id: %s' % uuid)
  found.delete_instance()
  return found
|
||||
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
from datetime import datetime, timedelta
|
||||
|
||||
from data.database import QueueItem, db
|
||||
from util.morecollections import AttrDict
|
||||
|
||||
|
||||
MINIMUM_EXTENSION = timedelta(seconds=20)
|
||||
|
@ -25,17 +26,17 @@ class WorkQueue(object):
|
|||
|
||||
def _running_jobs(self, now, name_match_query):
|
||||
return (QueueItem
|
||||
.select(QueueItem.queue_name)
|
||||
.where(QueueItem.available == False,
|
||||
QueueItem.processing_expires > now,
|
||||
QueueItem.queue_name ** name_match_query))
|
||||
.select(QueueItem.queue_name)
|
||||
.where(QueueItem.available == False,
|
||||
QueueItem.processing_expires > now,
|
||||
QueueItem.queue_name ** name_match_query))
|
||||
|
||||
def _available_jobs(self, now, name_match_query, running_query):
|
||||
return (QueueItem
|
||||
.select()
|
||||
.where(QueueItem.queue_name ** name_match_query, QueueItem.available_after <= now,
|
||||
((QueueItem.available == True) | (QueueItem.processing_expires <= now)),
|
||||
QueueItem.retries_remaining > 0, ~(QueueItem.queue_name << running_query)))
|
||||
.select()
|
||||
.where(QueueItem.queue_name ** name_match_query, QueueItem.available_after <= now,
|
||||
((QueueItem.available == True) | (QueueItem.processing_expires <= now)),
|
||||
QueueItem.retries_remaining > 0, ~(QueueItem.queue_name << running_query)))
|
||||
|
||||
def _name_match_query(self):
|
||||
return '%s%%' % self._canonical_name([self._queue_name] + self._canonical_name_match_list)
|
||||
|
@ -49,7 +50,7 @@ class WorkQueue(object):
|
|||
name_match_query = self._name_match_query()
|
||||
|
||||
running_query = self._running_jobs(now, name_match_query)
|
||||
running_count =running_query.distinct().count()
|
||||
running_count = running_query.distinct().count()
|
||||
|
||||
avialable_query = self._available_jobs(now, name_match_query, running_query)
|
||||
available_count = avialable_query.select(QueueItem.queue_name).distinct().count()
|
||||
|
@ -89,41 +90,49 @@ class WorkQueue(object):
|
|||
|
||||
item = None
|
||||
try:
|
||||
item = avail.order_by(QueueItem.id).get()
|
||||
item.available = False
|
||||
item.processing_expires = now + timedelta(seconds=processing_time)
|
||||
item.retries_remaining -= 1
|
||||
item.save()
|
||||
db_item = avail.order_by(QueueItem.id).get()
|
||||
db_item.available = False
|
||||
db_item.processing_expires = now + timedelta(seconds=processing_time)
|
||||
db_item.retries_remaining -= 1
|
||||
db_item.save()
|
||||
|
||||
item = AttrDict({
|
||||
'id': db_item.id,
|
||||
'body': db_item.body,
|
||||
})
|
||||
|
||||
self._currently_processing = True
|
||||
except QueueItem.DoesNotExist:
|
||||
self._currently_processing = False
|
||||
pass
|
||||
|
||||
# Return a view of the queue item rather than an active db object
|
||||
return item
|
||||
|
||||
def complete(self, completed_item):
|
||||
with self._transaction_factory(db):
|
||||
completed_item.delete_instance()
|
||||
completed_item_obj = QueueItem.get(QueueItem.id == completed_item.id)
|
||||
completed_item_obj.delete_instance()
|
||||
self._currently_processing = False
|
||||
|
||||
def incomplete(self, incomplete_item, retry_after=300, restore_retry=False):
|
||||
with self._transaction_factory(db):
|
||||
retry_date = datetime.utcnow() + timedelta(seconds=retry_after)
|
||||
incomplete_item.available_after = retry_date
|
||||
incomplete_item.available = True
|
||||
incomplete_item_obj = QueueItem.get(QueueItem.id == incomplete_item.id)
|
||||
incomplete_item_obj.available_after = retry_date
|
||||
incomplete_item_obj.available = True
|
||||
|
||||
if restore_retry:
|
||||
incomplete_item.retries_remaining += 1
|
||||
incomplete_item_obj.retries_remaining += 1
|
||||
|
||||
incomplete_item.save()
|
||||
incomplete_item_obj.save()
|
||||
self._currently_processing = False
|
||||
|
||||
@staticmethod
|
||||
def extend_processing(queue_item, seconds_from_now):
|
||||
def extend_processing(self, queue_item, seconds_from_now):
|
||||
new_expiration = datetime.utcnow() + timedelta(seconds=seconds_from_now)
|
||||
|
||||
# Only actually write the new expiration to the db if it moves the expiration some minimum
|
||||
if new_expiration - queue_item.processing_expires > MINIMUM_EXTENSION:
|
||||
queue_item.processing_expires = new_expiration
|
||||
queue_item.save()
|
||||
queue_item_obj = QueueItem.get(QueueItem.id == queue_item.id)
|
||||
if new_expiration - queue_item_obj.processing_expires > MINIMUM_EXTENSION:
|
||||
with self._transaction_factory(db):
|
||||
queue_item_obj.processing_expires = new_expiration
|
||||
queue_item_obj.save()
|
||||
|
|
|
@ -196,8 +196,9 @@ class RepositoryBuildStatus(RepositoryParamResource):
|
|||
@nickname('getRepoBuildStatus')
|
||||
def get(self, namespace, repository, build_uuid):
|
||||
""" Return the status for the builds specified by the build uuids. """
|
||||
build = model.get_repository_build(namespace, repository, build_uuid)
|
||||
if not build:
|
||||
build = model.get_repository_build(build_uuid)
|
||||
if (not build or build.repository.name != repository or
|
||||
build.repository.namespace_user.username != namespace):
|
||||
raise NotFound()
|
||||
|
||||
can_write = ModifyRepositoryPermission(namespace, repository).can()
|
||||
|
@ -213,7 +214,10 @@ class RepositoryBuildLogs(RepositoryParamResource):
|
|||
""" Return the build logs for the build specified by the build uuid. """
|
||||
response_obj = {}
|
||||
|
||||
build = model.get_repository_build(namespace, repository, build_uuid)
|
||||
build = model.get_repository_build(build_uuid)
|
||||
if (not build or build.repository.name != repository or
|
||||
build.repository.namespace_user.username != namespace):
|
||||
raise NotFound()
|
||||
|
||||
# If the logs have been archived, just redirect to the completed archive
|
||||
if build.logs_archived:
|
||||
|
|
|
@ -102,10 +102,14 @@ class RepositoryNotification(RepositoryParamResource):
|
|||
def get(self, namespace, repository, uuid):
|
||||
""" Get information for the specified notification. """
|
||||
try:
|
||||
notification = model.get_repo_notification(namespace, repository, uuid)
|
||||
notification = model.get_repo_notification(uuid)
|
||||
except model.InvalidNotificationException:
|
||||
raise NotFound()
|
||||
|
||||
if (notification.repository.namespace_user.username != namespace or
|
||||
notification.repository.name != repository):
|
||||
raise NotFound()
|
||||
|
||||
return notification_view(notification)
|
||||
|
||||
@require_repo_admin
|
||||
|
@ -129,14 +133,18 @@ class TestRepositoryNotification(RepositoryParamResource):
|
|||
def post(self, namespace, repository, uuid):
|
||||
""" Queues a test notification for this repository. """
|
||||
try:
|
||||
notification = model.get_repo_notification(namespace, repository, uuid)
|
||||
notification = model.get_repo_notification(uuid)
|
||||
except model.InvalidNotificationException:
|
||||
raise NotFound()
|
||||
|
||||
if (notification.repository.namespace_user.username != namespace or
|
||||
notification.repository.name != repository):
|
||||
raise NotFound()
|
||||
|
||||
event_info = NotificationEvent.get_event(notification.event.name)
|
||||
sample_data = event_info.get_sample_data(repository=notification.repository)
|
||||
notification_data = build_notification_data(notification, sample_data)
|
||||
notification_queue.put([namespace, repository, notification.event.name],
|
||||
json.dumps(notification_data))
|
||||
notification_queue.put([str(notification.repository.namespace_user.id), repository,
|
||||
notification.event.name], json.dumps(notification_data))
|
||||
|
||||
return {}
|
||||
|
|
|
@ -139,6 +139,10 @@ class User(ApiResource):
|
|||
'type': 'string',
|
||||
'description': 'The user\'s email address',
|
||||
},
|
||||
'username': {
|
||||
'type': 'string',
|
||||
'description': 'The user\'s username',
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
@ -189,6 +193,14 @@ class User(ApiResource):
|
|||
send_change_email(user.username, user_data['email'], code.code)
|
||||
else:
|
||||
model.update_email(user, new_email, auto_verify=not features.MAILING)
|
||||
|
||||
if 'username' in user_data and user_data['username'] != user.username:
|
||||
new_username = user_data['username']
|
||||
if model.get_user_or_org(new_username) is not None:
|
||||
# Username already used
|
||||
raise request_error(message='Username is already in use')
|
||||
|
||||
model.change_username(user, new_username)
|
||||
|
||||
except model.InvalidPasswordException, ex:
|
||||
raise request_error(exception=ex)
|
||||
|
|
|
@ -213,7 +213,7 @@ def start_build(repository, dockerfile_id, tags, build_name, subdir, manual,
|
|||
|
||||
job_config = {
|
||||
'docker_tags': tags,
|
||||
'repository': repo_path,
|
||||
'registry': host,
|
||||
'build_subdir': subdir
|
||||
}
|
||||
|
||||
|
@ -221,10 +221,8 @@ def start_build(repository, dockerfile_id, tags, build_name, subdir, manual,
|
|||
dockerfile_id, build_name,
|
||||
trigger, pull_robot_name=pull_robot_name)
|
||||
|
||||
dockerfile_build_queue.put([repository.namespace_user.username, repository.name], json.dumps({
|
||||
dockerfile_build_queue.put([str(repository.namespace_user.id), repository.name], json.dumps({
|
||||
'build_uuid': build_request.uuid,
|
||||
'namespace': repository.namespace_user.username,
|
||||
'repository': repository.name,
|
||||
'pull_credentials': model.get_pull_credentials(pull_robot_name) if pull_robot_name else None
|
||||
}), retries_remaining=1)
|
||||
|
||||
|
|
|
@ -30,8 +30,6 @@ def build_event_data(repo, extra_data={}, subpage=None):
|
|||
def build_notification_data(notification, event_data):
|
||||
return {
|
||||
'notification_uuid': notification.uuid,
|
||||
'repository_namespace': notification.repository.namespace_user.username,
|
||||
'repository_name': notification.repository.name,
|
||||
'event_data': event_data
|
||||
}
|
||||
|
||||
|
@ -43,5 +41,5 @@ def spawn_notification(repo, event_name, extra_data={}, subpage=None, pathargs=[
|
|||
event_name=event_name)
|
||||
for notification in notifications:
|
||||
notification_data = build_notification_data(notification, event_data)
|
||||
path = [repo.namespace_user.username, repo.name, event_name] + pathargs
|
||||
path = [str(repo.namespace_user.id), repo.name, event_name] + pathargs
|
||||
notification_queue.put(path, json.dumps(notification_data))
|
||||
|
|
|
@ -255,8 +255,9 @@ def put_image_layer(namespace, repository, image_id):
|
|||
# The layer is ready for download, send a job to the work queue to
|
||||
# process it.
|
||||
profile.debug('Adding layer to diff queue')
|
||||
image_diff_queue.put([namespace, repository, image_id], json.dumps({
|
||||
'namespace': namespace,
|
||||
repo = model.get_repository(namespace, repository)
|
||||
image_diff_queue.put([str(repo.namespace_user.id), repository, image_id], json.dumps({
|
||||
'namespace_user_id': repo.namespace_user.id,
|
||||
'repository': repository,
|
||||
'image_id': image_id,
|
||||
}))
|
||||
|
@ -313,8 +314,9 @@ def put_image_checksum(namespace, repository, image_id):
|
|||
# The layer is ready for download, send a job to the work queue to
|
||||
# process it.
|
||||
profile.debug('Adding layer to diff queue')
|
||||
image_diff_queue.put([namespace, repository, image_id], json.dumps({
|
||||
'namespace': namespace,
|
||||
repo = model.get_repository(namespace, repository)
|
||||
image_diff_queue.put([str(repo.namespace_user.id), repository, image_id], json.dumps({
|
||||
'namespace_user_id': repo.namespace_user.id,
|
||||
'repository': repository,
|
||||
'image_id': image_id,
|
||||
}))
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
var TEAM_PATTERN = '^[a-zA-Z][a-zA-Z0-9]+$';
|
||||
var ROBOT_PATTERN = '^[a-zA-Z][a-zA-Z0-9]{3,29}$';
|
||||
var USER_PATTERN = '^[a-z0-9_]{4,30}$';
|
||||
|
||||
$.fn.clipboardCopy = function() {
|
||||
if (zeroClipboardSupported) {
|
||||
|
|
|
@ -1676,6 +1676,8 @@ function UserAdminCtrl($scope, $timeout, $location, ApiService, PlanService, Use
|
|||
|
||||
$scope.logsShown = 0;
|
||||
$scope.invoicesShown = 0;
|
||||
|
||||
$scope.USER_PATTERN = USER_PATTERN;
|
||||
|
||||
$scope.loadAuthedApps = function() {
|
||||
if ($scope.authorizedApps) { return; }
|
||||
|
@ -1752,6 +1754,24 @@ function UserAdminCtrl($scope, $timeout, $location, ApiService, PlanService, Use
|
|||
});
|
||||
};
|
||||
|
||||
$scope.changeUsername = function() {
|
||||
UserService.load();
|
||||
|
||||
$scope.updatingUser = true;
|
||||
|
||||
ApiService.changeUserDetails($scope.cuser).then(function() {
|
||||
$scope.updatingUser = false;
|
||||
|
||||
// Reset the form.
|
||||
delete $scope.cuser['username'];
|
||||
|
||||
$scope.changeUsernameForm.$setPristine();
|
||||
}, function(result) {
|
||||
$scope.updatingUser = false;
|
||||
UIService.showFormError('#changeUsernameForm', result);
|
||||
});
|
||||
};
|
||||
|
||||
$scope.changeEmail = function() {
|
||||
UIService.hidePopover('#changeEmailForm');
|
||||
|
||||
|
@ -1764,7 +1784,7 @@ function UserAdminCtrl($scope, $timeout, $location, ApiService, PlanService, Use
|
|||
$scope.sentEmail = $scope.cuser.email;
|
||||
|
||||
// Reset the form.
|
||||
delete $scope.cuser['repeatEmail'];
|
||||
delete $scope.cuser['email'];
|
||||
|
||||
$scope.changeEmailForm.$setPristine();
|
||||
}, function(result) {
|
||||
|
|
|
@ -38,6 +38,7 @@
|
|||
<li quay-show="Features.USER_LOG_ACCESS || hasPaidBusinessPlan">
|
||||
<a href="javascript:void(0)" data-toggle="tab" data-target="#logs" ng-click="loadLogs()">Usage Logs</a>
|
||||
</li>
|
||||
<li><a href="javascript:void(0)" data-toggle="tab" data-target="#username">Change Username</a></li>
|
||||
<li quay-show="Config.AUTHENTICATION_TYPE == 'Database'">
|
||||
<a href="javascript:void(0)" data-toggle="tab" data-target="#migrate" id="migrateTab">Convert to Organization</a>
|
||||
</li>
|
||||
|
@ -234,6 +235,31 @@
|
|||
<div class="billing-invoices" user="user" makevisible="invoicesShown"></div>
|
||||
</div>
|
||||
|
||||
<!-- Change username tab -->
|
||||
<div id="username" class="tab-pane">
|
||||
<div class="row">
|
||||
<div class="panel">
|
||||
<div class="panel-title">Change Username</div>
|
||||
|
||||
<div class="loading" ng-show="updatingUser">
|
||||
<div class="quay-spinner 3x"></div>
|
||||
</div>
|
||||
|
||||
<span class="help-block" ng-show="changeUsernameSuccess">Username changed successfully</span>
|
||||
|
||||
<div ng-show="!updatingUser" class="panel-body">
|
||||
<form class="form-change col-md-6" id="changeUsernameForm" name="changeUsernameForm" ng-submit="changePassword()"
|
||||
ng-show="!awaitingConfirmation && !registering">
|
||||
<input type="text" class="form-control" placeholder="Your new username" ng-model="cuser.username" required
|
||||
ng-pattern="/{{ USER_PATTERN }}/">
|
||||
<button class="btn btn-danger" ng-disabled="changeUsernameForm.$invalid" type="submit"
|
||||
analytics-on analytics-event="change_username">Change Username</button>
|
||||
</form>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Convert to organization tab -->
|
||||
<div id="migrate" class="tab-pane" quay-show="Config.AUTHENTICATION_TYPE == 'Database'">
|
||||
<!-- Step 0 -->
|
||||
|
|
Binary file not shown.
|
@ -67,8 +67,7 @@ class TestBuildLogs(RedisBuildLogs):
|
|||
(phase, status) = status_wrapper
|
||||
|
||||
from data import model
|
||||
build_obj = model.get_repository_build(self.namespace, self.repository,
|
||||
self.test_build_id)
|
||||
build_obj = model.get_repository_build(self.test_build_id)
|
||||
build_obj.phase = phase
|
||||
build_obj.save()
|
||||
|
||||
|
@ -88,8 +87,7 @@ class TestBuildLogs(RedisBuildLogs):
|
|||
total_commands = random.randint(5, 20)
|
||||
for command_num in range(1, total_commands + 1):
|
||||
command_weight = random.randint(50, 100)
|
||||
script.append(self._generate_command(command_num, total_commands,
|
||||
command_weight))
|
||||
script.append(self._generate_command(command_num, total_commands, command_weight))
|
||||
|
||||
# we want 0 logs some percent of the time
|
||||
num_logs = max(0, random.randint(-50, 400))
|
||||
|
|
|
@ -1,30 +1,28 @@
|
|||
import logging
|
||||
import argparse
|
||||
|
||||
from app import image_diff_queue
|
||||
from data.model import DataModelException
|
||||
from data import model
|
||||
from endpoints.registry import process_image_changes
|
||||
from workers.worker import Worker
|
||||
|
||||
|
||||
root_logger = logging.getLogger('')
|
||||
root_logger.setLevel(logging.DEBUG)
|
||||
|
||||
FORMAT = '%(asctime)-15s - %(levelname)s - %(pathname)s - %(funcName)s - %(message)s'
|
||||
formatter = logging.Formatter(FORMAT)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class DiffsWorker(Worker):
|
||||
def process_queue_item(self, job_details):
|
||||
image_id = job_details['image_id']
|
||||
namespace = job_details['namespace']
|
||||
repository = job_details['repository']
|
||||
|
||||
# TODO switch to the namespace_user_id branch only once exisiting jobs have all gone through
|
||||
if 'namespace_user_id' in job_details:
|
||||
namespace = model.get_namespace_by_user_id(job_details['namespace_user_id'])
|
||||
else:
|
||||
namespace = job_details['namespace']
|
||||
|
||||
try:
|
||||
process_image_changes(namespace, repository, image_id)
|
||||
except DataModelException:
|
||||
except model.DataModelException:
|
||||
# This exception is unrecoverable, and the item should continue and be
|
||||
# marked as complete.
|
||||
msg = ('Image does not exist in database \'%s\' for repo \'%s/\'%s\'' %
|
||||
|
|
|
@ -38,7 +38,7 @@ TIMEOUT_PERIOD_MINUTES = 20
|
|||
CACHE_EXPIRATION_PERIOD_HOURS = 24
|
||||
NO_TAGS = ['<none>:<none>']
|
||||
RESERVATION_TIME = (TIMEOUT_PERIOD_MINUTES + 5) * 60
|
||||
DOCKER_BASE_URL = None # Set this if you want to use a different docker URL/socket.
|
||||
DOCKER_BASE_URL = os.environ.get('DOCKER_HOST', None)
|
||||
|
||||
|
||||
def matches_system_error(status_str):
|
||||
|
@ -130,8 +130,8 @@ class DockerfileBuildContext(object):
|
|||
# Note: We have two different clients here because we (potentially) login
|
||||
# with both, but with different credentials that we do not want shared between
|
||||
# the build and push operations.
|
||||
self._push_cl = StreamingDockerClient(timeout=1200, base_url = DOCKER_BASE_URL)
|
||||
self._build_cl = StreamingDockerClient(timeout=1200, base_url = DOCKER_BASE_URL)
|
||||
self._push_cl = StreamingDockerClient(timeout=1200, base_url=DOCKER_BASE_URL)
|
||||
self._build_cl = StreamingDockerClient(timeout=1200, base_url=DOCKER_BASE_URL)
|
||||
|
||||
dockerfile_path = os.path.join(self._build_dir, dockerfile_subdir,
|
||||
'Dockerfile')
|
||||
|
@ -223,20 +223,6 @@ class DockerfileBuildContext(object):
|
|||
raise RuntimeError(message)
|
||||
|
||||
def pull(self):
|
||||
# Login with the specified credentials (if any).
|
||||
if self._pull_credentials:
|
||||
logger.debug('Logging in with pull credentials: %s@%s',
|
||||
self._pull_credentials['username'], self._pull_credentials['registry'])
|
||||
|
||||
self._build_logger('Pulling base image: %s' % image_and_tag, log_data = {
|
||||
'phasestep': 'login',
|
||||
'username': self._pull_credentials['username'],
|
||||
'registry': self._pull_credentials['registry']
|
||||
})
|
||||
|
||||
self._build_cl.login(self._pull_credentials['username'], self._pull_credentials['password'],
|
||||
registry=self._pull_credentials['registry'], reauth=True)
|
||||
|
||||
# Pull the image, in case it was updated since the last build
|
||||
image_and_tag_tuple = self._parsed_dockerfile.get_image_and_tag()
|
||||
if image_and_tag_tuple is None or image_and_tag_tuple[0] is None:
|
||||
|
@ -245,10 +231,24 @@ class DockerfileBuildContext(object):
|
|||
|
||||
image_and_tag = ':'.join(image_and_tag_tuple)
|
||||
|
||||
self._build_logger('Pulling base image: %s' % image_and_tag, log_data = {
|
||||
'phasestep': 'pull',
|
||||
'repo_url': image_and_tag
|
||||
})
|
||||
# Login with the specified credentials (if any).
|
||||
if self._pull_credentials:
|
||||
logger.debug('Logging in with pull credentials: %s@%s',
|
||||
self._pull_credentials['username'], self._pull_credentials['registry'])
|
||||
|
||||
self._build_logger('Pulling base image: %s' % image_and_tag, log_data={
|
||||
'phasestep': 'login',
|
||||
'username': self._pull_credentials['username'],
|
||||
'registry': self._pull_credentials['registry']
|
||||
})
|
||||
|
||||
self._build_cl.login(self._pull_credentials['username'], self._pull_credentials['password'],
|
||||
registry=self._pull_credentials['registry'], reauth=True)
|
||||
else:
|
||||
self._build_logger('Pulling base image: %s' % image_and_tag, log_data={
|
||||
'phasestep': 'pull',
|
||||
'repo_url': image_and_tag
|
||||
})
|
||||
|
||||
pull_status = self._build_cl.pull(image_and_tag, stream=True)
|
||||
|
||||
|
@ -279,7 +279,7 @@ class DockerfileBuildContext(object):
|
|||
if key in status:
|
||||
fully_unwrapped = status[key]
|
||||
break
|
||||
|
||||
|
||||
if not fully_unwrapped:
|
||||
logger.debug('Status dict did not have any extractable keys and was: %s', status)
|
||||
elif isinstance(status, basestring):
|
||||
|
@ -289,7 +289,7 @@ class DockerfileBuildContext(object):
|
|||
|
||||
# Check for system errors when building.
|
||||
if matches_system_error(status_str):
|
||||
raise WorkerUnhealthyException(status_str)
|
||||
raise WorkerUnhealthyException(status_str)
|
||||
|
||||
logger.debug('Status: %s', status_str)
|
||||
step_increment = re.search(r'Step ([0-9]+) :', status_str)
|
||||
|
@ -481,8 +481,8 @@ class DockerfileBuildWorker(Worker):
|
|||
def watchdog(self):
|
||||
logger.debug('Running build watchdog code.')
|
||||
try:
|
||||
docker_cl = Client(base_url = DOCKER_BASE_URL)
|
||||
|
||||
docker_cl = Client(base_url=DOCKER_BASE_URL)
|
||||
|
||||
# Iterate the running containers and kill ones that have been running more than 20 minutes
|
||||
for container in docker_cl.containers():
|
||||
start_time = datetime.fromtimestamp(container['Created'])
|
||||
|
@ -502,9 +502,7 @@ class DockerfileBuildWorker(Worker):
|
|||
# Make sure we have more information for debugging problems
|
||||
sentry.client.user_context(job_details)
|
||||
|
||||
repository_build = model.get_repository_build(job_details['namespace'],
|
||||
job_details['repository'],
|
||||
job_details['build_uuid'])
|
||||
repository_build = model.get_repository_build(job_details['build_uuid'])
|
||||
|
||||
pull_credentials = job_details.get('pull_credentials', None)
|
||||
|
||||
|
@ -513,15 +511,21 @@ class DockerfileBuildWorker(Worker):
|
|||
resource_url = user_files.get_file_url(repository_build.resource_key, requires_cors=False)
|
||||
tag_names = job_config['docker_tags']
|
||||
build_subdir = job_config['build_subdir']
|
||||
repo = job_config['repository']
|
||||
|
||||
# TODO remove the top branch when there are no more jobs with a repository config
|
||||
if 'repository' in job_config:
|
||||
repo = job_config['repository']
|
||||
else:
|
||||
repo = '%s/%s/%s' % (job_config['registry'],
|
||||
repository_build.repository.namespace_user.username,
|
||||
repository_build.repository.name)
|
||||
|
||||
access_token = repository_build.access_token.code
|
||||
|
||||
log_appender = partial(build_logs.append_log_message,
|
||||
repository_build.uuid)
|
||||
log_appender = partial(build_logs.append_log_message, repository_build.uuid)
|
||||
|
||||
# Lookup and save the version of docker being used.
|
||||
docker_cl = Client(base_url = DOCKER_BASE_URL)
|
||||
docker_cl = Client(base_url=DOCKER_BASE_URL)
|
||||
docker_version = docker_cl.version().get('Version', '')
|
||||
dash = docker_version.find('-')
|
||||
|
||||
|
@ -529,14 +533,13 @@ class DockerfileBuildWorker(Worker):
|
|||
if dash > 0:
|
||||
docker_version = docker_version[:dash]
|
||||
|
||||
log_appender('initializing', build_logs.PHASE, log_data = {
|
||||
log_appender('initializing', build_logs.PHASE, log_data={
|
||||
'docker_version': docker_version
|
||||
})
|
||||
|
||||
log_appender('Docker version: %s' % docker_version)
|
||||
|
||||
start_msg = ('Starting job with resource url: %s repo: %s' % (resource_url,
|
||||
repo))
|
||||
start_msg = ('Starting job with resource url: %s repo: %s' % (resource_url, repo))
|
||||
logger.debug(start_msg)
|
||||
|
||||
docker_resource = requests.get(resource_url, stream=True)
|
||||
|
@ -592,7 +595,7 @@ class DockerfileBuildWorker(Worker):
|
|||
cur_message = ex.message or 'Error while unpacking build package'
|
||||
log_appender(cur_message, build_logs.ERROR)
|
||||
spawn_failure(cur_message, event_data)
|
||||
raise JobException(cur_message)
|
||||
raise JobException(cur_message)
|
||||
|
||||
# Start the build process.
|
||||
try:
|
||||
|
@ -637,14 +640,14 @@ class DockerfileBuildWorker(Worker):
|
|||
|
||||
# Spawn a notification that the build has completed.
|
||||
spawn_notification(repository_build.repository, 'build_success', event_data,
|
||||
subpage='build?current=%s' % repository_build.uuid,
|
||||
pathargs=['build', repository_build.uuid])
|
||||
subpage='build?current=%s' % repository_build.uuid,
|
||||
pathargs=['build', repository_build.uuid])
|
||||
|
||||
except WorkerUnhealthyException as exc:
|
||||
# Spawn a notification that the build has failed.
|
||||
log_appender('Worker has become unhealthy. Will retry shortly.', build_logs.ERROR)
|
||||
spawn_failure(exc.message, event_data)
|
||||
|
||||
|
||||
# Raise the exception to the queue.
|
||||
raise exc
|
||||
|
||||
|
|
|
@ -1,7 +1,4 @@
|
|||
import logging
|
||||
import argparse
|
||||
import requests
|
||||
import json
|
||||
|
||||
from app import notification_queue
|
||||
from workers.worker import Worker
|
||||
|
@ -12,11 +9,6 @@ from workers.worker import JobException
|
|||
|
||||
from data import model
|
||||
|
||||
root_logger = logging.getLogger('')
|
||||
root_logger.setLevel(logging.DEBUG)
|
||||
|
||||
FORMAT = '%(asctime)-15s - %(levelname)s - %(pathname)s - %(funcName)s - %(message)s'
|
||||
formatter = logging.Formatter(FORMAT)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
@ -24,10 +16,8 @@ logger = logging.getLogger(__name__)
|
|||
class NotificationWorker(Worker):
|
||||
def process_queue_item(self, job_details):
|
||||
notification_uuid = job_details['notification_uuid'];
|
||||
repo_namespace = job_details['repository_namespace']
|
||||
repo_name = job_details['repository_name']
|
||||
|
||||
notification = model.get_repo_notification(repo_namespace, repo_name, notification_uuid)
|
||||
notification = model.get_repo_notification(notification_uuid)
|
||||
if not notification:
|
||||
# Probably deleted.
|
||||
return
|
||||
|
|
Reference in a new issue