Switch to the Redis-backed build logs and status.

This commit is contained in:
yackob03 2014-02-03 19:08:37 -05:00
parent 5cc59e67a9
commit 5270066d6d
10 changed files with 292 additions and 141 deletions

View file

@ -6,6 +6,7 @@ from peewee import MySQLDatabase, SqliteDatabase
from storage.s3 import S3Storage from storage.s3 import S3Storage
from storage.local import LocalStorage from storage.local import LocalStorage
from data.userfiles import UserRequestFiles from data.userfiles import UserRequestFiles
from data.buildlogs import BuildLogs
from util import analytics from util import analytics
from test.teststorage import FakeStorage, FakeUserfiles from test.teststorage import FakeStorage, FakeUserfiles
@ -86,6 +87,10 @@ class S3Userfiles(AWSCredentials):
AWSCredentials.REGISTRY_S3_BUCKET) AWSCredentials.REGISTRY_S3_BUCKET)
class RedisBuildLogs(object):
BUILDLOGS = BuildLogs('logs.quay.io')
class StripeTestConfig(object): class StripeTestConfig(object):
STRIPE_SECRET_KEY = 'sk_test_PEbmJCYrLXPW0VRLSnWUiZ7Y' STRIPE_SECRET_KEY = 'sk_test_PEbmJCYrLXPW0VRLSnWUiZ7Y'
STRIPE_PUBLISHABLE_KEY = 'pk_test_uEDHANKm9CHCvVa2DLcipGRh' STRIPE_PUBLISHABLE_KEY = 'pk_test_uEDHANKm9CHCvVa2DLcipGRh'
@ -153,7 +158,7 @@ def logs_init_builder(level=logging.DEBUG, logfile=None):
class TestConfig(FlaskConfig, FakeStorage, EphemeralDB, FakeUserfiles, class TestConfig(FlaskConfig, FakeStorage, EphemeralDB, FakeUserfiles,
FakeAnalytics, StripeTestConfig): FakeAnalytics, StripeTestConfig, RedisBuildLogs):
LOGGING_CONFIG = logs_init_builder(logging.WARN) LOGGING_CONFIG = logs_init_builder(logging.WARN)
POPULATE_DB_TEST_DATA = True POPULATE_DB_TEST_DATA = True
TESTING = True TESTING = True
@ -162,7 +167,8 @@ class TestConfig(FlaskConfig, FakeStorage, EphemeralDB, FakeUserfiles,
class DebugConfig(FlaskConfig, MailConfig, LocalStorage, SQLiteDB, class DebugConfig(FlaskConfig, MailConfig, LocalStorage, SQLiteDB,
StripeTestConfig, MixpanelTestConfig, GitHubTestConfig, StripeTestConfig, MixpanelTestConfig, GitHubTestConfig,
DigitalOceanConfig, BuildNodeConfig, S3Userfiles): DigitalOceanConfig, BuildNodeConfig, S3Userfiles,
RedisBuildLogs):
LOGGING_CONFIG = logs_init_builder() LOGGING_CONFIG = logs_init_builder()
SEND_FILE_MAX_AGE_DEFAULT = 0 SEND_FILE_MAX_AGE_DEFAULT = 0
POPULATE_DB_TEST_DATA = True POPULATE_DB_TEST_DATA = True
@ -172,7 +178,7 @@ class DebugConfig(FlaskConfig, MailConfig, LocalStorage, SQLiteDB,
class LocalHostedConfig(FlaskConfig, MailConfig, S3Storage, RDSMySQL, class LocalHostedConfig(FlaskConfig, MailConfig, S3Storage, RDSMySQL,
StripeLiveConfig, MixpanelTestConfig, StripeLiveConfig, MixpanelTestConfig,
GitHubProdConfig, DigitalOceanConfig, GitHubProdConfig, DigitalOceanConfig,
BuildNodeConfig, S3Userfiles): BuildNodeConfig, S3Userfiles, RedisBuildLogs):
LOGGING_CONFIG = logs_init_builder() LOGGING_CONFIG = logs_init_builder()
SEND_FILE_MAX_AGE_DEFAULT = 0 SEND_FILE_MAX_AGE_DEFAULT = 0
@ -180,7 +186,7 @@ class LocalHostedConfig(FlaskConfig, MailConfig, S3Storage, RDSMySQL,
class ProductionConfig(FlaskProdConfig, MailConfig, S3Storage, RDSMySQL, class ProductionConfig(FlaskProdConfig, MailConfig, S3Storage, RDSMySQL,
StripeLiveConfig, MixpanelProdConfig, StripeLiveConfig, MixpanelProdConfig,
GitHubProdConfig, DigitalOceanConfig, BuildNodeConfig, GitHubProdConfig, DigitalOceanConfig, BuildNodeConfig,
S3Userfiles): S3Userfiles, RedisBuildLogs):
LOGGING_CONFIG = logs_init_builder(logfile='/mnt/logs/application.log') LOGGING_CONFIG = logs_init_builder(logfile='/mnt/logs/application.log')
SEND_FILE_MAX_AGE_DEFAULT = 0 SEND_FILE_MAX_AGE_DEFAULT = 0

56
data/buildlogs.py Normal file
View file

@ -0,0 +1,56 @@
import redis
import json
class BuildLogs(object):
  """
  Redis-backed storage for per-build log entries and build status.

  Log entries for a build live in a Redis list; the build's status lives in a
  separate Redis string key. Both are JSON-serialized.
  """

  def __init__(self, redis_host):
    self._redis = redis.StrictRedis(host=redis_host)

  @staticmethod
  def _logs_key(build_id):
    # Redis list key holding the ordered log entries for a build.
    return 'builds/%s/logs' % build_id

  def append_log_entry(self, build_id, log_obj):
    """
    Appends the serialized form of log_obj to the end of the log entry list
    and returns the new length of the list.
    """
    return self._redis.rpush(self._logs_key(build_id), json.dumps(log_obj))

  def append_log_message(self, build_id, log_message):
    """
    Wraps the message in an envelope and pushes it to the end of the log entry
    list, returning the new length of the list.
    """
    # Delegate to append_log_entry so the serialization and push logic lives
    # in exactly one place.
    return self.append_log_entry(build_id, {'message': log_message})

  def get_log_entries(self, build_id, start_index, end_index):
    """
    Returns a tuple of the current length of the list and an iterable of the
    requested log entries. End index is inclusive.
    """
    key = self._logs_key(build_id)
    llen = self._redis.llen(key)
    log_entries = self._redis.lrange(key, start_index, end_index)
    return (llen, (json.loads(entry) for entry in log_entries))

  @staticmethod
  def _status_key(build_id):
    # Redis string key holding the JSON-serialized status for a build.
    return 'builds/%s/status' % build_id

  def set_status(self, build_id, status_obj):
    """
    Sets the status key for this build to the JSON-serialized form of the
    supplied obj.
    """
    self._redis.set(self._status_key(build_id), json.dumps(status_obj))

  def get_status(self, build_id):
    """
    Loads the status information for the specified build id, or None if no
    status has been recorded.
    """
    fetched = self._redis.get(self._status_key(build_id))
    return json.loads(fetched) if fetched else None

View file

@ -1,5 +1,6 @@
import string import string
import logging import logging
import uuid
from random import SystemRandom from random import SystemRandom
from datetime import datetime from datetime import datetime
@ -20,6 +21,10 @@ def random_string_generator(length=16):
return random_string return random_string
def uuid_generator():
return str(uuid.uuid4())
class BaseModel(Model): class BaseModel(Model):
class Meta: class Meta:
database = db database = db
@ -125,7 +130,7 @@ class RepositoryPermission(BaseModel):
class PermissionPrototype(BaseModel): class PermissionPrototype(BaseModel):
org = ForeignKeyField(User, index=True, related_name='orgpermissionproto') org = ForeignKeyField(User, index=True, related_name='orgpermissionproto')
uuid = CharField() uuid = CharField(default=uuid_generator)
activating_user = ForeignKeyField(User, index=True, null=True, activating_user = ForeignKeyField(User, index=True, null=True,
related_name='userpermissionproto') related_name='userpermissionproto')
delegate_user = ForeignKeyField(User, related_name='receivingpermission', delegate_user = ForeignKeyField(User, related_name='receivingpermission',
@ -204,13 +209,12 @@ class RepositoryTag(BaseModel):
class RepositoryBuild(BaseModel): class RepositoryBuild(BaseModel):
repository = ForeignKeyField(Repository) uuid = CharField(default=uuid_generator, index=True)
repository = ForeignKeyField(Repository, index=True)
access_token = ForeignKeyField(AccessToken) access_token = ForeignKeyField(AccessToken)
resource_key = CharField() resource_key = CharField()
tag = CharField() tag = CharField()
build_node_id = IntegerField(null=True)
phase = CharField(default='waiting') phase = CharField(default='waiting')
status_url = CharField(null=True)
class QueueItem(BaseModel): class QueueItem(BaseModel):

View file

@ -4,9 +4,7 @@ import datetime
import dateutil.parser import dateutil.parser
import operator import operator
import json import json
import uuid
from datetime import timedelta
from database import * from database import *
from util.validation import * from util.validation import *
@ -728,8 +726,7 @@ def update_prototype_permission(org, uid, role_name):
def add_prototype_permission(org, role_name, activating_user, def add_prototype_permission(org, role_name, activating_user,
delegate_user=None, delegate_team=None): delegate_user=None, delegate_team=None):
new_role = Role.get(Role.name == role_name) new_role = Role.get(Role.name == role_name)
uid = str(uuid.uuid4()) return PermissionPrototype.create(org=org, role=new_role,
return PermissionPrototype.create(org=org, uuid=uid, role=new_role,
activating_user=activating_user, activating_user=activating_user,
delegate_user=delegate_user, delegate_team=delegate_team) delegate_user=delegate_user, delegate_team=delegate_team)
@ -1248,13 +1245,18 @@ def load_token_data(code):
raise InvalidTokenException('Invalid delegate token code: %s' % code) raise InvalidTokenException('Invalid delegate token code: %s' % code)
def get_repository_build(request_dbid): def get_repository_build(namespace_name, repository_name, build_uuid):
try: joined = RepositoryBuild.select().join(Repository)
return RepositoryBuild.get(RepositoryBuild.id == request_dbid) fetched = list(joined.where(Repository.name == repository_name,
except RepositoryBuild.DoesNotExist: Repository.namespace == namespace_name,
msg = 'Unable to locate a build by id: %s' % request_dbid RepositoryBuild.uuid == build_uuid))
if not fetched:
msg = 'Unable to locate a build by id: %s' % build_uuid
raise InvalidRepositoryBuildException(msg) raise InvalidRepositoryBuildException(msg)
return fetched[0]
def list_repository_builds(namespace_name, repository_name, def list_repository_builds(namespace_name, repository_name,
include_inactive=True): include_inactive=True):

View file

@ -31,6 +31,7 @@ from datetime import datetime, timedelta
store = app.config['STORAGE'] store = app.config['STORAGE']
user_files = app.config['USERFILES'] user_files = app.config['USERFILES']
build_logs = app.config['BUILDLOGS']
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
route_data = None route_data = None
@ -1141,35 +1142,65 @@ def get_repo(namespace, repository):
abort(403) # Permission denied abort(403) # Permission denied
def build_status_view(build_obj):
status = build_logs.get_status(build_obj.uuid)
return {
'id': build_obj.uuid,
'phase': build_obj.phase,
'status': status,
}
@api.route('/repository/<path:repository>/build/', methods=['GET']) @api.route('/repository/<path:repository>/build/', methods=['GET'])
@parse_repository_name @parse_repository_name
def get_repo_builds(namespace, repository): def get_repo_builds(namespace, repository):
permission = ReadRepositoryPermission(namespace, repository) permission = ReadRepositoryPermission(namespace, repository)
is_public = model.repository_is_public(namespace, repository) is_public = model.repository_is_public(namespace, repository)
if permission.can() or is_public: if permission.can() or is_public:
def build_view(build_obj):
# TODO(jake): Filter these logs if the current user can only *read* the repo.
if build_obj.status_url:
# Delegate the status to the build node
node_status = requests.get(build_obj.status_url).json()
node_status['id'] = build_obj.id
return node_status
# If there was no status url, do the best we can
# The format of this block should mirror that of the buildserver.
return {
'id': build_obj.id,
'total_commands': None,
'current_command': None,
'push_completion': 0.0,
'status': build_obj.phase,
'message': None,
'image_completion': {},
}
builds = model.list_repository_builds(namespace, repository) builds = model.list_repository_builds(namespace, repository)
return jsonify({ return jsonify({
'builds': [build_view(build) for build in builds] 'builds': [build_status_view(build) for build in builds]
})
abort(403) # Permission denied
@api.route('/repository/<path:repository>/build/<build_uuid>/status',
methods=['GET'])
@parse_repository_name
def get_repo_build_status(namespace, repository, build_uuid):
permission = ReadRepositoryPermission(namespace, repository)
is_public = model.repository_is_public(namespace, repository)
if permission.can() or is_public:
build = model.get_repository_build(namespace, repository, build_uuid)
return jsonify(build_status_view(build))
abort(403) # Permission denied
@api.route('/repository/<path:repository>/build/<build_uuid>/logs',
methods=['GET'])
@parse_repository_name
def get_repo_build_logs(namespace, repository, build_uuid):
permission = ModifyRepositoryPermission(namespace, repository)
if permission.can():
build = model.get_repository_build(namespace, repository, build_uuid)
start = request.args.get('start', -1000)
end = request.args.get('end', -1)
count, logs = build_logs.get_log_entries(build.uuid, start, end)
if start < 0:
start = max(0, count + start)
if end < 0:
end = count + end
return jsonify({
'start': start,
'end': end,
'total': count,
'logs': [log for log in logs],
}) })
abort(403) # Permission denied abort(403) # Permission denied
@ -1191,15 +1222,21 @@ def request_repo_build(namespace, repository):
tag = '%s/%s/%s' % (host, repo.namespace, repo.name) tag = '%s/%s/%s' % (host, repo.namespace, repo.name)
build_request = model.create_repository_build(repo, token, dockerfile_id, build_request = model.create_repository_build(repo, token, dockerfile_id,
tag) tag)
dockerfile_build_queue.put(json.dumps({'build_id': build_request.id})) dockerfile_build_queue.put(json.dumps({
'build_uuid': build_request.uuid,
'namespace': namespace,
'repository': repository,
}))
log_action('build_dockerfile', namespace, log_action('build_dockerfile', namespace,
{'repo': repository, 'namespace': namespace, {'repo': repository, 'namespace': namespace,
'fileid': dockerfile_id}, repo=repo) 'fileid': dockerfile_id}, repo=repo)
resp = jsonify({ resp = jsonify(build_status_view(build_request))
'started': True repo_string = '%s/%s' % (namespace, repository)
}) resp.headers['Location'] = url_for('api.get_repo_build_status',
repository=repo_string,
build_uuid=build_request.uuid)
resp.status_code = 201 resp.status_code = 201
return resp return resp

View file

@ -19,3 +19,5 @@ paramiko
python-digitalocean python-digitalocean
xhtml2pdf xhtml2pdf
logstash_formatter logstash_formatter
redis
hiredis

View file

@ -17,6 +17,7 @@ ecdsa==0.10
gevent==1.0 gevent==1.0
greenlet==0.4.2 greenlet==0.4.2
gunicorn==18.0 gunicorn==18.0
hiredis==0.1.2
html5lib==1.0b3 html5lib==1.0b3
itsdangerous==0.23 itsdangerous==0.23
lockfile==0.9.1 lockfile==0.9.1
@ -31,6 +32,7 @@ pycrypto==2.6.1
python-daemon==1.6 python-daemon==1.6
python-dateutil==2.2 python-dateutil==2.2
python-digitalocean==0.6 python-digitalocean==0.6
redis==2.9.1
reportlab==2.7 reportlab==2.7
requests==2.2.1 requests==2.2.1
six==1.5.2 six==1.5.2

View file

@ -2475,13 +2475,13 @@ quayApp.directive('buildStatus', function () {
}, },
controller: function($scope, $element) { controller: function($scope, $element) {
$scope.getBuildProgress = function(buildInfo) { $scope.getBuildProgress = function(buildInfo) {
switch (buildInfo.status) { switch (buildInfo.phase) {
case 'building': case 'building':
return (buildInfo.current_command / buildInfo.total_commands) * 100; return (buildInfo.status.current_command / buildInfo.status.total_commands) * 100;
break; break;
case 'pushing': case 'pushing':
return buildInfo.push_completion * 100; return buildInfo.status.push_completion * 100;
break; break;
case 'complete': case 'complete':
@ -2499,7 +2499,7 @@ quayApp.directive('buildStatus', function () {
}; };
$scope.getBuildMessage = function(buildInfo) { $scope.getBuildMessage = function(buildInfo) {
switch (buildInfo.status) { switch (buildInfo.phase) {
case 'initializing': case 'initializing':
return 'Starting Dockerfile build'; return 'Starting Dockerfile build';
break; break;
@ -2519,7 +2519,7 @@ quayApp.directive('buildStatus', function () {
break; break;
case 'error': case 'error':
return 'Dockerfile build failed: ' + buildInfo.message; return 'Dockerfile build failed.';
break; break;
} }
}; };

Binary file not shown.

View file

@ -25,6 +25,28 @@ formatter = logging.Formatter(FORMAT)
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
user_files = app.config['USERFILES']
build_logs = app.config['BUILDLOGS']
class StatusWrapper(object):
def __init__(self, build_uuid):
self._uuid = build_uuid
self._status = {
'total_commands': None,
'current_command': None,
'push_completion': 0.0,
'image_completion': {},
}
self.__exit__(None, None, None)
def __enter__(self):
return self._status
def __exit__(self, exc_type, value, traceback):
build_logs.set_status(self._uuid, self._status)
class DockerfileBuildWorker(Worker): class DockerfileBuildWorker(Worker):
def __init__(self, *vargs, **kwargs): def __init__(self, *vargs, **kwargs):
@ -75,23 +97,26 @@ class DockerfileBuildWorker(Worker):
return float(sent_bytes)/total_bytes*percentage_with_sizes return float(sent_bytes)/total_bytes*percentage_with_sizes
@staticmethod @staticmethod
def __build_image(build_dir, tag_name, num_steps, result_object): def __build_image(build_dir, tag_name, num_steps, result_object, uuid):
try:
logger.debug('Starting build.') logger.debug('Starting build.')
docker_cl = Client(timeout=1200) docker_cl = Client(timeout=1200)
result_object['status'] = 'building' with result_object as status:
status['total_commands'] = num_steps
build_status = docker_cl.build(path=build_dir, tag=tag_name, stream=True) build_status = docker_cl.build(path=build_dir, tag=tag_name, stream=True)
current_step = 0 current_step = 0
built_image = None built_image = None
for status in build_status: for status in build_status:
# logger.debug('Status: %s', str(status)) logger.debug('Status: %s', str(status))
build_logs.append_log_message(uuid, str(status))
step_increment = re.search(r'Step ([0-9]+) :', status) step_increment = re.search(r'Step ([0-9]+) :', status)
if step_increment: if step_increment:
current_step = int(step_increment.group(1)) current_step = int(step_increment.group(1))
logger.debug('Step now: %s/%s' % (current_step, num_steps)) logger.debug('Step now: %s/%s' % (current_step, num_steps))
result_object['current_command'] = current_step with result_object as status:
status['current_command'] = current_step
continue continue
complete = re.match(r'Successfully built ([a-z0-9]+)$', status) complete = re.match(r'Successfully built ([a-z0-9]+)$', status)
@ -104,23 +129,16 @@ class DockerfileBuildWorker(Worker):
# Get the image count # Get the image count
if not built_image: if not built_image:
result_object['status'] = 'error'
result_object['message'] = 'Unable to build dockerfile.'
return return
return built_image return built_image
except Exception as exc:
logger.exception('Exception when processing request.')
result_object['status'] = 'error'
result_object['message'] = str(exc.message)
@staticmethod @staticmethod
def __push_image(built_image, token, tag_name, result_object): def __push_image(built_image, token, tag_name, result_object):
try:
# Login to the registry # Login to the registry
host = re.match(r'([a-z0-9.:]+)/.+/.+$', tag_name) host = re.match(r'([a-z0-9.:]+)/.+/.+$', tag_name)
if not host: if not host:
raise Exception('Invalid tag name: %s' % tag_name) raise RuntimeError('Invalid tag name: %s' % tag_name)
docker_cl = Client(timeout=1200) docker_cl = Client(timeout=1200)
@ -136,9 +154,9 @@ class DockerfileBuildWorker(Worker):
history = json.loads(docker_cl.history(built_image)) history = json.loads(docker_cl.history(built_image))
num_images = len(history) num_images = len(history)
result_object['total_images'] = num_images with result_object as status:
status['total_images'] = num_images
result_object['status'] = 'pushing'
logger.debug('Pushing to tag name: %s' % tag_name) logger.debug('Pushing to tag name: %s' % tag_name)
resp = docker_cl.push(tag_name, stream=True) resp = docker_cl.push(tag_name, stream=True)
@ -154,23 +172,19 @@ class DockerfileBuildWorker(Worker):
detail = status[u'progressDetail'] detail = status[u'progressDetail']
if u'current' in detail and 'total' in detail: if u'current' in detail and 'total' in detail:
images = result_object['image_completion'] with result_object as status:
images = status['image_completion']
images[image_id] = detail images[image_id] = detail
result_object['push_completion'] = \ status['push_completion'] = \
DockerfileBuildWorker.__total_completion(images, num_images) DockerfileBuildWorker.__total_completion(images, num_images)
elif u'errorDetail' in status: elif u'errorDetail' in status:
result_object['status'] = 'error' message = 'Error pushing image.'
if u'message' in status[u'errorDetail']: if u'message' in status[u'errorDetail']:
result_object['message'] = str(status[u'errorDetail'][u'message']) message = str(status[u'errorDetail'][u'message'])
return
result_object['status'] = 'complete' raise RuntimeError(message)
except Exception as exc:
logger.exception('Exception when processing request.')
result_object['status'] = 'error'
result_object['message'] = str(exc.message)
@staticmethod @staticmethod
def __cleanup(): def __cleanup():
@ -215,47 +229,75 @@ class DockerfileBuildWorker(Worker):
raise RuntimeError('Image was not removed: %s' % image['Id']) raise RuntimeError('Image was not removed: %s' % image['Id'])
def process_queue_item(self, job_details): def process_queue_item(self, job_details):
repository_build = model.get_repository_build(job_details['build_id']) repository_build = model.get_repository_build(job_details['namespace'],
job_details['repository'],
job_details['build_uuid'])
user_files = app.config['USERFILES']
resource_url = user_files.get_file_url(repository_build.resource_key) resource_url = user_files.get_file_url(repository_build.resource_key)
tag_name = repository_build.tag tag_name = repository_build.tag
access_token = repository_build.access_token.code access_token = repository_build.access_token.code
feedback = { result_object = StatusWrapper(repository_build.uuid)
'total_commands': None,
'current_command': None,
'push_completion': 0.0,
'status': 'waiting',
'message': None,
'image_completion': {},
}
logger.debug('Starting job with resource url: %s tag: %s and token: %s' % start_msg = ('Starting job with resource url: %s tag: %s and token: %s' %
(resource_url, tag_name, access_token)) (resource_url, tag_name, access_token))
logger.debug(start_msg)
build_logs.append_log_message(repository_build.uuid, start_msg)
docker_resource = requests.get(resource_url) docker_resource = requests.get(resource_url)
c_type = docker_resource.headers['content-type'] c_type = docker_resource.headers['content-type']
logger.info('Request to build file of type: %s with tag: %s' % filetype_msg = ('Request to build file of type: %s with tag: %s' %
(c_type, tag_name)) (c_type, tag_name))
logger.info(filetype_msg)
build_logs.append_log_message(repository_build.uuid, filetype_msg)
if c_type not in self._mime_processors: if c_type not in self._mime_processors:
raise Exception('Invalid dockerfile content type: %s' % c_type) raise RuntimeError('Invalid dockerfile content type: %s' % c_type)
build_dir = self._mime_processors[c_type](docker_resource) build_dir = self._mime_processors[c_type](docker_resource)
dockerfile_path = os.path.join(build_dir, "Dockerfile") dockerfile_path = os.path.join(build_dir, "Dockerfile")
num_steps = DockerfileBuildWorker.__count_steps(dockerfile_path) num_steps = DockerfileBuildWorker.__count_steps(dockerfile_path)
logger.debug('Dockerfile had %s steps' % num_steps)
steps_msg = 'Dockerfile had %s steps' % num_steps
logger.debug(steps_msg)
build_logs.append_log_message(repository_build.uuid, steps_msg)
uuid = repository_build.uuid
repository_build.phase = 'building'
repository_build.save()
try:
built_image = DockerfileBuildWorker.__build_image(build_dir, tag_name, built_image = DockerfileBuildWorker.__build_image(build_dir, tag_name,
num_steps, feedback) num_steps,
result_object, uuid)
if not built_image:
repository_build.phase = 'error'
repository_build.save()
build_logs.append_log_message(uuid, 'Unable to build dockerfile.')
return False
repository_build.phase = 'pushing'
repository_build.save()
DockerfileBuildWorker.__push_image(built_image, access_token, tag_name, DockerfileBuildWorker.__push_image(built_image, access_token, tag_name,
feedback) result_object)
DockerfileBuildWorker.__cleanup() repository_build.phase = 'complete'
repository_build.save()
# TODO turn cleanup on before pushing to prod
# DockerfileBuildWorker.__cleanup()
except Exception as exc:
logger.exception('Exception when processing request.')
repository_build.phase = 'error'
repository_build.save()
build_logs.append_log_message(uuid, exc.message)
return False
return True
desc = 'Worker daemon to monitor dockerfile build' desc = 'Worker daemon to monitor dockerfile build'