Switch to the redis backed build logs and status.

This commit is contained in:
yackob03 2014-02-03 19:08:37 -05:00
parent 5cc59e67a9
commit 5270066d6d
10 changed files with 292 additions and 141 deletions

View file

@ -6,6 +6,7 @@ from peewee import MySQLDatabase, SqliteDatabase
from storage.s3 import S3Storage
from storage.local import LocalStorage
from data.userfiles import UserRequestFiles
from data.buildlogs import BuildLogs
from util import analytics
from test.teststorage import FakeStorage, FakeUserfiles
@ -86,6 +87,10 @@ class S3Userfiles(AWSCredentials):
AWSCredentials.REGISTRY_S3_BUCKET)
class RedisBuildLogs(object):
BUILDLOGS = BuildLogs('logs.quay.io')
class StripeTestConfig(object):
STRIPE_SECRET_KEY = 'sk_test_PEbmJCYrLXPW0VRLSnWUiZ7Y'
STRIPE_PUBLISHABLE_KEY = 'pk_test_uEDHANKm9CHCvVa2DLcipGRh'
@ -153,7 +158,7 @@ def logs_init_builder(level=logging.DEBUG, logfile=None):
class TestConfig(FlaskConfig, FakeStorage, EphemeralDB, FakeUserfiles,
FakeAnalytics, StripeTestConfig):
FakeAnalytics, StripeTestConfig, RedisBuildLogs):
LOGGING_CONFIG = logs_init_builder(logging.WARN)
POPULATE_DB_TEST_DATA = True
TESTING = True
@ -162,7 +167,8 @@ class TestConfig(FlaskConfig, FakeStorage, EphemeralDB, FakeUserfiles,
class DebugConfig(FlaskConfig, MailConfig, LocalStorage, SQLiteDB,
StripeTestConfig, MixpanelTestConfig, GitHubTestConfig,
DigitalOceanConfig, BuildNodeConfig, S3Userfiles):
DigitalOceanConfig, BuildNodeConfig, S3Userfiles,
RedisBuildLogs):
LOGGING_CONFIG = logs_init_builder()
SEND_FILE_MAX_AGE_DEFAULT = 0
POPULATE_DB_TEST_DATA = True
@ -172,7 +178,7 @@ class DebugConfig(FlaskConfig, MailConfig, LocalStorage, SQLiteDB,
class LocalHostedConfig(FlaskConfig, MailConfig, S3Storage, RDSMySQL,
StripeLiveConfig, MixpanelTestConfig,
GitHubProdConfig, DigitalOceanConfig,
BuildNodeConfig, S3Userfiles):
BuildNodeConfig, S3Userfiles, RedisBuildLogs):
LOGGING_CONFIG = logs_init_builder()
SEND_FILE_MAX_AGE_DEFAULT = 0
@ -180,7 +186,7 @@ class LocalHostedConfig(FlaskConfig, MailConfig, S3Storage, RDSMySQL,
class ProductionConfig(FlaskProdConfig, MailConfig, S3Storage, RDSMySQL,
StripeLiveConfig, MixpanelProdConfig,
GitHubProdConfig, DigitalOceanConfig, BuildNodeConfig,
S3Userfiles):
S3Userfiles, RedisBuildLogs):
LOGGING_CONFIG = logs_init_builder(logfile='/mnt/logs/application.log')
SEND_FILE_MAX_AGE_DEFAULT = 0

56
data/buildlogs.py Normal file
View file

@ -0,0 +1,56 @@
import redis
import json
class BuildLogs(object):
  """Redis-backed storage for build log entries and build status.

  Log entries for a build are kept as JSON strings in a Redis list under
  'builds/<build_id>/logs'; the build's current status object is a JSON
  blob stored under 'builds/<build_id>/status'.
  """

  def __init__(self, redis_host):
    self._redis = redis.StrictRedis(host=redis_host)

  @staticmethod
  def _logs_key(build_id):
    """Redis key holding the list of serialized log entries for a build."""
    return 'builds/%s/logs' % build_id

  def append_log_entry(self, build_id, log_obj):
    """
    Appends the serialized form of log_obj to the end of the log entry list
    and returns the new length of the list.
    """
    return self._redis.rpush(self._logs_key(build_id), json.dumps(log_obj))

  def append_log_message(self, build_id, log_message):
    """
    Wraps the message in an envelope and pushes it to the end of the log entry
    list and returns the new length of the list.
    """
    # Delegate to append_log_entry so the serialization and key logic lives
    # in exactly one place.
    return self.append_log_entry(build_id, {'message': log_message})

  def get_log_entries(self, build_id, start_index, end_index):
    """
    Returns a tuple of the current length of the list and an iterable of the
    requested log entries. End index is inclusive.
    """
    # NOTE(review): llen and lrange are two separate round trips, so the
    # reported length and the returned range may reflect slightly different
    # list states if a writer appends in between.
    key = self._logs_key(build_id)
    llen = self._redis.llen(key)
    log_entries = self._redis.lrange(key, start_index, end_index)
    return (llen, (json.loads(entry) for entry in log_entries))

  @staticmethod
  def _status_key(build_id):
    """Redis key holding the JSON status blob for a build."""
    return 'builds/%s/status' % build_id

  def set_status(self, build_id, status_obj):
    """
    Sets the status key for this build to the JSON serialized form of the
    supplied obj.
    """
    self._redis.set(self._status_key(build_id), json.dumps(status_obj))

  def get_status(self, build_id):
    """
    Loads the status information for the specified build id, or None if no
    status has ever been set for the build.
    """
    fetched = self._redis.get(self._status_key(build_id))
    return json.loads(fetched) if fetched else None

View file

@ -1,5 +1,6 @@
import string
import logging
import uuid
from random import SystemRandom
from datetime import datetime
@ -20,6 +21,10 @@ def random_string_generator(length=16):
return random_string
def uuid_generator():
return str(uuid.uuid4())
class BaseModel(Model):
class Meta:
database = db
@ -125,7 +130,7 @@ class RepositoryPermission(BaseModel):
class PermissionPrototype(BaseModel):
org = ForeignKeyField(User, index=True, related_name='orgpermissionproto')
uuid = CharField()
uuid = CharField(default=uuid_generator)
activating_user = ForeignKeyField(User, index=True, null=True,
related_name='userpermissionproto')
delegate_user = ForeignKeyField(User, related_name='receivingpermission',
@ -204,13 +209,12 @@ class RepositoryTag(BaseModel):
class RepositoryBuild(BaseModel):
repository = ForeignKeyField(Repository)
uuid = CharField(default=uuid_generator, index=True)
repository = ForeignKeyField(Repository, index=True)
access_token = ForeignKeyField(AccessToken)
resource_key = CharField()
tag = CharField()
build_node_id = IntegerField(null=True)
phase = CharField(default='waiting')
status_url = CharField(null=True)
class QueueItem(BaseModel):

View file

@ -4,9 +4,7 @@ import datetime
import dateutil.parser
import operator
import json
import uuid
from datetime import timedelta
from database import *
from util.validation import *
@ -728,8 +726,7 @@ def update_prototype_permission(org, uid, role_name):
def add_prototype_permission(org, role_name, activating_user,
delegate_user=None, delegate_team=None):
new_role = Role.get(Role.name == role_name)
uid = str(uuid.uuid4())
return PermissionPrototype.create(org=org, uuid=uid, role=new_role,
return PermissionPrototype.create(org=org, role=new_role,
activating_user=activating_user,
delegate_user=delegate_user, delegate_team=delegate_team)
@ -1248,13 +1245,18 @@ def load_token_data(code):
raise InvalidTokenException('Invalid delegate token code: %s' % code)
def get_repository_build(request_dbid):
try:
return RepositoryBuild.get(RepositoryBuild.id == request_dbid)
except RepositoryBuild.DoesNotExist:
msg = 'Unable to locate a build by id: %s' % request_dbid
def get_repository_build(namespace_name, repository_name, build_uuid):
  """
  Returns the RepositoryBuild with the given uuid belonging to the named
  repository and namespace.

  Raises InvalidRepositoryBuildException if no matching build exists.
  """
  # Join against Repository so that a uuid belonging to a different
  # repository cannot be fetched through this endpoint.
  joined = RepositoryBuild.select().join(Repository)
  fetched = list(joined.where(Repository.name == repository_name,
                              Repository.namespace == namespace_name,
                              RepositoryBuild.uuid == build_uuid))
  if not fetched:
    msg = 'Unable to locate a build by id: %s' % build_uuid
    raise InvalidRepositoryBuildException(msg)
  return fetched[0]
def list_repository_builds(namespace_name, repository_name,
include_inactive=True):

View file

@ -31,6 +31,7 @@ from datetime import datetime, timedelta
store = app.config['STORAGE']
user_files = app.config['USERFILES']
build_logs = app.config['BUILDLOGS']
logger = logging.getLogger(__name__)
route_data = None
@ -1141,35 +1142,65 @@ def get_repo(namespace, repository):
abort(403) # Permission denied
def build_status_view(build_obj):
  """
  Serializes a RepositoryBuild for API responses, merging in the detailed
  status object that the build worker publishes to the build logs store
  under the build's uuid.
  """
  # status may be None if the worker has not yet published any status.
  status = build_logs.get_status(build_obj.uuid)
  return {
    'id': build_obj.uuid,
    'phase': build_obj.phase,
    'status': status,
  }
@api.route('/repository/<path:repository>/build/', methods=['GET'])
@parse_repository_name
def get_repo_builds(namespace, repository):
permission = ReadRepositoryPermission(namespace, repository)
is_public = model.repository_is_public(namespace, repository)
if permission.can() or is_public:
def build_view(build_obj):
# TODO(jake): Filter these logs if the current user can only *read* the repo.
if build_obj.status_url:
# Delegate the status to the build node
node_status = requests.get(build_obj.status_url).json()
node_status['id'] = build_obj.id
return node_status
# If there was no status url, do the best we can
# The format of this block should mirror that of the buildserver.
return {
'id': build_obj.id,
'total_commands': None,
'current_command': None,
'push_completion': 0.0,
'status': build_obj.phase,
'message': None,
'image_completion': {},
}
builds = model.list_repository_builds(namespace, repository)
return jsonify({
'builds': [build_view(build) for build in builds]
'builds': [build_status_view(build) for build in builds]
})
abort(403) # Permission denied
@api.route('/repository/<path:repository>/build/<build_uuid>/status',
           methods=['GET'])
@parse_repository_name
def get_repo_build_status(namespace, repository, build_uuid):
  """
  Returns the status view for a single build. Accessible with read
  permission on the repository, or by anyone when the repository is public.
  """
  permission = ReadRepositoryPermission(namespace, repository)
  is_public = model.repository_is_public(namespace, repository)
  if permission.can() or is_public:
    # Raises InvalidRepositoryBuildException if the uuid does not belong to
    # this namespace/repository.
    build = model.get_repository_build(namespace, repository, build_uuid)
    return jsonify(build_status_view(build))

  abort(403)  # Permission denied
@api.route('/repository/<path:repository>/build/<build_uuid>/logs',
           methods=['GET'])
@parse_repository_name
def get_repo_build_logs(namespace, repository, build_uuid):
  """
  Returns a window of log entries for the given build.

  Query params 'start' and 'end' select the (inclusive) range of log list
  indices to return; negative values index from the end of the list, Redis
  style. Defaults to the last 1000 entries. Requires modify permission,
  since build logs may contain sensitive build output.
  """
  permission = ModifyRepositoryPermission(namespace, repository)
  if permission.can():
    build = model.get_repository_build(namespace, repository, build_uuid)

    # Coerce to int: query args arrive as strings, and both the comparisons
    # below and the log-store range lookup require integer indices. Without
    # type=int a caller-supplied ?start=5 would be the string '5'.
    start = request.args.get('start', -1000, type=int)
    end = request.args.get('end', -1, type=int)
    count, logs = build_logs.get_log_entries(build.uuid, start, end)

    # Normalize negative (from-the-end) indices into absolute positions for
    # the response metadata.
    if start < 0:
      start = max(0, count + start)

    if end < 0:
      end = count + end

    return jsonify({
      'start': start,
      'end': end,
      'total': count,
      'logs': [log for log in logs],
    })

  abort(403)  # Permission denied
@ -1191,15 +1222,21 @@ def request_repo_build(namespace, repository):
tag = '%s/%s/%s' % (host, repo.namespace, repo.name)
build_request = model.create_repository_build(repo, token, dockerfile_id,
tag)
dockerfile_build_queue.put(json.dumps({'build_id': build_request.id}))
dockerfile_build_queue.put(json.dumps({
'build_uuid': build_request.uuid,
'namespace': namespace,
'repository': repository,
}))
log_action('build_dockerfile', namespace,
{'repo': repository, 'namespace': namespace,
'fileid': dockerfile_id}, repo=repo)
resp = jsonify({
'started': True
})
resp = jsonify(build_status_view(build_request))
repo_string = '%s/%s' % (namespace, repository)
resp.headers['Location'] = url_for('api.get_repo_build_status',
repository=repo_string,
build_uuid=build_request.uuid)
resp.status_code = 201
return resp

View file

@ -19,3 +19,5 @@ paramiko
python-digitalocean
xhtml2pdf
logstash_formatter
redis
hiredis

View file

@ -17,6 +17,7 @@ ecdsa==0.10
gevent==1.0
greenlet==0.4.2
gunicorn==18.0
hiredis==0.1.2
html5lib==1.0b3
itsdangerous==0.23
lockfile==0.9.1
@ -31,6 +32,7 @@ pycrypto==2.6.1
python-daemon==1.6
python-dateutil==2.2
python-digitalocean==0.6
redis==2.9.1
reportlab==2.7
requests==2.2.1
six==1.5.2

View file

@ -2475,13 +2475,13 @@ quayApp.directive('buildStatus', function () {
},
controller: function($scope, $element) {
$scope.getBuildProgress = function(buildInfo) {
switch (buildInfo.status) {
switch (buildInfo.phase) {
case 'building':
return (buildInfo.current_command / buildInfo.total_commands) * 100;
return (buildInfo.status.current_command / buildInfo.status.total_commands) * 100;
break;
case 'pushing':
return buildInfo.push_completion * 100;
return buildInfo.status.push_completion * 100;
break;
case 'complete':
@ -2499,7 +2499,7 @@ quayApp.directive('buildStatus', function () {
};
$scope.getBuildMessage = function(buildInfo) {
switch (buildInfo.status) {
switch (buildInfo.phase) {
case 'initializing':
return 'Starting Dockerfile build';
break;
@ -2519,7 +2519,7 @@ quayApp.directive('buildStatus', function () {
break;
case 'error':
return 'Dockerfile build failed: ' + buildInfo.message;
return 'Dockerfile build failed.';
break;
}
};

Binary file not shown.

View file

@ -25,6 +25,28 @@ formatter = logging.Formatter(FORMAT)
logger = logging.getLogger(__name__)
user_files = app.config['USERFILES']
build_logs = app.config['BUILDLOGS']
class StatusWrapper(object):
  """
  Context manager wrapping a build's mutable status dictionary.

  Entering the manager yields the dict for in-place mutation; every exit
  publishes the current contents to the build logs store under the build's
  uuid. Construction publishes the initial (empty-progress) status
  immediately so readers never observe a missing status for a started build.
  """

  def __init__(self, build_uuid):
    self._uuid = build_uuid
    self._status = dict(total_commands=None,
                        current_command=None,
                        push_completion=0.0,
                        image_completion={})
    # Publish the baseline status right away.
    self._publish()

  def __enter__(self):
    return self._status

  def __exit__(self, exc_type, value, traceback):
    self._publish()

  def _publish(self):
    # Writes the current status snapshot to the shared build logs store.
    build_logs.set_status(self._uuid, self._status)
class DockerfileBuildWorker(Worker):
def __init__(self, *vargs, **kwargs):
@ -75,102 +97,94 @@ class DockerfileBuildWorker(Worker):
return float(sent_bytes)/total_bytes*percentage_with_sizes
@staticmethod
def __build_image(build_dir, tag_name, num_steps, result_object):
try:
logger.debug('Starting build.')
docker_cl = Client(timeout=1200)
def __build_image(build_dir, tag_name, num_steps, result_object, uuid):
logger.debug('Starting build.')
docker_cl = Client(timeout=1200)
result_object['status'] = 'building'
build_status = docker_cl.build(path=build_dir, tag=tag_name, stream=True)
with result_object as status:
status['total_commands'] = num_steps
current_step = 0
built_image = None
for status in build_status:
# logger.debug('Status: %s', str(status))
step_increment = re.search(r'Step ([0-9]+) :', status)
if step_increment:
current_step = int(step_increment.group(1))
logger.debug('Step now: %s/%s' % (current_step, num_steps))
result_object['current_command'] = current_step
continue
build_status = docker_cl.build(path=build_dir, tag=tag_name, stream=True)
complete = re.match(r'Successfully built ([a-z0-9]+)$', status)
if complete:
built_image = complete.group(1)
logger.debug('Final image ID is: %s' % built_image)
continue
current_step = 0
built_image = None
for status in build_status:
logger.debug('Status: %s', str(status))
build_logs.append_log_message(uuid, str(status))
step_increment = re.search(r'Step ([0-9]+) :', status)
if step_increment:
current_step = int(step_increment.group(1))
logger.debug('Step now: %s/%s' % (current_step, num_steps))
with result_object as status:
status['current_command'] = current_step
continue
shutil.rmtree(build_dir)
complete = re.match(r'Successfully built ([a-z0-9]+)$', status)
if complete:
built_image = complete.group(1)
logger.debug('Final image ID is: %s' % built_image)
continue
# Get the image count
if not built_image:
result_object['status'] = 'error'
result_object['message'] = 'Unable to build dockerfile.'
return
shutil.rmtree(build_dir)
return built_image
except Exception as exc:
logger.exception('Exception when processing request.')
result_object['status'] = 'error'
result_object['message'] = str(exc.message)
# Get the image count
if not built_image:
return
return built_image
@staticmethod
def __push_image(built_image, token, tag_name, result_object):
try:
# Login to the registry
host = re.match(r'([a-z0-9.:]+)/.+/.+$', tag_name)
if not host:
raise Exception('Invalid tag name: %s' % tag_name)
# Login to the registry
host = re.match(r'([a-z0-9.:]+)/.+/.+$', tag_name)
if not host:
raise RuntimeError('Invalid tag name: %s' % tag_name)
docker_cl = Client(timeout=1200)
docker_cl = Client(timeout=1200)
for protocol in ['https', 'http']:
registry_endpoint = '%s://%s/v1/' % (protocol, host.group(1))
logger.debug('Attempting login to registry: %s' % registry_endpoint)
for protocol in ['https', 'http']:
registry_endpoint = '%s://%s/v1/' % (protocol, host.group(1))
logger.debug('Attempting login to registry: %s' % registry_endpoint)
try:
docker_cl.login('$token', token, registry=registry_endpoint)
break
except APIError:
pass # Probably the wrong protocol
try:
docker_cl.login('$token', token, registry=registry_endpoint)
break
except APIError:
pass # Probably the wrong protocol
history = json.loads(docker_cl.history(built_image))
num_images = len(history)
result_object['total_images'] = num_images
history = json.loads(docker_cl.history(built_image))
num_images = len(history)
with result_object as status:
status['total_images'] = num_images
result_object['status'] = 'pushing'
logger.debug('Pushing to tag name: %s' % tag_name)
resp = docker_cl.push(tag_name, stream=True)
logger.debug('Pushing to tag name: %s' % tag_name)
resp = docker_cl.push(tag_name, stream=True)
for status_str in resp:
status = json.loads(status_str)
logger.debug('Status: %s', status_str)
if u'status' in status:
status_msg = status[u'status']
for status_str in resp:
status = json.loads(status_str)
logger.debug('Status: %s', status_str)
if u'status' in status:
status_msg = status[u'status']
if status_msg == 'Pushing':
if u'progressDetail' in status and u'id' in status:
image_id = status[u'id']
detail = status[u'progressDetail']
if status_msg == 'Pushing':
if u'progressDetail' in status and u'id' in status:
image_id = status[u'id']
detail = status[u'progressDetail']
if u'current' in detail and 'total' in detail:
images = result_object['image_completion']
if u'current' in detail and 'total' in detail:
with result_object as status:
images = status['image_completion']
images[image_id] = detail
result_object['push_completion'] = \
status['push_completion'] = \
DockerfileBuildWorker.__total_completion(images, num_images)
elif u'errorDetail' in status:
result_object['status'] = 'error'
if u'message' in status[u'errorDetail']:
result_object['message'] = str(status[u'errorDetail'][u'message'])
return
elif u'errorDetail' in status:
message = 'Error pushing image.'
if u'message' in status[u'errorDetail']:
message = str(status[u'errorDetail'][u'message'])
result_object['status'] = 'complete'
except Exception as exc:
logger.exception('Exception when processing request.')
result_object['status'] = 'error'
result_object['message'] = str(exc.message)
raise RuntimeError(message)
@staticmethod
def __cleanup():
@ -215,47 +229,75 @@ class DockerfileBuildWorker(Worker):
raise RuntimeError('Image was not removed: %s' % image['Id'])
def process_queue_item(self, job_details):
repository_build = model.get_repository_build(job_details['build_id'])
repository_build = model.get_repository_build(job_details['namespace'],
job_details['repository'],
job_details['build_uuid'])
user_files = app.config['USERFILES']
resource_url = user_files.get_file_url(repository_build.resource_key)
tag_name = repository_build.tag
access_token = repository_build.access_token.code
feedback = {
'total_commands': None,
'current_command': None,
'push_completion': 0.0,
'status': 'waiting',
'message': None,
'image_completion': {},
}
result_object = StatusWrapper(repository_build.uuid)
logger.debug('Starting job with resource url: %s tag: %s and token: %s' %
start_msg = ('Starting job with resource url: %s tag: %s and token: %s' %
(resource_url, tag_name, access_token))
logger.debug(start_msg)
build_logs.append_log_message(repository_build.uuid, start_msg)
docker_resource = requests.get(resource_url)
c_type = docker_resource.headers['content-type']
logger.info('Request to build file of type: %s with tag: %s' %
(c_type, tag_name))
filetype_msg = ('Request to build file of type: %s with tag: %s' %
(c_type, tag_name))
logger.info(filetype_msg)
build_logs.append_log_message(repository_build.uuid, filetype_msg)
if c_type not in self._mime_processors:
raise Exception('Invalid dockerfile content type: %s' % c_type)
raise RuntimeError('Invalid dockerfile content type: %s' % c_type)
build_dir = self._mime_processors[c_type](docker_resource)
dockerfile_path = os.path.join(build_dir, "Dockerfile")
num_steps = DockerfileBuildWorker.__count_steps(dockerfile_path)
logger.debug('Dockerfile had %s steps' % num_steps)
steps_msg = 'Dockerfile had %s steps' % num_steps
logger.debug(steps_msg)
build_logs.append_log_message(repository_build.uuid, steps_msg)
built_image = DockerfileBuildWorker.__build_image(build_dir, tag_name,
num_steps, feedback)
uuid = repository_build.uuid
repository_build.phase = 'building'
repository_build.save()
DockerfileBuildWorker.__push_image(built_image, access_token, tag_name,
feedback)
try:
built_image = DockerfileBuildWorker.__build_image(build_dir, tag_name,
num_steps,
result_object, uuid)
DockerfileBuildWorker.__cleanup()
if not built_image:
repository_build.phase = 'error'
repository_build.save()
build_logs.append_log_message(uuid, 'Unable to build dockerfile.')
return False
repository_build.phase = 'pushing'
repository_build.save()
DockerfileBuildWorker.__push_image(built_image, access_token, tag_name,
result_object)
repository_build.phase = 'complete'
repository_build.save()
# TODO turn cleanup on before pushing to prod
# DockerfileBuildWorker.__cleanup()
except Exception as exc:
logger.exception('Exception when processing request.')
repository_build.phase = 'error'
repository_build.save()
build_logs.append_log_message(uuid, exc.message)
return False
return True
desc = 'Worker daemon to monitor dockerfile build'