initial import for Open Source 🎉
parent 1898c361f3
commit 9c0dd3b722
2048 changed files with 218743 additions and 0 deletions
0  buildman/jobutil/__init__.py  Normal file
183  buildman/jobutil/buildjob.py  Normal file
@@ -0,0 +1,183 @@
import json
import logging

from app import app
from cachetools.func import lru_cache
from notifications import spawn_notification
from data import model
from data.registry_model import registry_model
from data.registry_model.datatypes import RepositoryReference
from data.database import UseThenDisconnect
from util.morecollections import AttrDict

logger = logging.getLogger(__name__)


class BuildJobLoadException(Exception):
  """ Exception raised if a build job could not be instantiated for some reason. """
  pass


class BuildJob(object):
  """ Represents a single in-progress build job. """
  def __init__(self, job_item):
    self.job_item = job_item

    try:
      self.job_details = json.loads(job_item.body)
      self.build_notifier = BuildJobNotifier(self.build_uuid)
    except ValueError:
      raise BuildJobLoadException(
        'Could not parse build queue item config with ID %s' % self.job_details['build_uuid']
      )

  @property
  def retries_remaining(self):
    return self.job_item.retries_remaining

  def has_retries_remaining(self):
    return self.job_item.retries_remaining > 0

  def send_notification(self, kind, error_message=None, image_id=None, manifest_digests=None):
    self.build_notifier.send_notification(kind, error_message, image_id, manifest_digests)

  @lru_cache(maxsize=1)
  def _load_repo_build(self):
    with UseThenDisconnect(app.config):
      try:
        return model.build.get_repository_build(self.build_uuid)
      except model.InvalidRepositoryBuildException:
        raise BuildJobLoadException(
          'Could not load repository build with ID %s' % self.build_uuid)

  @property
  def build_uuid(self):
    """ Returns the unique UUID for this build job. """
    return self.job_details['build_uuid']

  @property
  def namespace(self):
    """ Returns the namespace under which this build is running. """
    return self.repo_build.repository.namespace_user.username

  @property
  def repo_name(self):
    """ Returns the name of the repository under which this build is running. """
    return self.repo_build.repository.name

  @property
  def repo_build(self):
    return self._load_repo_build()

  def get_build_package_url(self, user_files):
    """ Returns the URL of the build package for this build, if any or empty string if none. """
    archive_url = self.build_config.get('archive_url', None)
    if archive_url:
      return archive_url

    if not self.repo_build.resource_key:
      return ''

    return user_files.get_file_url(self.repo_build.resource_key, '127.0.0.1', requires_cors=False)

  @property
  def pull_credentials(self):
    """ Returns the pull credentials for this job, or None if none. """
    return self.job_details.get('pull_credentials')

  @property
  def build_config(self):
    try:
      return json.loads(self.repo_build.job_config)
    except ValueError:
      raise BuildJobLoadException(
        'Could not parse repository build job config with ID %s' % self.job_details['build_uuid']
      )

  def determine_cached_tag(self, base_image_id=None, cache_comments=None):
    """ Returns the tag to pull to prime the cache or None if none. """
    cached_tag = self._determine_cached_tag_by_tag()
    logger.debug('Determined cached tag %s for %s: %s', cached_tag, base_image_id, cache_comments)
    return cached_tag

  def _determine_cached_tag_by_tag(self):
    """ Determines the cached tag by looking for one of the tags being built, and seeing if it
        exists in the repository. This is a fallback for when no comment information is available.
    """
    with UseThenDisconnect(app.config):
      tags = self.build_config.get('docker_tags', ['latest'])
      repository = RepositoryReference.for_repo_obj(self.repo_build.repository)
      matching_tag = registry_model.find_matching_tag(repository, tags)
      if matching_tag is not None:
        return matching_tag.name

      most_recent_tag = registry_model.get_most_recent_tag(repository)
      if most_recent_tag is not None:
        return most_recent_tag.name

      return None


class BuildJobNotifier(object):
  """ A class for sending notifications to a job that only relies on the build_uuid """

  def __init__(self, build_uuid):
    self.build_uuid = build_uuid

  @property
  def repo_build(self):
    return self._load_repo_build()

  @lru_cache(maxsize=1)
  def _load_repo_build(self):
    try:
      return model.build.get_repository_build(self.build_uuid)
    except model.InvalidRepositoryBuildException:
      raise BuildJobLoadException(
        'Could not load repository build with ID %s' % self.build_uuid)

  @property
  def build_config(self):
    try:
      return json.loads(self.repo_build.job_config)
    except ValueError:
      raise BuildJobLoadException(
        'Could not parse repository build job config with ID %s' % self.repo_build.uuid
      )

  def send_notification(self, kind, error_message=None, image_id=None, manifest_digests=None):
    with UseThenDisconnect(app.config):
      tags = self.build_config.get('docker_tags', ['latest'])
      trigger = self.repo_build.trigger
      if trigger is not None and trigger.id is not None:
        trigger_kind = trigger.service.name
      else:
        trigger_kind = None

      event_data = {
        'build_id': self.repo_build.uuid,
        'build_name': self.repo_build.display_name,
        'docker_tags': tags,
        'trigger_id': trigger.uuid if trigger is not None else None,
        'trigger_kind': trigger_kind,
        'trigger_metadata': self.build_config.get('trigger_metadata', {})
      }

      if image_id is not None:
        event_data['image_id'] = image_id

      if manifest_digests:
        event_data['manifest_digests'] = manifest_digests

      if error_message is not None:
        event_data['error_message'] = error_message

      # TODO: remove when more endpoints have been converted to using
      # interfaces
      repo = AttrDict({
        'namespace_name': self.repo_build.repository.namespace_user.username,
        'name': self.repo_build.repository.name,
      })
      spawn_notification(repo, kind, event_data,
                         subpage='build/%s' % self.repo_build.uuid,
                         pathargs=['build', self.repo_build.uuid])
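For orientation, a minimal sketch of how a BuildJob might be constructed from a dequeued work item. The queue item shape (a body holding JSON and a retries_remaining count) is inferred from BuildJob.__init__ above; the FakeQueueItem type and the 'some-build-uuid' value are hypothetical, and the imports assume the Quay application environment is importable.

# Hypothetical sketch only: FakeQueueItem and the UUID below are illustrative,
# not part of this commit.
import json
from collections import namedtuple

from buildman.jobutil.buildjob import BuildJob, BuildJobLoadException

FakeQueueItem = namedtuple('FakeQueueItem', ['body', 'retries_remaining'])

item = FakeQueueItem(body=json.dumps({'build_uuid': 'some-build-uuid'}),
                     retries_remaining=3)

try:
  job = BuildJob(item)
  print(job.build_uuid)               # 'some-build-uuid'
  print(job.has_retries_remaining())  # True
  print(job.pull_credentials)         # None (key absent from job_details)
except BuildJobLoadException:
  # Raised when the queue item body is not valid JSON.
  pass

Note that repository-backed properties such as repo_build and build_config are lazy: nothing touches the database until one of them is accessed, at which point _load_repo_build runs inside UseThenDisconnect and is cached by lru_cache.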
88  buildman/jobutil/buildstatus.py  Normal file
@@ -0,0 +1,88 @@
import datetime
import logging

from redis import RedisError
from trollius import From, Return, coroutine

from data.database import BUILD_PHASE
from data import model
from buildman.asyncutil import AsyncWrapper


logger = logging.getLogger(__name__)


class StatusHandler(object):
  """ Context wrapper for writing status to build logs. """

  def __init__(self, build_logs, repository_build_uuid):
    self._current_phase = None
    self._current_command = None
    self._uuid = repository_build_uuid
    self._build_logs = AsyncWrapper(build_logs)
    self._sync_build_logs = build_logs
    self._build_model = AsyncWrapper(model.build)

    self._status = {
      'total_commands': 0,
      'current_command': None,
      'push_completion': 0.0,
      'pull_completion': 0.0,
    }

    # Write the initial status.
    self.__exit__(None, None, None)

  @coroutine
  def _append_log_message(self, log_message, log_type=None, log_data=None):
    log_data = log_data or {}
    log_data['datetime'] = str(datetime.datetime.now())

    try:
      yield From(self._build_logs.append_log_message(self._uuid, log_message, log_type, log_data))
    except RedisError:
      logger.exception('Could not save build log for build %s: %s', self._uuid, log_message)

  @coroutine
  def append_log(self, log_message, extra_data=None):
    if log_message is None:
      return

    yield From(self._append_log_message(log_message, log_data=extra_data))

  @coroutine
  def set_command(self, command, extra_data=None):
    if self._current_command == command:
      raise Return()

    self._current_command = command
    yield From(self._append_log_message(command, self._build_logs.COMMAND, extra_data))

  @coroutine
  def set_error(self, error_message, extra_data=None, internal_error=False, requeued=False):
    error_phase = BUILD_PHASE.INTERNAL_ERROR if internal_error and requeued else BUILD_PHASE.ERROR
    yield From(self.set_phase(error_phase))

    extra_data = extra_data or {}
    extra_data['internal_error'] = internal_error
    yield From(self._append_log_message(error_message, self._build_logs.ERROR, extra_data))

  @coroutine
  def set_phase(self, phase, extra_data=None):
    if phase == self._current_phase:
      raise Return(False)

    self._current_phase = phase
    yield From(self._append_log_message(phase, self._build_logs.PHASE, extra_data))

    # Update the repository build with the new phase
    raise Return(self._build_model.update_phase_then_close(self._uuid, phase))

  def __enter__(self):
    return self._status

  def __exit__(self, exc_type, value, traceback):
    try:
      self._sync_build_logs.set_status(self._uuid, self._status)
    except RedisError:
      logger.exception('Could not set status of build %s to %s', self._uuid, self._status)
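A rough sketch of how a build worker might drive StatusHandler from a trollius coroutine. The report_progress helper, the build_logs argument (standing in for the Redis-backed log store the caller normally supplies), and the log text are made up for the example, and BUILD_PHASE.BUILDING is assumed to exist alongside the ERROR phases used above.

# Hypothetical usage sketch; report_progress and its arguments are illustrative.
from trollius import From, coroutine

from buildman.jobutil.buildstatus import StatusHandler
from data.database import BUILD_PHASE

@coroutine
def report_progress(build_logs, build_uuid):
  status_handler = StatusHandler(build_logs, build_uuid)

  # Phase and command changes are de-duplicated by StatusHandler itself.
  yield From(status_handler.set_phase(BUILD_PHASE.BUILDING))
  yield From(status_handler.set_command('RUN make'))
  yield From(status_handler.append_log('make: nothing to be done'))

  # Mutations to the status dict are written back to the log store when the
  # context manager exits.
  with status_handler as status:
    status['total_commands'] = 1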
119  buildman/jobutil/workererror.py  Normal file
@@ -0,0 +1,119 @@
class WorkerError(object):
  """ Helper class which represents errors raised by a build worker. """
  def __init__(self, error_code, base_message=None):
    self._error_code = error_code
    self._base_message = base_message

    self._error_handlers = {
      'io.quay.builder.buildpackissue': {
        'message': 'Could not load build package',
        'is_internal': True,
      },

      'io.quay.builder.gitfailure': {
        'message': 'Could not clone git repository',
        'show_base_error': True,
      },

      'io.quay.builder.gitcheckout': {
        'message': 'Could not checkout git ref. If you force pushed recently, ' +
                   'the commit may be missing.',
        'show_base_error': True,
      },

      'io.quay.builder.cannotextractbuildpack': {
        'message': 'Could not extract the contents of the build package'
      },

      'io.quay.builder.cannotpullforcache': {
        'message': 'Could not pull cached image',
        'is_internal': True
      },

      'io.quay.builder.dockerfileissue': {
        'message': 'Could not find or parse Dockerfile',
        'show_base_error': True
      },

      'io.quay.builder.cannotpullbaseimage': {
        'message': 'Could not pull base image',
        'show_base_error': True
      },

      'io.quay.builder.internalerror': {
        'message': 'An internal error occurred while building. Please submit a ticket.',
        'is_internal': True
      },

      'io.quay.builder.buildrunerror': {
        'message': 'Could not start the build process',
        'is_internal': True
      },

      'io.quay.builder.builderror': {
        'message': 'A build step failed',
        'show_base_error': True
      },

      'io.quay.builder.tagissue': {
        'message': 'Could not tag built image',
        'is_internal': True
      },

      'io.quay.builder.pushissue': {
        'message': 'Could not push built image',
        'show_base_error': True,
        'is_internal': True
      },

      'io.quay.builder.dockerconnecterror': {
        'message': 'Could not connect to Docker daemon',
        'is_internal': True
      },

      'io.quay.builder.missingorinvalidargument': {
        'message': 'Missing required arguments for builder',
        'is_internal': True
      },

      'io.quay.builder.cachelookupissue': {
        'message': 'Error checking for a cached tag',
        'is_internal': True
      },

      'io.quay.builder.errorduringphasetransition': {
        'message': 'Error during phase transition. If this problem persists ' +
                   'please contact customer support.',
        'is_internal': True
      },

      'io.quay.builder.clientrejectedtransition': {
        'message': 'Build can not be finished due to user cancellation.',
      }
    }

  def is_internal_error(self):
    handler = self._error_handlers.get(self._error_code)
    return handler.get('is_internal', False) if handler else True

  def public_message(self):
    handler = self._error_handlers.get(self._error_code)
    if not handler:
      return 'An unknown error occurred'

    message = handler['message']
    if handler.get('show_base_error', False) and self._base_message:
      message = message + ': ' + self._base_message

    return message

  def extra_data(self):
    if self._base_message:
      return {
        'base_error': self._base_message,
        'error_code': self._error_code
      }

    return {
      'error_code': self._error_code
    }
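To illustrate the lookup logic above, a short example of turning a builder error code into its user-facing message; the base_message is a made-up git error string, and the unknown code is invented to show the fallback path.

# Illustrative only; the base_message and the unknown code are made up.
from buildman.jobutil.workererror import WorkerError

err = WorkerError('io.quay.builder.gitfailure',
                  base_message='remote: Repository not found.')
print(err.public_message())
# 'Could not clone git repository: remote: Repository not found.'
print(err.is_internal_error())  # False; gitfailure is not marked is_internal
print(err.extra_data())
# {'base_error': 'remote: Repository not found.', 'error_code': 'io.quay.builder.gitfailure'}

# Unknown codes are treated conservatively: internal, with a generic message.
unknown = WorkerError('io.quay.builder.somethingelse')
print(unknown.is_internal_error())  # True
print(unknown.public_message())     # 'An unknown error occurred'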