import json
import logging

from cachetools import lru_cache

from endpoints.notificationhelper import spawn_notification

from data import model
from util.imagetree import ImageTree

logger = logging.getLogger(__name__)


class BuildJobLoadException(Exception):
  """ Exception raised if a build job could not be instantiated for some reason. """
  pass


class BuildJob(object):
  """ Represents a single in-progress build job. """
  def __init__(self, job_item):
    self.job_item = job_item

    try:
      self.job_details = json.loads(job_item.body)
    except ValueError:
      # The queue item body could not be parsed, so job_details (and its build_uuid) is not
      # available here; report the raw body instead.
      raise BuildJobLoadException('Could not parse build queue item config: %s' % job_item.body)
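
  # For illustration only (the exact payload is produced by whatever enqueues the build, so the
  # field layout below is an assumption): the queue item body is JSON from which this class reads
  # 'build_uuid' and, optionally, 'pull_credentials', e.g.:
  #   {"build_uuid": "some-build-uuid", "pull_credentials": {"username": "...", "password": "..."}}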

  def has_retries_remaining(self):
    return self.job_item.retries_remaining > 0

  def send_notification(self, kind, error_message=None, image_id=None):
    tags = self.build_config.get('docker_tags', ['latest'])
    event_data = {
      'build_id': self.repo_build.uuid,
      'build_name': self.repo_build.display_name,
      'docker_tags': tags,
      'trigger_id': self.repo_build.trigger.uuid,
      'trigger_kind': self.repo_build.trigger.service.name,
      'trigger_metadata': self.build_config.get('trigger_metadata', {})
    }

    if image_id is not None:
      event_data['image_id'] = image_id

    if error_message is not None:
      event_data['error_message'] = error_message

    spawn_notification(self.repo_build.repository, kind, event_data,
                       subpage='build/%s' % self.repo_build.uuid,
                       pathargs=['build', self.repo_build.uuid])

  @lru_cache(maxsize=1)
  def _load_repo_build(self):
    # Cached so that repeated accesses of the repo_build property hit the database only once.
    try:
      return model.build.get_repository_build(self.job_details['build_uuid'])
    except model.InvalidRepositoryBuildException:
      raise BuildJobLoadException(
        'Could not load repository build with ID %s' % self.job_details['build_uuid'])

  @property
  def repo_build(self):
    return self._load_repo_build()

  @property
  def pull_credentials(self):
    """ Returns the pull credentials for this job, or None if none. """
    return self.job_details.get('pull_credentials')

  @property
  def build_config(self):
    """ Returns the parsed job config for the repository build. """
    try:
      return json.loads(self.repo_build.job_config)
    except ValueError:
      raise BuildJobLoadException(
        'Could not parse repository build job config with ID %s' % self.job_details['build_uuid'])

  def determine_cached_tag(self, base_image_id=None, cache_comments=None):
    """ Returns the tag to pull to prime the cache or None if none. """
    cached_tag = None
    if base_image_id and cache_comments:
      cached_tag = self._determine_cached_tag_by_comments(base_image_id, cache_comments)

    if not cached_tag:
      cached_tag = self._determine_cached_tag_by_tag()

    logger.debug('Determined cached tag %s for %s: %s', cached_tag, base_image_id, cache_comments)

    return cached_tag

  def _determine_cached_tag_by_comments(self, base_image_id, cache_commands):
    """ Determines the tag to use for priming the cache for this build job, by matching commands
        starting at the given base_image_id. This mimics the Docker cache checking, so it should,
        in theory, provide "perfect" caching.
    """
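    # Illustrative example (the command strings are hypothetical): for a Dockerfile step such as
    # `RUN apt-get update`, the corresponding cache command would be 'apt-get update', which the
    # checker below serializes to '["/bin/sh", "-c", "apt-get update"]' before comparing it to
    # the command recorded on each candidate image.
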
    # Lookup the base image in the repository. If it doesn't exist, nothing more to do.
    repo_build = self.repo_build
    repo_namespace = repo_build.repository.namespace_user.username
    repo_name = repo_build.repository.name

    base_image = model.image.get_image(repo_build.repository, base_image_id)
    if base_image is None:
      return None

    # Build an in-memory tree of the full hierarchy of images in the repository.
    all_images = model.image.get_repository_images_without_placements(repo_build.repository,
                                                                      with_ancestor=base_image)

    all_tags = model.tag.list_repository_tags(repo_namespace, repo_name)
    tree = ImageTree(all_images, all_tags, base_filter=base_image.id)

    # Find a path in the tree, starting at the base image, that matches the cache comments
    # or some subset thereof.
    def checker(step, image):
      if step >= len(cache_commands):
        return False

      full_command = '["/bin/sh", "-c", "%s"]' % cache_commands[step]
      logger.debug('Checking step #%s: %s, %s == %s', step, image.id,
                   image.storage.command, full_command)

      return image.storage.command == full_command

    path = tree.find_longest_path(base_image.id, checker)
    if not path:
      return None

    # Find any tag associated with the last image in the path.
    return tree.tag_containing_image(path[-1])

  def _determine_cached_tag_by_tag(self):
    """ Determines the cached tag by looking for one of the tags being built, and seeing if it
        exists in the repository. This is a fallback for when no comment information is available.
    """
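    # For example (hypothetical values): if the build is tagged ['latest', 'v2'] and the
    # repository already has a 'latest' tag, that existing tag is used to prime the cache.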
    tags = self.build_config.get('docker_tags', ['latest'])
    repository = self.repo_build.repository
    existing_tags = model.tag.list_repository_tags(repository.namespace_user.username,
                                                   repository.name)
    cached_tags = set(tags) & set([tag.name for tag in existing_tags])
    if cached_tags:
      # The intersection is an unordered set, so any matching tag is an acceptable cache source.
      return list(cached_tags)[0]

    return None