import json
import logging

from cachetools import lru_cache

from endpoints.notificationhelper import spawn_notification
from data import model
from util.imagetree import ImageTree

logger = logging.getLogger(__name__)


class BuildJobLoadException(Exception):
    """ Exception raised if a build job could not be instantiated for some reason. """
    pass


class BuildJob(object):
    """ Represents a single in-progress build job. """

    def __init__(self, job_item):
        self.job_item = job_item

        try:
            self.job_details = json.loads(job_item.body)
        except ValueError:
            # The queue item body failed to parse, so no build UUID is available for the message.
            raise BuildJobLoadException('Could not parse build queue item config')

    def has_retries_remaining(self):
        return self.job_item.retries_remaining > 0

    def send_notification(self, kind, error_message=None, image_id=None):
        tags = self.build_config.get('docker_tags', ['latest'])
        event_data = {
            'build_id': self.repo_build.uuid,
            'build_name': self.repo_build.display_name,
            'docker_tags': tags,
            'trigger_id': self.repo_build.trigger.uuid,
            'trigger_kind': self.repo_build.trigger.service.name,
            'trigger_metadata': self.build_config.get('trigger_metadata', {})
        }

        if image_id is not None:
            event_data['image_id'] = image_id

        if error_message is not None:
            event_data['error_message'] = error_message

        spawn_notification(self.repo_build.repository, kind, event_data,
                           subpage='build/%s' % self.repo_build.uuid,
                           pathargs=['build', self.repo_build.uuid])

    @lru_cache(maxsize=1)
    def _load_repo_build(self):
        try:
            return model.build.get_repository_build(self.job_details['build_uuid'])
        except model.InvalidRepositoryBuildException:
            raise BuildJobLoadException(
                'Could not load repository build with ID %s' % self.job_details['build_uuid'])

    @property
    def repo_build(self):
        return self._load_repo_build()

    def get_build_package_url(self, user_files):
        """ Returns the URL of the build package for this build, if any, or an empty string if none. """
        archive_url = self.build_config.get('archive_url', None)
        if archive_url:
            return archive_url

        if not self.repo_build.resource_key:
            return ''

        return user_files.get_file_url(self.repo_build.resource_key, requires_cors=False)

    @property
    def pull_credentials(self):
        """ Returns the pull credentials for this job, or None if none. """
        return self.job_details.get('pull_credentials')

    @property
    def build_config(self):
        try:
            return json.loads(self.repo_build.job_config)
        except ValueError:
            raise BuildJobLoadException(
                'Could not parse repository build job config with ID %s' % self.job_details['build_uuid']
            )

    def determine_cached_tag(self, base_image_id=None, cache_comments=None):
        """ Returns the tag to pull to prime the cache, or None if none. """
        cached_tag = None
        if base_image_id and cache_comments:
            cached_tag = self._determine_cached_tag_by_comments(base_image_id, cache_comments)

        if not cached_tag:
            cached_tag = self._determine_cached_tag_by_tag()

        logger.debug('Determined cached tag %s for %s: %s', cached_tag, base_image_id, cache_comments)
        return cached_tag
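    # Illustrative note (an assumption about the caller, not stated in this module): the
    # cache_comments handed to _determine_cached_tag_by_comments below are expected to be the
    # shell commands from the Dockerfile's RUN instructions, e.g.
    #
    #   cache_comments = ['apt-get update', 'apt-get install -y git']
    #
    # The checker in that method compares each one against the command string recorded on the
    # image, which it assumes is stored in the shell-form Docker format:
    #
    #   ["/bin/sh", "-c", "apt-get update"]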
    def _determine_cached_tag_by_comments(self, base_image_id, cache_commands):
        """ Determines the tag to use for priming the cache for this build job, by matching
            commands starting at the given base_image_id. This mimics the Docker cache checking,
            so it should, in theory, provide "perfect" caching.
        """
        # Look up the base image in the repository. If it doesn't exist, there is nothing more to do.
        repo_build = self.repo_build
        repo_namespace = repo_build.repository.namespace_user.username
        repo_name = repo_build.repository.name

        base_image = model.image.get_image(repo_build.repository, base_image_id)
        if base_image is None:
            return None

        # Build an in-memory tree of the full hierarchy of images in the repository.
        all_images = model.image.get_repository_images_without_placements(repo_build.repository,
                                                                           with_ancestor=base_image)
        all_tags = model.tag.list_repository_tags(repo_namespace, repo_name)
        tree = ImageTree(all_images, all_tags, base_filter=base_image.id)

        # Find a path in the tree, starting at the base image, that matches the cache comments
        # or some subset thereof.
        def checker(step, image):
            if step >= len(cache_commands):
                return False

            full_command = '["/bin/sh", "-c", "%s"]' % cache_commands[step]
            logger.debug('Checking step #%s: %s, %s == %s', step, image.id,
                         image.storage.command, full_command)

            return image.storage.command == full_command

        path = tree.find_longest_path(base_image.id, checker)
        if not path:
            return None

        # Find any tag associated with the last image in the path.
        return tree.tag_containing_image(path[-1])

    def _determine_cached_tag_by_tag(self):
        """ Determines the cached tag by looking for one of the tags being built, and seeing if it
            exists in the repository. This is a fallback for when no comment information is
            available.
        """
        tags = self.build_config.get('docker_tags', ['latest'])
        repository = self.repo_build.repository
        existing_tags = model.tag.list_repository_tags(repository.namespace_user.username,
                                                       repository.name)
        cached_tags = set(tags) & set([tag.name for tag in existing_tags])
        if cached_tags:
            return list(cached_tags)[0]

        return None
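# Usage sketch (hedged): one way a build manager might construct a BuildJob from a queue item.
# The `dockerfile_build_queue` name and its `get(processing_time=...)` call are assumptions made
# for illustration only; they are not defined or imported in this module.
#
#   job_item = dockerfile_build_queue.get(processing_time=60)
#   if job_item is not None:
#       try:
#           build_job = BuildJob(job_item)
#       except BuildJobLoadException:
#           logger.exception('Could not load build job from queue item')
#       else:
#           cached_tag = build_job.determine_cached_tag()
#           logger.debug('Will prime the build cache by pulling tag %s', cached_tag)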