"""
Build trigger handlers.

Each handler turns webhook payloads / manual requests from a hosted git
service (GitHub, Bitbucket, GitLab, or a custom git endpoint) into a
PreparedBuild that the build system can execute.
"""
import logging
import io
import os.path
import tarfile
import base64
import re
import json

import gitlab

from endpoints.building import PreparedBuild
from github import (Github, UnknownObjectException, GithubException,
                    BadCredentialsException as GitHubBadCredentialsException)
from bitbucket import BitBucket
from tempfile import SpooledTemporaryFile
from jsonschema import validate

from data import model
from app import app, userfiles as user_files, github_trigger, get_app_url
from util.registry.tarfileappender import TarfileAppender
from util.security.ssh import generate_ssh_keypair

client = app.config['HTTPCLIENT']

logger = logging.getLogger(__name__)

TARBALL_MIME = 'application/gzip'
CHUNK_SIZE = 512 * 1024


class InvalidPayloadException(Exception):
  """ The incoming webhook payload was malformed or failed validation. """
  pass

class BuildArchiveException(Exception):
  """ The build archive (tarball) could not be prepared. """
  pass

class InvalidServiceException(Exception):
  """ No handler exists for the requested trigger service. """
  pass

class TriggerActivationException(Exception):
  """ The trigger could not be activated on the remote service. """
  pass

class TriggerDeactivationException(Exception):
  """ The trigger could not be deactivated on the remote service. """
  pass

class TriggerStartException(Exception):
  """ A manual build could not be started for the trigger. """
  pass

class ValidationRequestException(Exception):
  """ The request was a service-side probe/test, not a real build event. """
  pass

class SkipRequestException(Exception):
  """ The incoming request should be skipped (no build started). """
  pass

class EmptyRepositoryException(Exception):
  """ The remote repository has no usable contents. """
  pass

class RepositoryReadException(Exception):
  """ The remote repository could not be read. """
  pass

class TriggerProviderException(Exception):
  """ The trigger provider returned an unexpected error. """
  pass


def _determine_build_ref(run_parameters, get_branch_sha, get_tag_sha, default_branch):
  """ Resolves the (commit_sha, ref) pair to build from manual-start parameters.

      Falls back to a branch kind and the repository's default branch when the
      parameters do not specify a ref.
  """
  run_parameters = run_parameters or {}

  kind = ''
  value = ''

  if 'refs' in run_parameters and run_parameters['refs']:
    kind = run_parameters['refs']['kind']
    value = run_parameters['refs']['name']
  elif 'branch_name' in run_parameters:
    kind = 'branch'
    value = run_parameters['branch_name']

  kind = kind or 'branch'
  value = value or default_branch

  ref = 'refs/tags/' + value if kind == 'tag' else 'refs/heads/' + value
  commit_sha = get_tag_sha(value) if kind == 'tag' else get_branch_sha(value)
  return (commit_sha, ref)


def find_matching_branches(config, branches):
  """ Filters the given branch names by the trigger's branchtag regex.

      Returns all branches unchanged when no (valid) regex is configured.
  """
  if 'branchtag_regex' in config:
    try:
      regex = re.compile(config['branchtag_regex'])
      return [branch for branch in branches
              if matches_ref('refs/heads/' + branch, regex)]
    except re.error:
      # An invalid user-supplied regex means no filtering.
      pass

  return branches


def raise_if_skipped(config, ref):
  """ Raises a SkipRequestException if the given ref should be skipped. """
  if 'branchtag_regex' in config:
    try:
      regex = re.compile(config['branchtag_regex'])
    except re.error:
      # An invalid regex matches everything (never skips).
      regex = re.compile('.*')

    if not matches_ref(ref, regex):
      raise SkipRequestException()


def matches_ref(ref, regex):
  """ Returns True when the portion of the ref after 'refs/' fully matches regex. """
  match_string = ref.split('/', 1)[1]
  if not regex:
    return False

  m = regex.match(match_string)
  if not m:
    return False

  # Require a full match, not just a prefix match.
  return len(m.group(0)) == len(match_string)


def should_skip_commit(message):
  """ Returns True when the commit message opts out of builds. """
  return '[skip build]' in message or '[build skip]' in message


def raise_unsupported():
  raise io.UnsupportedOperation


def get_trigger_config(trigger):
  """ Parses and returns the trigger's stored JSON config, or {} on failure. """
  try:
    return json.loads(trigger.config)
  except (TypeError, ValueError):
    # trigger.config may be None (TypeError) or invalid JSON (ValueError).
    return {}


class BuildTriggerHandler(object):
  """ Abstract base class for all build trigger service handlers. """

  def __init__(self, trigger, override_config=None):
    self.trigger = trigger
    self.config = override_config or get_trigger_config(trigger)

  @property
  def auth_token(self):
    """ Returns the auth token for the trigger. """
    return self.trigger.auth_token

  def load_dockerfile_contents(self):
    """
    Loads the Dockerfile found for the trigger's config and returns them or None if none could
    be found/loaded.
    """
    raise NotImplementedError

  def list_build_sources(self):
    """
    Take the auth information for the specific trigger type and load the
    list of build sources(repositories).
    """
    raise NotImplementedError

  def list_build_subdirs(self):
    """
    Take the auth information and the specified config so far and list all of
    the possible subdirs containing dockerfiles.
    """
    raise NotImplementedError

  def handle_trigger_request(self):
    """
    Transform the incoming request data into a set of actions. Returns a PreparedBuild.
    """
    raise NotImplementedError

  def is_active(self):
    """
    Returns True if the current build trigger is active. Inactive means further
    setup is needed.
    """
    raise NotImplementedError

  def activate(self, standard_webhook_url):
    """
    Activates the trigger for the service, with the given new configuration.
    Returns new public and private config that should be stored if successful.
    """
    raise NotImplementedError

  def deactivate(self):
    """
    Deactivates the trigger for the service, removing any hooks installed in
    the remote service. Returns the new config that should be stored if this
    trigger is going to be re-activated.
    """
    raise NotImplementedError

  def manual_start(self, run_parameters=None):
    """
    Manually creates a repository build for this trigger. Returns a PreparedBuild.
    """
    raise NotImplementedError

  def list_field_values(self, field_name, limit=None):
    """
    Lists all values for the given custom trigger field. For example, a trigger might have a
    field named "branches", and this method would return all branches.
    """
    raise NotImplementedError

  def get_repository_url(self):
    """ Returns the URL of the current trigger's repository. Note that this operation
        can be called in a loop, so it should be as fast as possible. """
    raise NotImplementedError

  @classmethod
  def service_name(cls):
    """
    Particular service implemented by subclasses.
    """
    raise NotImplementedError

  @classmethod
  def get_handler(cls, trigger, override_config=None):
    """ Returns the handler subclass instance matching the trigger's service name. """
    for subc in cls.__subclasses__():
      if subc.service_name() == trigger.service.name:
        return subc(trigger, override_config)

    raise InvalidServiceException('Unable to find service: %s' % trigger.service.name)

  def put_config_key(self, key, value):
    """ Updates a config key in the trigger, saving it to the DB. """
    self.config[key] = value
    model.build.update_build_trigger(self.trigger, self.config)

  def set_auth_token(self, auth_token):
    """ Sets the auth token for the trigger, saving it to the DB. """
    model.build.update_build_trigger(self.trigger, self.config, auth_token=auth_token)

  def get_dockerfile_path(self):
    """ Returns the normalized path to the Dockerfile found in the subdirectory
        in the config. """
    subdirectory = self.config.get('subdir', '')
    if subdirectory == '/':
      subdirectory = ''
    else:
      if not subdirectory.endswith('/'):
        subdirectory = subdirectory + '/'

    return subdirectory + 'Dockerfile'


class BitbucketBuildTrigger(BuildTriggerHandler):
  """
  BuildTrigger for Bitbucket.
  """
  @classmethod
  def service_name(cls):
    return 'bitbucket'

  def _get_client(self):
    """ Returns an unauthorized Bitbucket OAuth1 client for this trigger. """
    key = app.config.get('BITBUCKET_TRIGGER_CONFIG', {}).get('CONSUMER_KEY', '')
    secret = app.config.get('BITBUCKET_TRIGGER_CONFIG', {}).get('CONSUMER_SECRET', '')

    trigger_uuid = self.trigger.uuid
    callback_url = '%s/oauth1/bitbucket/callback/trigger/%s' % (get_app_url(), trigger_uuid)

    return BitBucket(key, secret, callback_url)

  def _get_authorized_client(self):
    """ Returns a Bitbucket client authorized with the trigger's stored token. """
    base_client = self._get_client()

    # The auth token is stored as 'access_token:access_token_secret'.
    auth_token = self.auth_token or 'invalid:invalid'
    token_parts = auth_token.split(':')
    if len(token_parts) != 2:
      token_parts = ['invalid', 'invalid']

    (access_token, access_token_secret) = token_parts
    return base_client.get_authorized_client(access_token, access_token_secret)

  def _get_repository_client(self):
    """ Returns a client scoped to the trigger's configured repository. """
    source = self.config['build_source']
    (namespace, name) = source.split('/')
    bitbucket_client = self._get_authorized_client()
    return bitbucket_client.for_namespace(namespace).repositories().get(name)

  def _get_default_branch(self, repository, default_value='master'):
    """ Returns the repository's main branch name, or default_value on failure. """
    (result, data, _) = repository.get_main_branch()
    if result:
      return data['name']

    return default_value

  def get_oauth_url(self):
    """ Returns the Bitbucket OAuth authorization URL for this trigger. """
    bitbucket_client = self._get_client()
    (result, data, err_msg) = bitbucket_client.get_authorization_url()
    if not result:
      raise RepositoryReadException(err_msg)

    return data

  def exchange_verifier(self, verifier):
    """ Exchanges an OAuth verifier for an access token and stores it.
        Returns True on success. """
    bitbucket_client = self._get_client()
    access_token = self.config.get('access_token', '')
    access_token_secret = self.auth_token

    # Exchange the verifier for a new access token.
    (result, data, _) = bitbucket_client.verify_token(access_token, access_token_secret, verifier)
    if not result:
      return False

    # Save the updated access token and secret.
    self.set_auth_token(data[0] + ':' + data[1])

    # Retrieve the current authorized user's information and store the username in the config.
    authorized_client = self._get_authorized_client()
    (result, data, _) = authorized_client.get_current_user()
    if not result:
      return False

    username = data['user']['username']
    self.put_config_key('username', username)
    return True

  def is_active(self):
    return 'webhook_id' in self.config

  def activate(self, standard_webhook_url):
    config = self.config

    # Add a deploy key to the repository.
    public_key, private_key = generate_ssh_keypair()
    config['credentials'] = [
      {
        'name': 'SSH Public Key',
        'value': public_key,
      },
    ]

    repository = self._get_repository_client()
    (result, created_deploykey, err_msg) = repository.deploykeys().create(
      app.config['REGISTRY_TITLE'] + ' webhook key', public_key)

    if not result:
      msg = 'Unable to add deploy key to repository: %s' % err_msg
      raise TriggerActivationException(msg)

    config['deploy_key_id'] = created_deploykey['pk']

    # Add a webhook callback.
    description = 'Webhook for invoking builds on %s' % app.config['REGISTRY_TITLE_SHORT']
    webhook_events = ['repo:push']
    (result, created_webhook, err_msg) = repository.webhooks().create(
      description, standard_webhook_url, webhook_events)

    if not result:
      msg = 'Unable to add webhook to repository: %s' % err_msg
      raise TriggerActivationException(msg)

    config['webhook_id'] = created_webhook['uuid']
    self.config = config
    return config, {'private_key': private_key}

  def deactivate(self):
    config = self.config

    webhook_id = config.pop('webhook_id', None)
    deploy_key_id = config.pop('deploy_key_id', None)
    repository = self._get_repository_client()

    # Remove the webhook.
    if webhook_id is not None:
      (result, _, err_msg) = repository.webhooks().delete(webhook_id)
      if not result:
        msg = 'Unable to remove webhook from repository: %s' % err_msg
        raise TriggerDeactivationException(msg)

    # Remove the public key.
    if deploy_key_id is not None:
      (result, _, err_msg) = repository.deploykeys().delete(deploy_key_id)
      if not result:
        msg = 'Unable to remove deploy key from repository: %s' % err_msg
        raise TriggerDeactivationException(msg)

    return config

  def list_build_sources(self):
    bitbucket_client = self._get_authorized_client()
    (result, data, err_msg) = bitbucket_client.get_visible_repositories()
    if not result:
      raise RepositoryReadException('Could not read repository list: ' + err_msg)

    namespaces = {}
    for repo in data:
      # Only git repositories can be built (Bitbucket also hosts hg).
      if not repo['scm'] == 'git':
        continue

      owner = repo['owner']
      if owner not in namespaces:
        namespaces[owner] = {
          'personal': owner == self.config.get('username'),
          'repos': [],
          'info': {
            'name': owner
          }
        }

      namespaces[owner]['repos'].append(owner + '/' + repo['slug'])

    return namespaces.values()

  def list_build_subdirs(self):
    config = self.config
    repository = self._get_repository_client()

    # Find the first matching branch.
    repo_branches = self.list_field_values('branch_name') or []
    branches = find_matching_branches(config, repo_branches)
    if not branches:
      branches = [self._get_default_branch(repository)]

    (result, data, err_msg) = repository.get_path_contents('', revision=branches[0])
    if not result:
      raise RepositoryReadException(err_msg)

    files = set([f['path'] for f in data['files']])
    if 'Dockerfile' in files:
      return ['/']

    return []

  def load_dockerfile_contents(self):
    repository = self._get_repository_client()
    path = self.get_dockerfile_path()

    (result, data, err_msg) = repository.get_raw_path_contents(path, revision='master')
    if not result:
      raise RepositoryReadException(err_msg)

    return data

  def list_field_values(self, field_name, limit=None):
    source = self.config['build_source']
    (namespace, name) = source.split('/')

    bitbucket_client = self._get_authorized_client()
    repository = bitbucket_client.for_namespace(namespace).repositories().get(name)

    if field_name == 'refs':
      (result, data, _) = repository.get_branches_and_tags()
      if not result:
        return None

      branches = [b['name'] for b in data['branches']]
      tags = [t['name'] for t in data['tags']]

      return ([{'kind': 'branch', 'name': b} for b in branches] +
              [{'kind': 'tag', 'name': tag} for tag in tags])

    if field_name == 'tag_name':
      (result, data, _) = repository.get_tags()
      if not result:
        return None

      tags = list(data.keys())
      if limit:
        tags = tags[0:limit]

      return tags

    if field_name == 'branch_name':
      (result, data, _) = repository.get_branches()
      if not result:
        return None

      branches = list(data.keys())
      if limit:
        branches = branches[0:limit]

      return branches

    return None

  _BITBUCKET_COMMIT_URL = 'https://bitbucket.org/%s/%s/commits/%s'

  def _prepare_build(self, commit_sha, ref, is_manual, target=None, actor=None):
    """ Builds a PreparedBuild for the given commit. When target (webhook change
        target) is None, the commit data is looked up via the Bitbucket API. """
    def _build_user_block(info):
      return {
        'username': info['username'],
        'url': info['links']['html']['href'],
        'avatar_url': info['links']['avatar']['href'],
        'display_name': info['display_name']
      }

    config = self.config
    repository = self._get_repository_client()

    # Lookup the default branch associated with the repository. We use this when building
    # the tags.
    default_branch = self._get_default_branch(repository)

    # Lookup the commit sha (if necessary)
    data = {}
    if target is None:
      (result, data, _) = repository.changesets().get(commit_sha)
      if not result:
        raise TriggerStartException('Could not lookup commit SHA')

    namespace = repository.namespace
    name = repository.repository_name

    # Build the commit information.
    commit_url = self._BITBUCKET_COMMIT_URL % (namespace, name, commit_sha)
    if target is not None and 'links' in target:
      commit_url = target['links']['html']['href']

    commit_info = {
      'url': commit_url,
      'message': target['message'] if target else data['message'],
      'date': target['date'] if target else data['timestamp']
    }

    # Add the commit's author.
    if target is not None and target.get('author') and 'user' in target['author']:
      commit_info['author'] = _build_user_block(target['author']['user'])
    elif data.get('raw_author'):
      # Try to lookup the author by email address. The raw_author field (if it exists) is returned
      # in the form: "Joseph Schorr <email address>"
      match = re.compile(r'.*<(.+)>').match(data['raw_author'])
      if match:
        email_address = match.group(1)
        bitbucket_client = self._get_authorized_client()
        (result, data, _) = bitbucket_client.accounts().get_profile(email_address)
        if result:
          commit_info['author'] = {
            'username': data['user']['username'],
            'url': 'https://bitbucket.org/%s/' % data['user']['username'],
            'avatar_url': data['user']['avatar']
          }

    # Add the commit's actor (committer).
    if actor is not None:
      commit_info['committer'] = _build_user_block(actor)

    metadata = {
      'commit': commit_sha,
      'ref': ref,
      'default_branch': default_branch,
      'git_url': 'git@bitbucket.org:%s/%s.git' % (namespace, name),
      'commit_info': commit_info
    }

    prepared = PreparedBuild(self.trigger)
    prepared.tags_from_ref(ref, default_branch)
    prepared.name_from_sha(commit_sha)
    prepared.subdirectory = config['subdir']
    prepared.metadata = metadata
    prepared.is_manual = is_manual
    return prepared

  def handle_trigger_request(self, request):
    payload = request.get_json()
    if not payload or 'push' not in payload:
      logger.debug('Skipping BitBucket request due to missing push data in payload')
      raise SkipRequestException()

    push_payload = payload['push']
    if 'changes' not in push_payload or not push_payload['changes']:
      logger.debug('Skipping BitBucket request due to empty changes list')
      raise SkipRequestException()

    # Make sure we have a new change.
    changes = push_payload['changes']
    last_change = changes[-1]
    if not last_change.get('new'):
      logger.debug('Skipping BitBucket request due to change being a deletion')
      raise SkipRequestException()

    change_info = last_change['new']
    change_target = change_info.get('target')
    if not change_target:
      logger.debug('Skipping BitBucket request due to missing change target')
      raise SkipRequestException()

    # Check if this build should be skipped by commit message.
    commit_message = change_target.get('message', '')
    if should_skip_commit(commit_message):
      logger.debug('Skipping BitBucket request due to commit message request')
      raise SkipRequestException()

    # Check to see if this build should be skipped by ref.
    ref = ('refs/heads/' + change_info['name'] if change_info['type'] == 'branch'
           else 'refs/tags/' + change_info['name'])
    logger.debug('Checking BitBucket request: %s', ref)
    raise_if_skipped(self.config, ref)

    # Prepare the build.
    commit_sha = change_target['hash']
    return self._prepare_build(commit_sha, ref, False,
                               target=change_target,
                               actor=payload.get('actor'))

  def manual_start(self, run_parameters=None):
    run_parameters = run_parameters or {}
    repository = self._get_repository_client()

    def get_branch_sha(branch_name):
      # Lookup the commit SHA for the branch.
      (result, data, _) = repository.get_branches()
      if not result or branch_name not in data:
        raise TriggerStartException('Could not find branch commit SHA')

      return data[branch_name]['node']

    def get_tag_sha(tag_name):
      # Lookup the commit SHA for the tag.
      (result, data, _) = repository.get_tags()
      if not result or tag_name not in data:
        raise TriggerStartException('Could not find tag commit SHA')

      return data[tag_name]['node']

    # Find the branch or tag to build.
    (commit_sha, ref) = _determine_build_ref(run_parameters, get_branch_sha, get_tag_sha,
                                             self._get_default_branch(repository))

    return self._prepare_build(commit_sha, ref, True)

  def get_repository_url(self):
    source = self.config['build_source']
    (namespace, name) = source.split('/')
    return 'https://bitbucket.org/%s/%s' % (namespace, name)


class GithubBuildTrigger(BuildTriggerHandler):
  """
  BuildTrigger for GitHub that uses the archive API and buildpacks.
  """
  def _get_client(self):
    """ Returns a PyGithub client authorized with the trigger's token. """
    return Github(self.auth_token,
                  base_url=github_trigger.api_endpoint(),
                  client_id=github_trigger.client_id(),
                  client_secret=github_trigger.client_secret())

  @classmethod
  def service_name(cls):
    return 'github'

  def is_active(self):
    return 'hook_id' in self.config

  def activate(self, standard_webhook_url):
    config = self.config
    new_build_source = config['build_source']
    gh_client = self._get_client()

    # Find the GitHub repository.
    try:
      gh_repo = gh_client.get_repo(new_build_source)
    except UnknownObjectException:
      msg = 'Unable to find GitHub repository for source: %s' % new_build_source
      raise TriggerActivationException(msg)

    # Add a deploy key to the GitHub repository.
    public_key, private_key = generate_ssh_keypair()
    config['credentials'] = [
      {
        'name': 'SSH Public Key',
        'value': public_key,
      },
    ]
    try:
      deploy_key = gh_repo.create_key('%s Builder' % app.config['REGISTRY_TITLE'],
                                      public_key)
      config['deploy_key_id'] = deploy_key.id
    except GithubException:
      msg = 'Unable to add deploy key to repository: %s' % new_build_source
      raise TriggerActivationException(msg)

    # Add the webhook to the GitHub repository.
    webhook_config = {
      'url': standard_webhook_url,
      'content_type': 'json',
    }
    try:
      hook = gh_repo.create_hook('web', webhook_config)
      config['hook_id'] = hook.id
      config['master_branch'] = gh_repo.default_branch
    except GithubException:
      msg = 'Unable to create webhook on repository: %s' % new_build_source
      raise TriggerActivationException(msg)

    return config, {'private_key': private_key}

  def deactivate(self):
    config = self.config
    gh_client = self._get_client()

    # Find the GitHub repository.
    try:
      repo = gh_client.get_repo(config['build_source'])
    except UnknownObjectException:
      msg = 'Unable to find GitHub repository for source: %s' % config['build_source']
      raise TriggerDeactivationException(msg)
    except GitHubBadCredentialsException:
      msg = 'Unable to access repository to disable trigger'
      raise TriggerDeactivationException(msg)

    # If the trigger uses a deploy key, remove it.
    try:
      if config['deploy_key_id']:
        deploy_key = repo.get_key(config['deploy_key_id'])
        deploy_key.delete()
    except KeyError:
      # There was no config['deploy_key_id'], thus this is an old trigger without a deploy key.
      pass
    except GithubException:
      msg = 'Unable to remove deploy key: %s' % config['deploy_key_id']
      raise TriggerDeactivationException(msg)

    # Remove the webhook.
    try:
      hook = repo.get_hook(config['hook_id'])
      hook.delete()
    except GithubException:
      msg = 'Unable to remove hook: %s' % config['hook_id']
      raise TriggerDeactivationException(msg)

    config.pop('hook_id', None)
    self.config = config
    return config

  def list_build_sources(self):
    gh_client = self._get_client()
    usr = gh_client.get_user()

    try:
      repos = usr.get_repos()
    except GithubException:
      raise RepositoryReadException('Unable to list user repositories')

    namespaces = {}
    has_non_personal = False

    for repository in repos:
      namespace = repository.owner.login
      if namespace not in namespaces:
        is_personal_repo = namespace == usr.login
        namespaces[namespace] = {
          'personal': is_personal_repo,
          'repos': [],
          'info': {
            'name': namespace,
            'avatar_url': repository.owner.avatar_url
          }
        }

        if not is_personal_repo:
          has_non_personal = True

      namespaces[namespace]['repos'].append(repository.full_name)

    # In older versions of GitHub Enterprise, the get_repos call above does not
    # return any non-personal repositories. In that case, we need to lookup the
    # repositories manually.
    # TODO: Remove this once we no longer support GHE versions <= 2.1
    if not has_non_personal:
      for org in usr.get_orgs():
        repo_list = [repo.full_name for repo in org.get_repos(type='member')]
        namespaces[org.name] = {
          'personal': False,
          'repos': repo_list,
          'info': {
            'name': org.name or org.login,
            'avatar_url': org.avatar_url
          }
        }

    entries = list(namespaces.values())
    entries.sort(key=lambda e: e['info']['name'])
    return entries

  def list_build_subdirs(self):
    config = self.config
    gh_client = self._get_client()
    source = config['build_source']

    try:
      repo = gh_client.get_repo(source)

      # Find the first matching branch.
      repo_branches = self.list_field_values('branch_name') or []
      branches = find_matching_branches(config, repo_branches)
      branches = branches or [repo.default_branch or 'master']
      default_commit = repo.get_branch(branches[0]).commit
      commit_tree = repo.get_git_tree(default_commit.sha, recursive=True)

      return [os.path.dirname(elem.path) for elem in commit_tree.tree
              if (elem.type == u'blob' and
                  os.path.basename(elem.path) == u'Dockerfile')]
    except GithubException as ge:
      message = ge.data.get('message', 'Unable to list contents of repository: %s' % source)
      if message == 'Branch not found':
        raise EmptyRepositoryException()

      raise RepositoryReadException(message)

  def load_dockerfile_contents(self):
    config = self.config
    gh_client = self._get_client()
    source = config['build_source']
    path = self.get_dockerfile_path()

    try:
      repo = gh_client.get_repo(source)
      file_info = repo.get_file_contents(path)
      if file_info is None:
        return None

      content = file_info.content
      if file_info.encoding == 'base64':
        content = base64.b64decode(content)
      return content
    except GithubException as ge:
      message = ge.data.get('message', 'Unable to read Dockerfile: %s' % source)
      raise RepositoryReadException(message)

  @staticmethod
  def _build_commit_info(repo, payload, commit_sha):
    """ Builds the commit_info block, preferring API data when a repo handle exists. """
    if repo:
      return GithubBuildTrigger._build_repo_commit_info(repo, commit_sha)
    else:
      return GithubBuildTrigger._build_payload_commit_info(payload, commit_sha)

  @staticmethod
  def _build_payload_commit_info(payload, commit_sha):
    """ Builds commit_info solely from the webhook payload (no API access). """
    head_commit = payload.get('head_commit', {})
    sender = payload.get('sender', {})

    commit_info = {
      'url': head_commit.get('url', ''),
      'message': head_commit.get('message', ''),
      'date': head_commit.get('timestamp', ''),
    }

    if 'author' in head_commit:
      commit_info['author'] = {
        'username': head_commit['author'].get('username'),
      }

      # The sender block only applies when it refers to the same user.
      if head_commit['author']['username'] == sender.get('login'):
        commit_info['author']['avatar_url'] = sender.get('avatar_url', '')
        commit_info['author']['url'] = sender.get('html_url', '')

    if 'committer' in head_commit:
      commit_info['committer'] = {
        'username': head_commit['committer'].get('username'),
      }

      if head_commit['committer']['username'] == sender.get('login'):
        commit_info['committer']['avatar_url'] = sender.get('avatar_url', '')
        commit_info['committer']['url'] = sender.get('html_url', '')

    return commit_info

  @staticmethod
  def _build_repo_commit_info(repo, commit_sha):
    """ Builds commit_info via the GitHub API; returns None when the lookup fails. """
    try:
      commit = repo.get_commit(commit_sha)
    except GithubException:
      logger.exception('Could not load data for commit')
      return

    commit_info = {
      'url': commit.html_url,
      'message': commit.commit.message,
      'date': commit.last_modified
    }

    if commit.author:
      commit_info['author'] = {
        'username': commit.author.login,
        'avatar_url': commit.author.avatar_url,
        'url': commit.author.html_url
      }

    if commit.committer:
      commit_info['committer'] = {
        'username': commit.committer.login,
        'avatar_url': commit.committer.avatar_url,
        'url': commit.committer.html_url
      }

    return commit_info

  @staticmethod
  def _prepare_tarball(repo, commit_sha):
    """ Downloads the repo tarball for the commit, injects minimal .git metadata
        and uploads it to user files. Returns (tarball_subdir, dockerfile_id). """
    # Prepare the download and upload URLs
    archive_link = repo.get_archive_link('tarball', commit_sha)
    download_archive = client.get(archive_link, stream=True)
    tarball_subdir = ''

    with SpooledTemporaryFile(CHUNK_SIZE) as tarball:
      for chunk in download_archive.iter_content(CHUNK_SIZE):
        tarball.write(chunk)

      # Seek to position 0 to make tarfile happy
      tarball.seek(0)

      # Pull out the name of the subdir that GitHub generated
      with tarfile.open(fileobj=tarball) as archive:
        tarball_subdir = archive.getnames()[0]

      # Seek to position 0 to make tarfile happy.
      tarball.seek(0)

      entries = {
        tarball_subdir + '/.git/HEAD': commit_sha,
        tarball_subdir + '/.git/objects/': None,
        tarball_subdir + '/.git/refs/': None
      }

      appender = TarfileAppender(tarball, entries).get_stream()
      dockerfile_id = user_files.store_file(appender, TARBALL_MIME)

    logger.debug('Successfully prepared job')

    return tarball_subdir, dockerfile_id

  def _get_payload(self, payload, *args):
    """ Safely digs into a nested payload dict, returning {} for missing keys. """
    current = payload
    for arg in args:
      current = current.get(arg, {})
    return current

  def _prepare_build(self, ref, commit_sha, is_manual, repo=None, payload=None):
    config = self.config
    prepared = PreparedBuild(self.trigger)

    # If the trigger isn't using git, prepare the buildpack.
    if self.trigger.private_key is None:
      if repo is None:
        raise SkipRequestException()

      tarball_subdir, dockerfile_id = GithubBuildTrigger._prepare_tarball(repo, commit_sha)

      prepared.subdirectory = os.path.join(tarball_subdir, config['subdir'])
      prepared.dockerfile_id = dockerfile_id
    else:
      prepared.subdirectory = config['subdir']

    # Set the name.
    prepared.name_from_sha(commit_sha)

    # Set the tag(s).
    if repo:
      default_branch = repo.default_branch
    else:
      default_branch = self._get_payload(payload, 'repository', 'default_branch')

    prepared.tags_from_ref(ref, default_branch)

    # Build and set the metadata.
    metadata = {
      'commit': commit_sha,
      'ref': ref,
      'default_branch': default_branch,
      'git_url': repo.ssh_url if repo else self._get_payload(payload, 'repository', 'ssh_url'),
    }

    # add the commit info.
    commit_info = GithubBuildTrigger._build_commit_info(repo, payload, commit_sha)
    if commit_info is not None:
      metadata['commit_info'] = commit_info

    prepared.metadata = metadata
    prepared.is_manual = is_manual
    return prepared

  def handle_trigger_request(self, request):
    # Check the payload to see if we should skip it based on the lack of a head_commit.
    payload = request.get_json()
    if not payload or payload.get('head_commit') is None:
      raise SkipRequestException()

    # This is for GitHub's probing/testing.
    if 'zen' in payload:
      raise ValidationRequestException()

    logger.debug('GitHub trigger payload %s', payload)

    ref = payload['ref']
    commit_sha = payload['head_commit']['id']
    commit_message = payload['head_commit'].get('message', '')

    # Check if this build should be skipped by commit message.
    if should_skip_commit(commit_message):
      raise SkipRequestException()

    # Check to see if this build should be skipped by ref.
    raise_if_skipped(self.config, ref)

    try:
      repo_full_name = '%s/%s' % (payload['repository']['owner']['name'],
                                  payload['repository']['name'])

      gh_client = self._get_client()
      repo = gh_client.get_repo(repo_full_name)
      return self._prepare_build(ref, commit_sha, False, repo=repo)
    except GitHubBadCredentialsException:
      logger.exception('Got GitHub Credentials Exception, retrying with a manual payload')
      return self._prepare_build(ref, commit_sha, False, payload=payload)
    except GithubException:
      logger.exception("Got GitHub Exception when trying to start trigger %s", self.trigger.id)
      raise SkipRequestException()

  def manual_start(self, run_parameters=None):
    config = self.config
    source = config['build_source']

    try:
      gh_client = self._get_client()
      repo = gh_client.get_repo(source)
      default_branch = repo.default_branch
    except GithubException as ghe:
      raise TriggerStartException(ghe.data['message'])

    def get_branch_sha(branch_name):
      branch = repo.get_branch(branch_name)
      return branch.commit.sha

    def get_tag_sha(tag_name):
      tags = {tag.name: tag for tag in repo.get_tags()}
      if tag_name not in tags:
        raise TriggerStartException('Could not find tag in repository')

      return tags[tag_name].commit.sha

    # Find the branch or tag to build.
    (commit_sha, ref) = _determine_build_ref(run_parameters, get_branch_sha, get_tag_sha,
                                             default_branch)

    return self._prepare_build(ref, commit_sha, True, repo=repo)

  def list_field_values(self, field_name, limit=None):
    if field_name == 'refs':
      branches = self.list_field_values('branch_name')
      tags = self.list_field_values('tag_name')

      return ([{'kind': 'branch', 'name': b} for b in branches] +
              [{'kind': 'tag', 'name': tag} for tag in tags])

    config = self.config
    if field_name == 'tag_name':
      try:
        gh_client = self._get_client()
        source = config['build_source']
        repo = gh_client.get_repo(source)
        gh_tags = repo.get_tags()
        if limit:
          # Slice the already-fetched paginated list rather than re-querying the API.
          gh_tags = gh_tags[0:limit]

        return [tag.name for tag in gh_tags]
      except GitHubBadCredentialsException:
        return []
      except GithubException:
        logger.exception("Got GitHub Exception when trying to list tags for trigger %s",
                         self.trigger.id)
        return []

    if field_name == 'branch_name':
      try:
        gh_client = self._get_client()
        source = config['build_source']
        repo = gh_client.get_repo(source)
        gh_branches = repo.get_branches()
        if limit:
          # Slice the already-fetched paginated list rather than re-querying the API.
          gh_branches = gh_branches[0:limit]

        branches = [branch.name for branch in gh_branches]

        # Ensure the default branch is present and listed first.
        if repo.default_branch not in branches:
          branches.insert(0, repo.default_branch)

        if branches[0] != repo.default_branch:
          branches.remove(repo.default_branch)
          branches.insert(0, repo.default_branch)

        return branches
      except GitHubBadCredentialsException:
        return ['master']
      except GithubException:
        logger.exception("Got GitHub Exception when trying to list branches for trigger %s",
                         self.trigger.id)
        return ['master']

    return None

  def get_repository_url(self):
    from app import github_trigger
    source = self.config['build_source']
    return github_trigger.get_public_url(source)


class CustomBuildTrigger(BuildTriggerHandler):
  """ BuildTrigger for custom git repositories driven by a user-invoked webhook. """

  # JSON schema for the payload a custom webhook must POST.
  payload_schema = {
    'type': 'object',
    'properties': {
      'commit': {
        'type': 'string',
        'description': 'first 7 characters of the SHA-1 identifier for a git commit',
        'pattern': '^([A-Fa-f0-9]{7,})$',
      },
      'ref': {
        'type': 'string',
        'description': 'git reference for a git commit',
        'pattern': '^refs\/(heads|tags|remotes)\/(.+)$',
      },
      'default_branch': {
        'type': 'string',
        'description': 'default branch of the git repository',
      },
      'commit_info': {
        'type': 'object',
        'description': 'metadata about a git commit',
        'properties': {
          'url': {
            'type': 'string',
            'description': 'URL to view a git commit',
          },
          'message': {
            'type': 'string',
            'description': 'git commit message',
          },
          'date': {
            'type': 'string',
            'description': 'timestamp for a git commit'
          },
          'author': {
            'type': 'object',
            'description': 'metadata about the author of a git commit',
            'properties': {
              'username': {
                'type': 'string',
                'description': 'username of the author',
              },
              'url': {
                'type': 'string',
                'description': 'URL to view the profile of the author',
              },
              'avatar_url': {
                'type': 'string',
                'description': 'URL to view the avatar of the author',
              },
            },
            'required': ['username', 'url', 'avatar_url'],
          },
          'committer': {
            'type': 'object',
            'description': 'metadata about the committer of a git commit',
            'properties': {
              'username': {
                'type': 'string',
                'description': 'username of the committer',
              },
              'url': {
                'type': 'string',
                'description': 'URL to view the profile of the committer',
              },
              'avatar_url': {
                'type': 'string',
                'description': 'URL to view the avatar of the committer',
              },
            },
            'required': ['username', 'url', 'avatar_url'],
          },
        },
        'required': ['url', 'message', 'date'],
      },
    },
    'required': ['commit', 'ref', 'default_branch'],
  }

  @classmethod
  def service_name(cls):
    return 'custom-git'

  def is_active(self):
    return 'credentials' in self.config

  def _metadata_from_payload(self, payload):
    """ Parses and schema-validates the JSON payload; raises InvalidPayloadException. """
    try:
      metadata = json.loads(payload)
      validate(metadata, self.payload_schema)
    except Exception as e:
      raise InvalidPayloadException(e.message)
    return metadata

  def handle_trigger_request(self, request):
    # Skip if there is no payload.
    payload = request.data
    if not payload:
      raise InvalidPayloadException()

    logger.debug('Payload %s', payload)

    # Skip if the commit message matches.
    metadata = self._metadata_from_payload(payload)
    if should_skip_commit(metadata.get('commit_info', {}).get('message', '')):
      raise SkipRequestException()

    # The build source is the canonical git URL used to clone.
    config = self.config
    metadata['git_url'] = config['build_source']

    prepared = PreparedBuild(self.trigger)
    prepared.tags_from_ref(metadata['ref'])
    prepared.name_from_sha(metadata['commit'])
    prepared.subdirectory = config['subdir']
    prepared.metadata = metadata
    prepared.is_manual = False

    return prepared

  def manual_start(self, run_parameters=None):
    # Guard against the default None so .get below cannot raise TypeError.
    run_parameters = run_parameters or {}

    # commit_sha is the only required parameter
    commit_sha = run_parameters.get('commit_sha')
    if commit_sha is None:
      raise TriggerStartException('missing required parameter')

    config = self.config
    metadata = {
      'commit': commit_sha,
      'git_url': config['build_source'],
    }

    prepared = PreparedBuild(self.trigger)
    prepared.tags = [commit_sha[:7]]
    prepared.name_from_sha(commit_sha)
    prepared.subdirectory = config['subdir']
    prepared.metadata = metadata
    prepared.is_manual = True

    return prepared

  def activate(self, standard_webhook_url):
    config = self.config
    public_key, private_key = generate_ssh_keypair()
    config['credentials'] = [
      {
        'name': 'SSH Public Key',
        'value': public_key,
      },
      {
        'name': 'Webhook Endpoint URL',
        'value': standard_webhook_url,
      },
    ]
    self.config = config
    return config, {'private_key': private_key}

  def deactivate(self):
    config = self.config
    config.pop('credentials', None)
    self.config = config
    return config

  def get_repository_url(self):
    return None


class GitLabBuildTrigger(BuildTriggerHandler):
  """
  BuildTrigger for GitLab.
  """
  @classmethod
  def service_name(cls):
    return 'gitlab'

  def _get_authorized_client(self):
    """ Returns a python-gitlab client authorized with the trigger's OAuth token. """
    host = app.config.get('GITLAB_TRIGGER_CONFIG', {}).get('GITLAB_ENDPOINT', '')
    auth_token = self.auth_token or 'invalid'
    return gitlab.Gitlab(host, oauth_token=auth_token)

  def is_active(self):
    return 'hook_id' in self.config

  def activate(self, standard_webhook_url):
    config = self.config
    new_build_source = config['build_source']
    gl_client = self._get_authorized_client()

    # Find the GitLab repository.
    repository = gl_client.getproject(new_build_source)
    if repository is False:
      msg = 'Unable to find GitLab repository for source: %s' % new_build_source
      raise TriggerActivationException(msg)

    # Add a deploy key to the repository.
    public_key, private_key = generate_ssh_keypair()
    config['credentials'] = [
      {
        'name': 'SSH Public Key',
        'value': public_key,
      },
    ]
    key = gl_client.adddeploykey(repository['id'],
                                 '%s Builder' % app.config['REGISTRY_TITLE'],
                                 public_key)
    if key is False:
      msg = 'Unable to add deploy key to repository: %s' % new_build_source
      raise TriggerActivationException(msg)
    config['key_id'] = key['id']

    # Add the webhook to the GitLab repository.
    hook = gl_client.addprojecthook(repository['id'], standard_webhook_url, push=True)
    if hook is False:
      msg = 'Unable to create webhook on repository: %s' % new_build_source
      raise TriggerActivationException(msg)

    config['hook_id'] = hook['id']
    self.config = config
    return config, {'private_key': private_key}

  def deactivate(self):
    config = self.config
    gl_client = self._get_authorized_client()

    # Find the GitLab repository.
    repository = gl_client.getproject(config['build_source'])
    if repository is False:
      msg = 'Unable to find GitLab repository for source: %s' % config['build_source']
      raise TriggerDeactivationException(msg)

    # Remove the webhook.
success = gl_client.deleteprojecthook(repository['id'], config['hook_id']) if success is False: msg = 'Unable to remove hook: %s' % config['hook_id'] raise TriggerDeactivationException(msg) config.pop('hook_id', None) # Remove the key success = gl_client.deletedeploykey(repository['id'], config['key_id']) if success is False: msg = 'Unable to remove deploy key: %s' % config['key_id'] raise TriggerDeactivationException(msg) config.pop('key_id', None) self.config = config return config def list_build_sources(self): gl_client = self._get_authorized_client() current_user = gl_client.currentuser() if current_user is False: raise RepositoryReadException('Unable to get current user') repositories = gl_client.getprojects() if repositories is False: raise RepositoryReadException('Unable to list user repositories') namespaces = {} for repo in repositories: owner = repo['namespace']['name'] if not owner in namespaces: namespaces[owner] = { 'personal': owner == current_user['username'], 'repos': [], 'info': { 'name': owner, } } namespaces[owner]['repos'].append(repo['path_with_namespace']) return namespaces.values() def list_build_subdirs(self): config = self.config gl_client = self._get_authorized_client() new_build_source = config['build_source'] repository = gl_client.getproject(new_build_source) if repository is False: msg = 'Unable to find GitLab repository for source: %s' % new_build_source raise RepositoryReadException(msg) repo_branches = gl_client.getbranches(repository['id']) if repo_branches is False: msg = 'Unable to find GitLab branches for source: %s' % new_build_source raise RepositoryReadException(msg) branches = [branch['name'] for branch in repo_branches] branches = find_matching_branches(config, branches) branches = branches or [repository['default_branch'] or 'master'] repo_tree = gl_client.getrepositorytree(repository['id'], ref_name=branches[0]) if repo_tree is False: msg = 'Unable to find GitLab repository tree for source: %s' % new_build_source raise 
RepositoryReadException(msg) for node in repo_tree: if node['name'] == 'Dockerfile': return ['/'] return [] def load_dockerfile_contents(self): gl_client = self._get_authorized_client() path = self.get_dockerfile_path() repository = gl_client.getproject(self.config['build_source']) if repository is False: return None branches = self.list_field_values('branch_name') branches = find_matching_branches(self.config, branches) if branches == []: return None branch_name = branches[0] if repository['default_branch'] in branches: branch_name = repository['default_branch'] contents = gl_client.getrawfile(repository['id'], branch_name, path) if contents is False: return None return contents def list_field_values(self, field_name, limit=None): if field_name == 'refs': branches = self.list_field_values('branch_name') tags = self.list_field_values('tag_name') return ([{'kind': 'branch', 'name': b} for b in branches] + [{'kind': 'tag', 'name': t} for t in tags]) gl_client = self._get_authorized_client() repo = gl_client.getproject(self.config['build_source']) if repo is False: return [] if field_name == 'tag_name': tags = gl_client.getrepositorytags(repo['id']) if tags is False: return [] if limit: tags = tags[0:limit] return [tag['name'] for tag in tags] if field_name == 'branch_name': branches = gl_client.getbranches(repo['id']) if branches is False: return [] if limit: branches = branches[0:limit] return [branch['name'] for branch in branches] return None def _prepare_build(self, commit_sha, ref, is_manual): config = self.config gl_client = self._get_authorized_client() repo = gl_client.getproject(self.config['build_source']) if repo is False: raise TriggerStartException('Could not find repository') commit = gl_client.getrepositorycommit(repo['id'], commit_sha) if repo is False: raise TriggerStartException('Could not find repository') committer = None if 'committer_email' in commit: try: [committer] = gl_client.getusers(search=commit['committer_email']) except ValueError: 
committer = None try: [author] = gl_client.getusers(search=commit['author_email']) except ValueError: author = None metadata = { 'commit': commit['id'], 'ref': ref, 'default_branch': repo['default_branch'], 'git_url': repo['ssh_url_to_repo'], 'commit_info': { 'url': gl_client.host + '/' + repo['path_with_namespace'] + '/commit/' + commit['id'], 'message': commit['message'], 'date': commit['committed_date'], }, } if committer is not None: metadata['commit_info']['committer'] = { 'username': committer['username'], 'avatar_url': committer['avatar_url'], 'url': gl_client.host + '/' + committer['username'], } if author is not None: metadata['commit_info']['author'] = { 'username': author['username'], 'avatar_url': author['avatar_url'], 'url': gl_client.host + '/' + author['username'] } prepared = PreparedBuild(self.trigger) prepared.tags_from_ref(ref, repo['default_branch']) prepared.name_from_sha(commit['id']) prepared.subdirectory = config['subdir'] prepared.metadata = metadata prepared.is_manual = is_manual return prepared def handle_trigger_request(self, request): payload = request.get_json() if not payload: raise SkipRequestException() logger.debug('GitLab trigger payload %s', payload) if not payload.get('commits'): raise SkipRequestException() commit = payload['commits'][0] commit_message = commit['message'] if should_skip_commit(commit_message): raise SkipRequestException() ref = payload['ref'] raise_if_skipped(self.config, ref) return self._prepare_build(commit['id'], ref, False) def manual_start(self, run_parameters=None): gl_client = self._get_authorized_client() repo = gl_client.getproject(self.config['build_source']) if repo is False: raise TriggerStartException('Could not find repository') def get_tag_sha(tag_name): tags = gl_client.getrepositorytags(repo['id']) if tags is False: raise TriggerStartException('Could not find tags') for tag in tags: if tag['name'] == tag_name: return tag['commit']['id'] raise TriggerStartException('Could not find commit') def 
get_branch_sha(branch_name): branch = gl_client.getbranch(repo['id'], branch_name) if branch is False: raise TriggerStartException('Could not find branch') return branch['commit']['id'] # Find the branch or tag to build. (commit_sha, ref) = _determine_build_ref(run_parameters, get_branch_sha, get_tag_sha, repo['default_branch']) return self._prepare_build(commit_sha, ref, True) def get_repository_url(self): gl_client = self._get_authorized_client() repository = gl_client.getproject(self.config['build_source']) if repository is False: return None return '%s/%s' % (gl_client.host, repository['path_with_namespace'])