diff --git a/buildtrigger/__init__.py b/buildtrigger/__init__.py new file mode 100644 index 000000000..8a794cf96 --- /dev/null +++ b/buildtrigger/__init__.py @@ -0,0 +1,5 @@ +import buildtrigger.bitbuckethandler +import buildtrigger.customhandler +import buildtrigger.githubhandler +import buildtrigger.gitlabhandler + diff --git a/buildtrigger/basehandler.py b/buildtrigger/basehandler.py new file mode 100644 index 000000000..2555b09ed --- /dev/null +++ b/buildtrigger/basehandler.py @@ -0,0 +1,222 @@ +from endpoints.building import PreparedBuild +from data import model +from buildtrigger.triggerutil import get_trigger_config, InvalidServiceException +from jsonschema import validate + +METADATA_SCHEMA = { + 'type': 'object', + 'properties': { + 'commit': { + 'type': 'string', + 'description': 'first 7 characters of the SHA-1 identifier for a git commit', + 'pattern': '^([A-Fa-f0-9]{7,})$', + }, + 'git_url': { + 'type': 'string', + 'description': 'The GIT url to use for the checkout', + }, + 'ref': { + 'type': 'string', + 'description': 'git reference for a git commit', + 'pattern': '^refs\/(heads|tags|remotes)\/(.+)$', + }, + 'default_branch': { + 'type': 'string', + 'description': 'default branch of the git repository', + }, + 'commit_info': { + 'type': 'object', + 'description': 'metadata about a git commit', + 'properties': { + 'url': { + 'type': 'string', + 'description': 'URL to view a git commit', + }, + 'message': { + 'type': 'string', + 'description': 'git commit message', + }, + 'date': { + 'type': 'string', + 'description': 'timestamp for a git commit' + }, + 'author': { + 'type': 'object', + 'description': 'metadata about the author of a git commit', + 'properties': { + 'username': { + 'type': 'string', + 'description': 'username of the author', + }, + 'url': { + 'type': 'string', + 'description': 'URL to view the profile of the author', + }, + 'avatar_url': { + 'type': 'string', + 'description': 'URL to view the avatar of the author', + }, + }, + 'required': ['username'], + }, + 'committer': { + 'type': 'object', + 'description': 'metadata about the committer of a git commit', + 'properties': { + 'username': { + 'type': 'string', + 'description': 'username of the committer', + }, + 'url': { + 'type': 'string', + 'description': 'URL to view the profile of the committer', + }, + 'avatar_url': { + 'type': 'string', + 'description': 'URL to view the avatar of the committer', + }, + }, + 'required': ['username'], + }, + }, + 'required': ['url', 'message', 'date'], + }, + }, + 'required': ['commit', 'git_url'], +} + + +class BuildTriggerHandler(object): + def __init__(self, trigger, override_config=None): + self.trigger = trigger + self.config = override_config or get_trigger_config(trigger) + + @property + def auth_token(self): + """ Returns the auth token for the trigger. """ + return self.trigger.auth_token + + def load_dockerfile_contents(self): + """ + Loads the Dockerfile found for the trigger's config and returns them or None if none could + be found/loaded. + """ + raise NotImplementedError + + def list_build_sources(self): + """ + Take the auth information for the specific trigger type and load the + list of build sources(repositories). + """ + raise NotImplementedError + + def list_build_subdirs(self): + """ + Take the auth information and the specified config so far and list all of + the possible subdirs containing dockerfiles. + """ + raise NotImplementedError + + def handle_trigger_request(self): + """ + Transform the incoming request data into a set of actions. 
Returns a PreparedBuild. + """ + raise NotImplementedError + + def is_active(self): + """ + Returns True if the current build trigger is active. Inactive means further + setup is needed. + """ + raise NotImplementedError + + def activate(self, standard_webhook_url): + """ + Activates the trigger for the service, with the given new configuration. + Returns new public and private config that should be stored if successful. + """ + raise NotImplementedError + + def deactivate(self): + """ + Deactivates the trigger for the service, removing any hooks installed in + the remote service. Returns the new config that should be stored if this + trigger is going to be re-activated. + """ + raise NotImplementedError + + def manual_start(self, run_parameters=None): + """ + Manually creates a repository build for this trigger. Returns a PreparedBuild. + """ + raise NotImplementedError + + def list_field_values(self, field_name, limit=None): + """ + Lists all values for the given custom trigger field. For example, a trigger might have a + field named "branches", and this method would return all branches. + """ + raise NotImplementedError + + def get_repository_url(self): + """ Returns the URL of the current trigger's repository. Note that this operation + can be called in a loop, so it should be as fast as possible. """ + raise NotImplementedError + + @classmethod + def service_name(cls): + """ + Particular service implemented by subclasses. + """ + raise NotImplementedError + + @classmethod + def get_handler(cls, trigger, override_config=None): + for subc in cls.__subclasses__(): + if subc.service_name() == trigger.service.name: + return subc(trigger, override_config) + + raise InvalidServiceException('Unable to find service: %s' % trigger.service.name) + + def put_config_key(self, key, value): + """ Updates a config key in the trigger, saving it to the DB. """ + self.config[key] = value + model.build.update_build_trigger(self.trigger, self.config) + + def set_auth_token(self, auth_token): + """ Sets the auth token for the trigger, saving it to the DB. """ + model.build.update_build_trigger(self.trigger, self.config, auth_token=auth_token) + + def get_dockerfile_path(self): + """ Returns the normalized path to the Dockerfile found in the subdirectory + in the config. """ + subdirectory = self.config.get('subdir', '') + if subdirectory == '/': + subdirectory = '' + else: + if not subdirectory.endswith('/'): + subdirectory = subdirectory + '/' + + return subdirectory + 'Dockerfile' + + def prepare_build(self, metadata, is_manual=False): + # Ensure that the metadata meets the scheme. 
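# Illustrative sketch (hypothetical values): the smallest metadata dict that
# satisfies METADATA_SCHEMA above. Only 'commit' and 'git_url' are required;
# 'commit_info', when present, must carry 'url', 'message' and 'date'.
#
#   example_metadata = {
#     'commit': '1c002dd',                                 # >= 7 hex characters
#     'git_url': 'git@example.com:namespace/repo.git',
#     'ref': 'refs/heads/master',
#     'default_branch': 'master',
#     'commit_info': {
#       'url': 'https://example.com/namespace/repo/commit/1c002dd',
#       'message': 'Initial commit',
#       'date': '2015-09-10T14:02:00Z',
#       'author': {'username': 'someuser'},
#     },
#   }
#   validate(example_metadata, METADATA_SCHEMA)            # passes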
+ validate(metadata, METADATA_SCHEMA) + + config = self.config + ref = metadata.get('ref', None) + commit_sha = metadata['commit'] + default_branch = metadata.get('default_branch', None) + + prepared = PreparedBuild(self.trigger) + prepared.name_from_sha(commit_sha) + prepared.subdirectory = config.get('subdir', None) + prepared.is_manual = is_manual + prepared.metadata = metadata + + if ref is not None: + prepared.tags_from_ref(ref, default_branch) + else: + prepared.tags = [commit_sha[:7]] + + return prepared diff --git a/buildtrigger/bitbuckethandler.py b/buildtrigger/bitbuckethandler.py new file mode 100644 index 000000000..3c7ba810a --- /dev/null +++ b/buildtrigger/bitbuckethandler.py @@ -0,0 +1,383 @@ +import logging +import re + +from buildtrigger.triggerutil import (RepositoryReadException, TriggerActivationException, + TriggerDeactivationException, TriggerStartException, + determine_build_ref, raise_if_skipped_build, + find_matching_branches) + +from buildtrigger.basehandler import BuildTriggerHandler + +from app import app, get_app_url +from bitbucket import BitBucket +from util.security.ssh import generate_ssh_keypair +from util.dict_wrappers import JSONPathDict, SafeDictSetter + +logger = logging.getLogger(__name__) + +_BITBUCKET_COMMIT_URL = 'https://bitbucket.org/%s/commits/%s' +_RAW_AUTHOR_REGEX = re.compile(r'.*<(.+)>') + +def get_transformed_commit_info(bb_commit, ref, default_branch, repository_name, lookup_author): + """ Returns the BitBucket commit information transformed into our own + payload format. + """ + # TODO(jschorr): Validate commit JSON + commit = JSONPathDict(bb_commit) + + config = SafeDictSetter() + config['commit'] = commit['node'] + config['ref'] = ref + config['default_branch'] = default_branch + config['git_url'] = 'git@bitbucket.org:%s.git' % repository_name + + config['commit_info.url'] = _BITBUCKET_COMMIT_URL % (repository_name, commit['node']) + config['commit_info.message'] = commit['message'] + config['commit_info.date'] = commit['timestamp'] + + match = _RAW_AUTHOR_REGEX.match(commit['raw_author']) + if match: + email_address = match.group(1) + author_info = JSONPathDict(lookup_author(email_address)) + if author_info: + config['commit_info.author.username'] = author_info['user.username'] + config['commit_info.author.url'] = 'https://bitbucket.org/%s/' % author_info['user.username'] + config['commit_info.author.avatar_url'] = author_info['user.avatar'] + + return config.dict_value() + + +def get_transformed_webhook_payload(bb_payload, default_branch=None): + """ Returns the BitBucket webhook JSON payload transformed into our own payload + format. If the bb_payload is not valid, returns None. 
+ """ + # TODO(jschorr): Validate payload JSON + + payload = JSONPathDict(bb_payload) + change = payload['push.changes[-1].new'] + if not change: + return None + + ref = ('refs/heads/' + change['name'] if change['type'] == 'branch' + else 'refs/tags/' + change['name']) + + repository_name = payload['repository.full_name'] + target = change['target'] + + config = SafeDictSetter() + config['commit'] = target['hash'] + config['ref'] = ref + config['default_branch'] = default_branch + config['git_url'] = 'git@bitbucket.org:%s.git' % repository_name + + config['commit_info.url'] = target['links.html.href'] + config['commit_info.message'] = target['message'] + config['commit_info.date'] = target['date'] + + config['commit_info.author.username'] = target['author.user.username'] + config['commit_info.author.url'] = target['author.user.links.html.href'] + config['commit_info.author.avatar_url'] = target['author.user.links.avatar.href'] + + config['commit_info.committer.username'] = payload['actor.username'] + config['commit_info.committer.url'] = payload['actor.links.html.href'] + config['commit_info.committer.avatar_url'] = payload['actor.links.avatar.href'] + return config.dict_value() + + +class BitbucketBuildTrigger(BuildTriggerHandler): + """ + BuildTrigger for Bitbucket. + """ + @classmethod + def service_name(cls): + return 'bitbucket' + + def _get_client(self): + """ Returns a BitBucket API client for this trigger's config. """ + key = app.config.get('BITBUCKET_TRIGGER_CONFIG', {}).get('CONSUMER_KEY', '') + secret = app.config.get('BITBUCKET_TRIGGER_CONFIG', {}).get('CONSUMER_SECRET', '') + + trigger_uuid = self.trigger.uuid + callback_url = '%s/oauth1/bitbucket/callback/trigger/%s' % (get_app_url(), trigger_uuid) + + return BitBucket(key, secret, callback_url) + + def _get_authorized_client(self): + """ Returns an authorized API client. """ + base_client = self._get_client() + auth_token = self.auth_token or 'invalid:invalid' + token_parts = auth_token.split(':') + if len(token_parts) != 2: + token_parts = ['invalid', 'invalid'] + + (access_token, access_token_secret) = token_parts + return base_client.get_authorized_client(access_token, access_token_secret) + + def _get_repository_client(self): + """ Returns an API client for working with this config's BB repository. """ + source = self.config['build_source'] + (namespace, name) = source.split('/') + bitbucket_client = self._get_authorized_client() + return bitbucket_client.for_namespace(namespace).repositories().get(name) + + def _get_default_branch(self, repository, default_value='master'): + """ Returns the default branch for the repository or the value given. """ + (result, data, _) = repository.get_main_branch() + if result: + return data['name'] + + return default_value + + def get_oauth_url(self): + """ Returns the OAuth URL to authorize Bitbucket. """ + bitbucket_client = self._get_client() + (result, data, err_msg) = bitbucket_client.get_authorization_url() + if not result: + raise RepositoryReadException(err_msg) + + return data + + def exchange_verifier(self, verifier): + """ Exchanges the given verifier token to setup this trigger. """ + bitbucket_client = self._get_client() + access_token = self.config.get('access_token', '') + access_token_secret = self.auth_token + + # Exchange the verifier for a new access token. + (result, data, _) = bitbucket_client.verify_token(access_token, access_token_secret, verifier) + if not result: + return False + + # Save the updated access token and secret. 
+ self.set_auth_token(data[0] + ':' + data[1]) + + # Retrieve the current authorized user's information and store the username in the config. + authorized_client = self._get_authorized_client() + (result, data, _) = authorized_client.get_current_user() + if not result: + return False + + username = data['user']['username'] + self.put_config_key('username', username) + return True + + def is_active(self): + return 'webhook_id' in self.config + + def activate(self, standard_webhook_url): + config = self.config + + # Add a deploy key to the repository. + public_key, private_key = generate_ssh_keypair() + config['credentials'] = [ + { + 'name': 'SSH Public Key', + 'value': public_key, + }, + ] + + repository = self._get_repository_client() + (result, created_deploykey, err_msg) = repository.deploykeys().create( + app.config['REGISTRY_TITLE'] + ' webhook key', public_key) + + if not result: + msg = 'Unable to add deploy key to repository: %s' % err_msg + raise TriggerActivationException(msg) + + config['deploy_key_id'] = created_deploykey['pk'] + + # Add a webhook callback. + description = 'Webhook for invoking builds on %s' % app.config['REGISTRY_TITLE_SHORT'] + webhook_events = ['repo:push'] + (result, created_webhook, err_msg) = repository.webhooks().create( + description, standard_webhook_url, webhook_events) + + if not result: + msg = 'Unable to add webhook to repository: %s' % err_msg + raise TriggerActivationException(msg) + + config['webhook_id'] = created_webhook['uuid'] + self.config = config + return config, {'private_key': private_key} + + def deactivate(self): + config = self.config + + webhook_id = config.pop('webhook_id', None) + deploy_key_id = config.pop('deploy_key_id', None) + repository = self._get_repository_client() + + # Remove the webhook. + if webhook_id is not None: + (result, _, err_msg) = repository.webhooks().delete(webhook_id) + if not result: + msg = 'Unable to remove webhook from repository: %s' % err_msg + raise TriggerDeactivationException(msg) + + # Remove the public key. + if deploy_key_id is not None: + (result, _, err_msg) = repository.deploykeys().delete(deploy_key_id) + if not result: + msg = 'Unable to remove deploy key from repository: %s' % err_msg + raise TriggerDeactivationException(msg) + + return config + + def list_build_sources(self): + bitbucket_client = self._get_authorized_client() + (result, data, err_msg) = bitbucket_client.get_visible_repositories() + if not result: + raise RepositoryReadException('Could not read repository list: ' + err_msg) + + namespaces = {} + for repo in data: + if not repo['scm'] == 'git': + continue + + owner = repo['owner'] + if not owner in namespaces: + namespaces[owner] = { + 'personal': owner == self.config.get('username'), + 'repos': [], + 'info': { + 'name': owner + } + } + + namespaces[owner]['repos'].append(owner + '/' + repo['slug']) + + return namespaces.values() + + def list_build_subdirs(self): + config = self.config + repository = self._get_repository_client() + + # Find the first matching branch. 
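# Illustrative sketch (hypothetical values): when the trigger config contains a
# 'branchtag_regex', find_matching_branches() keeps only the branches whose
# 'refs/heads/<name>' ref fully matches it (see buildtrigger/triggerutil.py):
#
#   find_matching_branches({'branchtag_regex': 'heads/(master|prod)'},
#                          ['master', 'prod', 'feature-x'])
#   # => ['master', 'prod']; without the key, all branches are returned as-is.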
+ repo_branches = self.list_field_values('branch_name') or [] + branches = find_matching_branches(config, repo_branches) + if not branches: + branches = [self._get_default_branch(repository)] + + (result, data, err_msg) = repository.get_path_contents('', revision=branches[0]) + if not result: + raise RepositoryReadException(err_msg) + + files = set([f['path'] for f in data['files']]) + if 'Dockerfile' in files: + return ['/'] + + return [] + + def load_dockerfile_contents(self): + repository = self._get_repository_client() + path = self.get_dockerfile_path() + + (result, data, err_msg) = repository.get_raw_path_contents(path, revision='master') + if not result: + raise RepositoryReadException(err_msg) + + return data + + def list_field_values(self, field_name, limit=None): + source = self.config['build_source'] + (namespace, name) = source.split('/') + + bitbucket_client = self._get_authorized_client() + repository = bitbucket_client.for_namespace(namespace).repositories().get(name) + + if field_name == 'refs': + (result, data, _) = repository.get_branches_and_tags() + if not result: + return None + + branches = [b['name'] for b in data['branches']] + tags = [t['name'] for t in data['tags']] + + return ([{'kind': 'branch', 'name': b} for b in branches] + + [{'kind': 'tag', 'name': tag} for tag in tags]) + + if field_name == 'tag_name': + (result, data, _) = repository.get_tags() + if not result: + return None + + tags = list(data.keys()) + if limit: + tags = tags[0:limit] + + return tags + + if field_name == 'branch_name': + (result, data, _) = repository.get_branches() + if not result: + return None + + branches = list(data.keys()) + if limit: + branches = branches[0:limit] + + return branches + + return None + + def get_repository_url(self): + source = self.config['build_source'] + (namespace, name) = source.split('/') + return 'https://bitbucket.org/%s/%s' % (namespace, name) + + def handle_trigger_request(self, request): + payload = request.get_json() + logger.debug('Got BitBucket request: %s', payload) + + repository = self._get_repository_client() + default_branch = self._get_default_branch(repository) + + metadata = get_transformed_webhook_payload(payload, default_branch=default_branch) + prepared = self.prepare_build(metadata) + + # Check if we should skip this build. + raise_if_skipped_build(prepared) + return prepared + + def manual_start(self, run_parameters=None): + run_parameters = run_parameters or {} + repository = self._get_repository_client() + bitbucket_client = self._get_authorized_client() + + def get_branch_sha(branch_name): + # Lookup the commit SHA for the branch. + (result, data, _) = repository.get_branches() + if not result or not branch_name in data: + raise TriggerStartException('Could not find branch commit SHA') + + return data[branch_name]['node'] + + def get_tag_sha(tag_name): + # Lookup the commit SHA for the tag. + (result, data, _) = repository.get_tags() + if not result or not tag_name in data: + raise TriggerStartException('Could not find tag commit SHA') + + return data[tag_name]['node'] + + def lookup_author(email_address): + (result, data, _) = bitbucket_client.accounts().get_profile(email_address) + return data if result else None + + # Find the branch or tag to build. + default_branch = self._get_default_branch(repository) + (commit_sha, ref) = determine_build_ref(run_parameters, get_branch_sha, get_tag_sha, + default_branch) + + # Lookup the commit SHA in BitBucket. 
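# Illustrative sketch (hypothetical values): commit_sha and ref above were
# resolved by determine_build_ref() in buildtrigger/triggerutil.py from
# run_parameters shaped like:
#
#   {'refs': {'kind': 'tag', 'name': 'v1.0'}}  ->  ref == 'refs/tags/v1.0'
#   {'branch_name': 'stable'}                  ->  ref == 'refs/heads/stable'
#
# falling back to the repository's default branch when neither is supplied.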
+ (result, commit_info, _) = repository.changesets().get(commit_sha) + if not result: + raise TriggerStartException('Could not lookup commit SHA') + + # Return a prepared build for the commit. + repository_name = '%s/%s' % (repository.namespace, repository.repository_name) + metadata = get_transformed_commit_info(commit_info, ref, default_branch, + repository_name, lookup_author) + + return self.prepare_build(metadata, is_manual=True) + diff --git a/buildtrigger/customhandler.py b/buildtrigger/customhandler.py new file mode 100644 index 000000000..84ec09831 --- /dev/null +++ b/buildtrigger/customhandler.py @@ -0,0 +1,166 @@ +import logging +import json + +from jsonschema import validate +from buildtrigger.triggerutil import (RepositoryReadException, TriggerActivationException, + TriggerStartException, ValidationRequestException, + InvalidPayloadException, + SkipRequestException, raise_if_skipped_build, + find_matching_branches) + +from buildtrigger.basehandler import BuildTriggerHandler + +from util.security.ssh import generate_ssh_keypair + + +logger = logging.getLogger(__name__) + +class CustomBuildTrigger(BuildTriggerHandler): + payload_schema = { + 'type': 'object', + 'properties': { + 'commit': { + 'type': 'string', + 'description': 'first 7 characters of the SHA-1 identifier for a git commit', + 'pattern': '^([A-Fa-f0-9]{7,})$', + }, + 'ref': { + 'type': 'string', + 'description': 'git reference for a git commit', + 'pattern': '^refs\/(heads|tags|remotes)\/(.+)$', + }, + 'default_branch': { + 'type': 'string', + 'description': 'default branch of the git repository', + }, + 'commit_info': { + 'type': 'object', + 'description': 'metadata about a git commit', + 'properties': { + 'url': { + 'type': 'string', + 'description': 'URL to view a git commit', + }, + 'message': { + 'type': 'string', + 'description': 'git commit message', + }, + 'date': { + 'type': 'string', + 'description': 'timestamp for a git commit' + }, + 'author': { + 'type': 'object', + 'description': 'metadata about the author of a git commit', + 'properties': { + 'username': { + 'type': 'string', + 'description': 'username of the author', + }, + 'url': { + 'type': 'string', + 'description': 'URL to view the profile of the author', + }, + 'avatar_url': { + 'type': 'string', + 'description': 'URL to view the avatar of the author', + }, + }, + 'required': ['username', 'url', 'avatar_url'], + }, + 'committer': { + 'type': 'object', + 'description': 'metadata about the committer of a git commit', + 'properties': { + 'username': { + 'type': 'string', + 'description': 'username of the committer', + }, + 'url': { + 'type': 'string', + 'description': 'URL to view the profile of the committer', + }, + 'avatar_url': { + 'type': 'string', + 'description': 'URL to view the avatar of the committer', + }, + }, + 'required': ['username', 'url', 'avatar_url'], + }, + }, + 'required': ['url', 'message', 'date'], + }, + }, + 'required': ['commit', 'ref', 'default_branch'], + } + + @classmethod + def service_name(cls): + return 'custom-git' + + def is_active(self): + return self.config.has_key('credentials') + + def _metadata_from_payload(self, payload): + try: + metadata = json.loads(payload) + validate(metadata, self.payload_schema) + except Exception as e: + raise InvalidPayloadException(e.message) + return metadata + + def handle_trigger_request(self, request): + payload = request.data + if not payload: + raise InvalidPayloadException() + + logger.debug('Payload %s', payload) + + metadata = self._metadata_from_payload(payload) + 
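# Illustrative sketch (hypothetical values): the smallest request body this
# custom-git trigger accepts, per payload_schema above. 'git_url' is not sent
# by the caller; it is filled in below from the trigger's 'build_source' config.
#
#   {
#     "commit": "1c002dd4b536e7f8e3b4a913f22e339fcbee25d8",
#     "ref": "refs/heads/master",
#     "default_branch": "master"
#   }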
metadata['git_url'] = self.config['build_source'] + + prepared = self.prepare_build(metadata) + + # Check if we should skip this build. + raise_if_skipped_build(prepared) + + return prepared + + def manual_start(self, run_parameters=None): + # commit_sha is the only required parameter + commit_sha = run_parameters.get('commit_sha') + if commit_sha is None: + raise TriggerStartException('missing required parameter') + + config = self.config + metadata = { + 'commit': commit_sha, + 'git_url': config['build_source'], + } + + return self.prepare_build(metadata, is_manual=True) + + def activate(self, standard_webhook_url): + config = self.config + public_key, private_key = generate_ssh_keypair() + config['credentials'] = [ + { + 'name': 'SSH Public Key', + 'value': public_key, + }, + { + 'name': 'Webhook Endpoint URL', + 'value': standard_webhook_url, + }, + ] + self.config = config + return config, {'private_key': private_key} + + def deactivate(self): + config = self.config + config.pop('credentials', None) + self.config = config + return config + + def get_repository_url(self): + return None diff --git a/buildtrigger/githubhandler.py b/buildtrigger/githubhandler.py new file mode 100644 index 000000000..76131f2bf --- /dev/null +++ b/buildtrigger/githubhandler.py @@ -0,0 +1,430 @@ +import logging +import os.path +import base64 + +from app import app, github_trigger + +from buildtrigger.triggerutil import (RepositoryReadException, TriggerActivationException, + TriggerDeactivationException, TriggerStartException, + EmptyRepositoryException, ValidationRequestException, + SkipRequestException, + determine_build_ref, raise_if_skipped_build, + find_matching_branches) + +from buildtrigger.basehandler import BuildTriggerHandler + +from util.security.ssh import generate_ssh_keypair +from util.dict_wrappers import JSONPathDict, SafeDictSetter + +from github import (Github, UnknownObjectException, GithubException, + BadCredentialsException as GitHubBadCredentialsException) + +logger = logging.getLogger(__name__) + + +def get_transformed_webhook_payload(gh_payload, default_branch=None, lookup_user=None): + """ Returns the GitHub webhook JSON payload transformed into our own payload + format. If the gh_payload is not valid, returns None. + """ + # TODO(jschorr): Validate payload JSON + payload = JSONPathDict(gh_payload) + + config = SafeDictSetter() + config['commit'] = payload['head_commit.id'] + config['ref'] = payload['ref'] + config['default_branch'] = default_branch + config['git_url'] = payload['repository.ssh_url'] + + config['commit_info.url'] = payload['head_commit.url'] + config['commit_info.message'] = payload['head_commit.message'] + config['commit_info.date'] = payload['head_commit.timestamp'] + + config['commit_info.author.username'] = payload['head_commit.author.username'] + config['commit_info.author.url'] = payload.get('head_commit.author.html_url') + config['commit_info.author.avatar_url'] = payload.get('head_commit.author.avatar_url') + + config['commit_info.committer.username'] = payload.get('head_commit.committer.username') + config['commit_info.committer.url'] = payload.get('head_commit.committer.html_url') + config['commit_info.committer.avatar_url'] = payload.get('head_commit.committer.avatar_url') + + # Note: GitHub doesn't always return the extra information for users, so we do the lookup + # manually if possible. 
+ if lookup_user and not payload.get('head_commit.author.html_url'): + author_info = lookup_user(payload['head_commit.author.username']) + if author_info: + config['commit_info.author.url'] = author_info['html_url'] + config['commit_info.author.avatar_url'] = author_info['avatar_url'] + + if (lookup_user and + payload.get('head_commit.committer.username') and + not payload.get('head_commit.committer.html_url')): + committer_info = lookup_user(payload['head_commit.committer.username']) + if committer_info: + config['commit_info.committer.url'] = committer_info['html_url'] + config['commit_info.committer.avatar_url'] = committer_info['avatar_url'] + + return config.dict_value() + + +class GithubBuildTrigger(BuildTriggerHandler): + """ + BuildTrigger for GitHub that uses the archive API and buildpacks. + """ + def _get_client(self): + """ Returns an authenticated client for talking to the GitHub API. """ + return Github(self.auth_token, + base_url=github_trigger.api_endpoint(), + client_id=github_trigger.client_id(), + client_secret=github_trigger.client_secret()) + + @classmethod + def service_name(cls): + return 'github' + + def is_active(self): + return 'hook_id' in self.config + + def get_repository_url(self): + source = self.config['build_source'] + return github_trigger.get_public_url(source) + + def activate(self, standard_webhook_url): + config = self.config + new_build_source = config['build_source'] + gh_client = self._get_client() + + # Find the GitHub repository. + try: + gh_repo = gh_client.get_repo(new_build_source) + except UnknownObjectException: + msg = 'Unable to find GitHub repository for source: %s' % new_build_source + raise TriggerActivationException(msg) + + # Add a deploy key to the GitHub repository. + public_key, private_key = generate_ssh_keypair() + config['credentials'] = [ + { + 'name': 'SSH Public Key', + 'value': public_key, + }, + ] + try: + deploy_key = gh_repo.create_key('%s Builder' % app.config['REGISTRY_TITLE'], + public_key) + config['deploy_key_id'] = deploy_key.id + except GithubException: + msg = 'Unable to add deploy key to repository: %s' % new_build_source + raise TriggerActivationException(msg) + + # Add the webhook to the GitHub repository. + webhook_config = { + 'url': standard_webhook_url, + 'content_type': 'json', + } + try: + hook = gh_repo.create_hook('web', webhook_config) + config['hook_id'] = hook.id + config['master_branch'] = gh_repo.default_branch + except GithubException: + msg = 'Unable to create webhook on repository: %s' % new_build_source + raise TriggerActivationException(msg) + + return config, {'private_key': private_key} + + def deactivate(self): + config = self.config + gh_client = self._get_client() + + # Find the GitHub repository. + try: + repo = gh_client.get_repo(config['build_source']) + except UnknownObjectException: + msg = 'Unable to find GitHub repository for source: %s' % config['build_source'] + raise TriggerDeactivationException(msg) + except GitHubBadCredentialsException: + msg = 'Unable to access repository to disable trigger' + raise TriggerDeactivationException(msg) + + # If the trigger uses a deploy key, remove it. + try: + if config['deploy_key_id']: + deploy_key = repo.get_key(config['deploy_key_id']) + deploy_key.delete() + except KeyError: + # There was no config['deploy_key_id'], thus this is an old trigger without a deploy key. + pass + except GithubException: + msg = 'Unable to remove deploy key: %s' % config['deploy_key_id'] + raise TriggerDeactivationException(msg) + + # Remove the webhook. 
+ try: + hook = repo.get_hook(config['hook_id']) + hook.delete() + except GithubException: + msg = 'Unable to remove hook: %s' % config['hook_id'] + raise TriggerDeactivationException(msg) + + config.pop('hook_id', None) + self.config = config + return config + + def list_build_sources(self): + gh_client = self._get_client() + usr = gh_client.get_user() + + try: + repos = usr.get_repos() + except GithubException: + raise RepositoryReadException('Unable to list user repositories') + + namespaces = {} + has_non_personal = False + + for repository in repos: + namespace = repository.owner.login + if not namespace in namespaces: + is_personal_repo = namespace == usr.login + namespaces[namespace] = { + 'personal': is_personal_repo, + 'repos': [], + 'info': { + 'name': namespace, + 'avatar_url': repository.owner.avatar_url + } + } + + if not is_personal_repo: + has_non_personal = True + + namespaces[namespace]['repos'].append(repository.full_name) + + # In older versions of GitHub Enterprise, the get_repos call above does not + # return any non-personal repositories. In that case, we need to lookup the + # repositories manually. + # TODO: Remove this once we no longer support GHE versions <= 2.1 + if not has_non_personal: + for org in usr.get_orgs(): + repo_list = [repo.full_name for repo in org.get_repos(type='member')] + namespaces[org.name] = { + 'personal': False, + 'repos': repo_list, + 'info': { + 'name': org.name or org.login, + 'avatar_url': org.avatar_url + } + } + + entries = list(namespaces.values()) + entries.sort(key=lambda e: e['info']['name']) + return entries + + def list_build_subdirs(self): + config = self.config + gh_client = self._get_client() + source = config['build_source'] + + try: + repo = gh_client.get_repo(source) + + # Find the first matching branch. 
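# Illustrative sketch (hypothetical paths): the listing below walks the git tree
# of the first matching branch and returns the directory of every Dockerfile it
# finds, e.g. a tree containing 'Dockerfile' and 'server/Dockerfile' yields
# ['', 'server'].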
+ repo_branches = self.list_field_values('branch_name') or [] + branches = find_matching_branches(config, repo_branches) + branches = branches or [repo.default_branch or 'master'] + default_commit = repo.get_branch(branches[0]).commit + commit_tree = repo.get_git_tree(default_commit.sha, recursive=True) + + return [os.path.dirname(elem.path) for elem in commit_tree.tree + if (elem.type == u'blob' and + os.path.basename(elem.path) == u'Dockerfile')] + except GithubException as ghe: + message = ghe.data.get('message', 'Unable to list contents of repository: %s' % source) + if message == 'Branch not found': + raise EmptyRepositoryException() + + raise RepositoryReadException(message) + + def load_dockerfile_contents(self): + config = self.config + gh_client = self._get_client() + + source = config['build_source'] + path = self.get_dockerfile_path() + try: + repo = gh_client.get_repo(source) + file_info = repo.get_file_contents(path) + if file_info is None: + return None + + content = file_info.content + if file_info.encoding == 'base64': + content = base64.b64decode(content) + return content + + except GithubException as ghe: + message = ghe.data.get('message', 'Unable to read Dockerfile: %s' % source) + raise RepositoryReadException(message) + + def list_field_values(self, field_name, limit=None): + if field_name == 'refs': + branches = self.list_field_values('branch_name') + tags = self.list_field_values('tag_name') + + return ([{'kind': 'branch', 'name': b} for b in branches] + + [{'kind': 'tag', 'name': tag} for tag in tags]) + + config = self.config + if field_name == 'tag_name': + try: + gh_client = self._get_client() + source = config['build_source'] + repo = gh_client.get_repo(source) + gh_tags = repo.get_tags() + if limit: + gh_tags = repo.get_tags()[0:limit] + + return [tag.name for tag in gh_tags] + except GitHubBadCredentialsException: + return [] + except GithubException: + logger.exception("Got GitHub Exception when trying to list tags for trigger %s", + self.trigger.id) + return [] + + if field_name == 'branch_name': + try: + gh_client = self._get_client() + source = config['build_source'] + repo = gh_client.get_repo(source) + gh_branches = repo.get_branches() + if limit: + gh_branches = repo.get_branches()[0:limit] + + branches = [branch.name for branch in gh_branches] + + if not repo.default_branch in branches: + branches.insert(0, repo.default_branch) + + if branches[0] != repo.default_branch: + branches.remove(repo.default_branch) + branches.insert(0, repo.default_branch) + + return branches + except GitHubBadCredentialsException: + return ['master'] + except GithubException: + logger.exception("Got GitHub Exception when trying to list branches for trigger %s", + self.trigger.id) + return ['master'] + + return None + + @classmethod + def _build_metadata_for_commit(cls, commit_sha, ref, repo): + try: + commit = repo.get_commit(commit_sha) + except GithubException: + logger.exception('Could not load commit information from GitHub') + return None + + commit_info = { + 'url': commit.html_url, + 'message': commit.commit.message, + 'date': commit.last_modified + } + + if commit.author: + commit_info['author'] = { + 'username': commit.author.login, + 'avatar_url': commit.author.avatar_url, + 'url': commit.author.html_url + } + + if commit.committer: + commit_info['committer'] = { + 'username': commit.committer.login, + 'avatar_url': commit.committer.avatar_url, + 'url': commit.committer.html_url + } + + return { + 'commit': commit_sha, + 'ref': ref, + 'default_branch': 
repo.default_branch, + 'git_url': repo.ssh_url, + 'commit_info': commit_info + } + + def manual_start(self, run_parameters=None): + config = self.config + source = config['build_source'] + + try: + gh_client = self._get_client() + repo = gh_client.get_repo(source) + default_branch = repo.default_branch + except GithubException as ghe: + raise TriggerStartException(ghe.data['message']) + + def get_branch_sha(branch_name): + branch = repo.get_branch(branch_name) + return branch.commit.sha + + def get_tag_sha(tag_name): + tags = {tag.name: tag for tag in repo.get_tags()} + if not tag_name in tags: + raise TriggerStartException('Could not find tag in repository') + + return tags[tag_name].commit.sha + + # Find the branch or tag to build. + (commit_sha, ref) = determine_build_ref(run_parameters, get_branch_sha, get_tag_sha, + default_branch) + + metadata = GithubBuildTrigger._build_metadata_for_commit(commit_sha, ref, repo) + return self.prepare_build(metadata, is_manual=True) + + def lookup_user(self, username): + try: + gh_client = self._get_client() + user = gh_client.get_user(username) + return { + 'html_url': user.html_url, + 'avatar_url': user.avatar_url + } + except GithubException: + return None + + def handle_trigger_request(self, request): + # Check the payload to see if we should skip it based on the lack of a head_commit. + payload = request.get_json() + + # This is for GitHub's probing/testing. + if 'zen' in payload: + raise ValidationRequestException() + + # Lookup the default branch for the repository. + default_branch = None + lookup_user = None + try: + repo_full_name = '%s/%s' % (payload['repository']['owner']['name'], + payload['repository']['name']) + + gh_client = self._get_client() + repo = gh_client.get_repo(repo_full_name) + default_branch = repo.default_branch + lookup_user = self.lookup_user + except GitHubBadCredentialsException: + logger.exception('Got GitHub Credentials Exception; Cannot lookup default branch') + except GithubException: + logger.exception("Got GitHub Exception when trying to start trigger %s", self.trigger.id) + raise SkipRequestException() + + logger.debug('GitHub trigger payload %s', payload) + metadata = get_transformed_webhook_payload(payload, default_branch=default_branch, + lookup_user=lookup_user) + prepared = self.prepare_build(metadata) + + # Check if we should skip this build. + raise_if_skipped_build(prepared) + return prepared diff --git a/buildtrigger/gitlabhandler.py b/buildtrigger/gitlabhandler.py new file mode 100644 index 000000000..d4269337f --- /dev/null +++ b/buildtrigger/gitlabhandler.py @@ -0,0 +1,359 @@ +import logging + +from app import app + +from buildtrigger.triggerutil import (RepositoryReadException, TriggerActivationException, + TriggerDeactivationException, TriggerStartException, + EmptyRepositoryException, ValidationRequestException, + SkipRequestException, + determine_build_ref, raise_if_skipped_build, + find_matching_branches) + +from buildtrigger.basehandler import BuildTriggerHandler + +from util.security.ssh import generate_ssh_keypair +from util.dict_wrappers import JSONPathDict, SafeDictSetter + +import gitlab + +logger = logging.getLogger(__name__) + + +def get_transformed_webhook_payload(gl_payload, default_branch=None, lookup_user=None): + """ Returns the Gitlab webhook JSON payload transformed into our own payload + format. If the gl_payload is not valid, returns None. 
+ """ + # TODO(jschorr): Validate payload JSON + payload = JSONPathDict(gl_payload) + + config = SafeDictSetter() + config['commit'] = payload['checkout_sha'] + config['ref'] = payload['ref'] + config['default_branch'] = default_branch + config['git_url'] = payload['repository.git_ssh_url'] + + config['commit_info.url'] = payload['commits[0].url'] + config['commit_info.message'] = payload['commits[0].message'] + config['commit_info.date'] = payload['commits[0].timestamp'] + + # Note: Gitlab does not send full user information with the payload, so we have to + # (optionally) look it up. + author_email = payload['commits[0].author.email'] + if lookup_user and author_email: + author_info = lookup_user(author_email) + if author_info: + config['commit_info.author.username'] = author_info['username'] + config['commit_info.author.url'] = author_info['html_url'] + config['commit_info.author.avatar_url'] = author_info['avatar_url'] + + return config.dict_value() + + +class GitLabBuildTrigger(BuildTriggerHandler): + """ + BuildTrigger for GitLab. + """ + @classmethod + def service_name(cls): + return 'gitlab' + + def _get_authorized_client(self): + host = app.config.get('GITLAB_TRIGGER_CONFIG', {}).get('GITLAB_ENDPOINT', '') + auth_token = self.auth_token or 'invalid' + return gitlab.Gitlab(host, oauth_token=auth_token) + + def is_active(self): + return 'hook_id' in self.config + + def activate(self, standard_webhook_url): + config = self.config + new_build_source = config['build_source'] + gl_client = self._get_authorized_client() + + # Find the GitLab repository. + repository = gl_client.getproject(new_build_source) + if repository is False: + msg = 'Unable to find GitLab repository for source: %s' % new_build_source + raise TriggerActivationException(msg) + + # Add a deploy key to the repository. + public_key, private_key = generate_ssh_keypair() + config['credentials'] = [ + { + 'name': 'SSH Public Key', + 'value': public_key, + }, + ] + key = gl_client.adddeploykey(repository['id'], '%s Builder' % app.config['REGISTRY_TITLE'], + public_key) + if key is False: + msg = 'Unable to add deploy key to repository: %s' % new_build_source + raise TriggerActivationException(msg) + config['key_id'] = key['id'] + + # Add the webhook to the GitLab repository. + hook = gl_client.addprojecthook(repository['id'], standard_webhook_url, push=True) + if hook is False: + msg = 'Unable to create webhook on repository: %s' % new_build_source + raise TriggerActivationException(msg) + + config['hook_id'] = hook['id'] + self.config = config + return config, {'private_key': private_key} + + def deactivate(self): + config = self.config + gl_client = self._get_authorized_client() + + # Find the GitLab repository. + repository = gl_client.getproject(config['build_source']) + if repository is False: + msg = 'Unable to find GitLab repository for source: %s' % config['build_source'] + raise TriggerDeactivationException(msg) + + # Remove the webhook. 
+ success = gl_client.deleteprojecthook(repository['id'], config['hook_id']) + if success is False: + msg = 'Unable to remove hook: %s' % config['hook_id'] + raise TriggerDeactivationException(msg) + config.pop('hook_id', None) + + # Remove the key + success = gl_client.deletedeploykey(repository['id'], config['key_id']) + if success is False: + msg = 'Unable to remove deploy key: %s' % config['key_id'] + raise TriggerDeactivationException(msg) + config.pop('key_id', None) + + self.config = config + + return config + + def list_build_sources(self): + gl_client = self._get_authorized_client() + current_user = gl_client.currentuser() + if current_user is False: + raise RepositoryReadException('Unable to get current user') + + repositories = gl_client.getprojects() + if repositories is False: + raise RepositoryReadException('Unable to list user repositories') + + namespaces = {} + for repo in repositories: + owner = repo['namespace']['name'] + if not owner in namespaces: + namespaces[owner] = { + 'personal': owner == current_user['username'], + 'repos': [], + 'info': { + 'name': owner, + } + } + + namespaces[owner]['repos'].append(repo['path_with_namespace']) + + return namespaces.values() + + def list_build_subdirs(self): + config = self.config + gl_client = self._get_authorized_client() + new_build_source = config['build_source'] + + repository = gl_client.getproject(new_build_source) + if repository is False: + msg = 'Unable to find GitLab repository for source: %s' % new_build_source + raise RepositoryReadException(msg) + + repo_branches = gl_client.getbranches(repository['id']) + if repo_branches is False: + msg = 'Unable to find GitLab branches for source: %s' % new_build_source + raise RepositoryReadException(msg) + + branches = [branch['name'] for branch in repo_branches] + branches = find_matching_branches(config, branches) + branches = branches or [repository['default_branch'] or 'master'] + + repo_tree = gl_client.getrepositorytree(repository['id'], ref_name=branches[0]) + if repo_tree is False: + msg = 'Unable to find GitLab repository tree for source: %s' % new_build_source + raise RepositoryReadException(msg) + + for node in repo_tree: + if node['name'] == 'Dockerfile': + return ['/'] + + return [] + + def load_dockerfile_contents(self): + gl_client = self._get_authorized_client() + path = self.get_dockerfile_path() + + repository = gl_client.getproject(self.config['build_source']) + if repository is False: + return None + + branches = self.list_field_values('branch_name') + branches = find_matching_branches(self.config, branches) + if branches == []: + return None + + branch_name = branches[0] + if repository['default_branch'] in branches: + branch_name = repository['default_branch'] + + contents = gl_client.getrawfile(repository['id'], branch_name, path) + if contents is False: + return None + + return contents + + def list_field_values(self, field_name, limit=None): + if field_name == 'refs': + branches = self.list_field_values('branch_name') + tags = self.list_field_values('tag_name') + + return ([{'kind': 'branch', 'name': b} for b in branches] + + [{'kind': 'tag', 'name': t} for t in tags]) + + gl_client = self._get_authorized_client() + repo = gl_client.getproject(self.config['build_source']) + if repo is False: + return [] + + if field_name == 'tag_name': + tags = gl_client.getrepositorytags(repo['id']) + if tags is False: + return [] + + if limit: + tags = tags[0:limit] + + return [tag['name'] for tag in tags] + + if field_name == 'branch_name': + branches = 
gl_client.getbranches(repo['id']) + if branches is False: + return [] + + if limit: + branches = branches[0:limit] + + return [branch['name'] for branch in branches] + + return None + + def get_repository_url(self): + gl_client = self._get_authorized_client() + repository = gl_client.getproject(self.config['build_source']) + if repository is False: + return None + + return '%s/%s' % (gl_client.host, repository['path_with_namespace']) + + def lookup_user(self, email): + gl_client = self._get_authorized_client() + try: + [user] = gl_client.getusers(search=email) + + return { + 'username': user['username'], + 'html_url': gl_client.host + '/' + user['username'], + 'avatar_url': user['avatar_url'] + } + except ValueError: + return None + + def get_metadata_for_commit(self, commit_sha, ref, repo): + gl_client = self._get_authorized_client() + commit = gl_client.getrepositorycommit(repo['id'], commit_sha) + + metadata = { + 'commit': commit['id'], + 'ref': ref, + 'default_branch': repo['default_branch'], + 'git_url': repo['ssh_url_to_repo'], + 'commit_info': { + 'url': gl_client.host + '/' + repo['path_with_namespace'] + '/commit/' + commit['id'], + 'message': commit['message'], + 'date': commit['committed_date'], + }, + } + + committer = None + if 'committer_email' in commit: + committer = self.lookup_user(commit['committer_email']) + + author = None + if 'author_email' in commit: + author = self.lookup_user(commit['author_email']) + + if committer is not None: + metadata['commit_info']['committer'] = { + 'username': committer['username'], + 'avatar_url': committer['avatar_url'], + 'url': gl_client.host + '/' + committer['username'], + } + + if author is not None: + metadata['commit_info']['author'] = { + 'username': author['username'], + 'avatar_url': author['avatar_url'], + 'url': gl_client.host + '/' + author['username'] + } + + return metadata + + def manual_start(self, run_parameters=None): + gl_client = self._get_authorized_client() + + repo = gl_client.getproject(self.config['build_source']) + if repo is False: + raise TriggerStartException('Could not find repository') + + def get_tag_sha(tag_name): + tags = gl_client.getrepositorytags(repo['id']) + if tags is False: + raise TriggerStartException('Could not find tags') + + for tag in tags: + if tag['name'] == tag_name: + return tag['commit']['id'] + + raise TriggerStartException('Could not find commit') + + def get_branch_sha(branch_name): + branch = gl_client.getbranch(repo['id'], branch_name) + if branch is False: + raise TriggerStartException('Could not find branch') + + return branch['commit']['id'] + + # Find the branch or tag to build. + (commit_sha, ref) = determine_build_ref(run_parameters, get_branch_sha, get_tag_sha, + repo['default_branch']) + + metadata = self.get_metadata_for_commit(commit_sha, ref, repo) + return self.prepare_build(metadata, is_manual=True) + + def handle_trigger_request(self, request): + payload = request.get_json() + if not payload: + raise SkipRequestException() + + # Lookup the default branch. + default_branch = None + gl_client = self._get_authorized_client() + repo = gl_client.getproject(self.config['build_source']) + if repo is not False: + default_branch = repo['default_branch'] + lookup_user = self.lookup_user + + logger.debug('GitLab trigger payload %s', payload) + metadata = get_transformed_webhook_payload(payload, default_branch=default_branch, + lookup_user=lookup_user) + prepared = self.prepare_build(metadata) + + # Check if we should skip this build. 
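# Illustrative note: raise_if_skipped_build() (buildtrigger/triggerutil.py)
# raises SkipRequestException when the payload produced no metadata or when the
# commit message contains '[skip build]' or '[build skip]', e.g. the
# hypothetical message "Fix typo in README [skip build]" would skip the build.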
+ raise_if_skipped_build(prepared) + return prepared diff --git a/buildtrigger/triggerutil.py b/buildtrigger/triggerutil.py new file mode 100644 index 000000000..3786dd7d4 --- /dev/null +++ b/buildtrigger/triggerutil.py @@ -0,0 +1,121 @@ +import json +import io +import logging +import re + +class InvalidPayloadException(Exception): + pass + +class BuildArchiveException(Exception): + pass + +class InvalidServiceException(Exception): + pass + +class TriggerActivationException(Exception): + pass + +class TriggerDeactivationException(Exception): + pass + +class TriggerStartException(Exception): + pass + +class ValidationRequestException(Exception): + pass + +class SkipRequestException(Exception): + pass + +class EmptyRepositoryException(Exception): + pass + +class RepositoryReadException(Exception): + pass + +class TriggerProviderException(Exception): + pass + +logger = logging.getLogger(__name__) + +def determine_build_ref(run_parameters, get_branch_sha, get_tag_sha, default_branch): + run_parameters = run_parameters or {} + + kind = '' + value = '' + + if 'refs' in run_parameters and run_parameters['refs']: + kind = run_parameters['refs']['kind'] + value = run_parameters['refs']['name'] + elif 'branch_name' in run_parameters: + kind = 'branch' + value = run_parameters['branch_name'] + + kind = kind or 'branch' + value = value or default_branch + + ref = 'refs/tags/' + value if kind == 'tag' else 'refs/heads/' + value + commit_sha = get_tag_sha(value) if kind == 'tag' else get_branch_sha(value) + return (commit_sha, ref) + + +def find_matching_branches(config, branches): + if 'branchtag_regex' in config: + try: + regex = re.compile(config['branchtag_regex']) + return [branch for branch in branches + if matches_ref('refs/heads/' + branch, regex)] + except: + pass + + return branches + + +def should_skip_commit(message): + return '[skip build]' in message or '[build skip]' in message + + +def raise_if_skipped_build(prepared_build): + """ Raises a SkipRequestException if the given build should be skipped. """ + if not prepared_build.metadata: + logger.debug('Skipping request due to missing metadata for prepared build') + raise SkipRequestException() + + if should_skip_commit(prepared_build.metadata['commit_info']['message']): + logger.debug('Skipping request due to commit message request') + raise SkipRequestException() + + +def raise_if_skipped(config, ref): + """ Raises a SkipRequestException if the given ref should be skipped. 
""" + if 'branchtag_regex' in config: + try: + regex = re.compile(config['branchtag_regex']) + except: + regex = re.compile('.*') + + if not matches_ref(ref, regex): + raise SkipRequestException() + + +def matches_ref(ref, regex): + match_string = ref.split('/', 1)[1] + if not regex: + return False + + m = regex.match(match_string) + if not m: + return False + + return len(m.group(0)) == len(match_string) + + +def raise_unsupported(): + raise io.UnsupportedOperation + + +def get_trigger_config(trigger): + try: + return json.loads(trigger.config) + except ValueError: + return {} diff --git a/endpoints/api/build.py b/endpoints/api/build.py index 397e40ac2..c517e1f35 100644 --- a/endpoints/api/build.py +++ b/endpoints/api/build.py @@ -9,12 +9,12 @@ from flask import request from rfc3987 import parse as uri_parse from app import app, userfiles as user_files, build_logs, log_archive, dockerfile_build_queue +from buildtrigger.basehandler import BuildTriggerHandler from endpoints.api import (RepositoryParamResource, parse_args, query_param, nickname, resource, require_repo_read, require_repo_write, validate_json_request, ApiResource, internal_only, format_date, api, Unauthorized, NotFound, path_param, InvalidRequest, require_repo_admin) from endpoints.building import start_build, PreparedBuild -from endpoints.trigger import BuildTriggerHandler from data import database from data import model from auth.auth_context import get_authenticated_user diff --git a/endpoints/api/trigger.py b/endpoints/api/trigger.py index 1582f3890..b1f37f727 100644 --- a/endpoints/api/trigger.py +++ b/endpoints/api/trigger.py @@ -8,15 +8,16 @@ from urllib import quote from urlparse import urlunparse from app import app +from buildtrigger.basehandler import BuildTriggerHandler +from buildtrigger.triggerutil import (TriggerDeactivationException, + TriggerActivationException, EmptyRepositoryException, + RepositoryReadException, TriggerStartException) from endpoints.api import (RepositoryParamResource, nickname, resource, require_repo_admin, log_action, request_error, query_param, parse_args, internal_only, validate_json_request, api, Unauthorized, NotFound, InvalidRequest, path_param) from endpoints.api.build import build_status_view, trigger_view, RepositoryBuildStatus from endpoints.building import start_build -from endpoints.trigger import (BuildTriggerHandler, TriggerDeactivationException, - TriggerActivationException, EmptyRepositoryException, - RepositoryReadException, TriggerStartException) from data import model from auth.permissions import (UserAdminPermission, AdministerOrganizationPermission, ReadRepositoryPermission) diff --git a/endpoints/bitbuckettrigger.py b/endpoints/bitbuckettrigger.py index ba685450c..7045200f9 100644 --- a/endpoints/bitbuckettrigger.py +++ b/endpoints/bitbuckettrigger.py @@ -3,7 +3,8 @@ import logging from flask import request, redirect, url_for, Blueprint from flask.ext.login import current_user -from endpoints.trigger import BitbucketBuildTrigger, BuildTriggerHandler +from buildtrigger.basehandler import BuildTriggerHandler +from buildtrigger.bitbuckethandler import BitbucketBuildTrigger from endpoints.common import route_show_if from app import app from data import model diff --git a/endpoints/building.py b/endpoints/building.py index 93d76be7b..bf2c2b161 100644 --- a/endpoints/building.py +++ b/endpoints/building.py @@ -96,7 +96,7 @@ class PreparedBuild(object): def get_display_name(sha): return sha[0:7] - def tags_from_ref(self, ref, default_branch='master'): + def 
tags_from_ref(self, ref, default_branch=None): branch = ref.split('/')[-1] tags = {branch} diff --git a/endpoints/trigger.py b/endpoints/trigger.py deleted file mode 100644 index 5e174cdb9..000000000 --- a/endpoints/trigger.py +++ /dev/null @@ -1,1589 +0,0 @@ -import logging -import io -import os.path -import tarfile -import base64 -import re -import json - -import gitlab - -from endpoints.building import PreparedBuild -from github import (Github, UnknownObjectException, GithubException, - BadCredentialsException as GitHubBadCredentialsException) -from bitbucket import BitBucket -from tempfile import SpooledTemporaryFile -from jsonschema import validate -from data import model - -from app import app, userfiles as user_files, github_trigger, get_app_url -from util.registry.tarfileappender import TarfileAppender -from util.security.ssh import generate_ssh_keypair - - -client = app.config['HTTPCLIENT'] - - -logger = logging.getLogger(__name__) - - -TARBALL_MIME = 'application/gzip' -CHUNK_SIZE = 512 * 1024 - - -class InvalidPayloadException(Exception): - pass - -class BuildArchiveException(Exception): - pass - -class InvalidServiceException(Exception): - pass - -class TriggerActivationException(Exception): - pass - -class TriggerDeactivationException(Exception): - pass - -class TriggerStartException(Exception): - pass - -class ValidationRequestException(Exception): - pass - -class SkipRequestException(Exception): - pass - -class EmptyRepositoryException(Exception): - pass - -class RepositoryReadException(Exception): - pass - -class TriggerProviderException(Exception): - pass - - -def _determine_build_ref(run_parameters, get_branch_sha, get_tag_sha, default_branch): - run_parameters = run_parameters or {} - - kind = '' - value = '' - - if 'refs' in run_parameters and run_parameters['refs']: - kind = run_parameters['refs']['kind'] - value = run_parameters['refs']['name'] - elif 'branch_name' in run_parameters: - kind = 'branch' - value = run_parameters['branch_name'] - - kind = kind or 'branch' - value = value or default_branch - - ref = 'refs/tags/' + value if kind == 'tag' else 'refs/heads/' + value - commit_sha = get_tag_sha(value) if kind == 'tag' else get_branch_sha(value) - return (commit_sha, ref) - - -def find_matching_branches(config, branches): - if 'branchtag_regex' in config: - try: - regex = re.compile(config['branchtag_regex']) - return [branch for branch in branches - if matches_ref('refs/heads/' + branch, regex)] - except: - pass - - return branches - -def raise_if_skipped(config, ref): - """ Raises a SkipRequestException if the given ref should be skipped. """ - if 'branchtag_regex' in config: - try: - regex = re.compile(config['branchtag_regex']) - except: - regex = re.compile('.*') - - if not matches_ref(ref, regex): - raise SkipRequestException() - -def matches_ref(ref, regex): - match_string = ref.split('/', 1)[1] - if not regex: - return False - - m = regex.match(match_string) - if not m: - return False - - return len(m.group(0)) == len(match_string) - -def should_skip_commit(message): - return '[skip build]' in message or '[build skip]' in message - -def raise_unsupported(): - raise io.UnsupportedOperation - -def get_trigger_config(trigger): - try: - return json.loads(trigger.config) - except: - return {} - - -class BuildTriggerHandler(object): - def __init__(self, trigger, override_config=None): - self.trigger = trigger - self.config = override_config or get_trigger_config(trigger) - - @property - def auth_token(self): - """ Returns the auth token for the trigger. 
""" - return self.trigger.auth_token - - def load_dockerfile_contents(self): - """ - Loads the Dockerfile found for the trigger's config and returns them or None if none could - be found/loaded. - """ - raise NotImplementedError - - def list_build_sources(self): - """ - Take the auth information for the specific trigger type and load the - list of build sources(repositories). - """ - raise NotImplementedError - - def list_build_subdirs(self): - """ - Take the auth information and the specified config so far and list all of - the possible subdirs containing dockerfiles. - """ - raise NotImplementedError - - def handle_trigger_request(self): - """ - Transform the incoming request data into a set of actions. Returns a PreparedBuild. - """ - raise NotImplementedError - - def is_active(self): - """ - Returns True if the current build trigger is active. Inactive means further - setup is needed. - """ - raise NotImplementedError - - def activate(self, standard_webhook_url): - """ - Activates the trigger for the service, with the given new configuration. - Returns new public and private config that should be stored if successful. - """ - raise NotImplementedError - - def deactivate(self): - """ - Deactivates the trigger for the service, removing any hooks installed in - the remote service. Returns the new config that should be stored if this - trigger is going to be re-activated. - """ - raise NotImplementedError - - def manual_start(self, run_parameters=None): - """ - Manually creates a repository build for this trigger. Returns a PreparedBuild. - """ - raise NotImplementedError - - def list_field_values(self, field_name, limit=None): - """ - Lists all values for the given custom trigger field. For example, a trigger might have a - field named "branches", and this method would return all branches. - """ - raise NotImplementedError - - def get_repository_url(self): - """ Returns the URL of the current trigger's repository. Note that this operation - can be called in a loop, so it should be as fast as possible. """ - raise NotImplementedError - - @classmethod - def service_name(cls): - """ - Particular service implemented by subclasses. - """ - raise NotImplementedError - - @classmethod - def get_handler(cls, trigger, override_config=None): - for subc in cls.__subclasses__(): - if subc.service_name() == trigger.service.name: - return subc(trigger, override_config) - - raise InvalidServiceException('Unable to find service: %s' % trigger.service.name) - - def put_config_key(self, key, value): - """ Updates a config key in the trigger, saving it to the DB. """ - self.config[key] = value - model.build.update_build_trigger(self.trigger, self.config) - - def set_auth_token(self, auth_token): - """ Sets the auth token for the trigger, saving it to the DB. """ - model.build.update_build_trigger(self.trigger, self.config, auth_token=auth_token) - - def get_dockerfile_path(self): - """ Returns the normalized path to the Dockerfile found in the subdirectory - in the config. """ - subdirectory = self.config.get('subdir', '') - if subdirectory == '/': - subdirectory = '' - else: - if not subdirectory.endswith('/'): - subdirectory = subdirectory + '/' - - return subdirectory + 'Dockerfile' - - -class BitbucketBuildTrigger(BuildTriggerHandler): - """ - BuildTrigger for Bitbucket. 
- """ - @classmethod - def service_name(cls): - return 'bitbucket' - - def _get_client(self): - key = app.config.get('BITBUCKET_TRIGGER_CONFIG', {}).get('CONSUMER_KEY', '') - secret = app.config.get('BITBUCKET_TRIGGER_CONFIG', {}).get('CONSUMER_SECRET', '') - - trigger_uuid = self.trigger.uuid - callback_url = '%s/oauth1/bitbucket/callback/trigger/%s' % (get_app_url(), trigger_uuid) - - return BitBucket(key, secret, callback_url) - - def _get_authorized_client(self): - base_client = self._get_client() - auth_token = self.auth_token or 'invalid:invalid' - token_parts = auth_token.split(':') - if len(token_parts) != 2: - token_parts = ['invalid', 'invalid'] - - (access_token, access_token_secret) = token_parts - return base_client.get_authorized_client(access_token, access_token_secret) - - def _get_repository_client(self): - source = self.config['build_source'] - (namespace, name) = source.split('/') - bitbucket_client = self._get_authorized_client() - return bitbucket_client.for_namespace(namespace).repositories().get(name) - - def _get_default_branch(self, repository, default_value='master'): - (result, data, _) = repository.get_main_branch() - if result: - return data['name'] - - return default_value - - def get_oauth_url(self): - bitbucket_client = self._get_client() - (result, data, err_msg) = bitbucket_client.get_authorization_url() - if not result: - raise RepositoryReadException(err_msg) - - return data - - def exchange_verifier(self, verifier): - bitbucket_client = self._get_client() - access_token = self.config.get('access_token', '') - access_token_secret = self.auth_token - - # Exchange the verifier for a new access token. - (result, data, _) = bitbucket_client.verify_token(access_token, access_token_secret, verifier) - if not result: - return False - - # Save the updated access token and secret. - self.set_auth_token(data[0] + ':' + data[1]) - - # Retrieve the current authorized user's information and store the username in the config. - authorized_client = self._get_authorized_client() - (result, data, _) = authorized_client.get_current_user() - if not result: - return False - - username = data['user']['username'] - self.put_config_key('username', username) - return True - - def is_active(self): - return 'webhook_id' in self.config - - def activate(self, standard_webhook_url): - config = self.config - - # Add a deploy key to the repository. - public_key, private_key = generate_ssh_keypair() - config['credentials'] = [ - { - 'name': 'SSH Public Key', - 'value': public_key, - }, - ] - - repository = self._get_repository_client() - (result, created_deploykey, err_msg) = repository.deploykeys().create( - app.config['REGISTRY_TITLE'] + ' webhook key', public_key) - - if not result: - msg = 'Unable to add deploy key to repository: %s' % err_msg - raise TriggerActivationException(msg) - - config['deploy_key_id'] = created_deploykey['pk'] - - # Add a webhook callback. 
- description = 'Webhook for invoking builds on %s' % app.config['REGISTRY_TITLE_SHORT'] - webhook_events = ['repo:push'] - (result, created_webhook, err_msg) = repository.webhooks().create( - description, standard_webhook_url, webhook_events) - - if not result: - msg = 'Unable to add webhook to repository: %s' % err_msg - raise TriggerActivationException(msg) - - config['webhook_id'] = created_webhook['uuid'] - self.config = config - return config, {'private_key': private_key} - - def deactivate(self): - config = self.config - - webhook_id = config.pop('webhook_id', None) - deploy_key_id = config.pop('deploy_key_id', None) - repository = self._get_repository_client() - - # Remove the webhook. - if webhook_id is not None: - (result, _, err_msg) = repository.webhooks().delete(webhook_id) - if not result: - msg = 'Unable to remove webhook from repository: %s' % err_msg - raise TriggerDeactivationException(msg) - - # Remove the public key. - if deploy_key_id is not None: - (result, _, err_msg) = repository.deploykeys().delete(deploy_key_id) - if not result: - msg = 'Unable to remove deploy key from repository: %s' % err_msg - raise TriggerDeactivationException(msg) - - return config - - def list_build_sources(self): - bitbucket_client = self._get_authorized_client() - (result, data, err_msg) = bitbucket_client.get_visible_repositories() - if not result: - raise RepositoryReadException('Could not read repository list: ' + err_msg) - - namespaces = {} - for repo in data: - if not repo['scm'] == 'git': - continue - - owner = repo['owner'] - if not owner in namespaces: - namespaces[owner] = { - 'personal': owner == self.config.get('username'), - 'repos': [], - 'info': { - 'name': owner - } - } - - namespaces[owner]['repos'].append(owner + '/' + repo['slug']) - - return namespaces.values() - - def list_build_subdirs(self): - config = self.config - repository = self._get_repository_client() - - # Find the first matching branch. 
- repo_branches = self.list_field_values('branch_name') or [] - branches = find_matching_branches(config, repo_branches) - if not branches: - branches = [self._get_default_branch(repository)] - - (result, data, err_msg) = repository.get_path_contents('', revision=branches[0]) - if not result: - raise RepositoryReadException(err_msg) - - files = set([f['path'] for f in data['files']]) - if 'Dockerfile' in files: - return ['/'] - - return [] - - def load_dockerfile_contents(self): - repository = self._get_repository_client() - path = self.get_dockerfile_path() - - (result, data, err_msg) = repository.get_raw_path_contents(path, revision='master') - if not result: - raise RepositoryReadException(err_msg) - - return data - - def list_field_values(self, field_name, limit=None): - source = self.config['build_source'] - (namespace, name) = source.split('/') - - bitbucket_client = self._get_authorized_client() - repository = bitbucket_client.for_namespace(namespace).repositories().get(name) - - if field_name == 'refs': - (result, data, _) = repository.get_branches_and_tags() - if not result: - return None - - branches = [b['name'] for b in data['branches']] - tags = [t['name'] for t in data['tags']] - - return ([{'kind': 'branch', 'name': b} for b in branches] + - [{'kind': 'tag', 'name': tag} for tag in tags]) - - if field_name == 'tag_name': - (result, data, _) = repository.get_tags() - if not result: - return None - - tags = list(data.keys()) - if limit: - tags = tags[0:limit] - - return tags - - if field_name == 'branch_name': - (result, data, _) = repository.get_branches() - if not result: - return None - - branches = list(data.keys()) - if limit: - branches = branches[0:limit] - - return branches - - return None - - _BITBUCKET_COMMIT_URL = 'https://bitbucket.org/%s/%s/commits/%s' - - def _prepare_build(self, commit_sha, ref, is_manual, target=None, actor=None): - def _build_user_block(info): - return { - 'username': info['username'], - 'url': info['links']['html']['href'], - 'avatar_url': info['links']['avatar']['href'], - 'display_name': info['display_name'] - } - - config = self.config - repository = self._get_repository_client() - - # Lookup the default branch associated with the repository. We use this when building - # the tags. - default_branch = self._get_default_branch(repository) - - # Lookup the commit sha (if necessary) - data = {} - if target is None: - (result, data, _) = repository.changesets().get(commit_sha) - if not result: - raise TriggerStartException('Could not lookup commit SHA') - - namespace = repository.namespace - name = repository.repository_name - - # Build the commit information. - commit_url = self._BITBUCKET_COMMIT_URL % (namespace, name, commit_sha) - if target is not None and 'links' in target: - commit_url = target['links']['html']['href'] - - commit_info = { - 'url': commit_url, - 'message': target['message'] if target else data['message'], - 'date': target['date'] if target else data['timestamp'] - } - - # Add the commit's author. - if target is not None and target.get('author') and 'user' in target['author']: - commit_info['author'] = _build_user_block(target['author']['user']) - elif data.get('raw_author'): - # Try to lookup the author by email address. 
The raw_author field (if it exists) is returned - # in the form: "Joseph Schorr " - match = re.compile(r'.*<(.+)>').match(data['raw_author']) - if match: - email_address = match.group(1) - bitbucket_client = self._get_authorized_client() - (result, data, _) = bitbucket_client.accounts().get_profile(email_address) - if result: - commit_info['author'] = { - 'username': data['user']['username'], - 'url': 'https://bitbucket.org/%s/' % data['user']['username'], - 'avatar_url': data['user']['avatar'] - } - - # Add the commit's actor (committer). - if actor is not None: - commit_info['committer'] = _build_user_block(actor) - - metadata = { - 'commit': commit_sha, - 'ref': ref, - 'default_branch': default_branch, - 'git_url': 'git@bitbucket.org:%s/%s.git' % (namespace, name), - 'commit_info': commit_info - } - - prepared = PreparedBuild(self.trigger) - prepared.tags_from_ref(ref, default_branch) - prepared.name_from_sha(commit_sha) - prepared.subdirectory = config['subdir'] - prepared.metadata = metadata - prepared.is_manual = is_manual - - return prepared - - def handle_trigger_request(self, request): - payload = request.get_json() - if not payload or not 'push' in payload: - logger.debug('Skipping BitBucket request due to missing push data in payload') - raise SkipRequestException() - - push_payload = payload['push'] - if not 'changes' in push_payload or not push_payload['changes']: - logger.debug('Skipping BitBucket request due to empty changes list') - raise SkipRequestException() - - # Make sure we have a new change. - changes = push_payload['changes'] - last_change = changes[-1] - if not last_change.get('new'): - logger.debug('Skipping BitBucket request due to change being a deletion') - raise SkipRequestException() - - change_info = last_change['new'] - change_target = change_info.get('target') - if not change_target: - logger.debug('Skipping BitBucket request due to missing change target') - raise SkipRequestException() - - # Check if this build should be skipped by commit message. - commit_message = change_target.get('message', '') - if should_skip_commit(commit_message): - logger.debug('Skipping BitBucket request due to commit message request') - raise SkipRequestException() - - # Check to see if this build should be skipped by ref. - ref = ('refs/heads/' + change_info['name'] if change_info['type'] == 'branch' - else 'refs/tags/' + change_info['name']) - - logger.debug('Checking BitBucket request: %s', ref) - raise_if_skipped(self.config, ref) - - # Prepare the build. - commit_sha = change_target['hash'] - return self._prepare_build(commit_sha, ref, False, target=change_target, - actor=payload.get('actor')) - - - def manual_start(self, run_parameters=None): - run_parameters = run_parameters or {} - repository = self._get_repository_client() - - def get_branch_sha(branch_name): - # Lookup the commit SHA for the branch. - (result, data, _) = repository.get_branches() - if not result or not branch_name in data: - raise TriggerStartException('Could not find branch commit SHA') - - return data[branch_name]['node'] - - def get_tag_sha(tag_name): - # Lookup the commit SHA for the tag. - (result, data, _) = repository.get_tags() - if not result or not tag_name in data: - raise TriggerStartException('Could not find tag commit SHA') - - return data[tag_name]['node'] - - # Find the branch or tag to build. 
- (commit_sha, ref) = _determine_build_ref(run_parameters, get_branch_sha, get_tag_sha, - self._get_default_branch(repository)) - - return self._prepare_build(commit_sha, ref, True) - - def get_repository_url(self): - source = self.config['build_source'] - (namespace, name) = source.split('/') - return 'https://bitbucket.org/%s/%s' % (namespace, name) - - -class GithubBuildTrigger(BuildTriggerHandler): - """ - BuildTrigger for GitHub that uses the archive API and buildpacks. - """ - def _get_client(self): - return Github(self.auth_token, - base_url=github_trigger.api_endpoint(), - client_id=github_trigger.client_id(), - client_secret=github_trigger.client_secret()) - - @classmethod - def service_name(cls): - return 'github' - - def is_active(self): - return 'hook_id' in self.config - - def activate(self, standard_webhook_url): - config = self.config - new_build_source = config['build_source'] - gh_client = self._get_client() - - # Find the GitHub repository. - try: - gh_repo = gh_client.get_repo(new_build_source) - except UnknownObjectException: - msg = 'Unable to find GitHub repository for source: %s' % new_build_source - raise TriggerActivationException(msg) - - # Add a deploy key to the GitHub repository. - public_key, private_key = generate_ssh_keypair() - config['credentials'] = [ - { - 'name': 'SSH Public Key', - 'value': public_key, - }, - ] - try: - deploy_key = gh_repo.create_key('%s Builder' % app.config['REGISTRY_TITLE'], - public_key) - config['deploy_key_id'] = deploy_key.id - except GithubException: - msg = 'Unable to add deploy key to repository: %s' % new_build_source - raise TriggerActivationException(msg) - - # Add the webhook to the GitHub repository. - webhook_config = { - 'url': standard_webhook_url, - 'content_type': 'json', - } - try: - hook = gh_repo.create_hook('web', webhook_config) - config['hook_id'] = hook.id - config['master_branch'] = gh_repo.default_branch - except GithubException: - msg = 'Unable to create webhook on repository: %s' % new_build_source - raise TriggerActivationException(msg) - - return config, {'private_key': private_key} - - def deactivate(self): - config = self.config - gh_client = self._get_client() - - # Find the GitHub repository. - try: - repo = gh_client.get_repo(config['build_source']) - except UnknownObjectException: - msg = 'Unable to find GitHub repository for source: %s' % config['build_source'] - raise TriggerDeactivationException(msg) - except GitHubBadCredentialsException: - msg = 'Unable to access repository to disable trigger' - raise TriggerDeactivationException(msg) - - # If the trigger uses a deploy key, remove it. - try: - if config['deploy_key_id']: - deploy_key = repo.get_key(config['deploy_key_id']) - deploy_key.delete() - except KeyError: - # There was no config['deploy_key_id'], thus this is an old trigger without a deploy key. - pass - except GithubException: - msg = 'Unable to remove deploy key: %s' % config['deploy_key_id'] - raise TriggerDeactivationException(msg) - - # Remove the webhook. 
- try: - hook = repo.get_hook(config['hook_id']) - hook.delete() - except GithubException: - msg = 'Unable to remove hook: %s' % config['hook_id'] - raise TriggerDeactivationException(msg) - - config.pop('hook_id', None) - self.config = config - return config - - def list_build_sources(self): - gh_client = self._get_client() - usr = gh_client.get_user() - - try: - repos = usr.get_repos() - except GithubException: - raise RepositoryReadException('Unable to list user repositories') - - namespaces = {} - has_non_personal = False - - for repository in repos: - namespace = repository.owner.login - if not namespace in namespaces: - is_personal_repo = namespace == usr.login - namespaces[namespace] = { - 'personal': is_personal_repo, - 'repos': [], - 'info': { - 'name': namespace, - 'avatar_url': repository.owner.avatar_url - } - } - - if not is_personal_repo: - has_non_personal = True - - namespaces[namespace]['repos'].append(repository.full_name) - - # In older versions of GitHub Enterprise, the get_repos call above does not - # return any non-personal repositories. In that case, we need to lookup the - # repositories manually. - # TODO: Remove this once we no longer support GHE versions <= 2.1 - if not has_non_personal: - for org in usr.get_orgs(): - repo_list = [repo.full_name for repo in org.get_repos(type='member')] - namespaces[org.name] = { - 'personal': False, - 'repos': repo_list, - 'info': { - 'name': org.name or org.login, - 'avatar_url': org.avatar_url - } - } - - entries = list(namespaces.values()) - entries.sort(key=lambda e: e['info']['name']) - return entries - - def list_build_subdirs(self): - config = self.config - gh_client = self._get_client() - source = config['build_source'] - - try: - repo = gh_client.get_repo(source) - - # Find the first matching branch. 
- repo_branches = self.list_field_values('branch_name') or [] - branches = find_matching_branches(config, repo_branches) - branches = branches or [repo.default_branch or 'master'] - default_commit = repo.get_branch(branches[0]).commit - commit_tree = repo.get_git_tree(default_commit.sha, recursive=True) - - return [os.path.dirname(elem.path) for elem in commit_tree.tree - if (elem.type == u'blob' and - os.path.basename(elem.path) == u'Dockerfile')] - except GithubException as ge: - message = ge.data.get('message', 'Unable to list contents of repository: %s' % source) - if message == 'Branch not found': - raise EmptyRepositoryException() - - raise RepositoryReadException(message) - - - def load_dockerfile_contents(self): - config = self.config - gh_client = self._get_client() - - source = config['build_source'] - path = self.get_dockerfile_path() - try: - repo = gh_client.get_repo(source) - file_info = repo.get_file_contents(path) - if file_info is None: - return None - - content = file_info.content - if file_info.encoding == 'base64': - content = base64.b64decode(content) - return content - - except GithubException as ge: - message = ge.data.get('message', 'Unable to read Dockerfile: %s' % source) - raise RepositoryReadException(message) - - @staticmethod - def _build_commit_info(repo, payload, commit_sha): - if repo: - return GithubBuildTrigger._build_repo_commit_info(repo, commit_sha) - else: - return GithubBuildTrigger._build_payload_commit_info(payload, commit_sha) - - @staticmethod - def _build_payload_commit_info(payload, commit_sha): - head_commit = payload.get('head_commit', {}) - sender = payload.get('sender', {}) - - commit_info = { - 'url': head_commit.get('url', ''), - 'message': head_commit.get('message', ''), - 'date': head_commit.get('timestamp', ''), - } - - if 'author' in head_commit: - commit_info['author'] = { - 'username': head_commit['author'].get('username'), - } - - if head_commit['author']['username'] == sender.get('login'): - commit_info['author']['avatar_url'] = sender.get('avatar_url', '') - commit_info['author']['url'] = sender.get('html_url', '') - - if 'committer' in head_commit: - commit_info['committer'] = { - 'username': head_commit['committer'].get('username'), - } - - if head_commit['committer']['username'] == sender.get('login'): - commit_info['committer']['avatar_url'] = sender.get('avatar_url', '') - commit_info['committer']['url'] = sender.get('html_url', '') - - return commit_info - - @staticmethod - def _build_repo_commit_info(repo, commit_sha): - try: - commit = repo.get_commit(commit_sha) - except GithubException: - logger.exception('Could not load data for commit') - return - - commit_info = { - 'url': commit.html_url, - 'message': commit.commit.message, - 'date': commit.last_modified - } - - if commit.author: - commit_info['author'] = { - 'username': commit.author.login, - 'avatar_url': commit.author.avatar_url, - 'url': commit.author.html_url - } - - if commit.committer: - commit_info['committer'] = { - 'username': commit.committer.login, - 'avatar_url': commit.committer.avatar_url, - 'url': commit.committer.html_url - } - - return commit_info - - @staticmethod - def _prepare_tarball(repo, commit_sha): - # Prepare the download and upload URLs - archive_link = repo.get_archive_link('tarball', commit_sha) - download_archive = client.get(archive_link, stream=True) - tarball_subdir = '' - - with SpooledTemporaryFile(CHUNK_SIZE) as tarball: - for chunk in download_archive.iter_content(CHUNK_SIZE): - tarball.write(chunk) - - # Seek to position 0 to 
make tarfile happy - tarball.seek(0) - - # Pull out the name of the subdir that GitHub generated - with tarfile.open(fileobj=tarball) as archive: - tarball_subdir = archive.getnames()[0] - - # Seek to position 0 to make tarfile happy. - tarball.seek(0) - - entries = { - tarball_subdir + '/.git/HEAD': commit_sha, - tarball_subdir + '/.git/objects/': None, - tarball_subdir + '/.git/refs/': None - } - - appender = TarfileAppender(tarball, entries).get_stream() - dockerfile_id = user_files.store_file(appender, TARBALL_MIME) - - logger.debug('Successfully prepared job') - - return tarball_subdir, dockerfile_id - - - def _get_payload(self, payload, *args): - current = payload - for arg in args: - current = current.get(arg, {}) - - return current - - - def _prepare_build(self, ref, commit_sha, is_manual, repo=None, payload=None): - config = self.config - prepared = PreparedBuild(self.trigger) - - # If the trigger isn't using git, prepare the buildpack. - if self.trigger.private_key is None: - if repo is None: - raise SkipRequestException() - - tarball_subdir, dockerfile_id = GithubBuildTrigger._prepare_tarball(repo, commit_sha) - - prepared.subdirectory = os.path.join(tarball_subdir, config['subdir']) - prepared.dockerfile_id = dockerfile_id - else: - prepared.subdirectory = config['subdir'] - - # Set the name. - prepared.name_from_sha(commit_sha) - - # Set the tag(s). - if repo: - default_branch = repo.default_branch - else: - default_branch = self._get_payload(payload, 'repository', 'default_branch') - - prepared.tags_from_ref(ref, default_branch) - - # Build and set the metadata. - metadata = { - 'commit': commit_sha, - 'ref': ref, - 'default_branch': default_branch, - 'git_url': repo.ssh_url if repo else self._get_payload(payload, 'repository', 'ssh_url'), - } - - # add the commit info. - commit_info = GithubBuildTrigger._build_commit_info(repo, payload, commit_sha) - if commit_info is not None: - metadata['commit_info'] = commit_info - - prepared.metadata = metadata - prepared.is_manual = is_manual - return prepared - - - def handle_trigger_request(self, request): - # Check the payload to see if we should skip it based on the lack of a head_commit. - payload = request.get_json() - if not payload or payload.get('head_commit') is None: - raise SkipRequestException() - - # This is for GitHub's probing/testing. - if 'zen' in payload: - raise ValidationRequestException() - - logger.debug('GitHub trigger payload %s', payload) - - ref = payload['ref'] - commit_sha = payload['head_commit']['id'] - commit_message = payload['head_commit'].get('message', '') - - # Check if this build should be skipped by commit message. - if should_skip_commit(commit_message): - raise SkipRequestException() - - # Check to see if this build should be skipped by ref. 
- raise_if_skipped(self.config, ref) - - try: - repo_full_name = '%s/%s' % (payload['repository']['owner']['name'], - payload['repository']['name']) - - gh_client = self._get_client() - repo = gh_client.get_repo(repo_full_name) - return self._prepare_build(ref, commit_sha, False, repo=repo) - except GitHubBadCredentialsException: - logger.exception('Got GitHub Credentials Exception, retrying with a manual payload') - return self._prepare_build(ref, commit_sha, False, payload=payload) - except GithubException: - logger.exception("Got GitHub Exception when trying to start trigger %s", self.trigger.id) - raise SkipRequestException() - - - def manual_start(self, run_parameters=None): - config = self.config - source = config['build_source'] - - try: - gh_client = self._get_client() - repo = gh_client.get_repo(source) - default_branch = repo.default_branch - except GithubException as ghe: - raise TriggerStartException(ghe.data['message']) - - def get_branch_sha(branch_name): - branch = repo.get_branch(branch_name) - return branch.commit.sha - - def get_tag_sha(tag_name): - tags = {tag.name: tag for tag in repo.get_tags()} - if not tag_name in tags: - raise TriggerStartException('Could not find tag in repository') - - return tags[tag_name].commit.sha - - # Find the branch or tag to build. - (commit_sha, ref) = _determine_build_ref(run_parameters, get_branch_sha, get_tag_sha, - default_branch) - - return self._prepare_build(ref, commit_sha, True, repo=repo) - - - def list_field_values(self, field_name, limit=None): - if field_name == 'refs': - branches = self.list_field_values('branch_name') - tags = self.list_field_values('tag_name') - - return ([{'kind': 'branch', 'name': b} for b in branches] + - [{'kind': 'tag', 'name': tag} for tag in tags]) - - config = self.config - if field_name == 'tag_name': - try: - gh_client = self._get_client() - source = config['build_source'] - repo = gh_client.get_repo(source) - gh_tags = repo.get_tags() - if limit: - gh_tags = repo.get_tags()[0:limit] - - return [tag.name for tag in gh_tags] - except GitHubBadCredentialsException: - return [] - except GithubException: - logger.exception("Got GitHub Exception when trying to list tags for trigger %s", - self.trigger.id) - return [] - - if field_name == 'branch_name': - try: - gh_client = self._get_client() - source = config['build_source'] - repo = gh_client.get_repo(source) - gh_branches = repo.get_branches() - if limit: - gh_branches = repo.get_branches()[0:limit] - - branches = [branch.name for branch in gh_branches] - - if not repo.default_branch in branches: - branches.insert(0, repo.default_branch) - - if branches[0] != repo.default_branch: - branches.remove(repo.default_branch) - branches.insert(0, repo.default_branch) - - return branches - except GitHubBadCredentialsException: - return ['master'] - except GithubException: - logger.exception("Got GitHub Exception when trying to list branches for trigger %s", - self.trigger.id) - return ['master'] - - return None - - def get_repository_url(self): - from app import github_trigger - source = self.config['build_source'] - return github_trigger.get_public_url(source) - - -class CustomBuildTrigger(BuildTriggerHandler): - payload_schema = { - 'type': 'object', - 'properties': { - 'commit': { - 'type': 'string', - 'description': 'first 7 characters of the SHA-1 identifier for a git commit', - 'pattern': '^([A-Fa-f0-9]{7,})$', - }, - 'ref': { - 'type': 'string', - 'description': 'git reference for a git commit', - 'pattern': '^refs\/(heads|tags|remotes)\/(.+)$', - }, 
- 'default_branch': { - 'type': 'string', - 'description': 'default branch of the git repository', - }, - 'commit_info': { - 'type': 'object', - 'description': 'metadata about a git commit', - 'properties': { - 'url': { - 'type': 'string', - 'description': 'URL to view a git commit', - }, - 'message': { - 'type': 'string', - 'description': 'git commit message', - }, - 'date': { - 'type': 'string', - 'description': 'timestamp for a git commit' - }, - 'author': { - 'type': 'object', - 'description': 'metadata about the author of a git commit', - 'properties': { - 'username': { - 'type': 'string', - 'description': 'username of the author', - }, - 'url': { - 'type': 'string', - 'description': 'URL to view the profile of the author', - }, - 'avatar_url': { - 'type': 'string', - 'description': 'URL to view the avatar of the author', - }, - }, - 'required': ['username', 'url', 'avatar_url'], - }, - 'committer': { - 'type': 'object', - 'description': 'metadata about the committer of a git commit', - 'properties': { - 'username': { - 'type': 'string', - 'description': 'username of the committer', - }, - 'url': { - 'type': 'string', - 'description': 'URL to view the profile of the committer', - }, - 'avatar_url': { - 'type': 'string', - 'description': 'URL to view the avatar of the committer', - }, - }, - 'required': ['username', 'url', 'avatar_url'], - }, - }, - 'required': ['url', 'message', 'date'], - }, - }, - 'required': ['commit', 'ref', 'default_branch'], - } - - @classmethod - def service_name(cls): - return 'custom-git' - - def is_active(self): - return self.config.has_key('credentials') - - def _metadata_from_payload(self, payload): - try: - metadata = json.loads(payload) - validate(metadata, self.payload_schema) - except Exception as e: - raise InvalidPayloadException(e.message) - return metadata - - def handle_trigger_request(self, request): - # Skip if there is no payload. - payload = request.data - if not payload: - raise InvalidPayloadException() - - logger.debug('Payload %s', payload) - - # Skip if the commit message matches. - metadata = self._metadata_from_payload(payload) - if should_skip_commit(metadata.get('commit_info', {}).get('message', '')): - raise SkipRequestException() - - # The build source is the canonical git URL used to clone. 
- config = self.config - metadata['git_url'] = config['build_source'] - - prepared = PreparedBuild(self.trigger) - prepared.tags_from_ref(metadata['ref']) - prepared.name_from_sha(metadata['commit']) - prepared.subdirectory = config['subdir'] - prepared.metadata = metadata - prepared.is_manual = False - - return prepared - - def manual_start(self, run_parameters=None): - # commit_sha is the only required parameter - commit_sha = run_parameters.get('commit_sha') - if commit_sha is None: - raise TriggerStartException('missing required parameter') - - config = self.config - metadata = { - 'commit': commit_sha, - 'git_url': config['build_source'], - } - - prepared = PreparedBuild(self.trigger) - prepared.tags = [commit_sha[:7]] - prepared.name_from_sha(commit_sha) - prepared.subdirectory = config['subdir'] - prepared.metadata = metadata - prepared.is_manual = True - - return prepared - - def activate(self, standard_webhook_url): - config = self.config - public_key, private_key = generate_ssh_keypair() - config['credentials'] = [ - { - 'name': 'SSH Public Key', - 'value': public_key, - }, - { - 'name': 'Webhook Endpoint URL', - 'value': standard_webhook_url, - }, - ] - self.config = config - return config, {'private_key': private_key} - - def deactivate(self): - config = self.config - config.pop('credentials', None) - self.config = config - return config - - def get_repository_url(self): - return None - - -class GitLabBuildTrigger(BuildTriggerHandler): - """ - BuildTrigger for GitLab. - """ - @classmethod - def service_name(cls): - return 'gitlab' - - def _get_authorized_client(self): - host = app.config.get('GITLAB_TRIGGER_CONFIG', {}).get('GITLAB_ENDPOINT', '') - auth_token = self.auth_token or 'invalid' - return gitlab.Gitlab(host, oauth_token=auth_token) - - def is_active(self): - return 'hook_id' in self.config - - def activate(self, standard_webhook_url): - config = self.config - new_build_source = config['build_source'] - gl_client = self._get_authorized_client() - - # Find the GitLab repository. - repository = gl_client.getproject(new_build_source) - if repository is False: - msg = 'Unable to find GitLab repository for source: %s' % new_build_source - raise TriggerActivationException(msg) - - # Add a deploy key to the repository. - public_key, private_key = generate_ssh_keypair() - config['credentials'] = [ - { - 'name': 'SSH Public Key', - 'value': public_key, - }, - ] - key = gl_client.adddeploykey(repository['id'], '%s Builder' % app.config['REGISTRY_TITLE'], - public_key) - if key is False: - msg = 'Unable to add deploy key to repository: %s' % new_build_source - raise TriggerActivationException(msg) - config['key_id'] = key['id'] - - # Add the webhook to the GitLab repository. - hook = gl_client.addprojecthook(repository['id'], standard_webhook_url, push=True) - if hook is False: - msg = 'Unable to create webhook on repository: %s' % new_build_source - raise TriggerActivationException(msg) - - config['hook_id'] = hook['id'] - self.config = config - return config, {'private_key': private_key} - - def deactivate(self): - config = self.config - gl_client = self._get_authorized_client() - - # Find the GitLab repository. - repository = gl_client.getproject(config['build_source']) - if repository is False: - msg = 'Unable to find GitLab repository for source: %s' % config['build_source'] - raise TriggerDeactivationException(msg) - - # Remove the webhook. 
- success = gl_client.deleteprojecthook(repository['id'], config['hook_id']) - if success is False: - msg = 'Unable to remove hook: %s' % config['hook_id'] - raise TriggerDeactivationException(msg) - config.pop('hook_id', None) - - # Remove the key - success = gl_client.deletedeploykey(repository['id'], config['key_id']) - if success is False: - msg = 'Unable to remove deploy key: %s' % config['key_id'] - raise TriggerDeactivationException(msg) - config.pop('key_id', None) - - self.config = config - - return config - - def list_build_sources(self): - gl_client = self._get_authorized_client() - current_user = gl_client.currentuser() - if current_user is False: - raise RepositoryReadException('Unable to get current user') - - repositories = gl_client.getprojects() - if repositories is False: - raise RepositoryReadException('Unable to list user repositories') - - namespaces = {} - for repo in repositories: - owner = repo['namespace']['name'] - if not owner in namespaces: - namespaces[owner] = { - 'personal': owner == current_user['username'], - 'repos': [], - 'info': { - 'name': owner, - } - } - - namespaces[owner]['repos'].append(repo['path_with_namespace']) - - return namespaces.values() - - def list_build_subdirs(self): - config = self.config - gl_client = self._get_authorized_client() - new_build_source = config['build_source'] - - repository = gl_client.getproject(new_build_source) - if repository is False: - msg = 'Unable to find GitLab repository for source: %s' % new_build_source - raise RepositoryReadException(msg) - - repo_branches = gl_client.getbranches(repository['id']) - if repo_branches is False: - msg = 'Unable to find GitLab branches for source: %s' % new_build_source - raise RepositoryReadException(msg) - - branches = [branch['name'] for branch in repo_branches] - branches = find_matching_branches(config, branches) - branches = branches or [repository['default_branch'] or 'master'] - - repo_tree = gl_client.getrepositorytree(repository['id'], ref_name=branches[0]) - if repo_tree is False: - msg = 'Unable to find GitLab repository tree for source: %s' % new_build_source - raise RepositoryReadException(msg) - - for node in repo_tree: - if node['name'] == 'Dockerfile': - return ['/'] - - return [] - - def load_dockerfile_contents(self): - gl_client = self._get_authorized_client() - path = self.get_dockerfile_path() - - repository = gl_client.getproject(self.config['build_source']) - if repository is False: - return None - - branches = self.list_field_values('branch_name') - branches = find_matching_branches(self.config, branches) - if branches == []: - return None - - branch_name = branches[0] - if repository['default_branch'] in branches: - branch_name = repository['default_branch'] - - contents = gl_client.getrawfile(repository['id'], branch_name, path) - if contents is False: - return None - - return contents - - def list_field_values(self, field_name, limit=None): - if field_name == 'refs': - branches = self.list_field_values('branch_name') - tags = self.list_field_values('tag_name') - - return ([{'kind': 'branch', 'name': b} for b in branches] + - [{'kind': 'tag', 'name': t} for t in tags]) - - gl_client = self._get_authorized_client() - repo = gl_client.getproject(self.config['build_source']) - if repo is False: - return [] - - if field_name == 'tag_name': - tags = gl_client.getrepositorytags(repo['id']) - if tags is False: - return [] - - if limit: - tags = tags[0:limit] - - return [tag['name'] for tag in tags] - - if field_name == 'branch_name': - branches = 
gl_client.getbranches(repo['id']) - if branches is False: - return [] - - if limit: - branches = branches[0:limit] - - return [branch['name'] for branch in branches] - - return None - - def _prepare_build(self, commit_sha, ref, is_manual): - config = self.config - gl_client = self._get_authorized_client() - - repo = gl_client.getproject(self.config['build_source']) - if repo is False: - raise TriggerStartException('Could not find repository') - - commit = gl_client.getrepositorycommit(repo['id'], commit_sha) - if repo is False: - raise TriggerStartException('Could not find repository') - - committer = None - if 'committer_email' in commit: - try: - [committer] = gl_client.getusers(search=commit['committer_email']) - except ValueError: - committer = None - - try: - [author] = gl_client.getusers(search=commit['author_email']) - except ValueError: - author = None - - metadata = { - 'commit': commit['id'], - 'ref': ref, - 'default_branch': repo['default_branch'], - 'git_url': repo['ssh_url_to_repo'], - 'commit_info': { - 'url': gl_client.host + '/' + repo['path_with_namespace'] + '/commit/' + commit['id'], - 'message': commit['message'], - 'date': commit['committed_date'], - }, - } - - if committer is not None: - metadata['commit_info']['committer'] = { - 'username': committer['username'], - 'avatar_url': committer['avatar_url'], - 'url': gl_client.host + '/' + committer['username'], - } - - if author is not None: - metadata['commit_info']['author'] = { - 'username': author['username'], - 'avatar_url': author['avatar_url'], - 'url': gl_client.host + '/' + author['username'] - } - - prepared = PreparedBuild(self.trigger) - prepared.tags_from_ref(ref, repo['default_branch']) - prepared.name_from_sha(commit['id']) - prepared.subdirectory = config['subdir'] - prepared.metadata = metadata - prepared.is_manual = is_manual - - return prepared - - def handle_trigger_request(self, request): - payload = request.get_json() - if not payload: - raise SkipRequestException() - - logger.debug('GitLab trigger payload %s', payload) - - if not payload.get('commits'): - raise SkipRequestException() - - commit = payload['commits'][0] - commit_message = commit['message'] - if should_skip_commit(commit_message): - raise SkipRequestException() - - ref = payload['ref'] - raise_if_skipped(self.config, ref) - - return self._prepare_build(commit['id'], ref, False) - - def manual_start(self, run_parameters=None): - gl_client = self._get_authorized_client() - - repo = gl_client.getproject(self.config['build_source']) - if repo is False: - raise TriggerStartException('Could not find repository') - - def get_tag_sha(tag_name): - tags = gl_client.getrepositorytags(repo['id']) - if tags is False: - raise TriggerStartException('Could not find tags') - - for tag in tags: - if tag['name'] == tag_name: - return tag['commit']['id'] - - raise TriggerStartException('Could not find commit') - - def get_branch_sha(branch_name): - branch = gl_client.getbranch(repo['id'], branch_name) - if branch is False: - raise TriggerStartException('Could not find branch') - - return branch['commit']['id'] - - # Find the branch or tag to build. 
- (commit_sha, ref) = _determine_build_ref(run_parameters, get_branch_sha, get_tag_sha, - repo['default_branch']) - - - return self._prepare_build(commit_sha, ref, True) - - def get_repository_url(self): - gl_client = self._get_authorized_client() - repository = gl_client.getproject(self.config['build_source']) - if repository is False: - return None - - return '%s/%s' % (gl_client.host, repository['path_with_namespace']) - diff --git a/endpoints/web.py b/endpoints/web.py index 154483faf..f7450b599 100644 --- a/endpoints/web.py +++ b/endpoints/web.py @@ -21,8 +21,12 @@ from util.cache import no_cache from endpoints.common import common_login, render_page_template, route_show_if, param_required from endpoints.decorators import anon_protect from endpoints.csrf import csrf_protect, generate_csrf_token, verify_csrf -from endpoints.trigger import (CustomBuildTrigger, BitbucketBuildTrigger, TriggerProviderException, - BuildTriggerHandler) + +from buildtrigger.customhandler import CustomBuildTrigger +from buildtrigger.bitbuckethandler import BitbucketBuildTrigger +from buildtrigger.triggerutil import TriggerProviderException +from buildtrigger.basehandler import BuildTriggerHandler + from util.names import parse_repository_name, parse_repository_name_and_tag from util.useremails import send_email_changed from util.systemlogs import build_logs_archive diff --git a/endpoints/webhooks.py b/endpoints/webhooks.py index 836b2fa9b..1b3d23f23 100644 --- a/endpoints/webhooks.py +++ b/endpoints/webhooks.py @@ -9,8 +9,9 @@ from auth.permissions import ModifyRepositoryPermission from util.invoice import renderInvoiceToHtml from util.useremails import send_invoice_email, send_subscription_change, send_payment_failed from util.http import abort -from endpoints.trigger import (BuildTriggerHandler, ValidationRequestException, - SkipRequestException, InvalidPayloadException) +from buildtrigger.basehandler import BuildTriggerHandler +from buildtrigger.triggerutil import (ValidationRequestException, SkipRequestException, + InvalidPayloadException) from endpoints.building import start_build diff --git a/requirements-nover.txt b/requirements-nover.txt index 95e6bae3f..ce0739cbc 100644 --- a/requirements-nover.txt +++ b/requirements-nover.txt @@ -54,3 +54,4 @@ pyjwt toposort rfc3987 pyjwkest +jsonpath-rw \ No newline at end of file diff --git a/requirements.txt b/requirements.txt index 3df8ec547..849cd9b78 100644 --- a/requirements.txt +++ b/requirements.txt @@ -32,6 +32,7 @@ iso8601==0.1.10 itsdangerous==0.24 Jinja2==2.7.3 jsonschema==2.5.1 +jsonpath-rw==1.4.0 Mako==1.0.1 marisa-trie==0.7.2 MarkupSafe==0.23 diff --git a/test/test_api_usage.py b/test/test_api_usage.py index 92cae9bb6..24fb02de2 100644 --- a/test/test_api_usage.py +++ b/test/test_api_usage.py @@ -12,8 +12,8 @@ from playhouse.test_utils import assert_query_count from endpoints.api import api_bp, api from endpoints.building import PreparedBuild from endpoints.webhooks import webhooks -from endpoints.trigger import BuildTriggerHandler from app import app +from buildtrigger.basehandler import BuildTriggerHandler from initdb import setup_database_for_testing, finished_database_for_testing from data import database, model from data.database import RepositoryActionCount diff --git a/test/test_prepare_trigger.py b/test/test_prepare_trigger.py new file mode 100644 index 000000000..d08926f0d --- /dev/null +++ b/test/test_prepare_trigger.py @@ -0,0 +1,194 @@ +import unittest +import json + +from jsonschema import validate, ValidationError +from 
buildtrigger.basehandler import METADATA_SCHEMA +from buildtrigger.bitbuckethandler import get_transformed_webhook_payload as bb_webhook +from buildtrigger.bitbuckethandler import get_transformed_commit_info as bb_commit +from buildtrigger.githubhandler import get_transformed_webhook_payload as gh_webhook +from buildtrigger.gitlabhandler import get_transformed_webhook_payload as gl_webhook + +class TestPrepareTrigger(unittest.TestCase): + def test_bitbucket_commit(self): + with open('test/triggerjson/bitbucket_commit.json') as f: + commit = json.loads(f.read()) + + ref = 'refs/heads/somebranch' + default_branch = 'somebranch' + repository_name = 'foo/bar' + + def lookup_author(_): + return { + 'user': { + 'username': 'cooluser', + 'avatar': 'http://some/avatar/url' + } + } + + expected = { + "commit": u"abdeaf1b2b4a6b9ddf742c1e1754236380435a62", + "ref": u"refs/heads/somebranch", + "git_url": u"git@bitbucket.org:foo/bar.git", + "default_branch": u"somebranch", + "commit_info": { + "url": u"https://bitbucket.org/foo/bar/commits/abdeaf1b2b4a6b9ddf742c1e1754236380435a62", + "date": u"2012-07-24 00:26:36", + "message": u"making some changes\n", + "author": { + "url": u"https://bitbucket.org/cooluser/", + "avatar_url": u"http://some/avatar/url", + "username": u"cooluser", + } + } + } + + created = bb_commit(commit, ref, default_branch, repository_name, lookup_author) + self.assertEquals(expected, created) + validate(created, METADATA_SCHEMA) + + + def test_bitbucket_webhook_payload(self): + with open('test/triggerjson/bitbucket_webhook.json') as f: + payload = json.loads(f.read()) + + expected = { + "commit": u"af64ae7188685f8424040b4735ad12941b980d75", + "ref": u"refs/heads/master", + "git_url": u"git@bitbucket.org:jscoreos/another-repo.git", + "commit_info": { + "url": u"https://bitbucket.org/jscoreos/another-repo/commits/af64ae7188685f8424040b4735ad12941b980d75", + "date": u"2015-09-10T20:40:54+00:00", + "message": u"Dockerfile edited online with Bitbucket", + "author": { + "username": u"jscoreos", + "url": u"https://bitbucket.org/jscoreos/", + "avatar_url": u"https://bitbucket.org/account/jscoreos/avatar/32/", + }, + "committer": { + "username": u"jscoreos", + "url": u"https://bitbucket.org/jscoreos/", + "avatar_url": u"https://bitbucket.org/account/jscoreos/avatar/32/", + }, + }, + } + + created = bb_webhook(payload) + self.assertEquals(expected, created) + validate(created, METADATA_SCHEMA) + + + def test_github_webhook_payload(self): + with open('test/triggerjson/github_webhook.json') as f: + payload = json.loads(f.read()) + + expected = { + 'commit': u'410f4cdf8ff09b87f245b13845e8497f90b90a4c', + 'ref': u'refs/heads/master', + 'git_url': u'git@github.com:josephschorr/anothertest.git', + 'commit_info': { + 'url': u'https://github.com/josephschorr/anothertest/commit/410f4cdf8ff09b87f245b13845e8497f90b90a4c', + 'date': u'2015-09-11T14:26:16-04:00', + 'message': u'Update Dockerfile', + 'committer': { + 'username': u'josephschorr', + }, + 'author': { + 'username': u'josephschorr', + }, + }, + } + + created = gh_webhook(payload) + self.assertEquals(expected, created) + validate(created, METADATA_SCHEMA) + + + def test_github_webhook_payload_with_lookup(self): + with open('test/triggerjson/github_webhook.json') as f: + payload = json.loads(f.read()) + + expected = { + 'commit': u'410f4cdf8ff09b87f245b13845e8497f90b90a4c', + 'ref': u'refs/heads/master', + 'git_url': u'git@github.com:josephschorr/anothertest.git', + 'commit_info': { + 'url': 
u'https://github.com/josephschorr/anothertest/commit/410f4cdf8ff09b87f245b13845e8497f90b90a4c', + 'date': u'2015-09-11T14:26:16-04:00', + 'message': u'Update Dockerfile', + 'committer': { + 'username': u'josephschorr', + 'url': u'http://github.com/josephschorr', + 'avatar_url': u'http://some/avatar/url', + }, + 'author': { + 'username': u'josephschorr', + 'url': u'http://github.com/josephschorr', + 'avatar_url': u'http://some/avatar/url', + }, + }, + } + + def lookup_user(_): + return { + 'html_url': 'http://github.com/josephschorr', + 'avatar_url': 'http://some/avatar/url' + } + + created = gh_webhook(payload, lookup_user=lookup_user) + self.assertEquals(expected, created) + validate(created, METADATA_SCHEMA) + + + def test_gitlab_webhook_payload(self): + with open('test/triggerjson/gitlab_webhook.json') as f: + payload = json.loads(f.read()) + + expected = { + 'commit': u'fb88379ee45de28a0a4590fddcbd8eff8b36026e', + 'ref': u'refs/heads/master', + 'git_url': u'git@gitlab.com:jzelinskie/www-gitlab-com.git', + 'commit_info': { + 'url': u'https://gitlab.com/jzelinskie/www-gitlab-com/commit/fb88379ee45de28a0a4590fddcbd8eff8b36026e', + 'date': u'2015-08-13T19:33:18+00:00', + 'message': u'Fix link\n', + }, + } + + created = gl_webhook(payload) + self.assertEquals(expected, created) + validate(created, METADATA_SCHEMA) + + + def test_gitlab_webhook_payload_with_lookup(self): + with open('test/triggerjson/gitlab_webhook.json') as f: + payload = json.loads(f.read()) + + expected = { + 'commit': u'fb88379ee45de28a0a4590fddcbd8eff8b36026e', + 'ref': u'refs/heads/master', + 'git_url': u'git@gitlab.com:jzelinskie/www-gitlab-com.git', + 'commit_info': { + 'url': u'https://gitlab.com/jzelinskie/www-gitlab-com/commit/fb88379ee45de28a0a4590fddcbd8eff8b36026e', + 'date': u'2015-08-13T19:33:18+00:00', + 'message': u'Fix link\n', + 'author': { + 'username': 'jzelinskie', + 'url': 'http://gitlab.com/jzelinskie', + 'avatar_url': 'http://some/avatar/url', + }, + }, + } + + def lookup_user(_): + return { + 'username': 'jzelinskie', + 'html_url': 'http://gitlab.com/jzelinskie', + 'avatar_url': 'http://some/avatar/url', + } + + created = gl_webhook(payload, lookup_user=lookup_user) + self.assertEquals(expected, created) + validate(created, METADATA_SCHEMA) + +if __name__ == '__main__': + unittest.main() \ No newline at end of file diff --git a/test/test_trigger.py b/test/test_trigger.py index d01853d96..710dab20b 100644 --- a/test/test_trigger.py +++ b/test/test_trigger.py @@ -1,7 +1,7 @@ import unittest import re -from endpoints.trigger import matches_ref +from buildtrigger.triggerutil import matches_ref class TestRegex(unittest.TestCase): def assertDoesNotMatch(self, ref, filt): diff --git a/test/triggerjson/bitbucket_commit.json b/test/triggerjson/bitbucket_commit.json new file mode 100644 index 000000000..1cdd5bf65 --- /dev/null +++ b/test/triggerjson/bitbucket_commit.json @@ -0,0 +1,24 @@ +{ + "files": [ + { + "type": "added", + "file": "AnotherFile.txt" + }, + { + "type": "modified", + "file": "Readme" + } + ], + "raw_author": "Mary Anthony ", + "utctimestamp": "2012-07-23 22:26:36+00:00", + "author": "Mary Anthony", + "timestamp": "2012-07-24 00:26:36", + "node": "abdeaf1b2b4a6b9ddf742c1e1754236380435a62", + "parents": [ + "86432202a2d5" + ], + "branch": "master", + "message": "making some changes\n", + "revision": null, + "size": -1 +} \ No newline at end of file diff --git a/test/triggerjson/bitbucket_webhook.json b/test/triggerjson/bitbucket_webhook.json new file mode 100644 index 000000000..9567e0f92 
--- /dev/null +++ b/test/triggerjson/bitbucket_webhook.json @@ -0,0 +1,237 @@ +{ + "push": { + "changes": [ + { + "links": { + "commits": { + "href": "https://api.bitbucket.org/2.0/repositories/jscoreos/another-repo/commits?include=af64ae7188685f8424040b4735ad12941b980d75&exclude=1784139225279a587e0afb151bed1f9ba3dd509e" + }, + "diff": { + "href": "https://api.bitbucket.org/2.0/repositories/jscoreos/another-repo/diff/af64ae7188685f8424040b4735ad12941b980d75..1784139225279a587e0afb151bed1f9ba3dd509e" + }, + "html": { + "href": "https://bitbucket.org/jscoreos/another-repo/branches/compare/af64ae7188685f8424040b4735ad12941b980d75..1784139225279a587e0afb151bed1f9ba3dd509e" + } + }, + "old": { + "name": "master", + "links": { + "commits": { + "href": "https://api.bitbucket.org/2.0/repositories/jscoreos/another-repo/commits/master" + }, + "html": { + "href": "https://bitbucket.org/jscoreos/another-repo/branch/master" + }, + "self": { + "href": "https://api.bitbucket.org/2.0/repositories/jscoreos/another-repo/refs/branches/master" + } + }, + "type": "branch", + "target": { + "links": { + "html": { + "href": "https://bitbucket.org/jscoreos/another-repo/commits/1784139225279a587e0afb151bed1f9ba3dd509e" + }, + "self": { + "href": "https://api.bitbucket.org/2.0/repositories/jscoreos/another-repo/commit/1784139225279a587e0afb151bed1f9ba3dd509e" + } + }, + "author": { + "user": { + "links": { + "avatar": { + "href": "https://bitbucket.org/account/jscoreos/avatar/32/" + }, + "html": { + "href": "https://bitbucket.org/jscoreos/" + }, + "self": { + "href": "https://api.bitbucket.org/2.0/users/jscoreos" + } + }, + "uuid": "{2fa27577-f361-45bb-999a-f4450c546b73}", + "type": "user", + "display_name": "Joseph Schorr", + "username": "jscoreos" + }, + "raw": "Joseph Schorr " + }, + "date": "2015-09-10T20:37:54+00:00", + "parents": [ + { + "links": { + "html": { + "href": "https://bitbucket.org/jscoreos/another-repo/commits/5329daa0961ec968de9ef36f30024bfa0da73103" + }, + "self": { + "href": "https://api.bitbucket.org/2.0/repositories/jscoreos/another-repo/commit/5329daa0961ec968de9ef36f30024bfa0da73103" + } + }, + "type": "commit", + "hash": "5329daa0961ec968de9ef36f30024bfa0da73103" + } + ], + "type": "commit", + "message": "Dockerfile edited online with Bitbucket", + "hash": "1784139225279a587e0afb151bed1f9ba3dd509e" + } + }, + "forced": false, + "truncated": false, + "commits": [ + { + "author": { + "user": { + "links": { + "avatar": { + "href": "https://bitbucket.org/account/jscoreos/avatar/32/" + }, + "html": { + "href": "https://bitbucket.org/jscoreos/" + }, + "self": { + "href": "https://api.bitbucket.org/2.0/users/jscoreos" + } + }, + "uuid": "{2fa27577-f361-45bb-999a-f4450c546b73}", + "type": "user", + "display_name": "Joseph Schorr", + "username": "jscoreos" + }, + "raw": "Joseph Schorr " + }, + "links": { + "html": { + "href": "https://bitbucket.org/jscoreos/another-repo/commits/af64ae7188685f8424040b4735ad12941b980d75" + }, + "self": { + "href": "https://api.bitbucket.org/2.0/repositories/jscoreos/another-repo/commit/af64ae7188685f8424040b4735ad12941b980d75" + } + }, + "message": "Dockerfile edited online with Bitbucket", + "type": "commit", + "hash": "af64ae7188685f8424040b4735ad12941b980d75" + } + ], + "new": { + "name": "master", + "links": { + "commits": { + "href": "https://api.bitbucket.org/2.0/repositories/jscoreos/another-repo/commits/master" + }, + "html": { + "href": "https://bitbucket.org/jscoreos/another-repo/branch/master" + }, + "self": { + "href": 
"https://api.bitbucket.org/2.0/repositories/jscoreos/another-repo/refs/branches/master" + } + }, + "type": "branch", + "target": { + "links": { + "html": { + "href": "https://bitbucket.org/jscoreos/another-repo/commits/af64ae7188685f8424040b4735ad12941b980d75" + }, + "self": { + "href": "https://api.bitbucket.org/2.0/repositories/jscoreos/another-repo/commit/af64ae7188685f8424040b4735ad12941b980d75" + } + }, + "author": { + "user": { + "links": { + "avatar": { + "href": "https://bitbucket.org/account/jscoreos/avatar/32/" + }, + "html": { + "href": "https://bitbucket.org/jscoreos/" + }, + "self": { + "href": "https://api.bitbucket.org/2.0/users/jscoreos" + } + }, + "uuid": "{2fa27577-f361-45bb-999a-f4450c546b73}", + "type": "user", + "display_name": "Joseph Schorr", + "username": "jscoreos" + }, + "raw": "Joseph Schorr " + }, + "date": "2015-09-10T20:40:54+00:00", + "parents": [ + { + "links": { + "html": { + "href": "https://bitbucket.org/jscoreos/another-repo/commits/1784139225279a587e0afb151bed1f9ba3dd509e" + }, + "self": { + "href": "https://api.bitbucket.org/2.0/repositories/jscoreos/another-repo/commit/1784139225279a587e0afb151bed1f9ba3dd509e" + } + }, + "type": "commit", + "hash": "1784139225279a587e0afb151bed1f9ba3dd509e" + } + ], + "type": "commit", + "message": "Dockerfile edited online with Bitbucket", + "hash": "af64ae7188685f8424040b4735ad12941b980d75" + } + }, + "closed": false, + "created": false + } + ] + }, + "repository": { + "links": { + "avatar": { + "href": "https://bitbucket.org/jscoreos/another-repo/avatar/16/" + }, + "html": { + "href": "https://bitbucket.org/jscoreos/another-repo" + }, + "self": { + "href": "https://api.bitbucket.org/2.0/repositories/jscoreos/another-repo" + } + }, + "full_name": "jscoreos/another-repo", + "uuid": "{b3459203-3e58-497b-8059-ad087b6b01de}", + "type": "repository", + "is_private": true, + "name": "Another Repo", + "owner": { + "links": { + "avatar": { + "href": "https://bitbucket.org/account/jscoreos/avatar/32/" + }, + "html": { + "href": "https://bitbucket.org/jscoreos/" + }, + "self": { + "href": "https://api.bitbucket.org/2.0/users/jscoreos" + } + }, + "uuid": "{2fa27577-f361-45bb-999a-f4450c546b73}", + "type": "user", + "display_name": "Joseph Schorr", + "username": "jscoreos" + }, + "scm": "git" + }, + "actor": { + "links": { + "avatar": { + "href": "https://bitbucket.org/account/jscoreos/avatar/32/" + }, + "html": { + "href": "https://bitbucket.org/jscoreos/" + }, + "self": { + "href": "https://api.bitbucket.org/2.0/users/jscoreos" + } + }, + "uuid": "{2fa27577-f361-45bb-999a-f4450c546b73}", + "type": "user", + "display_name": "Joseph Schorr", + "username": "jscoreos" + } +} \ No newline at end of file diff --git a/test/triggerjson/github_webhook.json b/test/triggerjson/github_webhook.json new file mode 100644 index 000000000..8d17f969c --- /dev/null +++ b/test/triggerjson/github_webhook.json @@ -0,0 +1,153 @@ +{ + "ref": "refs/heads/master", + "before": "9ea43cab474709d4a61afb7e3340de1ffc405b41", + "after": "410f4cdf8ff09b87f245b13845e8497f90b90a4c", + "created": false, + "deleted": false, + "forced": false, + "base_ref": null, + "compare": "https://github.com/josephschorr/anothertest/compare/9ea43cab4747...410f4cdf8ff0", + "commits": [ + { + "id": "410f4cdf8ff09b87f245b13845e8497f90b90a4c", + "distinct": true, + "message": "Update Dockerfile", + "timestamp": "2015-09-11T14:26:16-04:00", + "url": "https://github.com/josephschorr/anothertest/commit/410f4cdf8ff09b87f245b13845e8497f90b90a4c", + "author": { + "name": "josephschorr", 
+ "email": "josephschorr@users.noreply.github.com", + "username": "josephschorr" + }, + "committer": { + "name": "josephschorr", + "email": "josephschorr@users.noreply.github.com", + "username": "josephschorr" + }, + "added": [], + "removed": [], + "modified": [ + "Dockerfile" + ] + } + ], + "head_commit": { + "id": "410f4cdf8ff09b87f245b13845e8497f90b90a4c", + "distinct": true, + "message": "Update Dockerfile", + "timestamp": "2015-09-11T14:26:16-04:00", + "url": "https://github.com/josephschorr/anothertest/commit/410f4cdf8ff09b87f245b13845e8497f90b90a4c", + "author": { + "name": "josephschorr", + "email": "josephschorr@users.noreply.github.com", + "username": "josephschorr" + }, + "committer": { + "name": "josephschorr", + "email": "josephschorr@users.noreply.github.com", + "username": "josephschorr" + }, + "added": [], + "removed": [], + "modified": [ + "Dockerfile" + ] + }, + "repository": { + "id": 34876107, + "name": "anothertest", + "full_name": "josephschorr/anothertest", + "owner": { + "name": "josephschorr", + "email": "josephschorr@users.noreply.github.com" + }, + "private": false, + "html_url": "https://github.com/josephschorr/anothertest", + "description": "", + "fork": false, + "url": "https://github.com/josephschorr/anothertest", + "forks_url": "https://api.github.com/repos/josephschorr/anothertest/forks", + "keys_url": "https://api.github.com/repos/josephschorr/anothertest/keys{/key_id}", + "collaborators_url": "https://api.github.com/repos/josephschorr/anothertest/collaborators{/collaborator}", + "teams_url": "https://api.github.com/repos/josephschorr/anothertest/teams", + "hooks_url": "https://api.github.com/repos/josephschorr/anothertest/hooks", + "issue_events_url": "https://api.github.com/repos/josephschorr/anothertest/issues/events{/number}", + "events_url": "https://api.github.com/repos/josephschorr/anothertest/events", + "assignees_url": "https://api.github.com/repos/josephschorr/anothertest/assignees{/user}", + "branches_url": "https://api.github.com/repos/josephschorr/anothertest/branches{/branch}", + "tags_url": "https://api.github.com/repos/josephschorr/anothertest/tags", + "blobs_url": "https://api.github.com/repos/josephschorr/anothertest/git/blobs{/sha}", + "git_tags_url": "https://api.github.com/repos/josephschorr/anothertest/git/tags{/sha}", + "git_refs_url": "https://api.github.com/repos/josephschorr/anothertest/git/refs{/sha}", + "trees_url": "https://api.github.com/repos/josephschorr/anothertest/git/trees{/sha}", + "statuses_url": "https://api.github.com/repos/josephschorr/anothertest/statuses/{sha}", + "languages_url": "https://api.github.com/repos/josephschorr/anothertest/languages", + "stargazers_url": "https://api.github.com/repos/josephschorr/anothertest/stargazers", + "contributors_url": "https://api.github.com/repos/josephschorr/anothertest/contributors", + "subscribers_url": "https://api.github.com/repos/josephschorr/anothertest/subscribers", + "subscription_url": "https://api.github.com/repos/josephschorr/anothertest/subscription", + "commits_url": "https://api.github.com/repos/josephschorr/anothertest/commits{/sha}", + "git_commits_url": "https://api.github.com/repos/josephschorr/anothertest/git/commits{/sha}", + "comments_url": "https://api.github.com/repos/josephschorr/anothertest/comments{/number}", + "issue_comment_url": "https://api.github.com/repos/josephschorr/anothertest/issues/comments{/number}", + "contents_url": "https://api.github.com/repos/josephschorr/anothertest/contents/{+path}", + "compare_url": 
"https://api.github.com/repos/josephschorr/anothertest/compare/{base}...{head}", + "merges_url": "https://api.github.com/repos/josephschorr/anothertest/merges", + "archive_url": "https://api.github.com/repos/josephschorr/anothertest/{archive_format}{/ref}", + "downloads_url": "https://api.github.com/repos/josephschorr/anothertest/downloads", + "issues_url": "https://api.github.com/repos/josephschorr/anothertest/issues{/number}", + "pulls_url": "https://api.github.com/repos/josephschorr/anothertest/pulls{/number}", + "milestones_url": "https://api.github.com/repos/josephschorr/anothertest/milestones{/number}", + "notifications_url": "https://api.github.com/repos/josephschorr/anothertest/notifications{?since,all,participating}", + "labels_url": "https://api.github.com/repos/josephschorr/anothertest/labels{/name}", + "releases_url": "https://api.github.com/repos/josephschorr/anothertest/releases{/id}", + "created_at": 1430426945, + "updated_at": "2015-04-30T20:49:05Z", + "pushed_at": 1441995976, + "git_url": "git://github.com/josephschorr/anothertest.git", + "ssh_url": "git@github.com:josephschorr/anothertest.git", + "clone_url": "https://github.com/josephschorr/anothertest.git", + "svn_url": "https://github.com/josephschorr/anothertest", + "homepage": null, + "size": 144, + "stargazers_count": 0, + "watchers_count": 0, + "language": null, + "has_issues": true, + "has_downloads": true, + "has_wiki": true, + "has_pages": false, + "forks_count": 0, + "mirror_url": null, + "open_issues_count": 0, + "forks": 0, + "open_issues": 0, + "watchers": 0, + "default_branch": "master", + "stargazers": 0, + "master_branch": "master" + }, + "pusher": { + "name": "josephschorr", + "email": "josephschorr@users.noreply.github.com" + }, + "sender": { + "login": "josephschorr", + "id": 4073002, + "avatar_url": "https://avatars.githubusercontent.com/u/4073002?v=3", + "gravatar_id": "", + "url": "https://api.github.com/users/josephschorr", + "html_url": "https://github.com/josephschorr", + "followers_url": "https://api.github.com/users/josephschorr/followers", + "following_url": "https://api.github.com/users/josephschorr/following{/other_user}", + "gists_url": "https://api.github.com/users/josephschorr/gists{/gist_id}", + "starred_url": "https://api.github.com/users/josephschorr/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/josephschorr/subscriptions", + "organizations_url": "https://api.github.com/users/josephschorr/orgs", + "repos_url": "https://api.github.com/users/josephschorr/repos", + "events_url": "https://api.github.com/users/josephschorr/events{/privacy}", + "received_events_url": "https://api.github.com/users/josephschorr/received_events", + "type": "User", + "site_admin": false + } +} \ No newline at end of file diff --git a/test/triggerjson/gitlab_webhook.json b/test/triggerjson/gitlab_webhook.json new file mode 100644 index 000000000..81c18f606 --- /dev/null +++ b/test/triggerjson/gitlab_webhook.json @@ -0,0 +1,54 @@ +{ + "object_kind": "push", + "before": "11fcaca195e8b17ca7e3dc47d9608d5b6b892f45", + "after": "fb88379ee45de28a0a4590fddcbd8eff8b36026e", + "ref": "refs/heads/master", + "checkout_sha": "fb88379ee45de28a0a4590fddcbd8eff8b36026e", + "message": null, + "user_id": 95973, + "user_name": "Jimmy Zelinskie", + "user_email": "jimmyzelinskie@gmail.com", + "project_id": 406414, + "repository": { + "name": "www-gitlab-com", + "url": "git@gitlab.com:jzelinskie/www-gitlab-com.git", + "description": "", + "homepage": "https://gitlab.com/jzelinskie/www-gitlab-com", + 
"git_http_url": "https://gitlab.com/jzelinskie/www-gitlab-com.git", + "git_ssh_url": "git@gitlab.com:jzelinskie/www-gitlab-com.git", + "visibility_level": 20 + }, + "commits": [ + { + "id": "fb88379ee45de28a0a4590fddcbd8eff8b36026e", + "message": "Fix link\n", + "timestamp": "2015-08-13T19:33:18+00:00", + "url": "https://gitlab.com/jzelinskie/www-gitlab-com/commit/fb88379ee45de28a0a4590fddcbd8eff8b36026e", + "author": { + "name": "Sytse Sijbrandij", + "email": "sytse@gitlab.com" + } + }, + { + "id": "4ca166bc0b511f21fa331873f260f1a7cb38d723", + "message": "Merge branch 'git-lfs' into 'master'\n\nGit lfs\n\n@JobV @dzaporozhets @DouweM please review the tone of this\n\nSee merge request !899\n", + "timestamp": "2015-08-13T15:52:15+00:00", + "url": "https://gitlab.com/jzelinskie/www-gitlab-com/commit/4ca166bc0b511f21fa331873f260f1a7cb38d723", + "author": { + "name": "Sytse Sijbrandij", + "email": "sytse@gitlab.com" + } + }, + { + "id": "11fcaca195e8b17ca7e3dc47d9608d5b6b892f45", + "message": "Merge branch 'release-7-3-5' into 'master'\n\n7-13-5 Release post.\n\nSee merge request !900\n", + "timestamp": "2015-08-13T09:31:47+00:00", + "url": "https://gitlab.com/jzelinskie/www-gitlab-com/commit/11fcaca195e8b17ca7e3dc47d9608d5b6b892f45", + "author": { + "name": "Valery Sizov", + "email": "valery@gitlab.com" + } + } + ], + "total_commits_count": 3 +} \ No newline at end of file diff --git a/util/dict_wrappers.py b/util/dict_wrappers.py new file mode 100644 index 000000000..d017a9573 --- /dev/null +++ b/util/dict_wrappers.py @@ -0,0 +1,79 @@ +import json +from jsonpath_rw import parse + +class SafeDictSetter(object): + """ Specialized write-only dictionary wrapper class that allows for setting + nested keys via a path syntax. + + Example: + sds = SafeDictSetter() + sds['foo.bar.baz'] = 'hello' # Sets 'foo' = {'bar': {'baz': 'hello'}} + sds['somekey'] = None # Does not set the key since the value is None + """ + def __init__(self, initial_object=None): + self._object = initial_object or {} + + def __setitem__(self, path, value): + self.set(path, value) + + def set(self, path, value, allow_none=False): + """ Sets the value of the given path to the given value. """ + if value is None and not allow_none: + return + + pieces = path.split('.') + current = self._object + + for piece in pieces[:len(pieces)-1]: + current_obj = current.get(piece, {}) + if not isinstance(current_obj, dict): + raise Exception('Key %s is a non-object value: %s' % (piece, current_obj)) + + current[piece] = current_obj + current = current_obj + + current[pieces[-1]] = value + + def dict_value(self): + """ Returns the dict value built. """ + return self._object + + def json_value(self): + """ Returns the JSON string value of the dictionary built. """ + return json.dumps(self._object) + + +class JSONPathDict(object): + """ Specialized read-only dictionary wrapper class that uses the jsonpath_rw library + to access keys via an X-Path-like syntax. + + Example: + pd = JSONPathDict({'hello': {'hi': 'there'}}) + pd['hello.hi'] # Returns 'there' + """ + def __init__(self, dict_value): + """ Init the helper with the JSON object. + """ + self._object = dict_value + + def __getitem__(self, path): + def raise_exception(): + raise KeyError('Unknown path: %s' % path) + + return self.get(path, not_found_handler=raise_exception) + + def get(self, path, not_found_handler=None): + """ Returns the value found at the given path. Path is a json-path expression. 
""" + jsonpath_expr = parse(path) + matches = jsonpath_expr.find(self._object) + if not matches: + return not_found_handler() if not_found_handler else None + + match = matches[0].value + if not match: + return not_found_handler() if not_found_handler else None + + if isinstance(match, dict): + return JSONPathDict(match) + + return match diff --git a/util/migrate/migratebitbucketservices.py b/util/migrate/migratebitbucketservices.py index c342654e7..f20f98edd 100644 --- a/util/migrate/migratebitbucketservices.py +++ b/util/migrate/migratebitbucketservices.py @@ -5,7 +5,7 @@ from app import app from data.database import configure, BaseModel, uuid_generator from peewee import * from bitbucket import BitBucket -from endpoints.trigger import BitbucketBuildTrigger +from buildtrigger.bitbuckethandler import BitbucketBuildTrigger configure(app.config)