import logging
import io
import os.path
import tarfile
import base64
import re
import json

from github import Github, UnknownObjectException, GithubException
from bitbucket import BitBucket
from tempfile import SpooledTemporaryFile
from jsonschema import validate

from data import model
from app import app, userfiles as user_files, github_trigger, get_app_url
from util.tarfileappender import TarfileAppender
from util.ssh import generate_ssh_keypair


client = app.config['HTTPCLIENT']

logger = logging.getLogger(__name__)

TARBALL_MIME = 'application/gzip'
CHUNK_SIZE = 512 * 1024


def should_skip_commit(message):
  return '[skip build]' in message or '[build skip]' in message
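# Example (illustrative, not in the original source):
#   should_skip_commit('Bump version [skip build]') is True
#   should_skip_commit('Bump version') is False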


class InvalidPayloadException(Exception):
  pass


class BuildArchiveException(Exception):
  pass


class InvalidServiceException(Exception):
  pass


class TriggerActivationException(Exception):
  pass


class TriggerDeactivationException(Exception):
  pass


class TriggerStartException(Exception):
  pass


class ValidationRequestException(Exception):
  pass


class SkipRequestException(Exception):
  pass


class EmptyRepositoryException(Exception):
  pass


class RepositoryReadException(Exception):
  pass


class TriggerProviderException(Exception):
  pass


def raise_unsupported():
  raise io.UnsupportedOperation


def get_trigger_config(trigger):
  try:
    return json.loads(trigger.config)
  except:
    return {}


class BuildTriggerHandler(object):
  def __init__(self, trigger, override_config=None):
    self.trigger = trigger
    self.config = override_config or get_trigger_config(trigger)

  @property
  def auth_token(self):
    """ Returns the auth token for the trigger. """
    return self.trigger.auth_token

  def dockerfile_url(self):
    """
    Returns the URL at which the Dockerfile for the trigger is found, or None if
    none exists or it is not applicable.
    """
    raise NotImplementedError

  def load_dockerfile_contents(self):
    """
    Loads the Dockerfile found for the trigger's config and returns its contents,
    or None if none could be found/loaded.
    """
    raise NotImplementedError

  def list_build_sources(self):
    """
    Take the auth information for the specific trigger type and load the
    list of build sources (repositories).
    """
    raise NotImplementedError

  def list_build_subdirs(self):
    """
    Take the auth information and the specified config so far and list all of
    the possible subdirs containing Dockerfiles.
    """
    raise NotImplementedError

  def handle_trigger_request(self, request):
    """
    Transform the incoming request data into a set of actions. Returns a tuple
    of userfiles resource id, docker tags, build name, and resource subdir.
    """
    raise NotImplementedError

  def is_active(self):
    """
    Returns True if the current build trigger is active. Inactive means further
    setup is needed.
    """
    raise NotImplementedError

  def activate(self, standard_webhook_url):
    """
    Activates the trigger for the service, with the given new configuration.
    Returns new public and private config that should be stored if successful.
    """
    raise NotImplementedError

  def deactivate(self):
    """
    Deactivates the trigger for the service, removing any hooks installed in
    the remote service. Returns the new config that should be stored if this
    trigger is going to be re-activated.
    """
    raise NotImplementedError

  def manual_start(self, run_parameters=None):
    """
    Manually creates a repository build for this trigger.
    """
    raise NotImplementedError

  def list_field_values(self, field_name):
    """
    Lists all values for the given custom trigger field. For example, a trigger might have a
    field named "branches", and this method would return all branches.
    """
    raise NotImplementedError

  @classmethod
  def service_name(cls):
    """
    Particular service implemented by subclasses.
    """
    raise NotImplementedError

  @classmethod
  def get_handler(cls, trigger, override_config=None):
    for subc in cls.__subclasses__():
      if subc.service_name() == trigger.service.name:
        return subc(trigger, override_config)

    raise InvalidServiceException('Unable to find service: %s' % trigger.service.name)

  def put_config_key(self, key, value):
    """ Updates a config key in the trigger, saving it to the DB. """
    self.config[key] = value
    model.update_build_trigger(self.trigger, self.config)

  def set_auth_token(self, auth_token):
    """ Sets the auth token for the trigger, saving it to the DB. """
    model.update_build_trigger(self.trigger, self.config, auth_token=auth_token)
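

# Illustrative usage (not part of the original module): a caller such as a web
# endpoint would typically resolve and drive a handler along these lines, where
# `trigger` is a build trigger row loaded by the caller and `webhook_url` is
# whatever callback URL that endpoint exposes:
#
#   handler = BuildTriggerHandler.get_handler(trigger)
#   if not handler.is_active():
#     public_config, private_config = handler.activate(webhook_url)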


class BitbucketBuildTrigger(BuildTriggerHandler):
  """
  BuildTrigger for Bitbucket.
  """
  @classmethod
  def service_name(cls):
    return 'bitbucket'

  def _get_client(self):
    key = app.config.get('BITBUCKET_TRIGGER_CONFIG', {}).get('CONSUMER_KEY', '')
    secret = app.config.get('BITBUCKET_TRIGGER_CONFIG', {}).get('CONSUMER_SECRET', '')

    trigger_uuid = self.trigger.uuid
    callback_url = '%s/oauth1/bitbucket/callback/trigger/%s' % (get_app_url(), trigger_uuid)

    return BitBucket(key, secret, callback_url)

  def _get_authorized_client(self):
    base_client = self._get_client()
    auth_token = self.auth_token or 'invalid:invalid'
    (access_token, access_token_secret) = auth_token.split(':')
    return base_client.get_authorized_client(access_token, access_token_secret)

  def _get_repository_client(self):
    source = self.config['build_source']
    (namespace, name) = source.split('/')
    bitbucket_client = self._get_authorized_client()
    return bitbucket_client.for_namespace(namespace).repositories().get(name)

  def get_oauth_url(self):
    bitbucket_client = self._get_client()
    (result, data, err_msg) = bitbucket_client.get_authorization_url()
    if not result:
      raise RepositoryReadException(err_msg)

    return data

  def exchange_verifier(self, verifier):
    bitbucket_client = self._get_client()
    access_token = self.config.get('access_token', '')
    access_token_secret = self.auth_token

    # Exchange the verifier for a new access token.
    (result, data, _) = bitbucket_client.verify_token(access_token, access_token_secret, verifier)
    if not result:
      return False

    # Save the updated access token and secret.
    self.set_auth_token(data[0] + ':' + data[1])

    # Retrieve the current authorized user's information and store the username in the config.
    authorized_client = self._get_authorized_client()
    (result, data, _) = authorized_client.get_current_user()
    if not result:
      return False

    username = data['user']['username']
    self.put_config_key('username', username)
    return True

  def is_active(self):
    return 'hook_id' in self.config

  def activate(self, standard_webhook_url):
    config = self.config

    # Add a deploy key to the repository.
    public_key, private_key = generate_ssh_keypair()
    config['credentials'] = [
      {
        'name': 'SSH Public Key',
        'value': public_key,
      },
    ]

    repository = self._get_repository_client()
    (result, data, err_msg) = repository.deploykeys().create(
      app.config['REGISTRY_TITLE'] + ' webhook key', public_key)

    if not result:
      msg = 'Unable to add deploy key to repository: %s' % err_msg
      raise TriggerActivationException(msg)

    config['deploy_key_id'] = data['pk']

    # Add a webhook callback.
    (result, data, err_msg) = repository.services().create('POST', URL=standard_webhook_url)
    if not result:
      msg = 'Unable to add webhook to repository: %s' % err_msg
      raise TriggerActivationException(msg)

    config['hook_id'] = data['id']
    return config, {'private_key': private_key}

  def deactivate(self):
    config = self.config
    repository = self._get_repository_client()

    # Remove the webhook link.
    (result, _, err_msg) = repository.services().delete(config['hook_id'])
    if not result:
      msg = 'Unable to remove webhook from repository: %s' % err_msg
      raise TriggerDeactivationException(msg)

    # Remove the public key.
    (result, _, err_msg) = repository.deploykeys().delete(config['deploy_key_id'])
    if not result:
      msg = 'Unable to remove deploy key from repository: %s' % err_msg
      raise TriggerDeactivationException(msg)

    config.pop('hook_id', None)
    config.pop('deploy_key_id', None)
    return config

  def list_build_sources(self):
    bitbucket_client = self._get_authorized_client()
    (result, data, err_msg) = bitbucket_client.get_visible_repositories()
    if not result:
      raise RepositoryReadException('Could not read repository list: ' + err_msg)

    namespaces = {}
    for repo in data:
      if repo['scm'] != 'git':
        continue

      owner = repo['owner']
      if owner not in namespaces:
        namespaces[owner] = {
          'personal': owner == self.config.get('username'),
          'repos': [],
          'info': {
            'name': owner
          }
        }

      namespaces[owner]['repos'].append(owner + '/' + repo['slug'])

    return namespaces.values()

  def list_build_subdirs(self):
    repository = self._get_repository_client()
    (result, data, err_msg) = repository.get_path_contents('', revision='master')
    if not result:
      raise RepositoryReadException(err_msg)

    files = set([f['path'] for f in data['files']])
    if 'Dockerfile' in files:
      return ['/']

    return []

  def dockerfile_url(self):
    repository = self._get_repository_client()
    subdirectory = self.config.get('subdir', '')
    path = subdirectory + '/Dockerfile' if subdirectory else 'Dockerfile'

    master_branch = 'master'
    (result, data, _) = repository.get_main_branch()
    if result:
      master_branch = data['name']

    return 'https://bitbucket.org/%s/%s/src/%s/%s' % (repository.namespace,
                                                      repository.repository_name,
                                                      master_branch, path)

  def load_dockerfile_contents(self):
    repository = self._get_repository_client()
    subdirectory = self.config.get('subdir', '/')[1:]
    path = subdirectory + '/Dockerfile' if subdirectory else 'Dockerfile'

    (result, data, err_msg) = repository.get_raw_path_contents(path, revision='master')
    if not result:
      raise RepositoryReadException(err_msg)

    return data

  def list_field_values(self, field_name):
    source = self.config['build_source']
    (namespace, name) = source.split('/')

    bitbucket_client = self._get_authorized_client()
    repository = bitbucket_client.for_namespace(namespace).repositories().get(name)

    if field_name == 'refs':
      (result, data, _) = repository.get_branches_and_tags()
      if not result:
        return None

      branches = [b['name'] for b in data['branches']]
      tags = [t['name'] for t in data['tags']]

      return ([{'kind': 'branch', 'name': b} for b in branches] +
              [{'kind': 'tag', 'name': tag} for tag in tags])

    if field_name == 'tag_name':
      (result, data, _) = repository.get_tags()
      if not result:
        return None

      return data.keys()

    if field_name == 'branch_name':
      (result, data, _) = repository.get_branches()
      if not result:
        return None

      return data.keys()

    return None
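
  # Illustrative shape of list_field_values('refs') (example values, not from the
  # original source):
  #   [{'kind': 'branch', 'name': 'master'}, {'kind': 'tag', 'name': 'v1.0'}]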

  def handle_trigger_request(self, request):
    return

  def manual_start(self, run_parameters=None):
    config = self.config
    repository = self._get_repository_client()
    source = config['build_source']
    run_parameters = run_parameters or {}

    # Lookup the branch to build.
    master_branch = 'master'
    (result, data, _) = repository.get_main_branch()
    if result:
      master_branch = data['name']

    branch_name = run_parameters.get('branch_name') or master_branch

    # Find the SHA for the branch.
    # TODO
    return None


class GithubBuildTrigger(BuildTriggerHandler):
  """
  BuildTrigger for GitHub that uses the archive API and buildpacks.
  """
  def _get_client(self):
    return Github(self.auth_token,
                  base_url=github_trigger.api_endpoint(),
                  client_id=github_trigger.client_id(),
                  client_secret=github_trigger.client_secret())

  @classmethod
  def service_name(cls):
    return 'github'

  def is_active(self):
    return 'hook_id' in self.config

  def activate(self, standard_webhook_url):
    config = self.config
    new_build_source = config['build_source']
    gh_client = self._get_client()

    # Find the GitHub repository.
    try:
      gh_repo = gh_client.get_repo(new_build_source)
    except UnknownObjectException:
      msg = 'Unable to find GitHub repository for source: %s' % new_build_source
      raise TriggerActivationException(msg)

    # Add a deploy key to the GitHub repository.
    public_key, private_key = generate_ssh_keypair()
    config['credentials'] = [
      {
        'name': 'SSH Public Key',
        'value': public_key,
      },
    ]
    try:
      deploy_key = gh_repo.create_key('%s Builder' % app.config['REGISTRY_TITLE'],
                                      public_key)
      config['deploy_key_id'] = deploy_key.id
    except GithubException:
      msg = 'Unable to add deploy key to repository: %s' % new_build_source
      raise TriggerActivationException(msg)

    # Add the webhook to the GitHub repository.
    webhook_config = {
      'url': standard_webhook_url,
      'content_type': 'json',
    }
    try:
      hook = gh_repo.create_hook('web', webhook_config)
      config['hook_id'] = hook.id
      config['master_branch'] = gh_repo.default_branch
    except GithubException:
      msg = 'Unable to create webhook on repository: %s' % new_build_source
      raise TriggerActivationException(msg)

    return config, {'private_key': private_key}

  def deactivate(self):
    config = self.config
    gh_client = self._get_client()

    # Find the GitHub repository.
    try:
      repo = gh_client.get_repo(config['build_source'])
    except UnknownObjectException:
      msg = 'Unable to find GitHub repository for source: %s' % config['build_source']
      raise TriggerDeactivationException(msg)

    # If the trigger uses a deploy key, remove it.
    try:
      if config['deploy_key_id']:
        deploy_key = repo.get_key(config['deploy_key_id'])
        deploy_key.delete()
    except KeyError:
      # There was no config['deploy_key_id'], thus this is an old trigger without a deploy key.
      pass
    except GithubException:
      msg = 'Unable to remove deploy key: %s' % config['deploy_key_id']
      raise TriggerDeactivationException(msg)

    # Remove the webhook.
    try:
      hook = repo.get_hook(config['hook_id'])
      hook.delete()
    except GithubException:
      msg = 'Unable to remove hook: %s' % config['hook_id']
      raise TriggerDeactivationException(msg)

    config.pop('hook_id', None)
    self.config = config
    return config

  def list_build_sources(self):
    gh_client = self._get_client()
    usr = gh_client.get_user()

    personal = {
      'personal': True,
      'repos': [repo.full_name for repo in usr.get_repos()],
      'info': {
        'name': usr.login,
        'avatar_url': usr.avatar_url,
      }
    }

    repos_by_org = [personal]

    for org in usr.get_orgs():
      repo_list = []
      for repo in org.get_repos(type='member'):
        repo_list.append(repo.full_name)

      repos_by_org.append({
        'personal': False,
        'repos': repo_list,
        'info': {
          'name': org.name or org.login,
          'avatar_url': org.avatar_url
        }
      })

    return repos_by_org

  @staticmethod
  def matches_ref(ref, regex):
    match_string = ref.split('/', 1)[1]
    if not regex:
      return False

    m = regex.match(match_string)
    if not m:
      return False

    return len(m.group(0)) == len(match_string)
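
  # Example (illustrative, not in the original source):
  #   matches_ref('refs/heads/master', re.compile('heads/master')) is True, since the
  #   regex must consume the entire 'heads/master' portion of the ref; a regex that only
  #   matches a prefix, such as re.compile('heads'), returns False.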

  def list_build_subdirs(self):
    config = self.config
    gh_client = self._get_client()
    source = config['build_source']

    try:
      repo = gh_client.get_repo(source)

      # Find the first matching branch.
      branches = None
      if 'branchtag_regex' in config:
        try:
          regex = re.compile(config['branchtag_regex'])
          branches = [branch.name for branch in repo.get_branches()
                      if GithubBuildTrigger.matches_ref('refs/heads/' + branch.name, regex)]
        except:
          pass

      branches = branches or [repo.default_branch or 'master']
      default_commit = repo.get_branch(branches[0]).commit
      commit_tree = repo.get_git_tree(default_commit.sha, recursive=True)

      return [os.path.dirname(elem.path) for elem in commit_tree.tree
              if (elem.type == u'blob' and
                  os.path.basename(elem.path) == u'Dockerfile')]
    except GithubException as ge:
      message = ge.data.get('message', 'Unable to list contents of repository: %s' % source)
      if message == 'Branch not found':
        raise EmptyRepositoryException()

      raise RepositoryReadException(message)

  def dockerfile_url(self):
    config = self.config
    source = config['build_source']
    subdirectory = config.get('subdir', '')
    path = subdirectory + '/Dockerfile' if subdirectory else 'Dockerfile'

    gh_client = self._get_client()
    try:
      repo = gh_client.get_repo(source)
      master_branch = repo.default_branch or 'master'
      return 'https://github.com/%s/blob/%s/%s' % (source, master_branch, path)
    except GithubException:
      logger.exception('Could not load repository for Dockerfile.')
      return None

  def load_dockerfile_contents(self):
    config = self.config
    gh_client = self._get_client()

    source = config['build_source']
    subdirectory = config.get('subdir', '')
    path = subdirectory + '/Dockerfile' if subdirectory else 'Dockerfile'

    try:
      repo = gh_client.get_repo(source)
      file_info = repo.get_file_contents(path)
      if file_info is None:
        return None

      content = file_info.content
      if file_info.encoding == 'base64':
        content = base64.b64decode(content)
      return content
    except GithubException as ge:
      message = ge.data.get('message', 'Unable to read Dockerfile: %s' % source)
      raise RepositoryReadException(message)

  @staticmethod
  def _build_commit_info(repo, commit_sha):
    try:
      commit = repo.get_commit(commit_sha)
    except GithubException:
      logger.exception('Could not load data for commit')
      return

    commit_info = {
      'url': commit.html_url,
      'message': commit.commit.message,
      'date': commit.last_modified
    }

    if commit.author:
      commit_info['author'] = {
        'username': commit.author.login,
        'avatar_url': commit.author.avatar_url,
        'url': commit.author.html_url
      }

    if commit.committer:
      commit_info['committer'] = {
        'username': commit.committer.login,
        'avatar_url': commit.committer.avatar_url,
        'url': commit.committer.html_url
      }

    return commit_info
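
  # Illustrative shape of the returned commit_info dict (the 'author' and 'committer'
  # keys are only present when GitHub reports them):
  #   {'url': ..., 'message': ..., 'date': ...,
  #    'author': {'username': ..., 'avatar_url': ..., 'url': ...},
  #    'committer': {'username': ..., 'avatar_url': ..., 'url': ...}}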

  @staticmethod
  def _prepare_tarball(repo, commit_sha):
    # Prepare the download and upload URLs.
    archive_link = repo.get_archive_link('tarball', commit_sha)
    download_archive = client.get(archive_link, stream=True)
    tarball_subdir = ''

    with SpooledTemporaryFile(CHUNK_SIZE) as tarball:
      for chunk in download_archive.iter_content(CHUNK_SIZE):
        tarball.write(chunk)

      # Seek to position 0 to make tarfile happy.
      tarball.seek(0)

      # Pull out the name of the subdir that GitHub generated.
      with tarfile.open(fileobj=tarball) as archive:
        tarball_subdir = archive.getnames()[0]

      # Seek to position 0 to make tarfile happy.
      tarball.seek(0)

      entries = {
        tarball_subdir + '/.git/HEAD': commit_sha,
        tarball_subdir + '/.git/objects/': None,
        tarball_subdir + '/.git/refs/': None
      }

      appender = TarfileAppender(tarball, entries).get_stream()
      dockerfile_id = user_files.store_file(appender, TARBALL_MIME)

    logger.debug('Successfully prepared job')

    return tarball_subdir, dockerfile_id
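
  # Note (added for clarity, not in the original source): the stored "buildpack" is
  # GitHub's archive tarball with stub .git/HEAD, .git/objects/ and .git/refs/ entries
  # appended, so downstream tooling can read the commit SHA without a full clone.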

  @staticmethod
  def _prepare_build(trigger, config, repo, commit_sha, build_name, ref, git_url):
    repo_subdir = config['subdir']
    joined_subdir = repo_subdir
    dockerfile_id = None

    if trigger.private_key is None:
      # If the trigger isn't using git, prepare the buildpack.
      tarball_subdir, dockerfile_id = GithubBuildTrigger._prepare_tarball(repo, commit_sha)
      logger.debug('Successfully prepared job')

      # Join provided subdir with the tarball subdir.
      joined_subdir = os.path.join(tarball_subdir, repo_subdir)
      logger.debug('Final subdir: %s', joined_subdir)

    # Compute the tag(s).
    branch = ref.split('/')[-1]
    tags = {branch}
    if branch == repo.default_branch:
      tags.add('latest')
    logger.debug('Pushing to tags: %s', tags)

    # Compute the metadata.
    metadata = {
      'commit_sha': commit_sha,
      'ref': ref,
      'default_branch': repo.default_branch,
      'git_url': git_url,
    }

    # Add the commit info.
    commit_info = GithubBuildTrigger._build_commit_info(repo, commit_sha)
    if commit_info is not None:
      metadata['commit_info'] = commit_info

    return dockerfile_id, list(tags), build_name, joined_subdir, metadata

  @staticmethod
  def get_display_name(sha):
    return sha[0:7]

  def handle_trigger_request(self, request):
    payload = request.get_json()
    if not payload or payload.get('head_commit') is None:
      raise SkipRequestException()

    if 'zen' in payload:
      raise ValidationRequestException()

    logger.debug('Payload %s', payload)
    ref = payload['ref']
    commit_sha = payload['head_commit']['id']
    commit_message = payload['head_commit'].get('message', '')
    git_url = payload['repository']['git_url']

    config = self.config
    if 'branchtag_regex' in config:
      try:
        regex = re.compile(config['branchtag_regex'])
      except:
        regex = re.compile('.*')

      if not GithubBuildTrigger.matches_ref(ref, regex):
        raise SkipRequestException()

    if should_skip_commit(commit_message):
      raise SkipRequestException()

    short_sha = GithubBuildTrigger.get_display_name(commit_sha)

    gh_client = self._get_client()

    repo_full_name = '%s/%s' % (payload['repository']['owner']['name'],
                                payload['repository']['name'])
    repo = gh_client.get_repo(repo_full_name)
    logger.debug('Github repo: %s', repo)

    return GithubBuildTrigger._prepare_build(self.trigger, config, repo, commit_sha, short_sha,
                                             ref, git_url)

  def manual_start(self, run_parameters=None):
    config = self.config
    try:
      source = config['build_source']
      run_parameters = run_parameters or {}

      gh_client = self._get_client()
      repo = gh_client.get_repo(source)
      branch_name = run_parameters.get('branch_name') or repo.default_branch
      branch = repo.get_branch(branch_name)
      branch_sha = branch.commit.sha
      short_sha = GithubBuildTrigger.get_display_name(branch_sha)
      ref = 'refs/heads/%s' % (branch_name)
      git_url = repo.git_url

      return self._prepare_build(self.trigger, config, repo, branch_sha, short_sha, ref, git_url)
    except GithubException as ghe:
      raise TriggerStartException(ghe.data['message'])

  def list_field_values(self, field_name):
    if field_name == 'refs':
      branches = self.list_field_values('branch_name')
      tags = self.list_field_values('tag_name')

      return ([{'kind': 'branch', 'name': b} for b in branches] +
              [{'kind': 'tag', 'name': tag} for tag in tags])

    config = self.config
    if field_name == 'tag_name':
      gh_client = self._get_client()
      source = config['build_source']
      repo = gh_client.get_repo(source)
      return [tag.name for tag in repo.get_tags()]

    if field_name == 'branch_name':
      gh_client = self._get_client()
      source = config['build_source']
      repo = gh_client.get_repo(source)
      branches = [branch.name for branch in repo.get_branches()]

      if repo.default_branch not in branches:
        branches.insert(0, repo.default_branch)

      if branches[0] != repo.default_branch:
        branches.remove(repo.default_branch)
        branches.insert(0, repo.default_branch)

      return branches

    return None


class CustomBuildTrigger(BuildTriggerHandler):
  payload_schema = {
    'type': 'object',
    'properties': {
      'commit_sha': {
        'type': 'string',
        'description': 'first 7 characters of the SHA-1 identifier for a git commit',
        'pattern': '^([A-Fa-f0-9]{7})$',
      },
      'ref': {
        'type': 'string',
        'description': 'git reference for a git commit',
        'pattern': '^refs\/(heads|tags|remotes)\/(.+)$',
      },
      'default_branch': {
        'type': 'string',
        'description': 'default branch of the git repository',
      },
      'commit_info': {
        'type': 'object',
        'description': 'metadata about a git commit',
        'properties': {
          'url': {
            'type': 'string',
            'description': 'URL to view a git commit',
          },
          'message': {
            'type': 'string',
            'description': 'git commit message',
          },
          'date': {
            'type': 'string',
            'description': 'timestamp for a git commit'
          },
          'author': {
            'type': 'object',
            'description': 'metadata about the author of a git commit',
            'properties': {
              'username': {
                'type': 'string',
                'description': 'username of the author',
              },
              'url': {
                'type': 'string',
                'description': 'URL to view the profile of the author',
              },
              'avatar_url': {
                'type': 'string',
                'description': 'URL to view the avatar of the author',
              },
            },
            'required': ['username', 'url', 'avatar_url'],
          },
          'committer': {
            'type': 'object',
            'description': 'metadata about the committer of a git commit',
            'properties': {
              'username': {
                'type': 'string',
                'description': 'username of the committer',
              },
              'url': {
                'type': 'string',
                'description': 'URL to view the profile of the committer',
              },
              'avatar_url': {
                'type': 'string',
                'description': 'URL to view the avatar of the committer',
              },
            },
            'required': ['username', 'url', 'avatar_url'],
          },
        },
        'required': ['url', 'message', 'date'],
      },
    },
    'required': ['commit_sha', 'ref', 'default_branch'],
  }
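
  # Illustrative payload accepted by the schema above (example values only; extra
  # keys are permitted by the schema):
  #   {
  #     "commit_sha": "1c002dd",
  #     "ref": "refs/heads/master",
  #     "default_branch": "master",
  #     "commit_info": {"url": "...", "message": "...", "date": "..."}
  #   }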

  @classmethod
  def service_name(cls):
    return 'custom-git'

  def is_active(self):
    return 'credentials' in self.config

  def _metadata_from_payload(self, payload):
    try:
      metadata = json.loads(payload)
      validate(metadata, self.payload_schema)
    except:
      raise InvalidPayloadException()
    return metadata

  def handle_trigger_request(self, request):
    payload = request.data
    if not payload:
      raise SkipRequestException()

    logger.debug('Payload %s', payload)

    metadata = self._metadata_from_payload(payload)

    # The build source is the canonical git URL used to clone.
    config = self.config
    metadata['git_url'] = config['build_source']

    branch = metadata['ref'].split('/')[-1]
    tags = {branch}

    build_name = metadata['commit_sha'][:6]
    dockerfile_id = None

    return dockerfile_id, list(tags), build_name, config['subdir'], metadata

  def activate(self, standard_webhook_url):
    config = self.config
    public_key, private_key = generate_ssh_keypair()
    config['credentials'] = [
      {
        'name': 'SSH Public Key',
        'value': public_key,
      },
      {
        'name': 'Webhook Endpoint URL',
        'value': standard_webhook_url,
      },
    ]
    self.config = config
    return config, {'private_key': private_key}

  def deactivate(self):
    config = self.config
    config.pop('credentials', None)
    self.config = config
    return config

  def manual_start(self, run_parameters=None):
    run_parameters = run_parameters or {}

    # commit_sha is the only required parameter.
    if 'commit_sha' not in run_parameters:
      raise TriggerStartException('missing required parameter')

    config = self.config

    dockerfile_id = None
    tags = {run_parameters['commit_sha']}
    build_name = run_parameters['commit_sha']

    metadata = {
      'commit_sha': run_parameters['commit_sha'],
      'git_url': config['build_source'],
    }

    return dockerfile_id, list(tags), build_name, config['subdir'], metadata