import logging
import io
import os.path
import tarfile
import base64
import re
import json

from github import Github, UnknownObjectException, GithubException
from tempfile import SpooledTemporaryFile
from jsonschema import validate

from app import app, userfiles as user_files, github_trigger
from util.tarfileappender import TarfileAppender
from util.ssh import generate_ssh_keypair


client = app.config['HTTPCLIENT']

logger = logging.getLogger(__name__)

TARBALL_MIME = 'application/gzip'
CHUNK_SIZE = 512 * 1024


def should_skip_commit(message):
  return '[skip build]' in message or '[build skip]' in message
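# Illustrative sketch (hypothetical messages): should_skip_commit('Bump version [skip build]')
# and should_skip_commit('[build skip] WIP') both return True; an ordinary commit message
# without either marker returns False.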


class InvalidPayloadException(Exception):
  pass


class BuildArchiveException(Exception):
  pass


class InvalidServiceException(Exception):
  pass


class TriggerActivationException(Exception):
  pass


class TriggerDeactivationException(Exception):
  pass


class TriggerStartException(Exception):
  pass


class ValidationRequestException(Exception):
  pass


class SkipRequestException(Exception):
  pass


class EmptyRepositoryException(Exception):
  pass


class RepositoryReadException(Exception):
  pass


class BuildTrigger(object):
  def __init__(self):
    pass

  def dockerfile_url(self, auth_token, config):
    """
    Returns the URL at which the Dockerfile for the trigger is found, or None if none/not
    applicable.
    """
    raise NotImplementedError

  def load_dockerfile_contents(self, auth_token, config):
    """
    Loads the Dockerfile found for the trigger's config and returns it, or None if none could
    be found/loaded.
    """
    raise NotImplementedError

  def list_build_sources(self, auth_token):
    """
    Take the auth information for the specific trigger type and load the
    list of build sources (repositories).
    """
    raise NotImplementedError

  def list_build_subdirs(self, auth_token, config):
    """
    Take the auth information and the specified config so far and list all of
    the possible subdirs containing Dockerfiles.
    """
    raise NotImplementedError

  def handle_trigger_request(self, request, trigger):
    """
    Transform the incoming request data into a set of actions. Returns a tuple
    of userfiles resource id, docker tags, build name, and resource subdir.
    """
    raise NotImplementedError

  def is_active(self, config):
    """
    Returns True if the current build trigger is active. Inactive means further
    setup is needed.
    """
    raise NotImplementedError

  def activate(self, trigger_uuid, standard_webhook_url, auth_token, config):
    """
    Activates the trigger for the service, with the given new configuration.
    Returns new configuration that should be stored if successful.
    """
    raise NotImplementedError

  def deactivate(self, auth_token, config):
    """
    Deactivates the trigger for the service, removing any hooks installed in
    the remote service. Returns the new config that should be stored if this
    trigger is going to be re-activated.
    """
    raise NotImplementedError

  def manual_start(self, trigger, run_parameters=None):
    """
    Manually creates a repository build for this trigger.
    """
    raise NotImplementedError

  def list_field_values(self, auth_token, config, field_name):
    """
    Lists all values for the given custom trigger field. For example, a trigger might have a
    field named "branches", and this method would return all branches.
    """
    raise NotImplementedError

  @classmethod
  def service_name(cls):
    """
    Particular service implemented by subclasses.
    """
    raise NotImplementedError

  @classmethod
  def get_trigger_for_service(cls, service):
    for subc in cls.__subclasses__():
      if subc.service_name() == service:
        return subc()

    raise InvalidServiceException('Unable to find service: %s' % service)
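# Illustrative usage (values assume the subclasses defined below in this module):
#   BuildTrigger.get_trigger_for_service('github')      # -> GithubBuildTrigger instance
#   BuildTrigger.get_trigger_for_service('custom-git')  # -> CustomBuildTrigger instance
#   BuildTrigger.get_trigger_for_service('unknown')     # raises InvalidServiceException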


def raise_unsupported():
  raise io.UnsupportedOperation


def get_trigger_config(trigger):
  try:
    return json.loads(trigger.config)
  except:
    return {}
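# Illustrative sketch (hypothetical stored value): for a trigger whose config column holds
# '{"build_source": "someorg/somerepo", "subdir": ""}', this returns the parsed dict; for an
# empty or malformed value it falls back to {} rather than raising.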


class GithubBuildTrigger(BuildTrigger):
  """
  BuildTrigger for GitHub that uses the archive API and buildpacks.
  """
  @staticmethod
  def _get_client(auth_token):
    return Github(auth_token,
                  base_url=github_trigger.api_endpoint(),
                  client_id=github_trigger.client_id(),
                  client_secret=github_trigger.client_secret())

  @classmethod
  def service_name(cls):
    return 'github'

  def is_active(self, config):
    return 'hook_id' in config

  def activate(self, trigger_uuid, standard_webhook_url, auth_token, config):
    new_build_source = config['build_source']
    gh_client = self._get_client(auth_token)

    # Find the GitHub repository.
    try:
      gh_repo = gh_client.get_repo(new_build_source)
    except UnknownObjectException:
      msg = 'Unable to find GitHub repository for source: %s' % new_build_source
      raise TriggerActivationException(msg)

    # Add a deploy key to the GitHub repository.
    public_key, private_key = generate_ssh_keypair()
    config['credentials'] = [
      {
        'name': 'SSH Public Key',
        'value': public_key,
      },
    ]
    try:
      deploy_key = gh_repo.create_key('%s Builder' % app.config['REGISTRY_TITLE'],
                                      public_key)
      config['deploy_key_id'] = deploy_key.id
    except GithubException:
      msg = 'Unable to add deploy key to repository: %s' % new_build_source
      raise TriggerActivationException(msg)

    # Create a webhook config.
    webhook_config = {
      'url': standard_webhook_url,
      'content_type': 'json',
    }

    # Add the webhook to the GitHub repository.
    try:
      hook = gh_repo.create_hook('web', webhook_config)
      config['hook_id'] = hook.id
      config['master_branch'] = gh_repo.default_branch
    except GithubException:
      msg = 'Unable to create webhook on repository: %s' % new_build_source
      raise TriggerActivationException(msg)

    return config, private_key

  def deactivate(self, auth_token, config):
    gh_client = self._get_client(auth_token)

    # Find the GitHub repository.
    try:
      repo = gh_client.get_repo(config['build_source'])
    except UnknownObjectException:
      msg = 'Unable to find GitHub repository for source: %s' % config['build_source']
      raise TriggerDeactivationException(msg)

    # If the trigger uses a deploy key, remove it.
    try:
      if config['deploy_key_id']:
        deploy_key = repo.get_key(config['deploy_key_id'])
        deploy_key.delete()
    except KeyError:
      # There was no config['deploy_key_id'], thus this is an old trigger without a deploy key.
      pass
    except GithubException:
      msg = 'Unable to remove deploy key: %s' % config['deploy_key_id']
      raise TriggerDeactivationException(msg)

    # Remove the webhook.
    try:
      hook = repo.get_hook(config['hook_id'])
      hook.delete()
    except GithubException:
      msg = 'Unable to remove hook: %s' % config['hook_id']
      raise TriggerDeactivationException(msg)

    config.pop('hook_id', None)

    return config

  def list_build_sources(self, auth_token):
    gh_client = self._get_client(auth_token)
    usr = gh_client.get_user()

    personal = {
      'personal': True,
      'repos': [repo.full_name for repo in usr.get_repos()],
      'info': {
        'name': usr.login,
        'avatar_url': usr.avatar_url,
      }
    }

    repos_by_org = [personal]

    for org in usr.get_orgs():
      repo_list = []
      for repo in org.get_repos(type='member'):
        repo_list.append(repo.full_name)

      repos_by_org.append({
        'personal': False,
        'repos': repo_list,
        'info': {
          'name': org.name or org.login,
          'avatar_url': org.avatar_url
        }
      })

    return repos_by_org

  @staticmethod
  def matches_ref(ref, regex):
    match_string = ref.split('/', 1)[1]
    if not regex:
      return False

    m = regex.match(match_string)
    if not m:
      return False

    return len(m.group(0)) == len(match_string)
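  # Illustrative sketch (hypothetical pattern): with regex = re.compile('heads/master'),
  # matches_ref('refs/heads/master', regex) is True because the regex consumes the entire
  # 'heads/master' match string, while matches_ref('refs/heads/master-next', regex) is False
  # since the match does not cover the full string.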

  def list_build_subdirs(self, auth_token, config):
    gh_client = self._get_client(auth_token)
    source = config['build_source']

    try:
      repo = gh_client.get_repo(source)

      # Find the first matching branch.
      branches = None
      if 'branchtag_regex' in config:
        try:
          regex = re.compile(config['branchtag_regex'])
          branches = [branch.name for branch in repo.get_branches()
                      if GithubBuildTrigger.matches_ref('refs/heads/' + branch.name, regex)]
        except:
          pass

      branches = branches or [repo.default_branch or 'master']
      default_commit = repo.get_branch(branches[0]).commit
      commit_tree = repo.get_git_tree(default_commit.sha, recursive=True)

      return [os.path.dirname(elem.path) for elem in commit_tree.tree
              if (elem.type == u'blob' and
                  os.path.basename(elem.path) == u'Dockerfile')]
    except GithubException as ge:
      message = ge.data.get('message', 'Unable to list contents of repository: %s' % source)
      if message == 'Branch not found':
        raise EmptyRepositoryException()

      raise RepositoryReadException(message)

  def dockerfile_url(self, auth_token, config):
    source = config['build_source']
    subdirectory = config.get('subdir', '')
    path = subdirectory + '/Dockerfile' if subdirectory else 'Dockerfile'
    gh_client = self._get_client(auth_token)

    try:
      repo = gh_client.get_repo(source)
      master_branch = repo.default_branch or 'master'
      return 'https://github.com/%s/blob/%s/%s' % (source, master_branch, path)
    except GithubException:
      logger.exception('Could not load repository for Dockerfile.')
      return None

  def load_dockerfile_contents(self, auth_token, config):
    gh_client = self._get_client(auth_token)

    source = config['build_source']
    subdirectory = config.get('subdir', '')
    path = subdirectory + '/Dockerfile' if subdirectory else 'Dockerfile'

    try:
      repo = gh_client.get_repo(source)
      file_info = repo.get_file_contents(path)
      if file_info is None:
        return None

      content = file_info.content
      if file_info.encoding == 'base64':
        content = base64.b64decode(content)
      return content

    except GithubException as ge:
      message = ge.data.get('message', 'Unable to read Dockerfile: %s' % source)
      raise RepositoryReadException(message)

  @staticmethod
  def _build_commit_info(repo, commit_sha):
    try:
      commit = repo.get_commit(commit_sha)
    except GithubException:
      logger.exception('Could not load data for commit')
      return

    commit_info = {
      'url': commit.html_url,
      'message': commit.commit.message,
      'date': commit.last_modified
    }

    if commit.author:
      commit_info['author'] = {
        'username': commit.author.login,
        'avatar_url': commit.author.avatar_url,
        'url': commit.author.html_url
      }

    if commit.committer:
      commit_info['committer'] = {
        'username': commit.committer.login,
        'avatar_url': commit.committer.avatar_url,
        'url': commit.committer.html_url
      }

    return commit_info

  @staticmethod
  def _prepare_tarball(repo, commit_sha):
    # Prepare the download and upload URLs
    archive_link = repo.get_archive_link('tarball', commit_sha)
    download_archive = client.get(archive_link, stream=True)
    tarball_subdir = ''

    with SpooledTemporaryFile(CHUNK_SIZE) as tarball:
      for chunk in download_archive.iter_content(CHUNK_SIZE):
        tarball.write(chunk)

      # Seek to position 0 to make tarfile happy
      tarball.seek(0)

      # Pull out the name of the subdir that GitHub generated
      with tarfile.open(fileobj=tarball) as archive:
        tarball_subdir = archive.getnames()[0]

      # Seek to position 0 to make tarfile happy.
      tarball.seek(0)

      entries = {
        tarball_subdir + '/.git/HEAD': commit_sha,
        tarball_subdir + '/.git/objects/': None,
        tarball_subdir + '/.git/refs/': None
      }

      appender = TarfileAppender(tarball, entries).get_stream()
      dockerfile_id = user_files.store_file(appender, TARBALL_MIME)

    logger.debug('Successfully prepared job')

    return tarball_subdir, dockerfile_id

  @staticmethod
  def _prepare_build(trigger, config, repo, commit_sha, build_name, ref, git_url):
    repo_subdir = config['subdir']
    joined_subdir = repo_subdir
    dockerfile_id = None

    if trigger.private_key is None:
      # If the trigger isn't using git, prepare the buildpack.
      tarball_subdir, dockerfile_id = GithubBuildTrigger._prepare_tarball(repo, commit_sha)
      logger.debug('Successfully prepared job')

      # Join provided subdir with the tarball subdir.
      joined_subdir = os.path.join(tarball_subdir, repo_subdir)

    logger.debug('Final subdir: %s', joined_subdir)

    # compute the tag(s)
    branch = ref.split('/')[-1]
    tags = {branch}

    if branch == repo.default_branch:
      tags.add('latest')

    logger.debug('Pushing to tags: %s', tags)

    # compute the metadata
    metadata = {
      'commit_sha': commit_sha,
      'ref': ref,
      'default_branch': repo.default_branch,
      'git_url': git_url,
    }

    # add the commit info.
    commit_info = GithubBuildTrigger._build_commit_info(repo, commit_sha)
    if commit_info is not None:
      metadata['commit_info'] = commit_info

    return dockerfile_id, list(tags), build_name, joined_subdir, metadata

  @staticmethod
  def get_display_name(sha):
    return sha[0:7]

  def handle_trigger_request(self, request, trigger):
    payload = request.get_json()
    if not payload or payload.get('head_commit') is None:
      raise SkipRequestException()

    if 'zen' in payload:
      raise ValidationRequestException()

    logger.debug('Payload %s', payload)
    ref = payload['ref']
    commit_sha = payload['head_commit']['id']
    commit_message = payload['head_commit'].get('message', '')
    git_url = payload['repository']['git_url']

    config = get_trigger_config(trigger)
    if 'branchtag_regex' in config:
      try:
        regex = re.compile(config['branchtag_regex'])
      except:
        regex = re.compile('.*')

      if not GithubBuildTrigger.matches_ref(ref, regex):
        raise SkipRequestException()

    if should_skip_commit(commit_message):
      raise SkipRequestException()

    short_sha = GithubBuildTrigger.get_display_name(commit_sha)

    gh_client = self._get_client(trigger.auth_token)

    repo_full_name = '%s/%s' % (payload['repository']['owner']['name'],
                                payload['repository']['name'])
    repo = gh_client.get_repo(repo_full_name)

    logger.debug('Github repo: %s', repo)

    return GithubBuildTrigger._prepare_build(trigger, config, repo, commit_sha,
                                             short_sha, ref, git_url)

  def manual_start(self, trigger, run_parameters=None):
    config = get_trigger_config(trigger)
    try:
      source = config['build_source']
      run_parameters = run_parameters or {}

      gh_client = self._get_client(trigger.auth_token)
      repo = gh_client.get_repo(source)
      branch_name = run_parameters.get('branch_name') or repo.default_branch
      branch = repo.get_branch(branch_name)
      branch_sha = branch.commit.sha
      short_sha = GithubBuildTrigger.get_display_name(branch_sha)
      ref = 'refs/heads/%s' % (branch_name)
      git_url = repo.git_url

      return self._prepare_build(trigger, config, repo, branch_sha, short_sha, ref, git_url)
    except GithubException as ghe:
      raise TriggerStartException(ghe.data['message'])

  def list_field_values(self, auth_token, config, field_name):
    if field_name == 'refs':
      branches = self.list_field_values(auth_token, config, 'branch_name')
      tags = self.list_field_values(auth_token, config, 'tag_name')

      return ([{'kind': 'branch', 'name': b} for b in branches] +
              [{'kind': 'tag', 'name': tag} for tag in tags])

    if field_name == 'tag_name':
      gh_client = self._get_client(auth_token)
      source = config['build_source']
      repo = gh_client.get_repo(source)
      return [tag.name for tag in repo.get_tags()]

    if field_name == 'branch_name':
      gh_client = self._get_client(auth_token)
      source = config['build_source']
      repo = gh_client.get_repo(source)
      branches = [branch.name for branch in repo.get_branches()]

      if repo.default_branch not in branches:
        branches.insert(0, repo.default_branch)

      if branches[0] != repo.default_branch:
        branches.remove(repo.default_branch)
        branches.insert(0, repo.default_branch)

      return branches

    return None
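  # Illustrative result shapes (hypothetical repository data):
  #   list_field_values(token, config, 'branch_name') -> ['master', 'develop']
  #   list_field_values(token, config, 'refs')        -> [{'kind': 'branch', 'name': 'master'},
  #                                                       {'kind': 'tag', 'name': 'v1.0'}]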


class CustomBuildTrigger(BuildTrigger):
  payload_schema = {
    'type': 'object',
    'properties': {
      'commit': {
        'type': 'string',
        'description': 'first 7 characters of the SHA-1 identifier for a git commit',
        'pattern': '^([A-Fa-f0-9]{7})$',
      },
      'ref': {
        'type': 'string',
        'description': 'git reference for a git commit',
        'pattern': '^refs\/(heads|tags|remotes)\/(.+)$',
      },
      'default_branch': {
        'type': 'string',
        'description': 'default branch of the git repository',
      },
      'commit_info': {
        'type': 'object',
        'description': 'metadata about a git commit',
        'properties': {
          'url': {
            'type': 'string',
            'description': 'URL to view a git commit',
          },
          'message': {
            'type': 'string',
            'description': 'git commit message',
          },
          'date': {
            'type': 'string',
            'description': 'timestamp for a git commit'
          },
          'author': {
            'type': 'object',
            'description': 'metadata about the author of a git commit',
            'properties': {
              'username': {
                'type': 'string',
                'description': 'username of the author',
              },
              'url': {
                'type': 'string',
                'description': 'URL to view the profile of the author',
              },
              'avatar_url': {
                'type': 'string',
                'description': 'URL to view the avatar of the author',
              },
            },
            'required': ['username', 'url', 'avatar_url'],
          },
          'committer': {
            'type': 'object',
            'description': 'metadata about the committer of a git commit',
            'properties': {
              'username': {
                'type': 'string',
                'description': 'username of the committer',
              },
              'url': {
                'type': 'string',
                'description': 'URL to view the profile of the committer',
              },
              'avatar_url': {
                'type': 'string',
                'description': 'URL to view the avatar of the committer',
              },
            },
            'required': ['username', 'url', 'avatar_url'],
          },
        },
        'required': ['url', 'message', 'date'],
      },
    },
    'required': ['commit', 'ref', 'default_branch'],
  }
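  # Illustrative payload accepted by the schema above (hypothetical values):
  #   {
  #     "commit": "1c002dd",
  #     "ref": "refs/heads/master",
  #     "default_branch": "master",
  #     "commit_info": {
  #       "url": "https://git.example.com/commit/1c002dd",
  #       "message": "update readme",
  #       "date": "2015-04-22T12:00:00Z"
  #     }
  #   }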

  @classmethod
  def service_name(cls):
    return 'custom-git'

  def is_active(self, config):
    return 'credentials' in config

  def _metadata_from_payload(self, payload):
    try:
      metadata = json.loads(payload)
      validate(metadata, self.payload_schema)
    except:
      raise InvalidPayloadException()
    return metadata

  def handle_trigger_request(self, request, trigger):
    payload = request.get_json()
    if not payload:
      raise SkipRequestException()

    logger.debug('Payload %s', payload)
    metadata = self._metadata_from_payload(payload)

    # The build source is the canonical git URL used to clone.
    config = get_trigger_config(trigger)
    metadata['git_url'] = config['build_source']

    branch = metadata['ref'].split('/')[-1]
    tags = {branch}

    build_name = metadata['commit_sha'][:6]
    dockerfile_id = None

    return dockerfile_id, tags, build_name, config['subdir'], metadata

  def activate(self, trigger_uuid, standard_webhook_url, auth_token, config):
    public_key, private_key = generate_ssh_keypair()
    config['credentials'] = [
      {
        'name': 'SSH Public Key',
        'value': public_key,
      },
      {
        'name': 'Webhook Endpoint URL',
        'value': standard_webhook_url,
      },
    ]
    return config, private_key

  def deactivate(self, auth_token, config):
    config.pop('credentials', None)
    return config

  def manual_start(self, trigger, run_parameters=None):
    # commit_sha is the only required parameter
    run_parameters = run_parameters or {}
    if 'commit_sha' not in run_parameters:
      raise TriggerStartException('missing required parameter')

    config = get_trigger_config(trigger)
    dockerfile_id = None
    tags = {run_parameters['commit_sha']}
    build_name = run_parameters['commit_sha']
    metadata = {
      'commit_sha': run_parameters['commit_sha'],
      'git_url': config['build_source'],
    }

    return dockerfile_id, list(tags), build_name, config['subdir'], metadata