import logging
import io
import os.path
import tarfile
import base64
import re
import json

from github import Github, UnknownObjectException, GithubException
from tempfile import SpooledTemporaryFile

from app import app, userfiles as user_files, github_trigger
from util.tarfileappender import TarfileAppender
from util.ssh import generate_ssh_keypair


client = app.config['HTTPCLIENT']

logger = logging.getLogger(__name__)

TARBALL_MIME = 'application/gzip'
CHUNK_SIZE = 512 * 1024


def should_skip_commit(message):
  return '[skip build]' in message or '[build skip]' in message
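
# Illustrative examples (comments only, not executed):
#   should_skip_commit('Fix typo [skip build]')  # -> True
#   should_skip_commit('Add feature')            # -> False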


class InvalidPayloadException(Exception):
  pass

class BuildArchiveException(Exception):
  pass

class InvalidServiceException(Exception):
  pass

class TriggerActivationException(Exception):
  pass

class TriggerDeactivationException(Exception):
  pass

class TriggerStartException(Exception):
  pass

class ValidationRequestException(Exception):
  pass

class SkipRequestException(Exception):
  pass

class EmptyRepositoryException(Exception):
  pass

class RepositoryReadException(Exception):
  pass


class BuildTrigger(object):
  def __init__(self):
    pass

  def dockerfile_url(self, auth_token, config):
    """
    Returns the URL at which the Dockerfile for the trigger is found, or None if
    none exists or it is not applicable.
    """
    return None

  def load_dockerfile_contents(self, auth_token, config):
    """
    Loads the Dockerfile found for the trigger's config and returns its contents,
    or None if none could be found/loaded.
    """
    return None

  def list_build_sources(self, auth_token):
    """
    Takes the auth information for the specific trigger type and loads the
    list of build sources (repositories).
    """
    raise NotImplementedError

  def list_build_subdirs(self, auth_token, config):
    """
    Takes the auth information and the config specified so far and lists all of
    the possible subdirs containing Dockerfiles.
    """
    raise NotImplementedError

  def handle_trigger_request(self, request, trigger):
    """
    Transforms the incoming request data into a set of actions. Returns a tuple
    of the userfiles resource id, docker tags, build name, resource subdir, and
    build metadata.
    """
    raise NotImplementedError

  def is_active(self, config):
    """
    Returns True if the current build trigger is active. Inactive means further
    setup is needed.
    """
    raise NotImplementedError

  def activate(self, trigger_uuid, standard_webhook_url, auth_token, config):
    """
    Activates the trigger for the service, with the given new configuration.
    Returns a tuple of the new configuration that should be stored and the
    generated private key, if successful.
    """
    raise NotImplementedError

  def deactivate(self, auth_token, config):
    """
    Deactivates the trigger for the service, removing any hooks installed in
    the remote service. Returns the new config that should be stored if this
    trigger is going to be re-activated.
    """
    raise NotImplementedError

  def manual_start(self, trigger, run_parameters=None):
    """
    Manually creates a repository build for this trigger.
    """
    raise NotImplementedError

  def list_field_values(self, auth_token, config, field_name):
    """
    Lists all values for the given custom trigger field. For example, a trigger might have a
    field named "branches", and this method would return all branches.
    """
    raise NotImplementedError

  @classmethod
  def service_name(cls):
    """
    Returns the name of the particular service implemented by this subclass.
    """
    raise NotImplementedError

  @classmethod
  def get_trigger_for_service(cls, service):
    for subc in cls.__subclasses__():
      if subc.service_name() == service:
        return subc()

    raise InvalidServiceException('Unable to find service: %s' % service)
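
# Illustrative dispatch sketch (comments only, not executed): service implementations
# register themselves simply by subclassing BuildTrigger, and lookup is by service_name().
#   BuildTrigger.get_trigger_for_service('github')   # -> GithubBuildTrigger instance
#   BuildTrigger.get_trigger_for_service('custom')   # -> CustomBuildTrigger instance
#   BuildTrigger.get_trigger_for_service('nosuch')   # raises InvalidServiceException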


def raise_unsupported():
  raise io.UnsupportedOperation


def get_trigger_config(trigger):
  try:
    return json.loads(trigger.config)
  except:
    return {}
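
# Illustrative sketch (comments only, not executed), assuming `trigger.config` is the
# JSON string stored on the trigger row:
#   trigger.config = '{"build_source": "someorg/somerepo", "subdir": ""}'
#   get_trigger_config(trigger)   # -> {'build_source': 'someorg/somerepo', 'subdir': ''}
#   trigger.config = 'not json'
#   get_trigger_config(trigger)   # -> {}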


class GithubBuildTrigger(BuildTrigger):
  """
  BuildTrigger for GitHub that uses the archive API and buildpacks.
  """
  @staticmethod
  def _get_client(auth_token):
    return Github(auth_token,
                  base_url=github_trigger.api_endpoint(),
                  client_id=github_trigger.client_id(),
                  client_secret=github_trigger.client_secret())

  @classmethod
  def service_name(cls):
    return 'github'

  def is_active(self, config):
    return 'hook_id' in config

  def activate(self, trigger_uuid, standard_webhook_url, auth_token, config):
    new_build_source = config['build_source']
    gh_client = self._get_client(auth_token)

    # Find the GitHub repository.
    try:
      gh_repo = gh_client.get_repo(new_build_source)
    except UnknownObjectException:
      msg = 'Unable to find GitHub repository for source: %s' % new_build_source
      raise TriggerActivationException(msg)

    # Add a deploy key to the GitHub repository.
    try:
      config['public_key'], private_key = generate_ssh_keypair()
      deploy_key = gh_repo.create_key('Quay.io Builder', config['public_key'])
      config['deploy_key_id'] = deploy_key.id
    except GithubException:
      msg = 'Unable to add deploy key to repository: %s' % new_build_source
      raise TriggerActivationException(msg)

    # Create a webhook config.
    webhook_config = {
      'url': standard_webhook_url,
      'content_type': 'json',
    }

    # Add the webhook to the GitHub repository.
    try:
      hook = gh_repo.create_hook('web', webhook_config)
      config['hook_id'] = hook.id
      config['master_branch'] = gh_repo.default_branch
    except GithubException:
      msg = 'Unable to create webhook on repository: %s' % new_build_source
      raise TriggerActivationException(msg)

    return config, private_key

  def deactivate(self, auth_token, config):
    gh_client = self._get_client(auth_token)

    # Find the GitHub repository.
    try:
      repo = gh_client.get_repo(config['build_source'])
    except UnknownObjectException:
      msg = 'Unable to find GitHub repository for source: %s' % config['build_source']
      raise TriggerDeactivationException(msg)

    # If the trigger uses a deploy key, remove it.
    if config.get('deploy_key_id'):
      try:
        deploy_key = repo.get_key(config['deploy_key_id'])
        deploy_key.delete()
      except GithubException:
        msg = 'Unable to remove deploy key: %s' % config['deploy_key_id']
        raise TriggerDeactivationException(msg)

    # Remove the webhook.
    try:
      hook = repo.get_hook(config['hook_id'])
      hook.delete()
    except GithubException:
      msg = 'Unable to remove hook: %s' % config['hook_id']
      raise TriggerDeactivationException(msg)

    config.pop('hook_id', None)

    return config

  def list_build_sources(self, auth_token):
    gh_client = self._get_client(auth_token)
    usr = gh_client.get_user()

    personal = {
      'personal': True,
      'repos': [repo.full_name for repo in usr.get_repos()],
      'info': {
        'name': usr.login,
        'avatar_url': usr.avatar_url,
      }
    }

    repos_by_org = [personal]

    for org in usr.get_orgs():
      repo_list = []
      for repo in org.get_repos(type='member'):
        repo_list.append(repo.full_name)

      repos_by_org.append({
        'personal': False,
        'repos': repo_list,
        'info': {
          'name': org.name or org.login,
          'avatar_url': org.avatar_url
        }
      })

    return repos_by_org

  @staticmethod
  def matches_ref(ref, regex):
    match_string = ref.split('/', 1)[1]
    if not regex:
      return False

    m = regex.match(match_string)
    if not m:
      return False

    return len(m.group(0)) == len(match_string)
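
  # Illustrative examples (comments only, not executed): the regex must consume the
  # entire portion of the ref after the leading 'refs/' component.
  #   GithubBuildTrigger.matches_ref('refs/heads/master', re.compile('heads/master'))  # -> True
  #   GithubBuildTrigger.matches_ref('refs/heads/master', re.compile('heads'))         # -> False (partial match)
  #   GithubBuildTrigger.matches_ref('refs/tags/v1.0', None)                           # -> False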

  def list_build_subdirs(self, auth_token, config):
    gh_client = self._get_client(auth_token)
    source = config['build_source']

    try:
      repo = gh_client.get_repo(source)

      # Find the first matching branch.
      branches = None
      if 'branchtag_regex' in config:
        try:
          regex = re.compile(config['branchtag_regex'])
          branches = [branch.name for branch in repo.get_branches()
                      if GithubBuildTrigger.matches_ref('refs/heads/' + branch.name, regex)]
        except:
          pass

      branches = branches or [repo.default_branch or 'master']
      default_commit = repo.get_branch(branches[0]).commit
      commit_tree = repo.get_git_tree(default_commit.sha, recursive=True)

      return [os.path.dirname(elem.path) for elem in commit_tree.tree
              if (elem.type == u'blob' and
                  os.path.basename(elem.path) == u'Dockerfile')]
    except GithubException as ge:
      message = ge.data.get('message', 'Unable to list contents of repository: %s' % source)
      if message == 'Branch not found':
        raise EmptyRepositoryException()

      raise RepositoryReadException(message)

  def dockerfile_url(self, auth_token, config):
    source = config['build_source']
    subdirectory = config.get('subdir', '')
    path = subdirectory + '/Dockerfile' if subdirectory else 'Dockerfile'
    gh_client = self._get_client(auth_token)

    try:
      repo = gh_client.get_repo(source)
      master_branch = repo.default_branch or 'master'
      return 'https://github.com/%s/blob/%s/%s' % (source, master_branch, path)
    except GithubException:
      return None

  def load_dockerfile_contents(self, auth_token, config):
    gh_client = self._get_client(auth_token)

    source = config['build_source']
    subdirectory = config.get('subdir', '')
    path = subdirectory + '/Dockerfile' if subdirectory else 'Dockerfile'

    try:
      repo = gh_client.get_repo(source)
      file_info = repo.get_file_contents(path)
      if file_info is None:
        return None

      content = file_info.content
      if file_info.encoding == 'base64':
        content = base64.b64decode(content)
      return content

    except GithubException as ge:
      message = ge.data.get('message', 'Unable to read Dockerfile: %s' % source)
      raise RepositoryReadException(message)

  @staticmethod
  def _build_commit_info(repo, commit_sha):
    try:
      commit = repo.get_commit(commit_sha)
    except GithubException:
      logger.exception('Could not load data for commit')
      return

    commit_info = {
      'url': commit.html_url,
      'message': commit.commit.message,
      'date': commit.last_modified
    }

    if commit.author:
      commit_info['author'] = {
        'username': commit.author.login,
        'avatar_url': commit.author.avatar_url,
        'url': commit.author.html_url
      }

    if commit.committer:
      commit_info['committer'] = {
        'username': commit.committer.login,
        'avatar_url': commit.committer.avatar_url,
        'url': commit.committer.html_url
      }

    return commit_info

  @staticmethod
  def _prepare_build(trigger, config, repo, commit_sha, build_name, ref, git_url):
    # If the trigger isn't using git, prepare the buildpack.
    if trigger.private_key is None:
      # Prepare the download and upload URLs.
      archive_link = repo.get_archive_link('tarball', commit_sha)
      download_archive = client.get(archive_link, stream=True)

      tarball_subdir = ''
      with SpooledTemporaryFile(CHUNK_SIZE) as tarball:
        for chunk in download_archive.iter_content(CHUNK_SIZE):
          tarball.write(chunk)

        # Seek to position 0 to make tarfile happy.
        tarball.seek(0)

        # Pull out the name of the subdir that GitHub generated.
        with tarfile.open(fileobj=tarball) as archive:
          tarball_subdir = archive.getnames()[0]

        # Seek to position 0 to make tarfile happy.
        tarball.seek(0)

        entries = {
          tarball_subdir + '/.git/HEAD': commit_sha,
          tarball_subdir + '/.git/objects/': None,
          tarball_subdir + '/.git/refs/': None
        }

        appender = TarfileAppender(tarball, entries).get_stream()
        dockerfile_id = user_files.store_file(appender, TARBALL_MIME)

      logger.debug('Successfully prepared job')
    else:
      dockerfile_id = None

    # Compute the tag(s).
    branch = ref.split('/')[-1]
    tags = {branch}

    if branch == repo.default_branch:
      tags.add('latest')

    logger.debug('Pushing to tags: %s', tags)

    # Compute the subdir.
    repo_subdir = config['subdir']
    if trigger.private_key is None:
      joined_subdir = os.path.join(tarball_subdir, repo_subdir)
    else:
      joined_subdir = repo_subdir
    logger.debug('Final subdir: %s', joined_subdir)

    # Compute the metadata.
    metadata = {
      'commit_sha': commit_sha,
      'ref': ref,
      'default_branch': repo.default_branch,
      'git_url': git_url,
    }

    # Add the commit info.
    commit_info = GithubBuildTrigger._build_commit_info(repo, commit_sha)
    if commit_info is not None:
      metadata['commit_info'] = commit_info

    return dockerfile_id, list(tags), build_name, joined_subdir, metadata
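
  # Illustrative shape of the tuple returned above (hypothetical values):
  #   (dockerfile_id='<userfiles id or None>',
  #    tags=['master', 'latest'],
  #    build_name='abc1234',
  #    joined_subdir='someorg-somerepo-abc1234/',   # GitHub tarball subdir joined with config['subdir']
  #    metadata={'commit_sha': '...', 'ref': 'refs/heads/master',
  #              'default_branch': 'master', 'git_url': '...', 'commit_info': {...}})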

  @staticmethod
  def get_display_name(sha):
    return sha[0:7]

  def handle_trigger_request(self, request, trigger):
    payload = request.get_json()
    if not payload or payload.get('head_commit') is None:
      raise SkipRequestException()

    if 'zen' in payload:
      raise ValidationRequestException()

    logger.debug('Payload %s', payload)
    ref = payload['ref']
    commit_sha = payload['head_commit']['id']
    commit_message = payload['head_commit'].get('message', '')
    git_url = payload['repository']['git_url']

    config = get_trigger_config(trigger)
    if 'branchtag_regex' in config:
      try:
        regex = re.compile(config['branchtag_regex'])
      except:
        regex = re.compile('.*')

      if not GithubBuildTrigger.matches_ref(ref, regex):
        raise SkipRequestException()

    if should_skip_commit(commit_message):
      raise SkipRequestException()

    short_sha = GithubBuildTrigger.get_display_name(commit_sha)

    gh_client = self._get_client(trigger.auth_token)

    repo_full_name = '%s/%s' % (payload['repository']['owner']['name'],
                                payload['repository']['name'])
    repo = gh_client.get_repo(repo_full_name)

    logger.debug('Github repo: %s', repo)

    return GithubBuildTrigger._prepare_build(trigger, config, repo, commit_sha,
                                             short_sha, ref, git_url)
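
  # Illustrative skeleton of the webhook payload fields consumed above (hypothetical
  # values; only the keys actually read by handle_trigger_request are shown):
  #   {
  #     'ref': 'refs/heads/master',
  #     'head_commit': {'id': '<commit sha>', 'message': 'some commit message'},
  #     'repository': {
  #       'name': 'somerepo',
  #       'owner': {'name': 'someorg'},
  #       'git_url': 'git://github.com/someorg/somerepo.git',
  #     },
  #   }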

  def manual_start(self, trigger, run_parameters=None):
    config = get_trigger_config(trigger)
    try:
      source = config['build_source']
      run_parameters = run_parameters or {}

      gh_client = self._get_client(trigger.auth_token)
      repo = gh_client.get_repo(source)
      branch_name = run_parameters.get('branch_name') or repo.default_branch
      branch = repo.get_branch(branch_name)
      branch_sha = branch.commit.sha
      short_sha = GithubBuildTrigger.get_display_name(branch_sha)
      ref = 'refs/heads/%s' % branch_name
      git_url = repo.git_url

      return self._prepare_build(trigger, config, repo, branch_sha, short_sha, ref, git_url)
    except GithubException as ghe:
      raise TriggerStartException(ghe.data['message'])

  def list_field_values(self, auth_token, config, field_name):
    if field_name == 'refs':
      branches = self.list_field_values(auth_token, config, 'branch_name')
      tags = self.list_field_values(auth_token, config, 'tag_name')

      return ([{'kind': 'branch', 'name': b} for b in branches] +
              [{'kind': 'tag', 'name': tag} for tag in tags])

    if field_name == 'tag_name':
      gh_client = self._get_client(auth_token)
      source = config['build_source']
      repo = gh_client.get_repo(source)
      return [tag.name for tag in repo.get_tags()]

    if field_name == 'branch_name':
      gh_client = self._get_client(auth_token)
      source = config['build_source']
      repo = gh_client.get_repo(source)
      branches = [branch.name for branch in repo.get_branches()]

      if repo.default_branch not in branches:
        branches.insert(0, repo.default_branch)

      if branches[0] != repo.default_branch:
        branches.remove(repo.default_branch)
        branches.insert(0, repo.default_branch)

      return branches

    return None
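
  # Illustrative return shape for field_name == 'refs' (hypothetical values):
  #   [{'kind': 'branch', 'name': 'master'},
  #    {'kind': 'branch', 'name': 'develop'},
  #    {'kind': 'tag', 'name': 'v1.0'}]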


class CustomBuildTrigger(BuildTrigger):
  @classmethod
  def service_name(cls):
    return 'custom'

  def is_active(self, config):
    return 'public_key' in config

  @staticmethod
  def _metadata_from_payload(payload):
    try:
      metadata = {
        'commit_sha': payload['commit'],
        'ref': payload['ref'],
        'default_branch': payload.get('default_branch', 'master'),
      }
    except KeyError:
      raise InvalidPayloadException()

    # The commit_info block and its author/committer sub-blocks are optional.
    commit_info = payload.get('commit_info')
    if commit_info is not None:
      try:
        metadata['commit_info'] = {
          'url': commit_info['url'],
          'message': commit_info['message'],
          'date': commit_info['date'],
        }
      except KeyError:
        raise InvalidPayloadException()

      author = commit_info.get('author')
      if author is not None:
        try:
          metadata['commit_info']['author'] = {
            'username': author['username'],
            'avatar_url': author['avatar_url'],
            'url': author['url'],
          }
        except KeyError:
          raise InvalidPayloadException()

      committer = commit_info.get('committer')
      if committer is not None:
        try:
          metadata['commit_info']['committer'] = {
            'username': committer['username'],
            'avatar_url': committer['avatar_url'],
            'url': committer['url'],
          }
        except KeyError:
          raise InvalidPayloadException()

    return metadata
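
  # Illustrative payload accepted by _metadata_from_payload (hypothetical values;
  # 'commit_info' and its 'author'/'committer' sub-blocks may be omitted):
  #   {
  #     'commit': '<commit sha>',
  #     'ref': 'refs/heads/master',
  #     'default_branch': 'master',
  #     'commit_info': {
  #       'url': 'https://git.example.com/commit/abc1234',
  #       'message': 'some commit message',
  #       'date': '2015-01-01T00:00:00Z',
  #       'author': {'username': 'someuser', 'avatar_url': '<url>', 'url': '<url>'},
  #       'committer': {'username': 'someuser', 'avatar_url': '<url>', 'url': '<url>'},
  #     },
  #   }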

  def handle_trigger_request(self, request, trigger):
    payload = request.get_json()
    if not payload:
      raise SkipRequestException()

    logger.debug('Payload %s', payload)
    metadata = self._metadata_from_payload(payload)

    # The build source is the canonical git URL used to clone.
    config = get_trigger_config(trigger)
    metadata['git_url'] = config['build_source']

    branch = metadata['ref'].split('/')[-1]
    tags = {branch}

    build_name = metadata['commit_sha'][:6]
    dockerfile_id = None

    return dockerfile_id, tags, build_name, config['subdir'], metadata

  def activate(self, trigger_uuid, standard_webhook_url, auth_token, config):
    config['public_key'], private_key = generate_ssh_keypair()
    return config, private_key

  def deactivate(self, auth_token, config):
    config.pop('public_key', None)
    return config

  def manual_start(self, trigger, run_parameters=None):
    run_parameters = run_parameters or {}
    for parameter in ['branch_name', 'commit_sha']:
      if parameter not in run_parameters:
        raise TriggerStartException('Missing required parameter: %s' % parameter)

    config = get_trigger_config(trigger)

    dockerfile_id = None
    branch = run_parameters.get('branch_name', None)
    tags = {branch} if branch is not None else set()
    build_name = 'HEAD'
    metadata = {
      'commit_sha': run_parameters['commit_sha'],
      'default_branch': branch,
      'ref': 'refs/heads/%s' % branch,
    }

    return dockerfile_id, tags, build_name, config['subdir'], metadata