quay/endpoints/trigger.py

import logging
import io
import os.path
import tarfile
import base64
import re

from github import Github, UnknownObjectException, GithubException
from tempfile import SpooledTemporaryFile

from app import app, userfiles as user_files, github_trigger
from util.tarfileappender import TarfileAppender

client = app.config['HTTPCLIENT']

logger = logging.getLogger(__name__)

TARBALL_MIME = 'application/gzip'
CHUNK_SIZE = 512 * 1024


def should_skip_commit(message):
  return '[skip build]' in message or '[build skip]' in message


class BuildArchiveException(Exception):
  pass


class InvalidServiceException(Exception):
  pass


class TriggerActivationException(Exception):
  pass


class TriggerDeactivationException(Exception):
  pass


class TriggerStartException(Exception):
  pass


class ValidationRequestException(Exception):
  pass


class SkipRequestException(Exception):
  pass


class EmptyRepositoryException(Exception):
  pass


class RepositoryReadException(Exception):
  pass


class BuildTrigger(object):
  def __init__(self):
    pass

  def dockerfile_url(self, auth_token, config):
    """
    Returns the URL at which the Dockerfile for the trigger can be found, or None if
    there is none or it is not applicable.
    """
    return None

  def load_dockerfile_contents(self, auth_token, config):
    """
    Loads the Dockerfile found for the trigger's config and returns its contents, or None
    if none could be found/loaded.
    """
    return None

  def list_build_sources(self, auth_token):
    """
    Take the auth information for the specific trigger type and load the
    list of build sources (repositories).
    """
    raise NotImplementedError

  def list_build_subdirs(self, auth_token, config):
    """
    Take the auth information and the config specified so far and list all of
    the possible subdirs containing Dockerfiles.
    """
    raise NotImplementedError

  def handle_trigger_request(self, request, auth_token, config):
    """
    Transform the incoming request data into a set of actions. Returns a tuple of
    userfiles resource id, docker tags, build name, resource subdir, and metadata.
    """
    raise NotImplementedError

  def is_active(self, config):
    """
    Returns True if the current build trigger is active. Inactive means further
    setup is needed.
    """
    raise NotImplementedError

  def activate(self, trigger_uuid, standard_webhook_url, auth_token, config):
    """
    Activates the trigger for the service, with the given new configuration.
    Returns the new configuration that should be stored if successful.
    """
    raise NotImplementedError

  def deactivate(self, auth_token, config):
    """
    Deactivates the trigger for the service, removing any hooks installed in
    the remote service. Returns the new config that should be stored if this
    trigger is going to be re-activated.
    """
    raise NotImplementedError

  def manual_start(self, auth_token, config, run_parameters=None):
    """
    Manually creates a repository build for this trigger.
    """
    raise NotImplementedError

  def list_field_values(self, auth_token, config, field_name):
    """
    Lists all values for the given custom trigger field. For example, a trigger might have a
    field named "branches", and this method would return all branches.
    """
    raise NotImplementedError

  @classmethod
  def service_name(cls):
    """
    The name of the particular service implemented by subclasses.
    """
    raise NotImplementedError

  @classmethod
  def get_trigger_for_service(cls, service):
    for subc in cls.__subclasses__():
      if subc.service_name() == service:
        return subc()

    raise InvalidServiceException('Unable to find service: %s' % service)
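
# Illustrative usage sketch (not from the original source): callers look up a trigger
# implementation by service name and drive it through the interface above. The 'github'
# service name matches GithubBuildTrigger below; the token, webhook URL, and repository
# are placeholders.
#
#   trigger = BuildTrigger.get_trigger_for_service('github')
#   config = {'build_source': 'someorg/somerepo', 'subdir': ''}
#   if not trigger.is_active(config):
#     config = trigger.activate('trigger-uuid', 'https://example.com/webhook/uuid',
#                               'oauth-token', config)
#   subdirs = trigger.list_build_subdirs('oauth-token', config)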


def raise_unsupported():
  raise io.UnsupportedOperation


class GithubBuildTrigger(BuildTrigger):
  @staticmethod
  def _get_client(auth_token):
    return Github(auth_token,
                  base_url=github_trigger.api_endpoint(),
                  client_id=github_trigger.client_id(),
                  client_secret=github_trigger.client_secret())

  @classmethod
  def service_name(cls):
    return 'github'

  def is_active(self, config):
    return 'hook_id' in config

  def activate(self, trigger_uuid, standard_webhook_url, auth_token, config):
    new_build_source = config['build_source']
    gh_client = self._get_client(auth_token)

    try:
      to_add_webhook = gh_client.get_repo(new_build_source)
    except UnknownObjectException:
      msg = 'Unable to find GitHub repository for source: %s'
      raise TriggerActivationException(msg % new_build_source)

    webhook_config = {
      'url': standard_webhook_url,
      'content_type': 'json',
    }

    try:
      hook = to_add_webhook.create_hook('web', webhook_config)
      config['hook_id'] = hook.id
      config['master_branch'] = to_add_webhook.default_branch
    except GithubException:
      msg = 'Unable to create webhook on repository: %s'
      raise TriggerActivationException(msg % new_build_source)

    return config

  def deactivate(self, auth_token, config):
    gh_client = self._get_client(auth_token)

    try:
      repo = gh_client.get_repo(config['build_source'])
      to_delete = repo.get_hook(config['hook_id'])
      to_delete.delete()
    except GithubException:
      msg = 'Unable to remove hook: %s' % config['hook_id']
      raise TriggerDeactivationException(msg)

    config.pop('hook_id', None)
    return config

  def list_build_sources(self, auth_token):
    gh_client = self._get_client(auth_token)
    usr = gh_client.get_user()

    personal = {
      'personal': True,
      'repos': [repo.full_name for repo in usr.get_repos()],
      'info': {
        'name': usr.login,
        'avatar_url': usr.avatar_url,
      }
    }

    repos_by_org = [personal]

    for org in usr.get_orgs():
      repo_list = []
      for repo in org.get_repos(type='member'):
        repo_list.append(repo.full_name)

      repos_by_org.append({
        'personal': False,
        'repos': repo_list,
        'info': {
          'name': org.name or org.login,
          'avatar_url': org.avatar_url
        }
      })

    return repos_by_org
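
  # Illustrative shape of the structure returned by list_build_sources (names are
  # placeholders, not from the original source):
  #
  #   [{'personal': True, 'repos': ['someuser/repo'], 'info': {'name': 'someuser', 'avatar_url': '...'}},
  #    {'personal': False, 'repos': ['someorg/repo'], 'info': {'name': 'Some Org', 'avatar_url': '...'}}]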

  def matches_ref(self, ref, regex):
    match_string = ref.split('/', 1)[1]
    if not regex:
      return False

    m = regex.match(match_string)
    if not m:
      return False

    return len(m.group(0)) == len(match_string)
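
  # Illustrative behavior (example values only): the portion of the ref after the first '/'
  # must be matched in full. With regex = re.compile('heads/master'), 'refs/heads/master'
  # matches, while 'refs/heads/master-old' does not because the match is shorter than the
  # remaining string.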

  def list_build_subdirs(self, auth_token, config):
    gh_client = self._get_client(auth_token)
    source = config['build_source']

    try:
      repo = gh_client.get_repo(source)

      # Find the first matching branch.
      branches = None
      if 'branchtag_regex' in config:
        try:
          regex = re.compile(config['branchtag_regex'])
          branches = [branch.name for branch in repo.get_branches()
                      if self.matches_ref('refs/heads/' + branch.name, regex)]
        except Exception:
          # Fall back to the default branch if the stored regex is invalid or the
          # branches cannot be listed.
          pass

      branches = branches or [repo.default_branch or 'master']
      default_commit = repo.get_branch(branches[0]).commit
      commit_tree = repo.get_git_tree(default_commit.sha, recursive=True)

      return [os.path.dirname(elem.path) for elem in commit_tree.tree
              if (elem.type == u'blob' and
                  os.path.basename(elem.path) == u'Dockerfile')]
    except GithubException as ge:
      message = ge.data.get('message', 'Unable to list contents of repository: %s' % source)
      if message == 'Branch not found':
        raise EmptyRepositoryException()

      raise RepositoryReadException(message)

  def dockerfile_url(self, auth_token, config):
    source = config['build_source']
    subdirectory = config.get('subdir', '')
    path = subdirectory + '/Dockerfile' if subdirectory else 'Dockerfile'

    gh_client = self._get_client(auth_token)
    try:
      repo = gh_client.get_repo(source)
      master_branch = repo.default_branch or 'master'
      return 'https://github.com/%s/blob/%s/%s' % (source, master_branch, path)
    except GithubException:
      return None

  def load_dockerfile_contents(self, auth_token, config):
    gh_client = self._get_client(auth_token)
    source = config['build_source']
    subdirectory = config.get('subdir', '')
    path = subdirectory + '/Dockerfile' if subdirectory else 'Dockerfile'

    try:
      repo = gh_client.get_repo(source)
      file_info = repo.get_file_contents(path)
      if file_info is None:
        return None

      content = file_info.content
      if file_info.encoding == 'base64':
        content = base64.b64decode(content)
      return content
    except GithubException as ge:
      message = ge.data.get('message', 'Unable to read Dockerfile: %s' % source)
      raise RepositoryReadException(message)

  @staticmethod
  def _build_commit_info(repo, commit_sha):
    try:
      commit = repo.get_commit(commit_sha)
    except GithubException:
      logger.exception('Could not load data for commit')
      return

    commit_info = {
      'url': commit.html_url,
      'message': commit.commit.message,
      'date': commit.last_modified
    }

    if commit.author:
      commit_info['author'] = {
        'username': commit.author.login,
        'avatar_url': commit.author.avatar_url,
        'url': commit.author.html_url
      }

    if commit.committer:
      commit_info['committer'] = {
        'username': commit.committer.login,
        'avatar_url': commit.committer.avatar_url,
        'url': commit.committer.html_url
      }

    return commit_info

  @staticmethod
  def _prepare_build(config, repo, commit_sha, build_name, ref):
    # Prepare the download and upload URLs.
    archive_link = repo.get_archive_link('tarball', commit_sha)
    download_archive = client.get(archive_link, stream=True)
    tarball_subdir = ''

    with SpooledTemporaryFile(CHUNK_SIZE) as tarball:
      for chunk in download_archive.iter_content(CHUNK_SIZE):
        tarball.write(chunk)

      # Seek to position 0 to make tarfile happy.
      tarball.seek(0)

      # Pull out the name of the subdir that GitHub generated.
      with tarfile.open(fileobj=tarball) as archive:
        tarball_subdir = archive.getnames()[0]

      # Seek to position 0 to make tarfile happy.
      tarball.seek(0)

      # Inject minimal synthetic .git entries (HEAD pointing at the commit, plus empty
      # objects/ and refs/ directories) into the build context tarball.
      entries = {
        tarball_subdir + '/.git/HEAD': commit_sha,
        tarball_subdir + '/.git/objects/': None,
        tarball_subdir + '/.git/refs/': None
      }

      appender = TarfileAppender(tarball, entries).get_stream()
      dockerfile_id = user_files.store_file(appender, TARBALL_MIME)

    logger.debug('Successfully prepared job')

    # Compute the tag(s).
    branch = ref.split('/')[-1]
    tags = {branch}
    if branch == repo.default_branch:
      tags.add('latest')
    logger.debug('Pushing to tags: %s', tags)

    # Compute the subdir.
    repo_subdir = config['subdir']
    joined_subdir = os.path.join(tarball_subdir, repo_subdir)
    logger.debug('Final subdir: %s', joined_subdir)

    # Compute the metadata.
    metadata = {
      'commit_sha': commit_sha,
      'ref': ref,
      'default_branch': repo.default_branch,
    }

    # Add the commit info.
    commit_info = GithubBuildTrigger._build_commit_info(repo, commit_sha)
    if commit_info is not None:
      metadata['commit_info'] = commit_info

    return dockerfile_id, list(tags), build_name, joined_subdir, metadata
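
  # For reference, an illustrative sketch (placeholder values, not from the original source)
  # of what _prepare_build returns for a push to the default branch:
  #
  #   ('userfiles-resource-id', ['master', 'latest'], 'abc1234', 'someorg-somerepo-abc1234/',
  #    {'commit_sha': 'abc1234...', 'ref': 'refs/heads/master', 'default_branch': 'master',
  #     'commit_info': {...}})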

  @staticmethod
  def get_display_name(sha):
    return sha[0:7]
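
  # e.g. get_display_name('254d0e26e18f...') == '254d0e2' (illustrative sha).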

  def handle_trigger_request(self, request, auth_token, config):
    payload = request.get_json()
    if not payload or payload.get('head_commit') is None:
      raise SkipRequestException()

    if 'zen' in payload:
      raise ValidationRequestException()

    logger.debug('Payload %s', payload)
    ref = payload['ref']
    commit_sha = payload['head_commit']['id']
    commit_message = payload['head_commit'].get('message', '')

    if 'branchtag_regex' in config:
      try:
        regex = re.compile(config['branchtag_regex'])
      except Exception:
        # If the stored regex is invalid, fall back to matching everything.
        regex = re.compile('.*')

      if not self.matches_ref(ref, regex):
        raise SkipRequestException()

    if should_skip_commit(commit_message):
      raise SkipRequestException()

    short_sha = GithubBuildTrigger.get_display_name(commit_sha)
    gh_client = self._get_client(auth_token)

    repo_full_name = '%s/%s' % (payload['repository']['owner']['name'],
                                payload['repository']['name'])
    repo = gh_client.get_repo(repo_full_name)
    logger.debug('Github repo: %s', repo)

    return GithubBuildTrigger._prepare_build(config, repo, commit_sha,
                                             short_sha, ref)
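
  # A minimal sketch of the GitHub push payload fields this handler reads (illustrative
  # values, not from the original source):
  #
  #   {'ref': 'refs/heads/master',
  #    'head_commit': {'id': '254d0e2...', 'message': 'some commit message'},
  #    'repository': {'name': 'somerepo', 'owner': {'name': 'someorg'}}}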

  def manual_start(self, auth_token, config, run_parameters=None):
    try:
      source = config['build_source']
      run_parameters = run_parameters or {}

      gh_client = self._get_client(auth_token)
      repo = gh_client.get_repo(source)

      branch_name = run_parameters.get('branch_name') or repo.default_branch
      branch = repo.get_branch(branch_name)
      branch_sha = branch.commit.sha
      commit_info = branch.commit
      short_sha = GithubBuildTrigger.get_display_name(branch_sha)
      ref = 'refs/heads/%s' % branch_name

      return self._prepare_build(config, repo, branch_sha, short_sha, ref)
    except GithubException as ghe:
      raise TriggerStartException(ghe.data['message'])
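
  # Illustrative manual start (placeholder values): omitting 'branch_name' from
  # run_parameters falls back to the repository's default branch.
  #
  #   trigger.manual_start('oauth-token', {'build_source': 'someorg/somerepo', 'subdir': ''},
  #                        run_parameters={'branch_name': 'master'})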

  def list_field_values(self, auth_token, config, field_name):
    if field_name == 'refs':
      branches = self.list_field_values(auth_token, config, 'branch_name')
      tags = self.list_field_values(auth_token, config, 'tag_name')

      return ([{'kind': 'branch', 'name': b} for b in branches] +
              [{'kind': 'tag', 'name': tag} for tag in tags])

    if field_name == 'tag_name':
      gh_client = self._get_client(auth_token)
      source = config['build_source']
      repo = gh_client.get_repo(source)
      return [tag.name for tag in repo.get_tags()]

    if field_name == 'branch_name':
      gh_client = self._get_client(auth_token)
      source = config['build_source']
      repo = gh_client.get_repo(source)
      branches = [branch.name for branch in repo.get_branches()]

      if repo.default_branch not in branches:
        branches.insert(0, repo.default_branch)

      if branches[0] != repo.default_branch:
        branches.remove(repo.default_branch)
        branches.insert(0, repo.default_branch)

      return branches

    return None
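
  # Illustrative shape of list_field_values(..., 'refs') (placeholder names):
  #
  #   [{'kind': 'branch', 'name': 'master'}, {'kind': 'tag', 'name': 'v1.0.0'}]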