2014-02-18 20:50:15 +00:00
|
|
|
import logging
|
2014-02-18 23:09:14 +00:00
|
|
|
import io
|
2014-02-24 18:56:21 +00:00
|
|
|
import os.path
|
2014-03-31 19:40:24 +00:00
|
|
|
import tarfile
|
2014-04-03 03:33:58 +00:00
|
|
|
import base64
|
2014-02-18 20:50:15 +00:00
|
|
|
|
2014-02-21 22:09:56 +00:00
|
|
|
from github import Github, UnknownObjectException, GithubException
|
2014-02-18 23:09:14 +00:00
|
|
|
from tempfile import SpooledTemporaryFile
|
2014-02-18 20:50:15 +00:00
|
|
|
|
2014-04-03 21:31:46 +00:00
|
|
|
from app import app, userfiles as user_files
|
2014-02-18 20:50:15 +00:00
|
|
|
|
|
|
|
|
2014-02-18 23:09:14 +00:00
|
|
|
# HTTP client configured by the application; used below to stream
# repository archive downloads.
client = app.config['HTTPCLIENT']


logger = logging.getLogger(__name__)


# MIME type under which repository tarballs are stored in userfiles.
TARBALL_MIME = 'application/gzip'

# Chunk size (512 KiB) used when spooling archive downloads to a temp file.
CHUNK_SIZE = 512 * 1024
|
2014-02-18 20:50:15 +00:00
|
|
|
|
|
|
|
|
|
|
|
class BuildArchiveException(Exception):
  """ Raised when a build archive cannot be processed.

  NOTE(review): not raised in this module; presumably raised by callers — confirm.
  """
  pass
|
|
|
|
|
|
|
|
class InvalidServiceException(Exception):
  """ Raised when no BuildTrigger subclass matches a requested service name. """
  pass
|
|
|
|
|
2014-02-20 23:57:49 +00:00
|
|
|
class TriggerActivationException(Exception):
  """ Raised when a trigger cannot be activated (e.g. webhook creation fails). """
  pass
|
|
|
|
|
2014-02-24 21:36:49 +00:00
|
|
|
class TriggerDeactivationException(Exception):
  """ Raised when a trigger cannot be deactivated (e.g. webhook removal fails). """
  pass
|
|
|
|
|
2014-02-21 22:09:56 +00:00
|
|
|
class ValidationRequestException(Exception):
  """ Raised when an incoming webhook request is a validation ping rather
  than a real build trigger (see the 'zen' payload check in
  handle_trigger_request). """
  pass
|
|
|
|
|
2014-02-24 19:12:54 +00:00
|
|
|
class EmptyRepositoryException(Exception):
  """ Raised when a repository's contents cannot be listed because it has no
  branches ('Branch not found' from the remote service). """
  pass
|
|
|
|
|
2014-03-28 19:32:56 +00:00
|
|
|
class RepositoryReadException(Exception):
  """ Raised when repository contents (tree listing or Dockerfile) cannot be
  read from the remote service. """
  pass
|
|
|
|
|
2014-02-18 20:50:15 +00:00
|
|
|
|
|
|
|
class BuildTrigger(object):
  """
  Abstract base class describing a service (e.g. GitHub) that can trigger
  repository builds. Concrete implementations register themselves simply by
  subclassing; get_trigger_for_service dispatches on service_name().
  """

  def __init__(self):
    pass

  def dockerfile_url(self, auth_token, config):
    """
    Return a browsable URL for the trigger's Dockerfile, or None when there is
    no such URL or the concept does not apply to the service.
    """
    return None

  def load_dockerfile_contents(self, auth_token, config):
    """
    Return the contents of the Dockerfile referenced by the trigger's config,
    or None when it cannot be found or loaded.
    """
    return None

  def list_build_sources(self, auth_token):
    """
    Given the trigger's auth information, return the list of available build
    sources (repositories).
    """
    raise NotImplementedError

  def list_build_subdirs(self, auth_token, config):
    """
    Given the auth information and the partially-specified config, return all
    candidate subdirectories containing Dockerfiles.
    """
    raise NotImplementedError

  def handle_trigger_request(self, request, auth_token, config):
    """
    Convert an incoming webhook request into build actions. Returns a tuple of
    (userfiles resource id, docker tags, build name, resource subdir).
    """
    raise NotImplementedError

  def is_active(self, config):
    """
    Return True when the trigger is fully set up; False means further setup is
    still required.
    """
    raise NotImplementedError

  def activate(self, trigger_uuid, standard_webhook_url, auth_token, config):
    """
    Activate the trigger for the service with the given new configuration and
    return the configuration that should be persisted on success.
    """
    raise NotImplementedError

  def deactivate(self, auth_token, config):
    """
    Deactivate the trigger, removing any hooks installed in the remote
    service. Returns the config that should be stored in case the trigger is
    later re-activated.
    """
    raise NotImplementedError

  def manual_start(self, auth_token, config):
    """
    Manually kick off a repository build for this trigger.
    """
    raise NotImplementedError

  @classmethod
  def service_name(cls):
    """
    Identifier of the particular service implemented by a subclass.
    """
    raise NotImplementedError

  @classmethod
  def get_trigger_for_service(cls, service):
    """
    Instantiate and return the subclass implementing the named service, or
    raise InvalidServiceException when none matches.
    """
    found = next((subclass for subclass in cls.__subclasses__()
                  if subclass.service_name() == service), None)
    if found is None:
      raise InvalidServiceException('Unable to find service: %s' % service)

    return found()
|
|
|
|
|
|
|
|
|
2014-02-18 23:09:14 +00:00
|
|
|
def raise_unsupported():
  """ Unconditionally raise io.UnsupportedOperation. """
  raise io.UnsupportedOperation()
|
|
|
|
|
|
|
|
|
2014-02-18 20:50:15 +00:00
|
|
|
class GithubBuildTrigger(BuildTrigger):
  """
  BuildTrigger implementation backed by GitHub webhooks: installs a 'web' hook
  on the configured repository and turns push payloads into queued builds.
  """

  @staticmethod
  def _get_client(auth_token):
    """ Returns an authenticated GitHub API client for the given token. """
    return Github(auth_token, client_id=app.config['GITHUB_CLIENT_ID'],
                  client_secret=app.config['GITHUB_CLIENT_SECRET'])

  @classmethod
  def service_name(cls):
    return 'github'

  def is_active(self, config):
    # The trigger is considered active once a webhook has been installed.
    return 'hook_id' in config

  def activate(self, trigger_uuid, standard_webhook_url, auth_token, config):
    """
    Installs a webhook on the configured repository, recording the hook id and
    master branch in the config. Raises TriggerActivationException when the
    repository cannot be found or the hook cannot be created.
    """
    new_build_source = config['build_source']
    gh_client = self._get_client(auth_token)

    try:
      to_add_webhook = gh_client.get_repo(new_build_source)
    except UnknownObjectException:
      msg = 'Unable to find GitHub repository for source: %s'
      raise TriggerActivationException(msg % new_build_source)

    webhook_config = {
      'url': standard_webhook_url,
      'content_type': 'json',
    }

    try:
      hook = to_add_webhook.create_hook('web', webhook_config)
      config['hook_id'] = hook.id
      config['master_branch'] = to_add_webhook.master_branch
    except GithubException:
      msg = 'Unable to create webhook on repository: %s'
      raise TriggerActivationException(msg % new_build_source)

    return config

  def deactivate(self, auth_token, config):
    """
    Removes the installed webhook and strips its id from the config. Raises
    TriggerDeactivationException when the hook cannot be removed.
    """
    gh_client = self._get_client(auth_token)

    try:
      repo = gh_client.get_repo(config['build_source'])
      to_delete = repo.get_hook(config['hook_id'])
      to_delete.delete()
    except GithubException:
      msg = 'Unable to remove hook: %s' % config['hook_id']
      raise TriggerDeactivationException(msg)

    config.pop('hook_id', None)

    return config

  def list_build_sources(self, auth_token):
    """
    Returns the user's own repositories followed by the repositories of each
    of their organizations, each entry carrying display info.
    """
    gh_client = self._get_client(auth_token)
    usr = gh_client.get_user()

    personal = {
      'personal': True,
      'repos': [repo.full_name for repo in usr.get_repos()],
      'info': {
        'name': usr.login,
        'avatar_url': usr.avatar_url,
      }
    }

    repos_by_org = [personal]

    for org in usr.get_orgs():
      repo_list = [repo.full_name for repo in org.get_repos(type='member')]

      repos_by_org.append({
        'personal': False,
        'repos': repo_list,
        'info': {
          'name': org.name,
          'avatar_url': org.avatar_url
        }
      })

    return repos_by_org

  def list_build_subdirs(self, auth_token, config):
    """
    Returns the subdirectories of the default branch that contain a file named
    'Dockerfile'. Raises EmptyRepositoryException when the repository has no
    branches and RepositoryReadException for other read failures.
    """
    gh_client = self._get_client(auth_token)
    source = config['build_source']

    try:
      repo = gh_client.get_repo(source)
      default_commit = repo.get_branch(repo.master_branch or 'master').commit
      commit_tree = repo.get_git_tree(default_commit.sha, recursive=True)

      return [os.path.dirname(elem.path) for elem in commit_tree.tree
              if (elem.type == u'blob' and
                  os.path.basename(elem.path) == u'Dockerfile')]
    except GithubException as ge:
      message = ge.data.get('message', 'Unable to list contents of repository: %s' % source)
      if message == 'Branch not found':
        # GitHub reports 'Branch not found' for repositories with no commits.
        raise EmptyRepositoryException()

      raise RepositoryReadException(message)

  def dockerfile_url(self, auth_token, config):
    """
    Builds a github.com blob URL pointing at the trigger's Dockerfile, or
    returns None when the repository cannot be read (best-effort).
    """
    source = config['build_source']
    subdirectory = config.get('subdir', '')
    path = subdirectory + '/Dockerfile' if subdirectory else 'Dockerfile'

    gh_client = self._get_client(auth_token)
    try:
      repo = gh_client.get_repo(source)
      master_branch = repo.master_branch or 'master'
      return 'https://github.com/%s/blob/%s/%s' % (source, master_branch, path)
    except GithubException:
      # The exception was previously bound to an unused name; the URL is
      # best-effort so any read failure simply yields None.
      return None

  def load_dockerfile_contents(self, auth_token, config):
    """
    Fetches and (if base64-encoded) decodes the Dockerfile referenced by the
    config, returning its contents or None. Raises RepositoryReadException
    when the read fails.
    """
    gh_client = self._get_client(auth_token)

    source = config['build_source']
    subdirectory = config.get('subdir', '')
    path = subdirectory + '/Dockerfile' if subdirectory else 'Dockerfile'

    try:
      repo = gh_client.get_repo(source)
      file_info = repo.get_file_contents(path)
      if file_info is None:
        return None

      content = file_info.content
      if file_info.encoding == 'base64':
        content = base64.b64decode(content)
      return content

    except GithubException as ge:
      message = ge.data.get('message', 'Unable to read Dockerfile: %s' % source)
      raise RepositoryReadException(message)

  @staticmethod
  def _prepare_build(config, repo, commit_sha, build_name, ref):
    """
    Downloads the archive tarball for the given commit, stores it in
    userfiles, and returns the (dockerfile_id, tags, build_name, subdir)
    tuple used to queue a build.
    """
    # Prepare the download and upload URLs
    archive_link = repo.get_archive_link('tarball', commit_sha)
    download_archive = client.get(archive_link, stream=True)

    tarball_subdir = ''
    with SpooledTemporaryFile(CHUNK_SIZE) as tarball:
      for chunk in download_archive.iter_content(CHUNK_SIZE):
        tarball.write(chunk)

      # Seek to position 0 to make tarfile happy
      tarball.seek(0)

      # Pull out the name of the subdir that GitHub generated
      with tarfile.open(fileobj=tarball) as archive:
        tarball_subdir = archive.getnames()[0]

      # Rewind again: reading the member list advanced the file position, and
      # the entire archive (not just its tail) must be stored.
      tarball.seek(0)
      dockerfile_id = user_files.store_file(tarball, TARBALL_MIME)

    logger.debug('Successfully prepared job')

    # Compute the tag(s): the pushed branch, plus 'latest' for master.
    branch = ref.split('/')[-1]
    tags = {branch}
    if branch == repo.master_branch:
      tags.add('latest')
    logger.debug('Pushing to tags: %s', tags)

    # Compute the subdir inside the extracted tarball.
    # NOTE(review): other methods use config.get('subdir', ''); a missing key
    # raises KeyError here — confirm 'subdir' is always present in config.
    repo_subdir = config['subdir']
    joined_subdir = os.path.join(tarball_subdir, repo_subdir)
    logger.debug('Final subdir: %s', joined_subdir)

    return dockerfile_id, list(tags), build_name, joined_subdir

  @staticmethod
  def get_display_name(sha):
    """ Returns the short (7-character) display form of a commit sha. """
    return sha[0:7]

  def handle_trigger_request(self, request, auth_token, config):
    """
    Translates a GitHub push webhook payload into the build tuple produced by
    _prepare_build. Raises ValidationRequestException for GitHub's initial
    hook-creation ping (identified by its 'zen' key).
    """
    payload = request.get_json()

    if 'zen' in payload:
      raise ValidationRequestException()

    logger.debug('Payload %s', payload)
    ref = payload['ref']
    commit_sha = payload['head_commit']['id']
    short_sha = GithubBuildTrigger.get_display_name(commit_sha)

    gh_client = self._get_client(auth_token)

    repo_full_name = '%s/%s' % (payload['repository']['owner']['name'],
                                payload['repository']['name'])
    repo = gh_client.get_repo(repo_full_name)

    logger.debug('Github repo: %s', repo)

    return GithubBuildTrigger._prepare_build(config, repo, commit_sha,
                                             short_sha, ref)

  def manual_start(self, auth_token, config):
    """
    Kicks off a build of the master branch's head commit, as if it had just
    been pushed.
    """
    source = config['build_source']

    gh_client = self._get_client(auth_token)
    repo = gh_client.get_repo(source)
    master = repo.get_branch(repo.master_branch)
    master_sha = master.commit.sha
    short_sha = GithubBuildTrigger.get_display_name(master_sha)
    ref = 'refs/heads/%s' % repo.master_branch

    return self._prepare_build(config, repo, master_sha, short_sha, ref)
|