2014-02-18 20:50:15 +00:00
|
|
|
import logging
|
2014-02-18 23:09:14 +00:00
|
|
|
import io
|
2014-02-24 18:56:21 +00:00
|
|
|
import os.path
|
2014-03-31 19:40:24 +00:00
|
|
|
import tarfile
|
2014-04-03 03:33:58 +00:00
|
|
|
import base64
|
2014-10-14 19:46:35 +00:00
|
|
|
import re
|
2015-03-23 19:37:30 +00:00
|
|
|
import json
|
2014-02-18 20:50:15 +00:00
|
|
|
|
2015-04-30 21:12:41 +00:00
|
|
|
import gitlab
|
|
|
|
|
2015-04-29 21:04:52 +00:00
|
|
|
from endpoints.building import PreparedBuild
|
2014-02-21 22:09:56 +00:00
|
|
|
from github import Github, UnknownObjectException, GithubException
|
2015-04-28 22:15:12 +00:00
|
|
|
from bitbucket import BitBucket
|
2014-02-18 23:09:14 +00:00
|
|
|
from tempfile import SpooledTemporaryFile
|
2015-04-15 20:52:46 +00:00
|
|
|
from jsonschema import validate
|
2015-04-24 22:36:48 +00:00
|
|
|
from data import model
|
2014-02-18 20:50:15 +00:00
|
|
|
|
2015-04-24 19:13:08 +00:00
|
|
|
from app import app, userfiles as user_files, github_trigger, get_app_url
|
2014-10-15 19:51:34 +00:00
|
|
|
from util.tarfileappender import TarfileAppender
|
2015-03-26 20:20:53 +00:00
|
|
|
from util.ssh import generate_ssh_keypair
|
2014-02-18 20:50:15 +00:00
|
|
|
|
|
|
|
|
2014-02-18 23:09:14 +00:00
|
|
|
# Shared HTTP client (configured on the app) used to download source archives.
client = app.config['HTTPCLIENT']

logger = logging.getLogger(__name__)

# MIME type recorded when storing a prepared build tarball.
TARBALL_MIME = 'application/gzip'

# Chunk size (bytes) used when spooling downloaded archives to disk.
CHUNK_SIZE = 512 * 1024
|
2014-02-18 20:50:15 +00:00
|
|
|
|
|
|
|
|
2015-03-26 20:20:53 +00:00
|
|
|
class InvalidPayloadException(Exception):
  """ Raised when an incoming trigger payload is malformed or cannot be handled. """
|
2014-05-01 19:25:46 +00:00
|
|
|
|
2014-02-18 20:50:15 +00:00
|
|
|
class BuildArchiveException(Exception):
  """ Raised when an error occurs while preparing or reading a build archive. """
|
|
|
|
|
|
|
|
class InvalidServiceException(Exception):
  """ Raised when no trigger handler exists for a requested service name. """
|
|
|
|
|
2014-02-20 23:57:49 +00:00
|
|
|
class TriggerActivationException(Exception):
  """ Raised when a build trigger cannot be activated on the remote service. """
|
|
|
|
|
2014-02-24 21:36:49 +00:00
|
|
|
class TriggerDeactivationException(Exception):
  """ Raised when a build trigger cannot be deactivated on the remote service. """
|
|
|
|
|
2014-09-30 20:29:32 +00:00
|
|
|
class TriggerStartException(Exception):
  """ Raised when a build cannot be started for a trigger. """
|
|
|
|
|
2014-02-21 22:09:56 +00:00
|
|
|
class ValidationRequestException(Exception):
  """ Raised when an incoming webhook request is only a validation request. """
|
|
|
|
|
2014-05-01 19:25:46 +00:00
|
|
|
class SkipRequestException(Exception):
  """ Raised to indicate that the trigger request should be skipped (no build started). """
|
|
|
|
|
2014-02-24 19:12:54 +00:00
|
|
|
class EmptyRepositoryException(Exception):
  """ Raised when the build-source repository has no usable contents. """
|
|
|
|
|
2014-03-28 19:32:56 +00:00
|
|
|
class RepositoryReadException(Exception):
  """ Raised when a repository's metadata or contents cannot be read. """
|
|
|
|
|
2015-04-24 19:13:08 +00:00
|
|
|
class TriggerProviderException(Exception):
  """ Raised for errors originating from the trigger's service provider. """
|
|
|
|
|
2014-02-18 20:50:15 +00:00
|
|
|
|
2015-04-29 21:04:52 +00:00
|
|
|
def find_matching_branches(config, branches):
  """ Returns the subset of `branches` whose 'refs/heads/<branch>' ref matches the
      trigger's 'branchtag_regex' config value.

      When no regex is configured, or the configured pattern does not compile,
      all of `branches` are returned (best-effort fallback).
  """
  if 'branchtag_regex' in config:
    try:
      regex = re.compile(config['branchtag_regex'])
      return [branch for branch in branches
              if matches_ref('refs/heads/' + branch, regex)]
    except re.error:
      # The regex is user-supplied; an invalid pattern falls back to "all branches".
      # (Previously a bare `except:` — narrowed so real bugs are no longer swallowed.)
      pass

  return branches
|
|
|
|
|
|
|
|
def raise_if_skipped(config, ref):
  """ Raises a SkipRequestException if the given ref should be skipped.

      A ref is skipped when it does not fully match the trigger's configured
      'branchtag_regex'. An invalid user-supplied pattern degrades to '.*'
      (match everything), so nothing is skipped.
  """
  if 'branchtag_regex' in config:
    try:
      regex = re.compile(config['branchtag_regex'])
    except re.error:
      # Narrowed from a bare `except:`: only a bad pattern should fall back.
      regex = re.compile('.*')

    if not matches_ref(ref, regex):
      raise SkipRequestException()
|
|
|
|
|
|
|
|
def matches_ref(ref, regex):
  """ Returns whether the portion of `ref` after its first '/' (e.g. 'heads/master'
      for 'refs/heads/master') is *fully* matched by the compiled `regex`.
      A falsy regex matches nothing.
  """
  candidate = ref.split('/', 1)[1]
  if not regex:
    return False

  match = regex.match(candidate)
  return match is not None and len(match.group(0)) == len(candidate)
|
|
|
|
|
|
|
|
def should_skip_commit(message):
  """ Returns True if the commit message asks for the build to be skipped. """
  skip_markers = ('[skip build]', '[build skip]')
  return any(marker in message for marker in skip_markers)
|
|
|
|
|
2015-04-24 22:36:48 +00:00
|
|
|
def raise_unsupported():
  """ Always raises io.UnsupportedOperation; used as a stub for unsupported file operations. """
  raise io.UnsupportedOperation()
|
2014-02-18 20:50:15 +00:00
|
|
|
|
2015-04-24 22:36:48 +00:00
|
|
|
def get_trigger_config(trigger):
  """ Returns the parsed JSON config stored on the trigger, or an empty dict when
      the config is missing (None) or not valid JSON.
  """
  try:
    return json.loads(trigger.config)
  except (TypeError, ValueError):
    # TypeError: trigger.config is None (or non-string); ValueError: malformed JSON.
    # Narrowed from a bare `except:` so unrelated errors are not swallowed.
    return {}
|
|
|
|
|
|
|
|
|
|
|
|
class BuildTriggerHandler(object):
  """ Base class for service-specific build-trigger handlers.

      Subclasses implement the per-service operations (activation, webhook
      handling, repository listing); this class provides config parsing,
      auth-token persistence, and handler lookup by service name.
  """
  def __init__(self, trigger, override_config=None):
    # `trigger` is the trigger's database row; its JSON config is parsed unless
    # an explicit override dict is supplied.
    self.trigger = trigger
    self.config = override_config or get_trigger_config(trigger)

  @property
  def auth_token(self):
    """ Returns the auth token for the trigger. """
    return self.trigger.auth_token

  def dockerfile_url(self):
    """
    Returns the URL at which the Dockerfile for the trigger is found or None if none/not applicable.
    """
    raise NotImplementedError

  def load_dockerfile_contents(self):
    """
    Loads the Dockerfile found for the trigger's config and returns them or None if none could
    be found/loaded.
    """
    raise NotImplementedError

  def list_build_sources(self):
    """
    Take the auth information for the specific trigger type and load the
    list of build sources(repositories).
    """
    raise NotImplementedError

  def list_build_subdirs(self):
    """
    Take the auth information and the specified config so far and list all of
    the possible subdirs containing dockerfiles.
    """
    raise NotImplementedError

  def handle_trigger_request(self):
    """
    Transform the incoming request data into a set of actions. Returns a PreparedBuild.
    """
    # NOTE(review): subclasses define this as handle_trigger_request(self, request);
    # this base signature omits the `request` parameter -- confirm and align.
    raise NotImplementedError

  def is_active(self):
    """
    Returns True if the current build trigger is active. Inactive means further
    setup is needed.
    """
    raise NotImplementedError

  def activate(self, standard_webhook_url):
    """
    Activates the trigger for the service, with the given new configuration.
    Returns new public and private config that should be stored if successful.
    """
    raise NotImplementedError

  def deactivate(self):
    """
    Deactivates the trigger for the service, removing any hooks installed in
    the remote service. Returns the new config that should be stored if this
    trigger is going to be re-activated.
    """
    raise NotImplementedError

  def manual_start(self, run_parameters=None):
    """
    Manually creates a repository build for this trigger. Returns a PreparedBuild.
    """
    raise NotImplementedError

  def list_field_values(self, field_name):
    """
    Lists all values for the given custom trigger field. For example, a trigger might have a
    field named "branches", and this method would return all branches.
    """
    raise NotImplementedError

  @classmethod
  def service_name(cls):
    """
    Particular service implemented by subclasses.
    """
    raise NotImplementedError

  @classmethod
  def get_handler(cls, trigger, override_config=None):
    """ Returns an instance of the handler subclass whose service_name() matches
        the trigger's stored service name; raises InvalidServiceException otherwise.
    """
    # Only direct subclasses of this class are considered.
    for subc in cls.__subclasses__():
      if subc.service_name() == trigger.service.name:
        return subc(trigger, override_config)

    raise InvalidServiceException('Unable to find service: %s' % trigger.service.name)

  def put_config_key(self, key, value):
    """ Updates a config key in the trigger, saving it to the DB. """
    self.config[key] = value
    model.update_build_trigger(self.trigger, self.config)

  def set_auth_token(self, auth_token):
    """ Sets the auth token for the trigger, saving it to the DB. """
    model.update_build_trigger(self.trigger, self.config, auth_token=auth_token)
|
2015-03-23 19:37:30 +00:00
|
|
|
|
2014-02-18 23:09:14 +00:00
|
|
|
|
2015-04-24 22:36:48 +00:00
|
|
|
class BitbucketBuildTrigger(BuildTriggerHandler):
  """
  BuildTrigger for Bitbucket.
  """
  @classmethod
  def service_name(cls):
    # Matches the service name stored on the trigger's database row.
    return 'bitbucket'

  def _get_client(self):
    """ Returns an unauthorized Bitbucket OAuth1 client built from the app's
        consumer key/secret and this trigger's callback URL.
    """
    key = app.config.get('BITBUCKET_TRIGGER_CONFIG', {}).get('CONSUMER_KEY', '')
    secret = app.config.get('BITBUCKET_TRIGGER_CONFIG', {}).get('CONSUMER_SECRET', '')

    trigger_uuid = self.trigger.uuid
    callback_url = '%s/oauth1/bitbucket/callback/trigger/%s' % (get_app_url(), trigger_uuid)

    return BitBucket(key, secret, callback_url)

  def _get_authorized_client(self):
    """ Returns a Bitbucket client authorized with this trigger's stored
        'access_token:access_token_secret' auth token.
    """
    base_client = self._get_client()
    # The token is stored as a single colon-joined string; a placeholder is used
    # when no token has been stored yet.
    auth_token = self.auth_token or 'invalid:invalid'
    (access_token, access_token_secret) = auth_token.split(':')
    return base_client.get_authorized_client(access_token, access_token_secret)

  def _get_repository_client(self):
    """ Returns an authorized client scoped to the configured build-source
        repository ('namespace/name').
    """
    source = self.config['build_source']
    (namespace, name) = source.split('/')
    bitbucket_client = self._get_authorized_client()
    return bitbucket_client.for_namespace(namespace).repositories().get(name)

  def get_oauth_url(self):
    """ Returns the Bitbucket OAuth authorization data for this trigger, raising
        RepositoryReadException on failure.
    """
    bitbucket_client = self._get_client()
    (result, data, err_msg) = bitbucket_client.get_authorization_url()
    if not result:
      raise RepositoryReadException(err_msg)

    return data

  def exchange_verifier(self, verifier):
    """ Exchanges an OAuth verifier for an access token, stores the token and
        the authorized username on the trigger, and returns whether it worked.
    """
    bitbucket_client = self._get_client()
    access_token = self.config.get('access_token', '')
    access_token_secret = self.auth_token

    # Exchange the verifier for a new access token.
    (result, data, _) = bitbucket_client.verify_token(access_token, access_token_secret, verifier)
    if not result:
      return False

    # Save the updated access token and secret.
    self.set_auth_token(data[0] + ':' + data[1])

    # Retrieve the current authorized user's information and store the username in the config.
    authorized_client = self._get_authorized_client()
    (result, data, _) = authorized_client.get_current_user()
    if not result:
      return False

    username = data['user']['username']
    self.put_config_key('username', username)
    return True

  def is_active(self):
    # 'hook_id' is only written by activate(), so its presence marks a fully
    # set-up trigger.
    return 'hook_id' in self.config

  def activate(self, standard_webhook_url):
    """ Installs a deploy key and a POST webhook on the repository. Returns the
        updated public config plus the private key as private config. Raises
        TriggerActivationException when either remote call fails.
    """
    config = self.config

    # Add a deploy key to the repository.
    public_key, private_key = generate_ssh_keypair()
    config['credentials'] = [
      {
        'name': 'SSH Public Key',
        'value': public_key,
      },
    ]

    repository = self._get_repository_client()
    (result, data, err_msg) = repository.deploykeys().create(
      app.config['REGISTRY_TITLE'] + ' webhook key', public_key)

    if not result:
      msg = 'Unable to add deploy key to repository: %s' % err_msg
      raise TriggerActivationException(msg)

    config['deploy_key_id'] = data['pk']

    # Add a webhook callback.
    (result, data, err_msg) = repository.services().create('POST', URL=standard_webhook_url)
    if not result:
      msg = 'Unable to add webhook to repository: %s' % err_msg
      raise TriggerActivationException(msg)

    config['hook_id'] = data['id']
    self.config = config
    return config, {'private_key': private_key}

  def deactivate(self):
    """ Removes the webhook and deploy key installed by activate() and returns
        the config with their ids stripped. Raises TriggerDeactivationException
        when a remote deletion fails.
    """
    config = self.config
    repository = self._get_repository_client()

    # Remove the webhook link.
    (result, _, err_msg) = repository.services().delete(config['hook_id'])
    if not result:
      msg = 'Unable to remove webhook from repository: %s' % err_msg
      raise TriggerDeactivationException(msg)

    # Remove the public key.
    (result, _, err_msg) = repository.deploykeys().delete(config['deploy_key_id'])
    if not result:
      msg = 'Unable to remove deploy key from repository: %s' % err_msg
      raise TriggerDeactivationException(msg)

    config.pop('hook_id', None)
    config.pop('deploy_key_id', None)

    return config

  def list_build_sources(self):
    """ Returns the git repositories visible to the authorized user, grouped by
        owner namespace. Raises RepositoryReadException on API failure.
    """
    bitbucket_client = self._get_authorized_client()
    (result, data, err_msg) = bitbucket_client.get_visible_repositories()
    if not result:
      raise RepositoryReadException('Could not read repository list: ' + err_msg)

    namespaces = {}
    for repo in data:
      # Only git repositories can be built (Bitbucket also hosts hg).
      if not repo['scm'] == 'git':
        continue

      owner = repo['owner']
      if not owner in namespaces:
        namespaces[owner] = {
          'personal': owner == self.config.get('username'),
          'repos': [],
          'info': {
            'name': owner
          }
        }

      namespaces[owner]['repos'].append(owner + '/' + repo['slug'])

    return namespaces.values()

  def list_build_subdirs(self):
    """ Returns ['/'] when a root-level Dockerfile exists on the first matching
        branch, else []. Raises RepositoryReadException on API failure.
    """
    config = self.config
    repository = self._get_repository_client()

    # Find the first matching branch.
    repo_branches = self.list_field_values('branch_name') or []
    branches = find_matching_branches(config, repo_branches)
    # NOTE(review): branches[0] raises IndexError when no branch matches -- confirm
    # callers guarantee at least one branch here.
    (result, data, err_msg) = repository.get_path_contents('', revision=branches[0])
    if not result:
      raise RepositoryReadException(err_msg)

    files = set([f['path'] for f in data['files']])
    if 'Dockerfile' in files:
      return ['/']

    return []

  def dockerfile_url(self):
    """ Returns the bitbucket.org URL of the trigger's Dockerfile on the
        repository's main branch (falling back to 'master').
    """
    repository = self._get_repository_client()
    subdirectory = self.config.get('subdir', '')
    path = subdirectory + '/Dockerfile' if subdirectory else 'Dockerfile'

    master_branch = 'master'
    (result, data, _) = repository.get_main_branch()
    if result:
      master_branch = data['name']

    return 'https://bitbucket.org/%s/%s/src/%s/%s' % (repository.namespace,
                                                      repository.repository_name,
                                                      master_branch, path)

  def load_dockerfile_contents(self):
    """ Returns the raw Dockerfile contents for the configured subdir, read from
        the 'master' revision. Raises RepositoryReadException on failure.
    """
    repository = self._get_repository_client()
    # The configured subdir is stored with a leading '/', which is stripped here.
    subdirectory = self.config.get('subdir', '/')[1:]
    path = subdirectory + '/Dockerfile' if subdirectory else 'Dockerfile'

    # NOTE(review): revision is hard-coded to 'master' rather than the repo's
    # main branch -- confirm this is intended.
    (result, data, err_msg) = repository.get_raw_path_contents(path, revision='master')
    if not result:
      raise RepositoryReadException(err_msg)

    return data

  def list_field_values(self, field_name):
    """ Returns the possible values for a custom trigger field ('refs',
        'tag_name' or 'branch_name'), or None when unknown/unavailable.
    """
    source = self.config['build_source']
    (namespace, name) = source.split('/')

    bitbucket_client = self._get_authorized_client()
    repository = bitbucket_client.for_namespace(namespace).repositories().get(name)

    if field_name == 'refs':
      (result, data, _) = repository.get_branches_and_tags()
      if not result:
        return None

      branches = [b['name'] for b in data['branches']]
      tags = [t['name'] for t in data['tags']]

      return ([{'kind': 'branch', 'name': b} for b in branches] +
              [{'kind': 'tag', 'name': tag} for tag in tags])

    if field_name == 'tag_name':
      (result, data, _) = repository.get_tags()
      if not result:
        return None

      return data.keys()

    if field_name == 'branch_name':
      (result, data, _) = repository.get_branches()
      if not result:
        return None

      return data.keys()

    return None

  def _prepare_build(self, commit_sha, ref, is_manual):
    """ Builds a PreparedBuild for the given commit/ref, filling in commit and
        author metadata from the Bitbucket API.
    """
    config = self.config
    repository = self._get_repository_client()

    # Lookup the default branch associated with the repository. We use this when building
    # the tags.
    default_branch = ''
    (result, data, _) = repository.get_main_branch()
    if result:
      default_branch = data['name']

    # Lookup the commit sha.
    (result, data, _) = repository.changesets().get(commit_sha)
    if not result:
      raise TriggerStartException('Could not lookup commit SHA')

    namespace = repository.namespace
    name = repository.repository_name

    commit_info = {
      'url': 'https://bitbucket.org/%s/%s/commits/%s' % (namespace, name, commit_sha),
      'message': data['message'],
      'date': data['timestamp']
    }

    # Try to lookup the author by email address. The raw_author field (if it exists) is returned
    # in the form: "Joseph Schorr <joseph.schorr@coreos.com>"
    if data.get('raw_author'):
      match = re.compile(r'.*<(.+)>').match(data['raw_author'])
      if match:
        email_address = match.group(1)
        bitbucket_client = self._get_authorized_client()
        (result, data, _) = bitbucket_client.accounts().get_profile(email_address)
        if result:
          commit_info['author'] = {
            'username': data['user']['username'],
            'url': 'https://bitbucket.org/%s/' % data['user']['username'],
            'avatar_url': data['user']['avatar']
          }

    metadata = {
      'commit_sha': commit_sha,
      'ref': ref,
      'default_branch': default_branch,
      'git_url': 'git@bitbucket.org:%s/%s.git' % (namespace, name),
      'commit_info': commit_info
    }

    prepared = PreparedBuild(self.trigger)
    prepared.tags_from_ref(ref, default_branch)
    prepared.name_from_sha(commit_sha)
    prepared.subdirectory = config['subdir']
    prepared.metadata = metadata
    prepared.is_manual = is_manual

    return prepared

  def handle_trigger_request(self, request):
    """ Parses a Bitbucket webhook POST and returns a PreparedBuild, raising
        SkipRequestException for payloads that should not trigger a build.
    """
    # Parse the JSON payload.
    payload_json = request.form.get('payload')
    if not payload_json:
      raise SkipRequestException()

    try:
      payload = json.loads(payload_json)
    except ValueError:
      raise SkipRequestException()

    logger.debug('BitBucket trigger payload %s', payload)

    # Make sure we have a commit in the payload.
    if not payload.get('commits'):
      raise SkipRequestException()

    # Check if this build should be skipped by commit message.
    commit = payload['commits'][0]
    commit_message = commit['message']
    if should_skip_commit(commit_message):
      raise SkipRequestException()

    # Check to see if this build should be skipped by ref.
    ref = 'refs/heads/' + commit['branch'] if commit.get('branch') else 'refs/tags/' + commit['tag']
    raise_if_skipped(self.config, ref)

    commit_sha = commit['node']
    return self._prepare_build(commit_sha, ref, False)

  def manual_start(self, run_parameters=None):
    """ Starts a build for the repository's main branch (or the requested
        branch) and returns a PreparedBuild.
    """
    run_parameters = run_parameters or {}
    repository = self._get_repository_client()

    # Find the branch to build.
    branch_name = run_parameters.get('branch_name')
    (result, data, _) = repository.get_main_branch()
    if result:
      # NOTE(review): the main-branch name takes precedence over the requested
      # branch_name here -- confirm this precedence is intended (it looks inverted).
      branch_name = data['name'] or branch_name

    # Lookup the commit SHA for the branch.
    (result, data, _) = repository.get_branches()
    if not result or not branch_name in data:
      raise TriggerStartException('Could not find branch commit SHA')

    commit_sha = data[branch_name]['node']
    ref = 'refs/heads/%s' % (branch_name)

    return self._prepare_build(commit_sha, ref, True)
|
2015-04-24 22:36:48 +00:00
|
|
|
|
|
|
|
|
|
|
|
class GithubBuildTrigger(BuildTriggerHandler):
|
2015-03-18 21:33:43 +00:00
|
|
|
"""
|
|
|
|
BuildTrigger for GitHub that uses the archive API and buildpacks.
|
|
|
|
"""
|
2015-04-24 22:36:48 +00:00
|
|
|
def _get_client(self):
|
|
|
|
return Github(self.auth_token,
|
2014-11-26 17:37:20 +00:00
|
|
|
base_url=github_trigger.api_endpoint(),
|
|
|
|
client_id=github_trigger.client_id(),
|
|
|
|
client_secret=github_trigger.client_secret())
|
2014-02-18 20:50:15 +00:00
|
|
|
|
|
|
|
  @classmethod
  def service_name(cls):
    # Matches the service name stored on the trigger's database row.
    return 'github'
|
|
|
|
|
2015-04-24 22:36:48 +00:00
|
|
|
  def is_active(self):
    # 'hook_id' is only written by activate(), so its presence marks a fully
    # set-up trigger.
    return 'hook_id' in self.config
|
2014-02-20 23:57:49 +00:00
|
|
|
|
2015-04-24 22:36:48 +00:00
|
|
|
  def activate(self, standard_webhook_url):
    """ Installs a deploy key and a webhook on the configured GitHub repository.
        Returns the updated public config plus the private key as private
        config; raises TriggerActivationException when any remote call fails.
    """
    config = self.config
    new_build_source = config['build_source']
    gh_client = self._get_client()

    # Find the GitHub repository.
    try:
      gh_repo = gh_client.get_repo(new_build_source)
    except UnknownObjectException:
      msg = 'Unable to find GitHub repository for source: %s' % new_build_source
      raise TriggerActivationException(msg)

    # Add a deploy key to the GitHub repository.
    public_key, private_key = generate_ssh_keypair()
    config['credentials'] = [
      {
        'name': 'SSH Public Key',
        'value': public_key,
      },
    ]
    try:
      deploy_key = gh_repo.create_key('%s Builder' % app.config['REGISTRY_TITLE'],
                                      public_key)
      config['deploy_key_id'] = deploy_key.id
    except GithubException:
      msg = 'Unable to add deploy key to repository: %s' % new_build_source
      raise TriggerActivationException(msg)

    # Add the webhook to the GitHub repository.
    webhook_config = {
      'url': standard_webhook_url,
      'content_type': 'json',
    }
    try:
      hook = gh_repo.create_hook('web', webhook_config)
      config['hook_id'] = hook.id
      # Record the default branch at activation time for later tag building.
      config['master_branch'] = gh_repo.default_branch
    except GithubException:
      msg = 'Unable to create webhook on repository: %s' % new_build_source
      raise TriggerActivationException(msg)

    return config, {'private_key': private_key}
|
2015-03-18 21:33:43 +00:00
|
|
|
|
2015-04-24 22:36:48 +00:00
|
|
|
  def deactivate(self):
    """ Removes the webhook (and deploy key, when present) installed by
        activate() and returns the cleaned config. Raises
        TriggerDeactivationException when a remote removal fails.
    """
    config = self.config
    gh_client = self._get_client()

    # Find the GitHub repository.
    try:
      repo = gh_client.get_repo(config['build_source'])
    except UnknownObjectException:
      msg = 'Unable to find GitHub repository for source: %s' % config['build_source']
      raise TriggerDeactivationException(msg)

    # If the trigger uses a deploy key, remove it.
    try:
      if config['deploy_key_id']:
        deploy_key = repo.get_key(config['deploy_key_id'])
        deploy_key.delete()
    except KeyError:
      # There was no config['deploy_key_id'], thus this is an old trigger without a deploy key.
      pass
    except GithubException:
      msg = 'Unable to remove deploy key: %s' % config['deploy_key_id']
      raise TriggerDeactivationException(msg)

    # Remove the webhook.
    try:
      hook = repo.get_hook(config['hook_id'])
      hook.delete()
    except GithubException:
      msg = 'Unable to remove hook: %s' % config['hook_id']
      raise TriggerDeactivationException(msg)

    # NOTE(review): only 'hook_id' is popped; 'deploy_key_id' remains in the
    # config -- confirm whether that is intentional.
    config.pop('hook_id', None)
    self.config = config
    return config
|
|
|
|
|
2015-04-24 22:36:48 +00:00
|
|
|
def list_build_sources(self):
|
|
|
|
gh_client = self._get_client()
|
2015-03-18 21:33:43 +00:00
|
|
|
usr = gh_client.get_user()
|
|
|
|
|
2015-04-30 21:12:41 +00:00
|
|
|
try:
|
|
|
|
repos = usr.get_repos()
|
|
|
|
except GithubException:
|
|
|
|
raise RepositoryReadException('Unable to list user repositories')
|
|
|
|
|
2015-03-18 21:33:43 +00:00
|
|
|
personal = {
|
|
|
|
'personal': True,
|
2015-04-30 21:12:41 +00:00
|
|
|
'repos': [repo.full_name for repo in repos],
|
2015-03-18 21:33:43 +00:00
|
|
|
'info': {
|
|
|
|
'name': usr.login,
|
|
|
|
'avatar_url': usr.avatar_url,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
repos_by_org = [personal]
|
|
|
|
|
|
|
|
for org in usr.get_orgs():
|
|
|
|
repo_list = []
|
|
|
|
for repo in org.get_repos(type='member'):
|
|
|
|
repo_list.append(repo.full_name)
|
|
|
|
|
|
|
|
repos_by_org.append({
|
|
|
|
'personal': False,
|
|
|
|
'repos': repo_list,
|
|
|
|
'info': {
|
|
|
|
'name': org.name or org.login,
|
|
|
|
'avatar_url': org.avatar_url
|
|
|
|
}
|
|
|
|
})
|
|
|
|
|
|
|
|
return repos_by_org
|
|
|
|
|
2015-04-24 22:36:48 +00:00
|
|
|
  def list_build_subdirs(self):
    """ Returns the directories (relative paths) containing a Dockerfile on the
        first matching branch of the configured repository. Raises
        EmptyRepositoryException for repositories with no branches and
        RepositoryReadException for other API failures.
    """
    config = self.config
    gh_client = self._get_client()
    source = config['build_source']

    try:
      repo = gh_client.get_repo(source)

      # Find the first matching branch.
      repo_branches = self.list_field_values('branch_name') or []
      branches = find_matching_branches(config, repo_branches)
      branches = branches or [repo.default_branch or 'master']
      default_commit = repo.get_branch(branches[0]).commit
      # Walk the full tree of the branch head to find every Dockerfile blob.
      commit_tree = repo.get_git_tree(default_commit.sha, recursive=True)

      return [os.path.dirname(elem.path) for elem in commit_tree.tree
              if (elem.type == u'blob' and
                  os.path.basename(elem.path) == u'Dockerfile')]
    except GithubException as ge:
      message = ge.data.get('message', 'Unable to list contents of repository: %s' % source)
      # GitHub reports an empty repository as a missing branch.
      if message == 'Branch not found':
        raise EmptyRepositoryException()

      raise RepositoryReadException(message)
|
|
|
|
|
2015-04-24 22:36:48 +00:00
|
|
|
def dockerfile_url(self):
|
|
|
|
config = self.config
|
|
|
|
|
2015-03-18 21:33:43 +00:00
|
|
|
source = config['build_source']
|
|
|
|
subdirectory = config.get('subdir', '')
|
|
|
|
path = subdirectory + '/Dockerfile' if subdirectory else 'Dockerfile'
|
2015-04-24 22:36:48 +00:00
|
|
|
gh_client = self._get_client()
|
2015-03-19 21:12:27 +00:00
|
|
|
|
2015-03-18 21:33:43 +00:00
|
|
|
try:
|
|
|
|
repo = gh_client.get_repo(source)
|
|
|
|
master_branch = repo.default_branch or 'master'
|
|
|
|
return 'https://github.com/%s/blob/%s/%s' % (source, master_branch, path)
|
|
|
|
except GithubException:
|
2015-04-21 22:04:25 +00:00
|
|
|
logger.exception('Could not load repository for Dockerfile.')
|
2015-03-18 21:33:43 +00:00
|
|
|
return None
|
|
|
|
|
2015-04-24 22:36:48 +00:00
|
|
|
def load_dockerfile_contents(self):
|
|
|
|
config = self.config
|
|
|
|
gh_client = self._get_client()
|
2015-03-18 21:33:43 +00:00
|
|
|
|
|
|
|
source = config['build_source']
|
|
|
|
subdirectory = config.get('subdir', '')
|
|
|
|
path = subdirectory + '/Dockerfile' if subdirectory else 'Dockerfile'
|
|
|
|
|
|
|
|
try:
|
|
|
|
repo = gh_client.get_repo(source)
|
|
|
|
file_info = repo.get_file_contents(path)
|
|
|
|
if file_info is None:
|
|
|
|
return None
|
|
|
|
|
|
|
|
content = file_info.content
|
|
|
|
if file_info.encoding == 'base64':
|
|
|
|
content = base64.b64decode(content)
|
|
|
|
return content
|
|
|
|
|
|
|
|
except GithubException as ge:
|
|
|
|
message = ge.data.get('message', 'Unable to read Dockerfile: %s' % source)
|
|
|
|
raise RepositoryReadException(message)
|
|
|
|
|
|
|
|
@staticmethod
|
|
|
|
def _build_commit_info(repo, commit_sha):
|
|
|
|
try:
|
|
|
|
commit = repo.get_commit(commit_sha)
|
|
|
|
except GithubException:
|
|
|
|
logger.exception('Could not load data for commit')
|
|
|
|
return
|
|
|
|
|
2015-03-19 21:12:27 +00:00
|
|
|
commit_info = {
|
2015-03-18 21:33:43 +00:00
|
|
|
'url': commit.html_url,
|
|
|
|
'message': commit.commit.message,
|
2015-03-19 21:12:27 +00:00
|
|
|
'date': commit.last_modified
|
|
|
|
}
|
|
|
|
|
|
|
|
if commit.author:
|
|
|
|
commit_info['author'] = {
|
2015-03-18 21:33:43 +00:00
|
|
|
'username': commit.author.login,
|
|
|
|
'avatar_url': commit.author.avatar_url,
|
|
|
|
'url': commit.author.html_url
|
2015-03-19 21:12:27 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
if commit.committer:
|
|
|
|
commit_info['committer'] = {
|
2015-03-18 21:33:43 +00:00
|
|
|
'username': commit.committer.login,
|
|
|
|
'avatar_url': commit.committer.avatar_url,
|
|
|
|
'url': commit.committer.html_url
|
2015-03-19 21:12:27 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
return commit_info
|
2015-03-18 21:33:43 +00:00
|
|
|
|
|
|
|
  @staticmethod
  def _prepare_tarball(repo, commit_sha):
    """ Download the GitHub-generated tarball for commit_sha, append fake .git
        entries so the build sees the commit SHA, and upload the result to
        user files storage.

        Returns a (tarball_subdir, dockerfile_id) tuple, where tarball_subdir
        is the top-level folder name GitHub placed inside the archive and
        dockerfile_id is the stored-file identifier for the prepared tarball.
    """
    # Prepare the download and upload URLs
    archive_link = repo.get_archive_link('tarball', commit_sha)
    download_archive = client.get(archive_link, stream=True)
    tarball_subdir = ''

    # Spool the archive to disk-backed temp storage so large repos do not
    # have to fit in memory.
    with SpooledTemporaryFile(CHUNK_SIZE) as tarball:
      for chunk in download_archive.iter_content(CHUNK_SIZE):
        tarball.write(chunk)

      # Seek to position 0 to make tarfile happy
      tarball.seek(0)

      # Pull out the name of the subdir that GitHub generated
      with tarfile.open(fileobj=tarball) as archive:
        tarball_subdir = archive.getnames()[0]

      # Seek to position 0 to make tarfile happy.
      tarball.seek(0)

      # Fake .git entries appended to the archive; HEAD carries the real SHA
      # so downstream tooling can identify the commit being built.
      entries = {
        tarball_subdir + '/.git/HEAD': commit_sha,
        tarball_subdir + '/.git/objects/': None,
        tarball_subdir + '/.git/refs/': None
      }

      appender = TarfileAppender(tarball, entries).get_stream()
      dockerfile_id = user_files.store_file(appender, TARBALL_MIME)

    logger.debug('Successfully prepared job')

    return tarball_subdir, dockerfile_id
|
2015-03-23 16:14:47 +00:00
|
|
|
|
2015-03-18 21:33:43 +00:00
|
|
|
|
2015-04-29 21:04:52 +00:00
|
|
|
def _prepare_build(self, repo, ref, commit_sha, is_manual):
|
|
|
|
config = self.config
|
|
|
|
prepared = PreparedBuild(self.trigger)
|
2015-03-18 21:33:43 +00:00
|
|
|
|
2015-04-29 21:04:52 +00:00
|
|
|
# If the trigger isn't using git, prepare the buildpack.
|
|
|
|
if self.trigger.private_key is None:
|
2015-04-21 22:04:25 +00:00
|
|
|
tarball_subdir, dockerfile_id = GithubBuildTrigger._prepare_tarball(repo, commit_sha)
|
2015-03-23 19:37:30 +00:00
|
|
|
|
2015-04-29 21:04:52 +00:00
|
|
|
prepared.subdirectory = os.path.join(tarball_subdir, config['subdir'])
|
|
|
|
prepared.dockerfile_id = dockerfile_id
|
|
|
|
else:
|
|
|
|
prepared.subdirectory = config['subdir']
|
2015-04-21 22:04:25 +00:00
|
|
|
|
2015-04-29 21:04:52 +00:00
|
|
|
# Set the name.
|
|
|
|
prepared.name_from_sha(commit_sha)
|
2015-03-18 21:33:43 +00:00
|
|
|
|
2015-04-29 21:04:52 +00:00
|
|
|
# Set the tag(s).
|
|
|
|
prepared.tags_from_ref(ref, repo.default_branch)
|
2015-03-18 21:33:43 +00:00
|
|
|
|
2015-04-29 21:04:52 +00:00
|
|
|
# Build and set the metadata.
|
2015-03-18 21:33:43 +00:00
|
|
|
metadata = {
|
|
|
|
'commit_sha': commit_sha,
|
|
|
|
'ref': ref,
|
|
|
|
'default_branch': repo.default_branch,
|
2015-04-29 21:04:52 +00:00
|
|
|
'git_url': repo.git_url,
|
2015-03-18 21:33:43 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
# add the commit info.
|
2015-03-19 21:12:27 +00:00
|
|
|
commit_info = GithubBuildTrigger._build_commit_info(repo, commit_sha)
|
2015-03-18 21:33:43 +00:00
|
|
|
if commit_info is not None:
|
|
|
|
metadata['commit_info'] = commit_info
|
|
|
|
|
2015-04-29 21:04:52 +00:00
|
|
|
prepared.metadata = metadata
|
|
|
|
prepared.is_manual = is_manual
|
|
|
|
return prepared
|
2015-03-18 21:33:43 +00:00
|
|
|
|
|
|
|
|
2015-04-24 22:36:48 +00:00
|
|
|
def handle_trigger_request(self, request):
|
2015-04-29 21:04:52 +00:00
|
|
|
# Check the payload to see if we should skip it based on the lack of a head_commit.
|
2015-03-18 21:33:43 +00:00
|
|
|
payload = request.get_json()
|
|
|
|
if not payload or payload.get('head_commit') is None:
|
|
|
|
raise SkipRequestException()
|
|
|
|
|
2015-04-29 21:04:52 +00:00
|
|
|
# This is for GitHub's probing/testing.
|
2015-03-18 21:33:43 +00:00
|
|
|
if 'zen' in payload:
|
|
|
|
raise ValidationRequestException()
|
|
|
|
|
2015-04-29 21:04:52 +00:00
|
|
|
logger.debug('GitHub trigger payload %s', payload)
|
|
|
|
|
2015-03-18 21:33:43 +00:00
|
|
|
ref = payload['ref']
|
|
|
|
commit_sha = payload['head_commit']['id']
|
|
|
|
commit_message = payload['head_commit'].get('message', '')
|
|
|
|
|
2015-04-29 21:04:52 +00:00
|
|
|
# Check if this build should be skipped by commit message.
|
2015-03-18 21:33:43 +00:00
|
|
|
if should_skip_commit(commit_message):
|
|
|
|
raise SkipRequestException()
|
|
|
|
|
2015-04-29 21:04:52 +00:00
|
|
|
# Check to see if this build should be skipped by ref.
|
|
|
|
raise_if_skipped(self.config, ref)
|
2015-03-18 21:33:43 +00:00
|
|
|
|
2015-04-29 21:04:52 +00:00
|
|
|
try:
|
|
|
|
repo_full_name = '%s/%s' % (payload['repository']['owner']['name'],
|
|
|
|
payload['repository']['name'])
|
2015-03-18 21:33:43 +00:00
|
|
|
|
2015-04-29 21:04:52 +00:00
|
|
|
gh_client = self._get_client()
|
|
|
|
repo = gh_client.get_repo(repo_full_name)
|
2015-03-18 21:33:43 +00:00
|
|
|
|
2015-04-29 21:04:52 +00:00
|
|
|
return self._prepare_build(repo, ref, commit_sha, False)
|
|
|
|
except GithubException as ghe:
|
|
|
|
raise TriggerStartException(ghe.data['message'])
|
2015-03-18 21:33:43 +00:00
|
|
|
|
2015-04-24 22:36:48 +00:00
|
|
|
def manual_start(self, run_parameters=None):
|
|
|
|
config = self.config
|
2015-04-29 21:04:52 +00:00
|
|
|
source = config['build_source']
|
|
|
|
run_parameters = run_parameters or {}
|
2015-03-18 21:33:43 +00:00
|
|
|
|
2015-04-29 21:04:52 +00:00
|
|
|
try:
|
2015-04-24 22:36:48 +00:00
|
|
|
gh_client = self._get_client()
|
2015-04-29 21:04:52 +00:00
|
|
|
|
|
|
|
# Lookup the branch and its associated current SHA.
|
2015-03-18 21:33:43 +00:00
|
|
|
repo = gh_client.get_repo(source)
|
|
|
|
branch_name = run_parameters.get('branch_name') or repo.default_branch
|
|
|
|
branch = repo.get_branch(branch_name)
|
2015-04-29 21:04:52 +00:00
|
|
|
commit_sha = branch.commit.sha
|
2015-03-18 21:33:43 +00:00
|
|
|
ref = 'refs/heads/%s' % (branch_name)
|
|
|
|
|
2015-04-29 21:04:52 +00:00
|
|
|
return self._prepare_build(repo, ref, commit_sha, True)
|
2015-03-18 21:33:43 +00:00
|
|
|
except GithubException as ghe:
|
|
|
|
raise TriggerStartException(ghe.data['message'])
|
|
|
|
|
2015-04-24 22:36:48 +00:00
|
|
|
def list_field_values(self, field_name):
|
2014-10-23 20:39:10 +00:00
|
|
|
if field_name == 'refs':
|
2015-04-24 22:36:48 +00:00
|
|
|
branches = self.list_field_values('branch_name')
|
|
|
|
tags = self.list_field_values('tag_name')
|
2014-10-23 20:39:10 +00:00
|
|
|
|
2014-11-26 17:37:20 +00:00
|
|
|
return ([{'kind': 'branch', 'name': b} for b in branches] +
|
2014-10-23 20:39:10 +00:00
|
|
|
[{'kind': 'tag', 'name': tag} for tag in tags])
|
|
|
|
|
2015-04-24 22:36:48 +00:00
|
|
|
config = self.config
|
2014-10-23 20:39:10 +00:00
|
|
|
if field_name == 'tag_name':
|
2015-04-24 22:36:48 +00:00
|
|
|
gh_client = self._get_client()
|
2014-10-23 20:39:10 +00:00
|
|
|
source = config['build_source']
|
|
|
|
repo = gh_client.get_repo(source)
|
|
|
|
return [tag.name for tag in repo.get_tags()]
|
|
|
|
|
2014-09-30 20:29:32 +00:00
|
|
|
if field_name == 'branch_name':
|
2015-04-24 22:36:48 +00:00
|
|
|
gh_client = self._get_client()
|
2014-09-30 20:29:32 +00:00
|
|
|
source = config['build_source']
|
|
|
|
repo = gh_client.get_repo(source)
|
2014-10-03 16:35:58 +00:00
|
|
|
branches = [branch.name for branch in repo.get_branches()]
|
2014-02-26 00:39:43 +00:00
|
|
|
|
2014-09-30 20:29:32 +00:00
|
|
|
if not repo.default_branch in branches:
|
|
|
|
branches.insert(0, repo.default_branch)
|
|
|
|
|
|
|
|
if branches[0] != repo.default_branch:
|
|
|
|
branches.remove(repo.default_branch)
|
|
|
|
branches.insert(0, repo.default_branch)
|
|
|
|
|
|
|
|
return branches
|
|
|
|
|
|
|
|
return None
|
2015-03-26 20:20:53 +00:00
|
|
|
|
2015-04-24 22:36:48 +00:00
|
|
|
class CustomBuildTrigger(BuildTriggerHandler):
  """ BuildTrigger for custom git repositories, driven by a user-supplied
      webhook payload conforming to payload_schema.
  """

  # JSON schema used to validate incoming webhook payloads.
  # NOTE(review): 'required' lists 'commits' but the properties above only
  # define 'commit', and handle_trigger_request reads 'commit_sha' -- confirm
  # which key the webhook contract actually uses before changing either side.
  payload_schema = {
    'type': 'object',
    'properties': {
      'commit': {
        'type': 'string',
        'description': 'first 7 characters of the SHA-1 identifier for a git commit',
        'pattern': '^([A-Fa-f0-9]{7})$',
      },
      'ref': {
        'type': 'string',
        'description': 'git reference for a git commit',
        'pattern': '^refs\/(heads|tags|remotes)\/(.+)$',
      },
      'default_branch': {
        'type': 'string',
        'description': 'default branch of the git repository',
      },
      'commit_info': {
        'type': 'object',
        'description': 'metadata about a git commit',
        'properties': {
          'url': {
            'type': 'string',
            'description': 'URL to view a git commit',
          },
          'message': {
            'type': 'string',
            'description': 'git commit message',
          },
          'date': {
            'type': 'string',
            'description': 'timestamp for a git commit'
          },
          'author': {
            'type': 'object',
            'description': 'metadata about the author of a git commit',
            'properties': {
              'username': {
                'type': 'string',
                'description': 'username of the author',
              },
              'url': {
                'type': 'string',
                'description': 'URL to view the profile of the author',
              },
              'avatar_url': {
                'type': 'string',
                'description': 'URL to view the avatar of the author',
              },
            },
            'required': ['username', 'url', 'avatar_url'],
          },
          'committer': {
            'type': 'object',
            'description': 'metadata about the committer of a git commit',
            'properties': {
              'username': {
                'type': 'string',
                'description': 'username of the committer',
              },
              'url': {
                'type': 'string',
                'description': 'URL to view the profile of the committer',
              },
              'avatar_url': {
                'type': 'string',
                'description': 'URL to view the avatar of the committer',
              },
            },
            'required': ['username', 'url', 'avatar_url'],
          },
        },
        'required': ['url', 'message', 'date'],
      },
    },
    'required': ['commits', 'ref', 'default_branch'],
  }

  @classmethod
  def service_name(cls):
    """ Identifier for this trigger service. """
    return 'custom-git'

  def is_active(self):
    """ A custom trigger is active once activate() has generated credentials. """
    # 'in' instead of has_key: same behavior, works on Python 3 as well.
    return 'credentials' in self.config

  def _metadata_from_payload(self, payload):
    """ Parse and schema-validate a webhook payload.

        Accepts either a JSON string or an already-parsed dict (the previous
        version called json.loads unconditionally, which always failed on the
        dict that handle_trigger_request passes in).

        Raises InvalidPayloadException on parse or validation failure.
    """
    try:
      metadata = payload if isinstance(payload, dict) else json.loads(payload)
      validate(metadata, self.payload_schema)
    except Exception:
      # Narrowed from a bare except so SystemExit/KeyboardInterrupt propagate.
      raise InvalidPayloadException()
    return metadata

  def handle_trigger_request(self, request):
    """ Handle an incoming custom-git webhook and return a PreparedBuild.

        Raises SkipRequestException when there is no payload or the commit
        message opts out of building; InvalidPayloadException when the
        payload fails schema validation.
    """
    # Skip if there is no payload.
    payload = request.get_json()
    if not payload:
      raise SkipRequestException()

    logger.debug('Payload %s', payload)

    # Validate the payload and skip if the commit message matches.
    metadata = self._metadata_from_payload(payload)
    if should_skip_commit(metadata.get('commit_info', {}).get('message', '')):
      raise SkipRequestException()

    # The build source is the canonical git URL used to clone.
    config = self.config
    metadata['git_url'] = config['build_source']

    prepared = PreparedBuild(self.trigger)
    prepared.tags_from_ref(metadata['ref'])
    prepared.name_from_sha(metadata['commit_sha'])
    prepared.subdirectory = config['subdir']
    prepared.metadata = metadata

    return prepared

  def manual_start(self, run_parameters=None):
    """ Manually start a build for the given commit SHA.

        run_parameters must contain 'commit_sha'; raises
        TriggerStartException when it is missing.
    """
    # Default run_parameters like the other triggers do; previously a None
    # argument raised AttributeError on .get().
    run_parameters = run_parameters or {}

    # commit_sha is the only required parameter
    commit_sha = run_parameters.get('commit_sha')
    if commit_sha is None:
      raise TriggerStartException('missing required parameter')

    config = self.config
    metadata = {
      'commit_sha': commit_sha,
      'git_url': config['build_source'],
    }

    prepared = PreparedBuild(self.trigger)
    prepared.tags = [commit_sha]
    prepared.name_from_sha(commit_sha)
    prepared.subdirectory = config['subdir']
    prepared.metadata = metadata
    prepared.is_manual = True

    return prepared

  def activate(self, standard_webhook_url):
    """ Activate the trigger: generate an SSH keypair and expose the public
        key and webhook URL as credentials for the user to install.

        Returns (config, {'private_key': ...}).
    """
    config = self.config
    public_key, private_key = generate_ssh_keypair()
    config['credentials'] = [
      {
        'name': 'SSH Public Key',
        'value': public_key,
      },
      {
        'name': 'Webhook Endpoint URL',
        'value': standard_webhook_url,
      },
    ]
    self.config = config
    return config, {'private_key': private_key}

  def deactivate(self):
    """ Deactivate the trigger by dropping its generated credentials. """
    config = self.config
    config.pop('credentials', None)
    self.config = config
    return config
|
2015-04-30 21:12:41 +00:00
|
|
|
|
|
|
|
|
|
|
|
class GitLabBuildTrigger(BuildTriggerHandler):
|
|
|
|
"""
|
|
|
|
BuildTrigger for GitLab.
|
|
|
|
"""
|
|
|
|
  @classmethod
  def service_name(cls):
    """ Identifier for this trigger service. """
    return 'gitlab'
|
|
|
|
|
|
|
|
def _get_authorized_client(self):
|
2015-05-02 17:31:54 +00:00
|
|
|
host = app.config.get('GITLAB_TRIGGER_CONFIG', {}).get('GITLAB_ENDPOINT', '')
|
2015-04-30 21:12:41 +00:00
|
|
|
auth_token = self.auth_token or 'invalid'
|
|
|
|
return gitlab.Gitlab(host, oauth_token=auth_token)
|
|
|
|
|
|
|
|
  def is_active(self):
    """ A GitLab trigger is active once activate() has stored its webhook id. """
    return 'hook_id' in self.config
|
|
|
|
|
|
|
|
  def activate(self, standard_webhook_url):
    """ Activate the trigger against GitLab: install a generated SSH deploy
        key on the repository and register a push webhook.

        Stores the deploy key id ('key_id') and webhook id ('hook_id') in the
        trigger config for later deactivation.

        Returns (config, {'private_key': ...}); raises
        TriggerActivationException when any GitLab call fails.
    """
    config = self.config
    new_build_source = config['build_source']
    gl_client = self._get_authorized_client()

    # Find the GitLab repository.
    repository = gl_client.getproject(new_build_source)
    if repository is False:
      msg = 'Unable to find GitLab repository for source: %s' % new_build_source
      raise TriggerActivationException(msg)

    # Add a deploy key to the repository.
    public_key, private_key = generate_ssh_keypair()
    config['credentials'] = [
      {
        'name': 'SSH Public Key',
        'value': public_key,
      },
    ]
    key = gl_client.adddeploykey(repository['id'], '%s Builder' % app.config['REGISTRY_TITLE'],
                                 public_key)
    if key is False:
      msg = 'Unable to add deploy key to repository: %s' % new_build_source
      raise TriggerActivationException(msg)
    config['key_id'] = key['id']

    # Add the webhook to the GitLab repository.
    hook = gl_client.addprojecthook(repository['id'], standard_webhook_url, push=True)
    if hook is False:
      # NOTE(review): the deploy key added above is not rolled back on this
      # failure path -- confirm whether that is intentional.
      msg = 'Unable to create webhook on repository: %s' % new_build_source
      raise TriggerActivationException(msg)

    config['hook_id'] = hook['id']
    self.config = config
    return config, {'private_key': private_key}
|
|
|
|
|
|
|
|
  def deactivate(self):
    """ Deactivate the trigger: remove the webhook and deploy key previously
        installed by activate(), then drop their ids from the config.

        Raises TriggerDeactivationException when any GitLab call fails.
    """
    config = self.config
    gl_client = self._get_authorized_client()

    # Find the GitLab repository.
    repository = gl_client.getproject(config['build_source'])
    if repository is False:
      msg = 'Unable to find GitLab repository for source: %s' % config['build_source']
      raise TriggerDeactivationException(msg)

    # Remove the webhook.
    success = gl_client.deleteprojecthook(repository['id'], config['hook_id'])
    if success is False:
      msg = 'Unable to remove hook: %s' % config['hook_id']
      raise TriggerDeactivationException(msg)
    config.pop('hook_id', None)

    # Remove the key
    success = gl_client.deletedeploykey(repository['id'], config['key_id'])
    if success is False:
      msg = 'Unable to remove deploy key: %s' % config['key_id']
      raise TriggerDeactivationException(msg)
    config.pop('key_id', None)

    self.config = config

    return config
|
|
|
|
|
|
|
|
def list_build_sources(self):
|
|
|
|
gl_client = self._get_authorized_client()
|
|
|
|
current_user = gl_client.currentuser()
|
2015-05-03 16:58:48 +00:00
|
|
|
if current_user is False:
|
|
|
|
raise RepositoryReadException('Unable to get current user')
|
2015-04-30 21:12:41 +00:00
|
|
|
|
|
|
|
repositories = gl_client.getprojects()
|
|
|
|
if repositories is False:
|
|
|
|
raise RepositoryReadException('Unable to list user repositories')
|
|
|
|
|
|
|
|
namespaces = {}
|
|
|
|
for repo in repositories:
|
|
|
|
owner = repo['namespace']['name']
|
|
|
|
if not owner in namespaces:
|
|
|
|
namespaces[owner] = {
|
2015-05-03 16:58:48 +00:00
|
|
|
'personal': owner == current_user['username'],
|
2015-04-30 21:12:41 +00:00
|
|
|
'repos': [],
|
|
|
|
'info': {
|
|
|
|
'name': owner,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
namespaces[owner]['repos'].append(repo['path_with_namespace'])
|
|
|
|
|
|
|
|
return namespaces.values()
|
|
|
|
|
|
|
|
def list_build_subdirs(self):
|
|
|
|
config = self.config
|
|
|
|
gl_client = self._get_authorized_client()
|
|
|
|
new_build_source = config['build_source']
|
|
|
|
|
|
|
|
repository = gl_client.getproject(new_build_source)
|
|
|
|
if repository is False:
|
|
|
|
msg = 'Unable to find GitLab repository for source: %s' % new_build_source
|
|
|
|
raise RepositoryReadException(msg)
|
|
|
|
|
|
|
|
repo_branches = gl_client.getbranches(repository['id'])
|
|
|
|
if repo_branches is False:
|
|
|
|
msg = 'Unable to find GitLab branches for source: %s' % new_build_source
|
|
|
|
raise RepositoryReadException(msg)
|
|
|
|
|
|
|
|
branches = [branch['name'] for branch in repo_branches]
|
|
|
|
branches = find_matching_branches(config, branches)
|
|
|
|
branches = branches or [repository['default_branch'] or 'master']
|
|
|
|
|
|
|
|
repo_tree = gl_client.getrepositorytree(repository['id'], ref_name=branches[0])
|
|
|
|
if repo_tree is False:
|
|
|
|
msg = 'Unable to find GitLab repository tree for source: %s' % new_build_source
|
|
|
|
raise RepositoryReadException(msg)
|
|
|
|
|
|
|
|
for node in repo_tree:
|
|
|
|
if node['name'] == 'Dockerfile':
|
|
|
|
return ['/']
|
|
|
|
|
|
|
|
return []
|
|
|
|
|
|
|
|
def dockerfile_url(self):
|
|
|
|
gl_client = self._get_authorized_client()
|
|
|
|
subdir = self.config.get('subdir', '')
|
|
|
|
path = subdir + '/Dockerfile' if subdir else 'Dockerfile'
|
|
|
|
|
|
|
|
repository = gl_client.getproject(self.config['build_source'])
|
|
|
|
if repository is False:
|
|
|
|
return None
|
2015-05-02 03:35:54 +00:00
|
|
|
|
|
|
|
branches = self.list_field_values('branch_name')
|
|
|
|
branches = find_matching_branches(self.config, branches)
|
|
|
|
if branches == []:
|
|
|
|
return None
|
|
|
|
branch_name = branches[0]
|
|
|
|
if repository['default_branch'] in branches:
|
|
|
|
branch_name = repository['default_branch']
|
2015-04-30 21:12:41 +00:00
|
|
|
|
|
|
|
return '%s/%s/blob/%s/%s' % (gl_client.host,
|
|
|
|
repository['path_with_namespace'],
|
2015-05-02 03:35:54 +00:00
|
|
|
branch_name,
|
2015-04-30 21:12:41 +00:00
|
|
|
path)
|
|
|
|
|
|
|
|
def load_dockerfile_contents(self):
|
|
|
|
gl_client = self._get_authorized_client()
|
|
|
|
subdir = self.config.get('subdir', '')
|
2015-05-03 16:58:48 +00:00
|
|
|
if subdir == '/':
|
|
|
|
subdir = ''
|
|
|
|
|
|
|
|
path = subdir + 'Dockerfile' if subdir else 'Dockerfile'
|
2015-04-30 21:12:41 +00:00
|
|
|
|
|
|
|
repository = gl_client.getproject(self.config['build_source'])
|
|
|
|
if repository is False:
|
|
|
|
return None
|
|
|
|
|
2015-05-02 03:35:54 +00:00
|
|
|
branches = self.list_field_values('branch_name')
|
|
|
|
branches = find_matching_branches(self.config, branches)
|
|
|
|
if branches == []:
|
|
|
|
return None
|
2015-05-03 16:58:48 +00:00
|
|
|
|
2015-05-02 03:35:54 +00:00
|
|
|
branch_name = branches[0]
|
|
|
|
if repository['default_branch'] in branches:
|
|
|
|
branch_name = repository['default_branch']
|
|
|
|
|
|
|
|
contents = gl_client.getrawfile(repository['id'], branch_name, path)
|
2015-04-30 21:12:41 +00:00
|
|
|
if contents is False:
|
|
|
|
return None
|
2015-05-03 16:58:48 +00:00
|
|
|
|
2015-04-30 21:12:41 +00:00
|
|
|
return contents
|
|
|
|
|
|
|
|
def list_field_values(self, field_name):
|
|
|
|
if field_name == 'refs':
|
|
|
|
branches = self.list_field_values('branch_name')
|
|
|
|
tags = self.list_field_values('tag_name')
|
|
|
|
|
|
|
|
return ([{'kind': 'branch', 'name': b} for b in branches] +
|
|
|
|
[{'kind': 'tag', 'name': t} for t in tags])
|
|
|
|
|
|
|
|
gl_client = self._get_authorized_client()
|
|
|
|
repo = gl_client.getproject(self.config['build_source'])
|
|
|
|
if repo is False:
|
|
|
|
return []
|
|
|
|
|
|
|
|
if field_name == 'tag_name':
|
2015-05-03 16:58:48 +00:00
|
|
|
tags = gl_client.getrepositorytags(repo['id'])
|
2015-04-30 21:12:41 +00:00
|
|
|
if tags is False:
|
|
|
|
return []
|
2015-05-02 03:35:54 +00:00
|
|
|
return [tag['name'] for tag in tags]
|
2015-04-30 21:12:41 +00:00
|
|
|
|
|
|
|
if field_name == 'branch_name':
|
|
|
|
branches = gl_client.getbranches(repo['id'])
|
|
|
|
if branches is False:
|
|
|
|
return []
|
2015-05-02 03:35:54 +00:00
|
|
|
return [branch['name'] for branch in branches]
|
2015-04-30 21:12:41 +00:00
|
|
|
|
|
|
|
return None
|
|
|
|
|
|
|
|
  def _prepare_build(self, commit, ref, is_manual):
    """ Assemble a PreparedBuild for the given GitLab commit dict and ref.

        Looks up the committer and author as GitLab users by email (omitting
        either entry from the metadata when no unique match is found).

        Raises TriggerStartException when the repository cannot be loaded.
    """
    config = self.config
    gl_client = self._get_authorized_client()

    repo = gl_client.getproject(self.config['build_source'])
    if repo is False:
      raise TriggerStartException('Could not find repository')

    # Unpacking asserts exactly one search result; zero or many matches raise
    # ValueError, in which case we simply omit the committer entry.
    try:
      [committer] = gl_client.getusers(search=commit['committer_email'])
    except ValueError:
      committer = None

    # Same single-match lookup for the author.
    try:
      [author] = gl_client.getusers(search=commit['author_email'])
    except ValueError:
      author = None

    metadata = {
      'commit_sha': commit['id'],
      'ref': ref,
      'default_branch': repo['default_branch'],
      'git_url': repo['ssh_url_to_repo'],
      'commit_info': {
        'url': gl_client.host + '/' + repo['path_with_namespace'] + '/commit/' + commit['id'],
        'message': commit['message'],
        'date': commit['committed_date'],
      },
    }

    if committer is not None:
      metadata['commit_info']['committer'] = {
        'username': committer['username'],
        'avatar_url': committer['avatar_url'],
        'url': gl_client.host + '/' + committer['username'],
      }

    if author is not None:
      metadata['commit_info']['author'] = {
        'username': author['username'],
        'avatar_url': author['avatar_url'],
        'url': gl_client.host + '/' + author['username']
      }

    prepared = PreparedBuild(self.trigger)
    prepared.tags_from_ref(ref, repo['default_branch'])
    prepared.name_from_sha(commit['id'])
    prepared.subdirectory = config['subdir']
    prepared.metadata = metadata
    prepared.is_manual = is_manual

    return prepared
|
|
|
|
|
|
|
|
def handle_trigger_request(self, request):
|
|
|
|
payload = request.get_json()
|
|
|
|
if not payload:
|
|
|
|
raise SkipRequestException()
|
|
|
|
|
|
|
|
logger.debug('GitLab trigger payload %s', payload)
|
|
|
|
|
2015-05-02 03:35:54 +00:00
|
|
|
if not payload.get('commits'):
|
2015-04-30 21:12:41 +00:00
|
|
|
raise SkipRequestException()
|
|
|
|
|
|
|
|
commit = payload['commits'][0]
|
|
|
|
commit_message = commit['message']
|
|
|
|
if should_skip_commit(commit_message):
|
|
|
|
raise SkipRequestException()
|
|
|
|
|
|
|
|
ref = payload['ref']
|
|
|
|
raise_if_skipped(self.config, ref)
|
|
|
|
|
|
|
|
return self._prepare_build(commit, ref, False)
|
|
|
|
|
|
|
|
def manual_start(self, run_parameters=None):
|
|
|
|
run_parameters = run_parameters or {}
|
|
|
|
gl_client = self._get_authorized_client()
|
|
|
|
|
|
|
|
repo = gl_client.getproject(self.config['build_source'])
|
|
|
|
if repo is False:
|
|
|
|
raise TriggerStartException('Could not find repository')
|
|
|
|
|
|
|
|
branch_name = run_parameters.get('branch_name') or repo['default_branch']
|
|
|
|
|
|
|
|
branches = gl_client.getbranches(repo['id'])
|
|
|
|
if branches is False:
|
|
|
|
raise TriggerStartException('Could not find branches')
|
|
|
|
|
|
|
|
commit = None
|
|
|
|
for branch in branches:
|
2015-05-02 03:35:54 +00:00
|
|
|
if branch['name'] == branch_name:
|
2015-04-30 21:12:41 +00:00
|
|
|
commit = branch['commit']
|
|
|
|
if commit is None:
|
|
|
|
raise TriggerStartException('Could not find commit')
|
|
|
|
|
|
|
|
ref = 'refs/heads/%s' % branch_name
|
|
|
|
|
|
|
|
return self._prepare_build(commit, ref, True)
|