Implement new create and manage trigger UI

Implements the new trigger setup user interface, which is now a linear workflow on its own page rather than a tiny modal dialog.

Fixes #1187
Joseph Schorr 2016-09-27 16:52:34 +02:00
parent 21b09a7451
commit 8e863b8cf5
47 changed files with 1835 additions and 1068 deletions


@@ -1,7 +1,10 @@
from abc import ABCMeta, abstractmethod
from jsonschema import validate
from six import add_metaclass
from endpoints.building import PreparedBuild
from data import model
from buildtrigger.triggerutil import get_trigger_config, InvalidServiceException
from jsonschema import validate
METADATA_SCHEMA = {
'type': 'object',
@@ -18,7 +21,7 @@ METADATA_SCHEMA = {
'ref': {
'type': 'string',
'description': 'git reference for a git commit',
'pattern': '^refs\/(heads|tags|remotes)\/(.+)$',
'pattern': r'^refs\/(heads|tags|remotes)\/(.+)$',
},
'default_branch': {
'type': 'string',
@@ -86,6 +89,7 @@ METADATA_SCHEMA = {
}
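The 'ref' pattern is now a raw string, so its backslashes reach the regex engine intact instead of relying on Python's lenient handling of unknown escape sequences. A quick standalone check of the same pattern, with made-up refs, just to show what it accepts and rejects:

import re

# Same pattern as METADATA_SCHEMA's 'ref' field, as a raw string.
REF_PATTERN = r'^refs\/(heads|tags|remotes)\/(.+)$'

# Illustrative refs only; not taken from a real payload.
assert re.match(REF_PATTERN, 'refs/heads/master') is not None
assert re.match(REF_PATTERN, 'refs/tags/v1.0.0') is not None
assert re.match(REF_PATTERN, 'refs/pull/42/head') is None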
@add_metaclass(ABCMeta)
class BuildTriggerHandler(object):
def __init__(self, trigger, override_config=None):
self.trigger = trigger
@@ -96,72 +100,90 @@ class BuildTriggerHandler(object):
""" Returns the auth token for the trigger. """
return self.trigger.auth_token
@abstractmethod
def load_dockerfile_contents(self):
"""
Loads the Dockerfile found for the trigger's config and returns its contents, or None if none
could be found/loaded.
"""
raise NotImplementedError
pass
def list_build_sources(self):
@abstractmethod
def list_build_source_namespaces(self):
"""
Take the auth information for the specific trigger type and load the
list of build sources(repositories).
list of namespaces that can contain build sources.
"""
raise NotImplementedError
pass
@abstractmethod
def list_build_sources_for_namespace(self, namespace):
"""
Take the auth information for the specific trigger type and load the
list of repositories under the given namespace.
"""
pass
@abstractmethod
def list_build_subdirs(self):
"""
Take the auth information and the specified config so far and list all of
the possible subdirs containing dockerfiles.
"""
raise NotImplementedError
pass
def handle_trigger_request(self):
@abstractmethod
def handle_trigger_request(self, request):
"""
Transform the incoming request data into a set of actions. Returns a PreparedBuild.
"""
raise NotImplementedError
pass
@abstractmethod
def is_active(self):
"""
Returns True if the current build trigger is active. Inactive means further
setup is needed.
"""
raise NotImplementedError
pass
@abstractmethod
def activate(self, standard_webhook_url):
"""
Activates the trigger for the service, with the given new configuration.
Returns new public and private config that should be stored if successful.
"""
raise NotImplementedError
pass
@abstractmethod
def deactivate(self):
"""
Deactivates the trigger for the service, removing any hooks installed in
the remote service. Returns the new config that should be stored if this
trigger is going to be re-activated.
"""
raise NotImplementedError
pass
@abstractmethod
def manual_start(self, run_parameters=None):
"""
Manually creates a repository build for this trigger. Returns a PreparedBuild.
"""
raise NotImplementedError
pass
@abstractmethod
def list_field_values(self, field_name, limit=None):
"""
Lists all values for the given custom trigger field. For example, a trigger might have a
field named "branches", and this method would return all branches.
"""
raise NotImplementedError
pass
@abstractmethod
def get_repository_url(self):
""" Returns the URL of the current trigger's repository. Note that this operation
can be called in a loop, so it should be as fast as possible. """
raise NotImplementedError
pass
@classmethod
def service_name(cls):
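The old single-shot list_build_sources call is split into list_build_source_namespaces plus list_build_sources_for_namespace, so the new setup page can show namespaces first and fetch repositories per namespace on demand. A minimal sketch of a concrete handler against the new abstract interface; the service, its data, and the import path are illustrative assumptions, and the remaining abstract methods are omitted for brevity:

# Module path is a guess; the base class is the one defined above.
from buildtrigger.basehandler import BuildTriggerHandler

class ExampleBuildTrigger(BuildTriggerHandler):
  """ Handler for a fictional git hosting service, for illustration only. """

  @classmethod
  def service_name(cls):
    return 'example'

  def list_build_source_namespaces(self):
    # One entry per owner, in the shape the new UI consumes.
    return [{
      'id': 'someorg',
      'title': 'Some Org',
      'avatar_url': 'https://example.invalid/avatar.png',
      'personal': False,
      'score': 3,
    }]

  def list_build_sources_for_namespace(self, namespace):
    # Repositories for a single namespace, loaded lazily by the UI.
    return [{
      'name': 'app',
      'full_name': '%s/app' % namespace,
      'description': 'Sample repository',
      'last_updated': 0,
      'url': 'https://example.invalid/%s/app' % namespace,
      'has_admin_permissions': True,
      'private': False,
    }]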


@@ -1,6 +1,10 @@
import logging
import re
from calendar import timegm
import dateutil.parser
from jsonschema import validate
from buildtrigger.triggerutil import (RepositoryReadException, TriggerActivationException,
TriggerDeactivationException, TriggerStartException,
@@ -217,7 +221,8 @@ def get_transformed_webhook_payload(bb_payload, default_branch=None):
try:
validate(bb_payload, BITBUCKET_WEBHOOK_PAYLOAD_SCHEMA)
except Exception as exc:
logger.exception('Exception when validating Bitbucket webhook payload: %s from %s', exc.message, bb_payload)
logger.exception('Exception when validating Bitbucket webhook payload: %s from %s', exc.message,
bb_payload)
raise InvalidPayloadException(exc.message)
payload = JSONPathDict(bb_payload)
@@ -225,8 +230,8 @@ def get_transformed_webhook_payload(bb_payload, default_branch=None):
if not change:
return None
ref = ('refs/heads/' + change['name'] if change['type'] == 'branch'
else 'refs/tags/' + change['name'])
is_branch = change['type'] == 'branch'
ref = 'refs/heads/' + change['name'] if is_branch else 'refs/tags/' + change['name']
repository_name = payload['repository.full_name']
target = change['target']
@@ -390,7 +395,7 @@ class BitbucketBuildTrigger(BuildTriggerHandler):
return config
def list_build_sources(self):
def list_build_source_namespaces(self):
bitbucket_client = self._get_authorized_client()
(result, data, err_msg) = bitbucket_client.get_visible_repositories()
if not result:
@@ -398,22 +403,40 @@ class BitbucketBuildTrigger(BuildTriggerHandler):
namespaces = {}
for repo in data:
if not repo['scm'] == 'git':
continue
owner = repo['owner']
if not owner in namespaces:
if owner in namespaces:
namespaces[owner]['score'] = namespaces[owner]['score'] + 1
else:
namespaces[owner] = {
'personal': owner == self.config.get('username'),
'repos': [],
'info': {
'name': owner
}
'id': owner,
'title': owner,
'avatar_url': repo['logo'],
'score': 0,
}
namespaces[owner]['repos'].append(owner + '/' + repo['slug'])
return list(namespaces.values())
return namespaces.values()
def list_build_sources_for_namespace(self, namespace):
def repo_view(repo):
last_modified = dateutil.parser.parse(repo['utc_last_updated'])
return {
'name': repo['slug'],
'full_name': '%s/%s' % (repo['owner'], repo['slug']),
'description': repo['description'] or '',
'last_updated': timegm(last_modified.utctimetuple()),
'url': 'https://bitbucket.org/%s/%s' % (repo['owner'], repo['slug']),
'has_admin_permissions': repo['read_only'] is False,
'private': repo['is_private'],
}
bitbucket_client = self._get_authorized_client()
(result, data, err_msg) = bitbucket_client.get_visible_repositories()
if not result:
raise RepositoryReadException('Could not read repository list: ' + err_msg)
return [repo_view(repo) for repo in data if repo['owner'] == namespace]
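The repo_view helper above converts Bitbucket's utc_last_updated timestamp to epoch seconds via dateutil and calendar.timegm. The same conversion in isolation, with a made-up timestamp standing in for a real payload value:

from calendar import timegm
import dateutil.parser

# Hypothetical 'utc_last_updated' value; real payloads supply this string.
last_modified = dateutil.parser.parse('2016-09-27 14:52:34+00:00')
last_updated = timegm(last_modified.utctimetuple())  # seconds since the epoch, in UTC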
def list_build_subdirs(self):
config = self.config
@@ -431,7 +454,7 @@ class BitbucketBuildTrigger(BuildTriggerHandler):
files = set([f['path'] for f in data['files']])
if 'Dockerfile' in files:
return ['/']
return ['']
return []


@@ -212,3 +212,18 @@ class CustomBuildTrigger(BuildTriggerHandler):
def get_repository_url(self):
return None
def list_build_source_namespaces(self):
raise NotImplementedError
def list_build_sources_for_namespace(self, namespace):
raise NotImplementedError
def list_build_subdirs(self):
raise NotImplementedError
def list_field_values(self, field_name, limit=None):
raise NotImplementedError
def load_dockerfile_contents(self):
raise NotImplementedError


@@ -2,14 +2,15 @@ import logging
import os.path
import base64
from calendar import timegm
from functools import wraps
from ssl import SSLError
from github import (Github, UnknownObjectException, GithubException,
BadCredentialsException as GitHubBadCredentialsException)
from jsonschema import validate
from app import app, github_trigger
from buildtrigger.triggerutil import (RepositoryReadException, TriggerActivationException,
TriggerDeactivationException, TriggerStartException,
EmptyRepositoryException, ValidationRequestException,
@@ -273,55 +274,57 @@ class GithubBuildTrigger(BuildTriggerHandler):
return config
@_catch_ssl_errors
def list_build_sources(self):
def list_build_source_namespaces(self):
gh_client = self._get_client()
usr = gh_client.get_user()
try:
repos = usr.get_repos()
except GithubException:
raise RepositoryReadException('Unable to list user repositories')
# Build the full set of namespaces for the user, starting with their own.
namespaces = {}
has_non_personal = False
namespaces[usr.login] = {
'personal': True,
'id': usr.login,
'title': usr.name or usr.login,
'avatar_url': usr.avatar_url,
'score': usr.plan.private_repos if usr.plan else 0,
}
for repository in repos:
namespace = repository.owner.login
if not namespace in namespaces:
is_personal_repo = namespace == usr.login
namespaces[namespace] = {
'personal': is_personal_repo,
'repos': [],
'info': {
'name': namespace,
'avatar_url': repository.owner.avatar_url
}
}
for org in usr.get_orgs():
namespaces[org.name] = {
'personal': False,
'id': org.login,
'title': org.name or org.login,
'avatar_url': org.avatar_url,
'url': org.html_url,
'score': org.plan.private_repos if org.plan else 0,
}
if not is_personal_repo:
has_non_personal = True
return list(namespaces.values())
namespaces[namespace]['repos'].append(repository.full_name)
@_catch_ssl_errors
def list_build_sources_for_namespace(self, namespace):
def repo_view(repo):
return {
'name': repo.name,
'full_name': repo.full_name,
'description': repo.description or '',
'last_updated': timegm(repo.pushed_at.utctimetuple()),
'url': repo.html_url,
'has_admin_permissions': repo.permissions.admin,
'private': repo.private,
}
# In older versions of GitHub Enterprise, the get_repos call above does not
# return any non-personal repositories. In that case, we need to lookup the
# repositories manually.
# TODO: Remove this once we no longer support GHE versions <= 2.1
if not has_non_personal:
for org in usr.get_orgs():
repo_list = [repo.full_name for repo in org.get_repos(type='member')]
namespaces[org.name] = {
'personal': False,
'repos': repo_list,
'info': {
'name': org.name or org.login,
'avatar_url': org.avatar_url
}
}
gh_client = self._get_client()
usr = gh_client.get_user()
if namespace == usr.login:
return [repo_view(repo) for repo in usr.get_repos() if repo.owner.login == namespace]
org = gh_client.get_organization(namespace)
if org is None:
return []
return [repo_view(repo) for repo in org.get_repos(type='member')]
entries = list(namespaces.values())
entries.sort(key=lambda e: e['info']['name'])
return entries
@_catch_ssl_errors
def list_build_subdirs(self):
@@ -357,19 +360,17 @@ class GithubBuildTrigger(BuildTriggerHandler):
source = config['build_source']
path = self.get_dockerfile_path()
try:
repo = gh_client.get_repo(source)
file_info = repo.get_file_contents(path)
if file_info is None:
return None
content = file_info.content
if file_info.encoding == 'base64':
content = base64.b64decode(content)
return content
except GithubException as ghe:
message = ghe.data.get('message', 'Unable to read Dockerfile: %s' % source)
raise RepositoryReadException(message)
return None
if file_info is None:
return None
content = file_info.content
if file_info.encoding == 'base64':
content = base64.b64decode(content)
return content
@_catch_ssl_errors
def list_field_values(self, field_name, limit=None):
@@ -535,7 +536,7 @@ class GithubBuildTrigger(BuildTriggerHandler):
logger.debug('GitHub trigger payload %s', payload)
metadata = get_transformed_webhook_payload(payload, default_branch=default_branch,
lookup_user=lookup_user)
lookup_user=lookup_user)
prepared = self.prepare_build(metadata)
# Check if we should skip this build.
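The new GitHub namespace entries earlier in this file tolerate missing data: 'title' falls back from the display name to the login, and 'score' falls back to 0 when the account has no plan attached. A tiny self-contained sketch of those fallbacks; the fake user object is invented for illustration:

class _FakeUser(object):
  # Stand-in for the PyGithub user object; values are made up.
  login = 'someuser'
  name = None        # no display name set
  plan = None        # e.g. an account with no plan information

usr = _FakeUser()
title = usr.name or usr.login                        # -> 'someuser'
score = usr.plan.private_repos if usr.plan else 0    # -> 0, no AttributeError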


@@ -1,6 +1,10 @@
import logging
from calendar import timegm
from functools import wraps
import dateutil.parser
from app import app, gitlab_trigger
from jsonschema import validate
@@ -70,6 +74,17 @@ GITLAB_WEBHOOK_PAYLOAD_SCHEMA = {
'required': ['ref', 'checkout_sha', 'repository'],
}
_ACCESS_LEVEL_MAP = {
50: ("owner", True),
40: ("master", True),
30: ("developer", False),
20: ("reporter", False),
10: ("guest", False),
}
_PER_PAGE_COUNT = 20
def _catch_timeouts(func):
@wraps(func)
def wrapper(*args, **kwargs):
@@ -82,6 +97,27 @@ def _catch_timeouts(func):
return wrapper
def _paginated_iterator(func, exc):
""" Returns an iterator over invocations of the given function, automatically handling
pagination.
"""
page = 0
while True:
result = func(page=page, per_page=_PER_PAGE_COUNT)
if result is False:
raise exc
counter = 0
for item in result:
yield item
counter = counter + 1
if counter < _PER_PAGE_COUNT:
break
page = page + 1
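_paginated_iterator works with any callable that accepts page and per_page keyword arguments, yielding items until a page comes back shorter than _PER_PAGE_COUNT. A usage sketch with a stubbed client call; the stub, its data, and the exception class are illustrative stand-ins for gl_client.getprojects and RepositoryReadException:

class StubReadException(Exception):
  """ Stand-in exception for this sketch. """

def fake_getprojects(page=0, per_page=20):
  # Stub for gl_client.getprojects: 45 fake projects served in pages.
  projects = [{'id': index} for index in range(45)]
  return projects[page * per_page:(page + 1) * per_page]

# Yields every project exactly once, then stops after the first short page.
fetched = list(_paginated_iterator(fake_getprojects, StubReadException))
assert len(fetched) == 45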
def get_transformed_webhook_payload(gl_payload, default_branch=None, lookup_user=None,
lookup_commit=None):
""" Returns the Gitlab webhook JSON payload transformed into our own payload
@@ -223,35 +259,57 @@ class GitLabBuildTrigger(BuildTriggerHandler):
config.pop('key_id', None)
self.config = config
return config
@_catch_timeouts
def list_build_sources(self):
def list_build_source_namespaces(self):
gl_client = self._get_authorized_client()
current_user = gl_client.currentuser()
if current_user is False:
raise RepositoryReadException('Unable to get current user')
repositories = gl_client.getprojects()
if repositories is False:
raise RepositoryReadException('Unable to list user repositories')
namespaces = {}
repositories = _paginated_iterator(gl_client.getprojects, RepositoryReadException)
for repo in repositories:
owner = repo['namespace']['name']
if not owner in namespaces:
namespaces[owner] = {
namespace = repo['namespace']
namespace_id = namespace['id']
if namespace_id in namespaces:
namespaces[namespace_id]['score'] = namespaces[namespace_id]['score'] + 1
else:
owner = repo['namespace']['name']
namespaces[namespace_id] = {
'personal': owner == current_user['username'],
'repos': [],
'info': {
'name': owner,
}
'id': namespace['path'],
'title': namespace['name'],
'avatar_url': repo['owner']['avatar_url'],
'score': 0,
}
namespaces[owner]['repos'].append(repo['path_with_namespace'])
return list(namespaces.values())
return namespaces.values()
@_catch_timeouts
def list_build_sources_for_namespace(self, namespace):
def repo_view(repo):
last_modified = dateutil.parser.parse(repo['last_activity_at'])
has_admin_permission = False
if repo.get('permissions'):
access_level = repo['permissions']['project_access']['access_level']
has_admin_permission = _ACCESS_LEVEL_MAP.get(access_level, ("", False))[1]
return {
'name': repo['path'],
'full_name': repo['path_with_namespace'],
'description': repo['description'] or '',
'last_updated': timegm(last_modified.utctimetuple()),
'url': repo['web_url'],
'has_admin_permissions': has_admin_permission,
'private': repo['public'] is False,
}
gl_client = self._get_authorized_client()
repositories = _paginated_iterator(gl_client.getprojects, RepositoryReadException)
return [repo_view(repo) for repo in repositories if repo['namespace']['path'] == namespace]
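has_admin_permissions is derived from _ACCESS_LEVEL_MAP, whose default tuple means an unrecognized access level never grants admin. A short illustrative lookup (40 is "master" in the map above; 35 is a deliberately unknown level):

assert _ACCESS_LEVEL_MAP.get(40, ("", False))[1] is True
assert _ACCESS_LEVEL_MAP.get(35, ("", False))[1] is False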
@_catch_timeouts
def list_build_subdirs(self):
@@ -280,7 +338,7 @@ class GitLabBuildTrigger(BuildTriggerHandler):
for node in repo_tree:
if node['name'] == 'Dockerfile':
return ['/']
return ['']
return []