Split out callbacks into their own blueprint. Add build trigger DB information and connect it with some APIs. Stub out the UI to allow for generation of triggers. Split out the triggers into a plugin-ish architecture for easily adding new triggers.

This commit is contained in:
jakedt 2014-02-18 15:50:15 -05:00
parent fc4983ed8b
commit b5d4919364
13 changed files with 500 additions and 170 deletions

111
endpoints/trigger.py Normal file
View file

@ -0,0 +1,111 @@
import json
import requests
import logging
from github import Github
from app import app
# Storage helper used to stage build archives for upload; presumably
# S3-backed given the log messages below — confirm against app config.
user_files = app.config['USERFILES']
logger = logging.getLogger(__name__)
# MIME type used when uploading repository zipball archives.
ZIPBALL = 'application/zip'
class BuildArchiveException(Exception):
    """Raised when a build archive cannot be copied to remote storage."""
class InvalidServiceException(Exception):
    """Raised when no trigger implementation exists for a requested service."""
class BuildTrigger(object):
    """Abstract base class for build-trigger services.

    Each concrete subclass implements one hosting service and is located
    at runtime via ``get_trigger_for_service``.
    """

    def __init__(self):
        pass

    def list_repositories(self, auth_token):
        """
        Take the auth information for the specific trigger type and load the
        list of repositories.
        """
        raise NotImplementedError

    def incoming_webhook(self, request, auth_token, config):
        """
        Transform the incoming request data into a set of actions.
        """
        raise NotImplementedError

    @classmethod
    def service_name(cls):
        """
        Particular service implemented by subclasses.
        """
        raise NotImplementedError

    @classmethod
    def get_trigger_for_service(cls, service):
        """Instantiate the subclass whose service_name matches ``service``."""
        match = next((sub for sub in cls.__subclasses__()
                      if sub.service_name() == service), None)
        if match is None:
            raise InvalidServiceException('Unable to find service: %s' % service)
        return match()
class GithubBuildTrigger(BuildTrigger):
    """BuildTrigger implementation backed by the GitHub API."""

    @staticmethod
    def _get_client(auth_token):
        """Return a Github API client authenticated with ``auth_token``."""
        return Github(auth_token, client_id=app.config['GITHUB_CLIENT_ID'],
                      client_secret=app.config['GITHUB_CLIENT_SECRET'])

    @classmethod
    def service_name(cls):
        return 'github'

    def list_repositories(self, auth_token):
        """Return full names of all repositories visible to the token's user,
        including repositories owned by the user's organizations.
        """
        gh_client = self._get_client(auth_token)
        usr = gh_client.get_user()
        repo_list = [repo.full_name for repo in usr.get_repos()]
        for org in usr.get_orgs():
            repo_list.extend(repo.full_name for repo in org.get_repos())
        return repo_list

    def incoming_webhook(self, request, auth_token, config):
        """Handle an incoming GitHub push webhook.

        Streams a zipball of the pushed branch from GitHub into user-file
        storage so a build can be started from it.

        Returns a ``(dockerfile_id, branch_name, commit_id)`` tuple.
        Raises ``BuildArchiveException`` if the archive copy fails.
        """
        payload = request.get_json()
        logger.debug('Payload %s', payload)
        ref = payload['ref']
        # Abbreviated 7-character commit sha, matching git's short form.
        commit_id = payload['head_commit']['id'][0:7]

        gh_client = self._get_client(auth_token)
        repo_full_name = '%s/%s' % (payload['repository']['owner']['name'],
                                    payload['repository']['name'])
        repo = gh_client.get_repo(repo_full_name)
        logger.debug('Github repo: %s', repo)

        # Prepare the download and upload URLs
        branch_name = ref.split('/')[-1]
        archive_link = repo.get_archive_link('zipball', branch_name)
        download_archive = requests.get(archive_link, stream=True)

        upload_url, dockerfile_id = user_files.prepare_for_drop(ZIPBALL)
        up_headers = {'Content-Type': ZIPBALL}
        # Stream the zipball straight from GitHub into storage without
        # buffering it in memory.
        upload_archive = requests.put(upload_url, headers=up_headers,
                                      data=download_archive.raw)

        # Any 2xx status is success. Floor division is required here: under
        # Python 3, `201 / 100 != 2` is True (true division yields 2.01),
        # which would mis-classify every successful upload as a failure.
        if upload_archive.status_code // 100 != 2:
            logger.debug('Failed to upload archive to s3')
            # Fixed typo in the error message ("archie" -> "archive").
            raise BuildArchiveException('Unable to copy archive to s3 for ref: %s' %
                                        ref)

        logger.debug('Successfully prepared job')
        return dockerfile_id, branch_name, commit_id