Properly connect the GitHub push webhook with the build worker. Still need to resolve the archive format.
parent ed38bcdafc
commit f60f9eb62a

8 changed files with 57 additions and 34 deletions
config.py | 24
@@ -1,5 +1,6 @@
 import logging
 import logstash_formatter
+import requests
 
 from peewee import MySQLDatabase, SqliteDatabase
 from storage.s3 import S3Storage
@@ -18,6 +19,7 @@ class FlaskConfig(object):
   SECRET_KEY = '1cb18882-6d12-440d-a4cc-b7430fb5f884'
   JSONIFY_PRETTYPRINT_REGULAR = False
 
+
 class FlaskProdConfig(FlaskConfig):
   SESSION_COOKIE_SECURE = True
 
@@ -163,9 +165,22 @@ def logs_init_builder(level=logging.DEBUG,
   return init_logs
 
 
+def build_requests_session():
+  sess = requests.Session()
+  adapter = requests.adapters.HTTPAdapter(pool_connections=100,
+                                          pool_maxsize=100)
+  sess.mount('http://', adapter)
+  sess.mount('https://', adapter)
+  return sess
+
+
+class LargePoolHttpClient(object):
+  HTTPCLIENT = build_requests_session()
+
+
 class TestConfig(FlaskConfig, FakeStorage, EphemeralDB, FakeUserfiles,
                  FakeAnalytics, StripeTestConfig, RedisBuildLogs,
-                 UserEventConfig):
+                 UserEventConfig, LargePoolHttpClient):
   LOGGING_CONFIG = logs_init_builder(logging.WARN)
   POPULATE_DB_TEST_DATA = True
   TESTING = True
@@ -174,7 +189,7 @@ class TestConfig(FlaskConfig, FakeStorage, EphemeralDB, FakeUserfiles,
 class DebugConfig(FlaskConfig, MailConfig, LocalStorage, SQLiteDB,
                   StripeTestConfig, MixpanelTestConfig, GitHubTestConfig,
                   DigitalOceanConfig, BuildNodeConfig, S3Userfiles,
-                  UserEventConfig, TestBuildLogs):
+                  UserEventConfig, TestBuildLogs, LargePoolHttpClient):
   LOGGING_CONFIG = logs_init_builder(formatter=logging.Formatter())
   SEND_FILE_MAX_AGE_DEFAULT = 0
   POPULATE_DB_TEST_DATA = True
@@ -184,7 +199,7 @@ class LocalHostedConfig(FlaskConfig, MailConfig, S3Storage, RDSMySQL,
                         StripeLiveConfig, MixpanelTestConfig,
                         GitHubProdConfig, DigitalOceanConfig,
                         BuildNodeConfig, S3Userfiles, RedisBuildLogs,
-                        UserEventConfig):
+                        UserEventConfig, LargePoolHttpClient):
   LOGGING_CONFIG = logs_init_builder()
   SEND_FILE_MAX_AGE_DEFAULT = 0
 
@@ -192,7 +207,8 @@ class LocalHostedConfig(FlaskConfig, MailConfig, S3Storage, RDSMySQL,
 class ProductionConfig(FlaskProdConfig, MailConfig, S3Storage, RDSMySQL,
                        StripeLiveConfig, MixpanelProdConfig,
                        GitHubProdConfig, DigitalOceanConfig, BuildNodeConfig,
-                       S3Userfiles, RedisBuildLogs, UserEventConfig):
+                       S3Userfiles, RedisBuildLogs, UserEventConfig,
+                       LargePoolHttpClient):
 
   LOGGING_CONFIG = logs_init_builder()
   SEND_FILE_MAX_AGE_DEFAULT = 0
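The new LargePoolHttpClient mixin above parks a single shared requests.Session, backed by a larger HTTPAdapter connection pool, on the Flask config; the endpoint and trigger modules later read it back via app.config['HTTPCLIENT']. A minimal sketch of the same pattern in isolation, with illustrative names that are not part of this commit:

import requests


def build_requests_session(pool_size=100):
  # One Session reused across calls keeps up to `pool_size` pooled
  # connections per host instead of opening a new socket every request.
  sess = requests.Session()
  adapter = requests.adapters.HTTPAdapter(pool_connections=pool_size,
                                          pool_maxsize=pool_size)
  sess.mount('http://', adapter)
  sess.mount('https://', adapter)
  return sess


# Hypothetical usage mirroring `client = app.config['HTTPCLIENT']` below.
shared_client = build_requests_session()
response = shared_client.get('https://api.github.com/rate_limit')
print(response.status_code)

Reusing one pooled session avoids re-opening a TCP (and TLS) connection for every GitHub API call the webhook path makes.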
@@ -40,14 +40,15 @@ class UserRequestFiles(object):
                             encrypt_key=True)
     return (url, file_id)
 
-  def store_file(self, flask_file):
+  def store_file(self, file_like_obj, content_type):
     self._initialize_s3()
     file_id = str(uuid4())
     full_key = os.path.join(self._prefix, file_id)
     k = Key(self._bucket, full_key)
-    logger.debug('Setting s3 content type to: %s' % flask_file.content_type)
-    k.set_metadata('Content-Type', flask_file.content_type)
-    bytes_written = k.set_contents_from_file(flask_file, encrypt_key=True)
+    logger.debug('Setting s3 content type to: %s' % content_type)
+    k.set_metadata('Content-Type', content_type)
+    bytes_written = k.set_contents_from_file(file_like_obj, encrypt_key=True,
+                                             rewind=True)
 
     if bytes_written == 0:
       raise S3FileWriteException('Unable to write file to S3')
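The store_file signature change is what lets the trigger code below hand over any file-like object plus an explicit content type; rewind=True asks boto to seek back to the start before uploading, since a freshly written temporary file is positioned at its end. A rough stand-in illustrating the rewind concern, not the real S3-backed class:

import io


def store_file_like(file_like_obj, content_type):
  # Stand-in for UserRequestFiles.store_file: the caller may pass a buffer
  # whose position is at the end because it was just written, so rewind
  # before reading, which is what rewind=True does inside boto's
  # set_contents_from_file.
  file_like_obj.seek(0)
  data = file_like_obj.read()
  print('would upload %d bytes with Content-Type %s' % (len(data), content_type))


buf = io.BytesIO()
buf.write(b'PK\x03\x04 fake zipball bytes')
store_file_like(buf, 'application/zip')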
@@ -1,10 +1,10 @@
 import logging
 import stripe
-import requests
 import urlparse
 import json
 
-from flask import request, make_response, jsonify, abort, url_for, Blueprint, session
+from flask import (request, make_response, jsonify, abort, url_for, Blueprint,
+                   session)
 from flask.ext.login import current_user, logout_user
 from flask.ext.principal import identity_changed, AnonymousIdentity
 from functools import wraps
@@ -14,7 +14,8 @@ from data import model
 from data.queue import dockerfile_build_queue
 from data.plans import PLANS, get_plan
 from app import app
-from util.email import send_confirmation_email, send_recovery_email, send_change_email
+from util.email import (send_confirmation_email, send_recovery_email,
+                        send_change_email)
 from util.names import parse_repository_name, format_robot_username
 from util.gravatar import compute_hash
 
@@ -1,4 +1,3 @@
-import requests
 import logging
 
 from flask import request, redirect, url_for, Blueprint
@@ -12,9 +11,13 @@ from util.names import parse_repository_name
 
 logger = logging.getLogger(__name__)
 
+client = app.config['HTTPCLIENT']
+
+
 callback = Blueprint('callback', __name__)
 
 
+
 def exchange_github_code_for_token(code):
   code = request.args.get('code')
   payload = {
@@ -26,7 +29,7 @@ def exchange_github_code_for_token(code):
     'Accept': 'application/json'
   }
 
-  get_access_token = requests.post(app.config['GITHUB_TOKEN_URL'],
+  get_access_token = client.post(app.config['GITHUB_TOKEN_URL'],
                                    params=payload, headers=headers)
 
   token = get_access_token.json()['access_token']
@@ -37,7 +40,7 @@ def get_github_user(token):
   token_param = {
     'access_token': token,
   }
-  get_user = requests.get(app.config['GITHUB_USER_URL'], params=token_param)
+  get_user = client.get(app.config['GITHUB_USER_URL'], params=token_param)
 
   return get_user.json()
 
@@ -61,8 +64,8 @@ def github_oauth_callback():
   token_param = {
     'access_token': token,
   }
-  get_email = requests.get(app.config['GITHUB_USER_EMAILS'],
-                           params=token_param, headers=v3_media_type)
+  get_email = client.get(app.config['GITHUB_USER_EMAILS'], params=token_param,
+                         headers=v3_media_type)
 
   # We will accept any email, but we prefer the primary
   found_email = None
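Swapping module-level requests.get and requests.post for client.get and client.post is a drop-in change because requests.Session exposes the same verb methods with the same params/headers keyword arguments; only connection reuse differs. A quick illustration, assuming network access and using an arbitrary public endpoint:

import requests

session = requests.Session()

# Same call shape either way; the session variant reuses pooled connections
# across calls instead of opening a new one each time.
direct = requests.get('https://api.github.com/zen')
pooled = session.get('https://api.github.com/zen')
print(direct.status_code, pooled.status_code)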
@@ -1,19 +1,21 @@
-import json
-import requests
 import logging
+import io
 
 from github import Github
+from tempfile import SpooledTemporaryFile
 
 from app import app
 
 
 user_files = app.config['USERFILES']
+client = app.config['HTTPCLIENT']
 
 
 logger = logging.getLogger(__name__)
 
 
 ZIPBALL = 'application/zip'
+CHUNK_SIZE = 512 * 1024
 
 
 class BuildArchiveException(Exception):
@@ -35,7 +37,7 @@ class BuildTrigger(object):
     """
     raise NotImplementedError
 
-  def incoming_webhook(self, request, auth_token, config):
+  def handle_trigger_request(self, request, auth_token, config):
     """
     Transform the incoming request data into a set of actions.
     """
@@ -57,6 +59,10 @@ class BuildTrigger(object):
     raise InvalidServiceException('Unable to find service: %s' % service)
 
 
+def raise_unsupported():
+  raise io.UnsupportedOperation
+
+
 class GithubBuildTrigger(BuildTrigger):
   @staticmethod
   def _get_client(auth_token):
@@ -77,7 +83,7 @@ class GithubBuildTrigger(BuildTrigger):
 
     return repo_list
 
-  def incoming_webhook(self, request, auth_token, config):
+  def handle_trigger_request(self, request, auth_token, config):
     payload = request.get_json()
     logger.debug('Payload %s', payload)
     ref = payload['ref']
@@ -94,17 +100,13 @@ class GithubBuildTrigger(BuildTrigger):
     # Prepare the download and upload URLs
     branch_name = ref.split('/')[-1]
     archive_link = repo.get_archive_link('zipball', branch_name)
-    download_archive = requests.get(archive_link, stream=True)
+    download_archive = client.get(archive_link, stream=True)
 
-    upload_url, dockerfile_id = user_files.prepare_for_drop(ZIPBALL)
-    up_headers = {'Content-Type': ZIPBALL}
-    upload_archive = requests.put(upload_url, headers=up_headers,
-                                  data=download_archive.raw)
+    with SpooledTemporaryFile(CHUNK_SIZE) as zipball:
+      for chunk in download_archive.iter_content(CHUNK_SIZE):
+        zipball.write(chunk)
 
-    if upload_archive.status_code / 100 != 2:
-      logger.debug('Failed to upload archive to s3')
-      raise BuildArchiveException('Unable to copy archie to s3 for ref: %s' %
-                                  ref)
+      dockerfile_id = user_files.store_file(zipball, ZIPBALL)
 
     logger.debug('Successfully prepared job')
 
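The rewritten archive path streams the GitHub zipball through a SpooledTemporaryFile: chunks stay in memory up to CHUNK_SIZE and spill to disk past that, and the buffered file is then handed to user_files.store_file instead of PUTting download_archive.raw against a pre-signed URL. A self-contained sketch of that flow, with a placeholder URL and a byte-counting sink standing in for store_file:

import requests
from tempfile import SpooledTemporaryFile

CHUNK_SIZE = 512 * 1024  # spill to disk past 512 KiB, matching the diff


def fetch_archive(session, archive_url, sink):
  # Stream the archive without holding it all in memory, then rewind the
  # spooled buffer so the sink can read it from the beginning (the real
  # store_file relies on rewind=True inside boto instead of an explicit seek).
  download = session.get(archive_url, stream=True)
  with SpooledTemporaryFile(CHUNK_SIZE) as zipball:
    for chunk in download.iter_content(CHUNK_SIZE):
      zipball.write(chunk)
    zipball.seek(0)
    return sink(zipball)


client = requests.Session()
size = fetch_archive(client, 'https://example.com/some/archive.zip',
                     lambda f: len(f.read()))
print('buffered %d bytes' % size)

This also sidesteps the removed status-code check: an upload failure now surfaces through store_file, which raises S3FileWriteException on a zero-byte write, rather than through inspecting the PUT response.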
@@ -1,5 +1,4 @@
 import logging
-import requests
 import stripe
 
 from flask import (abort, redirect, request, url_for, make_response, Response,
@@ -61,7 +61,8 @@ def github_push_webhook(namespace, repository, trigger_uuid):
   handler = BuildTrigger.get_trigger_for_service(trigger.service.name)
 
   logger.debug('Passing webhook request to handler %s', handler)
-  df_id, tag, name = handler.incoming_webhook(request, trigger.auth_token,
-                                              trigger.config)
+  df_id, tag, name = handler.handle_trigger_request(request,
+                                                    trigger.auth_token,
+                                                    trigger.config)
 
   host = urlparse.urlparse(request.url).netloc
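The incoming_webhook to handle_trigger_request rename is applied consistently across the BuildTrigger base class, the GithubBuildTrigger override, and this call site. A toy sketch of the dispatch shape, with the service registry reduced to a dict rather than the real get_trigger_for_service lookup:

class BuildTrigger(object):
  def handle_trigger_request(self, request, auth_token, config):
    """Transform the incoming request data into a set of actions."""
    raise NotImplementedError


class GithubBuildTrigger(BuildTrigger):
  def handle_trigger_request(self, request, auth_token, config):
    # The real implementation parses the push payload and stages the build
    # archive; only the shape of the dispatch is shown here.
    return ('fake-dockerfile-id', 'latest', 'example/repo')


SERVICES = {'github': GithubBuildTrigger}

handler = SERVICES['github']()
df_id, tag, name = handler.handle_trigger_request(request=None,
                                                  auth_token='token',
                                                  config={})
print(df_id, tag, name)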
@@ -30,7 +30,7 @@ class FakeUserfiles(object):
   def prepare_for_drop(self, mime_type):
     return ('http://fake/url', uuid4())
 
-  def store_file(self, flask_file):
+  def store_file(self, file_like_obj, content_type):
     raise NotImplementedError()
 
   def get_file_url(self, file_id, expires_in=300):