Merge remote-tracking branch 'upstream/v2-phase4' into python-registry-v2

commit e7a6176594
105 changed files with 4439 additions and 2074 deletions

@@ -6,7 +6,7 @@ ENV DEBIAN_FRONTEND noninteractive
 ENV HOME /root
 
 # Install the dependencies.
-RUN apt-get update # 24JUN2015
+RUN apt-get update # 22OCT2015
 
 # New ubuntu packages should be added as their own apt-get install lines below the existing install commands
 RUN apt-get install -y git python-virtualenv python-dev libjpeg8 libjpeg62 libjpeg62-dev libevent-2.0.5 libevent-dev gdebi-core g++ libmagic1 phantomjs nodejs npm libldap-2.4-2 libldap2-dev libsasl2-modules libsasl2-dev libpq5 libpq-dev libfreetype6-dev libffi-dev libgpgme11 libgpgme11-dev

@@ -34,6 +34,7 @@ To build and run a docker container, pass one argument to local-docker.sh:
 - `buildman`: run the buildmanager
 - `notifications`: run the notification worker
 - `test`: run the unit tests
+- `initdb`: clear and initialize the test database
 
 For example:
 

app.py (7 changes)

@@ -151,12 +151,11 @@ dex_login = DexOAuthConfig(app.config, 'DEX_LOGIN_CONFIG')
 
 oauth_apps = [github_login, github_trigger, gitlab_trigger, google_login, dex_login]
 
-image_diff_queue = WorkQueue(app.config['DIFFS_QUEUE_NAME'], tf, metric_queue=metric_queue)
-image_replication_queue = WorkQueue(app.config['REPLICATION_QUEUE_NAME'], tf, metric_queue=metric_queue)
+image_diff_queue = WorkQueue(app.config['DIFFS_QUEUE_NAME'], tf)
+image_replication_queue = WorkQueue(app.config['REPLICATION_QUEUE_NAME'], tf)
 dockerfile_build_queue = WorkQueue(app.config['DOCKERFILE_BUILD_QUEUE_NAME'], tf,
-                                   metric_queue=metric_queue,
                                    reporter=MetricQueueReporter(metric_queue))
-notification_queue = WorkQueue(app.config['NOTIFICATION_QUEUE_NAME'], tf, metric_queue=metric_queue)
+notification_queue = WorkQueue(app.config['NOTIFICATION_QUEUE_NAME'], tf)
 
 # Check for a key in config. If none found, generate a new signing key for Docker V2 manifests.
 _v2_key_path = os.path.join(OVERRIDE_CONFIG_DIRECTORY, DOCKER_V2_SIGNINGKEY_FILENAME)

@@ -2,6 +2,7 @@ import logging
 import os
 import features
 import time
+import socket
 
 from app import app, userfiles as user_files, build_logs, dockerfile_build_queue
 

@@ -10,6 +11,8 @@ from buildman.manager.ephemeral import EphemeralBuilderManager
 from buildman.server import BuilderServer
 
 from trollius import SSLContext
+from raven.handlers.logging import SentryHandler
+from raven.conf import setup_logging
 
 logger = logging.getLogger(__name__)
 

@@ -77,4 +80,10 @@ if __name__ == '__main__':
   logging.basicConfig(level=logging.DEBUG, format=LOG_FORMAT)
   logging.getLogger('peewee').setLevel(logging.WARN)
   logging.getLogger('boto').setLevel(logging.WARN)
+
+  if app.config.get('EXCEPTION_LOG_TYPE', 'FakeSentry') == 'Sentry':
+    buildman_name = '%s:buildman' % socket.gethostname()
+    setup_logging(SentryHandler(app.config.get('SENTRY_DSN', ''), name=buildman_name,
+                                level=logging.ERROR))
+
   run_build_manager()

@@ -128,10 +128,9 @@ class BuildJob(object):
         return False
 
       full_command = '["/bin/sh", "-c", "%s"]' % cache_commands[step]
-      logger.debug('Checking step #%s: %s, %s == %s', step, image.id,
-                   image.storage.command, full_command)
+      logger.debug('Checking step #%s: %s, %s == %s', step, image.id, image.command, full_command)
 
-      return image.storage.command == full_command
+      return image.command == full_command
 
     path = tree.find_longest_path(base_image.id, checker)
     if not path:

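The checker above compares an image's recorded command against the serialized form of the next Dockerfile step. A minimal sketch of that comparison with a made-up step (the command string is illustrative, not from this commit):

# Docker records a RUN step as a /bin/sh -c invocation, so the candidate
# step is serialized the same way before comparing against image.command.
cache_commands = ['pip install -r requirements.txt']   # hypothetical steps
step = 0
full_command = '["/bin/sh", "-c", "%s"]' % cache_commands[step]
# full_command == '["/bin/sh", "-c", "pip install -r requirements.txt"]'
# A cached image is reusable when image.command == full_command.
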
@@ -1,6 +1,11 @@
 from data.database import BUILD_PHASE
 from data import model
+from redis import RedisError
 
 import datetime
+import logging
+
+logger = logging.getLogger(__name__)
+
 class StatusHandler(object):
   """ Context wrapper for writing status to build logs. """

@@ -24,7 +29,11 @@ class StatusHandler(object):
   def _append_log_message(self, log_message, log_type=None, log_data=None):
     log_data = log_data or {}
     log_data['datetime'] = str(datetime.datetime.now())
-    self._build_logs.append_log_message(self._uuid, log_message, log_type, log_data)
+    try:
+      self._build_logs.append_log_message(self._uuid, log_message, log_type, log_data)
+    except RedisError:
+      logger.exception('Could not save build log for build %s: %s', self._uuid, log_message)
 
   def append_log(self, log_message, extra_data=None):
     if log_message is None:

@@ -64,4 +73,7 @@ class StatusHandler(object):
     return self._status
 
   def __exit__(self, exc_type, value, traceback):
-    self._build_logs.set_status(self._uuid, self._status)
+    try:
+      self._build_logs.set_status(self._uuid, self._status)
+    except RedisError:
+      logger.exception('Could not set status of build %s to %s', self._uuid, self._status)

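Together, the two try/except blocks make build logging best-effort: a Redis outage is recorded in the worker's own log instead of aborting the build. A usage sketch, assuming a constructor of the form StatusHandler(build_logs, build_uuid) and the context-manager protocol shown above (the constructor signature and names are assumptions, not part of this diff):

# Sketch only: 'build_logs' and the UUID are hypothetical stand-ins.
status = StatusHandler(build_logs, 'deadbeef-uuid')
with status:
  status.append_log('Starting build')  # a RedisError here is caught and logged
# On __exit__, set_status() is attempted; a RedisError is again only logged.
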
@@ -10,6 +10,7 @@ from trollius import From, coroutine, Return, async
 from concurrent.futures import ThreadPoolExecutor
 from urllib3.exceptions import ReadTimeoutError, ProtocolError
 
+from app import metric_queue
 from buildman.manager.basemanager import BaseManager
 from buildman.manager.executor import PopenExecutor, EC2Executor
 from buildman.component.buildcomponent import BuildComponent

@@ -98,6 +99,10 @@ class EphemeralBuilderManager(BaseManager):
       if restarter is not None:
         async(restarter())
+
+    except (KeyError, etcd.EtcdKeyError):
+      logger.debug('Etcd key already cleared: %s', etcd_key)
+      return
 
     except etcd.EtcdException as eex:
       # TODO(jschorr): This is a quick and dirty hack and should be replaced
       # with a proper exception check.

@@ -335,6 +340,7 @@ class EphemeralBuilderManager(BaseManager):
 
     try:
       builder_id = yield From(self._executor.start_builder(realm, token, build_uuid))
+      metric_queue.put('EC2BuilderStarted', 1, unit='Count')
     except:
       logger.exception('Exception when starting builder for job: %s', build_uuid)
       raise Return(False, EC2_API_TIMEOUT)

@@ -399,7 +405,7 @@ class EphemeralBuilderManager(BaseManager):
     try:
       yield From(self._etcd_client.delete(job_key))
     except (KeyError, etcd.EtcdKeyError):
-      logger.exception('Builder is asking for job to be removed, but work already completed')
+      logger.debug('Builder is asking for job to be removed, but work already completed')
 
     self.job_complete_callback(build_job, job_status)
 

@@ -160,8 +160,17 @@ class EC2Executor(BuilderExecutor):
 
   @coroutine
   def stop_builder(self, builder_id):
-    ec2_conn = self._get_conn()
-    terminated_instances = yield From(ec2_conn.terminate_instances([builder_id]))
+    try:
+      ec2_conn = self._get_conn()
+      terminated_instances = yield From(ec2_conn.terminate_instances([builder_id]))
+    except boto.exception.EC2ResponseError as ec2e:
+      if ec2e.error_code == 404:
+        logger.debug('Instance %s already terminated', builder_id)
+        return
+
+      logger.exception('Exception when trying to terminate instance %s', builder_id)
+      raise
 
     if builder_id not in [si.id for si in terminated_instances]:
       raise ExecutorException('Unable to terminate instance: %s' % builder_id)

@@ -65,7 +65,7 @@ class BuilderServer(object):
 
   @controller_app.route('/status')
   def status():
-    metrics = server._queue.get_metrics(require_transaction=False)
+    metrics = server._queue.get_metrics()
     (running_count, available_not_running_count, available_count) = metrics
 
     workers = [component for component in server._current_components

buildtrigger/__init__.py (new file, 5 lines)

@@ -0,0 +1,5 @@
+import buildtrigger.bitbuckethandler
+import buildtrigger.customhandler
+import buildtrigger.githubhandler
+import buildtrigger.gitlabhandler
+

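These imports are for side effect only: loading each module defines its handler class, which BuildTriggerHandler.get_handler (added in basehandler.py below) then discovers via __subclasses__(). A minimal sketch of that registration pattern, with invented class names:

class Base(object):
  @classmethod
  def get_handler(cls, name):
    # Any subclass whose module has been imported is visible here.
    for subc in cls.__subclasses__():
      if subc.service_name() == name:
        return subc()
    raise ValueError('Unable to find service: %s' % name)

class ExampleHandler(Base):  # registered simply by importing its module
  @classmethod
  def service_name(cls):
    return 'example'

print(Base.get_handler('example'))  # -> an ExampleHandler instance
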
buildtrigger/basehandler.py (new file, 222 lines)

@@ -0,0 +1,222 @@
+from endpoints.building import PreparedBuild
+from data import model
+from buildtrigger.triggerutil import get_trigger_config, InvalidServiceException
+from jsonschema import validate
+
+METADATA_SCHEMA = {
+  'type': 'object',
+  'properties': {
+    'commit': {
+      'type': 'string',
+      'description': 'first 7 characters of the SHA-1 identifier for a git commit',
+      'pattern': '^([A-Fa-f0-9]{7,})$',
+    },
+    'git_url': {
+      'type': 'string',
+      'description': 'The GIT url to use for the checkout',
+    },
+    'ref': {
+      'type': 'string',
+      'description': 'git reference for a git commit',
+      'pattern': '^refs\/(heads|tags|remotes)\/(.+)$',
+    },
+    'default_branch': {
+      'type': 'string',
+      'description': 'default branch of the git repository',
+    },
+    'commit_info': {
+      'type': 'object',
+      'description': 'metadata about a git commit',
+      'properties': {
+        'url': {
+          'type': 'string',
+          'description': 'URL to view a git commit',
+        },
+        'message': {
+          'type': 'string',
+          'description': 'git commit message',
+        },
+        'date': {
+          'type': 'string',
+          'description': 'timestamp for a git commit'
+        },
+        'author': {
+          'type': 'object',
+          'description': 'metadata about the author of a git commit',
+          'properties': {
+            'username': {
+              'type': 'string',
+              'description': 'username of the author',
+            },
+            'url': {
+              'type': 'string',
+              'description': 'URL to view the profile of the author',
+            },
+            'avatar_url': {
+              'type': 'string',
+              'description': 'URL to view the avatar of the author',
+            },
+          },
+          'required': ['username'],
+        },
+        'committer': {
+          'type': 'object',
+          'description': 'metadata about the committer of a git commit',
+          'properties': {
+            'username': {
+              'type': 'string',
+              'description': 'username of the committer',
+            },
+            'url': {
+              'type': 'string',
+              'description': 'URL to view the profile of the committer',
+            },
+            'avatar_url': {
+              'type': 'string',
+              'description': 'URL to view the avatar of the committer',
+            },
+          },
+          'required': ['username'],
+        },
+      },
+      'required': ['url', 'message', 'date'],
+    },
+  },
+  'required': ['commit', 'git_url'],
+}
+
+
+class BuildTriggerHandler(object):
+  def __init__(self, trigger, override_config=None):
+    self.trigger = trigger
+    self.config = override_config or get_trigger_config(trigger)
+
+  @property
+  def auth_token(self):
+    """ Returns the auth token for the trigger. """
+    return self.trigger.auth_token
+
+  def load_dockerfile_contents(self):
+    """
+    Loads the Dockerfile found for the trigger's config and returns its contents,
+    or None if none could be found/loaded.
+    """
+    raise NotImplementedError
+
+  def list_build_sources(self):
+    """
+    Take the auth information for the specific trigger type and load the
+    list of build sources (repositories).
+    """
+    raise NotImplementedError
+
+  def list_build_subdirs(self):
+    """
+    Take the auth information and the specified config so far and list all of
+    the possible subdirs containing dockerfiles.
+    """
+    raise NotImplementedError
+
+  def handle_trigger_request(self):
+    """
+    Transform the incoming request data into a set of actions. Returns a PreparedBuild.
+    """
+    raise NotImplementedError
+
+  def is_active(self):
+    """
+    Returns True if the current build trigger is active. Inactive means further
+    setup is needed.
+    """
+    raise NotImplementedError
+
+  def activate(self, standard_webhook_url):
+    """
+    Activates the trigger for the service, with the given new configuration.
+    Returns new public and private config that should be stored if successful.
+    """
+    raise NotImplementedError
+
+  def deactivate(self):
+    """
+    Deactivates the trigger for the service, removing any hooks installed in
+    the remote service. Returns the new config that should be stored if this
+    trigger is going to be re-activated.
+    """
+    raise NotImplementedError
+
+  def manual_start(self, run_parameters=None):
+    """
+    Manually creates a repository build for this trigger. Returns a PreparedBuild.
+    """
+    raise NotImplementedError
+
+  def list_field_values(self, field_name, limit=None):
+    """
+    Lists all values for the given custom trigger field. For example, a trigger might have a
+    field named "branches", and this method would return all branches.
+    """
+    raise NotImplementedError
+
+  def get_repository_url(self):
+    """ Returns the URL of the current trigger's repository. Note that this operation
+        can be called in a loop, so it should be as fast as possible. """
+    raise NotImplementedError
+
+  @classmethod
+  def service_name(cls):
+    """
+    Particular service implemented by subclasses.
+    """
+    raise NotImplementedError
+
+  @classmethod
+  def get_handler(cls, trigger, override_config=None):
+    for subc in cls.__subclasses__():
+      if subc.service_name() == trigger.service.name:
+        return subc(trigger, override_config)
+
+    raise InvalidServiceException('Unable to find service: %s' % trigger.service.name)
+
+  def put_config_key(self, key, value):
+    """ Updates a config key in the trigger, saving it to the DB. """
+    self.config[key] = value
+    model.build.update_build_trigger(self.trigger, self.config)
+
+  def set_auth_token(self, auth_token):
+    """ Sets the auth token for the trigger, saving it to the DB. """
+    model.build.update_build_trigger(self.trigger, self.config, auth_token=auth_token)
+
+  def get_dockerfile_path(self):
+    """ Returns the normalized path to the Dockerfile found in the subdirectory
+        in the config. """
+    subdirectory = self.config.get('subdir', '')
+    if subdirectory == '/':
+      subdirectory = ''
+    else:
+      if not subdirectory.endswith('/'):
+        subdirectory = subdirectory + '/'
+
+    return subdirectory + 'Dockerfile'
+
+  def prepare_build(self, metadata, is_manual=False):
+    # Ensure that the metadata meets the scheme.
+    validate(metadata, METADATA_SCHEMA)
+
+    config = self.config
+    ref = metadata.get('ref', None)
+    commit_sha = metadata['commit']
+    default_branch = metadata.get('default_branch', None)
+
+    prepared = PreparedBuild(self.trigger)
+    prepared.name_from_sha(commit_sha)
+    prepared.subdirectory = config.get('subdir', None)
+    prepared.is_manual = is_manual
+    prepared.metadata = metadata
+
+    if ref is not None:
+      prepared.tags_from_ref(ref, default_branch)
+    else:
+      prepared.tags = [commit_sha[:7]]
+
+    return prepared

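A worked example of metadata that passes METADATA_SCHEMA above; every value is invented. Only 'commit' and 'git_url' are required at the top level, and when 'ref' is omitted prepare_build falls back to tagging the build with the 7-character short SHA:

from jsonschema import validate

example_metadata = {
  'commit': '705fb74ee2a5e861e13d9efedf5a0ad4d72e927b',  # hypothetical SHA
  'git_url': 'git@example.com:devtable/example.git',     # hypothetical URL
  'ref': 'refs/heads/master',
  'default_branch': 'master',
}
validate(example_metadata, METADATA_SCHEMA)  # raises ValidationError if malformed
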
buildtrigger/bitbuckethandler.py (new file, 549 lines)

@@ -0,0 +1,549 @@
+import logging
+import re
+
+from jsonschema import validate
+from buildtrigger.triggerutil import (RepositoryReadException, TriggerActivationException,
+                                      TriggerDeactivationException, TriggerStartException,
+                                      TriggerProviderException, InvalidPayloadException,
+                                      determine_build_ref, raise_if_skipped_build,
+                                      find_matching_branches)
+
+from buildtrigger.basehandler import BuildTriggerHandler
+
+from app import app, get_app_url
+from bitbucket import BitBucket
+from util.security.ssh import generate_ssh_keypair
+from util.dict_wrappers import JSONPathDict, SafeDictSetter
+
+logger = logging.getLogger(__name__)
+
+_BITBUCKET_COMMIT_URL = 'https://bitbucket.org/%s/commits/%s'
+_RAW_AUTHOR_REGEX = re.compile(r'.*<(.+)>')
+
+BITBUCKET_WEBHOOK_PAYLOAD_SCHEMA = {
+  'type': 'object',
+  'properties': {
+    'repository': {
+      'type': 'object',
+      'properties': {
+        'full_name': {
+          'type': 'string',
+        },
+      },
+      'required': ['full_name'],
+    },
+    'push': {
+      'type': 'object',
+      'properties': {
+        'changes': {
+          'type': 'array',
+          'items': {
+            'type': 'object',
+            'properties': {
+              'new': {
+                'type': 'object',
+                'properties': {
+                  'target': {
+                    'type': 'object',
+                    'properties': {
+                      'hash': {
+                        'type': 'string'
+                      },
+                      'message': {
+                        'type': 'string'
+                      },
+                      'date': {
+                        'type': 'string'
+                      },
+                      'author': {
+                        'type': 'object',
+                        'properties': {
+                          'user': {
+                            'type': 'object',
+                            'properties': {
+                              'username': {
+                                'type': 'string',
+                              },
+                              'links': {
+                                'type': 'object',
+                                'properties': {
+                                  'html': {
+                                    'type': 'object',
+                                    'properties': {
+                                      'href': {
+                                        'type': 'string',
+                                      },
+                                    },
+                                    'required': ['href'],
+                                  },
+                                  'avatar': {
+                                    'type': 'object',
+                                    'properties': {
+                                      'href': {
+                                        'type': 'string',
+                                      },
+                                    },
+                                    'required': ['href'],
+                                  },
+                                },
+                                'required': ['html', 'avatar'],
+                              },
+                            },
+                            'required': ['username'],
+                          },
+                        },
+                      },
+                      'links': {
+                        'type': 'object',
+                        'properties': {
+                          'html': {
+                            'type': 'object',
+                            'properties': {
+                              'href': {
+                                'type': 'string',
+                              },
+                            },
+                            'required': ['href'],
+                          },
+                        },
+                        'required': ['html'],
+                      },
+                    },
+                    'required': ['hash', 'message', 'date'],
+                  },
+                },
+                'required': ['target'],
+              },
+            },
+          },
+        },
+      },
+      'required': ['changes'],
+    },
+    'actor': {
+      'type': 'object',
+      'properties': {
+        'username': {
+          'type': 'string',
+        },
+        'links': {
+          'type': 'object',
+          'properties': {
+            'html': {
+              'type': 'object',
+              'properties': {
+                'href': {
+                  'type': 'string',
+                },
+              },
+              'required': ['href'],
+            },
+            'avatar': {
+              'type': 'object',
+              'properties': {
+                'href': {
+                  'type': 'string',
+                },
+              },
+              'required': ['href'],
+            },
+          },
+          'required': ['html', 'avatar'],
+        },
+      },
+      'required': ['username'],
+    },
+  },
+  'required': ['push', 'repository'],
+}
+
+BITBUCKET_COMMIT_INFO_SCHEMA = {
+  'type': 'object',
+  'properties': {
+    'node': {
+      'type': 'string',
+    },
+    'message': {
+      'type': 'string',
+    },
+    'timestamp': {
+      'type': 'string',
+    },
+    'raw_author': {
+      'type': 'string',
+    },
+  },
+  'required': ['node', 'message', 'timestamp']
+}
+
+def get_transformed_commit_info(bb_commit, ref, default_branch, repository_name, lookup_author):
+  """ Returns the BitBucket commit information transformed into our own
+      payload format.
+  """
+  try:
+    validate(bb_commit, BITBUCKET_COMMIT_INFO_SCHEMA)
+  except Exception as exc:
+    logger.exception('Exception when validating Bitbucket commit information: %s from %s', exc.message, bb_commit)
+    raise InvalidPayloadException(exc.message)
+
+  commit = JSONPathDict(bb_commit)
+
+  config = SafeDictSetter()
+  config['commit'] = commit['node']
+  config['ref'] = ref
+  config['default_branch'] = default_branch
+  config['git_url'] = 'git@bitbucket.org:%s.git' % repository_name
+
+  config['commit_info.url'] = _BITBUCKET_COMMIT_URL % (repository_name, commit['node'])
+  config['commit_info.message'] = commit['message']
+  config['commit_info.date'] = commit['timestamp']
+
+  match = _RAW_AUTHOR_REGEX.match(commit['raw_author'])
+  if match:
+    email_address = match.group(1)
+    author_info = JSONPathDict(lookup_author(email_address))
+    if author_info:
+      config['commit_info.author.username'] = author_info['user.username']
+      config['commit_info.author.url'] = 'https://bitbucket.org/%s/' % author_info['user.username']
+      config['commit_info.author.avatar_url'] = author_info['user.avatar']
+
+  return config.dict_value()
+
+
+def get_transformed_webhook_payload(bb_payload, default_branch=None):
+  """ Returns the BitBucket webhook JSON payload transformed into our own payload
+      format. If the bb_payload is not valid, returns None.
+  """
+  try:
+    validate(bb_payload, BITBUCKET_WEBHOOK_PAYLOAD_SCHEMA)
+  except Exception as exc:
+    logger.exception('Exception when validating Bitbucket webhook payload: %s from %s', exc.message, bb_payload)
+    raise InvalidPayloadException(exc.message)
+
+  payload = JSONPathDict(bb_payload)
+  change = payload['push.changes[-1].new']
+  if not change:
+    return None
+
+  ref = ('refs/heads/' + change['name'] if change['type'] == 'branch'
+         else 'refs/tags/' + change['name'])
+
+  repository_name = payload['repository.full_name']
+  target = change['target']
+
+  config = SafeDictSetter()
+  config['commit'] = target['hash']
+  config['ref'] = ref
+  config['default_branch'] = default_branch
+  config['git_url'] = 'git@bitbucket.org:%s.git' % repository_name
+
+  config['commit_info.url'] = target['links.html.href']
+  config['commit_info.message'] = target['message']
+  config['commit_info.date'] = target['date']
+
+  config['commit_info.author.username'] = target['author.user.username']
+  config['commit_info.author.url'] = target['author.user.links.html.href']
+  config['commit_info.author.avatar_url'] = target['author.user.links.avatar.href']
+
+  config['commit_info.committer.username'] = payload['actor.username']
+  config['commit_info.committer.url'] = payload['actor.links.html.href']
+  config['commit_info.committer.avatar_url'] = payload['actor.links.avatar.href']
+  return config.dict_value()
+
+
+class BitbucketBuildTrigger(BuildTriggerHandler):
+  """
+  BuildTrigger for Bitbucket.
+  """
+  @classmethod
+  def service_name(cls):
+    return 'bitbucket'
+
+  def _get_client(self):
+    """ Returns a BitBucket API client for this trigger's config. """
+    key = app.config.get('BITBUCKET_TRIGGER_CONFIG', {}).get('CONSUMER_KEY', '')
+    secret = app.config.get('BITBUCKET_TRIGGER_CONFIG', {}).get('CONSUMER_SECRET', '')
+
+    trigger_uuid = self.trigger.uuid
+    callback_url = '%s/oauth1/bitbucket/callback/trigger/%s' % (get_app_url(), trigger_uuid)
+
+    return BitBucket(key, secret, callback_url, timeout=5)
+
+  def _get_authorized_client(self):
+    """ Returns an authorized API client. """
+    base_client = self._get_client()
+    auth_token = self.auth_token or 'invalid:invalid'
+    token_parts = auth_token.split(':')
+    if len(token_parts) != 2:
+      token_parts = ['invalid', 'invalid']
+
+    (access_token, access_token_secret) = token_parts
+    return base_client.get_authorized_client(access_token, access_token_secret)
+
+  def _get_repository_client(self):
+    """ Returns an API client for working with this config's BB repository. """
+    source = self.config['build_source']
+    (namespace, name) = source.split('/')
+    bitbucket_client = self._get_authorized_client()
+    return bitbucket_client.for_namespace(namespace).repositories().get(name)
+
+  def _get_default_branch(self, repository, default_value='master'):
+    """ Returns the default branch for the repository or the value given. """
+    (result, data, _) = repository.get_main_branch()
+    if result:
+      return data['name']
+
+    return default_value
+
+  def get_oauth_url(self):
+    """ Returns the OAuth URL to authorize Bitbucket. """
+    bitbucket_client = self._get_client()
+    (result, data, err_msg) = bitbucket_client.get_authorization_url()
+    if not result:
+      raise TriggerProviderException(err_msg)
+
+    return data
+
+  def exchange_verifier(self, verifier):
+    """ Exchanges the given verifier token to setup this trigger. """
+    bitbucket_client = self._get_client()
+    access_token = self.config.get('access_token', '')
+    access_token_secret = self.auth_token
+
+    # Exchange the verifier for a new access token.
+    (result, data, _) = bitbucket_client.verify_token(access_token, access_token_secret, verifier)
+    if not result:
+      return False
+
+    # Save the updated access token and secret.
+    self.set_auth_token(data[0] + ':' + data[1])
+
+    # Retrieve the current authorized user's information and store the username in the config.
+    authorized_client = self._get_authorized_client()
+    (result, data, _) = authorized_client.get_current_user()
+    if not result:
+      return False
+
+    username = data['user']['username']
+    self.put_config_key('username', username)
+    return True
+
+  def is_active(self):
+    return 'webhook_id' in self.config
+
+  def activate(self, standard_webhook_url):
+    config = self.config
+
+    # Add a deploy key to the repository.
+    public_key, private_key = generate_ssh_keypair()
+    config['credentials'] = [
+      {
+        'name': 'SSH Public Key',
+        'value': public_key,
+      },
+    ]
+
+    repository = self._get_repository_client()
+    (result, created_deploykey, err_msg) = repository.deploykeys().create(
+      app.config['REGISTRY_TITLE'] + ' webhook key', public_key)
+
+    if not result:
+      msg = 'Unable to add deploy key to repository: %s' % err_msg
+      raise TriggerActivationException(msg)
+
+    config['deploy_key_id'] = created_deploykey['pk']
+
+    # Add a webhook callback.
+    description = 'Webhook for invoking builds on %s' % app.config['REGISTRY_TITLE_SHORT']
+    webhook_events = ['repo:push']
+    (result, created_webhook, err_msg) = repository.webhooks().create(
+      description, standard_webhook_url, webhook_events)
+
+    if not result:
+      msg = 'Unable to add webhook to repository: %s' % err_msg
+      raise TriggerActivationException(msg)
+
+    config['webhook_id'] = created_webhook['uuid']
+    self.config = config
+    return config, {'private_key': private_key}
+
+  def deactivate(self):
+    config = self.config
+
+    webhook_id = config.pop('webhook_id', None)
+    deploy_key_id = config.pop('deploy_key_id', None)
+    repository = self._get_repository_client()
+
+    # Remove the webhook.
+    if webhook_id is not None:
+      (result, _, err_msg) = repository.webhooks().delete(webhook_id)
+      if not result:
+        msg = 'Unable to remove webhook from repository: %s' % err_msg
+        raise TriggerDeactivationException(msg)
+
+    # Remove the public key.
+    if deploy_key_id is not None:
+      (result, _, err_msg) = repository.deploykeys().delete(deploy_key_id)
+      if not result:
+        msg = 'Unable to remove deploy key from repository: %s' % err_msg
+        raise TriggerDeactivationException(msg)
+
+    return config
+
+  def list_build_sources(self):
+    bitbucket_client = self._get_authorized_client()
+    (result, data, err_msg) = bitbucket_client.get_visible_repositories()
+    if not result:
+      raise RepositoryReadException('Could not read repository list: ' + err_msg)
+
+    namespaces = {}
+    for repo in data:
+      if not repo['scm'] == 'git':
+        continue
+
+      owner = repo['owner']
+      if not owner in namespaces:
+        namespaces[owner] = {
+          'personal': owner == self.config.get('username'),
+          'repos': [],
+          'info': {
+            'name': owner
+          }
+        }
+
+      namespaces[owner]['repos'].append(owner + '/' + repo['slug'])
+
+    return namespaces.values()
+
+  def list_build_subdirs(self):
+    config = self.config
+    repository = self._get_repository_client()
+
+    # Find the first matching branch.
+    repo_branches = self.list_field_values('branch_name') or []
+    branches = find_matching_branches(config, repo_branches)
+    if not branches:
+      branches = [self._get_default_branch(repository)]
+
+    (result, data, err_msg) = repository.get_path_contents('', revision=branches[0])
+    if not result:
+      raise RepositoryReadException(err_msg)
+
+    files = set([f['path'] for f in data['files']])
+    if 'Dockerfile' in files:
+      return ['/']
+
+    return []
+
+  def load_dockerfile_contents(self):
+    repository = self._get_repository_client()
+    path = self.get_dockerfile_path()
+
+    (result, data, err_msg) = repository.get_raw_path_contents(path, revision='master')
+    if not result:
+      raise RepositoryReadException(err_msg)
+
+    return data
+
+  def list_field_values(self, field_name, limit=None):
+    source = self.config['build_source']
+    (namespace, name) = source.split('/')
+
+    bitbucket_client = self._get_authorized_client()
+    repository = bitbucket_client.for_namespace(namespace).repositories().get(name)
+
+    if field_name == 'refs':
+      (result, data, _) = repository.get_branches_and_tags()
+      if not result:
+        return None
+
+      branches = [b['name'] for b in data['branches']]
+      tags = [t['name'] for t in data['tags']]
+
+      return ([{'kind': 'branch', 'name': b} for b in branches] +
+              [{'kind': 'tag', 'name': tag} for tag in tags])
+
+    if field_name == 'tag_name':
+      (result, data, _) = repository.get_tags()
+      if not result:
+        return None
+
+      tags = list(data.keys())
+      if limit:
+        tags = tags[0:limit]
+
+      return tags
+
+    if field_name == 'branch_name':
+      (result, data, _) = repository.get_branches()
+      if not result:
+        return None
+
+      branches = list(data.keys())
+      if limit:
+        branches = branches[0:limit]
+
+      return branches
+
+    return None
+
+  def get_repository_url(self):
+    source = self.config['build_source']
+    (namespace, name) = source.split('/')
+    return 'https://bitbucket.org/%s/%s' % (namespace, name)
+
+  def handle_trigger_request(self, request):
+    payload = request.get_json()
+    logger.debug('Got BitBucket request: %s', payload)
+
+    repository = self._get_repository_client()
+    default_branch = self._get_default_branch(repository)
+
+    metadata = get_transformed_webhook_payload(payload, default_branch=default_branch)
+    prepared = self.prepare_build(metadata)
+
+    # Check if we should skip this build.
+    raise_if_skipped_build(prepared, self.config)
+    return prepared
+
+  def manual_start(self, run_parameters=None):
+    run_parameters = run_parameters or {}
+    repository = self._get_repository_client()
+    bitbucket_client = self._get_authorized_client()
+
+    def get_branch_sha(branch_name):
+      # Lookup the commit SHA for the branch.
+      (result, data, _) = repository.get_branches()
+      if not result or not branch_name in data:
+        raise TriggerStartException('Could not find branch commit SHA')
+
+      return data[branch_name]['node']
+
+    def get_tag_sha(tag_name):
+      # Lookup the commit SHA for the tag.
+      (result, data, _) = repository.get_tags()
+      if not result or not tag_name in data:
+        raise TriggerStartException('Could not find tag commit SHA')
+
+      return data[tag_name]['node']
+
+    def lookup_author(email_address):
+      (result, data, _) = bitbucket_client.accounts().get_profile(email_address)
+      return data if result else None
+
+    # Find the branch or tag to build.
+    default_branch = self._get_default_branch(repository)
+    (commit_sha, ref) = determine_build_ref(run_parameters, get_branch_sha, get_tag_sha,
+                                            default_branch)
+
+    # Lookup the commit SHA in BitBucket.
+    (result, commit_info, _) = repository.changesets().get(commit_sha)
+    if not result:
+      raise TriggerStartException('Could not lookup commit SHA')
+
+    # Return a prepared build for the commit.
+    repository_name = '%s/%s' % (repository.namespace, repository.repository_name)
+    metadata = get_transformed_commit_info(commit_info, ref, default_branch,
+                                           repository_name, lookup_author)
+
+    return self.prepare_build(metadata, is_manual=True)

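For orientation, this is the shape get_transformed_webhook_payload produces for a branch push; all values below are invented, and the result itself satisfies METADATA_SCHEMA from basehandler.py:

transformed = {
  'commit': 'abdeaf1b2b4a6b9ddf742c1e1754236380435a62',
  'ref': 'refs/heads/master',
  'default_branch': 'master',
  'git_url': 'git@bitbucket.org:someuser/somerepo.git',
  'commit_info': {
    'url': 'https://bitbucket.org/someuser/somerepo/commits/abdeaf1b2b4a6b9ddf742c1e1754236380435a62',
    'message': 'Fix the build',
    'date': '2015-09-10T20:40:54+00:00',
    'author': {
      'username': 'someuser',
      'url': 'https://bitbucket.org/someuser/',
      'avatar_url': 'https://bitbucket.org/account/someuser/avatar/32/',
    },
    'committer': {
      'username': 'someuser',
      'url': 'https://bitbucket.org/someuser/',
      'avatar_url': 'https://bitbucket.org/account/someuser/avatar/32/',
    },
  },
}
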
buildtrigger/customhandler.py (new file, 166 lines)

@@ -0,0 +1,166 @@
+import logging
+import json
+
+from jsonschema import validate
+from buildtrigger.triggerutil import (RepositoryReadException, TriggerActivationException,
+                                      TriggerStartException, ValidationRequestException,
+                                      InvalidPayloadException,
+                                      SkipRequestException, raise_if_skipped_build,
+                                      find_matching_branches)
+
+from buildtrigger.basehandler import BuildTriggerHandler
+
+from util.security.ssh import generate_ssh_keypair
+
+
+logger = logging.getLogger(__name__)
+
+class CustomBuildTrigger(BuildTriggerHandler):
+  payload_schema = {
+    'type': 'object',
+    'properties': {
+      'commit': {
+        'type': 'string',
+        'description': 'first 7 characters of the SHA-1 identifier for a git commit',
+        'pattern': '^([A-Fa-f0-9]{7,})$',
+      },
+      'ref': {
+        'type': 'string',
+        'description': 'git reference for a git commit',
+        'pattern': '^refs\/(heads|tags|remotes)\/(.+)$',
+      },
+      'default_branch': {
+        'type': 'string',
+        'description': 'default branch of the git repository',
+      },
+      'commit_info': {
+        'type': 'object',
+        'description': 'metadata about a git commit',
+        'properties': {
+          'url': {
+            'type': 'string',
+            'description': 'URL to view a git commit',
+          },
+          'message': {
+            'type': 'string',
+            'description': 'git commit message',
+          },
+          'date': {
+            'type': 'string',
+            'description': 'timestamp for a git commit'
+          },
+          'author': {
+            'type': 'object',
+            'description': 'metadata about the author of a git commit',
+            'properties': {
+              'username': {
+                'type': 'string',
+                'description': 'username of the author',
+              },
+              'url': {
+                'type': 'string',
+                'description': 'URL to view the profile of the author',
+              },
+              'avatar_url': {
+                'type': 'string',
+                'description': 'URL to view the avatar of the author',
+              },
+            },
+            'required': ['username', 'url', 'avatar_url'],
+          },
+          'committer': {
+            'type': 'object',
+            'description': 'metadata about the committer of a git commit',
+            'properties': {
+              'username': {
+                'type': 'string',
+                'description': 'username of the committer',
+              },
+              'url': {
+                'type': 'string',
+                'description': 'URL to view the profile of the committer',
+              },
+              'avatar_url': {
+                'type': 'string',
+                'description': 'URL to view the avatar of the committer',
+              },
+            },
+            'required': ['username', 'url', 'avatar_url'],
+          },
+        },
+        'required': ['url', 'message', 'date'],
+      },
+    },
+    'required': ['commit', 'ref', 'default_branch'],
+  }
+
+  @classmethod
+  def service_name(cls):
+    return 'custom-git'
+
+  def is_active(self):
+    return self.config.has_key('credentials')
+
+  def _metadata_from_payload(self, payload):
+    try:
+      metadata = json.loads(payload)
+      validate(metadata, self.payload_schema)
+    except Exception as e:
+      raise InvalidPayloadException(e.message)
+    return metadata
+
+  def handle_trigger_request(self, request):
+    payload = request.data
+    if not payload:
+      raise InvalidPayloadException()
+
+    logger.debug('Payload %s', payload)
+
+    metadata = self._metadata_from_payload(payload)
+    metadata['git_url'] = self.config['build_source']
+
+    prepared = self.prepare_build(metadata)
+
+    # Check if we should skip this build.
+    raise_if_skipped_build(prepared, self.config)
+
+    return prepared
+
+  def manual_start(self, run_parameters=None):
+    run_parameters = run_parameters or {}
+
+    # commit_sha is the only required parameter
+    commit_sha = run_parameters.get('commit_sha')
+    if commit_sha is None:
+      raise TriggerStartException('missing required parameter')
+
+    config = self.config
+    metadata = {
+      'commit': commit_sha,
+      'git_url': config['build_source'],
+    }
+
+    return self.prepare_build(metadata, is_manual=True)
+
+  def activate(self, standard_webhook_url):
+    config = self.config
+    public_key, private_key = generate_ssh_keypair()
+    config['credentials'] = [
+      {
+        'name': 'SSH Public Key',
+        'value': public_key,
+      },
+      {
+        'name': 'Webhook Endpoint URL',
+        'value': standard_webhook_url,
+      },
+    ]
+    self.config = config
+    return config, {'private_key': private_key}
+
+  def deactivate(self):
+    config = self.config
+    config.pop('credentials', None)
+    self.config = config
+    return config
+
+  def get_repository_url(self):
+    return None

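A sample request body for the custom-git trigger (all values invented). Per payload_schema above, 'commit', 'ref', and 'default_branch' are required; 'git_url' is not accepted from the caller but filled in from the trigger's build_source:

import json

# Hypothetical webhook body POSTed to the custom-git endpoint.
payload = json.dumps({
  'commit': '1c002dde9a2b6a35a2c47ff7e02b02e7a35ba2d5',
  'ref': 'refs/heads/master',
  'default_branch': 'master',
})
# handle_trigger_request() json-decodes and validates this against
# payload_schema, then sets metadata['git_url'] from build_source.
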
buildtrigger/githubhandler.py (new file, 515 lines)

@ -0,0 +1,515 @@
|
||||||
|
import logging
|
||||||
|
import os.path
|
||||||
|
import base64
|
||||||
|
|
||||||
|
from app import app, github_trigger
|
||||||
|
from jsonschema import validate
|
||||||
|
|
||||||
|
from buildtrigger.triggerutil import (RepositoryReadException, TriggerActivationException,
|
||||||
|
TriggerDeactivationException, TriggerStartException,
|
||||||
|
EmptyRepositoryException, ValidationRequestException,
|
||||||
|
SkipRequestException, InvalidPayloadException,
|
||||||
|
determine_build_ref, raise_if_skipped_build,
|
||||||
|
find_matching_branches)
|
||||||
|
|
||||||
|
from buildtrigger.basehandler import BuildTriggerHandler
|
||||||
|
|
||||||
|
from util.security.ssh import generate_ssh_keypair
|
||||||
|
from util.dict_wrappers import JSONPathDict, SafeDictSetter
|
||||||
|
|
||||||
|
from github import (Github, UnknownObjectException, GithubException,
|
||||||
|
BadCredentialsException as GitHubBadCredentialsException)
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
GITHUB_WEBHOOK_PAYLOAD_SCHEMA = {
|
||||||
|
'type': 'object',
|
||||||
|
'properties': {
|
||||||
|
'ref': {
|
||||||
|
'type': 'string',
|
||||||
|
},
|
||||||
|
'head_commit': {
|
||||||
|
'type': 'object',
|
||||||
|
'properties': {
|
||||||
|
'id': {
|
||||||
|
'type': 'string',
|
||||||
|
},
|
||||||
|
'url': {
|
||||||
|
'type': 'string',
|
||||||
|
},
|
||||||
|
'message': {
|
||||||
|
'type': 'string',
|
||||||
|
},
|
||||||
|
'timestamp': {
|
||||||
|
'type': 'string',
|
||||||
|
},
|
||||||
|
'author': {
|
||||||
|
'type': 'object',
|
||||||
|
'properties': {
|
||||||
|
'username': {
|
||||||
|
'type': 'string'
|
||||||
|
},
|
||||||
|
'html_url': {
|
||||||
|
'type': 'string'
|
||||||
|
},
|
||||||
|
'avatar_url': {
|
||||||
|
'type': 'string'
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
'committer': {
|
||||||
|
'type': 'object',
|
||||||
|
'properties': {
|
||||||
|
'username': {
|
||||||
|
'type': 'string'
|
||||||
|
},
|
||||||
|
'html_url': {
|
||||||
|
'type': 'string'
|
||||||
|
},
|
||||||
|
'avatar_url': {
|
||||||
|
'type': 'string'
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
'required': ['id', 'url', 'message', 'timestamp'],
|
||||||
|
},
|
||||||
|
'repository': {
|
||||||
|
'type': 'object',
|
||||||
|
'properties': {
|
||||||
|
'ssh_url': {
|
||||||
|
'type': 'string',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
'required': ['ssh_url'],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
'required': ['ref', 'head_commit', 'repository'],
|
||||||
|
}
|
||||||
|
|
||||||
|
def get_transformed_webhook_payload(gh_payload, default_branch=None, lookup_user=None):
|
||||||
|
""" Returns the GitHub webhook JSON payload transformed into our own payload
|
||||||
|
format. If the gh_payload is not valid, returns None.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
validate(gh_payload, GITHUB_WEBHOOK_PAYLOAD_SCHEMA)
|
||||||
|
except Exception as exc:
|
||||||
|
raise InvalidPayloadException(exc.message)
|
||||||
|
|
||||||
|
payload = JSONPathDict(gh_payload)
|
||||||
|
|
||||||
|
config = SafeDictSetter()
|
||||||
|
config['commit'] = payload['head_commit.id']
|
||||||
|
config['ref'] = payload['ref']
|
||||||
|
config['default_branch'] = default_branch
|
||||||
|
config['git_url'] = payload['repository.ssh_url']
|
||||||
|
|
||||||
|
config['commit_info.url'] = payload['head_commit.url']
|
||||||
|
config['commit_info.message'] = payload['head_commit.message']
|
||||||
|
config['commit_info.date'] = payload['head_commit.timestamp']
|
||||||
|
|
||||||
|
config['commit_info.author.username'] = payload['head_commit.author.username']
|
||||||
|
config['commit_info.author.url'] = payload.get('head_commit.author.html_url')
|
||||||
|
config['commit_info.author.avatar_url'] = payload.get('head_commit.author.avatar_url')
|
||||||
|
|
||||||
|
config['commit_info.committer.username'] = payload.get('head_commit.committer.username')
|
||||||
|
config['commit_info.committer.url'] = payload.get('head_commit.committer.html_url')
|
||||||
|
config['commit_info.committer.avatar_url'] = payload.get('head_commit.committer.avatar_url')
|
||||||
|
|
||||||
|
# Note: GitHub doesn't always return the extra information for users, so we do the lookup
|
||||||
|
# manually if possible.
|
||||||
|
if (lookup_user and not payload.get('head_commit.author.html_url') and
|
||||||
|
payload.get('head_commit.author.username')):
|
||||||
|
author_info = lookup_user(payload['head_commit.author.username'])
|
||||||
|
if author_info:
|
||||||
|
config['commit_info.author.url'] = author_info['html_url']
|
||||||
|
config['commit_info.author.avatar_url'] = author_info['avatar_url']
|
||||||
|
|
||||||
|
if (lookup_user and
|
||||||
|
payload.get('head_commit.committer.username') and
|
||||||
|
not payload.get('head_commit.committer.html_url')):
|
||||||
|
committer_info = lookup_user(payload['head_commit.committer.username'])
|
||||||
|
if committer_info:
|
||||||
|
config['commit_info.committer.url'] = committer_info['html_url']
|
||||||
|
config['commit_info.committer.avatar_url'] = committer_info['avatar_url']
|
||||||
|
|
||||||
|
return config.dict_value()
|
||||||
|
|
||||||
|
|
||||||
|
class GithubBuildTrigger(BuildTriggerHandler):
|
||||||
|
"""
|
||||||
|
BuildTrigger for GitHub that uses the archive API and buildpacks.
|
||||||
|
"""
|
||||||
|
def _get_client(self):
|
||||||
|
""" Returns an authenticated client for talking to the GitHub API. """
|
||||||
|
return Github(self.auth_token,
|
||||||
|
base_url=github_trigger.api_endpoint(),
|
||||||
|
client_id=github_trigger.client_id(),
|
||||||
|
client_secret=github_trigger.client_secret(),
|
||||||
|
timeout=5)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def service_name(cls):
|
||||||
|
return 'github'
|
||||||
|
|
||||||
|
def is_active(self):
|
||||||
|
return 'hook_id' in self.config
|
||||||
|
|
||||||
|
def get_repository_url(self):
|
||||||
|
source = self.config['build_source']
|
||||||
|
return github_trigger.get_public_url(source)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _get_error_message(ghe, default_msg):
|
||||||
|
if ghe.data.get('errors') and ghe.data['errors'][0].get('message'):
|
||||||
|
return ghe.data['errors'][0]['message']
|
||||||
|
|
||||||
|
return default_msg
|
||||||
|
|
||||||
|
def activate(self, standard_webhook_url):
|
||||||
|
config = self.config
|
||||||
|
new_build_source = config['build_source']
|
||||||
|
gh_client = self._get_client()
|
||||||
|
|
||||||
|
# Find the GitHub repository.
|
||||||
|
try:
|
||||||
|
gh_repo = gh_client.get_repo(new_build_source)
|
||||||
|
except UnknownObjectException:
|
||||||
|
msg = 'Unable to find GitHub repository for source: %s' % new_build_source
|
||||||
|
raise TriggerActivationException(msg)
|
||||||
|
|
||||||
|
# Add a deploy key to the GitHub repository.
|
||||||
|
public_key, private_key = generate_ssh_keypair()
|
||||||
|
config['credentials'] = [
|
||||||
|
{
|
||||||
|
'name': 'SSH Public Key',
|
||||||
|
'value': public_key,
|
||||||
|
},
|
||||||
|
]
|
||||||
|
|
||||||
|
try:
|
||||||
|
deploy_key = gh_repo.create_key('%s Builder' % app.config['REGISTRY_TITLE'],
|
||||||
|
public_key)
|
||||||
|
config['deploy_key_id'] = deploy_key.id
|
||||||
|
except GithubException as ghe:
|
||||||
|
default_msg = 'Unable to add deploy key to repository: %s' % new_build_source
|
||||||
|
msg = GithubBuildTrigger._get_error_message(ghe, default_msg)
|
||||||
|
raise TriggerActivationException(msg)
|
||||||
|
|
||||||
|
# Add the webhook to the GitHub repository.
|
||||||
|
webhook_config = {
|
||||||
|
'url': standard_webhook_url,
|
||||||
|
'content_type': 'json',
|
||||||
|
}
|
||||||
|
|
||||||
|
try:
|
||||||
|
hook = gh_repo.create_hook('web', webhook_config)
|
||||||
|
config['hook_id'] = hook.id
|
||||||
|
config['master_branch'] = gh_repo.default_branch
|
||||||
|
except GithubException:
|
||||||
|
default_msg = 'Unable to create webhook on repository: %s' % new_build_source
|
||||||
|
msg = GithubBuildTrigger._get_error_message(ghe, default_msg)
|
||||||
|
raise TriggerActivationException(msg)
|
||||||
|
|
||||||
|
return config, {'private_key': private_key}
|
||||||
|
|
||||||
|
def deactivate(self):
|
||||||
|
config = self.config
|
||||||
|
gh_client = self._get_client()
|
||||||
|
|
||||||
|
# Find the GitHub repository.
|
||||||
|
try:
|
||||||
|
repo = gh_client.get_repo(config['build_source'])
|
||||||
|
except UnknownObjectException:
|
||||||
|
msg = 'Unable to find GitHub repository for source: %s' % config['build_source']
|
||||||
|
raise TriggerDeactivationException(msg)
|
||||||
|
except GitHubBadCredentialsException:
|
||||||
|
msg = 'Unable to access repository to disable trigger'
|
||||||
|
raise TriggerDeactivationException(msg)
|
||||||
|
|
||||||
|
# If the trigger uses a deploy key, remove it.
|
||||||
|
try:
|
||||||
|
if config['deploy_key_id']:
|
||||||
|
deploy_key = repo.get_key(config['deploy_key_id'])
|
||||||
|
deploy_key.delete()
|
||||||
|
except KeyError:
|
||||||
|
# There was no config['deploy_key_id'], thus this is an old trigger without a deploy key.
|
||||||
|
pass
|
||||||
|
except GithubException as ghe:
|
||||||
|
default_msg = 'Unable to remove deploy key: %s' % config['deploy_key_id']
|
||||||
|
msg = GithubBuildTrigger._get_error_message(ghe, default_msg)
|
||||||
|
raise TriggerDeactivationException(msg)
|
||||||
|
|
||||||
|
# Remove the webhook.
|
||||||
|
try:
|
||||||
|
hook = repo.get_hook(config['hook_id'])
|
||||||
|
hook.delete()
|
||||||
|
except GithubException as ghe:
|
||||||
|
default_msg = 'Unable to remove hook: %s' % config['hook_id']
|
||||||
|
msg = GithubBuildTrigger._get_error_message(ghe, default_msg)
|
||||||
|
raise TriggerDeactivationException(msg)
|
||||||
|
|
||||||
|
config.pop('hook_id', None)
|
||||||
|
self.config = config
|
||||||
|
return config
|
||||||
|
|
||||||
|
  def list_build_sources(self):
    gh_client = self._get_client()
    usr = gh_client.get_user()

    try:
      repos = usr.get_repos()
    except GithubException:
      raise RepositoryReadException('Unable to list user repositories')

    namespaces = {}
    has_non_personal = False

    for repository in repos:
      namespace = repository.owner.login
      if namespace not in namespaces:
        is_personal_repo = namespace == usr.login
        namespaces[namespace] = {
          'personal': is_personal_repo,
          'repos': [],
          'info': {
            'name': namespace,
            'avatar_url': repository.owner.avatar_url
          }
        }

        if not is_personal_repo:
          has_non_personal = True

      namespaces[namespace]['repos'].append(repository.full_name)

    # In older versions of GitHub Enterprise, the get_repos call above does not
    # return any non-personal repositories. In that case, we need to look up the
    # repositories manually.
    # TODO: Remove this once we no longer support GHE versions <= 2.1
    if not has_non_personal:
      for org in usr.get_orgs():
        repo_list = [repo.full_name for repo in org.get_repos(type='member')]
        namespaces[org.name] = {
          'personal': False,
          'repos': repo_list,
          'info': {
            'name': org.name or org.login,
            'avatar_url': org.avatar_url
          }
        }

    entries = list(namespaces.values())
    entries.sort(key=lambda e: e['info']['name'])
    return entries

  def list_build_subdirs(self):
    config = self.config
    gh_client = self._get_client()
    source = config['build_source']

    try:
      repo = gh_client.get_repo(source)

      # Find the first matching branch.
      repo_branches = self.list_field_values('branch_name') or []
      branches = find_matching_branches(config, repo_branches)
      branches = branches or [repo.default_branch or 'master']
      default_commit = repo.get_branch(branches[0]).commit
      commit_tree = repo.get_git_tree(default_commit.sha, recursive=True)

      return [os.path.dirname(elem.path) for elem in commit_tree.tree
              if (elem.type == u'blob' and
                  os.path.basename(elem.path) == u'Dockerfile')]
    except GithubException as ghe:
      message = ghe.data.get('message', 'Unable to list contents of repository: %s' % source)
      if message == 'Branch not found':
        raise EmptyRepositoryException()

      raise RepositoryReadException(message)

  def load_dockerfile_contents(self):
    config = self.config
    gh_client = self._get_client()

    source = config['build_source']
    path = self.get_dockerfile_path()
    try:
      repo = gh_client.get_repo(source)
      file_info = repo.get_file_contents(path)
      if file_info is None:
        return None

      content = file_info.content
      if file_info.encoding == 'base64':
        content = base64.b64decode(content)
      return content

    except GithubException as ghe:
      message = ghe.data.get('message', 'Unable to read Dockerfile: %s' % source)
      raise RepositoryReadException(message)

  def list_field_values(self, field_name, limit=None):
    if field_name == 'refs':
      branches = self.list_field_values('branch_name')
      tags = self.list_field_values('tag_name')

      return ([{'kind': 'branch', 'name': b} for b in branches] +
              [{'kind': 'tag', 'name': tag} for tag in tags])

    config = self.config
    if field_name == 'tag_name':
      try:
        gh_client = self._get_client()
        source = config['build_source']
        repo = gh_client.get_repo(source)
        gh_tags = repo.get_tags()
        if limit:
          gh_tags = gh_tags[0:limit]

        return [tag.name for tag in gh_tags]
      except GitHubBadCredentialsException:
        return []
      except GithubException:
        logger.exception("Got GitHub Exception when trying to list tags for trigger %s",
                         self.trigger.id)
        return []

    if field_name == 'branch_name':
      try:
        gh_client = self._get_client()
        source = config['build_source']
        repo = gh_client.get_repo(source)
        gh_branches = repo.get_branches()
        if limit:
          gh_branches = gh_branches[0:limit]

        branches = [branch.name for branch in gh_branches]

        if repo.default_branch not in branches:
          branches.insert(0, repo.default_branch)

        if branches[0] != repo.default_branch:
          branches.remove(repo.default_branch)
          branches.insert(0, repo.default_branch)

        return branches
      except GitHubBadCredentialsException:
        return ['master']
      except GithubException:
        logger.exception("Got GitHub Exception when trying to list branches for trigger %s",
                         self.trigger.id)
        return ['master']

    return None

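  # Illustrative note (not from the change itself): for field_name == 'refs' the
  # merged result interleaves both kinds, e.g.
  #   [{'kind': 'branch', 'name': 'master'}, {'kind': 'tag', 'name': 'v1.0'}]
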
  @classmethod
  def _build_metadata_for_commit(cls, commit_sha, ref, repo):
    try:
      commit = repo.get_commit(commit_sha)
    except GithubException:
      logger.exception('Could not load commit information from GitHub')
      return None

    commit_info = {
      'url': commit.html_url,
      'message': commit.commit.message,
      'date': commit.last_modified
    }

    if commit.author:
      commit_info['author'] = {
        'username': commit.author.login,
        'avatar_url': commit.author.avatar_url,
        'url': commit.author.html_url
      }

    if commit.committer:
      commit_info['committer'] = {
        'username': commit.committer.login,
        'avatar_url': commit.committer.avatar_url,
        'url': commit.committer.html_url
      }

    return {
      'commit': commit_sha,
      'ref': ref,
      'default_branch': repo.default_branch,
      'git_url': repo.ssh_url,
      'commit_info': commit_info
    }

  def manual_start(self, run_parameters=None):
    config = self.config
    source = config['build_source']

    try:
      gh_client = self._get_client()
      repo = gh_client.get_repo(source)
      default_branch = repo.default_branch
    except GithubException as ghe:
      msg = GithubBuildTrigger._get_error_message(ghe, 'Unable to start build trigger')
      raise TriggerStartException(msg)

    def get_branch_sha(branch_name):
      branch = repo.get_branch(branch_name)
      return branch.commit.sha

    def get_tag_sha(tag_name):
      tags = {tag.name: tag for tag in repo.get_tags()}
      if tag_name not in tags:
        raise TriggerStartException('Could not find tag in repository')

      return tags[tag_name].commit.sha

    # Find the branch or tag to build.
    (commit_sha, ref) = determine_build_ref(run_parameters, get_branch_sha, get_tag_sha,
                                            default_branch)

    metadata = GithubBuildTrigger._build_metadata_for_commit(commit_sha, ref, repo)
    return self.prepare_build(metadata, is_manual=True)

  def lookup_user(self, username):
    try:
      gh_client = self._get_client()
      user = gh_client.get_user(username)
      return {
        'html_url': user.html_url,
        'avatar_url': user.avatar_url
      }
    except GithubException:
      return None

  def handle_trigger_request(self, request):
    # Check the payload to see if we should skip it based on the lack of a head_commit.
    payload = request.get_json()

    # This is for GitHub's probing/testing.
    if 'zen' in payload:
      raise ValidationRequestException()

    # Look up the default branch for the repository.
    default_branch = None
    lookup_user = None
    try:
      repo_full_name = '%s/%s' % (payload['repository']['owner']['name'],
                                  payload['repository']['name'])

      gh_client = self._get_client()
      repo = gh_client.get_repo(repo_full_name)
      default_branch = repo.default_branch
      lookup_user = self.lookup_user
    except GitHubBadCredentialsException:
      logger.exception('Got GitHub Credentials Exception; Cannot lookup default branch')
    except GithubException:
      logger.exception("Got GitHub Exception when trying to start trigger %s", self.trigger.id)
      raise SkipRequestException()

    logger.debug('GitHub trigger payload %s', payload)
    metadata = get_transformed_webhook_payload(payload, default_branch=default_branch,
                                               lookup_user=lookup_user)
    prepared = self.prepare_build(metadata)

    # Check if we should skip this build.
    raise_if_skipped_build(prepared, self.config)
    return prepared

432 buildtrigger/gitlabhandler.py Normal file
@ -0,0 +1,432 @@
import logging

from functools import wraps

from app import app

from jsonschema import validate
from buildtrigger.triggerutil import (RepositoryReadException, TriggerActivationException,
                                      TriggerDeactivationException, TriggerStartException,
                                      SkipRequestException, InvalidPayloadException,
                                      determine_build_ref, raise_if_skipped_build,
                                      find_matching_branches)

from buildtrigger.basehandler import BuildTriggerHandler

from util.security.ssh import generate_ssh_keypair
from util.dict_wrappers import JSONPathDict, SafeDictSetter
from endpoints.api import ExternalServiceTimeout

import gitlab
import requests

logger = logging.getLogger(__name__)

GITLAB_WEBHOOK_PAYLOAD_SCHEMA = {
  'type': 'object',
  'properties': {
    'ref': {
      'type': 'string',
    },
    'checkout_sha': {
      'type': 'string',
    },
    'repository': {
      'type': 'object',
      'properties': {
        'git_ssh_url': {
          'type': 'string',
        },
      },
      'required': ['git_ssh_url'],
    },
    'commits': {
      'type': 'array',
      'items': {
        'type': 'object',
        'properties': {
          'url': {
            'type': 'string',
          },
          'message': {
            'type': 'string',
          },
          'timestamp': {
            'type': 'string',
          },
          'author': {
            'type': 'object',
            'properties': {
              'email': {
                'type': 'string',
              },
            },
            'required': ['email'],
          },
        },
        'required': ['url', 'message', 'timestamp'],
      },
      'minItems': 1,
    }
  },
  'required': ['ref', 'checkout_sha', 'repository'],
}

def _catch_timeouts(func):
  @wraps(func)
  def wrapper(*args, **kwargs):
    try:
      return func(*args, **kwargs)
    except requests.exceptions.Timeout:
      msg = 'Request to the GitLab API timed out'
      logger.exception(msg)
      raise ExternalServiceTimeout(msg)
  return wrapper


def get_transformed_webhook_payload(gl_payload, default_branch=None, lookup_user=None):
  """ Returns the GitLab webhook JSON payload transformed into our own payload
      format. If the gl_payload is not valid, raises an InvalidPayloadException.
  """
  try:
    validate(gl_payload, GITLAB_WEBHOOK_PAYLOAD_SCHEMA)
  except Exception as exc:
    raise InvalidPayloadException(exc.message)

  payload = JSONPathDict(gl_payload)

  config = SafeDictSetter()
  config['commit'] = payload['checkout_sha']
  config['ref'] = payload['ref']
  config['default_branch'] = default_branch
  config['git_url'] = payload['repository.git_ssh_url']

  config['commit_info.url'] = payload['commits[0].url']
  config['commit_info.message'] = payload['commits[0].message']
  config['commit_info.date'] = payload['commits[0].timestamp']

  # Note: GitLab does not send full user information with the payload, so we have to
  # (optionally) look it up.
  author_email = payload['commits[0].author.email']
  if lookup_user and author_email:
    author_info = lookup_user(author_email)
    if author_info:
      config['commit_info.author.username'] = author_info['username']
      config['commit_info.author.url'] = author_info['html_url']
      config['commit_info.author.avatar_url'] = author_info['avatar_url']

  return config.dict_value()

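# Illustrative sketch (hypothetical values, not part of the change): a minimal
# push payload accepted by the schema above should transform as follows.
#
#   gl_payload = {
#     'ref': 'refs/heads/master',
#     'checkout_sha': 'abc123',
#     'repository': {'git_ssh_url': 'git@gitlab.com:someorg/somerepo.git'},
#   }
#   metadata = get_transformed_webhook_payload(gl_payload)
#   # metadata['commit'] == 'abc123'
#   # metadata['git_url'] == 'git@gitlab.com:someorg/somerepo.git'
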
class GitLabBuildTrigger(BuildTriggerHandler):
  """
  BuildTrigger for GitLab.
  """
  @classmethod
  def service_name(cls):
    return 'gitlab'

  def _get_authorized_client(self):
    host = app.config.get('GITLAB_TRIGGER_CONFIG', {}).get('GITLAB_ENDPOINT', '')
    auth_token = self.auth_token or 'invalid'
    return gitlab.Gitlab(host, oauth_token=auth_token, timeout=5)

  def is_active(self):
    return 'hook_id' in self.config

  @_catch_timeouts
  def activate(self, standard_webhook_url):
    config = self.config
    new_build_source = config['build_source']
    gl_client = self._get_authorized_client()

    # Find the GitLab repository.
    repository = gl_client.getproject(new_build_source)
    if repository is False:
      msg = 'Unable to find GitLab repository for source: %s' % new_build_source
      raise TriggerActivationException(msg)

    # Add a deploy key to the repository.
    public_key, private_key = generate_ssh_keypair()
    config['credentials'] = [
      {
        'name': 'SSH Public Key',
        'value': public_key,
      },
    ]
    key = gl_client.adddeploykey(repository['id'], '%s Builder' % app.config['REGISTRY_TITLE'],
                                 public_key)
    if key is False:
      msg = 'Unable to add deploy key to repository: %s' % new_build_source
      raise TriggerActivationException(msg)
    config['key_id'] = key['id']

    # Add the webhook to the GitLab repository.
    hook = gl_client.addprojecthook(repository['id'], standard_webhook_url, push=True)
    if hook is False:
      msg = 'Unable to create webhook on repository: %s' % new_build_source
      raise TriggerActivationException(msg)

    config['hook_id'] = hook['id']
    self.config = config
    return config, {'private_key': private_key}

  def deactivate(self):
    config = self.config
    gl_client = self._get_authorized_client()

    # Find the GitLab repository.
    repository = gl_client.getproject(config['build_source'])
    if repository is False:
      msg = 'Unable to find GitLab repository for source: %s' % config['build_source']
      raise TriggerDeactivationException(msg)

    # Remove the webhook.
    success = gl_client.deleteprojecthook(repository['id'], config['hook_id'])
    if success is False:
      msg = 'Unable to remove hook: %s' % config['hook_id']
      raise TriggerDeactivationException(msg)
    config.pop('hook_id', None)

    # Remove the key.
    success = gl_client.deletedeploykey(repository['id'], config['key_id'])
    if success is False:
      msg = 'Unable to remove deploy key: %s' % config['key_id']
      raise TriggerDeactivationException(msg)
    config.pop('key_id', None)

    self.config = config

    return config

  @_catch_timeouts
  def list_build_sources(self):
    gl_client = self._get_authorized_client()
    current_user = gl_client.currentuser()
    if current_user is False:
      raise RepositoryReadException('Unable to get current user')

    repositories = gl_client.getprojects()
    if repositories is False:
      raise RepositoryReadException('Unable to list user repositories')

    namespaces = {}
    for repo in repositories:
      owner = repo['namespace']['name']
      if owner not in namespaces:
        namespaces[owner] = {
          'personal': owner == current_user['username'],
          'repos': [],
          'info': {
            'name': owner,
          }
        }

      namespaces[owner]['repos'].append(repo['path_with_namespace'])

    return namespaces.values()

  @_catch_timeouts
  def list_build_subdirs(self):
    config = self.config
    gl_client = self._get_authorized_client()
    new_build_source = config['build_source']

    repository = gl_client.getproject(new_build_source)
    if repository is False:
      msg = 'Unable to find GitLab repository for source: %s' % new_build_source
      raise RepositoryReadException(msg)

    repo_branches = gl_client.getbranches(repository['id'])
    if repo_branches is False:
      msg = 'Unable to find GitLab branches for source: %s' % new_build_source
      raise RepositoryReadException(msg)

    branches = [branch['name'] for branch in repo_branches]
    branches = find_matching_branches(config, branches)
    branches = branches or [repository['default_branch'] or 'master']

    repo_tree = gl_client.getrepositorytree(repository['id'], ref_name=branches[0])
    if repo_tree is False:
      msg = 'Unable to find GitLab repository tree for source: %s' % new_build_source
      raise RepositoryReadException(msg)

    for node in repo_tree:
      if node['name'] == 'Dockerfile':
        return ['/']

    return []

  @_catch_timeouts
  def load_dockerfile_contents(self):
    gl_client = self._get_authorized_client()
    path = self.get_dockerfile_path()

    repository = gl_client.getproject(self.config['build_source'])
    if repository is False:
      return None

    branches = self.list_field_values('branch_name')
    branches = find_matching_branches(self.config, branches)
    if branches == []:
      return None

    branch_name = branches[0]
    if repository['default_branch'] in branches:
      branch_name = repository['default_branch']

    contents = gl_client.getrawfile(repository['id'], branch_name, path)
    if contents is False:
      return None

    return contents

  @_catch_timeouts
  def list_field_values(self, field_name, limit=None):
    if field_name == 'refs':
      branches = self.list_field_values('branch_name')
      tags = self.list_field_values('tag_name')

      return ([{'kind': 'branch', 'name': b} for b in branches] +
              [{'kind': 'tag', 'name': t} for t in tags])

    gl_client = self._get_authorized_client()
    repo = gl_client.getproject(self.config['build_source'])
    if repo is False:
      return []

    if field_name == 'tag_name':
      tags = gl_client.getrepositorytags(repo['id'])
      if tags is False:
        return []

      if limit:
        tags = tags[0:limit]

      return [tag['name'] for tag in tags]

    if field_name == 'branch_name':
      branches = gl_client.getbranches(repo['id'])
      if branches is False:
        return []

      if limit:
        branches = branches[0:limit]

      return [branch['name'] for branch in branches]

    return None

  def get_repository_url(self):
    return 'https://gitlab.com/%s' % self.config['build_source']

  @_catch_timeouts
  def lookup_user(self, email):
    gl_client = self._get_authorized_client()
    try:
      [user] = gl_client.getusers(search=email)

      return {
        'username': user['username'],
        'html_url': gl_client.host + '/' + user['username'],
        'avatar_url': user['avatar_url']
      }
    except ValueError:
      return None

  @_catch_timeouts
  def get_metadata_for_commit(self, commit_sha, ref, repo):
    gl_client = self._get_authorized_client()
    commit = gl_client.getrepositorycommit(repo['id'], commit_sha)

    metadata = {
      'commit': commit['id'],
      'ref': ref,
      'default_branch': repo['default_branch'],
      'git_url': repo['ssh_url_to_repo'],
      'commit_info': {
        'url': gl_client.host + '/' + repo['path_with_namespace'] + '/commit/' + commit['id'],
        'message': commit['message'],
        'date': commit['committed_date'],
      },
    }

    committer = None
    if 'committer_email' in commit:
      committer = self.lookup_user(commit['committer_email'])

    author = None
    if 'author_email' in commit:
      author = self.lookup_user(commit['author_email'])

    if committer is not None:
      metadata['commit_info']['committer'] = {
        'username': committer['username'],
        'avatar_url': committer['avatar_url'],
        'url': gl_client.host + '/' + committer['username'],
      }

    if author is not None:
      metadata['commit_info']['author'] = {
        'username': author['username'],
        'avatar_url': author['avatar_url'],
        'url': gl_client.host + '/' + author['username']
      }

    return metadata

  @_catch_timeouts
  def manual_start(self, run_parameters=None):
    gl_client = self._get_authorized_client()

    repo = gl_client.getproject(self.config['build_source'])
    if repo is False:
      raise TriggerStartException('Could not find repository')

    def get_tag_sha(tag_name):
      tags = gl_client.getrepositorytags(repo['id'])
      if tags is False:
        raise TriggerStartException('Could not find tags')

      for tag in tags:
        if tag['name'] == tag_name:
          return tag['commit']['id']

      raise TriggerStartException('Could not find commit')

    def get_branch_sha(branch_name):
      branch = gl_client.getbranch(repo['id'], branch_name)
      if branch is False:
        raise TriggerStartException('Could not find branch')

      return branch['commit']['id']

    # Find the branch or tag to build.
    (commit_sha, ref) = determine_build_ref(run_parameters, get_branch_sha, get_tag_sha,
                                            repo['default_branch'])

    metadata = self.get_metadata_for_commit(commit_sha, ref, repo)
    return self.prepare_build(metadata, is_manual=True)

  @_catch_timeouts
  def handle_trigger_request(self, request):
    payload = request.get_json()
    if not payload:
      raise SkipRequestException()

    # Look up the default branch.
    default_branch = None
    lookup_user = None
    gl_client = self._get_authorized_client()
    repo = gl_client.getproject(self.config['build_source'])
    if repo is not False:
      default_branch = repo['default_branch']
      lookup_user = self.lookup_user

    logger.debug('GitLab trigger payload %s', payload)
    metadata = get_transformed_webhook_payload(payload, default_branch=default_branch,
                                               lookup_user=lookup_user)
    prepared = self.prepare_build(metadata)

    # Check if we should skip this build.
    raise_if_skipped_build(prepared, self.config)
    return prepared
124 buildtrigger/triggerutil.py Normal file
@ -0,0 +1,124 @@
import json
import io
import logging
import re


class InvalidPayloadException(Exception):
  pass

class BuildArchiveException(Exception):
  pass

class InvalidServiceException(Exception):
  pass

class TriggerActivationException(Exception):
  pass

class TriggerDeactivationException(Exception):
  pass

class TriggerStartException(Exception):
  pass

class ValidationRequestException(Exception):
  pass

class SkipRequestException(Exception):
  pass

class EmptyRepositoryException(Exception):
  pass

class RepositoryReadException(Exception):
  pass

class TriggerProviderException(Exception):
  pass

logger = logging.getLogger(__name__)

def determine_build_ref(run_parameters, get_branch_sha, get_tag_sha, default_branch):
  run_parameters = run_parameters or {}

  kind = ''
  value = ''

  if 'refs' in run_parameters and run_parameters['refs']:
    kind = run_parameters['refs']['kind']
    value = run_parameters['refs']['name']
  elif 'branch_name' in run_parameters:
    kind = 'branch'
    value = run_parameters['branch_name']

  kind = kind or 'branch'
  value = value or default_branch

  ref = 'refs/tags/' + value if kind == 'tag' else 'refs/heads/' + value
  commit_sha = get_tag_sha(value) if kind == 'tag' else get_branch_sha(value)
  return (commit_sha, ref)

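# Illustrative usage (hypothetical callbacks and values): a manual run that
# requests a tag resolves through get_tag_sha, while empty run_parameters fall
# back to the default branch.
#
#   sha, ref = determine_build_ref({'refs': {'kind': 'tag', 'name': 'v1.0'}},
#                                  get_branch_sha, get_tag_sha, 'master')
#   # ref == 'refs/tags/v1.0'; sha comes from get_tag_sha('v1.0')
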
def find_matching_branches(config, branches):
  if 'branchtag_regex' in config:
    try:
      regex = re.compile(config['branchtag_regex'])
      return [branch for branch in branches
              if matches_ref('refs/heads/' + branch, regex)]
    except re.error:
      pass

  return branches

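# Illustrative sketch (hypothetical config): matches_ref below strips the
# leading 'refs/' before matching, so the configured pattern is compared
# against 'heads/<branch>':
#
#   find_matching_branches({'branchtag_regex': 'heads/(master|prod)'},
#                          ['master', 'prod', 'dev'])   # -> ['master', 'prod']
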
def should_skip_commit(metadata):
  if 'commit_info' in metadata:
    message = metadata['commit_info']['message']
    return '[skip build]' in message or '[build skip]' in message
  return False


def raise_if_skipped_build(prepared_build, config):
  """ Raises a SkipRequestException if the given build should be skipped. """
  # Check to ensure we have metadata.
  if not prepared_build.metadata:
    logger.debug('Skipping request due to missing metadata for prepared build')
    raise SkipRequestException()

  # Check the branchtag regex.
  if 'branchtag_regex' in config:
    try:
      regex = re.compile(config['branchtag_regex'])
    except re.error:
      regex = re.compile('.*')

    if not matches_ref(prepared_build.metadata.get('ref'), regex):
      raise SkipRequestException()

  # Check the commit message.
  if should_skip_commit(prepared_build.metadata):
    logger.debug('Skipping request due to commit message')
    raise SkipRequestException()


def matches_ref(ref, regex):
  if not regex or not ref:
    return False

  match_string = ref.split('/', 1)[1]
  m = regex.match(match_string)
  if not m:
    return False

  return len(m.group(0)) == len(match_string)

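# Sketch of the matching semantics (hypothetical patterns): the leading 'refs/'
# is stripped and the pattern must consume the entire remainder.
#
#   matches_ref('refs/heads/master', re.compile('heads/master'))   # True
#   matches_ref('refs/heads/master', re.compile('heads/mas'))      # False (partial match)
#   matches_ref('refs/tags/v1.0', re.compile('heads/.+'))          # False
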
def raise_unsupported():
  raise io.UnsupportedOperation


def get_trigger_config(trigger):
  try:
    return json.loads(trigger.config)
  except ValueError:
    return {}

@ -5,7 +5,7 @@ real_ip_recursive on;
 log_format lb_pp '$remote_addr ($proxy_protocol_addr) '
     '- $remote_user [$time_local] '
     '"$request" $status $body_bytes_sent '
-    '"$http_referer" "$http_user_agent"'
+    '"$http_referer" "$http_user_agent"';

 types_hash_max_size 2048;
 include /usr/local/nginx/conf/mime.types.default;
@ -4,6 +4,10 @@ server_name _;
 keepalive_timeout 5;

+if ($host = "www.quay.io") {
+    return 301 $scheme://quay.io$request_uri;
+}
+
 if ($args ~ "_escaped_fragment_") {
     rewrite ^ /snapshot$uri;
 }
@ -1,5 +1,6 @@
 import redis
 import json
+import time

 from util.dynamic import import_class
 from datetime import timedelta
@ -65,7 +66,6 @@ class RedisBuildLogs(object):
     """
     self._redis.expire(self._logs_key(build_id), ONE_DAY)

-
   @staticmethod
   def _status_key(build_id):
     return 'builds/%s/status' % build_id
@ -88,9 +88,20 @@ class RedisBuildLogs(object):
     return json.loads(fetched) if fetched else None

+  @staticmethod
+  def _health_key():
+    return '_health'
+
   def check_health(self):
     try:
-      return self._redis.ping() == True
+      if not self._redis.ping() == True:
+        return False
+
+      # Ensure we can write and read a key.
+      self._redis.set(self._health_key(), time.time())
+      self._redis.get(self._health_key())
+
+      return True
     except redis.ConnectionError:
       return False
@ -491,12 +491,8 @@ class EmailConfirmation(BaseModel):
 class ImageStorage(BaseModel):
   uuid = CharField(default=uuid_generator, index=True, unique=True)
   checksum = CharField(null=True)
-  created = DateTimeField(null=True)
-  comment = TextField(null=True)
-  command = TextField(null=True)
   image_size = BigIntegerField(null=True)
   uncompressed_size = BigIntegerField(null=True)
-  aggregate_size = BigIntegerField(null=True)
   uploading = BooleanField(default=True, null=True)
   cas_path = BooleanField(default=True)
@ -0,0 +1,32 @@
"""Remove the deprecated imagestorage columns.

Revision ID: 127905a52fdd
Revises: 2e0380215d01
Create Date: 2015-09-17 15:48:56.667823

"""

# revision identifiers, used by Alembic.
revision = '127905a52fdd'
down_revision = '2e0380215d01'

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql

def upgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.drop_column('imagestorage', 'comment')
  op.drop_column('imagestorage', 'aggregate_size')
  op.drop_column('imagestorage', 'command')
  op.drop_column('imagestorage', 'created')
  ### end Alembic commands ###


def downgrade(tables):
  ### commands auto generated by Alembic - please adjust! ###
  op.add_column('imagestorage', sa.Column('created', mysql.DATETIME(), nullable=True))
  op.add_column('imagestorage', sa.Column('command', mysql.TEXT(), nullable=True))
  op.add_column('imagestorage', sa.Column('aggregate_size', mysql.BIGINT(display_width=20), autoincrement=False, nullable=True))
  op.add_column('imagestorage', sa.Column('comment', mysql.TEXT(), nullable=True))
  ### end Alembic commands ###
@ -0,0 +1,24 @@
"""Backfill image fields from image storages

Revision ID: 2e0380215d01
Revises: 3ff4fbc94644
Create Date: 2015-09-15 16:57:42.850246

"""

# revision identifiers, used by Alembic.
revision = '2e0380215d01'
down_revision = '3ff4fbc94644'

from alembic import op
import sqlalchemy as sa
from util.migrate.backfill_image_fields import backfill_image_fields
from util.migrate.backfill_v1_metadata import backfill_v1_metadata


def upgrade(tables):
  backfill_image_fields()
  backfill_v1_metadata()


def downgrade(tables):
  pass
@ -1,10 +1,22 @@
-from peewee import JOIN_LEFT_OUTER
+from peewee import JOIN_LEFT_OUTER, Clause, SQL
 from cachetools import lru_cache

 from data.database import (Repository, User, Team, TeamMember, RepositoryPermission, TeamRole,
                            Namespace, Visibility, db_for_update)


+def prefix_search(field, prefix_query):
+  """ Returns the wildcard match for searching for the given prefix query. """
+  # Escape the known wildcard characters.
+  prefix_query = (prefix_query
+                  .replace('!', '!!')
+                  .replace('%', '!%')
+                  .replace('_', '!_')
+                  .replace('[', '!['))
+
+  return field ** Clause(prefix_query + '%', SQL("ESCAPE '!'"))
+
+
 def get_existing_repository(namespace_name, repository_name, for_update=False):
   query = (Repository
            .select(Repository, Namespace)
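# Illustrative note on prefix_search above (hypothetical value): a prefix such
# as '50%_off' is escaped to '50!%!_off', yielding the SQL predicate
#   name LIKE '50!%!_off%' ESCAPE '!'
# so '%' and '_' typed by the user are treated literally, not as wildcards.
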
@ -25,7 +37,18 @@ def filter_to_repos_for_user(query, username=None, namespace=None, include_publi
   if not include_public and not username:
     return Repository.select().where(Repository.id == '-1')

-  where_clause = None
+  # Build a set of queries that, when unioned together, return the full set of visible repositories
+  # for the filters specified.
+  queries = []
+
+  where_clause = (True)
+  if namespace:
+    where_clause = (Namespace.username == namespace)
+
+  if include_public:
+    queries.append(query.clone()
+                   .where(Repository.visibility == get_public_repo_visibility(), where_clause))
+
   if username:
     UserThroughTeam = User.alias()
     Org = User.alias()
@ -33,37 +56,32 @@ def filter_to_repos_for_user(query, username=None, namespace=None, include_publi
     AdminTeamMember = TeamMember.alias()
     AdminUser = User.alias()

-    query = (query
-             .switch(RepositoryPermission)
-             .join(User, JOIN_LEFT_OUTER)
-             .switch(RepositoryPermission)
-             .join(Team, JOIN_LEFT_OUTER)
-             .join(TeamMember, JOIN_LEFT_OUTER)
-             .join(UserThroughTeam, JOIN_LEFT_OUTER, on=(UserThroughTeam.id == TeamMember.user))
-             .switch(Repository)
-             .join(Org, JOIN_LEFT_OUTER, on=(Repository.namespace_user == Org.id))
-             .join(AdminTeam, JOIN_LEFT_OUTER, on=(Org.id == AdminTeam.organization))
-             .join(TeamRole, JOIN_LEFT_OUTER, on=(AdminTeam.role == TeamRole.id))
-             .switch(AdminTeam)
-             .join(AdminTeamMember, JOIN_LEFT_OUTER, on=(AdminTeam.id == AdminTeamMember.team))
-             .join(AdminUser, JOIN_LEFT_OUTER, on=(AdminTeamMember.user == AdminUser.id)))
-
-    where_clause = ((User.username == username) | (UserThroughTeam.username == username) |
-                    ((AdminUser.username == username) & (TeamRole.name == 'admin')))
-
-  if namespace:
-    where_clause = where_clause & (Namespace.username == namespace)
-
-  # TODO(jschorr, jake): Figure out why the old join on Visibility was so darn slow and
-  # remove this hack.
-  if include_public:
-    new_clause = (Repository.visibility == get_public_repo_visibility())
-    if where_clause:
-      where_clause = where_clause | new_clause
-    else:
-      where_clause = new_clause
-
-  return query.where(where_clause)
+    # Add repositories in which the user has permission.
+    queries.append(query.clone()
+                   .switch(RepositoryPermission)
+                   .join(User)
+                   .where(User.username == username, where_clause))
+
+    # Add repositories in which the user is a member of a team that has permission.
+    queries.append(query.clone()
+                   .switch(RepositoryPermission)
+                   .join(Team)
+                   .join(TeamMember)
+                   .join(UserThroughTeam, on=(UserThroughTeam.id == TeamMember.user))
+                   .where(UserThroughTeam.username == username, where_clause))
+
+    # Add repositories under namespaces in which the user is the org admin.
+    queries.append(query.clone()
+                   .switch(Repository)
+                   .join(Org, on=(Repository.namespace_user == Org.id))
+                   .join(AdminTeam, on=(Org.id == AdminTeam.organization))
+                   .join(TeamRole, on=(AdminTeam.role == TeamRole.id))
+                   .switch(AdminTeam)
+                   .join(AdminTeamMember, on=(AdminTeam.id == AdminTeamMember.team))
+                   .join(AdminUser, on=(AdminTeamMember.user == AdminUser.id))
+                   .where(AdminUser.username == username, where_clause))
+
+  return reduce(lambda l, r: l | r, queries)


 def get_user_organizations(username):
@ -79,11 +79,14 @@ def get_repository_images_base(namespace_name, repository_name, query_modifier):
            .where(Repository.name == repository_name, Namespace.username == namespace_name))

   query = query_modifier(query)
-  return _translate_placements_to_images_with_locations(query)
+  return invert_placement_query_results(query)


-def _translate_placements_to_images_with_locations(query):
-  location_list = list(query)
+def invert_placement_query_results(placement_query):
+  """ Takes a query which returns placements, storages, and images, and inverts it to
+      return images and their storages, along with the placement set on each storage.
+  """
+  location_list = list(placement_query)

   images = {}
   for location in location_list:
@ -192,7 +195,12 @@ def _find_or_link_image(existing_image, repo_obj, username, translations, prefer

     new_image = Image.create(docker_image_id=existing_image.docker_image_id,
                              repository=repo_obj, storage=copied_storage,
-                             ancestors=new_image_ancestry)
+                             ancestors=new_image_ancestry,
+                             command=existing_image.command,
+                             created=existing_image.created,
+                             comment=existing_image.comment,
+                             aggregate_size=existing_image.aggregate_size)

     logger.debug('Storing translation %s -> %s', existing_image.id, new_image.id)
     translations[existing_image.id] = new_image.id
@ -274,24 +282,15 @@ def set_image_metadata(docker_image_id, namespace_name, repository_name, created

   # We cleanup any old checksum in case it's a retry after a fail
   fetched.storage.checksum = None
-  now = datetime.now()
-  # TODO stop writing to storage when all readers are removed
-  fetched.storage.created = now
-  fetched.created = now
+  fetched.created = datetime.now()

   if created_date_str is not None:
     try:
-      # TODO stop writing to storage fields when all readers are removed
-      parsed_created_time = dateutil.parser.parse(created_date_str).replace(tzinfo=None)
-      fetched.created = parsed_created_time
-      fetched.storage.created = parsed_created_time
+      fetched.created = dateutil.parser.parse(created_date_str).replace(tzinfo=None)
     except:
       # parse raises different exceptions, so we cannot use a specific kind of handler here.
       pass

-  # TODO stop writing to storage fields when all readers are removed
-  fetched.storage.comment = comment
-  fetched.storage.command = command
   fetched.comment = comment
   fetched.command = command
   fetched.v1_json_metadata = v1_json_metadata
@ -304,6 +303,9 @@ def set_image_metadata(docker_image_id, namespace_name, repository_name, created


 def set_image_size(docker_image_id, namespace_name, repository_name, image_size, uncompressed_size):
+  if image_size is None:
+    raise DataModelException('Empty image size field')
+
   try:
     image = (Image
              .select(Image, ImageStorage)
@ -314,7 +316,6 @@ def set_image_size(docker_image_id, namespace_name, repository_name, image_size,
              .where(Repository.name == repository_name, Namespace.username == namespace_name,
                     Image.docker_image_id == docker_image_id)
              .get())
-
   except Image.DoesNotExist:
     raise DataModelException('No image with specified id and repository')
@ -326,21 +327,17 @@ def set_image_size(docker_image_id, namespace_name, repository_name, image_size,
     try:
       # TODO(jschorr): Switch to this faster route once we have full ancestor aggregate_size
       # parent_image = Image.get(Image.id == ancestors[-1])
-      # total_size = image_size + parent_image.storage.aggregate_size
-      total_size = (ImageStorage
-                    .select(fn.Sum(ImageStorage.image_size))
-                    .join(Image)
-                    .where(Image.id << ancestors)
-                    .scalar()) + image_size
+      ancestor_size = (ImageStorage
+                       .select(fn.Sum(ImageStorage.image_size))
+                       .join(Image)
+                       .where(Image.id << ancestors)
+                       .scalar())

-      # TODO stop writing to storage when all readers are removed
-      image.storage.aggregate_size = total_size
-      image.aggregate_size = total_size
+      if ancestor_size is not None:
+        image.aggregate_size = ancestor_size + image_size
     except Image.DoesNotExist:
       pass
   else:
-    # TODO stop writing to storage when all readers are removed
-    image.storage.aggregate_size = image_size
     image.aggregate_size = image_size

   image.storage.save()
@ -374,24 +371,6 @@ def get_repo_image_by_storage_checksum(namespace, repository_name, storage_check
     raise InvalidImageException(msg)


-def has_image_json(image):
-  """ Returns the whether there exists a JSON definition data for the image. """
-  if image.v1_json_metadata:
-    return bool(image.v1_json_metadata)
-
-  store = config.store
-  return store.exists(image.storage.locations, store.image_json_path(image.storage.uuid))
-
-
-def get_image_json(image):
-  """ Returns the JSON definition data for the image. """
-  if image.v1_json_metadata:
-    return image.v1_json_metadata
-
-  store = config.store
-  return store.get_content(image.storage.locations, store.image_json_path(image.storage.uuid))
-
-
 def get_image_layers(image):
   """ Returns a list of the full layers of an image, including itself (if specified), sorted
       from base image outward. """
@ -6,6 +6,7 @@ from cachetools import lru_cache

 from data.database import LogEntry, LogEntryKind, User, db

+# TODO: Find a way to get logs without slowing down pagination significantly.
 def _logs_query(selections, start_time, end_time, performer=None, repository=None, namespace=None):
   joined = (LogEntry
             .select(*selections)
@ -14,6 +14,10 @@ from data.database import (Repository, Namespace, RepositoryTag, Star, Image, Im
 logger = logging.getLogger(__name__)


+def get_public_repo_visibility():
+  return _basequery.get_public_repo_visibility()
+
+
 def create_repository(namespace, name, creating_user, visibility='private'):
   private = Visibility.get(name=visibility)
   namespace_user = User.get(username=namespace)
@ -64,11 +68,7 @@ def purge_repository(namespace_name, repository_name):
   fetched.delete_instance(recursive=True, delete_nullable=False)


-def find_repository_with_garbage(filter_list=None):
-  # TODO(jschorr): Remove the filter once we have turned the experiment on for everyone.
-  if filter_list is not None and not filter_list:
-    return None
-
+def find_repository_with_garbage():
   epoch_timestamp = get_epoch_timestamp()

   try:
@ -80,11 +80,9 @@ def find_repository_with_garbage(filter_list=None):
                     (RepositoryTag.lifetime_end_ts <=
                      (epoch_timestamp - Namespace.removed_tag_expiration_s)))
              .limit(500)
+             .distinct()
              .alias('candidates'))

-  if filter_list:
-    candidates = candidates.where(Namespace.username << filter_list)
-
   found = (RepositoryTag
            .select(candidates.c.repository_id)
            .from_(candidates)
@ -102,11 +100,6 @@ def find_repository_with_garbage(filter_list=None):


 def garbage_collect_repository(namespace_name, repository_name):
-  # If the namespace is the async experiment, don't perform garbage collection here.
-  # TODO(jschorr): Remove this check once we have turned the experiment on for everyone.
-  if namespace_name in config.app_config.get('EXP_ASYNC_GARBAGE_COLLECTION', []):
-    return
-
   repo = get_repository(namespace_name, repository_name)
   if repo is not None:
     garbage_collect_repo(repo)
@ -247,28 +240,10 @@ def get_visible_repositories(username, namespace=None, page=None, limit=None, in
   if not include_public and not username:
     return []

-  fields = [Repository.name, Repository.id, Repository.description, Visibility.name,
-            Namespace.username]
-
-  query = _visible_repository_query(username=username, page=page,
-                                    limit=limit, namespace=namespace, include_public=include_public,
-                                    select_models=fields)
-
-  if limit:
-    query = query.limit(limit)
-
-  if namespace:
-    query = query.where(Namespace.username == namespace)
-
-  return query
-
-
-def _visible_repository_query(username=None, include_public=True, limit=None,
-                              page=None, namespace=None, select_models=[]):
   query = (Repository
-           .select(*select_models)  # MySQL/RDS complains is there are selected models for counts.
+           .select(Repository.name, Repository.id, Repository.description, Namespace.username,
+                   Repository.visibility)
            .distinct()
-           .join(Visibility)
            .switch(Repository)
            .join(Namespace, on=(Repository.namespace_user == Namespace.id))
            .switch(Repository)
@ -338,36 +313,15 @@ def get_sorted_matching_repositories(prefix, only_public, checker, limit=10):
   # For performance reasons, we conduct the repo name and repo namespace searches on their
   # own. This also affords us the ability to give higher precedence to repository names matching
   # over namespaces, which is semantically correct.
-  get_search_results(Repository.name ** (prefix + '%'), with_count=True)
-  get_search_results(Repository.name ** (prefix + '%'), with_count=False)
+  get_search_results(_basequery.prefix_search(Repository.name, prefix), with_count=True)
+  get_search_results(_basequery.prefix_search(Repository.name, prefix), with_count=False)

-  get_search_results(Namespace.username ** (prefix + '%'), with_count=True)
-  get_search_results(Namespace.username ** (prefix + '%'), with_count=False)
+  get_search_results(_basequery.prefix_search(Namespace.username, prefix), with_count=True)
+  get_search_results(_basequery.prefix_search(Namespace.username, prefix), with_count=False)

   return results


-def get_matching_repositories(repo_term, username=None, limit=10, include_public=True):
-  namespace_term = repo_term
-  name_term = repo_term
-
-  visible = _visible_repository_query(username, include_public=include_public)
-
-  search_clauses = (Repository.name ** ('%' + name_term + '%') |
-                    Namespace.username ** ('%' + namespace_term + '%'))
-
-  # Handle the case where the user has already entered a namespace path.
-  if repo_term.find('/') > 0:
-    parts = repo_term.split('/', 1)
-    namespace_term = '/'.join(parts[:-1])
-    name_term = parts[-1]
-
-    search_clauses = (Repository.name ** ('%' + name_term + '%') &
-                      Namespace.username ** ('%' + namespace_term + '%'))
-
-  return visible.where(search_clauses).limit(limit)
-
-
 def lookup_repository(repo_id):
   try:
     return Repository.get(Repository.id == repo_id)
@ -134,8 +134,7 @@ def list_repository_tag_history(repo_obj, page=1, size=100, specific_tag=None):
           .join(Image)
           .where(RepositoryTag.repository == repo_obj)
           .where(RepositoryTag.hidden == False)
-          .order_by(RepositoryTag.lifetime_start_ts.desc())
-          .order_by(RepositoryTag.name)
+          .order_by(RepositoryTag.lifetime_start_ts.desc(), RepositoryTag.name)
          .paginate(page, size))

   if specific_tag:
@ -137,12 +137,13 @@ def add_or_invite_to_team(inviter, team, user_obj=None, email=None, requires_inv


 def get_matching_user_teams(team_prefix, user_obj, limit=10):
+  team_prefix_search = _basequery.prefix_search(Team.name, team_prefix)
   query = (Team
            .select()
            .join(User)
            .switch(Team)
            .join(TeamMember)
-           .where(TeamMember.user == user_obj, Team.name ** (team_prefix + '%'))
+           .where(TeamMember.user == user_obj, team_prefix_search)
            .distinct(Team.id)
            .limit(limit))
@ -162,6 +163,7 @@ def get_organization_team(orgname, teamname):
|
||||||
|
|
||||||
|
|
||||||
def get_matching_admined_teams(team_prefix, user_obj, limit=10):
|
def get_matching_admined_teams(team_prefix, user_obj, limit=10):
|
||||||
|
team_prefix_search = _basequery.prefix_search(Team.name, team_prefix)
|
||||||
admined_orgs = (_basequery.get_user_organizations(user_obj.username)
|
admined_orgs = (_basequery.get_user_organizations(user_obj.username)
|
||||||
.switch(Team)
|
.switch(Team)
|
||||||
.join(TeamRole)
|
.join(TeamRole)
|
||||||
|
@ -172,7 +174,7 @@ def get_matching_admined_teams(team_prefix, user_obj, limit=10):
|
||||||
.join(User)
|
.join(User)
|
||||||
.switch(Team)
|
.switch(Team)
|
||||||
.join(TeamMember)
|
.join(TeamMember)
|
||||||
.where(Team.name ** (team_prefix + '%'), Team.organization << (admined_orgs))
|
.where(team_prefix_search, Team.organization << (admined_orgs))
|
||||||
.distinct(Team.id)
|
.distinct(Team.id)
|
||||||
.limit(limit))
|
.limit(limit))
|
||||||
|
|
||||||
|
@ -180,8 +182,8 @@ def get_matching_admined_teams(team_prefix, user_obj, limit=10):
|
||||||
|
|
||||||
|
|
||||||
def get_matching_teams(team_prefix, organization):
|
def get_matching_teams(team_prefix, organization):
|
||||||
query = Team.select().where(Team.name ** (team_prefix + '%'),
|
team_prefix_search = _basequery.prefix_search(Team.name, team_prefix)
|
||||||
Team.organization == organization)
|
query = Team.select().where(team_prefix_search, Team.organization == organization)
|
||||||
return query.limit(10)
|
return query.limit(10)
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -203,9 +203,11 @@ def get_matching_robots(name_prefix, username, limit=10):
   prefix_checks = False
 
   for org in admined_orgs:
-    prefix_checks = prefix_checks | (User.username ** (org.username + '+' + name_prefix + '%'))
+    org_search = _basequery.prefix_search(User.username, org.username + '+' + name_prefix)
+    prefix_checks = prefix_checks | org_search
 
-  prefix_checks = prefix_checks | (User.username ** (username + '+' + name_prefix + '%'))
+  user_search = _basequery.prefix_search(User.username, username + '+' + name_prefix)
+  prefix_checks = prefix_checks | user_search
 
   return User.select().where(prefix_checks).limit(limit)
@ -493,26 +495,25 @@ def get_user_or_org_by_customer_id(customer_id):
 
 
 def get_matching_user_namespaces(namespace_prefix, username, limit=10):
+  namespace_search = _basequery.prefix_search(Namespace.username, namespace_prefix)
   base_query = (Namespace
                 .select()
                 .distinct()
-                .limit(limit)
                 .join(Repository, on=(Repository.namespace_user == Namespace.id))
                 .join(RepositoryPermission, JOIN_LEFT_OUTER)
-                .where(Namespace.username ** (namespace_prefix + '%')))
+                .where(namespace_search))
 
-  return _basequery.filter_to_repos_for_user(base_query, username)
+  return _basequery.filter_to_repos_for_user(base_query, username).limit(limit)
 
 def get_matching_users(username_prefix, robot_namespace=None,
                        organization=None):
-  direct_user_query = (User.username ** (username_prefix + '%') &
-                       (User.organization == False) & (User.robot == False))
+  user_search = _basequery.prefix_search(User.username, username_prefix)
+  direct_user_query = (user_search & (User.organization == False) & (User.robot == False))
 
   if robot_namespace:
     robot_prefix = format_robot_username(robot_namespace, username_prefix)
-    direct_user_query = (direct_user_query |
-                         (User.username ** (robot_prefix + '%') &
-                          (User.robot == True)))
+    robot_search = _basequery.prefix_search(User.username, robot_prefix)
+    direct_user_query = (direct_user_query | (robot_search & (User.robot == True)))
 
   query = (User
            .select(User.username, User.email, User.robot)
@ -67,22 +67,20 @@ class WorkQueue(object):
   def _item_by_id_for_update(self, queue_id):
     return db_for_update(QueueItem.select().where(QueueItem.id == queue_id)).get()
 
-  def get_metrics(self, require_transaction=True):
-    guard = self._transaction_factory(db) if require_transaction else NoopWith()
-    with guard:
-      now = datetime.utcnow()
-      name_match_query = self._name_match_query()
+  def get_metrics(self):
+    now = datetime.utcnow()
+    name_match_query = self._name_match_query()
 
     running_query = self._running_jobs(now, name_match_query)
     running_count = running_query.distinct().count()
 
     available_query = self._available_jobs(now, name_match_query)
     available_count = available_query.select(QueueItem.queue_name).distinct().count()
 
     available_not_running_query = self._available_jobs_not_running(now, name_match_query,
                                                                    running_query)
    available_not_running_count = (available_not_running_query.select(QueueItem.queue_name)
                                   .distinct().count())
 
     return (running_count, available_not_running_count, available_count)
@ -127,7 +125,10 @@ class WorkQueue(object):
     params['available_after'] = available_date
 
     with self._transaction_factory(db):
-      return str(QueueItem.create(**params).id)
+      r = str(QueueItem.create(**params).id)
+      if self._metric_queue:
+        self._metric_queue.put('Added', 1, dimensions={'queue': self._queue_name})
+      return r
 
   def get(self, processing_time=300):
     """
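With this change the queue only reports metrics when it was constructed with a metric queue, and enqueueing emits a single 'Added' count tagged with the queue name. A sketch of the resulting behavior; the queue name, payload, and transaction_factory are hypothetical:

    work_queue = WorkQueue('dockerfilebuild', transaction_factory, metric_queue=metric_queue)
    work_queue.put(['somenamespace', 'somerepo'], '{"build_uuid": "abc123"}')
    # internally results in:
    #   metric_queue.put('Added', 1, dimensions={'queue': 'dockerfilebuild'})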
5  dev.df
@ -19,3 +19,8 @@ RUN venv/bin/pip install -r requirements.txt
 WORKDIR /src/quay
 ENV PYTHONPATH=/
 ENV PATH=/venv/bin:$PATH
+
+RUN apt-key adv --keyserver hkp://pgp.mit.edu:80 --recv-keys 58118E89F3A912897C070ADBF76221572C52609D \
+    && echo "deb https://apt.dockerproject.org/repo ubuntu-trusty main" > /etc/apt/sources.list.d/docker.list \
+    && apt-get update \
+    && apt-get install -y docker-engine
@ -49,10 +49,16 @@ class ApiException(Exception):
     return rv
 
 
+class ExternalServiceTimeout(ApiException):
+  def __init__(self, error_description, payload=None):
+    ApiException.__init__(self, 'external_service_timeout', 520, error_description, payload)
+
+
 class InvalidRequest(ApiException):
   def __init__(self, error_description, payload=None):
     ApiException.__init__(self, 'invalid_request', 400, error_description, payload)
 
 
 class InvalidResponse(ApiException):
   def __init__(self, error_description, payload=None):
     ApiException.__init__(self, 'invalid_response', 400, error_description, payload)
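ExternalServiceTimeout gives API endpoints a dedicated way to map timeouts from outside services onto HTTP 520 with a machine-readable error type. A usage sketch; the requests call and helper name are illustrative, not from this diff:

    import requests

    def load_external_metadata(url):
      try:
        return requests.get(url, timeout=5).json()
      except requests.exceptions.Timeout:
        raise ExternalServiceTimeout('The external service timed out; please try again later')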
@ -9,12 +9,12 @@ from flask import request
 from rfc3987 import parse as uri_parse
 
 from app import app, userfiles as user_files, build_logs, log_archive, dockerfile_build_queue
+from buildtrigger.basehandler import BuildTriggerHandler
 from endpoints.api import (RepositoryParamResource, parse_args, query_param, nickname, resource,
                            require_repo_read, require_repo_write, validate_json_request,
                            ApiResource, internal_only, format_date, api, Unauthorized, NotFound,
                            path_param, InvalidRequest, require_repo_admin)
 from endpoints.building import start_build, PreparedBuild
-from endpoints.trigger import BuildTriggerHandler
 from data import database
 from data import model
 from auth.auth_context import get_authenticated_user
@ -12,10 +12,7 @@ from util.cache import cache_control_flask_restful
 
 
 def image_view(image, image_map, include_ancestors=True):
-  # TODO: Remove this once we've migrated all storage data to the image records.
-  storage_props = image
-  if image.storage and image.storage.id:
-    storage_props = image.storage
+  command = image.command
 
   def docker_id(aid):
     if not aid or not aid in image_map:
@ -23,13 +20,12 @@ def image_view(image, image_map, include_ancestors=True):
     return image_map[aid].docker_image_id
 
-  command = image.command or storage_props.command
   image_data = {
     'id': image.docker_image_id,
-    'created': format_date(image.created or storage_props.created),
-    'comment': image.comment or storage_props.comment,
+    'created': format_date(image.created),
+    'comment': image.comment,
     'command': json.loads(command) if command else None,
-    'size': storage_props.image_size,
+    'size': image.storage.image_size,
     'uploading': image.storage.uploading,
     'sort_index': len(image.ancestors),
   }
@ -15,6 +15,7 @@ from auth import scopes
 from app import avatar
 
 LOGS_PER_PAGE = 50
+MAX_PAGES = 20
 
 def log_view(log, kinds):
   view = {
@ -80,7 +81,7 @@ def _validate_logs_arguments(start_time, end_time, performer_name):
 
 def get_logs(start_time, end_time, performer_name=None, repository=None, namespace=None, page=None):
   (start_time, end_time, performer) = _validate_logs_arguments(start_time, end_time, performer_name)
-  page = page if page else 1
+  page = min(MAX_PAGES, page if page else 1)
   kinds = model.log.get_log_entry_kinds()
   logs = model.log.list_logs(start_time, end_time, performer=performer, repository=repository,
                              namespace=namespace, page=page, count=LOGS_PER_PAGE + 1)
@ -23,6 +23,7 @@ from auth.permissions import (ModifyRepositoryPermission, AdministerRepositoryPe
                               CreateRepositoryPermission)
 from auth.auth_context import get_authenticated_user
 from auth import scopes
+from util.names import REPOSITORY_NAME_REGEX
 
 
 logger = logging.getLogger(__name__)
@ -104,6 +105,10 @@ class RepositoryList(ApiResource):
     if visibility == 'private':
       check_allowed_private_repos(namespace_name)
 
+    # Verify that the repository name is valid.
+    if not REPOSITORY_NAME_REGEX.match(repository_name):
+      raise InvalidRequest('Invalid repository name')
+
     repo = model.repository.create_repository(namespace_name, repository_name, owner, visibility)
     repo.description = req['description']
     repo.save()
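REPOSITORY_NAME_REGEX is imported from util/names.py, whose definition is not part of this diff. A plausible sketch, assuming the rule stated by the v1 endpoint later in this commit (Docker-style lowercase names, no slashes):

    import re

    # Hypothetical definition; the real pattern lives in util/names.py.
    REPOSITORY_NAME_REGEX = re.compile(r'^[a-z0-9][a-z0-9_.-]*$')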
@ -141,6 +146,10 @@ class RepositoryList(ApiResource):
       starred_repos = model.repository.get_user_starred_repositories(get_authenticated_user())
       star_lookup = set([repo.id for repo in starred_repos])
 
+    # If the user asked for only public repositories, limit to only public repos.
+    if public and (not namespace and not starred):
+      username = None
+
     # Find the matching repositories.
     repositories = model.repository.get_visible_repositories(username=username,
                                                              limit=limit,
@ -172,6 +181,8 @@ class RepositoryList(ApiResource):
   def get(self, args):
     """ Fetch the list of repositories visible to the current user under a variety of situations.
     """
+    if not args['namespace'] and not args['starred'] and not args['public']:
+      raise InvalidRequest('namespace, starred or public are required for this API call')
 
     repositories, star_lookup = self._load_repositories(args['namespace'], args['public'],
                                                         args['starred'], args['limit'],
|
@ -192,7 +203,7 @@ class RepositoryList(ApiResource):
|
||||||
'namespace': repo_obj.namespace_user.username,
|
'namespace': repo_obj.namespace_user.username,
|
||||||
'name': repo_obj.name,
|
'name': repo_obj.name,
|
||||||
'description': repo_obj.description,
|
'description': repo_obj.description,
|
||||||
'is_public': repo_obj.visibility.name == 'public'
|
'is_public': repo_obj.visibility_id == model.repository.get_public_repo_visibility().id,
|
||||||
}
|
}
|
||||||
|
|
||||||
repo_id = repo_obj.id
|
repo_id = repo_obj.id
|
||||||
|
@ -243,7 +254,7 @@ class Repository(RepositoryParamResource):
       tag_info = {
         'name': tag.name,
         'image_id': tag.image.docker_image_id,
-        'size': tag.image.storage.aggregate_size
+        'size': tag.image.aggregate_size
       }
 
       if tag.lifetime_start_ts > 0:
@ -95,38 +95,6 @@ class EntitySearch(ApiResource):
     }
 
 
-@resource('/v1/find/repository')
-class FindRepositories(ApiResource):
-  """ Resource for finding repositories. """
-  @parse_args
-  @query_param('query', 'The prefix to use when querying for repositories.', type=str, default='')
-  @require_scope(scopes.READ_REPO)
-  @nickname('findRepos')
-  def get(self, args):
-    """ Get a list of repositories that match the specified prefix query. """
-    prefix = args['query']
-
-    def repo_view(repo):
-      return {
-        'namespace': repo.namespace_user.username,
-        'name': repo.name,
-        'description': repo.description
-      }
-
-    username = None
-    user = get_authenticated_user()
-    if user is not None:
-      username = user.username
-
-    matching = model.repository.get_matching_repositories(prefix, username)
-    return {
-      'repositories': [repo_view(repo) for repo in matching
-                       if (repo.visibility.name == 'public' or
-                           ReadRepositoryPermission(repo.namespace_user.username, repo.name).can())]
-    }
-
-
 def search_entity_view(username, entity, get_short_name=None):
   kind = 'user'
   avatar_data = avatar.get_data_for_user(entity)
@ -8,6 +8,7 @@ from endpoints.api import (resource, nickname, require_repo_read, require_repo_w
 from endpoints.api.image import image_view
 from data import model
 from auth.auth_context import get_authenticated_user
+from util.names import TAG_ERROR, TAG_REGEX
 
 
 @resource('/v1/repository/<repopath:repository>/tag/')
@ -85,6 +86,10 @@ class RepositoryTag(RepositoryParamResource):
   @validate_json_request('MoveTag')
   def put(self, namespace, repository, tag):
     """ Change which image a tag points to or create a new tag."""
+
+    if not TAG_REGEX.match(tag):
+      abort(400, TAG_ERROR)
+
     image_id = request.get_json()['image']
     image = model.image.get_repo_image(namespace, repository, image_id)
     if not image:
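TAG_REGEX and TAG_ERROR are likewise imported without their definitions appearing in this diff. A plausible sketch, consistent with the Angular pattern ^[\w][\w\.-]{0,127}$ that the tag-creation dialog adopts later in this commit:

    import re

    # Hypothetical definitions; the real ones live in util/names.py.
    TAG_REGEX = re.compile(r'^[\w][\w.-]{0,127}$')
    TAG_ERROR = 'Invalid tag: must match [A-Za-z0-9_][A-Za-z0-9_.-]{0,127}'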
@ -100,7 +105,6 @@ class RepositoryTag(RepositoryParamResource):
       pass
 
     model.tag.create_or_update_tag(namespace, repository, tag, image_id)
-    model.repository.garbage_collect_repository(namespace, repository)
 
     username = get_authenticated_user().username
     log_action('move_tag' if original_image_id else 'create_tag', namespace,
@ -115,7 +119,6 @@ class RepositoryTag(RepositoryParamResource):
   def delete(self, namespace, repository, tag):
     """ Delete the specified repository tag. """
     model.tag.delete_tag(namespace, repository, tag)
-    model.repository.garbage_collect_repository(namespace, repository)
 
     username = get_authenticated_user().username
     log_action('delete_tag', namespace,
@ -188,7 +191,6 @@ class RevertTag(RepositoryParamResource):
     # Revert the tag back to the previous image.
     image_id = request.get_json()['image']
     model.tag.revert_tag(tag_image.repository, tag, image_id)
-    model.repository.garbage_collect_repository(namespace, repository)
 
     # Log the reversion.
     username = get_authenticated_user().username
@ -8,15 +8,16 @@ from urllib import quote
 from urlparse import urlunparse
 
 from app import app
+from buildtrigger.basehandler import BuildTriggerHandler
+from buildtrigger.triggerutil import (TriggerDeactivationException,
+                                      TriggerActivationException, EmptyRepositoryException,
+                                      RepositoryReadException, TriggerStartException)
 from endpoints.api import (RepositoryParamResource, nickname, resource, require_repo_admin,
                            log_action, request_error, query_param, parse_args, internal_only,
                            validate_json_request, api, Unauthorized, NotFound, InvalidRequest,
                            path_param)
 from endpoints.api.build import build_status_view, trigger_view, RepositoryBuildStatus
 from endpoints.building import start_build
-from endpoints.trigger import (BuildTriggerHandler, TriggerDeactivationException,
-                               TriggerActivationException, EmptyRepositoryException,
-                               RepositoryReadException, TriggerStartException)
 from data import model
 from auth.permissions import (UserAdminPermission, AdministerOrganizationPermission,
                               ReadRepositoryPermission)
@ -62,16 +62,22 @@ def handle_invite_code(invite_code, user):
 
 
 def user_view(user):
-  def org_view(o):
+  def org_view(o, user_admin=True):
     admin_org = AdministerOrganizationPermission(o.username)
-    return {
+    org_response = {
       'name': o.username,
       'avatar': avatar.get_data_for_org(o),
-      'is_org_admin': admin_org.can(),
-      'can_create_repo': admin_org.can() or CreateRepositoryPermission(o.username).can(),
-      'preferred_namespace': not (o.stripe_id is None)
+      'can_create_repo': CreateRepositoryPermission(o.username).can(),
     }
 
+    if user_admin:
+      org_response.update({
+        'is_org_admin': admin_org.can(),
+        'preferred_namespace': not (o.stripe_id is None),
+      })
+
+    return org_response
+
   organizations = model.organization.get_user_organizations(user.username)
 
   def login_view(login):
|
||||||
user_response = {
|
user_response = {
|
||||||
'anonymous': False,
|
'anonymous': False,
|
||||||
'username': user.username,
|
'username': user.username,
|
||||||
'avatar': avatar.get_data_for_user(user)
|
'avatar': avatar.get_data_for_user(user),
|
||||||
}
|
}
|
||||||
|
|
||||||
user_admin = UserAdminPermission(user.username)
|
user_admin = UserAdminPermission(user.username)
|
||||||
if user_admin.can():
|
if user_admin.can():
|
||||||
user_response.update({
|
user_response.update({
|
||||||
|
'can_create_repo': True,
|
||||||
'is_me': True,
|
'is_me': True,
|
||||||
'verified': user.verified,
|
'verified': user.verified,
|
||||||
'email': user.email,
|
'email': user.email,
|
||||||
'organizations': [org_view(o) for o in organizations],
|
|
||||||
'logins': [login_view(login) for login in logins],
|
'logins': [login_view(login) for login in logins],
|
||||||
'can_create_repo': True,
|
|
||||||
'invoice_email': user.invoice_email,
|
'invoice_email': user.invoice_email,
|
||||||
'preferred_namespace': not (user.stripe_id is None),
|
'preferred_namespace': not (user.stripe_id is None),
|
||||||
'tag_expiration': user.removed_tag_expiration_s,
|
'tag_expiration': user.removed_tag_expiration_s,
|
||||||
})
|
})
|
||||||
|
|
||||||
|
user_view_perm = UserReadPermission(user.username)
|
||||||
|
if user_view_perm.can():
|
||||||
|
user_response.update({
|
||||||
|
'organizations': [org_view(o, user_admin=user_admin.can()) for o in organizations],
|
||||||
|
})
|
||||||
|
|
||||||
|
|
||||||
if features.SUPER_USERS and SuperUserPermission().can():
|
if features.SUPER_USERS and SuperUserPermission().can():
|
||||||
user_response.update({
|
user_response.update({
|
||||||
'super_user': user and user == get_authenticated_user() and SuperUserPermission().can()
|
'super_user': user and user == get_authenticated_user() and SuperUserPermission().can()
|
||||||
|
|
|
@ -3,7 +3,8 @@ import logging
 from flask import request, redirect, url_for, Blueprint
 from flask.ext.login import current_user
 
-from endpoints.trigger import BitbucketBuildTrigger, BuildTriggerHandler
+from buildtrigger.basehandler import BuildTriggerHandler
+from buildtrigger.bitbuckethandler import BitbucketBuildTrigger
 from endpoints.common import route_show_if
 from app import app
 from data import model
@ -96,7 +96,7 @@ class PreparedBuild(object):
   def get_display_name(sha):
     return sha[0:7]
 
-  def tags_from_ref(self, ref, default_branch='master'):
+  def tags_from_ref(self, ref, default_branch=None):
     branch = ref.split('/')[-1]
     tags = {branch}
1589  endpoints/trigger.py
File diff suppressed because it is too large
@ -9,7 +9,7 @@ from data import model
 from app import app, authentication, userevents, storage
 from auth.auth import process_auth, generate_signed_token
 from auth.auth_context import get_authenticated_user, get_validated_token, get_validated_oauth_token
-from util.names import parse_repository_name
+from util.names import parse_repository_name, REPOSITORY_NAME_REGEX
 from auth.permissions import (ModifyRepositoryPermission, UserAdminPermission,
                               ReadRepositoryPermission, CreateRepositoryPermission,
                               repository_read_grant, repository_write_grant)
@ -173,6 +173,10 @@ def update_user(username):
 @generate_headers(scope=GrantType.WRITE_REPOSITORY, add_grant_for_status=201)
 @anon_allowed
 def create_repository(namespace, repository):
+  # Verify that the repository name is valid.
+  if not REPOSITORY_NAME_REGEX.match(repository):
+    abort(400, message='Invalid repository name. Repository names cannot contain slashes.')
+
   logger.debug('Looking up repository %s/%s', namespace, repository)
   repo = model.repository.get_repository(namespace, repository)
@ -232,9 +236,6 @@ def update_images(namespace, repository):
     # Make sure the repo actually exists.
     abort(404, message='Unknown repository', issue='unknown-repo')
 
-  logger.debug('GCing repository')
-  model.repository.garbage_collect_repository(namespace, repository)
-
   # Generate a job for each notification that has been added to this repo
   logger.debug('Adding notifications for repository')
@ -292,16 +293,31 @@ def put_repository_auth(namespace, repository):
   abort(501, 'Not Implemented', issue='not-implemented')
 
 
+def conduct_repo_search(username, query, results):
+  """ Finds matching repositories. """
+  def can_read(repo):
+    if repo.is_public:
+      return True
+
+    return ReadRepositoryPermission(repo.namespace_user.username, repo.name).can()
+
+  only_public = username is None
+  matching_repos = model.repository.get_sorted_matching_repositories(query, only_public, can_read,
+                                                                     limit=5)
+
+  for repo in matching_repos:
+    results.append({
+      'name': repo.name,
+      'description': repo.description,
+      'is_public': repo.is_public,
+      'href': '/repository/' + repo.namespace_user.username + '/' + repo.name
+    })
+
+
 @v1_bp.route('/search', methods=['GET'])
 @process_auth
 @anon_protect
 def get_search():
-  def result_view(repo):
-    return {
-      "name": repo.namespace_user.username + '/' + repo.name,
-      "description": repo.description
-    }
-
   query = request.args.get('q')
 
   username = None
@ -309,14 +325,9 @@ def get_search():
   if user is not None:
     username = user.username
 
+  results = []
   if query:
-    matching = model.repository.get_matching_repositories(query, username)
-  else:
-    matching = []
-
-  results = [result_view(repo) for repo in matching
-             if (repo.visibility.name == 'public' or
-                 ReadRepositoryPermission(repo.namespace_user.username, repo.name).can())]
+    conduct_repo_search(username, query, results)
 
   data = {
     "query": query,
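After this change, /v1/search delegates visibility filtering to the can_read checker and sorting/limiting to the model layer. A usage sketch of the new helper with hypothetical data:

    results = []
    conduct_repo_search('someuser', 'ubu', results)
    # results now holds at most five entries shaped like:
    #   {'name': 'ubuntu-base', 'description': '...', 'is_public': True,
    #    'href': '/repository/someorg/ubuntu-base'}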
@ -193,11 +193,11 @@ def put_image_layer(namespace, repository, image_id):
   repo_image = model.image.get_repo_image_extended(namespace, repository, image_id)
   try:
     logger.debug('Retrieving image data')
-    json_data = model.image.get_image_json(repo_image)
-  except (IOError, AttributeError):
+    uuid = repo_image.storage.uuid
+    json_data = repo_image.v1_json_metadata
+  except (AttributeError):
     logger.exception('Exception when retrieving image data')
-    abort(404, 'Image %(image_id)s not found', issue='unknown-image',
-          image_id=image_id)
+    abort(404, 'Image %(image_id)s not found', issue='unknown-image', image_id=image_id)
 
   uuid = repo_image.storage.uuid
   layer_path = store.v1_image_layer_path(uuid)
@ -241,15 +241,15 @@ def put_image_layer(namespace, repository, image_id):
     logger.exception('Exception when writing image data')
     abort(520, 'Image %(image_id)s could not be written. Please try again.', image_id=image_id)
 
+  # Save the size of the image.
+  model.image.set_image_size(image_id, namespace, repository, size_info.compressed_size,
+                             size_info.uncompressed_size)
+
   # Append the computed checksum.
   csums = []
   csums.append('sha256:{0}'.format(h.hexdigest()))
 
   try:
-    # Save the size of the image.
-    model.image.set_image_size(image_id, namespace, repository, size_info.compressed_size,
-                               size_info.uncompressed_size)
-
     if requires_tarsum:
       tmp.seek(0)
       csums.append(checksums.compute_tarsum(tmp, json_data))
@ -315,7 +315,7 @@ def put_image_checksum(namespace, repository, image_id):
     abort(404, 'Image not found: %(image_id)s', issue='unknown-image', image_id=image_id)
 
   logger.debug('Looking up repo layer data')
-  if not model.image.has_image_json(repo_image):
+  if not repo_image.v1_json_metadata:
     abort(404, 'Image not found: %(image_id)s', issue='unknown-image', image_id=image_id)
 
   logger.debug('Marking image path')
@ -355,21 +355,17 @@ def get_image_json(namespace, repository, image_id, headers):
 
   logger.debug('Looking up repo image')
   repo_image = model.image.get_repo_image_extended(namespace, repository, image_id)
-
-  logger.debug('Looking up repo layer data')
-  try:
-    data = model.image.get_image_json(repo_image)
-  except (IOError, AttributeError):
+  if repo_image is None:
     flask_abort(404)
 
   logger.debug('Looking up repo layer size')
   size = repo_image.storage.image_size
 
-  headers['Content-Type'] = 'application/json'
   if size is not None:
+    # Note: X-Docker-Size is optional and we *can* end up with a NULL image_size,
+    # so handle this case rather than failing.
     headers['X-Docker-Size'] = str(size)
 
-  response = make_response(data, 200)
+  response = make_response(repo_image.v1_json_metadata, 200)
   response.headers.extend(headers)
   return response
@ -472,7 +468,8 @@ def put_image_json(namespace, repository, image_id):
     abort(400, 'Image %(image_id)s depends on non existing parent image %(parent_id)s',
           issue='invalid-request', image_id=image_id, parent_id=parent_id)
 
-  if not image_is_uploading(repo_image) and model.image.has_image_json(repo_image):
+  logger.debug('Checking if image already exists')
+  if repo_image.v1_json_metadata and not image_is_uploading(repo_image):
     exact_abort(409, 'Image already exists')
 
   set_uploading_flag(repo_image, True)
@ -5,7 +5,7 @@ import json
 from flask import abort, request, jsonify, make_response, session
 
 from app import app
-from util.names import parse_repository_name
+from util.names import TAG_ERROR, TAG_REGEX, parse_repository_name
 from auth.auth import process_auth
 from auth.permissions import (ReadRepositoryPermission,
                               ModifyRepositoryPermission)
@ -60,6 +60,9 @@ def put_tag(namespace, repository, tag):
   permission = ModifyRepositoryPermission(namespace, repository)
 
   if permission.can():
+    if not TAG_REGEX.match(tag):
+      abort(400, TAG_ERROR)
+
     docker_image_id = json.loads(request.data)
     model.tag.create_or_update_tag(namespace, repository, tag, docker_image_id)
@ -83,8 +86,6 @@ def delete_tag(namespace, repository, tag):
 
   if permission.can():
     model.tag.delete_tag(namespace, repository, tag)
-    model.repository.garbage_collect_repository(namespace, repository)
-
     return make_response('Deleted', 200)
 
   abort(403)
@ -383,10 +383,10 @@ def _generate_and_store_manifest(namespace, repo_name, tag_name):
   builder = SignedManifestBuilder(namespace, repo_name, tag_name)
 
   # Add the leaf layer
-  builder.add_layer(image.storage.checksum, __get_and_backfill_image_metadata(image))
+  builder.add_layer(image.storage.checksum, image.v1_json_metadata)
 
   for parent in parents:
-    builder.add_layer(parent.storage.checksum, __get_and_backfill_image_metadata(parent))
+    builder.add_layer(parent.storage.checksum, parent.v1_json_metadata)
 
   # Sign the manifest with our signing key.
   manifest = builder.build(docker_v2_signing_key)
@ -394,15 +394,3 @@ def _generate_and_store_manifest(namespace, repo_name, tag_name):
                                  manifest.digest, manifest.bytes)
 
   return manifest_row
-
-
-def __get_and_backfill_image_metadata(image):
-  image_metadata = image.v1_json_metadata
-  if image_metadata is None:
-    logger.warning('Loading metadata from storage for image id: %s', image.id)
-
-    image.v1_json_metadata = model.image.get_image_json(image)
-    logger.info('Saving backfilled metadata for image id: %s', image.id)
-    image.save()
-
-  return image_metadata
@ -27,7 +27,7 @@ def _open_stream(formatter, namespace, repository, tag, synthetic_image_id, imag
   store = Storage(app)
 
   def get_image_json(image):
-    return json.loads(model.image.get_image_json(image))
+    return json.loads(image.v1_json_metadata)
 
   def get_next_image():
     for current_image in image_list:
@ -113,7 +113,7 @@ def _verify_repo_verb(store, namespace, repository, tag, verb, checker=None):
     abort(404)
 
   # Lookup the tag's image and storage.
-  repo_image = model.image.get_repo_image_extended(namespace, repository, tag_image.docker_image_id)
+  repo_image = model.image.get_repo_image(namespace, repository, tag_image.docker_image_id)
   if not repo_image:
     abort(404)
 
@ -121,7 +121,8 @@ def _verify_repo_verb(store, namespace, repository, tag, verb, checker=None):
   image_json = None
 
   if checker is not None:
-    image_json = json.loads(model.image.get_image_json(repo_image))
+    image_json = json.loads(repo_image.v1_json_metadata)
 
     if not checker(image_json):
       logger.debug('Check mismatch on %s/%s:%s, verb %s', namespace, repository, tag, verb)
       abort(404)
@ -187,7 +188,7 @@ def _repo_verb(namespace, repository, tag, verb, formatter, sign=False, checker=
 
   # Load the image's JSON layer.
   if not image_json:
-    image_json = json.loads(model.image.get_image_json(repo_image))
+    image_json = json.loads(repo_image.v1_json_metadata)
 
   # Calculate a synthetic image ID.
   synthetic_image_id = hashlib.sha256(tag_image.docker_image_id + ':' + verb).hexdigest()
@ -21,8 +21,12 @@ from util.cache import no_cache
 from endpoints.common import common_login, render_page_template, route_show_if, param_required
 from endpoints.decorators import anon_protect
 from endpoints.csrf import csrf_protect, generate_csrf_token, verify_csrf
-from endpoints.trigger import (CustomBuildTrigger, BitbucketBuildTrigger, TriggerProviderException,
-                               BuildTriggerHandler)
+from buildtrigger.customhandler import CustomBuildTrigger
+from buildtrigger.bitbuckethandler import BitbucketBuildTrigger
+from buildtrigger.triggerutil import TriggerProviderException
+from buildtrigger.basehandler import BuildTriggerHandler
 
 from util.names import parse_repository_name, parse_repository_name_and_tag
 from util.useremails import send_email_changed
 from util.systemlogs import build_logs_archive
@ -9,8 +9,9 @@ from auth.permissions import ModifyRepositoryPermission
 from util.invoice import renderInvoiceToHtml
 from util.useremails import send_invoice_email, send_subscription_change, send_payment_failed
 from util.http import abort
-from endpoints.trigger import (BuildTriggerHandler, ValidationRequestException,
-                               SkipRequestException, InvalidPayloadException)
+from buildtrigger.basehandler import BuildTriggerHandler
+from buildtrigger.triggerutil import (ValidationRequestException, SkipRequestException,
+                                      InvalidPayloadException)
 from endpoints.building import start_build
@ -90,9 +90,7 @@ def __create_subtree(repo, structure, creator_username, parent, tag_map):
   # Write some data for the storage.
   if os.environ.get('WRITE_STORAGE_FILES'):
     storage_paths = StoragePaths()
-    paths = [storage_paths.image_json_path,
-             storage_paths.image_ancestry_path,
-             storage_paths.image_layer_path]
+    paths = [storage_paths.v1_image_layer_path]
 
     for path_builder in paths:
       path = path_builder(new_image.storage.uuid)
@ -7,7 +7,7 @@ REPO=quay.io/quay/quay-dev
 d ()
 {
   docker build -t $REPO -f dev.df .
-  docker -- run --rm -it --net=host -v $(pwd)/..:/src $REPO $*
+  docker -- run --rm -v /var/run/docker.sock:/run/docker.sock -it --net=host -v $(pwd)/..:/src $REPO $*
 }
 
 case $1 in
@ -23,6 +23,13 @@ notifications)
 test)
   d bash /src/quay/local-test.sh
   ;;
+initdb)
+  rm -f test/data/test.db
+  d /venv/bin/python initdb.py
+  ;;
+fulldbtest)
+  d bash /src/quay/test/fulldbtest.sh
+  ;;
 *)
   echo "unknown option"
   exit 1
@ -1 +1,7 @@
-TEST=true TROLLIUSDEBUG=1 python -m unittest discover -f
+set -e
+
+export TEST=true
+export TROLLIUSDEBUG=1
+
+python -m unittest discover -f
+python -m test.registry_tests -f
@ -1,5 +1,6 @@
 import logging
 import logging.config
+import os
 
 from app import app as application
 
@ -9,5 +10,8 @@ import endpoints.decorated
 from endpoints.v1 import v1_bp
 from endpoints.v2 import v2_bp
 
+if os.environ.get('DEBUGLOG') == 'true':
+  logging.config.fileConfig('conf/logging_debug.conf', disable_existing_loggers=False)
+
 application.register_blueprint(v1_bp, url_prefix='/v1')
 application.register_blueprint(v2_bp, url_prefix='/v2')
@ -38,7 +38,7 @@ git+https://github.com/DevTable/pygithub.git
 git+https://github.com/DevTable/container-cloud-config.git
 git+https://github.com/coreos/mockldap.git
 git+https://github.com/coreos/py-bitbucket.git
-git+https://github.com/coreos/pyapi-gitlab.git
+git+https://github.com/coreos/pyapi-gitlab.git@timeout
 git+https://github.com/coreos/resumablehashlib.git
 git+https://github.com/DevTable/python-etcd.git@sslfix
 gipc
@ -55,4 +55,4 @@ pyjwt
 toposort
 pyjwkest
 rfc3987
-pyjwkest
+jsonpath-rw
@ -8,7 +8,7 @@ blinker==1.3
 boto==2.38.0
 cachetools==1.0.3
 cffi==1.1.2
-cryptography==0.9.2
+cryptography==1.0.2
 debtcollector==0.5.0
 enum34==1.0.4
 Flask==0.10.1
@ -32,6 +32,7 @@ iso8601==0.1.10
 itsdangerous==0.24
 Jinja2==2.7.3
 jsonschema==2.5.1
+jsonpath-rw==1.4.0
 Mako==1.0.1
 marisa-trie==0.7.2
 MarkupSafe==0.23
@ -97,7 +98,8 @@ git+https://github.com/DevTable/pygithub.git
 git+https://github.com/DevTable/container-cloud-config.git
 git+https://github.com/coreos/mockldap.git
 git+https://github.com/coreos/py-bitbucket.git
-git+https://github.com/coreos/pyapi-gitlab.git
+git+https://github.com/coreos/pyapi-gitlab.git@timeout
 git+https://github.com/coreos/resumablehashlib.git
+git+https://github.com/coreos/mockldap.git
 git+https://github.com/DevTable/python-etcd.git@sslfix
 git+https://github.com/NateFerrero/oauth2lib.git
@ -2,6 +2,12 @@
   padding: 20px;
 }
 
+.team-view .team-title {
+  vertical-align: middle;
+  margin-right: 10px;
+  color: #ccc;
+}
+
 .team-view .team-name {
   vertical-align: middle;
   margin-left: 6px;
@ -24,34 +24,41 @@
     please check for JavaScript or networking issues and contact support.
   </div>
 
-  <span class="no-logs" ng-if="!logEntries.length && currentBuild.phase == 'waiting'">
-    (Waiting for build to start)
-  </span>
-
-  <div class="log-container" ng-class="container.type" ng-repeat="container in logEntries">
-    <div class="container-header" ng-class="container.type == 'phase' ? container.message : ''"
-         ng-switch on="container.type" ng-click="container.logs.toggle()">
-      <i class="fa chevron"
-         ng-class="container.logs.isVisible ? 'fa-chevron-down' : 'fa-chevron-right'"
-         ng-show="hasLogs(container)"></i>
-      <div ng-switch-when="phase">
-        <span class="container-content build-log-phase" phase="container"></span>
-      </div>
-      <div ng-switch-when="error">
-        <span class="container-content build-log-error" error="container" entries="logEntries"></span>
-      </div>
-      <div ng-switch-when="command">
-        <span class="container-content build-log-command" command="container"></span>
-      </div>
-    </div>
-
-    <!-- Display the entries for the container -->
-    <div class="container-logs" ng-show="container.logs.isVisible">
-      <div class="log-entry" bindonce ng-repeat="entry in container.logs.visibleEntries">
-        <span class="id" bo-text="$index + container.index + 1" ng-if="!useTimestamps"></span>
-        <span class="id" bo-text="formatDatetime(entry.data.datetime)" ng-if="useTimestamps"></span>
-        <span class="message" bo-html="processANSI(entry.message, container)"></span>
-        <span class="timestamp" bo-text="formatDatetime(entry.data.datetime)" ng-if="!useTimestamps"></span>
-      </div>
-    </div>
-  </div>
+  <div ng-show="!loadError && pollChannel.skipping">
+    Refreshing Build Status...
+    <span class="cor-loader"></span>
+  </div>
+
+  <div ng-show="!pollChannel.skipping">
+    <span class="no-logs" ng-if="!logEntries.length && currentBuild.phase == 'waiting'">
+      (Waiting for build to start)
+    </span>
+
+    <div class="log-container" ng-class="container.type" ng-repeat="container in logEntries">
+      <div class="container-header" ng-class="container.type == 'phase' ? container.message : ''"
+           ng-switch on="container.type" ng-click="container.logs.toggle()">
+        <i class="fa chevron"
+           ng-class="container.logs.isVisible ? 'fa-chevron-down' : 'fa-chevron-right'"
+           ng-show="hasLogs(container)"></i>
+        <div ng-switch-when="phase">
+          <span class="container-content build-log-phase" phase="container"></span>
+        </div>
+        <div ng-switch-when="error">
+          <span class="container-content build-log-error" error="container" entries="logEntries"></span>
+        </div>
+        <div ng-switch-when="command">
+          <span class="container-content build-log-command" command="container"></span>
+        </div>
+      </div>
+
+      <!-- Display the entries for the container -->
+      <div class="container-logs" ng-show="container.logs.isVisible">
+        <div class="log-entry" bindonce ng-repeat="entry in container.logs.visibleEntries">
+          <span class="id" bo-text="$index + container.index + 1" ng-if="!useTimestamps"></span>
+          <span class="id" bo-text="formatDatetime(entry.data.datetime)" ng-if="useTimestamps"></span>
+          <span class="message" bo-html="processANSI(entry.message, container)"></span>
+          <span class="timestamp" bo-text="formatDatetime(entry.data.datetime)" ng-if="!useTimestamps"></span>
+        </div>
+      </div>
+    </div>
+  </div>
@ -34,7 +34,7 @@
   <div ng-show="!addingTag">
     <input type="text" class="form-control" id="tagName"
            placeholder="Enter tag name"
-           ng-model="tagToCreate" ng-pattern="/^([a-z0-9_\.-]){3,30}$/"
+           ng-model="tagToCreate" ng-pattern="/^[\w][\w\.-]{0,127}$/"
            ng-disabled="creatingTag" autofocus required>
 
     <div style="margin: 10px; margin-top: 20px;"
@ -23,15 +23,20 @@
 
   <!-- Teams List -->
   <div ng-show="!showingMembers">
-    <span class="popup-input-button hidden-xs"
-          pattern="TEAM_PATTERN" placeholder="'Team Name'"
-          submitted="createTeam(value)" ng-show="organization.is_admin"
-          style="margin-bottom: 10px;">
-      <i class="fa fa-plus" style="margin-right: 6px;"></i> Create New Team
-    </span>
+    <div class="row" style="margin-left: 0px; margin-right: 0px;">
+      <span class="popup-input-button hidden-xs"
+            pattern="TEAM_PATTERN" placeholder="'Team Name'"
+            submitted="createTeam(value)" ng-show="organization.is_admin"
+            style="margin-bottom: 10px;">
+        <i class="fa fa-plus" style="margin-right: 6px;"></i> Create New Team
+      </span>
+    </div>
 
     <div class="row hidden-xs">
-      <div class="col-md-4 col-md-offset-8 col-sm-5 col-sm-offset-7 header-col" ng-show="organization.is_admin">
+      <div class="col-sm-7 col-md-8 header-col">
+        <span class="header-text">Team Summary</span>
+      </div>
+      <div class="col-md-4 col-sm-5 header-col" ng-show="organization.is_admin">
         <span class="header-text">Team Permissions</span>
         <i class="info-icon fa fa-info-circle" data-placement="bottom" data-original-title="" data-title=""
            data-content="Global permissions for the team and its members<br><br><dl><dt>Member</dt><dd>Permissions are assigned on a per repository basis</dd><dt>Creator</dt><dd>A team can create its own repositories</dd><dt>Admin</dt><dd>A team has full control of the organization</dd></dl>"
@ -79,6 +84,9 @@
       role-changed="setRole(role, team.name)" roles="teamRoles"></span>

   <span class="cor-options-menu">
+    <span class="cor-option" option-click="viewTeam(team.name)">
+      <i class="fa fa-user"></i> Manage Team Members
+    </span>
     <span class="cor-option" option-click="askDeleteTeam(team.name)">
       <i class="fa fa-times"></i> Delete Team {{ team.name }}
     </span>
@ -28,7 +28,7 @@ angular.module('quay').directive('buildLogsView', function () {
       $scope.currentBuild = null;
       $scope.loadError = null;

-      var pollChannel = null;
+      $scope.pollChannel = null;

       var appendToTextLog = function(type, message) {
         if (type == 'phase') {

@ -146,14 +146,14 @@ angular.module('quay').directive('buildLogsView', function () {
           getBuildStatusAndLogs(build, callback);
         };

-        pollChannel = AngularPollChannel.create($scope, conductStatusAndLogRequest, 5 * 1000 /* 5s */);
-        pollChannel.start();
+        $scope.pollChannel = AngularPollChannel.create($scope, conductStatusAndLogRequest, 5 * 1000 /* 5s */);
+        $scope.pollChannel.start();
       };

       var stopWatching = function() {
-        if (pollChannel) {
-          pollChannel.stop();
-          pollChannel = null;
+        if ($scope.pollChannel) {
+          $scope.pollChannel.stop();
+          $scope.pollChannel = null;
         }
       };
@ -52,6 +52,7 @@ angular.module('quay').directive('dockerfileBuildDialog', function () {
         if (sn && $scope.repository) {
           $scope.viewTriggers = false;
           $scope.startTrigger = null;
+          $scope.errorMessage = null;

           $element.find('.dockerfilebuildModal').modal({});
@ -196,21 +196,36 @@ angular.module('quay').directive('dockerfileBuildForm', function () {
         }
       });
     };

     request.onerror = function() {
       $scope.$apply(function() {
         handleUploadFailed();
       });
     };

     request.onreadystatechange = function() {
       var state = request.readyState;
+      var status = request.status;
+
       if (state == 4) {
-        $scope.$apply(function() {
-          startBuild(fileId);
-          $scope.uploading = false;
-        });
-        return;
+        if (Math.floor(status / 100) == 2) {
+          $scope.$apply(function() {
+            startBuild(fileId);
+            $scope.uploading = false;
+          });
+        } else {
+          var message = request.statusText;
+          if (status == 413) {
+            message = 'Selected file too large to upload';
+          }
+
+          $scope.$apply(function() {
+            handleUploadFailed(message);
+          });
+        }
       }
     };

     request.send(file);
   };
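The fix generalizes the success check to any 2xx status and maps 413 to a friendlier message instead of silently treating every completed request as success. A rough Python equivalent of the same branching (handler and function names here are illustrative, not part of the codebase):

    def handle_upload_response(status, status_text, start_build, upload_failed):
        """Dispatch on the final XHR status: any 2xx starts the build,
        anything else surfaces an error, with a special case for 413."""
        if status // 100 == 2:
            start_build()
        else:
            message = status_text
            if status == 413:
                message = 'Selected file too large to upload'
            upload_failed(message)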
@ -23,7 +23,7 @@ angular.module('quay').directive('repoCountChecker', function () {
       }

       $scope.checkingPlan = true;
-      $scope.isUserNamespace = UserService.isUserNamespace($scope.namespace);
+      $scope.isUserNamespace = !UserService.isOrganization($scope.namespace);

       ApiService.getPrivateAllowed($scope.isUserNamespace ? null : $scope.namespace).then(function(resp) {
         $scope.checkingPlan = false;

@ -71,8 +71,8 @@ angular.module('quay').directive('repoCountChecker', function () {
         }
       };

-      var isUserNamespace = UserService.isUserNamespace($scope.namespace);
-      var namespace = isUserNamespace ? null : $scope.namespace;
+      $scope.isUserNamespace = !UserService.isOrganization($scope.namespace);
+      var namespace = $scope.isUserNamespace ? null : $scope.namespace;
       PlanService.changePlan($scope, namespace, $scope.planRequired.stripeId, callbacks);
     };
   }
@ -17,6 +17,9 @@ angular.module('quay').directive('repoTagHistory', function () {
       $scope.tagHistoryData = null;
       $scope.tagHistoryLeaves = {};

+      // A delete followed by a create of a tag within this threshold is considered a move.
+      var MOVE_THRESHOLD = 2;
+
       var loadTimeline = function() {
         if (!$scope.repository || !$scope.isEnabled) { return; }

@ -68,11 +71,11 @@ angular.module('quay').directive('repoTagHistory', function () {

         // If the tag has an end time, it was either deleted or moved.
         if (tag.end_ts) {
-          // If a future entry exists with a start time equal to the end time for this tag,
+          // If a future entry exists with a start time "equal" to the end time for this tag,
           // then the action was a move, rather than a delete and a create.
           var currentEntries = tagEntries[tagName];
           var futureEntry = currentEntries.length > 0 ? currentEntries[currentEntries.length - 1] : {};
-          if (futureEntry.start_ts == tag.end_ts) {
+          if (tag.end_ts - futureEntry.start_ts <= MOVE_THRESHOLD) {
             removeEntry(futureEntry);
             addEntry(futureEntry.reversion ? 'revert': 'move', tag.end_ts,
                      futureEntry.docker_image_id, dockerImageId);
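The idea behind MOVE_THRESHOLD: tag history arrives as discrete create/delete events, and a delete whose timestamp lands within a couple of seconds of the adjacent create of the same tag is really one "move" (or "revert"), not two separate actions. A small Python sketch of the same coalescing rule (the data shapes and function name are illustrative):

    MOVE_THRESHOLD = 2  # seconds; same constant as in the directive

    def classify(end_ts, future_entry):
        """Decide whether a tag deletion paired with an adjacent entry is a
        move/revert or a plain delete, mirroring the directive's check."""
        start_ts = future_entry.get('start_ts')
        if start_ts is not None and end_ts - start_ts <= MOVE_THRESHOLD:
            return 'revert' if future_entry.get('reversion') else 'move'
        return 'delete'

    assert classify(100, {'start_ts': 100}) == 'move'    # exact match still a move
    assert classify(102, {'start_ts': 100}) == 'move'    # within threshold
    assert classify(105, {'start_ts': 100}) == 'delete'  # too far apart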
@ -119,7 +119,7 @@ angular.module('quay').directive('setupTriggerDialog', function () {
       $scope.canceled({'trigger': $scope.trigger});

       return ApiService.getErrorMessage(resp) +
-        '\n\nThis usually means that you do not have admin access on the repository.';
+        '\n\nNote: Errors can occur if you do not have admin access on the repository.';
     });

     ApiService.activateBuildTrigger(data, params).then(function(resp) {
@ -171,6 +171,10 @@ angular.module('quay').directive('teamsManager', function () {
       }, ApiService.errorDisplay('Cannot delete team'));
     };

+    $scope.viewTeam = function(teamName) {
+      document.location = '/organization/' + $scope.organization.name + '/teams/' + teamName;
+    };
+
     $scope.showMembers = function(value) {
       $scope.showingMembers = value;
       if (value && !$scope.fullMemberList) {
@ -16,7 +16,7 @@
       return;
     }

-    $scope.HOSTNAME_REGEX = '^[a-zA-Z-0-9\.]+(:[0-9]+)?$';
+    $scope.HOSTNAME_REGEX = '^[a-zA-Z-0-9_\.\-]+(:[0-9]+)?$';

     $scope.validateHostname = function(hostname) {
       if (hostname.indexOf('127.0.0.1') == 0 || hostname.indexOf('localhost') == 0) {
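The widened pattern additionally accepts underscores and literal dashes in the host portion and keeps the optional :port suffix. A quick way to exercise it (Python's re module accepts the same pattern string; the sample hostnames are illustrative):

    import re

    # Pattern copied from the diff; anchored, with an optional port suffix.
    HOSTNAME_REGEX = re.compile(r'^[a-zA-Z-0-9_\.\-]+(:[0-9]+)?$')

    for hostname in ('quay.io', 'my-registry.local:5000', 'host_name:8443'):
        assert HOSTNAME_REGEX.match(hostname), hostname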
static/js/services/angular-poll-channel.js (20 changed lines)
@ -1,8 +1,9 @@
 /**
  * Specialized class for conducting an HTTP poll, while properly preventing multiple calls.
  */
-angular.module('quay').factory('AngularPollChannel', ['ApiService', '$timeout', 'DocumentVisibilityService',
-  function(ApiService, $timeout, DocumentVisibilityService) {
+angular.module('quay').factory('AngularPollChannel',
+  ['ApiService', '$timeout', 'DocumentVisibilityService', 'CORE_EVENT', '$rootScope',
+  function(ApiService, $timeout, DocumentVisibilityService, CORE_EVENT, $rootScope) {
     var _PollChannel = function(scope, requester, opt_sleeptime) {
       this.scope_ = scope;
       this.requester_ = requester;

@ -11,10 +12,20 @@ angular.module('quay').factory('AngularPollChannel',
       this.working = false;
       this.polling = false;
+      this.skipping = false;

       var that = this;
+
+      var visibilityHandler = $rootScope.$on(CORE_EVENT.DOC_VISIBILITY_CHANGE, function() {
+        // If the poll channel was skipping because the visibility was hidden, call it immediately.
+        if (that.skipping && !DocumentVisibilityService.isHidden()) {
+          that.call_();
+        }
+      });
+
       scope.$on('$destroy', function() {
         that.stop();
+        visibilityHandler();
       });
     };

@ -28,9 +39,10 @@ angular.module('quay').factory('AngularPollChannel',
       if (this.timer_) {
         $timeout.cancel(this.timer_);
         this.timer_ = null;
-        this.polling_ = false;
+        this.polling = false;
       }

+      this.skipping = false;
       this.working = false;
     };

@ -53,6 +65,7 @@ angular.module('quay').factory('AngularPollChannel',
       // If the document is currently hidden, skip the call.
       if (DocumentVisibilityService.isHidden()) {
+        this.skipping = true;
         this.setupTimer_();
         return;
       }

@ -63,6 +76,7 @@ angular.module('quay').factory('AngularPollChannel',
       that.requester_(function(status) {
         if (status) {
           that.working = false;
+          that.skipping = false;
           that.setupTimer_();
         } else {
           that.stop();
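The net behavior: while the page is hidden the channel keeps rescheduling itself without issuing requests, and the moment visibility returns it fires one call immediately instead of waiting out the remaining sleep interval. A compact Python sketch of the same state machine (class and callback names are illustrative, not the Angular service itself):

    class PollChannel:
        """Skip polls while hidden; fire immediately when visibility returns."""

        def __init__(self, requester, is_hidden, schedule):
            self.requester = requester  # performs the actual HTTP request
            self.is_hidden = is_hidden  # () -> bool, like DocumentVisibilityService.isHidden()
            self.schedule = schedule    # (callback) -> None, like $timeout
            self.skipping = False

        def on_visibility_change(self):
            # Mirrors the $rootScope CORE_EVENT.DOC_VISIBILITY_CHANGE handler.
            if self.skipping and not self.is_hidden():
                self.call()

        def call(self):
            if self.is_hidden():
                self.skipping = True
                self.schedule(self.call)  # keep the timer alive, do no work
                return
            self.skipping = False
            self.requester()
            self.schedule(self.call)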
@ -126,10 +126,6 @@ function(ApiService, CookieService, $rootScope, Config) {
       return userResponse;
     };

-    userService.isUserNamespace = function(namespace) {
-      return namespace == userResponse.username;
-    };
-
     // Update the user in the root scope.
     userService.updateUserIn($rootScope);
@ -9,6 +9,7 @@
     </a>
   </span>
   <span class="cor-title-content">
+    <span class="team-title">Team</span>
     <span class="avatar" data="team.avatar" size="32"></span>
     <span class="team-name">{{ teamname }}</span>
   </span>
@ -21,10 +21,6 @@ class StoragePaths(object):
   def image_path(self, storage_uuid):
     return '{0}/{1}/'.format(self.shared_images, storage_uuid)

-  def image_json_path(self, storage_uuid):
-    base_path = self.image_path(storage_uuid)
-    return '{0}json'.format(base_path)
-
   def v1_image_layer_path(self, storage_uuid):
     base_path = self.image_path(storage_uuid)
     return '{0}layer'.format(base_path)
@ -162,6 +162,9 @@ class RegistryTestCaseMixin(LiveServerTestCase):
     self.csrf_token = ''
     self.csrf_token = self.conduct('GET', '/__test/csrf').text

+  def do_tag(self, namespace, repository, tag, image_id, expected_code=200):
+    self.conduct('PUT', '/v1/repositories/%s/%s/tags/%s' % (namespace, repository, tag),
+                 data='"%s"' % image_id, expected_code=expected_code, auth='sig')
+
   def conduct_api_login(self, username, password):
     self.conduct('POST', '/api/v1/signin',
@ -218,7 +221,7 @@ class V1RegistryMixin(BaseRegistryMixin):


 class V1RegistryPushMixin(V1RegistryMixin):
-  def do_push(self, namespace, repository, username, password, images=None):
+  def do_push(self, namespace, repository, username, password, images=None, expected_code=201):
     images = images or self._get_default_images()
     auth = (username, password)

@ -228,7 +231,10 @@ class V1RegistryPushMixin(V1RegistryMixin):
     # PUT /v1/repositories/{namespace}/{repository}/
     self.conduct('PUT', '/v1/repositories/%s/%s' % (namespace, repository),
                  data=json.dumps(images), auth=auth,
-                 expected_code=201)
+                 expected_code=expected_code)
+
+    if expected_code != 201:
+      return

     last_image_id = None
     for image_data in images:
@ -264,9 +270,7 @@ class V1RegistryPushMixin(V1RegistryMixin):


     # PUT /v1/repositories/{namespace}/{repository}/tags/latest
-    self.conduct('PUT', '/v1/repositories/%s/%s/tags/latest' % (namespace, repository),
-                 data='"' + last_image_id + '"',
-                 auth='sig')
+    self.do_tag(namespace, repository, 'latest', images[0]['id'])

     # PUT /v1/repositories/{namespace}/{repository}/images
     self.conduct('PUT', '/v1/repositories/%s/%s/images' % (namespace, repository),
|
||||||
class V1RegistryTests(V1RegistryPullMixin, V1RegistryPushMixin, RegistryTestsMixin,
|
class V1RegistryTests(V1RegistryPullMixin, V1RegistryPushMixin, RegistryTestsMixin,
|
||||||
RegistryTestCaseMixin, LiveServerTestCase):
|
RegistryTestCaseMixin, LiveServerTestCase):
|
||||||
""" Tests for V1 registry. """
|
""" Tests for V1 registry. """
|
||||||
|
def test_push_reponame_with_slashes(self):
|
||||||
|
# Attempt to add a repository name with slashes. This should fail as we do not support it.
|
||||||
|
images = [{
|
||||||
|
'id': 'onlyimagehere'
|
||||||
|
}]
|
||||||
|
self.do_push('public', 'newrepo/somesubrepo', 'public', 'password', images, expected_code=400)
|
||||||
|
|
||||||
|
def test_tag_validation(self):
|
||||||
|
image_id = 'onlyimagehere'
|
||||||
|
images = [{
|
||||||
|
'id': image_id
|
||||||
|
}]
|
||||||
|
self.do_push('public', 'newrepo', 'public', 'password', images)
|
||||||
|
self.do_tag('public', 'newrepo', '1', image_id)
|
||||||
|
self.do_tag('public', 'newrepo', 'x' * 128, image_id)
|
||||||
|
self.do_tag('public', 'newrepo', '', image_id, expected_code=400)
|
||||||
|
self.do_tag('public', 'newrepo', 'x' * 129, image_id, expected_code=400)
|
||||||
|
self.do_tag('public', 'newrepo', '.fail', image_id, expected_code=400)
|
||||||
|
self.do_tag('public', 'newrepo', '-fail', image_id, expected_code=400)
|
||||||
|
|
||||||
|
|
||||||
class V2RegistryTests(V2RegistryPullMixin, V2RegistryPushMixin, RegistryTestsMixin,
|
class V2RegistryTests(V2RegistryPullMixin, V2RegistryPushMixin, RegistryTestsMixin,
|
||||||
RegistryTestCaseMixin, LiveServerTestCase):
|
RegistryTestCaseMixin, LiveServerTestCase):
|
||||||
""" Tests for V2 registry. """
|
""" Tests for V2 registry. """
|
||||||
|
|
||||||
|
def test_push_reponame_with_slashes(self):
|
||||||
|
# Attempt to add a repository name with slashes. This should fail as we do not support it.
|
||||||
|
images = [{
|
||||||
|
'id': 'onlyimagehere'
|
||||||
|
}]
|
||||||
|
self.do_push('public', 'newrepo/somesubrepo', 'devtable', 'password', images,
|
||||||
|
expected_auth_code=400)
|
||||||
|
|
||||||
def test_invalid_push(self):
|
def test_invalid_push(self):
|
||||||
self.do_push('devtable', 'newrepo', 'devtable', 'password', invalid=True)
|
self.do_push('devtable', 'newrepo', 'devtable', 'password', invalid=True)
|
||||||
|
|
||||||
|
|
|
@ -12,7 +12,7 @@ from endpoints.api import api_bp, api

 from endpoints.api.team import TeamMember, TeamMemberList, OrganizationTeam, TeamMemberInvite
 from endpoints.api.tag import RepositoryTagImages, RepositoryTag, ListRepositoryTags, RevertTag
-from endpoints.api.search import FindRepositories, EntitySearch
+from endpoints.api.search import EntitySearch
 from endpoints.api.image import RepositoryImageChanges, RepositoryImage, RepositoryImageList
 from endpoints.api.build import (FileDropResource, RepositoryBuildStatus, RepositoryBuildLogs,
                                  RepositoryBuildList, RepositoryBuildResource)
@ -118,25 +118,6 @@ class ApiTestCase(unittest.TestCase):
     finished_database_for_testing(self)


-class TestFindRepositories(ApiTestCase):
-  def setUp(self):
-    ApiTestCase.setUp(self)
-    self._set_url(FindRepositories)
-
-  def test_get_anonymous(self):
-    self._run_test('GET', 200, None, None)
-
-  def test_get_freshuser(self):
-    self._run_test('GET', 200, 'freshuser', None)
-
-  def test_get_reader(self):
-    self._run_test('GET', 200, 'reader', None)
-
-  def test_get_devtable(self):
-    self._run_test('GET', 200, 'devtable', None)
-
-
 class TestUserStarredRepositoryList(ApiTestCase):
   def setUp(self):
     ApiTestCase.setUp(self)
@ -397,16 +378,16 @@ class TestRepositoryList(ApiTestCase):
     self._set_url(RepositoryList)

   def test_get_anonymous(self):
-    self._run_test('GET', 200, None, None)
+    self._run_test('GET', 400, None, None)

   def test_get_freshuser(self):
-    self._run_test('GET', 200, 'freshuser', None)
+    self._run_test('GET', 400, 'freshuser', None)

   def test_get_reader(self):
-    self._run_test('GET', 200, 'reader', None)
+    self._run_test('GET', 400, 'reader', None)

   def test_get_devtable(self):
-    self._run_test('GET', 200, 'devtable', None)
+    self._run_test('GET', 400, 'devtable', None)

   def test_post_anonymous(self):
     self._run_test('POST', 400, None, {u'visibility': u'public', u'repository': 'XZGB',
@ -12,15 +12,15 @@ from playhouse.test_utils import assert_query_count
 from endpoints.api import api_bp, api
 from endpoints.building import PreparedBuild
 from endpoints.webhooks import webhooks
-from endpoints.trigger import BuildTriggerHandler
 from app import app
+from buildtrigger.basehandler import BuildTriggerHandler
 from initdb import setup_database_for_testing, finished_database_for_testing
 from data import database, model
 from data.database import RepositoryActionCount

 from endpoints.api.team import TeamMember, TeamMemberList, TeamMemberInvite, OrganizationTeam
 from endpoints.api.tag import RepositoryTagImages, RepositoryTag, RevertTag, ListRepositoryTags
-from endpoints.api.search import FindRepositories, EntitySearch, ConductSearch
+from endpoints.api.search import EntitySearch, ConductSearch
 from endpoints.api.image import RepositoryImage, RepositoryImageList
 from endpoints.api.build import (RepositoryBuildStatus, RepositoryBuildLogs, RepositoryBuildList,
                                  RepositoryBuildResource)
|
||||||
|
|
||||||
|
|
||||||
class TestCreateRepo(ApiTestCase):
|
class TestCreateRepo(ApiTestCase):
|
||||||
|
def test_invalidreponame(self):
|
||||||
|
self.login(ADMIN_ACCESS_USER)
|
||||||
|
|
||||||
|
json = self.postJsonResponse(RepositoryList,
|
||||||
|
data=dict(repository='some/repo',
|
||||||
|
visibility='public',
|
||||||
|
description=''),
|
||||||
|
expected_code=400)
|
||||||
|
|
||||||
|
self.assertEquals('Invalid repository name', json['error_description'])
|
||||||
|
|
||||||
def test_duplicaterepo(self):
|
def test_duplicaterepo(self):
|
||||||
self.login(ADMIN_ACCESS_USER)
|
self.login(ADMIN_ACCESS_USER)
|
||||||
|
|
||||||
|
@ -1312,30 +1323,6 @@ class TestCreateRepo(ApiTestCase):
|
||||||
self.assertEquals('newrepo', json['name'])
|
self.assertEquals('newrepo', json['name'])
|
||||||
|
|
||||||
|
|
||||||
class TestFindRepos(ApiTestCase):
|
|
||||||
def test_findrepos_asguest(self):
|
|
||||||
json = self.getJsonResponse(FindRepositories, params=dict(query='p'))
|
|
||||||
self.assertEquals(len(json['repositories']), 1)
|
|
||||||
|
|
||||||
self.assertEquals(json['repositories'][0]['namespace'], 'public')
|
|
||||||
self.assertEquals(json['repositories'][0]['name'], 'publicrepo')
|
|
||||||
|
|
||||||
def test_findrepos_asuser(self):
|
|
||||||
self.login(NO_ACCESS_USER)
|
|
||||||
|
|
||||||
json = self.getJsonResponse(FindRepositories, params=dict(query='p'))
|
|
||||||
self.assertEquals(len(json['repositories']), 1)
|
|
||||||
|
|
||||||
self.assertEquals(json['repositories'][0]['namespace'], 'public')
|
|
||||||
self.assertEquals(json['repositories'][0]['name'], 'publicrepo')
|
|
||||||
|
|
||||||
def test_findrepos_orgmember(self):
|
|
||||||
self.login(READ_ACCESS_USER)
|
|
||||||
|
|
||||||
json = self.getJsonResponse(FindRepositories, params=dict(query='p'))
|
|
||||||
self.assertGreater(len(json['repositories']), 1)
|
|
||||||
|
|
||||||
|
|
||||||
class TestListRepos(ApiTestCase):
|
class TestListRepos(ApiTestCase):
|
||||||
def test_listrepos_asguest(self):
|
def test_listrepos_asguest(self):
|
||||||
# Queries: Base + the list query
|
# Queries: Base + the list query
|
||||||
|
@ -1344,14 +1331,14 @@ class TestListRepos(ApiTestCase):

     self.assertEquals(len(json['repositories']), 1)

-  def test_listrepos_orgmember(self):
+  def test_listrepos_asorgmember(self):
     self.login(READ_ACCESS_USER)

     # Queries: Base + the list query
     with assert_query_count(BASE_LOGGEDIN_QUERY_COUNT + 1):
       json = self.getJsonResponse(RepositoryList, params=dict(public=True))

-    self.assertGreater(len(json['repositories']), 1)
+    self.assertGreater(len(json['repositories']), 0)

   def test_listrepos_filter(self):
     self.login(READ_ACCESS_USER)

@ -1366,7 +1353,7 @@ class TestListRepos(ApiTestCase):

   def test_listrepos_limit(self):
     self.login(READ_ACCESS_USER)
-    json = self.getJsonResponse(RepositoryList, params=dict(limit=1))
+    json = self.getJsonResponse(RepositoryList, params=dict(limit=1, public=True))
     self.assertEquals(len(json['repositories']), 1)

   def test_listrepos_allparams(self):
|
||||||
|
|
||||||
self.assertEquals(staging_images, json['images'])
|
self.assertEquals(staging_images, json['images'])
|
||||||
|
|
||||||
|
# Require a valid tag name.
|
||||||
|
self.putResponse(RepositoryTag,
|
||||||
|
params=dict(repository=ADMIN_ACCESS_USER + '/complex', tag='-fail'),
|
||||||
|
data=dict(image=staging_images[0]['id']),
|
||||||
|
expected_code=400)
|
||||||
|
|
||||||
# Add a new tag to the staging image.
|
# Add a new tag to the staging image.
|
||||||
self.putResponse(RepositoryTag,
|
self.putResponse(RepositoryTag,
|
||||||
params=dict(repository=ADMIN_ACCESS_USER + '/complex', tag='sometag'),
|
params=dict(repository=ADMIN_ACCESS_USER + '/complex', tag='sometag'),
|
||||||
|
|
|
@ -217,3 +217,7 @@ class TestImageSharing(unittest.TestCase):
|
||||||
still_uploading.save()
|
still_uploading.save()
|
||||||
|
|
||||||
self.assertDifferentStorage('an-image', still_uploading)
|
self.assertDifferentStorage('an-image', still_uploading)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
unittest.main()
|
||||||
|
|
test/test_prepare_trigger.py (new file, 271 lines)
@ -0,0 +1,271 @@
import unittest
import json

from jsonschema import validate, ValidationError
from buildtrigger.basehandler import METADATA_SCHEMA
from buildtrigger.bitbuckethandler import get_transformed_webhook_payload as bb_webhook
from buildtrigger.bitbuckethandler import get_transformed_commit_info as bb_commit
from buildtrigger.githubhandler import get_transformed_webhook_payload as gh_webhook
from buildtrigger.gitlabhandler import get_transformed_webhook_payload as gl_webhook

class TestPrepareTrigger(unittest.TestCase):
  def assertSchema(self, filename, expected, processor, *args, **kwargs):
    with open('test/triggerjson/%s.json' % filename) as f:
      payload = json.loads(f.read())

    nargs = [payload]
    nargs.extend(args)

    created = processor(*nargs, **kwargs)
    self.assertEquals(expected, created)
    validate(created, METADATA_SCHEMA)

  def test_bitbucket_customer_payload_noauthor(self):
    expected = {
      "commit": "a0ec139843b2bb281ab21a433266ddc498e605dc",
      "ref": "refs/heads/master",
      "git_url": "git@bitbucket.org:lightsidelabs/svc-identity.git",
      "commit_info": {
        "url": "https://bitbucket.org/lightsidelabs/svc-identity/commits/a0ec139843b2bb281ab21a433266ddc498e605dc",
        "date": "2015-09-25T00:55:08+00:00",
        "message": "Update version.py to 0.1.2 [skip ci]\n\n(by utilitybelt/scripts/autotag_version.py)\n",
        "committer": {
          "username": "LightSide_CodeShip",
          "url": "https://bitbucket.org/LightSide_CodeShip/",
          "avatar_url": "https://bitbucket.org/account/LightSide_CodeShip/avatar/32/",
        },
      },
    }

    self.assertSchema('bitbucket_customer_example_noauthor', expected, bb_webhook)

  def test_bitbucket_customer_payload_tag(self):
    expected = {
      "commit": "a0ec139843b2bb281ab21a433266ddc498e605dc",
      "ref": "refs/tags/0.1.2",
      "git_url": "git@bitbucket.org:lightsidelabs/svc-identity.git",
      "commit_info": {
        "url": "https://bitbucket.org/lightsidelabs/svc-identity/commits/a0ec139843b2bb281ab21a433266ddc498e605dc",
        "date": "2015-09-25T00:55:08+00:00",
        "message": "Update version.py to 0.1.2 [skip ci]\n\n(by utilitybelt/scripts/autotag_version.py)\n",
        "committer": {
          "username": "LightSide_CodeShip",
          "url": "https://bitbucket.org/LightSide_CodeShip/",
          "avatar_url": "https://bitbucket.org/account/LightSide_CodeShip/avatar/32/",
        },
      },
    }

    self.assertSchema('bitbucket_customer_example_tag', expected, bb_webhook)

  def test_bitbucket_commit(self):
    ref = 'refs/heads/somebranch'
    default_branch = 'somebranch'
    repository_name = 'foo/bar'

    def lookup_author(_):
      return {
        'user': {
          'username': 'cooluser',
          'avatar': 'http://some/avatar/url'
        }
      }

    expected = {
      "commit": u"abdeaf1b2b4a6b9ddf742c1e1754236380435a62",
      "ref": u"refs/heads/somebranch",
      "git_url": u"git@bitbucket.org:foo/bar.git",
      "default_branch": u"somebranch",
      "commit_info": {
        "url": u"https://bitbucket.org/foo/bar/commits/abdeaf1b2b4a6b9ddf742c1e1754236380435a62",
        "date": u"2012-07-24 00:26:36",
        "message": u"making some changes\n",
        "author": {
          "url": u"https://bitbucket.org/cooluser/",
          "avatar_url": u"http://some/avatar/url",
          "username": u"cooluser",
        }
      }
    }

    self.assertSchema('bitbucket_commit', expected, bb_commit, ref, default_branch,
                      repository_name, lookup_author)

  def test_bitbucket_webhook_payload(self):
    expected = {
      "commit": u"af64ae7188685f8424040b4735ad12941b980d75",
      "ref": u"refs/heads/master",
      "git_url": u"git@bitbucket.org:jscoreos/another-repo.git",
      "commit_info": {
        "url": u"https://bitbucket.org/jscoreos/another-repo/commits/af64ae7188685f8424040b4735ad12941b980d75",
        "date": u"2015-09-10T20:40:54+00:00",
        "message": u"Dockerfile edited online with Bitbucket",
        "author": {
          "username": u"jscoreos",
          "url": u"https://bitbucket.org/jscoreos/",
          "avatar_url": u"https://bitbucket.org/account/jscoreos/avatar/32/",
        },
        "committer": {
          "username": u"jscoreos",
          "url": u"https://bitbucket.org/jscoreos/",
          "avatar_url": u"https://bitbucket.org/account/jscoreos/avatar/32/",
        },
      },
    }

    self.assertSchema('bitbucket_webhook', expected, bb_webhook)

  def test_github_webhook_payload(self):
    expected = {
      'commit': u'410f4cdf8ff09b87f245b13845e8497f90b90a4c',
      'ref': u'refs/heads/master',
      'git_url': u'git@github.com:josephschorr/anothertest.git',
      'commit_info': {
        'url': u'https://github.com/josephschorr/anothertest/commit/410f4cdf8ff09b87f245b13845e8497f90b90a4c',
        'date': u'2015-09-11T14:26:16-04:00',
        'message': u'Update Dockerfile',
        'committer': {
          'username': u'josephschorr',
        },
        'author': {
          'username': u'josephschorr',
        },
      },
    }

    self.assertSchema('github_webhook', expected, gh_webhook)

  def test_github_webhook_payload_with_lookup(self):
    expected = {
      'commit': u'410f4cdf8ff09b87f245b13845e8497f90b90a4c',
      'ref': u'refs/heads/master',
      'git_url': u'git@github.com:josephschorr/anothertest.git',
      'commit_info': {
        'url': u'https://github.com/josephschorr/anothertest/commit/410f4cdf8ff09b87f245b13845e8497f90b90a4c',
        'date': u'2015-09-11T14:26:16-04:00',
        'message': u'Update Dockerfile',
        'committer': {
          'username': u'josephschorr',
          'url': u'http://github.com/josephschorr',
          'avatar_url': u'http://some/avatar/url',
        },
        'author': {
          'username': u'josephschorr',
          'url': u'http://github.com/josephschorr',
          'avatar_url': u'http://some/avatar/url',
        },
      },
    }

    def lookup_user(_):
      return {
        'html_url': 'http://github.com/josephschorr',
        'avatar_url': 'http://some/avatar/url'
      }

    self.assertSchema('github_webhook', expected, gh_webhook, lookup_user=lookup_user)

  def test_github_webhook_payload_missing_fields_with_lookup(self):
    expected = {
      'commit': u'410f4cdf8ff09b87f245b13845e8497f90b90a4c',
      'ref': u'refs/heads/master',
      'git_url': u'git@github.com:josephschorr/anothertest.git',
      'commit_info': {
        'url': u'https://github.com/josephschorr/anothertest/commit/410f4cdf8ff09b87f245b13845e8497f90b90a4c',
        'date': u'2015-09-11T14:26:16-04:00',
        'message': u'Update Dockerfile'
      },
    }

    def lookup_user(username):
      if not username:
        raise Exception('Fail!')

      return {
        'html_url': 'http://github.com/josephschorr',
        'avatar_url': 'http://some/avatar/url'
      }

    self.assertSchema('github_webhook_missing', expected, gh_webhook, lookup_user=lookup_user)

  def test_gitlab_webhook_payload(self):
    expected = {
      'commit': u'fb88379ee45de28a0a4590fddcbd8eff8b36026e',
      'ref': u'refs/heads/master',
      'git_url': u'git@gitlab.com:jzelinskie/www-gitlab-com.git',
      'commit_info': {
        'url': u'https://gitlab.com/jzelinskie/www-gitlab-com/commit/fb88379ee45de28a0a4590fddcbd8eff8b36026e',
        'date': u'2015-08-13T19:33:18+00:00',
        'message': u'Fix link\n',
      },
    }

    self.assertSchema('gitlab_webhook', expected, gl_webhook)

  def test_github_webhook_payload_known_issue(self):
    expected = {
      "commit": "118b07121695d9f2e40a5ff264fdcc2917680870",
      "ref": "refs/heads/master",
      "git_url": "git@github.com:silas/docker-test.git",
      "commit_info": {
        "url": "https://github.com/silas/docker-test/commit/118b07121695d9f2e40a5ff264fdcc2917680870",
        "date": "2015-09-25T14:55:11-04:00",
        "message": "Fail",
      },
    }

    self.assertSchema('github_webhook_noname', expected, gh_webhook)

  def test_github_webhook_payload_missing_fields(self):
    expected = {
      'commit': u'410f4cdf8ff09b87f245b13845e8497f90b90a4c',
      'ref': u'refs/heads/master',
      'git_url': u'git@github.com:josephschorr/anothertest.git',
      'commit_info': {
        'url': u'https://github.com/josephschorr/anothertest/commit/410f4cdf8ff09b87f245b13845e8497f90b90a4c',
        'date': u'2015-09-11T14:26:16-04:00',
        'message': u'Update Dockerfile'
      },
    }

    self.assertSchema('github_webhook_missing', expected, gh_webhook)

  def test_gitlab_webhook_payload_with_lookup(self):
    expected = {
      'commit': u'fb88379ee45de28a0a4590fddcbd8eff8b36026e',
      'ref': u'refs/heads/master',
      'git_url': u'git@gitlab.com:jzelinskie/www-gitlab-com.git',
      'commit_info': {
        'url': u'https://gitlab.com/jzelinskie/www-gitlab-com/commit/fb88379ee45de28a0a4590fddcbd8eff8b36026e',
        'date': u'2015-08-13T19:33:18+00:00',
        'message': u'Fix link\n',
        'author': {
          'username': 'jzelinskie',
          'url': 'http://gitlab.com/jzelinskie',
          'avatar_url': 'http://some/avatar/url',
        },
      },
    }

    def lookup_user(_):
      return {
        'username': 'jzelinskie',
        'html_url': 'http://gitlab.com/jzelinskie',
        'avatar_url': 'http://some/avatar/url',
      }

    self.assertSchema('gitlab_webhook', expected, gl_webhook, lookup_user=lookup_user)

if __name__ == '__main__':
  unittest.main()
@ -1,7 +1,9 @@
 import unittest
 import re

-from endpoints.trigger import matches_ref
+from buildtrigger.triggerutil import matches_ref, raise_if_skipped_build
+from buildtrigger.triggerutil import SkipRequestException
+from endpoints.building import PreparedBuild

 class TestRegex(unittest.TestCase):
   def assertDoesNotMatch(self, ref, filt):

@ -25,5 +27,55 @@ class TestRegex(unittest.TestCase):

     self.assertDoesNotMatch('ref/heads/delta', '(((heads/alpha)|(heads/beta))|(heads/gamma))|(heads/master)')


+class TestSkipBuild(unittest.TestCase):
+  def testSkipNoMetadata(self):
+    prepared = PreparedBuild()
+    prepared.metadata = {}
+    config = {}
+
+    self.assertRaises(SkipRequestException, raise_if_skipped_build, prepared, config)
+
+  def testSkipByBranchtagRegex(self):
+    prepared = PreparedBuild()
+    prepared.metadata = {
+      'ref': 'ref/heads/master',
+    }
+
+    config = {
+      'branchtag_regex': 'nothing'
+    }
+    self.assertRaises(SkipRequestException, raise_if_skipped_build, prepared, config)
+
+  def testSkipByMessage(self):
+    prepared = PreparedBuild()
+    prepared.metadata = {
+      'ref': 'ref/heads/master',
+      'commit_info': {
+        'message': '[skip build]',
+      },
+    }
+
+    config = {}
+
+    self.assertRaises(SkipRequestException, raise_if_skipped_build, prepared, config)
+
+  def testDoesNotSkip(self):
+    prepared = PreparedBuild()
+    prepared.metadata = {
+      'ref': 'ref/heads/master',
+      'commit_info': {
+        'message': 'some cool message',
+      },
+    }
+
+    config = {
+      'branchtag_regex': '(master)|(heads/master)',
+    }
+
+    raise_if_skipped_build(prepared, config)
+
+
 if __name__ == '__main__':
   unittest.main()
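Taken together, these tests pin down the contract of raise_if_skipped_build: missing metadata means skip, a branchtag_regex that fails to match the ref means skip, and a "[skip build]" marker in the commit message means skip. A plausible minimal implementation consistent with the tests (this is a sketch, not the actual buildtrigger.triggerutil code; honoring the common "[build skip]" variant is an added assumption):

    import re

    class SkipRequestException(Exception):
      """Raised when an incoming webhook should not trigger a build."""

    def raise_if_skipped_build(prepared_build, config):
      metadata = prepared_build.metadata or {}
      if not metadata:
        raise SkipRequestException()

      # Skip if the ref does not match the configured branch/tag filter.
      branchtag_regex = config.get('branchtag_regex')
      if branchtag_regex is not None:
        ref = metadata.get('ref', '')
        if not re.search(branchtag_regex, ref):
          raise SkipRequestException()

      # Skip if the commit message opts out of CI.
      message = metadata.get('commit_info', {}).get('message', '')
      if '[skip build]' in message.lower() or '[build skip]' in message.lower():
        raise SkipRequestException()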
@ -83,15 +83,24 @@ class TestUsernameGenerator(unittest.TestCase):
   def test_basic_ascii_names(self):
     self.assert_generated_output('jake', 'jake')
     self.assert_generated_output('frank', 'frank')
+    self.assert_generated_output('fra-nk', 'fra_nk')

   def test_names_with_caps(self):
     self.assert_generated_output('Jake', 'jake')
     self.assert_generated_output('FranK', 'frank')

+  def test_multiple_underscores(self):
+    self.assert_generated_output('ja__ke', 'ja_ke')
+    self.assert_generated_output('ja___ke', 'ja_ke')
+
+  def test_trailing_underscores(self):
+    self.assert_generated_output('ja__', 'ja00')
+    self.assert_generated_output('jake__', 'jake')
+
   def test_short_names(self):
-    self.assert_generated_output('a', 'a___')
-    self.assert_generated_output('ab', 'ab__')
-    self.assert_generated_output('abc', 'abc_')
+    self.assert_generated_output('a', 'a000')
+    self.assert_generated_output('ab', 'ab00')
+    self.assert_generated_output('abc', 'abc0')

   def test_long_names(self):
     self.assert_generated_output('abcdefghijklmnopqrstuvwxyz1234567890',

@ -108,16 +117,18 @@ class TestUsernameGenerator(unittest.TestCase):
     self.assert_generated_output(u'\u0985\u09ad\u09bf\u099c\u09c0\u09a4', 'abhijiit')
     self.assert_generated_output(u'\u0d05\u0d2d\u0d3f\u0d1c\u0d40\u0d24', 'abhijiit')
     self.assert_generated_output(u'\u0d2e\u0d32\u0d2f\u0d3e\u0d32\u0d2e\u0d4d', 'mlyaalm')
-    self.assert_generated_output(u'\ue000', '____')
-    self.assert_generated_output(u'\u03ff', '____')
+    self.assert_generated_output(u'\ue000', '0000')
+    self.assert_generated_output(u'\u03ff', '0000')
+
+    self.assert_generated_output(u'\u0d2e\u0d32\u03ff\u03ff\u0d2e\u0d32', 'mlml')

   def test_multiple_suggestions(self):
     name_gen = generate_valid_usernames('a')
     generated_output = list(islice(name_gen, 4))
-    self.assertEquals('a___', generated_output[0])
-    self.assertEquals('a__0', generated_output[1])
-    self.assertEquals('a__1', generated_output[2])
-    self.assertEquals('a__2', generated_output[3])
+    self.assertEquals('a000', generated_output[0])
+    self.assertEquals('a001', generated_output[1])
+    self.assertEquals('a002', generated_output[2])
+    self.assertEquals('a003', generated_output[3])
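Read together, the updated tests pin down the new normalization rules: dashes become underscores, runs of underscores collapse to one, trailing underscores are dropped, and names shorter than four characters are padded with zeros rather than underscores. A sketch of that pipeline (illustrative only; the real generator also transliterates Unicode and yields numbered fallback suggestions):

    import re

    def normalize_username(name, min_length=4):
        name = name.lower().replace('-', '_')
        name = re.sub(r'_{2,}', '_', name)  # 'ja__ke' -> 'ja_ke'
        name = name.rstrip('_')             # 'jake__' -> 'jake'
        if len(name) < min_length:
            name = name + '0' * (min_length - len(name))  # 'a' -> 'a000'
        return name

    assert normalize_username('fra-nk') == 'fra_nk'
    assert normalize_username('ja___ke') == 'ja_ke'
    assert normalize_username('jake__') == 'jake'
    assert normalize_username('ab') == 'ab00'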
test/test_visible_repos.py (new file, 98 lines)
@ -0,0 +1,98 @@
import unittest

from app import app
from initdb import setup_database_for_testing, finished_database_for_testing
from data import model

NO_ACCESS_USER = 'freshuser'
READ_ACCESS_USER = 'reader'
ADMIN_ACCESS_USER = 'devtable'
PUBLIC_USER = 'public'
RANDOM_USER = 'randomuser'
OUTSIDE_ORG_USER = 'outsideorg'

ADMIN_ROBOT_USER = 'devtable+dtrobot'

ORGANIZATION = 'buynlarge'

SIMPLE_REPO = 'simple'
PUBLIC_REPO = 'publicrepo'
RANDOM_REPO = 'randomrepo'

OUTSIDE_ORG_REPO = 'coolrepo'

ORG_REPO = 'orgrepo'
ANOTHER_ORG_REPO = 'anotherorgrepo'

# Note: The shared repo has devtable as admin, public as a writer and reader as a reader.
SHARED_REPO = 'shared'

class TestVisibleRepositories(unittest.TestCase):
  def setUp(self):
    setup_database_for_testing(self)
    self.app = app.test_client()
    self.ctx = app.test_request_context()
    self.ctx.__enter__()

  def tearDown(self):
    finished_database_for_testing(self)
    self.ctx.__exit__(True, None, None)

  def assertDoesNotHaveRepo(self, username, name):
    repos = list(model.repository.get_visible_repositories(username))
    names = [repo.name for repo in repos]
    self.assertNotIn(name, names)

  def assertHasRepo(self, username, name):
    repos = list(model.repository.get_visible_repositories(username))
    names = [repo.name for repo in repos]
    self.assertIn(name, names)

  def test_noaccess(self):
    repos = list(model.repository.get_visible_repositories(NO_ACCESS_USER))
    names = [repo.name for repo in repos]
    self.assertEquals(0, len(names))

    # Try retrieving public repos now.
    repos = list(model.repository.get_visible_repositories(NO_ACCESS_USER, include_public=True))
    names = [repo.name for repo in repos]
    self.assertIn(PUBLIC_REPO, names)

  def test_public(self):
    self.assertHasRepo(PUBLIC_USER, PUBLIC_REPO)
    self.assertHasRepo(PUBLIC_USER, SHARED_REPO)

    self.assertDoesNotHaveRepo(PUBLIC_USER, SIMPLE_REPO)
    self.assertDoesNotHaveRepo(PUBLIC_USER, RANDOM_REPO)
    self.assertDoesNotHaveRepo(PUBLIC_USER, OUTSIDE_ORG_REPO)

  def test_reader(self):
    self.assertHasRepo(READ_ACCESS_USER, SHARED_REPO)
    self.assertHasRepo(READ_ACCESS_USER, ORG_REPO)

    self.assertDoesNotHaveRepo(READ_ACCESS_USER, SIMPLE_REPO)
    self.assertDoesNotHaveRepo(READ_ACCESS_USER, RANDOM_REPO)
    self.assertDoesNotHaveRepo(READ_ACCESS_USER, OUTSIDE_ORG_REPO)
    self.assertDoesNotHaveRepo(READ_ACCESS_USER, PUBLIC_REPO)

  def test_random(self):
    self.assertHasRepo(RANDOM_USER, RANDOM_REPO)

    self.assertDoesNotHaveRepo(RANDOM_USER, SIMPLE_REPO)
    self.assertDoesNotHaveRepo(RANDOM_USER, SHARED_REPO)
    self.assertDoesNotHaveRepo(RANDOM_USER, ORG_REPO)
    self.assertDoesNotHaveRepo(RANDOM_USER, ANOTHER_ORG_REPO)
    self.assertDoesNotHaveRepo(RANDOM_USER, PUBLIC_REPO)

  def test_admin(self):
    self.assertHasRepo(ADMIN_ACCESS_USER, SIMPLE_REPO)
    self.assertHasRepo(ADMIN_ACCESS_USER, SHARED_REPO)

    self.assertHasRepo(ADMIN_ACCESS_USER, ORG_REPO)
    self.assertHasRepo(ADMIN_ACCESS_USER, ANOTHER_ORG_REPO)

    self.assertDoesNotHaveRepo(ADMIN_ACCESS_USER, OUTSIDE_ORG_REPO)

if __name__ == '__main__':
  unittest.main()
test/triggerjson/bitbucket_commit.json (new file, 24 lines)
@ -0,0 +1,24 @@
{
  "files": [
    {
      "type": "added",
      "file": "AnotherFile.txt"
    },
    {
      "type": "modified",
      "file": "Readme"
    }
  ],
  "raw_author": "Mary Anthony <manthony@172-28-13-105.staff.sf.atlassian.com>",
  "utctimestamp": "2012-07-23 22:26:36+00:00",
  "author": "Mary Anthony",
  "timestamp": "2012-07-24 00:26:36",
  "node": "abdeaf1b2b4a6b9ddf742c1e1754236380435a62",
  "parents": [
    "86432202a2d5"
  ],
  "branch": "master",
  "message": "making some changes\n",
  "revision": null,
  "size": -1
}
test/triggerjson/bitbucket_customer_example_noauthor.json (new file, 215 lines)
@ -0,0 +1,215 @@
{
  "actor": {
    "username": "LightSide_CodeShip",
    "links": {
      "self": {
        "href": "https://api.bitbucket.org/2.0/users/LightSide_CodeShip"
      },
      "avatar": {
        "href": "https://bitbucket.org/account/LightSide_CodeShip/avatar/32/"
      },
      "html": {
        "href": "https://bitbucket.org/LightSide_CodeShip/"
      }
    },
    "uuid": "{d009ab20-b8b8-4840-9491-bfe72fbf666e}",
    "type": "user",
    "display_name": "CodeShip Tagging"
  },
  "repository": {
    "full_name": "lightsidelabs/svc-identity",
    "name": "svc-identity",
    "scm": "git",
    "type": "repository",
    "links": {
      "self": {
        "href": "https://api.bitbucket.org/2.0/repositories/lightsidelabs/svc-identity"
      },
      "avatar": {
        "href": "https://bitbucket.org/lightsidelabs/svc-identity/avatar/16/"
      },
      "html": {
        "href": "https://bitbucket.org/lightsidelabs/svc-identity"
      }
    },
    "is_private": true,
    "uuid": "{3400bed9-5cde-45b9-8d86-c1dac5d5e610}",
    "owner": {
      "username": "lightsidelabs",
      "links": {
        "self": {
          "href": "https://api.bitbucket.org/2.0/teams/lightsidelabs"
        },
        "avatar": {
          "href": "https://bitbucket.org/account/lightsidelabs/avatar/32/"
        },
        "html": {
          "href": "https://bitbucket.org/lightsidelabs/"
        }
      },
      "uuid": "{456c5f28-7338-4d89-9506-c7b889ba2d11}",
      "type": "team",
      "display_name": "LightSIDE Labs"
    }
  },
  "push": {
    "changes": [
      {
        "commits": [
          {
            "hash": "a0ec139843b2bb281ab21a433266ddc498e605dc",
            "links": {
              "self": {
                "href": "https://api.bitbucket.org/2.0/repositories/lightsidelabs/svc-identity/commit/a0ec139843b2bb281ab21a433266ddc498e605dc"
              },
              "html": {
                "href": "https://bitbucket.org/lightsidelabs/svc-identity/commits/a0ec139843b2bb281ab21a433266ddc498e605dc"
              }
            },
            "author": {
              "raw": "scripts/autotag_version.py <utilitybelt@lightside>"
            },
            "type": "commit",
            "message": "Update version.py to 0.1.2 [skip ci]\n\n(by utilitybelt/scripts/autotag_version.py)\n"
          }
        ],
        "created": false,
        "forced": false,
        "old": {
          "target": {
            "parents": [
              {
                "hash": "bd749165b0c50c65c15fc4df526b8e9df26eff10",
                "links": {
                  "self": {
                    "href": "https://api.bitbucket.org/2.0/repositories/lightsidelabs/svc-identity/commit/bd749165b0c50c65c15fc4df526b8e9df26eff10"
                  },
                  "html": {
                    "href": "https://bitbucket.org/lightsidelabs/svc-identity/commits/bd749165b0c50c65c15fc4df526b8e9df26eff10"
                  }
                },
                "type": "commit"
              },
              {
                "hash": "910b5624b74190dfaa51938d851563a4c5254926",
                "links": {
                  "self": {
                    "href": "https://api.bitbucket.org/2.0/repositories/lightsidelabs/svc-identity/commit/910b5624b74190dfaa51938d851563a4c5254926"
                  },
                  "html": {
                    "href": "https://bitbucket.org/lightsidelabs/svc-identity/commits/910b5624b74190dfaa51938d851563a4c5254926"
                  }
                },
                "type": "commit"
              }
            ],
            "date": "2015-09-25T00:54:41+00:00",
            "type": "commit",
            "message": "Merged in create-update-user (pull request #3)\n\nCreate + update identity\n",
            "hash": "263736ecc250113fad56a93f83b712093554ad42",
            "links": {
              "self": {
                "href": "https://api.bitbucket.org/2.0/repositories/lightsidelabs/svc-identity/commit/263736ecc250113fad56a93f83b712093554ad42"
              },
              "html": {
                "href": "https://bitbucket.org/lightsidelabs/svc-identity/commits/263736ecc250113fad56a93f83b712093554ad42"
              }
            },
            "author": {
              "raw": "Chris Winters <chris@cwinters.com>",
              "user": {
                "username": "cwinters",
                "links": {
                  "self": {
                    "href": "https://api.bitbucket.org/2.0/users/cwinters"
                  },
                  "avatar": {
                    "href": "https://bitbucket.org/account/cwinters/avatar/32/"
                  },
                  "html": {
                    "href": "https://bitbucket.org/cwinters/"
                  }
                },
                "uuid": "{a6209615-6d75-4294-8181-dbf96d40fc6b}",
                "type": "user",
                "display_name": "Chris Winters"
              }
            }
          },
          "links": {
            "self": {
              "href": "https://api.bitbucket.org/2.0/repositories/lightsidelabs/svc-identity/refs/branches/master"
            },
            "commits": {
              "href": "https://api.bitbucket.org/2.0/repositories/lightsidelabs/svc-identity/commits/master"
            },
            "html": {
              "href": "https://bitbucket.org/lightsidelabs/svc-identity/branch/master"
            }
          },
          "name": "master",
          "type": "branch"
        },
        "links": {
          "diff": {
            "href": "https://api.bitbucket.org/2.0/repositories/lightsidelabs/svc-identity/diff/a0ec139843b2bb281ab21a433266ddc498e605dc..263736ecc250113fad56a93f83b712093554ad42"
          },
          "commits": {
            "href": "https://api.bitbucket.org/2.0/repositories/lightsidelabs/svc-identity/commits?include=a0ec139843b2bb281ab21a433266ddc498e605dc&exclude=263736ecc250113fad56a93f83b712093554ad42"
          },
          "html": {
            "href": "https://bitbucket.org/lightsidelabs/svc-identity/branches/compare/a0ec139843b2bb281ab21a433266ddc498e605dc..263736ecc250113fad56a93f83b712093554ad42"
          }
        },
        "new": {
          "target": {
            "parents": [
              {
                "hash": "263736ecc250113fad56a93f83b712093554ad42",
                "links": {
                  "self": {
                    "href": "https://api.bitbucket.org/2.0/repositories/lightsidelabs/svc-identity/commit/263736ecc250113fad56a93f83b712093554ad42"
                  },
                  "html": {
                    "href": "https://bitbucket.org/lightsidelabs/svc-identity/commits/263736ecc250113fad56a93f83b712093554ad42"
                  }
                },
                "type": "commit"
              }
            ],
            "date": "2015-09-25T00:55:08+00:00",
            "type": "commit",
            "message": "Update version.py to 0.1.2 [skip ci]\n\n(by utilitybelt/scripts/autotag_version.py)\n",
            "hash": "a0ec139843b2bb281ab21a433266ddc498e605dc",
            "links": {
              "self": {
                "href": "https://api.bitbucket.org/2.0/repositories/lightsidelabs/svc-identity/commit/a0ec139843b2bb281ab21a433266ddc498e605dc"
              },
              "html": {
                "href": "https://bitbucket.org/lightsidelabs/svc-identity/commits/a0ec139843b2bb281ab21a433266ddc498e605dc"
              }
            },
            "author": {
              "raw": "scripts/autotag_version.py <utilitybelt@lightside>"
            }
|
},
|
||||||
|
"links": {
|
||||||
|
"self": {
|
||||||
|
"href": "https://api.bitbucket.org/2.0/repositories/lightsidelabs/svc-identity/refs/branches/master"
|
||||||
|
},
|
||||||
|
"commits": {
|
||||||
|
"href": "https://api.bitbucket.org/2.0/repositories/lightsidelabs/svc-identity/commits/master"
|
||||||
|
},
|
||||||
|
"html": {
|
||||||
|
"href": "https://bitbucket.org/lightsidelabs/svc-identity/branch/master"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"name": "master",
|
||||||
|
"type": "branch"
|
||||||
|
},
|
||||||
|
"closed": false,
|
||||||
|
"truncated": false
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|

117 test/triggerjson/bitbucket_customer_example_tag.json Normal file
@@ -0,0 +1,117 @@
{
  "push": {
    "changes": [
      {
        "links": {
          "commits": {
            "href": "https://api.bitbucket.org/2.0/repositories/lightsidelabs/svc-identity/commits?include=a0ec139843b2bb281ab21a433266ddc498e605dc"
          }
        },
        "closed": false,
        "new": {
          "target": {
            "date": "2015-09-25T00:55:08+00:00",
            "links": {
              "html": {
                "href": "https://bitbucket.org/lightsidelabs/svc-identity/commits/a0ec139843b2bb281ab21a433266ddc498e605dc"
              },
              "self": {
                "href": "https://api.bitbucket.org/2.0/repositories/lightsidelabs/svc-identity/commit/a0ec139843b2bb281ab21a433266ddc498e605dc"
              }
            },
            "message": "Update version.py to 0.1.2 [skip ci]\n\n(by utilitybelt/scripts/autotag_version.py)\n",
            "type": "commit",
            "parents": [
              {
                "links": {
                  "html": {
                    "href": "https://bitbucket.org/lightsidelabs/svc-identity/commits/263736ecc250113fad56a93f83b712093554ad42"
                  },
                  "self": {
                    "href": "https://api.bitbucket.org/2.0/repositories/lightsidelabs/svc-identity/commit/263736ecc250113fad56a93f83b712093554ad42"
                  }
                },
                "hash": "263736ecc250113fad56a93f83b712093554ad42",
                "type": "commit"
              }
            ],
            "hash": "a0ec139843b2bb281ab21a433266ddc498e605dc",
            "author": {
              "raw": "scripts/autotag_version.py <utilitybelt@lightside>"
            }
          },
          "name": "0.1.2",
          "links": {
            "html": {
              "href": "https://bitbucket.org/lightsidelabs/svc-identity/commits/tag/0.1.2"
            },
            "self": {
              "href": "https://api.bitbucket.org/2.0/repositories/lightsidelabs/svc-identity/refs/tags/0.1.2"
            },
            "commits": {
              "href": "https://api.bitbucket.org/2.0/repositories/lightsidelabs/svc-identity/commits/0.1.2"
            }
          },
          "type": "tag"
        },
        "truncated": false,
        "created": true,
        "old": null,
        "forced": false
      }
    ]
  },
  "repository": {
    "name": "svc-identity",
    "links": {
      "html": {
        "href": "https://bitbucket.org/lightsidelabs/svc-identity"
      },
      "self": {
        "href": "https://api.bitbucket.org/2.0/repositories/lightsidelabs/svc-identity"
      },
      "avatar": {
        "href": "https://bitbucket.org/lightsidelabs/svc-identity/avatar/16/"
      }
    },
    "is_private": true,
    "type": "repository",
    "scm": "git",
    "owner": {
      "username": "lightsidelabs",
      "links": {
        "html": {
          "href": "https://bitbucket.org/lightsidelabs/"
        },
        "self": {
          "href": "https://api.bitbucket.org/2.0/teams/lightsidelabs"
        },
        "avatar": {
          "href": "https://bitbucket.org/account/lightsidelabs/avatar/32/"
        }
      },
      "display_name": "LightSIDE Labs",
      "uuid": "{456c5f28-7338-4d89-9506-c7b889ba2d11}",
      "type": "team"
    },
    "full_name": "lightsidelabs/svc-identity",
    "uuid": "{3400bed9-5cde-45b9-8d86-c1dac5d5e610}"
  },
  "actor": {
    "username": "LightSide_CodeShip",
    "links": {
      "html": {
        "href": "https://bitbucket.org/LightSide_CodeShip/"
      },
      "self": {
        "href": "https://api.bitbucket.org/2.0/users/LightSide_CodeShip"
      },
      "avatar": {
        "href": "https://bitbucket.org/account/LightSide_CodeShip/avatar/32/"
      }
    },
    "display_name": "CodeShip Tagging",
    "uuid": "{d009ab20-b8b8-4840-9491-bfe72fbf666e}",
    "type": "user"
  }
}

237 test/triggerjson/bitbucket_webhook.json Normal file
@@ -0,0 +1,237 @@
{
  "push": {
    "changes": [
      {
        "links": {
          "commits": {
            "href": "https://api.bitbucket.org/2.0/repositories/jscoreos/another-repo/commits?include=af64ae7188685f8424040b4735ad12941b980d75&exclude=1784139225279a587e0afb151bed1f9ba3dd509e"
          },
          "diff": {
            "href": "https://api.bitbucket.org/2.0/repositories/jscoreos/another-repo/diff/af64ae7188685f8424040b4735ad12941b980d75..1784139225279a587e0afb151bed1f9ba3dd509e"
          },
          "html": {
            "href": "https://bitbucket.org/jscoreos/another-repo/branches/compare/af64ae7188685f8424040b4735ad12941b980d75..1784139225279a587e0afb151bed1f9ba3dd509e"
          }
        },
        "old": {
          "name": "master",
          "links": {
            "commits": {
              "href": "https://api.bitbucket.org/2.0/repositories/jscoreos/another-repo/commits/master"
            },
            "html": {
              "href": "https://bitbucket.org/jscoreos/another-repo/branch/master"
            },
            "self": {
              "href": "https://api.bitbucket.org/2.0/repositories/jscoreos/another-repo/refs/branches/master"
            }
          },
          "type": "branch",
          "target": {
            "links": {
              "html": {
                "href": "https://bitbucket.org/jscoreos/another-repo/commits/1784139225279a587e0afb151bed1f9ba3dd509e"
              },
              "self": {
                "href": "https://api.bitbucket.org/2.0/repositories/jscoreos/another-repo/commit/1784139225279a587e0afb151bed1f9ba3dd509e"
              }
            },
            "author": {
              "user": {
                "links": {
                  "avatar": {
                    "href": "https://bitbucket.org/account/jscoreos/avatar/32/"
                  },
                  "html": {
                    "href": "https://bitbucket.org/jscoreos/"
                  },
                  "self": {
                    "href": "https://api.bitbucket.org/2.0/users/jscoreos"
                  }
                },
                "uuid": "{2fa27577-f361-45bb-999a-f4450c546b73}",
                "type": "user",
                "display_name": "Joseph Schorr",
                "username": "jscoreos"
              },
              "raw": "Joseph Schorr <joseph.schorr@coreos.com>"
            },
            "date": "2015-09-10T20:37:54+00:00",
            "parents": [
              {
                "links": {
                  "html": {
                    "href": "https://bitbucket.org/jscoreos/another-repo/commits/5329daa0961ec968de9ef36f30024bfa0da73103"
                  },
                  "self": {
                    "href": "https://api.bitbucket.org/2.0/repositories/jscoreos/another-repo/commit/5329daa0961ec968de9ef36f30024bfa0da73103"
                  }
                },
                "type": "commit",
                "hash": "5329daa0961ec968de9ef36f30024bfa0da73103"
              }
            ],
            "type": "commit",
            "message": "Dockerfile edited online with Bitbucket",
            "hash": "1784139225279a587e0afb151bed1f9ba3dd509e"
          }
        },
        "forced": false,
        "truncated": false,
        "commits": [
          {
            "author": {
              "user": {
                "links": {
                  "avatar": {
                    "href": "https://bitbucket.org/account/jscoreos/avatar/32/"
                  },
                  "html": {
                    "href": "https://bitbucket.org/jscoreos/"
                  },
                  "self": {
                    "href": "https://api.bitbucket.org/2.0/users/jscoreos"
                  }
                },
                "uuid": "{2fa27577-f361-45bb-999a-f4450c546b73}",
                "type": "user",
                "display_name": "Joseph Schorr",
                "username": "jscoreos"
              },
              "raw": "Joseph Schorr <joseph.schorr@coreos.com>"
            },
            "links": {
              "html": {
                "href": "https://bitbucket.org/jscoreos/another-repo/commits/af64ae7188685f8424040b4735ad12941b980d75"
              },
              "self": {
                "href": "https://api.bitbucket.org/2.0/repositories/jscoreos/another-repo/commit/af64ae7188685f8424040b4735ad12941b980d75"
              }
            },
            "message": "Dockerfile edited online with Bitbucket",
            "type": "commit",
            "hash": "af64ae7188685f8424040b4735ad12941b980d75"
          }
        ],
        "new": {
          "name": "master",
          "links": {
            "commits": {
              "href": "https://api.bitbucket.org/2.0/repositories/jscoreos/another-repo/commits/master"
            },
            "html": {
              "href": "https://bitbucket.org/jscoreos/another-repo/branch/master"
            },
            "self": {
              "href": "https://api.bitbucket.org/2.0/repositories/jscoreos/another-repo/refs/branches/master"
            }
          },
          "type": "branch",
          "target": {
            "links": {
              "html": {
                "href": "https://bitbucket.org/jscoreos/another-repo/commits/af64ae7188685f8424040b4735ad12941b980d75"
              },
              "self": {
                "href": "https://api.bitbucket.org/2.0/repositories/jscoreos/another-repo/commit/af64ae7188685f8424040b4735ad12941b980d75"
              }
            },
            "author": {
              "user": {
                "links": {
                  "avatar": {
                    "href": "https://bitbucket.org/account/jscoreos/avatar/32/"
                  },
                  "html": {
                    "href": "https://bitbucket.org/jscoreos/"
                  },
                  "self": {
                    "href": "https://api.bitbucket.org/2.0/users/jscoreos"
                  }
                },
                "uuid": "{2fa27577-f361-45bb-999a-f4450c546b73}",
                "type": "user",
                "display_name": "Joseph Schorr",
                "username": "jscoreos"
              },
              "raw": "Joseph Schorr <joseph.schorr@coreos.com>"
            },
            "date": "2015-09-10T20:40:54+00:00",
            "parents": [
              {
                "links": {
                  "html": {
                    "href": "https://bitbucket.org/jscoreos/another-repo/commits/1784139225279a587e0afb151bed1f9ba3dd509e"
                  },
                  "self": {
                    "href": "https://api.bitbucket.org/2.0/repositories/jscoreos/another-repo/commit/1784139225279a587e0afb151bed1f9ba3dd509e"
                  }
                },
                "type": "commit",
                "hash": "1784139225279a587e0afb151bed1f9ba3dd509e"
              }
            ],
            "type": "commit",
            "message": "Dockerfile edited online with Bitbucket",
            "hash": "af64ae7188685f8424040b4735ad12941b980d75"
          }
        },
        "closed": false,
        "created": false
      }
    ]
  },
  "repository": {
    "links": {
      "avatar": {
        "href": "https://bitbucket.org/jscoreos/another-repo/avatar/16/"
      },
      "html": {
        "href": "https://bitbucket.org/jscoreos/another-repo"
      },
      "self": {
        "href": "https://api.bitbucket.org/2.0/repositories/jscoreos/another-repo"
      }
    },
    "full_name": "jscoreos/another-repo",
    "uuid": "{b3459203-3e58-497b-8059-ad087b6b01de}",
    "type": "repository",
    "is_private": true,
    "name": "Another Repo",
    "owner": {
      "links": {
        "avatar": {
          "href": "https://bitbucket.org/account/jscoreos/avatar/32/"
        },
        "html": {
          "href": "https://bitbucket.org/jscoreos/"
        },
        "self": {
          "href": "https://api.bitbucket.org/2.0/users/jscoreos"
        }
      },
      "uuid": "{2fa27577-f361-45bb-999a-f4450c546b73}",
      "type": "user",
      "display_name": "Joseph Schorr",
      "username": "jscoreos"
    },
    "scm": "git"
  },
  "actor": {
    "links": {
      "avatar": {
        "href": "https://bitbucket.org/account/jscoreos/avatar/32/"
      },
      "html": {
        "href": "https://bitbucket.org/jscoreos/"
      },
      "self": {
        "href": "https://api.bitbucket.org/2.0/users/jscoreos"
      }
    },
    "uuid": "{2fa27577-f361-45bb-999a-f4450c546b73}",
    "type": "user",
    "display_name": "Joseph Schorr",
    "username": "jscoreos"
  }
}

153 test/triggerjson/github_webhook.json Normal file
@@ -0,0 +1,153 @@
{
  "ref": "refs/heads/master",
  "before": "9ea43cab474709d4a61afb7e3340de1ffc405b41",
  "after": "410f4cdf8ff09b87f245b13845e8497f90b90a4c",
  "created": false,
  "deleted": false,
  "forced": false,
  "base_ref": null,
  "compare": "https://github.com/josephschorr/anothertest/compare/9ea43cab4747...410f4cdf8ff0",
  "commits": [
    {
      "id": "410f4cdf8ff09b87f245b13845e8497f90b90a4c",
      "distinct": true,
      "message": "Update Dockerfile",
      "timestamp": "2015-09-11T14:26:16-04:00",
      "url": "https://github.com/josephschorr/anothertest/commit/410f4cdf8ff09b87f245b13845e8497f90b90a4c",
      "author": {
        "name": "josephschorr",
        "email": "josephschorr@users.noreply.github.com",
        "username": "josephschorr"
      },
      "committer": {
        "name": "josephschorr",
        "email": "josephschorr@users.noreply.github.com",
        "username": "josephschorr"
      },
      "added": [],
      "removed": [],
      "modified": [
        "Dockerfile"
      ]
    }
  ],
  "head_commit": {
    "id": "410f4cdf8ff09b87f245b13845e8497f90b90a4c",
    "distinct": true,
    "message": "Update Dockerfile",
    "timestamp": "2015-09-11T14:26:16-04:00",
    "url": "https://github.com/josephschorr/anothertest/commit/410f4cdf8ff09b87f245b13845e8497f90b90a4c",
    "author": {
      "name": "josephschorr",
      "email": "josephschorr@users.noreply.github.com",
      "username": "josephschorr"
    },
    "committer": {
      "name": "josephschorr",
      "email": "josephschorr@users.noreply.github.com",
      "username": "josephschorr"
    },
    "added": [],
    "removed": [],
    "modified": [
      "Dockerfile"
    ]
  },
  "repository": {
    "id": 34876107,
    "name": "anothertest",
    "full_name": "josephschorr/anothertest",
    "owner": {
      "name": "josephschorr",
      "email": "josephschorr@users.noreply.github.com"
    },
    "private": false,
    "html_url": "https://github.com/josephschorr/anothertest",
    "description": "",
    "fork": false,
    "url": "https://github.com/josephschorr/anothertest",
    "forks_url": "https://api.github.com/repos/josephschorr/anothertest/forks",
    "keys_url": "https://api.github.com/repos/josephschorr/anothertest/keys{/key_id}",
    "collaborators_url": "https://api.github.com/repos/josephschorr/anothertest/collaborators{/collaborator}",
    "teams_url": "https://api.github.com/repos/josephschorr/anothertest/teams",
    "hooks_url": "https://api.github.com/repos/josephschorr/anothertest/hooks",
    "issue_events_url": "https://api.github.com/repos/josephschorr/anothertest/issues/events{/number}",
    "events_url": "https://api.github.com/repos/josephschorr/anothertest/events",
    "assignees_url": "https://api.github.com/repos/josephschorr/anothertest/assignees{/user}",
    "branches_url": "https://api.github.com/repos/josephschorr/anothertest/branches{/branch}",
    "tags_url": "https://api.github.com/repos/josephschorr/anothertest/tags",
    "blobs_url": "https://api.github.com/repos/josephschorr/anothertest/git/blobs{/sha}",
    "git_tags_url": "https://api.github.com/repos/josephschorr/anothertest/git/tags{/sha}",
    "git_refs_url": "https://api.github.com/repos/josephschorr/anothertest/git/refs{/sha}",
    "trees_url": "https://api.github.com/repos/josephschorr/anothertest/git/trees{/sha}",
    "statuses_url": "https://api.github.com/repos/josephschorr/anothertest/statuses/{sha}",
    "languages_url": "https://api.github.com/repos/josephschorr/anothertest/languages",
    "stargazers_url": "https://api.github.com/repos/josephschorr/anothertest/stargazers",
    "contributors_url": "https://api.github.com/repos/josephschorr/anothertest/contributors",
    "subscribers_url": "https://api.github.com/repos/josephschorr/anothertest/subscribers",
    "subscription_url": "https://api.github.com/repos/josephschorr/anothertest/subscription",
    "commits_url": "https://api.github.com/repos/josephschorr/anothertest/commits{/sha}",
    "git_commits_url": "https://api.github.com/repos/josephschorr/anothertest/git/commits{/sha}",
    "comments_url": "https://api.github.com/repos/josephschorr/anothertest/comments{/number}",
    "issue_comment_url": "https://api.github.com/repos/josephschorr/anothertest/issues/comments{/number}",
    "contents_url": "https://api.github.com/repos/josephschorr/anothertest/contents/{+path}",
    "compare_url": "https://api.github.com/repos/josephschorr/anothertest/compare/{base}...{head}",
    "merges_url": "https://api.github.com/repos/josephschorr/anothertest/merges",
    "archive_url": "https://api.github.com/repos/josephschorr/anothertest/{archive_format}{/ref}",
    "downloads_url": "https://api.github.com/repos/josephschorr/anothertest/downloads",
    "issues_url": "https://api.github.com/repos/josephschorr/anothertest/issues{/number}",
    "pulls_url": "https://api.github.com/repos/josephschorr/anothertest/pulls{/number}",
    "milestones_url": "https://api.github.com/repos/josephschorr/anothertest/milestones{/number}",
    "notifications_url": "https://api.github.com/repos/josephschorr/anothertest/notifications{?since,all,participating}",
    "labels_url": "https://api.github.com/repos/josephschorr/anothertest/labels{/name}",
    "releases_url": "https://api.github.com/repos/josephschorr/anothertest/releases{/id}",
    "created_at": 1430426945,
    "updated_at": "2015-04-30T20:49:05Z",
    "pushed_at": 1441995976,
    "git_url": "git://github.com/josephschorr/anothertest.git",
    "ssh_url": "git@github.com:josephschorr/anothertest.git",
    "clone_url": "https://github.com/josephschorr/anothertest.git",
    "svn_url": "https://github.com/josephschorr/anothertest",
    "homepage": null,
    "size": 144,
    "stargazers_count": 0,
    "watchers_count": 0,
    "language": null,
    "has_issues": true,
    "has_downloads": true,
    "has_wiki": true,
    "has_pages": false,
    "forks_count": 0,
    "mirror_url": null,
    "open_issues_count": 0,
    "forks": 0,
    "open_issues": 0,
    "watchers": 0,
    "default_branch": "master",
    "stargazers": 0,
    "master_branch": "master"
  },
  "pusher": {
    "name": "josephschorr",
    "email": "josephschorr@users.noreply.github.com"
  },
  "sender": {
    "login": "josephschorr",
    "id": 4073002,
    "avatar_url": "https://avatars.githubusercontent.com/u/4073002?v=3",
    "gravatar_id": "",
    "url": "https://api.github.com/users/josephschorr",
    "html_url": "https://github.com/josephschorr",
    "followers_url": "https://api.github.com/users/josephschorr/followers",
    "following_url": "https://api.github.com/users/josephschorr/following{/other_user}",
    "gists_url": "https://api.github.com/users/josephschorr/gists{/gist_id}",
    "starred_url": "https://api.github.com/users/josephschorr/starred{/owner}{/repo}",
    "subscriptions_url": "https://api.github.com/users/josephschorr/subscriptions",
    "organizations_url": "https://api.github.com/users/josephschorr/orgs",
    "repos_url": "https://api.github.com/users/josephschorr/repos",
    "events_url": "https://api.github.com/users/josephschorr/events{/privacy}",
    "received_events_url": "https://api.github.com/users/josephschorr/received_events",
    "type": "User",
    "site_admin": false
  }
}

133 test/triggerjson/github_webhook_missing.json Normal file
@@ -0,0 +1,133 @@
{
  "ref": "refs/heads/master",
  "before": "9ea43cab474709d4a61afb7e3340de1ffc405b41",
  "after": "410f4cdf8ff09b87f245b13845e8497f90b90a4c",
  "created": false,
  "deleted": false,
  "forced": false,
  "base_ref": null,
  "compare": "https://github.com/josephschorr/anothertest/compare/9ea43cab4747...410f4cdf8ff0",
  "commits": [
    {
      "id": "410f4cdf8ff09b87f245b13845e8497f90b90a4c",
      "distinct": true,
      "message": "Update Dockerfile",
      "timestamp": "2015-09-11T14:26:16-04:00",
      "url": "https://github.com/josephschorr/anothertest/commit/410f4cdf8ff09b87f245b13845e8497f90b90a4c",
      "added": [],
      "removed": [],
      "modified": [
        "Dockerfile"
      ]
    }
  ],
  "head_commit": {
    "id": "410f4cdf8ff09b87f245b13845e8497f90b90a4c",
    "distinct": true,
    "message": "Update Dockerfile",
    "timestamp": "2015-09-11T14:26:16-04:00",
    "url": "https://github.com/josephschorr/anothertest/commit/410f4cdf8ff09b87f245b13845e8497f90b90a4c",
    "added": [],
    "removed": [],
    "modified": [
      "Dockerfile"
    ]
  },
  "repository": {
    "id": 34876107,
    "name": "anothertest",
    "full_name": "josephschorr/anothertest",
    "owner": {
      "name": "josephschorr",
      "email": "josephschorr@users.noreply.github.com"
    },
    "private": false,
    "html_url": "https://github.com/josephschorr/anothertest",
    "description": "",
    "fork": false,
    "url": "https://github.com/josephschorr/anothertest",
    "forks_url": "https://api.github.com/repos/josephschorr/anothertest/forks",
    "keys_url": "https://api.github.com/repos/josephschorr/anothertest/keys{/key_id}",
    "collaborators_url": "https://api.github.com/repos/josephschorr/anothertest/collaborators{/collaborator}",
    "teams_url": "https://api.github.com/repos/josephschorr/anothertest/teams",
    "hooks_url": "https://api.github.com/repos/josephschorr/anothertest/hooks",
    "issue_events_url": "https://api.github.com/repos/josephschorr/anothertest/issues/events{/number}",
    "events_url": "https://api.github.com/repos/josephschorr/anothertest/events",
    "assignees_url": "https://api.github.com/repos/josephschorr/anothertest/assignees{/user}",
    "branches_url": "https://api.github.com/repos/josephschorr/anothertest/branches{/branch}",
    "tags_url": "https://api.github.com/repos/josephschorr/anothertest/tags",
    "blobs_url": "https://api.github.com/repos/josephschorr/anothertest/git/blobs{/sha}",
    "git_tags_url": "https://api.github.com/repos/josephschorr/anothertest/git/tags{/sha}",
    "git_refs_url": "https://api.github.com/repos/josephschorr/anothertest/git/refs{/sha}",
    "trees_url": "https://api.github.com/repos/josephschorr/anothertest/git/trees{/sha}",
    "statuses_url": "https://api.github.com/repos/josephschorr/anothertest/statuses/{sha}",
    "languages_url": "https://api.github.com/repos/josephschorr/anothertest/languages",
    "stargazers_url": "https://api.github.com/repos/josephschorr/anothertest/stargazers",
    "contributors_url": "https://api.github.com/repos/josephschorr/anothertest/contributors",
    "subscribers_url": "https://api.github.com/repos/josephschorr/anothertest/subscribers",
    "subscription_url": "https://api.github.com/repos/josephschorr/anothertest/subscription",
    "commits_url": "https://api.github.com/repos/josephschorr/anothertest/commits{/sha}",
    "git_commits_url": "https://api.github.com/repos/josephschorr/anothertest/git/commits{/sha}",
    "comments_url": "https://api.github.com/repos/josephschorr/anothertest/comments{/number}",
    "issue_comment_url": "https://api.github.com/repos/josephschorr/anothertest/issues/comments{/number}",
    "contents_url": "https://api.github.com/repos/josephschorr/anothertest/contents/{+path}",
    "compare_url": "https://api.github.com/repos/josephschorr/anothertest/compare/{base}...{head}",
    "merges_url": "https://api.github.com/repos/josephschorr/anothertest/merges",
    "archive_url": "https://api.github.com/repos/josephschorr/anothertest/{archive_format}{/ref}",
    "downloads_url": "https://api.github.com/repos/josephschorr/anothertest/downloads",
    "issues_url": "https://api.github.com/repos/josephschorr/anothertest/issues{/number}",
    "pulls_url": "https://api.github.com/repos/josephschorr/anothertest/pulls{/number}",
    "milestones_url": "https://api.github.com/repos/josephschorr/anothertest/milestones{/number}",
    "notifications_url": "https://api.github.com/repos/josephschorr/anothertest/notifications{?since,all,participating}",
    "labels_url": "https://api.github.com/repos/josephschorr/anothertest/labels{/name}",
    "releases_url": "https://api.github.com/repos/josephschorr/anothertest/releases{/id}",
    "created_at": 1430426945,
    "updated_at": "2015-04-30T20:49:05Z",
    "pushed_at": 1441995976,
    "git_url": "git://github.com/josephschorr/anothertest.git",
    "ssh_url": "git@github.com:josephschorr/anothertest.git",
    "clone_url": "https://github.com/josephschorr/anothertest.git",
    "svn_url": "https://github.com/josephschorr/anothertest",
    "homepage": null,
    "size": 144,
    "stargazers_count": 0,
    "watchers_count": 0,
    "language": null,
    "has_issues": true,
    "has_downloads": true,
    "has_wiki": true,
    "has_pages": false,
    "forks_count": 0,
    "mirror_url": null,
    "open_issues_count": 0,
    "forks": 0,
    "open_issues": 0,
    "watchers": 0,
    "default_branch": "master",
    "stargazers": 0,
    "master_branch": "master"
  },
  "pusher": {
    "name": "josephschorr",
    "email": "josephschorr@users.noreply.github.com"
  },
  "sender": {
    "login": "josephschorr",
    "id": 4073002,
    "avatar_url": "https://avatars.githubusercontent.com/u/4073002?v=3",
    "gravatar_id": "",
    "url": "https://api.github.com/users/josephschorr",
    "html_url": "https://github.com/josephschorr",
    "followers_url": "https://api.github.com/users/josephschorr/followers",
    "following_url": "https://api.github.com/users/josephschorr/following{/other_user}",
    "gists_url": "https://api.github.com/users/josephschorr/gists{/gist_id}",
    "starred_url": "https://api.github.com/users/josephschorr/starred{/owner}{/repo}",
    "subscriptions_url": "https://api.github.com/users/josephschorr/subscriptions",
    "organizations_url": "https://api.github.com/users/josephschorr/orgs",
    "repos_url": "https://api.github.com/users/josephschorr/repos",
    "events_url": "https://api.github.com/users/josephschorr/events{/privacy}",
    "received_events_url": "https://api.github.com/users/josephschorr/received_events",
    "type": "User",
    "site_admin": false
  }
}

149 test/triggerjson/github_webhook_noname.json Normal file
@@ -0,0 +1,149 @@
{
  "ref": "refs/heads/master",
  "before": "9716b516939221dc754a056e0f9ddf599e71d4b8",
  "after": "118b07121695d9f2e40a5ff264fdcc2917680870",
  "created": false,
  "deleted": false,
  "forced": false,
  "base_ref": null,
  "compare": "https://github.com/silas/docker-test/compare/9716b5169392...118b07121695",
  "commits": [
    {
      "id": "118b07121695d9f2e40a5ff264fdcc2917680870",
      "distinct": true,
      "message": "Fail",
      "timestamp": "2015-09-25T14:55:11-04:00",
      "url": "https://github.com/silas/docker-test/commit/118b07121695d9f2e40a5ff264fdcc2917680870",
      "author": {
        "name": "Silas Sewell",
        "email": "silas@sewell-fail.org"
      },
      "committer": {
        "name": "Silas Sewell",
        "email": "silas@sewell-fail.org"
      },
      "added": [],
      "removed": [],
      "modified": [
        "README.md"
      ]
    }
  ],
  "head_commit": {
    "id": "118b07121695d9f2e40a5ff264fdcc2917680870",
    "distinct": true,
    "message": "Fail",
    "timestamp": "2015-09-25T14:55:11-04:00",
    "url": "https://github.com/silas/docker-test/commit/118b07121695d9f2e40a5ff264fdcc2917680870",
    "author": {
      "name": "Silas Sewell",
      "email": "silas@sewell-fail.org"
    },
    "committer": {
      "name": "Silas Sewell",
      "email": "silas@sewell-fail.org"
    },
    "added": [],
    "removed": [],
    "modified": [
      "README.md"
    ]
  },
  "repository": {
    "id": 42467431,
    "name": "docker-test",
    "full_name": "silas/docker-test",
    "owner": {
      "name": "silas",
      "email": "silas@sewell.org"
    },
    "private": false,
    "html_url": "https://github.com/silas/docker-test",
    "description": "",
    "fork": false,
    "url": "https://github.com/silas/docker-test",
    "forks_url": "https://api.github.com/repos/silas/docker-test/forks",
    "keys_url": "https://api.github.com/repos/silas/docker-test/keys{/key_id}",
    "collaborators_url": "https://api.github.com/repos/silas/docker-test/collaborators{/collaborator}",
    "teams_url": "https://api.github.com/repos/silas/docker-test/teams",
    "hooks_url": "https://api.github.com/repos/silas/docker-test/hooks",
    "issue_events_url": "https://api.github.com/repos/silas/docker-test/issues/events{/number}",
    "events_url": "https://api.github.com/repos/silas/docker-test/events",
    "assignees_url": "https://api.github.com/repos/silas/docker-test/assignees{/user}",
    "branches_url": "https://api.github.com/repos/silas/docker-test/branches{/branch}",
    "tags_url": "https://api.github.com/repos/silas/docker-test/tags",
    "blobs_url": "https://api.github.com/repos/silas/docker-test/git/blobs{/sha}",
    "git_tags_url": "https://api.github.com/repos/silas/docker-test/git/tags{/sha}",
    "git_refs_url": "https://api.github.com/repos/silas/docker-test/git/refs{/sha}",
    "trees_url": "https://api.github.com/repos/silas/docker-test/git/trees{/sha}",
    "statuses_url": "https://api.github.com/repos/silas/docker-test/statuses/{sha}",
    "languages_url": "https://api.github.com/repos/silas/docker-test/languages",
    "stargazers_url": "https://api.github.com/repos/silas/docker-test/stargazers",
    "contributors_url": "https://api.github.com/repos/silas/docker-test/contributors",
    "subscribers_url": "https://api.github.com/repos/silas/docker-test/subscribers",
    "subscription_url": "https://api.github.com/repos/silas/docker-test/subscription",
    "commits_url": "https://api.github.com/repos/silas/docker-test/commits{/sha}",
    "git_commits_url": "https://api.github.com/repos/silas/docker-test/git/commits{/sha}",
    "comments_url": "https://api.github.com/repos/silas/docker-test/comments{/number}",
    "issue_comment_url": "https://api.github.com/repos/silas/docker-test/issues/comments{/number}",
    "contents_url": "https://api.github.com/repos/silas/docker-test/contents/{+path}",
    "compare_url": "https://api.github.com/repos/silas/docker-test/compare/{base}...{head}",
    "merges_url": "https://api.github.com/repos/silas/docker-test/merges",
    "archive_url": "https://api.github.com/repos/silas/docker-test/{archive_format}{/ref}",
    "downloads_url": "https://api.github.com/repos/silas/docker-test/downloads",
    "issues_url": "https://api.github.com/repos/silas/docker-test/issues{/number}",
    "pulls_url": "https://api.github.com/repos/silas/docker-test/pulls{/number}",
    "milestones_url": "https://api.github.com/repos/silas/docker-test/milestones{/number}",
    "notifications_url": "https://api.github.com/repos/silas/docker-test/notifications{?since,all,participating}",
    "labels_url": "https://api.github.com/repos/silas/docker-test/labels{/name}",
    "releases_url": "https://api.github.com/repos/silas/docker-test/releases{/id}",
    "created_at": 1442254053,
    "updated_at": "2015-09-14T18:07:33Z",
    "pushed_at": 1443207315,
    "git_url": "git://github.com/silas/docker-test.git",
    "ssh_url": "git@github.com:silas/docker-test.git",
    "clone_url": "https://github.com/silas/docker-test.git",
    "svn_url": "https://github.com/silas/docker-test",
    "homepage": null,
    "size": 108,
    "stargazers_count": 0,
    "watchers_count": 0,
    "language": null,
    "has_issues": true,
    "has_downloads": true,
    "has_wiki": true,
    "has_pages": false,
    "forks_count": 0,
    "mirror_url": null,
    "open_issues_count": 0,
    "forks": 0,
    "open_issues": 0,
    "watchers": 0,
    "default_branch": "master",
    "stargazers": 0,
    "master_branch": "master"
  },
  "pusher": {
    "name": "silas",
    "email": "silas@sewell.org"
  },
  "sender": {
    "login": "silas",
    "id": 18528,
    "avatar_url": "https://avatars.githubusercontent.com/u/18528?v=3",
    "gravatar_id": "",
    "url": "https://api.github.com/users/silas",
    "html_url": "https://github.com/silas",
    "followers_url": "https://api.github.com/users/silas/followers",
    "following_url": "https://api.github.com/users/silas/following{/other_user}",
    "gists_url": "https://api.github.com/users/silas/gists{/gist_id}",
    "starred_url": "https://api.github.com/users/silas/starred{/owner}{/repo}",
    "subscriptions_url": "https://api.github.com/users/silas/subscriptions",
    "organizations_url": "https://api.github.com/users/silas/orgs",
    "repos_url": "https://api.github.com/users/silas/repos",
    "events_url": "https://api.github.com/users/silas/events{/privacy}",
    "received_events_url": "https://api.github.com/users/silas/received_events",
    "type": "User",
    "site_admin": false
  }
}

54 test/triggerjson/gitlab_webhook.json Normal file
@@ -0,0 +1,54 @@
{
  "object_kind": "push",
  "before": "11fcaca195e8b17ca7e3dc47d9608d5b6b892f45",
  "after": "fb88379ee45de28a0a4590fddcbd8eff8b36026e",
  "ref": "refs/heads/master",
  "checkout_sha": "fb88379ee45de28a0a4590fddcbd8eff8b36026e",
  "message": null,
  "user_id": 95973,
  "user_name": "Jimmy Zelinskie",
  "user_email": "jimmyzelinskie@gmail.com",
  "project_id": 406414,
  "repository": {
    "name": "www-gitlab-com",
    "url": "git@gitlab.com:jzelinskie/www-gitlab-com.git",
    "description": "",
    "homepage": "https://gitlab.com/jzelinskie/www-gitlab-com",
    "git_http_url": "https://gitlab.com/jzelinskie/www-gitlab-com.git",
    "git_ssh_url": "git@gitlab.com:jzelinskie/www-gitlab-com.git",
    "visibility_level": 20
  },
  "commits": [
    {
      "id": "fb88379ee45de28a0a4590fddcbd8eff8b36026e",
      "message": "Fix link\n",
      "timestamp": "2015-08-13T19:33:18+00:00",
      "url": "https://gitlab.com/jzelinskie/www-gitlab-com/commit/fb88379ee45de28a0a4590fddcbd8eff8b36026e",
      "author": {
        "name": "Sytse Sijbrandij",
        "email": "sytse@gitlab.com"
      }
    },
    {
      "id": "4ca166bc0b511f21fa331873f260f1a7cb38d723",
      "message": "Merge branch 'git-lfs' into 'master'\n\nGit lfs\n\n@JobV @dzaporozhets @DouweM please review the tone of this\n\nSee merge request !899\n",
      "timestamp": "2015-08-13T15:52:15+00:00",
      "url": "https://gitlab.com/jzelinskie/www-gitlab-com/commit/4ca166bc0b511f21fa331873f260f1a7cb38d723",
      "author": {
        "name": "Sytse Sijbrandij",
        "email": "sytse@gitlab.com"
      }
    },
    {
      "id": "11fcaca195e8b17ca7e3dc47d9608d5b6b892f45",
      "message": "Merge branch 'release-7-3-5' into 'master'\n\n7-13-5 Release post.\n\nSee merge request !900\n",
      "timestamp": "2015-08-13T09:31:47+00:00",
      "url": "https://gitlab.com/jzelinskie/www-gitlab-com/commit/11fcaca195e8b17ca7e3dc47d9608d5b6b892f45",
      "author": {
        "name": "Valery Sizov",
        "email": "valery@gitlab.com"
      }
    }
  ],
  "total_commits_count": 3
}

@@ -3,7 +3,7 @@ import logging

 from datetime import timedelta, datetime

 from app import app
-from data.model import LogEntry
+from data.database import LogEntry

 logger = logging.getLogger(__name__)

76 util/dict_wrappers.py Normal file
@@ -0,0 +1,76 @@
import json
from jsonpath_rw import parse


class SafeDictSetter(object):
  """ Specialized write-only dictionary wrapper class that allows for setting
      nested keys via a path syntax.

      Example:
        sds = SafeDictSetter()
        sds['foo.bar.baz'] = 'hello' # Sets 'foo' = {'bar': {'baz': 'hello'}}
        sds['somekey'] = None # Does not set the key since the value is None
  """
  def __init__(self, initial_object=None):
    self._object = initial_object or {}

  def __setitem__(self, path, value):
    self.set(path, value)

  def set(self, path, value, allow_none=False):
    """ Sets the value of the given path to the given value. """
    if value is None and not allow_none:
      return

    pieces = path.split('.')
    current = self._object

    for piece in pieces[:len(pieces)-1]:
      current_obj = current.get(piece, {})
      if not isinstance(current_obj, dict):
        raise Exception('Key %s is a non-object value: %s' % (piece, current_obj))

      current[piece] = current_obj
      current = current_obj

    current[pieces[-1]] = value

  def dict_value(self):
    """ Returns the dict value built. """
    return self._object

  def json_value(self):
    """ Returns the JSON string value of the dictionary built. """
    return json.dumps(self._object)


class JSONPathDict(object):
  """ Specialized read-only dictionary wrapper class that uses the jsonpath_rw library
      to access keys via an X-Path-like syntax.

      Example:
        pd = JSONPathDict({'hello': {'hi': 'there'}})
        pd['hello.hi'] # Returns 'there'
  """
  def __init__(self, dict_value):
    """ Init the helper with the JSON object. """
    self._object = dict_value

  def __getitem__(self, path):
    return self.get(path)

  def get(self, path, not_found_handler=None):
    """ Returns the value found at the given path. Path is a json-path expression. """
    jsonpath_expr = parse(path)
    matches = jsonpath_expr.find(self._object)
    if not matches:
      return not_found_handler() if not_found_handler else None

    match = matches[0].value
    if not match:
      return not_found_handler() if not_found_handler else None

    if isinstance(match, dict):
      return JSONPathDict(match)

    return match
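
Taken together, SafeDictSetter and JSONPathDict give a write path and a read path over nested JSON. What follows is a minimal usage sketch, not part of the commit; it assumes jsonpath_rw is installed and uses only the methods defined above, with made-up payload values:

from util.dict_wrappers import SafeDictSetter, JSONPathDict

# Build a nested payload without creating the intermediate dicts by hand.
sds = SafeDictSetter()
sds['commit.id'] = 'a0ec1398'
sds['commit.author.name'] = 'CodeShip Tagging'
sds['commit.url'] = None          # silently dropped: None values are not set
print(sds.json_value())           # nested JSON string (key order may vary)

# Read nested keys back with a JSONPath expression, with a fallback on miss.
pd = JSONPathDict(sds.dict_value())
print(pd['commit.author.name'])                   # 'CodeShip Tagging'
print(pd.get('commit.url', lambda: '<unknown>'))  # '<unknown>'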

@@ -1,44 +1,50 @@
 import logging

-from data.database import ImageStorage, Image, db
+from data.database import ImageStorage, Image, db, db_for_update
 from app import app

-LOGGER = logging.getLogger(__name__)
+logger = logging.getLogger(__name__)


 def backfill_aggregate_sizes():
   """ Generates aggregate sizes for any image storage entries without them """
-  LOGGER.setLevel(logging.DEBUG)
-  LOGGER.debug('Aggregate sizes backfill: Began execution')
+  logger.debug('Aggregate sizes backfill: Began execution')
   while True:
-    batch_storage_ids = list(ImageStorage
-                             .select(ImageStorage.id)
-                             .where(ImageStorage.aggregate_size >> None)
-                             .limit(10))
+    batch_image_ids = list(Image
+                           .select(Image.id)
+                           .where(Image.aggregate_size >> None)
+                           .limit(100))

-    if len(batch_storage_ids) == 0:
+    if len(batch_image_ids) == 0:
       # There are no storages left to backfill. We're done!
-      LOGGER.debug('Aggregate sizes backfill: Backfill completed')
+      logger.debug('Aggregate sizes backfill: Backfill completed')
       return

-    LOGGER.debug('Aggregate sizes backfill: Found %s records to update', len(batch_storage_ids))
-    for image_storage_id in batch_storage_ids:
-      LOGGER.debug('Updating image storage: %s', image_storage_id.id)
+    logger.debug('Aggregate sizes backfill: Found %s records to update', len(batch_image_ids))
+    for image_id in batch_image_ids:
+      logger.debug('Updating image : %s', image_id.id)

       with app.config['DB_TRANSACTION_FACTORY'](db):
         try:
-          storage = ImageStorage.select().where(ImageStorage.id == image_storage_id.id).get()
-          image = Image.select().where(Image.storage == storage).get()
+          image = (Image
+                   .select(Image, ImageStorage)
+                   .join(ImageStorage)
+                   .where(Image.id == image_id)
+                   .get())
+
+          aggregate_size = image.storage.image_size

           image_ids = image.ancestors.split('/')[1:-1]
-          aggregate_size = storage.image_size
           for image_id in image_ids:
-            current_image = Image.select().where(Image.id == image_id).join(ImageStorage)
-            aggregate_size += image.storage.image_size
+            to_add = db_for_update(Image
+                                   .select(Image, ImageStorage)
+                                   .join(ImageStorage)
+                                   .where(Image.id == image_id)).get()
+            aggregate_size += to_add.storage.image_size

-          storage.aggregate_size = aggregate_size
-          storage.save()
-        except ImageStorage.DoesNotExist:
-          pass
+          image.aggregate_size = aggregate_size
+          image.save()
         except Image.DoesNotExist:
           pass
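
For context on the arithmetic in the new code path: Image.ancestors is a '/'-delimited id path such as '/1/2/6/7/10/', so split('/')[1:-1] yields the ancestor ids, and the aggregate is the image's own storage size plus each ancestor's storage size. A standalone sketch with hypothetical sizes, illustrative only:

# Hypothetical per-image storage sizes; stand-ins for ImageStorage.image_size.
sizes = {'1': 120, '2': 35, '6': 8, '7': 52, '10': 4}

ancestors = '/1/2/6/7/10/'                  # same format as Image.ancestors
ancestor_ids = ancestors.split('/')[1:-1]   # ['1', '2', '6', '7', '10']

own_size = 16                               # the image's own layer size
aggregate_size = own_size + sum(sizes[image_id] for image_id in ancestor_ids)
print(aggregate_size)                       # 235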

87 util/migrate/backfill_image_fields.py Normal file
@@ -0,0 +1,87 @@
import logging

from peewee import (CharField, BigIntegerField, BooleanField, ForeignKeyField, DateTimeField,
                    TextField)
from data.database import BaseModel, db, db_for_update
from app import app


logger = logging.getLogger(__name__)


class Repository(BaseModel):
  pass


# Vendor the information from tables we will be writing to at the time of this migration
class ImageStorage(BaseModel):
  created = DateTimeField(null=True)
  comment = TextField(null=True)
  command = TextField(null=True)
  aggregate_size = BigIntegerField(null=True)
  uploading = BooleanField(default=True, null=True)


class Image(BaseModel):
  # This class is intentionally denormalized. Even though images are supposed
  # to be globally unique we can't treat them as such for permissions and
  # security reasons. So rather than Repository <-> Image being many to many
  # each image now belongs to exactly one repository.
  docker_image_id = CharField(index=True)
  repository = ForeignKeyField(Repository)

  # '/' separated list of ancestory ids, e.g. /1/2/6/7/10/
  ancestors = CharField(index=True, default='/', max_length=64535, null=True)

  storage = ForeignKeyField(ImageStorage, index=True, null=True)

  created = DateTimeField(null=True)
  comment = TextField(null=True)
  command = TextField(null=True)
  aggregate_size = BigIntegerField(null=True)
  v1_json_metadata = TextField(null=True)


def backfill_image_fields():
  """ Copies metadata from image storages to their images. """
  logger.debug('Image metadata backfill: Began execution')
  while True:
    batch_image_ids = list(Image
                           .select(Image.id)
                           .join(ImageStorage)
                           .where(Image.created >> None, Image.comment >> None,
                                  Image.command >> None, Image.aggregate_size >> None,
                                  ImageStorage.uploading == False,
                                  ~((ImageStorage.created >> None) &
                                    (ImageStorage.comment >> None) &
                                    (ImageStorage.command >> None) &
                                    (ImageStorage.aggregate_size >> None)))
                           .limit(100))

    if len(batch_image_ids) == 0:
      logger.debug('Image metadata backfill: Backfill completed')
      return

    logger.debug('Image metadata backfill: Found %s records to update', len(batch_image_ids))
    for image_id in batch_image_ids:
      logger.debug('Updating image: %s', image_id.id)

      with app.config['DB_TRANSACTION_FACTORY'](db):
        try:
          image = db_for_update(Image
                                .select(Image, ImageStorage)
                                .join(ImageStorage)
                                .where(Image.id == image_id.id)).get()

          image.created = image.storage.created
          image.comment = image.storage.comment
          image.command = image.storage.command
          image.aggregate_size = image.storage.aggregate_size
          image.save()
        except Image.DoesNotExist:
          pass


if __name__ == "__main__":
  logging.basicConfig(level=logging.DEBUG)
  logging.getLogger('peewee').setLevel(logging.CRITICAL)
  backfill_image_fields()
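
Each of these backfills wraps the row it rewrites in db_for_update inside a DB_TRANSACTION_FACTORY block. The helper's definition is not part of this diff; a plausible reading, judging only from the call sites here, is a thin wrapper that turns a peewee select into SELECT ... FOR UPDATE, sketched below as an assumption rather than the actual implementation:

def db_for_update(query):
  # Assumed behavior only: emit SELECT ... FOR UPDATE so the fetched row
  # stays locked until the enclosing DB_TRANSACTION_FACTORY block commits.
  # The real helper lives in data.database and may handle dialects without
  # FOR UPDATE support (e.g. SQLite) differently.
  return query.for_update()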

72 util/migrate/backfill_v1_metadata.py Normal file
@@ -0,0 +1,72 @@
import logging

from peewee import JOIN_LEFT_OUTER

from data.database import (Image, ImageStorage, ImageStoragePlacement, ImageStorageLocation, db,
                           db_for_update)
from app import app, storage
from data import model


logger = logging.getLogger(__name__)


def image_json_path(storage_uuid):
  base_path = storage.image_path(storage_uuid)
  return '{0}json'.format(base_path)


def backfill_v1_metadata():
  """ Copies metadata from image storages to their images. """
  logger.debug('Image v1 metadata backfill: Began execution')
  while True:
    batch_image_ids = list(Image
                           .select(Image.id)
                           .join(ImageStorage)
                           .where(Image.v1_json_metadata >> None, ImageStorage.uploading == False)
                           .limit(100))

    if len(batch_image_ids) == 0:
      logger.debug('Image v1 metadata backfill: Backfill completed')
      return

    logger.debug('Image v1 metadata backfill: Found %s records to update', len(batch_image_ids))
    for one_id in batch_image_ids:
      with app.config['DB_TRANSACTION_FACTORY'](db):
        try:
          logger.debug('Loading image: %s', one_id.id)

          raw_query = (ImageStoragePlacement
                       .select(ImageStoragePlacement, Image, ImageStorage, ImageStorageLocation)
                       .join(ImageStorageLocation)
                       .switch(ImageStoragePlacement)
                       .join(ImageStorage, JOIN_LEFT_OUTER)
                       .join(Image)
                       .where(Image.id == one_id.id))

          placement_query = db_for_update(raw_query)

          repo_image_list = model.image.invert_placement_query_results(placement_query)
          if len(repo_image_list) > 1:
            logger.error('Found more images than we requested, something is wrong with the query')
            return

          repo_image = repo_image_list[0]
          uuid = repo_image.storage.uuid
          json_path = image_json_path(uuid)

          logger.debug('Updating image: %s from: %s', repo_image.id, json_path)
          try:
            data = storage.get_content(repo_image.storage.locations, json_path)
          except IOError:
            data = None
            logger.exception('failed to find v1 metadata, defaulting to None')
          repo_image.v1_json_metadata = data
          repo_image.save()
        except ImageStoragePlacement.DoesNotExist:
          pass


if __name__ == "__main__":
  logging.basicConfig(level=logging.DEBUG)
  # logging.getLogger('peewee').setLevel(logging.CRITICAL)
  backfill_v1_metadata()

@@ -5,7 +5,7 @@ from app import app
 from data.database import configure, BaseModel, uuid_generator
 from peewee import *
 from bitbucket import BitBucket
-from endpoints.trigger import BitbucketBuildTrigger
+from buildtrigger.bitbuckethandler import BitbucketBuildTrigger

 configure(app.config)
@ -4,7 +4,7 @@ import json
from data.database import RepositoryBuildTrigger, BuildTriggerService, db, db_for_update
from app import app
- from endpoints.trigger import BuildTriggerHandler
+ from buildtrigger.basehandler import BuildTriggerHandler
from util.security.ssh import generate_ssh_keypair
from github import GithubException

@ -24,7 +24,8 @@ def backfill_github_deploykeys():
                            .select(RepositoryBuildTrigger.id)
                            .where(RepositoryBuildTrigger.private_key >> None)
                            .where(RepositoryBuildTrigger.service == github_service)
-                           .limit(10))
+                           .where(RepositoryBuildTrigger.used_legacy_github >> None)
+                           .limit(100))

    filtered_ids = [trigger.id for trigger in build_trigger_ids if trigger.id not in encountered]
    if len(filtered_ids) == 0:
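The extra used_legacy_github filter pairs with the trigger.used_legacy_github = True write added in the next hunk: each trigger is marked as visited as soon as it is loaded, so a trigger that fails partway through is not re-selected on every pass. In peewee, `>> None` compiles to IS NULL; a self-contained illustration with a hypothetical Trigger model:

from peewee import SqliteDatabase, Model, BooleanField, CharField

db = SqliteDatabase(':memory:')

class Trigger(Model):
  name = CharField()
  used_legacy_github = BooleanField(null=True)

  class Meta:
    database = db

db.create_tables([Trigger])
Trigger.create(name='a')                           # unvisited
Trigger.create(name='b', used_legacy_github=True)  # already marked

# '>> None' renders as IS NULL, so only the unvisited row comes back.
unvisited = Trigger.select().where(Trigger.used_legacy_github >> None)
print([t.name for t in unvisited])  # ['a']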
@ -39,15 +40,22 @@ def backfill_github_deploykeys():

    with app.config['DB_TRANSACTION_FACTORY'](db):
      try:
-       query = RepositoryBuildTrigger.select(RepositoryBuildTrigger.id == trigger_id)
+       query = RepositoryBuildTrigger.select().where(RepositoryBuildTrigger.id == trigger_id)
        trigger = db_for_update(query).get()
      except RepositoryBuildTrigger.DoesNotExist:
        logger.debug('Could not find build trigger %s', trigger_id)
        continue

+     trigger.used_legacy_github = True
+     trigger.save()
+
      handler = BuildTriggerHandler.get_handler(trigger)

      config = handler.config
+     if not 'build_source' in config:
+       logger.debug('Could not find build source for trigger %s', trigger_id)
+       continue
+
      build_source = config['build_source']
      gh_client = handler._get_client()

@ -83,5 +91,8 @@ def backfill_github_deploykeys():

if __name__ == "__main__":
+ logging.getLogger('boto').setLevel(logging.CRITICAL)
+ logging.getLogger('github').setLevel(logging.CRITICAL)
+
  logging.config.fileConfig('conf/logging_debug.conf', disable_existing_loggers=False)
  backfill_github_deploykeys()

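The first change in the hunk above fixes a real peewee pitfall rather than just restyling the query: positional arguments to select() are the columns or expressions to return, not filters, so the old form selected the boolean expression as a value on every trigger row, and db_for_update(query).get() would fetch an arbitrary one. Shown side by side:

# Old: selects the expression (id == trigger_id) as a column; matches every row.
query = RepositoryBuildTrigger.select(RepositoryBuildTrigger.id == trigger_id)

# New: filters rows, so .get() returns exactly the intended trigger.
query = RepositoryBuildTrigger.select().where(RepositoryBuildTrigger.id == trigger_id)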
@ -67,7 +67,7 @@ def backfill_sizes_from_data():
      decompressor = zlib.decompressobj(ZLIB_GZIP_WINDOW)

      uncompressed_size = 0
-     with store.stream_read_file(with_locs.locations, store.image_layer_path(uuid)) as stream:
+     with store.stream_read_file(with_locs.locations, store.v1_image_layer_path(uuid)) as stream:
        while True:
          current_data = stream.read(CHUNK_SIZE)
          if len(current_data) == 0:
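For context, the loop this hunk touches computes a layer's uncompressed size by streaming it through a zlib decompressor without buffering the whole layer. A self-contained sketch, assuming ZLIB_GZIP_WINDOW is zlib.MAX_WBITS | 32 (the header-autodetecting window value) and CHUNK_SIZE is any convenient read size:

import zlib

def uncompressed_size_of(stream, chunk_size=8192):
  # MAX_WBITS | 32 lets zlib auto-detect gzip or zlib headers while inflating.
  decompressor = zlib.decompressobj(zlib.MAX_WBITS | 32)
  uncompressed_size = 0
  while True:
    current_data = stream.read(chunk_size)
    if len(current_data) == 0:
      break
    uncompressed_size += len(decompressor.decompress(current_data))
  return uncompressed_size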
@ -6,6 +6,10 @@ from uuid import uuid4

REPOSITORY_NAME_REGEX = re.compile(r'^[\.a-zA-Z0-9_-]+$')

+ TAG_REGEX = re.compile(r'^[\w][\w\.-]{0,127}$')
+ TAG_ERROR = ('Invalid tag: must match [A-Za-z0-9_.-], NOT start with "." or "-", '
+              'and can contain 1-128 characters')
+
def parse_namespace_repository(repository, include_tag=False):
  parts = repository.rstrip('/').split('/', 1)
  if len(parts) < 2:
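A few illustrative probes of the new TAG_REGEX; the leading [\w] is what rejects tags starting with '.' or '-', and the {0,127} bound caps tags at 128 characters total:

import re

TAG_REGEX = re.compile(r'^[\w][\w\.-]{0,127}$')

assert TAG_REGEX.match('latest')
assert TAG_REGEX.match('v1.0-rc2')
assert TAG_REGEX.match('a' * 128)      # 128 characters: the maximum
assert not TAG_REGEX.match('.hidden')  # may not start with '.'
assert not TAG_REGEX.match('-dev')     # may not start with '-'
assert not TAG_REGEX.match('a' * 129)  # one character too long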
@ -6,7 +6,8 @@ import anunidecode
INVALID_PASSWORD_MESSAGE = 'Invalid password, password must be at least ' + \
                           '8 characters and contain no whitespace.'
INVALID_USERNAME_CHARACTERS = r'[^a-z0-9_]'
- VALID_CHARACTERS = '_' + string.digits + string.lowercase
+ VALID_CHARACTERS = string.digits + string.lowercase

MIN_LENGTH = 4
MAX_LENGTH = 30

@ -48,8 +49,13 @@ def _gen_filler_chars(num_filler_chars):

def generate_valid_usernames(input_username):
+   # Docker's regex: [a-z0-9]+(?:[._-][a-z0-9]+)*
  normalized = input_username.encode('unidecode', 'ignore').strip().lower()
  prefix = re.sub(INVALID_USERNAME_CHARACTERS, '_', normalized)[:30]
+   prefix = re.sub(r'_{2,}', '_', prefix)
+
+   if prefix.endswith('_'):
+     prefix = prefix[0:len(prefix) - 1]

  num_filler_chars = max(0, MIN_LENGTH - len(prefix))
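A worked example of the tightened normalization on a hypothetical input: 'J. Smith!' lowercases to 'j. smith!', the invalid-character substitution gives 'j__smith_', the new run-collapsing step reduces that to 'j_smith_', and stripping the trailing underscore leaves 'j_smith', which fits Docker's [a-z0-9]+(?:[._-][a-z0-9]+)* form:

import re

INVALID_USERNAME_CHARACTERS = r'[^a-z0-9_]'

prefix = re.sub(INVALID_USERNAME_CHARACTERS, '_', 'j. smith!')[:30]  # 'j__smith_'
prefix = re.sub(r'_{2,}', '_', prefix)                               # 'j_smith_'
if prefix.endswith('_'):
  prefix = prefix[:-1]                                               # 'j_smith'
print(prefix)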
3  verbs.py
@ -1,3 +1,4 @@
+ import os
import logging
import logging.config

@ -5,5 +6,7 @@ from app import app as application

from endpoints.verbs import verbs

+ if os.environ.get('DEBUGLOG') == 'true':
+   logging.config.fileConfig('conf/logging_debug.conf', disable_existing_loggers=False)
+
application.register_blueprint(verbs, url_prefix='/c1')
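Note the guard only fires on the exact lowercase string 'true'; a quick sketch of the check in isolation (the print target is illustrative):

import os

# Only DEBUGLOG=true enables the debug config; 'True', '1', or unset do not.
if os.environ.get('DEBUGLOG') == 'true':
  print('would load conf/logging_debug.conf')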
Some files were not shown because too many files have changed in this diff