- Merge branch 'master' into sha-lom

- Extract out the tar handling from streamlayerformat into tarlayerformat
- Add a new tarfileappender class to make it easy to append data to gzipped tars (see the sketch below)
- Fix the gzipwrap to properly close
- Have the .git injection use the new appender
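
For orientation, a minimal stdlib sketch of the operation the new appender is used for below (in endpoints/trigger.py): append extra entries onto an existing gzipped tarball, where the entries mapping takes tar paths to file contents (a string) or to None for a directory. The function name append_entries is illustrative; this is not the TarfileAppender implementation itself, only the equivalent rewrite-based operation:

import tarfile
from StringIO import StringIO

def append_entries(tarball, entries):
    # 'entries' maps paths inside the tar to file contents (a string),
    # or to None for a directory entry.
    output = StringIO()
    with tarfile.open(fileobj=output, mode='w:gz') as updated:
        # Copy the existing members of the gzipped tar.
        with tarfile.open(fileobj=tarball, mode='r:gz') as original:
            for member in original:
                if member.isreg():
                    updated.addfile(member, original.extractfile(member.name))
                else:
                    updated.addfile(member)
        # Append the new entries.
        for path, contents in entries.items():
            info = tarfile.TarInfo(path)
            info.type = tarfile.REGTYPE if contents is not None else tarfile.DIRTYPE
            if contents is not None:
                info.size = len(contents)
            updated.addfile(info, StringIO(contents) if contents is not None else None)
    output.seek(0)  # rewind so downstream readers start at the beginning
    return output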
Joseph Schorr 2014-10-15 15:51:34 -04:00
commit d43109d7cb
48 changed files with 1232 additions and 532 deletions


@@ -34,7 +34,9 @@ ADD conf/init/doupdatelimits.sh /etc/my_init.d/
ADD conf/init/preplogsdir.sh /etc/my_init.d/
ADD conf/init/runmigration.sh /etc/my_init.d/
ADD conf/init/gunicorn /etc/service/gunicorn
ADD conf/init/gunicorn_web /etc/service/gunicorn_web
ADD conf/init/gunicorn_registry /etc/service/gunicorn_registry
ADD conf/init/gunicorn_verbs /etc/service/gunicorn_verbs
ADD conf/init/nginx /etc/service/nginx
ADD conf/init/diffsworker /etc/service/diffsworker
ADD conf/init/notificationworker /etc/service/notificationworker

app.py

@@ -3,7 +3,7 @@ import os
import json
import yaml
from flask import Flask as BaseFlask, Config as BaseConfig
from flask import Flask as BaseFlask, Config as BaseConfig, request, Request
from flask.ext.principal import Principal
from flask.ext.login import LoginManager
from flask.ext.mail import Mail
@@ -18,12 +18,12 @@ from data.users import UserAuthentication
from util.analytics import Analytics
from util.exceptionlog import Sentry
from util.queuemetrics import QueueMetrics
from util.names import urn_generator
from data.billing import Billing
from data.buildlogs import BuildLogs
from data.archivedlogs import LogArchive
from data.queue import WorkQueue
from data.userevent import UserEventsBuilderModule
from datetime import datetime
class Config(BaseConfig):
@@ -60,6 +60,7 @@ LICENSE_FILENAME = 'conf/stack/license.enc'
app = Flask(__name__)
logger = logging.getLogger(__name__)
profile = logging.getLogger('profile')
if 'TEST' in os.environ:
@@ -82,6 +83,37 @@ else:
environ_config = json.loads(os.environ.get(OVERRIDE_CONFIG_KEY, '{}'))
app.config.update(environ_config)
app.teardown_request(database.close_db_filter)
class RequestWithId(Request):
request_gen = staticmethod(urn_generator(['request']))
def __init__(self, *args, **kwargs):
super(RequestWithId, self).__init__(*args, **kwargs)
self.request_id = self.request_gen()
@app.before_request
def _request_start():
profile.debug('Starting request: %s', request.path)
@app.after_request
def _request_end(r):
profile.debug('Ending request: %s', request.path)
return r
class InjectingFilter(logging.Filter):
def filter(self, record):
record.msg = '[%s] %s' % (request.request_id, record.msg)
return True
profile.addFilter(InjectingFilter())
app.request_class = RequestWithId
features.import_features(app.config)
Principal(app, use_sessions=False)
@@ -105,9 +137,6 @@ dockerfile_build_queue = WorkQueue(app.config['DOCKERFILE_BUILD_QUEUE_NAME'], tf
reporter=queue_metrics.report)
notification_queue = WorkQueue(app.config['NOTIFICATION_QUEUE_NAME'], tf)
# TODO: Remove this in the prod push following the notifications change.
webhook_queue = WorkQueue(app.config['WEBHOOK_QUEUE_NAME'], tf)
database.configure(app.config)
model.config.app_config = app.config
model.config.store = storage
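
The RequestWithId/InjectingFilter pair added above stamps every profile log line with a per-request id. A self-contained sketch of the same filter pattern; RequestIdFilter is a hypothetical stand-in for the InjectingFilter above (which reads the id from the Flask request context rather than taking it as an argument), and the id value is illustrative (the real one comes from util.names.urn_generator):

import logging

class RequestIdFilter(logging.Filter):
    def __init__(self, request_id):
        logging.Filter.__init__(self)
        self.request_id = request_id

    def filter(self, record):
        # Prefix the message so interleaved log lines from concurrent
        # requests can be told apart.
        record.msg = '[%s] %s' % (self.request_id, record.msg)
        return True

logging.basicConfig(level=logging.DEBUG)
profile = logging.getLogger('profile')
profile.addFilter(RequestIdFilter('urn:request:1234'))
profile.debug('Starting request: %s', '/v1/ping')
# -> DEBUG:profile:[urn:request:1234] Starting request: /v1/ping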


@@ -1,90 +1,14 @@
import logging
import logging.config
import uuid
from peewee import Proxy
from app import app as application
from flask import request, Request
from util.names import urn_generator
from data.database import db as model_db, read_slave
# Turn off debug logging for boto
logging.getLogger('boto').setLevel(logging.CRITICAL)
from endpoints.api import api_bp
from endpoints.index import index
from endpoints.web import web
from endpoints.tags import tags
from endpoints.registry import registry
from endpoints.verbs import verbs
from endpoints.webhooks import webhooks
from endpoints.realtime import realtime
from endpoints.callbacks import callback
from logentries import LogentriesHandler
logger = logging.getLogger(__name__)
# Bind all of the blueprints
import web
import verbs
import registry
werkzeug = logging.getLogger('werkzeug')
werkzeug.setLevel(logging.DEBUG)
profile = logging.getLogger('profile')
profile.setLevel(logging.DEBUG)
logentries_key = application.config.get('LOGENTRIES_KEY', None)
if logentries_key:
logger.debug('Initializing logentries with key: %s' % logentries_key)
werkzeug.addHandler(LogentriesHandler(logentries_key))
profile.addHandler(LogentriesHandler(logentries_key))
application.register_blueprint(web)
application.register_blueprint(callback, url_prefix='/oauth2')
application.register_blueprint(index, url_prefix='/v1')
application.register_blueprint(tags, url_prefix='/v1')
application.register_blueprint(registry, url_prefix='/v1')
application.register_blueprint(verbs, url_prefix='/c1')
application.register_blueprint(api_bp, url_prefix='/api')
application.register_blueprint(webhooks, url_prefix='/webhooks')
application.register_blueprint(realtime, url_prefix='/realtime')
class RequestWithId(Request):
request_gen = staticmethod(urn_generator(['request']))
def __init__(self, *args, **kwargs):
super(RequestWithId, self).__init__(*args, **kwargs)
self.request_id = self.request_gen()
@application.before_request
def _request_start():
profile.debug('Starting request: %s', request.path)
@application.after_request
def _request_end(r):
profile.debug('Ending request: %s', request.path)
return r
class InjectingFilter(logging.Filter):
def filter(self, record):
record.msg = '[%s] %s' % (request.request_id, record.msg)
return True
profile.addFilter(InjectingFilter())
def close_db(exc):
db = model_db
if not db.is_closed():
logger.debug('Disconnecting from database.')
db.close()
if read_slave.obj is not None and not read_slave.is_closed():
logger.debug('Disconnecting from read slave.')
read_slave.close()
application.teardown_request(close_db)
application.request_class = RequestWithId
if __name__ == '__main__':
logging.config.fileConfig('conf/logging.conf', disable_existing_loggers=False)


@@ -1,4 +1,4 @@
bind = 'unix:/tmp/gunicorn.sock'
bind = 'unix:/tmp/gunicorn_registry.sock'
workers = 16
worker_class = 'gevent'
timeout = 2000

conf/gunicorn_verbs.py Normal file

@@ -0,0 +1,6 @@
bind = 'unix:/tmp/gunicorn_verbs.sock'
workers = 8
timeout = 2000
logconfig = 'conf/logging.conf'
pythonpath = '.'
preload_app = True

conf/gunicorn_web.py Normal file

@@ -0,0 +1,7 @@
bind = 'unix:/tmp/gunicorn_web.sock'
workers = 2
worker_class = 'gevent'
timeout = 30
logconfig = 'conf/logging.conf'
pythonpath = '.'
preload_app = True


@@ -14,8 +14,12 @@ gzip_types text/plain text/xml text/css
text/javascript application/x-javascript
application/octet-stream;
upstream app_server {
server unix:/tmp/gunicorn.sock fail_timeout=0;
# For a TCP configuration:
# server 192.168.0.7:8000 fail_timeout=0;
upstream web_app_server {
server unix:/tmp/gunicorn_web.sock fail_timeout=0;
}
upstream verbs_app_server {
server unix:/tmp/gunicorn_verbs.sock fail_timeout=0;
}
upstream registry_app_server {
server unix:/tmp/gunicorn_registry.sock fail_timeout=0;
}


@@ -1,2 +0,0 @@
#!/bin/sh
exec svlogd /var/log/gunicorn/


@@ -1,8 +0,0 @@
#! /bin/bash
echo 'Starting gunicorn'
cd /
venv/bin/gunicorn -c conf/gunicorn_config.py application:application
echo 'Gunicorn exited'


@@ -0,0 +1,2 @@
#!/bin/sh
exec svlogd /var/log/gunicorn_registry/


@@ -0,0 +1,8 @@
#! /bin/bash
echo 'Starting gunicorn'
cd /
venv/bin/gunicorn -c conf/gunicorn_registry.py registry:application
echo 'Gunicorn exited'


@@ -0,0 +1,2 @@
#!/bin/sh
exec svlogd /var/log/gunicorn_verbs/

conf/init/gunicorn_verbs/run Executable file

@@ -0,0 +1,8 @@
#! /bin/bash
echo 'Starting gunicorn'
cd /
nice -10 venv/bin/gunicorn -c conf/gunicorn_verbs.py verbs:application
echo 'Gunicorn exited'

conf/init/gunicorn_web/log/run Executable file

@@ -0,0 +1,2 @@
#!/bin/sh
exec svlogd /var/log/gunicorn_web/

conf/init/gunicorn_web/run Executable file

@@ -0,0 +1,8 @@
#! /bin/bash
echo 'Starting gunicorn'
cd /
venv/bin/gunicorn -c conf/gunicorn_web.py web:application
echo 'Gunicorn exited'


@@ -17,6 +17,14 @@ qualname=application.profiler
level=DEBUG
handlers=console
[logger_boto]
level=INFO
handlers=console
[logger_werkzeug]
level=DEBUG
handlers=console
[logger_gunicorn.error]
level=INFO
handlers=console


@@ -1,4 +1,3 @@
client_max_body_size 20G;
client_body_temp_path /var/log/nginx/client_body 1 2;
server_name _;
@@ -11,17 +10,35 @@ if ($args ~ "_escaped_fragment_") {
rewrite ^ /snapshot$uri;
}
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header Host $http_host;
proxy_redirect off;
proxy_set_header Transfer-Encoding $http_transfer_encoding;
location / {
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header Host $http_host;
proxy_redirect off;
proxy_pass http://web_app_server;
}
location /v1/ {
proxy_buffering off;
proxy_request_buffering off;
proxy_set_header Transfer-Encoding $http_transfer_encoding;
proxy_pass http://app_server;
proxy_pass http://registry_app_server;
proxy_read_timeout 2000;
proxy_temp_path /var/log/nginx/proxy_temp 1 2;
client_max_body_size 20G;
}
location /c1/ {
proxy_buffering off;
proxy_request_buffering off;
proxy_pass http://verbs_app_server;
proxy_read_timeout 2000;
proxy_temp_path /var/log/nginx/proxy_temp 1 2;
}


@@ -80,11 +80,11 @@ class DefaultConfig(object):
AUTHENTICATION_TYPE = 'Database'
# Build logs
BUILDLOGS_REDIS = {'host': 'logs.quay.io'}
BUILDLOGS_REDIS = {'host': 'localhost'}
BUILDLOGS_OPTIONS = []
# Real-time user events
USER_EVENTS_REDIS = {'host': 'logs.quay.io'}
USER_EVENTS_REDIS = {'host': 'localhost'}
# Stripe config
BILLING_TYPE = 'FakeStripe'
@@ -132,9 +132,6 @@ class DefaultConfig(object):
DIFFS_QUEUE_NAME = 'imagediff'
DOCKERFILE_BUILD_QUEUE_NAME = 'dockerfilebuild'
# TODO: Remove this in the prod push following the notifications change.
WEBHOOK_QUEUE_NAME = 'webhook'
# Super user config. Note: This MUST BE an empty list for the default config.
SUPER_USERS = []


@@ -7,9 +7,9 @@ from datetime import datetime
from peewee import *
from data.read_slave import ReadSlaveModel
from sqlalchemy.engine.url import make_url
from urlparse import urlparse
from util.names import urn_generator
logger = logging.getLogger(__name__)
@@ -80,6 +80,16 @@ def uuid_generator():
return str(uuid.uuid4())
def close_db_filter(_):
if not db.is_closed():
logger.debug('Disconnecting from database.')
db.close()
if read_slave.obj is not None and not read_slave.is_closed():
logger.debug('Disconnecting from read slave.')
read_slave.close()
class BaseModel(ReadSlaveModel):
class Meta:
database = db


@@ -30,7 +30,7 @@ class UserEventsBuilderModule(object):
if not redis_config:
# This is the old key name.
redis_config = {
'host': app.config.get('USER_EVENTS_REDIS_HOSTNAME')
'host': app.config.get('USER_EVENTS_REDIS_HOSTNAME'),
}
user_events = UserEventBuilder(redis_config)
@@ -45,7 +45,7 @@ class UserEventsBuilderModule(object):
class UserEvent(object):
"""
Defines a helper class for publishing to realtime user events
as backed by Redis.
"""
@@ -74,7 +74,7 @@ class UserEvent(object):
thread = threading.Thread(target=conduct)
thread.start()
class UserEventListener(object):
"""
Defines a helper class for subscribing to realtime user events as
@@ -90,7 +90,7 @@ class UserEventListener(object):
@staticmethod
def _user_event_key(username, event_id):
return 'user/%s/events/%s' % (username, event_id)
def event_stream(self):
"""
Starts listening for events on the channel(s), yielding for each event
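
These two helper classes wrap Redis pub/sub. A minimal sketch of the underlying pattern with redis-py; the channel string follows the _user_event_key format above, while the host, username, and event id are illustrative:

import json
import redis

conn = redis.StrictRedis(host='localhost')
channel = 'user/%s/events/%s' % ('devtable', 'docker-cli')

pubsub = conn.pubsub()
pubsub.subscribe(channel)

# Publisher side: push a JSON payload onto the channel.
conn.publish(channel, json.dumps({'action': 'waiting'}))

# Subscriber side: iterate messages, skipping the subscribe confirmation.
for message in pubsub.listen():
    if message['type'] == 'message':
        data = json.loads(message['data'])
        break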


@@ -317,7 +317,7 @@ class BuildTriggerAnalyze(RepositoryParamResource):
if not found_repository:
return {
'status': 'error',
'message': 'Repository "%s" was not found' % (base_image)
'message': 'Repository "%s" referenced by the Dockerfile was not found' % (base_image)
}
# If the repository is private and the user cannot see that repo, then
@@ -326,7 +326,7 @@ class BuildTriggerAnalyze(RepositoryParamResource):
if found_repository.visibility.name != 'public' and not can_read:
return {
'status': 'error',
'message': 'Repository "%s" was not found' % (base_image)
'message': 'Repository "%s" referenced by the Dockerfile was not found' % (base_image)
}
# Check to see if the repository is public. If not, we suggest the
@@ -450,18 +450,18 @@ class BuildTriggerFieldValues(RepositoryParamResource):
""" Custom verb to fetch a values list for a particular field name. """
@require_repo_admin
@nickname('listTriggerFieldValues')
def get(self, namespace, repository, trigger_uuid, field_name):
def post(self, namespace, repository, trigger_uuid, field_name):
""" List the field values for a custom run field. """
try:
trigger = model.get_build_trigger(namespace, repository, trigger_uuid)
except model.InvalidBuildTriggerException:
raise NotFound()
config = request.get_json() or json.loads(trigger.config)
user_permission = UserAdminPermission(trigger.connected_user.username)
if user_permission.can():
trigger_handler = BuildTriggerBase.get_trigger_for_service(trigger.service.name)
values = trigger_handler.list_field_values(trigger.auth_token, json.loads(trigger.config),
field_name)
values = trigger_handler.list_field_values(trigger.auth_token, config, field_name)
if values is None:
raise NotFound()


@@ -70,7 +70,7 @@ def create_user():
abort(400, 'User creation is disabled. Please speak to your administrator.')
user_data = request.get_json()
if not 'username' in user_data:
if not user_data or not 'username' in user_data:
abort(400, 'Missing username')
username = user_data['username']


@@ -211,7 +211,7 @@ class FlowdockMethod(NotificationMethod):
if not token:
return
owner = model.get_user(notification.repository.namespace_user.username)
owner = model.get_user_or_org(notification.repository.namespace_user.username)
if not owner:
# Something went wrong.
return
@@ -267,7 +267,7 @@ class HipchatMethod(NotificationMethod):
if not token or not room_id:
return
owner = model.get_user(notification.repository.namespace_user.username)
owner = model.get_user_or_org(notification.repository.namespace_user.username)
if not owner:
# Something went wrong.
return
@@ -334,7 +334,7 @@ class SlackMethod(NotificationMethod):
if not token or not subdomain:
return
owner = model.get_user(notification.repository.namespace_user.username)
owner = model.get_user_or_org(notification.repository.namespace_user.username)
if not owner:
# Something went wrong.
return


@@ -3,12 +3,13 @@ import io
import os.path
import tarfile
import base64
from StringIO import StringIO
import re
from github import Github, UnknownObjectException, GithubException
from tempfile import SpooledTemporaryFile
from app import app, userfiles as user_files
from util.tarfileappender import TarfileAppender
client = app.config['HTTPCLIENT']
@@ -230,13 +231,35 @@ class GithubBuildTrigger(BuildTrigger):
return repos_by_org
def matches_branch(self, branch_name, regex):
if not regex:
return False
m = regex.match(branch_name)
if not m:
return False
return len(m.group(0)) == len(branch_name)
def list_build_subdirs(self, auth_token, config):
gh_client = self._get_client(auth_token)
source = config['build_source']
try:
repo = gh_client.get_repo(source)
default_commit = repo.get_branch(repo.default_branch or 'master').commit
# Find the first matching branch.
branches = None
if 'branch_regex' in config:
try:
regex = re.compile(config['branch_regex'])
branches = [branch.name for branch in repo.get_branches()
if self.matches_branch(branch.name, regex)]
except:
pass
branches = branches or [repo.default_branch or 'master']
default_commit = repo.get_branch(branches[0]).commit
commit_tree = repo.get_git_tree(default_commit.sha, recursive=True)
return [os.path.dirname(elem.path) for elem in commit_tree.tree
@@ -298,50 +321,21 @@ class GithubBuildTrigger(BuildTrigger):
# Seek to position 0 to make tarfile happy
tarball.seek(0)
# Pull out the name of the subdir that GitHub generated.
# Pull out the name of the subdir that GitHub generated
with tarfile.open(fileobj=tarball) as archive:
tarball_subdir = archive.getnames()[0]
tarball_subdir_info = archive.getmember(tarball_subdir)
tarball_subdir = archive.getnames()[0]
# Seek to position 0 to make tarfile happy
# Seek to position 0 to make tarfile happy.
tarball.seek(0)
with SpooledTemporaryFile(CHUNK_SIZE) as updated_tarball:
def add_entry(arch, dir_path, base_info, contents=None):
info = tarfile.TarInfo(dir_path)
entries = {
tarball_subdir + '/.git/HEAD': commit_sha,
tarball_subdir + '/.git/objects/': None,
tarball_subdir + '/.git/refs/': None
}
info.uid = base_info.uid
info.gid = base_info.gid
info.uname = base_info.uname
info.gname = base_info.gname
info.mode = base_info.mode
info.mtime = base_info.mtime
info.type = tarfile.REGTYPE if contents else tarfile.DIRTYPE
if contents:
info.size = len(contents)
arch.addfile(info, fileobj=StringIO(contents) if contents else None)
with tarfile.open(fileobj=updated_tarball, mode='w|gz') as updated_archive:
# Copy existing members of the tar to the updated archive.
with tarfile.open(fileobj=tarball) as archive:
for tar_info in archive:
if tar_info.isreg():
updated_archive.addfile(tar_info, archive.extractfile(tar_info.name))
else:
updated_archive.addfile(tar_info)
# Add the synthetic .git directory to the tarball, containing the commit_sha.
add_entry(updated_archive, tarball_subdir + '/.git/HEAD', tarball_subdir_info,
contents=commit_sha)
add_entry(updated_archive, tarball_subdir + '/.git/objects/', tarball_subdir_info)
add_entry(updated_archive, tarball_subdir + '/.git/refs/', tarball_subdir_info)
# Seek to position 0 to make boto multipart happy
updated_tarball.seek(0)
dockerfile_id = user_files.store_file(updated_tarball, TARBALL_MIME)
appender = TarfileAppender(tarball, entries).get_stream()
dockerfile_id = user_files.store_file(appender, TARBALL_MIME)
logger.debug('Successfully prepared job')
@@ -367,7 +361,7 @@ class GithubBuildTrigger(BuildTrigger):
payload = request.get_json()
if not payload or payload.get('head_commit') is None:
raise SkipRequestException()
if 'zen' in payload:
raise ValidationRequestException()
@@ -376,6 +370,16 @@ class GithubBuildTrigger(BuildTrigger):
commit_sha = payload['head_commit']['id']
commit_message = payload['head_commit'].get('message', '')
if 'branch_regex' in config:
try:
regex = re.compile(config['branch_regex'])
except:
regex = re.compile('.*')
branch = ref.split('/')[-1]
if not self.matches_branch(branch, regex):
raise SkipRequestException()
if should_skip_commit(commit_message):
raise SkipRequestException()
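
A note on the branch filtering above: regex.match only anchors at the start of the string, so the length check in matches_branch is what turns it into a full match. A trimmed-down version with illustrative inputs:

import re

def matches_branch(branch_name, regex):
    m = regex.match(branch_name)
    return bool(m) and len(m.group(0)) == len(branch_name)

assert matches_branch('master', re.compile('master'))
assert not matches_branch('master-old', re.compile('master'))  # prefix-only match rejected
assert matches_branch('release-1.2', re.compile('release-.*'))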


@@ -69,8 +69,9 @@ def get_squashed_tag(namespace, repository, tag):
permission = ReadRepositoryPermission(namespace, repository)
if permission.can() or model.repository_is_public(namespace, repository):
# Lookup the requested tag.
tag_image = model.get_tag_image(namespace, repository, tag)
if not tag_image:
try:
tag_image = model.get_tag_image(namespace, repository, tag)
except model.DataModelException:
abort(404)
# Lookup the tag's image and storage.

registry.py Normal file

@@ -0,0 +1,13 @@
import logging
import logging.config
from app import app as application
from endpoints.index import index
from endpoints.tags import tags
from endpoints.registry import registry
application.register_blueprint(index, url_prefix='/v1')
application.register_blueprint(tags, url_prefix='/v1')
application.register_blueprint(registry, url_prefix='/v1')


@@ -4105,6 +4105,27 @@ pre.command:before {
border-bottom-left-radius: 0px;
}
.trigger-setup-github-element .branch-reference.not-match {
color: #ccc !important;
}
.trigger-setup-github-element .branch-reference.not-match a {
color: #ccc !important;
text-decoration: line-through;
}
.trigger-setup-github-element .branch-filter {
white-space: nowrap;
}
.trigger-setup-github-element .branch-filter span {
display: inline-block;
}
.trigger-setup-github-element .selected-info {
margin-bottom: 20px;
}
.trigger-setup-github-element .github-org-icon {
width: 20px;
margin-right: 8px;
@@ -4120,6 +4141,45 @@ pre.command:before {
padding-left: 6px;
}
.trigger-setup-github-element .matching-branches {
margin: 0px;
padding: 0px;
margin-left: 10px;
display: inline-block;
}
.trigger-setup-github-element .matching-branches li:before {
content: "\f126";
font-family: FontAwesome;
}
.trigger-setup-github-element .matching-branches li {
list-style: none;
display: inline-block;
margin-left: 10px;
}
.setup-trigger-directive-element .dockerfile-found-content {
margin-left: 32px;
}
.setup-trigger-directive-element .dockerfile-found-content:before {
content: "\f071";
font-family: FontAwesome;
color: rgb(255, 194, 0);
position: absolute;
top: 0px;
left: 0px;
font-size: 20px;
}
.setup-trigger-directive-element .dockerfile-found {
position: relative;
margin-bottom: 16px;
padding-bottom: 16px;
border-bottom: 1px solid #eee;
}
.slideinout {
-webkit-transition:0.5s all;
transition:0.5s linear all;
@@ -4127,7 +4187,7 @@ pre.command:before {
position: relative;
height: 75px;
height: 32px;
opacity: 1;
}


@@ -8,102 +8,110 @@
<button type="button" class="close" data-dismiss="modal" aria-hidden="true">&times;</button>
<h4 class="modal-title">Setup new build trigger</h4>
</div>
<div class="modal-body" ng-show="activating">
<div class="modal-body" ng-show="currentView == 'activating'">
<span class="quay-spinner"></span> Setting up trigger...
</div>
<div class="modal-body" ng-show="!activating">
<div class="modal-body" ng-show="currentView != 'activating'">
<!-- Trigger-specific setup -->
<div class="trigger-description-element trigger-option-section" ng-switch on="trigger.service">
<div ng-switch-when="github">
<div class="trigger-setup-github" repository="repository" trigger="trigger"
next-step-counter="nextStepCounter" current-step-valid="state.stepValid"
analyze="checkAnalyze(isValid)"></div>
</div>
</div>
<!-- Loading pull information -->
<div ng-show="currentView == 'analyzing'">
<span class="quay-spinner"></span> Checking pull credential requirements...
</div>
<!-- Pull information -->
<div class="trigger-option-section" ng-show="showPullRequirements">
<div ng-show="!pullRequirements">
<span class="quay-spinner"></span> Checking pull credential requirements...
</div>
<div class="trigger-option-section" ng-show="currentView == 'analyzed'">
<div ng-show="pullRequirements">
<div class="alert alert-danger" ng-if="pullRequirements.status == 'error'">
{{ pullRequirements.message }}
</div>
<div class="alert alert-warning" ng-if="pullRequirements.status == 'warning'">
{{ pullRequirements.message }}
</div>
<div class="alert alert-success" ng-if="pullRequirements.status == 'analyzed' && pullRequirements.is_public === false">
The
<a href="{{ pullRequirements.dockerfile_url }}" ng-if="pullRequirements.dockerfile_url" target="_blank">Dockerfile found</a>
<span ng-if="!pullRequirements.dockerfile_url">Dockerfile found</span>
depends on the private <span class="registry-name"></span> repository
<a href="/repository/{{ pullRequirements.namespace }}/{{ pullRequirements.name }}" target="_blank">
{{ pullRequirements.namespace }}/{{ pullRequirements.name }}
</a> which requires
a robot account for pull access, because it is marked <strong>private</strong>.
<!-- Messaging -->
<div class="alert alert-danger" ng-if="pullInfo.analysis.status == 'error'">
{{ pullInfo.analysis.message }}
</div>
<div class="alert alert-warning" ng-if="pullInfo.analysis.status == 'warning'">
{{ pullRequirements.message }}
</div>
<div class="dockerfile-found" ng-if="pullInfo.analysis.is_public === false">
<div class="dockerfile-found-content">
A robot account is <strong>required</strong> for this build trigger because
the
<a href="{{ pullInfo.analysis.dockerfile_url }}" ng-if="pullInfo.analysis.dockerfile_url" target="_blank">
Dockerfile found
</a>
<span ng-if="!pullInfo.analysis.dockerfile_url">Dockerfile found</span>
pulls from the private <span class="registry-name"></span> repository
<a href="/repository/{{ pullInfo.analysis.namespace }}/{{ pullInfo.analysis.name }}" target="_blank">
{{ pullInfo.analysis.namespace }}/{{ pullInfo.analysis.name }}
</a>
</div>
</div>
<div ng-show="pullRequirements">
<table style="width: 100%;">
<tr>
<td style="width: 162px">
<span class="context-tooltip" data-title="The credentials given to 'docker pull' in the builder for pulling images"
style="margin-bottom: 10px" bs-tooltip>
docker pull Credentials:
</span>
</td>
<td>
<div ng-if="!isNamespaceAdmin(repository.namespace)" style="color: #aaa;">
In order to set pull credentials for a build trigger, you must be an Administrator of the namespace <strong>{{ repository.namespace }}</strong>
</div>
<div class="btn-group btn-group-sm" ng-if="isNamespaceAdmin(repository.namespace)">
<button type="button" class="btn btn-default"
ng-class="publicPull ? 'active btn-info' : ''" ng-click="setPublicPull(true)">None</button>
<button type="button" class="btn btn-default"
ng-class="publicPull ? '' : 'active btn-info'" ng-click="setPublicPull(false)">
<i class="fa fa-wrench"></i>
Robot account
</button>
</div>
</td>
</tr>
</table>
<div style="margin-bottom: 12px">Please select the credentials to use when pulling the base image:</div>
<div ng-if="!isNamespaceAdmin(repository.namespace)" style="color: #aaa;">
<strong>Note:</strong> In order to set pull credentials for a build trigger, you must be an
Administrator of the namespace <strong>{{ repository.namespace }}</strong>
</div>
<table style="width: 100%;">
<tr ng-show="!publicPull">
<td>
<div class="entity-search" namespace="repository.namespace"
placeholder="'Select robot account for pulling...'"
current-entity="pullEntity"
allowed-entities="['robot']"></div>
<!-- Namespace admin -->
<div ng-show="isNamespaceAdmin(repository.namespace)">
<!-- Select credentials -->
<div class="btn-group btn-group-sm">
<button type="button" class="btn btn-default"
ng-class="pullInfo.is_public ? 'active btn-info' : ''"
ng-click="pullInfo.is_public = true">
None
</button>
<button type="button" class="btn btn-default"
ng-class="pullInfo.is_public ? '' : 'active btn-info'"
ng-click="pullInfo.is_public = false">
<i class="fa fa-wrench"></i>
Robot account
</button>
</div>
<div class="alert alert-info" ng-if="pullRequirements.status == 'analyzed' && pullRequirements.robots.length"
style="margin-top: 20px; margin-bottom: 0px;">
Note: We've automatically selected robot account
<span class="entity-reference" entity="pullRequirements.robots[0]"></span>, since it has access to the private
repository.
</div>
<div class="alert alert-warning"
ng-if="pullRequirements.status == 'analyzed' && !pullRequirements.robots.length && pullRequirements.name"
style="margin-top: 20px; margin-bottom: 0px;">
Note: No robot account currently has access to the private repository. Please create one and/or assign access in the
<a href="/repository/{{ pullRequirements.namespace }}/{{ pullRequirements.name }}/admin" target="_blank">repository's
admin panel</a>.
</div>
</td>
</tr>
</table>
<!-- Robot Select -->
<div ng-show="!pullInfo.is_public" style="margin-top: 10px">
<div class="entity-search" namespace="repository.namespace"
placeholder="'Select robot account for pulling...'"
current-entity="pullInfo.pull_entity"
allowed-entities="['robot']"></div>
<div ng-if="pullInfo.analysis.robots.length" style="margin-top: 20px; margin-bottom: 0px;">
<strong>Note</strong>: We've automatically selected robot account
<span class="entity-reference" entity="pullInfo.analysis.robots[0]"></span>,
since it has access to the private repository.
</div>
<div ng-if="!pullInfo.analysis.robots.length && pullInfo.analysis.name"
style="margin-top: 20px; margin-bottom: 0px;">
<strong>Note</strong>: No robot account currently has access to the private repository. Please create one and/or assign access in the
<a href="/repository/{{ pullInfo.analysis.namespace }}/{{ pullInfo.analysis.name }}/admin" target="_blank">
repository's admin panel.
</a>
</div>
</div>
</div>
</div>
</div>
<div class="modal-footer">
<button type="button" class="btn btn-primary" ng-disabled="!state.stepValid"
ng-click="nextStepCounter = nextStepCounter + 1"
ng-show="currentView == 'config'">Next</button>
<button type="button" class="btn btn-primary"
ng-disabled="!trigger.$ready || (!publicPull && !pullEntity) || checkingPullRequirements || activating"
ng-click="activate()">Finished</button>
ng-disabled="!trigger.$ready || (!pullInfo['is_public'] && !pullInfo['pull_entity'])"
ng-click="activate()"
ng-show="currentView == 'analyzed'">Create Trigger</button>
<button type="button" class="btn btn-default" data-dismiss="modal">Cancel</button>
</div>
</div><!-- /.modal-content -->


@@ -0,0 +1,9 @@
<span class="step-view-step-content">
<span ng-show="!loading">
<span ng-transclude></span>
</span>
<span ng-show="loading">
<span class="quay-spinner"></span>
{{ loadMessage }}
</span>
</span>


@@ -0,0 +1,3 @@
<div class="step-view-element">
<div class="transcluded" ng-transclude>
</div>


@@ -2,19 +2,18 @@
<span ng-switch-when="github">
<i class="fa fa-github fa-lg" style="margin-right: 6px" data-title="GitHub" bs-tooltip="tooltip.title"></i>
Push to GitHub repository <a href="https://github.com/{{ trigger.config.build_source }}" target="_new">{{ trigger.config.build_source }}</a>
<div style="margin-top: 4px; margin-left: 26px; font-size: 12px; color: gray;" ng-if="trigger.config.subdir">
<span>Dockerfile:
<a href="https://github.com/{{ trigger.config.build_source }}/tree/{{ trigger.config.master_branch || 'master' }}/{{ trigger.config.subdir }}/Dockerfile" target="_blank">
//{{ trigger.config.subdir }}/Dockerfile
</a>
</span>
</div>
<div style="margin-top: 4px; margin-left: 26px; font-size: 12px; color: gray;" ng-if="!trigger.config.subdir && !short">
<span><span class="trigger-description-subtitle">Dockerfile:</span>
<a href="https://github.com/{{ trigger.config.build_source }}/tree/{{ trigger.config.master_branch || 'master' }}/Dockerfile" target="_blank">
//Dockerfile
</a>
</span>
<div style="margin-top: 4px; margin-left: 26px; font-size: 12px; color: gray;" ng-if="!short">
<div>
<span class="trigger-description-subtitle">Branches:</span>
<span ng-if="trigger.config.branch_regex">Matching Regular Expression {{ trigger.config.branch_regex }}</span>
<span ng-if="!trigger.config.branch_regex">(All Branches)</span>
</div>
<div>
<span class="trigger-description-subtitle">Dockerfile:</span>
<span ng-if="trigger.config.subdir">//{{ trigger.config.subdir}}/Dockerfile</span>
<span ng-if="!trigger.config.subdir">//Dockerfile</span>
</div>
</div>
</span>
<span ng-switch-default>


@@ -1,48 +1,147 @@
<div class="trigger-setup-github-element">
<div ng-show="loading">
<span class="quay-spinner" style="vertical-align: middle; margin-right: 10px"></span>
Loading Repository List
<!-- Current selected info -->
<div class="selected-info" ng-show="nextStepCounter > 0">
<table style="width: 100%;">
<tr ng-show="currentRepo && nextStepCounter > 0">
<td width="200px">
Repository:
</td>
<td>
<div class="current-repo">
<img class="dropdown-select-icon github-org-icon"
ng-src="{{ currentRepo.avatar_url ? currentRepo.avatar_url : '//www.gravatar.com/avatar/' }}">
{{ currentRepo.repo }}
</div>
</td>
</tr>
<tr ng-show="nextStepCounter > 1">
<td>
Branches:
</td>
<td>
<div class="branch-filter">
<span ng-if="!state.hasBranchFilter">(All Branches)</span>
<span ng-if="state.hasBranchFilter">Regular Expression: <code>{{ state.branchFilter }}</code></span>
</div>
</td>
</tr>
<tr ng-show="nextStepCounter > 2">
<td>
Dockerfile Location:
</td>
<td>
<div class="dockerfile-location">
<i class="fa fa-folder fa-lg"></i> {{ state.currentLocation || '(Repository Root)' }}
</div>
</td>
</tr>
</table>
</div>
<div ng-show="!loading">
<div style="margin-bottom: 18px">Please choose the GitHub repository that will trigger the build:</div>
<!-- Step view -->
<div class="step-view" next-step-counter="nextStepCounter" current-step-valid="currentStepValid"
steps-completed="stepsCompleted()">
<!-- Repository select -->
<div class="dropdown-select" placeholder="'Select a repository'" selected-item="currentRepo"
lookahead-items="repoLookahead">
<!-- Icons -->
<i class="dropdown-select-icon none-icon fa fa-github fa-lg"></i>
<img class="dropdown-select-icon github-org-icon" ng-src="{{ currentRepo.avatar_url ? currentRepo.avatar_url : '//www.gravatar.com/avatar/' }}">
<div class="step-view-step" complete-condition="currentRepo" load-callback="loadRepositories(callback)"
load-message="Loading Repositories">
<div style="margin-bottom: 12px">Please choose the GitHub repository that will trigger the build:</div>
<div class="dropdown-select" placeholder="'Select a repository'" selected-item="currentRepo"
lookahead-items="repoLookahead">
<!-- Icons -->
<i class="dropdown-select-icon none-icon fa fa-github fa-lg"></i>
<img class="dropdown-select-icon github-org-icon"
ng-src="{{ currentRepo.avatar_url ? currentRepo.avatar_url : '//www.gravatar.com/avatar/' }}">
<!-- Dropdown menu -->
<ul class="dropdown-select-menu" role="menu">
<li ng-repeat-start="org in orgs" role="presentation" class="dropdown-header github-org-header">
<img ng-src="{{ org.info.avatar_url }}" class="github-org-icon">{{ org.info.name }}
</li>
<li ng-repeat="repo in org.repos" class="github-repo-listing">
<a href="javascript:void(0)" ng-click="selectRepo(repo, org)"><i class="fa fa-github fa-lg"></i> {{ repo }}</a>
</li>
<li role="presentation" class="divider" ng-repeat-end ng-show="$index < orgs.length - 1"></li>
</ul>
<!-- Dropdown menu -->
<ul class="dropdown-select-menu" role="menu">
<li ng-repeat-start="org in orgs" role="presentation" class="dropdown-header github-org-header">
<img ng-src="{{ org.info.avatar_url }}" class="github-org-icon">{{ org.info.name }}
</li>
<li ng-repeat="repo in org.repos" class="github-repo-listing">
<a href="javascript:void(0)" ng-click="selectRepo(repo, org)"><i class="fa fa-github fa-lg"></i> {{ repo }}</a>
</li>
<li role="presentation" class="divider" ng-repeat-end ng-show="$index < orgs.length - 1"></li>
</ul>
</div>
</div>
<!-- Branch filter/select -->
<div class="step-view-step" complete-condition="!state.hasBranchFilter || state.branchFilter"
load-callback="loadBranches(callback)"
load-message="Loading Branches">
<div style="margin-bottom: 12px">Please choose the branches to which this trigger will apply:</div>
<div style="margin-left: 20px;">
<div class="btn-group btn-group-sm" style="margin-bottom: 12px">
<button type="button" class="btn btn-default"
ng-class="state.hasBranchFilter ? '' : 'active btn-info'" ng-click="state.hasBranchFilter = false">
All Branches
</button>
<button type="button" class="btn btn-default"
ng-class="state.hasBranchFilter ? 'active btn-info' : ''" ng-click="state.hasBranchFilter = true">
Matching Regular Expression
</button>
</div>
<div ng-show="state.hasBranchFilter" style="margin-top: 10px;">
<form>
<input class="form-control" type="text" ng-model="state.branchFilter"
placeholder="(Regular expression)" required>
</form>
<div style="margin-top: 10px">
<div ng-if="branchNames.length">
Branches:
<ul class="matching-branches">
<li ng-repeat="branchName in branchNames | limitTo:20"
class="branch-reference"
ng-class="isMatchingBranch(branchName, state.branchFilter) ? 'match' : 'not-match'">
<a href="https://github.com/{{ currentRepo.repo }}/tree/{{ branchName }}" target="_blank">
{{ branchName }}
</a>
</li>
</ul>
<span ng-if="branchNames.length > 20">...</span>
</div>
<div ng-if="state.branchFilter && !branchNames.length"
style="margin-top: 10px">
<strong>Warning:</strong> No branches found
</div>
</div>
</div>
</div>
</div>
<!-- Dockerfile folder select -->
<div class="slideinout" ng-show="currentRepo">
<div style="margin-top: 10px">Dockerfile Location:</div>
<div class="dropdown-select" placeholder="'(Repository Root)'" selected-item="currentLocation"
lookahead-items="locations" handle-input="handleLocationInput(input)" handle-item-selected="handleLocationSelected(datum)"
<div class="step-view-step" complete-condition="trigger.$ready" load-callback="loadLocations(callback)"
load-message="Loading Folders">
<div style="margin-bottom: 12px">Dockerfile Location:</div>
<div class="dropdown-select" placeholder="'(Repository Root)'" selected-item="state.currentLocation"
lookahead-items="locations" handle-input="handleLocationInput(input)"
handle-item-selected="handleLocationSelected(datum)"
allow-custom-input="true">
<!-- Icons -->
<i class="dropdown-select-icon none-icon fa fa-folder-o fa-lg" ng-show="isInvalidLocation"></i>
<i class="dropdown-select-icon none-icon fa fa-folder fa-lg" style="color: black;" ng-show="!isInvalidLocation"></i>
<i class="dropdown-select-icon none-icon fa fa-folder-o fa-lg" ng-show="state.isInvalidLocation"></i>
<i class="dropdown-select-icon none-icon fa fa-folder fa-lg" style="color: black;" ng-show="!state.isInvalidLocation"></i>
<i class="dropdown-select-icon fa fa-folder fa-lg"></i>
<!-- Dropdown menu -->
<ul class="dropdown-select-menu" role="menu">
<li ng-repeat="location in locations">
<a href="javascript:void(0)" ng-click="setLocation(location)" ng-if="!location"><i class="fa fa-github fa-lg"></i> Repository Root</a>
<a href="javascript:void(0)" ng-click="setLocation(location)" ng-if="location"><i class="fa fa-folder fa-lg"></i> {{ location }}</a>
<a href="javascript:void(0)" ng-click="setLocation(location)" ng-if="!location">
<i class="fa fa-github fa-lg"></i> Repository Root
</a>
<a href="javascript:void(0)" ng-click="setLocation(location)" ng-if="location">
<i class="fa fa-folder fa-lg"></i> {{ location }}
</a>
</li>
<li class="dropdown-header" role="presentation" ng-show="!locations.length">No Dockerfiles found in repository</li>
<li class="dropdown-header" role="presentation" ng-show="!locations.length">
No Dockerfiles found in repository
</li>
</ul>
</div>
@@ -53,10 +152,10 @@
<div class="alert alert-warning" ng-show="locationError">
{{ locationError }}
</div>
<div class="alert alert-info" ng-show="locations.length && isInvalidLocation">
<div class="alert alert-info" ng-show="locations.length && state.isInvalidLocation">
Note: The folder does not currently exist or contain a Dockerfile
</div>
</div>
<!-- /step-view -->
</div>
</div>


@@ -2988,6 +2988,28 @@ quayApp.directive('dockerAuthDialog', function (Config) {
});
quayApp.filter('regex', function() {
return function(input, regex) {
if (!regex) { return []; }
try {
var patt = new RegExp(regex);
} catch (ex) {
return [];
}
var out = [];
for (var i = 0; i < input.length; ++i){
var m = input[i].match(patt);
if (m && m[0].length == input[i].length) {
out.push(input[i]);
}
}
return out;
};
});
quayApp.filter('reverse', function() {
return function(items) {
return items.slice().reverse();
@@ -4744,6 +4766,118 @@ quayApp.directive('triggerDescription', function () {
});
quayApp.directive('stepView', function ($compile) {
var directiveDefinitionObject = {
priority: 0,
templateUrl: '/static/directives/step-view.html',
replace: true,
transclude: true,
restrict: 'C',
scope: {
'nextStepCounter': '=nextStepCounter',
'currentStepValid': '=currentStepValid',
'stepsCompleted': '&stepsCompleted'
},
controller: function($scope, $element, $rootScope) {
this.currentStepIndex = -1;
this.steps = [];
this.watcher = null;
this.getCurrentStep = function() {
return this.steps[this.currentStepIndex];
};
this.reset = function() {
this.currentStepIndex = -1;
for (var i = 0; i < this.steps.length; ++i) {
this.steps[i].element.hide();
}
$scope.currentStepValid = false;
};
this.next = function() {
if (this.currentStepIndex >= 0) {
if (!this.getCurrentStep().scope.completeCondition) {
return;
}
this.getCurrentStep().element.hide();
if (this.unwatch) {
this.unwatch();
this.unwatch = null;
}
}
this.currentStepIndex++;
if (this.currentStepIndex < this.steps.length) {
var currentStep = this.getCurrentStep();
currentStep.element.show();
currentStep.scope.load()
this.unwatch = currentStep.scope.$watch('completeCondition', function(cc) {
$scope.currentStepValid = !!cc;
});
} else {
$scope.stepsCompleted();
}
};
this.register = function(scope, element) {
element.hide();
this.steps.push({
'scope': scope,
'element': element
});
};
var that = this;
$scope.$watch('nextStepCounter', function(nsc) {
if (nsc >= 0) {
that.next();
} else {
that.reset();
}
});
}
};
return directiveDefinitionObject;
});
quayApp.directive('stepViewStep', function () {
var directiveDefinitionObject = {
priority: 1,
require: '^stepView',
templateUrl: '/static/directives/step-view-step.html',
replace: false,
transclude: true,
restrict: 'C',
scope: {
'completeCondition': '=completeCondition',
'loadCallback': '&loadCallback',
'loadMessage': '@loadMessage'
},
link: function(scope, element, attrs, controller) {
controller.register(scope, element);
},
controller: function($scope, $element) {
$scope.load = function() {
$scope.loading = true;
$scope.loadCallback({'callback': function() {
$scope.loading = false;
}});
};
}
};
return directiveDefinitionObject;
});
quayApp.directive('dropdownSelect', function ($compile) {
var directiveDefinitionObject = {
priority: 0,
@@ -4986,25 +5120,28 @@ quayApp.directive('setupTriggerDialog', function () {
controller: function($scope, $element, ApiService, UserService) {
var modalSetup = false;
$scope.state = {};
$scope.nextStepCounter = -1;
$scope.currentView = 'config';
$scope.show = function() {
if (!$scope.trigger || !$scope.repository) { return; }
$scope.activating = false;
$scope.pullEntity = null;
$scope.publicPull = true;
$scope.showPullRequirements = false;
$scope.currentView = 'config';
$('#setupTriggerModal').modal({});
if (!modalSetup) {
$('#setupTriggerModal').on('hidden.bs.modal', function () {
if (!$scope.trigger || $scope.trigger['is_active']) { return; }
$scope.nextStepCounter = -1;
$scope.$apply(function() {
$scope.cancelSetupTrigger();
});
});
modalSetup = true;
$scope.nextStepCounter = 0;
}
};
@@ -5017,27 +5154,20 @@ quayApp.directive('setupTriggerDialog', function () {
};
$scope.hide = function() {
$scope.activating = false;
$('#setupTriggerModal').modal('hide');
};
$scope.setPublicPull = function(value) {
$scope.publicPull = value;
};
$scope.checkAnalyze = function(isValid) {
$scope.currentView = 'analyzing';
$scope.pullInfo = {
'is_public': true
};
if (!isValid) {
$scope.publicPull = true;
$scope.pullEntity = null;
$scope.showPullRequirements = false;
$scope.checkingPullRequirements = false;
$scope.currentView = 'analyzed';
return;
}
$scope.checkingPullRequirements = true;
$scope.showPullRequirements = true;
$scope.pullRequirements = null;
var params = {
'repository': $scope.repository.namespace + '/' + $scope.repository.name,
'trigger_uuid': $scope.trigger.id
@@ -5048,26 +5178,20 @@ quayApp.directive('setupTriggerDialog', function () {
};
ApiService.analyzeBuildTrigger(data, params).then(function(resp) {
$scope.pullRequirements = resp;
if (resp['status'] == 'publicbase') {
$scope.publicPull = true;
$scope.pullEntity = null;
} else if (resp['namespace']) {
$scope.publicPull = false;
$scope.currentView = 'analyzed';
if (resp['status'] == 'analyzed') {
if (resp['robots'] && resp['robots'].length > 0) {
$scope.pullEntity = resp['robots'][0];
} else {
$scope.pullEntity = null;
$scope.pullInfo['pull_entity'] = resp['robots'][0];
} else {
$scope.pullInfo['pull_entity'] = null;
}
$scope.pullInfo['is_public'] = false;
}
$scope.checkingPullRequirements = false;
}, function(resp) {
$scope.pullRequirements = resp;
$scope.checkingPullRequirements = false;
});
$scope.pullInfo['analysis'] = resp;
}, ApiService.errorDisplay('Cannot load Dockerfile information'));
};
$scope.activate = function() {
@@ -5084,7 +5208,7 @@ quayApp.directive('setupTriggerDialog', function () {
data['pull_robot'] = $scope.pullEntity['name'];
}
$scope.activating = true;
$scope.currentView = 'activating';
var errorHandler = ApiService.errorDisplay('Cannot activate build trigger', function(resp) {
$scope.hide();
@@ -5125,17 +5249,99 @@ quayApp.directive('triggerSetupGithub', function () {
scope: {
'repository': '=repository',
'trigger': '=trigger',
'nextStepCounter': '=nextStepCounter',
'currentStepValid': '=currentStepValid',
'analyze': '&analyze'
},
controller: function($scope, $element, ApiService) {
$scope.analyzeCounter = 0;
$scope.setupReady = false;
$scope.loading = true;
$scope.branchNames = null;
$scope.state = {
'branchFilter': '',
'hasBranchFilter': false,
'isInvalidLocation': true,
'currentLocation': null
};
$scope.isMatchingBranch = function(branchName, filter) {
try {
var patt = new RegExp(filter);
} catch (ex) {
return false;
}
var m = branchName.match(patt);
return m && m[0].length == branchName.length;
}
$scope.stepsCompleted = function() {
$scope.analyze({'isValid': !$scope.state.isInvalidLocation});
};
$scope.loadRepositories = function(callback) {
var params = {
'repository': $scope.repository.namespace + '/' + $scope.repository.name,
'trigger_uuid': $scope.trigger.id
};
ApiService.listTriggerBuildSources(null, params).then(function(resp) {
$scope.orgs = resp['sources'];
setupTypeahead();
callback();
}, ApiService.errorDisplay('Cannot load repositories'));
};
$scope.loadBranches = function(callback) {
var params = {
'repository': $scope.repository.namespace + '/' + $scope.repository.name,
'trigger_uuid': $scope.trigger['id'],
'field_name': 'branch_name'
};
ApiService.listTriggerFieldValues($scope.trigger['config'], params).then(function(resp) {
$scope.branchNames = resp['values'];
callback();
}, ApiService.errorDisplay('Cannot load branch names'));
};
$scope.loadLocations = function(callback) {
$scope.locations = null;
var params = {
'repository': $scope.repository.namespace + '/' + $scope.repository.name,
'trigger_uuid': $scope.trigger.id
};
ApiService.listBuildTriggerSubdirs($scope.trigger['config'], params).then(function(resp) {
if (resp['status'] == 'error') {
callback(resp['message'] || 'Could not load Dockerfile locations');
return;
}
$scope.locations = resp['subdir'] || [];
// Select a default location (if any).
if ($scope.locations.length > 0) {
$scope.setLocation($scope.locations[0]);
} else {
$scope.state.currentLocation = null;
$scope.state.isInvalidLocation = resp['subdir'].indexOf('') < 0;
$scope.trigger.$ready = true;
}
callback();
}, ApiService.errorDisplay('Cannot load locations'));
}
$scope.handleLocationInput = function(location) {
$scope.state.isInvalidLocation = $scope.locations.indexOf(location) < 0;
$scope.trigger['config']['subdir'] = location || '';
$scope.isInvalidLocation = $scope.locations.indexOf(location) < 0;
$scope.analyze({'isValid': !$scope.isInvalidLocation});
$scope.trigger.$ready = true;
};
$scope.handleLocationSelected = function(datum) {
@@ -5143,10 +5349,10 @@ quayApp.directive('triggerSetupGithub', function () {
};
$scope.setLocation = function(location) {
$scope.currentLocation = location;
$scope.state.currentLocation = location;
$scope.state.isInvalidLocation = false;
$scope.trigger['config']['subdir'] = location || '';
$scope.isInvalidLocation = false;
$scope.analyze({'isValid': true});
$scope.trigger.$ready = true;
};
$scope.selectRepo = function(repo, org) {
@@ -5160,10 +5366,7 @@ quayApp.directive('triggerSetupGithub', function () {
};
$scope.selectRepoInternal = function(currentRepo) {
if (!currentRepo) {
$scope.trigger.$ready = false;
return;
}
$scope.trigger.$ready = false;
var params = {
'repository': $scope.repository.namespace + '/' + $scope.repository.name,
@@ -5175,39 +5378,6 @@ quayApp.directive('triggerSetupGithub', function () {
'build_source': repo,
'subdir': ''
};
// Lookup the possible Dockerfile locations.
$scope.locations = null;
if (repo) {
ApiService.listBuildTriggerSubdirs($scope.trigger['config'], params).then(function(resp) {
if (resp['status'] == 'error') {
$scope.locationError = resp['message'] || 'Could not load Dockerfile locations';
$scope.locations = null;
$scope.trigger.$ready = false;
$scope.isInvalidLocation = false;
$scope.analyze({'isValid': false});
return;
}
$scope.locationError = null;
$scope.locations = resp['subdir'] || [];
$scope.trigger.$ready = true;
if ($scope.locations.length > 0) {
$scope.setLocation($scope.locations[0]);
} else {
$scope.currentLocation = null;
$scope.isInvalidLocation = resp['subdir'].indexOf('') < 0;
$scope.analyze({'isValid': !$scope.isInvalidLocation});
}
}, function(resp) {
$scope.locationError = resp['message'] || 'Could not load Dockerfile locations';
$scope.locations = null;
$scope.trigger.$ready = false;
$scope.isInvalidLocation = false;
$scope.analyze({'isValid': false});
});
}
};
var setupTypeahead = function() {
@@ -5237,30 +5407,20 @@ quayApp.directive('triggerSetupGithub', function () {
$scope.repoLookahead = repos;
};
var loadSources = function() {
var params = {
'repository': $scope.repository.namespace + '/' + $scope.repository.name,
'trigger_uuid': $scope.trigger.id
};
ApiService.listTriggerBuildSources(null, params).then(function(resp) {
$scope.orgs = resp['sources'];
setupTypeahead();
$scope.loading = false;
});
};
var check = function() {
if ($scope.repository && $scope.trigger) {
loadSources();
}
};
$scope.$watch('repository', check);
$scope.$watch('trigger', check);
$scope.$watch('currentRepo', function(repo) {
$scope.selectRepoInternal(repo);
if (repo) {
$scope.selectRepoInternal(repo);
}
});
$scope.$watch('state.branchFilter', function(bf) {
if (!$scope.trigger) { return; }
if ($scope.state.hasBranchFilter) {
$scope.trigger['config']['branch_regex'] = bf;
} else {
delete $scope.trigger['config']['branch_regex'];
}
});
}
};


@@ -426,7 +426,7 @@ function RepoCtrl($scope, $sanitize, Restangular, ImageMetadataService, ApiServi
});
if ($scope.currentTag) {
var squash = 'curl -f ' + Config.getHost('ACCOUNTNAME:PASSWORDORTOKEN');
var squash = 'curl -L -f ' + Config.getHost('ACCOUNTNAME:PASSWORDORTOKEN');
squash += '/c1/squash/' + namespace + '/' + name + '/' + $scope.currentTag.name;
squash += ' | docker load';

Binary file not shown.


@@ -1072,17 +1072,17 @@ class TestBuildTriggerFieldValuesSwo1PublicPublicrepo(ApiTestCase):
self._set_url(BuildTriggerFieldValues, trigger_uuid="SWO1", repository="public/publicrepo",
field_name="test_field")
def test_get_anonymous(self):
self._run_test('GET', 401, None, {})
def test_post_anonymous(self):
self._run_test('POST', 401, None, {})
def test_get_freshuser(self):
self._run_test('GET', 403, 'freshuser', {})
def test_post_freshuser(self):
self._run_test('POST', 403, 'freshuser', {})
def test_get_reader(self):
self._run_test('GET', 403, 'reader', {})
def test_post_reader(self):
self._run_test('POST', 403, 'reader', {})
def test_get_devtable(self):
self._run_test('GET', 403, 'devtable', {})
def test_post_devtable(self):
self._run_test('POST', 403, 'devtable', {})
class TestBuildTriggerFieldValuesSwo1DevtableShared(ApiTestCase):
@@ -1091,17 +1091,17 @@ class TestBuildTriggerFieldValuesSwo1DevtableShared(ApiTestCase):
self._set_url(BuildTriggerFieldValues, trigger_uuid="SWO1", repository="devtable/shared",
field_name="test_field")
def test_get_anonymous(self):
self._run_test('GET', 401, None, {})
def test_post_anonymous(self):
self._run_test('POST', 401, None, {})
def test_get_freshuser(self):
self._run_test('GET', 403, 'freshuser', {})
def test_post_freshuser(self):
self._run_test('POST', 403, 'freshuser', {})
def test_get_reader(self):
self._run_test('GET', 403, 'reader', {})
def test_post_reader(self):
self._run_test('POST', 403, 'reader', {})
def test_get_devtable(self):
self._run_test('GET', 404, 'devtable', {'config': {}})
def test_post_devtable(self):
self._run_test('POST', 404, 'devtable', {'config': {}})
class TestBuildTriggerFieldValuesSwo1BuynlargeOrgrepo(ApiTestCase):
@@ -1110,17 +1110,17 @@ class TestBuildTriggerFieldValuesSwo1BuynlargeOrgrepo(ApiTestCase):
self._set_url(BuildTriggerFieldValues, trigger_uuid="SWO1", repository="buynlarge/orgrepo",
field_name="test_field")
def test_get_anonymous(self):
self._run_test('GET', 401, None, {})
def test_post_anonymous(self):
self._run_test('POST', 401, None, {})
def test_get_freshuser(self):
self._run_test('GET', 403, 'freshuser', {})
def test_post_freshuser(self):
self._run_test('POST', 403, 'freshuser', {})
def test_get_reader(self):
self._run_test('GET', 403, 'reader', {})
def test_post_reader(self):
self._run_test('POST', 403, 'reader', {})
def test_get_devtable(self):
self._run_test('GET', 404, 'devtable', {'config': {}})
def test_post_devtable(self):
self._run_test('POST', 404, 'devtable', {'config': {}})
class TestBuildTriggerSources831cPublicPublicrepo(ApiTestCase):


@@ -2152,7 +2152,7 @@ class TestBuildTriggers(ApiTestCase):
data={'config': trigger_config})
self.assertEquals('error', analyze_json['status'])
self.assertEquals('Repository "localhost:5000/nothere/randomrepo" was not found', analyze_json['message'])
self.assertEquals('Repository "localhost:5000/nothere/randomrepo" referenced by the Dockerfile was not found', analyze_json['message'])
# Analyze the trigger's dockerfile: Sixth, dockerfile with private repo that the user cannot see.
trigger_config = {'dockerfile': 'FROM localhost:5000/randomuser/randomrepo'}
@@ -2161,7 +2161,7 @@ class TestBuildTriggers(ApiTestCase):
data={'config': trigger_config})
self.assertEquals('error', analyze_json['status'])
self.assertEquals('Repository "localhost:5000/randomuser/randomrepo" was not found', analyze_json['message'])
self.assertEquals('Repository "localhost:5000/randomuser/randomrepo" referenced by the Dockerfile was not found', analyze_json['message'])
# Analyze the trigger's dockerfile: Seventh, dockerfile with private repo that the user can see.
trigger_config = {'dockerfile': 'FROM localhost:5000/devtable/complex'}
@@ -2225,13 +2225,13 @@ class TestBuildTriggers(ApiTestCase):
expected_code=400)
# Retrieve values for a field.
result = self.getJsonResponse(BuildTriggerFieldValues,
params=dict(repository=ADMIN_ACCESS_USER + '/simple',
result = self.postJsonResponse(BuildTriggerFieldValues,
params=dict(repository=ADMIN_ACCESS_USER + '/simple',
trigger_uuid=trigger.uuid, field_name="test_field"))
self.assertEquals(result['values'], [1, 2, 3])
self.getResponse(BuildTriggerFieldValues,
self.postResponse(BuildTriggerFieldValues,
params=dict(repository=ADMIN_ACCESS_USER + '/simple',
trigger_uuid=trigger.uuid, field_name="another_field"),
expected_code = 404)


@@ -8,15 +8,22 @@ class TestStreamLayerMerger(unittest.TestCase):
def create_layer(self, **kwargs):
output = StringIO()
with tarfile.open(fileobj=output, mode='w:gz') as tar:
for filename in kwargs:
current_filename = filename
current_contents = kwargs[filename]
for current_contents in kwargs:
current_filename = kwargs[current_contents]
if current_contents is None:
if current_contents == '_':
# This is a deleted file.
current_filename = AUFS_WHITEOUT + current_filename
if current_filename.endswith('/'):
current_filename = current_filename[:-1]
parts = current_filename.split('/')
if len(parts) > 1:
current_filename = '/'.join(parts[:-1]) + '/' + AUFS_WHITEOUT + parts[-1]
else:
current_filename = AUFS_WHITEOUT + parts[-1]
current_contents = ''
info = tarfile.TarInfo(name=current_filename)
info.size = len(current_contents)
tar.addfile(info, fileobj=StringIO(current_contents))
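
For reference, the whiteout naming the reworked helper above computes, assuming AUFS_WHITEOUT is '.wh.' (the standard AUFS whiteout prefix; the constant itself is imported by this test module):

AUFS_WHITEOUT = '.wh.'  # assumed value of the imported constant

def whiteout_path(path):
    # Mirrors the deleted-file handling in create_layer above.
    parts = path.rstrip('/').split('/')
    if len(parts) > 1:
        return '/'.join(parts[:-1]) + '/' + AUFS_WHITEOUT + parts[-1]
    return AUFS_WHITEOUT + parts[-1]

assert whiteout_path('another_file') == '.wh.another_file'
assert whiteout_path('foo/another_file') == 'foo/.wh.another_file'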
@@ -40,15 +47,16 @@ class TestStreamLayerMerger(unittest.TestCase):
with tarfile.open(fileobj=StringIO(squashed), mode='r:*') as tar:
try:
member = tar.getmember(filename)
self.fail('Filename %s found' % filename)
except:
pass
except Exception as ex:
return
self.fail('Filename %s found' % filename)
def test_single_layer(self):
tar_layer = self.create_layer(
some_file = 'foo',
another_file = 'bar',
third_file = 'meh')
foo = 'some_file',
bar = 'another_file',
meh = 'third_file')
squashed = self.squash_layers([tar_layer])
@@ -58,12 +66,12 @@ class TestStreamLayerMerger(unittest.TestCase):
def test_multiple_layers(self):
second_layer = self.create_layer(
some_file = 'foo',
another_file = 'bar',
third_file = 'meh')
foo = 'some_file',
bar = 'another_file',
meh = 'third_file')
first_layer = self.create_layer(
top_file = 'top')
top = 'top_file')
squashed = self.squash_layers([first_layer, second_layer])
@@ -72,14 +80,30 @@ class TestStreamLayerMerger(unittest.TestCase):
self.assertHasFile(squashed, 'third_file', 'meh')
self.assertHasFile(squashed, 'top_file', 'top')
def test_multiple_layers_overwrite(self):
def test_multiple_layers_dot(self):
second_layer = self.create_layer(
some_file = 'foo',
another_file = 'bar',
third_file = 'meh')
foo = './some_file',
bar = 'another_file',
meh = './third_file')
first_layer = self.create_layer(
another_file = 'top')
top = 'top_file')
squashed = self.squash_layers([first_layer, second_layer])
self.assertHasFile(squashed, './some_file', 'foo')
self.assertHasFile(squashed, 'another_file', 'bar')
self.assertHasFile(squashed, './third_file', 'meh')
self.assertHasFile(squashed, 'top_file', 'top')
def test_multiple_layers_overwrite(self):
second_layer = self.create_layer(
foo = 'some_file',
bar = 'another_file',
meh = 'third_file')
first_layer = self.create_layer(
top = 'another_file')
squashed = self.squash_layers([first_layer, second_layer])
@ -87,14 +111,46 @@ class TestStreamLayerMerger(unittest.TestCase):
self.assertHasFile(squashed, 'third_file', 'meh')
self.assertHasFile(squashed, 'another_file', 'top')
def test_deleted_file(self):
def test_multiple_layers_overwrite_base_dot(self):
second_layer = self.create_layer(
some_file = 'foo',
another_file = 'bar',
third_file = 'meh')
foo = 'some_file',
bar = './another_file',
meh = 'third_file')
first_layer = self.create_layer(
another_file = None)
top = 'another_file')
squashed = self.squash_layers([first_layer, second_layer])
self.assertHasFile(squashed, 'some_file', 'foo')
self.assertHasFile(squashed, 'third_file', 'meh')
self.assertHasFile(squashed, 'another_file', 'top')
self.assertDoesNotHaveFile(squashed, './another_file')
def test_multiple_layers_overwrite_top_dot(self):
second_layer = self.create_layer(
foo = 'some_file',
bar = 'another_file',
meh = 'third_file')
first_layer = self.create_layer(
top = './another_file')
squashed = self.squash_layers([first_layer, second_layer])
self.assertHasFile(squashed, 'some_file', 'foo')
self.assertHasFile(squashed, 'third_file', 'meh')
self.assertHasFile(squashed, './another_file', 'top')
self.assertDoesNotHaveFile(squashed, 'another_file')
def test_deleted_file(self):
second_layer = self.create_layer(
foo = 'some_file',
bar = 'another_file',
meh = 'third_file')
first_layer = self.create_layer(
_ = 'another_file')
squashed = self.squash_layers([first_layer, second_layer])
@ -104,15 +160,15 @@ class TestStreamLayerMerger(unittest.TestCase):
def test_deleted_readded_file(self):
third_layer = self.create_layer(
another_file = 'bar')
bar = 'another_file')
second_layer = self.create_layer(
some_file = 'foo',
another_file = None,
third_file = 'meh')
foo = 'some_file',
_ = 'another_file',
meh = 'third_file')
first_layer = self.create_layer(
another_file = 'newagain')
newagain = 'another_file')
squashed = self.squash_layers([first_layer, second_layer, third_layer])
@ -122,22 +178,164 @@ class TestStreamLayerMerger(unittest.TestCase):
def test_deleted_in_lower_layer(self):
third_layer = self.create_layer(
another_file = 'bar')
bar = 'deleted_file')
second_layer = self.create_layer(
some_file = 'foo',
another_file = None,
third_file = 'meh')
foo = 'some_file',
_ = 'deleted_file',
meh = 'third_file')
first_layer = self.create_layer(
top_file = 'top')
top = 'top_file')
squashed = self.squash_layers([first_layer, second_layer, third_layer])
self.assertHasFile(squashed, 'some_file', 'foo')
self.assertHasFile(squashed, 'third_file', 'meh')
self.assertHasFile(squashed, 'top_file', 'top')
self.assertDoesNotHaveFile(squashed, 'another_file')
self.assertDoesNotHaveFile(squashed, 'deleted_file')
def test_deleted_in_lower_layer_with_added_dot(self):
third_layer = self.create_layer(
something = './deleted_file')
second_layer = self.create_layer(
_ = 'deleted_file')
squashed = self.squash_layers([second_layer, third_layer])
self.assertDoesNotHaveFile(squashed, 'deleted_file')
def test_deleted_in_lower_layer_with_deleted_dot(self):
third_layer = self.create_layer(
something = './deleted_file')
second_layer = self.create_layer(
_ = './deleted_file')
squashed = self.squash_layers([second_layer, third_layer])
self.assertDoesNotHaveFile(squashed, 'deleted_file')
def test_directory(self):
second_layer = self.create_layer(
foo = 'foo/some_file',
bar = 'foo/another_file')
first_layer = self.create_layer(
top = 'foo/some_file')
squashed = self.squash_layers([first_layer, second_layer])
self.assertHasFile(squashed, 'foo/some_file', 'top')
self.assertHasFile(squashed, 'foo/another_file', 'bar')
def test_sub_directory(self):
second_layer = self.create_layer(
foo = 'foo/some_file',
bar = 'foo/bar/another_file')
first_layer = self.create_layer(
top = 'foo/some_file')
squashed = self.squash_layers([first_layer, second_layer])
self.assertHasFile(squashed, 'foo/some_file', 'top')
self.assertHasFile(squashed, 'foo/bar/another_file', 'bar')
def test_delete_directory(self):
second_layer = self.create_layer(
foo = 'foo/some_file',
bar = 'foo/another_file')
first_layer = self.create_layer(
_ = 'foo/')
squashed = self.squash_layers([first_layer, second_layer])
self.assertDoesNotHaveFile(squashed, 'foo/some_file')
self.assertDoesNotHaveFile(squashed, 'foo/another_file')
def test_delete_sub_directory(self):
second_layer = self.create_layer(
foo = 'foo/some_file',
bar = 'foo/bar/another_file')
first_layer = self.create_layer(
_ = 'foo/bar/')
squashed = self.squash_layers([first_layer, second_layer])
self.assertDoesNotHaveFile(squashed, 'foo/bar/another_file')
self.assertHasFile(squashed, 'foo/some_file', 'foo')
def test_delete_sub_directory_with_dot(self):
second_layer = self.create_layer(
foo = 'foo/some_file',
bar = 'foo/bar/another_file')
first_layer = self.create_layer(
_ = './foo/bar/')
squashed = self.squash_layers([first_layer, second_layer])
self.assertDoesNotHaveFile(squashed, 'foo/bar/another_file')
self.assertHasFile(squashed, 'foo/some_file', 'foo')
def test_delete_sub_directory_with_subdot(self):
second_layer = self.create_layer(
foo = './foo/some_file',
bar = './foo/bar/another_file')
first_layer = self.create_layer(
_ = 'foo/bar/')
squashed = self.squash_layers([first_layer, second_layer])
self.assertDoesNotHaveFile(squashed, 'foo/bar/another_file')
self.assertDoesNotHaveFile(squashed, './foo/bar/another_file')
self.assertHasFile(squashed, './foo/some_file', 'foo')
def test_delete_directory_recreate(self):
third_layer = self.create_layer(
foo = 'foo/some_file',
bar = 'foo/another_file')
second_layer = self.create_layer(
_ = 'foo/')
first_layer = self.create_layer(
baz = 'foo/some_file')
squashed = self.squash_layers([first_layer, second_layer, third_layer])
self.assertHasFile(squashed, 'foo/some_file', 'baz')
self.assertDoesNotHaveFile(squashed, 'foo/another_file')
def test_delete_directory_prefix(self):
third_layer = self.create_layer(
foo = 'foobar/some_file',
bar = 'foo/another_file')
second_layer = self.create_layer(
_ = 'foo/')
squashed = self.squash_layers([second_layer, third_layer])
self.assertHasFile(squashed, 'foobar/some_file', 'foo')
self.assertDoesNotHaveFile(squashed, 'foo/another_file')
def test_delete_directory_pre_prefix(self):
third_layer = self.create_layer(
foo = 'foobar/baz/some_file',
bar = 'foo/another_file')
second_layer = self.create_layer(
_ = 'foo/')
squashed = self.squash_layers([second_layer, third_layer])
self.assertHasFile(squashed, 'foobar/baz/some_file', 'foo')
self.assertDoesNotHaveFile(squashed, 'foo/another_file')
if __name__ == '__main__':
unittest.main()

View file

@ -1,7 +1,6 @@
from datetime import datetime, timedelta
from config import DefaultConfig
from test.testlogs import TestBuildLogs
class FakeTransaction(object):

View file

@ -4,28 +4,28 @@ AUFS_METADATA = u'.wh..wh.'
AUFS_WHITEOUT = u'.wh.'
AUFS_WHITEOUT_PREFIX_LENGTH = len(AUFS_WHITEOUT)
def is_aufs_metadata(filepath):
""" Returns whether the given filepath references an AUFS metadata file. """
filename = os.path.basename(filepath)
return filename.startswith(AUFS_METADATA) or filepath.startswith(AUFS_METADATA)
def is_aufs_metadata(absolute):
""" Returns whether the given absolute references an AUFS metadata file. """
filename = os.path.basename(absolute)
return filename.startswith(AUFS_METADATA) or absolute.startswith(AUFS_METADATA)
def get_deleted_filename(filepath):
def get_deleted_filename(absolute):
""" Returns the name of the deleted file referenced by the AUFS whiteout file at
the given path or None if the file path does not reference a whiteout file.
"""
filename = os.path.basename(filepath)
filename = os.path.basename(absolute)
if not filename.startswith(AUFS_WHITEOUT):
return None
return filename[AUFS_WHITEOUT_PREFIX_LENGTH:]
def get_deleted_prefix(filepath):
def get_deleted_prefix(absolute):
""" Returns the path prefix of the deleted file referenced by the AUFS whiteout file at
the given path or None if the file path does not reference a whiteout file.
"""
deleted_filename = get_deleted_filename(filepath)
deleted_filename = get_deleted_filename(absolute)
if deleted_filename is None:
return None
dirname = os.path.dirname(filepath)
return os.path.join('/', dirname, deleted_filename)
dirname = os.path.dirname(absolute)
return os.path.join('/', dirname, deleted_filename)[1:]
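A quick usage sketch of the helpers above (paths are illustrative; assuming they are importable from the aufs module, as streamlayerformat does below):

from aufs import is_aufs_metadata, get_deleted_filename, get_deleted_prefix

print(is_aufs_metadata(u'.wh..wh.plnk/a'))   # True: AUFS bookkeeping entry
print(get_deleted_filename(u'foo/.wh.bar'))  # 'bar'
print(get_deleted_prefix(u'foo/.wh.bar'))    # 'foo/bar' (leading '/' now stripped)
print(get_deleted_prefix(u'some_file'))      # None: not a whiteout file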

View file

@ -1,4 +1,4 @@
from util.gzipwrap import GzipWrap
from util.gzipwrap import GzipWrap, GZIP_BUFFER_SIZE
from util.streamlayerformat import StreamLayerMerger
from app import app
@ -75,7 +75,11 @@ def _import_format_generator(namespace, repository, tag, synthetic_image_id,
# If the yielded size is less than the estimated size (which is likely), fill the rest with
# zeros.
if yielded_size < estimated_file_size:
yield '\0' * (estimated_file_size - yielded_size)
to_yield = estimated_file_size - yielded_size
while to_yield > 0:
yielded = min(to_yield, GZIP_BUFFER_SIZE)
yield '\0' * yielded
to_yield -= yielded
# Yield any file padding to 512 bytes that is necessary.
yield _tar_file_padding(estimated_file_size)
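The change above avoids materializing one potentially huge string of zeros; the padding is now emitted in GZIP_BUFFER_SIZE chunks. A minimal standalone sketch of the same loop:

GZIP_BUFFER_SIZE = 1024 * 256

def yield_zero_padding(to_yield, chunk_size=GZIP_BUFFER_SIZE):
    # Emit padding in bounded chunks so peak memory stays at chunk_size,
    # no matter how large the remaining estimated size is.
    while to_yield > 0:
        yielded = min(to_yield, chunk_size)
        yield '\0' * yielded
        to_yield -= yielded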

View file

@ -1,12 +1,19 @@
from gzip import GzipFile
# 256K buffer to Gzip
GZIP_BUFFER_SIZE = 1024 * 256
class GzipWrap(object):
def __init__(self, input, filename=None, compresslevel=1):
self.input = iter(input)
self.buffer = ''
self.zipper = GzipFile(filename, mode='wb', fileobj=self, compresslevel=compresslevel)
self.is_done = False
def read(self, size=-1):
if self.is_done:
return ''
# If the buffer already has enough bytes, then simply pop them off of
# the beginning and return them.
if len(self.buffer) >= size:
@ -21,7 +28,7 @@ class GzipWrap(object):
input_size = 0
input_buffer = ''
while input_size < 1024 * 256: # 256K buffer to Gzip
while input_size < GZIP_BUFFER_SIZE:
try:
s = self.input.next()
input_buffer += s
@ -34,6 +41,8 @@ class GzipWrap(object):
if is_done:
self.zipper.flush()
self.zipper.close()
self.is_done = True
if len(self.buffer) >= size or is_done:
ret = self.buffer[0:size]
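GzipWrap leans on GzipFile's ability to write its compressed output to any object with a write() method (here, the wrapper itself). A minimal standalone illustration of that trick, with a hypothetical Capture class standing in for the wrapper's internal buffer:

from gzip import GzipFile

class Capture(object):
    """ Collects whatever GzipFile writes into it. """
    def __init__(self):
        self.chunks = []
    def write(self, data):
        self.chunks.append(data)
    def flush(self):
        pass

cap = Capture()
zipper = GzipFile(mode='wb', fileobj=cap, compresslevel=1)
zipper.write('hello world')
zipper.close()                      # Flushes and emits the gzip trailer.
compressed = ''.join(cap.chunks)    # Valid gzip bytes.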

View file

@ -3,7 +3,6 @@ import logging
import multiprocessing
import os
import time
import gipc
import sys
import traceback
@ -31,7 +30,7 @@ class QueueProcess(object):
@staticmethod
def run_process(target, args):
gipc.start_process(target=target, args=args)
Process(target=target, args=args).start()
def run(self):
# Important! gipc is used here because normal multiprocessing does not work
@ -50,9 +49,9 @@ def _run(get_producer, queues, chunk_size, args):
for queue in queues:
try:
queue.put(data, block=True, timeout=10)
queue.put(data, block=True)
except Exception as ex:
# One of the listeners stopped listening.
logger.exception('Exception writing to queue.')
return
if data is None or isinstance(data, Exception):
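The producer side above signals completion by putting None (or an Exception, on failure) onto each queue. A consumer loop in the same style might look like this sketch, where handle() is a hypothetical per-item callback:

def consume(queue):
    while True:
        data = queue.get(block=True)
        if data is None:
            break              # Producer finished normally.
        if isinstance(data, Exception):
            raise data         # Producer failed; surface the error.
        handle(data)           # Hypothetical handler for one chunk.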

View file

@ -2,72 +2,29 @@ import marisa_trie
import os
import tarfile
from aufs import is_aufs_metadata, get_deleted_prefix
from util.tarlayerformat import TarLayerFormat
AUFS_METADATA = u'.wh..wh.'
AUFS_WHITEOUT = u'.wh.'
AUFS_WHITEOUT_PREFIX_LENGTH = len(AUFS_WHITEOUT)
class StreamLayerMerger(object):
class StreamLayerMerger(TarLayerFormat):
""" Class which creates a generator of the combined TAR data for a set of Docker layers. """
def __init__(self, layer_iterator):
self.trie = marisa_trie.Trie()
self.layer_iterator = layer_iterator
self.encountered = []
super(StreamLayerMerger, self).__init__(layer_iterator)
def get_generator(self):
for current_layer in self.layer_iterator():
# Read the current layer as TAR. If it is empty, we just continue
# to the next layer.
try:
tar_file = tarfile.open(mode='r|*', fileobj=current_layer)
except tarfile.ReadError as re:
continue
self.path_trie = marisa_trie.Trie()
self.path_encountered = []
# For each of the tar entries, yield them IF and ONLY IF we have not
# encountered the path before.
self.prefix_trie = marisa_trie.Trie()
self.prefix_encountered = []
# 9MB (+ padding below) so that it matches the 10MB expected by Gzip.
chunk_size = 1024 * 1024 * 9
for tar_info in tar_file:
if not self.check_tar_info(tar_info):
continue
# Yield the tar header.
yield tar_info.tobuf()
# Try to extract any file contents for the tar. If found, we yield them as well.
if tar_info.isreg():
file_stream = tar_file.extractfile(tar_info)
if file_stream is not None:
length = 0
while True:
current_block = file_stream.read(chunk_size)
if not len(current_block):
break
yield current_block
length += len(current_block)
file_stream.close()
# Files must be padded to 512-byte multiples.
if length % 512 != 0:
yield '\0' * (512 - (length % 512))
# Close the layer stream now that we're done with it.
tar_file.close()
# Update the trie with the new encountered entries.
self.trie = marisa_trie.Trie(self.encountered)
def after_tar_layer(self, current_layer):
# Update the tries.
self.path_trie = marisa_trie.Trie(self.path_encountered)
self.prefix_trie = marisa_trie.Trie(self.prefix_encountered)
# Last two records are empty in TAR spec.
yield '\0' * 512
yield '\0' * 512
def check_tar_info(self, tar_info):
absolute = os.path.relpath(tar_info.name.decode('utf-8'), './')
@ -76,16 +33,22 @@ class StreamLayerMerger(object):
return False
# Add any prefix of deleted paths to the prefix list.
deleted_prefix = get_deleted_prefix(absolute)
if deleted_prefix is not None:
self.encountered.append(deleted_prefix)
self.prefix_encountered.append(deleted_prefix)
return False
# Check if this file has already been encountered somewhere. If so,
# skip it.
if unicode(absolute) in self.trie:
ubsolute = unicode(absolute)
if ubsolute in self.path_trie:
return False
# Check if this file is under a deleted path.
for prefix in self.prefix_trie.iter_prefixes(ubsolute):
if not os.path.relpath(ubsolute, prefix).startswith('..'):
return False
# Otherwise, add the path to the encountered list and return it.
self.encountered.append(absolute)
self.path_encountered.append(absolute)
return True
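The new prefix trie is what lets a deleted foo/ hide foo/another_file while leaving foobar/some_file alone: a recorded prefix only masks a path when os.path.relpath reaches it without an upward '..' step. A standalone sketch of that check (assuming the marisa_trie package, as imported above):

import os
import marisa_trie

prefix_trie = marisa_trie.Trie([u'foo'])  # Recorded from a deleted 'foo/' directory.

def is_under_deleted(path):
    for prefix in prefix_trie.iter_prefixes(path):
        if not os.path.relpath(path, prefix).startswith('..'):
            return True
    return False

print(is_under_deleted(u'foo/another_file'))  # True: inside the deleted directory
print(is_under_deleted(u'foobar/some_file'))  # False: 'foo' is a string prefix only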

52
util/tarfileappender.py Normal file
View file

@ -0,0 +1,52 @@
import tarfile
from cStringIO import StringIO
from util.tarlayerformat import TarLayerFormat
from util.gzipwrap import GzipWrap
class TarfileAppender(TarLayerFormat):
""" Helper class which allows for appending entries to a gzipped-tarfile and doing so
in a streaming manner.
"""
def __init__(self, base_tar_file, entries):
super(TarfileAppender, self).__init__(self._get_tar_iterator)
self.entries = entries
self.base_tar_file = base_tar_file
self.last_info = None
def get_stream(self):
return GzipWrap(self.get_generator())
def after_tar_layer(self, current_layer):
pass
def check_tar_info(self, tar_info):
self.last_info = tar_info
return True
def _get_tar_iterator(self):
# Yield the contents of the base tar.
yield self.base_tar_file
# Construct an in-memory tar containing the entries to append, and then yield
# its data.
def add_entry(arch, dir_path, contents=None):
info = tarfile.TarInfo(dir_path)
info.uid = self.last_info.uid
info.gid = self.last_info.gid
info.type = tarfile.REGTYPE if contents else tarfile.DIRTYPE
if contents:
info.size = len(contents)
arch.addfile(info, fileobj=StringIO(contents) if contents else None)
append_tarball = StringIO()
with tarfile.open(fileobj=append_tarball, mode='w') as updated_archive:
for entry in self.entries:
add_entry(updated_archive, entry, self.entries[entry])
# To make tarfile happy.
append_tarball.seek(0)
yield append_tarball
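A usage sketch for the appender (names are illustrative): entries map a path to its contents, with None marking a directory entry, and the base tar is any file-like object.

from util.tarfileappender import TarfileAppender

base = open('base.tar', 'rb')
entries = {'.git/': None, '.git/config': '[core]\n'}
appender = TarfileAppender(base, entries)

# get_generator() (from TarLayerFormat) yields the combined tar uncompressed;
# get_stream() wraps the same generator in GzipWrap for gzipped output.
with open('combined.tar', 'wb') as out:
    for chunk in appender.get_generator():
        out.write(chunk)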

70
util/tarlayerformat.py Normal file
View file

@ -0,0 +1,70 @@
import os
import tarfile
class TarLayerFormat(object):
""" Class which creates a generator of the combined TAR data. """
def __init__(self, tar_iterator):
self.tar_iterator = tar_iterator
def get_generator(self):
for current_tar in self.tar_iterator():
# Read the current TAR. If it is empty, we just continue
# to the next one.
try:
tar_file = tarfile.open(mode='r|*', fileobj=current_tar)
except tarfile.ReadError:
continue
# For each of the tar entries, yield them IF and ONLY IF we have not
# encountered the path before.
# 9MB (+ padding below) so that it matches the 10MB expected by Gzip.
chunk_size = 1024 * 1024 * 9
for tar_info in tar_file:
if not self.check_tar_info(tar_info):
continue
# Yield the tar header.
yield tar_info.tobuf()
# Try to extract any file contents for the tar. If found, we yield them as well.
if tar_info.isreg():
file_stream = tar_file.extractfile(tar_info)
if file_stream is not None:
length = 0
while True:
current_block = file_stream.read(chunk_size)
if not len(current_block):
break
yield current_block
length += len(current_block)
file_stream.close()
# Files must be padded to 512-byte multiples.
if length % 512 != 0:
yield '\0' * (512 - (length % 512))
# Close the layer stream now that we're done with it.
tar_file.close()
# Conduct any post-tar work.
self.after_tar_layer(current_tar)
# Last two records are empty in TAR spec.
yield '\0' * 512
yield '\0' * 512
def check_tar_info(self, tar_info):
""" Returns true if the current tar_info should be added to the combined tar. False
otherwise.
"""
raise NotImplementedError()
def after_tar_layer(self, current_tar):
""" Invoked after a TAR layer is added, to do any post-add work. """
raise NotImplementedError()
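A subclass only has to supply the two hooks; a minimal pass-through example (illustrative, not part of the commit):

from util.tarlayerformat import TarLayerFormat

class PassthroughTar(TarLayerFormat):
    """ Copies every entry from every tar in the iterator into one combined tar. """
    def check_tar_info(self, tar_info):
        return True   # Keep every entry.

    def after_tar_layer(self, current_tar):
        pass          # No per-tar bookkeeping needed.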

9
verbs.py Normal file
View file

@ -0,0 +1,9 @@
import logging
import logging.config
from app import app as application
from endpoints.verbs import verbs
application.register_blueprint(verbs, url_prefix='/c1')

17
web.py Normal file
View file

@ -0,0 +1,17 @@
import logging
import logging.config
from app import app as application
from endpoints.api import api_bp
from endpoints.web import web
from endpoints.webhooks import webhooks
from endpoints.realtime import realtime
from endpoints.callbacks import callback
application.register_blueprint(web)
application.register_blueprint(callback, url_prefix='/oauth2')
application.register_blueprint(api_bp, url_prefix='/api')
application.register_blueprint(webhooks, url_prefix='/webhooks')
application.register_blueprint(realtime, url_prefix='/realtime')