Merge branch 'master' into bees

This commit is contained in:
Joseph Schorr 2014-11-17 13:14:27 -05:00
commit ccc16fd6f4
245 changed files with 243608 additions and 1263 deletions

View file

@ -9,3 +9,4 @@ Bobfile
README.md README.md
requirements-nover.txt requirements-nover.txt
run-local.sh run-local.sh
.DS_Store

View file

@ -1,4 +1,4 @@
FROM phusion/baseimage:0.9.13 FROM phusion/baseimage:0.9.15
ENV DEBIAN_FRONTEND noninteractive ENV DEBIAN_FRONTEND noninteractive
ENV HOME /root ENV HOME /root
@ -7,13 +7,15 @@ ENV HOME /root
RUN apt-get update # 10SEP2014 RUN apt-get update # 10SEP2014
# New ubuntu packages should be added as their own apt-get install lines below the existing install commands # New ubuntu packages should be added as their own apt-get install lines below the existing install commands
RUN apt-get install -y git python-virtualenv python-dev libjpeg8 libjpeg62-dev libevent-dev gdebi-core g++ libmagic1 phantomjs nodejs npm libldap2-dev libsasl2-dev libpq-dev RUN apt-get install -y git python-virtualenv python-dev libjpeg8 libjpeg62 libjpeg62-dev libevent-2.0.5 libevent-dev gdebi-core g++ libmagic1 phantomjs nodejs npm libldap-2.4-2 libldap2-dev libsasl2-modules libsasl2-dev libpq5 libpq-dev
# Build the python dependencies # Build the python dependencies
ADD requirements.txt requirements.txt ADD requirements.txt requirements.txt
RUN virtualenv --distribute venv RUN virtualenv --distribute venv
RUN venv/bin/pip install -r requirements.txt RUN venv/bin/pip install -r requirements.txt
RUN apt-get remove -y --auto-remove python-dev g++ libjpeg62-dev libevent-dev libldap2-dev libsasl2-dev libpq-dev
### End common section ### ### End common section ###
RUN apt-get install -y lxc aufs-tools RUN apt-get install -y lxc aufs-tools
@ -30,6 +32,10 @@ ADD conf/init/preplogsdir.sh /etc/my_init.d/
ADD conf/init/tutumdocker /etc/service/tutumdocker ADD conf/init/tutumdocker /etc/service/tutumdocker
ADD conf/init/dockerfilebuild /etc/service/dockerfilebuild ADD conf/init/dockerfilebuild /etc/service/dockerfilebuild
RUN apt-get remove -y --auto-remove nodejs npm git phantomjs
RUN apt-get autoremove -y
RUN apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
VOLUME ["/var/lib/docker", "/var/lib/lxc", "/conf/stack", "/var/log"] VOLUME ["/var/lib/docker", "/var/lib/lxc", "/conf/stack", "/var/log"]
CMD ["/sbin/my_init"] CMD ["/sbin/my_init"]

View file

@ -1,4 +1,4 @@
FROM phusion/baseimage:0.9.13 FROM phusion/baseimage:0.9.15
ENV DEBIAN_FRONTEND noninteractive ENV DEBIAN_FRONTEND noninteractive
ENV HOME /root ENV HOME /root
@ -7,26 +7,33 @@ ENV HOME /root
RUN apt-get update # 10SEP2014 RUN apt-get update # 10SEP2014
# New ubuntu packages should be added as their own apt-get install lines below the existing install commands # New ubuntu packages should be added as their own apt-get install lines below the existing install commands
RUN apt-get install -y git python-virtualenv python-dev libjpeg8 libjpeg62-dev libevent-dev gdebi-core g++ libmagic1 phantomjs nodejs npm libldap2-dev libsasl2-dev libpq-dev RUN apt-get install -y git python-virtualenv python-dev libjpeg8 libjpeg62 libjpeg62-dev libevent-2.0.5 libevent-dev gdebi-core g++ libmagic1 phantomjs nodejs npm libldap-2.4-2 libldap2-dev libsasl2-modules libsasl2-dev libpq5 libpq-dev
# Build the python dependencies # Build the python dependencies
ADD requirements.txt requirements.txt ADD requirements.txt requirements.txt
RUN virtualenv --distribute venv RUN virtualenv --distribute venv
RUN venv/bin/pip install -r requirements.txt RUN venv/bin/pip install -r requirements.txt
RUN apt-get remove -y --auto-remove python-dev g++ libjpeg62-dev libevent-dev libldap2-dev libsasl2-dev libpq-dev
### End common section ###
# Install the binary dependencies # Install the binary dependencies
ADD binary_dependencies binary_dependencies ADD binary_dependencies binary_dependencies
RUN gdebi --n binary_dependencies/*.deb RUN gdebi --n binary_dependencies/*.deb
# Grunt # Install Grunt
RUN ln -s /usr/bin/nodejs /usr/bin/node RUN ln -s /usr/bin/nodejs /usr/bin/node
RUN npm install -g grunt-cli RUN npm install -g grunt-cli
# Install Grunt depenencies
ADD grunt grunt
RUN cd grunt && npm install
# Add all of the files! # Add all of the files!
ADD . . ADD . .
# Run grunt # Run grunt
RUN cd grunt && npm install
RUN cd grunt && grunt RUN cd grunt && grunt
ADD conf/init/svlogd_config /svlogd_config ADD conf/init/svlogd_config /svlogd_config
@ -34,7 +41,9 @@ ADD conf/init/doupdatelimits.sh /etc/my_init.d/
ADD conf/init/preplogsdir.sh /etc/my_init.d/ ADD conf/init/preplogsdir.sh /etc/my_init.d/
ADD conf/init/runmigration.sh /etc/my_init.d/ ADD conf/init/runmigration.sh /etc/my_init.d/
ADD conf/init/gunicorn /etc/service/gunicorn ADD conf/init/gunicorn_web /etc/service/gunicorn_web
ADD conf/init/gunicorn_registry /etc/service/gunicorn_registry
ADD conf/init/gunicorn_verbs /etc/service/gunicorn_verbs
ADD conf/init/nginx /etc/service/nginx ADD conf/init/nginx /etc/service/nginx
ADD conf/init/diffsworker /etc/service/diffsworker ADD conf/init/diffsworker /etc/service/diffsworker
ADD conf/init/notificationworker /etc/service/notificationworker ADD conf/init/notificationworker /etc/service/notificationworker
@ -44,6 +53,9 @@ ADD conf/init/buildlogsarchiver /etc/service/buildlogsarchiver
RUN mkdir static/fonts static/ldn RUN mkdir static/fonts static/ldn
RUN venv/bin/python -m external_libraries RUN venv/bin/python -m external_libraries
RUN apt-get autoremove -y
RUN apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
# Run the tests # Run the tests
RUN TEST=true venv/bin/python -m unittest discover RUN TEST=true venv/bin/python -m unittest discover

45
app.py
View file

@ -3,7 +3,7 @@ import os
import json import json
import yaml import yaml
from flask import Flask as BaseFlask, Config as BaseConfig from flask import Flask as BaseFlask, Config as BaseConfig, request, Request
from flask.ext.principal import Principal from flask.ext.principal import Principal
from flask.ext.login import LoginManager from flask.ext.login import LoginManager
from flask.ext.mail import Mail from flask.ext.mail import Mail
@ -18,12 +18,13 @@ from data.users import UserAuthentication
from util.analytics import Analytics from util.analytics import Analytics
from util.exceptionlog import Sentry from util.exceptionlog import Sentry
from util.queuemetrics import QueueMetrics from util.queuemetrics import QueueMetrics
from util.names import urn_generator
from util.oauth import GoogleOAuthConfig, GithubOAuthConfig
from data.billing import Billing from data.billing import Billing
from data.buildlogs import BuildLogs from data.buildlogs import BuildLogs
from data.archivedlogs import LogArchive from data.archivedlogs import LogArchive
from data.queue import WorkQueue from data.queue import WorkQueue
from data.userevent import UserEventsBuilderModule from data.userevent import UserEventsBuilderModule
from datetime import datetime
class Config(BaseConfig): class Config(BaseConfig):
@ -60,6 +61,7 @@ LICENSE_FILENAME = 'conf/stack/license.enc'
app = Flask(__name__) app = Flask(__name__)
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
profile = logging.getLogger('profile')
if 'TEST' in os.environ: if 'TEST' in os.environ:
@ -82,6 +84,37 @@ else:
environ_config = json.loads(os.environ.get(OVERRIDE_CONFIG_KEY, '{}')) environ_config = json.loads(os.environ.get(OVERRIDE_CONFIG_KEY, '{}'))
app.config.update(environ_config) app.config.update(environ_config)
app.teardown_request(database.close_db_filter)
class RequestWithId(Request):
request_gen = staticmethod(urn_generator(['request']))
def __init__(self, *args, **kwargs):
super(RequestWithId, self).__init__(*args, **kwargs)
self.request_id = self.request_gen()
@app.before_request
def _request_start():
profile.debug('Starting request: %s', request.path)
@app.after_request
def _request_end(r):
profile.debug('Ending request: %s', request.path)
return r
class InjectingFilter(logging.Filter):
def filter(self, record):
record.msg = '[%s] %s' % (request.request_id, record.msg)
return True
profile.addFilter(InjectingFilter())
app.request_class = RequestWithId
features.import_features(app.config) features.import_features(app.config)
Principal(app, use_sessions=False) Principal(app, use_sessions=False)
@ -99,15 +132,17 @@ queue_metrics = QueueMetrics(app)
authentication = UserAuthentication(app) authentication = UserAuthentication(app)
userevents = UserEventsBuilderModule(app) userevents = UserEventsBuilderModule(app)
github_login = GithubOAuthConfig(app, 'GITHUB_LOGIN_CONFIG')
github_trigger = GithubOAuthConfig(app, 'GITHUB_TRIGGER_CONFIG')
google_login = GoogleOAuthConfig(app, 'GOOGLE_LOGIN_CONFIG')
oauth_apps = [github_login, github_trigger, google_login]
tf = app.config['DB_TRANSACTION_FACTORY'] tf = app.config['DB_TRANSACTION_FACTORY']
image_diff_queue = WorkQueue(app.config['DIFFS_QUEUE_NAME'], tf) image_diff_queue = WorkQueue(app.config['DIFFS_QUEUE_NAME'], tf)
dockerfile_build_queue = WorkQueue(app.config['DOCKERFILE_BUILD_QUEUE_NAME'], tf, dockerfile_build_queue = WorkQueue(app.config['DOCKERFILE_BUILD_QUEUE_NAME'], tf,
reporter=queue_metrics.report) reporter=queue_metrics.report)
notification_queue = WorkQueue(app.config['NOTIFICATION_QUEUE_NAME'], tf) notification_queue = WorkQueue(app.config['NOTIFICATION_QUEUE_NAME'], tf)
# TODO: Remove this in the prod push following the notifications change.
webhook_queue = WorkQueue(app.config['WEBHOOK_QUEUE_NAME'], tf)
database.configure(app.config) database.configure(app.config)
model.config.app_config = app.config model.config.app_config = app.config
model.config.store = storage model.config.store = storage

View file

@ -1,88 +1,14 @@
import logging import logging
import logging.config import logging.config
import uuid
from peewee import Proxy
from app import app as application from app import app as application
from flask import request, Request
from util.names import urn_generator
from data.database import db as model_db, read_slave
# Turn off debug logging for boto
logging.getLogger('boto').setLevel(logging.CRITICAL)
from endpoints.api import api_bp
from endpoints.index import index
from endpoints.web import web
from endpoints.tags import tags
from endpoints.registry import registry
from endpoints.webhooks import webhooks
from endpoints.realtime import realtime
from endpoints.callbacks import callback
from logentries import LogentriesHandler
logger = logging.getLogger(__name__) # Bind all of the blueprints
import web
import verbs
import registry
werkzeug = logging.getLogger('werkzeug')
werkzeug.setLevel(logging.DEBUG)
profile = logging.getLogger('profile')
profile.setLevel(logging.DEBUG)
logentries_key = application.config.get('LOGENTRIES_KEY', None)
if logentries_key:
logger.debug('Initializing logentries with key: %s' % logentries_key)
werkzeug.addHandler(LogentriesHandler(logentries_key))
profile.addHandler(LogentriesHandler(logentries_key))
application.register_blueprint(web)
application.register_blueprint(callback, url_prefix='/oauth2')
application.register_blueprint(index, url_prefix='/v1')
application.register_blueprint(tags, url_prefix='/v1')
application.register_blueprint(registry, url_prefix='/v1')
application.register_blueprint(api_bp, url_prefix='/api')
application.register_blueprint(webhooks, url_prefix='/webhooks')
application.register_blueprint(realtime, url_prefix='/realtime')
class RequestWithId(Request):
request_gen = staticmethod(urn_generator(['request']))
def __init__(self, *args, **kwargs):
super(RequestWithId, self).__init__(*args, **kwargs)
self.request_id = self.request_gen()
@application.before_request
def _request_start():
profile.debug('Starting request: %s', request.path)
@application.after_request
def _request_end(r):
profile.debug('Ending request: %s', request.path)
return r
class InjectingFilter(logging.Filter):
def filter(self, record):
record.msg = '[%s] %s' % (request.request_id, record.msg)
return True
profile.addFilter(InjectingFilter())
def close_db(exc):
db = model_db
if not db.is_closed():
logger.debug('Disconnecting from database.')
db.close()
if read_slave.obj is not None and not read_slave.is_closed():
logger.debug('Disconnecting from read slave.')
read_slave.close()
application.teardown_request(close_db)
application.request_class = RequestWithId
if __name__ == '__main__': if __name__ == '__main__':
logging.config.fileConfig('conf/logging.conf', disable_existing_loggers=False) logging.config.fileConfig('conf/logging.conf', disable_existing_loggers=False)

View file

@ -1,5 +1,5 @@
bind = 'unix:/tmp/gunicorn.sock' bind = 'unix:/tmp/gunicorn_registry.sock'
workers = 16 workers = 8
worker_class = 'gevent' worker_class = 'gevent'
timeout = 2000 timeout = 2000
logconfig = 'conf/logging.conf' logconfig = 'conf/logging.conf'

6
conf/gunicorn_verbs.py Normal file
View file

@ -0,0 +1,6 @@
bind = 'unix:/tmp/gunicorn_verbs.sock'
workers = 4
timeout = 2000
logconfig = 'conf/logging.conf'
pythonpath = '.'
preload_app = True

7
conf/gunicorn_web.py Normal file
View file

@ -0,0 +1,7 @@
bind = 'unix:/tmp/gunicorn_web.sock'
workers = 2
worker_class = 'gevent'
timeout = 30
logconfig = 'conf/logging.conf'
pythonpath = '.'
preload_app = True

View file

@ -14,8 +14,12 @@ gzip_types text/plain text/xml text/css
text/javascript application/x-javascript text/javascript application/x-javascript
application/octet-stream; application/octet-stream;
upstream app_server { upstream web_app_server {
server unix:/tmp/gunicorn.sock fail_timeout=0; server unix:/tmp/gunicorn_web.sock fail_timeout=0;
# For a TCP configuration: }
# server 192.168.0.7:8000 fail_timeout=0; upstream verbs_app_server {
server unix:/tmp/gunicorn_verbs.sock fail_timeout=0;
}
upstream registry_app_server {
server unix:/tmp/gunicorn_registry.sock fail_timeout=0;
} }

View file

@ -1,2 +0,0 @@
#!/bin/sh
exec svlogd /var/log/gunicorn/

View file

@ -1,8 +0,0 @@
#! /bin/bash
echo 'Starting gunicon'
cd /
venv/bin/gunicorn -c conf/gunicorn_config.py application:application
echo 'Gunicorn exited'

View file

@ -0,0 +1,2 @@
#!/bin/sh
exec svlogd /var/log/gunicorn_registry/

View file

@ -0,0 +1,8 @@
#! /bin/bash
echo 'Starting gunicon'
cd /
venv/bin/gunicorn -c conf/gunicorn_registry.py registry:application
echo 'Gunicorn exited'

View file

@ -0,0 +1,2 @@
#!/bin/sh
exec svlogd /var/log/gunicorn_verbs/

8
conf/init/gunicorn_verbs/run Executable file
View file

@ -0,0 +1,8 @@
#! /bin/bash
echo 'Starting gunicon'
cd /
nice -10 venv/bin/gunicorn -c conf/gunicorn_verbs.py verbs:application
echo 'Gunicorn exited'

2
conf/init/gunicorn_web/log/run Executable file
View file

@ -0,0 +1,2 @@
#!/bin/sh
exec svlogd /var/log/gunicorn_web/

8
conf/init/gunicorn_web/run Executable file
View file

@ -0,0 +1,8 @@
#! /bin/bash
echo 'Starting gunicon'
cd /
venv/bin/gunicorn -c conf/gunicorn_web.py web:application
echo 'Gunicorn exited'

View file

@ -3,3 +3,6 @@ set -e
# Run the database migration # Run the database migration
PYTHONPATH=. venv/bin/alembic upgrade head PYTHONPATH=. venv/bin/alembic upgrade head
# Run the uncompressed size migration
PYTHONPATH=. venv/bin/python -m util.uncompressedsize

View file

@ -1,5 +1,5 @@
[loggers] [loggers]
keys=root, gunicorn.error, gunicorn.access, application.profiler keys=root, gunicorn.error, gunicorn.access, application.profiler, boto, werkzeug
[handlers] [handlers]
keys=console keys=console
@ -17,6 +17,18 @@ qualname=application.profiler
level=DEBUG level=DEBUG
handlers=console handlers=console
[logger_boto]
level=INFO
handlers=console
propagate=0
qualname=boto
[logger_werkzeug]
level=DEBUG
handlers=console
propagate=0
qualname=werkzeug
[logger_gunicorn.error] [logger_gunicorn.error]
level=INFO level=INFO
handlers=console handlers=console

View file

@ -13,10 +13,5 @@ http {
include server-base.conf; include server-base.conf;
listen 80 default; listen 80 default;
location /static/ {
# checks for static file, if not found proxy to app
alias /static/;
}
} }
} }

View file

@ -23,10 +23,5 @@ http {
ssl_protocols SSLv3 TLSv1; ssl_protocols SSLv3 TLSv1;
ssl_ciphers ALL:!ADH:!EXPORT56:RC4+RSA:+HIGH:+MEDIUM:+LOW:+SSLv3:+EXP; ssl_ciphers ALL:!ADH:!EXPORT56:RC4+RSA:+HIGH:+MEDIUM:+LOW:+SSLv3:+EXP;
ssl_prefer_server_ciphers on; ssl_prefer_server_ciphers on;
location /static/ {
# checks for static file, if not found proxy to app
alias /static/;
}
} }
} }

View file

@ -1,4 +1,3 @@
client_max_body_size 20G;
client_body_temp_path /var/log/nginx/client_body 1 2; client_body_temp_path /var/log/nginx/client_body 1 2;
server_name _; server_name _;
@ -11,17 +10,53 @@ if ($args ~ "_escaped_fragment_") {
rewrite ^ /snapshot$uri; rewrite ^ /snapshot$uri;
} }
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header Host $http_host;
proxy_redirect off;
proxy_set_header Transfer-Encoding $http_transfer_encoding;
location / { location / {
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; proxy_pass http://web_app_server;
proxy_set_header X-Forwarded-Proto $scheme; }
proxy_set_header Host $http_host;
proxy_redirect off; location /realtime {
proxy_pass http://web_app_server;
proxy_buffering off;
proxy_request_buffering off;
}
location /v1/ {
proxy_buffering off; proxy_buffering off;
proxy_request_buffering off; proxy_request_buffering off;
proxy_set_header Transfer-Encoding $http_transfer_encoding;
proxy_pass http://app_server; proxy_pass http://registry_app_server;
proxy_read_timeout 2000;
proxy_temp_path /var/log/nginx/proxy_temp 1 2;
client_max_body_size 20G;
}
location /c1/ {
proxy_buffering off;
proxy_request_buffering off;
proxy_pass http://verbs_app_server;
proxy_read_timeout 2000; proxy_read_timeout 2000;
proxy_temp_path /var/log/nginx/proxy_temp 1 2; proxy_temp_path /var/log/nginx/proxy_temp 1 2;
} }
location /static/ {
# checks for static file, if not found proxy to app
alias /static/;
}
location /v1/_ping {
add_header Content-Type text/plain;
add_header X-Docker-Registry-Version 0.6.0;
add_header X-Docker-Registry-Standalone 0;
return 200 'true';
}

View file

@ -15,11 +15,11 @@ def build_requests_session():
# The set of configuration key names that will be accessible in the client. Since these # The set of configuration key names that will be accessible in the client. Since these
# values are set to the frontend, DO NOT PLACE ANY SECRETS OR KEYS in this list. # values are sent to the frontend, DO NOT PLACE ANY SECRETS OR KEYS in this list.
CLIENT_WHITELIST = ['SERVER_HOSTNAME', 'PREFERRED_URL_SCHEME', 'GITHUB_CLIENT_ID', CLIENT_WHITELIST = ['SERVER_HOSTNAME', 'PREFERRED_URL_SCHEME', 'MIXPANEL_KEY',
'GITHUB_LOGIN_CLIENT_ID', 'MIXPANEL_KEY', 'STRIPE_PUBLISHABLE_KEY', 'STRIPE_PUBLISHABLE_KEY', 'ENTERPRISE_LOGO_URL', 'SENTRY_PUBLIC_DSN',
'ENTERPRISE_LOGO_URL', 'SENTRY_PUBLIC_DSN', 'AUTHENTICATION_TYPE', 'AUTHENTICATION_TYPE', 'REGISTRY_TITLE', 'REGISTRY_TITLE_SHORT',
'REGISTRY_TITLE', 'REGISTRY_TITLE_SHORT', 'GOOGLE_LOGIN_CLIENT_ID'] 'CONTACT_INFO']
def getFrontendVisibleConfig(config_dict): def getFrontendVisibleConfig(config_dict):
@ -48,6 +48,12 @@ class DefaultConfig(object):
REGISTRY_TITLE = 'Quay.io' REGISTRY_TITLE = 'Quay.io'
REGISTRY_TITLE_SHORT = 'Quay.io' REGISTRY_TITLE_SHORT = 'Quay.io'
CONTACT_INFO = [
'mailto:support@quay.io',
'irc://chat.freenode.net:6665/quayio',
'tel:+1-888-930-3475',
'https://twitter.com/quayio',
]
# Mail config # Mail config
MAIL_SERVER = '' MAIL_SERVER = ''
@ -55,7 +61,7 @@ class DefaultConfig(object):
MAIL_PORT = 587 MAIL_PORT = 587
MAIL_USERNAME = '' MAIL_USERNAME = ''
MAIL_PASSWORD = '' MAIL_PASSWORD = ''
DEFAULT_MAIL_SENDER = '' MAIL_DEFAULT_SENDER = 'support@quay.io'
MAIL_FAIL_SILENTLY = False MAIL_FAIL_SILENTLY = False
TESTING = True TESTING = True
@ -80,11 +86,11 @@ class DefaultConfig(object):
AUTHENTICATION_TYPE = 'Database' AUTHENTICATION_TYPE = 'Database'
# Build logs # Build logs
BUILDLOGS_REDIS = {'host': 'logs.quay.io'} BUILDLOGS_REDIS = {'host': 'localhost'}
BUILDLOGS_OPTIONS = [] BUILDLOGS_OPTIONS = []
# Real-time user events # Real-time user events
USER_EVENTS_REDIS = {'host': 'logs.quay.io'} USER_EVENTS_REDIS = {'host': 'localhost'}
# Stripe config # Stripe config
BILLING_TYPE = 'FakeStripe' BILLING_TYPE = 'FakeStripe'
@ -101,22 +107,11 @@ class DefaultConfig(object):
SENTRY_PUBLIC_DSN = None SENTRY_PUBLIC_DSN = None
# Github Config # Github Config
GITHUB_TOKEN_URL = 'https://github.com/login/oauth/access_token' GITHUB_LOGIN_CONFIG = None
GITHUB_USER_URL = 'https://api.github.com/user' GITHUB_TRIGGER_CONFIG = None
GITHUB_USER_EMAILS = GITHUB_USER_URL + '/emails'
GITHUB_CLIENT_ID = ''
GITHUB_CLIENT_SECRET = ''
GITHUB_LOGIN_CLIENT_ID = ''
GITHUB_LOGIN_CLIENT_SECRET = ''
# Google Config. # Google Config.
GOOGLE_TOKEN_URL = 'https://accounts.google.com/o/oauth2/token' GOOGLE_LOGIN_CONFIG = None
GOOGLE_USER_URL = 'https://www.googleapis.com/oauth2/v1/userinfo'
GOOGLE_LOGIN_CLIENT_ID = ''
GOOGLE_LOGIN_CLIENT_SECRET = ''
# Requests based HTTP client with a large request pool # Requests based HTTP client with a large request pool
HTTPCLIENT = build_requests_session() HTTPCLIENT = build_requests_session()
@ -132,9 +127,6 @@ class DefaultConfig(object):
DIFFS_QUEUE_NAME = 'imagediff' DIFFS_QUEUE_NAME = 'imagediff'
DOCKERFILE_BUILD_QUEUE_NAME = 'dockerfilebuild' DOCKERFILE_BUILD_QUEUE_NAME = 'dockerfilebuild'
# TODO: Remove this in the prod push following the notifications change.
WEBHOOK_QUEUE_NAME = 'webhook'
# Super user config. Note: This MUST BE an empty list for the default config. # Super user config. Note: This MUST BE an empty list for the default config.
SUPER_USERS = [] SUPER_USERS = []
@ -175,6 +167,9 @@ class DefaultConfig(object):
DISTRIBUTED_STORAGE_PREFERENCE = ['local_us'] DISTRIBUTED_STORAGE_PREFERENCE = ['local_us']
# Health checker.
HEALTH_CHECKER = ('LocalHealthCheck', {})
# Userfiles # Userfiles
USERFILES_LOCATION = 'local_us' USERFILES_LOCATION = 'local_us'
USERFILES_PATH = 'userfiles/' USERFILES_PATH = 'userfiles/'
@ -182,3 +177,6 @@ class DefaultConfig(object):
# Build logs archive # Build logs archive
LOG_ARCHIVE_LOCATION = 'local_us' LOG_ARCHIVE_LOCATION = 'local_us'
LOG_ARCHIVE_PATH = 'logarchive/' LOG_ARCHIVE_PATH = 'logarchive/'
# For enterprise:
MAXIMUM_REPOSITORY_USAGE = 20

View file

@ -7,9 +7,9 @@ from datetime import datetime
from peewee import * from peewee import *
from data.read_slave import ReadSlaveModel from data.read_slave import ReadSlaveModel
from sqlalchemy.engine.url import make_url from sqlalchemy.engine.url import make_url
from urlparse import urlparse
from util.names import urn_generator from util.names import urn_generator
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -35,6 +35,36 @@ class CallableProxy(Proxy):
raise AttributeError('Cannot use uninitialized Proxy.') raise AttributeError('Cannot use uninitialized Proxy.')
return self.obj(*args, **kwargs) return self.obj(*args, **kwargs)
class CloseForLongOperation(object):
""" Helper object which disconnects the database then reconnects after the nested operation
completes.
"""
def __init__(self, config_object):
self.config_object = config_object
def __enter__(self):
close_db_filter(None)
def __exit__(self, type, value, traceback):
# Note: Nothing to do. The next SQL call will reconnect automatically.
pass
class UseThenDisconnect(object):
""" Helper object for conducting work with a database and then tearing it down. """
def __init__(self, config_object):
self.config_object = config_object
def __enter__(self):
configure(self.config_object)
def __exit__(self, type, value, traceback):
close_db_filter(None)
db = Proxy() db = Proxy()
read_slave = Proxy() read_slave = Proxy()
db_random_func = CallableProxy() db_random_func = CallableProxy()
@ -56,6 +86,7 @@ def _db_from_url(url, db_kwargs):
def configure(config_object): def configure(config_object):
logger.debug('Configuring database')
db_kwargs = dict(config_object['DB_CONNECTION_ARGS']) db_kwargs = dict(config_object['DB_CONNECTION_ARGS'])
write_db_uri = config_object['DB_URI'] write_db_uri = config_object['DB_URI']
db.initialize(_db_from_url(write_db_uri, db_kwargs)) db.initialize(_db_from_url(write_db_uri, db_kwargs))
@ -80,6 +111,25 @@ def uuid_generator():
return str(uuid.uuid4()) return str(uuid.uuid4())
def close_db_filter(_):
if not db.is_closed():
logger.debug('Disconnecting from database.')
db.close()
if read_slave.obj is not None and not read_slave.is_closed():
logger.debug('Disconnecting from read slave.')
read_slave.close()
class QuayUserField(ForeignKeyField):
def __init__(self, allows_robots=False, *args, **kwargs):
self.allows_robots = allows_robots
if not 'rel_model' in kwargs:
kwargs['rel_model'] = User
super(QuayUserField, self).__init__(*args, **kwargs)
class BaseModel(ReadSlaveModel): class BaseModel(ReadSlaveModel):
class Meta: class Meta:
database = db database = db
@ -99,6 +149,19 @@ class User(BaseModel):
invalid_login_attempts = IntegerField(default=0) invalid_login_attempts = IntegerField(default=0)
last_invalid_login = DateTimeField(default=datetime.utcnow) last_invalid_login = DateTimeField(default=datetime.utcnow)
def delete_instance(self, recursive=False, delete_nullable=False):
# If we are deleting a robot account, only execute the subset of queries necessary.
if self.robot:
# For all the model dependencies, only delete those that allow robots.
for query, fk in self.dependencies(search_nullable=True):
if isinstance(fk, QuayUserField) and fk.allows_robots:
model = fk.model_class
model.delete().where(query).execute()
# Delete the instance itself.
super(User, self).delete_instance(recursive=False, delete_nullable=False)
else:
super(User, self).delete_instance(recursive=recursive, delete_nullable=delete_nullable)
class TeamRole(BaseModel): class TeamRole(BaseModel):
name = CharField(index=True) name = CharField(index=True)
@ -106,7 +169,7 @@ class TeamRole(BaseModel):
class Team(BaseModel): class Team(BaseModel):
name = CharField(index=True) name = CharField(index=True)
organization = ForeignKeyField(User, index=True) organization = QuayUserField(index=True)
role = ForeignKeyField(TeamRole) role = ForeignKeyField(TeamRole)
description = TextField(default='') description = TextField(default='')
@ -120,7 +183,7 @@ class Team(BaseModel):
class TeamMember(BaseModel): class TeamMember(BaseModel):
user = ForeignKeyField(User, index=True) user = QuayUserField(allows_robots=True, index=True)
team = ForeignKeyField(Team, index=True) team = ForeignKeyField(Team, index=True)
class Meta: class Meta:
@ -134,7 +197,7 @@ class TeamMember(BaseModel):
class TeamMemberInvite(BaseModel): class TeamMemberInvite(BaseModel):
# Note: Either user OR email will be filled in, but not both. # Note: Either user OR email will be filled in, but not both.
user = ForeignKeyField(User, index=True, null=True) user = QuayUserField(index=True, null=True)
email = CharField(null=True) email = CharField(null=True)
team = ForeignKeyField(Team, index=True) team = ForeignKeyField(Team, index=True)
inviter = ForeignKeyField(User, related_name='inviter') inviter = ForeignKeyField(User, related_name='inviter')
@ -146,7 +209,7 @@ class LoginService(BaseModel):
class FederatedLogin(BaseModel): class FederatedLogin(BaseModel):
user = ForeignKeyField(User, index=True) user = QuayUserField(allows_robots=True, index=True)
service = ForeignKeyField(LoginService, index=True) service = ForeignKeyField(LoginService, index=True)
service_ident = CharField() service_ident = CharField()
metadata_json = TextField(default='{}') metadata_json = TextField(default='{}')
@ -168,7 +231,7 @@ class Visibility(BaseModel):
class Repository(BaseModel): class Repository(BaseModel):
namespace_user = ForeignKeyField(User) namespace_user = QuayUserField(null=True)
name = CharField() name = CharField()
visibility = ForeignKeyField(Visibility) visibility = ForeignKeyField(Visibility)
description = TextField(null=True) description = TextField(null=True)
@ -182,6 +245,24 @@ class Repository(BaseModel):
(('namespace_user', 'name'), True), (('namespace_user', 'name'), True),
) )
def delete_instance(self, recursive=False, delete_nullable=False):
# Note: peewee generates extra nested deletion statements here that are slow and unnecessary.
# Therefore, we define our own deletion order here and use the dependency system to verify it.
ordered_dependencies = [RepositoryAuthorizedEmail, RepositoryTag, Image, LogEntry,
RepositoryBuild, RepositoryBuildTrigger, RepositoryNotification,
RepositoryPermission, AccessToken]
for query, fk in self.dependencies(search_nullable=True):
model = fk.model_class
if not model in ordered_dependencies:
raise Exception('Missing repository deletion dependency: %s', model)
for model in ordered_dependencies:
model.delete().where(model.repository == self).execute()
# Delete the repository itself.
super(Repository, self).delete_instance(recursive=False, delete_nullable=False)
class Role(BaseModel): class Role(BaseModel):
name = CharField(index=True, unique=True) name = CharField(index=True, unique=True)
@ -189,7 +270,7 @@ class Role(BaseModel):
class RepositoryPermission(BaseModel): class RepositoryPermission(BaseModel):
team = ForeignKeyField(Team, index=True, null=True) team = ForeignKeyField(Team, index=True, null=True)
user = ForeignKeyField(User, index=True, null=True) user = QuayUserField(allows_robots=True, index=True, null=True)
repository = ForeignKeyField(Repository, index=True) repository = ForeignKeyField(Repository, index=True)
role = ForeignKeyField(Role) role = ForeignKeyField(Role)
@ -203,12 +284,12 @@ class RepositoryPermission(BaseModel):
class PermissionPrototype(BaseModel): class PermissionPrototype(BaseModel):
org = ForeignKeyField(User, index=True, related_name='orgpermissionproto') org = QuayUserField(index=True, related_name='orgpermissionproto')
uuid = CharField(default=uuid_generator) uuid = CharField(default=uuid_generator)
activating_user = ForeignKeyField(User, index=True, null=True, activating_user = QuayUserField(allows_robots=True, index=True, null=True,
related_name='userpermissionproto') related_name='userpermissionproto')
delegate_user = ForeignKeyField(User, related_name='receivingpermission', delegate_user = QuayUserField(allows_robots=True,related_name='receivingpermission',
null=True) null=True)
delegate_team = ForeignKeyField(Team, related_name='receivingpermission', delegate_team = ForeignKeyField(Team, related_name='receivingpermission',
null=True) null=True)
role = ForeignKeyField(Role) role = ForeignKeyField(Role)
@ -221,7 +302,6 @@ class PermissionPrototype(BaseModel):
) )
class AccessToken(BaseModel): class AccessToken(BaseModel):
friendly_name = CharField(null=True) friendly_name = CharField(null=True)
code = CharField(default=random_string_generator(length=64), unique=True, code = CharField(default=random_string_generator(length=64), unique=True,
@ -240,16 +320,16 @@ class RepositoryBuildTrigger(BaseModel):
uuid = CharField(default=uuid_generator) uuid = CharField(default=uuid_generator)
service = ForeignKeyField(BuildTriggerService, index=True) service = ForeignKeyField(BuildTriggerService, index=True)
repository = ForeignKeyField(Repository, index=True) repository = ForeignKeyField(Repository, index=True)
connected_user = ForeignKeyField(User) connected_user = QuayUserField()
auth_token = CharField() auth_token = CharField()
config = TextField(default='{}') config = TextField(default='{}')
write_token = ForeignKeyField(AccessToken, null=True) write_token = ForeignKeyField(AccessToken, null=True)
pull_robot = ForeignKeyField(User, null=True, related_name='triggerpullrobot') pull_robot = QuayUserField(allows_robots=True, null=True, related_name='triggerpullrobot')
class EmailConfirmation(BaseModel): class EmailConfirmation(BaseModel):
code = CharField(default=random_string_generator(), unique=True, index=True) code = CharField(default=random_string_generator(), unique=True, index=True)
user = ForeignKeyField(User) user = QuayUserField()
pw_reset = BooleanField(default=False) pw_reset = BooleanField(default=False)
new_email = CharField(null=True) new_email = CharField(null=True)
email_confirm = BooleanField(default=False) email_confirm = BooleanField(default=False)
@ -257,7 +337,7 @@ class EmailConfirmation(BaseModel):
class ImageStorage(BaseModel): class ImageStorage(BaseModel):
uuid = CharField(default=uuid_generator) uuid = CharField(default=uuid_generator, index=True, unique=True)
checksum = CharField(null=True) checksum = CharField(null=True)
created = DateTimeField(null=True) created = DateTimeField(null=True)
comment = TextField(null=True) comment = TextField(null=True)
@ -267,6 +347,23 @@ class ImageStorage(BaseModel):
uploading = BooleanField(default=True, null=True) uploading = BooleanField(default=True, null=True)
class ImageStorageTransformation(BaseModel):
name = CharField(index=True, unique=True)
class DerivedImageStorage(BaseModel):
source = ForeignKeyField(ImageStorage, null=True, related_name='source')
derivative = ForeignKeyField(ImageStorage, related_name='derivative')
transformation = ForeignKeyField(ImageStorageTransformation)
class Meta:
database = db
read_slaves = (read_slave,)
indexes = (
(('source', 'transformation'), True),
)
class ImageStorageLocation(BaseModel): class ImageStorageLocation(BaseModel):
name = CharField(unique=True, index=True) name = CharField(unique=True, index=True)
@ -289,7 +386,7 @@ class Image(BaseModel):
# to be globally unique we can't treat them as such for permissions and # to be globally unique we can't treat them as such for permissions and
# security reasons. So rather than Repository <-> Image being many to many # security reasons. So rather than Repository <-> Image being many to many
# each image now belongs to exactly one repository. # each image now belongs to exactly one repository.
docker_image_id = CharField() docker_image_id = CharField(index=True)
repository = ForeignKeyField(Repository) repository = ForeignKeyField(Repository)
# '/' separated list of ancestory ids, e.g. /1/2/6/7/10/ # '/' separated list of ancestory ids, e.g. /1/2/6/7/10/
@ -339,7 +436,7 @@ class RepositoryBuild(BaseModel):
started = DateTimeField(default=datetime.now) started = DateTimeField(default=datetime.now)
display_name = CharField() display_name = CharField()
trigger = ForeignKeyField(RepositoryBuildTrigger, null=True, index=True) trigger = ForeignKeyField(RepositoryBuildTrigger, null=True, index=True)
pull_robot = ForeignKeyField(User, null=True, related_name='buildpullrobot') pull_robot = QuayUserField(null=True, related_name='buildpullrobot')
logs_archived = BooleanField(default=False) logs_archived = BooleanField(default=False)
@ -358,11 +455,10 @@ class LogEntryKind(BaseModel):
class LogEntry(BaseModel): class LogEntry(BaseModel):
kind = ForeignKeyField(LogEntryKind, index=True) kind = ForeignKeyField(LogEntryKind, index=True)
account = ForeignKeyField(User, index=True, related_name='account') account = QuayUserField(index=True, related_name='account')
performer = ForeignKeyField(User, index=True, null=True, performer = QuayUserField(allows_robots=True, index=True, null=True,
related_name='performer') related_name='performer')
repository = ForeignKeyField(Repository, index=True, null=True) repository = ForeignKeyField(Repository, index=True, null=True)
access_token = ForeignKeyField(AccessToken, null=True)
datetime = DateTimeField(default=datetime.now, index=True) datetime = DateTimeField(default=datetime.now, index=True)
ip = CharField(null=True) ip = CharField(null=True)
metadata_json = TextField(default='{}') metadata_json = TextField(default='{}')
@ -373,7 +469,7 @@ class OAuthApplication(BaseModel):
client_secret = CharField(default=random_string_generator(length=40)) client_secret = CharField(default=random_string_generator(length=40))
redirect_uri = CharField() redirect_uri = CharField()
application_uri = CharField() application_uri = CharField()
organization = ForeignKeyField(User) organization = QuayUserField()
name = CharField() name = CharField()
description = TextField(default='') description = TextField(default='')
@ -390,7 +486,7 @@ class OAuthAuthorizationCode(BaseModel):
class OAuthAccessToken(BaseModel): class OAuthAccessToken(BaseModel):
uuid = CharField(default=uuid_generator, index=True) uuid = CharField(default=uuid_generator, index=True)
application = ForeignKeyField(OAuthApplication) application = ForeignKeyField(OAuthApplication)
authorized_user = ForeignKeyField(User) authorized_user = QuayUserField()
scope = CharField() scope = CharField()
access_token = CharField(index=True) access_token = CharField(index=True)
token_type = CharField(default='Bearer') token_type = CharField(default='Bearer')
@ -406,7 +502,7 @@ class NotificationKind(BaseModel):
class Notification(BaseModel): class Notification(BaseModel):
uuid = CharField(default=uuid_generator, index=True) uuid = CharField(default=uuid_generator, index=True)
kind = ForeignKeyField(NotificationKind, index=True) kind = ForeignKeyField(NotificationKind, index=True)
target = ForeignKeyField(User, index=True) target = QuayUserField(index=True)
metadata_json = TextField(default='{}') metadata_json = TextField(default='{}')
created = DateTimeField(default=datetime.now, index=True) created = DateTimeField(default=datetime.now, index=True)
dismissed = BooleanField(default=False) dismissed = BooleanField(default=False)
@ -451,4 +547,5 @@ all_models = [User, Repository, Image, AccessToken, Role, RepositoryPermission,
OAuthApplication, OAuthAuthorizationCode, OAuthAccessToken, NotificationKind, OAuthApplication, OAuthAuthorizationCode, OAuthAccessToken, NotificationKind,
Notification, ImageStorageLocation, ImageStoragePlacement, Notification, ImageStorageLocation, ImageStoragePlacement,
ExternalNotificationEvent, ExternalNotificationMethod, RepositoryNotification, ExternalNotificationEvent, ExternalNotificationMethod, RepositoryNotification,
RepositoryAuthorizedEmail, TeamMemberInvite] RepositoryAuthorizedEmail, ImageStorageTransformation, DerivedImageStorage,
TeamMemberInvite]

View file

@ -1,8 +1,11 @@
from __future__ import with_statement from __future__ import with_statement
import os
from alembic import context from alembic import context
from sqlalchemy import engine_from_config, pool from sqlalchemy import engine_from_config, pool
from logging.config import fileConfig from logging.config import fileConfig
from urllib import unquote from urllib import unquote, quote
from peewee import SqliteDatabase from peewee import SqliteDatabase
from data.database import all_models, db from data.database import all_models, db
@ -12,8 +15,22 @@ from util.morecollections import AttrDict
# this is the Alembic Config object, which provides # this is the Alembic Config object, which provides
# access to the values within the .ini file in use. # access to the values within the .ini file in use.
db_uri = unquote(app.config['DB_URI'])
if 'GENMIGRATE' in os.environ:
docker_host = os.environ.get('DOCKER_HOST')
docker_host_ip = docker_host[len('tcp://'):].split(':')[0]
if os.environ.get('GENMIGRATE') == 'mysql':
db_uri = 'mysql+pymysql://root:password@%s/genschema' % (docker_host_ip)
else:
db_uri = 'postgresql://postgres@%s/genschema' % (docker_host_ip)
if 'DB_URI' in os.environ:
db_uri = os.environ['DB_URI']
app.config['DB_URI'] = db_uri
config = context.config config = context.config
config.set_main_option('sqlalchemy.url', unquote(app.config['DB_URI'])) config.set_main_option('sqlalchemy.url', db_uri)
# Interpret the config file for Python logging. # Interpret the config file for Python logging.
# This line sets up loggers basically. # This line sets up loggers basically.
@ -57,7 +74,7 @@ def run_migrations_online():
""" """
if isinstance(db.obj, SqliteDatabase): if isinstance(db.obj, SqliteDatabase) and not 'GENMIGRATE' in os.environ and not 'DB_URI' in os.environ:
print ('Skipping Sqlite migration!') print ('Skipping Sqlite migration!')
return return

82
data/migrations/migration.sh Executable file
View file

@ -0,0 +1,82 @@
set -e
up_mysql() {
# Run a SQL database on port 3306 inside of Docker.
docker run --name mysql -p 3306:3306 -e MYSQL_ROOT_PASSWORD=password -d mysql
# Sleep for 5s to get MySQL get started.
echo 'Sleeping for 10...'
sleep 10
# Add the database to mysql.
docker run --rm --link mysql:mysql mysql sh -c 'echo "create database genschema" | mysql -h"$MYSQL_PORT_3306_TCP_ADDR" -P"$MYSQL_PORT_3306_TCP_PORT" -uroot -ppassword'
}
down_mysql() {
docker kill mysql
docker rm mysql
}
up_postgres() {
# Run a SQL database on port 5432 inside of Docker.
docker run --name postgres -p 5432:5432 -d postgres
# Sleep for 5s to get SQL get started.
echo 'Sleeping for 5...'
sleep 5
# Add the database to postgres.
docker run --rm --link postgres:postgres postgres sh -c 'echo "create database genschema" | psql -h "$POSTGRES_PORT_5432_TCP_ADDR" -p "$POSTGRES_PORT_5432_TCP_PORT" -U postgres'
}
down_postgres() {
docker kill postgres
docker rm postgres
}
gen_migrate() {
# Generate a SQLite database with the schema as defined by the existing alembic model.
GENMIGRATE=$1 PYTHONPATH=. alembic upgrade head
# Generate the migration to the current model.
GENMIGRATE=$1 PYTHONPATH=. alembic revision --autogenerate -m "$2"
}
test_migrate() {
# Generate a SQLite database with the schema as defined by the existing alembic model.
GENMIGRATE=$1 PYTHONPATH=. alembic upgrade head
# Downgrade to verify it works in both directions.
COUNT=`ls data/migrations/versions/*.py | wc -l | tr -d ' '`
GENMIGRATE=$1 PYTHONPATH=. alembic downgrade "-$COUNT"
}
# Test (and generate, if requested) via MySQL.
echo '> Starting MySQL'
up_mysql
if [ ! -z "$@" ]
then
set +e
echo '> Generating Migration'
gen_migrate "mysql" "$@"
set -e
fi
echo '> Testing Migration (mysql)'
set +e
test_migrate "mysql"
set -e
down_mysql
# Test via Postgres.
echo '> Starting Postgres'
up_postgres
echo '> Testing Migration (postgres)'
set +e
test_migrate "postgres"
set -e
down_postgres

View file

@ -44,11 +44,11 @@ def downgrade(tables):
op.create_index('notificationkind_name', 'notificationkind', ['name'], unique=False) op.create_index('notificationkind_name', 'notificationkind', ['name'], unique=False)
op.drop_index('logentrykind_name', table_name='logentrykind') op.drop_index('logentrykind_name', table_name='logentrykind')
op.create_index('logentrykind_name', 'logentrykind', ['name'], unique=False) op.create_index('logentrykind_name', 'logentrykind', ['name'], unique=False)
op.add_column('image', sa.Column('created', mysql.DATETIME(), nullable=True)) op.add_column('image', sa.Column('created', sa.DateTime(), nullable=True))
op.add_column('image', sa.Column('command', mysql.LONGTEXT(), nullable=True)) op.add_column('image', sa.Column('command', sa.Text(), nullable=True))
op.add_column('image', sa.Column('image_size', mysql.BIGINT(display_width=20), nullable=True)) op.add_column('image', sa.Column('image_size', sa.BigInteger(), nullable=True))
op.add_column('image', sa.Column('checksum', mysql.VARCHAR(length=255), nullable=True)) op.add_column('image', sa.Column('checksum', sa.String(length=255), nullable=True))
op.add_column('image', sa.Column('comment', mysql.LONGTEXT(), nullable=True)) op.add_column('image', sa.Column('comment', sa.Text(), nullable=True))
op.drop_index('buildtriggerservice_name', table_name='buildtriggerservice') op.drop_index('buildtriggerservice_name', table_name='buildtriggerservice')
op.create_index('buildtriggerservice_name', 'buildtriggerservice', ['name'], unique=False) op.create_index('buildtriggerservice_name', 'buildtriggerservice', ['name'], unique=False)
### end Alembic commands ### ### end Alembic commands ###

View file

@ -0,0 +1,28 @@
"""Add log entry kind for verbs
Revision ID: 204abf14783d
Revises: 2430f55c41d5
Create Date: 2014-10-29 15:38:06.100915
"""
# revision identifiers, used by Alembic.
revision = '204abf14783d'
down_revision = '2430f55c41d5'
from alembic import op
import sqlalchemy as sa
def upgrade(tables):
op.bulk_insert(tables.logentrykind,
[
{'id': 46, 'name':'repo_verb'},
])
def downgrade(tables):
op.execute(
(tables.logentrykind.delete()
.where(tables.logentrykind.c.name == op.inline_literal('repo_verb')))
)

View file

@ -0,0 +1,24 @@
"""Calculate uncompressed sizes for all images
Revision ID: 2430f55c41d5
Revises: 3b4d3a4461dc
Create Date: 2014-10-07 14:50:04.660315
"""
# revision identifiers, used by Alembic.
revision = '2430f55c41d5'
down_revision = '3b4d3a4461dc'
from alembic import op
import sqlalchemy as sa
from util.uncompressedsize import backfill_sizes_from_data
def upgrade(tables):
# Note: Doing non-alembic operations inside alembic can cause a deadlock. This call has been
# moved to runmigration.sh.
pass
def downgrade(tables):
pass

View file

@ -0,0 +1,26 @@
"""Add an index to the docker_image_id field
Revision ID: 313d297811c4
Revises: 204abf14783d
Create Date: 2014-11-13 12:40:57.414787
"""
# revision identifiers, used by Alembic.
revision = '313d297811c4'
down_revision = '204abf14783d'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def upgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.create_index('image_docker_image_id', 'image', ['docker_image_id'], unique=False)
### end Alembic commands ###
def downgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.drop_index('image_docker_image_id', table_name='image')
### end Alembic commands ###

View file

@ -0,0 +1,49 @@
"""Add support for squashed images
Revision ID: 3b4d3a4461dc
Revises: b1d41e2071b
Create Date: 2014-10-07 14:49:13.105746
"""
# revision identifiers, used by Alembic.
revision = '3b4d3a4461dc'
down_revision = 'b1d41e2071b'
from alembic import op
import sqlalchemy as sa
def upgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.create_table('imagestoragetransformation',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=255), nullable=False),
sa.PrimaryKeyConstraint('id', name=op.f('pk_imagestoragetransformation'))
)
op.create_index('imagestoragetransformation_name', 'imagestoragetransformation', ['name'], unique=True)
op.bulk_insert(tables.imagestoragetransformation,
[
{'id':1, 'name':'squash'},
])
op.create_table('derivedimagestorage',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('source_id', sa.Integer(), nullable=True),
sa.Column('derivative_id', sa.Integer(), nullable=False),
sa.Column('transformation_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['derivative_id'], ['imagestorage.id'], name=op.f('fk_derivedimagestorage_derivative_id_imagestorage')),
sa.ForeignKeyConstraint(['source_id'], ['imagestorage.id'], name=op.f('fk_derivedimagestorage_source_id_imagestorage')),
sa.ForeignKeyConstraint(['transformation_id'], ['imagestoragetransformation.id'], name=op.f('fk_dis_transformation_id_ist')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_derivedimagestorage'))
)
op.create_index('derivedimagestorage_derivative_id', 'derivedimagestorage', ['derivative_id'], unique=False)
op.create_index('derivedimagestorage_source_id', 'derivedimagestorage', ['source_id'], unique=False)
op.create_index('derivedimagestorage_source_id_transformation_id', 'derivedimagestorage', ['source_id', 'transformation_id'], unique=True)
op.create_index('derivedimagestorage_transformation_id', 'derivedimagestorage', ['transformation_id'], unique=False)
### end Alembic commands ###
def downgrade(tables):
### commands auto generated by Alembic - please adjust! ###
op.drop_table('derivedimagestorage')
op.drop_table('imagestoragetransformation')
### end Alembic commands ###

View file

@ -16,8 +16,8 @@ import sqlalchemy as sa
def upgrade(tables): def upgrade(tables):
conn = op.get_bind() conn = op.get_bind()
conn.execute('update repository set namespace_user_id = (select id from user where user.username = repository.namespace) where namespace_user_id is NULL') user_table_name_escaped = conn.dialect.identifier_preparer.format_table(tables['user'])
conn.execute('update repository set namespace_user_id = (select id from {0} where {0}.username = repository.namespace) where namespace_user_id is NULL'.format(user_table_name_escaped))
op.create_index('repository_namespace_user_id_name', 'repository', ['namespace_user_id', 'name'], unique=True) op.create_index('repository_namespace_user_id_name', 'repository', ['namespace_user_id', 'name'], unique=True)

View file

@ -12,7 +12,6 @@ down_revision = '82297d834ad'
from alembic import op from alembic import op
import sqlalchemy as sa import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def upgrade(tables): def upgrade(tables):
op.bulk_insert(tables.logentrykind, op.bulk_insert(tables.logentrykind,

View file

@ -17,7 +17,7 @@ from sqlalchemy.dialects import mysql
def upgrade(tables): def upgrade(tables):
### commands auto generated by Alembic - please adjust! ### ### commands auto generated by Alembic - please adjust! ###
op.add_column('user', sa.Column('invalid_login_attempts', sa.Integer(), nullable=False, server_default="0")) op.add_column('user', sa.Column('invalid_login_attempts', sa.Integer(), nullable=False, server_default="0"))
op.add_column('user', sa.Column('last_invalid_login', sa.DateTime(), nullable=False, server_default=sa.func.now())) op.add_column('user', sa.Column('last_invalid_login', sa.DateTime(), nullable=False))
### end Alembic commands ### ### end Alembic commands ###

View file

@ -378,7 +378,7 @@ def upgrade(tables):
) )
op.create_index('image_ancestors', 'image', ['ancestors'], unique=False) op.create_index('image_ancestors', 'image', ['ancestors'], unique=False)
op.create_index('image_repository_id', 'image', ['repository_id'], unique=False) op.create_index('image_repository_id', 'image', ['repository_id'], unique=False)
op.create_index('image_repository_id_docker_image_id', 'image', ['repository_id', 'docker_image_id'], unique=False) op.create_index('image_repository_id_docker_image_id', 'image', ['repository_id', 'docker_image_id'], unique=True)
op.create_index('image_storage_id', 'image', ['storage_id'], unique=False) op.create_index('image_storage_id', 'image', ['storage_id'], unique=False)
op.create_table('permissionprototype', op.create_table('permissionprototype',
sa.Column('id', sa.Integer(), nullable=False), sa.Column('id', sa.Integer(), nullable=False),

View file

@ -22,7 +22,8 @@ def upgrade(tables):
def downgrade(tables): def downgrade(tables):
conn = op.get_bind() conn = op.get_bind()
conn.execute('update repository set namespace = (select username from user where user.id = repository.namespace_user_id) where namespace is NULL') user_table_name_escaped = conn.dialect.identifier_preparer.format_table(tables['user'])
conn.execute('update repository set namespace = (select username from {0} where {0}.id = repository.namespace_user_id) where namespace is NULL'.format(user_table_name_escaped))
op.create_index('repository_namespace_name', 'repository', ['namespace', 'name'], unique=True) op.create_index('repository_namespace_name', 'repository', ['namespace', 'name'], unique=True)
op.alter_column('repository', 'namespace', nullable=False, existing_type=sa.String(length=255)) op.alter_column('repository', 'namespace', nullable=False, existing_type=sa.String(length=255))

View file

@ -0,0 +1,22 @@
"""Add an index to the uuid in the image storage table.
Revision ID: b1d41e2071b
Revises: 9a1087b007d
Create Date: 2014-10-06 18:42:10.021235
"""
# revision identifiers, used by Alembic.
revision = 'b1d41e2071b'
down_revision = '9a1087b007d'
from alembic import op
import sqlalchemy as sa
def upgrade(tables):
op.create_index('imagestorage_uuid', 'imagestorage', ['uuid'], unique=True)
def downgrade(tables):
op.drop_index('imagestorage_uuid', table_name='imagestorage')

View file

@ -23,13 +23,11 @@ def upgrade(tables):
def downgrade(tables): def downgrade(tables):
### commands auto generated by Alembic - please adjust! ### ### commands auto generated by Alembic - please adjust! ###
op.create_table('webhook', op.create_table('webhook',
sa.Column('id', mysql.INTEGER(display_width=11), nullable=False), sa.Column('id', sa.Integer(), nullable=False),
sa.Column('public_id', mysql.VARCHAR(length=255), nullable=False), sa.Column('public_id', sa.String(length=255), nullable=False),
sa.Column('repository_id', mysql.INTEGER(display_width=11), autoincrement=False, nullable=False), sa.Column('repository_id', sa.Integer(), nullable=False),
sa.Column('parameters', mysql.LONGTEXT(), nullable=False), sa.Column('parameters', sa.Text(), nullable=False),
sa.ForeignKeyConstraint(['repository_id'], [u'repository.id'], name=u'fk_webhook_repository_repository_id'), sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], ),
sa.PrimaryKeyConstraint('id'), sa.PrimaryKeyConstraint('id')
mysql_default_charset=u'latin1',
mysql_engine=u'InnoDB'
) )
### end Alembic commands ### ### end Alembic commands ###

View file

@ -3,7 +3,7 @@ import logging
import dateutil.parser import dateutil.parser
import json import json
from datetime import datetime, timedelta from datetime import datetime, timedelta, date
from data.database import (User, Repository, Image, AccessToken, Role, RepositoryPermission, from data.database import (User, Repository, Image, AccessToken, Role, RepositoryPermission,
Visibility, RepositoryTag, EmailConfirmation, FederatedLogin, Visibility, RepositoryTag, EmailConfirmation, FederatedLogin,
@ -13,7 +13,8 @@ from data.database import (User, Repository, Image, AccessToken, Role, Repositor
Notification, ImageStorageLocation, ImageStoragePlacement, Notification, ImageStorageLocation, ImageStoragePlacement,
ExternalNotificationEvent, ExternalNotificationMethod, ExternalNotificationEvent, ExternalNotificationMethod,
RepositoryNotification, RepositoryAuthorizedEmail, TeamMemberInvite, RepositoryNotification, RepositoryAuthorizedEmail, TeamMemberInvite,
random_string_generator, db, BUILD_PHASE) DerivedImageStorage, ImageStorageTransformation, random_string_generator,
db, BUILD_PHASE, QuayUserField)
from peewee import JOIN_LEFT_OUTER, fn from peewee import JOIN_LEFT_OUTER, fn
from util.validation import (validate_username, validate_email, validate_password, from util.validation import (validate_username, validate_email, validate_password,
INVALID_PASSWORD_MESSAGE) INVALID_PASSWORD_MESSAGE)
@ -287,6 +288,7 @@ def delete_robot(robot_username):
try: try:
robot = User.get(username=robot_username, robot=True) robot = User.get(username=robot_username, robot=True)
robot.delete_instance(recursive=True, delete_nullable=True) robot.delete_instance(recursive=True, delete_nullable=True)
except User.DoesNotExist: except User.DoesNotExist:
raise InvalidRobotException('Could not find robot with username: %s' % raise InvalidRobotException('Could not find robot with username: %s' %
robot_username) robot_username)
@ -578,6 +580,13 @@ def get_user(username):
return None return None
def get_namespace_user(username):
try:
return User.get(User.username == username)
except User.DoesNotExist:
return None
def get_user_or_org(username): def get_user_or_org(username):
try: try:
return User.get(User.username == username, User.robot == False) return User.get(User.username == username, User.robot == False)
@ -617,7 +626,7 @@ def get_matching_users(username_prefix, robot_namespace=None,
query = (User query = (User
.select(User.username, User.robot) .select(User.username, User.robot)
.group_by(User.username) .group_by(User.username, User.robot)
.where(direct_user_query)) .where(direct_user_query))
if organization: if organization:
@ -814,8 +823,10 @@ def _filter_to_repos_for_user(query, username=None, namespace=None,
if namespace: if namespace:
where_clause = where_clause & (Namespace.username == namespace) where_clause = where_clause & (Namespace.username == namespace)
# TODO(jschorr, jake): Figure out why the old join on Visibility was so darn slow and
# remove this hack.
if include_public: if include_public:
new_clause = (Visibility.name == 'public') new_clause = (Repository.visibility == _get_public_repo_visibility())
if where_clause: if where_clause:
where_clause = where_clause | new_clause where_clause = where_clause | new_clause
else: else:
@ -824,6 +835,16 @@ def _filter_to_repos_for_user(query, username=None, namespace=None,
return query.where(where_clause) return query.where(where_clause)
_public_repo_visibility_cache = None
def _get_public_repo_visibility():
global _public_repo_visibility_cache
if not _public_repo_visibility_cache:
_public_repo_visibility_cache = Visibility.get(name='public')
return _public_repo_visibility_cache
def get_matching_repositories(repo_term, username=None): def get_matching_repositories(repo_term, username=None):
namespace_term = repo_term namespace_term = repo_term
name_term = repo_term name_term = repo_term
@ -1026,16 +1047,26 @@ def get_repository(namespace_name, repository_name):
return None return None
def get_repo_image(namespace_name, repository_name, image_id): def get_repo_image(namespace_name, repository_name, docker_image_id):
def limit_to_image_id(query): def limit_to_image_id(query):
return query.where(Image.docker_image_id == image_id) return query.where(Image.docker_image_id == docker_image_id).limit(1)
query = _get_repository_images(namespace_name, repository_name, limit_to_image_id)
try:
return query.get()
except Image.DoesNotExist:
return None
def get_repo_image_extended(namespace_name, repository_name, docker_image_id):
def limit_to_image_id(query):
return query.where(Image.docker_image_id == docker_image_id).limit(1)
images = _get_repository_images_base(namespace_name, repository_name, limit_to_image_id) images = _get_repository_images_base(namespace_name, repository_name, limit_to_image_id)
if not images: if not images:
return None return None
else:
return images[0]
return images[0]
def repository_is_public(namespace_name, repository_name): def repository_is_public(namespace_name, repository_name):
try: try:
@ -1128,53 +1159,48 @@ def __translate_ancestry(old_ancestry, translations, repository, username, prefe
if old_ancestry == '/': if old_ancestry == '/':
return '/' return '/'
def translate_id(old_id): def translate_id(old_id, docker_image_id):
logger.debug('Translating id: %s', old_id) logger.debug('Translating id: %s', old_id)
if old_id not in translations: if old_id not in translations:
# Figure out which docker_image_id the old id refers to, then find a image_in_repo = find_create_or_link_image(docker_image_id, repository, username,
# a local one
old = Image.select(Image.docker_image_id).where(Image.id == old_id).get()
image_in_repo = find_create_or_link_image(old.docker_image_id, repository, username,
translations, preferred_location) translations, preferred_location)
translations[old_id] = image_in_repo.id translations[old_id] = image_in_repo.id
return translations[old_id] return translations[old_id]
# Select all the ancestor Docker IDs in a single query.
old_ids = [int(id_str) for id_str in old_ancestry.split('/')[1:-1]] old_ids = [int(id_str) for id_str in old_ancestry.split('/')[1:-1]]
new_ids = [str(translate_id(old_id)) for old_id in old_ids] query = Image.select(Image.id, Image.docker_image_id).where(Image.id << old_ids)
old_images = {i.id: i.docker_image_id for i in query}
# Translate the old images into new ones.
new_ids = [str(translate_id(old_id, old_images[old_id])) for old_id in old_ids]
return '/%s/' % '/'.join(new_ids) return '/%s/' % '/'.join(new_ids)
def find_create_or_link_image(docker_image_id, repository, username, translations, def _create_storage(location_name):
preferred_location): storage = ImageStorage.create()
location = ImageStorageLocation.get(name=location_name)
ImageStoragePlacement.create(location=location, storage=storage)
storage.locations = {location_name}
return storage
def _find_or_link_image(existing_image, repository, username, translations, preferred_location):
# TODO(jake): This call is currently recursively done under a single transaction. Can we make
# it instead be done under a set of transactions?
with config.app_config['DB_TRANSACTION_FACTORY'](db): with config.app_config['DB_TRANSACTION_FACTORY'](db):
# Check for an existing image, under the transaction, to make sure it doesn't already exist.
repo_image = get_repo_image(repository.namespace_user.username, repository.name, repo_image = get_repo_image(repository.namespace_user.username, repository.name,
docker_image_id) existing_image.docker_image_id)
if repo_image: if repo_image:
return repo_image return repo_image
query = (Image # Make sure the existing base image still exists.
.select(Image, ImageStorage)
.distinct()
.join(ImageStorage)
.switch(Image)
.join(Repository)
.join(Visibility)
.switch(Repository)
.join(RepositoryPermission, JOIN_LEFT_OUTER)
.switch(Repository)
.join(Namespace, on=(Repository.namespace_user == Namespace.id))
.where(ImageStorage.uploading == False))
query = (_filter_to_repos_for_user(query, username)
.where(Image.docker_image_id == docker_image_id))
new_image_ancestry = '/'
origin_image_id = None
try: try:
to_copy = query.get() to_copy = Image.select().join(ImageStorage).where(Image.id == existing_image.id).get()
msg = 'Linking image to existing storage with docker id: %s and uuid: %s' msg = 'Linking image to existing storage with docker id: %s and uuid: %s'
logger.debug(msg, docker_image_id, to_copy.storage.uuid) logger.debug(msg, existing_image.docker_image_id, to_copy.storage.uuid)
new_image_ancestry = __translate_ancestry(to_copy.ancestors, translations, repository, new_image_ancestry = __translate_ancestry(to_copy.ancestors, translations, repository,
username, preferred_location) username, preferred_location)
@ -1182,28 +1208,105 @@ def find_create_or_link_image(docker_image_id, repository, username, translation
storage = to_copy.storage storage = to_copy.storage
storage.locations = {placement.location.name storage.locations = {placement.location.name
for placement in storage.imagestorageplacement_set} for placement in storage.imagestorageplacement_set}
origin_image_id = to_copy.id
new_image = Image.create(docker_image_id=existing_image.docker_image_id,
repository=repository, storage=storage,
ancestors=new_image_ancestry)
logger.debug('Storing translation %s -> %s', existing_image.id, new_image.id)
translations[existing_image.id] = new_image.id
return new_image
except Image.DoesNotExist: except Image.DoesNotExist:
logger.debug('Creating new storage for docker id: %s', docker_image_id) return None
storage = ImageStorage.create()
location = ImageStorageLocation.get(name=preferred_location)
ImageStoragePlacement.create(location=location, storage=storage)
storage.locations = {preferred_location}
logger.debug('Storage locations: %s', storage.locations)
new_image = Image.create(docker_image_id=docker_image_id,
repository=repository, storage=storage,
ancestors=new_image_ancestry)
logger.debug('new_image storage locations: %s', new_image.storage.locations)
if origin_image_id: def find_create_or_link_image(docker_image_id, repository, username, translations,
logger.debug('Storing translation %s -> %s', origin_image_id, new_image.id) preferred_location):
translations[origin_image_id] = new_image.id
return new_image # First check for the image existing in the repository. If found, we simply return it.
repo_image = get_repo_image(repository.namespace_user.username, repository.name,
docker_image_id)
if repo_image:
return repo_image
# We next check to see if there is an existing storage the new image can link to.
existing_image_query = (Image
.select(Image, ImageStorage)
.distinct()
.join(ImageStorage)
.switch(Image)
.join(Repository)
.join(RepositoryPermission, JOIN_LEFT_OUTER)
.switch(Repository)
.join(Namespace, on=(Repository.namespace_user == Namespace.id))
.where(ImageStorage.uploading == False))
existing_image_query = (_filter_to_repos_for_user(existing_image_query, username)
.where(Image.docker_image_id == docker_image_id))
# If there is an existing image, we try to translate its ancestry and copy its storage.
new_image = None
try:
logger.debug('Looking up existing image for ID: %s', docker_image_id)
existing_image = existing_image_query.get()
logger.debug('Existing image %s found for ID: %s', existing_image.id, docker_image_id)
new_image = _find_or_link_image(existing_image, repository, username, translations,
preferred_location)
if new_image:
return new_image
except Image.DoesNotExist:
logger.debug('No existing image found for ID: %s', docker_image_id)
pass
# Otherwise, create a new storage directly.
with config.app_config['DB_TRANSACTION_FACTORY'](db):
# Final check for an existing image, under the transaction.
repo_image = get_repo_image(repository.namespace_user.username, repository.name,
docker_image_id)
if repo_image:
return repo_image
logger.debug('Creating new storage for docker id: %s', docker_image_id)
storage = _create_storage(preferred_location)
return Image.create(docker_image_id=docker_image_id,
repository=repository, storage=storage,
ancestors='/')
def find_or_create_derived_storage(source, transformation_name, preferred_location):
  """ Returns the derived ImageStorage for the given source storage and transformation name,
      creating a new derived storage (placed at preferred_location) if none exists yet.

      Note: the lookup and the create are not performed under a single transaction, so two
      concurrent callers may each create a derivative — TODO confirm this is acceptable for
      cache-style storages.
  """
  try:
    found = (ImageStorage
             .select(ImageStorage, DerivedImageStorage)
             .join(DerivedImageStorage, on=(ImageStorage.id == DerivedImageStorage.derivative))
             .join(ImageStorageTransformation)
             .where(DerivedImageStorage.source == source,
                    ImageStorageTransformation.name == transformation_name)
             .get())

    # Attach the set of location names where this storage is placed, for the caller's use.
    found.locations = {placement.location.name for placement in found.imagestorageplacement_set}
    return found
  except ImageStorage.DoesNotExist:
    # Fixed typo in the log message: 'dervied' -> 'derived'.
    logger.debug('Creating storage derived from source: %s', source.uuid)
    trans = ImageStorageTransformation.get(name=transformation_name)
    new_storage = _create_storage(preferred_location)
    DerivedImageStorage.create(source=source, derivative=new_storage, transformation=trans)
    return new_storage
def delete_derived_storage_by_uuid(storage_uuid):
  """ Deletes the storage with the given UUID, but only when it exists and is registered as a
      derived storage. Unknown UUIDs and non-derivative storages are silently ignored.
  """
  # Resolve the UUID to a storage row; quietly bail out if it does not exist.
  try:
    storage_record = get_storage_by_uuid(storage_uuid)
  except InvalidImageException:
    return

  # Only storages that appear as a derivative may be removed by this helper.
  try:
    DerivedImageStorage.get(derivative=storage_record)
  except DerivedImageStorage.DoesNotExist:
    return

  storage_record.delete_instance(recursive=True)
def get_storage_by_uuid(storage_uuid): def get_storage_by_uuid(storage_uuid):
@ -1264,7 +1367,15 @@ def set_image_metadata(docker_image_id, namespace_name, repository_name, created
# We cleanup any old checksum in case it's a retry after a fail # We cleanup any old checksum in case it's a retry after a fail
fetched.storage.checksum = None fetched.storage.checksum = None
fetched.storage.created = dateutil.parser.parse(created_date_str).replace(tzinfo=None) fetched.storage.created = datetime.now()
if created_date_str is not None:
try:
fetched.storage.created = dateutil.parser.parse(created_date_str).replace(tzinfo=None)
except:
# parse raises different exceptions, so we cannot use a specific kind of handler here.
pass
fetched.storage.comment = comment fetched.storage.comment = comment
fetched.storage.command = command fetched.storage.command = command
@ -1275,6 +1386,15 @@ def set_image_metadata(docker_image_id, namespace_name, repository_name, created
fetched.storage.save() fetched.storage.save()
return fetched return fetched
def _get_repository_images(namespace_name, repository_name, query_modifier):
  """ Builds a query over all Image rows in the named repository and returns the result of
      applying the caller-supplied query_modifier to it (the query is not evaluated here).
  """
  base_query = (Image
                .select()
                .join(Repository)
                .join(Namespace, on=(Repository.namespace_user == Namespace.id))
                .where(Repository.name == repository_name,
                       Namespace.username == namespace_name))
  return query_modifier(base_query)
def _get_repository_images_base(namespace_name, repository_name, query_modifier): def _get_repository_images_base(namespace_name, repository_name, query_modifier):
query = (ImageStoragePlacement query = (ImageStoragePlacement
@ -1311,6 +1431,20 @@ def _get_repository_images_base(namespace_name, repository_name, query_modifier)
return images.values() return images.values()
def lookup_repository_images(namespace_name, repository_name, docker_image_ids):
  """ Returns a query for the images in the named repository whose docker image ID is one of
      the given IDs.
  """
  # Successive where() calls are AND-ed together by peewee, matching the original single call.
  return (Image
          .select()
          .join(Repository)
          .join(Namespace, on=(Repository.namespace_user == Namespace.id))
          .where(Repository.name == repository_name)
          .where(Namespace.username == namespace_name)
          .where(Image.docker_image_id << docker_image_ids))
def get_matching_repository_images(namespace_name, repository_name, docker_image_ids):
  """ Returns the repository's images (via _get_repository_images_base, which also loads
      storage/placement data) restricted to the given docker image IDs.
  """
  return _get_repository_images_base(namespace_name, repository_name,
                                     lambda q: q.where(Image.docker_image_id << docker_image_ids))
def get_repository_images(namespace_name, repository_name): def get_repository_images(namespace_name, repository_name):
return _get_repository_images_base(namespace_name, repository_name, lambda q: q) return _get_repository_images_base(namespace_name, repository_name, lambda q: q)
@ -1326,7 +1460,12 @@ def list_repository_tags(namespace_name, repository_name):
def garbage_collect_repository(namespace_name, repository_name): def garbage_collect_repository(namespace_name, repository_name):
storage_id_whitelist = {}
with config.app_config['DB_TRANSACTION_FACTORY'](db): with config.app_config['DB_TRANSACTION_FACTORY'](db):
# TODO (jake): We could probably select this and all the images in a single query using
# a different kind of join.
# Get a list of all images used by tags in the repository # Get a list of all images used by tags in the repository
tag_query = (RepositoryTag tag_query = (RepositoryTag
.select(RepositoryTag, Image, ImageStorage) .select(RepositoryTag, Image, ImageStorage)
@ -1345,42 +1484,110 @@ def garbage_collect_repository(namespace_name, repository_name):
referenced_anscestors = referenced_anscestors.union(set(ancestor_list)) referenced_anscestors = referenced_anscestors.union(set(ancestor_list))
referenced_anscestors.add(tag.image.id) referenced_anscestors.add(tag.image.id)
all_repo_images = get_repository_images(namespace_name, repository_name) all_repo_images = _get_repository_images(namespace_name, repository_name, lambda q: q)
all_images = {int(img.id): img for img in all_repo_images} all_images = {int(img.id): img for img in all_repo_images}
to_remove = set(all_images.keys()).difference(referenced_anscestors) to_remove = set(all_images.keys()).difference(referenced_anscestors)
logger.info('Cleaning up unreferenced images: %s', to_remove) if len(to_remove) > 0:
logger.info('Cleaning up unreferenced images: %s', to_remove)
storage_id_whitelist = {all_images[to_remove_id].storage.id for to_remove_id in to_remove}
Image.delete().where(Image.id << list(to_remove)).execute()
uuids_to_check_for_gc = set() if len(to_remove) > 0:
for image_id_to_remove in to_remove: logger.info('Garbage collecting storage for images: %s', to_remove)
image_to_remove = all_images[image_id_to_remove] garbage_collect_storage(storage_id_whitelist)
logger.debug('Adding image storage to the gc list: %s', return len(to_remove)
image_to_remove.storage.uuid)
uuids_to_check_for_gc.add(image_to_remove.storage.uuid)
image_to_remove.delete_instance()
if uuids_to_check_for_gc: def garbage_collect_storage(storage_id_whitelist):
storage_to_remove = (ImageStorage if len(storage_id_whitelist) == 0:
.select() return
.join(Image, JOIN_LEFT_OUTER)
.group_by(ImageStorage)
.where(ImageStorage.uuid << list(uuids_to_check_for_gc))
.having(fn.Count(Image.id) == 0))
for storage in storage_to_remove: def placements_query_to_paths_set(placements_query):
logger.debug('Garbage collecting image storage: %s', storage.uuid) return {(placement.location.name, config.store.image_path(placement.storage.uuid))
for placement in placements_query}
image_path = config.store.image_path(storage.uuid) def orphaned_storage_query(select_base_query, candidates, group_by):
for placement in storage.imagestorageplacement_set: return (select_base_query
location_name = placement.location.name .switch(ImageStorage)
placement.delete_instance() .join(Image, JOIN_LEFT_OUTER)
config.store.remove({location_name}, image_path) .switch(ImageStorage)
.join(DerivedImageStorage, JOIN_LEFT_OUTER,
on=(ImageStorage.id == DerivedImageStorage.derivative))
.where(ImageStorage.id << list(candidates))
.group_by(*group_by)
.having((fn.Count(Image.id) == 0) & (fn.Count(DerivedImageStorage.id) == 0)))
storage.delete_instance() # Note: We remove the derived image storage in its own transaction as a way to reduce the
# time that the transaction holds on the database indicies. This could result in a derived
# image storage being deleted for an image storage which is later reused during this time,
# but since these are caches anyway, it isn't terrible and worth the tradeoff (for now).
logger.debug('Garbage collecting derived storage from candidates: %s', storage_id_whitelist)
with config.app_config['DB_TRANSACTION_FACTORY'](db):
# Find out which derived storages will be removed, and add them to the whitelist
orphaned_from_candidates = list(orphaned_storage_query(ImageStorage.select(ImageStorage.id),
storage_id_whitelist,
(ImageStorage.id,)))
return len(to_remove) if len(orphaned_from_candidates) > 0:
derived_to_remove = (ImageStorage
.select(ImageStorage.id)
.join(DerivedImageStorage,
on=(ImageStorage.id == DerivedImageStorage.derivative))
.where(DerivedImageStorage.source << orphaned_from_candidates))
storage_id_whitelist.update({derived.id for derived in derived_to_remove})
  # Remove the derived image storages with sources of orphaned storages
(DerivedImageStorage
.delete()
.where(DerivedImageStorage.source << orphaned_from_candidates)
.execute())
# Note: Both of these deletes must occur in the same transaction (unfortunately) because a
# storage without any placement is invalid, and a placement cannot exist without a storage.
# TODO(jake): We might want to allow for null storages on placements, which would allow us to
# delete the storages, then delete the placements in a non-transaction.
logger.debug('Garbage collecting storages from candidates: %s', storage_id_whitelist)
with config.app_config['DB_TRANSACTION_FACTORY'](db):
# Track all of the data that should be removed from blob storage
placements_to_remove = orphaned_storage_query(ImageStoragePlacement
.select(ImageStoragePlacement,
ImageStorage,
ImageStorageLocation)
.join(ImageStorageLocation)
.switch(ImageStoragePlacement)
.join(ImageStorage),
storage_id_whitelist,
(ImageStorage, ImageStoragePlacement,
ImageStorageLocation))
paths_to_remove = placements_query_to_paths_set(placements_to_remove.clone())
# Remove the placements for orphaned storages
placements_subquery = list(placements_to_remove.clone().select(ImageStoragePlacement.id))
if len(placements_subquery) > 0:
(ImageStoragePlacement
.delete()
.where(ImageStoragePlacement.id << list(placements_subquery))
.execute())
# Remove the all orphaned storages
orphaned_storages = list(orphaned_storage_query(ImageStorage.select(ImageStorage.id),
storage_id_whitelist,
(ImageStorage.id,)))
if len(orphaned_storages) > 0:
(ImageStorage
.delete()
.where(ImageStorage.id << orphaned_storages)
.execute())
# We are going to make the conscious decision to not delete image storage blobs inside
# transactions.
# This may end up producing garbage in s3, trading off for higher availability in the database.
for location_name, image_path in paths_to_remove:
logger.debug('Removing %s from %s', image_path, location_name)
config.store.remove({location_name}, image_path)
def get_tag_image(namespace_name, repository_name, tag_name): def get_tag_image(namespace_name, repository_name, tag_name):
@ -1398,7 +1605,7 @@ def get_tag_image(namespace_name, repository_name, tag_name):
def get_image_by_id(namespace_name, repository_name, docker_image_id): def get_image_by_id(namespace_name, repository_name, docker_image_id):
image = get_repo_image(namespace_name, repository_name, docker_image_id) image = get_repo_image_extended(namespace_name, repository_name, docker_image_id)
if not image: if not image:
raise DataModelException('Unable to find image \'%s\' for repo \'%s/%s\'' % raise DataModelException('Unable to find image \'%s\' for repo \'%s/%s\'' %
(docker_image_id, namespace_name, repository_name)) (docker_image_id, namespace_name, repository_name))
@ -1585,7 +1792,7 @@ def purge_repository(namespace_name, repository_name):
# Delete the rest of the repository metadata # Delete the rest of the repository metadata
fetched = _get_repository(namespace_name, repository_name) fetched = _get_repository(namespace_name, repository_name)
fetched.delete_instance(recursive=True) fetched.delete_instance(recursive=True, delete_nullable=True)
def get_private_repo_count(username): def get_private_repo_count(username):
@ -1629,11 +1836,10 @@ def get_repository_delegate_tokens(namespace_name, repository_name):
def get_repo_delegate_token(namespace_name, repository_name, code): def get_repo_delegate_token(namespace_name, repository_name, code):
repo_query = get_repository_delegate_tokens(namespace_name, repository_name) repo_query = get_repository_delegate_tokens(namespace_name, repository_name)
found = list(repo_query.where(AccessToken.code == code))
if found: try:
return found[0] return repo_query.where(AccessToken.code == code).get()
else: except AccessToken.DoesNotExist:
raise InvalidTokenException('Unable to find token with code: %s' % code) raise InvalidTokenException('Unable to find token with code: %s' % code)
@ -1802,9 +2008,9 @@ def list_logs(start_time, end_time, performer=None, repository=None, namespace=N
if namespace: if namespace:
joined = joined.where(User.username == namespace) joined = joined.where(User.username == namespace)
return joined.where( return list(joined.where(
LogEntry.datetime >= start_time, LogEntry.datetime >= start_time,
LogEntry.datetime < end_time).order_by(LogEntry.datetime.desc()) LogEntry.datetime < end_time).order_by(LogEntry.datetime.desc()))
def log_action(kind_name, user_or_organization_name, performer=None, def log_action(kind_name, user_or_organization_name, performer=None,
@ -1816,7 +2022,7 @@ def log_action(kind_name, user_or_organization_name, performer=None,
kind = LogEntryKind.get(LogEntryKind.name == kind_name) kind = LogEntryKind.get(LogEntryKind.name == kind_name)
account = User.get(User.username == user_or_organization_name) account = User.get(User.username == user_or_organization_name)
LogEntry.create(kind=kind, account=account, performer=performer, LogEntry.create(kind=kind, account=account, performer=performer,
repository=repository, access_token=access_token, ip=ip, repository=repository, ip=ip,
metadata_json=json.dumps(metadata), datetime=timestamp) metadata_json=json.dumps(metadata), datetime=timestamp)
@ -2106,6 +2312,18 @@ def confirm_team_invite(code, user):
found.delete_instance() found.delete_instance()
return (team, inviter) return (team, inviter)
def get_repository_usage():
  """ Returns a count of repository-usage log entries — pull_repo and repo_verb events with a
      non-null repository, within the last four weeks — grouped by (client IP, repository).
  """
  cutoff = date.today() - timedelta(weeks=4)
  pull_kind = LogEntryKind.get(name='pull_repo')
  verb_kind = LogEntryKind.get(name='repo_verb')

  return (LogEntry.select(LogEntry.ip, LogEntry.repository)
          .where((LogEntry.kind == pull_kind) | (LogEntry.kind == verb_kind))
          .where(~(LogEntry.repository >> None))
          .where(LogEntry.datetime >= cutoff)
          .group_by(LogEntry.ip, LogEntry.repository)
          .count())
def archivable_buildlogs_query(): def archivable_buildlogs_query():
presumed_dead_date = datetime.utcnow() - PRESUMED_DEAD_BUILD_AGE presumed_dead_date = datetime.utcnow() - PRESUMED_DEAD_BUILD_AGE
return (RepositoryBuild.select() return (RepositoryBuild.select()

View file

@ -41,20 +41,20 @@ class WorkQueue(object):
return '%s%%' % self._canonical_name([self._queue_name] + self._canonical_name_match_list) return '%s%%' % self._canonical_name([self._queue_name] + self._canonical_name_match_list)
def update_metrics(self): def update_metrics(self):
with self._transaction_factory(db): if self._reporter is None:
if self._reporter is None: return
return
with self._transaction_factory(db):
now = datetime.utcnow() now = datetime.utcnow()
name_match_query = self._name_match_query() name_match_query = self._name_match_query()
running_query = self._running_jobs(now, name_match_query) running_query = self._running_jobs(now, name_match_query)
running_count =running_query.distinct().count() running_count = running_query.distinct().count()
avialable_query = self._available_jobs(now, name_match_query, running_query) avialable_query = self._available_jobs(now, name_match_query, running_query)
available_count = avialable_query.select(QueueItem.queue_name).distinct().count() available_count = avialable_query.select(QueueItem.queue_name).distinct().count()
self._reporter(self._currently_processing, running_count, running_count + available_count) self._reporter(self._currently_processing, running_count, running_count + available_count)
def put(self, canonical_name_list, message, available_after=0, retries_remaining=5): def put(self, canonical_name_list, message, available_after=0, retries_remaining=5):
""" """

View file

@ -30,7 +30,7 @@ class UserEventsBuilderModule(object):
if not redis_config: if not redis_config:
# This is the old key name. # This is the old key name.
redis_config = { redis_config = {
'host': app.config.get('USER_EVENTS_REDIS_HOSTNAME') 'host': app.config.get('USER_EVENTS_REDIS_HOSTNAME'),
} }
user_events = UserEventBuilder(redis_config) user_events = UserEventBuilder(redis_config)

View file

@ -28,8 +28,8 @@ api_bp = Blueprint('api', __name__)
api = Api() api = Api()
api.init_app(api_bp) api.init_app(api_bp)
api.decorators = [csrf_protect, api.decorators = [csrf_protect,
process_oauth, crossdomain(origin='*', headers=['Authorization', 'Content-Type']),
crossdomain(origin='*', headers=['Authorization', 'Content-Type'])] process_oauth]
class ApiException(Exception): class ApiException(Exception):
@ -91,6 +91,7 @@ def handle_api_error(error):
if error.error_type is not None: if error.error_type is not None:
response.headers['WWW-Authenticate'] = ('Bearer error="%s" error_description="%s"' % response.headers['WWW-Authenticate'] = ('Bearer error="%s" error_description="%s"' %
(error.error_type, error.error_description)) (error.error_type, error.error_description))
return response return response
@ -195,6 +196,7 @@ def query_param(name, help_str, type=reqparse.text_type, default=None,
'default': default, 'default': default,
'choices': choices, 'choices': choices,
'required': required, 'required': required,
'location': ('args')
}) })
return func return func
return add_param return add_param

View file

@ -76,7 +76,7 @@ class RepositoryImage(RepositoryParamResource):
@nickname('getImage') @nickname('getImage')
def get(self, namespace, repository, image_id): def get(self, namespace, repository, image_id):
""" Get the information available for the specified image. """ """ Get the information available for the specified image. """
image = model.get_repo_image(namespace, repository, image_id) image = model.get_repo_image_extended(namespace, repository, image_id)
if not image: if not image:
raise NotFound() raise NotFound()
@ -99,7 +99,7 @@ class RepositoryImageChanges(RepositoryParamResource):
@nickname('getImageChanges') @nickname('getImageChanges')
def get(self, namespace, repository, image_id): def get(self, namespace, repository, image_id):
""" Get the list of changes for the specified image. """ """ Get the list of changes for the specified image. """
image = model.get_repo_image(namespace, repository, image_id) image = model.get_repo_image_extended(namespace, repository, image_id)
if not image: if not image:
raise NotFound() raise NotFound()

View file

@ -52,6 +52,25 @@ def user_view(user):
'super_user': user.username in app.config['SUPER_USERS'] 'super_user': user.username in app.config['SUPER_USERS']
} }
@resource('/v1/superuser/usage/')
@internal_only
@show_if(features.SUPER_USERS)
class UsageInformation(ApiResource):
  """ Resource for returning the usage information for enterprise customers. """
  @require_fresh_login
  @nickname('getSystemUsage')
  def get(self):
    """ Returns the number of repository handles currently held. """
    # Only super users may view system-wide usage; abort() raises, ending the request here.
    if not SuperUserPermission().can():
      abort(403)

    return {
      'usage': model.get_repository_usage(),
      'allowed': app.config.get('MAXIMUM_REPOSITORY_USAGE', 20)
    }
@resource('/v1/superuser/users/') @resource('/v1/superuser/users/')
@internal_only @internal_only
@show_if(features.SUPER_USERS) @show_if(features.SUPER_USERS)

View file

@ -326,7 +326,7 @@ class BuildTriggerAnalyze(RepositoryParamResource):
if not found_repository: if not found_repository:
return { return {
'status': 'error', 'status': 'error',
'message': 'Repository "%s" was not found' % (base_image) 'message': 'Repository "%s" referenced by the Dockerfile was not found' % (base_image)
} }
# If the repository is private and the user cannot see that repo, then # If the repository is private and the user cannot see that repo, then
@ -335,7 +335,7 @@ class BuildTriggerAnalyze(RepositoryParamResource):
if found_repository.visibility.name != 'public' and not can_read: if found_repository.visibility.name != 'public' and not can_read:
return { return {
'status': 'error', 'status': 'error',
'message': 'Repository "%s" was not found' % (base_image) 'message': 'Repository "%s" referenced by the Dockerfile was not found' % (base_image)
} }
# Check to see if the repository is public. If not, we suggest the # Check to see if the repository is public. If not, we suggest the
@ -463,18 +463,18 @@ class BuildTriggerFieldValues(RepositoryParamResource):
""" Custom verb to fetch a values list for a particular field name. """ """ Custom verb to fetch a values list for a particular field name. """
@require_repo_admin @require_repo_admin
@nickname('listTriggerFieldValues') @nickname('listTriggerFieldValues')
def get(self, namespace, repository, trigger_uuid, field_name): def post(self, namespace, repository, trigger_uuid, field_name):
""" List the field values for a custom run field. """ """ List the field values for a custom run field. """
try: try:
trigger = model.get_build_trigger(namespace, repository, trigger_uuid) trigger = model.get_build_trigger(namespace, repository, trigger_uuid)
except model.InvalidBuildTriggerException: except model.InvalidBuildTriggerException:
raise NotFound() raise NotFound()
config = request.get_json() or json.loads(trigger.config)
user_permission = UserAdminPermission(trigger.connected_user.username) user_permission = UserAdminPermission(trigger.connected_user.username)
if user_permission.can(): if user_permission.can():
trigger_handler = BuildTriggerBase.get_trigger_for_service(trigger.service.name) trigger_handler = BuildTriggerBase.get_trigger_for_service(trigger.service.name)
values = trigger_handler.list_field_values(trigger.auth_token, json.loads(trigger.config), values = trigger_handler.list_field_values(trigger.auth_token, config, field_name)
field_name)
if values is None: if values is None:
raise NotFound() raise NotFound()

View file

@ -120,6 +120,10 @@ class User(ApiResource):
'type': 'string', 'type': 'string',
'description': 'The user\'s email address', 'description': 'The user\'s email address',
}, },
'invite_code': {
'type': 'string',
'description': 'The optional invite code'
}
} }
}, },
'UpdateUser': { 'UpdateUser': {
@ -239,15 +243,12 @@ class User(ApiResource):
@show_if(features.USER_CREATION) @show_if(features.USER_CREATION)
@nickname('createNewUser') @nickname('createNewUser')
@parse_args
@query_param('inviteCode', 'Invitation code given for creating the user.', type=str,
default='')
@internal_only @internal_only
@validate_json_request('NewUser') @validate_json_request('NewUser')
def post(self, args): def post(self):
""" Create a new user. """ """ Create a new user. """
user_data = request.get_json() user_data = request.get_json()
invite_code = args['inviteCode'] invite_code = user_data.get('invite_code', '')
existing_user = model.get_user(user_data['username']) existing_user = model.get_user(user_data['username'])
if existing_user: if existing_user:

View file

@ -1,10 +1,11 @@
import logging import logging
import requests
from flask import request, redirect, url_for, Blueprint from flask import request, redirect, url_for, Blueprint
from flask.ext.login import current_user from flask.ext.login import current_user
from endpoints.common import render_page_template, common_login, route_show_if from endpoints.common import render_page_template, common_login, route_show_if
from app import app, analytics, get_app_url from app import app, analytics, get_app_url, github_login, google_login, github_trigger
from data import model from data import model
from util.names import parse_repository_name from util.names import parse_repository_name
from util.validation import generate_valid_usernames from util.validation import generate_valid_usernames
@ -29,20 +30,16 @@ def render_ologin_error(service_name,
service_url=get_app_url(), service_url=get_app_url(),
user_creation=features.USER_CREATION) user_creation=features.USER_CREATION)
def exchange_code_for_token(code, service_name='GITHUB', for_login=True, form_encode=False, def exchange_code_for_token(code, service, form_encode=False, redirect_suffix=''):
redirect_suffix=''):
code = request.args.get('code') code = request.args.get('code')
id_config = service_name + '_LOGIN_CLIENT_ID' if for_login else service_name + '_CLIENT_ID'
secret_config = service_name + '_LOGIN_CLIENT_SECRET' if for_login else service_name + '_CLIENT_SECRET'
payload = { payload = {
'client_id': app.config[id_config], 'client_id': service.client_id(),
'client_secret': app.config[secret_config], 'client_secret': service.client_secret(),
'code': code, 'code': code,
'grant_type': 'authorization_code', 'grant_type': 'authorization_code',
'redirect_uri': '%s://%s/oauth2/%s/callback%s' % (app.config['PREFERRED_URL_SCHEME'], 'redirect_uri': '%s://%s/oauth2/%s/callback%s' % (app.config['PREFERRED_URL_SCHEME'],
app.config['SERVER_HOSTNAME'], app.config['SERVER_HOSTNAME'],
service_name.lower(), service.service_name().lower(),
redirect_suffix) redirect_suffix)
} }
@ -50,12 +47,11 @@ def exchange_code_for_token(code, service_name='GITHUB', for_login=True, form_en
'Accept': 'application/json' 'Accept': 'application/json'
} }
token_url = service.token_endpoint()
if form_encode: if form_encode:
get_access_token = client.post(app.config[service_name + '_TOKEN_URL'], get_access_token = client.post(token_url, data=payload, headers=headers)
data=payload, headers=headers)
else: else:
get_access_token = client.post(app.config[service_name + '_TOKEN_URL'], get_access_token = client.post(token_url, params=payload, headers=headers)
params=payload, headers=headers)
json_data = get_access_token.json() json_data = get_access_token.json()
if not json_data: if not json_data:
@ -65,25 +61,20 @@ def exchange_code_for_token(code, service_name='GITHUB', for_login=True, form_en
return token return token
def get_github_user(token): def get_user(service, token):
token_param = {
'access_token': token,
}
get_user = client.get(app.config['GITHUB_USER_URL'], params=token_param)
return get_user.json()
def get_google_user(token):
token_param = { token_param = {
'access_token': token, 'access_token': token,
'alt': 'json', 'alt': 'json',
} }
get_user = client.get(service.user_endpoint(), params=token_param)
if get_user.status_code != requests.codes.ok:
return {}
get_user = client.get(app.config['GOOGLE_USER_URL'], params=token_param)
return get_user.json() return get_user.json()
def conduct_oauth_login(service_name, user_id, username, email, metadata={}):
def conduct_oauth_login(service, user_id, username, email, metadata={}):
service_name = service.service_name()
to_login = model.verify_federated_login(service_name.lower(), user_id) to_login = model.verify_federated_login(service_name.lower(), user_id)
if not to_login: if not to_login:
# See if we can create a new user. # See if we can create a new user.
@ -93,8 +84,15 @@ def conduct_oauth_login(service_name, user_id, username, email, metadata={}):
# Try to create the user # Try to create the user
try: try:
valid = next(generate_valid_usernames(username)) new_username = None
to_login = model.create_federated_user(valid, email, service_name.lower(), for valid in generate_valid_usernames(username):
if model.get_user_or_org(valid):
continue
new_username = valid
break
to_login = model.create_federated_user(new_username, email, service_name.lower(),
user_id, set_password_notification=True, user_id, set_password_notification=True,
metadata=metadata) metadata=metadata)
@ -106,7 +104,15 @@ def conduct_oauth_login(service_name, user_id, username, email, metadata={}):
logger.debug('Aliasing with state: %s' % state) logger.debug('Aliasing with state: %s' % state)
analytics.alias(to_login.username, state) analytics.alias(to_login.username, state)
except model.DataModelException, ex: except model.InvalidEmailAddressException as ieex:
message = "The e-mail address %s is already associated " % (email, )
message = message + "with an existing %s account." % (app.config['REGISTRY_TITLE_SHORT'], )
message = message + "\nPlease log in with your username and password and "
message = message + "associate your %s account to use it in the future." % (service_name, )
return render_ologin_error(service_name, message)
except model.DataModelException as ex:
return render_ologin_error(service_name, ex.message) return render_ologin_error(service_name, ex.message)
if common_login(to_login): if common_login(to_login):
@ -130,8 +136,8 @@ def google_oauth_callback():
if error: if error:
return render_ologin_error('Google', error) return render_ologin_error('Google', error)
token = exchange_code_for_token(request.args.get('code'), service_name='GOOGLE', form_encode=True) token = exchange_code_for_token(request.args.get('code'), google_login, form_encode=True)
user_data = get_google_user(token) user_data = get_user(google_login, token)
if not user_data or not user_data.get('id', None) or not user_data.get('email', None): if not user_data or not user_data.get('id', None) or not user_data.get('email', None):
return render_ologin_error('Google') return render_ologin_error('Google')
@ -140,7 +146,7 @@ def google_oauth_callback():
'service_username': user_data['email'] 'service_username': user_data['email']
} }
return conduct_oauth_login('Google', user_data['id'], username, user_data['email'], return conduct_oauth_login(google_login, user_data['id'], username, user_data['email'],
metadata=metadata) metadata=metadata)
@ -151,8 +157,8 @@ def github_oauth_callback():
if error: if error:
return render_ologin_error('GitHub', error) return render_ologin_error('GitHub', error)
token = exchange_code_for_token(request.args.get('code'), service_name='GITHUB') token = exchange_code_for_token(request.args.get('code'), github_login)
user_data = get_github_user(token) user_data = get_user(github_login, token)
if not user_data or not 'login' in user_data: if not user_data or not 'login' in user_data:
return render_ologin_error('GitHub') return render_ologin_error('GitHub')
@ -166,7 +172,7 @@ def github_oauth_callback():
token_param = { token_param = {
'access_token': token, 'access_token': token,
} }
get_email = client.get(app.config['GITHUB_USER_EMAILS'], params=token_param, get_email = client.get(github_login.email_endpoint(), params=token_param,
headers=v3_media_type) headers=v3_media_type)
# We will accept any email, but we prefer the primary # We will accept any email, but we prefer the primary
@ -180,17 +186,17 @@ def github_oauth_callback():
'service_username': username 'service_username': username
} }
return conduct_oauth_login('github', github_id, username, found_email, metadata=metadata) return conduct_oauth_login(github_login, github_id, username, found_email, metadata=metadata)
@callback.route('/google/callback/attach', methods=['GET']) @callback.route('/google/callback/attach', methods=['GET'])
@route_show_if(features.GOOGLE_LOGIN) @route_show_if(features.GOOGLE_LOGIN)
@require_session_login @require_session_login
def google_oauth_attach(): def google_oauth_attach():
token = exchange_code_for_token(request.args.get('code'), service_name='GOOGLE', token = exchange_code_for_token(request.args.get('code'), google_login,
redirect_suffix='/attach', form_encode=True) redirect_suffix='/attach', form_encode=True)
user_data = get_google_user(token) user_data = get_user(google_login, token)
if not user_data or not user_data.get('id', None): if not user_data or not user_data.get('id', None):
return render_ologin_error('Google') return render_ologin_error('Google')
@ -216,8 +222,8 @@ def google_oauth_attach():
@route_show_if(features.GITHUB_LOGIN) @route_show_if(features.GITHUB_LOGIN)
@require_session_login @require_session_login
def github_oauth_attach(): def github_oauth_attach():
token = exchange_code_for_token(request.args.get('code'), service_name='GITHUB') token = exchange_code_for_token(request.args.get('code'), github_login)
user_data = get_github_user(token) user_data = get_user(github_login, token)
if not user_data: if not user_data:
return render_ologin_error('GitHub') return render_ologin_error('GitHub')
@ -247,8 +253,7 @@ def github_oauth_attach():
def attach_github_build_trigger(namespace, repository): def attach_github_build_trigger(namespace, repository):
permission = AdministerRepositoryPermission(namespace, repository) permission = AdministerRepositoryPermission(namespace, repository)
if permission.can(): if permission.can():
token = exchange_code_for_token(request.args.get('code'), service_name='GITHUB', token = exchange_code_for_token(request.args.get('code'), github_trigger)
for_login=False)
repo = model.get_repository(namespace, repository) repo = model.get_repository(namespace, repository)
if not repo: if not repo:
msg = 'Invalid repository: %s/%s' % (namespace, repository) msg = 'Invalid repository: %s/%s' % (namespace, repository)

View file

@ -10,7 +10,9 @@ from flask.ext.principal import identity_changed
from random import SystemRandom from random import SystemRandom
from data import model from data import model
from app import app, login_manager, dockerfile_build_queue, notification_queue from data.database import db
from app import app, login_manager, dockerfile_build_queue, notification_queue, oauth_apps
from auth.permissions import QuayDeferredPermissionUser from auth.permissions import QuayDeferredPermissionUser
from auth import scopes from auth import scopes
from endpoints.api.discovery import swagger_route_data from endpoints.api.discovery import swagger_route_data
@ -19,6 +21,7 @@ from functools import wraps
from config import getFrontendVisibleConfig from config import getFrontendVisibleConfig
from external_libraries import get_external_javascript, get_external_css from external_libraries import get_external_javascript, get_external_css
from endpoints.notificationhelper import spawn_notification from endpoints.notificationhelper import spawn_notification
from util.useremails import CannotSendEmailException
import features import features
@ -128,6 +131,10 @@ def handle_dme(ex):
logger.exception(ex) logger.exception(ex)
return make_response(json.dumps({'message': ex.message}), 400) return make_response(json.dumps({'message': ex.message}), 400)
@app.errorhandler(CannotSendEmailException)
def handle_emailexception(ex):
message = 'Could not send email. Please contact an administrator and report this problem.'
return make_response(json.dumps({'message': message}), 400)
def random_string(): def random_string():
random = SystemRandom() random = SystemRandom()
@ -170,6 +177,17 @@ def render_page_template(name, **kwargs):
external_styles = get_external_css(local=not app.config.get('USE_CDN', True)) external_styles = get_external_css(local=not app.config.get('USE_CDN', True))
external_scripts = get_external_javascript(local=not app.config.get('USE_CDN', True)) external_scripts = get_external_javascript(local=not app.config.get('USE_CDN', True))
def get_oauth_config():
oauth_config = {}
for oauth_app in oauth_apps:
oauth_config[oauth_app.key_name] = oauth_app.get_public_config()
return oauth_config
contact_href = None
if len(app.config.get('CONTACT_INFO', [])) == 1:
contact_href = app.config['CONTACT_INFO'][0]
resp = make_response(render_template(name, route_data=json.dumps(get_route_data()), resp = make_response(render_template(name, route_data=json.dumps(get_route_data()),
external_styles=external_styles, external_styles=external_styles,
external_scripts=external_scripts, external_scripts=external_scripts,
@ -179,6 +197,7 @@ def render_page_template(name, **kwargs):
library_scripts=library_scripts, library_scripts=library_scripts,
feature_set=json.dumps(features.get_features()), feature_set=json.dumps(features.get_features()),
config_set=json.dumps(getFrontendVisibleConfig(app.config)), config_set=json.dumps(getFrontendVisibleConfig(app.config)),
oauth_set=json.dumps(get_oauth_config()),
mixpanel_key=app.config.get('MIXPANEL_KEY', ''), mixpanel_key=app.config.get('MIXPANEL_KEY', ''),
google_analytics_key=app.config.get('GOOGLE_ANALYTICS_KEY', ''), google_analytics_key=app.config.get('GOOGLE_ANALYTICS_KEY', ''),
sentry_public_dsn=app.config.get('SENTRY_PUBLIC_DSN', ''), sentry_public_dsn=app.config.get('SENTRY_PUBLIC_DSN', ''),
@ -186,6 +205,7 @@ def render_page_template(name, **kwargs):
show_chat=features.OLARK_CHAT, show_chat=features.OLARK_CHAT,
cache_buster=cache_buster, cache_buster=cache_buster,
has_billing=features.BILLING, has_billing=features.BILLING,
contact_href=contact_href,
**kwargs)) **kwargs))
resp.headers['X-FRAME-OPTIONS'] = 'DENY' resp.headers['X-FRAME-OPTIONS'] = 'DENY'
@ -217,16 +237,17 @@ def start_build(repository, dockerfile_id, tags, build_name, subdir, manual,
'build_subdir': subdir 'build_subdir': subdir
} }
build_request = model.create_repository_build(repository, token, job_config, with app.config['DB_TRANSACTION_FACTORY'](db):
dockerfile_id, build_name, build_request = model.create_repository_build(repository, token, job_config,
trigger, pull_robot_name=pull_robot_name) dockerfile_id, build_name,
trigger, pull_robot_name=pull_robot_name)
dockerfile_build_queue.put([repository.namespace_user.username, repository.name], json.dumps({ dockerfile_build_queue.put([repository.namespace_user.username, repository.name], json.dumps({
'build_uuid': build_request.uuid, 'build_uuid': build_request.uuid,
'namespace': repository.namespace_user.username, 'namespace': repository.namespace_user.username,
'repository': repository.name, 'repository': repository.name,
'pull_credentials': model.get_pull_credentials(pull_robot_name) if pull_robot_name else None 'pull_credentials': model.get_pull_credentials(pull_robot_name) if pull_robot_name else None
}), retries_remaining=1) }), retries_remaining=1)
# Add the build to the repo's log. # Add the build to the repo's log.
metadata = { metadata = {
@ -261,4 +282,3 @@ def start_build(repository, dockerfile_id, tags, build_name, subdir, manual,
subpage='build?current=%s' % build_request.uuid, subpage='build?current=%s' % build_request.uuid,
pathargs=['build', build_request.uuid]) pathargs=['build', build_request.uuid])
return build_request return build_request

View file

@ -8,7 +8,7 @@ from collections import OrderedDict
from data import model from data import model
from data.model import oauth from data.model import oauth
from app import analytics, app, authentication, userevents, storage from app import app, authentication, userevents, storage
from auth.auth import process_auth from auth.auth import process_auth
from auth.auth_context import get_authenticated_user, get_validated_token, get_validated_oauth_token from auth.auth_context import get_authenticated_user, get_validated_token, get_validated_oauth_token
from util.names import parse_repository_name from util.names import parse_repository_name
@ -17,6 +17,7 @@ from auth.permissions import (ModifyRepositoryPermission, UserAdminPermission,
ReadRepositoryPermission, CreateRepositoryPermission) ReadRepositoryPermission, CreateRepositoryPermission)
from util.http import abort from util.http import abort
from endpoints.trackhelper import track_and_log
from endpoints.notificationhelper import spawn_notification from endpoints.notificationhelper import spawn_notification
import features import features
@ -70,7 +71,7 @@ def create_user():
abort(400, 'User creation is disabled. Please speak to your administrator.') abort(400, 'User creation is disabled. Please speak to your administrator.')
user_data = request.get_json() user_data = request.get_json()
if not 'username' in user_data: if not user_data or not 'username' in user_data:
abort(400, 'Missing username') abort(400, 'Missing username')
username = user_data['username'] username = user_data['username']
@ -222,13 +223,20 @@ def create_repository(namespace, repository):
repo = model.create_repository(namespace, repository, repo = model.create_repository(namespace, repository,
get_authenticated_user()) get_authenticated_user())
profile.debug('Determining added images') profile.debug('Determining already added images')
added_images = OrderedDict([(desc['id'], desc) added_images = OrderedDict([(desc['id'], desc) for desc in image_descriptions])
for desc in image_descriptions])
new_repo_images = dict(added_images) new_repo_images = dict(added_images)
for existing in model.get_repository_images(namespace, repository): # Optimization: Lookup any existing images in the repository with matching docker IDs and
if existing.docker_image_id in new_repo_images: # remove them from the added dict, so we don't need to look them up one-by-one.
def chunks(l, n):
for i in xrange(0, len(l), n):
yield l[i:i+n]
# Note: We do this in chunks in an effort to not hit the SQL query size limit.
for chunk in chunks(new_repo_images.keys(), 50):
existing_images = model.lookup_repository_images(namespace, repository, chunk)
for existing in existing_images:
added_images.pop(existing.docker_image_id) added_images.pop(existing.docker_image_id)
profile.debug('Creating/Linking necessary images') profile.debug('Creating/Linking necessary images')
@ -240,49 +248,8 @@ def create_repository(namespace, repository):
profile.debug('Created images') profile.debug('Created images')
response = make_response('Created', 201) track_and_log('push_repo', repo)
return make_response('Created', 201)
extra_params = {
'repository': '%s/%s' % (namespace, repository),
}
metadata = {
'repo': repository,
'namespace': namespace
}
if get_validated_oauth_token():
analytics.track(username, 'push_repo', extra_params)
oauth_token = get_validated_oauth_token()
metadata['oauth_token_id'] = oauth_token.id
metadata['oauth_token_application_id'] = oauth_token.application.client_id
metadata['oauth_token_application'] = oauth_token.application.name
elif get_authenticated_user():
username = get_authenticated_user().username
analytics.track(username, 'push_repo', extra_params)
metadata['username'] = username
# Mark that the user has started pushing the repo.
user_data = {
'action': 'push_repo',
'repository': repository,
'namespace': namespace
}
event = userevents.get_event(username)
event.publish_event_data('docker-cli', user_data)
elif get_validated_token():
analytics.track(get_validated_token().code, 'push_repo', extra_params)
metadata['token'] = get_validated_token().friendly_name
metadata['token_code'] = get_validated_token().code
model.log_action('push_repo', namespace, performer=get_authenticated_user(),
ip=request.remote_addr, metadata=metadata, repository=repo)
return response
@index.route('/repositories/<path:repository>/images', methods=['PUT']) @index.route('/repositories/<path:repository>/images', methods=['PUT'])
@ -299,13 +266,6 @@ def update_images(namespace, repository):
# Make sure the repo actually exists. # Make sure the repo actually exists.
abort(404, message='Unknown repository', issue='unknown-repo') abort(404, message='Unknown repository', issue='unknown-repo')
profile.debug('Parsing image data')
image_with_checksums = json.loads(request.data.decode('utf8'))
updated_tags = {}
for image in image_with_checksums:
updated_tags[image['Tag']] = image['id']
if get_authenticated_user(): if get_authenticated_user():
profile.debug('Publishing push event') profile.debug('Publishing push event')
username = get_authenticated_user().username username = get_authenticated_user().username
@ -326,12 +286,11 @@ def update_images(namespace, repository):
# Generate a job for each notification that has been added to this repo # Generate a job for each notification that has been added to this repo
profile.debug('Adding notifications for repository') profile.debug('Adding notifications for repository')
updated_tags = session.get('pushed_tags', {})
event_data = { event_data = {
'updated_tags': updated_tags, 'updated_tags': updated_tags,
'pushed_image_count': len(image_with_checksums),
'pruned_image_count': num_removed 'pruned_image_count': num_removed
} }
spawn_notification(repo, 'repo_push', event_data) spawn_notification(repo, 'repo_push', event_data)
return make_response('Updated', 204) return make_response('Updated', 204)
@ -368,38 +327,7 @@ def get_repository_images(namespace, repository):
resp = make_response(json.dumps(all_images), 200) resp = make_response(json.dumps(all_images), 200)
resp.mimetype = 'application/json' resp.mimetype = 'application/json'
metadata = { track_and_log('pull_repo', repo)
'repo': repository,
'namespace': namespace,
}
profile.debug('Logging the pull to Mixpanel and the log system')
if get_validated_oauth_token():
oauth_token = get_validated_oauth_token()
metadata['oauth_token_id'] = oauth_token.id
metadata['oauth_token_application_id'] = oauth_token.application.client_id
metadata['oauth_token_application'] = oauth_token.application.name
elif get_authenticated_user():
metadata['username'] = get_authenticated_user().username
elif get_validated_token():
metadata['token'] = get_validated_token().friendly_name
metadata['token_code'] = get_validated_token().code
else:
metadata['public'] = True
pull_username = 'anonymous'
if get_authenticated_user():
pull_username = get_authenticated_user().username
extra_params = {
'repository': '%s/%s' % (namespace, repository),
}
analytics.track(pull_username, 'pull_repo', extra_params)
model.log_action('pull_repo', namespace,
performer=get_authenticated_user(),
ip=request.remote_addr, metadata=metadata,
repository=repo)
return resp return resp
abort(403) abort(403)
@ -458,6 +386,7 @@ def get_search():
@index.route('/_ping') @index.route('/_ping')
@index.route('/_ping') @index.route('/_ping')
def ping(): def ping():
# NOTE: any changes made here must also be reflected in the nginx config
response = make_response('true', 200) response = make_response('true', 200)
response.headers['X-Docker-Registry-Version'] = '0.6.0' response.headers['X-Docker-Registry-Version'] = '0.6.0'
response.headers['X-Docker-Registry-Standalone'] = '0' response.headers['X-Docker-Registry-Standalone'] = '0'

View file

@ -1,7 +1,9 @@
import logging import logging
from notificationhelper import build_event_data from notificationhelper import build_event_data
from util.jinjautil import get_template_env
template_env = get_template_env("events")
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
class InvalidNotificationEventException(Exception): class InvalidNotificationEventException(Exception):
@ -14,7 +16,7 @@ class NotificationEvent(object):
def get_level(self, event_data, notification_data): def get_level(self, event_data, notification_data):
""" """
Returns a 'level' representing the severity of the event. Returns a 'level' representing the severity of the event.
Valid values are: 'info', 'warning', 'error', 'primary' Valid values are: 'info', 'warning', 'error', 'primary', 'success'
""" """
raise NotImplementedError raise NotImplementedError
@ -28,7 +30,10 @@ class NotificationEvent(object):
""" """
Returns a human readable HTML message for the given notification data. Returns a human readable HTML message for the given notification data.
""" """
raise NotImplementedError return template_env.get_template(self.event_name() + '.html').render({
'event_data': event_data,
'notification_data': notification_data
})
def get_sample_data(self, repository=None): def get_sample_data(self, repository=None):
""" """
@ -59,32 +64,14 @@ class RepoPushEvent(NotificationEvent):
return 'repo_push' return 'repo_push'
def get_level(self, event_data, notification_data): def get_level(self, event_data, notification_data):
return 'info' return 'primary'
def get_summary(self, event_data, notification_data): def get_summary(self, event_data, notification_data):
return 'Repository %s updated' % (event_data['repository']) return 'Repository %s updated' % (event_data['repository'])
def get_message(self, event_data, notification_data):
if not event_data.get('updated_tags', {}).keys():
html = """
Repository <a href="%s">%s</a> has been updated via a push.
""" % (event_data['homepage'],
event_data['repository'])
else:
html = """
Repository <a href="%s">%s</a> has been updated via a push.
<br><br>
Tags Updated: %s
""" % (event_data['homepage'],
event_data['repository'],
', '.join(event_data['updated_tags'].keys()))
return html
def get_sample_data(self, repository): def get_sample_data(self, repository):
return build_event_data(repository, { return build_event_data(repository, {
'updated_tags': {'latest': 'someimageid', 'foo': 'anotherimage'}, 'updated_tags': {'latest': 'someimageid', 'foo': 'anotherimage'},
'pushed_image_count': 10,
'pruned_image_count': 3 'pruned_image_count': 3
}) })
@ -111,25 +98,6 @@ class BuildQueueEvent(NotificationEvent):
def get_summary(self, event_data, notification_data): def get_summary(self, event_data, notification_data):
return 'Build queued for repository %s' % (event_data['repository']) return 'Build queued for repository %s' % (event_data['repository'])
def get_message(self, event_data, notification_data):
is_manual = event_data['is_manual']
if is_manual:
html = """
A <a href="%s">new build</a> has been manually queued to start on repository %s.
<br><br>
Build ID: %s
""" % (event_data['homepage'], event_data['repository'], event_data['build_id'])
else:
html = """
A <a href="%s">new build</a> has been queued via a %s trigger to start on repository %s.
<br><br>
Build ID: %s
""" % (event_data['homepage'], event_data['trigger_kind'],
event_data['repository'], event_data['build_id'])
return html
class BuildStartEvent(NotificationEvent): class BuildStartEvent(NotificationEvent):
@classmethod @classmethod
@ -152,15 +120,6 @@ class BuildStartEvent(NotificationEvent):
def get_summary(self, event_data, notification_data): def get_summary(self, event_data, notification_data):
return 'Build started for repository %s' % (event_data['repository']) return 'Build started for repository %s' % (event_data['repository'])
def get_message(self, event_data, notification_data):
html = """
A <a href="%s">new build</a> has started on repository %s.
<br><br>
Build ID: %s
""" % (event_data['homepage'], event_data['repository'], event_data['build_id'])
return html
class BuildSuccessEvent(NotificationEvent): class BuildSuccessEvent(NotificationEvent):
@classmethod @classmethod
@ -168,7 +127,7 @@ class BuildSuccessEvent(NotificationEvent):
return 'build_success' return 'build_success'
def get_level(self, event_data, notification_data): def get_level(self, event_data, notification_data):
return 'primary' return 'success'
def get_sample_data(self, repository): def get_sample_data(self, repository):
build_uuid = 'fake-build-id' build_uuid = 'fake-build-id'
@ -183,15 +142,6 @@ class BuildSuccessEvent(NotificationEvent):
def get_summary(self, event_data, notification_data): def get_summary(self, event_data, notification_data):
return 'Build succeeded for repository %s' % (event_data['repository']) return 'Build succeeded for repository %s' % (event_data['repository'])
def get_message(self, event_data, notification_data):
html = """
A <a href="%s">build</a> has finished on repository %s.
<br><br>
Build ID: %s
""" % (event_data['homepage'], event_data['repository'], event_data['build_id'])
return html
class BuildFailureEvent(NotificationEvent): class BuildFailureEvent(NotificationEvent):
@classmethod @classmethod
@ -215,13 +165,3 @@ class BuildFailureEvent(NotificationEvent):
def get_summary(self, event_data, notification_data): def get_summary(self, event_data, notification_data):
return 'Build failure for repository %s' % (event_data['repository']) return 'Build failure for repository %s' % (event_data['repository'])
def get_message(self, event_data, notification_data):
html = """
A <a href="%s">build</a> has failed on repository %s.
<br><br>
Reason: %s<br>
Build ID: %s<br>
""" % (event_data['homepage'], event_data['repository'],
event_data['error_message'], event_data['build_id'])
return html

View file

@ -1,5 +1,6 @@
from app import app, notification_queue from app import app, notification_queue
from data import model from data import model
from auth.auth_context import get_authenticated_user, get_validated_oauth_token
import json import json
@ -27,21 +28,37 @@ def build_event_data(repo, extra_data={}, subpage=None):
event_data.update(extra_data) event_data.update(extra_data)
return event_data return event_data
def build_notification_data(notification, event_data): def build_notification_data(notification, event_data, performer_data=None):
if not performer_data:
performer_data = {}
oauth_token = get_validated_oauth_token()
if oauth_token:
performer_data['oauth_token_id'] = oauth_token.id
performer_data['oauth_token_application_id'] = oauth_token.application.client_id
performer_data['oauth_token_application'] = oauth_token.application.name
performer_user = get_authenticated_user()
if performer_user:
performer_data['entity_id'] = performer_user.id
performer_data['entity_name'] = performer_user.username
return { return {
'notification_uuid': notification.uuid, 'notification_uuid': notification.uuid,
'repository_namespace': notification.repository.namespace_user.username, 'repository_namespace': notification.repository.namespace_user.username,
'repository_name': notification.repository.name, 'repository_name': notification.repository.name,
'event_data': event_data 'event_data': event_data,
'performer_data': performer_data
} }
def spawn_notification(repo, event_name, extra_data={}, subpage=None, pathargs=[]): def spawn_notification(repo, event_name, extra_data={}, subpage=None, pathargs=[],
performer_data=None):
event_data = build_event_data(repo, extra_data=extra_data, subpage=subpage) event_data = build_event_data(repo, extra_data=extra_data, subpage=subpage)
notifications = model.list_repo_notifications(repo.namespace_user.username, repo.name, notifications = model.list_repo_notifications(repo.namespace_user.username, repo.name,
event_name=event_name) event_name=event_name)
for notification in notifications: for notification in list(notifications):
notification_data = build_notification_data(notification, event_data) notification_data = build_notification_data(notification, event_data, performer_data)
path = [repo.namespace_user.username, repo.name, event_name] + pathargs path = [repo.namespace_user.username, repo.name, event_name] + pathargs
notification_queue.put(path, json.dumps(notification_data)) notification_queue.put(path, json.dumps(notification_data))

View file

@ -211,7 +211,7 @@ class FlowdockMethod(NotificationMethod):
if not token: if not token:
return return
owner = model.get_user(notification.repository.namespace_user.username) owner = model.get_user_or_org(notification.repository.namespace_user.username)
if not owner: if not owner:
# Something went wrong. # Something went wrong.
return return
@ -267,7 +267,7 @@ class HipchatMethod(NotificationMethod):
if not token or not room_id: if not token or not room_id:
return return
owner = model.get_user(notification.repository.namespace_user.username) owner = model.get_user_or_org(notification.repository.namespace_user.username)
if not owner: if not owner:
# Something went wrong. # Something went wrong.
return return
@ -279,6 +279,7 @@ class HipchatMethod(NotificationMethod):
'info': 'gray', 'info': 'gray',
'warning': 'yellow', 'warning': 'yellow',
'error': 'red', 'error': 'red',
'success': 'green',
'primary': 'purple' 'primary': 'purple'
}.get(level, 'gray') }.get(level, 'gray')
@ -303,6 +304,56 @@ class HipchatMethod(NotificationMethod):
raise NotificationMethodPerformException(ex.message) raise NotificationMethodPerformException(ex.message)
from HTMLParser import HTMLParser
class SlackAdjuster(HTMLParser):
def __init__(self):
self.reset()
self.result = []
def handle_data(self, d):
self.result.append(d)
def get_attr(self, attrs, name):
for attr in attrs:
if attr[0] == name:
return attr[1]
return ''
def handle_starttag(self, tag, attrs):
if tag == 'a':
self.result.append('<%s|' % (self.get_attr(attrs, 'href'), ))
if tag == 'i':
self.result.append('_')
if tag == 'b' or tag == 'strong':
self.result.append('*')
if tag == 'img':
self.result.append(self.get_attr(attrs, 'alt'))
self.result.append(' ')
def handle_endtag(self, tag):
if tag == 'a':
self.result.append('>')
if tag == 'b' or tag == 'strong':
self.result.append('*')
if tag == 'i':
self.result.append('_')
def get_data(self):
return ''.join(self.result)
def adjust_tags(html):
s = SlackAdjuster()
s.feed(html)
return s.get_data()
class SlackMethod(NotificationMethod): class SlackMethod(NotificationMethod):
""" Method for sending notifications to Slack via the API: """ Method for sending notifications to Slack via the API:
https://api.slack.com/docs/attachments https://api.slack.com/docs/attachments
@ -318,12 +369,11 @@ class SlackMethod(NotificationMethod):
if not config_data.get('subdomain', '').isalnum(): if not config_data.get('subdomain', '').isalnum():
raise CannotValidateNotificationMethodException('Missing Slack Subdomain Name') raise CannotValidateNotificationMethodException('Missing Slack Subdomain Name')
def formatForSlack(self, message): def format_for_slack(self, message):
message = message.replace('\n', '') message = message.replace('\n', '')
message = re.sub(r'\s+', ' ', message) message = re.sub(r'\s+', ' ', message)
message = message.replace('<br>', '\n') message = message.replace('<br>', '\n')
message = re.sub(r'<a href="(.+)">(.+)</a>', '<\\1|\\2>', message) return adjust_tags(message)
return message
def perform(self, notification, event_handler, notification_data): def perform(self, notification, event_handler, notification_data):
config_data = json.loads(notification.config_json) config_data = json.loads(notification.config_json)
@ -334,7 +384,7 @@ class SlackMethod(NotificationMethod):
if not token or not subdomain: if not token or not subdomain:
return return
owner = model.get_user(notification.repository.namespace_user.username) owner = model.get_user_or_org(notification.repository.namespace_user.username)
if not owner: if not owner:
# Something went wrong. # Something went wrong.
return return
@ -346,6 +396,7 @@ class SlackMethod(NotificationMethod):
'info': '#ffffff', 'info': '#ffffff',
'warning': 'warning', 'warning': 'warning',
'error': 'danger', 'error': 'danger',
'success': 'good',
'primary': 'good' 'primary': 'good'
}.get(level, '#ffffff') }.get(level, '#ffffff')
@ -359,8 +410,9 @@ class SlackMethod(NotificationMethod):
'attachments': [ 'attachments': [
{ {
'fallback': summary, 'fallback': summary,
'text': self.formatForSlack(message), 'text': self.format_for_slack(message),
'color': color 'color': color,
'mrkdwn_in': ["text"]
} }
] ]
} }

View file

@ -7,13 +7,13 @@ from functools import wraps
from datetime import datetime from datetime import datetime
from time import time from time import time
from app import storage as store, image_diff_queue from app import storage as store, image_diff_queue, app
from auth.auth import process_auth, extract_namespace_repo_from_session from auth.auth import process_auth, extract_namespace_repo_from_session
from util import checksums, changes from util import checksums, changes
from util.http import abort, exact_abort from util.http import abort, exact_abort
from auth.permissions import (ReadRepositoryPermission, from auth.permissions import (ReadRepositoryPermission,
ModifyRepositoryPermission) ModifyRepositoryPermission)
from data import model from data import model, database
from util import gzipstream from util import gzipstream
@ -59,7 +59,7 @@ def require_completion(f):
@wraps(f) @wraps(f)
def wrapper(namespace, repository, *args, **kwargs): def wrapper(namespace, repository, *args, **kwargs):
image_id = kwargs['image_id'] image_id = kwargs['image_id']
repo_image = model.get_repo_image(namespace, repository, image_id) repo_image = model.get_repo_image_extended(namespace, repository, image_id)
if image_is_uploading(repo_image): if image_is_uploading(repo_image):
abort(400, 'Image %(image_id)s is being uploaded, retry later', abort(400, 'Image %(image_id)s is being uploaded, retry later',
issue='upload-in-progress', image_id=kwargs['image_id']) issue='upload-in-progress', image_id=kwargs['image_id'])
@ -103,7 +103,7 @@ def head_image_layer(namespace, repository, image_id, headers):
profile.debug('Checking repo permissions') profile.debug('Checking repo permissions')
if permission.can() or model.repository_is_public(namespace, repository): if permission.can() or model.repository_is_public(namespace, repository):
profile.debug('Looking up repo image') profile.debug('Looking up repo image')
repo_image = model.get_repo_image(namespace, repository, image_id) repo_image = model.get_repo_image_extended(namespace, repository, image_id)
if not repo_image: if not repo_image:
profile.debug('Image not found') profile.debug('Image not found')
abort(404, 'Image %(image_id)s not found', issue='unknown-image', abort(404, 'Image %(image_id)s not found', issue='unknown-image',
@ -136,7 +136,7 @@ def get_image_layer(namespace, repository, image_id, headers):
profile.debug('Checking repo permissions') profile.debug('Checking repo permissions')
if permission.can() or model.repository_is_public(namespace, repository): if permission.can() or model.repository_is_public(namespace, repository):
profile.debug('Looking up repo image') profile.debug('Looking up repo image')
repo_image = model.get_repo_image(namespace, repository, image_id) repo_image = model.get_repo_image_extended(namespace, repository, image_id)
profile.debug('Looking up the layer path') profile.debug('Looking up the layer path')
try: try:
@ -151,6 +151,10 @@ def get_image_layer(namespace, repository, image_id, headers):
return resp return resp
profile.debug('Streaming layer data') profile.debug('Streaming layer data')
# Close the database handle here for this process before we send the long download.
database.close_db_filter(None)
return Response(store.stream_read(repo_image.storage.locations, path), headers=headers) return Response(store.stream_read(repo_image.storage.locations, path), headers=headers)
except (IOError, AttributeError): except (IOError, AttributeError):
profile.debug('Image not found') profile.debug('Image not found')
@ -170,7 +174,7 @@ def put_image_layer(namespace, repository, image_id):
abort(403) abort(403)
profile.debug('Retrieving image') profile.debug('Retrieving image')
repo_image = model.get_repo_image(namespace, repository, image_id) repo_image = model.get_repo_image_extended(namespace, repository, image_id)
try: try:
profile.debug('Retrieving image data') profile.debug('Retrieving image data')
uuid = repo_image.storage.uuid uuid = repo_image.storage.uuid
@ -197,12 +201,15 @@ def put_image_layer(namespace, repository, image_id):
# Create a socket reader to read the input stream containing the layer data. # Create a socket reader to read the input stream containing the layer data.
sr = SocketReader(input_stream) sr = SocketReader(input_stream)
# Add a handler that store the data in storage. # Add a handler that copies the data into a temp file. This is used to calculate the tarsum,
tmp, store_hndlr = store.temp_store_handler() # which is only needed for older versions of Docker.
sr.add_handler(store_hndlr) requires_tarsum = session.get('checksum_format') == 'tarsum'
if requires_tarsum:
tmp, tmp_hndlr = store.temp_store_handler()
sr.add_handler(tmp_hndlr)
# Add a handler to compute the uncompressed size of the layer. # Add a handler to compute the compressed and uncompressed sizes of the layer.
uncompressed_size_info, size_hndlr = gzipstream.calculate_size_handler() size_info, size_hndlr = gzipstream.calculate_size_handler()
sr.add_handler(size_hndlr) sr.add_handler(size_hndlr)
# Add a handler which computes the checksum. # Add a handler which computes the checksum.
@ -210,21 +217,23 @@ def put_image_layer(namespace, repository, image_id):
sr.add_handler(sum_hndlr) sr.add_handler(sum_hndlr)
# Stream write the data to storage. # Stream write the data to storage.
store.stream_write(repo_image.storage.locations, layer_path, sr) with database.CloseForLongOperation(app.config):
store.stream_write(repo_image.storage.locations, layer_path, sr)
# Append the computed checksum. # Append the computed checksum.
csums = [] csums = []
csums.append('sha256:{0}'.format(h.hexdigest())) csums.append('sha256:{0}'.format(h.hexdigest()))
try: try:
image_size = tmp.tell()
# Save the size of the image. # Save the size of the image.
model.set_image_size(image_id, namespace, repository, image_size, uncompressed_size_info.size) model.set_image_size(image_id, namespace, repository, size_info.compressed_size,
size_info.uncompressed_size)
if requires_tarsum:
tmp.seek(0)
csums.append(checksums.compute_tarsum(tmp, json_data))
tmp.close()
tmp.seek(0)
csums.append(checksums.compute_tarsum(tmp, json_data))
tmp.close()
except (IOError, checksums.TarError) as e: except (IOError, checksums.TarError) as e:
logger.debug('put_image_layer: Error when computing tarsum ' logger.debug('put_image_layer: Error when computing tarsum '
'{0}'.format(e)) '{0}'.format(e))
@ -267,7 +276,19 @@ def put_image_checksum(namespace, repository, image_id):
if not permission.can(): if not permission.can():
abort(403) abort(403)
checksum = request.headers.get('X-Docker-Checksum') # Docker Version < 0.10 (tarsum+sha):
old_checksum = request.headers.get('X-Docker-Checksum')
# Docker Version >= 0.10 (sha):
new_checksum = request.headers.get('X-Docker-Checksum-Payload')
# Store whether we need to calculate the tarsum.
if new_checksum:
session['checksum_format'] = 'sha256'
else:
session['checksum_format'] = 'tarsum'
checksum = new_checksum or old_checksum
if not checksum: if not checksum:
abort(400, "Missing checksum for image %(image_id)s", issue='missing-checksum', abort(400, "Missing checksum for image %(image_id)s", issue='missing-checksum',
image_id=image_id) image_id=image_id)
@ -277,7 +298,10 @@ def put_image_checksum(namespace, repository, image_id):
issue='missing-checksum-cookie', image_id=image_id) issue='missing-checksum-cookie', image_id=image_id)
profile.debug('Looking up repo image') profile.debug('Looking up repo image')
repo_image = model.get_repo_image(namespace, repository, image_id) repo_image = model.get_repo_image_extended(namespace, repository, image_id)
if not repo_image or not repo_image.storage:
abort(404, 'Image not found: %(image_id)s', issue='unknown-image', image_id=image_id)
uuid = repo_image.storage.uuid uuid = repo_image.storage.uuid
profile.debug('Looking up repo layer data') profile.debug('Looking up repo layer data')
@ -329,7 +353,7 @@ def get_image_json(namespace, repository, image_id, headers):
abort(403) abort(403)
profile.debug('Looking up repo image') profile.debug('Looking up repo image')
repo_image = model.get_repo_image(namespace, repository, image_id) repo_image = model.get_repo_image_extended(namespace, repository, image_id)
profile.debug('Looking up repo layer data') profile.debug('Looking up repo layer data')
try: try:
@ -360,7 +384,7 @@ def get_image_ancestry(namespace, repository, image_id, headers):
abort(403) abort(403)
profile.debug('Looking up repo image') profile.debug('Looking up repo image')
repo_image = model.get_repo_image(namespace, repository, image_id) repo_image = model.get_repo_image_extended(namespace, repository, image_id)
profile.debug('Looking up image data') profile.debug('Looking up image data')
try: try:
@ -424,7 +448,7 @@ def put_image_json(namespace, repository, image_id):
issue='invalid-request', image_id=image_id) issue='invalid-request', image_id=image_id)
profile.debug('Looking up repo image') profile.debug('Looking up repo image')
repo_image = model.get_repo_image(namespace, repository, image_id) repo_image = model.get_repo_image_extended(namespace, repository, image_id)
if not repo_image: if not repo_image:
profile.debug('Image not found') profile.debug('Image not found')
abort(404, 'Image %(image_id)s not found', issue='unknown-image', abort(404, 'Image %(image_id)s not found', issue='unknown-image',
@ -441,7 +465,7 @@ def put_image_json(namespace, repository, image_id):
parent_image = None parent_image = None
if parent_id: if parent_id:
profile.debug('Looking up parent image') profile.debug('Looking up parent image')
parent_image = model.get_repo_image(namespace, repository, parent_id) parent_image = model.get_repo_image_extended(namespace, repository, parent_id)
parent_uuid = parent_image and parent_image.storage.uuid parent_uuid = parent_image and parent_image.storage.uuid
parent_locations = parent_image and parent_image.storage.locations parent_locations = parent_image and parent_image.storage.locations
@ -494,7 +518,7 @@ def put_image_json(namespace, repository, image_id):
def process_image_changes(namespace, repository, image_id): def process_image_changes(namespace, repository, image_id):
logger.debug('Generating diffs for image: %s' % image_id) logger.debug('Generating diffs for image: %s' % image_id)
repo_image = model.get_repo_image(namespace, repository, image_id) repo_image = model.get_repo_image_extended(namespace, repository, image_id)
if not repo_image: if not repo_image:
logger.warning('No image for id: %s', image_id) logger.warning('No image for id: %s', image_id)
return None, None return None, None

View file

@ -2,7 +2,7 @@
import logging import logging
import json import json
from flask import abort, request, jsonify, make_response, Blueprint from flask import abort, request, jsonify, make_response, Blueprint, session
from app import app from app import app
from util.names import parse_repository_name from util.names import parse_repository_name
@ -59,6 +59,12 @@ def put_tag(namespace, repository, tag):
docker_image_id = json.loads(request.data) docker_image_id = json.loads(request.data)
model.create_or_update_tag(namespace, repository, tag, docker_image_id) model.create_or_update_tag(namespace, repository, tag, docker_image_id)
# Store the updated tag.
if not 'pushed_tags' in session:
session['pushed_tags'] = {}
session['pushed_tags'][tag] = docker_image_id
return make_response('Created', 200) return make_response('Created', 200)
abort(403) abort(403)

62
endpoints/trackhelper.py Normal file
View file

@ -0,0 +1,62 @@
import logging
from app import analytics, app, userevents
from data import model
from flask import request
from auth.auth_context import get_authenticated_user, get_validated_token, get_validated_oauth_token
logger = logging.getLogger(__name__)
profile = logging.getLogger('application.profiler')
def track_and_log(event_name, repo, **kwargs):
  """Record a repository event to analytics (Mixpanel), the user-event system, and the
  database action log.

  Args:
    event_name: name of the event/log kind (e.g. 'repo_verb', 'pull_repo').
    repo: repository model object; must expose `.name` and `.namespace_user.username`.
    **kwargs: extra metadata merged into the logged metadata dict.
  """
  repository = repo.name
  namespace = repo.namespace_user.username

  metadata = {
    'repo': repository,
    'namespace': namespace,
  }
  metadata.update(kwargs)

  analytics_id = 'anonymous'

  profile.debug('Logging the %s to Mixpanel and the log system', event_name)

  # Determine who performed the action. Precedence: OAuth token, then an
  # authenticated user, then a validated access token, else anonymous/public.
  if get_validated_oauth_token():
    oauth_token = get_validated_oauth_token()
    metadata['oauth_token_id'] = oauth_token.id
    metadata['oauth_token_application_id'] = oauth_token.application.client_id
    metadata['oauth_token_application'] = oauth_token.application.name
    analytics_id = 'oauth:' + oauth_token.id
  elif get_authenticated_user():
    metadata['username'] = get_authenticated_user().username
    analytics_id = get_authenticated_user().username
  elif get_validated_token():
    metadata['token'] = get_validated_token().friendly_name
    metadata['token_code'] = get_validated_token().code
    analytics_id = 'token:' + get_validated_token().code
  else:
    metadata['public'] = True
    analytics_id = 'anonymous'

  extra_params = {
    'repository': '%s/%s' % (namespace, repository),
  }

  # Publish the user event (only when a real authenticated user performed the action).
  if get_authenticated_user():
    user_event_data = {
      'action': event_name,
      'repository': repository,
      'namespace': namespace
    }

    event = userevents.get_event(get_authenticated_user().username)
    event.publish_event_data('docker-cli', user_event_data)

  # Save the action to mixpanel.
  analytics.track(analytics_id, event_name, extra_params)

  # Log the action to the database.
  model.log_action(event_name, namespace,
                   performer=get_authenticated_user(),
                   ip=request.remote_addr, metadata=metadata,
                   repository=repo)

View file

@ -3,11 +3,13 @@ import io
import os.path import os.path
import tarfile import tarfile
import base64 import base64
import re
from github import Github, UnknownObjectException, GithubException from github import Github, UnknownObjectException, GithubException
from tempfile import SpooledTemporaryFile from tempfile import SpooledTemporaryFile
from app import app, userfiles as user_files from app import app, userfiles as user_files, github_trigger
from util.tarfileappender import TarfileAppender
client = app.config['HTTPCLIENT'] client = app.config['HTTPCLIENT']
@ -148,8 +150,8 @@ def raise_unsupported():
class GithubBuildTrigger(BuildTrigger): class GithubBuildTrigger(BuildTrigger):
@staticmethod @staticmethod
def _get_client(auth_token): def _get_client(auth_token):
return Github(auth_token, client_id=app.config['GITHUB_CLIENT_ID'], return Github(auth_token, client_id=github_trigger.client_id(),
client_secret=app.config['GITHUB_CLIENT_SECRET']) client_secret=github_trigger.client_secret())
@classmethod @classmethod
def service_name(cls): def service_name(cls):
@ -229,13 +231,36 @@ class GithubBuildTrigger(BuildTrigger):
return repos_by_org return repos_by_org
def matches_ref(self, ref, regex):
  """Return True if `regex` fully matches the portion of `ref` after its first '/'.

  For example, ref 'refs/heads/master' is tested as 'heads/master'. A falsy
  regex never matches. The match must cover the whole candidate string, not
  just a prefix.
  """
  candidate = ref.split('/', 1)[1]
  if not regex:
    return False

  matched = regex.match(candidate)
  # regex.match anchors at the start, so a full match is exactly when the
  # matched text equals the whole candidate.
  return matched is not None and matched.group(0) == candidate
def list_build_subdirs(self, auth_token, config): def list_build_subdirs(self, auth_token, config):
gh_client = self._get_client(auth_token) gh_client = self._get_client(auth_token)
source = config['build_source'] source = config['build_source']
try: try:
repo = gh_client.get_repo(source) repo = gh_client.get_repo(source)
default_commit = repo.get_branch(repo.default_branch or 'master').commit
# Find the first matching branch.
branches = None
if 'branchtag_regex' in config:
try:
regex = re.compile(config['branchtag_regex'])
branches = [branch.name for branch in repo.get_branches()
if self.matches_ref('refs/heads/' + branch.name, regex)]
except:
pass
branches = branches or [repo.default_branch or 'master']
default_commit = repo.get_branch(branches[0]).commit
commit_tree = repo.get_git_tree(default_commit.sha, recursive=True) commit_tree = repo.get_git_tree(default_commit.sha, recursive=True)
return [os.path.dirname(elem.path) for elem in commit_tree.tree return [os.path.dirname(elem.path) for elem in commit_tree.tree
@ -301,10 +326,17 @@ class GithubBuildTrigger(BuildTrigger):
with tarfile.open(fileobj=tarball) as archive: with tarfile.open(fileobj=tarball) as archive:
tarball_subdir = archive.getnames()[0] tarball_subdir = archive.getnames()[0]
# Seek to position 0 to make boto multipart happy # Seek to position 0 to make tarfile happy.
tarball.seek(0) tarball.seek(0)
dockerfile_id = user_files.store_file(tarball, TARBALL_MIME) entries = {
tarball_subdir + '/.git/HEAD': commit_sha,
tarball_subdir + '/.git/objects/': None,
tarball_subdir + '/.git/refs/': None
}
appender = TarfileAppender(tarball, entries).get_stream()
dockerfile_id = user_files.store_file(appender, TARBALL_MIME)
logger.debug('Successfully prepared job') logger.debug('Successfully prepared job')
@ -339,6 +371,15 @@ class GithubBuildTrigger(BuildTrigger):
commit_sha = payload['head_commit']['id'] commit_sha = payload['head_commit']['id']
commit_message = payload['head_commit'].get('message', '') commit_message = payload['head_commit'].get('message', '')
if 'branchtag_regex' in config:
try:
regex = re.compile(config['branchtag_regex'])
except:
regex = re.compile('.*')
if not self.matches_ref(ref, regex):
raise SkipRequestException()
if should_skip_commit(commit_message): if should_skip_commit(commit_message):
raise SkipRequestException() raise SkipRequestException()
@ -362,22 +403,36 @@ class GithubBuildTrigger(BuildTrigger):
gh_client = self._get_client(auth_token) gh_client = self._get_client(auth_token)
repo = gh_client.get_repo(source) repo = gh_client.get_repo(source)
master = repo.get_branch(repo.default_branch) branch_name = run_parameters.get('branch_name') or repo.default_branch
master_sha = master.commit.sha branch = repo.get_branch(branch_name)
short_sha = GithubBuildTrigger.get_display_name(master_sha) branch_sha = branch.commit.sha
ref = 'refs/heads/%s' % (run_parameters.get('branch_name') or repo.default_branch) short_sha = GithubBuildTrigger.get_display_name(branch_sha)
ref = 'refs/heads/%s' % (branch_name)
return self._prepare_build(config, repo, master_sha, short_sha, ref) return self._prepare_build(config, repo, branch_sha, short_sha, ref)
except GithubException as ghe: except GithubException as ghe:
raise TriggerStartException(ghe.data['message']) raise TriggerStartException(ghe.data['message'])
def list_field_values(self, auth_token, config, field_name): def list_field_values(self, auth_token, config, field_name):
if field_name == 'refs':
branches = self.list_field_values(auth_token, config, 'branch_name')
tags = self.list_field_values(auth_token, config, 'tag_name')
return ([{'kind': 'branch', 'name': b} for b in branches] +
[{'kind': 'tag', 'name': tag} for tag in tags])
if field_name == 'tag_name':
gh_client = self._get_client(auth_token)
source = config['build_source']
repo = gh_client.get_repo(source)
return [tag.name for tag in repo.get_tags()]
if field_name == 'branch_name': if field_name == 'branch_name':
gh_client = self._get_client(auth_token) gh_client = self._get_client(auth_token)
source = config['build_source'] source = config['build_source']
repo = gh_client.get_repo(source) repo = gh_client.get_repo(source)
branches = [branch['name'] for branch in repo.get_branches()] branches = [branch.name for branch in repo.get_branches()]
if not repo.default_branch in branches: if not repo.default_branch in branches:
branches.insert(0, repo.default_branch) branches.insert(0, repo.default_branch)

152
endpoints/verbs.py Normal file
View file

@ -0,0 +1,152 @@
import logging
import json
import hashlib
from flask import redirect, Blueprint, abort, send_file, request
from app import app
from auth.auth import process_auth
from auth.auth_context import get_authenticated_user
from auth.permissions import ReadRepositoryPermission
from data import model
from data import database
from endpoints.trackhelper import track_and_log
from storage import Storage
from util.queuefile import QueueFile
from util.queueprocess import QueueProcess
from util.gzipwrap import GzipWrap
from util.dockerloadformat import build_docker_load_stream
verbs = Blueprint('verbs', __name__)
logger = logging.getLogger(__name__)
def _open_stream(namespace, repository, tag, synthetic_image_id, image_json, image_id_list):
  """Build the squashed-image stream for a tag and return its `read` callable.

  Runs inside a worker process (see QueueProcess usage in get_squashed_tag), so it
  loads everything it needs from the database up front and then disconnects.

  Args:
    namespace/repository/tag: identify the tag being squashed.
    synthetic_image_id: the generated ID for the squashed image.
    image_json: parsed JSON metadata of the tag's top image.
    image_id_list: ancestry list of docker image IDs, in the desired order.

  Returns:
    The bound `read` method of the docker-load stream.
  """
  store = Storage(app)

  # For performance reasons, we load the full image list here, cache it, then disconnect from
  # the database.
  with database.UseThenDisconnect(app.config):
    image_list = list(model.get_matching_repository_images(namespace, repository, image_id_list))

  # The query does not guarantee order; re-sort to match the ancestry order given
  # by image_id_list.
  image_list.sort(key=lambda image: image_id_list.index(image.docker_image_id))

  def get_next_image():
    # Generator over the image rows, in ancestry order.
    for current_image in image_list:
      yield current_image

  def get_next_layer():
    # Generator over the layer-data streams, one per image, opened lazily so
    # only one layer stream is live at a time.
    for current_image_entry in image_list:
      current_image_path = store.image_layer_path(current_image_entry.storage.uuid)
      current_image_stream = store.stream_read_file(current_image_entry.storage.locations,
                                                    current_image_path)

      current_image_id = current_image_entry.id
      logger.debug('Returning image layer %s: %s' % (current_image_id, current_image_path))
      yield current_image_stream

  stream = build_docker_load_stream(namespace, repository, tag, synthetic_image_id, image_json,
                                    get_next_image, get_next_layer)

  return stream.read
def _write_synthetic_image_to_storage(linked_storage_uuid, linked_locations, queue_file):
  """Persist the generated squashed image from `queue_file` into storage.

  Runs as a separate process (see QueueProcess.run_process in get_squashed_tag).
  On stream failure the partially-created derived storage record is deleted; on
  success the storage row is marked as no longer uploading.

  Args:
    linked_storage_uuid: uuid of the derived storage record to write.
    linked_locations: storage locations to write the layer to.
    queue_file: QueueFile producing the squashed image bytes.
  """
  store = Storage(app)

  def handle_exception(ex):
    # If generation failed upstream, drop the derived storage record so a later
    # request can retry the squash from scratch.
    logger.debug('Exception when building squashed image %s: %s', linked_storage_uuid, ex)

    with database.UseThenDisconnect(app.config):
      model.delete_derived_storage_by_uuid(linked_storage_uuid)

  queue_file.add_exception_handler(handle_exception)

  image_path = store.image_layer_path(linked_storage_uuid)
  store.stream_write(linked_locations, image_path, queue_file)
  queue_file.close()

  # Only mark the upload complete if the stream finished without raising.
  if not queue_file.raised_exception:
    with database.UseThenDisconnect(app.config):
      done_uploading = model.get_storage_by_uuid(linked_storage_uuid)
      done_uploading.uploading = False
      done_uploading.save()
@verbs.route('/squash/<namespace>/<repository>/<tag>', methods=['GET'])
@process_auth
def get_squashed_tag(namespace, repository, tag):
  """Serve a squashed (single-layer) rendition of the given tag.

  If a cached squashed image already exists in storage, redirect to (or stream)
  it; otherwise kick off generation in worker processes, teeing the output to
  both the client and storage. Returns 403 when the caller lacks read access,
  404 when the tag or image is missing.
  """
  permission = ReadRepositoryPermission(namespace, repository)
  if permission.can() or model.repository_is_public(namespace, repository):
    # Lookup the requested tag.
    try:
      tag_image = model.get_tag_image(namespace, repository, tag)
    except model.DataModelException:
      abort(404)

    # Lookup the tag's image and storage.
    repo_image = model.get_repo_image_extended(namespace, repository, tag_image.docker_image_id)
    if not repo_image:
      abort(404)

    # Log the action.
    track_and_log('repo_verb', repo_image.repository, tag=tag, verb='squash')

    store = Storage(app)
    derived = model.find_or_create_derived_storage(repo_image.storage, 'squash',
                                                   store.preferred_locations[0])
    # `uploading` is False once a previous request finished writing the squashed
    # image, so we can serve the cached copy directly.
    if not derived.uploading:
      logger.debug('Derived image %s exists in storage', derived.uuid)
      derived_layer_path = store.image_layer_path(derived.uuid)
      download_url = store.get_direct_download_url(derived.locations, derived_layer_path)
      if download_url:
        logger.debug('Redirecting to download URL for derived image %s', derived.uuid)
        return redirect(download_url)

      # Close the database handle here for this process before we send the long download.
      database.close_db_filter(None)

      logger.debug('Sending cached derived image %s', derived.uuid)
      return send_file(store.stream_read_file(derived.locations, derived_layer_path))

    # Load the ancestry for the image.
    logger.debug('Building and returning derived image %s', derived.uuid)
    uuid = repo_image.storage.uuid
    ancestry_data = store.get_content(repo_image.storage.locations, store.image_ancestry_path(uuid))
    full_image_list = json.loads(ancestry_data)

    # Load the image's JSON layer.
    image_json_data = store.get_content(repo_image.storage.locations, store.image_json_path(uuid))
    image_json = json.loads(image_json_data)

    # Calculate a synthetic image ID (deterministic, derived from the tag's image ID).
    synthetic_image_id = hashlib.sha256(tag_image.docker_image_id + ':squash').hexdigest()

    # Create a queue process to generate the data. The queue files will read from the process
    # and send the results to the client and storage.
    def _cleanup():
      # Close any existing DB connection once the process has exited.
      database.close_db_filter(None)

    args = (namespace, repository, tag, synthetic_image_id, image_json, full_image_list)
    queue_process = QueueProcess(_open_stream,
                                 8 * 1024, 10 * 1024 * 1024, # 8K/10M chunk/max
                                 args, finished=_cleanup)

    # Two consumers of the generated stream: one feeds the HTTP response, the
    # other writes the result into storage for future requests.
    client_queue_file = QueueFile(queue_process.create_queue(), 'client')
    storage_queue_file = QueueFile(queue_process.create_queue(), 'storage')

    # Start building.
    queue_process.run()

    # Start the storage saving.
    storage_args = (derived.uuid, derived.locations, storage_queue_file)
    QueueProcess.run_process(_write_synthetic_image_to_storage, storage_args, finished=_cleanup)

    # Close the database handle here for this process before we send the long download.
    database.close_db_filter(None)

    # Return the client's data.
    return send_file(client_queue_file)

  abort(403)

View file

@ -5,6 +5,7 @@ from flask import (abort, redirect, request, url_for, make_response, Response,
Blueprint, send_from_directory, jsonify) Blueprint, send_from_directory, jsonify)
from flask.ext.login import current_user from flask.ext.login import current_user
from urlparse import urlparse from urlparse import urlparse
from health.healthcheck import HealthCheck
from data import model from data import model
from data.model.oauth import DatabaseAuthorizationProvider from data.model.oauth import DatabaseAuthorizationProvider
@ -151,6 +152,20 @@ def v1():
return index('') return index('')
@web.route('/health', methods=['GET'])
@no_cache
def health():
db_healthy = model.check_health()
buildlogs_healthy = build_logs.check_health()
check = HealthCheck.get_check(app.config['HEALTH_CHECKER'][0], app.config['HEALTH_CHECKER'][1])
(data, is_healthy) = check.conduct_healthcheck(db_healthy, buildlogs_healthy)
response = jsonify(dict(data = data, is_healthy = is_healthy))
response.status_code = 200 if is_healthy else 503
return response
@web.route('/status', methods=['GET']) @web.route('/status', methods=['GET'])
@no_cache @no_cache
def status(): def status():
@ -160,6 +175,7 @@ def status():
response = jsonify({ response = jsonify({
'db_healthy': db_healthy, 'db_healthy': db_healthy,
'buildlogs_healthy': buildlogs_healthy, 'buildlogs_healthy': buildlogs_healthy,
'is_testing': app.config['TESTING'],
}) })
response.status_code = 200 if db_healthy and buildlogs_healthy else 503 response.status_code = 200 if db_healthy and buildlogs_healthy else 503

View file

@ -0,0 +1,2 @@
<a href="{{ event_data.homepage }}">Build</a> failed for repository
{{ event_data.repository | repository_reference }} ({{ event_data.build_id }}): {{ event_data.error_message }}

9
events/build_queued.html Normal file
View file

@ -0,0 +1,9 @@
{% if event_data.is_manual and notification_data.performer_data.entity_name %}
{{ notification_data.performer_data.entity_name | user_reference }} queued a
<a href="{{ event_data.homepage }}">build</a>
{% elif event_data.trigger_kind %}
<a href="{{ event_data.homepage }}">Build</a> queued via a {{ event_data.trigger_kind }} trigger
{% else %}
<a href="{{ event_data.homepage }}">Build</a> queued
{% endif %}
for repository {{ event_data.repository | repository_reference }} ({{ event_data.build_id }})

2
events/build_start.html Normal file
View file

@ -0,0 +1,2 @@
<a href="{{ event_data.homepage }}">Build</a> started for repository
{{ event_data.repository | repository_reference }} ({{ event_data.build_id }})

View file

@ -0,0 +1,2 @@
<a href="{{ event_data.homepage }}">Build</a> completed for repository
{{ event_data.repository | repository_reference }} ({{ event_data.build_id }})

12
events/repo_push.html Normal file
View file

@ -0,0 +1,12 @@
{% if notification_data.performer_data.entity_name %}
{{ notification_data.performer_data.entity_name | user_reference }} pushed
{% else %}
Push of
{% endif %}
{% if event_data.updated_tags %}
{{ 'tags' | icon_image }}
{% for tag in event_data.updated_tags %}{%if loop.index > 1 %}, {% endif %}{{ (event_data.repository, tag) | repository_tag_reference }}{% endfor %} in
{% endif %}
repository {{ event_data.repository | repository_reference }}

0
health/__init__.py Normal file
View file

84
health/healthcheck.py Normal file
View file

@ -0,0 +1,84 @@
import boto.rds2
import logging
logger = logging.getLogger(__name__)
class HealthCheck(object):
  """Base class for pluggable instance health checks.

  Concrete checks subclass this, report a unique check_name(), and implement
  conduct_healthcheck(). Instances are created by name via get_check().
  """

  def __init__(self):
    pass

  def conduct_healthcheck(self, db_healthy, buildlogs_healthy):
    """
    Conducts any custom healthcheck work, returning a dict representing the HealthCheck
    output and a boolean indicating whether the instance is healthy.
    """
    raise NotImplementedError

  @classmethod
  def get_check(cls, name, parameters):
    """Instantiate the direct subclass whose check_name() matches `name`,
    passing `parameters` as keyword arguments to its constructor.
    """
    found = next((check_cls for check_cls in cls.__subclasses__()
                  if check_cls.check_name() == name), None)
    if found is None:
      raise Exception('Unknown health check with name %s' % name)
    return found(**parameters)
class LocalHealthCheck(HealthCheck):
  """Simple health check for local/dev deployments: the instance is healthy
  exactly when both the database and the build logs service are healthy.
  """

  def __init__(self):
    pass

  @classmethod
  def check_name(cls):
    return 'LocalHealthCheck'

  def conduct_healthcheck(self, db_healthy, buildlogs_healthy):
    # Report both component states verbatim; overall health is their conjunction.
    status = {
      'db_healthy': db_healthy,
      'buildlogs_healthy': buildlogs_healthy
    }
    return (status, db_healthy and buildlogs_healthy)
class ProductionHealthCheck(HealthCheck):
  """Production health check: when the local DB connection fails, queries AWS RDS
  (via boto.rds2) to decide whether the outage is instance-local or RDS-wide.
  """

  def __init__(self, access_key, secret_key):
    # AWS credentials used for the RDS status query below.
    self.access_key = access_key
    self.secret_key = secret_key

  @classmethod
  def check_name(cls):
    return 'ProductionHealthCheck'

  def conduct_healthcheck(self, db_healthy, buildlogs_healthy):
    data = {
      'db_healthy': db_healthy,
      'buildlogs_healthy': buildlogs_healthy
    }

    # Only report unhealthy if the machine cannot connect to the DB. Redis isn't required for
    # mission critical/high availability operations.
    if not db_healthy:
      # If the database is marked as unhealthy, check the status of RDS directly. If RDS is
      # reporting as available, then the problem is with this instance. Otherwise, the problem is
      # with RDS, and we can keep this machine as 'healthy'.
      is_rds_working = False
      try:
        # NOTE(review): the RDS region is hardcoded to us-east-1 and only the first
        # DB instance's status is inspected — assumes a single-instance deployment
        # in that region; confirm before reuse.
        region = boto.rds2.connect_to_region('us-east-1',
          aws_access_key_id=self.access_key, aws_secret_access_key=self.secret_key)
        response = region.describe_db_instances()['DescribeDBInstancesResponse']
        result = response['DescribeDBInstancesResult']
        instances = result['DBInstances']
        status = instances[0]['DBInstanceStatus']
        is_rds_working = status == 'available'
      except:
        # Any boto/network failure is treated as "RDS not reachable/working".
        logger.exception("Exception while checking RDS status")
        pass

      data['db_available_checked'] = True
      data['db_available_status'] = is_rds_working

      # If RDS is down, then we still report the machine as healthy, so that it can handle
      # requests once RDS comes back up. (Healthy iff RDS itself is NOT working: a working
      # RDS with a failing local connection means this instance is the problem.)
      return (data, not is_rds_working)

    return (data, db_healthy)

View file

@ -3,11 +3,13 @@ import json
import hashlib import hashlib
import random import random
import calendar import calendar
import os
from datetime import datetime, timedelta from datetime import datetime, timedelta
from email.utils import formatdate from email.utils import formatdate
from peewee import (SqliteDatabase, create_model_tables, drop_model_tables, from peewee import (SqliteDatabase, create_model_tables, drop_model_tables,
savepoint_sqlite) savepoint_sqlite, savepoint)
from uuid import UUID
from data.database import * from data.database import *
from data import model from data import model
@ -20,18 +22,6 @@ logger = logging.getLogger(__name__)
SAMPLE_DIFFS = ['test/data/sample/diffs/diffs%s.json' % i SAMPLE_DIFFS = ['test/data/sample/diffs/diffs%s.json' % i
for i in range(1, 10)] for i in range(1, 10)]
IMAGE_UUIDS = ['ab5160d1-8fb4-4022-a135-3c4de7f6ed97',
'4259533e-868d-4db3-9a78-fc24ffc03a2b',
'c2c6dc6e-24d1-4f15-a616-81c41e3e3629',
'8ec59952-8f5a-4fa0-897e-57c3337e1914',
'08a8ab1f-4aaa-4337-88ab-5b5c71a8d492',
'4a71f3db-cbb1-4c3b-858f-1be032b3e875',
'd40d531a-c70c-47f9-bf5b-2a4381db2d60',
'6fe6cebb-52b2-4036-892e-b86d6487a56b',
'e969ff76-e87d-4ea3-8cb3-0db9b5bcb8d9',
'2e3b616b-301f-437c-98ab-37352f444a60',
]
SAMPLE_CMDS = [["/bin/bash"], SAMPLE_CMDS = [["/bin/bash"],
["/bin/sh", "-c", ["/bin/sh", "-c",
"echo \"PasswordAuthentication no\" >> /etc/ssh/sshd_config"], "echo \"PasswordAuthentication no\" >> /etc/ssh/sshd_config"],
@ -45,6 +35,8 @@ SAMPLE_CMDS = [["/bin/bash"],
REFERENCE_DATE = datetime(2013, 6, 23) REFERENCE_DATE = datetime(2013, 6, 23)
TEST_STRIPE_ID = 'cus_2tmnh3PkXQS8NG' TEST_STRIPE_ID = 'cus_2tmnh3PkXQS8NG'
IS_TESTING_REAL_DATABASE = bool(os.environ.get('TEST_DATABASE_URI'))
def __gen_checksum(image_id): def __gen_checksum(image_id):
h = hashlib.md5(image_id) h = hashlib.md5(image_id)
return 'tarsum+sha256:' + h.hexdigest() + h.hexdigest() return 'tarsum+sha256:' + h.hexdigest() + h.hexdigest()
@ -57,6 +49,13 @@ def __gen_image_id(repo, image_num):
return h.hexdigest() + h.hexdigest() return h.hexdigest() + h.hexdigest()
def __gen_image_uuid(repo, image_num):
str_to_hash = "%s/%s/%s" % (repo.namespace_user.username, repo.name, image_num)
h = hashlib.md5(str_to_hash)
return UUID(bytes=h.digest())
global_image_num = [0] global_image_num = [0]
def __create_subtree(repo, structure, creator_username, parent): def __create_subtree(repo, structure, creator_username, parent):
num_nodes, subtrees, last_node_tags = structure num_nodes, subtrees, last_node_tags = structure
@ -71,7 +70,7 @@ def __create_subtree(repo, structure, creator_username, parent):
new_image = model.find_create_or_link_image(docker_image_id, repo, None, {}, 'local_us') new_image = model.find_create_or_link_image(docker_image_id, repo, None, {}, 'local_us')
new_image_locations = new_image.storage.locations new_image_locations = new_image.storage.locations
new_image.storage.uuid = IMAGE_UUIDS[image_num % len(IMAGE_UUIDS)] new_image.storage.uuid = __gen_image_uuid(repo, image_num)
new_image.storage.uploading = False new_image.storage.uploading = False
new_image.storage.checksum = checksum new_image.storage.checksum = checksum
new_image.storage.save() new_image.storage.save()
@ -148,7 +147,7 @@ def setup_database_for_testing(testcase):
# Sanity check to make sure we're not killing our prod db # Sanity check to make sure we're not killing our prod db
db = model.db db = model.db
if not isinstance(model.db.obj, SqliteDatabase): if not IS_TESTING_REAL_DATABASE and not isinstance(model.db.obj, SqliteDatabase):
raise RuntimeError('Attempted to wipe production database!') raise RuntimeError('Attempted to wipe production database!')
global db_initialized_for_testing global db_initialized_for_testing
@ -160,12 +159,18 @@ def setup_database_for_testing(testcase):
initialize_database() initialize_database()
populate_database() populate_database()
# Enable foreign key constraints.
if not IS_TESTING_REAL_DATABASE:
model.db.obj.execute_sql('PRAGMA foreign_keys = ON;')
db_initialized_for_testing = True db_initialized_for_testing = True
# Create a savepoint for the testcase. # Create a savepoint for the testcase.
test_savepoint = savepoint(db) if IS_TESTING_REAL_DATABASE else savepoint_sqlite(db)
global testcases global testcases
testcases[testcase] = {} testcases[testcase] = {}
testcases[testcase]['savepoint'] = savepoint_sqlite(db) testcases[testcase]['savepoint'] = test_savepoint
testcases[testcase]['savepoint'].__enter__() testcases[testcase]['savepoint'].__enter__()
def initialize_database(): def initialize_database():
@ -244,9 +249,13 @@ def initialize_database():
LogEntryKind.create(name='regenerate_robot_token') LogEntryKind.create(name='regenerate_robot_token')
LogEntryKind.create(name='repo_verb')
ImageStorageLocation.create(name='local_eu') ImageStorageLocation.create(name='local_eu')
ImageStorageLocation.create(name='local_us') ImageStorageLocation.create(name='local_us')
ImageStorageTransformation.create(name='squash')
# NOTE: These MUST be copied over to NotificationKind, since every external # NOTE: These MUST be copied over to NotificationKind, since every external
# notification can also generate a Quay.io notification. # notification can also generate a Quay.io notification.
ExternalNotificationEvent.create(name='repo_push') ExternalNotificationEvent.create(name='repo_push')
@ -283,7 +292,7 @@ def wipe_database():
# Sanity check to make sure we're not killing our prod db # Sanity check to make sure we're not killing our prod db
db = model.db db = model.db
if not isinstance(model.db.obj, SqliteDatabase): if not IS_TESTING_REAL_DATABASE and not isinstance(model.db.obj, SqliteDatabase):
raise RuntimeError('Attempted to wipe production database!') raise RuntimeError('Attempted to wipe production database!')
drop_model_tables(all_models, fail_silently=True) drop_model_tables(all_models, fail_silently=True)
@ -551,7 +560,7 @@ if __name__ == '__main__':
log_level = getattr(logging, app.config['LOGGING_LEVEL']) log_level = getattr(logging, app.config['LOGGING_LEVEL'])
logging.basicConfig(level=log_level) logging.basicConfig(level=log_level)
if not isinstance(model.db.obj, SqliteDatabase): if not IS_TESTING_REAL_DATABASE and not isinstance(model.db.obj, SqliteDatabase):
raise RuntimeError('Attempted to initialize production database!') raise RuntimeError('Attempted to initialize production database!')
initialize_database() initialize_database()

13
registry.py Normal file
View file

@ -0,0 +1,13 @@
# Entry point for the registry-protocol application: mounts the Docker v1
# registry endpoints (index, tags, registry) on the shared Flask app under /v1.
import logging
import logging.config

from app import app as application

from endpoints.index import index
from endpoints.tags import tags
from endpoints.registry import registry

# All Docker v1 protocol blueprints are served under the /v1 prefix.
application.register_blueprint(index, url_prefix='/v1')
application.register_blueprint(tags, url_prefix='/v1')
application.register_blueprint(registry, url_prefix='/v1')

View file

@ -18,7 +18,7 @@ paramiko
xhtml2pdf xhtml2pdf
redis redis
hiredis hiredis
docker-py git+https://github.com/devtable/docker-py.git@emptydirs
pygithub pygithub
flask-restful flask-restful
jsonschema jsonschema
@ -36,3 +36,4 @@ psycopg2
pyyaml pyyaml
git+https://github.com/DevTable/aniso8601-fake.git git+https://github.com/DevTable/aniso8601-fake.git
git+https://github.com/DevTable/anunidecode.git git+https://github.com/DevTable/anunidecode.git
gipc

View file

@ -1,58 +1,57 @@
APScheduler==3.0.0 APScheduler==3.0.0
Flask==0.10.1 Flask==0.10.1
Flask-Login==0.2.11 Flask-Login==0.2.11
Flask-Mail==0.9.0 Flask-Mail==0.9.1
Flask-Principal==0.4.0 Flask-Principal==0.4.0
Flask-RESTful==0.2.12 Flask-RESTful==0.2.12
Jinja2==2.7.3 Jinja2==2.7.3
LogentriesLogger==0.2.1 LogentriesLogger==0.2.1
Mako==1.0.0 Mako==1.0.0
MarkupSafe==0.23 MarkupSafe==0.23
Pillow==2.5.1 Pillow==2.6.0
PyGithub==1.25.0 PyGithub==1.25.1
PyMySQL==0.6.2 PyMySQL==0.6.2
PyPDF2==1.22 PyPDF2==1.23
PyYAML==3.11 PyYAML==3.11
SQLAlchemy==0.9.7 SQLAlchemy==0.9.7
Werkzeug==0.9.6 Werkzeug==0.9.6
alembic==0.6.5
git+https://github.com/DevTable/aniso8601-fake.git git+https://github.com/DevTable/aniso8601-fake.git
git+https://github.com/DevTable/anunidecode.git git+https://github.com/DevTable/anunidecode.git
argparse==1.2.1 alembic==0.6.7
backports.ssl-match-hostname==3.4.0.2
beautifulsoup4==4.3.2 beautifulsoup4==4.3.2
blinker==1.3 blinker==1.3
boto==2.32.0 boto==2.32.1
coverage==3.7.1 git+https://github.com/devtable/docker-py.git@emptydirs
docker-py==0.4.0
ecdsa==0.11 ecdsa==0.11
futures==2.1.6 futures==2.2.0
gevent==1.0.1 gevent==1.0.1
greenlet==0.4.2 gipc==0.4.0
greenlet==0.4.4
gunicorn==18.0 gunicorn==18.0
hiredis==0.1.4 hiredis==0.1.5
html5lib==0.999 html5lib==0.999
itsdangerous==0.24 itsdangerous==0.24
jsonschema==2.3.0 jsonschema==2.4.0
marisa-trie==0.6 marisa-trie==0.6
mixpanel-py==3.1.3
mock==1.0.1
git+https://github.com/NateFerrero/oauth2lib.git git+https://github.com/NateFerrero/oauth2lib.git
paramiko==1.14.0 mixpanel-py==3.2.0
peewee==2.2.5 paramiko==1.15.1
peewee==2.3.3
psycopg2==2.5.4
py-bcrypt==0.4 py-bcrypt==0.4
pycrypto==2.6.1 pycrypto==2.6.1
python-dateutil==2.2 python-dateutil==2.2
python-ldap==2.4.15 python-ldap==2.4.17
python-magic==0.4.6 python-magic==0.4.6
pytz==2014.4 pytz==2014.7
psycopg2==2.5.3
raven==5.0.0 raven==5.0.0
redis==2.10.1 redis==2.10.3
reportlab==2.7 reportlab==2.7
requests==2.3.0 requests==2.4.3
six==1.7.3 six==1.8.0
stripe==1.19.0 stripe==1.19.0
tzlocal==1.1.1 tzlocal==1.1.1
websocket-client==0.11.0 websocket-client==0.18.0
wsgiref==0.1.2 wsgiref==0.1.2
xhtml2pdf==0.0.6 xhtml2pdf==0.0.6

View file

@ -19,6 +19,23 @@
} }
} }
.scrollable-menu {
max-height: 400px;
overflow: auto;
}
.dropdown.input-group-addon {
padding: 0px;
border: 0px;
background-color: transparent;
text-align: left;
}
.dropdown.input-group-addon .dropdown-toggle {
border-left: 0px;
border-top-left-radius: 0px;
border-bottom-left-radius: 0px;
}
#quay-logo { #quay-logo {
width: 100px; width: 100px;
@ -2215,37 +2232,57 @@ p.editable:hover i {
font-size: 0.8em; font-size: 0.8em;
position: relative; position: relative;
margin-top: 30px; margin-top: 30px;
margin-right: 26px;
} }
.repo .pull-container { .repo .pull-container {
display: inline-block; display: inline-block;
width: 300px; width: 460px;
margin-left: 10px; margin-left: 10px;
margin-right: 10px; margin-right: 10px;
vertical-align: middle; vertical-align: middle;
position: relative;
} }
.repo .pull-container input { .repo .pull-container .pull-selector {
cursor: default;
background: white;
color: #666;
padding: 4px;
border: 1px solid #ddd;
width: 300px;
}
.repo-image-view .id-container {
display: inline-block; display: inline-block;
margin-top: 10px; width: 114px;
font-size: 14px;
height: 36px;
vertical-align: top;
border: 1px solid #ddd;
margin-right: -3px;
background: #f8f8f8;
outline: none;
border-top-left-radius: 4px;
border-bottom-left-radius: 4px;
} }
.repo-image-view .id-container input { .repo .pull-container .pull-selector i {
background: #fefefe; display: inline-block;
margin-right: 6px;
} }
.repo-image-view .id-container .input-group {
width: 542px; .repo .pull-container .copy-box {
width: 340px;
display: inline-block;
}
.repo .pull-container .copy-box .copy-container {
border-top-left-radius: 0px !important;
border-bottom-left-radius: 0px !important;
border-left: 0px;
}
.repo .pull-container .dropdown-menu li i.fa {
text-align: center;
width: 12px;
display: inline-block;
}
.repo .pull-container sup {
margin-left: 4px;
color: red;
} }
.repo-image-view #clipboardCopied { .repo-image-view #clipboardCopied {
@ -2281,25 +2318,45 @@ p.editable:hover i {
position: relative; position: relative;
} }
.copy-box-element.disabled .input-group-addon { .copy-box-element .copy-container {
display: none; border-radius: 4px !important;
border: 1px solid #ddd;
position: relative;
}
.copy-box-element input {
border: 0px;
padding-right: 32px;
}
.copy-box-element .copy-container .copy-icon {
position: absolute;
top: 8px;
right: 10px;
display: inline-block;
color: #ddd;
font-size: 16px;
cursor: pointer;
transition: color 0.5s ease-in-out;
}
.copy-box-element .copy-container .copy-icon.zeroclipboard-is-hover {
color: #444;
} }
.copy-box-element.disabled input { .copy-box-element.disabled input {
border-radius: 4px !important; margin-right: 0px;
}
.copy-box-element.disabled .copy-icon {
display: none;
} }
.global-zeroclipboard-container embed { .global-zeroclipboard-container embed {
cursor: pointer; cursor: pointer;
} }
#copyClipboard.zeroclipboard-is-hover, .copy-box-element .zeroclipboard-is-hover { .copy-box-element .hovering {
background: #428bca;
color: white;
cursor: pointer !important;
}
#clipboardCopied.hovering, .copy-box-element .hovering {
position: absolute; position: absolute;
right: 0px; right: 0px;
top: 40px; top: 40px;
@ -2307,16 +2364,11 @@ p.editable:hover i {
z-index: 100; z-index: 100;
} }
.copy-box-element .id-container {
display: inline-block;
vertical-align: middle;
}
.copy-box-element input { .copy-box-element input {
background-color: white !important; background-color: white !important;
} }
#clipboardCopied, .clipboard-copied-message { .clipboard-copied-message {
font-size: 0.8em; font-size: 0.8em;
display: inline-block; display: inline-block;
margin-right: 10px; margin-right: 10px;
@ -2327,7 +2379,7 @@ p.editable:hover i {
border-radius: 4px; border-radius: 4px;
} }
#clipboardCopied.animated, .clipboard-copied-message { .clipboard-copied-message {
-webkit-animation: fadeOut 4s ease-in-out 0s 1 forwards; -webkit-animation: fadeOut 4s ease-in-out 0s 1 forwards;
-moz-animation: fadeOut 4s ease-in-out 0s 1 forwards; -moz-animation: fadeOut 4s ease-in-out 0s 1 forwards;
-ms-animation: fadeOut 4s ease-in-out 0s 1 forwards; -ms-animation: fadeOut 4s ease-in-out 0s 1 forwards;
@ -3079,38 +3131,38 @@ p.editable:hover i {
stroke-width: 1.5px; stroke-width: 1.5px;
} }
.usage-chart { .usage-chart-element {
display: inline-block; display: inline-block;
vertical-align: middle; vertical-align: middle;
width: 200px; width: 200px;
height: 200px; height: 200px;
} }
.usage-chart .count-text { .usage-chart-element .count-text {
font-size: 22px; font-size: 22px;
} }
.usage-chart.limit-at path.arc-0 { .usage-chart-element.limit-at path.arc-0 {
fill: #c09853; fill: #c09853;
} }
.usage-chart.limit-over path.arc-0 { .usage-chart-element.limit-over path.arc-0 {
fill: #b94a48; fill: #b94a48;
} }
.usage-chart.limit-near path.arc-0 { .usage-chart-element.limit-near path.arc-0 {
fill: #468847; fill: #468847;
} }
.usage-chart.limit-over path.arc-1 { .usage-chart-element.limit-over path.arc-1 {
fill: #fcf8e3; fill: #fcf8e3;
} }
.usage-chart.limit-at path.arc-1 { .usage-chart-element.limit-at path.arc-1 {
fill: #f2dede; fill: #f2dede;
} }
.usage-chart.limit-near path.arc-1 { .usage-chart-element.limit-near path.arc-1 {
fill: #dff0d8; fill: #dff0d8;
} }
@ -3939,7 +3991,7 @@ pre.command:before {
color: #00b0ed; color: #00b0ed;
} }
.contact-options .option-phone .fa-circle { .contact-options .option-tel .fa-circle {
color: #1dd924; color: #1dd924;
} }
@ -3947,10 +3999,14 @@ pre.command:before {
color: #e52f00; color: #e52f00;
} }
.contact-options .option-email .fa-circle { .contact-options .option-mailto .fa-circle {
color: #1b72f1; color: #1b72f1;
} }
.contact-options .option-url .fa-circle {
color: #F1A51B;
}
.about-us .row { .about-us .row {
margin-bottom: 30px; margin-bottom: 30px;
} }
@ -4070,6 +4126,44 @@ pre.command:before {
border-bottom-left-radius: 0px; border-bottom-left-radius: 0px;
} }
.trigger-setup-github-element .ref-reference {
color: #ccc;
}
.trigger-setup-github-element .ref-reference span {
cursor: pointer;
text-decoration: line-through;
}
.trigger-setup-github-element .ref-reference:hover {
color: #3276b1;
}
.trigger-setup-github-element .ref-reference:hover span {
text-decoration: none;
}
.trigger-setup-github-element .ref-reference.match {
color: black;
}
.trigger-setup-github-element .ref-reference.match span {
text-decoration: none;
cursor: default;
}
.trigger-setup-github-element .ref-filter {
white-space: nowrap;
}
.trigger-setup-github-element .ref-filter span {
display: inline-block;
}
.trigger-setup-github-element .selected-info {
margin-bottom: 20px;
}
.trigger-setup-github-element .github-org-icon { .trigger-setup-github-element .github-org-icon {
width: 20px; width: 20px;
margin-right: 8px; margin-right: 8px;
@ -4085,6 +4179,63 @@ pre.command:before {
padding-left: 6px; padding-left: 6px;
} }
.trigger-setup-github-element .matching-refs {
margin: 0px;
padding: 0px;
margin-left: 10px;
display: inline-block;
}
.trigger-setup-github-element .ref-matches {
padding-left: 70px;
position: relative;
margin-bottom: 10px;
}
.trigger-setup-github-element .ref-matches .kind {
font-weight: bold;
position: absolute;
top: 0px;
left: 0px;
}
.trigger-setup-github-element .matching-refs.tags li:before {
content: "\f02b";
font-family: FontAwesome;
}
.trigger-setup-github-element .matching-refs.branches li:before {
content: "\f126";
font-family: FontAwesome;
}
.trigger-setup-github-element .matching-refs li {
list-style: none;
display: inline-block;
margin-left: 10px;
}
.setup-trigger-directive-element .dockerfile-found-content {
margin-left: 32px;
}
.setup-trigger-directive-element .dockerfile-found-content:before {
content: "\f071";
font-family: FontAwesome;
color: rgb(255, 194, 0);
position: absolute;
top: 0px;
left: 0px;
font-size: 20px;
}
.setup-trigger-directive-element .dockerfile-found {
position: relative;
margin-bottom: 16px;
padding-bottom: 16px;
border-bottom: 1px solid #eee;
}
.slideinout { .slideinout {
-webkit-transition:0.5s all; -webkit-transition:0.5s all;
transition:0.5s linear all; transition:0.5s linear all;
@ -4092,7 +4243,7 @@ pre.command:before {
position: relative; position: relative;
height: 75px; height: 32px;
opacity: 1; opacity: 1;
} }
@ -4234,11 +4385,14 @@ pre.command:before {
} }
.trigger-pull-credentials { .trigger-pull-credentials {
margin-top: 4px;
padding-left: 26px; padding-left: 26px;
font-size: 12px; font-size: 12px;
} }
.trigger-pull-credentials .entity-reference {
margin-left: 10px;
}
.trigger-pull-credentials .context-tooltip { .trigger-pull-credentials .context-tooltip {
color: gray; color: gray;
margin-right: 4px; margin-right: 4px;
@ -4246,7 +4400,8 @@ pre.command:before {
.trigger-description .trigger-description-subtitle { .trigger-description .trigger-description-subtitle {
display: inline-block; display: inline-block;
margin-right: 34px; width: 100px;
margin-bottom: 4px;
} }
.trigger-option-section:not(:first-child) { .trigger-option-section:not(:first-child) {

View file

@ -1,9 +1,12 @@
<div class="copy-box-element" ng-class="disabled ? 'disabled' : ''"> <div class="copy-box-element" ng-class="disabled ? 'disabled' : ''">
<div class="id-container"> <div class="id-container">
<div class="input-group"> <div class="copy-container">
<input type="text" class="form-control" value="{{ value }}" readonly> <input type="text" class="form-control" value="{{ value }}" readonly>
<span class="input-group-addon" data-title="Copy to Clipboard"> <span class="copy-icon" data-title="Copy to Clipboard"
<i class="fa fa-copy"></i> data-container="body"
data-placement="bottom"
bs-tooltip>
<i class="fa fa-clipboard"></i>
</span> </span>
</div> </div>
</div> </div>

View file

@ -2,8 +2,15 @@
<span ng-if="provider == 'github'"> <span ng-if="provider == 'github'">
<a href="javascript:void(0)" class="btn btn-primary btn-block" quay-require="['GITHUB_LOGIN']" ng-click="startSignin('github')" style="margin-bottom: 10px" ng-disabled="signingIn"> <a href="javascript:void(0)" class="btn btn-primary btn-block" quay-require="['GITHUB_LOGIN']" ng-click="startSignin('github')" style="margin-bottom: 10px" ng-disabled="signingIn">
<i class="fa fa-github fa-lg"></i> <i class="fa fa-github fa-lg"></i>
<span ng-if="action != 'attach'">Sign In with GitHub</span> <span ng-if="action != 'attach'">
<span ng-if="action == 'attach'">Attach to GitHub Account</span> Sign In with GitHub
<span ng-if="isEnterprise('github')">Enterprise</span>
</span>
<span ng-if="action == 'attach'">
Attach to GitHub
<span ng-if="isEnterprise('github')">Enterprise</span>
Account
</span>
</a> </a>
</span> </span>

View file

@ -24,10 +24,11 @@
</div> </div>
<!-- Chart --> <!-- Chart -->
<div> <div class="usage-chart" total="subscribedPlan.privateRepos || 0"
<div id="repository-usage-chart" class="usage-chart limit-{{limit}}"></div> current="subscription.usedPrivateRepos || 0"
<span class="usage-caption" ng-show="chart">Repository Usage</span> limit="limit"
</div> usage-title="Repository Usage"
ng-show="!planLoading"></div>
<!-- Plans Table --> <!-- Plans Table -->
<table class="table table-hover plans-list-table" ng-show="!planLoading"> <table class="table table-hover plans-list-table" ng-show="!planLoading">

View file

@ -3,7 +3,7 @@
<div class="container" ng-show="!loading"> <div class="container" ng-show="!loading">
<div class="alert alert-info"> <div class="alert alert-info">
Default permissions provide a means of specifying <span class="context-tooltip" data-title="By default, all repositories have the creating user added as an 'Admin'" bs-tooltip="tooltip.title">additional</span> permissions that should be granted automatically to a repository. Default permissions provide a means of specifying <span class="context-tooltip" data-title="By default, all repositories have the creating user added as an 'Admin'" bs-tooltip="tooltip.title">additional</span> permissions that should be granted automatically to a repository <strong>when it is created</strong>.
</div> </div>
<div class="side-controls"> <div class="side-controls">

View file

@ -8,102 +8,110 @@
<button type="button" class="close" data-dismiss="modal" aria-hidden="true">&times;</button> <button type="button" class="close" data-dismiss="modal" aria-hidden="true">&times;</button>
<h4 class="modal-title">Setup new build trigger</h4> <h4 class="modal-title">Setup new build trigger</h4>
</div> </div>
<div class="modal-body" ng-show="activating"> <div class="modal-body" ng-show="currentView == 'activating'">
<span class="quay-spinner"></span> Setting up trigger... <span class="quay-spinner"></span> Setting up trigger...
</div> </div>
<div class="modal-body" ng-show="!activating"> <div class="modal-body" ng-show="currentView != 'activating'">
<!-- Trigger-specific setup --> <!-- Trigger-specific setup -->
<div class="trigger-description-element trigger-option-section" ng-switch on="trigger.service"> <div class="trigger-description-element trigger-option-section" ng-switch on="trigger.service">
<div ng-switch-when="github"> <div ng-switch-when="github">
<div class="trigger-setup-github" repository="repository" trigger="trigger" <div class="trigger-setup-github" repository="repository" trigger="trigger"
next-step-counter="nextStepCounter" current-step-valid="state.stepValid"
analyze="checkAnalyze(isValid)"></div> analyze="checkAnalyze(isValid)"></div>
</div> </div>
</div> </div>
<!-- Loading pull information -->
<div ng-show="currentView == 'analyzing'">
<span class="quay-spinner"></span> Checking pull credential requirements...
</div>
<!-- Pull information --> <!-- Pull information -->
<div class="trigger-option-section" ng-show="showPullRequirements"> <div class="trigger-option-section" ng-show="currentView == 'analyzed'">
<div ng-show="!pullRequirements">
<span class="quay-spinner"></span> Checking pull credential requirements...
</div>
<div ng-show="pullRequirements"> <!-- Messaging -->
<div class="alert alert-danger" ng-if="pullRequirements.status == 'error'"> <div class="alert alert-danger" ng-if="pullInfo.analysis.status == 'error'">
{{ pullRequirements.message }} {{ pullInfo.analysis.message }}
</div> </div>
<div class="alert alert-warning" ng-if="pullRequirements.status == 'warning'"> <div class="alert alert-warning" ng-if="pullInfo.analysis.status == 'warning'">
{{ pullRequirements.message }} {{ pullRequirements.message }}
</div> </div>
<div class="alert alert-success" ng-if="pullRequirements.status == 'analyzed' && pullRequirements.is_public === false"> <div class="dockerfile-found" ng-if="pullInfo.analysis.is_public === false">
The <div class="dockerfile-found-content">
<a href="{{ pullRequirements.dockerfile_url }}" ng-if="pullRequirements.dockerfile_url" target="_blank">Dockerfile found</a> A robot account is <strong>required</strong> for this build trigger because
<span ng-if="!pullRequirements.dockerfile_url">Dockerfile found</span>
depends on the private <span class="registry-name"></span> repository the
<a href="/repository/{{ pullRequirements.namespace }}/{{ pullRequirements.name }}" target="_blank"> <a href="{{ pullInfo.analysis.dockerfile_url }}" ng-if="pullInfo.analysis.dockerfile_url" target="_blank">
{{ pullRequirements.namespace }}/{{ pullRequirements.name }} Dockerfile found
</a> which requires </a>
a robot account for pull access, because it is marked <strong>private</strong>. <span ng-if="!pullInfo.analysis.dockerfile_url">Dockerfile found</span>
pulls from the private <span class="registry-name"></span> repository
<a href="/repository/{{ pullInfo.analysis.namespace }}/{{ pullInfo.analysis.name }}" target="_blank">
{{ pullInfo.analysis.namespace }}/{{ pullInfo.analysis.name }}
</a>
</div> </div>
</div> </div>
<div ng-show="pullRequirements"> <div style="margin-bottom: 12px">Please select the credentials to use when pulling the base image:</div>
<table style="width: 100%;"> <div ng-if="!isNamespaceAdmin(repository.namespace)" style="color: #aaa;">
<tr> <strong>Note:</strong> In order to set pull credentials for a build trigger, you must be an
<td style="width: 162px"> Administrator of the namespace <strong>{{ repository.namespace }}</strong>
<span class="context-tooltip" data-title="The credentials given to 'docker pull' in the builder for pulling images" </div>
style="margin-bottom: 10px" bs-tooltip>
docker pull Credentials:
</span>
</td>
<td>
<div ng-if="!isNamespaceAdmin(repository.namespace)" style="color: #aaa;">
In order to set pull credentials for a build trigger, you must be an Administrator of the namespace <strong>{{ repository.namespace }}</strong>
</div>
<div class="btn-group btn-group-sm" ng-if="isNamespaceAdmin(repository.namespace)">
<button type="button" class="btn btn-default"
ng-class="publicPull ? 'active btn-info' : ''" ng-click="setPublicPull(true)">None</button>
<button type="button" class="btn btn-default"
ng-class="publicPull ? '' : 'active btn-info'" ng-click="setPublicPull(false)">
<i class="fa fa-wrench"></i>
Robot account
</button>
</div>
</td>
</tr>
</table>
<table style="width: 100%;"> <!-- Namespace admin -->
<tr ng-show="!publicPull"> <div ng-show="isNamespaceAdmin(repository.namespace)">
<td> <!-- Select credentials -->
<div class="entity-search" namespace="repository.namespace" <div class="btn-group btn-group-sm">
placeholder="'Select robot account for pulling...'" <button type="button" class="btn btn-default"
current-entity="pullEntity" ng-class="pullInfo.is_public ? 'active btn-info' : ''"
allowed-entities="['robot']"></div> ng-click="pullInfo.is_public = true">
None
</button>
<button type="button" class="btn btn-default"
ng-class="pullInfo.is_public ? '' : 'active btn-info'"
ng-click="pullInfo.is_public = false">
<i class="fa fa-wrench"></i>
Robot account
</button>
</div>
<div class="alert alert-info" ng-if="pullRequirements.status == 'analyzed' && pullRequirements.robots.length" <!-- Robot Select -->
style="margin-top: 20px; margin-bottom: 0px;"> <div ng-show="!pullInfo.is_public" style="margin-top: 10px">
Note: We've automatically selected robot account <div class="entity-search" namespace="repository.namespace"
<span class="entity-reference" entity="pullRequirements.robots[0]"></span>, since it has access to the private placeholder="'Select robot account for pulling...'"
repository. current-entity="pullInfo.pull_entity"
</div> allowed-entities="['robot']"></div>
<div class="alert alert-warning"
ng-if="pullRequirements.status == 'analyzed' && !pullRequirements.robots.length && pullRequirements.name" <div ng-if="pullInfo.analysis.robots.length" style="margin-top: 20px; margin-bottom: 0px;">
style="margin-top: 20px; margin-bottom: 0px;"> <strong>Note</strong>: We've automatically selected robot account
Note: No robot account currently has access to the private repository. Please create one and/or assign access in the <span class="entity-reference" entity="pullInfo.analysis.robots[0]"></span>,
<a href="/repository/{{ pullRequirements.namespace }}/{{ pullRequirements.name }}/admin" target="_blank">repository's since it has access to the private repository.
admin panel</a>. </div>
</div> <div ng-if="!pullInfo.analysis.robots.length && pullInfo.analysis.name"
</td> style="margin-top: 20px; margin-bottom: 0px;">
</tr> <strong>Note</strong>: No robot account currently has access to the private repository. Please create one and/or assign access in the
</table> <a href="/repository/{{ pullInfo.analysis.namespace }}/{{ pullInfo.analysis.name }}/admin" target="_blank">
repository's admin panel.
</a>
</div>
</div>
</div> </div>
</div> </div>
</div> </div>
<div class="modal-footer"> <div class="modal-footer">
<button type="button" class="btn btn-primary" ng-disabled="!state.stepValid"
ng-click="nextStepCounter = nextStepCounter + 1"
ng-show="currentView == 'config'">Next</button>
<button type="button" class="btn btn-primary" <button type="button" class="btn btn-primary"
ng-disabled="!trigger.$ready || (!publicPull && !pullEntity) || checkingPullRequirements || activating" ng-disabled="!trigger.$ready || (!pullInfo['is_public'] && !pullInfo['pull_entity'])"
ng-click="activate()">Finished</button> ng-click="activate()"
ng-show="currentView == 'analyzed'">Create Trigger</button>
<button type="button" class="btn btn-default" data-dismiss="modal">Cancel</button> <button type="button" class="btn btn-default" data-dismiss="modal">Cancel</button>
</div> </div>
</div><!-- /.modal-content --> </div><!-- /.modal-content -->

View file

@ -0,0 +1,9 @@
<span class="step-view-step-content">
<span ng-show="!loading">
<span ng-transclude></span>
</span>
<span ng-show="loading">
<span class="quay-spinner"></span>
{{ loadMessage }}
</span>
</span>

View file

@ -0,0 +1,3 @@
<div class="step-view-element">
<div class="transcluded" ng-transclude>
</div>

View file

@ -2,19 +2,18 @@
<span ng-switch-when="github"> <span ng-switch-when="github">
<i class="fa fa-github fa-lg" style="margin-right: 6px" data-title="GitHub" bs-tooltip="tooltip.title"></i> <i class="fa fa-github fa-lg" style="margin-right: 6px" data-title="GitHub" bs-tooltip="tooltip.title"></i>
Push to GitHub repository <a href="https://github.com/{{ trigger.config.build_source }}" target="_new">{{ trigger.config.build_source }}</a> Push to GitHub repository <a href="https://github.com/{{ trigger.config.build_source }}" target="_new">{{ trigger.config.build_source }}</a>
<div style="margin-top: 4px; margin-left: 26px; font-size: 12px; color: gray;" ng-if="trigger.config.subdir"> <div style="margin-top: 4px; margin-left: 26px; font-size: 12px; color: gray;" ng-if="!short">
<span>Dockerfile: <div>
<a href="https://github.com/{{ trigger.config.build_source }}/tree/{{ trigger.config.master_branch || 'master' }}/{{ trigger.config.subdir }}/Dockerfile" target="_blank"> <span class="trigger-description-subtitle">Branches/Tags:</span>
//{{ trigger.config.subdir }}/Dockerfile <span ng-if="trigger.config.branchtag_regex">Matching Regular Expression {{ trigger.config.branchtag_regex }}</span>
</a> <span ng-if="!trigger.config.branchtag_regex">(All Branches and Tags)</span>
</span> </div>
</div>
<div style="margin-top: 4px; margin-left: 26px; font-size: 12px; color: gray;" ng-if="!trigger.config.subdir && !short"> <div>
<span><span class="trigger-description-subtitle">Dockerfile:</span> <span class="trigger-description-subtitle">Dockerfile:</span>
<a href="https://github.com/{{ trigger.config.build_source }}/tree/{{ trigger.config.master_branch || 'master' }}/Dockerfile" target="_blank"> <span ng-if="trigger.config.subdir">//{{ trigger.config.subdir}}/Dockerfile</span>
//Dockerfile <span ng-if="!trigger.config.subdir">//Dockerfile</span>
</a> </div>
</span>
</div> </div>
</span> </span>
<span ng-switch-default> <span ng-switch-default>

View file

@ -1,62 +1,192 @@
<div class="trigger-setup-github-element"> <div class="trigger-setup-github-element">
<div ng-show="loading"> <!-- Current selected info -->
<span class="quay-spinner" style="vertical-align: middle; margin-right: 10px"></span> <div class="selected-info" ng-show="nextStepCounter > 0">
Loading Repository List <table style="width: 100%;">
<tr ng-show="state.currentRepo && nextStepCounter > 0">
<td width="200px">
Repository:
</td>
<td>
<div class="current-repo">
<img class="dropdown-select-icon github-org-icon"
ng-src="{{ state.currentRepo.avatar_url ? state.currentRepo.avatar_url : '//www.gravatar.com/avatar/' }}">
<a ng-href="https://github.com/{{ state.currentRepo.repo }}" target="_blank">{{ state.currentRepo.repo }}</a>
</div>
</td>
</tr>
<tr ng-show="nextStepCounter > 1">
<td>
Branches and Tags:
</td>
<td>
<div class="ref-filter">
<span ng-if="!state.hasBranchTagFilter">(Build All)</span>
<span ng-if="state.hasBranchTagFilter">Regular Expression: <code>{{ state.branchTagFilter }}</code></span>
</div>
</td>
</tr>
<tr ng-show="nextStepCounter > 2">
<td>
Dockerfile Location:
</td>
<td>
<div class="dockerfile-location">
<i class="fa fa-folder fa-lg"></i> {{ state.currentLocation || '(Repository Root)' }}
</div>
</td>
</tr>
</table>
</div> </div>
<div ng-show="!loading">
<div style="margin-bottom: 18px">Please choose the GitHub repository that will trigger the build:</div>
<!-- Step view -->
<div class="step-view" next-step-counter="nextStepCounter" current-step-valid="currentStepValid"
steps-completed="stepsCompleted()">
<!-- Repository select --> <!-- Repository select -->
<div class="dropdown-select" placeholder="'Select a repository'" selected-item="currentRepo" <div class="step-view-step" complete-condition="state.currentRepo" load-callback="loadRepositories(callback)"
lookahead-items="repoLookahead"> load-message="Loading Repositories">
<!-- Icons --> <div style="margin-bottom: 12px">Please choose the GitHub repository that will trigger the build:</div>
<i class="dropdown-select-icon none-icon fa fa-github fa-lg"></i> <div class="dropdown-select" placeholder="'Enter or select a repository'" selected-item="state.currentRepo"
<img class="dropdown-select-icon github-org-icon" ng-src="{{ currentRepo.avatar_url ? currentRepo.avatar_url : '//www.gravatar.com/avatar/' }}"> lookahead-items="repoLookahead" allow-custom-input="true">
<!-- Icons -->
<i class="dropdown-select-icon none-icon fa fa-github fa-lg"></i>
<img class="dropdown-select-icon github-org-icon"
ng-src="{{ state.currentRepo.avatar_url ? state.currentRepo.avatar_url : '//www.gravatar.com/avatar/' }}">
<!-- Dropdown menu --> <!-- Dropdown menu -->
<ul class="dropdown-select-menu" role="menu"> <ul class="dropdown-select-menu scrollable-menu" role="menu">
<li ng-repeat-start="org in orgs" role="presentation" class="dropdown-header github-org-header"> <li ng-repeat-start="org in orgs" role="presentation" class="dropdown-header github-org-header">
<img ng-src="{{ org.info.avatar_url }}" class="github-org-icon">{{ org.info.name }} <img ng-src="{{ org.info.avatar_url }}" class="github-org-icon">{{ org.info.name }}
</li> </li>
<li ng-repeat="repo in org.repos" class="github-repo-listing"> <li ng-repeat="repo in org.repos" class="github-repo-listing">
<a href="javascript:void(0)" ng-click="selectRepo(repo, org)"><i class="fa fa-github fa-lg"></i> {{ repo }}</a> <a href="javascript:void(0)" ng-click="selectRepo(repo, org)"><i class="fa fa-github fa-lg"></i> {{ repo }}</a>
</li> </li>
<li role="presentation" class="divider" ng-repeat-end ng-show="$index < orgs.length - 1"></li> <li role="presentation" class="divider" ng-repeat-end ng-show="$index < orgs.length - 1"></li>
</ul> </ul>
</div>
</div>
<!-- Branch/Tag filter/select -->
<div class="step-view-step" complete-condition="!state.hasBranchTagFilter || state.branchTagFilter"
load-callback="loadBranchesAndTags(callback)"
load-message="Loading Branches and Tags">
<div style="margin-bottom: 12px">Please choose the branches and tags to which this trigger will apply:</div>
<div style="margin-left: 20px;">
<div class="btn-group btn-group-sm" style="margin-bottom: 12px">
<button type="button" class="btn btn-default"
ng-class="state.hasBranchTagFilter ? '' : 'active btn-info'" ng-click="state.hasBranchTagFilter = false">
All Branches and Tags
</button>
<button type="button" class="btn btn-default"
ng-class="state.hasBranchTagFilter ? 'active btn-info' : ''" ng-click="state.hasBranchTagFilter = true">
Matching Regular Expression
</button>
</div>
<div ng-show="state.hasBranchTagFilter" style="margin-top: 10px;">
<form>
<div class="form-group">
<div class="input-group">
<input class="form-control" type="text" ng-model="state.branchTagFilter"
placeholder="(Regular expression. Examples: heads/branchname, tags/tagname)" required>
<div class="dropdown input-group-addon">
<button class="btn btn-default dropdown-toggle" type="button" data-toggle="dropdown">
<span class="caret"></span>
</button>
<ul class="dropdown-menu pull-right">
<li><a href="javascript:void(0)" ng-click="state.branchTagFilter = 'heads/.+'">
<i class="fa fa-code-fork"></i>All Branches</a>
</li>
<li><a href="javascript:void(0)" ng-click="state.branchTagFilter = 'tags/.+'">
<i class="fa fa-tag"></i>All Tags</a>
</li>
</ul>
</div>
</div>
</div>
</form>
<div style="margin-top: 10px">
<div class="ref-matches" ng-if="branchNames.length">
<span class="kind">Branches:</span>
<ul class="matching-refs branches">
<li ng-repeat="branchName in branchNames | limitTo:20"
class="ref-reference"
ng-class="isMatching('heads', branchName, state.branchTagFilter) ? 'match' : 'not-match'">
<span ng-click="addRef('heads', branchName)" target="_blank">
{{ branchName }}
</span>
</li>
</ul>
<span ng-if="branchNames.length > 20">...</span>
</div>
<div class="ref-matches" ng-if="tagNames.length" style="margin-bottom: -20px">
<span class="kind">Tags:</span>
<ul class="matching-refs tags">
<li ng-repeat="tagName in tagNames | limitTo:20"
class="ref-reference"
ng-class="isMatching('tags', tagName, state.branchTagFilter) ? 'match' : 'not-match'">
<span ng-click="addRef('tags', tagName)" target="_blank">
{{ tagName }}
</span>
</li>
</ul>
<span ng-if="tagNames.length > 20">...</span>
</div>
<div ng-if="state.branchTagFilter && !branchNames.length"
style="margin-top: 10px">
<strong>Warning:</strong> No branches found
</div>
</div>
</div>
</div>
</div> </div>
<!-- Dockerfile folder select --> <!-- Dockerfile folder select -->
<div class="slideinout" ng-show="currentRepo"> <div class="step-view-step" complete-condition="trigger.$ready" load-callback="loadLocations(callback)"
<div style="margin-top: 10px">Dockerfile Location:</div> load-message="Loading Folders">
<div class="dropdown-select" placeholder="'(Repository Root)'" selected-item="currentLocation"
lookahead-items="locations" handle-input="handleLocationInput(input)" handle-item-selected="handleLocationSelected(datum)" <div style="margin-bottom: 12px">Dockerfile Location:</div>
<div class="dropdown-select" placeholder="'(Repository Root)'" selected-item="state.currentLocation"
lookahead-items="locations" handle-input="handleLocationInput(input)"
handle-item-selected="handleLocationSelected(datum)"
allow-custom-input="true"> allow-custom-input="true">
<!-- Icons --> <!-- Icons -->
<i class="dropdown-select-icon none-icon fa fa-folder-o fa-lg" ng-show="isInvalidLocation"></i> <i class="dropdown-select-icon none-icon fa fa-folder-o fa-lg" ng-show="state.isInvalidLocation"></i>
<i class="dropdown-select-icon none-icon fa fa-folder fa-lg" style="color: black;" ng-show="!isInvalidLocation"></i> <i class="dropdown-select-icon none-icon fa fa-folder fa-lg" style="color: black;" ng-show="!state.isInvalidLocation"></i>
<i class="dropdown-select-icon fa fa-folder fa-lg"></i> <i class="dropdown-select-icon fa fa-folder fa-lg"></i>
<!-- Dropdown menu --> <!-- Dropdown menu -->
<ul class="dropdown-select-menu" role="menu"> <ul class="dropdown-select-menu" role="menu">
<li ng-repeat="location in locations"> <li ng-repeat="location in locations">
<a href="javascript:void(0)" ng-click="setLocation(location)" ng-if="!location"><i class="fa fa-github fa-lg"></i> Repository Root</a> <a href="javascript:void(0)" ng-click="setLocation(location)" ng-if="!location">
<a href="javascript:void(0)" ng-click="setLocation(location)" ng-if="location"><i class="fa fa-folder fa-lg"></i> {{ location }}</a> <i class="fa fa-github fa-lg"></i> Repository Root
</a>
<a href="javascript:void(0)" ng-click="setLocation(location)" ng-if="location">
<i class="fa fa-folder fa-lg"></i> {{ location }}
</a>
</li>
<li class="dropdown-header" role="presentation" ng-show="!locations.length">
No Dockerfiles found in repository
</li> </li>
<li class="dropdown-header" role="presentation" ng-show="!locations.length">No Dockerfiles found in repository</li>
</ul> </ul>
</div> </div>
<div class="quay-spinner" ng-show="!locations && !locationError"></div> <div class="quay-spinner" ng-show="!locations && !locationError"></div>
<div class="alert alert-warning" ng-show="locations && !locations.length"> <div class="alert alert-warning" ng-show="locations && !locations.length">
Warning: No Dockerfiles were found in {{ currentRepo.repo }} Warning: No Dockerfiles were found in {{ state.currentRepo.repo }}
</div> </div>
<div class="alert alert-warning" ng-show="locationError"> <div class="alert alert-warning" ng-show="locationError">
{{ locationError }} {{ locationError }}
</div> </div>
<div class="alert alert-info" ng-show="locations.length && isInvalidLocation"> <div class="alert alert-info" ng-show="locations.length && state.isInvalidLocation">
Note: The folder does not currently exist or contain a Dockerfile Note: The folder does not currently exist or contain a Dockerfile
</div> </div>
</div> </div>
<!-- /step-view -->
</div> </div>
</div> </div>

View file

@ -0,0 +1,2 @@
<span id="usage-chart-element" class="usage-chart-element" ng-class="'limit-' + limit" ng-show="total != null"></span>
<span class="usage-caption" ng-show="total != null && usageTitle">{{ usageTitle }}</span>

BIN
static/img/icons/tags.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 294 B

BIN
static/img/icons/wrench.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 379 B

View file

@ -620,7 +620,8 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
}]); }]);
$provide.factory('TriggerService', ['UtilService', '$sanitize', function(UtilService, $sanitize) { $provide.factory('TriggerService', ['UtilService', '$sanitize', 'KeyService',
function(UtilService, $sanitize, KeyService) {
var triggerService = {}; var triggerService = {};
var triggerTypes = { var triggerTypes = {
@ -639,10 +640,29 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
'type': 'option', 'type': 'option',
'name': 'branch_name' 'name': 'branch_name'
} }
] ],
'get_redirect_url': function(namespace, repository) {
var redirect_uri = KeyService['githubRedirectUri'] + '/trigger/' +
namespace + '/' + repository;
var authorize_url = KeyService['githubTriggerAuthorizeUrl'];
var client_id = KeyService['githubTriggerClientId'];
return authorize_url + 'client_id=' + client_id +
'&scope=repo,user:email&redirect_uri=' + redirect_uri;
}
} }
} }
triggerService.getRedirectUrl = function(name, namespace, repository) {
var type = triggerTypes[name];
if (!type) {
return '';
}
return type['get_redirect_url'](namespace, repository);
};
triggerService.getDescription = function(name, config) { triggerService.getDescription = function(name, config) {
var type = triggerTypes[name]; var type = triggerTypes[name];
if (!type) { if (!type) {
@ -842,6 +862,15 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
return config['SERVER_HOSTNAME']; return config['SERVER_HOSTNAME'];
}; };
config.getHost = function(opt_auth) {
var auth = opt_auth;
if (auth) {
auth = auth + '@';
}
return config['PREFERRED_URL_SCHEME'] + '://' + auth + config['SERVER_HOSTNAME'];
};
config.getUrl = function(opt_path) { config.getUrl = function(opt_path) {
var path = opt_path || ''; var path = opt_path || '';
return config['PREFERRED_URL_SCHEME'] + '://' + config['SERVER_HOSTNAME'] + path; return config['PREFERRED_URL_SCHEME'] + '://' + config['SERVER_HOSTNAME'] + path;
@ -1303,29 +1332,37 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
'id': 'repo_push', 'id': 'repo_push',
'title': 'Push to Repository', 'title': 'Push to Repository',
'icon': 'fa-upload' 'icon': 'fa-upload'
},
{
'id': 'build_queued',
'title': 'Dockerfile Build Queued',
'icon': 'fa-tasks'
},
{
'id': 'build_start',
'title': 'Dockerfile Build Started',
'icon': 'fa-circle-o-notch'
},
{
'id': 'build_success',
'title': 'Dockerfile Build Successfully Completed',
'icon': 'fa-check-circle-o'
},
{
'id': 'build_failure',
'title': 'Dockerfile Build Failed',
'icon': 'fa-times-circle-o'
} }
]; ];
if (Features.BUILD_SUPPORT) {
var buildEvents = [
{
'id': 'build_queued',
'title': 'Dockerfile Build Queued',
'icon': 'fa-tasks'
},
{
'id': 'build_start',
'title': 'Dockerfile Build Started',
'icon': 'fa-circle-o-notch'
},
{
'id': 'build_success',
'title': 'Dockerfile Build Successfully Completed',
'icon': 'fa-check-circle-o'
},
{
'id': 'build_failure',
'title': 'Dockerfile Build Failed',
'icon': 'fa-times-circle-o'
}];
for (var i = 0; i < buildEvents.length; ++i) {
events.push(buildEvents[i]);
}
}
var methods = [ var methods = [
{ {
'id': 'quay_notification', 'id': 'quay_notification',
@ -1390,7 +1427,8 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
{ {
'name': 'notification_token', 'name': 'notification_token',
'type': 'string', 'type': 'string',
'title': 'Notification Token' 'title': 'Room Notification Token',
'help_url': 'https://hipchat.com/rooms/tokens/{room_id}'
} }
] ]
}, },
@ -1527,7 +1565,7 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
if (metadata.updated_tags && Object.getOwnPropertyNames(metadata.updated_tags).length) { if (metadata.updated_tags && Object.getOwnPropertyNames(metadata.updated_tags).length) {
return 'Repository {repository} has been pushed with the following tags updated: {updated_tags}'; return 'Repository {repository} has been pushed with the following tags updated: {updated_tags}';
} else { } else {
            return 'Repository {repository} has been pushed';            return 'Repository {repository} has been pushed';
} }
}, },
'page': function(metadata) { 'page': function(metadata) {
@ -1675,21 +1713,31 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
$provide.factory('KeyService', ['$location', 'Config', function($location, Config) { $provide.factory('KeyService', ['$location', 'Config', function($location, Config) {
var keyService = {} var keyService = {}
var oauth = window.__oauth;
keyService['stripePublishableKey'] = Config['STRIPE_PUBLISHABLE_KEY']; keyService['stripePublishableKey'] = Config['STRIPE_PUBLISHABLE_KEY'];
keyService['githubClientId'] = Config['GITHUB_CLIENT_ID']; keyService['githubTriggerClientId'] = oauth['GITHUB_TRIGGER_CONFIG']['CLIENT_ID'];
keyService['githubLoginClientId'] = Config['GITHUB_LOGIN_CLIENT_ID']; keyService['githubLoginClientId'] = oauth['GITHUB_LOGIN_CONFIG']['CLIENT_ID'];
keyService['githubRedirectUri'] = Config.getUrl('/oauth2/github/callback'); keyService['googleLoginClientId'] = oauth['GOOGLE_LOGIN_CONFIG']['CLIENT_ID'];
keyService['googleLoginClientId'] = Config['GOOGLE_LOGIN_CLIENT_ID']; keyService['githubRedirectUri'] = Config.getUrl('/oauth2/github/callback');
keyService['googleRedirectUri'] = Config.getUrl('/oauth2/google/callback'); keyService['googleRedirectUri'] = Config.getUrl('/oauth2/google/callback');
keyService['googleLoginUrl'] = 'https://accounts.google.com/o/oauth2/auth?response_type=code&'; keyService['githubLoginUrl'] = oauth['GITHUB_LOGIN_CONFIG']['AUTHORIZE_ENDPOINT'];
keyService['githubLoginUrl'] = 'https://github.com/login/oauth/authorize?'; keyService['googleLoginUrl'] = oauth['GOOGLE_LOGIN_CONFIG']['AUTHORIZE_ENDPOINT'];
keyService['githubEndpoint'] = oauth['GITHUB_LOGIN_CONFIG']['GITHUB_ENDPOINT'];
keyService['githubTriggerAuthorizeUrl'] = oauth['GITHUB_LOGIN_CONFIG']['AUTHORIZE_ENDPOINT'];
keyService['googleLoginScope'] = 'openid email';
keyService['githubLoginScope'] = 'user:email'; keyService['githubLoginScope'] = 'user:email';
keyService['googleLoginScope'] = 'openid email';
keyService.isEnterprise = function(service) {
var isGithubEnterprise = keyService['githubLoginUrl'].indexOf('https://github.com/') < 0;
return service == 'github' && isGithubEnterprise;
};
keyService.getExternalLoginUrl = function(service, action) { keyService.getExternalLoginUrl = function(service, action) {
var state_clause = ''; var state_clause = '';
@ -2537,7 +2585,10 @@ quayApp.directive('focusablePopoverContent', ['$timeout', '$popover', function (
$body = $('body'); $body = $('body');
var hide = function() { var hide = function() {
$body.off('click'); $body.off('click');
if (!scope) { return; }
scope.$apply(function() { scope.$apply(function() {
if (!scope) { return; }
scope.$hide(); scope.$hide();
}); });
}; };
@ -2587,7 +2638,7 @@ quayApp.directive('copyBox', function () {
restrict: 'C', restrict: 'C',
scope: { scope: {
'value': '=value', 'value': '=value',
'hoveringMessage': '=hoveringMessage' 'hoveringMessage': '=hoveringMessage',
}, },
controller: function($scope, $element, $rootScope) { controller: function($scope, $element, $rootScope) {
$scope.disabled = false; $scope.disabled = false;
@ -2596,7 +2647,7 @@ quayApp.directive('copyBox', function () {
$rootScope.__copyBoxIdCounter = number + 1; $rootScope.__copyBoxIdCounter = number + 1;
$scope.inputId = "copy-box-input-" + number; $scope.inputId = "copy-box-input-" + number;
var button = $($element).find('.input-group-addon'); var button = $($element).find('.copy-icon');
var input = $($element).find('input'); var input = $($element).find('input');
input.attr('id', $scope.inputId); input.attr('id', $scope.inputId);
@ -2634,9 +2685,9 @@ quayApp.directive('userSetup', function () {
$scope.errorMessage = ''; $scope.errorMessage = '';
$scope.sent = true; $scope.sent = true;
$scope.sendingRecovery = false; $scope.sendingRecovery = false;
}, function(result) { }, function(resp) {
$scope.invalidRecovery = true; $scope.invalidRecovery = true;
$scope.errorMessage = result.data; $scope.errorMessage = ApiService.getErrorMessage(resp, 'Cannot send recovery email');
$scope.sent = false; $scope.sent = false;
$scope.sendingRecovery = false; $scope.sendingRecovery = false;
}); });
@ -2670,6 +2721,8 @@ quayApp.directive('externalLoginButton', function () {
}, },
controller: function($scope, $timeout, $interval, ApiService, KeyService, CookieService, Features, Config) { controller: function($scope, $timeout, $interval, ApiService, KeyService, CookieService, Features, Config) {
$scope.signingIn = false; $scope.signingIn = false;
$scope.isEnterprise = KeyService.isEnterprise;
$scope.startSignin = function(service) { $scope.startSignin = function(service) {
$scope.signInStarted({'service': service}); $scope.signInStarted({'service': service});
@ -2813,7 +2866,7 @@ quayApp.directive('signupForm', function () {
$scope.registering = true; $scope.registering = true;
if ($scope.inviteCode) { if ($scope.inviteCode) {
$scope.newUser['inviteCode'] = $scope.inviteCode; $scope.newUser['invite_code'] = $scope.inviteCode;
} }
ApiService.createNewUser($scope.newUser).then(function(resp) { ApiService.createNewUser($scope.newUser).then(function(resp) {
@ -2978,6 +3031,28 @@ quayApp.directive('dockerAuthDialog', function (Config) {
}); });
quayApp.filter('regex', function() {
return function(input, regex) {
if (!regex) { return []; }
try {
var patt = new RegExp(regex);
} catch (ex) {
return [];
}
var out = [];
for (var i = 0; i < input.length; ++i){
var m = input[i].match(patt);
if (m && m[0].length == input[i].length) {
out.push(input[i]);
}
}
return out;
};
});
quayApp.filter('reverse', function() { quayApp.filter('reverse', function() {
return function(items) { return function(items) {
return items.slice().reverse(); return items.slice().reverse();
@ -3104,6 +3179,22 @@ quayApp.directive('logsView', function () {
'delete_robot': 'Delete Robot Account: {robot}', 'delete_robot': 'Delete Robot Account: {robot}',
'create_repo': 'Create Repository: {repo}', 'create_repo': 'Create Repository: {repo}',
'push_repo': 'Push to repository: {repo}', 'push_repo': 'Push to repository: {repo}',
'repo_verb': function(metadata) {
var prefix = '';
if (metadata.verb == 'squash') {
prefix = 'Pull of squashed tag {tag}'
}
if (metadata.token) {
prefix += ' via token {token}';
} else if (metadata.username) {
prefix += ' by {username}';
} else {
prefix += ' by {_ip}';
}
return prefix;
},
'pull_repo': function(metadata) { 'pull_repo': function(metadata) {
if (metadata.token) { if (metadata.token) {
return 'Pull repository {repo} via token {token}'; return 'Pull repository {repo} via token {token}';
@ -3234,6 +3325,7 @@ quayApp.directive('logsView', function () {
'delete_robot': 'Delete Robot Account', 'delete_robot': 'Delete Robot Account',
'create_repo': 'Create Repository', 'create_repo': 'Create Repository',
'push_repo': 'Push to repository', 'push_repo': 'Push to repository',
'repo_verb': 'Pull Repo Verb',
'pull_repo': 'Pull repository', 'pull_repo': 'Pull repository',
'delete_repo': 'Delete repository', 'delete_repo': 'Delete repository',
'change_repo_permission': 'Change repository permission', 'change_repo_permission': 'Change repository permission',
@ -3325,7 +3417,6 @@ quayApp.directive('logsView', function () {
$scope.logsPath = '/api/v1/' + url; $scope.logsPath = '/api/v1/' + url;
if (!$scope.chart) { if (!$scope.chart) {
window.console.log('creating chart');
$scope.chart = new LogUsageChart(logKinds); $scope.chart = new LogUsageChart(logKinds);
$($scope.chart).bind('filteringChanged', function(e) { $($scope.chart).bind('filteringChanged', function(e) {
$scope.$apply(function() { $scope.kindsAllowed = e.allowed; }); $scope.$apply(function() { $scope.kindsAllowed = e.allowed; });
@ -4508,23 +4599,6 @@ quayApp.directive('planManager', function () {
$scope.planChanged({ 'plan': subscribedPlan }); $scope.planChanged({ 'plan': subscribedPlan });
} }
if (sub.usedPrivateRepos > $scope.subscribedPlan.privateRepos) {
$scope.limit = 'over';
} else if (sub.usedPrivateRepos == $scope.subscribedPlan.privateRepos) {
$scope.limit = 'at';
} else if (sub.usedPrivateRepos >= $scope.subscribedPlan.privateRepos * 0.7) {
$scope.limit = 'near';
} else {
$scope.limit = 'none';
}
if (!$scope.chart) {
$scope.chart = new UsageChart();
$scope.chart.draw('repository-usage-chart');
}
$scope.chart.update(sub.usedPrivateRepos || 0, $scope.subscribedPlan.privateRepos || 0);
$scope.planChanging = false; $scope.planChanging = false;
$scope.planLoading = false; $scope.planLoading = false;
}); });
@ -4734,6 +4808,121 @@ quayApp.directive('triggerDescription', function () {
}); });
quayApp.directive('stepView', function ($compile) {
var directiveDefinitionObject = {
priority: 0,
templateUrl: '/static/directives/step-view.html',
replace: true,
transclude: true,
restrict: 'C',
scope: {
'nextStepCounter': '=nextStepCounter',
'currentStepValid': '=currentStepValid',
'stepsCompleted': '&stepsCompleted'
},
controller: function($scope, $element, $rootScope) {
this.currentStepIndex = -1;
this.steps = [];
this.watcher = null;
this.getCurrentStep = function() {
return this.steps[this.currentStepIndex];
};
this.reset = function() {
this.currentStepIndex = -1;
for (var i = 0; i < this.steps.length; ++i) {
this.steps[i].element.hide();
}
$scope.currentStepValid = false;
};
this.next = function() {
if (this.currentStepIndex >= 0) {
var currentStep = this.getCurrentStep();
if (!currentStep || !currentStep.scope) { return; }
if (!currentStep.scope.completeCondition) {
return;
}
currentStep.element.hide();
if (this.unwatch) {
this.unwatch();
this.unwatch = null;
}
}
this.currentStepIndex++;
if (this.currentStepIndex < this.steps.length) {
var currentStep = this.getCurrentStep();
currentStep.element.show();
currentStep.scope.load()
this.unwatch = currentStep.scope.$watch('completeCondition', function(cc) {
$scope.currentStepValid = !!cc;
});
} else {
$scope.stepsCompleted();
}
};
this.register = function(scope, element) {
element.hide();
this.steps.push({
'scope': scope,
'element': element
});
};
var that = this;
$scope.$watch('nextStepCounter', function(nsc) {
if (nsc >= 0) {
that.next();
} else {
that.reset();
}
});
}
};
return directiveDefinitionObject;
});
quayApp.directive('stepViewStep', function () {
var directiveDefinitionObject = {
priority: 1,
require: '^stepView',
templateUrl: '/static/directives/step-view-step.html',
replace: false,
transclude: true,
restrict: 'C',
scope: {
'completeCondition': '=completeCondition',
'loadCallback': '&loadCallback',
'loadMessage': '@loadMessage'
},
link: function(scope, element, attrs, controller) {
controller.register(scope, element);
},
controller: function($scope, $element) {
$scope.load = function() {
$scope.loading = true;
$scope.loadCallback({'callback': function() {
$scope.loading = false;
}});
};
}
};
return directiveDefinitionObject;
});
quayApp.directive('dropdownSelect', function ($compile) { quayApp.directive('dropdownSelect', function ($compile) {
var directiveDefinitionObject = { var directiveDefinitionObject = {
priority: 0, priority: 0,
@ -4976,25 +5165,28 @@ quayApp.directive('setupTriggerDialog', function () {
controller: function($scope, $element, ApiService, UserService) { controller: function($scope, $element, ApiService, UserService) {
var modalSetup = false; var modalSetup = false;
$scope.state = {};
$scope.nextStepCounter = -1;
$scope.currentView = 'config';
$scope.show = function() { $scope.show = function() {
if (!$scope.trigger || !$scope.repository) { return; } if (!$scope.trigger || !$scope.repository) { return; }
$scope.activating = false; $scope.currentView = 'config';
$scope.pullEntity = null;
$scope.publicPull = true;
$scope.showPullRequirements = false;
$('#setupTriggerModal').modal({}); $('#setupTriggerModal').modal({});
if (!modalSetup) { if (!modalSetup) {
$('#setupTriggerModal').on('hidden.bs.modal', function () { $('#setupTriggerModal').on('hidden.bs.modal', function () {
if (!$scope.trigger || $scope.trigger['is_active']) { return; } if (!$scope.trigger || $scope.trigger['is_active']) { return; }
$scope.nextStepCounter = -1;
$scope.$apply(function() { $scope.$apply(function() {
$scope.cancelSetupTrigger(); $scope.cancelSetupTrigger();
}); });
}); });
modalSetup = true; modalSetup = true;
$scope.nextStepCounter = 0;
} }
}; };
@ -5007,27 +5199,20 @@ quayApp.directive('setupTriggerDialog', function () {
}; };
$scope.hide = function() { $scope.hide = function() {
$scope.activating = false;
$('#setupTriggerModal').modal('hide'); $('#setupTriggerModal').modal('hide');
}; };
$scope.setPublicPull = function(value) {
$scope.publicPull = value;
};
$scope.checkAnalyze = function(isValid) { $scope.checkAnalyze = function(isValid) {
$scope.currentView = 'analyzing';
$scope.pullInfo = {
'is_public': true
};
if (!isValid) { if (!isValid) {
$scope.publicPull = true; $scope.currentView = 'analyzed';
$scope.pullEntity = null;
$scope.showPullRequirements = false;
$scope.checkingPullRequirements = false;
return; return;
} }
$scope.checkingPullRequirements = true;
$scope.showPullRequirements = true;
$scope.pullRequirements = null;
var params = { var params = {
'repository': $scope.repository.namespace + '/' + $scope.repository.name, 'repository': $scope.repository.namespace + '/' + $scope.repository.name,
'trigger_uuid': $scope.trigger.id 'trigger_uuid': $scope.trigger.id
@ -5038,26 +5223,20 @@ quayApp.directive('setupTriggerDialog', function () {
}; };
ApiService.analyzeBuildTrigger(data, params).then(function(resp) { ApiService.analyzeBuildTrigger(data, params).then(function(resp) {
$scope.pullRequirements = resp; $scope.currentView = 'analyzed';
if (resp['status'] == 'publicbase') {
$scope.publicPull = true;
$scope.pullEntity = null;
} else if (resp['namespace']) {
$scope.publicPull = false;
if (resp['status'] == 'analyzed') {
if (resp['robots'] && resp['robots'].length > 0) { if (resp['robots'] && resp['robots'].length > 0) {
$scope.pullEntity = resp['robots'][0]; $scope.pullInfo['pull_entity'] = resp['robots'][0];
} else { } else {
$scope.pullEntity = null; $scope.pullInfo['pull_entity'] = null;
} }
$scope.pullInfo['is_public'] = false;
} }
$scope.checkingPullRequirements = false; $scope.pullInfo['analysis'] = resp;
}, function(resp) { }, ApiService.errorDisplay('Cannot load Dockerfile information'));
$scope.pullRequirements = resp;
$scope.checkingPullRequirements = false;
});
}; };
$scope.activate = function() { $scope.activate = function() {
@ -5070,11 +5249,11 @@ quayApp.directive('setupTriggerDialog', function () {
'config': $scope.trigger['config'] 'config': $scope.trigger['config']
}; };
if ($scope.pullEntity) { if ($scope.pullInfo['pull_entity']) {
data['pull_robot'] = $scope.pullEntity['name']; data['pull_robot'] = $scope.pullInfo['pull_entity']['name'];
} }
$scope.activating = true; $scope.currentView = 'activating';
var errorHandler = ApiService.errorDisplay('Cannot activate build trigger', function(resp) { var errorHandler = ApiService.errorDisplay('Cannot activate build trigger', function(resp) {
$scope.hide(); $scope.hide();
@ -5115,17 +5294,128 @@ quayApp.directive('triggerSetupGithub', function () {
scope: { scope: {
'repository': '=repository', 'repository': '=repository',
'trigger': '=trigger', 'trigger': '=trigger',
'nextStepCounter': '=nextStepCounter',
'currentStepValid': '=currentStepValid',
'analyze': '&analyze' 'analyze': '&analyze'
}, },
controller: function($scope, $element, ApiService) { controller: function($scope, $element, ApiService) {
$scope.analyzeCounter = 0; $scope.analyzeCounter = 0;
$scope.setupReady = false; $scope.setupReady = false;
$scope.loading = true; $scope.refs = null;
$scope.branchNames = null;
$scope.tagNames = null;
$scope.state = {
'currentRepo': null,
'branchTagFilter': '',
'hasBranchTagFilter': false,
'isInvalidLocation': true,
'currentLocation': null
};
$scope.isMatching = function(kind, name, filter) {
try {
var patt = new RegExp(filter);
} catch (ex) {
return false;
}
var fullname = (kind + '/' + name);
var m = fullname.match(patt);
return m && m[0].length == fullname.length;
}
$scope.addRef = function(kind, name) {
if ($scope.isMatching(kind, name, $scope.state.branchTagFilter)) {
return;
}
var newFilter = kind + '/' + name;
var existing = $scope.state.branchTagFilter;
if (existing) {
$scope.state.branchTagFilter = '(' + existing + ')|(' + newFilter + ')';
} else {
$scope.state.branchTagFilter = newFilter;
}
}
$scope.stepsCompleted = function() {
$scope.analyze({'isValid': !$scope.state.isInvalidLocation});
};
$scope.loadRepositories = function(callback) {
var params = {
'repository': $scope.repository.namespace + '/' + $scope.repository.name,
'trigger_uuid': $scope.trigger.id
};
ApiService.listTriggerBuildSources(null, params).then(function(resp) {
$scope.orgs = resp['sources'];
setupTypeahead();
callback();
}, ApiService.errorDisplay('Cannot load repositories'));
};
$scope.loadBranchesAndTags = function(callback) {
var params = {
'repository': $scope.repository.namespace + '/' + $scope.repository.name,
'trigger_uuid': $scope.trigger['id'],
'field_name': 'refs'
};
ApiService.listTriggerFieldValues($scope.trigger['config'], params).then(function(resp) {
$scope.refs = resp['values'];
$scope.branchNames = [];
$scope.tagNames = [];
for (var i = 0; i < $scope.refs.length; ++i) {
var ref = $scope.refs[i];
if (ref.kind == 'branch') {
$scope.branchNames.push(ref.name);
} else {
$scope.tagNames.push(ref.name);
}
}
callback();
}, ApiService.errorDisplay('Cannot load branch and tag names'));
};
$scope.loadLocations = function(callback) {
$scope.locations = null;
var params = {
'repository': $scope.repository.namespace + '/' + $scope.repository.name,
'trigger_uuid': $scope.trigger.id
};
ApiService.listBuildTriggerSubdirs($scope.trigger['config'], params).then(function(resp) {
if (resp['status'] == 'error') {
callback(resp['message'] || 'Could not load Dockerfile locations');
return;
}
$scope.locations = resp['subdir'] || [];
// Select a default location (if any).
if ($scope.locations.length > 0) {
$scope.setLocation($scope.locations[0]);
} else {
$scope.state.currentLocation = null;
$scope.state.isInvalidLocation = resp['subdir'].indexOf('') < 0;
$scope.trigger.$ready = true;
}
callback();
}, ApiService.errorDisplay('Cannot load locations'));
}
$scope.handleLocationInput = function(location) { $scope.handleLocationInput = function(location) {
$scope.state.isInvalidLocation = $scope.locations.indexOf(location) < 0;
$scope.trigger['config']['subdir'] = location || ''; $scope.trigger['config']['subdir'] = location || '';
$scope.isInvalidLocation = $scope.locations.indexOf(location) < 0; $scope.trigger.$ready = true;
$scope.analyze({'isValid': !$scope.isInvalidLocation});
}; };
$scope.handleLocationSelected = function(datum) { $scope.handleLocationSelected = function(datum) {
@ -5133,14 +5423,14 @@ quayApp.directive('triggerSetupGithub', function () {
}; };
$scope.setLocation = function(location) { $scope.setLocation = function(location) {
$scope.currentLocation = location; $scope.state.currentLocation = location;
$scope.state.isInvalidLocation = false;
$scope.trigger['config']['subdir'] = location || ''; $scope.trigger['config']['subdir'] = location || '';
$scope.isInvalidLocation = false; $scope.trigger.$ready = true;
$scope.analyze({'isValid': true});
}; };
$scope.selectRepo = function(repo, org) { $scope.selectRepo = function(repo, org) {
$scope.currentRepo = { $scope.state.currentRepo = {
'repo': repo, 'repo': repo,
'avatar_url': org['info']['avatar_url'], 'avatar_url': org['info']['avatar_url'],
'toString': function() { 'toString': function() {
@ -5150,10 +5440,7 @@ quayApp.directive('triggerSetupGithub', function () {
}; };
$scope.selectRepoInternal = function(currentRepo) { $scope.selectRepoInternal = function(currentRepo) {
if (!currentRepo) { $scope.trigger.$ready = false;
$scope.trigger.$ready = false;
return;
}
var params = { var params = {
'repository': $scope.repository.namespace + '/' + $scope.repository.name, 'repository': $scope.repository.namespace + '/' + $scope.repository.name,
@ -5165,39 +5452,6 @@ quayApp.directive('triggerSetupGithub', function () {
'build_source': repo, 'build_source': repo,
'subdir': '' 'subdir': ''
}; };
// Lookup the possible Dockerfile locations.
$scope.locations = null;
if (repo) {
ApiService.listBuildTriggerSubdirs($scope.trigger['config'], params).then(function(resp) {
if (resp['status'] == 'error') {
$scope.locationError = resp['message'] || 'Could not load Dockerfile locations';
$scope.locations = null;
$scope.trigger.$ready = false;
$scope.isInvalidLocation = false;
$scope.analyze({'isValid': false});
return;
}
$scope.locationError = null;
$scope.locations = resp['subdir'] || [];
$scope.trigger.$ready = true;
if ($scope.locations.length > 0) {
$scope.setLocation($scope.locations[0]);
} else {
$scope.currentLocation = null;
$scope.isInvalidLocation = resp['subdir'].indexOf('') < 0;
$scope.analyze({'isValid': !$scope.isInvalidLocation});
}
}, function(resp) {
$scope.locationError = resp['message'] || 'Could not load Dockerfile locations';
$scope.locations = null;
$scope.trigger.$ready = false;
$scope.isInvalidLocation = false;
$scope.analyze({'isValid': false});
});
}
}; };
var setupTypeahead = function() { var setupTypeahead = function() {
@ -5227,30 +5481,20 @@ quayApp.directive('triggerSetupGithub', function () {
$scope.repoLookahead = repos; $scope.repoLookahead = repos;
}; };
var loadSources = function() { $scope.$watch('state.currentRepo', function(repo) {
var params = { if (repo) {
'repository': $scope.repository.namespace + '/' + $scope.repository.name, $scope.selectRepoInternal(repo);
'trigger_uuid': $scope.trigger.id
};
ApiService.listTriggerBuildSources(null, params).then(function(resp) {
$scope.orgs = resp['sources'];
setupTypeahead();
$scope.loading = false;
});
};
var check = function() {
if ($scope.repository && $scope.trigger) {
loadSources();
} }
}; });
$scope.$watch('repository', check); $scope.$watch('state.branchTagFilter', function(bf) {
$scope.$watch('trigger', check); if (!$scope.trigger) { return; }
$scope.$watch('currentRepo', function(repo) { if ($scope.state.hasBranchTagFilter) {
$scope.selectRepoInternal(repo); $scope.trigger['config']['branchtag_regex'] = bf;
} else {
delete $scope.trigger['config']['branchtag_regex'];
}
}); });
} }
}; };
@ -5730,6 +5974,54 @@ quayApp.directive('notificationsBubble', function () {
}); });
quayApp.directive('usageChart', function () {
var directiveDefinitionObject = {
priority: 0,
templateUrl: '/static/directives/usage-chart.html',
replace: false,
transclude: false,
restrict: 'C',
scope: {
'current': '=current',
'total': '=total',
'limit': '=limit',
'usageTitle': '@usageTitle'
},
controller: function($scope, $element) {
$scope.limit = "";
var chart = null;
var update = function() {
if ($scope.current == null || $scope.total == null) { return; }
if (!chart) {
chart = new UsageChart();
chart.draw('usage-chart-element');
}
var current = $scope.current || 0;
var total = $scope.total || 0;
if (current > total) {
$scope.limit = 'over';
} else if (current == total) {
$scope.limit = 'at';
} else if (current >= total * 0.7) {
$scope.limit = 'near';
} else {
$scope.limit = 'none';
}
chart.update($scope.current, $scope.total);
};
$scope.$watch('current', update);
$scope.$watch('total', update);
}
};
return directiveDefinitionObject;
});
quayApp.directive('notificationView', function () { quayApp.directive('notificationView', function () {
var directiveDefinitionObject = { var directiveDefinitionObject = {
priority: 0, priority: 0,
@ -6304,6 +6596,14 @@ quayApp.directive('ngVisible', function () {
}; };
}); });
quayApp.config( [
'$compileProvider',
function( $compileProvider )
{
$compileProvider.aHrefSanitizationWhitelist(/^\s*(https?|ftp|mailto|tel|irc):/);
}
]);
quayApp.run(['$location', '$rootScope', 'Restangular', 'UserService', 'PlanService', '$http', '$timeout', 'CookieService', 'Features', '$anchorScroll', quayApp.run(['$location', '$rootScope', 'Restangular', 'UserService', 'PlanService', '$http', '$timeout', 'CookieService', 'Features', '$anchorScroll',
function($location, $rootScope, Restangular, UserService, PlanService, $http, $timeout, CookieService, Features, $anchorScroll) { function($location, $rootScope, Restangular, UserService, PlanService, $http, $timeout, CookieService, Features, $anchorScroll) {

View file

@ -8,7 +8,49 @@ function GuideCtrl() {
function SecurityCtrl($scope) { function SecurityCtrl($scope) {
} }
function ContactCtrl($scope) { function ContactCtrl($scope, Config) {
$scope.Config = Config;
$scope.colsize = Math.floor(12 / Config.CONTACT_INFO.length);
$scope.getKind = function(contactInfo) {
var colon = contactInfo.indexOf(':');
var scheme = contactInfo.substr(0, colon);
if (scheme == 'https' || scheme == 'http') {
if (contactInfo.indexOf('//twitter.com/') > 0) {
return 'twitter';
}
return 'url';
}
return scheme;
};
$scope.getTitle = function(contactInfo) {
switch ($scope.getKind(contactInfo)) {
case 'url':
return contactInfo;
case 'twitter':
var parts = contactInfo.split('/');
return '@' + parts[parts.length - 1];
case 'tel':
return contactInfo.substr('tel:'.length);
case 'irc':
// irc://chat.freenode.net:6665/quayio
var parts = contactInfo.substr('irc://'.length).split('/');
var server = parts[0];
if (server.indexOf('freenode') > 0) {
server = 'Freenode';
}
return server + ': #' + parts[parts.length - 1];
case 'mailto':
return contactInfo.substr('mailto:'.length);
}
}
} }
function PlansCtrl($scope, $location, UserService, PlanService, $routeParams) { function PlansCtrl($scope, $location, UserService, PlanService, $routeParams) {
@ -365,6 +407,9 @@ function RepoCtrl($scope, $sanitize, Restangular, ImageMetadataService, ApiServi
var namespace = $routeParams.namespace; var namespace = $routeParams.namespace;
var name = $routeParams.name; var name = $routeParams.name;
$scope.pullCommands = [];
$scope.currentPullCommand = null;
$rootScope.title = 'Loading...'; $rootScope.title = 'Loading...';
// Watch for the destruction of the scope. // Watch for the destruction of the scope.
@ -399,6 +444,46 @@ function RepoCtrl($scope, $sanitize, Restangular, ImageMetadataService, ApiServi
$scope.buildDialogShowCounter = 0; $scope.buildDialogShowCounter = 0;
$scope.getFormattedCommand = ImageMetadataService.getFormattedCommand; $scope.getFormattedCommand = ImageMetadataService.getFormattedCommand;
$scope.setCurrentPullCommand = function(pullCommand) {
$scope.currentPullCommand = pullCommand;
};
// Rebuilds the list of pull commands offered for the repository, based on
// whether a tag is currently selected. The first entry becomes the default
// command shown in the pull dropdown.
$scope.updatePullCommand = function() {
  var commands = [];
  var tag = $scope.currentTag;
  var repoPath = Config.getDomain() + '/' + namespace + '/' + name;

  if (tag) {
    commands.push({
      'title': 'docker pull (Tag ' + tag.name + ')',
      'shortTitle': 'Pull Tag',
      'icon': 'fa-tag',
      'command': 'docker pull ' + repoPath + ':' + tag.name
    });
  }

  commands.push({
    'title': 'docker pull (Full Repository)',
    'shortTitle': 'Pull Repo',
    'icon': 'fa-code-fork',
    'command': 'docker pull ' + repoPath
  });

  if (tag) {
    // Experimental: fetch a squashed (single-layer) image for the tag from the
    // API endpoint and pipe it straight into `docker load`.
    var squashCommand = [
      'curl -L -f ' + Config.getHost('ACCOUNTNAME:PASSWORDORTOKEN'),
      '/c1/squash/' + namespace + '/' + name + '/' + tag.name,
      ' | docker load'
    ].join('');

    commands.push({
      'title': 'Squashed image (Tag ' + tag.name + ')',
      'shortTitle': 'Squashed',
      'icon': 'fa-file-archive-o',
      'command': squashCommand,
      'experimental': true
    });
  }

  $scope.pullCommands = commands;
  $scope.currentPullCommand = commands[0];
};
$scope.showNewBuildDialog = function() { $scope.showNewBuildDialog = function() {
$scope.buildDialogShowCounter++; $scope.buildDialogShowCounter++;
}; };
@ -593,6 +678,8 @@ function RepoCtrl($scope, $sanitize, Restangular, ImageMetadataService, ApiServi
$location.search('tag', null); $location.search('tag', null);
$location.search('image', imageId.substr(0, 12)); $location.search('image', imageId.substr(0, 12));
} }
$scope.updatePullCommand();
}; };
$scope.setTag = function(tagName, opt_updateURL) { $scope.setTag = function(tagName, opt_updateURL) {
@ -627,6 +714,8 @@ function RepoCtrl($scope, $sanitize, Restangular, ImageMetadataService, ApiServi
$scope.currentTag = null; $scope.currentTag = null;
$scope.currentImage = null; $scope.currentImage = null;
} }
$scope.updatePullCommand();
}; };
$scope.getFirstTextLine = getFirstTextLine; $scope.getFirstTextLine = getFirstTextLine;
@ -1241,15 +1330,13 @@ function RepoAdminCtrl($scope, Restangular, ApiService, KeyService, TriggerServi
var name = $routeParams.name; var name = $routeParams.name;
$scope.Features = Features; $scope.Features = Features;
$scope.permissions = {'team': [], 'user': []}; $scope.TriggerService = TriggerService;
$scope.permissions = {'team': [], 'user': [], 'loading': 2};
$scope.logsShown = 0; $scope.logsShown = 0;
$scope.deleting = false; $scope.deleting = false;
$scope.permissionCache = {}; $scope.permissionCache = {};
$scope.githubRedirectUri = KeyService.githubRedirectUri;
$scope.githubClientId = KeyService.githubClientId;
$scope.showTriggerSetupCounter = 0; $scope.showTriggerSetupCounter = 0;
$scope.getBadgeFormat = function(format, repo) { $scope.getBadgeFormat = function(format, repo) {
@ -1591,6 +1678,7 @@ function RepoAdminCtrl($scope, Restangular, ApiService, KeyService, TriggerServi
var permissionsFetch = Restangular.one('repository/' + namespace + '/' + name + '/permissions/' + kind + '/'); var permissionsFetch = Restangular.one('repository/' + namespace + '/' + name + '/permissions/' + kind + '/');
permissionsFetch.get().then(function(resp) { permissionsFetch.get().then(function(resp) {
$scope.permissions[kind] = resp.permissions; $scope.permissions[kind] = resp.permissions;
$scope.permissions['loading']--;
}, function() { }, function() {
$scope.permissions[kind] = null; $scope.permissions[kind] = null;
}); });
@ -1650,6 +1738,7 @@ function UserAdminCtrl($scope, $timeout, $location, ApiService, PlanService, Use
if (login.service == 'github') { if (login.service == 'github') {
$scope.hasGithubLogin = true; $scope.hasGithubLogin = true;
$scope.githubLogin = login.metadata['service_username']; $scope.githubLogin = login.metadata['service_username'];
$scope.githubEndpoint = KeyService['githubEndpoint'];
} }
if (login.service == 'google') { if (login.service == 'google') {
@ -1940,12 +2029,10 @@ function V1Ctrl($scope, $location, UserService) {
UserService.updateUserIn($scope); UserService.updateUserIn($scope);
} }
function NewRepoCtrl($scope, $location, $http, $timeout, UserService, ApiService, PlanService, KeyService, Features) { function NewRepoCtrl($scope, $location, $http, $timeout, UserService, ApiService, PlanService, TriggerService, Features) {
UserService.updateUserIn($scope); UserService.updateUserIn($scope);
$scope.Features = Features; $scope.Features = Features;
$scope.githubRedirectUri = KeyService.githubRedirectUri;
$scope.githubClientId = KeyService.githubClientId;
$scope.repo = { $scope.repo = {
'is_public': 0, 'is_public': 0,
@ -2024,9 +2111,7 @@ function NewRepoCtrl($scope, $location, $http, $timeout, UserService, ApiService
// Conduct the Github redirect if applicable. // Conduct the Github redirect if applicable.
if ($scope.repo.initialize == 'github') { if ($scope.repo.initialize == 'github') {
window.location = 'https://github.com/login/oauth/authorize?client_id=' + $scope.githubClientId + window.location = TriggerService.getRedirectUrl('github', repo.namespace, repo.name);
'&scope=repo,user:email&redirect_uri=' + $scope.githubRedirectUri + '/trigger/' +
repo.namespace + '/' + repo.name;
return; return;
} }
@ -2699,6 +2784,15 @@ function SuperUserAdminCtrl($scope, ApiService, Features, UserService) {
$scope.logsCounter = 0; $scope.logsCounter = 0;
$scope.newUser = {}; $scope.newUser = {};
$scope.createdUsers = []; $scope.createdUsers = [];
$scope.systemUsage = null;
// Lazily loads system-wide usage stats; once loaded, repeat calls are no-ops.
$scope.getUsage = function() {
  if (!$scope.systemUsage) {
    var onLoaded = function(resp) {
      $scope.systemUsage = resp;
    };
    var onError = ApiService.errorDisplay('Cannot load system usage. Please contact support.');
    ApiService.getSystemUsage().then(onLoaded, onError);
  }
};
$scope.loadLogs = function() { $scope.loadLogs = function() {
$scope.logsCounter++; $scope.logsCounter++;

View file

@ -377,6 +377,23 @@ ImageHistoryTree.prototype.expandCollapsed_ = function(imageNode) {
}; };
/**
 * Computes the depth of the node in the tree and memoizes it on the node:
 * roots are level 0, and every child is one level deeper than its parent.
 * Ancestor levels are filled in along the way by the recursion.
 */
ImageHistoryTree.prototype.calculateLevel_ = function(node) {
  if (node['level'] == null) {
    var parent = node['parent'];
    node['level'] = (parent == null) ? 0 : this.calculateLevel_(parent) + 1;
  }
  return node['level'];
};
/** /**
* Builds the root node for the tree. * Builds the root node for the tree.
*/ */
@ -392,11 +409,16 @@ ImageHistoryTree.prototype.buildRoot_ = function() {
var imageByDockerId = {}; var imageByDockerId = {};
for (var i = 0; i < this.images_.length; ++i) { for (var i = 0; i < this.images_.length; ++i) {
var image = this.images_[i]; var image = this.images_[i];
// Skip images that are currently uploading.
if (image.uploading) { continue; }
var imageNode = { var imageNode = {
"name": image.id.substr(0, 12), "name": image.id.substr(0, 12),
"children": [], "children": [],
"image": image, "image": image,
"tags": image.tags "tags": image.tags,
"level": null
}; };
imageByDockerId[image.id] = imageNode; imageByDockerId[image.id] = imageNode;
} }
@ -405,6 +427,7 @@ ImageHistoryTree.prototype.buildRoot_ = function() {
// For each node, attach it to its immediate parent. If there is no immediate parent, // For each node, attach it to its immediate parent. If there is no immediate parent,
// then the node is the root. // then the node is the root.
var roots = []; var roots = [];
var nodeCountsByLevel = {};
for (var i = 0; i < this.images_.length; ++i) { for (var i = 0; i < this.images_.length; ++i) {
var image = this.images_[i]; var image = this.images_[i];
@ -420,10 +443,27 @@ ImageHistoryTree.prototype.buildRoot_ = function() {
imageNode.parent = parent; imageNode.parent = parent;
parent.children.push(imageNode); parent.children.push(imageNode);
} else { } else {
imageNode['level'] = 0;
roots.push(imageNode); roots.push(imageNode);
} }
} }
// Calculate each node's level.
for (var i = 0; i < this.images_.length; ++i) {
var image = this.images_[i];
// Skip images that are currently uploading.
if (image.uploading) { continue; }
var imageNode = imageByDockerId[image.id];
var level = this.calculateLevel_(imageNode);
if (nodeCountsByLevel[level] == null) {
nodeCountsByLevel[level] = 1;
} else {
nodeCountsByLevel[level]++;
}
}
// If there are multiple root nodes, then there is at least one branch without shared // If there are multiple root nodes, then there is at least one branch without shared
// ancestry and we use the virtual node. Otherwise, we use the root node found. // ancestry and we use the virtual node. Otherwise, we use the root node found.
var root = { var root = {
@ -438,16 +478,12 @@ ImageHistoryTree.prototype.buildRoot_ = function() {
// Determine the maximum number of nodes at a particular level. This is used to size // Determine the maximum number of nodes at a particular level. This is used to size
// the width of the tree properly. // the width of the tree properly.
var maxChildCount = roots.length; var maxChildCount = 0;
for (var i = 0; i < this.images_.length; ++i) { var maxChildHeight = 0;
var image = this.images_[i]; Object.keys(nodeCountsByLevel).forEach(function(key){
maxChildCount = Math.max(maxChildCount, nodeCountsByLevel[key]);
// Skip images that are currently uploading. maxChildHeight = Math.max(maxChildHeight, key);
if (image.uploading) { continue; } });
var imageNode = imageByDockerId[image.id];
maxChildCount = Math.max(maxChildCount, this.determineMaximumChildCount_(imageNode));
}
// Compact the graph so that any single chain of three (or more) images becomes a collapsed // Compact the graph so that any single chain of three (or more) images becomes a collapsed
// section. We only do this if the max width is > 1 (since for a single width tree, no long // section. We only do this if the max width is > 1 (since for a single width tree, no long
@ -456,22 +492,21 @@ ImageHistoryTree.prototype.buildRoot_ = function() {
this.collapseNodes_(root); this.collapseNodes_(root);
} }
// Determine the maximum height of the tree. // Determine the maximum height of the tree, with collapsed nodes.
var maxHeight = this.determineMaximumHeight_(root); var maxCollapsedHeight = this.determineMaximumHeight_(root);
// Finally, set the root node and return. // Finally, set the root node and return.
this.root_ = root; this.root_ = root;
return { return {
'maxWidth': maxChildCount + 1, 'maxWidth': maxChildCount + 1,
'maxHeight': maxHeight 'maxHeight': maxCollapsedHeight
}; };
}; };
/** /**
* Collapses long single chains of nodes (3 or more) into single nodes to make the graph more * Determines the height of the tree at its longest chain.
* compact.
*/ */
ImageHistoryTree.prototype.determineMaximumHeight_ = function(node) { ImageHistoryTree.prototype.determineMaximumHeight_ = function(node) {
var maxHeight = 0; var maxHeight = 0;
@ -901,7 +936,7 @@ ImageHistoryTree.prototype.toggle_ = function(d) {
ImageHistoryTree.prototype.dispose = function() { ImageHistoryTree.prototype.dispose = function() {
var container = this.container_ ; var container = this.container_ ;
$('#' + container).removeOverscroll(); $('#' + container).removeOverscroll();
document.getElementById(container).innerHTML = ''; $('#' + container).html('');
}; };
//////////////////////////////////////////////////////////////////////////////// ////////////////////////////////////////////////////////////////////////////////

View file

@ -5,40 +5,21 @@
</h2> </h2>
<div class="row contact-options"> <div class="row contact-options">
<div class="col-sm-3 text-center option-email"> <div class="text-center" ng-repeat="info in Config.CONTACT_INFO"
ng-class="['option-' + getKind(info), 'col-sm-' + colsize]">
<span class="fa-stack fa-3x text-center"> <span class="fa-stack fa-3x text-center">
<i class="fa fa-circle fa-stack-2x"></i> <i class="fa fa-circle fa-stack-2x"></i>
<i class="fa fa-envelope fa-stack-1x fa-inverse"></i> <i class="fa fa-stack-1x fa-inverse"
ng-class="{'mailto': 'fa-envelope', 'irc': 'fa-comment', 'tel': 'fa-phone', 'twitter': 'fa-twitter', 'url': 'fa-ticket'}[getKind(info)]"></i>
</span> </span>
<h4>Email Us</h4> <span ng-switch="getKind(info)">
<h4><a href="mailto:support@quay.io">support@quay.io</a></h4> <h4 ng-switch-when="mailto">Email</h4>
</div> <h4 ng-switch-when="irc">IRC</h4>
<h4 ng-switch-when="tel">Call</h4>
<div class="col-sm-3 text-center option-irc"> <h4 ng-switch-when="twitter">Tweet</h4>
<span class="fa-stack fa-3x"> <h4 ng-switch-when="url">Help System</h4>
<i class="fa fa-circle fa-stack-2x"></i>
<i class="fa fa-comment fa-stack-1x fa-inverse"></i>
</span> </span>
<h4>IRC</h4> <h4><a ng-href="{{ info }}">{{ getTitle(info) }}</a></h4>
<h4><a href="irc://chat.freenode.net:6665/quayio">Freenode: #quayio</a></h4>
</div>
<div class="col-sm-3 text-center option-phone">
<span class="fa-stack fa-3x">
<i class="fa fa-circle fa-stack-2x"></i>
<i class="fa fa-phone fa-stack-1x fa-inverse"></i>
</span>
<h4>Call Us</h4>
<h4><a href="tel:+1-888-930-3475">888-930-3475</a></h4>
</div>
<div class="col-sm-3 text-center option-twitter">
<span class="fa-stack fa-3x">
<i class="fa fa-circle fa-stack-2x"></i>
<i class="fa fa-twitter fa-stack-1x fa-inverse"></i>
</span>
<h4>Tweet Us</h4>
<h4><a href="https://twitter.com/quayio">@quayio</a></h4>
</div> </div>
</div> </div>
</div> </div>

View file

@ -97,7 +97,7 @@
</div> </div>
</div> </div>
<div class="row"> <div class="row" ng-show="Features.BUILD_SUPPORT">
<div class="col-md-12"> <div class="col-md-12">
<div class="section"> <div class="section">

View file

@ -92,8 +92,9 @@
<i class="info-icon fa fa-info-circle" data-placement="left" data-content="Allow any number of users, robot accounts or teams to read, write or administer this repository"></i> <i class="info-icon fa fa-info-circle" data-placement="left" data-content="Allow any number of users, robot accounts or teams to read, write or administer this repository"></i>
</div> </div>
<div class="panel-body"> <div class="panel-body">
<!-- Throbber -->
<table class="permissions"> <span class="quay-spinner" ng-show="permissions.loading > 0"></span>
<table class="permissions" ng-show="permissions.loading <= 0">
<thead> <thead>
<tr> <tr>
<td style="min-width: 400px;">User<span ng-show="repo.is_organization">/Team</span>/Robot Account</td> <td style="min-width: 400px;">User<span ng-show="repo.is_organization">/Team</span>/Robot Account</td>
@ -260,7 +261,7 @@
<span class="entity-reference" entity="trigger.pull_robot"></span> <span class="entity-reference" entity="trigger.pull_robot"></span>
</div> </div>
</td> </td>
<td style="white-space: nowrap;"> <td style="white-space: nowrap;" valign="top">
<div class="dropdown" style="display: inline-block" ng-visible="trigger.is_active"> <div class="dropdown" style="display: inline-block" ng-visible="trigger.is_active">
<button class="btn btn-default dropdown-toggle" data-toggle="dropdown" data-title="Build History" bs-tooltip="tooltip.title" data-container="body" <button class="btn btn-default dropdown-toggle" data-toggle="dropdown" data-title="Build History" bs-tooltip="tooltip.title" data-container="body"
ng-click="loadTriggerBuildHistory(trigger)"> ng-click="loadTriggerBuildHistory(trigger)">
@ -305,7 +306,11 @@
<b class="caret"></b> <b class="caret"></b>
</button> </button>
<ul class="dropdown-menu dropdown-menu-right pull-right"> <ul class="dropdown-menu dropdown-menu-right pull-right">
<li><a href="https://github.com/login/oauth/authorize?client_id={{ githubClientId }}&scope=repo,user:email&redirect_uri={{ githubRedirectUri }}/trigger/{{ repo.namespace }}/{{ repo.name }}"><i class="fa fa-github fa-lg"></i>GitHub - Repository Push</a></li> <li>
<a href="{{ TriggerService.getRedirectUrl('github', repo.namespace, repo.name) }}">
<i class="fa fa-github fa-lg"></i>GitHub - Repository Push
</a>
</li>
</ul> </ul>
</div> </div>
</div> </div>

View file

@ -13,6 +13,9 @@
<li> <li>
<a href="javascript:void(0)" data-toggle="tab" data-target="#create-user">Create User</a> <a href="javascript:void(0)" data-toggle="tab" data-target="#create-user">Create User</a>
</li> </li>
<li>
<a href="javascript:void(0)" data-toggle="tab" data-target="#usage-counter" ng-click="getUsage()">System Usage</a>
</li>
<li> <li>
<a href="javascript:void(0)" data-toggle="tab" data-target="#logs" ng-click="loadLogs()">System Logs</a> <a href="javascript:void(0)" data-toggle="tab" data-target="#logs" ng-click="loadLogs()">System Logs</a>
</li> </li>
@ -27,6 +30,29 @@
<div class="logsView" makevisible="logsCounter" all-logs="true"></div> <div class="logsView" makevisible="logsCounter" all-logs="true"></div>
</div> </div>
<!-- Usage tab -->
<div id="usage-counter" class="tab-pane">
<div class="quay-spinner" ng-show="systemUsage == null"></div>
<div class="usage-chart" total="systemUsage.allowed" limit="systemUsageLimit"
current="systemUsage.usage" usage-title="Deployed Repositories"></div>
<!-- Alerts -->
<div class="alert alert-danger" ng-show="systemUsageLimit == 'over' && systemUsage">
You have deployed more repositories than your plan allows. Please
upgrade your subscription by contacting <a href="mailto:sales@coreos.com">CoreOS Sales</a>.
</div>
<div class="alert alert-warning" ng-show="systemUsageLimit == 'at' && systemUsage">
You are at your current plan's number of allowed repositories. It might be time to think about
upgrading your subscription by contacting <a href="mailto:sales@coreos.com">CoreOS Sales</a>.
</div>
<div class="alert alert-success" ng-show="systemUsageLimit == 'near' && systemUsage">
You are nearing the number of allowed deployed repositories. It might be time to think about
upgrading your subscription by contacting <a href="mailto:sales@coreos.com">CoreOS Sales</a>.
</div>
</div>
<!-- Create user tab --> <!-- Create user tab -->
<div id="create-user" class="tab-pane"> <div id="create-user" class="tab-pane">
<span class="quay-spinner" ng-show="creatingUser"></span> <span class="quay-spinner" ng-show="creatingUser"></span>

View file

@ -176,7 +176,7 @@
<div class="panel-body"> <div class="panel-body">
<div ng-show="hasGithubLogin && githubLogin" class="lead col-md-8"> <div ng-show="hasGithubLogin && githubLogin" class="lead col-md-8">
<i class="fa fa-github fa-lg" style="margin-right: 6px;" data-title="GitHub" bs-tooltip="tooltip.title"></i> <i class="fa fa-github fa-lg" style="margin-right: 6px;" data-title="GitHub" bs-tooltip="tooltip.title"></i>
<b><a href="https://github.com/{{githubLogin}}" target="_blank">{{githubLogin}}</a></b> <b><a href="{{githubEndpoint}}{{githubLogin}}" target="_blank">{{githubLogin}}</a></b>
<span class="delete-ui" button-title="'Detach'" delete-title="'Detach Account'" style="margin-left: 10px" <span class="delete-ui" button-title="'Detach'" delete-title="'Detach Account'" style="margin-left: 10px"
perform-delete="detachExternalLogin('github')"></span> perform-delete="detachExternalLogin('github')"></span>
</div> </div>

View file

@ -56,10 +56,21 @@
<!-- Pull Command --> <!-- Pull Command -->
<span class="pull-command visible-md-inline"> <span class="pull-command visible-md-inline">
<div class="pull-container" data-title="Pull repository" bs-tooltip="tooltip.title"> <div class="pull-container" ng-show="currentPullCommand">
<div class="input-group"> <button class="pull-selector dropdown-toggle" data-toggle="dropdown">
<div class="copy-box" hovering-message="true" value="'docker pull ' + Config.getDomain() + '/' + repo.namespace + '/' + repo.name"></div> <i class="fa" ng-class="currentPullCommand.icon"></i>
</div> {{ currentPullCommand.shortTitle }}
<b class="caret"></b>
</button>
<ul class="dropdown-menu">
<li ng-repeat="pullCommand in pullCommands">
<a href="javascript:void(0)" ng-click="setCurrentPullCommand(pullCommand)"><i class="fa" ng-class="pullCommand.icon"></i>
{{ pullCommand.title }}
<sup ng-if="pullCommand.experimental">Experimental</sup>
</a>
</li>
</ul>
<div class="copy-box" hovering-message="true" value="currentPullCommand.command"></div>
</div> </div>
</span> </span>
</div> </div>
@ -112,7 +123,7 @@
<!-- Content view --> <!-- Content view -->
<div class="repo-content" ng-show="currentTag.image_id || currentImage"> <div class="repo-content" ng-show="currentTag.image_id || currentImage">
<!-- Image History --> <!-- Image History -->
<div id="image-history" style="max-height: 10px;"> <div id="image-history">
<div class="row"> <div class="row">
<!-- Tree View container --> <!-- Tree View container -->
<div class="col-md-8"> <div class="col-md-8">

View file

@ -44,6 +44,7 @@
window.__endpoints = {{ route_data|safe }}.apis; window.__endpoints = {{ route_data|safe }}.apis;
window.__features = {{ feature_set|safe }}; window.__features = {{ feature_set|safe }};
window.__config = {{ config_set|safe }}; window.__config = {{ config_set|safe }};
window.__oauth = {{ oauth_set|safe }};
window.__token = '{{ csrf_token() }}'; window.__token = '{{ csrf_token() }}';
</script> </script>
@ -116,7 +117,7 @@ mixpanel.init("{{ mixpanel_key }}", { track_pageview : false, debug: {{ is_debug
<li quay-require="['BILLING']"><a href="/privacy" target="_self">Privacy</a></li> <li quay-require="['BILLING']"><a href="/privacy" target="_self">Privacy</a></li>
<li quay-require="['BILLING']"><a href="/security/" target="_self">Security</a></li> <li quay-require="['BILLING']"><a href="/security/" target="_self">Security</a></li>
<li quay-require="['BILLING']"><a href="/about/" target="_self">About</a></li> <li quay-require="['BILLING']"><a href="/about/" target="_self">About</a></li>
<li><b><a href="/contact/" target="_self">Contact</a></b></li> <li><b><a href="{{ contact_href or '/contact/' }}" target="_self">Contact</a></b></li>
<li quay-require="['BILLING']"><b><a href="http://status.quay.io" target="_self">Service Status</a></b></li> <li quay-require="['BILLING']"><b><a href="http://status.quay.io" target="_self">Service Status</a></b></li>
</ul> </ul>
</div> </div>

View file

@ -0,0 +1,85 @@
{
"removed": [
"/opt/elasticsearch-0.90.5/LICENSE.txt",
"/opt/elasticsearch-0.90.5/NOTICE.txt",
"/opt/elasticsearch-0.90.5/README.textile",
"/opt/elasticsearch-0.90.5/bin/elasticsearch",
"/opt/elasticsearch-0.90.5/bin/elasticsearch.in.sh",
"/opt/elasticsearch-0.90.5/bin/plugin",
"/opt/elasticsearch-0.90.5/config/elasticsearch.yml",
"/opt/elasticsearch-0.90.5/config/logging.yml",
"/opt/elasticsearch-0.90.5/lib/elasticsearch-0.90.5.jar",
"/opt/elasticsearch-0.90.5/lib/jna-3.3.0.jar",
"/opt/elasticsearch-0.90.5/lib/jts-1.12.jar",
"/opt/elasticsearch-0.90.5/lib/log4j-1.2.17.jar",
"/opt/elasticsearch-0.90.5/lib/lucene-analyzers-common-4.4.0.jar",
"/opt/elasticsearch-0.90.5/lib/lucene-codecs-4.4.0.jar",
"/opt/elasticsearch-0.90.5/lib/lucene-core-4.4.0.jar",
"/opt/elasticsearch-0.90.5/lib/lucene-grouping-4.4.0.jar",
"/opt/elasticsearch-0.90.5/lib/lucene-highlighter-4.4.0.jar",
"/opt/elasticsearch-0.90.5/lib/lucene-join-4.4.0.jar",
"/opt/elasticsearch-0.90.5/lib/lucene-memory-4.4.0.jar",
"/opt/elasticsearch-0.90.5/lib/lucene-misc-4.4.0.jar",
"/opt/elasticsearch-0.90.5/lib/lucene-queries-4.4.0.jar",
"/opt/elasticsearch-0.90.5/lib/lucene-queryparser-4.4.0.jar",
"/opt/elasticsearch-0.90.5/lib/lucene-sandbox-4.4.0.jar",
"/opt/elasticsearch-0.90.5/lib/lucene-spatial-4.4.0.jar",
"/opt/elasticsearch-0.90.5/lib/lucene-suggest-4.4.0.jar",
"/opt/elasticsearch-0.90.5/lib/sigar/libsigar-amd64-freebsd-6.so",
"/opt/elasticsearch-0.90.5/lib/sigar/libsigar-amd64-linux.so",
"/opt/elasticsearch-0.90.5/lib/sigar/libsigar-amd64-solaris.so",
"/opt/elasticsearch-0.90.5/lib/sigar/libsigar-ia64-linux.so",
"/opt/elasticsearch-0.90.5/lib/sigar/libsigar-sparc-solaris.so",
"/opt/elasticsearch-0.90.5/lib/sigar/libsigar-sparc64-solaris.so",
"/opt/elasticsearch-0.90.5/lib/sigar/libsigar-universal-macosx.dylib",
"/opt/elasticsearch-0.90.5/lib/sigar/libsigar-universal64-macosx.dylib",
"/opt/elasticsearch-0.90.5/lib/sigar/libsigar-x86-freebsd-5.so",
"/opt/elasticsearch-0.90.5/lib/sigar/libsigar-x86-freebsd-6.so",
"/opt/elasticsearch-0.90.5/lib/sigar/libsigar-x86-linux.so",
"/opt/elasticsearch-0.90.5/lib/sigar/libsigar-x86-solaris.so",
"/opt/elasticsearch-0.90.5/lib/sigar/sigar-1.6.4.jar",
"/opt/elasticsearch-0.90.5/lib/spatial4j-0.3.jar"
],
"added": [
"/opt/elasticsearch/LICENSE.txt",
"/opt/elasticsearch/NOTICE.txt",
"/opt/elasticsearch/README.textile",
"/opt/elasticsearch/bin/elasticsearch",
"/opt/elasticsearch/bin/elasticsearch.in.sh",
"/opt/elasticsearch/bin/plugin",
"/opt/elasticsearch/config/elasticsearch.yml",
"/opt/elasticsearch/config/logging.yml",
"/opt/elasticsearch/lib/elasticsearch-0.90.5.jar",
"/opt/elasticsearch/lib/jna-3.3.0.jar",
"/opt/elasticsearch/lib/jts-1.12.jar",
"/opt/elasticsearch/lib/log4j-1.2.17.jar",
"/opt/elasticsearch/lib/lucene-analyzers-common-4.4.0.jar",
"/opt/elasticsearch/lib/lucene-codecs-4.4.0.jar",
"/opt/elasticsearch/lib/lucene-core-4.4.0.jar",
"/opt/elasticsearch/lib/lucene-grouping-4.4.0.jar",
"/opt/elasticsearch/lib/lucene-highlighter-4.4.0.jar",
"/opt/elasticsearch/lib/lucene-join-4.4.0.jar",
"/opt/elasticsearch/lib/lucene-memory-4.4.0.jar",
"/opt/elasticsearch/lib/lucene-misc-4.4.0.jar",
"/opt/elasticsearch/lib/lucene-queries-4.4.0.jar",
"/opt/elasticsearch/lib/lucene-queryparser-4.4.0.jar",
"/opt/elasticsearch/lib/lucene-sandbox-4.4.0.jar",
"/opt/elasticsearch/lib/lucene-spatial-4.4.0.jar",
"/opt/elasticsearch/lib/lucene-suggest-4.4.0.jar",
"/opt/elasticsearch/lib/sigar/libsigar-amd64-freebsd-6.so",
"/opt/elasticsearch/lib/sigar/libsigar-amd64-linux.so",
"/opt/elasticsearch/lib/sigar/libsigar-amd64-solaris.so",
"/opt/elasticsearch/lib/sigar/libsigar-ia64-linux.so",
"/opt/elasticsearch/lib/sigar/libsigar-sparc-solaris.so",
"/opt/elasticsearch/lib/sigar/libsigar-sparc64-solaris.so",
"/opt/elasticsearch/lib/sigar/libsigar-universal-macosx.dylib",
"/opt/elasticsearch/lib/sigar/libsigar-universal64-macosx.dylib",
"/opt/elasticsearch/lib/sigar/libsigar-x86-freebsd-5.so",
"/opt/elasticsearch/lib/sigar/libsigar-x86-freebsd-6.so",
"/opt/elasticsearch/lib/sigar/libsigar-x86-linux.so",
"/opt/elasticsearch/lib/sigar/libsigar-x86-solaris.so",
"/opt/elasticsearch/lib/sigar/sigar-1.6.4.jar",
"/opt/elasticsearch/lib/spatial4j-0.3.jar"
],
"changed": []
}

View file

@ -0,0 +1,45 @@
{
"removed": [],
"added": [
"/opt/elasticsearch-0.90.5/LICENSE.txt",
"/opt/elasticsearch-0.90.5/NOTICE.txt",
"/opt/elasticsearch-0.90.5/README.textile",
"/opt/elasticsearch-0.90.5/bin/elasticsearch",
"/opt/elasticsearch-0.90.5/bin/elasticsearch.in.sh",
"/opt/elasticsearch-0.90.5/bin/plugin",
"/opt/elasticsearch-0.90.5/config/elasticsearch.yml",
"/opt/elasticsearch-0.90.5/config/logging.yml",
"/opt/elasticsearch-0.90.5/lib/elasticsearch-0.90.5.jar",
"/opt/elasticsearch-0.90.5/lib/jna-3.3.0.jar",
"/opt/elasticsearch-0.90.5/lib/jts-1.12.jar",
"/opt/elasticsearch-0.90.5/lib/log4j-1.2.17.jar",
"/opt/elasticsearch-0.90.5/lib/lucene-analyzers-common-4.4.0.jar",
"/opt/elasticsearch-0.90.5/lib/lucene-codecs-4.4.0.jar",
"/opt/elasticsearch-0.90.5/lib/lucene-core-4.4.0.jar",
"/opt/elasticsearch-0.90.5/lib/lucene-grouping-4.4.0.jar",
"/opt/elasticsearch-0.90.5/lib/lucene-highlighter-4.4.0.jar",
"/opt/elasticsearch-0.90.5/lib/lucene-join-4.4.0.jar",
"/opt/elasticsearch-0.90.5/lib/lucene-memory-4.4.0.jar",
"/opt/elasticsearch-0.90.5/lib/lucene-misc-4.4.0.jar",
"/opt/elasticsearch-0.90.5/lib/lucene-queries-4.4.0.jar",
"/opt/elasticsearch-0.90.5/lib/lucene-queryparser-4.4.0.jar",
"/opt/elasticsearch-0.90.5/lib/lucene-sandbox-4.4.0.jar",
"/opt/elasticsearch-0.90.5/lib/lucene-spatial-4.4.0.jar",
"/opt/elasticsearch-0.90.5/lib/lucene-suggest-4.4.0.jar",
"/opt/elasticsearch-0.90.5/lib/sigar/libsigar-amd64-freebsd-6.so",
"/opt/elasticsearch-0.90.5/lib/sigar/libsigar-amd64-linux.so",
"/opt/elasticsearch-0.90.5/lib/sigar/libsigar-amd64-solaris.so",
"/opt/elasticsearch-0.90.5/lib/sigar/libsigar-ia64-linux.so",
"/opt/elasticsearch-0.90.5/lib/sigar/libsigar-sparc-solaris.so",
"/opt/elasticsearch-0.90.5/lib/sigar/libsigar-sparc64-solaris.so",
"/opt/elasticsearch-0.90.5/lib/sigar/libsigar-universal-macosx.dylib",
"/opt/elasticsearch-0.90.5/lib/sigar/libsigar-universal64-macosx.dylib",
"/opt/elasticsearch-0.90.5/lib/sigar/libsigar-x86-freebsd-5.so",
"/opt/elasticsearch-0.90.5/lib/sigar/libsigar-x86-freebsd-6.so",
"/opt/elasticsearch-0.90.5/lib/sigar/libsigar-x86-linux.so",
"/opt/elasticsearch-0.90.5/lib/sigar/libsigar-x86-solaris.so",
"/opt/elasticsearch-0.90.5/lib/sigar/sigar-1.6.4.jar",
"/opt/elasticsearch-0.90.5/lib/spatial4j-0.3.jar"
],
"changed": []
}

Some files were not shown because too many files have changed in this diff Show more