Merge branch 'master' into quark

commit fbdbc21eb1

137 changed files with 8691 additions and 2414 deletions
Bobfile (8 changed lines)

@@ -9,14 +9,8 @@ version = 1
 
 [[container]]
 name = "quay"
-Dockerfile = "Dockerfile.web"
+Dockerfile = "Dockerfile"
 project = "quay"
 tags = ["git:short"]
 
-[[container]]
-name = "builder"
-Dockerfile = "Dockerfile.buildworker"
-project = "builder"
-tags = ["git:short"]
-
 # vim:ft=toml
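The quay container now builds from Dockerfile instead of Dockerfile.web, and the separate builder container is dropped from the Bobfile entirely. This is consistent with the 49-line worker Dockerfile (lxc, tutumdocker, dockerfilebuild) deleted later in this commit, which is presumably the Dockerfile.buildworker referenced here.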
@@ -1,34 +1,21 @@
 # vim:ft=dockerfile
 
-###############################
-# BEGIN COMMON SECION
-###############################
-
-FROM phusion/baseimage:0.9.15
+FROM phusion/baseimage:0.9.16
 
 ENV DEBIAN_FRONTEND noninteractive
 ENV HOME /root
 
 # Install the dependencies.
-RUN apt-get update # 11DEC2014
+RUN apt-get update # 29JAN2015
 
 # New ubuntu packages should be added as their own apt-get install lines below the existing install commands
-RUN apt-get install -y git python-virtualenv python-dev libjpeg8 libjpeg62 libjpeg62-dev libevent-2.0.5 libevent-dev gdebi-core g++ libmagic1 phantomjs nodejs npm libldap-2.4-2 libldap2-dev libsasl2-modules libsasl2-dev libpq5 libpq-dev libfreetype6-dev libffi-dev
+RUN apt-get install -y git python-virtualenv python-dev libjpeg8 libjpeg62 libjpeg62-dev libevent-2.0.5 libevent-dev gdebi-core g++ libmagic1 phantomjs nodejs npm libldap-2.4-2 libldap2-dev libsasl2-modules libsasl2-dev libpq5 libpq-dev libfreetype6-dev libffi-dev libgpgme11 libgpgme11-dev
 
 # Build the python dependencies
 ADD requirements.txt requirements.txt
 RUN virtualenv --distribute venv
 RUN venv/bin/pip install -r requirements.txt
 
-RUN apt-get remove -y --auto-remove python-dev g++ libjpeg62-dev libevent-dev libldap2-dev libsasl2-dev libpq-dev libffi-dev
-
-###############################
-# END COMMON SECION
-###############################
-
-# Remove SSH.
-RUN rm -rf /etc/service/sshd /etc/my_init.d/00_regen_ssh_host_keys.sh
-
 # Install the binary dependencies
 ADD binary_dependencies binary_dependencies
 RUN gdebi --n binary_dependencies/*.deb

@@ -41,6 +28,10 @@ RUN npm install -g grunt-cli
 ADD grunt grunt
 RUN cd grunt && npm install
 
+RUN apt-get remove -y --auto-remove python-dev g++ libjpeg62-dev libevent-dev libldap2-dev libsasl2-dev libpq-dev libffi-dev libgpgme11-dev
+RUN apt-get autoremove -y
+RUN apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
+
 # Add all of the files!
 ADD . .
 

@@ -65,14 +56,9 @@ ADD conf/init/buildmanager /etc/service/buildmanager
 RUN mkdir static/fonts static/ldn
 RUN venv/bin/python -m external_libraries
 
-RUN apt-get autoremove -y
-RUN apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
-
 # Run the tests
 RUN TEST=true venv/bin/python -m unittest discover
 
-VOLUME ["/conf/stack", "/var/log", "/datastorage", "/tmp"]
+VOLUME ["/conf/stack", "/var/log", "/datastorage", "/tmp", "/conf/etcd"]
 
 EXPOSE 443 8443 80
-
-CMD ["/sbin/my_init"]
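Beyond the base-image bump to 0.9.16, the image gains libgpgme11/libgpgme11-dev (plausibly for the new util.signing.Signer wired up in app.py below), the apt-get remove/autoremove/clean cleanup moves up to run right after the grunt install rather than at the end of the file, and /conf/etcd is added to the exported volumes.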
@@ -1,49 +0,0 @@
-# vim:ft=dockerfile
-
-###############################
-# BEGIN COMMON SECION
-###############################
-
-FROM phusion/baseimage:0.9.15
-
-ENV DEBIAN_FRONTEND noninteractive
-ENV HOME /root
-
-# Install the dependencies.
-RUN apt-get update # 11DEC2014
-
-# New ubuntu packages should be added as their own apt-get install lines below the existing install commands
-RUN apt-get install -y git python-virtualenv python-dev libjpeg8 libjpeg62 libjpeg62-dev libevent-2.0.5 libevent-dev gdebi-core g++ libmagic1 phantomjs nodejs npm libldap-2.4-2 libldap2-dev libsasl2-modules libsasl2-dev libpq5 libpq-dev libfreetype6-dev libffi-dev
-
-# Build the python dependencies
-ADD requirements.txt requirements.txt
-RUN virtualenv --distribute venv
-RUN venv/bin/pip install -r requirements.txt
-
-RUN apt-get remove -y --auto-remove python-dev g++ libjpeg62-dev libevent-dev libldap2-dev libsasl2-dev libpq-dev libffi-dev
-
-###############################
-# END COMMON SECION
-###############################
-
-RUN apt-get install -y lxc aufs-tools
-
-RUN usermod -v 100000-200000 -w 100000-200000 root
-
-ADD binary_dependencies/builder binary_dependencies/builder
-RUN gdebi --n binary_dependencies/builder/*.deb
-
-ADD . .
-
-ADD conf/init/svlogd_config /svlogd_config
-ADD conf/init/preplogsdir.sh /etc/my_init.d/
-ADD conf/init/tutumdocker /etc/service/tutumdocker
-ADD conf/init/dockerfilebuild /etc/service/dockerfilebuild
-
-RUN apt-get remove -y --auto-remove nodejs npm git phantomjs
-RUN apt-get autoremove -y
-RUN apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
-
-VOLUME ["/var/lib/docker", "/var/lib/lxc", "/conf/stack", "/var/log"]
-
-CMD ["/sbin/my_init"]
app.py (100 changed lines)

@@ -1,71 +1,54 @@
 import logging
 import os
 import json
-import yaml
 
-from flask import Flask as BaseFlask, Config as BaseConfig, request, Request
+from flask import Flask, Config, request, Request, _request_ctx_stack
 from flask.ext.principal import Principal
 from flask.ext.login import LoginManager, UserMixin
 from flask.ext.mail import Mail
 
 import features
 
+from avatars.avatars import Avatar
 from storage import Storage
 
+from avatars.avatars import Avatar
 
 from data import model
 from data import database
 from data.userfiles import Userfiles
 from data.users import UserAuthentication
-from util.analytics import Analytics
-from util.exceptionlog import Sentry
-from util.names import urn_generator
-from util.oauth import GoogleOAuthConfig, GithubOAuthConfig
 from data.billing import Billing
 from data.buildlogs import BuildLogs
 from data.archivedlogs import LogArchive
 from data.userevent import UserEventsBuilderModule
-from avatars.avatars import Avatar
-from util.queuemetrics import QueueMetrics
 from data.queue import WorkQueue
+from util.analytics import Analytics
+from util.exceptionlog import Sentry
+from util.names import urn_generator
+from util.oauth import GoogleOAuthConfig, GithubOAuthConfig
+from util.signing import Signer
+from util.queuemetrics import QueueMetrics
+from util.config.provider import FileConfigProvider, TestConfigProvider
+from util.config.configutil import generate_secret_key
+from util.config.superusermanager import SuperUserManager
 
-class Config(BaseConfig):
-  """ Flask config enhanced with a `from_yamlfile` method """
-
-  def from_yamlfile(self, config_file):
-    with open(config_file) as f:
-      c = yaml.load(f)
-      if not c:
-        logger.debug('Empty YAML config file')
-        return
-
-      if isinstance(c, str):
-        raise Exception('Invalid YAML config file: ' + str(c))
-
-      for key in c.iterkeys():
-        if key.isupper():
-          self[key] = c[key]
-
-class Flask(BaseFlask):
-  """ Extends the Flask class to implement our custom Config class. """
-
-  def make_config(self, instance_relative=False):
-    root_path = self.instance_path if instance_relative else self.root_path
-    return Config(root_path, self.default_config)
-
-
+OVERRIDE_CONFIG_DIRECTORY = 'conf/stack/'
 OVERRIDE_CONFIG_YAML_FILENAME = 'conf/stack/config.yaml'
 OVERRIDE_CONFIG_PY_FILENAME = 'conf/stack/config.py'
 
 OVERRIDE_CONFIG_KEY = 'QUAY_OVERRIDE_CONFIG'
 LICENSE_FILENAME = 'conf/stack/license.enc'
 
+CONFIG_PROVIDER = FileConfigProvider(OVERRIDE_CONFIG_DIRECTORY, 'config.yaml', 'config.py')
+
 app = Flask(__name__)
 logger = logging.getLogger(__name__)
-profile = logging.getLogger('profile')
 
+# Instantiate the default configuration (for test or for normal operation).
 if 'TEST' in os.environ:
+  CONFIG_PROVIDER = TestConfigProvider()
+
   from test.testconfig import TestConfig
   logger.debug('Loading test config.')
   app.config.from_object(TestConfig())
@@ -73,20 +56,17 @@ else:
   from config import DefaultConfig
   logger.debug('Loading default config.')
   app.config.from_object(DefaultConfig())
 
-  if os.path.exists(OVERRIDE_CONFIG_PY_FILENAME):
-    logger.debug('Applying config file: %s', OVERRIDE_CONFIG_PY_FILENAME)
-    app.config.from_pyfile(OVERRIDE_CONFIG_PY_FILENAME)
-
-  if os.path.exists(OVERRIDE_CONFIG_YAML_FILENAME):
-    logger.debug('Applying config file: %s', OVERRIDE_CONFIG_YAML_FILENAME)
-    app.config.from_yamlfile(OVERRIDE_CONFIG_YAML_FILENAME)
-
-  environ_config = json.loads(os.environ.get(OVERRIDE_CONFIG_KEY, '{}'))
-  app.config.update(environ_config)
-
 app.teardown_request(database.close_db_filter)
 
+# Load the override config via the provider.
+CONFIG_PROVIDER.update_app_config(app.config)
+
+# Update any configuration found in the override environment variable.
+OVERRIDE_CONFIG_KEY = 'QUAY_OVERRIDE_CONFIG'
+
+environ_config = json.loads(os.environ.get(OVERRIDE_CONFIG_KEY, '{}'))
+app.config.update(environ_config)
 
 class RequestWithId(Request):
   request_gen = staticmethod(urn_generator(['request']))
@@ -98,21 +78,24 @@ class RequestWithId(Request):
 
 @app.before_request
 def _request_start():
-  profile.debug('Starting request: %s', request.path)
+  logger.debug('Starting request: %s', request.path)
 
 
 @app.after_request
 def _request_end(r):
-  profile.debug('Ending request: %s', request.path)
+  logger.debug('Ending request: %s', request.path)
   return r
 
 
 class InjectingFilter(logging.Filter):
   def filter(self, record):
-    record.msg = '[%s] %s' % (request.request_id, record.msg)
+    if _request_ctx_stack.top is not None:
+      record.msg = '[%s] %s' % (request.request_id, record.msg)
     return True
 
-profile.addFilter(InjectingFilter())
+# Add the request id filter to all handlers of the root logger
+for handler in logging.getLogger().handlers:
+  handler.addFilter(InjectingFilter())
 
 app.request_class = RequestWithId
 
@@ -132,13 +115,15 @@ sentry = Sentry(app)
 build_logs = BuildLogs(app)
 authentication = UserAuthentication(app)
 userevents = UserEventsBuilderModule(app)
+superusers = SuperUserManager(app)
+signer = Signer(app, OVERRIDE_CONFIG_DIRECTORY)
 queue_metrics = QueueMetrics(app)
 
 tf = app.config['DB_TRANSACTION_FACTORY']
 
-github_login = GithubOAuthConfig(app, 'GITHUB_LOGIN_CONFIG')
-github_trigger = GithubOAuthConfig(app, 'GITHUB_TRIGGER_CONFIG')
-google_login = GoogleOAuthConfig(app, 'GOOGLE_LOGIN_CONFIG')
+github_login = GithubOAuthConfig(app.config, 'GITHUB_LOGIN_CONFIG')
+github_trigger = GithubOAuthConfig(app.config, 'GITHUB_TRIGGER_CONFIG')
+google_login = GoogleOAuthConfig(app.config, 'GOOGLE_LOGIN_CONFIG')
 oauth_apps = [github_login, github_trigger, google_login]
 
 image_diff_queue = WorkQueue(app.config['DIFFS_QUEUE_NAME'], tf)
@@ -150,6 +135,11 @@ database.configure(app.config)
 model.config.app_config = app.config
 model.config.store = storage
 
+# Generate a secret key if none was specified.
+if app.config['SECRET_KEY'] is None:
+  logger.debug('Generating in-memory secret key')
+  app.config['SECRET_KEY'] = generate_secret_key()
+
 @login_manager.user_loader
 def load_user(user_uuid):
   logger.debug('User loader loading deferred user with uuid: %s' % user_uuid)
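The net effect on startup order, as a minimal sketch (assuming FileConfigProvider simply layers conf/stack/config.py and conf/stack/config.yaml onto the Flask config, which is what the removed from_pyfile/from_yamlfile calls did inline):

import json, os

app.config.from_object(DefaultConfig())        # baked-in defaults
CONFIG_PROVIDER.update_app_config(app.config)  # conf/stack overrides, via the provider
environ_config = json.loads(os.environ.get('QUAY_OVERRIDE_CONFIG', '{}'))
app.config.update(environ_config)              # env-var overrides win last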
@@ -11,5 +11,5 @@ import registry
 
 
 if __name__ == '__main__':
-  logging.config.fileConfig('conf/logging.conf', disable_existing_loggers=False)
+  logging.config.fileConfig('conf/logging_debug.conf', disable_existing_loggers=False)
   application.run(port=5000, debug=True, threaded=True, host='0.0.0.0')
@@ -7,7 +7,7 @@ from functools import partial
 import scopes
 
 from data import model
-from app import app
+from app import app, superusers
 
 
 logger = logging.getLogger(__name__)

@@ -92,9 +92,11 @@ class QuayDeferredPermissionUser(Identity):
     if user_object is None:
       return super(QuayDeferredPermissionUser, self).can(permission)
 
+    if user_object is None:
+      return super(QuayDeferredPermissionUser, self).can(permission)
+
     # Add the superuser need, if applicable.
-    if (user_object.username is not None and
-        user_object.username in app.config.get('SUPER_USERS', [])):
+    if superusers.is_superuser(user_object.username):
       self.provides.add(_SuperUserNeed())
 
     # Add the user specific permissions, only for non-oauth permission
Binary file not shown.
build.sh (2 lines, new executable file)

@@ -0,0 +1,2 @@
+docker build -t quay.io/quay/quay:`git rev-parse --short HEAD` .
+echo quay.io/quay/quay:`git rev-parse --short HEAD`
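Both commands derive the tag from `git rev-parse --short HEAD`, so a local build produces an image tagged the same way as the Bobfile's git:short tag scheme.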
buildman/asyncutil.py (27 lines, new file)

@@ -0,0 +1,27 @@
+from functools import partial, wraps
+from trollius import get_event_loop
+
+
+class AsyncWrapper(object):
+  """ Wrapper class which will transform a syncronous library to one that can be used with
+      trollius coroutines.
+  """
+  def __init__(self, delegate, loop=None, executor=None):
+    self._loop = loop if loop is not None else get_event_loop()
+    self._delegate = delegate
+    self._executor = executor
+
+  def __getattr__(self, attrib):
+    delegate_attr = getattr(self._delegate, attrib)
+
+    if not callable(delegate_attr):
+      return delegate_attr
+
+    def wrapper(*args, **kwargs):
+      """ Wraps the delegate_attr with primitives that will transform sync calls to ones shelled
+          out to a thread pool.
+      """
+      callable_delegate_attr = partial(delegate_attr, *args, **kwargs)
+      return self._loop.run_in_executor(self._executor, callable_delegate_attr)
+
+    return wrapper
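A usage sketch for AsyncWrapper (the blocking client here is a hypothetical stand-in; any synchronous library works the same way). Wrapped method calls return futures that run on a thread-pool executor, so they can be yielded from trollius coroutines without blocking the event loop:

import trollius
from buildman.asyncutil import AsyncWrapper

class BlockingClient(object):
  def read(self, key):                 # stand-in for a synchronous library call
    return 'value-for-' + key

client = AsyncWrapper(BlockingClient())

@trollius.coroutine
def fetch(key):
  # The wrapped call is shelled out to the executor and yields a future.
  result = yield trollius.From(client.read(key))
  raise trollius.Return(result)

loop = trollius.get_event_loop()
print(loop.run_until_complete(fetch('foo')))   # prints: value-for-foo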
@@ -6,6 +6,7 @@ import time
 from app import app, userfiles as user_files, build_logs, dockerfile_build_queue
 
 from buildman.manager.enterprise import EnterpriseManager
+from buildman.manager.ephemeral import EphemeralBuilderManager
 from buildman.server import BuilderServer
 
 from trollius import SSLContext

@@ -13,14 +14,22 @@ from trollius import SSLContext
 logger = logging.getLogger(__name__)
 
 BUILD_MANAGERS = {
-  'enterprise': EnterpriseManager
+  'enterprise': EnterpriseManager,
+  'ephemeral': EphemeralBuilderManager,
 }
 
 EXTERNALLY_MANAGED = 'external'
 
+DEFAULT_WEBSOCKET_PORT = 8787
+DEFAULT_CONTROLLER_PORT = 8686
+
+LOG_FORMAT = "%(asctime)s [%(process)d] [%(levelname)s] [%(name)s] %(message)s"
+
 def run_build_manager():
   if not features.BUILD_SUPPORT:
     logger.debug('Building is disabled. Please enable the feature flag')
+    while True:
+      time.sleep(1000)
     return
 
   build_manager_config = app.config.get('BUILD_MANAGER')

@@ -39,6 +48,19 @@ def run_build_manager():
   if manager_klass is None:
     return
 
+  manager_hostname = os.environ.get('BUILDMAN_HOSTNAME',
+                                    app.config.get('BUILDMAN_HOSTNAME',
+                                                   app.config['SERVER_HOSTNAME']))
+  websocket_port = int(os.environ.get('BUILDMAN_WEBSOCKET_PORT',
+                                      app.config.get('BUILDMAN_WEBSOCKET_PORT',
+                                                     DEFAULT_WEBSOCKET_PORT)))
+  controller_port = int(os.environ.get('BUILDMAN_CONTROLLER_PORT',
+                                       app.config.get('BUILDMAN_CONTROLLER_PORT',
+                                                      DEFAULT_CONTROLLER_PORT)))
+
+  logger.debug('Will pass buildman hostname %s to builders for websocket connection',
+               manager_hostname)
+
   logger.debug('Starting build manager with lifecycle "%s"', build_manager_config[0])
   ssl_context = None
   if os.environ.get('SSL_CONFIG'):

@@ -48,9 +70,10 @@ def run_build_manager():
                                os.path.join(os.environ.get('SSL_CONFIG'), 'ssl.key'))
 
   server = BuilderServer(app.config['SERVER_HOSTNAME'], dockerfile_build_queue, build_logs,
-                         user_files, manager_klass)
-  server.run('0.0.0.0', ssl=ssl_context)
+                         user_files, manager_klass, build_manager_config[1], manager_hostname)
+  server.run('0.0.0.0', websocket_port, controller_port, ssl=ssl_context)
 
 if __name__ == '__main__':
-  logging.basicConfig(level=logging.DEBUG)
+  logging.basicConfig(level=logging.DEBUG, format=LOG_FORMAT)
+  logging.getLogger('peewee').setLevel(logging.WARN)
   run_build_manager()
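The BUILD_MANAGER config consumed above is a two-element pair: a lifecycle name used to pick a class out of BUILD_MANAGERS, plus an opaque manager config handed through to BuilderServer as build_manager_config[1]. A hypothetical sketch (the 'ephemeral' keys shown are assumptions; the real schema belongs to EphemeralBuilderManager):

BUILD_MANAGER = ('ephemeral', {
  # manager-specific settings, passed through as build_manager_config[1]
  'ETCD_HOST': '127.0.0.1',
})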
@@ -6,11 +6,10 @@ import trollius
 import re
 
 from autobahn.wamp.exception import ApplicationError
-from trollius.coroutines import From
 
 from buildman.server import BuildJobResult
 from buildman.component.basecomponent import BaseComponent
-from buildman.jobutil.buildpack import BuildPackage, BuildPackageException
+from buildman.jobutil.buildjob import BuildJobLoadException
 from buildman.jobutil.buildstatus import StatusHandler
 from buildman.jobutil.workererror import WorkerError
 
@@ -20,7 +19,7 @@ HEARTBEAT_DELTA = datetime.timedelta(seconds=30)
 HEARTBEAT_TIMEOUT = 10
 INITIAL_TIMEOUT = 25
 
-SUPPORTED_WORKER_VERSIONS = ['0.1-beta']
+SUPPORTED_WORKER_VERSIONS = ['0.3']
 
 logger = logging.getLogger(__name__)
 
@@ -39,13 +38,14 @@ class BuildComponent(BaseComponent):
     self.builder_realm = realm
 
     self.parent_manager = None
-    self.server_hostname = None
+    self.registry_hostname = None
 
     self._component_status = ComponentStatus.JOINING
     self._last_heartbeat = None
     self._current_job = None
     self._build_status = None
     self._image_info = None
+    self._worker_version = None
 
     BaseComponent.__init__(self, config, **kwargs)
 
@@ -57,69 +57,52 @@ class BuildComponent(BaseComponent):
 
   def onJoin(self, details):
     logger.debug('Registering methods and listeners for component %s', self.builder_realm)
-    yield From(self.register(self._on_ready, u'io.quay.buildworker.ready'))
-    yield From(self.register(self._ping, u'io.quay.buildworker.ping'))
-    yield From(self.subscribe(self._on_heartbeat, 'io.quay.builder.heartbeat'))
-    yield From(self.subscribe(self._on_log_message, 'io.quay.builder.logmessage'))
-
-    self._set_status(ComponentStatus.WAITING)
+    yield trollius.From(self.register(self._on_ready, u'io.quay.buildworker.ready'))
+    yield trollius.From(self.register(self._determine_cache_tag,
+                                      u'io.quay.buildworker.determinecachetag'))
+    yield trollius.From(self.register(self._ping, u'io.quay.buildworker.ping'))
+
+    yield trollius.From(self.subscribe(self._on_heartbeat, 'io.quay.builder.heartbeat'))
+    yield trollius.From(self.subscribe(self._on_log_message, 'io.quay.builder.logmessage'))
+
+    yield trollius.From(self._set_status(ComponentStatus.WAITING))
 
   def is_ready(self):
     """ Determines whether a build component is ready to begin a build. """
     return self._component_status == ComponentStatus.RUNNING
 
+  @trollius.coroutine
   def start_build(self, build_job):
     """ Starts a build. """
+    logger.debug('Starting build for component %s (worker version: %s)',
+                 self.builder_realm, self._worker_version)
+
     self._current_job = build_job
-    self._build_status = StatusHandler(self.build_logs, build_job.repo_build())
+    self._build_status = StatusHandler(self.build_logs, build_job.repo_build.uuid)
     self._image_info = {}
 
-    self._set_status(ComponentStatus.BUILDING)
+    yield trollius.From(self._set_status(ComponentStatus.BUILDING))
 
-    # Retrieve the job's buildpack.
-    buildpack_url = self.user_files.get_file_url(build_job.repo_build().resource_key,
+    # Send the notification that the build has started.
+    build_job.send_notification('build_start')
+
+    # Parse the build configuration.
+    try:
+      build_config = build_job.build_config
+    except BuildJobLoadException as irbe:
+      self._build_failure('Could not load build job information', irbe)
+
+    base_image_information = {}
+    buildpack_url = self.user_files.get_file_url(build_job.repo_build.resource_key,
                                                  requires_cors=False)
 
-    logger.debug('Retreiving build package: %s', buildpack_url)
-    buildpack = None
-    try:
-      buildpack = BuildPackage.from_url(buildpack_url)
-    except BuildPackageException as bpe:
-      self._build_failure('Could not retrieve build package', bpe)
-      return
-
-    # Extract the base image information from the Dockerfile.
-    parsed_dockerfile = None
-    logger.debug('Parsing dockerfile')
-
-    build_config = build_job.build_config()
-    try:
-      parsed_dockerfile = buildpack.parse_dockerfile(build_config.get('build_subdir'))
-    except BuildPackageException as bpe:
-      self._build_failure('Could not find Dockerfile in build package', bpe)
-      return
-
-    image_and_tag_tuple = parsed_dockerfile.get_image_and_tag()
-    if image_and_tag_tuple is None or image_and_tag_tuple[0] is None:
-      self._build_failure('Missing FROM line in Dockerfile')
-      return
-
-    base_image_information = {
-      'repository': image_and_tag_tuple[0],
-      'tag': image_and_tag_tuple[1]
-    }
-
-    # Extract the number of steps from the Dockerfile.
-    with self._build_status as status_dict:
-      status_dict['total_commands'] = len(parsed_dockerfile.commands)
-
     # Add the pull robot information, if any.
-    if build_config.get('pull_credentials') is not None:
-      base_image_information['username'] = build_config['pull_credentials'].get('username', '')
-      base_image_information['password'] = build_config['pull_credentials'].get('password', '')
+    if build_job.pull_credentials:
+      base_image_information['username'] = build_job.pull_credentials.get('username', '')
+      base_image_information['password'] = build_job.pull_credentials.get('password', '')
 
     # Retrieve the repository's fully qualified name.
-    repo = build_job.repo_build().repository
+    repo = build_job.repo_build.repository
     repository_name = repo.namespace_user.username + '/' + repo.name
 
     # Parse the build queue item into build arguments.
@@ -131,29 +114,26 @@ class BuildComponent(BaseComponent):
     # push_token: The token to use to push the built image.
     # tag_names: The name(s) of the tag(s) for the newly built image.
     # base_image: The image name and credentials to use to conduct the base image pull.
-    #   repository: The repository to pull.
-    #   tag: The tag to pull.
+    #   repository: The repository to pull (DEPRECATED 0.2)
+    #   tag: The tag to pull (DEPRECATED in 0.2)
     #   username: The username for pulling the base image (if any).
     #   password: The password for pulling the base image (if any).
     build_arguments = {
       'build_package': buildpack_url,
       'sub_directory': build_config.get('build_subdir', ''),
       'repository': repository_name,
-      'registry': self.server_hostname,
-      'pull_token': build_job.repo_build().access_token.code,
-      'push_token': build_job.repo_build().access_token.code,
+      'registry': self.registry_hostname,
+      'pull_token': build_job.repo_build.access_token.code,
+      'push_token': build_job.repo_build.access_token.code,
       'tag_names': build_config.get('docker_tags', ['latest']),
-      'base_image': base_image_information,
-      'cached_tag': build_job.determine_cached_tag() or ''
+      'base_image': base_image_information
     }
 
     # Invoke the build.
     logger.debug('Invoking build: %s', self.builder_realm)
     logger.debug('With Arguments: %s', build_arguments)
 
-    return (self
-            .call("io.quay.builder.build", **build_arguments)
-            .add_done_callback(self._build_complete))
+    self.call("io.quay.builder.build", **build_arguments).add_done_callback(self._build_complete)
 
   @staticmethod
   def _total_completion(statuses, total_images):
@@ -240,18 +220,28 @@ class BuildComponent(BaseComponent):
     elif phase == BUILD_PHASE.BUILDING:
       self._build_status.append_log(current_status_string)
 
+  @trollius.coroutine
+  def _determine_cache_tag(self, command_comments, base_image_name, base_image_tag, base_image_id):
+    with self._build_status as status_dict:
+      status_dict['total_commands'] = len(command_comments) + 1
+
+    logger.debug('Checking cache on realm %s. Base image: %s:%s (%s)', self.builder_realm,
+                 base_image_name, base_image_tag, base_image_id)
+
+    tag_found = self._current_job.determine_cached_tag(base_image_id, command_comments)
+    raise trollius.Return(tag_found or '')
 
   def _build_failure(self, error_message, exception=None):
     """ Handles and logs a failed build. """
     self._build_status.set_error(error_message, {
-      'internal_error': exception.message if exception else None
+      'internal_error': str(exception) if exception else None
     })
 
-    build_id = self._current_job.repo_build().uuid
+    build_id = self._current_job.repo_build.uuid
     logger.warning('Build %s failed with message: %s', build_id, error_message)
 
     # Mark that the build has finished (in an error state)
-    self._build_finished(BuildJobResult.ERROR)
+    trollius.async(self._build_finished(BuildJobResult.ERROR))
 
   def _build_complete(self, result):
     """ Wraps up a completed build. Handles any errors and calls self._build_finished. """
||||||
# Retrieve the result. This will raise an ApplicationError on any error that occurred.
|
# Retrieve the result. This will raise an ApplicationError on any error that occurred.
|
||||||
result.result()
|
result.result()
|
||||||
self._build_status.set_phase(BUILD_PHASE.COMPLETE)
|
self._build_status.set_phase(BUILD_PHASE.COMPLETE)
|
||||||
self._build_finished(BuildJobResult.COMPLETE)
|
trollius.async(self._build_finished(BuildJobResult.COMPLETE))
|
||||||
|
|
||||||
|
# Send the notification that the build has completed successfully.
|
||||||
|
self._current_job.send_notification('build_success')
|
||||||
except ApplicationError as aex:
|
except ApplicationError as aex:
|
||||||
worker_error = WorkerError(aex.error, aex.kwargs.get('base_error'))
|
worker_error = WorkerError(aex.error, aex.kwargs.get('base_error'))
|
||||||
|
|
||||||
# Write the error to the log.
|
# Write the error to the log.
|
||||||
self._build_status.set_error(worker_error.public_message(), worker_error.extra_data(),
|
self._build_status.set_error(worker_error.public_message(), worker_error.extra_data(),
|
||||||
internal_error=worker_error.is_internal_error())
|
internal_error=worker_error.is_internal_error(),
|
||||||
|
requeued=self._current_job.has_retries_remaining())
|
||||||
|
|
||||||
|
# Send the notification that the build has failed.
|
||||||
|
self._current_job.send_notification('build_failure',
|
||||||
|
error_message=worker_error.public_message())
|
||||||
|
|
||||||
# Mark the build as completed.
|
# Mark the build as completed.
|
||||||
if worker_error.is_internal_error():
|
if worker_error.is_internal_error():
|
||||||
self._build_finished(BuildJobResult.INCOMPLETE)
|
trollius.async(self._build_finished(BuildJobResult.INCOMPLETE))
|
||||||
else:
|
else:
|
||||||
self._build_finished(BuildJobResult.ERROR)
|
trollius.async(self._build_finished(BuildJobResult.ERROR))
|
||||||
|
|
||||||
|
@trollius.coroutine
|
||||||
def _build_finished(self, job_status):
|
def _build_finished(self, job_status):
|
||||||
""" Alerts the parent that a build has completed and sets the status back to running. """
|
""" Alerts the parent that a build has completed and sets the status back to running. """
|
||||||
self.parent_manager.job_completed(self._current_job, job_status, self)
|
yield trollius.From(self.parent_manager.job_completed(self._current_job, job_status, self))
|
||||||
self._current_job = None
|
self._current_job = None
|
||||||
|
|
||||||
# Set the component back to a running state.
|
# Set the component back to a running state.
|
||||||
self._set_status(ComponentStatus.RUNNING)
|
yield trollius.From(self._set_status(ComponentStatus.RUNNING))
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def _ping():
|
def _ping():
|
||||||
""" Ping pong. """
|
""" Ping pong. """
|
||||||
return 'pong'
|
return 'pong'
|
||||||
|
|
||||||
|
@trollius.coroutine
|
||||||
def _on_ready(self, token, version):
|
def _on_ready(self, token, version):
|
||||||
if not version in SUPPORTED_WORKER_VERSIONS:
|
self._worker_version = version
|
||||||
logger.warning('Build component (token "%s") is running an out-of-date version: %s', version)
|
|
||||||
return False
|
|
||||||
|
|
||||||
if self._component_status != 'waiting':
|
if not version in SUPPORTED_WORKER_VERSIONS:
|
||||||
|
logger.warning('Build component (token "%s") is running an out-of-date version: %s', token,
|
||||||
|
version)
|
||||||
|
raise trollius.Return(False)
|
||||||
|
|
||||||
|
if self._component_status != ComponentStatus.WAITING:
|
||||||
logger.warning('Build component (token "%s") is already connected', self.expected_token)
|
logger.warning('Build component (token "%s") is already connected', self.expected_token)
|
||||||
return False
|
raise trollius.Return(False)
|
||||||
|
|
||||||
if token != self.expected_token:
|
if token != self.expected_token:
|
||||||
logger.warning('Builder token mismatch. Expected: "%s". Found: "%s"', self.expected_token, token)
|
logger.warning('Builder token mismatch. Expected: "%s". Found: "%s"', self.expected_token,
|
||||||
return False
|
token)
|
||||||
|
raise trollius.Return(False)
|
||||||
|
|
||||||
self._set_status(ComponentStatus.RUNNING)
|
yield trollius.From(self._set_status(ComponentStatus.RUNNING))
|
||||||
|
|
||||||
# Start the heartbeat check and updating loop.
|
# Start the heartbeat check and updating loop.
|
||||||
loop = trollius.get_event_loop()
|
loop = trollius.get_event_loop()
|
||||||
loop.create_task(self._heartbeat())
|
loop.create_task(self._heartbeat())
|
||||||
logger.debug('Build worker %s is connected and ready', self.builder_realm)
|
logger.debug('Build worker %s is connected and ready', self.builder_realm)
|
||||||
return True
|
raise trollius.Return(True)
|
||||||
|
|
||||||
|
@trollius.coroutine
|
||||||
def _set_status(self, phase):
|
def _set_status(self, phase):
|
||||||
|
if phase == ComponentStatus.RUNNING:
|
||||||
|
yield trollius.From(self.parent_manager.build_component_ready(self))
|
||||||
|
|
||||||
self._component_status = phase
|
self._component_status = phase
|
||||||
|
|
||||||
def _on_heartbeat(self):
|
def _on_heartbeat(self):
|
||||||
""" Updates the last known heartbeat. """
|
""" Updates the last known heartbeat. """
|
||||||
self._last_heartbeat = datetime.datetime.now()
|
self._last_heartbeat = datetime.datetime.utcnow()
|
||||||
|
|
||||||
@trollius.coroutine
|
@trollius.coroutine
|
||||||
def _heartbeat(self):
|
def _heartbeat(self):
|
||||||
|
@@ -320,13 +328,13 @@ class BuildComponent(BaseComponent):
         and updating the heartbeat in the build status dictionary (if applicable). This allows
         the build system to catch crashes from either end.
     """
-    yield From(trollius.sleep(INITIAL_TIMEOUT))
+    yield trollius.From(trollius.sleep(INITIAL_TIMEOUT))
 
     while True:
       # If the component is no longer running or actively building, nothing more to do.
       if (self._component_status != ComponentStatus.RUNNING and
           self._component_status != ComponentStatus.BUILDING):
-        return
+        raise trollius.Return()
 
       # If there is an active build, write the heartbeat to its status.
       build_status = self._build_status
@@ -334,35 +342,37 @@ class BuildComponent(BaseComponent):
       with build_status as status_dict:
         status_dict['heartbeat'] = int(time.time())
 
 
      # Mark the build item.
       current_job = self._current_job
       if current_job is not None:
-        self.parent_manager.job_heartbeat(current_job)
+        yield trollius.From(self.parent_manager.job_heartbeat(current_job))
 
       # Check the heartbeat from the worker.
       logger.debug('Checking heartbeat on realm %s', self.builder_realm)
-      if self._last_heartbeat and self._last_heartbeat < datetime.datetime.now() - HEARTBEAT_DELTA:
-        self._timeout()
-        return
+      if (self._last_heartbeat and
+          self._last_heartbeat < datetime.datetime.utcnow() - HEARTBEAT_DELTA):
+        yield trollius.From(self._timeout())
+        raise trollius.Return()
 
-      yield From(trollius.sleep(HEARTBEAT_TIMEOUT))
+      yield trollius.From(trollius.sleep(HEARTBEAT_TIMEOUT))
 
+  @trollius.coroutine
   def _timeout(self):
-    self._set_status(ComponentStatus.TIMED_OUT)
-    logger.warning('Build component with realm %s has timed out', self.builder_realm)
-    self._dispose(timed_out=True)
+    if self._component_status == ComponentStatus.TIMED_OUT:
+      raise trollius.Return()
 
-  def _dispose(self, timed_out=False):
+    yield trollius.From(self._set_status(ComponentStatus.TIMED_OUT))
+    logger.warning('Build component with realm %s has timed out', self.builder_realm)
+
     # If we still have a running job, then it has not completed and we need to tell the parent
     # manager.
     if self._current_job is not None:
-      if timed_out:
-        self._build_status.set_error('Build worker timed out', internal_error=True)
+      self._build_status.set_error('Build worker timed out', internal_error=True,
+                                   requeued=self._current_job.has_retries_remaining())
 
       self.parent_manager.job_completed(self._current_job, BuildJobResult.INCOMPLETE, self)
       self._build_status = None
       self._current_job = None
 
     # Unregister the current component so that it cannot be invoked again.
-    self.parent_manager.build_component_disposed(self, timed_out)
+    self.parent_manager.build_component_disposed(self, True)
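The recurring mechanical change in this file is the trollius calling convention: trollius is the Python 2 backport of asyncio, so coroutines cannot use `yield from` or `return value`; they yield `trollius.From(...)` and terminate with `raise trollius.Return(value)`. In miniature:

import trollius

@trollius.coroutine
def ping_until_ready(component):
  while True:
    # Equivalent of `yield from` / `return` in Python 3 asyncio.
    yield trollius.From(trollius.sleep(1))
    if component.is_ready():
      raise trollius.Return(True)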
@@ -1,6 +1,13 @@
-from data import model
-
 import json
+import logging
+
+from cachetools import lru_cache
+from endpoints.notificationhelper import spawn_notification
+from data import model
+from util.imagetree import ImageTree
+
+logger = logging.getLogger(__name__)
+
 
 class BuildJobLoadException(Exception):
   """ Exception raised if a build job could not be instantiated for some reason. """
@@ -9,50 +16,123 @@ class BuildJobLoadException(Exception):
 class BuildJob(object):
   """ Represents a single in-progress build job. """
   def __init__(self, job_item):
-    self._job_item = job_item
+    self.job_item = job_item
 
     try:
-      self._job_details = json.loads(job_item.body)
+      self.job_details = json.loads(job_item.body)
     except ValueError:
       raise BuildJobLoadException(
-        'Could not parse build queue item config with ID %s' % self._job_details['build_uuid']
+        'Could not parse build queue item config with ID %s' % self.job_details['build_uuid']
       )
 
+  def has_retries_remaining(self):
+    return self.job_item.retries_remaining > 0
+
+  def send_notification(self, kind, error_message=None):
+    tags = self.build_config.get('docker_tags', ['latest'])
+    event_data = {
+      'build_id': self.repo_build.uuid,
+      'build_name': self.repo_build.display_name,
+      'docker_tags': tags,
+      'trigger_id': self.repo_build.trigger.uuid,
+      'trigger_kind': self.repo_build.trigger.service.name
+    }
+
+    if error_message is not None:
+      event_data['error_message'] = error_message
+
+    spawn_notification(self.repo_build.repository, kind, event_data,
+                       subpage='build?current=%s' % self.repo_build.uuid,
+                       pathargs=['build', self.repo_build.uuid])
+
+
+  @lru_cache(maxsize=1)
+  def _load_repo_build(self):
     try:
-      self._repo_build = model.get_repository_build(self._job_details['build_uuid'])
+      return model.get_repository_build(self.job_details['build_uuid'])
     except model.InvalidRepositoryBuildException:
       raise BuildJobLoadException(
-        'Could not load repository build with ID %s' % self._job_details['build_uuid'])
+        'Could not load repository build with ID %s' % self.job_details['build_uuid'])
 
+  @property
+  def repo_build(self):
+    return self._load_repo_build()
+
+  @property
+  def pull_credentials(self):
+    """ Returns the pull credentials for this job, or None if none. """
+    return self.job_details.get('pull_credentials')
+
+  @property
+  def build_config(self):
     try:
-      self._build_config = json.loads(self._repo_build.job_config)
+      return json.loads(self.repo_build.job_config)
     except ValueError:
       raise BuildJobLoadException(
-        'Could not parse repository build job config with ID %s' % self._job_details['build_uuid']
+        'Could not parse repository build job config with ID %s' % self.job_details['build_uuid']
       )
 
-  def determine_cached_tag(self):
+  def determine_cached_tag(self, base_image_id=None, cache_comments=None):
     """ Returns the tag to pull to prime the cache or None if none. """
-    # TODO(jschorr): Change this to use the more complicated caching rules, once we have caching
-    # be a pull of things besides the constructed tags.
-    tags = self._build_config.get('docker_tags', ['latest'])
-    existing_tags = model.list_repository_tags(self._repo_build.repository.namespace_user.username,
-                                               self._repo_build.repository.name)
+    cached_tag = None
+    if base_image_id and cache_comments:
+      cached_tag = self._determine_cached_tag_by_comments(base_image_id, cache_comments)
+
+    if not cached_tag:
+      cached_tag = self._determine_cached_tag_by_tag()
+
+    logger.debug('Determined cached tag %s for %s: %s', cached_tag, base_image_id, cache_comments)
+
+    return cached_tag
+
+  def _determine_cached_tag_by_comments(self, base_image_id, cache_commands):
+    """ Determines the tag to use for priming the cache for this build job, by matching commands
+        starting at the given base_image_id. This mimics the Docker cache checking, so it should,
+        in theory, provide "perfect" caching.
+    """
+    # Lookup the base image in the repository. If it doesn't exist, nothing more to do.
+    repo_build = self.repo_build
+    repo_namespace = repo_build.repository.namespace_user.username
+    repo_name = repo_build.repository.name
+
+    base_image = model.get_image(repo_build.repository, base_image_id)
+    if base_image is None:
+      return None
+
+    # Build an in-memory tree of the full heirarchy of images in the repository.
+    all_images = model.get_repository_images(repo_namespace, repo_name)
+    all_tags = model.list_repository_tags(repo_namespace, repo_name)
+    tree = ImageTree(all_images, all_tags, base_filter=base_image.id)
+
+    # Find a path in the tree, starting at the base image, that matches the cache comments
+    # or some subset thereof.
+    def checker(step, image):
+      if step >= len(cache_commands):
+        return False
+
+      full_command = '["/bin/sh", "-c", "%s"]' % cache_commands[step]
+      logger.debug('Checking step #%s: %s, %s == %s', step, image.id,
+                   image.storage.command, full_command)
+
+      return image.storage.command == full_command
+
+    path = tree.find_longest_path(base_image.id, checker)
+    if not path:
+      return None
+
+    # Find any tag associated with the last image in the path.
+    return tree.tag_containing_image(path[-1])
+
+
+  def _determine_cached_tag_by_tag(self):
+    """ Determines the cached tag by looking for one of the tags being built, and seeing if it
+        exists in the repository. This is a fallback for when no comment information is available.
+    """
+    tags = self.build_config.get('docker_tags', ['latest'])
+    repository = self.repo_build.repository
+    existing_tags = model.list_repository_tags(repository.namespace_user.username, repository.name)
     cached_tags = set(tags) & set([tag.name for tag in existing_tags])
     if cached_tags:
       return list(cached_tags)[0]
 
     return None
-
-  def job_item(self):
-    """ Returns the job's queue item. """
-    return self._job_item
-
-  def repo_build(self):
-    """ Returns the repository build DB row for the job. """
-    return self._repo_build
-
-  def build_config(self):
-    """ Returns the parsed repository build config for the job. """
-    return self._build_config
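Two small patterns are worth noting in the new BuildJob. First, the cache checker mimics Docker's layer cache: Docker stores each layer's command as a /bin/sh -c invocation, which is why the comparison string is built as '["/bin/sh", "-c", "%s"]' % command. Second, @lru_cache(maxsize=1) plus @property turns the repository-build lookup into a lazy, memoized attribute. A reduced sketch of the memoization half (the loader body is hypothetical):

from cachetools import lru_cache

class Record(object):
  @lru_cache(maxsize=1)
  def _load(self):
    print('hitting the database')    # runs only on the first access
    return {'id': 42}                # hypothetical DB row

  @property
  def row(self):
    return self._load()

r = Record()
r.row
r.row                                # prints 'hitting the database' once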
@ -1,88 +0,0 @@
|
||||||
import tarfile
|
|
||||||
import requests
import os
import tarfile

from tempfile import TemporaryFile, mkdtemp
from zipfile import ZipFile
from util.dockerfileparse import parse_dockerfile
from util.safetar import safe_extractall


class BuildPackageException(Exception):
  """ Exception raised when retrieving or parsing a build package. """
  pass


class BuildPackage(object):
  """ Helper class for easy reading and updating of a Dockerfile build pack. """

  def __init__(self, requests_file):
    self._mime_processors = {
      'application/zip': BuildPackage._prepare_zip,
      'application/x-zip-compressed': BuildPackage._prepare_zip,
      'text/plain': BuildPackage._prepare_dockerfile,
      'application/octet-stream': BuildPackage._prepare_dockerfile,
      'application/x-tar': BuildPackage._prepare_tarball,
      'application/gzip': BuildPackage._prepare_tarball,
      'application/x-gzip': BuildPackage._prepare_tarball,
    }

    c_type = requests_file.headers['content-type']
    c_type = c_type.split(';')[0] if ';' in c_type else c_type

    if c_type not in self._mime_processors:
      raise BuildPackageException('Unknown build package mime type: %s' % c_type)

    self._package_directory = None
    try:
      self._package_directory = self._mime_processors[c_type](requests_file)
    except Exception as ex:
      raise BuildPackageException(ex.message)

  def parse_dockerfile(self, subdirectory):
    dockerfile_path = os.path.join(self._package_directory, subdirectory, 'Dockerfile')
    if not os.path.exists(dockerfile_path):
      if subdirectory:
        message = 'Build package did not contain a Dockerfile at sub directory %s.' % subdirectory
      else:
        message = 'Build package did not contain a Dockerfile at the root directory.'

      raise BuildPackageException(message)

    with open(dockerfile_path, 'r') as dockerfileobj:
      return parse_dockerfile(dockerfileobj.read())

  @staticmethod
  def from_url(url):
    buildpack_resource = requests.get(url, stream=True)
    return BuildPackage(buildpack_resource)

  @staticmethod
  def _prepare_zip(request_file):
    build_dir = mkdtemp(prefix='docker-build-')

    # Save the zip file to temp somewhere and extract it into the build directory.
    with TemporaryFile() as zip_file:
      zip_file.write(request_file.content)
      to_extract = ZipFile(zip_file)
      to_extract.extractall(build_dir)

    return build_dir

  @staticmethod
  def _prepare_dockerfile(request_file):
    build_dir = mkdtemp(prefix='docker-build-')
    dockerfile_path = os.path.join(build_dir, "Dockerfile")
    with open(dockerfile_path, 'w') as dockerfile:
      dockerfile.write(request_file.content)

    return build_dir

  @staticmethod
  def _prepare_tarball(request_file):
    build_dir = mkdtemp(prefix='docker-build-')

    # Stream the tarball and safely extract it into the build directory.
    with tarfile.open(mode='r|*', fileobj=request_file.raw) as tar_stream:
      safe_extractall(tar_stream, build_dir)

    return build_dir
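For orientation, a minimal usage sketch of the helper above (the archive URL is a placeholder, and the empty subdirectory means the Dockerfile is expected at the package root):

    # A hedged sketch; BuildPackage and BuildPackageException are defined above.
    try:
      pack = BuildPackage.from_url('https://example.com/buildpack.tar.gz')  # placeholder URL
      parsed = pack.parse_dockerfile('')  # '' looks for the Dockerfile at the root
    except BuildPackageException as bpe:
      print('Invalid build package: %s' % bpe)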
@ -1,16 +1,18 @@
from data.database import BUILD_PHASE
from data import model
import datetime


class StatusHandler(object):
  """ Context wrapper for writing status to build logs. """

  def __init__(self, build_logs, repository_build):
  def __init__(self, build_logs, repository_build_uuid):
    self._current_phase = None
    self._repository_build = repository_build
    self._current_command = None
    self._uuid = repository_build.uuid
    self._uuid = repository_build_uuid
    self._build_logs = build_logs

    self._status = {
      'total_commands': None,
      'total_commands': 0,
      'current_command': None,
      'push_completion': 0.0,
      'pull_completion': 0.0,

@ -20,16 +22,25 @@ class StatusHandler(object):
    self.__exit__(None, None, None)

  def _append_log_message(self, log_message, log_type=None, log_data=None):
    log_data = log_data or {}
    log_data['datetime'] = str(datetime.datetime.now())
    self._build_logs.append_log_message(self._uuid, log_message, log_type, log_data)

  def append_log(self, log_message, extra_data=None):
    if log_message is None:
      return

    self._append_log_message(log_message, log_data=extra_data)

  def set_command(self, command, extra_data=None):
    if self._current_command == command:
      return

    self._current_command = command
    self._append_log_message(command, self._build_logs.COMMAND, extra_data)

  def set_error(self, error_message, extra_data=None, internal_error=False):
  def set_error(self, error_message, extra_data=None, internal_error=False, requeued=False):
    self.set_phase(BUILD_PHASE.INTERNAL_ERROR if internal_error else BUILD_PHASE.ERROR)
    self.set_phase(BUILD_PHASE.INTERNAL_ERROR if internal_error and requeued else BUILD_PHASE.ERROR)

    extra_data = extra_data or {}
    extra_data['internal_error'] = internal_error

@ -41,8 +52,12 @@ class StatusHandler(object):

    self._current_phase = phase
    self._append_log_message(phase, self._build_logs.PHASE, extra_data)
    self._repository_build.phase = phase
    self._repository_build.save()
    # Update the repository build with the new phase
    repo_build = model.get_repository_build(self._uuid)
    repo_build.phase = phase
    repo_build.save()

    return True

  def __enter__(self):
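A hedged sketch of how the updated StatusHandler is driven (the build_logs object and UUID come from the caller; the phase and command strings are illustrative):

    status = StatusHandler(build_logs, repo_build.uuid)
    status.set_phase('build-scheduled')      # also persisted via model.get_repository_build
    status.set_command('FROM ubuntu:14.04')  # duplicate commands are now deduplicated
    status.append_log('Step 1 complete')     # timestamped by _append_log_message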
@ -19,13 +19,19 @@ class WorkerError(object):
      'is_internal': True
    },

    'io.quay.builder.dockerfileissue': {
      'message': 'Could not find or parse Dockerfile',
      'show_base_error': True
    },

    'io.quay.builder.cannotpullbaseimage': {
      'message': 'Could not pull base image',
      'show_base_error': True
    },

    'io.quay.builder.internalerror': {
      'message': 'An internal error occurred while building. Please submit a ticket.'
      'message': 'An internal error occurred while building. Please submit a ticket.',
      'is_internal': True
    },

    'io.quay.builder.buildrunerror': {

@ -57,6 +63,11 @@ class WorkerError(object):
    'io.quay.builder.missingorinvalidargument': {
      'message': 'Missing required arguments for builder',
      'is_internal': True
    },

    'io.quay.builder.cachelookupissue': {
      'message': 'Error checking for a cached tag',
      'is_internal': True
    }
  }
@ -1,12 +1,17 @@
from trollius import coroutine


class BaseManager(object):
  """ Base for all worker managers. """
  def __init__(self, register_component, unregister_component, job_heartbeat_callback,
               job_complete_callback):
               job_complete_callback, manager_hostname, heartbeat_period_sec):
    self.register_component = register_component
    self.unregister_component = unregister_component
    self.job_heartbeat_callback = job_heartbeat_callback
    self.job_complete_callback = job_complete_callback
    self.manager_hostname = manager_hostname
    self.heartbeat_period_sec = heartbeat_period_sec

  @coroutine
  def job_heartbeat(self, build_job):
    """ Method invoked to tell the manager that a job is still running. This method will be called
        every few minutes. """

@ -25,26 +30,36 @@
    """
    raise NotImplementedError

  def schedule(self, build_job, loop):
  @coroutine
  def schedule(self, build_job):
    """ Schedules a queue item to be built. Returns True if the item was properly scheduled
        and False if all workers are busy.
    """
    raise NotImplementedError

  def initialize(self):
  def initialize(self, manager_config):
    """ Runs any initialization code for the manager. Called once the server is in a ready state.
    """
    raise NotImplementedError

  @coroutine
  def build_component_ready(self, build_component):
    """ Method invoked whenever a build component announces itself as ready.
    """
    raise NotImplementedError

  def build_component_disposed(self, build_component, timed_out):
    """ Method invoked whenever a build component has been disposed. The timed_out boolean indicates
        whether the component's heartbeat timed out.
    """
    raise NotImplementedError

  @coroutine
  def job_completed(self, build_job, job_status, build_component):
    """ Method invoked once a job_item has completed, in some manner. The job_status will be
        one of: incomplete, error, complete. If incomplete, the job should be requeued.
        one of: incomplete, error, complete. Implementations of this method should call
        self.job_complete_callback with a status of Incomplete if they wish for the job to be
        automatically requeued.
    """
    raise NotImplementedError
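To make the contract concrete, here is a toy subclass of the interface above (a hedged sketch, not a real manager; the job_status string follows the docstring, and Return/coroutine come from trollius as in the managers below):

    from trollius import coroutine, Return

    class NoopManager(BaseManager):
      """ Toy manager: accepts every job and reports it complete immediately. """
      def initialize(self, manager_config):
        pass

      @coroutine
      def schedule(self, build_job):
        # Pretend the job ran; report completion through the server's callback.
        self.job_complete_callback(build_job, 'complete')
        raise Return(True)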
@ -5,7 +5,7 @@ from buildman.component.basecomponent import BaseComponent
from buildman.component.buildcomponent import BuildComponent
from buildman.manager.basemanager import BaseManager

from trollius.coroutines import From
from trollius import From, Return, coroutine

REGISTRATION_REALM = 'registration'
logger = logging.getLogger(__name__)

@ -13,9 +13,6 @@ logger = logging.getLogger(__name__)
class DynamicRegistrationComponent(BaseComponent):
  """ Component session that handles dynamic registration of the builder components. """

  def kind(self):
    return 'registration'

  def onConnect(self):
    self.join(REGISTRATION_REALM)

@ -31,10 +28,15 @@ class DynamicRegistrationComponent(BaseComponent):

class EnterpriseManager(BaseManager):
  """ Build manager implementation for the Enterprise Registry. """
  build_components = []
  shutting_down = False

  def initialize(self):
  def __init__(self, *args, **kwargs):
    self.ready_components = set()
    self.all_components = set()
    self.shutting_down = False

    super(EnterpriseManager, self).__init__(*args, **kwargs)

  def initialize(self, manager_config):
    # Add a component which is used by build workers for dynamic registration. Unlike
    # production, build workers in enterprise are long-lived and register dynamically.
    self.register_component(REGISTRATION_REALM, DynamicRegistrationComponent)

@ -48,31 +50,37 @@ class EnterpriseManager(BaseManager):
    """ Adds a new build component for an Enterprise Registry. """
    # Generate a new unique realm ID for the build worker.
    realm = str(uuid.uuid4())
    component = self.register_component(realm, BuildComponent, token="")
    self.build_components.append(component)
    new_component = self.register_component(realm, BuildComponent, token="")
    self.all_components.add(new_component)
    return realm

  def schedule(self, build_job, loop):
  @coroutine
  def schedule(self, build_job):
    """ Schedules a build for an Enterprise Registry. """
    if self.shutting_down:
    if self.shutting_down or not self.ready_components:
      return False
      raise Return(False)

    for component in self.build_components:
      if component.is_ready():
        loop.call_soon(component.start_build, build_job)
        return True
    return False
    component = self.ready_components.pop()
    yield From(component.start_build(build_job))

    raise Return(True)

  @coroutine
  def build_component_ready(self, build_component):
    self.ready_components.add(build_component)

  def shutdown(self):
    self.shutting_down = True

  @coroutine
  def job_completed(self, build_job, job_status, build_component):
    self.job_complete_callback(build_job, job_status)

  def build_component_disposed(self, build_component, timed_out):
    self.build_components.remove(build_component)
    self.all_components.remove(build_component)
    self.unregister_component(build_component)
    if build_component in self.ready_components:
      self.ready_components.remove(build_component)

  def num_workers(self):
    return len(self.build_components)
    return len(self.all_components)
328 buildman/manager/ephemeral.py (Normal file)

@ -0,0 +1,328 @@
import logging
import etcd
import uuid
import calendar
import os.path
import json

from datetime import datetime, timedelta
from trollius import From, coroutine, Return, async
from concurrent.futures import ThreadPoolExecutor
from urllib3.exceptions import ReadTimeoutError, ProtocolError

from buildman.manager.basemanager import BaseManager
from buildman.manager.executor import PopenExecutor, EC2Executor
from buildman.component.buildcomponent import BuildComponent
from buildman.jobutil.buildjob import BuildJob
from buildman.asyncutil import AsyncWrapper
from util.morecollections import AttrDict


logger = logging.getLogger(__name__)


ETCD_DISABLE_TIMEOUT = 0


class EtcdAction(object):
  GET = 'get'
  SET = 'set'
  EXPIRE = 'expire'
  UPDATE = 'update'
  DELETE = 'delete'
  CREATE = 'create'
  COMPARE_AND_SWAP = 'compareAndSwap'
  COMPARE_AND_DELETE = 'compareAndDelete'


class EphemeralBuilderManager(BaseManager):
  """ Build manager implementation for the Enterprise Registry. """
  _executors = {
    'popen': PopenExecutor,
    'ec2': EC2Executor,
  }

  _etcd_client_klass = etcd.Client

  def __init__(self, *args, **kwargs):
    self._shutting_down = False

    self._manager_config = None
    self._async_thread_executor = None
    self._etcd_client = None

    self._etcd_realm_prefix = None
    self._etcd_builder_prefix = None

    self._component_to_job = {}
    self._job_uuid_to_component = {}
    self._component_to_builder = {}

    self._executor = None

    # Map of etcd keys being watched to the tasks watching them
    self._watch_tasks = {}

    super(EphemeralBuilderManager, self).__init__(*args, **kwargs)

  def _watch_etcd(self, etcd_key, change_callback, recursive=True):
    watch_task_key = (etcd_key, recursive)
    def callback_wrapper(changed_key_future):
      if watch_task_key not in self._watch_tasks or self._watch_tasks[watch_task_key].done():
        self._watch_etcd(etcd_key, change_callback)

      if changed_key_future.cancelled():
        # Due to lack of interest, tomorrow has been cancelled
        return

      try:
        etcd_result = changed_key_future.result()
      except (ReadTimeoutError, ProtocolError):
        return

      change_callback(etcd_result)

    if not self._shutting_down:
      watch_future = self._etcd_client.watch(etcd_key, recursive=recursive,
                                             timeout=ETCD_DISABLE_TIMEOUT)
      watch_future.add_done_callback(callback_wrapper)
      logger.debug('Scheduling watch of key: %s%s', etcd_key, '/*' if recursive else '')
      self._watch_tasks[watch_task_key] = async(watch_future)

  def _handle_builder_expiration(self, etcd_result):
    if etcd_result.action == EtcdAction.EXPIRE:
      # Handle the expiration
      logger.debug('Builder expired, clean up the old build node')
      job_metadata = json.loads(etcd_result._prev_node.value)

      if 'builder_id' in job_metadata:
        logger.info('Terminating expired build node.')
        async(self._executor.stop_builder(job_metadata['builder_id']))

  def _handle_realm_change(self, etcd_result):
    if etcd_result.action == EtcdAction.CREATE:
      # We must listen on the realm created by ourselves or another worker
      realm_spec = json.loads(etcd_result.value)
      self._register_realm(realm_spec)

    elif etcd_result.action == EtcdAction.DELETE or etcd_result.action == EtcdAction.EXPIRE:
      # We must stop listening for new connections on the specified realm, if we did not get the
      # connection
      realm_spec = json.loads(etcd_result._prev_node.value)
      build_job = BuildJob(AttrDict(realm_spec['job_queue_item']))
      component = self._job_uuid_to_component.pop(build_job.job_details['build_uuid'], None)
      if component is not None:
        # We were not the manager which the worker connected to, remove the bookkeeping for it
        logger.debug('Unregistering unused component on realm: %s', realm_spec['realm'])
        del self._component_to_job[component]
        del self._component_to_builder[component]
        self.unregister_component(component)

    else:
      logger.warning('Unexpected action (%s) on realm key: %s', etcd_result.action, etcd_result.key)

  def _register_realm(self, realm_spec):
    logger.debug('Registering realm with manager: %s', realm_spec['realm'])
    component = self.register_component(realm_spec['realm'], BuildComponent,
                                        token=realm_spec['token'])
    build_job = BuildJob(AttrDict(realm_spec['job_queue_item']))
    self._component_to_job[component] = build_job
    self._component_to_builder[component] = realm_spec['builder_id']
    self._job_uuid_to_component[build_job.job_details['build_uuid']] = component

  @coroutine
  def _register_existing_realms(self):
    try:
      all_realms = yield From(self._etcd_client.read(self._etcd_realm_prefix, recursive=True))
      for realm in all_realms.children:
        if not realm.dir:
          self._register_realm(json.loads(realm.value))
    except KeyError:
      # no realms have been registered yet
      pass

  def initialize(self, manager_config):
    logger.debug('Calling initialize')
    self._manager_config = manager_config

    executor_klass = self._executors.get(manager_config.get('EXECUTOR', ''), PopenExecutor)
    self._executor = executor_klass(manager_config.get('EXECUTOR_CONFIG', {}),
                                    self.manager_hostname)

    etcd_host = self._manager_config.get('ETCD_HOST', '127.0.0.1')
    etcd_port = self._manager_config.get('ETCD_PORT', 2379)
    etcd_auth = self._manager_config.get('ETCD_CERT_AND_KEY', None)
    etcd_ca_cert = self._manager_config.get('ETCD_CA_CERT', None)
    etcd_protocol = 'http' if etcd_auth is None else 'https'
    logger.debug('Connecting to etcd on %s:%s', etcd_host, etcd_port)

    worker_threads = self._manager_config.get('ETCD_WORKER_THREADS', 5)
    self._async_thread_executor = ThreadPoolExecutor(worker_threads)
    self._etcd_client = AsyncWrapper(self._etcd_client_klass(host=etcd_host, port=etcd_port,
                                                             cert=etcd_auth, ca_cert=etcd_ca_cert,
                                                             protocol=etcd_protocol),
                                     executor=self._async_thread_executor)

    self._etcd_builder_prefix = self._manager_config.get('ETCD_BUILDER_PREFIX', 'building/')
    self._watch_etcd(self._etcd_builder_prefix, self._handle_builder_expiration)

    self._etcd_realm_prefix = self._manager_config.get('ETCD_REALM_PREFIX', 'realm/')
    self._watch_etcd(self._etcd_realm_prefix, self._handle_realm_change)

    # Load components for all realms currently known to the cluster
    async(self._register_existing_realms())

  def setup_time(self):
    setup_time = self._manager_config.get('MACHINE_SETUP_TIME', 300)
    return setup_time

  def shutdown(self):
    logger.debug('Shutting down worker.')
    self._shutting_down = True

    for (etcd_key, _), task in self._watch_tasks.items():
      if not task.done():
        logger.debug('Canceling watch task for %s', etcd_key)
        task.cancel()

    if self._async_thread_executor is not None:
      logger.debug('Shutting down thread pool executor.')
      self._async_thread_executor.shutdown()

  @coroutine
  def schedule(self, build_job):
    build_uuid = build_job.job_details['build_uuid']
    logger.debug('Calling schedule with job: %s', build_uuid)

    # Check if there are worker slots available by checking the number of jobs in etcd
    allowed_worker_count = self._manager_config.get('ALLOWED_WORKER_COUNT', 1)
    try:
      building = yield From(self._etcd_client.read(self._etcd_builder_prefix, recursive=True))
      workers_alive = sum(1 for child in building.children if not child.dir)
    except KeyError:
      workers_alive = 0

    logger.debug('Total jobs: %s', workers_alive)

    if workers_alive >= allowed_worker_count:
      logger.info('Too many workers alive, unable to start new worker. %s >= %s', workers_alive,
                  allowed_worker_count)
      raise Return(False)

    job_key = self._etcd_job_key(build_job)

    # First try to take a lock for this job, meaning we will be responsible for its lifeline
    realm = str(uuid.uuid4())
    token = str(uuid.uuid4())
    ttl = self.setup_time()
    expiration = datetime.utcnow() + timedelta(seconds=ttl)

    machine_max_expiration = self._manager_config.get('MACHINE_MAX_TIME', 7200)
    max_expiration = datetime.utcnow() + timedelta(seconds=machine_max_expiration)

    payload = {
      'expiration': calendar.timegm(expiration.timetuple()),
      'max_expiration': calendar.timegm(max_expiration.timetuple()),
    }

    try:
      yield From(self._etcd_client.write(job_key, json.dumps(payload), prevExist=False, ttl=ttl))
    except KeyError:
      # The job was already taken by someone else, we are probably a retry
      logger.error('Job already exists in etcd, are timeouts misconfigured or is the queue broken?')
      raise Return(False)

    logger.debug('Starting builder with executor: %s', self._executor)
    builder_id = yield From(self._executor.start_builder(realm, token, build_uuid))

    # Store the builder in etcd associated with the job id
    payload['builder_id'] = builder_id
    yield From(self._etcd_client.write(job_key, json.dumps(payload), prevExist=True, ttl=ttl))

    # Store the realm spec which will allow any manager to accept this builder when it connects
    realm_spec = json.dumps({
      'realm': realm,
      'token': token,
      'builder_id': builder_id,
      'job_queue_item': build_job.job_item,
    })
    try:
      yield From(self._etcd_client.write(self._etcd_realm_key(realm), realm_spec, prevExist=False,
                                         ttl=ttl))
    except KeyError:
      logger.error('Realm already exists in etcd. UUID collision or something is very very wrong.')
      raise Return(False)

    raise Return(True)

  @coroutine
  def build_component_ready(self, build_component):
    try:
      # Clean up the bookkeeping for allowing any manager to take the job
      job = self._component_to_job.pop(build_component)
      del self._job_uuid_to_component[job.job_details['build_uuid']]
      yield From(self._etcd_client.delete(self._etcd_realm_key(build_component.builder_realm)))

      logger.debug('Sending build %s to newly ready component on realm %s',
                   job.job_details['build_uuid'], build_component.builder_realm)
      yield From(build_component.start_build(job))
    except KeyError:
      logger.debug('Builder is asking for more work, but work already completed')

  def build_component_disposed(self, build_component, timed_out):
    logger.debug('Calling build_component_disposed.')

    # TODO make it so that I don't have to unregister the component if it timed out
    self.unregister_component(build_component)

  @coroutine
  def job_completed(self, build_job, job_status, build_component):
    logger.debug('Calling job_completed with status: %s', job_status)

    # Kill the ephemeral builder
    yield From(self._executor.stop_builder(self._component_to_builder.pop(build_component)))

    # Release the lock in etcd
    job_key = self._etcd_job_key(build_job)
    yield From(self._etcd_client.delete(job_key))

    self.job_complete_callback(build_job, job_status)

  @coroutine
  def job_heartbeat(self, build_job):
    # Extend the deadline in etcd
    job_key = self._etcd_job_key(build_job)
    build_job_metadata_response = yield From(self._etcd_client.read(job_key))
    build_job_metadata = json.loads(build_job_metadata_response.value)

    max_expiration = datetime.utcfromtimestamp(build_job_metadata['max_expiration'])
    max_expiration_remaining = max_expiration - datetime.utcnow()
    max_expiration_sec = max(0, int(max_expiration_remaining.total_seconds()))

    ttl = min(self.heartbeat_period_sec * 2, max_expiration_sec)
    new_expiration = datetime.utcnow() + timedelta(seconds=ttl)

    payload = {
      'expiration': calendar.timegm(new_expiration.timetuple()),
      'builder_id': build_job_metadata['builder_id'],
      'max_expiration': build_job_metadata['max_expiration'],
    }

    yield From(self._etcd_client.write(job_key, json.dumps(payload), ttl=ttl))

    self.job_heartbeat_callback(build_job)

  def _etcd_job_key(self, build_job):
    """ Create a key which is used to track a job in etcd.
    """
    return os.path.join(self._etcd_builder_prefix, build_job.job_details['build_uuid'])

  def _etcd_realm_key(self, realm):
    """ Create a key which is used to track an incoming connection on a realm.
    """
    return os.path.join(self._etcd_realm_prefix, realm)

  def num_workers(self):
    """ Return the number of workers we're managing locally.
    """
    return len(self._component_to_builder)
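To make the bookkeeping concrete, a hedged sketch of the keys this manager writes (prefixes are the defaults from initialize(); the UUIDs, timestamps, and instance id are illustrative, not real values):

    # building/<build_uuid> -- the job lock, written with a TTL so an expired
    # builder is garbage-collected by _handle_builder_expiration:
    {"expiration": 1422748800, "max_expiration": 1422756000, "builder_id": "i-0abc1234"}

    # realm/<realm_uuid> -- lets whichever manager the worker connects to adopt it:
    {"realm": "...", "token": "...", "builder_id": "i-0abc1234", "job_queue_item": {...}}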
238 buildman/manager/executor.py (Normal file)

@ -0,0 +1,238 @@
import logging
import os
import uuid
import threading
import subprocess
import boto.ec2
import requests
import cachetools

from jinja2 import FileSystemLoader, Environment
from trollius import coroutine, From, Return, get_event_loop
from functools import partial

from buildman.asyncutil import AsyncWrapper
from container_cloud_config import CloudConfigContext


logger = logging.getLogger(__name__)


ONE_HOUR = 60*60

ENV = Environment(loader=FileSystemLoader('buildman/templates'))
TEMPLATE = ENV.get_template('cloudconfig.yaml')
CloudConfigContext().populate_jinja_environment(ENV)


class ExecutorException(Exception):
  """ Exception raised when there is a problem starting or stopping a builder.
  """
  pass


class BuilderExecutor(object):
  """ Interface which can be plugged into the EphemeralNodeManager to provide a strategy for
      starting and stopping builders.
  """
  def __init__(self, executor_config, manager_hostname):
    self.executor_config = executor_config
    self.manager_hostname = manager_hostname

  @coroutine
  def start_builder(self, realm, token, build_uuid):
    """ Create a builder with the specified config. Returns a unique id which can be used to manage
        the builder.
    """
    raise NotImplementedError

  @coroutine
  def stop_builder(self, builder_id):
    """ Stop a builder which is currently running.
    """
    raise NotImplementedError

  def get_manager_websocket_url(self):
    return 'ws://{0}:'

  def generate_cloud_config(self, realm, token, coreos_channel, manager_hostname,
                            quay_username=None, quay_password=None):
    if quay_username is None:
      quay_username = self.executor_config['QUAY_USERNAME']

    if quay_password is None:
      quay_password = self.executor_config['QUAY_PASSWORD']

    return TEMPLATE.render(
      realm=realm,
      token=token,
      quay_username=quay_username,
      quay_password=quay_password,
      manager_hostname=manager_hostname,
      coreos_channel=coreos_channel,
      worker_tag=self.executor_config['WORKER_TAG'],
    )


class EC2Executor(BuilderExecutor):
  """ Implementation of BuilderExecutor which uses boto to start builder machines as EC2
      instances.
  """
  COREOS_STACK_URL = 'http://%s.release.core-os.net/amd64-usr/current/coreos_production_ami_hvm.txt'

  def __init__(self, *args, **kwargs):
    self._loop = get_event_loop()
    super(EC2Executor, self).__init__(*args, **kwargs)

  def _get_conn(self):
    """ Creates an ec2 connection which can be used to manage instances.
    """
    return AsyncWrapper(boto.ec2.connect_to_region(
      self.executor_config['EC2_REGION'],
      aws_access_key_id=self.executor_config['AWS_ACCESS_KEY'],
      aws_secret_access_key=self.executor_config['AWS_SECRET_KEY'],
    ))

  @classmethod
  @cachetools.ttl_cache(ttl=ONE_HOUR)
  def _get_coreos_ami(cls, ec2_region, coreos_channel):
    """ Retrieve the CoreOS AMI id from the canonical listing.
    """
    stack_list_string = requests.get(EC2Executor.COREOS_STACK_URL % coreos_channel).text
    stack_amis = dict([stack.split('=') for stack in stack_list_string.split('|')])
    return stack_amis[ec2_region]

  @coroutine
  def start_builder(self, realm, token, build_uuid):
    region = self.executor_config['EC2_REGION']
    channel = self.executor_config.get('COREOS_CHANNEL', 'stable')
    get_ami_callable = partial(self._get_coreos_ami, region, channel)
    coreos_ami = yield From(self._loop.run_in_executor(None, get_ami_callable))
    user_data = self.generate_cloud_config(realm, token, channel, self.manager_hostname)

    logger.debug('Generated cloud config: %s', user_data)

    ec2_conn = self._get_conn()

    ssd_root_ebs = boto.ec2.blockdevicemapping.BlockDeviceType(
      size=32,
      volume_type='gp2',
      delete_on_termination=True,
    )
    block_devices = boto.ec2.blockdevicemapping.BlockDeviceMapping()
    block_devices['/dev/xvda'] = ssd_root_ebs

    interface = boto.ec2.networkinterface.NetworkInterfaceSpecification(
      subnet_id=self.executor_config['EC2_VPC_SUBNET_ID'],
      groups=self.executor_config['EC2_SECURITY_GROUP_IDS'],
      associate_public_ip_address=True,
    )
    interfaces = boto.ec2.networkinterface.NetworkInterfaceCollection(interface)

    reservation = yield From(ec2_conn.run_instances(
      coreos_ami,
      instance_type=self.executor_config['EC2_INSTANCE_TYPE'],
      key_name=self.executor_config.get('EC2_KEY_NAME', None),
      user_data=user_data,
      instance_initiated_shutdown_behavior='terminate',
      block_device_map=block_devices,
      network_interfaces=interfaces,
    ))

    if not reservation.instances:
      raise ExecutorException('Unable to spawn builder instance.')
    elif len(reservation.instances) != 1:
      raise ExecutorException('EC2 started wrong number of instances!')

    launched = AsyncWrapper(reservation.instances[0])
    yield From(launched.add_tags({
      'Name': 'Quay Ephemeral Builder',
      'Realm': realm,
      'Token': token,
      'BuildUUID': build_uuid,
    }))
    raise Return(launched.id)

  @coroutine
  def stop_builder(self, builder_id):
    ec2_conn = self._get_conn()
    terminated_instances = yield From(ec2_conn.terminate_instances([builder_id]))
    if builder_id not in [si.id for si in terminated_instances]:
      raise ExecutorException('Unable to terminate instance: %s' % builder_id)


class PopenExecutor(BuilderExecutor):
  """ Implementation of BuilderExecutor which uses Popen to fork a quay-builder process.
  """
  def __init__(self, executor_config, manager_hostname):
    self._jobs = {}

    super(PopenExecutor, self).__init__(executor_config, manager_hostname)

  @coroutine
  def start_builder(self, realm, token, build_uuid):
    # Now start a machine for this job, adding the machine id to the etcd information
    logger.debug('Forking process for build')
    builder_env = {
      'TOKEN': token,
      'REALM': realm,
      'ENDPOINT': 'ws://localhost:8787',
      'DOCKER_TLS_VERIFY': os.environ.get('DOCKER_TLS_VERIFY', ''),
      'DOCKER_CERT_PATH': os.environ.get('DOCKER_CERT_PATH', ''),
      'DOCKER_HOST': os.environ.get('DOCKER_HOST', ''),
    }

    logpipe = LogPipe(logging.INFO)
    spawned = subprocess.Popen('/Users/jake/bin/quay-builder', stdout=logpipe, stderr=logpipe,
                               env=builder_env)

    builder_id = str(uuid.uuid4())
    self._jobs[builder_id] = (spawned, logpipe)
    logger.debug('Builder spawned with id: %s', builder_id)
    raise Return(builder_id)

  @coroutine
  def stop_builder(self, builder_id):
    if builder_id not in self._jobs:
      raise ExecutorException('Builder id not being tracked by executor.')

    logger.debug('Killing builder with id: %s', builder_id)
    spawned, logpipe = self._jobs[builder_id]

    if spawned.poll() is None:
      spawned.kill()
    logpipe.close()


class LogPipe(threading.Thread):
  """ Adapted from http://codereview.stackexchange.com/a/17959
  """
  def __init__(self, level):
    """Setup the object with a logger and a loglevel and start the thread
    """
    threading.Thread.__init__(self)
    self.daemon = False
    self.level = level
    self.fd_read, self.fd_write = os.pipe()
    self.pipe_reader = os.fdopen(self.fd_read)
    self.start()

  def fileno(self):
    """Return the write file descriptor of the pipe
    """
    return self.fd_write

  def run(self):
    """Run the thread, logging everything.
    """
    for line in iter(self.pipe_reader.readline, ''):
      logging.log(self.level, line.strip('\n'))

    self.pipe_reader.close()

  def close(self):
    """Close the write end of the pipe.
    """
    os.close(self.fd_write)
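A hedged sketch of a manager configuration that would select the EC2 executor; the keys mirror the lookups in EC2Executor above, and every value is a placeholder:

    manager_config = {
      'EXECUTOR': 'ec2',
      'EXECUTOR_CONFIG': {
        'EC2_REGION': 'us-east-1',                 # placeholder region
        'EC2_INSTANCE_TYPE': 'm3.medium',          # placeholder instance type
        'EC2_VPC_SUBNET_ID': 'subnet-0123abcd',    # placeholder subnet
        'EC2_SECURITY_GROUP_IDS': ['sg-0123abcd'],
        'AWS_ACCESS_KEY': '<access key>',
        'AWS_SECRET_KEY': '<secret key>',
        'QUAY_USERNAME': '<robot user>',
        'QUAY_PASSWORD': '<robot token>',
        'WORKER_TAG': 'latest',
        'COREOS_CHANNEL': 'stable',
      },
    }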
@ -12,8 +12,11 @@ from threading import Event
from trollius.coroutines import From
from datetime import timedelta

from buildman.jobutil.buildstatus import StatusHandler
from buildman.jobutil.buildjob import BuildJob, BuildJobLoadException
from data import database
from data.queue import WorkQueue
from app import app

logger = logging.getLogger(__name__)

@ -22,8 +25,7 @@ TIMEOUT_PERIOD_MINUTES = 20
JOB_TIMEOUT_SECONDS = 300
MINIMUM_JOB_EXTENSION = timedelta(minutes=2)

WEBSOCKET_PORT = 8787
CONTROLLER_PORT = 8686
HEARTBEAT_PERIOD_SEC = 30

class BuildJobResult(object):
  """ Build job result enum """

@ -35,14 +37,15 @@ class BuilderServer(object):
  """ Server which handles both HTTP and WAMP requests, managing the full state of the build
      controller.
  """
  def __init__(self, server_hostname, queue, build_logs, user_files, lifecycle_manager_klass):
  def __init__(self, registry_hostname, queue, build_logs, user_files, lifecycle_manager_klass,
               lifecycle_manager_config, manager_hostname):
    self._loop = None
    self._current_status = 'starting'
    self._current_components = []
    self._job_count = 0

    self._session_factory = RouterSessionFactory(RouterFactory())
    self._server_hostname = server_hostname
    self._registry_hostname = registry_hostname
    self._queue = queue
    self._build_logs = build_logs
    self._user_files = user_files

@ -50,8 +53,11 @@ class BuilderServer(object):
      self._register_component,
      self._unregister_component,
      self._job_heartbeat,
      self._job_complete
      self._job_complete,
      manager_hostname,
      HEARTBEAT_PERIOD_SEC,
    )
    self._lifecycle_manager_config = lifecycle_manager_config

    self._shutdown_event = Event()
    self._current_status = 'running'

@ -81,18 +87,17 @@ class BuilderServer(object):

    self._controller_app = controller_app

  def run(self, host, ssl=None):
  def run(self, host, websocket_port, controller_port, ssl=None):
    logger.debug('Initializing the lifecycle manager')
    self._lifecycle_manager.initialize()
    self._lifecycle_manager.initialize(self._lifecycle_manager_config)

    logger.debug('Initializing all members of the event loop')
    loop = trollius.get_event_loop()
    trollius.Task(self._initialize(loop, host, ssl))

    logger.debug('Starting server on port %s, with controller on port %s', WEBSOCKET_PORT,
    logger.debug('Starting server on port %s, with controller on port %s', websocket_port,
                 CONTROLLER_PORT)
                 controller_port)
    try:
      loop.run_forever()
      loop.run_until_complete(self._initialize(loop, host, websocket_port, controller_port, ssl))
    except KeyboardInterrupt:
      pass
    finally:

@ -116,7 +121,7 @@ class BuilderServer(object):
    component.parent_manager = self._lifecycle_manager
    component.build_logs = self._build_logs
    component.user_files = self._user_files
    component.server_hostname = self._server_hostname
    component.registry_hostname = self._registry_hostname

    self._current_components.append(component)
    self._session_factory.add(component)

@ -130,32 +135,32 @@ class BuilderServer(object):
    self._session_factory.remove(component)

  def _job_heartbeat(self, build_job):
    WorkQueue.extend_processing(build_job.job_item(), seconds_from_now=JOB_TIMEOUT_SECONDS,
                                retry_count=1, minimum_extension=MINIMUM_JOB_EXTENSION)
    self._queue.extend_processing(build_job.job_item, seconds_from_now=JOB_TIMEOUT_SECONDS,
                                  minimum_extension=MINIMUM_JOB_EXTENSION)

  def _job_complete(self, build_job, job_status):
    if job_status == BuildJobResult.INCOMPLETE:
      self._queue.incomplete(build_job.job_item(), restore_retry=True, retry_after=30)
      self._queue.incomplete(build_job.job_item, restore_retry=False, retry_after=30)
    elif job_status == BuildJobResult.ERROR:
      self._queue.incomplete(build_job.job_item(), restore_retry=False)
    else:
      self._queue.complete(build_job.job_item())
      self._queue.complete(build_job.job_item)

    self._job_count = self._job_count - 1

    if self._current_status == 'shutting_down' and not self._job_count:
      self._shutdown_event.set()

    # TODO(jschorr): check for work here?

  @trollius.coroutine
  def _work_checker(self):
    while self._current_status == 'running':
      logger.debug('Checking for more work for %d active workers', self._lifecycle_manager.num_workers())
      with database.CloseForLongOperation(app.config):
        yield From(trollius.sleep(WORK_CHECK_TIMEOUT))

      logger.debug('Checking for more work for %d active workers',
                   self._lifecycle_manager.num_workers())

      job_item = self._queue.get(processing_time=self._lifecycle_manager.setup_time())
      if job_item is None:
        logger.debug('No additional work found. Going to sleep for %s seconds', WORK_CHECK_TIMEOUT)
        yield From(trollius.sleep(WORK_CHECK_TIMEOUT))
        continue

      try:

@ -163,20 +168,24 @@ class BuilderServer(object):
      except BuildJobLoadException as irbe:
        logger.exception(irbe)
        self._queue.incomplete(job_item, restore_retry=False)
        continue

      logger.debug('Build job found. Checking for an available worker.')
      if self._lifecycle_manager.schedule(build_job, self._loop):
      scheduled = yield From(self._lifecycle_manager.schedule(build_job))
      if scheduled:
        status_handler = StatusHandler(self._build_logs, build_job.repo_build.uuid)
        status_handler.set_phase('build-scheduled')

        self._job_count = self._job_count + 1
        logger.debug('Build job scheduled. Running: %s', self._job_count)
      else:
        logger.debug('All workers are busy. Requeuing.')
        self._queue.incomplete(job_item, restore_retry=True, retry_after=0)

      yield From(trollius.sleep(WORK_CHECK_TIMEOUT))

  @trollius.coroutine
  def _initialize(self, loop, host, ssl=None):
  def _initialize(self, loop, host, websocket_port, controller_port, ssl=None):
    self._loop = loop

    # Create the WAMP server.

@ -184,8 +193,8 @@ class BuilderServer(object):
    transport_factory.setProtocolOptions(failByDrop=True)

    # Initialize the controller server and the WAMP server
    create_wsgi_server(self._controller_app, loop=loop, host=host, port=CONTROLLER_PORT, ssl=ssl)
    create_wsgi_server(self._controller_app, loop=loop, host=host, port=controller_port, ssl=ssl)
    yield From(loop.create_server(transport_factory, host, WEBSOCKET_PORT, ssl=ssl))
    yield From(loop.create_server(transport_factory, host, websocket_port, ssl=ssl))

    # Initialize the work queue checker.
    yield From(self._work_checker())
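Putting the new signature together, a hedged wiring sketch (the hostnames are placeholders; the ports reuse the values previously hard-coded as WEBSOCKET_PORT and CONTROLLER_PORT):

    server = BuilderServer('registry.example.com', dockerfile_build_queue, build_logs,
                           user_files, EphemeralBuilderManager, manager_config,
                           'buildman.example.com')
    server.run('0.0.0.0', websocket_port=8787, controller_port=8686)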
31 buildman/templates/cloudconfig.yaml (Normal file)

@ -0,0 +1,31 @@
#cloud-config

ssh_authorized_keys:
- ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCC0m+hVmyR3vn/xoxJe9+atRWBxSK+YXgyufNVDMcb7H00Jfnc341QH3kDVYZamUbhVh/nyc2RP7YbnZR5zORFtgOaNSdkMYrPozzBvxjnvSUokkCCWbLqXDHvIKiR12r+UTSijPJE/Yk702Mb2ejAFuae1C3Ec+qKAoOCagDjpQ3THyb5oaKE7VPHdwCWjWIQLRhC+plu77ObhoXIFJLD13gCi01L/rp4mYVCxIc2lX5A8rkK+bZHnIZwWUQ4t8SIjWxIaUo0FE7oZ83nKuNkYj5ngmLHQLY23Nx2WhE9H6NBthUpik9SmqQPtVYbhIG+bISPoH9Xs8CLrFb0VRjz Joey's Mac
- ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCo6FhAP7mFFOAzM91gtaKW7saahtaN4lur42FMMztz6aqUycIltCmvxo+3FmrXgCG30maMNU36Vm1+9QRtVQEd+eRuoIWP28t+8MT01Fh4zPuE2Wca3pOHSNo3X81FfWJLzmwEHiQKs9HPQqUhezR9PcVWVkbMyAzw85c0UycGmHGFNb0UiRd9HFY6XbgbxhZv/mvKLZ99xE3xkOzS1PNsdSNvjUKwZR7pSUPqNS5S/1NXyR4GhFTU24VPH/bTATOv2ATH+PSzsZ7Qyz9UHj38tKC+ALJHEDJ4HXGzobyOUP78cHGZOfCB5FYubq0zmOudAjKIAhwI8XTFvJ2DX1P3 jimmyzelinskie
- ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDNvw8qo9m8np7yQ/Smv/oklM8bo8VyNRZriGYBDuolWDL/mZpYCQnZJXphQo7RFdNABYistikjJlBuuwUohLf2uSq0iKoFa2TgwI43wViWzvuzU4nA02/ITD5BZdmWAFNyIoqeB50Ol4qUgDwLAZ+7Kv7uCi6chcgr9gTi99jY3GHyZjrMiXMHGVGi+FExFuzhVC2drKjbz5q6oRfQeLtNfG4psl5GU3MQU6FkX4fgoCx0r9R48/b7l4+TT7pWblJQiRfeldixu6308vyoTUEHasdkU3/X0OTaGz/h5XqTKnGQc6stvvoED3w+L3QFp0H5Z8sZ9stSsitmCBrmbcKZ jakemoshenko

write_files:
- path: /root/overrides.list
  permission: '0644'
  content: |
    REALM={{ realm }}
    TOKEN={{ token }}
    SERVER=wss://{{ manager_hostname }}

coreos:
  update:
    reboot-strategy: off
    group: {{ coreos_channel }}

  units:
  {{ dockersystemd('quay-builder',
                   'quay.io/coreos/registry-build-worker',
                   quay_username,
                   quay_password,
                   worker_tag,
                   extra_args='--net=host --privileged --env-file /root/overrides.list -v /var/run/docker.sock:/var/run/docker.sock -v /usr/share/ca-certificates:/etc/ssl/certs',
                   exec_stop_post=['/bin/sh -xc "/bin/sleep 120; /usr/bin/systemctl --no-block poweroff"'],
                   flattened=True,
                   restart_policy='no'
                   ) | indent(4) }}
@ -3,5 +3,6 @@ workers = 2
worker_class = 'gevent'
timeout = 2000
daemon = False
logconfig = 'conf/logging.conf'
logconfig = 'conf/logging_debug.conf'
pythonpath = '.'
preload_app = True
@ -1,2 +0,0 @@
#!/bin/sh
exec svlogd /var/log/dockerfilebuild/

@ -1,6 +0,0 @@
#! /bin/bash

sv start tutumdocker || exit 1

cd /
venv/bin/python -m workers.dockerfilebuild

@ -1,2 +0,0 @@
#!/bin/sh
exec svlogd /var/log/tutumdocker/
@ -1,96 +0,0 @@
#!/bin/bash

# First, make sure that cgroups are mounted correctly.
CGROUP=/sys/fs/cgroup

[ -d $CGROUP ] ||
  mkdir $CGROUP

mountpoint -q $CGROUP ||
  mount -n -t tmpfs -o uid=0,gid=0,mode=0755 cgroup $CGROUP || {
    echo "Could not make a tmpfs mount. Did you use -privileged?"
    exit 1
  }

if [ -d /sys/kernel/security ] && ! mountpoint -q /sys/kernel/security
then
  mount -t securityfs none /sys/kernel/security || {
    echo "Could not mount /sys/kernel/security."
    echo "AppArmor detection and -privileged mode might break."
  }
fi

# Mount the cgroup hierarchies exactly as they are in the parent system.
for SUBSYS in $(cut -d: -f2 /proc/1/cgroup)
do
  [ -d $CGROUP/$SUBSYS ] || mkdir $CGROUP/$SUBSYS
  mountpoint -q $CGROUP/$SUBSYS ||
    mount -n -t cgroup -o $SUBSYS cgroup $CGROUP/$SUBSYS

  # The two following sections address a bug which manifests itself
  # by a cryptic "lxc-start: no ns_cgroup option specified" when
  # trying to start containers within a container.
  # The bug seems to appear when the cgroup hierarchies are not
  # mounted on the exact same directories in the host, and in the
  # container.

  # Named, control-less cgroups are mounted with "-o name=foo"
  # (and appear as such under /proc/<pid>/cgroup) but are usually
  # mounted on a directory named "foo" (without the "name=" prefix).
  # Systemd and OpenRC (and possibly others) both create such a
  # cgroup. To avoid the aforementioned bug, we symlink "foo" to
  # "name=foo". This shouldn't have any adverse effect.
  echo $SUBSYS | grep -q ^name= && {
    NAME=$(echo $SUBSYS | sed s/^name=//)
    ln -s $SUBSYS $CGROUP/$NAME
  }

  # Likewise, on at least one system, it has been reported that
  # systemd would mount the CPU and CPU accounting controllers
  # (respectively "cpu" and "cpuacct") with "-o cpuacct,cpu"
  # but on a directory called "cpu,cpuacct" (note the inversion
  # in the order of the groups). This tries to work around it.
  [ $SUBSYS = cpuacct,cpu ] && ln -s $SUBSYS $CGROUP/cpu,cpuacct
done

# Note: as I write those lines, the LXC userland tools cannot setup
# a "sub-container" properly if the "devices" cgroup is not in its
# own hierarchy. Let's detect this and issue a warning.
grep -q :devices: /proc/1/cgroup ||
  echo "WARNING: the 'devices' cgroup should be in its own hierarchy."
grep -qw devices /proc/1/cgroup ||
  echo "WARNING: it looks like the 'devices' cgroup is not mounted."

# Now, close extraneous file descriptors.
pushd /proc/self/fd >/dev/null
for FD in *
do
  case "$FD" in
  # Keep stdin/stdout/stderr
  [012])
    ;;
  # Nuke everything else
  *)
    eval exec "$FD>&-"
    ;;
  esac
done
popd >/dev/null

# If a pidfile is still around (for example after a container restart),
# delete it so that docker can start.
rm -rf /var/run/docker.pid

chmod 777 /var/lib/lxc
chmod 777 /var/lib/docker

# If we were given a PORT environment variable, start as a simple daemon;
# otherwise, spawn a shell as well
if [ "$PORT" ]
then
  exec docker -d -H 0.0.0.0:$PORT
else
  docker -d -D -e lxc 2>&1
fi
@ -1,5 +1,5 @@
[loggers]
keys=root, gunicorn.error, gunicorn.access, application.profiler, boto, werkzeug
keys=root

[handlers]
keys=console

@ -7,39 +7,9 @@ keys=console
[formatters]
keys=generic

[logger_application.profiler]
level=DEBUG
handlers=console
propagate=0
qualname=application.profiler

[logger_root]
level=DEBUG
level=INFO
handlers=console

[logger_boto]
level=INFO
handlers=console
propagate=0
qualname=boto

[logger_werkzeug]
level=DEBUG
handlers=console
propagate=0
qualname=werkzeug

[logger_gunicorn.error]
level=INFO
handlers=console
propagate=1
qualname=gunicorn.error

[logger_gunicorn.access]
level=INFO
handlers=console
propagate=0
qualname=gunicorn.access

[handler_console]
class=StreamHandler
conf/logging_debug.conf (new file, 21 lines)

@@ -0,0 +1,21 @@
[loggers]
keys=root

[handlers]
keys=console

[formatters]
keys=generic

[logger_root]
level=DEBUG
handlers=console

[handler_console]
class=StreamHandler
formatter=generic
args=(sys.stdout, )

[formatter_generic]
format=%(asctime)s [%(process)d] [%(levelname)s] [%(name)s] %(message)s
class=logging.Formatter
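
The debug variant is loaded the same way the other logging configs are (the alembic env.py change below uses the same stdlib entry point); a minimal sketch:

import logging
from logging.config import fileConfig

# Point the stdlib config reader at the verbose config added above; the
# console handler writes to sys.stdout with the generic formatter.
fileConfig('conf/logging_debug.conf')
logging.getLogger(__name__).debug('debug logging enabled')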
config.py (20 lines changed)

@@ -36,7 +36,6 @@ def getFrontendVisibleConfig(config_dict):

 class DefaultConfig(object):
   # Flask config
-  SECRET_KEY = 'a36c9d7d-25a9-4d3f-a586-3d2f8dc40a83'
   JSONIFY_PRETTYPRINT_REGULAR = False
   SESSION_COOKIE_SECURE = False

@@ -48,8 +47,9 @@ class DefaultConfig(object):

   AVATAR_KIND = 'local'

-  REGISTRY_TITLE = 'Quay.io'
-  REGISTRY_TITLE_SHORT = 'Quay.io'
+  REGISTRY_TITLE = 'CoreOS Enterprise Registry'
+  REGISTRY_TITLE_SHORT = 'Enterprise Registry'

   CONTACT_INFO = [
     'mailto:support@quay.io',
     'irc://chat.freenode.net:6665/quayio',
@@ -132,6 +132,9 @@ class DefaultConfig(object):
   # Super user config. Note: This MUST BE an empty list for the default config.
   SUPER_USERS = []

+  # Feature Flag: Whether super users are supported.
+  FEATURE_SUPER_USERS = True
+
   # Feature Flag: Whether billing is required.
   FEATURE_BILLING = False

@@ -147,9 +150,6 @@ class DefaultConfig(object):
   # Feature flag, whether to enable olark chat
   FEATURE_OLARK_CHAT = False

-  # Feature Flag: Whether super users are supported.
-  FEATURE_SUPER_USERS = False
-
   # Feature Flag: Whether to support GitHub build triggers.
   FEATURE_GITHUB_BUILD = False

@@ -187,3 +187,11 @@ class DefaultConfig(object):

   # For enterprise:
   MAXIMUM_REPOSITORY_USAGE = 20
+
+  # System logs.
+  SYSTEM_LOGS_PATH = "/var/log/"
+  SYSTEM_SERVICE_LOGS_PATH = "/var/log/%s/current"
+  SYSTEM_SERVICES_PATH = "conf/init/"
+
+  # Services that should not be shown in the logs view.
+  SYSTEM_SERVICE_BLACKLIST = ['tutumdocker', 'dockerfilebuild']
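
The three new paths are consumed by the superuser system-logs endpoints further down in this merge; a sketch of how they fit together, using the values above (error handling elided):

import os

SYSTEM_SERVICE_LOGS_PATH = "/var/log/%s/current"
SYSTEM_SERVICES_PATH = "conf/init/"
SYSTEM_SERVICE_BLACKLIST = ['tutumdocker', 'dockerfilebuild']

def list_services():
    # Every immediate subdirectory of conf/init/ is an init-managed service,
    # minus the blacklisted ones that should not be shown in the logs view.
    names = [n for n in os.listdir(SYSTEM_SERVICES_PATH)
             if os.path.isdir(os.path.join(SYSTEM_SERVICES_PATH, n))]
    return set(names) - set(SYSTEM_SERVICE_BLACKLIST)

def read_service_log(service):
    # /var/log/<service>/current is the conventional svlogd location.
    with open(SYSTEM_SERVICE_LOGS_PATH % service) as f:
        return f.read()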
@@ -29,6 +29,16 @@ SCHEME_RANDOM_FUNCTION = {
   'postgresql+psycopg2': fn.Random,
 }

+
+def real_for_update(query):
+  return query.for_update()
+
+def null_for_update(query):
+  return query
+
+SCHEME_SPECIALIZED_FOR_UPDATE = {
+  'sqlite': null_for_update,
+}

 class CallableProxy(Proxy):
   def __call__(self, *args, **kwargs):
     if self.obj is None:
@@ -68,6 +78,7 @@ class UseThenDisconnect(object):
 db = Proxy()
 read_slave = Proxy()
 db_random_func = CallableProxy()
+db_for_update = CallableProxy()


 def validate_database_url(url, connect_timeout=5):
@@ -105,6 +116,8 @@ def configure(config_object):

   parsed_write_uri = make_url(write_db_uri)
   db_random_func.initialize(SCHEME_RANDOM_FUNCTION[parsed_write_uri.drivername])
+  db_for_update.initialize(SCHEME_SPECIALIZED_FOR_UPDATE.get(parsed_write_uri.drivername,
+                                                             real_for_update))

   read_slave_uri = config_object.get('DB_READ_SLAVE_URI', None)
   if read_slave_uri is not None:
@@ -369,6 +382,24 @@ class ImageStorageTransformation(BaseModel):
   name = CharField(index=True, unique=True)


+class ImageStorageSignatureKind(BaseModel):
+  name = CharField(index=True, unique=True)
+
+
+class ImageStorageSignature(BaseModel):
+  storage = ForeignKeyField(ImageStorage, index=True)
+  kind = ForeignKeyField(ImageStorageSignatureKind)
+  signature = TextField(null=True)
+  uploading = BooleanField(default=True, null=True)
+
+  class Meta:
+    database = db
+    read_slaves = (read_slave,)
+    indexes = (
+      (('kind', 'storage'), True),
+    )
+
+
 class DerivedImageStorage(BaseModel):
   source = ForeignKeyField(ImageStorage, null=True, related_name='source')
   derivative = ForeignKeyField(ImageStorage, related_name='derivative')
@@ -442,23 +473,10 @@ class BUILD_PHASE(object):
   PULLING = 'pulling'
   BUILDING = 'building'
   PUSHING = 'pushing'
+  WAITING = 'waiting'
   COMPLETE = 'complete'


-class RepositoryBuild(BaseModel):
-  uuid = CharField(default=uuid_generator, index=True)
-  repository = ForeignKeyField(Repository, index=True)
-  access_token = ForeignKeyField(AccessToken)
-  resource_key = CharField(index=True)
-  job_config = TextField()
-  phase = CharField(default='waiting')
-  started = DateTimeField(default=datetime.now)
-  display_name = CharField()
-  trigger = ForeignKeyField(RepositoryBuildTrigger, null=True, index=True)
-  pull_robot = QuayUserField(null=True, related_name='buildpullrobot')
-  logs_archived = BooleanField(default=False)
-
-
 class QueueItem(BaseModel):
   queue_name = CharField(index=True, max_length=1024)
   body = TextField()
@@ -468,6 +486,21 @@ class QueueItem(BaseModel):
   retries_remaining = IntegerField(default=5)


+class RepositoryBuild(BaseModel):
+  uuid = CharField(default=uuid_generator, index=True)
+  repository = ForeignKeyField(Repository, index=True)
+  access_token = ForeignKeyField(AccessToken)
+  resource_key = CharField(index=True)
+  job_config = TextField()
+  phase = CharField(default=BUILD_PHASE.WAITING)
+  started = DateTimeField(default=datetime.now)
+  display_name = CharField()
+  trigger = ForeignKeyField(RepositoryBuildTrigger, null=True, index=True)
+  pull_robot = QuayUserField(null=True, related_name='buildpullrobot')
+  logs_archived = BooleanField(default=False)
+  queue_item = ForeignKeyField(QueueItem, null=True, index=True)
+
+
 class LogEntryKind(BaseModel):
   name = CharField(index=True, unique=True)

@@ -567,4 +600,4 @@ all_models = [User, Repository, Image, AccessToken, Role, RepositoryPermission,
               Notification, ImageStorageLocation, ImageStoragePlacement,
               ExternalNotificationEvent, ExternalNotificationMethod, RepositoryNotification,
               RepositoryAuthorizedEmail, ImageStorageTransformation, DerivedImageStorage,
-              TeamMemberInvite]
+              TeamMemberInvite, ImageStorageSignature, ImageStorageSignatureKind]
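
db_for_update dispatches on the write database driver: SQLite has no SELECT ... FOR UPDATE, so null_for_update passes the query through untouched, while every other backend takes a row lock via peewee's for_update(). The intended call pattern, as a sketch (the transaction factory and models are the ones in this diff):

with config.app_config['DB_TRANSACTION_FACTORY'](db):
    # Re-select the row under a lock before mutating it, so two concurrent
    # writers cannot read-modify-write the same row.
    user = db_for_update(User.select().where(User.id == user_id)).get()
    user.username = new_username
    user.save()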
@@ -18,7 +18,8 @@ config.set_main_option('sqlalchemy.url', unquote(app.config['DB_URI']))

 # Interpret the config file for Python logging.
 # This line sets up loggers basically.
-fileConfig(config.config_file_name)
+if config.config_file_name:
+  fileConfig(config.config_file_name)

 # add your model's MetaData object here
 # for 'autogenerate' support
@@ -0,0 +1,30 @@
"""Add build queue item reference to the repositorybuild table

Revision ID: 14fe12ade3df
Revises: 5ad999136045
Create Date: 2015-02-12 16:11:57.814645

"""

# revision identifiers, used by Alembic.
revision = '14fe12ade3df'
down_revision = '5ad999136045'

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql

def upgrade(tables):
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('repositorybuild', sa.Column('queue_item_id', sa.Integer(), nullable=True))
    op.create_index('repositorybuild_queue_item_id', 'repositorybuild', ['queue_item_id'], unique=False)
    op.create_foreign_key(op.f('fk_repositorybuild_queue_item_id_queueitem'), 'repositorybuild', 'queueitem', ['queue_item_id'], ['id'])
    ### end Alembic commands ###


def downgrade(tables):
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint(op.f('fk_repositorybuild_queue_item_id_queueitem'), 'repositorybuild', type_='foreignkey')
    op.drop_index('repositorybuild_queue_item_id', table_name='repositorybuild')
    op.drop_column('repositorybuild', 'queue_item_id')
    ### end Alembic commands ###
@@ -0,0 +1,37 @@
"""Actually remove the column access_token_id

Revision ID: 1d2d86d09fcd
Revises: 14fe12ade3df
Create Date: 2015-02-12 16:27:30.260797

"""

# revision identifiers, used by Alembic.
revision = '1d2d86d09fcd'
down_revision = '14fe12ade3df'

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
from sqlalchemy.exc import InternalError

def upgrade(tables):
    ### commands auto generated by Alembic - please adjust! ###
    try:
        op.drop_constraint(u'fk_logentry_access_token_id_accesstoken', 'logentry', type_='foreignkey')
        op.drop_index('logentry_access_token_id', table_name='logentry')
        op.drop_column('logentry', 'access_token_id')
    except InternalError:
        pass
    ### end Alembic commands ###


def downgrade(tables):
    ### commands auto generated by Alembic - please adjust! ###
    try:
        op.add_column('logentry', sa.Column('access_token_id', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True))
        op.create_foreign_key(u'fk_logentry_access_token_id_accesstoken', 'logentry', 'accesstoken', ['access_token_id'], ['id'])
        op.create_index('logentry_access_token_id', 'logentry', ['access_token_id'], unique=False)
    except InternalError:
        pass
    ### end Alembic commands ###
@@ -0,0 +1,55 @@
"""Add signature storage

Revision ID: 5ad999136045
Revises: 228d1af6af1c
Create Date: 2015-02-05 15:01:54.989573

"""

# revision identifiers, used by Alembic.
revision = '5ad999136045'
down_revision = '228d1af6af1c'

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql

def upgrade(tables):
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('imagestoragesignaturekind',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_imagestoragesignaturekind'))
    )
    op.create_index('imagestoragesignaturekind_name', 'imagestoragesignaturekind', ['name'], unique=True)
    op.create_table('imagestoragesignature',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('storage_id', sa.Integer(), nullable=False),
        sa.Column('kind_id', sa.Integer(), nullable=False),
        sa.Column('signature', sa.Text(), nullable=True),
        sa.Column('uploading', sa.Boolean(), nullable=True),
        sa.ForeignKeyConstraint(['kind_id'], ['imagestoragesignaturekind.id'], name=op.f('fk_imagestoragesignature_kind_id_imagestoragesignaturekind')),
        sa.ForeignKeyConstraint(['storage_id'], ['imagestorage.id'], name=op.f('fk_imagestoragesignature_storage_id_imagestorage')),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_imagestoragesignature'))
    )
    op.create_index('imagestoragesignature_kind_id', 'imagestoragesignature', ['kind_id'], unique=False)
    op.create_index('imagestoragesignature_kind_id_storage_id', 'imagestoragesignature', ['kind_id', 'storage_id'], unique=True)
    op.create_index('imagestoragesignature_storage_id', 'imagestoragesignature', ['storage_id'], unique=False)
    ### end Alembic commands ###

    op.bulk_insert(tables.imagestoragetransformation,
                   [
                     {'id': 2, 'name':'aci'},
                   ])

    op.bulk_insert(tables.imagestoragesignaturekind,
                   [
                     {'id': 1, 'name':'gpg2'},
                   ])


def downgrade(tables):
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('imagestoragesignature')
    op.drop_table('imagestoragesignaturekind')
    ### end Alembic commands ###
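
The bulk inserts seed the lookup rows ('aci' transformation, 'gpg2' signature kind) that the model helpers added later in this merge resolve by name. A sketch of the intended flow once the migration has run (signature_bytes is a placeholder for the signer's output):

signature = find_or_create_storage_signature(storage, 'gpg2')
signature.signature = signature_bytes  # detached GPG signature text
signature.uploading = False
signature.save()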
@@ -14,7 +14,8 @@ from data.database import (User, Repository, Image, AccessToken, Role, Repositor
                            ExternalNotificationEvent, ExternalNotificationMethod,
                            RepositoryNotification, RepositoryAuthorizedEmail, TeamMemberInvite,
                            DerivedImageStorage, ImageStorageTransformation, random_string_generator,
-                           db, BUILD_PHASE, QuayUserField, validate_database_url)
+                           db, BUILD_PHASE, QuayUserField, ImageStorageSignature, QueueItem,
+                           ImageStorageSignatureKind, validate_database_url, db_for_update)
 from peewee import JOIN_LEFT_OUTER, fn
 from util.validation import (validate_username, validate_email, validate_password,
                              INVALID_PASSWORD_MESSAGE)
@@ -295,6 +296,9 @@ def delete_robot(robot_username):


 def _list_entity_robots(entity_name):
+  """ Return the list of robots for the specified entity. This MUST return a query, not a
+      materialized list so that callers can use db_for_update.
+  """
   return (User
           .select()
           .join(FederatedLogin)
@@ -903,14 +907,17 @@ def change_password(user, new_password):
   delete_notifications_by_kind(user, 'password_required')


-def change_username(user, new_username):
+def change_username(user_id, new_username):
   (username_valid, username_issue) = validate_username(new_username)
   if not username_valid:
     raise InvalidUsernameException('Invalid username %s: %s' % (new_username, username_issue))

   with config.app_config['DB_TRANSACTION_FACTORY'](db):
+    # Reload the user for update
+    user = db_for_update(User.select().where(User.id == user_id)).get()
+
     # Rename the robots
-    for robot in _list_entity_robots(user.username):
+    for robot in db_for_update(_list_entity_robots(user.username)):
       _, robot_shortname = parse_robot_username(robot.username)
       new_robot_name = format_robot_username(new_username, robot_shortname)
       robot.username = new_robot_name
@@ -1089,6 +1096,26 @@ def get_repository(namespace_name, repository_name):
     return None


+def get_image(repo, dockerfile_id):
+  try:
+    return Image.get(Image.docker_image_id == dockerfile_id, Image.repository == repo)
+  except Image.DoesNotExist:
+    return None
+
+
+def find_child_image(repo, parent_image, command):
+  try:
+    return (Image.select()
+                 .join(ImageStorage)
+                 .switch(Image)
+                 .where(Image.ancestors % ('%%/%s/%%' % parent_image.id),
+                        ImageStorage.command == command)
+                 .order_by(ImageStorage.created.desc())
+                 .get())
+  except Image.DoesNotExist:
+    return None
+
+
 def get_repo_image(namespace_name, repository_name, docker_image_id):
   def limit_to_image_id(query):
     return query.where(Image.docker_image_id == docker_image_id).limit(1)
@@ -1251,9 +1278,9 @@ def _find_or_link_image(existing_image, repository, username, translations, pref
     storage.locations = {placement.location.name
                          for placement in storage.imagestorageplacement_set}

     new_image = Image.create(docker_image_id=existing_image.docker_image_id,
                              repository=repository, storage=storage,
                              ancestors=new_image_ancestry)

     logger.debug('Storing translation %s -> %s', existing_image.id, new_image.id)
     translations[existing_image.id] = new_image.id
@@ -1317,7 +1344,28 @@ def find_create_or_link_image(docker_image_id, repository, username, translation
                         ancestors='/')


-def find_or_create_derived_storage(source, transformation_name, preferred_location):
+def find_or_create_storage_signature(storage, signature_kind):
+  found = lookup_storage_signature(storage, signature_kind)
+  if found is None:
+    kind = ImageStorageSignatureKind.get(name=signature_kind)
+    found = ImageStorageSignature.create(storage=storage, kind=kind)
+
+  return found
+
+
+def lookup_storage_signature(storage, signature_kind):
+  kind = ImageStorageSignatureKind.get(name=signature_kind)
+  try:
+    return (ImageStorageSignature
+            .select()
+            .where(ImageStorageSignature.storage == storage,
+                   ImageStorageSignature.kind == kind)
+            .get())
+  except ImageStorageSignature.DoesNotExist:
+    return None
+
+
+def find_derived_storage(source, transformation_name):
   try:
     found = (ImageStorage
              .select(ImageStorage, DerivedImageStorage)
@@ -1330,11 +1378,19 @@ def find_or_create_derived_storage(source, transformation_name, preferred_locati
     found.locations = {placement.location.name for placement in found.imagestorageplacement_set}
     return found
   except ImageStorage.DoesNotExist:
-    logger.debug('Creating storage dervied from source: %s', source.uuid)
-    trans = ImageStorageTransformation.get(name=transformation_name)
-    new_storage = _create_storage(preferred_location)
-    DerivedImageStorage.create(source=source, derivative=new_storage, transformation=trans)
-    return new_storage
+    return None
+
+
+def find_or_create_derived_storage(source, transformation_name, preferred_location):
+  existing = find_derived_storage(source, transformation_name)
+  if existing is not None:
+    return existing
+
+  logger.debug('Creating storage derived from source: %s', source.uuid)
+  trans = ImageStorageTransformation.get(name=transformation_name)
+  new_storage = _create_storage(preferred_location)
+  DerivedImageStorage.create(source=source, derivative=new_storage, transformation=trans)
+  return new_storage


 def delete_derived_storage_by_uuid(storage_uuid):
@@ -1403,7 +1459,7 @@ def set_image_metadata(docker_image_id, namespace_name, repository_name, created
                          Image.docker_image_id == docker_image_id))

   try:
-    fetched = query.get()
+    fetched = db_for_update(query).get()
   except Image.DoesNotExist:
     raise DataModelException('No image with specified id and repository')

@@ -1645,7 +1701,6 @@ def get_tag_image(namespace_name, repository_name, tag_name):
   else:
     return images[0]

-
 def get_image_by_id(namespace_name, repository_name, docker_image_id):
   image = get_repo_image_extended(namespace_name, repository_name, docker_image_id)
   if not image:
@@ -2376,6 +2431,32 @@ def confirm_team_invite(code, user):
   found.delete_instance()
   return (team, inviter)

+
+def cancel_repository_build(build):
+  with config.app_config['DB_TRANSACTION_FACTORY'](db):
+    # Reload the build for update.
+    try:
+      build = db_for_update(RepositoryBuild.select().where(RepositoryBuild.id == build.id)).get()
+    except RepositoryBuild.DoesNotExist:
+      return False
+
+    if build.phase != BUILD_PHASE.WAITING or not build.queue_item:
+      return False
+
+    # Load the build queue item for update.
+    try:
+      queue_item = db_for_update(QueueItem.select()
+                                 .where(QueueItem.id == build.queue_item.id)).get()
+    except QueueItem.DoesNotExist:
+      return False
+
+    # Check the queue item.
+    if not queue_item.available or queue_item.retries_remaining == 0:
+      return False
+
+    # Delete the queue item and build.
+    queue_item.delete_instance(recursive=True)
+    build.delete_instance()
+    return True
+
 def get_repository_usage():
   one_month_ago = date.today() - timedelta(weeks=4)
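
cancel_repository_build and change_username share one idiom introduced in this merge: an unlocked read first, then a re-read of every affected row under db_for_update inside a single transaction, re-validating preconditions before mutating. A condensed sketch of the idiom (names from this file):

with config.app_config['DB_TRANSACTION_FACTORY'](db):
    # 1. Re-read under a lock; .get() raises DoesNotExist if the row vanished.
    build = db_for_update(RepositoryBuild.select()
                          .where(RepositoryBuild.id == build.id)).get()

    # 2. Re-check the precondition now that the row cannot change underneath us.
    if build.phase != BUILD_PHASE.WAITING:
        return False

    # 3. Mutate or delete while still holding the lock.
    build.delete_instance()
    return True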
@@ -1,6 +1,6 @@
 from datetime import datetime, timedelta

-from data.database import QueueItem, db
+from data.database import QueueItem, db, db_for_update
 from util.morecollections import AttrDict


@@ -31,17 +31,28 @@ class WorkQueue(object):
                       QueueItem.processing_expires > now,
                       QueueItem.queue_name ** name_match_query))

-  def _available_jobs(self, now, name_match_query, running_query):
+  def _available_jobs(self, now, name_match_query):
     return (QueueItem
             .select()
             .where(QueueItem.queue_name ** name_match_query, QueueItem.available_after <= now,
                    ((QueueItem.available == True) | (QueueItem.processing_expires <= now)),
-                   QueueItem.retries_remaining > 0, ~(QueueItem.queue_name << running_query)))
+                   QueueItem.retries_remaining > 0))
+
+  def _available_jobs_not_running(self, now, name_match_query, running_query):
+    return (self
+            ._available_jobs(now, name_match_query)
+            .where(~(QueueItem.queue_name << running_query)))

   def _name_match_query(self):
     return '%s%%' % self._canonical_name([self._queue_name] + self._canonical_name_match_list)

-  def get_metrics(self):
+  def _item_by_id_for_update(self, queue_id):
+    return db_for_update(QueueItem.select().where(QueueItem.id == queue_id)).get()
+
+  def update_metrics(self):
+    if self._reporter is None:
+      return
+
     with self._transaction_factory(db):
       now = datetime.utcnow()
       name_match_query = self._name_match_query()
@@ -49,16 +60,9 @@ class WorkQueue(object):
       running_query = self._running_jobs(now, name_match_query)
       running_count = running_query.distinct().count()

-      available_query = self._available_jobs(now, name_match_query, running_query)
+      available_query = self._available_jobs_not_running(now, name_match_query, running_query)
       available_count = available_query.select(QueueItem.queue_name).distinct().count()

-      return (running_count, available_count)
-
-  def update_metrics(self):
-    if self._reporter is None:
-      return
-
-    (running_count, available_count) = self.get_metrics()
     self._reporter(self._currently_processing, running_count, running_count + available_count)

   def put(self, canonical_name_list, message, available_after=0, retries_remaining=5):
@@ -77,24 +81,31 @@ class WorkQueue(object):
       params['available_after'] = available_date

     with self._transaction_factory(db):
-      QueueItem.create(**params)
+      return QueueItem.create(**params)

   def get(self, processing_time=300):
     """
     Get an available item and mark it as unavailable for the default of five
-    minutes.
+    minutes. The result of this method must always be composed of simple
+    python objects which are JSON serializable for network portability reasons.
     """
     now = datetime.utcnow()

     name_match_query = self._name_match_query()

-    with self._transaction_factory(db):
-      running = self._running_jobs(now, name_match_query)
-      avail = self._available_jobs(now, name_match_query, running)
+    running = self._running_jobs(now, name_match_query)
+    avail = self._available_jobs_not_running(now, name_match_query, running)

-      item = None
-      try:
-        db_item = avail.order_by(QueueItem.id).get()
+    item = None
+    try:
+      db_item_candidate = avail.order_by(QueueItem.id).get()
+
+      with self._transaction_factory(db):
+        still_available_query = (db_for_update(self
+                                               ._available_jobs(now, name_match_query)
+                                               .where(QueueItem.id == db_item_candidate.id)))
+
+        db_item = still_available_query.get()
         db_item.available = False
         db_item.processing_expires = now + timedelta(seconds=processing_time)
         db_item.retries_remaining -= 1
@@ -103,25 +114,26 @@ class WorkQueue(object):
         item = AttrDict({
           'id': db_item.id,
           'body': db_item.body,
+          'retries_remaining': db_item.retries_remaining
         })

         self._currently_processing = True
-      except QueueItem.DoesNotExist:
-        self._currently_processing = False
+    except QueueItem.DoesNotExist:
+      self._currently_processing = False

     # Return a view of the queue item rather than an active db object
     return item

   def complete(self, completed_item):
     with self._transaction_factory(db):
-      completed_item_obj = QueueItem.get(QueueItem.id == completed_item.id)
+      completed_item_obj = self._item_by_id_for_update(completed_item.id)
       completed_item_obj.delete_instance()
       self._currently_processing = False

   def incomplete(self, incomplete_item, retry_after=300, restore_retry=False):
     with self._transaction_factory(db):
       retry_date = datetime.utcnow() + timedelta(seconds=retry_after)
-      incomplete_item_obj = QueueItem.get(QueueItem.id == incomplete_item.id)
+      incomplete_item_obj = self._item_by_id_for_update(incomplete_item.id)
       incomplete_item_obj.available_after = retry_date
       incomplete_item_obj.available = True

@@ -130,17 +142,14 @@ class WorkQueue(object):

       incomplete_item_obj.save()
       self._currently_processing = False
+      return incomplete_item_obj.retries_remaining > 0

-  @staticmethod
-  def extend_processing(queue_item_info, seconds_from_now, retry_count=None,
-                        minimum_extension=MINIMUM_EXTENSION):
-    queue_item = QueueItem.get(QueueItem.id == queue_item_info.id)
-    new_expiration = datetime.utcnow() + timedelta(seconds=seconds_from_now)
+  def extend_processing(self, item, seconds_from_now, minimum_extension=MINIMUM_EXTENSION):
+    with self._transaction_factory(db):
+      queue_item = self._item_by_id_for_update(item.id)
+      new_expiration = datetime.utcnow() + timedelta(seconds=seconds_from_now)

-    # Only actually write the new expiration to the db if it moves the expiration some minimum
-    if new_expiration - queue_item.processing_expires > minimum_extension:
-      if retry_count is not None:
-        queue_item.retries_remaining = retry_count
-
-      queue_item.processing_expires = new_expiration
-      queue_item.save()
+      # Only actually write the new expiration to the db if it moves the expiration some minimum
+      if new_expiration - queue_item.processing_expires > minimum_extension:
+        queue_item.processing_expires = new_expiration
+        queue_item.save()
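
The reworked get() splits dequeueing into a cheap candidate select outside the transaction and a short FOR UPDATE re-check inside it, so the row lock is held only for the claim itself. The consumer protocol is unchanged; a sketch of a worker loop (process and TemporaryFailure are placeholders, queue is a WorkQueue instance):

item = queue.get(processing_time=300)
if item is not None:
    try:
        process(item.body)               # item is a plain AttrDict, JSON-safe
        queue.complete(item)             # deletes the queue row
    except TemporaryFailure:
        queue.incomplete(item, retry_after=300)  # True while retries remain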
data/runmigration.py (new file, 20 lines)

@@ -0,0 +1,20 @@
import logging

from alembic.config import Config
from alembic.script import ScriptDirectory
from alembic.environment import EnvironmentContext
from alembic.migration import __name__ as migration_name

def run_alembic_migration(log_handler=None):
  if log_handler:
    logging.getLogger(migration_name).addHandler(log_handler)

  config = Config()
  config.set_main_option("script_location", "data:migrations")
  script = ScriptDirectory.from_config(config)

  def fn(rev, context):
    return script._upgrade_revs('head', rev)

  with EnvironmentContext(config, script, fn=fn, destination_rev='head'):
    script.run_env()
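
run_alembic_migration keeps no state of its own; callers optionally pass a logging.Handler to capture alembic's output, which is exactly what the new suconfig endpoint below does with its _AlembicLogHandler. Minimal standalone use:

import logging
from data.runmigration import run_alembic_migration

handler = logging.StreamHandler()  # any Handler that should see migration logs
run_alembic_migration(log_handler=handler)  # upgrades the schema to 'head'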
@@ -280,6 +280,23 @@ require_user_read = require_user_permission(UserReadPermission, scopes.READ_USER
 require_user_admin = require_user_permission(UserAdminPermission, None)
 require_fresh_user_admin = require_user_permission(UserAdminPermission, None)


+def verify_not_prod(func):
+  @add_method_metadata('enterprise_only', True)
+  @wraps(func)
+  def wrapped(*args, **kwargs):
+    # Verify that we are not running on a production (i.e. hosted) stack. If so, we fail.
+    # This should never happen (because of the feature-flag on SUPER_USERS), but we want to be
+    # absolutely sure.
+    if app.config['SERVER_HOSTNAME'].find('quay.io') >= 0:
+      logger.error('!!! Super user method called IN PRODUCTION !!!')
+      raise NotFound()
+
+    return func(*args, **kwargs)
+
+  return wrapped
+
+
 def require_fresh_login(func):
   @add_method_metadata('requires_fresh_login', True)
   @wraps(func)
@@ -385,8 +402,10 @@ import endpoints.api.repoemail
 import endpoints.api.repotoken
 import endpoints.api.robot
 import endpoints.api.search
+import endpoints.api.suconfig
 import endpoints.api.superuser
 import endpoints.api.tag
 import endpoints.api.team
 import endpoints.api.trigger
 import endpoints.api.user
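
verify_not_prod stacks with the existing endpoint decorators; a sketch of the intended use (the resource here is hypothetical, but mirrors how the suconfig endpoints below apply it):

@resource('/v1/superuser/example')
@internal_only
@show_if(features.SUPER_USERS)
class ExampleEnterpriseResource(ApiResource):
  @nickname('exampleEnterpriseCall')
  @verify_not_prod  # 404s when SERVER_HOSTNAME contains 'quay.io'
  def get(self):
    return {'ok': True}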
@@ -9,7 +9,7 @@ from app import app, userfiles as user_files, build_logs, log_archive
 from endpoints.api import (RepositoryParamResource, parse_args, query_param, nickname, resource,
                            require_repo_read, require_repo_write, validate_json_request,
                            ApiResource, internal_only, format_date, api, Unauthorized, NotFound,
-                           path_param)
+                           path_param, InvalidRequest, require_repo_admin)
 from endpoints.common import start_build
 from endpoints.trigger import BuildTrigger
 from data import model, database
@@ -72,10 +72,16 @@ def build_status_view(build_obj, can_write=False):
   # minutes. If not, then the build timed out.
   if phase != database.BUILD_PHASE.COMPLETE and phase != database.BUILD_PHASE.ERROR:
     if status is not None and 'heartbeat' in status and status['heartbeat']:
-      heartbeat = datetime.datetime.fromtimestamp(status['heartbeat'])
-      if datetime.datetime.now() - heartbeat > datetime.timedelta(minutes=1):
+      heartbeat = datetime.datetime.utcfromtimestamp(status['heartbeat'])
+      if datetime.datetime.utcnow() - heartbeat > datetime.timedelta(minutes=1):
         phase = database.BUILD_PHASE.INTERNAL_ERROR

+  # If the phase is internal error, return 'error' instead if the number of retries
+  # on the queue item is 0.
+  if phase == database.BUILD_PHASE.INTERNAL_ERROR:
+    if build_obj.queue_item is None or build_obj.queue_item.retries_remaining == 0:
+      phase = database.BUILD_PHASE.ERROR
+
   logger.debug('Can write: %s job_config: %s', can_write, build_obj.job_config)
   resp = {
     'id': build_obj.uuid,
@@ -87,7 +93,7 @@ def build_status_view(build_obj, can_write=False):
     'is_writer': can_write,
     'trigger': trigger_view(build_obj.trigger),
     'resource_key': build_obj.resource_key,
-    'pull_robot': user_view(build_obj.pull_robot) if build_obj.pull_robot else None,
+    'pull_robot': user_view(build_obj.pull_robot) if build_obj.pull_robot else None
   }

   if can_write:
@@ -201,6 +207,31 @@ class RepositoryBuildList(RepositoryParamResource):
     return resp, 201, headers


+@resource('/v1/repository/<repopath:repository>/build/<build_uuid>')
+@path_param('repository', 'The full path of the repository. e.g. namespace/name')
+@path_param('build_uuid', 'The UUID of the build')
+class RepositoryBuildResource(RepositoryParamResource):
+  """ Resource for dealing with repository builds. """
+  @require_repo_admin
+  @nickname('cancelRepoBuild')
+  def delete(self, namespace, repository, build_uuid):
+    """ Cancels a repository build if it has not yet been picked up by a build worker. """
+    try:
+      build = model.get_repository_build(build_uuid)
+    except model.InvalidRepositoryBuildException:
+      raise NotFound()
+
+    if build.repository.name != repository or build.repository.namespace_user.username != namespace:
+      raise NotFound()
+
+    if model.cancel_repository_build(build):
+      return 'Okay', 201
+    else:
+      raise InvalidRequest('Build is currently running or has finished')
+
+
 @resource('/v1/repository/<repopath:repository>/build/<build_uuid>/status')
 @path_param('repository', 'The full path of the repository. e.g. namespace/name')
 @path_param('build_uuid', 'The UUID of the build')
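
From a client, cancellation is a plain DELETE against the new route; a sketch using requests (host and token are placeholders; the caller needs repo admin rights):

import requests

resp = requests.delete(
    'https://registry.example.com/v1/repository/myorg/myrepo/build/' + build_uuid,
    headers={'Authorization': 'Bearer ' + token})

if resp.status_code == 201:
    print('build cancelled')
else:
    # InvalidRequest once the build is running or finished; 404 for a bad UUID.
    print('cannot cancel:', resp.status_code)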
endpoints/api/suconfig.py (new file, 361 lines)

@@ -0,0 +1,361 @@
import logging
import os
import json
import signal

from flask import abort, Response
from endpoints.api import (ApiResource, nickname, resource, internal_only, show_if,
                           require_fresh_login, request, validate_json_request, verify_not_prod)

from endpoints.common import common_login
from app import app, CONFIG_PROVIDER, superusers
from data import model
from data.database import configure
from auth.permissions import SuperUserPermission
from auth.auth_context import get_authenticated_user
from data.database import User
from util.config.configutil import add_enterprise_config_defaults
from util.config.validator import validate_service_for_config, SSL_FILENAMES
from data.runmigration import run_alembic_migration

import features

logger = logging.getLogger(__name__)

def database_is_valid():
  """ Returns whether the database, as configured, is valid. """
  if app.config['TESTING']:
    return False

  try:
    list(User.select().limit(1))
    return True
  except:
    return False


def database_has_users():
  """ Returns whether the database has any users defined. """
  return bool(list(User.select().limit(1)))


@resource('/v1/superuser/registrystatus')
@internal_only
@show_if(features.SUPER_USERS)
class SuperUserRegistryStatus(ApiResource):
  """ Resource for determining the status of the registry, such as if config exists,
      if a database is configured, and if it has any defined users.
  """
  @nickname('scRegistryStatus')
  @verify_not_prod
  def get(self):
    """ Returns the status of the registry. """
    # If there is no conf/stack volume, then report that status.
    if not CONFIG_PROVIDER.volume_exists():
      return {
        'status': 'missing-config-dir'
      }

    # If there is no config file, we need to setup the database.
    if not CONFIG_PROVIDER.yaml_exists():
      return {
        'status': 'config-db'
      }

    # If the database isn't yet valid, then we need to set it up.
    if not database_is_valid():
      return {
        'status': 'setup-db'
      }

    # If we have SETUP_COMPLETE, then we're ready to go!
    if app.config.get('SETUP_COMPLETE', False):
      return {
        'requires_restart': CONFIG_PROVIDER.requires_restart(app.config),
        'status': 'ready'
      }

    return {
      'status': 'create-superuser' if not database_has_users() else 'config'
    }


class _AlembicLogHandler(logging.Handler):
  def __init__(self):
    super(_AlembicLogHandler, self).__init__()
    self.records = []

  def emit(self, record):
    self.records.append({
      'level': record.levelname,
      'message': record.getMessage()
    })

@resource('/v1/superuser/setupdb')
@internal_only
@show_if(features.SUPER_USERS)
class SuperUserSetupDatabase(ApiResource):
  """ Resource for invoking alembic to setup the database. """
  @verify_not_prod
  @nickname('scSetupDatabase')
  def get(self):
    """ Invokes the alembic upgrade process. """
    # Note: This method is called after the database configuration is saved, but before the
    # database has any tables. Therefore, we only allow it to be run in that unique case.
    if CONFIG_PROVIDER.yaml_exists() and not database_is_valid():
      # Note: We need to reconfigure the database here as the config has changed.
      combined = dict(**app.config)
      combined.update(CONFIG_PROVIDER.get_yaml())

      configure(combined)
      app.config['DB_URI'] = combined['DB_URI']

      log_handler = _AlembicLogHandler()

      try:
        run_alembic_migration(log_handler)
      except Exception as ex:
        return {
          'error': str(ex)
        }

      return {
        'logs': log_handler.records
      }

    abort(403)


@resource('/v1/superuser/shutdown')
@internal_only
@show_if(features.SUPER_USERS)
class SuperUserShutdown(ApiResource):
  """ Resource for sending a shutdown signal to the container. """

  @verify_not_prod
  @nickname('scShutdownContainer')
  def post(self):
    """ Sends a signal to the phusion init system to shut down the container. """
    # Note: This method is called to set the database configuration before any super users
    # exist, so we also allow it to be called if there is no valid registry configuration setup.
    if app.config['TESTING'] or not database_has_users() or SuperUserPermission().can():
      # Note: We skip if debugging locally.
      if app.config.get('DEBUGGING') == True:
        return {}

      os.kill(1, signal.SIGINT)
      return {}

    abort(403)


@resource('/v1/superuser/config')
@internal_only
@show_if(features.SUPER_USERS)
class SuperUserConfig(ApiResource):
  """ Resource for fetching and updating the current configuration, if any. """
  schemas = {
    'UpdateConfig': {
      'id': 'UpdateConfig',
      'type': 'object',
      'description': 'Updates the YAML config file',
      'required': [
        'config',
        'hostname'
      ],
      'properties': {
        'config': {
          'type': 'object'
        },
        'hostname': {
          'type': 'string'
        }
      },
    },
  }

  @require_fresh_login
  @verify_not_prod
  @nickname('scGetConfig')
  def get(self):
    """ Returns the currently defined configuration, if any. """
    if SuperUserPermission().can():
      config_object = CONFIG_PROVIDER.get_yaml()
      return {
        'config': config_object
      }

    abort(403)

  @nickname('scUpdateConfig')
  @verify_not_prod
  @validate_json_request('UpdateConfig')
  def put(self):
    """ Updates the config.yaml file. """
    # Note: This method is called to set the database configuration before any super users
    # exist, so we also allow it to be called if there is no valid registry configuration setup.
    if not CONFIG_PROVIDER.yaml_exists() or SuperUserPermission().can():
      config_object = request.get_json()['config']
      hostname = request.get_json()['hostname']

      # Add any enterprise defaults missing from the config.
      add_enterprise_config_defaults(config_object, app.config['SECRET_KEY'], hostname)

      # Write the configuration changes to the YAML file.
      CONFIG_PROVIDER.save_yaml(config_object)

      return {
        'exists': True,
        'config': config_object
      }

    abort(403)


@resource('/v1/superuser/config/file/<filename>')
@internal_only
@show_if(features.SUPER_USERS)
class SuperUserConfigFile(ApiResource):
  """ Resource for fetching the status of config files and overriding them. """
  @nickname('scConfigFileExists')
  @verify_not_prod
  def get(self, filename):
    """ Returns whether the configuration file with the given name exists. """
    if not filename in SSL_FILENAMES:
      abort(404)

    if SuperUserPermission().can():
      return {
        'exists': CONFIG_PROVIDER.volume_file_exists(filename)
      }

    abort(403)

  @nickname('scUpdateConfigFile')
  @verify_not_prod
  def post(self, filename):
    """ Updates the configuration file with the given name. """
    if not filename in SSL_FILENAMES:
      abort(404)

    if SuperUserPermission().can():
      uploaded_file = request.files['file']
      if not uploaded_file:
        abort(400)

      CONFIG_PROVIDER.save_volume_file(filename, uploaded_file)
      return {
        'status': True
      }

    abort(403)


@resource('/v1/superuser/config/createsuperuser')
@internal_only
@show_if(features.SUPER_USERS)
class SuperUserCreateInitialSuperUser(ApiResource):
  """ Resource for creating the initial super user. """
  schemas = {
    'CreateSuperUser': {
      'id': 'CreateSuperUser',
      'type': 'object',
      'description': 'Information for creating the initial super user',
      'required': [
        'username',
        'password',
        'email'
      ],
      'properties': {
        'username': {
          'type': 'string',
          'description': 'The username for the superuser'
        },
        'password': {
          'type': 'string',
          'description': 'The password for the superuser'
        },
        'email': {
          'type': 'string',
          'description': 'The e-mail address for the superuser'
        },
      },
    },
  }

  @nickname('scCreateInitialSuperuser')
  @verify_not_prod
  @validate_json_request('CreateSuperUser')
  def post(self):
    """ Creates the initial super user, updates the underlying configuration and
        sets the current session to have that super user. """

    # Special security check: This method is only accessible when:
    #   - There is a valid config YAML file.
    #   - There are currently no users in the database (clean install)
    #
    # We do this special security check because at the point this method is called, the database
    # is clean but does not (yet) have any super users for our permissions code to check against.
    if CONFIG_PROVIDER.yaml_exists() and not database_has_users():
      data = request.get_json()
      username = data['username']
      password = data['password']
      email = data['email']

      # Create the user in the database.
      superuser = model.create_user(username, password, email, auto_verify=True)

      # Add the user to the config.
      config_object = CONFIG_PROVIDER.get_yaml()
      config_object['SUPER_USERS'] = [username]
      CONFIG_PROVIDER.save_yaml(config_object)

      # Update the in-memory config for the new superuser.
      superusers.register_superuser(username)

      # Conduct login with that user.
      common_login(superuser)

      return {
        'status': True
      }

    abort(403)


@resource('/v1/superuser/config/validate/<service>')
@internal_only
@show_if(features.SUPER_USERS)
class SuperUserConfigValidate(ApiResource):
  """ Resource for validating a block of configuration against an external service. """
  schemas = {
    'ValidateConfig': {
      'id': 'ValidateConfig',
      'type': 'object',
      'description': 'Validates configuration',
      'required': [
        'config'
      ],
      'properties': {
        'config': {
          'type': 'object'
        }
      },
    },
  }

  @nickname('scValidateConfig')
  @verify_not_prod
  @validate_json_request('ValidateConfig')
  def post(self, service):
    """ Validates the given config for the given service. """
    # Note: This method is called to validate the database configuration before any super users
    # exist, so we also allow it to be called if there is no valid registry configuration setup.
    # Note that this is also safe since this method does not access any information not given in
    # the request.
    if not CONFIG_PROVIDER.yaml_exists() or SuperUserPermission().can():
      config = request.get_json()['config']
      return validate_service_for_config(service, config)

    abort(403)
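
The status strings returned by scRegistryStatus form a small setup state machine that a setup UI can step through; summarized as a sketch (same strings as the handler above, descriptions paraphrased):

SETUP_FLOW = {
    'missing-config-dir': 'mount a conf/stack volume into the container',
    'config-db':          'no config.yaml yet: collect database settings',
    'setup-db':           'config.yaml exists, schema empty: call scSetupDatabase',
    'create-superuser':   'schema ready, no users: call scCreateInitialSuperuser',
    'config':             'users exist: finish the remaining configuration',
    'ready':              'SETUP_COMPLETE set; may still require a restart',
}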
@@ -1,15 +1,16 @@
 import string
 import logging
 import json
+import os
 
 from random import SystemRandom
-from app import app
+from app import app, avatar, superusers
 from flask import request
 
 from endpoints.api import (ApiResource, nickname, resource, validate_json_request, request_error,
                            log_action, internal_only, NotFound, require_user_admin, format_date,
                            InvalidToken, require_scope, format_date, hide_if, show_if, parse_args,
-                           query_param, abort, require_fresh_login, path_param)
+                           query_param, abort, require_fresh_login, path_param, verify_not_prod)
 
 from endpoints.api.logs import get_logs
@@ -22,18 +23,76 @@ import features
 
 logger = logging.getLogger(__name__)
 
+def get_immediate_subdirectories(directory):
+  return [name for name in os.listdir(directory) if os.path.isdir(os.path.join(directory, name))]
+
+
+def get_services():
+  services = set(get_immediate_subdirectories(app.config['SYSTEM_SERVICES_PATH']))
+  services = services - set(app.config['SYSTEM_SERVICE_BLACKLIST'])
+  return services
+
+
+@resource('/v1/superuser/systemlogs/<service>')
+@internal_only
+@show_if(features.SUPER_USERS)
+class SuperUserGetLogsForService(ApiResource):
+  """ Resource for fetching the logs of a specific system service. """
+  @require_fresh_login
+  @verify_not_prod
+  @nickname('getSystemLogs')
+  def get(self, service):
+    """ Returns the logs for the specific service. """
+    if SuperUserPermission().can():
+      if service not in get_services():
+        abort(404)
+
+      try:
+        with open(app.config['SYSTEM_SERVICE_LOGS_PATH'] % service, 'r') as f:
+          logs = f.read()
+      except Exception:
+        logger.exception('Cannot read logs')
+        abort(400)
+
+      return {
+        'logs': logs
+      }
+
+    abort(403)
+
+
+@resource('/v1/superuser/systemlogs/')
+@internal_only
+@show_if(features.SUPER_USERS)
+class SuperUserSystemLogServices(ApiResource):
+  """ Resource for fetching the kinds of system logs in the system. """
+  @require_fresh_login
+  @verify_not_prod
+  @nickname('listSystemLogServices')
+  def get(self):
+    """ List the system logs for the current system. """
+    if SuperUserPermission().can():
+      return {
+        'services': list(get_services())
+      }
+
+    abort(403)
+
+
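Note that the membership test against get_services() doubles as input validation for the
log lookup: only names that match an actual service directory are ever interpolated into
SYSTEM_SERVICE_LOGS_PATH, so traversal strings like '../etc' can never reach the open()
call. An illustrative trace (the config value shown is an assumption):

    # Illustrative only: how the membership check constrains the path lookup.
    # SYSTEM_SERVICE_LOGS_PATH might look like '/var/log/%s/current' (assumption).
    service = 'gunicorn'                # taken from the URL
    if service not in get_services():   # rejects anything that is not a real service dir
      abort(404)
    log_path = app.config['SYSTEM_SERVICE_LOGS_PATH'] % service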
 @resource('/v1/superuser/logs')
 @internal_only
 @show_if(features.SUPER_USERS)
 class SuperUserLogs(ApiResource):
   """ Resource for fetching all logs in the system. """
+  @require_fresh_login
+  @verify_not_prod
   @nickname('listAllLogs')
   @parse_args
   @query_param('starttime', 'Earliest time from which to get logs. (%m/%d/%Y %Z)', type=str)
   @query_param('endtime', 'Latest time to which to get logs. (%m/%d/%Y %Z)', type=str)
   @query_param('performer', 'Username for which to filter logs.', type=str)
   def get(self, args):
-    """ List the logs for the current system. """
+    """ List the usage logs for the current system. """
     if SuperUserPermission().can():
       performer_name = args['performer']
       start_time = args['starttime']
@@ -49,7 +108,8 @@ def user_view(user):
     'username': user.username,
     'email': user.email,
     'verified': user.verified,
-    'super_user': user.username in app.config['SUPER_USERS']
+    'avatar': avatar.compute_hash(user.email, name=user.username),
+    'super_user': superusers.is_superuser(user.username)
   }
 
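The avatar.compute_hash helper is not shown in this diff; the common approach is a
Gravatar-style hash of the normalized email address, sketched here under that assumption:

    import hashlib

    # Assumed shape of avatar.compute_hash: an MD5 of the normalized email,
    # with the username as a fallback seed. Illustrative only.
    def compute_hash(email, name=None):
      seed = (email or name or '').strip().lower()
      return hashlib.md5(seed.encode('utf-8')).hexdigest()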
 @resource('/v1/superuser/usage/')
@@ -58,6 +118,7 @@ def user_view(user):
 class UsageInformation(ApiResource):
   """ Resource for returning the usage information for enterprise customers. """
   @require_fresh_login
+  @verify_not_prod
   @nickname('getSystemUsage')
   def get(self):
     """ Returns the number of repository handles currently held. """
@@ -96,6 +157,7 @@ class SuperUserList(ApiResource):
   }
 
   @require_fresh_login
+  @verify_not_prod
   @nickname('listAllUsers')
   def get(self):
     """ Returns a list of all users in the system. """
@@ -109,6 +171,7 @@ class SuperUserList(ApiResource):
 
 
   @require_fresh_login
+  @verify_not_prod
   @nickname('createInstallUser')
   @validate_json_request('CreateInstallUser')
   def post(self):
@@ -146,6 +209,7 @@ class SuperUserList(ApiResource):
 class SuperUserSendRecoveryEmail(ApiResource):
   """ Resource for sending a recovery email on behalf of a user. """
   @require_fresh_login
+  @verify_not_prod
   @nickname('sendInstallUserRecoveryEmail')
   def post(self, username):
     if SuperUserPermission().can():
@@ -153,7 +217,7 @@ class SuperUserSendRecoveryEmail(ApiResource):
       if not user or user.organization or user.robot:
         abort(404)
 
-      if username in app.config['SUPER_USERS']:
+      if superusers.is_superuser(username):
         abort(403)
 
       code = model.create_reset_password_email_code(user.email)
@@ -190,6 +254,7 @@ class SuperUserManagement(ApiResource):
   }
 
   @require_fresh_login
+  @verify_not_prod
   @nickname('getInstallUser')
   def get(self, username):
     """ Returns information about the specified user. """
@@ -203,6 +268,7 @@ class SuperUserManagement(ApiResource):
     abort(403)
 
   @require_fresh_login
+  @verify_not_prod
   @nickname('deleteInstallUser')
   def delete(self, username):
     """ Deletes the specified user. """
@@ -211,7 +277,7 @@ class SuperUserManagement(ApiResource):
       if not user or user.organization or user.robot:
         abort(404)
 
-      if username in app.config['SUPER_USERS']:
+      if superusers.is_superuser(username):
         abort(403)
 
       model.delete_user(user)
@@ -220,6 +286,7 @@ class SuperUserManagement(ApiResource):
     abort(403)
 
   @require_fresh_login
+  @verify_not_prod
   @nickname('changeInstallUser')
   @validate_json_request('UpdateUser')
   def put(self, username):
@@ -229,7 +296,7 @@ class SuperUserManagement(ApiResource):
       if not user or user.organization or user.robot:
         abort(404)
 
-      if username in app.config['SUPER_USERS']:
+      if superusers.is_superuser(username):
         abort(403)
 
       user_data = request.get_json()
@@ -246,7 +246,7 @@ class User(ApiResource):
         # Username already used
         raise request_error(message='Username is already in use')
 
-      model.change_username(user, new_username)
+      model.change_username(user.id, new_username)
 
     except model.InvalidPasswordException, ex:
       raise request_error(exception=ex)
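Passing user.id instead of the model instance lets change_username re-select the row
inside its own transaction rather than acting on a possibly stale object. The body of
model.change_username is not part of this diff, so the following is only a sketch of that
pattern with a peewee-style API:

    # Hypothetical body consistent with the new call site; names are assumptions.
    def change_username(user_id, new_username):
      with db.transaction():
        user = User.get(User.id == user_id)  # fresh row, read under the transaction
        user.username = new_username
        user.save()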
@@ -3,6 +3,7 @@ import urlparse
 import json
 import string
 import datetime
+import os
 
 # Register the various exceptions via decorators.
 import endpoints.decorated
@@ -28,10 +29,26 @@ from endpoints.notificationhelper import spawn_notification
 import features
 
 logger = logging.getLogger(__name__)
-profile = logging.getLogger('application.profiler')
 
 route_data = None
 
+CACHE_BUSTERS_JSON = 'static/dist/cachebusters.json'
+CACHE_BUSTERS = None
+
+def get_cache_busters():
+  """ Retrieves the cache busters hashes. """
+  global CACHE_BUSTERS
+  if CACHE_BUSTERS is not None:
+    return CACHE_BUSTERS
+
+  if not os.path.exists(CACHE_BUSTERS_JSON):
+    return {}
+
+  with open(CACHE_BUSTERS_JSON, 'r') as f:
+    CACHE_BUSTERS = json.loads(f.read())
+    return CACHE_BUSTERS
+
+
 class RepoPathConverter(BaseConverter):
   regex = '[\.a-zA-Z0-9_\-]+/[\.a-zA-Z0-9_\-]+'
   weight = 200
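The cachebusters.json file is expected to be produced by the front-end build (the grunt
step elsewhere in this commit). A hypothetical generator that maps each built asset to a
short content hash, which is the shape get_cache_busters() consumes:

    import hashlib
    import json
    import os

    # Hypothetical build step for static/dist/cachebusters.json; file layout assumed.
    def write_cache_busters(dist_dir='static/dist'):
      busters = {}
      for name in os.listdir(dist_dir):
        path = os.path.join(dist_dir, name)
        if os.path.isfile(path):
          with open(path, 'rb') as f:
            busters['dist/' + name] = hashlib.md5(f.read()).hexdigest()[:12]
      with open(os.path.join(dist_dir, 'cachebusters.json'), 'w') as f:
        json.dump(busters, f)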
@@ -113,17 +130,15 @@ def list_files(path, extension):
   filepath = 'static/' + path
   return [join_path(dp, f) for dp, dn, files in os.walk(filepath) for f in files if matches(f)]
 
-SAVED_CACHE_STRING = random_string()
-
 def render_page_template(name, **kwargs):
-  if app.config.get('DEBUGGING', False):
+  debugging = app.config.get('DEBUGGING', False)
+  if debugging:
     # If DEBUGGING is enabled, then we load the full set of individual JS and CSS files
     # from the file system.
     library_styles = list_files('lib', 'css')
     main_styles = list_files('css', 'css')
     library_scripts = list_files('lib', 'js')
     main_scripts = list_files('js', 'js')
-    cache_buster = 'debugging'
 
     file_lists = [library_styles, main_styles, library_scripts, main_scripts]
     for file_list in file_lists:
@@ -133,7 +148,6 @@ def render_page_template(name, **kwargs):
     main_styles = ['dist/quay-frontend.css']
     library_scripts = []
     main_scripts = ['dist/quay-frontend.min.js']
-    cache_buster = SAVED_CACHE_STRING
 
   use_cdn = app.config.get('USE_CDN', True)
   if request.args.get('use_cdn') is not None:
@@ -142,6 +156,12 @@ def render_page_template(name, **kwargs):
   external_styles = get_external_css(local=not use_cdn)
   external_scripts = get_external_javascript(local=not use_cdn)
 
+  def add_cachebusters(filenames):
+    cachebusters = get_cache_busters()
+    for filename in filenames:
+      cache_buster = cachebusters.get(filename, random_string()) if not debugging else 'debugging'
+      yield (filename, cache_buster)
+
   def get_oauth_config():
     oauth_config = {}
     for oauth_app in oauth_apps:
@@ -153,13 +173,14 @@ def render_page_template(name, **kwargs):
   if len(app.config.get('CONTACT_INFO', [])) == 1:
     contact_href = app.config['CONTACT_INFO'][0]
 
-  resp = make_response(render_template(name, route_data=json.dumps(get_route_data()),
+  resp = make_response(render_template(name,
+                                       route_data=json.dumps(get_route_data()),
                                        external_styles=external_styles,
                                        external_scripts=external_scripts,
-                                       main_styles=main_styles,
-                                       library_styles=library_styles,
-                                       main_scripts=main_scripts,
-                                       library_scripts=library_scripts,
+                                       main_styles=add_cachebusters(main_styles),
+                                       library_styles=add_cachebusters(library_styles),
+                                       main_scripts=add_cachebusters(main_scripts),
+                                       library_scripts=add_cachebusters(library_scripts),
                                        feature_set=json.dumps(features.get_features()),
                                        config_set=json.dumps(getFrontendVisibleConfig(app.config)),
                                        oauth_set=json.dumps(get_oauth_config()),
@@ -169,9 +190,10 @@ def render_page_template(name, **kwargs):
                                        sentry_public_dsn=app.config.get('SENTRY_PUBLIC_DSN', ''),
                                        is_debug=str(app.config.get('DEBUGGING', False)).lower(),
                                        show_chat=features.OLARK_CHAT,
-                                       cache_buster=cache_buster,
                                        has_billing=features.BILLING,
                                        contact_href=contact_href,
+                                       hostname=app.config['SERVER_HOSTNAME'],
+                                       preferred_scheme=app.config['PREFERRED_URL_SCHEME'],
                                        **kwargs))
 
   resp.headers['X-FRAME-OPTIONS'] = 'DENY'
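With this change each asset carries its own buster instead of the old process-wide
SAVED_CACHE_STRING, so a redeploy only invalidates files whose content actually changed.
A behavior sketch of the generator as called above:

    # add_cachebusters yields (filename, buster) pairs; values shown are illustrative.
    pairs = list(add_cachebusters(['dist/quay-frontend.min.js']))
    # -> [('dist/quay-frontend.min.js', 'a1b2c3d4e5f6')]
    # which the template can render as e.g.
    #   /static/dist/quay-frontend.min.js?v=a1b2c3d4e5f6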
@@ -208,10 +230,17 @@ def start_build(repository, dockerfile_id, tags, build_name, subdir, manual,
                                             dockerfile_id, build_name,
                                             trigger, pull_robot_name=pull_robot_name)
 
-  dockerfile_build_queue.put([repository.namespace_user.username, repository.name], json.dumps({
+  json_data = json.dumps({
     'build_uuid': build_request.uuid,
     'pull_credentials': model.get_pull_credentials(pull_robot_name) if pull_robot_name else None
-  }), retries_remaining=1)
+  })
+
+  queue_item = dockerfile_build_queue.put([repository.namespace_user.username, repository.name],
+                                          json_data,
+                                          retries_remaining=3)
+
+  build_request.queue_item = queue_item
+  build_request.save()
 
   # Add the build to the repo's log.
   metadata = {
@@ -230,7 +259,7 @@ def start_build(repository, dockerfile_id, tags, build_name, subdir, manual,
                   metadata=metadata, repository=repository)
 
   # Add notifications for the build queue.
-  profile.debug('Adding notifications for repository')
+  logger.debug('Adding notifications for repository')
   event_data = {
     'build_id': build_request.uuid,
     'build_name': build_name,
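Two related changes here: the build now tolerates more transient failures
(retries_remaining goes from 1 to 3), and the returned queue item is linked back onto the
build request so the build's state can be tied to its queue entry. A sketch of the
consumer-side semantics this implies; the worker API names below are assumptions, not
taken from this diff:

    # Assumed worker-loop semantics for retries_remaining (illustrative only).
    item = dockerfile_build_queue.get()   # hypothetical worker call
    try:
      process_build(item)                 # hypothetical handler
    except Exception:
      # The queue is assumed to decrement retries_remaining on failure and
      # re-deliver the item until the count reaches zero.
      dockerfile_build_queue.incomplete(item)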
@@ -19,19 +19,21 @@ def generate_csrf_token():
 
   return session['_csrf_token']
 
+
+def verify_csrf():
+  token = session.get('_csrf_token', None)
+  found_token = request.values.get('_csrf_token', None)
+
+  if not token or token != found_token:
+    msg = 'CSRF Failure. Session token was %s and request token was %s'
+    logger.error(msg, token, found_token)
+    abort(403, message='CSRF token was invalid or missing.')
+
+
 def csrf_protect(func):
   @wraps(func)
   def wrapper(*args, **kwargs):
     oauth_token = get_validated_oauth_token()
     if oauth_token is None and request.method != "GET" and request.method != "HEAD":
-      token = session.get('_csrf_token', None)
-      found_token = request.values.get('_csrf_token', None)
-
-      if not token or token != found_token:
-        msg = 'CSRF Failure. Session token was %s and request token was %s'
-        logger.error(msg, token, found_token)
-        abort(403, message='CSRF token was invalid or missing.')
+      verify_csrf()
 
     return func(*args, **kwargs)
   return wrapper
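Extracting verify_csrf() out of the decorator means the same check can now be invoked
imperatively by handlers that cannot take the decorator. A usage sketch (the route and
handler below are hypothetical):

    @app.route('/some/form/endpoint', methods=['POST'])  # hypothetical route
    def handle_form():
      verify_csrf()         # aborts with 403 on a missing or mismatched token
      return do_the_work()  # hypothetical handler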
@@ -23,7 +23,6 @@ from endpoints.notificationhelper import spawn_notification
 import features
 
 logger = logging.getLogger(__name__)
-profile = logging.getLogger('application.profiler')
 
 index = Blueprint('index', __name__)
 
@@ -120,7 +119,7 @@ def create_user():
 
   else:
     # New user case
-    profile.debug('Creating user')
+    logger.debug('Creating user')
     new_user = None
 
     try:
@@ -128,10 +127,10 @@ def create_user():
     except model.TooManyUsersException as ex:
       abort(402, 'Seat limit has been reached for this license', issue='seat-limit')
 
-    profile.debug('Creating email code for user')
+    logger.debug('Creating email code for user')
     code = model.create_confirm_email_code(new_user)
 
-    profile.debug('Sending email code to user')
+    logger.debug('Sending email code to user')
     send_confirmation_email(new_user.username, new_user.email, code.code)
 
     return make_response('Created', 201)
@@ -168,12 +167,12 @@ def update_user(username):
   update_request = request.get_json()
 
   if 'password' in update_request:
-    profile.debug('Updating user password')
+    logger.debug('Updating user password')
     model.change_password(get_authenticated_user(),
                           update_request['password'])
 
   if 'email' in update_request:
-    profile.debug('Updating user email')
+    logger.debug('Updating user email')
     model.update_email(get_authenticated_user(), update_request['email'])
 
   return jsonify({
@@ -189,13 +188,13 @@ def update_user(username):
 @parse_repository_name
 @generate_headers(role='write')
 def create_repository(namespace, repository):
-  profile.debug('Parsing image descriptions')
+  logger.debug('Parsing image descriptions')
   image_descriptions = json.loads(request.data.decode('utf8'))
 
-  profile.debug('Looking up repository')
+  logger.debug('Looking up repository')
   repo = model.get_repository(namespace, repository)
 
-  profile.debug('Repository looked up')
+  logger.debug('Repository looked up')
   if not repo and get_authenticated_user() is None:
     logger.debug('Attempt to create new repository without user auth.')
     abort(401,
@@ -219,11 +218,11 @@ def create_repository(namespace, repository):
           issue='no-create-permission',
           namespace=namespace)
 
-  profile.debug('Creaing repository with owner: %s', get_authenticated_user().username)
+  logger.debug('Creaing repository with owner: %s', get_authenticated_user().username)
   repo = model.create_repository(namespace, repository,
                                  get_authenticated_user())
 
-  profile.debug('Determining already added images')
+  logger.debug('Determining already added images')
   added_images = OrderedDict([(desc['id'], desc) for desc in image_descriptions])
   new_repo_images = dict(added_images)
 
@@ -239,7 +238,7 @@ def create_repository(namespace, repository):
   for existing in existing_images:
     added_images.pop(existing.docker_image_id)
 
-  profile.debug('Creating/Linking necessary images')
+  logger.debug('Creating/Linking necessary images')
   username = get_authenticated_user() and get_authenticated_user().username
   translations = {}
   for image_description in added_images.values():
@@ -247,7 +246,7 @@ def create_repository(namespace, repository):
                                translations, storage.preferred_locations[0])
 
 
-  profile.debug('Created images')
+  logger.debug('Created images')
   track_and_log('push_repo', repo)
   return make_response('Created', 201)
 
@@ -260,14 +259,14 @@ def update_images(namespace, repository):
   permission = ModifyRepositoryPermission(namespace, repository)
 
   if permission.can():
-    profile.debug('Looking up repository')
+    logger.debug('Looking up repository')
     repo = model.get_repository(namespace, repository)
     if not repo:
       # Make sure the repo actually exists.
       abort(404, message='Unknown repository', issue='unknown-repo')
 
     if get_authenticated_user():
-      profile.debug('Publishing push event')
+      logger.debug('Publishing push event')
       username = get_authenticated_user().username
 
       # Mark that the user has pushed the repo.
@@ -280,11 +279,11 @@ def update_images(namespace, repository):
       event = userevents.get_event(username)
       event.publish_event_data('docker-cli', user_data)
 
-    profile.debug('GCing repository')
+    logger.debug('GCing repository')
     num_removed = model.garbage_collect_repository(namespace, repository)
 
     # Generate a job for each notification that has been added to this repo
-    profile.debug('Adding notifications for repository')
+    logger.debug('Adding notifications for repository')
 
     updated_tags = session.get('pushed_tags', {})
     event_data = {
@@ -307,13 +306,13 @@ def get_repository_images(namespace, repository):
   # TODO invalidate token?
   if permission.can() or model.repository_is_public(namespace, repository):
     # We can't rely on permissions to tell us if a repo exists anymore
-    profile.debug('Looking up repository')
+    logger.debug('Looking up repository')
     repo = model.get_repository(namespace, repository)
     if not repo:
       abort(404, message='Unknown repository', issue='unknown-repo')
 
     all_images = []
-    profile.debug('Retrieving repository images')
+    logger.debug('Retrieving repository images')
     for image in model.get_repository_images(namespace, repository):
       new_image_view = {
         'id': image.docker_image_id,
@@ -321,7 +320,7 @@ def get_repository_images(namespace, repository):
       }
       all_images.append(new_image_view)
 
-    profile.debug('Building repository image response')
+    logger.debug('Building repository image response')
     resp = make_response(json.dumps(all_images), 200)
     resp.mimetype = 'application/json'
 
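The rename from the separate 'application.profiler' logger to the module logger keeps the
%-style argument form, which matters: the logging module formats these arguments lazily,
so the string join is skipped entirely when DEBUG is disabled.

    # Lazy vs eager formatting with the standard logging module:
    logger.debug('Looking up repository %s/%s', namespace, repository)     # formatted only if emitted
    logger.debug('Looking up repository %s/%s' % (namespace, repository))  # always pays for the join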
@@ -20,7 +20,6 @@ from util import gzipstream
 registry = Blueprint('registry', __name__)
 
 logger = logging.getLogger(__name__)
-profile = logging.getLogger('application.profiler')
 
 class SocketReader(object):
   def __init__(self, fp):
@@ -100,12 +99,12 @@ def set_cache_headers(f):
 def head_image_layer(namespace, repository, image_id, headers):
   permission = ReadRepositoryPermission(namespace, repository)
 
-  profile.debug('Checking repo permissions')
+  logger.debug('Checking repo permissions')
   if permission.can() or model.repository_is_public(namespace, repository):
-    profile.debug('Looking up repo image')
+    logger.debug('Looking up repo image')
     repo_image = model.get_repo_image_extended(namespace, repository, image_id)
     if not repo_image:
-      profile.debug('Image not found')
+      logger.debug('Image not found')
       abort(404, 'Image %(image_id)s not found', issue='unknown-image',
             image_id=image_id)
 
@@ -114,7 +113,7 @@ def head_image_layer(namespace, repository, image_id, headers):
     # Add the Accept-Ranges header if the storage engine supports resumable
     # downloads.
     if store.get_supports_resumable_downloads(repo_image.storage.locations):
-      profile.debug('Storage supports resumable downloads')
+      logger.debug('Storage supports resumable downloads')
       extra_headers['Accept-Ranges'] = 'bytes'
 
     resp = make_response('')
@@ -133,31 +132,35 @@ def head_image_layer(namespace, repository, image_id, headers):
 def get_image_layer(namespace, repository, image_id, headers):
   permission = ReadRepositoryPermission(namespace, repository)
 
-  profile.debug('Checking repo permissions')
+  logger.debug('Checking repo permissions')
   if permission.can() or model.repository_is_public(namespace, repository):
-    profile.debug('Looking up repo image')
+    logger.debug('Looking up repo image')
     repo_image = model.get_repo_image_extended(namespace, repository, image_id)
+    if not repo_image:
+      logger.debug('Image not found')
+      abort(404, 'Image %(image_id)s not found', issue='unknown-image',
+            image_id=image_id)
 
-    profile.debug('Looking up the layer path')
+    logger.debug('Looking up the layer path')
     try:
       path = store.image_layer_path(repo_image.storage.uuid)
 
-      profile.debug('Looking up the direct download URL')
+      logger.debug('Looking up the direct download URL')
       direct_download_url = store.get_direct_download_url(repo_image.storage.locations, path)
 
       if direct_download_url:
-        profile.debug('Returning direct download URL')
+        logger.debug('Returning direct download URL')
         resp = redirect(direct_download_url)
         return resp
 
-      profile.debug('Streaming layer data')
+      logger.debug('Streaming layer data')
 
       # Close the database handle here for this process before we send the long download.
      database.close_db_filter(None)
 
       return Response(store.stream_read(repo_image.storage.locations, path), headers=headers)
     except (IOError, AttributeError):
-      profile.debug('Image not found')
+      logger.exception('Image layer data not found')
       abort(404, 'Image %(image_id)s not found', issue='unknown-image',
             image_id=image_id)
 
@@ -168,29 +171,30 @@ def get_image_layer(namespace, repository, image_id, headers):
 @process_auth
 @extract_namespace_repo_from_session
 def put_image_layer(namespace, repository, image_id):
-  profile.debug('Checking repo permissions')
+  logger.debug('Checking repo permissions')
   permission = ModifyRepositoryPermission(namespace, repository)
   if not permission.can():
     abort(403)
 
-  profile.debug('Retrieving image')
+  logger.debug('Retrieving image')
   repo_image = model.get_repo_image_extended(namespace, repository, image_id)
   try:
-    profile.debug('Retrieving image data')
+    logger.debug('Retrieving image data')
     uuid = repo_image.storage.uuid
     json_data = store.get_content(repo_image.storage.locations, store.image_json_path(uuid))
   except (IOError, AttributeError):
+    logger.exception('Exception when retrieving image data')
     abort(404, 'Image %(image_id)s not found', issue='unknown-image',
           image_id=image_id)
 
-  profile.debug('Retrieving image path info')
+  logger.debug('Retrieving image path info')
   layer_path = store.image_layer_path(uuid)
 
   if (store.exists(repo_image.storage.locations, layer_path) and not
       image_is_uploading(repo_image)):
     exact_abort(409, 'Image already exists')
 
-  profile.debug('Storing layer data')
+  logger.debug('Storing layer data')
 
   input_stream = request.stream
   if request.headers.get('transfer-encoding') == 'chunked':
@@ -257,7 +261,7 @@ def put_image_layer(namespace, repository, image_id):
 
   # The layer is ready for download, send a job to the work queue to
   # process it.
-  profile.debug('Adding layer to diff queue')
+  logger.debug('Adding layer to diff queue')
   repo = model.get_repository(namespace, repository)
   image_diff_queue.put([repo.namespace_user.username, repository, image_id], json.dumps({
     'namespace_user_id': repo.namespace_user.id,
@@ -272,7 +276,7 @@ def put_image_layer(namespace, repository, image_id):
 @process_auth
 @extract_namespace_repo_from_session
 def put_image_checksum(namespace, repository, image_id):
-  profile.debug('Checking repo permissions')
+  logger.debug('Checking repo permissions')
   permission = ModifyRepositoryPermission(namespace, repository)
   if not permission.can():
     abort(403)
@@ -298,23 +302,23 @@ def put_image_checksum(namespace, repository, image_id):
     abort(400, 'Checksum not found in Cookie for image %(image_id)s',
           issue='missing-checksum-cookie', image_id=image_id)
 
-  profile.debug('Looking up repo image')
+  logger.debug('Looking up repo image')
   repo_image = model.get_repo_image_extended(namespace, repository, image_id)
   if not repo_image or not repo_image.storage:
     abort(404, 'Image not found: %(image_id)s', issue='unknown-image', image_id=image_id)
 
   uuid = repo_image.storage.uuid
 
-  profile.debug('Looking up repo layer data')
+  logger.debug('Looking up repo layer data')
   if not store.exists(repo_image.storage.locations, store.image_json_path(uuid)):
     abort(404, 'Image not found: %(image_id)s', issue='unknown-image', image_id=image_id)
 
-  profile.debug('Marking image path')
+  logger.debug('Marking image path')
   if not image_is_uploading(repo_image):
     abort(409, 'Cannot set checksum for image %(image_id)s',
           issue='image-write-error', image_id=image_id)
 
-  profile.debug('Storing image checksum')
+  logger.debug('Storing image checksum')
   err = store_checksum(repo_image.storage, checksum)
   if err:
     abort(400, err)
@@ -331,7 +335,7 @@ def put_image_checksum(namespace, repository, image_id):
 
   # The layer is ready for download, send a job to the work queue to
   # process it.
-  profile.debug('Adding layer to diff queue')
+  logger.debug('Adding layer to diff queue')
   repo = model.get_repository(namespace, repository)
   image_diff_queue.put([repo.namespace_user.username, repository, image_id], json.dumps({
     'namespace_user_id': repo.namespace_user.id,
@@ -348,23 +352,23 @@ def put_image_checksum(namespace, repository, image_id):
 @require_completion
 @set_cache_headers
 def get_image_json(namespace, repository, image_id, headers):
-  profile.debug('Checking repo permissions')
+  logger.debug('Checking repo permissions')
   permission = ReadRepositoryPermission(namespace, repository)
   if not permission.can() and not model.repository_is_public(namespace,
                                                              repository):
     abort(403)
 
-  profile.debug('Looking up repo image')
+  logger.debug('Looking up repo image')
   repo_image = model.get_repo_image_extended(namespace, repository, image_id)
 
-  profile.debug('Looking up repo layer data')
+  logger.debug('Looking up repo layer data')
   try:
     uuid = repo_image.storage.uuid
     data = store.get_content(repo_image.storage.locations, store.image_json_path(uuid))
   except (IOError, AttributeError):
     flask_abort(404)
 
-  profile.debug('Looking up repo layer size')
+  logger.debug('Looking up repo layer size')
   size = repo_image.storage.image_size
   headers['X-Docker-Size'] = str(size)
 
@@ -379,16 +383,16 @@ def get_image_json(namespace, repository, image_id, headers):
 @require_completion
 @set_cache_headers
 def get_image_ancestry(namespace, repository, image_id, headers):
-  profile.debug('Checking repo permissions')
+  logger.debug('Checking repo permissions')
   permission = ReadRepositoryPermission(namespace, repository)
   if not permission.can() and not model.repository_is_public(namespace,
                                                              repository):
     abort(403)
 
-  profile.debug('Looking up repo image')
+  logger.debug('Looking up repo image')
   repo_image = model.get_repo_image_extended(namespace, repository, image_id)
 
-  profile.debug('Looking up image data')
+  logger.debug('Looking up image data')
   try:
     uuid = repo_image.storage.uuid
     data = store.get_content(repo_image.storage.locations, store.image_ancestry_path(uuid))
@@ -396,11 +400,11 @@ def get_image_ancestry(namespace, repository, image_id, headers):
     abort(404, 'Image %(image_id)s not found', issue='unknown-image',
           image_id=image_id)
 
-  profile.debug('Converting to <-> from JSON')
+  logger.debug('Converting to <-> from JSON')
   response = make_response(json.dumps(json.loads(data)), 200)
   response.headers.extend(headers)
 
-  profile.debug('Done')
+  logger.debug('Done')
   return response
 
 
@@ -430,12 +434,12 @@ def store_checksum(image_storage, checksum):
 @process_auth
 @extract_namespace_repo_from_session
 def put_image_json(namespace, repository, image_id):
-  profile.debug('Checking repo permissions')
+  logger.debug('Checking repo permissions')
   permission = ModifyRepositoryPermission(namespace, repository)
   if not permission.can():
     abort(403)
 
-  profile.debug('Parsing image JSON')
+  logger.debug('Parsing image JSON')
   try:
     data = json.loads(request.data.decode('utf8'))
   except ValueError:
@@ -449,10 +453,10 @@ def put_image_json(namespace, repository, image_id):
     abort(400, 'Missing key `id` in JSON for image: %(image_id)s',
           issue='invalid-request', image_id=image_id)
 
-  profile.debug('Looking up repo image')
+  logger.debug('Looking up repo image')
   repo_image = model.get_repo_image_extended(namespace, repository, image_id)
   if not repo_image:
-    profile.debug('Image not found')
+    logger.debug('Image not found')
    abort(404, 'Image %(image_id)s not found', issue='unknown-image',
          image_id=image_id)
 
@@ -466,24 +470,24 @@ def put_image_json(namespace, repository, image_id):
 
   parent_image = None
   if parent_id:
-    profile.debug('Looking up parent image')
+    logger.debug('Looking up parent image')
     parent_image = model.get_repo_image_extended(namespace, repository, parent_id)
 
   parent_uuid = parent_image and parent_image.storage.uuid
   parent_locations = parent_image and parent_image.storage.locations
 
   if parent_id:
-    profile.debug('Looking up parent image data')
+    logger.debug('Looking up parent image data')
 
     if (parent_id and not
         store.exists(parent_locations, store.image_json_path(parent_uuid))):
       abort(400, 'Image %(image_id)s depends on non existing parent image %(parent_id)s',
             issue='invalid-request', image_id=image_id, parent_id=parent_id)
 
-  profile.debug('Looking up image storage paths')
+  logger.debug('Looking up image storage paths')
   json_path = store.image_json_path(uuid)
 
-  profile.debug('Checking if image already exists')
+  logger.debug('Checking if image already exists')
   if (store.exists(repo_image.storage.locations, json_path) and not
       image_is_uploading(repo_image)):
     exact_abort(409, 'Image already exists')
@@ -496,24 +500,24 @@ def put_image_json(namespace, repository, image_id):
   command_list = data.get('container_config', {}).get('Cmd', None)
   command = json.dumps(command_list) if command_list else None
 
-  profile.debug('Setting image metadata')
+  logger.debug('Setting image metadata')
   model.set_image_metadata(image_id, namespace, repository,
                            data.get('created'), data.get('comment'), command,
                            parent_image)
 
-  profile.debug('Putting json path')
+  logger.debug('Putting json path')
   store.put_content(repo_image.storage.locations, json_path, request.data)
 
-  profile.debug('Generating image ancestry')
+  logger.debug('Generating image ancestry')
 
   try:
     generate_ancestry(image_id, uuid, repo_image.storage.locations, parent_id, parent_uuid,
                       parent_locations)
   except IOError as ioe:
-    profile.debug('Error when generating ancestry: %s' % ioe.message)
+    logger.debug('Error when generating ancestry: %s' % ioe.message)
     abort(404)
 
-  profile.debug('Done')
+  logger.debug('Done')
   return make_response('true', 200)
 
 
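One upgrade worth noting among the renames above: in the failure paths, profile.debug
calls that silently discarded the error become logger.exception calls, which record the
active traceback at ERROR level.

    # logger.exception must be called from an except block; it appends the
    # current stack trace to the message. Variable names follow the code above.
    try:
      data = store.get_content(locations, path)
    except (IOError, AttributeError):
      logger.exception('Image layer data not found')  # message plus traceback
      abort(404)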
@@ -6,7 +6,6 @@ from flask import request
 from auth.auth_context import get_authenticated_user, get_validated_token, get_validated_oauth_token
 
 logger = logging.getLogger(__name__)
-profile = logging.getLogger('application.profiler')
 
 def track_and_log(event_name, repo, **kwargs):
   repository = repo.name
@@ -23,7 +22,7 @@ def track_and_log(event_name, repo, **kwargs):
   authenticated_user = get_authenticated_user()
   authenticated_token = get_validated_token() if not authenticated_user else None
 
-  profile.debug('Logging the %s to Mixpanel and the log system', event_name)
+  logger.debug('Logging the %s to Mixpanel and the log system', event_name)
   if authenticated_oauth_token:
     metadata['oauth_token_id'] = authenticated_oauth_token.id
     metadata['oauth_token_application_id'] = authenticated_oauth_token.application.client_id
@@ -45,9 +44,9 @@ def track_and_log(event_name, repo, **kwargs):
   }
 
   # Publish the user event (if applicable)
-  profile.debug('Checking publishing %s to the user events system', event_name)
+  logger.debug('Checking publishing %s to the user events system', event_name)
   if authenticated_user:
-    profile.debug('Publishing %s to the user events system', event_name)
+    logger.debug('Publishing %s to the user events system', event_name)
     user_event_data = {
       'action': event_name,
       'repository': repository,
@@ -58,14 +57,14 @@ def track_and_log(event_name, repo, **kwargs):
     event.publish_event_data('docker-cli', user_event_data)
 
   # Save the action to mixpanel.
-  profile.debug('Logging the %s to Mixpanel', event_name)
+  logger.debug('Logging the %s to Mixpanel', event_name)
   analytics.track(analytics_id, event_name, extra_params)
 
   # Log the action to the database.
-  profile.debug('Logging the %s to logs system', event_name)
+  logger.debug('Logging the %s to logs system', event_name)
   model.log_action(event_name, namespace,
                    performer=authenticated_user,
                    ip=request.remote_addr, metadata=metadata,
                    repository=repo)
 
-  profile.debug('Track and log of %s complete', event_name)
+  logger.debug('Track and log of %s complete', event_name)
@ -2,11 +2,10 @@ import logging
import json
import hashlib

from flask import redirect, Blueprint, abort, send_file, request
from flask import redirect, Blueprint, abort, send_file, make_response

from app import app
from app import app, signer
from auth.auth import process_auth
from auth.auth_context import get_authenticated_user
from auth.permissions import ReadRepositoryPermission
from data import model
from data import database
@ -15,13 +14,16 @@ from storage import Storage

from util.queuefile import QueueFile
from util.queueprocess import QueueProcess
from util.gzipwrap import GzipWrap
from formats.squashed import SquashedDockerImage
from util.dockerloadformat import build_docker_load_stream
from formats.aci import ACIImage


# pylint: disable=invalid-name
verbs = Blueprint('verbs', __name__)
logger = logging.getLogger(__name__)


def _open_stream(namespace, repository, tag, synthetic_image_id, image_json, image_id_list):
def _open_stream(formatter, namespace, repository, tag, synthetic_image_id, image_json,
                 image_id_list):
  store = Storage(app)

  # For performance reasons, we load the full image list here, cache it, then disconnect from
@ -42,20 +44,43 @@ def _open_stream(namespace, repository, tag, synthetic_image_id, image_json, ima
                                               current_image_path)

      current_image_id = current_image_entry.id
      logger.debug('Returning image layer %s: %s' % (current_image_id, current_image_path))
      logger.debug('Returning image layer %s: %s', current_image_id, current_image_path)
      yield current_image_stream

  stream = build_docker_load_stream(namespace, repository, tag, synthetic_image_id, image_json,
  stream = formatter.build_stream(namespace, repository, tag, synthetic_image_id, image_json,
                                  get_next_image, get_next_layer)

  return stream.read


def _write_synthetic_image_to_storage(linked_storage_uuid, linked_locations, queue_file):
def _sign_sythentic_image(verb, linked_storage_uuid, queue_file):
  signature = None
  try:
    signature = signer.detached_sign(queue_file)
  except:
    logger.exception('Exception when signing %s image %s', verb, linked_storage_uuid)
    return

  # Setup the database (since this is a new process) and then disconnect immediately
  # once the operation completes.
  if not queue_file.raised_exception:
    with database.UseThenDisconnect(app.config):
      try:
        derived = model.get_storage_by_uuid(linked_storage_uuid)
      except model.InvalidImageException:
        return

      signature_entry = model.find_or_create_storage_signature(derived, signer.name)
      signature_entry.signature = signature
      signature_entry.uploading = False
      signature_entry.save()


def _write_synthetic_image_to_storage(verb, linked_storage_uuid, linked_locations, queue_file):
  store = Storage(app)

  def handle_exception(ex):
    logger.debug('Exception when building squashed image %s: %s', linked_storage_uuid, ex)
    logger.debug('Exception when building %s image %s: %s', verb, linked_storage_uuid, ex)

    with database.UseThenDisconnect(app.config):
      model.delete_derived_storage_by_uuid(linked_storage_uuid)
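The implementation of `signer.detached_sign(queue_file)` is not part of this diff; given the `pygpgme` pin added to requirements.txt later in this commit, it plausibly wraps a GPG detached signature roughly like the sketch below (Python 2; the payload is invented, and a default secret key is assumed to exist in the keyring):

import StringIO
import gpgme

ctx = gpgme.Context()
ctx.armor = True  # ASCII-armored output

plaintext = StringIO.StringIO('derived image bytes')  # invented payload
signature = StringIO.StringIO()

# SIG_MODE_DETACH writes only the signature, not the signed data itself.
ctx.sign(plaintext, signature, gpgme.SIG_MODE_DETACH)
print(signature.getvalue())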
@ -67,86 +92,193 @@ def _write_synthetic_image_to_storage(linked_storage_uuid, linked_locations, que
  queue_file.close()

  if not queue_file.raised_exception:
    # Setup the database (since this is a new process) and then disconnect immediately
    # once the operation completes.
    with database.UseThenDisconnect(app.config):
      done_uploading = model.get_storage_by_uuid(linked_storage_uuid)
      done_uploading.uploading = False
      done_uploading.save()


@verbs.route('/squash/<namespace>/<repository>/<tag>', methods=['GET'])
@process_auth
def get_squashed_tag(namespace, repository, tag):
  permission = ReadRepositoryPermission(namespace, repository)
  if permission.can() or model.repository_is_public(namespace, repository):
    # Lookup the requested tag.
    try:
      tag_image = model.get_tag_image(namespace, repository, tag)
    except model.DataModelException:
      abort(404)

    # Lookup the tag's image and storage.
    repo_image = model.get_repo_image_extended(namespace, repository, tag_image.docker_image_id)
    if not repo_image:
      abort(404)

    # Log the action.
    track_and_log('repo_verb', repo_image.repository, tag=tag, verb='squash')

    store = Storage(app)
    derived = model.find_or_create_derived_storage(repo_image.storage, 'squash',
                                                   store.preferred_locations[0])
    if not derived.uploading:
      logger.debug('Derived image %s exists in storage', derived.uuid)
      derived_layer_path = store.image_layer_path(derived.uuid)
      download_url = store.get_direct_download_url(derived.locations, derived_layer_path)
      if download_url:
        logger.debug('Redirecting to download URL for derived image %s', derived.uuid)
        return redirect(download_url)

      # Close the database handle here for this process before we send the long download.
      database.close_db_filter(None)

      logger.debug('Sending cached derived image %s', derived.uuid)
      return send_file(store.stream_read_file(derived.locations, derived_layer_path))

    # Load the ancestry for the image.
    logger.debug('Building and returning derived image %s', derived.uuid)
    uuid = repo_image.storage.uuid
    ancestry_data = store.get_content(repo_image.storage.locations, store.image_ancestry_path(uuid))
    full_image_list = json.loads(ancestry_data)

    # Load the image's JSON layer.
    image_json_data = store.get_content(repo_image.storage.locations, store.image_json_path(uuid))
    image_json = json.loads(image_json_data)

    # Calculate a synthetic image ID.
    synthetic_image_id = hashlib.sha256(tag_image.docker_image_id + ':squash').hexdigest()

    # Create a queue process to generate the data. The queue files will read from the process
    # and send the results to the client and storage.
    def _cleanup():
      # Close any existing DB connection once the process has exited.
      database.close_db_filter(None)

    args = (namespace, repository, tag, synthetic_image_id, image_json, full_image_list)
    queue_process = QueueProcess(_open_stream,
                                 8 * 1024, 10 * 1024 * 1024, # 8K/10M chunk/max
                                 args, finished=_cleanup)

    client_queue_file = QueueFile(queue_process.create_queue(), 'client')
    storage_queue_file = QueueFile(queue_process.create_queue(), 'storage')

    # Start building.
    queue_process.run()

    # Start the storage saving.
    storage_args = (derived.uuid, derived.locations, storage_queue_file)
    QueueProcess.run_process(_write_synthetic_image_to_storage, storage_args, finished=_cleanup)

    # Close the database handle here for this process before we send the long download.
    database.close_db_filter(None)

    # Return the client's data.
    return send_file(client_queue_file)

  abort(403)

# pylint: disable=too-many-locals
def _verify_repo_verb(store, namespace, repository, tag, verb, checker=None):
  permission = ReadRepositoryPermission(namespace, repository)

  # pylint: disable=no-member
  if not permission.can() and not model.repository_is_public(namespace, repository):
    abort(403)

  # Lookup the requested tag.
  try:
    tag_image = model.get_tag_image(namespace, repository, tag)
  except model.DataModelException:
    abort(404)

  # Lookup the tag's image and storage.
  repo_image = model.get_repo_image_extended(namespace, repository, tag_image.docker_image_id)
  if not repo_image:
    abort(404)

  # If there is a data checker, call it first.
  uuid = repo_image.storage.uuid
  image_json = None

  if checker is not None:
    image_json_data = store.get_content(repo_image.storage.locations, store.image_json_path(uuid))
    image_json = json.loads(image_json_data)

    if not checker(image_json):
      logger.debug('Check mismatch on %s/%s:%s, verb %s', namespace, repository, tag, verb)
      abort(404)

  return (repo_image, tag_image, image_json)


# pylint: disable=too-many-locals
def _repo_verb_signature(namespace, repository, tag, verb, checker=None, **kwargs):
  # Verify that the image exists and that we have access to it.
  store = Storage(app)
  result = _verify_repo_verb(store, namespace, repository, tag, verb, checker)
  (repo_image, tag_image, image_json) = result

  # Lookup the derived image storage for the verb.
  derived = model.find_derived_storage(repo_image.storage, verb)
  if derived is None or derived.uploading:
    abort(404)

  # Check if we have a valid signer configured.
  if not signer.name:
    abort(404)

  # Lookup the signature for the verb.
  signature_entry = model.lookup_storage_signature(derived, signer.name)
  if signature_entry is None:
    abort(404)

  # Return the signature.
  return make_response(signature_entry.signature)


# pylint: disable=too-many-locals
def _repo_verb(namespace, repository, tag, verb, formatter, sign=False, checker=None, **kwargs):
  # Verify that the image exists and that we have access to it.
  store = Storage(app)
  result = _verify_repo_verb(store, namespace, repository, tag, verb, checker)
  (repo_image, tag_image, image_json) = result

  # Log the action.
  track_and_log('repo_verb', repo_image.repository, tag=tag, verb=verb, **kwargs)

  # Lookup/create the derived image storage for the verb.
  derived = model.find_or_create_derived_storage(repo_image.storage, verb,
                                                 store.preferred_locations[0])

  if not derived.uploading:
    logger.debug('Derived %s image %s exists in storage', verb, derived.uuid)
    derived_layer_path = store.image_layer_path(derived.uuid)
    download_url = store.get_direct_download_url(derived.locations, derived_layer_path)
    if download_url:
      logger.debug('Redirecting to download URL for derived %s image %s', verb, derived.uuid)
      return redirect(download_url)

    # Close the database handle here for this process before we send the long download.
    database.close_db_filter(None)

    logger.debug('Sending cached derived %s image %s', verb, derived.uuid)
    return send_file(store.stream_read_file(derived.locations, derived_layer_path))

  # Load the ancestry for the image.
  uuid = repo_image.storage.uuid

  logger.debug('Building and returning derived %s image %s', verb, derived.uuid)
  ancestry_data = store.get_content(repo_image.storage.locations, store.image_ancestry_path(uuid))
  full_image_list = json.loads(ancestry_data)

  # Load the image's JSON layer.
  if not image_json:
    image_json_data = store.get_content(repo_image.storage.locations, store.image_json_path(uuid))
    image_json = json.loads(image_json_data)

  # Calculate a synthetic image ID.
  synthetic_image_id = hashlib.sha256(tag_image.docker_image_id + ':' + verb).hexdigest()

  def _cleanup():
    # Close any existing DB connection once the process has exited.
    database.close_db_filter(None)

  # Create a queue process to generate the data. The queue files will read from the process
  # and send the results to the client and storage.
  args = (formatter, namespace, repository, tag, synthetic_image_id, image_json, full_image_list)
  queue_process = QueueProcess(_open_stream,
                               8 * 1024, 10 * 1024 * 1024, # 8K/10M chunk/max
                               args, finished=_cleanup)

  client_queue_file = QueueFile(queue_process.create_queue(), 'client')
  storage_queue_file = QueueFile(queue_process.create_queue(), 'storage')

  # If signing is required, add a QueueFile for signing the image as we stream it out.
  signing_queue_file = None
  if sign and signer.name:
    signing_queue_file = QueueFile(queue_process.create_queue(), 'signing')

  # Start building.
  queue_process.run()

  # Start the storage saving.
  storage_args = (verb, derived.uuid, derived.locations, storage_queue_file)
  QueueProcess.run_process(_write_synthetic_image_to_storage, storage_args, finished=_cleanup)

  if sign and signer.name:
    signing_args = (verb, derived.uuid, signing_queue_file)
    QueueProcess.run_process(_sign_sythentic_image, signing_args, finished=_cleanup)

  # Close the database handle here for this process before we send the long download.
  database.close_db_filter(None)

  # Return the client's data.
  return send_file(client_queue_file)


def os_arch_checker(os, arch):
  def checker(image_json):
    # Verify the architecture and os.
    operating_system = image_json.get('os', 'linux')
    if operating_system != os:
      return False

    architecture = image_json.get('architecture', 'amd64')

    # Note: Some older Docker images have 'x86_64' rather than 'amd64'.
    # We allow the conversion here.
    if architecture == 'x86_64' and operating_system == 'linux':
      architecture = 'amd64'

    if architecture != arch:
      return False

    return True

  return checker


@verbs.route('/aci/<server>/<namespace>/<repository>/<tag>/sig/<os>/<arch>/', methods=['GET'])
@process_auth
# pylint: disable=unused-argument
def get_aci_signature(server, namespace, repository, tag, os, arch):
  return _repo_verb_signature(namespace, repository, tag, 'aci', checker=os_arch_checker(os, arch),
                              os=os, arch=arch)


@verbs.route('/aci/<server>/<namespace>/<repository>/<tag>/aci/<os>/<arch>/', methods=['GET'])
@process_auth
# pylint: disable=unused-argument
def get_aci_image(server, namespace, repository, tag, os, arch):
  return _repo_verb(namespace, repository, tag, 'aci', ACIImage(),
                    sign=True, checker=os_arch_checker(os, arch), os=os, arch=arch)


@verbs.route('/squash/<namespace>/<repository>/<tag>', methods=['GET'])
@process_auth
def get_squashed_tag(namespace, repository, tag):
  return _repo_verb(namespace, repository, tag, 'squash', SquashedDockerImage())
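To make the new flow concrete, here is a standalone restatement of two pieces above, runnable on its own: the os/arch checker and the per-verb synthetic image ID that keeps 'squash' and 'aci' caches from colliding (sample values are invented):

import hashlib

def os_arch_checker(os, arch):
  def checker(image_json):
    operating_system = image_json.get('os', 'linux')
    if operating_system != os:
      return False
    architecture = image_json.get('architecture', 'amd64')
    # Older Docker images say 'x86_64' rather than 'amd64'.
    if architecture == 'x86_64' and operating_system == 'linux':
      architecture = 'amd64'
    return architecture == arch
  return checker

print(os_arch_checker('linux', 'amd64')({'os': 'linux', 'architecture': 'x86_64'}))  # True

docker_image_id = 'ab' * 32  # invented image ID
for verb in ('squash', 'aci'):
  # Each verb derives its own synthetic image ID from the tag image.
  print('%s %s' % (verb, hashlib.sha256(docker_image_id + ':' + verb).hexdigest()[:12]))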
@ -1,7 +1,7 @@
import logging

from flask import (abort, redirect, request, url_for, make_response, Response,
                   Blueprint, send_from_directory, jsonify)
                   Blueprint, send_from_directory, jsonify, send_file)

from avatar_generator import Avatar
from flask.ext.login import current_user
@ -10,17 +10,20 @@ from health.healthcheck import get_healthchecker

from data import model
from data.model.oauth import DatabaseAuthorizationProvider
from app import app, billing as stripe, build_logs, avatar
from app import app, billing as stripe, build_logs, avatar, signer
from auth.auth import require_session_login, process_oauth
from auth.permissions import AdministerOrganizationPermission, ReadRepositoryPermission
from auth.permissions import (AdministerOrganizationPermission, ReadRepositoryPermission,
                              SuperUserPermission)

from util.invoice import renderInvoiceToPdf
from util.seo import render_snapshot
from util.cache import no_cache
from endpoints.common import common_login, render_page_template, route_show_if, param_required
from endpoints.csrf import csrf_protect, generate_csrf_token
from endpoints.csrf import csrf_protect, generate_csrf_token, verify_csrf
from endpoints.registry import set_cache_headers
from util.names import parse_repository_name, parse_repository_name_and_tag
from util.useremails import send_email_changed
from util.systemlogs import build_logs_archive
from auth import scopes

import features
@ -60,6 +63,14 @@ def snapshot(path = ''):
  abort(404)


@web.route('/aci-signing-key')
@no_cache
def aci_signing_key():
  if not signer.name:
    abort(404)

  return send_file(signer.public_key_path)


@web.route('/plans/')
@no_cache
@route_show_if(features.BILLING)
@ -98,6 +109,7 @@ def organizations():
def user():
  return index('')


@web.route('/superuser/')
@no_cache
@route_show_if(features.SUPER_USERS)
@ -105,6 +117,13 @@ def superuser():
  return index('')


@web.route('/setup/')
@no_cache
@route_show_if(features.SUPER_USERS)
def setup():
  return index('')


@web.route('/signin/')
@no_cache
def signin(redirect=None):
@ -463,3 +482,21 @@ def exchange_code_for_token():

  provider = FlaskAuthorizationProvider()
  return provider.get_token(grant_type, client_id, client_secret, redirect_uri, code, scope=scope)


@web.route('/systemlogsarchive', methods=['GET'])
@process_oauth
@route_show_if(features.SUPER_USERS)
@no_cache
def download_logs_archive():
  # Note: We cannot use the decorator here because this is a GET method. That being said, this
  # information is sensitive enough that we want the extra protection.
  verify_csrf()

  if SuperUserPermission().can():
    archive_data = build_logs_archive(app)
    return Response(archive_data,
                    mimetype="application/octet-stream",
                    headers={"Content-Disposition": "attachment;filename=erlogs.tar.gz"})

  abort(403)
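The `download_logs_archive` route above calls `verify_csrf()` inline because, per its own comment, the CSRF decorator cannot be applied to this GET handler. A self-contained sketch of the same pattern follows; the token check here is a simplified stand-in, not Quay's `endpoints.csrf.verify_csrf` implementation:

from flask import Flask, abort, request, session

app = Flask(__name__)
app.secret_key = 'dev-only'  # invented; required for session access

def verify_csrf():
  # Stand-in: compare the token the client sent with the session's copy.
  if request.args.get('_csrf_token') != session.get('_csrf_token'):
    abort(403)

@app.route('/systemlogsarchive', methods=['GET'])
def download_logs_archive():
  verify_csrf()  # explicit call, since the decorator does not cover GETs
  return 'archive-bytes'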
@ -18,15 +18,15 @@ EXTERNAL_JS = [
]

EXTERNAL_CSS = [
  'netdna.bootstrapcdn.com/font-awesome/4.1.0/css/font-awesome.css',
  'netdna.bootstrapcdn.com/font-awesome/4.2.0/css/font-awesome.css',
  'netdna.bootstrapcdn.com/bootstrap/3.0.0/css/bootstrap.no-icons.min.css',
  'fonts.googleapis.com/css?family=Droid+Sans:400,700',
  'fonts.googleapis.com/css?family=Source+Sans+Pro:400,700',
]

EXTERNAL_FONTS = [
  'netdna.bootstrapcdn.com/font-awesome/4.0.3/fonts/fontawesome-webfont.woff?v=4.0.3',
  'netdna.bootstrapcdn.com/font-awesome/4.2.0/fonts/fontawesome-webfont.woff?v=4.2.0',
  'netdna.bootstrapcdn.com/font-awesome/4.0.3/fonts/fontawesome-webfont.ttf?v=4.0.3',
  'netdna.bootstrapcdn.com/font-awesome/4.2.0/fonts/fontawesome-webfont.ttf?v=4.2.0',
  'netdna.bootstrapcdn.com/font-awesome/4.0.3/fonts/fontawesome-webfont.svg?v=4.0.3',
  'netdna.bootstrapcdn.com/font-awesome/4.2.0/fonts/fontawesome-webfont.svg?v=4.2.0',
]
0 formats/__init__.py Normal file
196 formats/aci.py Normal file
@ -0,0 +1,196 @@
from app import app
from util.streamlayerformat import StreamLayerMerger
from formats.tarimageformatter import TarImageFormatter

import json
import re

# pylint: disable=bad-continuation

class ACIImage(TarImageFormatter):
  """ Image formatter which produces an ACI-compatible TAR.
  """

  # pylint: disable=too-many-arguments
  def stream_generator(self, namespace, repository, tag, synthetic_image_id,
                       layer_json, get_image_iterator, get_layer_iterator):
    # ACI Format (.tar):
    #   manifest - The JSON manifest
    #   rootfs - The root file system

    # Yield the manifest.
    yield self.tar_file('manifest', self._build_manifest(namespace, repository, tag, layer_json,
                                                         synthetic_image_id))

    # Yield the merged layer data.
    yield self.tar_folder('rootfs')

    layer_merger = StreamLayerMerger(get_layer_iterator, path_prefix='rootfs/')
    for entry in layer_merger.get_generator():
      yield entry

  @staticmethod
  def _build_isolators(docker_config):
    """ Builds ACI isolator config from the docker config. """

    def _isolate_memory(memory):
      return {
        "name": "memory/limit",
        "value": str(memory) + 'B'
      }

    def _isolate_swap(memory):
      return {
        "name": "memory/swap",
        "value": str(memory) + 'B'
      }

    def _isolate_cpu(cpu):
      return {
        "name": "cpu/shares",
        "value": str(cpu)
      }

    def _isolate_capabilities(capabilities_set_value):
      capabilities_set = re.split(r'[\s,]', capabilities_set_value)
      return {
        "name": "capabilities/bounding-set",
        "value": ' '.join(capabilities_set)
      }

    mappers = {
      'Memory': _isolate_memory,
      'MemorySwap': _isolate_swap,
      'CpuShares': _isolate_cpu,
      'Cpuset': _isolate_capabilities
    }

    isolators = []

    for config_key in mappers:
      value = docker_config.get(config_key)
      if value:
        isolators.append(mappers[config_key](value))

    return isolators

  @staticmethod
  def _build_ports(docker_config):
    """ Builds the ports definitions for the ACI. """
    ports = []

    for docker_port_definition in docker_config.get('ports', {}):
      # Formats:
      #   port/tcp
      #   port/udp
      #   port

      protocol = 'tcp'
      port_number = -1

      if '/' in docker_port_definition:
        (port_number, protocol) = docker_port_definition.split('/')
      else:
        port_number = docker_port_definition

      try:
        port_number = int(port_number)
        ports.append({
          "name": "port-%s" % port_number,
          "port": port_number,
          "protocol": protocol
        })
      except ValueError:
        pass

    return ports

  @staticmethod
  def _build_volumes(docker_config):
    """ Builds the volumes definitions for the ACI. """
    volumes = []
    names = set()

    def get_name(docker_volume_path):
      parts = docker_volume_path.split('/')
      name = ''

      while True:
        name = name + parts[-1]
        parts = parts[0:-1]
        if names.add(name):
          break

        name = '/' + name

      return name

    for docker_volume_path in docker_config.get('volumes', {}):
      volumes.append({
        "name": get_name(docker_volume_path),
        "path": docker_volume_path,
        "readOnly": False
      })
    return volumes


  @staticmethod
  def _build_manifest(namespace, repository, tag, docker_layer_data, synthetic_image_id):
    """ Builds an ACI manifest from the docker layer data. """

    config = docker_layer_data.get('config', {})

    source_url = "%s://%s/%s/%s:%s" % (app.config['PREFERRED_URL_SCHEME'],
                                       app.config['SERVER_HOSTNAME'],
                                       namespace, repository, tag)

    # ACI requires that the execution command be absolutely referenced. Therefore, if we find
    # a relative command, we give it as an argument to /bin/sh to resolve and execute for us.
    entrypoint = config.get('Entrypoint', []) or []
    exec_path = entrypoint + (config.get('Cmd', []) or [])
    if exec_path and not exec_path[0].startswith('/'):
      exec_path = ['/bin/sh', '-c', '""%s""' % ' '.join(exec_path)]

    # TODO(jschorr): ACI doesn't support : in the name, so remove any ports.
    hostname = app.config['SERVER_HOSTNAME']
    hostname = hostname.split(':', 1)[0]

    manifest = {
      "acKind": "ImageManifest",
      "acVersion": "0.2.0",
      "name": '%s/%s/%s/%s' % (hostname, namespace, repository, tag),
      "labels": [
        {
          "name": "version",
          "value": "1.0.0"
        },
        {
          "name": "arch",
          "value": docker_layer_data.get('architecture', 'amd64')
        },
        {
          "name": "os",
          "value": docker_layer_data.get('os', 'linux')
        }
      ],
      "app": {
        "exec": exec_path,
        # Below, `or 'root'` is required to replace empty string from Dockerfiles.
        "user": config.get('User', '') or 'root',
        "group": config.get('Group', '') or 'root',
        "eventHandlers": [],
        "workingDirectory": config.get('WorkingDir', '') or '/',
        "environment": [{"name": key, "value": value}
                        for (key, value) in [e.split('=') for e in config.get('Env')]],
        "isolators": ACIImage._build_isolators(config),
        "mountPoints": ACIImage._build_volumes(config),
        "ports": ACIImage._build_ports(config),
        "annotations": [
          {"name": "created", "value": docker_layer_data.get('created', '')},
          {"name": "homepage", "value": source_url},
          {"name": "quay.io/derived-image", "value": synthetic_image_id},
        ]
      },
    }

    return json.dumps(manifest)
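For a quick sanity check of `_build_ports` above, here are its parsing rules applied standalone to an invented config, covering a bare port, a port/udp pair, and a malformed entry that is silently skipped:

config = {'ports': {'80': {}, '53/udp': {}, 'not-a-port': {}}}

for definition in config['ports']:
  protocol = 'tcp'
  if '/' in definition:
    (port_number, protocol) = definition.split('/')
  else:
    port_number = definition
  try:
    print({'name': 'port-%s' % int(port_number),
           'port': int(port_number), 'protocol': protocol})
  except ValueError:
    pass  # 'not-a-port' lands here and is dropped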
102 formats/squashed.py Normal file
@ -0,0 +1,102 @@
from app import app
from util.gzipwrap import GZIP_BUFFER_SIZE
from util.streamlayerformat import StreamLayerMerger
from formats.tarimageformatter import TarImageFormatter

import copy
import json

class FileEstimationException(Exception):
  """ Exception raised by build_docker_load_stream if the estimated size of the layer TAR
      was lower than the actual size. This means the sent TAR header is wrong, and we have
      to fail.
  """
  pass


class SquashedDockerImage(TarImageFormatter):
  """ Image formatter which produces a squashed image compatible with the `docker load`
      command.
  """

  # pylint: disable=too-many-arguments,too-many-locals
  def stream_generator(self, namespace, repository, tag, synthetic_image_id,
                       layer_json, get_image_iterator, get_layer_iterator):
    # Docker import V1 Format (.tar):
    #   repositories - JSON file containing a repo -> tag -> image map
    #   {image ID folder}:
    #     json - The layer JSON
    #     layer.tar - The TARed contents of the layer
    #     VERSION - The docker import version: '1.0'
    layer_merger = StreamLayerMerger(get_layer_iterator)

    # Yield the repositories file:
    synthetic_layer_info = {}
    synthetic_layer_info[tag + '.squash'] = synthetic_image_id

    hostname = app.config['SERVER_HOSTNAME']
    repositories = {}
    repositories[hostname + '/' + namespace + '/' + repository] = synthetic_layer_info

    yield self.tar_file('repositories', json.dumps(repositories))

    # Yield the image ID folder.
    yield self.tar_folder(synthetic_image_id)

    # Yield the JSON layer data.
    layer_json = SquashedDockerImage._build_layer_json(layer_json, synthetic_image_id)
    yield self.tar_file(synthetic_image_id + '/json', json.dumps(layer_json))

    # Yield the VERSION file.
    yield self.tar_file(synthetic_image_id + '/VERSION', '1.0')

    # Yield the merged layer data's header.
    estimated_file_size = 0
    for image in get_image_iterator():
      estimated_file_size += image.storage.uncompressed_size

    yield self.tar_file_header(synthetic_image_id + '/layer.tar', estimated_file_size)

    # Yield the contents of the merged layer.
    yielded_size = 0
    for entry in layer_merger.get_generator():
      yield entry
      yielded_size += len(entry)

    # If the yielded size is more than the estimated size (which is unlikely but possible), then
    # raise an exception since the tar header will be wrong.
    if yielded_size > estimated_file_size:
      raise FileEstimationException()

    # If the yielded size is less than the estimated size (which is likely), fill the rest with
    # zeros.
    if yielded_size < estimated_file_size:
      to_yield = estimated_file_size - yielded_size
      while to_yield > 0:
        yielded = min(to_yield, GZIP_BUFFER_SIZE)
        yield '\0' * yielded
        to_yield -= yielded

    # Yield any file padding to 512 bytes that is necessary.
    yield self.tar_file_padding(estimated_file_size)

    # Last two records are empty in TAR spec.
    yield '\0' * 512
    yield '\0' * 512


  @staticmethod
  def _build_layer_json(layer_json, synthetic_image_id):
    updated_json = copy.deepcopy(layer_json)
    updated_json['id'] = synthetic_image_id

    if 'parent' in updated_json:
      del updated_json['parent']

    if 'config' in updated_json and 'Image' in updated_json['config']:
      updated_json['config']['Image'] = synthetic_image_id

    if 'container_config' in updated_json and 'Image' in updated_json['container_config']:
      updated_json['container_config']['Image'] = synthetic_image_id

    return updated_json
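The size bookkeeping above in concrete numbers: the layer.tar header promises `estimated_file_size` bytes up front, any shortfall in what the merger actually yields is zero-filled, and the file is then padded out to the next 512-byte tar record boundary (the figures below are invented):

def tar_file_padding_length(length):
  # Mirrors TarImageFormatter.tar_file_padding in the next file.
  return (512 - (length % 512)) % 512

estimated_file_size = 10000  # sum of image.storage.uncompressed_size
yielded_size = 9500          # what the layer merger actually produced

zero_fill = estimated_file_size - yielded_size
print(zero_fill)                                     # 500 bytes of '\0'
print(tar_file_padding_length(estimated_file_size))  # 240 bytes, reaching 10240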
46 formats/tarimageformatter.py Normal file
@ -0,0 +1,46 @@
import tarfile
from util.gzipwrap import GzipWrap

class TarImageFormatter(object):
  """ Base class for classes which produce a TAR containing image and layer data. """

  def build_stream(self, namespace, repository, tag, synthetic_image_id, layer_json,
                   get_image_iterator, get_layer_iterator):
    """ Builds and streams a synthetic .tar.gz that represents the formatted TAR created by this
        class's implementation.
    """
    return GzipWrap(self.stream_generator(namespace, repository, tag,
                                          synthetic_image_id, layer_json,
                                          get_image_iterator, get_layer_iterator))

  def stream_generator(self, namespace, repository, tag, synthetic_image_id,
                       layer_json, get_image_iterator, get_layer_iterator):
    raise NotImplementedError

  def tar_file(self, name, contents):
    """ Returns the TAR binary representation for a file with the given name and file contents. """
    length = len(contents)
    tar_data = self.tar_file_header(name, length)
    tar_data += contents
    tar_data += self.tar_file_padding(length)
    return tar_data

  def tar_file_padding(self, length):
    """ Returns TAR file padding for file data of the given length. """
    if length % 512 != 0:
      return '\0' * (512 - (length % 512))

    return ''

  def tar_file_header(self, name, file_size):
    """ Returns TAR file header data for a file with the given name and size. """
    info = tarfile.TarInfo(name=name)
    info.type = tarfile.REGTYPE
    info.size = file_size
    return info.tobuf()

  def tar_folder(self, name):
    """ Returns TAR file header data for a folder with the given name. """
    info = tarfile.TarInfo(name=name)
    info.type = tarfile.DIRTYPE
    return info.tobuf()
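A minimal subclass sketch showing the contract (the `HelloFormatter` class is invented for illustration): `stream_generator` yields raw tar chunks, and `build_stream` wraps that generator in `GzipWrap`, exactly as the real formatters above are consumed by `_open_stream` in endpoints/verbs.py.

from formats.tarimageformatter import TarImageFormatter

class HelloFormatter(TarImageFormatter):
  def stream_generator(self, namespace, repository, tag, synthetic_image_id,
                       layer_json, get_image_iterator, get_layer_iterator):
    yield self.tar_folder('rootfs')
    yield self.tar_file('rootfs/hello.txt', 'hello world')
    yield '\0' * 1024  # two empty 512-byte records terminate the archive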
@ -65,9 +65,22 @@ module.exports = function(grunt) {
        }
      },
      quay: {
        src: ['../static/partials/*.html', '../static/directives/*.html'],
        src: ['../static/partials/*.html', '../static/directives/*.html', '../static/directives/*.html'
              , '../static/directives/config/*.html'],
        dest: '../static/dist/template-cache.js'
      }
    },

    cachebuster: {
      build: {
        options: {
          format: 'json',
          basedir: '../static/'
        },
        src: [ '../static/dist/template-cache.js', '../static/dist/<%= pkg.name %>.min.js',
               '../static/dist/<%= pkg.name %>.css' ],
        dest: '../static/dist/cachebusters.json'
      }
    }
  });

@ -75,7 +88,8 @@ module.exports = function(grunt) {
  grunt.loadNpmTasks('grunt-contrib-concat');
  grunt.loadNpmTasks('grunt-contrib-cssmin');
  grunt.loadNpmTasks('grunt-angular-templates');
  grunt.loadNpmTasks('grunt-cachebuster');

  // Default task(s).
  grunt.registerTask('default', ['ngtemplates', 'concat', 'cssmin', 'uglify']);
  grunt.registerTask('default', ['ngtemplates', 'concat', 'cssmin', 'uglify', 'cachebuster']);
};
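The new grunt-cachebuster task writes content hashes for the built assets to cachebusters.json. A hypothetical Python consumer that stamps those hashes onto static URLs might look like the sketch below; the key layout and helper name are assumptions, not part of this commit:

import json

with open('static/dist/cachebusters.json') as f:
  cachebusters = json.load(f)  # e.g. {'dist/quay.min.js': '1f3a...'}

def cache_busted_url(path):
  # Append the content hash so browsers re-fetch only when a file changes.
  return '/static/%s?v=%s' % (path, cachebusters.get(path, 'dev'))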
@ -6,6 +6,7 @@
    "grunt-contrib-concat": "~0.4.0",
    "grunt-contrib-cssmin": "~0.9.0",
    "grunt-angular-templates": "~0.5.4",
    "grunt-contrib-uglify": "~0.4.0"
    "grunt-contrib-uglify": "~0.4.0",
    "grunt-cachebuster": "~0.1.5"
  }
}
@ -255,6 +255,9 @@ def initialize_database():
  ImageStorageLocation.create(name='local_us')

  ImageStorageTransformation.create(name='squash')
  ImageStorageTransformation.create(name='aci')

  ImageStorageSignatureKind.create(name='gpg2')

  # NOTE: These MUST be copied over to NotificationKind, since every external
  # notification can also generate a Quay.io notification.
19 local-setup-osx.sh Executable file
@ -0,0 +1,19 @@
#!/bin/bash

set -e

# Install Docker and C libraries on which Python libraries are dependent
brew update
brew install boot2docker docker libevent libmagic postgresql

# Some OSX installs don't have /usr/include, which is required for finding SASL headers for our LDAP library
if [ ! -e /usr/include ]; then
  sudo ln -s `xcrun --show-sdk-path`/usr/include /usr/include
fi

# Install Python dependencies
sudo pip install -r requirements.txt

# Put the local testing config in place
git clone git@github.com:coreos-inc/quay-config.git ../quay-config
ln -s ../../quay-config/local conf/stack
@ -40,5 +40,10 @@ git+https://github.com/DevTable/aniso8601-fake.git
git+https://github.com/DevTable/anunidecode.git
git+https://github.com/DevTable/avatar-generator.git
git+https://github.com/DevTable/pygithub.git
git+https://github.com/DevTable/container-cloud-config.git
git+https://github.com/jplana/python-etcd.git
gipc
psutil
pyOpenSSL
pygpgme
cachetools
mock
@ -22,6 +22,7 @@ backports.ssl-match-hostname==3.4.0.2
beautifulsoup4==4.3.2
blinker==1.3
boto==2.35.1
cachetools==1.0.0
docker-py==0.7.1
ecdsa==0.11
futures==2.2.0
@ -35,16 +36,18 @@ itsdangerous==0.24
jsonschema==2.4.0
marisa-trie==0.7
mixpanel-py==3.2.1
mock==1.0.1
paramiko==1.15.2
peewee==2.4.5
peewee==2.4.7
psutil==2.2.0
psycopg2==2.5.4
py-bcrypt==0.4
pycrypto==2.6.1
python-dateutil==2.4.0
python-ldap==2.4.19
python-magic==0.4.6
pygpgme==0.3
pytz==2014.10
pyOpenSSL==0.14
raven==5.1.1
redis==2.10.3
reportlab==2.7
@ -61,4 +64,6 @@ git+https://github.com/DevTable/aniso8601-fake.git
git+https://github.com/DevTable/anunidecode.git
git+https://github.com/DevTable/avatar-generator.git
git+https://github.com/DevTable/pygithub.git
git+https://github.com/DevTable/container-cloud-config.git
git+https://github.com/NateFerrero/oauth2lib.git
git+https://github.com/jplana/python-etcd.git
705 static/css/core-ui.css Normal file
@ -0,0 +1,705 @@

.co-options-menu .fa-gear {
  color: #999;
  cursor: pointer;
}

.co-options-menu .dropdown.open .fa-gear {
  color: #428BCA;
}

.co-img-bg-network {
  background: url('/static/img/network-tile.png') left top repeat, linear-gradient(30deg, #2277ad, #144768) no-repeat left top fixed;
  background-color: #2277ad;
  background-size: auto, 100% 100%;
}

.co-m-navbar {
  background-color: white;
  margin: 0;
  padding-left: 10px;
}

.co-fx-box-shadow {
  -webkit-box-shadow: 0 2px 2px rgba(0, 0, 0, 0.2);
  -moz-box-shadow: 0 2px 2px rgba(0, 0, 0, 0.2);
  -ms-box-shadow: 0 2px 2px rgba(0, 0, 0, 0.2);
  -o-box-shadow: 0 2px 2px rgba(0, 0, 0, 0.2);
  box-shadow: 0 2px 2px rgba(0, 0, 0, 0.2);
}

.co-fx-box-shadow-heavy {
  -webkit-box-shadow: 0px 2px 2px rgba(0, 0, 0, 0.4);
  -moz-box-shadow: 0px 2px 2px rgba(0, 0, 0, 0.4);
  -ms-box-shadow: 0px 2px 2px rgba(0, 0, 0, 0.4);
  -o-box-shadow: 0px 2px 2px rgba(0, 0, 0, 0.4);
  box-shadow: 0px 2px 2px rgba(0, 0, 0, 0.4);
}

.co-fx-text-shadow {
  text-shadow: rgba(0, 0, 0, 1) 1px 1px 2px;
}

.co-nav-title {
  height: 70px;
  margin-top: -22px;
}

.co-nav-title .co-nav-title-content {
  color: white;
  text-align: center;
}

.co-tab-container {
  padding: 0px;
}

.co-tabs {
  margin: 0px;
  padding: 0px;
  width: 82px;
  background-color: #e8f1f6;
  border-right: 1px solid #DDE7ED;

  display: table-cell;
  float: none;
  vertical-align: top;
}

.co-tab-content {
  width: 100%;
  display: table-cell;
  float: none;
  padding: 20px;
}

.co-tabs li {
  list-style: none;
  display: block;
  border-bottom: 1px solid #DDE7ED;
}


.co-tabs li.active {
  background-color: white;
  border-right: 1px solid white;
  margin-right: -1px;
}

.co-tabs li a {
  display: block;
  width: 82px;
  height: 82px;
  line-height: 82px;
  text-align: center;
  font-size: 36px;
  color: gray;
}

.co-tabs li.active a {
  color: black;
}


.co-main-content-panel {
  margin-bottom: 20px;
  background-color: #fff;
  border: 1px solid transparent;
  padding: 10px;

  -webkit-box-shadow: 0px 2px 2px rgba(0, 0, 0, 0.4);
  -moz-box-shadow: 0px 2px 2px rgba(0, 0, 0, 0.4);
  -ms-box-shadow: 0px 2px 2px rgba(0, 0, 0, 0.4);
  -o-box-shadow: 0px 2px 2px rgba(0, 0, 0, 0.4);
  box-shadow: 0px 2px 2px rgba(0, 0, 0, 0.4);
}

.co-tab-panel {
  padding: 0px;
}


.cor-log-box {
  width: 100%;
  height: 550px;
  position: relative;
}

.co-log-viewer {
  position: absolute;
  top: 20px;
  left: 20px;
  right: 20px;
  height: 500px;

  padding: 20px;

  background: rgb(55, 55, 55);
  border: 1px solid black;
  color: white;

  overflow: scroll;
}

.co-log-viewer .co-log-content {
  font-family: Consolas, "Lucida Console", Monaco, monospace;
  font-size: 12px;
  white-space: pre;
}

.cor-log-box .co-log-viewer-new-logs i {
  margin-left: 10px;
  display: inline-block;
}

.cor-log-box .co-log-viewer-new-logs {
  cursor: pointer;
  position: absolute;
  bottom: 40px;
  right: 30px;
  padding: 10px;
  color: white;
  border-radius: 10px;
  background: rgba(72, 158, 72, 0.8);
}

.co-panel {
  margin-bottom: 40px;

  /*border: 1px solid #eee;*/
}

.co-panel .co-panel-heading img {
  margin-right: 6px;
  width: 24px;
}

.co-panel .co-panel-heading i.fa {
  margin-right: 6px;
  width: 24px;
  text-align: center;
}

.co-panel .co-panel-heading {
  padding: 6px;
  /*background: #eee;*/
  border-bottom: 1px solid #eee;

  margin-bottom: 4px;
  font-size: 135%;
  padding-left: 10px;
}

.co-panel .co-panel-body {
  padding: 10px;
}

.co-panel .co-panel-button-bar {
  margin-top: 10px;
  padding-top: 10px;
  border-top: 1px solid #eee;
}

.config-setup-tool-element .help-text {
  margin-top: 6px;
  color: #aaa;
}

.config-setup-tool-element .description {
  padding: 6px;
}

.config-setup-tool-element .config-table > tbody > tr > td:first-child {
  padding-top: 14px;
  font-weight: bold;
}

.config-setup-tool-element .config-table > tbody > tr > td.non-input {
  padding-top: 8px;
}

.config-setup-tool-element .config-table > tbody > tr > td {
  padding: 8px;
  vertical-align: top;
}

.config-setup-tool-element .config-table > tbody > tr > td .config-numeric-field-element {
  width: 100px;
}

.config-setup-tool-element .config-table > tbody > tr > td .config-string-field-element {
  width: 400px;
}

.config-contact-field {
  margin-bottom: 4px;
}

.config-contact-field .dropdown button {
  width: 100px;
  text-align: left;
}

.config-contact-field .dropdown button .caret {
  float: right;
  margin-top: 9px;
}

.config-contact-field .dropdown button i.fa {
  margin-right: 6px;
  width: 14px;
  text-align: center;
  display: inline-block;
}

.config-contact-field .form-control {
  width: 350px;
}

.config-list-field-element .empty {
  color: #ccc;
  margin-bottom: 10px;
  display: block;
}

.config-list-field-element input {
  width: 350px;
}

.config-setup-tool-element .inner-table {
  margin-left: 10px;
}

.config-setup-tool-element .inner-table tr td:first-child {
  font-weight: bold;
}

.config-setup-tool-element .inner-table td {
  padding: 6px;
}

.config-file-field-element input {
  display: inline-block;
  margin-left: 10px;
}

.co-checkbox {
  position: relative;
}

.co-checkbox input {
  display: none;
}

.co-checkbox label {
  position: relative;
  padding-left: 28px;
  cursor: pointer;
}

.co-checkbox label:before {
  content: '';
  cursor: pointer;
  position: absolute;
  width: 20px;
  height: 20px;
  top: 0;
  left: 0;
  border-radius: 4px;

  -webkit-box-shadow: inset 0px 1px 1px rgba(0,0,0,0.5), 0px 1px 0px rgba(255,255,255,.4);
  -moz-box-shadow: inset 0px 1px 1px rgba(0,0,0,0.5), 0px 1px 0px rgba(255,255,255,.4);
  box-shadow: inset 0px 1px 1px rgba(0,0,0,0.5), 0px 1px 0px rgba(255,255,255,.4);

  background: -webkit-linear-gradient(top, #222 0%, #45484d 100%);
  background: -moz-linear-gradient(top, #222 0%, #45484d 100%);
  background: -o-linear-gradient(top, #222 0%, #45484d 100%);
  background: -ms-linear-gradient(top, #222 0%, #45484d 100%);
  background: linear-gradient(top, #222 0%, #45484d 100%);
  filter: progid:DXImageTransform.Microsoft.gradient( startColorstr='#222', endColorstr='#45484d',GradientType=0 );
}

.co-checkbox label:after {
  -ms-filter: "progid:DXImageTransform.Microsoft.Alpha(Opacity=0)";
  filter: alpha(opacity=0);
  opacity: 0;
  content: '';
  position: absolute;
  width: 11px;
  height: 7px;
  background: transparent;
  top: 5px;
  left: 4px;
  border: 3px solid #fcfff4;
  border-top: none;
  border-right: none;

  -webkit-transform: rotate(-45deg);
  -moz-transform: rotate(-45deg);
  -o-transform: rotate(-45deg);
  -ms-transform: rotate(-45deg);
  transform: rotate(-45deg);
}

.co-checkbox label:hover::after {
  -ms-filter: "progid:DXImageTransform.Microsoft.Alpha(Opacity=30)";
  filter: alpha(opacity=30);
  opacity: 0.3;
}

.co-checkbox input[type=checkbox]:checked + label:after {
  -ms-filter: "progid:DXImageTransform.Microsoft.Alpha(Opacity=100)";
  filter: alpha(opacity=100);
  opacity: 1;
  border: 3px solid rgb(26, 255, 26);
  border-top: none;
  border-right: none;
}

.co-floating-bottom-bar {
  height: 50px;
}

.co-floating-bottom-bar.floating {
  position: fixed;
  bottom: 0px;
}

.config-setup-tool .cor-floating-bottom-bar button i.fa {
  margin-right: 6px;
}

.config-setup-tool .service-verification {
  padding: 20px;
  background: #343434;
  color: white;
  margin-bottom: -14px;
}

.config-setup-tool .service-verification-row {
  margin-bottom: 6px;
}

.config-setup-tool .service-verification-row .service-title {
  font-variant: small-caps;
  font-size: 145%;
  vertical-align: middle;
}

#validateAndSaveModal .fa-warning {
  font-size: 22px;
  margin-right: 10px;
  vertical-align: middle;
  color: rgb(255, 186, 53);
}

#validateAndSaveModal .fa-check-circle {
  font-size: 22px;
  margin-right: 10px;
  vertical-align: middle;
  color: rgb(53, 186, 53);
}

.config-setup-tool .service-verification-error {
  white-space: pre;
  margin-top: 10px;
  margin-left: 36px;
  margin-bottom: 20px;
  max-height: 250px;
  overflow: auto;
  border: 1px solid #797979;
  background: black;
  padding: 6px;
  font-family: Consolas, "Lucida Console", Monaco, monospace;
  font-size: 12px;
}

.co-m-loader, .co-m-inline-loader {
  min-width: 28px; }

.co-m-loader {
  display: block;
  position: absolute;
  left: 50%;
  top: 50%;
  margin: -11px 0 0 -13px; }

.co-m-inline-loader {
  display: inline-block;
  cursor: default; }
  .co-m-inline-loader:hover {
    text-decoration: none; }

.co-m-loader-dot__one, .co-m-loader-dot__two, .co-m-loader-dot__three {
  -webkit-border-radius: 3px;
  -moz-border-radius: 3px;
  -ms-border-radius: 3px;
  -o-border-radius: 3px;
  border-radius: 3px;
  animation-fill-mode: both;
  -webkit-animation-fill-mode: both;
  -moz-animation-fill-mode: both;
  -ms-animation-fill-mode: both;
  -o-animation-fill-mode: both;
  animation-name: bouncedelay;
  animation-duration: 1s;
  animation-timing-function: ease-in-out;
  animation-delay: 0;
  animation-direction: normal;
  animation-iteration-count: infinite;
  animation-fill-mode: forwards;
  animation-play-state: running;
  -webkit-animation-name: bouncedelay;
  -webkit-animation-duration: 1s;
  -webkit-animation-timing-function: ease-in-out;
  -webkit-animation-delay: 0;
  -webkit-animation-direction: normal;
  -webkit-animation-iteration-count: infinite;
  -webkit-animation-fill-mode: forwards;
  -webkit-animation-play-state: running;
  -moz-animation-name: bouncedelay;
  -moz-animation-duration: 1s;
  -moz-animation-timing-function: ease-in-out;
  -moz-animation-delay: 0;
  -moz-animation-direction: normal;
  -moz-animation-iteration-count: infinite;
  -moz-animation-fill-mode: forwards;
  -moz-animation-play-state: running;
  display: inline-block;
  height: 6px;
  width: 6px;
  background: #419eda;
  border-radius: 100%;
  display: inline-block; }

.co-m-loader-dot__one {
  animation-delay: -0.32s;
  -webkit-animation-delay: -0.32s;
  -moz-animation-delay: -0.32s;
  -ms-animation-delay: -0.32s;
  -o-animation-delay: -0.32s; }

.co-m-loader-dot__two {
  animation-delay: -0.16s;
  -webkit-animation-delay: -0.16s;
  -moz-animation-delay: -0.16s;
  -ms-animation-delay: -0.16s;
  -o-animation-delay: -0.16s; }

@-webkit-keyframes bouncedelay {
  0%, 80%, 100% {
    -webkit-transform: scale(0.25, 0.25);
    -moz-transform: scale(0.25, 0.25);
    -ms-transform: scale(0.25, 0.25);
    -o-transform: scale(0.25, 0.25);
    transform: scale(0.25, 0.25); }

  40% {
    -webkit-transform: scale(1, 1);
    -moz-transform: scale(1, 1);
    -ms-transform: scale(1, 1);
    -o-transform: scale(1, 1);
    transform: scale(1, 1); } }

@-moz-keyframes bouncedelay {
  0%, 80%, 100% {
    -webkit-transform: scale(0.25, 0.25);
    -moz-transform: scale(0.25, 0.25);
    -ms-transform: scale(0.25, 0.25);
    -o-transform: scale(0.25, 0.25);
    transform: scale(0.25, 0.25); }

  40% {
    -webkit-transform: scale(1, 1);
    -moz-transform: scale(1, 1);
    -ms-transform: scale(1, 1);
    -o-transform: scale(1, 1);
    transform: scale(1, 1); } }

@-ms-keyframes bouncedelay {
  0%, 80%, 100% {
    -webkit-transform: scale(0.25, 0.25);
    -moz-transform: scale(0.25, 0.25);
    -ms-transform: scale(0.25, 0.25);
    -o-transform: scale(0.25, 0.25);
    transform: scale(0.25, 0.25); }

  40% {
    -webkit-transform: scale(1, 1);
    -moz-transform: scale(1, 1);
    -ms-transform: scale(1, 1);
    -o-transform: scale(1, 1);
    transform: scale(1, 1); } }

@keyframes bouncedelay {
  0%, 80%, 100% {
    -webkit-transform: scale(0.25, 0.25);
    -moz-transform: scale(0.25, 0.25);
    -ms-transform: scale(0.25, 0.25);
    -o-transform: scale(0.25, 0.25);
    transform: scale(0.25, 0.25); }

  40% {
    -webkit-transform: scale(1, 1);
    -moz-transform: scale(1, 1);
    -ms-transform: scale(1, 1);
    -o-transform: scale(1, 1);
    transform: scale(1, 1); } }

.co-dialog .modal-body {
  padding: 10px;
  min-height: 100px;
}

.co-dialog .modal-body h4 {
  margin-bottom: 20px;
}

.co-dialog .modal-content {
  border-radius: 0px;
}

.co-dialog.fatal-error .modal-content {
  padding-left: 175px;
}

.co-dialog.fatal-error .alert-icon-container-container {
  position: absolute;
  top: -36px;
  left: -175px;
  bottom: 20px;
}

.co-dialog.fatal-error .alert-icon-container {
  height: 100%;
  display: table;
}

.co-dialog.fatal-error .alert-icon {
  display: table-cell;
  vertical-align: middle;
  border-right: 1px solid #eee;
  margin-right: 20px;
}

.co-dialog.fatal-error .alert-icon:before {
  content: "\f071";
  font-family: FontAwesome;
  font-size: 60px;
  padding-left: 50px;
  padding-right: 50px;
  color: #c53c3f;
  text-align: center;
}


.co-dialog .modal-header .cor-step-bar {
  float: right;
}

.co-dialog .modal-footer.working {
  text-align: left;
}

.co-dialog .modal-footer.working .btn {
  float: right;
}

.co-dialog .modal-footer.working .cor-loader-inline {
  margin-right: 10px;
}

.co-dialog .modal-footer .left-align {
  float: left;
  vertical-align: middle;
  font-size: 16px;
  margin-top: 8px;
}

.co-dialog .modal-footer .left-align i.fa-warning {
  color: #ffba35;
  display: inline-block;
  margin-right: 6px;
}

.co-dialog .modal-footer .left-align i.fa-check {
  color: green;
  display: inline-block;
  margin-right: 6px;
}

.co-step-bar .co-step-element {
  cursor: default;
  display: inline-block;
  width: 28px;
  height: 28px;

  position: relative;
  color: #ddd;

  text-align: center;
  line-height: 24px;
  font-size: 16px;
}

.co-step-bar .co-step-element.text {
  margin-left: 24px;
  background: white;
}

.co-step-bar .co-step-element.icon {
  margin-left: 22px;
}

.co-step-bar .co-step-element:first-child {
  margin-left: 0px;
}
|
.co-step-bar .co-step-element.active {
|
||||||
|
color: #53a3d9;
|
||||||
|
}
|
||||||
|
|
||||||
|
.co-step-bar .co-step-element:first-child:before {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
.co-step-bar .co-step-element:before {
|
||||||
|
content: "";
|
||||||
|
position: absolute;
|
||||||
|
top: 12px;
|
||||||
|
width: 14px;
|
||||||
|
border-top: 2px solid #ddd;
|
||||||
|
}
|
||||||
|
|
||||||
|
.co-step-bar .co-step-element.icon:before {
|
||||||
|
left: -20px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.co-step-bar .co-step-element.text:before {
|
||||||
|
left: -22px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.co-step-bar .co-step-element.active:before {
|
||||||
|
border-top: 2px solid #53a3d9;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
.co-step-bar .co-step-element.text {
|
||||||
|
border-radius: 100%;
|
||||||
|
border: 2px solid #ddd;
|
||||||
|
}
|
||||||
|
|
||||||
|
.co-step-bar .co-step-element.text.active {
|
||||||
|
border: 2px solid #53a3d9;
|
||||||
|
}
|
||||||
|
|
||||||
|
@media screen and (min-width: 900px) {
|
||||||
|
.co-dialog .modal-dialog {
|
||||||
|
width: 800px;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
.co-alert .co-step-bar {
|
||||||
|
float: right;
|
||||||
|
margin-top: 6px;
|
||||||
|
}
|
@@ -1,5 +1,5 @@
 * {
-  font-family: 'Droid Sans', sans-serif;
+  font-family: 'Source Sans Pro', sans-serif;
   margin: 0;
 }

@@ -88,34 +88,6 @@
   margin: 0;
 }
 
-.co-img-bg-network {
-  background: url('/static/img/network-tile.png') left top repeat, linear-gradient(30deg, #2277ad, #144768) no-repeat left top fixed;
-  background-color: #2277ad;
-  background-size: auto, 100% 100%;
-}
-
-.co-m-navbar {
-  background-color: white;
-  margin: 0;
-  padding-left: 10px;
-}
-
-.co-fx-box-shadow {
-  -webkit-box-shadow: 0 2px 2px rgba(0, 0, 0, 0.2);
-  -moz-box-shadow: 0 2px 2px rgba(0, 0, 0, 0.2);
-  -ms-box-shadow: 0 2px 2px rgba(0, 0, 0, 0.2);
-  -o-box-shadow: 0 2px 2px rgba(0, 0, 0, 0.2);
-  box-shadow: 0 2px 2px rgba(0, 0, 0, 0.2);
-}
-
-.co-fx-box-shadow-heavy {
-  -webkit-box-shadow: 0px 2px 2px rgba(0, 0, 0, 0.4);
-  -moz-box-shadow: 0px 2px 2px rgba(0, 0, 0, 0.4);
-  -ms-box-shadow: 0px 2px 2px rgba(0, 0, 0, 0.4);
-  -o-box-shadow: 0px 2px 2px rgba(0, 0, 0, 0.4);
-  box-shadow: 0px 2px 2px rgba(0, 0, 0, 0.4);
-}
-
 .main-panel {
   margin-bottom: 20px;
   background-color: #fff;

@@ -873,28 +845,24 @@ i.toggle-icon:hover {
   background-color: #DFFF00;
 }
 
-.phase-icon.waiting, .phase-icon.unpacking, .phase-icon.starting, .phase-icon.initializing {
-  background-color: #ddd;
+.phase-icon.waiting, .phase-icon.build-scheduled {
+  background-color: rgba(66, 139, 202, 0.2);
 }
 
-.phase-icon.pulling {
-  background-color: #cab442;
+.phase-icon.unpacking, .phase-icon.starting, .phase-icon.initializing {
+  background-color: rgba(66, 139, 202, 0.4);
 }
 
-.phase-icon.building {
-  background-color: #f0ad4e;
+.phase-icon.pulling, .phase-icon.priming-cache, .phase-icon.checking-cache {
+  background-color: rgba(66, 139, 202, 0.6);
 }
 
-.phase-icon.priming-cache {
-  background-color: #ddd;
-}
-
-.phase-icon.pushing {
-  background-color: #5cb85c;
+.phase-icon.pushing, .phase-icon.building {
+  background-color: rgba(66, 139, 202, 0.8);
 }
 
 .phase-icon.complete {
-  background-color: #428bca;
+  background-color: rgba(66, 139, 202, 1);
 }
 
 .build-status {

@@ -2614,7 +2582,7 @@ p.editable:hover i {
 }
 
 .repo-build .build-pane .build-logs .log-container.command {
-  margin-left: 42px;
+  margin-left: 22px;
 }
 
 .repo-build .build-pane .build-logs .container-header.building {

@@ -4439,14 +4407,28 @@ pre.command:before
   padding: 6px;
 }
 
-.user-row.super-user td {
-  background-color: #eeeeee;
+.user-row {
+  border-bottom: 0px;
+}
+
+.user-row td {
+  vertical-align: middle;
 }
 
 .user-row .user-class {
   text-transform: uppercase;
 }
 
+.user-row .labels {
+  float: right;
+  white-space: nowrap;
+}
+
+.user-row .labels .label {
+  text-transform: uppercase;
+  margin-right: 10px;
+}
+
 .form-change input {
   margin-top: 12px;
   margin-bottom: 12px;

@@ -4910,6 +4892,50 @@ i.slack-icon {
   margin-right: 10px;
 }
 
+.system-log-download-panel {
+  padding: 20px;
+  text-align: center;
+  font-size: 18px;
+}
+
+.system-log-download-panel a {
+  margin-top: 20px;
+}
+
+.initial-setup-modal .quay-spinner {
+  vertical-align: middle;
+  margin-right: 10px;
+  display: inline-block;
+}
+
+.initial-setup-modal .valid-database p {
+  font-size: 18px;
+}
+
+.verified {
+  font-size: 16px;
+  margin-bottom: 16px;
+}
+
+.verified i.fa {
+  font-size: 26px;
+  margin-right: 10px;
+  vertical-align: middle;
+  color: rgb(53, 186, 53);
+}
+
+.registry-logo-preview {
+  border: 1px solid #eee;
+  vertical-align: middle;
+  padding: 4px;
+  max-width: 150px;
+}
+
+.modal-footer.alert {
+  text-align: left;
+  margin-bottom: -16px;
+}
+
 .dockerfile-build-form table td {
   vertical-align: top;
   white-space: nowrap;

@@ -4926,3 +4952,23 @@ i.slack-icon {
   padding-left: 22px;
 }
 
+.restart-required {
+  position: relative;
+  padding-left: 54px;
+}
+
+.restart-required button {
+  float: right;
+  margin-top: 4px;
+}
+
+.restart-required button i.fa {
+  margin-right: 6px;
+}
+
+.restart-required i.fa-warning {
+  position: absolute;
+  top: 24px;
+  left: 16px;
+  font-size: 28px;
+}
static/directives/config/config-bool-field.html (new file, 5 lines)
@@ -0,0 +1,5 @@
<div class="config-bool-field-element">
  <form name="fieldform" novalidate>
    <input type="checkbox" ng-model="binding">
  </form>
</div>

static/directives/config/config-contact-field.html (new file, 46 lines)
@@ -0,0 +1,46 @@
<div class="config-contact-field-element">
  <table>
    <tr>
      <td>
        <div class="dropdown">
          <button class="btn btn-default dropdown-toggle" type="button" data-toggle="dropdown">
            <span ng-switch="kind">
              <span ng-switch-when="mailto"><i class="fa fa-envelope"></i>E-mail</span>
              <span ng-switch-when="irc"><i class="fa fa-comment"></i>IRC</span>
              <span ng-switch-when="tel"><i class="fa fa-phone"></i>Phone</span>
              <span ng-switch-default><i class="fa fa-ticket"></i>URL</span>
            </span>
            <span class="caret"></span>
          </button>
          <ul class="dropdown-menu" role="menu">
            <li role="presentation">
              <a role="menuitem" tabindex="-1" href="javascript:void(0)" ng-click="kind = 'mailto'">
                <i class="fa fa-envelope"></i> E-mail
              </a>
            </li>
            <li role="presentation">
              <a role="menuitem" tabindex="-1" href="javascript:void(0)" ng-click="kind = 'irc'">
                <i class="fa fa-comment"></i> IRC
              </a>
            </li>
            <li role="presentation">
              <a role="menuitem" tabindex="-1" href="javascript:void(0)" ng-click="kind = 'tel'">
                <i class="fa fa-phone"></i> Telephone
              </a>
            </li>
            <li role="presentation">
              <a role="menuitem" tabindex="-1" href="javascript:void(0)" ng-click="kind = 'http'">
                <i class="fa fa-ticket"></i> URL
              </a>
            </li>
          </ul>
        </div>
      </td>
      <td>
        <form>
          <input class="form-control" placeholder="{{ getPlaceholder(kind) }}" ng-model="value">
        </form>
      </td>
    </tr>
  </table>
</div>

static/directives/config/config-contacts-field.html (new file, 4 lines)
@@ -0,0 +1,4 @@
<div class="config-contacts-field-element">
  <div class="config-contact-field" binding="item.value" ng-repeat="item in items">
  </div>
</div>

static/directives/config/config-file-field.html (new file, 10 lines)
@@ -0,0 +1,10 @@
<div class="config-file-field-element">
  <span ng-show="uploadProgress == null">
    <span ng-if="hasFile"><code>{{ filename }}</code></span>
    <span class="nofile" ng-if="!hasFile"><code>{{ filename }}</code> not found in mounted config directory: </span>
    <input type="file" ng-file-select="onFileSelect($files)">
  </span>
  <span ng-show="uploadProgress != null">
    Uploading file as <strong>{{ filename }}</strong>... {{ uploadProgress }}%
  </span>
</div>

static/directives/config/config-list-field.html (new file, 16 lines)
@@ -0,0 +1,16 @@
<div class="config-list-field-element">
  <ul ng-show="binding && binding.length">
    <li class="item" ng-repeat="item in binding">
      <span class="item-title">{{ item }}</span>
      <span class="item-delete">
        <a href="javascript:void(0)" ng-click="removeItem(item)">Remove</a>
      </span>
    </li>
  </ul>
  <span class="empty" ng-if="!binding || binding.length == 0">No {{ itemTitle }}s defined</span>
  <form class="form-control-container" ng-submit="addItem()">
    <input type="text" class="form-control" placeholder="{{ placeholder }}"
           ng-model="newItemName" style="display: inline-block">
    <button class="btn btn-default" style="display: inline-block">Add</button>
  </form>
</div>
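
The list-field template above delegates item management to addItem() and removeItem() on its scope. A minimal sketch of scope functions satisfying that contract, assuming the directive binds "binding" to an array; this is illustrative only, not the shipped directive code:

// Sketch only: assumes $scope.binding is the bound array and
// $scope.newItemName backs the text input in the template above.
$scope.addItem = function() {
  if (!$scope.newItemName) { return; }
  $scope.binding = $scope.binding || [];
  if ($scope.binding.indexOf($scope.newItemName) < 0) {
    // Avoid duplicate entries in the list.
    $scope.binding.push($scope.newItemName);
  }
  $scope.newItemName = null;
};

$scope.removeItem = function(item) {
  var index = $scope.binding.indexOf(item);
  if (index >= 0) {
    $scope.binding.splice(index, 1);
  }
};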

static/directives/config/config-numeric-field.html (new file, 6 lines)
@@ -0,0 +1,6 @@
<div class="config-numeric-field-element">
  <form name="fieldform" novalidate>
    <input type="number" class="form-control" placeholder="{{ placeholder || '' }}"
           ng-model="bindinginternal" ng-trim="false" ng-minlength="1" required>
  </form>
</div>
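
Note that the numeric field binds its input to bindinginternal rather than binding, which suggests the directive mediates between the input and the outward-facing model, for example to apply the default-value attribute used by the setup tool later in this diff. A hedged sketch of that wiring; everything beyond the names visible in the template is an assumption:

// Assumed controller wiring for config-numeric-field (sketch only).
$scope.bindinginternal = ($scope.binding != null) ? $scope.binding : $scope.defaultValue;
$scope.$watch('bindinginternal', function(value) {
  // Keep the outward-facing binding numeric, falling back to the default.
  $scope.binding = (value != null) ? Number(value) : $scope.defaultValue;
});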

static/directives/config/config-parsed-field.html (new file, 1 line)
@@ -0,0 +1 @@
<div class="config-parsed-field-element"></div>
static/directives/config/config-setup-tool.html (new file, 625 lines)
@@ -0,0 +1,625 @@
<div class="config-setup-tool-element">
  <div class="quay-spinner" ng-if="!config"></div>
  <div ng-show="config && config['SUPER_USERS']">
    <form id="configform" name="configform">

      <!-- Basic Configuration -->
      <div class="co-panel">
        <div class="co-panel-heading">
          <i class="fa fa-gears"></i> Basic Configuration
        </div>
        <div class="co-panel-body">
          <table class="config-table">
            <tr>
              <td>Enterprise Logo URL:</td>
              <td>
                <span class="config-string-field" binding="config.ENTERPRISE_LOGO_URL"
                      placeholder="http://example.com/logo.png"></span>
                <div class="help-text">
                  Enter the full URL to your company's logo.
                </div>
              </td>
              <td>
                <img class="registry-logo-preview" ng-src="{{ config.ENTERPRISE_LOGO_URL }}">
              </td>
            </tr>
            <tr>
              <td class="non-input">Contact Information:</td>
              <td colspan="2">
                <span class="config-contacts-field" binding="config.CONTACT_INFO"></span>
                <div class="help-text" style="margin-top: 10px;">
                  Information to show in the Contact Page. If none is specified, CoreOS contact
                  information is displayed.
                </div>
              </td>
            </tr>
            <tr>
              <td>User Creation:</td>
              <td colspan="2">
                <div class="co-checkbox">
                  <input id="ftuc" type="checkbox" ng-model="config.FEATURE_USER_CREATION">
                  <label for="ftuc">Enable Open User Creation</label>
                </div>
                <div class="help-text">
                  If enabled, user accounts can be created by anyone.
                  Users can always be created in the users panel under this superuser view.
                </div>
              </td>
            </tr>
          </table>
        </div>
      </div>

      <!-- Server Configuration -->
      <div class="co-panel">
        <div class="co-panel-heading">
          <i class="fa fa-cloud"></i> Server Configuration
        </div>
        <div class="co-panel-body">
          <table class="config-table">
            <tr>
              <td>Server Hostname:</td>
              <td>
                <span class="config-string-field" binding="config.SERVER_HOSTNAME"
                      placeholder="Hostname (and optional port if non-standard)"
                      pattern="{{ HOSTNAME_REGEX }}"></span>
                <div class="help-text">
                  The HTTP host (and, if using a non-standard HTTP/HTTPS port, the port number)
                  at which the registry will be accessible on the network.
                </div>
              </td>
            </tr>
            <tr>
              <td>SSL:</td>
              <td>
                <div class="co-checkbox">
                  <input id="enable-ssl" type="checkbox" ng-model="config.PREFERRED_URL_SCHEME"
                         ng-true-value="https" ng-false-value="http">
                  <label for="enable-ssl">Enable SSL</label>
                </div>
                <div class="help-text" style="margin-bottom: 10px">
                  Valid SSL certificate and private key files are required to use this option.
                </div>

                <table class="config-table" ng-if="config.PREFERRED_URL_SCHEME == 'https'">
                  <tr>
                    <td class="non-input">Certificate:</td>
                    <td>
                      <span class="config-file-field" filename="ssl.cert"></span>
                      <div class="help-text">
                        The certificate must be in PEM format.
                      </div>
                    </td>
                  </tr>
                  <tr>
                    <td class="non-input">Private key:</td>
                    <td>
                      <span class="config-file-field" filename="ssl.key"></span>
                    </td>
                  </tr>
                </table>
              </td>
            </tr>
          </table>

        </div>
      </div>

      <!-- Redis -->
      <div class="co-panel">
        <div class="co-panel-heading">
          <img src="/static/img/redis-small.png"> redis
        </div>
        <div class="co-panel-body">
          <div class="description">
            <p>A <a href="http://redis.io" target="_blank">redis</a> key-value store is required for real-time events and build logs.</p>
          </div>

          <table class="config-table">
            <tr>
              <td>Redis Hostname:</td>
              <td>
                <span class="config-string-field" binding="mapped.redis.host"
                      placeholder="The redis server hostname"
                      pattern="{{ HOSTNAME_REGEX }}"
                      validator="validateHostname(value)"></span>
              </td>
            </tr>
            <tr>
              <td>Redis port:</td>
              <td>
                <span class="config-numeric-field" binding="mapped.redis.port" default-value="6379"></span>
                <div class="help-text">
                  Access to this port and hostname must be allowed from all hosts running
                  the enterprise registry.
                </div>
              </td>
            </tr>
            <tr>
              <td>Redis password:</td>
              <td>
                <input class="form-control" type="password" ng-model="mapped.redis.password"
                       placeholder="Optional password for connecting to redis">
              </td>
            </tr>
          </table>
        </div>
      </div> <!-- /Redis -->

      <!-- Registry Storage -->
      <div class="co-panel">
        <div class="co-panel-heading">
          <i class="fa fa-download"></i> Registry Storage
        </div>
        <div class="co-panel-body">
          <div class="description">
            <p>
              Registry images can be stored either locally or in a remote storage system.
              <strong>A remote storage system is required for high-availability systems.</strong>
            </p>

            <table class="config-table">
              <tr>
                <td class="non-input">Storage Engine:</td>
                <td>
                  <select ng-model="config.DISTRIBUTED_STORAGE_CONFIG.local[0]">
                    <option value="LocalStorage">Locally mounted directory</option>
                    <option value="S3Storage">Amazon S3</option>
                    <option value="GoogleCloudStorage">Google Cloud Storage</option>
                    <option value="RadosGWStorage">Ceph Object Gateway (RADOS)</option>
                  </select>
                </td>
              </tr>

              <!-- Fields -->
              <tr ng-repeat="field in STORAGE_CONFIG_FIELDS[config.DISTRIBUTED_STORAGE_CONFIG.local[0]]">
                <td>{{ field.title }}:</td>
                <td>
                  <span class="config-string-field"
                        binding="config.DISTRIBUTED_STORAGE_CONFIG.local[1][field.name]"
                        placeholder="{{ field.placeholder }}"
                        ng-if="field.kind == 'text'"></span>
                  <div class="co-checkbox" ng-if="field.kind == 'bool'">
                    <input id="dsc-{{ field.name }}" type="checkbox"
                           ng-model="config.DISTRIBUTED_STORAGE_CONFIG.local[1][field.name]">
                    <label for="dsc-{{ field.name }}">{{ field.placeholder }}</label>
                  </div>
                  <div class="help-text" ng-if="field.help_url">
                    See <a href="{{ field.help_url }}" target="_blank">Documentation</a> for more information.
                  </div>
                </td>
              </tr>
            </table>

          </div>
        </div>
      </div>

      <!-- E-mail -->
      <div class="co-panel">
        <div class="co-panel-heading">
          <i class="fa fa-envelope"></i> E-mail
        </div>
        <div class="co-panel-body">
          <div class="description">
            <p>Valid e-mail server configuration is required for notification e-mails and the ability of
               users to reset their passwords.</p>
          </div>

          <div class="co-checkbox">
            <input id="ftmail" type="checkbox" ng-model="config.FEATURE_MAILING">
            <label for="ftmail">Enable E-mails</label>
          </div>

          <table class="config-table" ng-if="config.FEATURE_MAILING">
            <tr>
              <td>SMTP Server:</td>
              <td>
                <span class="config-string-field" binding="config.MAIL_SERVER"
                      placeholder="SMTP server for sending e-mail"
                      pattern="{{ HOSTNAME_REGEX }}"
                      validator="validateHostname(value)"></span>
              </td>
            </tr>
            <tr>
              <td>SMTP Server Port:</td>
              <td>
                <span class="config-numeric-field" binding="config.MAIL_PORT"
                      default-value="587"></span>
              </td>
            </tr>
            <tr>
              <td>TLS:</td>
              <td>
                <div class="co-checkbox">
                  <input id="mut" type="checkbox" ng-model="config.MAIL_USE_TLS">
                  <label for="mut">Use TLS</label>
                </div>
              </td>
            </tr>
            <tr>
              <td>Mail Sender:</td>
              <td>
                <input class="form-control" type="email" ng-model="config.DEFAULT_MAIL_SENDER"
                       placeholder="E-mail address">
                <div class="help-text">
                  E-mail address from which all e-mails are sent. If not specified,
                  <code>support@quay.io</code> will be used.
                </div>
              </td>
            </tr>
            <tr>
              <td>Authentication:</td>
              <td>
                <div class="co-checkbox">
                  <input id="uma" type="checkbox" ng-model="config.MAIL_USE_AUTH">
                  <label for="uma">Requires Authentication</label>
                </div>

                <table class="config-table" ng-if="config.MAIL_USE_AUTH">
                  <tr>
                    <td>Username:</td>
                    <td>
                      <span class="config-string-field" binding="config.MAIL_USERNAME"
                            placeholder="Username for authentication"></span>
                    </td>
                  </tr>
                  <tr>
                    <td>Password:</td>
                    <td>
                      <input class="form-control" type="password"
                             ng-model="config.MAIL_PASSWORD"
                             placeholder="Password for authentication">
                    </td>
                  </tr>
                </table>
              </td>
            </tr>

          </table>
        </div>
      </div> <!-- /E-mail -->

      <!-- Authentication -->
      <div class="co-panel">
        <div class="co-panel-heading">
          <i class="fa fa-users"></i> Authentication
        </div>
        <div class="co-panel-body">
          <div class="description">
            <p>
              Authentication for the registry can be handled by either the registry itself or LDAP.
              External authentication providers (such as Github) can be used on top of this choice.
            </p>
          </div>

          <table class="config-table">
            <tr>
              <td class="non-input">Authentication:</td>
              <td>
                <select ng-model="config.AUTHENTICATION_TYPE">
                  <option value="Database">Local Database</option>
                  <option value="LDAP">LDAP</option>
                </select>
              </td>
            </tr>
          </table>

          <table class="config-table" ng-if="config.AUTHENTICATION_TYPE == 'LDAP'">
            <tr>
              <td>LDAP URI:</td>
              <td><span class="config-string-field" binding="config.LDAP_URI"></span></td>
            </tr>
            <tr>
              <td>Administrator DN:</td>
              <td><span class="config-string-field" binding="config.LDAP_ADMIN_DN"></span></td>
            </tr>
            <tr>
              <td>Base DN:</td>
              <td><span class="config-list-field" item-title="DN" binding="config.LDAP_BASE_DN"></span></td>
            </tr>
            <tr>
              <td>Administrator Password:</td>
              <td><span class="config-string-field" binding="config.LDAP_ADMIN_PASSWD"></span></td>
            </tr>
            <tr>
              <td>E-mail Attribute:</td>
              <td><span class="config-string-field" binding="config.LDAP_EMAIL_ATTR"></span></td>
            </tr>
            <tr>
              <td>UID Attribute:</td>
              <td><span class="config-string-field" binding="config.LDAP_UID_ATTR"></span></td>
            </tr>
            <tr>
              <td>User RDN:</td>
              <td><span class="config-list-field" item-title="RDN" binding="config.LDAP_USER_RDN"></span></td>
            </tr>
          </table>
        </div>
      </div> <!-- /Authentication -->

      <!-- Github Authentication -->
      <div class="co-panel">
        <div class="co-panel-heading">
          <i class="fa fa-github"></i> Github (Enterprise) Authentication
        </div>
        <div class="co-panel-body">
          <div class="description">
            <p>
              If enabled, users can use Github or Github Enterprise to authenticate to the registry.
            </p>
            <p>
              <strong>Note:</strong> A registered Github (Enterprise) OAuth application is required.
              View instructions on how to
              <a href="https://coreos.com/docs/enterprise-registry/github-auth/" target="_blank">
                Create an OAuth Application in GitHub
              </a>.
            </p>
          </div>

          <div class="co-checkbox">
            <input id="ftghl" type="checkbox" ng-model="config.FEATURE_GITHUB_LOGIN">
            <label for="ftghl">Enable Github Authentication</label>
          </div>

          <table class="config-table" ng-if="config.FEATURE_GITHUB_LOGIN">
            <tr>
              <td>Github:</td>
              <td>
                <select ng-model="mapped.GITHUB_LOGIN_KIND">
                  <option value="hosted">Github.com</option>
                  <option value="enterprise">Github Enterprise</option>
                </select>
              </td>
            </tr>
            <tr ng-if="mapped.GITHUB_LOGIN_KIND == 'enterprise'">
              <td>Github Endpoint:</td>
              <td>
                <span class="config-string-field"
                      binding="config.GITHUB_LOGIN_CONFIG.GITHUB_ENDPOINT"
                      placeholder="https://my.githubserver"
                      pattern="{{ GITHUB_REGEX }}">
                </span>
                <div class="help-text">
                  The Github Enterprise endpoint. Must start with http:// or https://.
                </div>
              </td>
            </tr>
            <tr>
              <td>OAuth Client ID:</td>
              <td>
                <span class="config-string-field" binding="config.GITHUB_LOGIN_CONFIG.CLIENT_ID">
                </span>
              </td>
            </tr>
            <tr>
              <td>OAuth Client Secret:</td>
              <td>
                <span class="config-string-field" binding="config.GITHUB_LOGIN_CONFIG.CLIENT_SECRET">
                </span>
              </td>
            </tr>
          </table>
        </div>
      </div> <!-- /Github Authentication -->

      <!-- Google Authentication -->
      <div class="co-panel">
        <div class="co-panel-heading">
          <i class="fa fa-google"></i> Google Authentication
        </div>
        <div class="co-panel-body">
          <div class="description">
            <p>
              If enabled, users can use Google to authenticate to the registry.
            </p>
            <p>
              <strong>Note:</strong> A registered Google OAuth application is required.
              Visit the
              <a href="https://console.developers.google.com" target="_blank">
                Google Developer Console
              </a>
              to register an application.
            </p>
          </div>

          <div class="co-checkbox">
            <input id="ftgoa" type="checkbox" ng-model="config.FEATURE_GOOGLE_LOGIN">
            <label for="ftgoa">Enable Google Authentication</label>
          </div>

          <table class="config-table" ng-if="config.FEATURE_GOOGLE_LOGIN">
            <tr>
              <td>OAuth Client ID:</td>
              <td>
                <span class="config-string-field" binding="config.GOOGLE_LOGIN_CONFIG.CLIENT_ID">
                </span>
              </td>
            </tr>
            <tr>
              <td>OAuth Client Secret:</td>
              <td>
                <span class="config-string-field" binding="config.GOOGLE_LOGIN_CONFIG.CLIENT_SECRET">
                </span>
              </td>
            </tr>
          </table>
        </div>
      </div> <!-- /Google Authentication -->

      <!-- Build Support -->
      <div class="co-panel">
        <div class="co-panel-heading">
          <i class="fa fa-tasks"></i> Dockerfile Build Support
        </div>
        <div class="co-panel-body">
          <div class="description">
            If enabled, users can submit Dockerfiles to be built and pushed by the Enterprise Registry.
          </div>

          <div class="co-checkbox">
            <input id="ftbs" type="checkbox" ng-model="config.FEATURE_BUILD_SUPPORT">
            <label for="ftbs">Enable Dockerfile Build</label>
          </div>

          <div ng-if="config.FEATURE_BUILD_SUPPORT" style="margin-top: 10px">
            <strong>Note: Build workers are required for this feature.</strong>
            See <a href="https://coreos.com/docs/enterprise-registry/build-support/" target="_blank">Adding Build Workers</a> for instructions on how to set up build workers.
          </div>
        </div>
      </div> <!-- /Build Support -->

      <!-- Github Trigger -->
      <div class="co-panel" ng-if="config.FEATURE_BUILD_SUPPORT" style="margin-top: 20px;">
        <div class="co-panel-heading">
          <i class="fa fa-github"></i> Github (Enterprise) Build Triggers
        </div>
        <div class="co-panel-body">
          <div class="description">
            <p>
              If enabled, users can set up Github or Github Enterprise triggers to invoke Registry builds.
            </p>
            <p>
              <strong>Note:</strong> A registered Github (Enterprise) OAuth application (<strong>separate from Github Authentication</strong>) is required.
              View instructions on how to
              <a href="https://coreos.com/docs/enterprise-registry/github-auth/" target="_blank">
                Create an OAuth Application in GitHub
              </a>.
            </p>
          </div>

          <div class="co-checkbox">
            <input id="ftgb" type="checkbox" ng-model="config.FEATURE_GITHUB_BUILD">
            <label for="ftgb">Enable Github Triggers</label>
          </div>

          <table class="config-table" ng-if="config.FEATURE_GITHUB_BUILD">
            <tr>
              <td>Github:</td>
              <td>
                <select ng-model="mapped.GITHUB_TRIGGER_KIND">
                  <option value="hosted">Github.com</option>
                  <option value="enterprise">Github Enterprise</option>
                </select>
              </td>
            </tr>
            <tr ng-if="mapped.GITHUB_TRIGGER_KIND == 'enterprise'">
              <td>Github Endpoint:</td>
              <td>
                <span class="config-string-field"
                      binding="config.GITHUB_TRIGGER_CONFIG.GITHUB_ENDPOINT"
                      placeholder="https://my.githubserver"
                      pattern="{{ GITHUB_REGEX }}">
                </span>
                <div class="help-text">
                  The Github Enterprise endpoint. Must start with http:// or https://.
                </div>
              </td>
            </tr>
            <tr>
              <td>OAuth Client ID:</td>
              <td>
                <span class="config-string-field" binding="config.GITHUB_TRIGGER_CONFIG.CLIENT_ID">
                </span>
              </td>
            </tr>
            <tr>
              <td>OAuth Client Secret:</td>
              <td>
                <span class="config-string-field" binding="config.GITHUB_TRIGGER_CONFIG.CLIENT_SECRET">
                </span>
              </td>
            </tr>
          </table>
        </div>
      </div> <!-- /Github Trigger -->
    </form>

    <!-- Save Bar -->
    <div class="cor-floating-bottom-bar">
      <button class="btn" ng-class="mapped.$hasChanges ? 'btn-primary' : 'btn-success'"
              ng-click="checkValidateAndSave()" ng-show="configform.$valid">
        <i class="fa fa-lg" ng-class="mapped.$hasChanges ? 'fa-dot-circle-o' : 'fa-check-circle'"></i>
        <span ng-if="mapped.$hasChanges">Save Configuration Changes</span>
        <span ng-if="!mapped.$hasChanges">Configuration Saved</span>
      </button>
      <button class="btn btn-warning" ng-click="checkValidateAndSave()" ng-show="!configform.$valid">
        <i class="fa fa-lg fa-sort"></i>
        {{ configform.$error['required'].length }} configuration field<span ng-show="configform.$error['required'].length != 1">s</span> remaining
      </button>
    </div>

    <!-- Modal message dialog -->
    <div class="modal co-dialog fade initial-setup-modal" id="validateAndSaveModal">
      <div class="modal-dialog">
        <div class="modal-content">
          <div class="modal-header">
            <h4 class="modal-title">
              Checking your settings
            </h4>
          </div>
          <div class="modal-body">
            <div class="service-verification">
              <div class="service-verification-row" ng-repeat="serviceInfo in validating">
                <span class="quay-spinner" ng-show="serviceInfo.status == 'validating'"></span>
                <i class="fa fa-lg fa-check-circle" ng-show="serviceInfo.status == 'success'"></i>
                <i class="fa fa-lg fa-warning" ng-show="serviceInfo.status == 'error'"></i>
                <span class="service-title">{{ serviceInfo.service.title }}</span>

                <div class="service-verification-error" ng-show="serviceInfo.status == 'error'">{{ serviceInfo.errorMessage }}</div>
              </div>
            </div>
          </div>

          <!-- Footer: Saving configuration -->
          <div class="modal-footer working" ng-show="savingConfiguration">
            <span class="cor-loader-inline"></span> Saving Configuration...
          </div>

          <!-- Footer: Validating -->
          <div class="modal-footer working"
               ng-show="!savingConfiguration && validationStatus(validating) == 'validating'">
            <span class="cor-loader-inline"></span> Validating settings...

            <button class="btn btn-default" ng-click="cancelValidation()">
              Stop Validating
            </button>
          </div>

          <!-- Footer: Valid Config -->
          <div class="modal-footer"
               ng-show="!savingConfiguration && validationStatus(validating) == 'success'">
            <span class="left-align">
              <i class="fa fa-check"></i>
              Configuration Validated
            </span>

            <button class="btn btn-primary"
                    ng-click="saveConfiguration()"
                    ng-disabled="savingConfiguration">
              <i class="fa fa-upload" style="margin-right: 10px;"></i>Save Configuration
            </button>
          </div>

          <!-- Footer: Invalid Config -->
          <div class="modal-footer"
               ng-show="!savingConfiguration && validationStatus(validating) == 'failed'">
            <span class="left-align">
              <i class="fa fa-warning"></i>
              Problem Detected
            </span>

            <button class="btn btn-default" data-dismiss="modal">
              Continue Editing
            </button>
          </div>

        </div><!-- /.modal-content -->
      </div><!-- /.modal-dialog -->
    </div><!-- /.modal -->

  </div>
</div>
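
The storage panel in the setup tool above implies the shape of DISTRIBUTED_STORAGE_CONFIG: local[0] names the storage engine and local[1] holds that engine's parameters, keyed by field name. A hedged sketch of the resulting structure; the S3 parameter names below are illustrative assumptions, not taken from this diff:

// Illustrative only: engine name in local[0], engine parameters in local[1].
config.DISTRIBUTED_STORAGE_CONFIG = {
  'local': [
    'S3Storage',
    {
      // Parameter names here are assumed for illustration; the real names
      // come from STORAGE_CONFIG_FIELDS, which is defined elsewhere.
      's3_bucket': 'my-registry-bucket',
      's3_access_key': 'AKIA...',
      's3_secret_key': '...'
    }
  ]
};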
static/directives/config/config-string-field.html (new file, 10 lines)
@@ -0,0 +1,10 @@
<div class="config-string-field-element">
  <form name="fieldform" novalidate>
    <input type="text" class="form-control" placeholder="{{ placeholder || '' }}"
           ng-model="binding" ng-trim="false" ng-minlength="1"
           ng-pattern="getRegexp(pattern)" required>
    <div class="alert alert-danger" ng-show="errorMessage">
      {{ errorMessage }}
    </div>
  </form>
</div>
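
The string-field template expects binding, placeholder, pattern, and a getRegexp() helper on its scope. A minimal sketch of a directive that could back it, registered on the core-config-setup module this commit adds to the app's dependencies; the scope wiring is an assumption inferred from how the field is used in config-setup-tool.html, not the shipped code:

angular.module('core-config-setup').directive('configStringField', function() {
  return {
    templateUrl: '/static/directives/config/config-string-field.html',
    restrict: 'C',
    scope: {
      'binding': '=binding',
      'placeholder': '@placeholder',
      'pattern': '@pattern'
    },
    controller: function($scope) {
      // Turn the pattern attribute into a RegExp for ng-pattern; an empty
      // pattern matches anything.
      $scope.getRegexp = function(pattern) {
        return new RegExp(pattern || '.*');
      };
    }
  };
});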
static/directives/config/config-variable-field.html (new file, 10 lines)
@@ -0,0 +1,10 @@
<div class="config-variable-field-element">
  <div class="btn-group">
    <button type="button" class="btn btn-default"
            ng-repeat="section in sections"
            ng-click="setSection(section)"
            ng-class="section == currentSection ? 'active' : ''">{{ section.title }}</button>
  </div>

  <span ng-transclude></span>
</div>

static/directives/cor-floating-bottom-bar.html (new file, 3 lines)
@@ -0,0 +1,3 @@
<div class="co-floating-bottom-bar">
  <span ng-transclude/>
</div>

static/directives/cor-loader-inline.html (new file, 5 lines)
@@ -0,0 +1,5 @@
<div class="co-m-inline-loader co-an-fade-in-out">
  <div class="co-m-loader-dot__one"></div>
  <div class="co-m-loader-dot__two"></div>
  <div class="co-m-loader-dot__three"></div>
</div>

static/directives/cor-loader.html (new file, 5 lines)
@@ -0,0 +1,5 @@
<div class="co-m-loader co-an-fade-in-out">
  <div class="co-m-loader-dot__one"></div>
  <div class="co-m-loader-dot__two"></div>
  <div class="co-m-loader-dot__three"></div>
</div>

static/directives/cor-log-box.html (new file, 11 lines)
@@ -0,0 +1,11 @@
<div class="co-log-box-element">
  <div id="co-log-viewer" class="co-log-viewer" ng-if="logs">
    <div class="quay-spinner" ng-if="!logs"></div>
    <div class="co-log-container">
      <div id="co-log-content" class="co-log-content">{{ logs }}</div>
    </div>
  </div>
  <div class="co-log-viewer-new-logs" ng-show="hasNewLogs" ng-click="moveToBottom()">
    New Logs <i class="fa fa-lg fa-arrow-circle-down"></i>
  </div>
</div>

static/directives/cor-option.html (new file, 3 lines)
@@ -0,0 +1,3 @@
<li>
  <a href="javascript:void(0)" ng-click="optionClick()" ng-transclude></a>
</li>

static/directives/cor-options-menu.html (new file, 6 lines)
@@ -0,0 +1,6 @@
<span class="co-options-menu">
  <div class="dropdown" style="text-align: left;">
    <i class="fa fa-gear fa-lg dropdown-toggle" data-toggle="dropdown" data-title="Options" bs-tooltip></i>
    <ul class="dropdown-menu pull-right" ng-transclude></ul>
  </div>
</span>

static/directives/cor-step-bar.html (new file, 3 lines)
@@ -0,0 +1,3 @@
<div class="co-step-bar">
  <span class="transclude" ng-transclude/>
</div>

static/directives/cor-step.html (new file, 6 lines)
@@ -0,0 +1,6 @@
<span ng-class="text ? 'co-step-element text' : 'co-step-element icon'">
  <span data-title="{{ title }}" bs-tooltip>
    <span class="text" ng-if="text">{{ text }}</span>
    <i class="fa fa-lg" ng-if="icon" ng-class="'fa-' + icon"></i>
  </span>
</span>
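
cor-step renders either a text bubble or an icon, matching the .co-step-element.text and .co-step-element.icon rules earlier in this diff. A hedged sketch of the directive behind this template, registered on the core-ui module the commit adds; the attribute names follow the template, the rest is assumed:

angular.module('core-ui').directive('corStep', function() {
  return {
    templateUrl: '/static/directives/cor-step.html',
    restrict: 'C',
    replace: true,
    // A step supplies either 'text' (a bubble) or 'icon' (a FontAwesome name).
    scope: {
      'title': '@title',
      'text': '@text',
      'icon': '@icon'
    }
  };
});

Inside a dialog header this pairs with the .co-dialog .modal-header .cor-step-bar rule above, which floats the step bar to the right.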
static/directives/cor-tab-content.html (new file, 1 line)
@@ -0,0 +1 @@
<div class="co-tab-content tab-content col-md-11" ng-transclude></div>

static/directives/cor-tab-panel.html (new file, 3 lines)
@@ -0,0 +1,3 @@
<div class="co-main-content-panel co-tab-panel co-fx-box-shadow-heavy">
  <div class="container co-tab-container" ng-transclude></div>
</div>

static/directives/cor-tab.html (new file, 11 lines)
@@ -0,0 +1,11 @@
<li ng-class="tabActive == 'true' ? 'active' : ''">
  <a href="javascript:void(0)" data-title="{{ tabTitle }}"
     data-toggle="tab"
     data-target="{{ tabTarget }}"
     data-placement="right"
     data-container="body"
     ng-click="tabInit()"
     bs-tooltip>
    <span ng-transclude/>
  </a>
</li>

static/directives/cor-tabs.html (new file, 1 line)
@@ -0,0 +1 @@
<ul class="co-tabs col-md-1" ng-transclude></ul>

static/directives/cor-title-content.html (new file, 3 lines)
@@ -0,0 +1,3 @@
<div class="col-lg-6 col-md-6 col-sm-6 col-xs-12">
  <h2 class="co-nav-title-content co-fx-text-shadow" ng-transclude></h2>
</div>

static/directives/cor-title-link.html (new file, 1 line)
@@ -0,0 +1 @@
<div class="col-lg-3 col-md-3 col-sm-3 col-xs-6" ng-transclude></div>

static/directives/cor-title.html (new file, 2 lines)
@@ -0,0 +1,2 @@
<div class="co-nav-title" ng-transclude></div>

@@ -4,7 +4,7 @@
   ≡
 </button>
 <a class="navbar-brand" href="/" target="{{ appLinkTarget() }}">
-  <span id="quay-logo" style="background-image: url('{{ getEnterpriseLogo() }}')"></span>
+  <span id="quay-logo" ng-style="{'background-image': 'url(' + getEnterpriseLogo() + ')'}"></span>
 </a>
 </div>

static/img/redis-small.png (new binary file, 2.2 KiB; binary file not shown)
static/js/app.js (152 lines changed)
@@ -126,7 +126,7 @@ function getMarkedDown(string) {
 
 quayDependencies = ['ngRoute', 'chieffancypants.loadingBar', 'angular-tour', 'restangular', 'angularMoment',
                     'mgcrea.ngStrap', 'ngCookies', 'ngSanitize', 'angular-md5', 'pasvaz.bindonce', 'ansiToHtml',
-                    'ngAnimate'];
+                    'ngAnimate', 'core-ui', 'core-config-setup'];
 
 if (window.__config && window.__config.MIXPANEL_KEY) {
   quayDependencies.push('angulartics');

@@ -977,7 +977,7 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
   return resource;
 };
 
-var buildUrl = function(path, parameters) {
+var buildUrl = function(path, parameters, opt_forcessl) {
   // We already have /api/v1/ on the URLs, so remove them from the paths.
   path = path.substr('/api/v1/'.length, path.length);
 

@@ -1017,6 +1017,11 @@
     }
   }
 
+  // If we are forcing SSL, return an absolute URL with an SSL prefix.
+  if (opt_forcessl) {
+    path = 'https://' + window.location.host + '/api/v1/' + path;
+  }
+
   return url;
 };
 
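Taken together with the hunk at -1124 below, every generated API method gains a fourth opt_forcessl argument that flows into buildUrl(). A hedged illustration of a caller; the method name appears later in this diff, and the surrounding service wiring is assumed:

// Illustrative call: request the status endpoint over https regardless of
// the page's own scheme.
ApiService.scRegistryStatus(null, null, /* background */ true, /* force ssl */ true);
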
@@ -1047,12 +1052,35 @@
   }
 };
 
+var freshLoginInProgress = [];
+var reject = function(msg) {
+  for (var i = 0; i < freshLoginInProgress.length; ++i) {
+    freshLoginInProgress[i].deferred.reject({'data': {'message': msg}});
+  }
+  freshLoginInProgress = [];
+};
+
+var retry = function() {
+  for (var i = 0; i < freshLoginInProgress.length; ++i) {
+    freshLoginInProgress[i].retry();
+  }
+  freshLoginInProgress = [];
+};
+
 var freshLoginFailCheck = function(opName, opArgs) {
   return function(resp) {
     var deferred = $q.defer();
 
     // If the error is a fresh login required, show the dialog.
     if (resp.status == 401 && resp.data['error_type'] == 'fresh_login_required') {
+      var retryOperation = function() {
+        apiService[opName].apply(apiService, opArgs).then(function(resp) {
+          deferred.resolve(resp);
+        }, function(resp) {
+          deferred.reject(resp);
+        });
+      };
+
       var verifyNow = function() {
         var info = {
           'password': $('#freshPassword').val()

@@ -1062,19 +1090,27 @@
 
       // Conduct the sign in of the user.
       apiService.verifyUser(info).then(function() {
-        // On success, retry the operation. if it succeeds, then resolve the
+        // On success, retry the operations. if it succeeds, then resolve the
         // deferred promise with the result. Otherwise, reject the same.
-        apiService[opName].apply(apiService, opArgs).then(function(resp) {
-          deferred.resolve(resp);
-        }, function(resp) {
-          deferred.reject(resp);
-        });
+        retry();
       }, function(resp) {
         // Reject with the sign in error.
-        deferred.reject({'data': {'message': 'Invalid verification credentials'}});
+        reject('Invalid verification credentials');
       });
     };
 
+    // Add the retry call to the in progress list. If there is more than a single
+    // in progress call, we skip showing the dialog (since it has already been
+    // shown).
+    freshLoginInProgress.push({
+      'deferred': deferred,
+      'retry': retryOperation
+    });
+
+    if (freshLoginInProgress.length > 1) {
+      return deferred.promise;
+    }
+
     var box = bootbox.dialog({
       "message": 'It has been more than a few minutes since you last logged in, ' +
                  'so please verify your password to perform this sensitive operation:' +

@@ -1092,7 +1128,7 @@
         "label": "Cancel",
         "className": "btn-default",
         "callback": function() {
-          deferred.reject({'data': {'message': 'Verification canceled'}});
+          reject('Verification canceled');
         }
       }
     }
@@ -1124,8 +1160,8 @@
     var path = resource['path'];
 
     // Add the operation itself.
-    apiService[operationName] = function(opt_options, opt_parameters, opt_background) {
-      var one = Restangular.one(buildUrl(path, opt_parameters));
+    apiService[operationName] = function(opt_options, opt_parameters, opt_background, opt_forcessl) {
+      var one = Restangular.one(buildUrl(path, opt_parameters, opt_forcessl));
       if (opt_background) {
         one.withHttpConfig({
           'ignoreLoadingBar': true

@@ -1244,6 +1280,39 @@
   return cookieService;
 }]);
 
+$provide.factory('ContainerService', ['ApiService', '$timeout',
+  function(ApiService, $timeout) {
+    var containerService = {};
+    containerService.restartContainer = function(callback) {
+      ApiService.scShutdownContainer(null, null).then(function(resp) {
+        $timeout(callback, 2000);
+      }, ApiService.errorDisplay('Cannot restart container. Please report this to support.'));
+    };
+
+    containerService.scheduleStatusCheck = function(callback) {
+      $timeout(function() {
+        containerService.checkStatus(callback);
+      }, 2000);
+    };
+
+    containerService.checkStatus = function(callback, force_ssl) {
+      var errorHandler = function(resp) {
+        if (resp.status == 404 || resp.status == 502) {
+          // Container has not yet come back up, so we schedule another check.
+          containerService.scheduleStatusCheck(callback);
+          return;
+        }
+
+        return ApiService.errorDisplay('Cannot load status. Please report this to support')(resp);
+      };
+
+      ApiService.scRegistryStatus(null, null)
+        .then(callback, errorHandler, /* background */true, /* force ssl*/force_ssl);
+    };
+
+    return containerService;
+  }]);
+
 $provide.factory('UserService', ['ApiService', 'CookieService', '$rootScope', 'Config',
   function(ApiService, CookieService, $rootScope, Config) {
     var userResponse = {
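A hedged usage sketch for the ContainerService added above: restart the container, then poll until the registry answers again (checkStatus treats 404/502 responses as "still restarting" and re-schedules itself). The controller name is illustrative, not part of this commit:

quayApp.controller('RestartExampleCtrl', ['$scope', 'ContainerService',
  function($scope, ContainerService) {
    $scope.restartRegistry = function() {
      ContainerService.restartContainer(function() {
        ContainerService.checkStatus(function(resp) {
          // The container answered; resp carries the registry status.
          $scope.status = resp;
        }, /* force_ssl */ false);
      });
    };
  }]);
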
@@ -2225,8 +2294,10 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
         templateUrl: '/static/partials/repo-list.html', controller: RepoListCtrl, reloadOnSearch: false}).
       when('/user/', {title: 'Account Settings', description:'Account settings for ' + title, templateUrl: '/static/partials/user-admin.html',
         reloadOnSearch: false, controller: UserAdminCtrl}).
-      when('/superuser/', {title: 'Superuser Admin Panel', description:'Admin panel for ' + title, templateUrl: '/static/partials/super-user.html',
-        reloadOnSearch: false, controller: SuperUserAdminCtrl}).
+      when('/superuser/', {title: 'Enterprise Registry Management', description:'Admin panel for ' + title, templateUrl: '/static/partials/super-user.html',
+        reloadOnSearch: false, controller: SuperUserAdminCtrl, newLayout: true}).
+      when('/setup/', {title: 'Enterprise Registry Setup', description:'Setup for ' + title, templateUrl: '/static/partials/setup.html',
+        reloadOnSearch: false, controller: SetupCtrl, newLayout: true}).
       when('/guide/', {title: 'Guide', description:'Guide to using private docker repositories on ' + title,
         templateUrl: '/static/partials/guide.html',
         controller: GuideCtrl}).
@@ -3908,9 +3979,11 @@ quayApp.directive('registryName', function () {
     replace: false,
     transclude: true,
     restrict: 'C',
-    scope: {},
+    scope: {
+      'isShort': '=isShort'
+    },
     controller: function($scope, $element, Config) {
-      $scope.name = Config.REGISTRY_TITLE;
+      $scope.name = $scope.isShort ? Config.REGISTRY_TITLE_SHORT : Config.REGISTRY_TITLE;
     }
   };
   return directiveDefinitionObject;
@@ -5751,9 +5824,15 @@ quayApp.directive('buildMessage', function () {
       case 'building':
         return 'Building image from Dockerfile';
 
+      case 'checking-cache':
+        return 'Looking up cached images';
+
       case 'priming-cache':
         return 'Priming cache for build';
 
+      case 'build-scheduled':
+        return 'Preparing build node';
+
       case 'pushing':
         return 'Pushing image built from Dockerfile';
 
@@ -5807,6 +5886,7 @@ quayApp.directive('buildProgress', function () {
         break;
 
       case 'initializing':
+      case 'checking-cache':
       case 'starting':
       case 'waiting':
       case 'cannot_load':
@@ -6899,6 +6979,7 @@ quayApp.directive('ngBlur', function() {
   };
 });
 
+
 quayApp.directive("filePresent", [function () {
   return {
     restrict: 'A',
@@ -6972,7 +7053,6 @@ quayApp.run(['$location', '$rootScope', 'Restangular', 'UserService', 'PlanServi
 
   var changeTab = function(activeTab, opt_timeout) {
     var checkCount = 0;
-
     $timeout(function() {
       if (checkCount > 5) { return; }
       checkCount++;
@@ -7036,6 +7116,8 @@ quayApp.run(['$location', '$rootScope', 'Restangular', 'UserService', 'PlanServi
       $rootScope.pageClass = current.$$route.pageClass;
     }
 
+    $rootScope.newLayout = !!current.$$route.newLayout;
+
     if (current.$$route.description) {
       $rootScope.description = current.$$route.description;
     } else {
@@ -7051,26 +7133,28 @@ quayApp.run(['$location', '$rootScope', 'Restangular', 'UserService', 'PlanServi
 
     // Setup deep linking of tabs. This will change the search field of the URL whenever a tab
     // is changed in the UI.
-    $('a[data-toggle="tab"]').on('shown.bs.tab', function (e) {
-      var tabName = e.target.getAttribute('data-target').substr(1);
-      $rootScope.$apply(function() {
-        var isDefaultTab = $('a[data-toggle="tab"]')[0] == e.target;
-        var newSearch = $.extend($location.search(), {});
-        if (isDefaultTab) {
-          delete newSearch['tab'];
-        } else {
-          newSearch['tab'] = tabName;
-        }
-
-        $location.search(newSearch);
-      });
-
-      e.preventDefault();
-    });
-
-    if (activeTab) {
-      changeTab(activeTab);
-    }
+    $timeout(function() {
+      $('a[data-toggle="tab"]').on('shown.bs.tab', function (e) {
+        var tabName = e.target.getAttribute('data-target').substr(1);
+        $rootScope.$apply(function() {
+          var isDefaultTab = $('a[data-toggle="tab"]')[0] == e.target;
+          var newSearch = $.extend($location.search(), {});
+          if (isDefaultTab) {
+            delete newSearch['tab'];
+          } else {
+            newSearch['tab'] = tabName;
+          }
+
+          $location.search(newSearch);
+        });
+
+        e.preventDefault();
+      });
+
+      if (activeTab) {
+        changeTab(activeTab);
+      }
+    }, 400); // 400ms to make sure angular has rendered.
   });
 
   var initallyChecked = false;
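The ContainerService factory added in the first hunk above is consumed by the new SetupCtrl and SuperUserAdminCtrl later in this diff. A minimal usage sketch (the controller name is hypothetical, and the wiring is an assumption based on those call sites):

    // Hypothetical consumer of the new ContainerService factory. restartContainer
    // fires the shutdown API call and invokes the callback ~2s later; checkStatus
    // polls scRegistryStatus, rescheduling itself every 2s while the container
    // answers 404/502 (i.e. while it is still coming back up).
    quayApp.controller('RestartExampleCtrl', ['$scope', 'ContainerService',
      function($scope, ContainerService) {
        $scope.restart = function() {
          ContainerService.restartContainer(function() {
            ContainerService.checkStatus(function(resp) {
              $scope.status = resp['status'];
            }, /* force_ssl */ false);
          });
        };
      }]);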
@@ -1072,257 +1072,6 @@ function BuildPackageCtrl($scope, Restangular, ApiService, DataFileService, $rou
   getBuildInfo();
 }
 
-function RepoBuildCtrl($scope, Restangular, ApiService, $routeParams, $rootScope, $location, $interval, $sanitize,
-                       ansi2html, AngularViewArray, AngularPollChannel) {
-  var namespace = $routeParams.namespace;
-  var name = $routeParams.name;
-
-  // Watch for changes to the current parameter.
-  $scope.$on('$routeUpdate', function(){
-    if ($location.search().current) {
-      $scope.setCurrentBuild($location.search().current, false);
-    }
-  });
-
-  $scope.builds = null;
-  $scope.pollChannel = null;
-  $scope.buildDialogShowCounter = 0;
-
-  $scope.showNewBuildDialog = function() {
-    $scope.buildDialogShowCounter++;
-  };
-
-  $scope.handleBuildStarted = function(newBuild) {
-    if (!$scope.builds) { return; }
-
-    $scope.builds.unshift(newBuild);
-    $scope.setCurrentBuild(newBuild['id'], true);
-  };
-
-  $scope.adjustLogHeight = function() {
-    var triggerOffset = 0;
-    if ($scope.currentBuild && $scope.currentBuild.trigger) {
-      triggerOffset = 85;
-    }
-    $('.build-logs').height($(window).height() - 415 - triggerOffset);
-  };
-
-  $scope.askRestartBuild = function(build) {
-    $('#confirmRestartBuildModal').modal({});
-  };
-
-  $scope.restartBuild = function(build) {
-    $('#confirmRestartBuildModal').modal('hide');
-
-    var subdirectory = '';
-    if (build['job_config']) {
-      subdirectory = build['job_config']['build_subdir'] || '';
-    }
-
-    var data = {
-      'file_id': build['resource_key'],
-      'subdirectory': subdirectory,
-      'docker_tags': build['job_config']['docker_tags']
-    };
-
-    if (build['pull_robot']) {
-      data['pull_robot'] = build['pull_robot']['name'];
-    }
-
-    var params = {
-      'repository': namespace + '/' + name
-    };
-
-    ApiService.requestRepoBuild(data, params).then(function(newBuild) {
-      if (!$scope.builds) { return; }
-
-      $scope.builds.unshift(newBuild);
-      $scope.setCurrentBuild(newBuild['id'], true);
-    });
-  };
-
-  $scope.hasLogs = function(container) {
-    return container.logs.hasEntries;
-  };
-
-  $scope.setCurrentBuild = function(buildId, opt_updateURL) {
-    if (!$scope.builds) { return; }
-
-    // Find the build.
-    for (var i = 0; i < $scope.builds.length; ++i) {
-      if ($scope.builds[i].id == buildId) {
-        $scope.setCurrentBuildInternal(i, $scope.builds[i], opt_updateURL);
-        return;
-      }
-    }
-  };
-
-  $scope.processANSI = function(message, container) {
-    var filter = container.logs._filter = (container.logs._filter || ansi2html.create());
-
-    // Note: order is important here.
-    var setup = filter.getSetupHtml();
-    var stream = filter.addInputToStream(message);
-    var teardown = filter.getTeardownHtml();
-    return setup + stream + teardown;
-  };
-
-  $scope.setCurrentBuildInternal = function(index, build, opt_updateURL) {
-    if (build == $scope.currentBuild) { return; }
-
-    $scope.logEntries = null;
-    $scope.logStartIndex = null;
-    $scope.currentParentEntry = null;
-
-    $scope.currentBuild = build;
-
-    if (opt_updateURL) {
-      if (build) {
-        $location.search('current', build.id);
-      } else {
-        $location.search('current', null);
-      }
-    }
-
-    // Timeout needed to ensure the log element has been created
-    // before its height is adjusted.
-    setTimeout(function() {
-      $scope.adjustLogHeight();
-    }, 1);
-
-    // Stop any existing polling.
-    if ($scope.pollChannel) {
-      $scope.pollChannel.stop();
-    }
-
-    // Create a new channel for polling the build status and logs.
-    var conductStatusAndLogRequest = function(callback) {
-      getBuildStatusAndLogs(build, callback);
-    };
-
-    $scope.pollChannel = AngularPollChannel.create($scope, conductStatusAndLogRequest, 5 * 1000 /* 5s */);
-    $scope.pollChannel.start();
-  };
-
-  var processLogs = function(logs, startIndex, endIndex) {
-    if (!$scope.logEntries) { $scope.logEntries = []; }
-
-    // If the start index given is less than that requested, then we've received a larger
-    // pool of logs, and we need to only consider the new ones.
-    if (startIndex < $scope.logStartIndex) {
-      logs = logs.slice($scope.logStartIndex - startIndex);
-    }
-
-    for (var i = 0; i < logs.length; ++i) {
-      var entry = logs[i];
-      var type = entry['type'] || 'entry';
-      if (type == 'command' || type == 'phase' || type == 'error') {
-        entry['logs'] = AngularViewArray.create();
-        entry['index'] = $scope.logStartIndex + i;
-
-        $scope.logEntries.push(entry);
-        $scope.currentParentEntry = entry;
-      } else if ($scope.currentParentEntry) {
-        $scope.currentParentEntry['logs'].push(entry);
-      }
-    }
-
-    return endIndex;
-  };
-
-  var getBuildStatusAndLogs = function(build, callback) {
-    var params = {
-      'repository': namespace + '/' + name,
-      'build_uuid': build.id
-    };
-
-    ApiService.getRepoBuildStatus(null, params, true).then(function(resp) {
-      if (build != $scope.currentBuild) { callback(false); return; }
-
-      // Note: We use extend here rather than replacing as Angular is depending on the
-      // root build object to remain the same object.
-      var matchingBuilds = $.grep($scope.builds, function(elem) {
-        return elem['id'] == resp['id']
-      });
-
-      var currentBuild = matchingBuilds.length > 0 ? matchingBuilds[0] : null;
-      if (currentBuild) {
-        currentBuild = $.extend(true, currentBuild, resp);
-      } else {
-        currentBuild = resp;
-        $scope.builds.push(currentBuild);
-      }
-
-      // Load the updated logs for the build.
-      var options = {
-        'start': $scope.logStartIndex
-      };
-
-      ApiService.getRepoBuildLogsAsResource(params, true).withOptions(options).get(function(resp) {
-        if (build != $scope.currentBuild) { callback(false); return; }
-
-        // Process the logs we've received.
-        $scope.logStartIndex = processLogs(resp['logs'], resp['start'], resp['total']);
-
-        // If the build status is an error, open the last two log entries.
-        if (currentBuild['phase'] == 'error' && $scope.logEntries.length > 1) {
-          var openLogEntries = function(entry) {
-            if (entry.logs) {
-              entry.logs.setVisible(true);
-            }
-          };
-
-          openLogEntries($scope.logEntries[$scope.logEntries.length - 2]);
-          openLogEntries($scope.logEntries[$scope.logEntries.length - 1]);
-        }
-
-        // If the build phase is an error or a complete, then we mark the channel
-        // as closed.
-        callback(currentBuild['phase'] != 'error' && currentBuild['phase'] != 'complete');
-      }, function() {
-        callback(false);
-      });
-    }, function() {
-      callback(false);
-    });
-  };
-
-  var fetchRepository = function() {
-    var params = {'repository': namespace + '/' + name};
-    $rootScope.title = 'Loading Repository...';
-    $scope.repository = ApiService.getRepoAsResource(params).get(function(repo) {
-      if (!repo.can_write) {
-        $rootScope.title = 'Unknown builds';
-        $scope.accessDenied = true;
-        return;
-      }
-
-      $rootScope.title = 'Repository Builds';
-      $scope.repo = repo;
-
-      getBuildInfo();
-    });
-  };
-
-  var getBuildInfo = function(repo) {
-    var params = {
-      'repository': namespace + '/' + name
-    };
-
-    ApiService.getRepoBuilds(null, params).then(function(resp) {
-      $scope.builds = resp.builds;
-
-      if ($location.search().current) {
-        $scope.setCurrentBuild($location.search().current, false);
-      } else if ($scope.builds.length > 0) {
-        $scope.setCurrentBuild($scope.builds[0].id, true);
-      }
-    });
-  };
-
-  fetchRepository();
-}
-
 function RepoAdminCtrl($scope, Restangular, ApiService, KeyService, TriggerService, $routeParams,
                        $rootScope, $location, UserService, Config, Features, ExternalNotificationData) {
 
@@ -2809,138 +2558,6 @@ function ManageApplicationCtrl($scope, $routeParams, $rootScope, $location, $tim
   loadApplicationInfo();
 }
 
-function SuperUserAdminCtrl($scope, ApiService, Features, UserService) {
-  if (!Features.SUPER_USERS) {
-    return;
-  }
-
-  // Monitor any user changes and place the current user into the scope.
-  UserService.updateUserIn($scope);
-
-  $scope.logsCounter = 0;
-  $scope.newUser = {};
-  $scope.createdUsers = [];
-  $scope.systemUsage = null;
-
-  $scope.getUsage = function() {
-    if ($scope.systemUsage) { return; }
-
-    ApiService.getSystemUsage().then(function(resp) {
-      $scope.systemUsage = resp;
-    }, ApiService.errorDisplay('Cannot load system usage. Please contact support.'))
-  }
-
-  $scope.loadLogs = function() {
-    $scope.logsCounter++;
-  };
-
-  $scope.loadUsers = function() {
-    if ($scope.users) {
-      return;
-    }
-
-    $scope.loadUsersInternal();
-  };
-
-  $scope.loadUsersInternal = function() {
-    ApiService.listAllUsers().then(function(resp) {
-      $scope.users = resp['users'];
-      $scope.showInterface = true;
-    }, function(resp) {
-      $scope.users = [];
-      $scope.usersError = resp['data']['message'] || resp['data']['error_description'];
-    });
-  };
-
-  $scope.showChangePassword = function(user) {
-    $scope.userToChange = user;
-    $('#changePasswordModal').modal({});
-  };
-
-  $scope.createUser = function() {
-    $scope.creatingUser = true;
-    var errorHandler = ApiService.errorDisplay('Cannot create user', function() {
-      $scope.creatingUser = false;
-    });
-
-    ApiService.createInstallUser($scope.newUser, null).then(function(resp) {
-      $scope.creatingUser = false;
-      $scope.newUser = {};
-      $scope.createdUsers.push(resp);
-    }, errorHandler)
-  };
-
-  $scope.showDeleteUser = function(user) {
-    if (user.username == UserService.currentUser().username) {
-      bootbox.dialog({
-        "message": 'Cannot delete yourself!',
-        "title": "Cannot delete user",
-        "buttons": {
-          "close": {
-            "label": "Close",
-            "className": "btn-primary"
-          }
-        }
-      });
-      return;
-    }
-
-    $scope.userToDelete = user;
-    $('#confirmDeleteUserModal').modal({});
-  };
-
-  $scope.changeUserPassword = function(user) {
-    $('#changePasswordModal').modal('hide');
-
-    var params = {
-      'username': user.username
-    };
-
-    var data = {
-      'password': user.password
-    };
-
-    ApiService.changeInstallUser(data, params).then(function(resp) {
-      $scope.loadUsersInternal();
-    }, ApiService.errorDisplay('Could not change user'));
-  };
-
-  $scope.deleteUser = function(user) {
-    $('#confirmDeleteUserModal').modal('hide');
-
-    var params = {
-      'username': user.username
-    };
-
-    ApiService.deleteInstallUser(null, params).then(function(resp) {
-      $scope.loadUsersInternal();
-    }, ApiService.errorDisplay('Cannot delete user'));
-  };
-
-  $scope.sendRecoveryEmail = function(user) {
-    var params = {
-      'username': user.username
-    };
-
-    ApiService.sendInstallUserRecoveryEmail(null, params).then(function(resp) {
-      bootbox.dialog({
-        "message": "A recovery email has been sent to " + resp['email'],
-        "title": "Recovery email sent",
-        "buttons": {
-          "close": {
-            "label": "Close",
-            "className": "btn-primary"
-          }
-        }
-      });
-
-    }, ApiService.errorDisplay('Cannot send recovery email'))
-  };
-
-  $scope.loadUsers();
-}
-
 function TourCtrl($scope, $location) {
   $scope.kind = $location.path().substring('/tour/'.length);
 }
static/js/controllers/repo-build.js (new file, 272 additions)
@@ -0,0 +1,272 @@
function RepoBuildCtrl($scope, Restangular, ApiService, $routeParams, $rootScope, $location, $interval, $sanitize,
                       ansi2html, AngularViewArray, AngularPollChannel) {
  var namespace = $routeParams.namespace;
  var name = $routeParams.name;

  // Watch for changes to the current parameter.
  $scope.$on('$routeUpdate', function(){
    if ($location.search().current) {
      $scope.setCurrentBuild($location.search().current, false);
    }
  });

  $scope.builds = null;
  $scope.pollChannel = null;
  $scope.buildDialogShowCounter = 0;

  $scope.showNewBuildDialog = function() {
    $scope.buildDialogShowCounter++;
  };

  $scope.handleBuildStarted = function(newBuild) {
    if (!$scope.builds) { return; }

    $scope.builds.unshift(newBuild);
    $scope.setCurrentBuild(newBuild['id'], true);
  };

  $scope.adjustLogHeight = function() {
    var triggerOffset = 0;
    if ($scope.currentBuild && $scope.currentBuild.trigger) {
      triggerOffset = 85;
    }
    $('.build-logs').height($(window).height() - 415 - triggerOffset);
  };

  $scope.askRestartBuild = function(build) {
    $('#confirmRestartBuildModal').modal({});
  };

  $scope.askCancelBuild = function(build) {
    bootbox.confirm('Are you sure you want to cancel this build?', function(r) {
      if (r) {
        var params = {
          'repository': namespace + '/' + name,
          'build_uuid': build.id
        };

        ApiService.cancelRepoBuild(null, params).then(function() {
          if (!$scope.builds) { return; }
          $scope.builds.splice($.inArray(build, $scope.builds), 1);

          if ($scope.builds.length) {
            $scope.currentBuild = $scope.builds[0];
          } else {
            $scope.currentBuild = null;
          }
        }, ApiService.errorDisplay('Cannot cancel build'));
      }
    });
  };

  $scope.restartBuild = function(build) {
    $('#confirmRestartBuildModal').modal('hide');

    var subdirectory = '';
    if (build['job_config']) {
      subdirectory = build['job_config']['build_subdir'] || '';
    }

    var data = {
      'file_id': build['resource_key'],
      'subdirectory': subdirectory,
      'docker_tags': build['job_config']['docker_tags']
    };

    if (build['pull_robot']) {
      data['pull_robot'] = build['pull_robot']['name'];
    }

    var params = {
      'repository': namespace + '/' + name
    };

    ApiService.requestRepoBuild(data, params).then(function(newBuild) {
      if (!$scope.builds) { return; }

      $scope.builds.unshift(newBuild);
      $scope.setCurrentBuild(newBuild['id'], true);
    });
  };

  $scope.hasLogs = function(container) {
    return container.logs.hasEntries;
  };

  $scope.setCurrentBuild = function(buildId, opt_updateURL) {
    if (!$scope.builds) { return; }

    // Find the build.
    for (var i = 0; i < $scope.builds.length; ++i) {
      if ($scope.builds[i].id == buildId) {
        $scope.setCurrentBuildInternal(i, $scope.builds[i], opt_updateURL);
        return;
      }
    }
  };

  $scope.processANSI = function(message, container) {
    var filter = container.logs._filter = (container.logs._filter || ansi2html.create());

    // Note: order is important here.
    var setup = filter.getSetupHtml();
    var stream = filter.addInputToStream(message);
    var teardown = filter.getTeardownHtml();
    return setup + stream + teardown;
  };

  $scope.setCurrentBuildInternal = function(index, build, opt_updateURL) {
    if (build == $scope.currentBuild) { return; }

    $scope.logEntries = null;
    $scope.logStartIndex = null;
    $scope.currentParentEntry = null;

    $scope.currentBuild = build;

    if (opt_updateURL) {
      if (build) {
        $location.search('current', build.id);
      } else {
        $location.search('current', null);
      }
    }

    // Timeout needed to ensure the log element has been created
    // before its height is adjusted.
    setTimeout(function() {
      $scope.adjustLogHeight();
    }, 1);

    // Stop any existing polling.
    if ($scope.pollChannel) {
      $scope.pollChannel.stop();
    }

    // Create a new channel for polling the build status and logs.
    var conductStatusAndLogRequest = function(callback) {
      getBuildStatusAndLogs(build, callback);
    };

    $scope.pollChannel = AngularPollChannel.create($scope, conductStatusAndLogRequest, 5 * 1000 /* 5s */);
    $scope.pollChannel.start();
  };

  var processLogs = function(logs, startIndex, endIndex) {
    if (!$scope.logEntries) { $scope.logEntries = []; }

    // If the start index given is less than that requested, then we've received a larger
    // pool of logs, and we need to only consider the new ones.
    if (startIndex < $scope.logStartIndex) {
      logs = logs.slice($scope.logStartIndex - startIndex);
    }

    for (var i = 0; i < logs.length; ++i) {
      var entry = logs[i];
      var type = entry['type'] || 'entry';
      if (type == 'command' || type == 'phase' || type == 'error') {
        entry['logs'] = AngularViewArray.create();
        entry['index'] = $scope.logStartIndex + i;

        $scope.logEntries.push(entry);
        $scope.currentParentEntry = entry;
      } else if ($scope.currentParentEntry) {
        $scope.currentParentEntry['logs'].push(entry);
      }
    }

    return endIndex;
  };

  var getBuildStatusAndLogs = function(build, callback) {
    var params = {
      'repository': namespace + '/' + name,
      'build_uuid': build.id
    };

    ApiService.getRepoBuildStatus(null, params, true).then(function(resp) {
      if (build != $scope.currentBuild) { callback(false); return; }

      // Note: We use extend here rather than replacing as Angular is depending on the
      // root build object to remain the same object.
      var matchingBuilds = $.grep($scope.builds, function(elem) {
        return elem['id'] == resp['id']
      });

      var currentBuild = matchingBuilds.length > 0 ? matchingBuilds[0] : null;
      if (currentBuild) {
        currentBuild = $.extend(true, currentBuild, resp);
      } else {
        currentBuild = resp;
        $scope.builds.push(currentBuild);
      }

      // Load the updated logs for the build.
      var options = {
        'start': $scope.logStartIndex
      };

      ApiService.getRepoBuildLogsAsResource(params, true).withOptions(options).get(function(resp) {
        if (build != $scope.currentBuild) { callback(false); return; }

        // Process the logs we've received.
        $scope.logStartIndex = processLogs(resp['logs'], resp['start'], resp['total']);

        // If the build status is an error, open the last two log entries.
        if (currentBuild['phase'] == 'error' && $scope.logEntries.length > 1) {
          var openLogEntries = function(entry) {
            if (entry.logs) {
              entry.logs.setVisible(true);
            }
          };

          openLogEntries($scope.logEntries[$scope.logEntries.length - 2]);
          openLogEntries($scope.logEntries[$scope.logEntries.length - 1]);
        }

        // If the build phase is an error or a complete, then we mark the channel
        // as closed.
        callback(currentBuild['phase'] != 'error' && currentBuild['phase'] != 'complete');
      }, function() {
        callback(false);
      });
    }, function() {
      callback(false);
    });
  };

  var fetchRepository = function() {
    var params = {'repository': namespace + '/' + name};
    $rootScope.title = 'Loading Repository...';
    $scope.repository = ApiService.getRepoAsResource(params).get(function(repo) {
      if (!repo.can_write) {
        $rootScope.title = 'Unknown builds';
        $scope.accessDenied = true;
        return;
      }

      $rootScope.title = 'Repository Builds';
      $scope.repo = repo;

      getBuildInfo();
    });
  };

  var getBuildInfo = function(repo) {
    var params = {
      'repository': namespace + '/' + name
    };

    ApiService.getRepoBuilds(null, params).then(function(resp) {
      $scope.builds = resp.builds;

      if ($location.search().current) {
        $scope.setCurrentBuild($location.search().current, false);
      } else if ($scope.builds.length > 0) {
        $scope.setCurrentBuild($scope.builds[0].id, true);
      }
    });
  };

  fetchRepository();
}
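The trickiest logic in the new controller is the incremental log fetch in processLogs. A standalone worked example of the slice arithmetic, with invented values (the response shape mirrors what getBuildStatusAndLogs passes in):

    // Standalone illustration of the slice in processLogs (values are invented).
    var logStartIndex = 10;                        // entries 0..9 already processed
    var resp = {'start': 7, 'total': 15,           // server replays from index 7
                'logs': ['e7','e8','e9','e10','e11','e12','e13','e14']};
    var logs = resp['logs'];
    if (resp['start'] < logStartIndex) {
      logs = logs.slice(logStartIndex - resp['start']);  // drop the 3 duplicates
    }
    console.log(logs);               // ['e10','e11','e12','e13','e14']
    logStartIndex = resp['total'];   // 15: the 'start' option of the next poll

processLogs returns resp['total'], so each poll picks up exactly where the previous one left off and no entry is rendered twice.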
static/js/controllers/setup.js (new file, 282 additions)
@@ -0,0 +1,282 @@
function SetupCtrl($scope, $timeout, ApiService, Features, UserService, ContainerService, CoreDialog) {
  if (!Features.SUPER_USERS) {
    return;
  }

  $scope.HOSTNAME_REGEX = '^[a-zA-Z-0-9\.]+(:[0-9]+)?$';

  $scope.validateHostname = function(hostname) {
    if (hostname.indexOf('127.0.0.1') == 0 || hostname.indexOf('localhost') == 0) {
      return 'Please specify a non-localhost hostname. "localhost" will refer to the container, not your machine.'
    }

    return null;
  };

  // Note: The values of the enumeration are important for isStepFamily. For example,
  // *all* states under the "configuring db" family must start with "config-db".
  $scope.States = {
    // Loading the state of the product.
    'LOADING': 'loading',

    // The configuration directory is missing.
    'MISSING_CONFIG_DIR': 'missing-config-dir',

    // The config.yaml exists but it is invalid.
    'INVALID_CONFIG': 'config-invalid',

    // DB is being configured.
    'CONFIG_DB': 'config-db',

    // DB information is being validated.
    'VALIDATING_DB': 'config-db-validating',

    // DB information is being saved to the config.
    'SAVING_DB': 'config-db-saving',

    // A validation error occurred with the database.
    'DB_ERROR': 'config-db-error',

    // Database is being setup.
    'DB_SETUP': 'setup-db',

    // Database setup has succeeded.
    'DB_SETUP_SUCCESS': 'setup-db-success',

    // An error occurred when setting up the database.
    'DB_SETUP_ERROR': 'setup-db-error',

    // The container is being restarted for the database changes.
    'DB_RESTARTING': 'setup-db-restarting',

    // A superuser is being configured.
    'CREATE_SUPERUSER': 'create-superuser',

    // The superuser is being created.
    'CREATING_SUPERUSER': 'create-superuser-creating',

    // An error occurred when setting up the superuser.
    'SUPERUSER_ERROR': 'create-superuser-error',

    // The superuser was created successfully.
    'SUPERUSER_CREATED': 'create-superuser-created',

    // General configuration is being setup.
    'CONFIG': 'config',

    // The configuration is fully valid.
    'VALID_CONFIG': 'valid-config',

    // The container is being restarted for the configuration changes.
    'CONFIG_RESTARTING': 'config-restarting',

    // The product is ready for use.
    'READY': 'ready'
  }

  $scope.csrf_token = window.__token;
  $scope.currentStep = $scope.States.LOADING;
  $scope.errors = {};
  $scope.stepProgress = [];
  $scope.hasSSL = false;
  $scope.hostname = null;

  $scope.$watch('currentStep', function(currentStep) {
    $scope.stepProgress = $scope.getProgress(currentStep);

    switch (currentStep) {
      case $scope.States.CONFIG:
        $('#setupModal').modal('hide');
        break;

      case $scope.States.MISSING_CONFIG_DIR:
        $scope.showMissingConfigDialog();
        break;

      case $scope.States.INVALID_CONFIG:
        $scope.showInvalidConfigDialog();
        break;

      case $scope.States.DB_SETUP:
        $scope.performDatabaseSetup();
        // Fall-through.

      case $scope.States.CREATE_SUPERUSER:
      case $scope.States.DB_RESTARTING:
      case $scope.States.CONFIG_DB:
      case $scope.States.VALID_CONFIG:
      case $scope.States.READY:
        $('#setupModal').modal({
          keyboard: false,
          backdrop: 'static'
        });
        break;
    }
  });

  $scope.restartContainer = function(state) {
    $scope.currentStep = state;
    ContainerService.restartContainer(function() {
      $scope.checkStatus()
    });
  };

  $scope.showSuperuserPanel = function() {
    $('#setupModal').modal('hide');
    var prefix = $scope.hasSSL ? 'https' : 'http';
    var hostname = $scope.hostname;
    window.location = prefix + '://' + hostname + '/superuser';
  };

  $scope.configurationSaved = function(config) {
    $scope.hasSSL = config['PREFERRED_URL_SCHEME'] == 'https';
    $scope.hostname = config['SERVER_HOSTNAME'];
    $scope.currentStep = $scope.States.VALID_CONFIG;
  };

  $scope.getProgress = function(step) {
    var isStep = $scope.isStep;
    var isStepFamily = $scope.isStepFamily;
    var States = $scope.States;

    return [
      isStepFamily(step, States.CONFIG_DB),
      isStepFamily(step, States.DB_SETUP),
      isStep(step, States.DB_RESTARTING),
      isStepFamily(step, States.CREATE_SUPERUSER),
      isStep(step, States.CONFIG),
      isStep(step, States.VALID_CONFIG),
      isStep(step, States.CONFIG_RESTARTING),
      isStep(step, States.READY)
    ];
  };

  $scope.isStepFamily = function(step, family) {
    if (!step) { return false; }
    return step.indexOf(family) == 0;
  };

  $scope.isStep = function(step) {
    for (var i = 1; i < arguments.length; ++i) {
      if (arguments[i] == step) {
        return true;
      }
    }
    return false;
  };

  $scope.showInvalidConfigDialog = function() {
    var message = "The <code>config.yaml</code> file found in <code>conf/stack</code> could not be parsed."
    var title = "Invalid configuration file";
    CoreDialog.fatal(title, message);
  };


  $scope.showMissingConfigDialog = function() {
    var message = "A volume should be mounted into the container at <code>/conf/stack</code>: " +
                  "<br><br><pre>docker run -v /path/to/config:/conf/stack</pre>" +
                  "<br>Once fixed, restart the container. For more information, " +
                  "<a href='https://coreos.com/docs/enterprise-registry/initial-setup/'>" +
                  "Read the Setup Guide</a>"

    var title = "Missing configuration volume";
    CoreDialog.fatal(title, message);
  };

  $scope.parseDbUri = function(value) {
    if (!value) { return null; }

    // Format: mysql+pymysql://<username>:<url escaped password>@<hostname>/<database_name>
    var uri = URI(value);
    return {
      'kind': uri.protocol(),
      'username': uri.username(),
      'password': uri.password(),
      'server': uri.host(),
      'database': uri.path() ? uri.path().substr(1) : ''
    };
  };

  $scope.serializeDbUri = function(fields) {
    if (!fields['server']) { return ''; }

    try {
      if (!fields['server']) { return ''; }
      if (!fields['database']) { return ''; }

      var uri = URI();
      uri = uri && uri.host(fields['server']);
      uri = uri && uri.protocol(fields['kind']);
      uri = uri && uri.username(fields['username']);
      uri = uri && uri.password(fields['password']);
      uri = uri && uri.path('/' + (fields['database'] || ''));
      uri = uri && uri.toString();
    } catch (ex) {
      return '';
    }

    return uri;
  };

  $scope.createSuperUser = function() {
    $scope.currentStep = $scope.States.CREATING_SUPERUSER;
    ApiService.scCreateInitialSuperuser($scope.superUser, null).then(function(resp) {
      UserService.load();
      $scope.checkStatus();
    }, function(resp) {
      $scope.currentStep = $scope.States.SUPERUSER_ERROR;
      $scope.errors.SuperuserCreationError = ApiService.getErrorMessage(resp, 'Could not create superuser');
    });
  };

  $scope.performDatabaseSetup = function() {
    $scope.currentStep = $scope.States.DB_SETUP;
    ApiService.scSetupDatabase(null, null).then(function(resp) {
      if (resp['error']) {
        $scope.currentStep = $scope.States.DB_SETUP_ERROR;
        $scope.errors.DatabaseSetupError = resp['error'];
      } else {
        $scope.currentStep = $scope.States.DB_SETUP_SUCCESS;
      }
    }, ApiService.errorDisplay('Could not setup database. Please report this to support.'))
  };

  $scope.validateDatabase = function() {
    $scope.currentStep = $scope.States.VALIDATING_DB;
    $scope.databaseInvalid = null;

    var data = {
      'config': {
        'DB_URI': $scope.databaseUri
      },
      'hostname': window.location.host
    };

    var params = {
      'service': 'database'
    };

    ApiService.scValidateConfig(data, params).then(function(resp) {
      var status = resp.status;

      if (status) {
        $scope.currentStep = $scope.States.SAVING_DB;
        ApiService.scUpdateConfig(data, null).then(function(resp) {
          $scope.checkStatus();
        }, ApiService.errorDisplay('Cannot update config. Please report this to support'));
      } else {
        $scope.currentStep = $scope.States.DB_ERROR;
        $scope.errors.DatabaseValidationError = resp.reason;
      }
    }, ApiService.errorDisplay('Cannot validate database. Please report this to support'));
  };

  $scope.checkStatus = function() {
    ContainerService.checkStatus(function(resp) {
      $scope.currentStep = resp['status'];
    }, $scope.hasSSL);
  };

  // Load the initial status.
  $scope.checkStatus();
}
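The DB URI round trip above is easy to get wrong, so here is a worked example of the two helpers. The connection string is illustrative only; URI is the URI.js global the controller already depends on:

    // Illustrative only; follows the format comment inside parseDbUri:
    //   mysql+pymysql://<username>:<url escaped password>@<hostname>/<database_name>
    var fields = $scope.parseDbUri('mysql+pymysql://quay:s3cret@db.example.com/registry');
    // fields == {'kind': 'mysql+pymysql', 'username': 'quay', 'password': 's3cret',
    //            'server': 'db.example.com', 'database': 'registry'}

serializeDbUri(fields) rebuilds the same string; it returns '' when the server or database field is missing, and the try/catch turns any URI.js parse error into '' as well rather than surfacing it to the form.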
static/js/controllers/superuser.js (new file, 224 additions)
@@ -0,0 +1,224 @@
function SuperUserAdminCtrl($scope, $timeout, ApiService, Features, UserService, ContainerService, AngularPollChannel, CoreDialog) {
  if (!Features.SUPER_USERS) {
    return;
  }

  // Monitor any user changes and place the current user into the scope.
  UserService.updateUserIn($scope);

  $scope.configStatus = null;
  $scope.requiresRestart = null;
  $scope.logsCounter = 0;
  $scope.newUser = {};
  $scope.createdUser = null;
  $scope.systemUsage = null;
  $scope.debugServices = null;
  $scope.debugLogs = null;
  $scope.pollChannel = null;
  $scope.logsScrolled = false;
  $scope.csrf_token = encodeURIComponent(window.__token);

  $scope.configurationSaved = function() {
    $scope.requiresRestart = true;
  };

  $scope.showCreateUser = function() {
    $scope.createdUser = null;
    $('#createUserModal').modal('show');
  };

  $scope.viewSystemLogs = function(service) {
    if ($scope.pollChannel) {
      $scope.pollChannel.stop();
    }

    $scope.debugService = service;
    $scope.debugLogs = null;

    $scope.pollChannel = AngularPollChannel.create($scope, $scope.loadServiceLogs, 2 * 1000 /* 2s */);
    $scope.pollChannel.start();
  };

  $scope.loadServiceLogs = function(callback) {
    if (!$scope.debugService) { return; }

    var params = {
      'service': $scope.debugService
    };

    var errorHandler = ApiService.errorDisplay('Cannot load system logs. Please contact support.',
      function() {
        callback(false);
      })

    ApiService.getSystemLogs(null, params, /* background */true).then(function(resp) {
      $scope.debugLogs = resp['logs'];
      callback(true);
    }, errorHandler);
  };

  $scope.loadDebugServices = function() {
    if ($scope.pollChannel) {
      $scope.pollChannel.stop();
    }

    $scope.debugService = null;

    ApiService.listSystemLogServices().then(function(resp) {
      $scope.debugServices = resp['services'];
    }, ApiService.errorDisplay('Cannot load system logs. Please contact support.'))
  };

  $scope.getUsage = function() {
    if ($scope.systemUsage) { return; }

    ApiService.getSystemUsage().then(function(resp) {
      $scope.systemUsage = resp;
    }, ApiService.errorDisplay('Cannot load system usage. Please contact support.'))
  }

  $scope.loadUsageLogs = function() {
    $scope.logsCounter++;
  };

  $scope.loadUsers = function() {
    if ($scope.users) {
      return;
    }

    $scope.loadUsersInternal();
  };

  $scope.loadUsersInternal = function() {
    ApiService.listAllUsers().then(function(resp) {
      $scope.users = resp['users'];
      $scope.showInterface = true;
    }, function(resp) {
      $scope.users = [];
      $scope.usersError = resp['data']['message'] || resp['data']['error_description'];
    });
  };

  $scope.showChangePassword = function(user) {
    $scope.userToChange = user;
    $('#changePasswordModal').modal({});
  };

  $scope.createUser = function() {
    $scope.creatingUser = true;
    $scope.createdUser = null;

    var errorHandler = ApiService.errorDisplay('Cannot create user', function() {
      $scope.creatingUser = false;
      $('#createUserModal').modal('hide');
    });

    ApiService.createInstallUser($scope.newUser, null).then(function(resp) {
      $scope.creatingUser = false;
      $scope.newUser = {};
      $scope.createdUser = resp;
      $scope.loadUsersInternal();
    }, errorHandler)
  };

  $scope.showDeleteUser = function(user) {
    if (user.username == UserService.currentUser().username) {
      bootbox.dialog({
        "message": 'Cannot delete yourself!',
        "title": "Cannot delete user",
        "buttons": {
          "close": {
            "label": "Close",
            "className": "btn-primary"
          }
        }
      });
      return;
    }

    $scope.userToDelete = user;
    $('#confirmDeleteUserModal').modal({});
  };

  $scope.changeUserPassword = function(user) {
    $('#changePasswordModal').modal('hide');

    var params = {
      'username': user.username
    };

    var data = {
      'password': user.password
    };

    ApiService.changeInstallUser(data, params).then(function(resp) {
      $scope.loadUsersInternal();
    }, ApiService.errorDisplay('Could not change user'));
  };

  $scope.deleteUser = function(user) {
    $('#confirmDeleteUserModal').modal('hide');

    var params = {
      'username': user.username
    };

    ApiService.deleteInstallUser(null, params).then(function(resp) {
      $scope.loadUsersInternal();
    }, ApiService.errorDisplay('Cannot delete user'));
  };

  $scope.sendRecoveryEmail = function(user) {
    var params = {
      'username': user.username
    };

    ApiService.sendInstallUserRecoveryEmail(null, params).then(function(resp) {
      bootbox.dialog({
        "message": "A recovery email has been sent to " + resp['email'],
        "title": "Recovery email sent",
        "buttons": {
          "close": {
            "label": "Close",
            "className": "btn-primary"
          }
        }
      });

    }, ApiService.errorDisplay('Cannot send recovery email'))
  };

  $scope.restartContainer = function() {
    $('#restartingContainerModal').modal({
      keyboard: false,
      backdrop: 'static'
    });

    ContainerService.restartContainer(function() {
      $scope.checkStatus()
    });
  };

  $scope.checkStatus = function() {
    ContainerService.checkStatus(function(resp) {
      $('#restartingContainerModal').modal('hide');
      $scope.configStatus = resp['status'];
      $scope.requiresRestart = resp['requires_restart'];

      if ($scope.configStatus == 'ready') {
        $scope.loadUsers();
      } else {
        var message = "Installation of this product has not yet been completed." +
                      "<br><br>Please read the " +
                      "<a href='https://coreos.com/docs/enterprise-registry/initial-setup/'>" +
                      "Setup Guide</a>"

        var title = "Installation Incomplete";
        CoreDialog.fatal(title, message);
      }
    });
  };

  // Load the initial status.
  $scope.checkStatus();
}
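Both new controllers lean on ApiService.errorDisplay in two shapes: passed directly as a promise rejection handler, and given a second follow-up callback (as in loadServiceLogs above). The real helper lives in ApiService and is not part of this diff; a minimal sketch of the contract it appears to satisfy:

    // Sketch only: returns a rejection handler that surfaces 'message' to the
    // user and then runs the optional follow-up callback with the response.
    var errorDisplay = function(message, opt_callback) {
      return function(resp) {
        bootbox.dialog({
          "message": message,
          "title": "Request failed",   // the title text here is an assumption
          "buttons": {"close": {"label": "Close", "className": "btn-primary"}}
        });
        if (opt_callback) { opt_callback(resp); }
      };
    };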
761
static/js/core-config-setup.js
Normal file
761
static/js/core-config-setup.js
Normal file
|
@ -0,0 +1,761 @@
|
||||||
|
angular.module("core-config-setup", ['angularFileUpload'])
|
||||||
|
.directive('configSetupTool', function() {
|
||||||
|
var directiveDefinitionObject = {
|
||||||
|
priority: 1,
|
||||||
|
templateUrl: '/static/directives/config/config-setup-tool.html',
|
||||||
|
replace: true,
|
||||||
|
transclude: true,
|
||||||
|
restrict: 'C',
|
||||||
|
scope: {
|
||||||
|
'isActive': '=isActive',
|
||||||
|
'configurationSaved': '&configurationSaved'
|
||||||
|
},
|
||||||
|
controller: function($rootScope, $scope, $element, $timeout, ApiService) {
|
||||||
|
$scope.HOSTNAME_REGEX = '^[a-zA-Z-0-9\.]+(:[0-9]+)?$';
|
||||||
|
$scope.GITHUB_REGEX = '^https?://([a-zA-Z0-9]+\.?\/?)+$';
|
||||||
|
|
||||||
|
$scope.SERVICES = [
|
||||||
|
{'id': 'redis', 'title': 'Redis'},
|
||||||
|
|
||||||
|
{'id': 'registry-storage', 'title': 'Registry Storage'},
|
||||||
|
|
||||||
|
{'id': 'ssl', 'title': 'SSL certificate and key', 'condition': function(config) {
|
||||||
|
return config.PREFERRED_URL_SCHEME == 'https';
|
||||||
|
}},
|
||||||
|
|
||||||
|
{'id': 'ldap', 'title': 'LDAP Authentication', 'condition': function(config) {
|
||||||
|
return config.AUTHENTICATION_TYPE == 'LDAP';
|
||||||
|
}},
|
||||||
|
|
||||||
|
{'id': 'mail', 'title': 'E-mail Support', 'condition': function(config) {
|
||||||
|
return config.FEATURE_MAILING;
|
||||||
|
}},
|
||||||
|
|
||||||
|
{'id': 'github-login', 'title': 'Github (Enterprise) Authentication', 'condition': function(config) {
|
||||||
|
return config.FEATURE_GITHUB_LOGIN;
|
||||||
|
}},
|
||||||
|
|
||||||
|
{'id': 'google-login', 'title': 'Google Authentication', 'condition': function(config) {
|
||||||
|
return config.FEATURE_GOOGLE_LOGIN;
|
||||||
|
}},
|
||||||
|
|
||||||
|
{'id': 'github-trigger', 'title': 'Github (Enterprise) Build Triggers', 'condition': function(config) {
|
||||||
|
return config.FEATURE_GITHUB_BUILD;
|
||||||
|
}}
|
||||||
|
];
|
||||||
|
|
||||||
|
$scope.STORAGE_CONFIG_FIELDS = {
|
||||||
|
'LocalStorage': [
|
||||||
|
{'name': 'storage_path', 'title': 'Storage Directory', 'placeholder': '/some/directory', 'kind': 'text'}
|
||||||
|
],
|
||||||
|
|
||||||
|
'S3Storage': [
|
||||||
|
{'name': 's3_access_key', 'title': 'AWS Access Key', 'placeholder': 'accesskeyhere', 'kind': 'text'},
|
||||||
|
{'name': 's3_secret_key', 'title': 'AWS Secret Key', 'placeholder': 'secretkeyhere', 'kind': 'text'},
|
||||||
|
{'name': 's3_bucket', 'title': 'S3 Bucket', 'placeholder': 'my-cool-bucket', 'kind': 'text'},
|
||||||
|
{'name': 'storage_path', 'title': 'Storage Directory', 'placeholder': '/path/inside/bucket', 'kind': 'text'}
|
||||||
|
],
|
||||||
|
|
||||||
|
'GoogleCloudStorage': [
|
||||||
|
{'name': 'access_key', 'title': 'Cloud Access Key', 'placeholder': 'accesskeyhere', 'kind': 'text'},
|
||||||
|
{'name': 'secret_key', 'title': 'Cloud Secret Key', 'placeholder': 'secretkeyhere', 'kind': 'text'},
|
||||||
|
{'name': 'bucket_name', 'title': 'GCS Bucket', 'placeholder': 'my-cool-bucket', 'kind': 'text'},
|
||||||
|
{'name': 'storage_path', 'title': 'Storage Directory', 'placeholder': '/path/inside/bucket', 'kind': 'text'}
|
||||||
|
],
|
||||||
|
|
||||||
|
'RadosGWStorage': [
|
||||||
|
{'name': 'hostname', 'title': 'Rados Server Hostname', 'placeholder': 'my.rados.hostname', 'kind': 'text'},
|
||||||
|
{'name': 'is_secure', 'title': 'Is Secure', 'placeholder': 'Require SSL', 'kind': 'bool'},
|
||||||
|
{'name': 'access_key', 'title': 'Access Key', 'placeholder': 'accesskeyhere', 'kind': 'text', 'help_url': 'http://ceph.com/docs/master/radosgw/admin/'},
|
||||||
|
{'name': 'secret_key', 'title': 'Secret Key', 'placeholder': 'secretkeyhere', 'kind': 'text'},
|
||||||
|
{'name': 'bucket_name', 'title': 'Bucket Name', 'placeholder': 'my-cool-bucket', 'kind': 'text'},
|
||||||
|
{'name': 'storage_path', 'title': 'Storage Directory', 'placeholder': '/path/inside/bucket', 'kind': 'text'}
|
||||||
|
]
|
||||||
|
};
|
||||||
|
|
||||||
|
$scope.validateHostname = function(hostname) {
|
||||||
|
if (hostname.indexOf('127.0.0.1') == 0 || hostname.indexOf('localhost') == 0) {
|
||||||
|
return 'Please specify a non-localhost hostname. "localhost" will refer to the container, not your machine.'
|
||||||
|
}
|
||||||
|
|
||||||
|
return null;
|
||||||
|
};
|
||||||
|
|
||||||
|
$scope.config = null;
|
||||||
|
$scope.mapped = {
|
||||||
|
'$hasChanges': false
|
||||||
|
};
|
||||||
|
|
||||||
|
$scope.validating = null;
|
||||||
|
$scope.savingConfiguration = false;
|
||||||
|
|
||||||
|
$scope.getServices = function(config) {
|
||||||
|
var services = [];
|
||||||
|
if (!config) { return services; }
|
||||||
|
|
||||||
|
for (var i = 0; i < $scope.SERVICES.length; ++i) {
|
||||||
|
var service = $scope.SERVICES[i];
|
||||||
|
if (!service.condition || service.condition(config)) {
|
||||||
|
services.push({
|
||||||
|
'service': service,
|
||||||
|
'status': 'validating'
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return services;
|
||||||
|
};
|
||||||
|
|
||||||
|
$scope.validationStatus = function(serviceInfos) {
|
||||||
|
if (!serviceInfos) { return 'validating'; }
|
||||||
|
|
||||||
|
var hasError = false;
|
||||||
|
for (var i = 0; i < serviceInfos.length; ++i) {
|
||||||
|
if (serviceInfos[i].status == 'validating') {
|
||||||
|
return 'validating';
|
||||||
|
}
|
||||||
|
if (serviceInfos[i].status == 'error') {
|
||||||
|
hasError = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return hasError ? 'failed' : 'success';
|
||||||
|
};
|
||||||
|
|
||||||
|
$scope.cancelValidation = function() {
|
||||||
|
$('#validateAndSaveModal').modal('hide');
|
||||||
|
$scope.validating = null;
|
||||||
|
$scope.savingConfiguration = false;
|
||||||
|
};
|
||||||
|
|
||||||
|
$scope.validateService = function(serviceInfo) {
|
||||||
|
var params = {
|
||||||
|
'service': serviceInfo.service.id
|
||||||
|
};
|
||||||
|
|
||||||
|
ApiService.scValidateConfig({'config': $scope.config}, params).then(function(resp) {
|
||||||
|
serviceInfo.status = resp.status ? 'success' : 'error';
|
||||||
|
serviceInfo.errorMessage = $.trim(resp.reason || '');
|
||||||
|
}, ApiService.errorDisplay('Could not validate configuration. Please report this error.'));
|
||||||
|
};
|
||||||
|
|
||||||
|
$scope.checkValidateAndSave = function() {
|
||||||
|
if ($scope.configform.$valid) {
|
||||||
|
$scope.validateAndSave();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
$element.find("input.ng-invalid:first")[0].scrollIntoView();
|
||||||
|
$element.find("input.ng-invalid:first").focus();
|
||||||
|
};
|
||||||
|
|
||||||
|
$scope.validateAndSave = function() {
|
||||||
|
$scope.savingConfiguration = false;
|
||||||
|
$scope.validating = $scope.getServices($scope.config);
|
||||||
|
|
||||||
|
$('#validateAndSaveModal').modal({
|
||||||
|
keyboard: false,
|
||||||
|
backdrop: 'static'
|
||||||
|
});
|
||||||
|
|
||||||
|
for (var i = 0; i < $scope.validating.length; ++i) {
|
||||||
|
var serviceInfo = $scope.validating[i];
|
||||||
|
$scope.validateService(serviceInfo);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
$scope.saveConfiguration = function() {
|
||||||
|
$scope.savingConfiguration = true;
|
||||||
|
|
||||||
|
// Make sure to note that fully verified setup is completed. We use this as a signal
|
||||||
|
// in the setup tool.
|
||||||
|
$scope.config['SETUP_COMPLETE'] = true;
|
||||||
|
|
||||||
|
var data = {
|
||||||
|
'config': $scope.config,
|
||||||
|
'hostname': window.location.host
|
||||||
|
};
|
||||||
|
|
||||||
|
ApiService.scUpdateConfig(data).then(function(resp) {
|
||||||
|
$scope.savingConfiguration = false;
|
||||||
|
$scope.mapped.$hasChanges = false;
|
||||||
|
$('#validateAndSaveModal').modal('hide');
|
||||||
|
$scope.configurationSaved({'config': $scope.config});
|
||||||
|
}, ApiService.errorDisplay('Could not save configuration. Please report this error.'));
|
||||||
|
};
|
||||||
|
|
||||||
|
var githubSelector = function(key) {
|
||||||
|
return function(value) {
|
||||||
|
if (!value || !$scope.config) { return; }
|
||||||
|
|
||||||
|
if (!$scope.config[key]) {
|
||||||
|
$scope.config[key] = {};
|
||||||
|
}
|
||||||
|
|
||||||
|
if (value == 'enterprise') {
|
||||||
|
if ($scope.config[key]['GITHUB_ENDPOINT'] == 'https://github.com/') {
|
||||||
|
$scope.config[key]['GITHUB_ENDPOINT'] = '';
|
||||||
|
}
|
||||||
|
delete $scope.config[key]['API_ENDPOINT'];
|
||||||
|
} else if (value == 'hosted') {
|
||||||
|
$scope.config[key]['GITHUB_ENDPOINT'] = 'https://github.com/';
|
||||||
|
$scope.config[key]['API_ENDPOINT'] = 'https://api.github.com/';
|
||||||
|
}
|
||||||
|
};
|
||||||
|
};

      var getKey = function(config, path) {
        var parts = path.split('.');
        var current = config;
        for (var i = 0; i < parts.length; ++i) {
          var part = parts[i];
          if (!current[part]) { return null; }
          current = current[part];
        }
        return current;
      };
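
      // Illustrative examples (not from the diff):
      //   getKey({'BUILDLOGS_REDIS': {'host': 'redis.local'}}, 'BUILDLOGS_REDIS.host')  // => 'redis.local'
      //   getKey({}, 'BUILDLOGS_REDIS.host')  // => null; any missing path segment short-circuits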

      var initializeMappedLogic = function(config) {
        var gle = getKey(config, 'GITHUB_LOGIN_CONFIG.GITHUB_ENDPOINT');
        var gte = getKey(config, 'GITHUB_TRIGGER_CONFIG.GITHUB_ENDPOINT');

        $scope.mapped['GITHUB_LOGIN_KIND'] = gle == 'https://github.com/' ? 'hosted' : 'enterprise';
        $scope.mapped['GITHUB_TRIGGER_KIND'] = gte == 'https://github.com/' ? 'hosted' : 'enterprise';

        $scope.mapped['redis'] = {};
        $scope.mapped['redis']['host'] = getKey(config, 'BUILDLOGS_REDIS.host') || getKey(config, 'USER_EVENTS_REDIS.host');
        $scope.mapped['redis']['port'] = getKey(config, 'BUILDLOGS_REDIS.port') || getKey(config, 'USER_EVENTS_REDIS.port');
        $scope.mapped['redis']['password'] = getKey(config, 'BUILDLOGS_REDIS.password') || getKey(config, 'USER_EVENTS_REDIS.password');
      };

      var redisSetter = function(keyname) {
        return function(value) {
          if (value == null || !$scope.config) { return; }

          if (!$scope.config['BUILDLOGS_REDIS']) {
            $scope.config['BUILDLOGS_REDIS'] = {};
          }

          if (!$scope.config['USER_EVENTS_REDIS']) {
            $scope.config['USER_EVENTS_REDIS'] = {};
          }

          if (!value) {
            delete $scope.config['BUILDLOGS_REDIS'][keyname];
            delete $scope.config['USER_EVENTS_REDIS'][keyname];
            return;
          }

          $scope.config['BUILDLOGS_REDIS'][keyname] = value;
          $scope.config['USER_EVENTS_REDIS'][keyname] = value;
        };
      };
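
      // Illustrative example (not from the diff): one mapped value fans out to both
      // redis config blocks, so the setup tool only asks for a single redis instance.
      //   redisSetter('host')('redis.local');
      //   // => config.BUILDLOGS_REDIS.host === config.USER_EVENTS_REDIS.host === 'redis.local'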

      // Add mapped logic.
      $scope.$watch('mapped.GITHUB_LOGIN_KIND', githubSelector('GITHUB_LOGIN_CONFIG'));
      $scope.$watch('mapped.GITHUB_TRIGGER_KIND', githubSelector('GITHUB_TRIGGER_CONFIG'));

      $scope.$watch('mapped.redis.host', redisSetter('host'));
      $scope.$watch('mapped.redis.port', redisSetter('port'));
      $scope.$watch('mapped.redis.password', redisSetter('password'));

      // Add a watch to remove any fields not allowed by the current storage configuration.
      // We have to do this otherwise extra fields (which are not allowed) can end up in the
      // configuration.
      $scope.$watch('config.DISTRIBUTED_STORAGE_CONFIG.local[0]', function(value) {
        // Remove any fields not associated with the current kind.
        if (!value || !$scope.STORAGE_CONFIG_FIELDS[value]
            || !$scope.config.DISTRIBUTED_STORAGE_CONFIG
            || !$scope.config.DISTRIBUTED_STORAGE_CONFIG.local
            || !$scope.config.DISTRIBUTED_STORAGE_CONFIG.local[1]) { return; }

        var allowedFields = $scope.STORAGE_CONFIG_FIELDS[value];
        var configObject = $scope.config.DISTRIBUTED_STORAGE_CONFIG.local[1];

        // Remove any fields not allowed.
        for (var fieldName in configObject) {
          if (!configObject.hasOwnProperty(fieldName)) {
            continue;
          }

          var isValidField = $.grep(allowedFields, function(field) {
            return field.name == fieldName;
          }).length > 0;

          if (!isValidField) {
            delete configObject[fieldName];
          }
        }

        // Set any boolean fields to false.
        for (var i = 0; i < allowedFields.length; ++i) {
          if (allowedFields[i].kind == 'bool') {
            configObject[allowedFields[i].name] = false;
          }
        }
      });

      $scope.$watch('config', function(value) {
        $scope.mapped['$hasChanges'] = true;
      }, true);

      $scope.$watch('isActive', function(value) {
        if (!value) { return; }

        ApiService.scGetConfig().then(function(resp) {
          $scope.config = resp['config'];
          initializeMappedLogic($scope.config);
          $scope.mapped['$hasChanges'] = false;
        });
      });
    }
  };

  return directiveDefinitionObject;
})

.directive('configParsedField', function ($timeout) {
  var directiveDefinitionObject = {
    priority: 0,
    templateUrl: '/static/directives/config/config-parsed-field.html',
    replace: false,
    transclude: true,
    restrict: 'C',
    scope: {
      'binding': '=binding',
      'parser': '&parser',
      'serializer': '&serializer'
    },
    controller: function($scope, $element, $transclude) {
      $scope.childScope = null;

      $transclude(function(clone, scope) {
        $scope.childScope = scope;
        $scope.childScope['fields'] = {};
        $element.append(clone);
      });

      $scope.childScope.$watch('fields', function(value) {
        // Note: We need the timeout here because Angular starts the digest of the
        // parent scope AFTER the child scope, which means it can end up one action
        // behind. The timeout ensures that the parent scope will be fully digest-ed
        // and then we update the binding. Yes, this is a hack :-/.
        $timeout(function() {
          $scope.binding = $scope.serializer({'fields': value});
        });
      }, true);

      $scope.$watch('binding', function(value) {
        var parsed = $scope.parser({'value': value});
        for (var key in parsed) {
          if (parsed.hasOwnProperty(key)) {
            $scope.childScope['fields'][key] = parsed[key];
          }
        }
      });
    }
  };
  return directiveDefinitionObject;
})
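
// Illustrative usage sketch (attribute names follow the scope bindings above; the parser
// and serializer expressions are hypothetical, not from the diff):
//   <div class="config-parsed-field" binding="config.SOME_VALUE"
//        parser="myParse(value)" serializer="mySerialize(fields)">
//     <!-- transcluded inputs bound against the child scope's `fields` map -->
//   </div>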

.directive('configVariableField', function () {
  var directiveDefinitionObject = {
    priority: 0,
    templateUrl: '/static/directives/config/config-variable-field.html',
    replace: false,
    transclude: true,
    restrict: 'C',
    scope: {
      'binding': '=binding'
    },
    controller: function($scope, $element) {
      $scope.sections = {};
      $scope.currentSection = null;

      $scope.setSection = function(section) {
        $scope.binding = section.value;
      };

      this.addSection = function(section, element) {
        $scope.sections[section.value] = {
          'title': section.valueTitle,
          'value': section.value,
          'element': element
        };

        element.hide();

        // Default the binding to the first registered section.
        if (!$scope.binding) {
          $scope.binding = section.value;
        }
      };

      $scope.$watch('binding', function(binding) {
        if (!binding) { return; }

        if ($scope.currentSection) {
          $scope.currentSection.element.hide();
        }

        if ($scope.sections[binding]) {
          $scope.sections[binding].element.show();
          $scope.currentSection = $scope.sections[binding];
        }
      });
    }
  };
  return directiveDefinitionObject;
})

.directive('variableSection', function () {
  var directiveDefinitionObject = {
    templateUrl: '/static/directives/config/config-variable-field.html',
    priority: 1,
    require: '^configVariableField',
    replace: false,
    transclude: true,
    restrict: 'C',
    scope: {
      'value': '@value',
      'valueTitle': '@valueTitle'
    },
    controller: function($scope, $element) {
      var parentCtrl = $element.parent().controller('configVariableField');
      parentCtrl.addSection($scope, $element);
    }
  };
  return directiveDefinitionObject;
})

.directive('configListField', function () {
  var directiveDefinitionObject = {
    priority: 0,
    templateUrl: '/static/directives/config/config-list-field.html',
    replace: false,
    transclude: false,
    restrict: 'C',
    scope: {
      'binding': '=binding',
      'placeholder': '@placeholder',
      'defaultValue': '@defaultValue',
      'itemTitle': '@itemTitle'
    },
    controller: function($scope, $element) {
      $scope.removeItem = function(item) {
        var index = $scope.binding.indexOf(item);
        if (index >= 0) {
          $scope.binding.splice(index, 1);
        }
      };

      $scope.addItem = function() {
        if (!$scope.newItemName) {
          return;
        }

        if (!$scope.binding) {
          $scope.binding = [];
        }

        // Don't add duplicates.
        if ($scope.binding.indexOf($scope.newItemName) >= 0) {
          return;
        }

        $scope.binding.push($scope.newItemName);
        $scope.newItemName = null;
      };

      $scope.$watch('binding', function(binding) {
        if (!binding && $scope.defaultValue) {
          // Note: the default value attribute is a JS expression (e.g. "[]").
          $scope.binding = eval($scope.defaultValue);
        }
      });
    }
  };
  return directiveDefinitionObject;
})
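
// Illustrative usage sketch (attribute values hypothetical, not from the diff):
//   <div class="config-list-field" binding="config.SUPER_USERS"
//        item-title="Username" placeholder="Enter a username"></div>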

.directive('configFileField', function () {
  var directiveDefinitionObject = {
    priority: 0,
    templateUrl: '/static/directives/config/config-file-field.html',
    replace: false,
    transclude: false,
    restrict: 'C',
    scope: {
      'filename': '@filename'
    },
    controller: function($scope, $element, Restangular, $upload) {
      $scope.hasFile = false;

      $scope.onFileSelect = function(files) {
        if (files.length < 1) { return; }

        $scope.uploadProgress = 0;
        $scope.upload = $upload.upload({
          url: '/api/v1/superuser/config/file/' + $scope.filename,
          method: 'POST',
          data: {'_csrf_token': window.__token},
          file: files[0]
        }).progress(function(evt) {
          $scope.uploadProgress = parseInt(100.0 * evt.loaded / evt.total);
          if ($scope.uploadProgress == 100) {
            $scope.uploadProgress = null;
            $scope.hasFile = true;
          }
        }).success(function(data, status, headers, config) {
          $scope.uploadProgress = null;
          $scope.hasFile = true;
        });
      };

      var loadStatus = function(filename) {
        Restangular.one('superuser/config/file/' + filename).get().then(function(resp) {
          $scope.hasFile = resp['exists'];
        });
      };

      if ($scope.filename) {
        loadStatus($scope.filename);
      }
    }
  };
  return directiveDefinitionObject;
})

.directive('configBoolField', function () {
  var directiveDefinitionObject = {
    priority: 0,
    templateUrl: '/static/directives/config/config-bool-field.html',
    replace: false,
    transclude: false,
    restrict: 'C',
    scope: {
      'binding': '=binding'
    },
    controller: function($scope, $element) {
    }
  };
  return directiveDefinitionObject;
})

.directive('configNumericField', function () {
  var directiveDefinitionObject = {
    priority: 0,
    templateUrl: '/static/directives/config/config-numeric-field.html',
    replace: false,
    transclude: false,
    restrict: 'C',
    scope: {
      'binding': '=binding',
      'placeholder': '@placeholder',
      'defaultValue': '@defaultValue'
    },
    controller: function($scope, $element) {
      $scope.bindinginternal = 0;

      $scope.$watch('binding', function(binding) {
        if ($scope.binding == 0 && $scope.defaultValue) {
          $scope.binding = $scope.defaultValue * 1;
        }

        $scope.bindinginternal = $scope.binding;
      });

      $scope.$watch('bindinginternal', function(binding) {
        // Coerce the internal (string) value to a number; non-numeric input resets to 0.
        var newValue = $scope.bindinginternal * 1;
        if (isNaN(newValue)) {
          newValue = 0;
        }
        $scope.binding = newValue;
      });
    }
  };
  return directiveDefinitionObject;
})
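
// Illustrative examples of the `* 1` numeric coercion used above (not from the diff):
//   '8080' * 1 === 8080;   '' * 1 === 0;   'abc' * 1 is NaN, so the binding resets to 0.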

.directive('configContactsField', function () {
  var directiveDefinitionObject = {
    templateUrl: '/static/directives/config/config-contacts-field.html',
    priority: 1,
    replace: false,
    transclude: false,
    restrict: 'C',
    scope: {
      'binding': '=binding'
    },
    controller: function($scope, $element) {
      var padItems = function(items) {
        // Remove the last item if both it and the second-to-last item are empty.
        if (items.length > 1 && !items[items.length - 2].value && !items[items.length - 1].value) {
          items.splice(items.length - 1, 1);
          return;
        }

        // If the last item is non-empty, add a new (empty) item.
        if (items.length == 0 || items[items.length - 1].value) {
          items.push({'value': ''});
          return;
        }
      };

      $scope.itemHash = null;
      $scope.$watch('items', function(items) {
        if (!items) { return; }
        padItems(items);

        var itemHash = '';
        var binding = [];
        for (var i = 0; i < items.length; ++i) {
          var item = items[i];
          if (item.value && (URI(item.value).host() || URI(item.value).path())) {
            binding.push(item.value);
            itemHash += item.value;
          }
        }

        $scope.itemHash = itemHash;
        $scope.binding = binding;
      }, true);

      $scope.$watch('binding', function(binding) {
        var current = binding || [];
        var items = [];
        var itemHash = '';
        for (var i = 0; i < current.length; ++i) {
          items.push({'value': current[i]});
          itemHash += current[i];
        }

        // Only rebuild the items list if the contents actually changed; this prevents
        // the two watches from endlessly retriggering each other.
        if ($scope.itemHash != itemHash) {
          $scope.items = items;
        }
      });
    }
  };
  return directiveDefinitionObject;
})
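
// Illustrative padItems behavior (not from the diff):
//   [{value: 'mailto:a@b.com'}]               -> gains a trailing blank entry for the next contact
//   [{value: 'mailto:a@b.com'}, {value: ''}]  -> left as-is (exactly one trailing blank)
//   [{value: ''}, {value: ''}]                -> collapses back to a single blank entry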

.directive('configContactField', function () {
  var directiveDefinitionObject = {
    templateUrl: '/static/directives/config/config-contact-field.html',
    priority: 1,
    replace: false,
    transclude: true,
    restrict: 'C',
    scope: {
      'binding': '=binding'
    },
    controller: function($scope, $element) {
      $scope.kind = null;
      $scope.value = null;

      var updateBinding = function() {
        if ($scope.value == null) { return; }
        var value = $scope.value || '';

        switch ($scope.kind) {
          case 'mailto':
            $scope.binding = 'mailto:' + value;
            return;

          case 'tel':
            $scope.binding = 'tel:' + value;
            return;

          case 'irc':
            $scope.binding = 'irc://' + value;
            return;

          default:
            $scope.binding = value;
            return;
        }
      };

      $scope.$watch('kind', updateBinding);
      $scope.$watch('value', updateBinding);

      $scope.$watch('binding', function(value) {
        if (!value) {
          $scope.kind = null;
          $scope.value = null;
          return;
        }

        var uri = URI(value);
        $scope.kind = uri.scheme();

        switch ($scope.kind) {
          case 'mailto':
          case 'tel':
            $scope.value = uri.path();
            break;

          case 'irc':
            $scope.value = value.substr('irc://'.length);
            break;

          default:
            $scope.kind = 'http';
            $scope.value = value;
            break;
        }
      });

      $scope.getPlaceholder = function(kind) {
        switch (kind) {
          case 'mailto':
            return 'some@example.com';

          case 'tel':
            return '555-555-5555';

          case 'irc':
            return 'myserver:port/somechannel';

          default:
            return 'http://some/url';
        }
      };
    }
  };
  return directiveDefinitionObject;
})
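
// Illustrative round trip (not from the diff): a binding of 'mailto:ops@example.com'
// parses to kind 'mailto' / value 'ops@example.com', and editing either side
// re-serializes the binding via updateBinding. Unrecognized schemes fall back to 'http'.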

.directive('configStringField', function () {
  var directiveDefinitionObject = {
    priority: 0,
    templateUrl: '/static/directives/config/config-string-field.html',
    replace: false,
    transclude: false,
    restrict: 'C',
    scope: {
      'binding': '=binding',
      'placeholder': '@placeholder',
      'pattern': '@pattern',
      'defaultValue': '@defaultValue',
      'validator': '&validator'
    },
    controller: function($scope, $element) {
      $scope.getRegexp = function(pattern) {
        if (!pattern) {
          pattern = '.*';
        }
        return new RegExp(pattern);
      };

      $scope.$watch('binding', function(binding) {
        if (!binding && $scope.defaultValue) {
          $scope.binding = $scope.defaultValue;
        }

        $scope.errorMessage = $scope.validator({'value': binding || ''});
      });
    }
  };
  return directiveDefinitionObject;
});
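
// Illustrative usage sketch (attribute values hypothetical, not from the diff):
//   <div class="config-string-field" binding="config.SERVER_HOSTNAME"
//        placeholder="quay.example.com" pattern="^[a-zA-Z0-9.-]+(:[0-9]+)?$"></div>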

329  static/js/core-ui.js  Normal file
@@ -0,0 +1,329 @@
angular.module("core-ui", [])
  .factory('CoreDialog', [function() {
    var service = {};
    service['fatal'] = function(title, message) {
      bootbox.dialog({
        "title": title,
        "message": "<div class='alert-icon-container-container'><div class='alert-icon-container'><div class='alert-icon'></div></div></div>" + message,
        "buttons": {},
        "className": "co-dialog fatal-error",
        "closeButton": false
      });
    };

    return service;
  }])

  .directive('corLogBox', function() {
    var directiveDefinitionObject = {
      priority: 1,
      templateUrl: '/static/directives/cor-log-box.html',
      replace: true,
      transclude: true,
      restrict: 'C',
      scope: {
        'logs': '=logs'
      },
      controller: function($rootScope, $scope, $element, $timeout) {
        $scope.hasNewLogs = false;

        var scrollHandlerBound = false;
        var isAnimatedScrolling = false;
        var isScrollBottom = true;

        var scrollHandler = function() {
          if (isAnimatedScrolling) { return; }
          var element = $element.find("#co-log-viewer")[0];
          isScrollBottom = element.scrollHeight - element.scrollTop === element.clientHeight;
          if (isScrollBottom) {
            $scope.hasNewLogs = false;
          }
        };

        var animateComplete = function() {
          isAnimatedScrolling = false;
        };

        $scope.moveToBottom = function() {
          $scope.hasNewLogs = false;
          isAnimatedScrolling = true;
          isScrollBottom = true;

          $element.find("#co-log-viewer").animate(
            { scrollTop: $element.find("#co-log-content").height() }, "slow", null, animateComplete);
        };

        $scope.$watch('logs', function(value, oldValue) {
          if (!value) { return; }

          $timeout(function() {
            if (!scrollHandlerBound) {
              $element.find("#co-log-viewer").on('scroll', scrollHandler);
              scrollHandlerBound = true;
            }

            if (!isScrollBottom) {
              $scope.hasNewLogs = true;
              return;
            }

            $scope.moveToBottom();
          }, 500);
        });
      }
    };
    return directiveDefinitionObject;
  })
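
  // Illustrative check of the bottom test above (not from the diff): with scrollHeight 1000
  // and clientHeight 400, the viewer is at the bottom exactly when scrollTop === 600,
  // i.e. scrollHeight - scrollTop === clientHeight.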

  .directive('corOptionsMenu', function() {
    var directiveDefinitionObject = {
      priority: 1,
      templateUrl: '/static/directives/cor-options-menu.html',
      replace: true,
      transclude: true,
      restrict: 'C',
      scope: {},
      controller: function($rootScope, $scope, $element) {
      }
    };
    return directiveDefinitionObject;
  })

  .directive('corOption', function() {
    var directiveDefinitionObject = {
      priority: 1,
      templateUrl: '/static/directives/cor-option.html',
      replace: true,
      transclude: true,
      restrict: 'C',
      scope: {
        'optionClick': '&optionClick'
      },
      controller: function($rootScope, $scope, $element) {
      }
    };
    return directiveDefinitionObject;
  })

  .directive('corTitle', function() {
    var directiveDefinitionObject = {
      priority: 1,
      templateUrl: '/static/directives/cor-title.html',
      replace: true,
      transclude: true,
      restrict: 'C',
      scope: {},
      controller: function($rootScope, $scope, $element) {
      }
    };
    return directiveDefinitionObject;
  })

  .directive('corTitleContent', function() {
    var directiveDefinitionObject = {
      priority: 1,
      templateUrl: '/static/directives/cor-title-content.html',
      replace: true,
      transclude: true,
      restrict: 'C',
      scope: {},
      controller: function($rootScope, $scope, $element) {
      }
    };
    return directiveDefinitionObject;
  })

  .directive('corTitleLink', function() {
    var directiveDefinitionObject = {
      priority: 1,
      templateUrl: '/static/directives/cor-title-link.html',
      replace: true,
      transclude: true,
      restrict: 'C',
      scope: {},
      controller: function($rootScope, $scope, $element) {
      }
    };
    return directiveDefinitionObject;
  })

  .directive('corTabPanel', function() {
    var directiveDefinitionObject = {
      priority: 1,
      templateUrl: '/static/directives/cor-tab-panel.html',
      replace: true,
      transclude: true,
      restrict: 'C',
      scope: {},
      controller: function($rootScope, $scope, $element) {
      }
    };
    return directiveDefinitionObject;
  })

  .directive('corTabContent', function() {
    var directiveDefinitionObject = {
      priority: 2,
      templateUrl: '/static/directives/cor-tab-content.html',
      replace: true,
      transclude: true,
      restrict: 'C',
      scope: {},
      controller: function($rootScope, $scope, $element) {
      }
    };
    return directiveDefinitionObject;
  })

  .directive('corTabs', function() {
    var directiveDefinitionObject = {
      priority: 3,
      templateUrl: '/static/directives/cor-tabs.html',
      replace: true,
      transclude: true,
      restrict: 'C',
      scope: {},
      controller: function($rootScope, $scope, $element) {
      }
    };
    return directiveDefinitionObject;
  })

  .directive('corFloatingBottomBar', function() {
    var directiveDefinitionObject = {
      priority: 3,
      templateUrl: '/static/directives/cor-floating-bottom-bar.html',
      replace: true,
      transclude: true,
      restrict: 'C',
      scope: {},
      controller: function($rootScope, $scope, $element, $timeout, $interval) {
        var handler = function() {
          $element.removeClass('floating');
          $element.css('width', $element[0].parentNode.clientWidth + 'px');

          var windowHeight = $(window).height();
          var rect = $element[0].getBoundingClientRect();
          if (rect.bottom > windowHeight) {
            $element.addClass('floating');
          }
        };

        $(window).on("scroll", handler);
        $(window).on("resize", handler);

        // Poll for parent size changes, since elements have no resize event.
        var previousHeight = $element[0].parentNode.clientHeight;
        var stop = $interval(function() {
          var currentHeight = $element[0].parentNode.clientHeight;
          if (previousHeight != currentHeight) {
            previousHeight = currentHeight;
            handler();
          }
        }, 100);

        $scope.$on('$destroy', function() {
          $(window).off("resize", handler);
          $(window).off("scroll", handler);
          $interval.cancel(stop);
        });
      }
    };
    return directiveDefinitionObject;
  })
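
  // Illustrative check of the floating test (not from the diff): if the bar's
  // getBoundingClientRect().bottom is 1200 while $(window).height() is 800, its natural
  // position is below the viewport, so the 'floating' class pins it on screen.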

  .directive('corLoaderInline', function() {
    var directiveDefinitionObject = {
      templateUrl: '/static/directives/cor-loader-inline.html',
      replace: true,
      restrict: 'C',
      scope: {},
      controller: function($rootScope, $scope, $element) {
      }
    };
    return directiveDefinitionObject;
  })

  .directive('corLoader', function() {
    var directiveDefinitionObject = {
      templateUrl: '/static/directives/cor-loader.html',
      replace: true,
      restrict: 'C',
      scope: {},
      controller: function($rootScope, $scope, $element) {
      }
    };
    return directiveDefinitionObject;
  })

  .directive('corTab', function() {
    var directiveDefinitionObject = {
      priority: 4,
      templateUrl: '/static/directives/cor-tab.html',
      replace: true,
      transclude: true,
      restrict: 'C',
      scope: {
        'tabActive': '@tabActive',
        'tabTitle': '@tabTitle',
        'tabTarget': '@tabTarget',
        'tabInit': '&tabInit'
      },
      controller: function($rootScope, $scope, $element) {
      }
    };
    return directiveDefinitionObject;
  })

  .directive('corStep', function() {
    var directiveDefinitionObject = {
      priority: 4,
      templateUrl: '/static/directives/cor-step.html',
      replace: true,
      transclude: false,
      require: '^corStepBar',
      restrict: 'C',
      scope: {
        'icon': '@icon',
        'title': '@title',
        'text': '@text'
      },
      controller: function($rootScope, $scope, $element) {
      }
    };
    return directiveDefinitionObject;
  })

  .directive('corStepBar', function() {
    var directiveDefinitionObject = {
      priority: 4,
      templateUrl: '/static/directives/cor-step-bar.html',
      replace: true,
      transclude: true,
      restrict: 'C',
      scope: {
        'progress': '=progress'
      },
      controller: function($rootScope, $scope, $element) {
        $scope.$watch('progress', function(progress) {
          if (!progress) { return; }

          // Find the index of the last truthy entry in the progress array.
          var index = 0;
          for (var i = 0; i < progress.length; ++i) {
            if (progress[i]) {
              index = i;
            }
          }

          // Mark every step up to (and including) that index as active.
          $element.find('.transclude').children('.co-step-element').each(function(i, elem) {
            $(elem).removeClass('active');
            if (i <= index) {
              $(elem).addClass('active');
            }
          });
        });
      }
    };
    return directiveDefinitionObject;
  });
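
// Illustrative example (not from the diff): a progress binding of [true, true, false]
// marks the first two .co-step-element children active; the last truthy index wins.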

@@ -19,6 +19,7 @@ typeahead - Permissive (https://github.com/twitter/typeahead.js/blob/master/LICE
 zlib - MIT (https://github.com/imaya/zlib.js)
 pagedown - Permissive
 jquery.overscroll - MIT (https://github.com/azoff/overscroll/blob/master/mit.license)
+URI.js - MIT (https://github.com/medialize/URI.js)

 Issues:
 jquery.spotlight - GPLv3 (https://github.com/jameshalsall/jQuery-Spotlight)
78
static/lib/URI.min.js
vendored
Normal file
78
static/lib/URI.min.js
vendored
Normal file
|
@ -0,0 +1,78 @@
|
||||||
|
(function(f,l){"object"===typeof exports?module.exports=l():"function"===typeof define&&define.amd?define(l):f.IPv6=l(f)})(this,function(f){var l=f&&f.IPv6;return{best:function(g){g=g.toLowerCase().split(":");var m=g.length,b=8;""===g[0]&&""===g[1]&&""===g[2]?(g.shift(),g.shift()):""===g[0]&&""===g[1]?g.shift():""===g[m-1]&&""===g[m-2]&&g.pop();m=g.length;-1!==g[m-1].indexOf(".")&&(b=7);var k;for(k=0;k<m&&""!==g[k];k++);if(k<b)for(g.splice(k,1,"0000");g.length<b;)g.splice(k,0,"0000");for(k=0;k<b;k++){for(var m=
|
||||||
|
g[k].split(""),f=0;3>f;f++)if("0"===m[0]&&1<m.length)m.splice(0,1);else break;g[k]=m.join("")}var m=-1,l=f=0,h=-1,r=!1;for(k=0;k<b;k++)r?"0"===g[k]?l+=1:(r=!1,l>f&&(m=h,f=l)):"0"===g[k]&&(r=!0,h=k,l=1);l>f&&(m=h,f=l);1<f&&g.splice(m,f,"");m=g.length;b="";""===g[0]&&(b=":");for(k=0;k<m;k++){b+=g[k];if(k===m-1)break;b+=":"}""===g[m-1]&&(b+=":");return b},noConflict:function(){f.IPv6===this&&(f.IPv6=l);return this}}});(function(f){function l(b){throw RangeError(w[b]);}function g(b,e){for(var h=b.length;h--;)b[h]=e(b[h]);return b}function m(b,e){return g(b.split(v),e).join(".")}function b(b){for(var e=[],h=0,a=b.length,c,d;h<a;)c=b.charCodeAt(h++),55296<=c&&56319>=c&&h<a?(d=b.charCodeAt(h++),56320==(d&64512)?e.push(((c&1023)<<10)+(d&1023)+65536):(e.push(c),h--)):e.push(c);return e}function k(b){return g(b,function(b){var e="";65535<b&&(b-=65536,e+=B(b>>>10&1023|55296),b=56320|b&1023);return e+=B(b)}).join("")}function z(b,
|
||||||
|
e){return b+22+75*(26>b)-((0!=e)<<5)}function p(b,e,h){var a=0;b=h?q(b/700):b>>1;for(b+=q(b/e);455<b;a+=36)b=q(b/35);return q(a+36*b/(b+38))}function h(b){var e=[],h=b.length,a,c=0,d=128,u=72,x,y,g,f,m;x=b.lastIndexOf("-");0>x&&(x=0);for(y=0;y<x;++y)128<=b.charCodeAt(y)&&l("not-basic"),e.push(b.charCodeAt(y));for(x=0<x?x+1:0;x<h;){y=c;a=1;for(g=36;;g+=36){x>=h&&l("invalid-input");f=b.charCodeAt(x++);f=10>f-48?f-22:26>f-65?f-65:26>f-97?f-97:36;(36<=f||f>q((2147483647-c)/a))&&l("overflow");c+=f*a;m=
|
||||||
|
g<=u?1:g>=u+26?26:g-u;if(f<m)break;f=36-m;a>q(2147483647/f)&&l("overflow");a*=f}a=e.length+1;u=p(c-y,a,0==y);q(c/a)>2147483647-d&&l("overflow");d+=q(c/a);c%=a;e.splice(c++,0,d)}return k(e)}function r(e){var h,g,a,c,d,u,x,y,f,m=[],r,k,n;e=b(e);r=e.length;h=128;g=0;d=72;for(u=0;u<r;++u)f=e[u],128>f&&m.push(B(f));for((a=c=m.length)&&m.push("-");a<r;){x=2147483647;for(u=0;u<r;++u)f=e[u],f>=h&&f<x&&(x=f);k=a+1;x-h>q((2147483647-g)/k)&&l("overflow");g+=(x-h)*k;h=x;for(u=0;u<r;++u)if(f=e[u],f<h&&2147483647<
|
||||||
|
++g&&l("overflow"),f==h){y=g;for(x=36;;x+=36){f=x<=d?1:x>=d+26?26:x-d;if(y<f)break;n=y-f;y=36-f;m.push(B(z(f+n%y,0)));y=q(n/y)}m.push(B(z(y,0)));d=p(g,k,a==c);g=0;++a}++g;++h}return m.join("")}var C="object"==typeof exports&&exports,D="object"==typeof module&&module&&module.exports==C&&module,A="object"==typeof global&&global;if(A.global===A||A.window===A)f=A;var t,n=/^xn--/,e=/[^ -~]/,v=/\x2E|\u3002|\uFF0E|\uFF61/g,w={overflow:"Overflow: input needs wider integers to process","not-basic":"Illegal input >= 0x80 (not a basic code point)",
|
||||||
|
"invalid-input":"Invalid input"},q=Math.floor,B=String.fromCharCode,E;t={version:"1.2.3",ucs2:{decode:b,encode:k},decode:h,encode:r,toASCII:function(b){return m(b,function(b){return e.test(b)?"xn--"+r(b):b})},toUnicode:function(b){return m(b,function(b){return n.test(b)?h(b.slice(4).toLowerCase()):b})}};if("function"==typeof define&&"object"==typeof define.amd&&define.amd)define(function(){return t});else if(C&&!C.nodeType)if(D)D.exports=t;else for(E in t)t.hasOwnProperty(E)&&(C[E]=t[E]);else f.punycode=
|
||||||
|
t})(this);(function(f,l){"object"===typeof exports?module.exports=l():"function"===typeof define&&define.amd?define(l):f.SecondLevelDomains=l(f)})(this,function(f){var l=f&&f.SecondLevelDomains,g={list:{ac:" com gov mil net org ",ae:" ac co gov mil name net org pro sch ",af:" com edu gov net org ",al:" com edu gov mil net org ",ao:" co ed gv it og pb ",ar:" com edu gob gov int mil net org tur ",at:" ac co gv or ",au:" asn com csiro edu gov id net org ",ba:" co com edu gov mil net org rs unbi unmo unsa untz unze ",
|
||||||
|
bb:" biz co com edu gov info net org store tv ",bh:" biz cc com edu gov info net org ",bn:" com edu gov net org ",bo:" com edu gob gov int mil net org tv ",br:" adm adv agr am arq art ato b bio blog bmd cim cng cnt com coop ecn edu eng esp etc eti far flog fm fnd fot fst g12 ggf gov imb ind inf jor jus lel mat med mil mus net nom not ntr odo org ppg pro psc psi qsl rec slg srv tmp trd tur tv vet vlog wiki zlg ",bs:" com edu gov net org ",bz:" du et om ov rg ",ca:" ab bc mb nb nf nl ns nt nu on pe qc sk yk ",
|
||||||
|
ck:" biz co edu gen gov info net org ",cn:" ac ah bj com cq edu fj gd gov gs gx gz ha hb he hi hl hn jl js jx ln mil net nm nx org qh sc sd sh sn sx tj tw xj xz yn zj ",co:" com edu gov mil net nom org ",cr:" ac c co ed fi go or sa ",cy:" ac biz com ekloges gov ltd name net org parliament press pro tm ","do":" art com edu gob gov mil net org sld web ",dz:" art asso com edu gov net org pol ",ec:" com edu fin gov info med mil net org pro ",eg:" com edu eun gov mil name net org sci ",er:" com edu gov ind mil net org rochest w ",
|
||||||
|
es:" com edu gob nom org ",et:" biz com edu gov info name net org ",fj:" ac biz com info mil name net org pro ",fk:" ac co gov net nom org ",fr:" asso com f gouv nom prd presse tm ",gg:" co net org ",gh:" com edu gov mil org ",gn:" ac com gov net org ",gr:" com edu gov mil net org ",gt:" com edu gob ind mil net org ",gu:" com edu gov net org ",hk:" com edu gov idv net org ",hu:" 2000 agrar bolt casino city co erotica erotika film forum games hotel info ingatlan jogasz konyvelo lakas media news org priv reklam sex shop sport suli szex tm tozsde utazas video ",
|
||||||
|
id:" ac co go mil net or sch web ",il:" ac co gov idf k12 muni net org ","in":" ac co edu ernet firm gen gov i ind mil net nic org res ",iq:" com edu gov i mil net org ",ir:" ac co dnssec gov i id net org sch ",it:" edu gov ",je:" co net org ",jo:" com edu gov mil name net org sch ",jp:" ac ad co ed go gr lg ne or ",ke:" ac co go info me mobi ne or sc ",kh:" com edu gov mil net org per ",ki:" biz com de edu gov info mob net org tel ",km:" asso com coop edu gouv k medecin mil nom notaires pharmaciens presse tm veterinaire ",
|
||||||
|
kn:" edu gov net org ",kr:" ac busan chungbuk chungnam co daegu daejeon es gangwon go gwangju gyeongbuk gyeonggi gyeongnam hs incheon jeju jeonbuk jeonnam k kg mil ms ne or pe re sc seoul ulsan ",kw:" com edu gov net org ",ky:" com edu gov net org ",kz:" com edu gov mil net org ",lb:" com edu gov net org ",lk:" assn com edu gov grp hotel int ltd net ngo org sch soc web ",lr:" com edu gov net org ",lv:" asn com conf edu gov id mil net org ",ly:" com edu gov id med net org plc sch ",ma:" ac co gov m net org press ",
|
||||||
|
mc:" asso tm ",me:" ac co edu gov its net org priv ",mg:" com edu gov mil nom org prd tm ",mk:" com edu gov inf name net org pro ",ml:" com edu gov net org presse ",mn:" edu gov org ",mo:" com edu gov net org ",mt:" com edu gov net org ",mv:" aero biz com coop edu gov info int mil museum name net org pro ",mw:" ac co com coop edu gov int museum net org ",mx:" com edu gob net org ",my:" com edu gov mil name net org sch ",nf:" arts com firm info net other per rec store web ",ng:" biz com edu gov mil mobi name net org sch ",
|
||||||
|
ni:" ac co com edu gob mil net nom org ",np:" com edu gov mil net org ",nr:" biz com edu gov info net org ",om:" ac biz co com edu gov med mil museum net org pro sch ",pe:" com edu gob mil net nom org sld ",ph:" com edu gov i mil net ngo org ",pk:" biz com edu fam gob gok gon gop gos gov net org web ",pl:" art bialystok biz com edu gda gdansk gorzow gov info katowice krakow lodz lublin mil net ngo olsztyn org poznan pwr radom slupsk szczecin torun warszawa waw wroc wroclaw zgora ",pr:" ac biz com edu est gov info isla name net org pro prof ",
|
||||||
|
ps:" com edu gov net org plo sec ",pw:" belau co ed go ne or ",ro:" arts com firm info nom nt org rec store tm www ",rs:" ac co edu gov in org ",sb:" com edu gov net org ",sc:" com edu gov net org ",sh:" co com edu gov net nom org ",sl:" com edu gov net org ",st:" co com consulado edu embaixada gov mil net org principe saotome store ",sv:" com edu gob org red ",sz:" ac co org ",tr:" av bbs bel biz com dr edu gen gov info k12 name net org pol tel tsk tv web ",tt:" aero biz cat co com coop edu gov info int jobs mil mobi museum name net org pro tel travel ",
|
||||||
|
tw:" club com ebiz edu game gov idv mil net org ",mu:" ac co com gov net or org ",mz:" ac co edu gov org ",na:" co com ",nz:" ac co cri geek gen govt health iwi maori mil net org parliament school ",pa:" abo ac com edu gob ing med net nom org sld ",pt:" com edu gov int net nome org publ ",py:" com edu gov mil net org ",qa:" com edu gov mil net org ",re:" asso com nom ",ru:" ac adygeya altai amur arkhangelsk astrakhan bashkiria belgorod bir bryansk buryatia cbg chel chelyabinsk chita chukotka chuvashia com dagestan e-burg edu gov grozny int irkutsk ivanovo izhevsk jar joshkar-ola kalmykia kaluga kamchatka karelia kazan kchr kemerovo khabarovsk khakassia khv kirov koenig komi kostroma kranoyarsk kuban kurgan kursk lipetsk magadan mari mari-el marine mil mordovia mosreg msk murmansk nalchik net nnov nov novosibirsk nsk omsk orenburg org oryol penza perm pp pskov ptz rnd ryazan sakhalin samara saratov simbirsk smolensk spb stavropol stv surgut tambov tatarstan tom tomsk tsaritsyn tsk tula tuva tver tyumen udm udmurtia ulan-ude vladikavkaz vladimir vladivostok volgograd vologda voronezh vrn vyatka yakutia yamal yekaterinburg yuzhno-sakhalinsk ",
|
||||||
|
rw:" ac co com edu gouv gov int mil net ",sa:" com edu gov med net org pub sch ",sd:" com edu gov info med net org tv ",se:" a ac b bd c d e f g h i k l m n o org p parti pp press r s t tm u w x y z ",sg:" com edu gov idn net org per ",sn:" art com edu gouv org perso univ ",sy:" com edu gov mil net news org ",th:" ac co go in mi net or ",tj:" ac biz co com edu go gov info int mil name net nic org test web ",tn:" agrinet com defense edunet ens fin gov ind info intl mincom nat net org perso rnrt rns rnu tourism ",
|
||||||
|
tz:" ac co go ne or ",ua:" biz cherkassy chernigov chernovtsy ck cn co com crimea cv dn dnepropetrovsk donetsk dp edu gov if in ivano-frankivsk kh kharkov kherson khmelnitskiy kiev kirovograd km kr ks kv lg lugansk lutsk lviv me mk net nikolaev od odessa org pl poltava pp rovno rv sebastopol sumy te ternopil uzhgorod vinnica vn zaporizhzhe zhitomir zp zt ",ug:" ac co go ne or org sc ",uk:" ac bl british-library co cym gov govt icnet jet lea ltd me mil mod national-library-scotland nel net nhs nic nls org orgn parliament plc police sch scot soc ",
|
||||||
|
us:" dni fed isa kids nsn ",uy:" com edu gub mil net org ",ve:" co com edu gob info mil net org web ",vi:" co com k12 net org ",vn:" ac biz com edu gov health info int name net org pro ",ye:" co com gov ltd me net org plc ",yu:" ac co edu gov org ",za:" ac agric alt bourse city co cybernet db edu gov grondar iaccess imt inca landesign law mil net ngo nis nom olivetti org pix school tm web ",zm:" ac co com edu gov net org sch "},has:function(f){var b=f.lastIndexOf(".");if(0>=b||b>=f.length-1)return!1;
|
||||||
|
var k=f.lastIndexOf(".",b-1);if(0>=k||k>=b-1)return!1;var l=g.list[f.slice(b+1)];return l?0<=l.indexOf(" "+f.slice(k+1,b)+" "):!1},is:function(f){var b=f.lastIndexOf(".");if(0>=b||b>=f.length-1||0<=f.lastIndexOf(".",b-1))return!1;var k=g.list[f.slice(b+1)];return k?0<=k.indexOf(" "+f.slice(0,b)+" "):!1},get:function(f){var b=f.lastIndexOf(".");if(0>=b||b>=f.length-1)return null;var k=f.lastIndexOf(".",b-1);if(0>=k||k>=b-1)return null;var l=g.list[f.slice(b+1)];return!l||0>l.indexOf(" "+f.slice(k+
|
||||||
|
1,b)+" ")?null:f.slice(k+1)},noConflict:function(){f.SecondLevelDomains===this&&(f.SecondLevelDomains=l);return this}};return g});(function(f,l){"object"===typeof exports?module.exports=l(require("./punycode"),require("./IPv6"),require("./SecondLevelDomains")):"function"===typeof define&&define.amd?define(["./punycode","./IPv6","./SecondLevelDomains"],l):f.URI=l(f.punycode,f.IPv6,f.SecondLevelDomains,f)})(this,function(f,l,g,m){function b(a,c){if(!(this instanceof b))return new b(a,c);void 0===a&&(a="undefined"!==typeof location?location.href+"":"");this.href(a);return void 0!==c?this.absoluteTo(c):this}function k(a){return a.replace(/([.*+?^=!:${}()|[\]\/\\])/g,
|
||||||
|
"\\$1")}function z(a){return void 0===a?"Undefined":String(Object.prototype.toString.call(a)).slice(8,-1)}function p(a){return"Array"===z(a)}function h(a,c){var d,b;if(p(c)){d=0;for(b=c.length;d<b;d++)if(!h(a,c[d]))return!1;return!0}var e=z(c);d=0;for(b=a.length;d<b;d++)if("RegExp"===e){if("string"===typeof a[d]&&a[d].match(c))return!0}else if(a[d]===c)return!0;return!1}function r(a,c){if(!p(a)||!p(c)||a.length!==c.length)return!1;a.sort();c.sort();for(var d=0,b=a.length;d<b;d++)if(a[d]!==c[d])return!1;
|
||||||
|
return!0}function C(a){return escape(a)}function D(a){return encodeURIComponent(a).replace(/[!'()*]/g,C).replace(/\*/g,"%2A")}function A(a){return function(c,d){if(void 0===c)return this._parts[a]||"";this._parts[a]=c||null;this.build(!d);return this}}function t(a,c){return function(d,b){if(void 0===d)return this._parts[a]||"";null!==d&&(d+="",d.charAt(0)===c&&(d=d.substring(1)));this._parts[a]=d;this.build(!b);return this}}var n=m&&m.URI;b.version="1.14.1";var e=b.prototype,v=Object.prototype.hasOwnProperty;
|
||||||
|
b._parts=function(){return{protocol:null,username:null,password:null,hostname:null,urn:null,port:null,path:null,query:null,fragment:null,duplicateQueryParameters:b.duplicateQueryParameters,escapeQuerySpace:b.escapeQuerySpace}};b.duplicateQueryParameters=!1;b.escapeQuerySpace=!0;b.protocol_expression=/^[a-z][a-z0-9.+-]*$/i;b.idn_expression=/[^a-z0-9\.-]/i;b.punycode_expression=/(xn--)/i;b.ip4_expression=/^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$/;b.ip6_expression=/^\s*((([0-9A-Fa-f]{1,4}:){7}([0-9A-Fa-f]{1,4}|:))|(([0-9A-Fa-f]{1,4}:){6}(:[0-9A-Fa-f]{1,4}|((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){5}(((:[0-9A-Fa-f]{1,4}){1,2})|:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){4}(((:[0-9A-Fa-f]{1,4}){1,3})|((:[0-9A-Fa-f]{1,4})?:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){3}(((:[0-9A-Fa-f]{1,4}){1,4})|((:[0-9A-Fa-f]{1,4}){0,2}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){2}(((:[0-9A-Fa-f]{1,4}){1,5})|((:[0-9A-Fa-f]{1,4}){0,3}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){1}(((:[0-9A-Fa-f]{1,4}){1,6})|((:[0-9A-Fa-f]{1,4}){0,4}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(:(((:[0-9A-Fa-f]{1,4}){1,7})|((:[0-9A-Fa-f]{1,4}){0,5}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:)))(%.+)?\s*$/;
|
||||||
|
b.find_uri_expression=/\b((?:[a-z][\w-]+:(?:\/{1,3}|[a-z0-9%])|www\d{0,3}[.]|[a-z0-9.\-]+[.][a-z]{2,4}\/)(?:[^\s()<>]+|\(([^\s()<>]+|(\([^\s()<>]+\)))*\))+(?:\(([^\s()<>]+|(\([^\s()<>]+\)))*\)|[^\s`!()\[\]{};:'".,<>?\u00ab\u00bb\u201c\u201d\u2018\u2019]))/ig;b.findUri={start:/\b(?:([a-z][a-z0-9.+-]*:\/\/)|www\.)/gi,end:/[\s\r\n]|$/,trim:/[`!()\[\]{};:'".,<>?\u00ab\u00bb\u201c\u201d\u201e\u2018\u2019]+$/};b.defaultPorts={http:"80",https:"443",ftp:"21",gopher:"70",ws:"80",wss:"443"};b.invalid_hostname_characters=
|
||||||
|
/[^a-zA-Z0-9\.-]/;b.domAttributes={a:"href",blockquote:"cite",link:"href",base:"href",script:"src",form:"action",img:"src",area:"href",iframe:"src",embed:"src",source:"src",track:"src",input:"src",audio:"src",video:"src"};b.getDomAttribute=function(a){if(a&&a.nodeName){var c=a.nodeName.toLowerCase();return"input"===c&&"image"!==a.type?void 0:b.domAttributes[c]}};b.encode=D;b.decode=decodeURIComponent;b.iso8859=function(){b.encode=escape;b.decode=unescape};b.unicode=function(){b.encode=D;b.decode=
|
||||||
|
decodeURIComponent};b.characters={pathname:{encode:{expression:/%(24|26|2B|2C|3B|3D|3A|40)/ig,map:{"%24":"$","%26":"&","%2B":"+","%2C":",","%3B":";","%3D":"=","%3A":":","%40":"@"}},decode:{expression:/[\/\?#]/g,map:{"/":"%2F","?":"%3F","#":"%23"}}},reserved:{encode:{expression:/%(21|23|24|26|27|28|29|2A|2B|2C|2F|3A|3B|3D|3F|40|5B|5D)/ig,map:{"%3A":":","%2F":"/","%3F":"?","%23":"#","%5B":"[","%5D":"]","%40":"@","%21":"!","%24":"$","%26":"&","%27":"'","%28":"(","%29":")","%2A":"*","%2B":"+","%2C":",",
|
||||||
|
"%3B":";","%3D":"="}}}};b.encodeQuery=function(a,c){var d=b.encode(a+"");void 0===c&&(c=b.escapeQuerySpace);return c?d.replace(/%20/g,"+"):d};b.decodeQuery=function(a,c){a+="";void 0===c&&(c=b.escapeQuerySpace);try{return b.decode(c?a.replace(/\+/g,"%20"):a)}catch(d){return a}};b.recodePath=function(a){a=(a+"").split("/");for(var c=0,d=a.length;c<d;c++)a[c]=b.encodePathSegment(b.decode(a[c]));return a.join("/")};b.decodePath=function(a){a=(a+"").split("/");for(var c=0,d=a.length;c<d;c++)a[c]=b.decodePathSegment(a[c]);
|
||||||
|
return a.join("/")};var w={encode:"encode",decode:"decode"},q,B=function(a,c){return function(d){try{return b[c](d+"").replace(b.characters[a][c].expression,function(d){return b.characters[a][c].map[d]})}catch(u){return d}}};for(q in w)b[q+"PathSegment"]=B("pathname",w[q]);b.encodeReserved=B("reserved","encode");b.parse=function(a,c){var d;c||(c={});d=a.indexOf("#");-1<d&&(c.fragment=a.substring(d+1)||null,a=a.substring(0,d));d=a.indexOf("?");-1<d&&(c.query=a.substring(d+1)||null,a=a.substring(0,
|
||||||
|
d));"//"===a.substring(0,2)?(c.protocol=null,a=a.substring(2),a=b.parseAuthority(a,c)):(d=a.indexOf(":"),-1<d&&(c.protocol=a.substring(0,d)||null,c.protocol&&!c.protocol.match(b.protocol_expression)?c.protocol=void 0:"//"===a.substring(d+1,d+3)?(a=a.substring(d+3),a=b.parseAuthority(a,c)):(a=a.substring(d+1),c.urn=!0)));c.path=a;return c};b.parseHost=function(a,c){var d=a.indexOf("/"),b;-1===d&&(d=a.length);"["===a.charAt(0)?(b=a.indexOf("]"),c.hostname=a.substring(1,b)||null,c.port=a.substring(b+
|
||||||
|
2,d)||null,"/"===c.port&&(c.port=null)):a.indexOf(":")!==a.lastIndexOf(":")?(c.hostname=a.substring(0,d)||null,c.port=null):(b=a.substring(0,d).split(":"),c.hostname=b[0]||null,c.port=b[1]||null);c.hostname&&"/"!==a.substring(d).charAt(0)&&(d++,a="/"+a);return a.substring(d)||"/"};b.parseAuthority=function(a,c){a=b.parseUserinfo(a,c);return b.parseHost(a,c)};b.parseUserinfo=function(a,c){var d=a.indexOf("/"),u=a.lastIndexOf("@",-1<d?d:a.length-1);-1<u&&(-1===d||u<d)?(d=a.substring(0,u).split(":"),
|
||||||
|
c.username=d[0]?b.decode(d[0]):null,d.shift(),c.password=d[0]?b.decode(d.join(":")):null,a=a.substring(u+1)):(c.username=null,c.password=null);return a};b.parseQuery=function(a,c){if(!a)return{};a=a.replace(/&+/g,"&").replace(/^\?*&*|&+$/g,"");if(!a)return{};for(var d={},u=a.split("&"),e=u.length,f,h,g=0;g<e;g++)f=u[g].split("="),h=b.decodeQuery(f.shift(),c),f=f.length?b.decodeQuery(f.join("="),c):null,v.call(d,h)?("string"===typeof d[h]&&(d[h]=[d[h]]),d[h].push(f)):d[h]=f;return d};b.build=function(a){var c=
|
||||||
|
"";a.protocol&&(c+=a.protocol+":");a.urn||!c&&!a.hostname||(c+="//");c+=b.buildAuthority(a)||"";"string"===typeof a.path&&("/"!==a.path.charAt(0)&&"string"===typeof a.hostname&&(c+="/"),c+=a.path);"string"===typeof a.query&&a.query&&(c+="?"+a.query);"string"===typeof a.fragment&&a.fragment&&(c+="#"+a.fragment);return c};b.buildHost=function(a){var c="";if(a.hostname)c=b.ip6_expression.test(a.hostname)?c+("["+a.hostname+"]"):c+a.hostname;else return"";a.port&&(c+=":"+a.port);return c};b.buildAuthority=
|
||||||
|
function(a){return b.buildUserinfo(a)+b.buildHost(a)};b.buildUserinfo=function(a){var c="";a.username&&(c+=b.encode(a.username),a.password&&(c+=":"+b.encode(a.password)),c+="@");return c};b.buildQuery=function(a,c,d){var u="",e,f,h,g;for(f in a)if(v.call(a,f)&&f)if(p(a[f]))for(e={},h=0,g=a[f].length;h<g;h++)void 0!==a[f][h]&&void 0===e[a[f][h]+""]&&(u+="&"+b.buildQueryParameter(f,a[f][h],d),!0!==c&&(e[a[f][h]+""]=!0));else void 0!==a[f]&&(u+="&"+b.buildQueryParameter(f,a[f],d));return u.substring(1)};
|
||||||
|
b.buildQueryParameter=function(a,c,d){return b.encodeQuery(a,d)+(null!==c?"="+b.encodeQuery(c,d):"")};b.addQuery=function(a,c,d){if("object"===typeof c)for(var e in c)v.call(c,e)&&b.addQuery(a,e,c[e]);else if("string"===typeof c)void 0===a[c]?a[c]=d:("string"===typeof a[c]&&(a[c]=[a[c]]),p(d)||(d=[d]),a[c]=(a[c]||[]).concat(d));else throw new TypeError("URI.addQuery() accepts an object, string as the name parameter");};b.removeQuery=function(a,c,d){var e;if(p(c))for(d=0,e=c.length;d<e;d++)a[c[d]]=
|
||||||
|
void 0;else if("object"===typeof c)for(e in c)v.call(c,e)&&b.removeQuery(a,e,c[e]);else if("string"===typeof c)if(void 0!==d)if(a[c]===d)a[c]=void 0;else{if(p(a[c])){e=a[c];var f={},h,g;if(p(d))for(h=0,g=d.length;h<g;h++)f[d[h]]=!0;else f[d]=!0;h=0;for(g=e.length;h<g;h++)void 0!==f[e[h]]&&(e.splice(h,1),g--,h--);a[c]=e}}else a[c]=void 0;else throw new TypeError("URI.addQuery() accepts an object, string as the first parameter");};b.hasQuery=function(a,c,d,e){if("object"===typeof c){for(var f in c)if(v.call(c,
|
||||||
|
f)&&!b.hasQuery(a,f,c[f]))return!1;return!0}if("string"!==typeof c)throw new TypeError("URI.hasQuery() accepts an object, string as the name parameter");switch(z(d)){case "Undefined":return c in a;case "Boolean":return a=Boolean(p(a[c])?a[c].length:a[c]),d===a;case "Function":return!!d(a[c],c,a);case "Array":return p(a[c])?(e?h:r)(a[c],d):!1;case "RegExp":return p(a[c])?e?h(a[c],d):!1:Boolean(a[c]&&a[c].match(d));case "Number":d=String(d);case "String":return p(a[c])?e?h(a[c],d):!1:a[c]===d;default:throw new TypeError("URI.hasQuery() accepts undefined, boolean, string, number, RegExp, Function as the value parameter");
|
||||||
|
}};b.commonPath=function(a,c){var d=Math.min(a.length,c.length),b;for(b=0;b<d;b++)if(a.charAt(b)!==c.charAt(b)){b--;break}if(1>b)return a.charAt(0)===c.charAt(0)&&"/"===a.charAt(0)?"/":"";if("/"!==a.charAt(b)||"/"!==c.charAt(b))b=a.substring(0,b).lastIndexOf("/");return a.substring(0,b+1)};b.withinString=function(a,c,d){d||(d={});var e=d.start||b.findUri.start,f=d.end||b.findUri.end,h=d.trim||b.findUri.trim,g=/[a-z0-9-]=["']?$/i;for(e.lastIndex=0;;){var r=e.exec(a);if(!r)break;r=r.index;if(d.ignoreHtml){var k=
|
||||||
|
a.slice(Math.max(r-3,0),r);if(k&&g.test(k))continue}var k=r+a.slice(r).search(f),m=a.slice(r,k).replace(h,"");d.ignore&&d.ignore.test(m)||(k=r+m.length,m=c(m,r,k,a),a=a.slice(0,r)+m+a.slice(k),e.lastIndex=r+m.length)}e.lastIndex=0;return a};b.ensureValidHostname=function(a){if(a.match(b.invalid_hostname_characters)){if(!f)throw new TypeError('Hostname "'+a+'" contains characters other than [A-Z0-9.-] and Punycode.js is not available');if(f.toASCII(a).match(b.invalid_hostname_characters))throw new TypeError('Hostname "'+
|
||||||
|
a+'" contains characters other than [A-Z0-9.-]');}};b.noConflict=function(a){if(a)return a={URI:this.noConflict()},m.URITemplate&&"function"===typeof m.URITemplate.noConflict&&(a.URITemplate=m.URITemplate.noConflict()),m.IPv6&&"function"===typeof m.IPv6.noConflict&&(a.IPv6=m.IPv6.noConflict()),m.SecondLevelDomains&&"function"===typeof m.SecondLevelDomains.noConflict&&(a.SecondLevelDomains=m.SecondLevelDomains.noConflict()),a;m.URI===this&&(m.URI=n);return this};e.build=function(a){if(!0===a)this._deferred_build=
|
||||||
|
!0;else if(void 0===a||this._deferred_build)this._string=b.build(this._parts),this._deferred_build=!1;return this};e.clone=function(){return new b(this)};e.valueOf=e.toString=function(){return this.build(!1)._string};e.protocol=A("protocol");e.username=A("username");e.password=A("password");e.hostname=A("hostname");e.port=A("port");e.query=t("query","?");e.fragment=t("fragment","#");e.search=function(a,c){var d=this.query(a,c);return"string"===typeof d&&d.length?"?"+d:d};e.hash=function(a,c){var d=
|
||||||
|
this.fragment(a,c);return"string"===typeof d&&d.length?"#"+d:d};e.pathname=function(a,c){if(void 0===a||!0===a){var d=this._parts.path||(this._parts.hostname?"/":"");return a?b.decodePath(d):d}this._parts.path=a?b.recodePath(a):"/";this.build(!c);return this};e.path=e.pathname;e.href=function(a,c){var d;if(void 0===a)return this.toString();this._string="";this._parts=b._parts();var e=a instanceof b,f="object"===typeof a&&(a.hostname||a.path||a.pathname);a.nodeName&&(f=b.getDomAttribute(a),a=a[f]||
|
||||||
|
"",f=!1);!e&&f&&void 0!==a.pathname&&(a=a.toString());if("string"===typeof a||a instanceof String)this._parts=b.parse(String(a),this._parts);else if(e||f)for(d in e=e?a._parts:a,e)v.call(this._parts,d)&&(this._parts[d]=e[d]);else throw new TypeError("invalid input");this.build(!c);return this};e.is=function(a){var c=!1,d=!1,e=!1,f=!1,h=!1,r=!1,k=!1,m=!this._parts.urn;this._parts.hostname&&(m=!1,d=b.ip4_expression.test(this._parts.hostname),e=b.ip6_expression.test(this._parts.hostname),c=d||e,h=(f=
|
||||||
|
!c)&&g&&g.has(this._parts.hostname),r=f&&b.idn_expression.test(this._parts.hostname),k=f&&b.punycode_expression.test(this._parts.hostname));switch(a.toLowerCase()){case "relative":return m;case "absolute":return!m;case "domain":case "name":return f;case "sld":return h;case "ip":return c;case "ip4":case "ipv4":case "inet4":return d;case "ip6":case "ipv6":case "inet6":return e;case "idn":return r;case "url":return!this._parts.urn;case "urn":return!!this._parts.urn;case "punycode":return k}return null};
|
||||||
|
var E=e.protocol,F=e.port,G=e.hostname;e.protocol=function(a,c){if(void 0!==a&&a&&(a=a.replace(/:(\/\/)?$/,""),!a.match(b.protocol_expression)))throw new TypeError('Protocol "'+a+"\" contains characters other than [A-Z0-9.+-] or doesn't start with [A-Z]");return E.call(this,a,c)};e.scheme=e.protocol;e.port=function(a,c){if(this._parts.urn)return void 0===a?"":this;if(void 0!==a&&(0===a&&(a=null),a&&(a+="",":"===a.charAt(0)&&(a=a.substring(1)),a.match(/[^0-9]/))))throw new TypeError('Port "'+a+'" contains characters other than [0-9]');
|
||||||
|
return F.call(this,a,c)};e.hostname=function(a,c){if(this._parts.urn)return void 0===a?"":this;if(void 0!==a){var d={};b.parseHost(a,d);a=d.hostname}return G.call(this,a,c)};e.host=function(a,c){if(this._parts.urn)return void 0===a?"":this;if(void 0===a)return this._parts.hostname?b.buildHost(this._parts):"";b.parseHost(a,this._parts);this.build(!c);return this};e.authority=function(a,c){if(this._parts.urn)return void 0===a?"":this;if(void 0===a)return this._parts.hostname?b.buildAuthority(this._parts):
"";b.parseAuthority(a,this._parts);this.build(!c);return this};e.userinfo=function(a,c){if(this._parts.urn)return void 0===a?"":this;if(void 0===a){if(!this._parts.username)return"";var d=b.buildUserinfo(this._parts);return d.substring(0,d.length-1)}"@"!==a[a.length-1]&&(a+="@");b.parseUserinfo(a,this._parts);this.build(!c);return this};e.resource=function(a,c){var d;if(void 0===a)return this.path()+this.search()+this.hash();d=b.parse(a);this._parts.path=d.path;this._parts.query=d.query;this._parts.fragment=
d.fragment;this.build(!c);return this};e.subdomain=function(a,c){if(this._parts.urn)return void 0===a?"":this;if(void 0===a){if(!this._parts.hostname||this.is("IP"))return"";var d=this._parts.hostname.length-this.domain().length-1;return this._parts.hostname.substring(0,d)||""}d=this._parts.hostname.length-this.domain().length;d=this._parts.hostname.substring(0,d);d=new RegExp("^"+k(d));a&&"."!==a.charAt(a.length-1)&&(a+=".");a&&b.ensureValidHostname(a);this._parts.hostname=this._parts.hostname.replace(d,
a);this.build(!c);return this};e.domain=function(a,c){if(this._parts.urn)return void 0===a?"":this;"boolean"===typeof a&&(c=a,a=void 0);if(void 0===a){if(!this._parts.hostname||this.is("IP"))return"";var d=this._parts.hostname.match(/\./g);if(d&&2>d.length)return this._parts.hostname;d=this._parts.hostname.length-this.tld(c).length-1;d=this._parts.hostname.lastIndexOf(".",d-1)+1;return this._parts.hostname.substring(d)||""}if(!a)throw new TypeError("cannot set domain empty");b.ensureValidHostname(a);
!this._parts.hostname||this.is("IP")?this._parts.hostname=a:(d=new RegExp(k(this.domain())+"$"),this._parts.hostname=this._parts.hostname.replace(d,a));this.build(!c);return this};e.tld=function(a,c){if(this._parts.urn)return void 0===a?"":this;"boolean"===typeof a&&(c=a,a=void 0);if(void 0===a){if(!this._parts.hostname||this.is("IP"))return"";var d=this._parts.hostname.lastIndexOf("."),d=this._parts.hostname.substring(d+1);return!0!==c&&g&&g.list[d.toLowerCase()]?g.get(this._parts.hostname)||d:d}if(a)if(a.match(/[^a-zA-Z0-9-]/))if(g&&
g.is(a))d=new RegExp(k(this.tld())+"$"),this._parts.hostname=this._parts.hostname.replace(d,a);else throw new TypeError('TLD "'+a+'" contains characters other than [A-Z0-9]');else{if(!this._parts.hostname||this.is("IP"))throw new ReferenceError("cannot set TLD on non-domain host");d=new RegExp(k(this.tld())+"$");this._parts.hostname=this._parts.hostname.replace(d,a)}else throw new TypeError("cannot set TLD empty");this.build(!c);return this};e.directory=function(a,c){if(this._parts.urn)return void 0===
a?"":this;if(void 0===a||!0===a){if(!this._parts.path&&!this._parts.hostname)return"";if("/"===this._parts.path)return"/";var d=this._parts.path.length-this.filename().length-1,d=this._parts.path.substring(0,d)||(this._parts.hostname?"/":"");return a?b.decodePath(d):d}d=this._parts.path.length-this.filename().length;d=this._parts.path.substring(0,d);d=new RegExp("^"+k(d));this.is("relative")||(a||(a="/"),"/"!==a.charAt(0)&&(a="/"+a));a&&"/"!==a.charAt(a.length-1)&&(a+="/");a=b.recodePath(a);this._parts.path=
this._parts.path.replace(d,a);this.build(!c);return this};e.filename=function(a,c){if(this._parts.urn)return void 0===a?"":this;if(void 0===a||!0===a){if(!this._parts.path||"/"===this._parts.path)return"";var d=this._parts.path.lastIndexOf("/"),d=this._parts.path.substring(d+1);return a?b.decodePathSegment(d):d}d=!1;"/"===a.charAt(0)&&(a=a.substring(1));a.match(/\.?\//)&&(d=!0);var e=new RegExp(k(this.filename())+"$");a=b.recodePath(a);this._parts.path=this._parts.path.replace(e,a);d?this.normalizePath(c):
this.build(!c);return this};e.suffix=function(a,c){if(this._parts.urn)return void 0===a?"":this;if(void 0===a||!0===a){if(!this._parts.path||"/"===this._parts.path)return"";var d=this.filename(),e=d.lastIndexOf(".");if(-1===e)return"";d=d.substring(e+1);d=/^[a-z0-9%]+$/i.test(d)?d:"";return a?b.decodePathSegment(d):d}"."===a.charAt(0)&&(a=a.substring(1));if(d=this.suffix())e=a?new RegExp(k(d)+"$"):new RegExp(k("."+d)+"$");else{if(!a)return this;this._parts.path+="."+b.recodePath(a)}e&&(a=b.recodePath(a),
this._parts.path=this._parts.path.replace(e,a));this.build(!c);return this};e.segment=function(a,c,d){var b=this._parts.urn?":":"/",e=this.path(),f="/"===e.substring(0,1),e=e.split(b);void 0!==a&&"number"!==typeof a&&(d=c,c=a,a=void 0);if(void 0!==a&&"number"!==typeof a)throw Error('Bad segment "'+a+'", must be 0-based integer');f&&e.shift();0>a&&(a=Math.max(e.length+a,0));if(void 0===c)return void 0===a?e:e[a];if(null===a||void 0===e[a])if(p(c)){e=[];a=0;for(var h=c.length;a<h;a++)if(c[a].length||
e.length&&e[e.length-1].length)e.length&&!e[e.length-1].length&&e.pop(),e.push(c[a])}else{if(c||"string"===typeof c)""===e[e.length-1]?e[e.length-1]=c:e.push(c)}else c?e[a]=c:e.splice(a,1);f&&e.unshift("");return this.path(e.join(b),d)};e.segmentCoded=function(a,c,d){var e,f;"number"!==typeof a&&(d=c,c=a,a=void 0);if(void 0===c){a=this.segment(a,c,d);if(p(a))for(e=0,f=a.length;e<f;e++)a[e]=b.decode(a[e]);else a=void 0!==a?b.decode(a):void 0;return a}if(p(c))for(e=0,f=c.length;e<f;e++)c[e]=b.decode(c[e]);
else c="string"===typeof c||c instanceof String?b.encode(c):c;return this.segment(a,c,d)};var H=e.query;e.query=function(a,c){if(!0===a)return b.parseQuery(this._parts.query,this._parts.escapeQuerySpace);if("function"===typeof a){var d=b.parseQuery(this._parts.query,this._parts.escapeQuerySpace),e=a.call(this,d);this._parts.query=b.buildQuery(e||d,this._parts.duplicateQueryParameters,this._parts.escapeQuerySpace);this.build(!c);return this}return void 0!==a&&"string"!==typeof a?(this._parts.query=
b.buildQuery(a,this._parts.duplicateQueryParameters,this._parts.escapeQuerySpace),this.build(!c),this):H.call(this,a,c)};e.setQuery=function(a,c,d){var e=b.parseQuery(this._parts.query,this._parts.escapeQuerySpace);if("string"===typeof a||a instanceof String)e[a]=void 0!==c?c:null;else if("object"===typeof a)for(var f in a)v.call(a,f)&&(e[f]=a[f]);else throw new TypeError("URI.addQuery() accepts an object, string as the name parameter");this._parts.query=b.buildQuery(e,this._parts.duplicateQueryParameters,
this._parts.escapeQuerySpace);"string"!==typeof a&&(d=c);this.build(!d);return this};e.addQuery=function(a,c,d){var e=b.parseQuery(this._parts.query,this._parts.escapeQuerySpace);b.addQuery(e,a,void 0===c?null:c);this._parts.query=b.buildQuery(e,this._parts.duplicateQueryParameters,this._parts.escapeQuerySpace);"string"!==typeof a&&(d=c);this.build(!d);return this};e.removeQuery=function(a,c,d){var e=b.parseQuery(this._parts.query,this._parts.escapeQuerySpace);b.removeQuery(e,a,c);this._parts.query=
b.buildQuery(e,this._parts.duplicateQueryParameters,this._parts.escapeQuerySpace);"string"!==typeof a&&(d=c);this.build(!d);return this};e.hasQuery=function(a,c,d){var e=b.parseQuery(this._parts.query,this._parts.escapeQuerySpace);return b.hasQuery(e,a,c,d)};e.setSearch=e.setQuery;e.addSearch=e.addQuery;e.removeSearch=e.removeQuery;e.hasSearch=e.hasQuery;e.normalize=function(){return this._parts.urn?this.normalizeProtocol(!1).normalizeQuery(!1).normalizeFragment(!1).build():this.normalizeProtocol(!1).normalizeHostname(!1).normalizePort(!1).normalizePath(!1).normalizeQuery(!1).normalizeFragment(!1).build()};
e.normalizeProtocol=function(a){"string"===typeof this._parts.protocol&&(this._parts.protocol=this._parts.protocol.toLowerCase(),this.build(!a));return this};e.normalizeHostname=function(a){this._parts.hostname&&(this.is("IDN")&&f?this._parts.hostname=f.toASCII(this._parts.hostname):this.is("IPv6")&&l&&(this._parts.hostname=l.best(this._parts.hostname)),this._parts.hostname=this._parts.hostname.toLowerCase(),this.build(!a));return this};e.normalizePort=function(a){"string"===typeof this._parts.protocol&&
this._parts.port===b.defaultPorts[this._parts.protocol]&&(this._parts.port=null,this.build(!a));return this};e.normalizePath=function(a){if(this._parts.urn||!this._parts.path||"/"===this._parts.path)return this;var c,d=this._parts.path,e="",f,h;"/"!==d.charAt(0)&&(c=!0,d="/"+d);d=d.replace(/(\/(\.\/)+)|(\/\.$)/g,"/").replace(/\/{2,}/g,"/");c&&(e=d.substring(1).match(/^(\.\.\/)+/)||"")&&(e=e[0]);for(;;){f=d.indexOf("/..");if(-1===f)break;else if(0===f){d=d.substring(3);continue}h=d.substring(0,f).lastIndexOf("/");
-1===h&&(h=f);d=d.substring(0,h)+d.substring(f+3)}c&&this.is("relative")&&(d=e+d.substring(1));d=b.recodePath(d);this._parts.path=d;this.build(!a);return this};e.normalizePathname=e.normalizePath;e.normalizeQuery=function(a){"string"===typeof this._parts.query&&(this._parts.query.length?this.query(b.parseQuery(this._parts.query,this._parts.escapeQuerySpace)):this._parts.query=null,this.build(!a));return this};e.normalizeFragment=function(a){this._parts.fragment||(this._parts.fragment=null,this.build(!a));
return this};e.normalizeSearch=e.normalizeQuery;e.normalizeHash=e.normalizeFragment;e.iso8859=function(){var a=b.encode,c=b.decode;b.encode=escape;b.decode=decodeURIComponent;this.normalize();b.encode=a;b.decode=c;return this};e.unicode=function(){var a=b.encode,c=b.decode;b.encode=D;b.decode=unescape;this.normalize();b.encode=a;b.decode=c;return this};e.readable=function(){var a=this.clone();a.username("").password("").normalize();var c="";a._parts.protocol&&(c+=a._parts.protocol+"://");a._parts.hostname&&
(a.is("punycode")&&f?(c+=f.toUnicode(a._parts.hostname),a._parts.port&&(c+=":"+a._parts.port)):c+=a.host());a._parts.hostname&&a._parts.path&&"/"!==a._parts.path.charAt(0)&&(c+="/");c+=a.path(!0);if(a._parts.query){for(var d="",e=0,h=a._parts.query.split("&"),g=h.length;e<g;e++){var r=(h[e]||"").split("="),d=d+("&"+b.decodeQuery(r[0],this._parts.escapeQuerySpace).replace(/&/g,"%26"));void 0!==r[1]&&(d+="="+b.decodeQuery(r[1],this._parts.escapeQuerySpace).replace(/&/g,"%26"))}c+="?"+d.substring(1)}return c+=
b.decodeQuery(a.hash(),!0)};e.absoluteTo=function(a){var c=this.clone(),d=["protocol","username","password","hostname","port"],e,f;if(this._parts.urn)throw Error("URNs do not have any generally defined hierarchical components");a instanceof b||(a=new b(a));c._parts.protocol||(c._parts.protocol=a._parts.protocol);if(this._parts.hostname)return c;for(e=0;f=d[e];e++)c._parts[f]=a._parts[f];c._parts.path?".."===c._parts.path.substring(-2)&&(c._parts.path+="/"):(c._parts.path=a._parts.path,c._parts.query||
(c._parts.query=a._parts.query));"/"!==c.path().charAt(0)&&(a=a.directory(),c._parts.path=(a?a+"/":"")+c._parts.path,c.normalizePath());c.build();return c};e.relativeTo=function(a){var c=this.clone().normalize(),d,e,f,h;if(c._parts.urn)throw Error("URNs do not have any generally defined hierarchical components");a=(new b(a)).normalize();d=c._parts;e=a._parts;f=c.path();h=a.path();if("/"!==f.charAt(0))throw Error("URI is already relative");if("/"!==h.charAt(0))throw Error("Cannot calculate a URI relative to another relative URI");
d.protocol===e.protocol&&(d.protocol=null);if(d.username===e.username&&d.password===e.password&&null===d.protocol&&null===d.username&&null===d.password&&d.hostname===e.hostname&&d.port===e.port)d.hostname=null,d.port=null;else return c.build();if(f===h)return d.path="",c.build();a=b.commonPath(c.path(),a.path());if(!a)return c.build();e=e.path.substring(a.length).replace(/[^\/]*$/,"").replace(/.*?\//g,"../");d.path=e+d.path.substring(a.length);return c.build()};e.equals=function(a){var c=this.clone();
a=new b(a);var d={},e={},f={},h;c.normalize();a.normalize();if(c.toString()===a.toString())return!0;d=c.query();e=a.query();c.query("");a.query("");if(c.toString()!==a.toString()||d.length!==e.length)return!1;d=b.parseQuery(d,this._parts.escapeQuerySpace);e=b.parseQuery(e,this._parts.escapeQuerySpace);for(h in d)if(v.call(d,h)){if(!p(d[h])){if(d[h]!==e[h])return!1}else if(!r(d[h],e[h]))return!1;f[h]=!0}for(h in e)if(v.call(e,h)&&!f[h])return!1;return!0};e.duplicateQueryParameters=function(a){this._parts.duplicateQueryParameters=
!!a;return this};e.escapeQuerySpace=function(a){this._parts.escapeQuerySpace=!!a;return this};return b});(function(f,l){"object"===typeof exports?module.exports=l(require("./URI")):"function"===typeof define&&define.amd?define(["./URI"],l):f.URITemplate=l(f.URI,f)})(this,function(f,l){function g(b){if(g._cache[b])return g._cache[b];if(!(this instanceof g))return new g(b);this.expression=b;g._cache[b]=this;return this}function m(b){this.data=b;this.cache={}}var b=l&&l.URITemplate,k=Object.prototype.hasOwnProperty,z=g.prototype,p={"":{prefix:"",separator:",",named:!1,empty_name_separator:!1,encode:"encode"},
"+":{prefix:"",separator:",",named:!1,empty_name_separator:!1,encode:"encodeReserved"},"#":{prefix:"#",separator:",",named:!1,empty_name_separator:!1,encode:"encodeReserved"},".":{prefix:".",separator:".",named:!1,empty_name_separator:!1,encode:"encode"},"/":{prefix:"/",separator:"/",named:!1,empty_name_separator:!1,encode:"encode"},";":{prefix:";",separator:";",named:!0,empty_name_separator:!1,encode:"encode"},"?":{prefix:"?",separator:"&",named:!0,empty_name_separator:!0,encode:"encode"},"&":{prefix:"&",
separator:"&",named:!0,empty_name_separator:!0,encode:"encode"}};g._cache={};g.EXPRESSION_PATTERN=/\{([^a-zA-Z0-9%_]?)([^\}]+)(\}|$)/g;g.VARIABLE_PATTERN=/^([^*:]+)((\*)|:(\d+))?$/;g.VARIABLE_NAME_PATTERN=/[^a-zA-Z0-9%_]/;g.expand=function(b,f){var k=p[b.operator],m=k.named?"Named":"Unnamed",l=b.variables,t=[],n,e,v;for(v=0;e=l[v];v++)n=f.get(e.name),n.val.length?t.push(g["expand"+m](n,k,e.explode,e.explode&&k.separator||",",e.maxlength,e.name)):n.type&&t.push("");return t.length?k.prefix+t.join(k.separator):
""};g.expandNamed=function(b,g,k,m,l,t){var n="",e=g.encode;g=g.empty_name_separator;var v=!b[e].length,w=2===b.type?"":f[e](t),q,p,z;p=0;for(z=b.val.length;p<z;p++)l?(q=f[e](b.val[p][1].substring(0,l)),2===b.type&&(w=f[e](b.val[p][0].substring(0,l)))):v?(q=f[e](b.val[p][1]),2===b.type?(w=f[e](b.val[p][0]),b[e].push([w,q])):b[e].push([void 0,q])):(q=b[e][p][1],2===b.type&&(w=b[e][p][0])),n&&(n+=m),k?n+=w+(g||q?"=":"")+q:(p||(n+=f[e](t)+(g||q?"=":"")),2===b.type&&(n+=w+","),n+=q);return n};g.expandUnnamed=
function(b,g,k,m,l){var t="",n=g.encode;g=g.empty_name_separator;var e=!b[n].length,p,w,q,z;q=0;for(z=b.val.length;q<z;q++)l?w=f[n](b.val[q][1].substring(0,l)):e?(w=f[n](b.val[q][1]),b[n].push([2===b.type?f[n](b.val[q][0]):void 0,w])):w=b[n][q][1],t&&(t+=m),2===b.type&&(p=l?f[n](b.val[q][0].substring(0,l)):b[n][q][0],t+=p,t=k?t+(g||w?"=":""):t+","),t+=w;return t};g.noConflict=function(){l.URITemplate===g&&(l.URITemplate=b);return g};z.expand=function(b){var f="";this.parts&&this.parts.length||this.parse();
b instanceof m||(b=new m(b));for(var k=0,l=this.parts.length;k<l;k++)f+="string"===typeof this.parts[k]?this.parts[k]:g.expand(this.parts[k],b);return f};z.parse=function(){var b=this.expression,f=g.EXPRESSION_PATTERN,k=g.VARIABLE_PATTERN,m=g.VARIABLE_NAME_PATTERN,l=[],t=0,n,e,v;for(f.lastIndex=0;;){e=f.exec(b);if(null===e){l.push(b.substring(t));break}else l.push(b.substring(t,e.index)),t=e.index+e[0].length;if(!p[e[1]])throw Error('Unknown Operator "'+e[1]+'" in "'+e[0]+'"');if(!e[3])throw Error('Unclosed Expression "'+
e[0]+'"');n=e[2].split(",");for(var w=0,q=n.length;w<q;w++){v=n[w].match(k);if(null===v)throw Error('Invalid Variable "'+n[w]+'" in "'+e[0]+'"');if(v[1].match(m))throw Error('Invalid Variable Name "'+v[1]+'" in "'+e[0]+'"');n[w]={name:v[1],explode:!!v[3],maxlength:v[4]&&parseInt(v[4],10)}}if(!n.length)throw Error('Expression Missing Variable(s) "'+e[0]+'"');l.push({expression:e[0],operator:e[1],variables:n})}l.length||l.push(b);this.parts=l;return this};m.prototype.get=function(b){var f=this.data,
g={type:0,val:[],encode:[],encodeReserved:[]},l;if(void 0!==this.cache[b])return this.cache[b];this.cache[b]=g;f="[object Function]"===String(Object.prototype.toString.call(f))?f(b):"[object Function]"===String(Object.prototype.toString.call(f[b]))?f[b](b):f[b];if(void 0!==f&&null!==f)if("[object Array]"===String(Object.prototype.toString.call(f))){l=0;for(b=f.length;l<b;l++)void 0!==f[l]&&null!==f[l]&&g.val.push([void 0,String(f[l])]);g.val.length&&(g.type=3)}else if("[object Object]"===String(Object.prototype.toString.call(f))){for(l in f)k.call(f,
l)&&void 0!==f[l]&&null!==f[l]&&g.val.push([l,String(f[l])]);g.val.length&&(g.type=2)}else g.type=1,g.val.push([void 0,String(f)]);return g};f.expand=function(b,k){var l=(new g(b)).expand(k);return new f(l)};return g});
2
static/lib/angular-file-upload-html5-shim.min.js
vendored
Normal file
@@ -0,0 +1,2 @@
/*! 1.4.0 */
window.XMLHttpRequest&&window.FormData&&(XMLHttpRequest=function(a){return function(){var b=new a;return b.setRequestHeader=function(a){return function(c,d){if("__setXHR_"===c){var e=d(b);e instanceof Function&&e(b)}else a.apply(b,arguments)}}(b.setRequestHeader),b}}(XMLHttpRequest),window.XMLHttpRequest.__isShim=!0);
2
static/lib/angular-file-upload.min.js
vendored
Normal file
@@ -0,0 +1,2 @@
/*! 1.4.0 */
!function(){var a=angular.module("angularFileUpload",[]);a.service("$upload",["$http","$timeout",function(a,b){function c(c){c.method=c.method||"POST",c.headers=c.headers||{},c.transformRequest=c.transformRequest||function(b,c){return window.ArrayBuffer&&b instanceof window.ArrayBuffer?b:a.defaults.transformRequest[0](b,c)},window.XMLHttpRequest.__isShim&&(c.headers.__setXHR_=function(){return function(a){a&&(c.__XHR=a,c.xhrFn&&c.xhrFn(a),a.upload.addEventListener("progress",function(a){c.progress&&b(function(){c.progress&&c.progress(a)})},!1),a.upload.addEventListener("load",function(a){a.lengthComputable&&c.progress&&c.progress(a)},!1))}});var d=a(c);return d.progress=function(a){return c.progress=a,d},d.abort=function(){return c.__XHR&&b(function(){c.__XHR.abort()}),d},d.xhr=function(a){return c.xhrFn=a,d},d.then=function(a,b){return function(d,e,f){c.progress=f||c.progress;var g=b.apply(a,[d,e,f]);return g.abort=a.abort,g.progress=a.progress,g.xhr=a.xhr,g.then=a.then,g}}(d,d.then),d}this.upload=function(b){b.headers=b.headers||{},b.headers["Content-Type"]=void 0,b.transformRequest=b.transformRequest||a.defaults.transformRequest;var d=new FormData,e=b.transformRequest,f=b.data;return b.transformRequest=function(a,c){if(f)if(b.formDataAppender)for(var d in f){var g=f[d];b.formDataAppender(a,d,g)}else for(var d in f){var g=f[d];if("function"==typeof e)g=e(g,c);else for(var h=0;h<e.length;h++){var i=e[h];"function"==typeof i&&(g=i(g,c))}a.append(d,g)}if(null!=b.file){var j=b.fileFormDataName||"file";if("[object Array]"===Object.prototype.toString.call(b.file))for(var k="[object String]"===Object.prototype.toString.call(j),h=0;h<b.file.length;h++)a.append(k?j+h:j[h],b.file[h],b.file[h].name);else a.append(j,b.file,b.file.name)}return a},b.data=d,c(b)},this.http=function(a){return c(a)}}]),a.directive("ngFileSelect",["$parse","$timeout",function(a,b){return function(c,d,e){var f=a(e.ngFileSelect);d.bind("change",function(a){var d,e,g=[];if(d=a.target.files,null!=d)for(e=0;e<d.length;e++)g.push(d.item(e));b(function(){f(c,{$files:g,$event:a})})}),("ontouchstart"in window||navigator.maxTouchPoints>0||navigator.msMaxTouchPoints>0)&&d.bind("touchend",function(a){a.preventDefault(),a.target.click()})}}]),a.directive("ngFileDropAvailable",["$parse","$timeout",function(a,b){return function(c,d,e){if("draggable"in document.createElement("span")){var f=a(e.ngFileDropAvailable);b(function(){f(c)})}}}]),a.directive("ngFileDrop",["$parse","$timeout",function(a,b){return function(c,d,e){function f(a,b){if(b.isDirectory){var c=b.createReader();i++,c.readEntries(function(b){for(var c=0;c<b.length;c++)f(a,b[c]);i--})}else i++,b.file(function(b){i--,a.push(b)})}if("draggable"in document.createElement("span")){var g=null,h=a(e.ngFileDrop);d[0].addEventListener("dragover",function(a){b.cancel(g),a.stopPropagation(),a.preventDefault(),d.addClass(e.ngFileDragOverClass||"dragover")},!1),d[0].addEventListener("dragleave",function(){g=b(function(){d.removeClass(e.ngFileDragOverClass||"dragover")})},!1);var i=0;d[0].addEventListener("drop",function(a){a.stopPropagation(),a.preventDefault(),d.removeClass(e.ngFileDragOverClass||"dragover");var g=[],j=a.dataTransfer.items;if(j&&j.length>0&&j[0].webkitGetAsEntry)for(var k=0;k<j.length;k++)f(g,j[k].webkitGetAsEntry());else{var l=a.dataTransfer.files;if(null!=l)for(var k=0;k<l.length;k++)g.push(l.item(k))}!function m(d){b(function(){i?m(10):h(c,{$files:g,$event:a})},d||0)}()},!1)}}}])}();
Some files were not shown because too many files have changed in this diff.