Use $QUAYPATH and $QUAYDIR in conf and init files
parent 334a08d90b
commit cdb3722c17

59 changed files with 341 additions and 225 deletions
@@ -64,6 +64,7 @@ karma-tests:
   variables:
     GIT_STRATEGY: none
     PYTHONPATH: .
+    QUAYDIR: /quay-registry
     TEST: 'true'
 mysql:
   before_script:
@@ -86,6 +87,7 @@ mysql:
     MYSQL_ROOT_PASSWORD: quay
     MYSQL_USER: quay
     PYTHONPATH: .
+    QUAYDIR: /quay-registry
     SKIP_DB_SCHEMA: 'true'
     TEST: 'true'
     TEST_DATABASE_URI: mysql+pymysql://quay:quay@localhost/quay
@@ -108,6 +110,7 @@ postgres:
     POSTGRES_PASSWORD: quay
     POSTGRES_USER: quay
     PYTHONPATH: .
+    QUAYDIR: /quay-registry
     SKIP_DB_SCHEMA: 'true'
     TEST: 'true'
     TEST_DATABASE_URI: postgresql://quay:quay@localhost/quay
@@ -124,6 +127,7 @@ registry-tests:
   variables:
     GIT_STRATEGY: none
     PYTHONPATH: .
+    QUAYDIR: /quay-registry
     TEST: 'true'
 stages:
 - docker_base
@@ -145,6 +149,7 @@ unit-tests:
   variables:
     GIT_STRATEGY: none
     PYTHONPATH: .
+    QUAYDIR: /quay-registry
     TEST: 'true'
 variables:
   FAILFASTCI_NAMESPACE: quay
@@ -23,6 +23,7 @@ function(vars={})
   variables: {
     TEST: "true",
     PYTHONPATH: ".",
+    QUAYDIR: "/quay-registry",
     GIT_STRATEGY: "none",
   },
   before_script: [
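All five CI jobs, and the jsonnet template they are generated from, pick up the same new variable, QUAYDIR: /quay-registry. As a minimal sketch (not part of the diff), code running inside those jobs can resolve the directory the same way the application code later in this commit does, falling back to the filesystem root that the old layout assumed:

    import os

    # Sketch only; mirrors the lookups added in _init.py and
    # conf/init/nginx_conf_create.py later in this commit.
    quaydir = os.getenv("QUAYDIR", "/")
    quayconf = os.getenv("QUAYCONF", os.path.join(quaydir, "conf"))
    # In the CI jobs above these resolve to /quay-registry and /quay-registry/conf.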

Dockerfile | 26
@@ -4,6 +4,12 @@ FROM phusion/baseimage:0.9.19
 
 ENV DEBIAN_FRONTEND noninteractive
 ENV HOME /root
+ENV QUAYCONF /quay/conf
+ENV QUAYDIR /quay
+ENV QUAYPATH "."
+
+RUN mkdir $QUAYDIR
+WORKDIR $QUAYDIR
 
 # This is so we don't break http golang/go#17066
 # When Ubuntu has nginx >= 1.11.0 we can switch back.
@@ -51,6 +57,7 @@ RUN apt-get install -y \
 w3m
 
 # Install python dependencies
+
 ADD requirements.txt requirements.txt
 RUN virtualenv --distribute venv
 RUN venv/bin/pip install -r requirements.txt # 07SEP2016
@@ -127,27 +134,28 @@ RUN chmod 0600 /etc/monit/monitrc
 # remove after phusion/baseimage-docker#338 is fixed
 ADD conf/init/logrotate.conf /etc/logrotate.conf
 
-# Download any external libs.
-RUN mkdir static/fonts static/ldn
-ADD external_libraries.py external_libraries.py
-RUN venv/bin/python -m external_libraries
-RUN mkdir -p /usr/local/nginx/logs/
-
 # TODO(ssewell): only works on a detached head, make work with ref
 ADD .git/HEAD GIT_HEAD
 
 # Add all of the files!
 ADD . .
+RUN mkdir static/fonts static/ldn
+
+# Download any external libs.
+RUN venv/bin/python -m external_libraries
+RUN mkdir -p /usr/local/nginx/logs/
+
 
 RUN pyclean .
 
 # Cleanup any NPM-related stuff.
 RUN rm -rf /root/.npm
-RUN rm -rf /.npm
+RUN rm -rf .npm
 RUN rm -rf /usr/local/lib/node_modules
 RUN rm -rf /usr/share/yarn/node_modules
 RUN rm -rf /root/node_modules
-RUN rm -rf /node_modules
-RUN rm -rf /grunt
+RUN rm -rf node_modules
+RUN rm -rf grunt
 RUN rm package.json yarn.lock
 
 # Run the tests

_init.py | 35 (new file)
@@ -0,0 +1,35 @@
+import os
+import re
+import subprocess
+
+
+ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
+CONF_DIR = os.getenv("QUAYCONF", os.path.join(ROOT_DIR, "conf/"))
+STATIC_DIR = os.path.join(ROOT_DIR, 'static/')
+STATIC_LDN_DIR = os.path.join(STATIC_DIR, 'ldn/')
+STATIC_FONTS_DIR = os.path.join(STATIC_DIR, 'fonts/')
+TEMPLATE_DIR = os.path.join(ROOT_DIR, 'templates/')
+
+
+def _get_version_number_changelog():
+  try:
+    with open(os.path.join(ROOT_DIR, 'CHANGELOG.md')) as f:
+      return re.search(r'(v[0-9]+\.[0-9]+\.[0-9]+)', f.readline()).group(0)
+  except IOError:
+    return ''
+
+
+def _get_git_sha():
+  if os.path.exists("GIT_HEAD"):
+    with open(os.path.join(ROOT_DIR, "GIT_HEAD")) as f:
+      return f.read()
+  else:
+    try:
+      return subprocess.check_output(["git", "rev-parse", "HEAD"]).strip()[0:8]
+    except (OSError, subprocess.CalledProcessError):
+      pass
+  return "unknown"
+
+
+__version__ = _get_version_number_changelog()
+__gitrev__ = _get_git_sha()
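The new _init.py gathers in one place the paths that the code previously hard-coded relative to the working directory. A short consumption sketch (assumed, not part of the diff) of how other modules use these constants and how the QUAYCONF environment variable overrides the default:

    import os
    from _init import ROOT_DIR, CONF_DIR, STATIC_DIR

    # With QUAYCONF unset, CONF_DIR falls back to <ROOT_DIR>/conf/; the Dockerfiles
    # in this commit set QUAYCONF to /quay/conf and /quay-registry/conf respectively.
    config_yaml = os.path.join(CONF_DIR, 'stack/config.yaml')
    status_icon = os.path.join(ROOT_DIR, 'buildstatus', 'ready.svg')
    favicon = os.path.join(STATIC_DIR, 'favicon.ico')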

app.py | 7
@@ -14,6 +14,7 @@ from jwkest.jwk import RSAKey
 from werkzeug.routing import BaseConverter
 
 import features
+from _init import CONF_DIR
 from auth.auth_context import get_authenticated_user
 from avatars.avatars import Avatar
 from buildman.manager.buildcanceller import BuildCanceller
@@ -52,9 +53,9 @@ from util.security.instancekeys import InstanceKeys
 from util.security.signing import Signer
 
 
-OVERRIDE_CONFIG_DIRECTORY = 'conf/stack/'
-OVERRIDE_CONFIG_YAML_FILENAME = 'conf/stack/config.yaml'
-OVERRIDE_CONFIG_PY_FILENAME = 'conf/stack/config.py'
+OVERRIDE_CONFIG_DIRECTORY = os.path.join(CONF_DIR, 'stack/')
+OVERRIDE_CONFIG_YAML_FILENAME = os.path.join(CONF_DIR, 'stack/config.yaml')
+OVERRIDE_CONFIG_PY_FILENAME = os.path.join(CONF_DIR, 'stack/config.py')
 
 OVERRIDE_CONFIG_KEY = 'QUAY_OVERRIDE_CONFIG'

boot.py | 8 (Normal file → Executable file)
@@ -13,6 +13,7 @@ from app import app
 from data.model.release import set_region_release
 from util.config.database import sync_database_with_config
 from util.generatepresharedkey import generate_key
+from _init import CONF_DIR
 
 
 @lru_cache(maxsize=1)
@@ -42,7 +43,7 @@ def setup_jwt_proxy():
   """
   Creates a service key for quay to use in the jwtproxy and generates the JWT proxy configuration.
   """
-  if os.path.exists('conf/jwtproxy_conf.yaml'):
+  if os.path.exists(os.path.join(CONF_DIR, 'jwtproxy_conf.yaml')):
     # Proxy is already setup.
     return
 
@@ -65,16 +66,17 @@ def setup_jwt_proxy():
   registry = audience + '/keys'
   security_issuer = app.config.get('SECURITY_SCANNER_ISSUER_NAME', 'security_scanner')
 
-  with open("conf/jwtproxy_conf.yaml.jnj") as f:
+  with open(os.path.join(CONF_DIR, 'jwtproxy_conf.yaml.jnj')) as f:
     template = Template(f.read())
     rendered = template.render(
+      conf_dir=CONF_DIR,
       audience=audience,
       registry=registry,
       key_id=quay_key_id,
       security_issuer=security_issuer,
     )
 
-  with open('conf/jwtproxy_conf.yaml', 'w') as f:
+  with open(os.path.join(CONF_DIR, 'jwtproxy_conf.yaml'), 'w') as f:
     f.write(rendered)
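setup_jwt_proxy() now passes conf_dir into the template so that jwtproxy_conf.yaml.jnj (changed further down) can stop hard-coding /conf. A toy rendering sketch with made-up values, assuming jinja2 is available:

    import os
    from jinja2 import Template

    # Toy example of the render step above, not the real template.
    rendered = Template('ca_key_file: {{ conf_dir }}/mitm.key').render(
        conf_dir=os.getenv('QUAYCONF', '/quay/conf'))
    # -> 'ca_key_file: /quay/conf/mitm.key'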
@@ -19,7 +19,7 @@ from buildman.asyncutil import AsyncWrapper
 from container_cloud_config import CloudConfigContext
 from app import metric_queue, app
 from util.metrics.metricqueue import duration_collector_async
+from _init import ROOT_DIR
 
 logger = logging.getLogger(__name__)
 
@@ -29,7 +29,7 @@ ONE_HOUR = 60*60
 _TAG_RETRY_COUNT = 3 # Number of times to retry adding tags.
 _TAG_RETRY_SLEEP = 2 # Number of seconds to wait between tag retries.
 
-ENV = Environment(loader=FileSystemLoader('buildman/templates'))
+ENV = Environment(loader=FileSystemLoader(os.path.join(ROOT_DIR, "buildman/templates")))
 TEMPLATE = ENV.get_template('cloudconfig.yaml')
 CloudConfigContext().populate_jinja_environment(ENV)
 
@@ -13,6 +13,7 @@ worker_class = 'gevent'
 pythonpath = '.'
 preload_app = True
 
+
 def post_fork(server, worker):
   # Reset the Random library to ensure it won't raise the "PID check failed." error after
   # gunicorn forks.
@@ -13,6 +13,7 @@ worker_class = 'gevent'
 pythonpath = '.'
 preload_app = True
 
+
 def post_fork(server, worker):
   # Reset the Random library to ensure it won't raise the "PID check failed." error after
   # gunicorn forks.
@@ -14,6 +14,7 @@ pythonpath = '.'
 preload_app = True
 timeout = 2000 # Because sync workers
 
+
 def post_fork(server, worker):
   # Reset the Random library to ensure it won't raise the "PID check failed." error after
   # gunicorn forks.
@@ -1,8 +1,10 @@
 #! /bin/bash
 set -e
+QUAYPATH=${QUAYPATH:-"."}
+QUAYCONF=${QUAYCONF:-"$QUAYPATH/conf"}
+cd ${QUAYDIR:-"/"}
 # Create certs for jwtproxy to mitm outgoing TLS connections
 echo '{"CN":"CA","key":{"algo":"rsa","size":2048}}' | cfssl gencert -initca - | cfssljson -bare mitm
-cp mitm-key.pem /conf/mitm.key
-cp mitm.pem /conf/mitm.cert
+cp mitm-key.pem $QUAYCONF/mitm.key
+cp mitm.pem $QUAYCONF/mitm.cert
 cp mitm.pem /usr/local/share/ca-certificates/mitm.crt
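Every init script in this commit repeats the same preamble: QUAYPATH=${QUAYPATH:-"."}, QUAYCONF=${QUAYCONF:-"$QUAYPATH/conf"}, and cd ${QUAYDIR:-"/"}, i.e. use the environment value when set and fall back to the old hard-coded location otherwise. The Python side of the tree expresses the same fallback like this (sketch, not from the diff):

    import os

    quaypath = os.getenv('QUAYPATH', '.')
    quayconf = os.getenv('QUAYCONF', os.path.join(quaypath, 'conf'))
    os.chdir(os.getenv('QUAYDIR', '/'))  # equivalent of `cd ${QUAYDIR:-"/"}`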
@@ -1,34 +1,38 @@
 #! /bin/bash
 set -e
+QUAYPATH=${QUAYPATH:-"."}
+QUAYCONF=${QUAYCONF:-"$QUAYPATH/conf"}
+
+cd ${QUAYDIR:-"/"}
+
 # Add the custom LDAP certificate
-if [ -e /conf/stack/ldap.crt ]
+if [ -e $QUAYCONF/stack/ldap.crt ]
 then
-  cp /conf/stack/ldap.crt /usr/local/share/ca-certificates/ldap.crt
+  cp $QUAYCONF/stack/ldap.crt /usr/local/share/ca-certificates/ldap.crt
 fi
 
 # Add extra trusted certificates (as a directory)
-if [ -d /conf/stack/extra_ca_certs ]; then
-  if test "$(ls -A "/conf/stack/extra_ca_certs")"; then
-    echo "Installing extra certificates found in /conf/stack/extra_ca_certs directory"
-    cp /conf/stack/extra_ca_certs/* /usr/local/share/ca-certificates/
-    cat /conf/stack/extra_ca_certs/* >> /venv/lib/python2.7/site-packages/requests/cacert.pem
+if [ -d $QUAYCONF/stack/extra_ca_certs ]; then
+  if test "$(ls -A "$QUAYCONF/stack/extra_ca_certs")"; then
+    echo "Installing extra certificates found in $QUAYCONF/stack/extra_ca_certs directory"
+    cp $QUAYCONF/stack/extra_ca_certs/* /usr/local/share/ca-certificates/
+    cat $QUAYCONF/stack/extra_ca_certs/* >> venv/lib/python2.7/site-packages/requests/cacert.pem
   fi
 fi
 
 # Add extra trusted certificates (as a file)
-if [ -f /conf/stack/extra_ca_certs ]; then
-  echo "Installing extra certificates found in /conf/stack/extra_ca_certs file"
-  csplit -z -f /usr/local/share/ca-certificates/extra-ca- /conf/stack/extra_ca_certs '/-----BEGIN CERTIFICATE-----/' '{*}'
-  cat /conf/stack/extra_ca_certs >> /venv/lib/python2.7/site-packages/requests/cacert.pem
+if [ -f $QUAYCONF/stack/extra_ca_certs ]; then
+  echo "Installing extra certificates found in $QUAYCONF/stack/extra_ca_certs file"
+  csplit -z -f /usr/local/share/ca-certificates/extra-ca- $QUAYCONF/stack/extra_ca_certs '/-----BEGIN CERTIFICATE-----/' '{*}'
+  cat $QUAYCONF/stack/extra_ca_certs >> venv/lib/python2.7/site-packages/requests/cacert.pem
 fi
 
 # Add extra trusted certificates (prefixed)
-for f in $(find /conf/stack/ -maxdepth 1 -type f -name "extra_ca*")
+for f in $(find $QUAYCONF/stack/ -maxdepth 1 -type f -name "extra_ca*")
 do
   echo "Installing extra cert $f"
   cp "$f" /usr/local/share/ca-certificates/
-  cat "$f" >> /venv/lib/python2.7/site-packages/requests/cacert.pem
+  cat "$f" >> venv/lib/python2.7/site-packages/requests/cacert.pem
 done
 
 # Update all CA certificates.
@@ -1,11 +1,15 @@
 #! /bin/sh
+QUAYPATH=${QUAYPATH:-"."}
+QUAYCONF=${QUAYCONF:-"$QUAYPATH/conf"}
 
-if [ -e /conf/stack/robots.txt ]
+cd ${QUAYDIR:-"/"}
+
+if [ -e $QUAYCONF/stack/robots.txt ]
 then
-  cp /conf/stack/robots.txt /templates/robots.txt
+  cp $QUAYCONF/stack/robots.txt $QUAYPATH/templates/robots.txt
 fi
 
-if [ -e /conf/stack/favicon.ico ]
+if [ -e $QUAYCONF/stack/favicon.ico ]
 then
-  cp /conf/stack/favicon.ico /static/favicon.ico
+  cp $QUAYCONF/stack/favicon.ico $QUAYPATH/static/favicon.ico
 fi
@@ -1,6 +1,10 @@
 #! /bin/sh
+QUAYPATH=${QUAYPATH:-"."}
+QUAYCONF=${QUAYCONF:-"$QUAYPATH/conf"}
 
-if [ -e /conf/stack/syslog-ng-extra.conf ]
+cd ${QUAYDIR:-"/"}
+
+if [ -e $QUAYCONF/stack/syslog-ng-extra.conf ]
 then
-  cp /conf/stack/syslog-ng-extra.conf /etc/syslog-ng/conf.d/
+  cp $QUAYCONF/stack/syslog-ng-extra.conf /etc/syslog-ng/conf.d/
 fi

conf/init/nginx_conf_create.py | 51 (new file)
@@ -0,0 +1,51 @@
+import os
+import os.path
+
+import yaml
+import jinja2
+
+QUAYPATH = os.getenv("QUAYPATH", ".")
+QUAYDIR = os.getenv("QUAYDIR", "/")
+QUAYCONF_DIR = os.getenv("QUAYCONF", os.path.join(QUAYDIR, QUAYPATH, "conf"))
+STATIC_DIR = os.path.join(QUAYDIR, 'static/')
+
+def write_config(filename, **kwargs):
+  with open(filename + ".jnj") as f:
+    template = jinja2.Template(f.read())
+    rendered = template.render(kwargs)
+
+  with open(filename, 'w') as f:
+    f.write(rendered)
+
+
+def generate_nginx_config():
+  """
+  Generates nginx config from the app config
+  """
+  use_https = os.path.exists(os.path.join(QUAYCONF_DIR, 'stack/ssl.key'))
+  write_config(os.path.join(QUAYCONF_DIR, 'nginx/nginx.conf'), use_https=use_https)
+
+
+def generate_server_config(config):
+  """
+  Generates server config from the app config
+  """
+  config = config or {}
+  tuf_server = config.get('TUF_SERVER', None)
+  tuf_host = config.get('TUF_HOST', None)
+  signing_enabled = config.get('FEATURE_SIGNING', False)
+  maximum_layer_size = config.get('MAXIMUM_LAYER_SIZE', '20G')
+
+  write_config(
+    os.path.join(QUAYCONF_DIR, 'nginx/server-base.conf'), tuf_server=tuf_server, tuf_host=tuf_host,
+    signing_enabled=signing_enabled, maximum_layer_size=maximum_layer_size, static_dir=STATIC_DIR)
+
+
+if __name__ == "__main__":
+  if os.path.exists(os.path.join(QUAYCONF_DIR, 'stack/config.yaml')):
+    with open(os.path.join(QUAYCONF_DIR, 'stack/config.yaml'), 'r') as f:
+      config = yaml.load(f)
+  else:
+    config = None
+  generate_server_config(config)
+  generate_nginx_config()
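The new static_dir keyword passed to write_config() feeds the {{static_dir}} placeholder introduced in the nginx server-base.conf template further down. A tiny standalone illustration (hypothetical template text, assuming jinja2 is installed; the real templates live under $QUAYCONF/nginx/ as *.jnj files):

    import jinja2

    # Stand-in for a .jnj fragment, rendered the same way write_config() does it.
    template = jinja2.Template('alias {{ static_dir }};')
    print(template.render(static_dir='/quay/static/'))  # -> alias /quay/static/;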
@@ -1,51 +1,8 @@
-#!/venv/bin/python
-
-import os.path
-
-import yaml
-import jinja2
-
-
-def write_config(filename, **kwargs):
-  with open(filename + ".jnj") as f:
-    template = jinja2.Template(f.read())
-    rendered = template.render(kwargs)
-
-  with open(filename, 'w') as f:
-    f.write(rendered)
-
-
-def generate_nginx_config():
-  """
-  Generates nginx config from the app config
-  """
-  use_https = os.path.exists('conf/stack/ssl.key')
-  write_config('conf/nginx/nginx.conf',
-               use_https=use_https)
-
-
-def generate_server_config(config):
-  """
-  Generates server config from the app config
-  """
-  config = config or {}
-  tuf_server = config.get('TUF_SERVER', None)
-  tuf_host = config.get('TUF_HOST', None)
-  signing_enabled = config.get('FEATURE_SIGNING', False)
-  maximum_layer_size = config.get('MAXIMUM_LAYER_SIZE', '20G')
-
-  write_config('conf/nginx/server-base.conf',
-               tuf_server=tuf_server,
-               tuf_host=tuf_host,
-               signing_enabled=signing_enabled,
-               maximum_layer_size=maximum_layer_size)
-
-
-if __name__ == "__main__":
-  if os.path.exists('conf/stack/config.yaml'):
-    with open('conf/stack/config.yaml', 'r') as f:
-      config = yaml.load(f)
-  else:
-    config = None
-  generate_server_config(config)
-  generate_nginx_config()
+#!/bin/bash
+QUAYDIR=${QUAYDIR:-"/"}
+QUAYPATH=${QUAYPATH:-"."}
+QUAYCONF=${QUAYCONF:-"$QUAYPATH/conf"}
+
+cd $QUAYDIR
+venv/bin/python $QUAYCONF/init/nginx_conf_create.py
+
@@ -1,5 +1,6 @@
-#! /bin/bash
+#!/bin/bash
 set -e
+cd ${QUAYDIR:-"/"}
 
 # Run the database migration
-PYTHONPATH=. venv/bin/alembic upgrade head
+PYTHONPATH=${QUAYPATH:-"."} venv/bin/alembic upgrade head
@@ -2,7 +2,9 @@
 
 echo 'Starting Blob upload cleanup worker'
 
-cd /
-venv/bin/python -m workers.blobuploadcleanupworker 2>&1
+QUAYPATH=${QUAYPATH:-"."}
+
+cd ${QUAYDIR:-"/"}
+PYTHONPATH=$QUAYPATH venv/bin/python -m workers.blobuploadcleanupworker 2>&1
 
 echo 'Blob upload cleanup exited'
@@ -2,7 +2,8 @@
 
 echo 'Starting build logs archiver worker'
 
-cd /
-venv/bin/python -m workers.buildlogsarchiver 2>&1
+QUAYPATH=${QUAYPATH:-"."}
+cd ${QUAYDIR:-"/"}
+PYTHONPATH=$QUAYPATH venv/bin/python -m workers.buildlogsarchiver 2>&1
 
 echo 'Diffs worker exited'
@@ -6,7 +6,9 @@ echo 'Starting internal build manager'
 monit
 
 # Run the build manager.
-cd /
+QUAYPATH=${QUAYPATH:-"."}
+cd ${QUAYDIR:-"/"}
+export PYTHONPATH=$QUAYPATH
 exec venv/bin/python -m buildman.builder 2>&1
 
 echo 'Internal build manager exited'
@@ -2,7 +2,8 @@
 
 echo 'Starting chunk cleanup worker'
 
-cd /
-venv/bin/python -m workers.chunkcleanupworker 2>&1
+QUAYPATH=${QUAYPATH:-"."}
+cd ${QUAYDIR:-"/"}
+PYTHONPATH=$QUAYPATH venv/bin/python -m workers.chunkcleanupworker 2>&1
 
 echo 'Chunk cleanup worker exited'
@@ -2,7 +2,8 @@
 
 echo 'Starting GC worker'
 
-cd /
-venv/bin/python -m workers.gc.gcworker 2>&1
+QUAYPATH=${QUAYPATH:-"."}
+cd ${QUAYDIR:-"/"}
+PYTHONPATH=$QUAYPATH venv/bin/python -m workers.gc.gcworker 2>&1
 
 echo 'Repository GC exited'
@@ -2,7 +2,8 @@
 
 echo 'Starting global prometheus stats worker'
 
-cd /
-venv/bin/python -m workers.globalpromstats
+QUAYPATH=${QUAYPATH:-"."}
+cd ${QUAYDIR:-"/"}
+PYTHONPATH=$QUAYPATH venv/bin/python -m workers.globalpromstats
 
 echo 'Global prometheus stats exited'
@@ -2,7 +2,10 @@
 
 echo 'Starting gunicon'
 
-cd /
-nice -n 10 venv/bin/gunicorn -c conf/gunicorn_registry.py registry:application
+QUAYPATH=${QUAYPATH:-"."}
+QUAYCONF=${QUAYCONF:-"$QUAYPATH/conf"}
+
+cd ${QUAYDIR:-"/"}
+PYTHONPATH=$QUAYPATH nice -n 10 venv/bin/gunicorn -c $QUAYCONF/gunicorn_registry.py registry:application
 
 echo 'Gunicorn exited'
@@ -2,7 +2,10 @@
 
 echo 'Starting gunicon'
 
-cd /
-venv/bin/gunicorn -c conf/gunicorn_secscan.py secscan:application
+QUAYPATH=${QUAYPATH:-"."}
+QUAYCONF=${QUAYCONF:-"$QUAYPATH/conf"}
+
+cd ${QUAYDIR:-"/"}
+PYTHONPATH=$QUAYPATH venv/bin/gunicorn -c $QUAYCONF/gunicorn_secscan.py secscan:application
 
 echo 'Gunicorn exited'
@@ -2,7 +2,10 @@
 
 echo 'Starting gunicon'
 
-cd /
-nice -n 10 venv/bin/gunicorn -c conf/gunicorn_verbs.py verbs:application
+QUAYPATH=${QUAYPATH:-"."}
+QUAYCONF=${QUAYCONF:-"$QUAYPATH/conf"}
+
+cd ${QUAYDIR:-"/"}
+PYTHONPATH=$QUAYPATH nice -n 10 venv/bin/gunicorn -c $QUAYCONF/gunicorn_verbs.py verbs:application
 
 echo 'Gunicorn exited'
@@ -2,7 +2,10 @@
 
 echo 'Starting gunicon'
 
-cd /
-venv/bin/gunicorn -c conf/gunicorn_web.py web:application
+QUAYPATH=${QUAYPATH:-"."}
+QUAYCONF=${QUAYCONF:-"$QUAYPATH/conf"}
+
+cd ${QUAYDIR:-"/"}
+PYTHONPATH=$QUAYPATH venv/bin/gunicorn -c $QUAYCONF/gunicorn_web.py web:application
 
 echo 'Gunicorn exited'
@@ -1,12 +1,16 @@
 #! /bin/bash
-cd /
 
-if [ -f conf/jwtproxy_conf.yaml ];
+QUAYPATH=${QUAYPATH:-"."}
+cd ${QUAYDIR:-"/"}
+PYTHONPATH=$QUAYPATH
+QUAYCONF=${QUAYCONF:-"$QUAYPATH/conf"}
+
+if [ -f $QUAYCONF/jwtproxy_conf.yaml ];
 then
   echo 'Starting jwtproxy'
-  /usr/local/bin/jwtproxy --config conf/jwtproxy_conf.yaml
+  /usr/local/bin/jwtproxy --config $QUAYCONF/jwtproxy_conf.yaml
   rm /tmp/jwtproxy_secscan.sock
   echo 'Jwtproxy exited'
 else
   sleep 1
 fi
@@ -2,7 +2,8 @@
 
 echo 'Starting log rotation worker'
 
-cd /
-venv/bin/python -m workers.logrotateworker
+QUAYPATH=${QUAYPATH:-"."}
+cd ${QUAYDIR:-"/"}
+PYTHONPATH=$QUAYPATH venv/bin/python -m workers.logrotateworker
 
 echo 'Log rotation worker exited'
@@ -2,6 +2,11 @@
 
 echo 'Starting nginx'
 
-/usr/sbin/nginx -c /conf/nginx/nginx.conf
+QUAYPATH=${QUAYPATH:-"."}
+cd ${QUAYDIR:-"/"}
+PYTHONPATH=$QUAYPATH
+QUAYCONF=${QUAYCONF:-"$QUAYPATH/conf"}
+
+/usr/sbin/nginx -c $QUAYCONF/nginx/nginx.conf
 
 echo 'Nginx exited'
@@ -2,7 +2,9 @@
 
 echo 'Starting notification worker'
 
-cd /
-venv/bin/python -m workers.notificationworker
+QUAYPATH=${QUAYPATH:-"."}
+cd ${QUAYDIR:-"/"}
+
+PYTHONPATH=$QUAYPATH venv/bin/python -m workers.notificationworker
 
 echo 'Notification worker exited'
@@ -2,7 +2,8 @@
 
 echo 'Starting Queue cleanup worker'
 
-cd /
-venv/bin/python -m workers.queuecleanupworker 2>&1
+QUAYPATH=${QUAYPATH:-"."}
+cd ${QUAYDIR:-"/"}
+PYTHONPATH=$QUAYPATH venv/bin/python -m workers.queuecleanupworker 2>&1
 
 echo 'Repository Queue cleanup exited'
@@ -2,7 +2,8 @@
 
 echo 'Starting repository action count worker'
 
-cd /
-venv/bin/python -m workers.repositoryactioncounter 2>&1
+QUAYPATH=${QUAYPATH:-"."}
+cd ${QUAYDIR:-"/"}
+PYTHONPATH=$QUAYPATH venv/bin/python -m workers.repositoryactioncounter 2>&1
 
 echo 'Repository action worker exited'
@@ -2,7 +2,8 @@
 
 echo 'Starting security scanner notification worker'
 
-cd /
-venv/bin/python -m workers.security_notification_worker 2>&1
+QUAYPATH=${QUAYPATH:-"."}
+cd ${QUAYDIR:-"/"}
+PYTHONPATH=$QUAYPATH venv/bin/python -m workers.security_notification_worker 2>&1
 
 echo 'Security scanner notification worker exited'
@@ -2,7 +2,8 @@
 
 echo 'Starting security scanner worker'
 
-cd /
-venv/bin/python -m workers.securityworker.securityworker 2>&1
+QUAYPATH=${QUAYPATH:-"."}
+cd ${QUAYDIR:-"/"}
+PYTHONPATH=$QUAYPATH venv/bin/python -m workers.securityworker.securityworker 2>&1
 
 echo 'Security scanner worker exited'
@@ -2,7 +2,8 @@
 
 echo 'Starting service key worker'
 
-cd /
-venv/bin/python -m workers.service_key_worker 2>&1
+QUAYPATH=${QUAYPATH:-"."}
+cd ${QUAYDIR:-"/"}
+PYTHONPATH=$QUAYPATH venv/bin/python -m workers.service_key_worker 2>&1
 
 echo 'Service key worker exited'
@@ -2,7 +2,8 @@
 
 echo 'Starting storage replication worker'
 
-cd /
-venv/bin/python -m workers.storagereplication 2>&1
+QUAYPATH=${QUAYPATH:-"."}
+cd ${QUAYDIR:-"/"}
+PYTHONPATH=$QUAYPATH venv/bin/python -m workers.storagereplication 2>&1
 
 echo 'Repository storage replication exited'
@@ -2,7 +2,8 @@
 
 echo 'Starting team synchronization worker'
 
-cd /
-venv/bin/python -m workers.teamsyncworker 2>&1
+QUAYPATH=${QUAYPATH:-"."}
+cd ${QUAYDIR:-"/"}
+PYTHONPATH=$QUAYPATH venv/bin/python -m workers.teamsyncworker 2>&1
 
 echo 'Team synchronization worker exited'
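All of the service wrappers above follow one recipe: default QUAYPATH (and, where a config file is needed, QUAYCONF), cd into QUAYDIR, and set PYTHONPATH so that `python -m workers.<name>` resolves against the checkout instead of the filesystem root. A rough Python equivalent (hypothetical helper, not part of the commit):

    import os
    import subprocess

    def run_worker(module):
      """Hypothetical helper mirroring the init scripts above."""
      quaydir = os.getenv('QUAYDIR', '/')
      quaypath = os.getenv('QUAYPATH', '.')
      env = dict(os.environ, PYTHONPATH=quaypath)
      return subprocess.call(['venv/bin/python', '-m', module], cwd=quaydir, env=env)

    # e.g. run_worker('workers.notificationworker')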
@@ -1,3 +1,4 @@
 #!/bin/bash
+cd ${QUAYDIR:-"/"}
 
-/venv/bin/python /boot.py
+venv/bin/python ${QUAYPATH:-"."}/boot.py
@@ -2,8 +2,8 @@ jwtproxy:
   signer_proxy:
     enabled: true
     listen_addr: :8080
-    ca_key_file: /conf/mitm.key
-    ca_crt_file: /conf/mitm.cert
+    ca_key_file: {{ conf_dir }}/mitm.key
+    ca_crt_file: {{ conf_dir }}/mitm.cert
 
     signer:
       issuer: quay
@@ -13,7 +13,7 @@ jwtproxy:
       type: preshared
       options:
         key_id: {{ key_id }}
-        private_key_path: /conf/quay.pem
+        private_key_path: {{ conf_dir }}/quay.pem
   verifier_proxies:
   - enabled: true
     listen_addr: unix:/tmp/jwtproxy_secscan.sock
@@ -166,11 +166,11 @@ location /c1/ {
 
 location /static/ {
   # checks for static file, if not found proxy to app
-  alias /static/;
+  alias {{static_dir}};
   error_page 404 /404;
 }
 
-error_page 502 /static/502.html;
+error_page 502 {{static_dir}}/502.html;
 
 location ~ ^/b1/controller(/?)(.*) {
   proxy_pass http://build_manager_controller_server/$2;

config.py | 10
@@ -3,6 +3,8 @@ from uuid import uuid4
 import os.path
 import requests
 
+from _init import ROOT_DIR, CONF_DIR
+
 
 def build_requests_session():
   sess = requests.Session()
@@ -45,7 +47,7 @@ class ImmutableConfig(object):
   # Status tag config
   STATUS_TAGS = {}
   for tag_name in ['building', 'failed', 'none', 'ready', 'cancelled']:
-    tag_path = os.path.join('buildstatus', tag_name + '.svg')
+    tag_path = os.path.join(ROOT_DIR, 'buildstatus', tag_name + '.svg')
     with open(tag_path) as tag_svg:
       STATUS_TAGS[tag_name] = tag_svg.read()
 
@@ -303,7 +305,7 @@ class DefaultConfig(ImmutableConfig):
   # System logs.
   SYSTEM_LOGS_PATH = "/var/log/"
   SYSTEM_LOGS_FILE = "/var/log/syslog"
-  SYSTEM_SERVICES_PATH = "conf/init/service/"
+  SYSTEM_SERVICES_PATH = os.path.join(CONF_DIR, "init/service/")
 
   # Allow registry pulls when unable to write to the audit log
   ALLOW_PULLS_WITHOUT_STRICT_LOGGING = False
@@ -407,11 +409,11 @@ class DefaultConfig(ImmutableConfig):
   INSTANCE_SERVICE_KEY_SERVICE = 'quay'
 
   # The location of the key ID file generated for this instance.
-  INSTANCE_SERVICE_KEY_KID_LOCATION = 'conf/quay.kid'
+  INSTANCE_SERVICE_KEY_KID_LOCATION = os.path.join(CONF_DIR, 'quay.kid')
 
   # The location of the private key generated for this instance.
   # NOTE: If changed, jwtproxy_conf.yaml.jnj must also be updated.
-  INSTANCE_SERVICE_KEY_LOCATION = 'conf/quay.pem'
+  INSTANCE_SERVICE_KEY_LOCATION = os.path.join(CONF_DIR, 'quay.pem')
 
   # This instance's service key expiration in minutes.
   INSTANCE_SERVICE_KEY_EXPIRATION = 120
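The point of joining against CONF_DIR is that the resulting paths stay valid no matter where the process is started, whereas the old relative 'conf/...' strings only worked when the working directory was the repository root. A small sketch (assumed, not from the diff):

    import os
    from _init import CONF_DIR

    # CONF_DIR defaults to an absolute path derived from _init.py's own location,
    # so these stay correct even after the init scripts run `cd $QUAYDIR`.
    key_location = os.path.join(CONF_DIR, 'quay.pem')
    kid_location = os.path.join(CONF_DIR, 'quay.kid')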
@@ -32,6 +32,7 @@ from util.useremails import send_confirmation_email, send_recovery_email
 from util.license import decode_license, LicenseDecodeError
 from util.security.ssl import load_certificate, CertInvalidException
 from util.config.validator import EXTRA_CA_DIRECTORY
+from _init import ROOT_DIR
 
 
 logger = logging.getLogger(__name__)
@@ -179,7 +180,7 @@ class ChangeLog(ApiResource):
   def get(self):
     """ Returns the change log for this installation. """
     if SuperUserPermission().can():
-      with open('CHANGELOG.md', 'r') as f:
+      with open(os.path.join(ROOT_DIR, 'CHANGELOG.md'), 'r') as f:
         return {
           'log': f.read()
         }
@@ -13,7 +13,7 @@ from flask import make_response, render_template, request, abort, session
 from flask_login import login_user
 from flask_principal import identity_changed
 
 import endpoints.decorated # Register the various exceptions via decorators.
 import features
 
 from app import app, oauth_apps, oauth_login, LoginWrappedDBUser, user_analytics, license_validator
@@ -25,6 +25,7 @@ from util.names import parse_namespace_repository
 from util.secscan import PRIORITY_LEVELS
 from util.saas.useranalytics import build_error_callback
 from util.timedeltastring import convert_to_timedelta
+from _init import STATIC_DIR, __version__
 
 
 logger = logging.getLogger(__name__)
@@ -133,16 +134,9 @@ def list_files(path, extension):
     # Remove the static/ prefix. It is added in the template.
     return os.path.join(dp, f)[len('static/'):]
 
-  filepath = 'static/' + path
+  filepath = os.path.join('static/', path)
   return [join_path(dp, f) for dp, dn, files in os.walk(filepath) for f in files if matches(f)]
 
-@lru_cache(maxsize=1)
-def _get_version_number():
-  try:
-    with open('CHANGELOG.md') as f:
-      return re.search('(v[0-9]+\.[0-9]+\.[0-9]+)', f.readline()).group(0)
-  except IOError:
-    return ''
-
 def render_page_template(name, route_data=None, **kwargs):
   debugging = app.config.get('DEBUGGING', False)
@@ -188,7 +182,7 @@ def render_page_template(name, route_data=None, **kwargs):
 
   version_number = ''
   if not features.BILLING:
-    version_number = 'Quay %s' % _get_version_number()
+    version_number = 'Quay %s' % __version__
 
   resp = make_response(render_template(name,
                                        route_data=route_data,
@@ -226,4 +220,3 @@ def render_page_template(name, route_data=None, **kwargs):
 
   resp.headers['X-FRAME-OPTIONS'] = 'DENY'
   return resp
-
@@ -1,3 +1,4 @@
+import os
 import json
 import logging
 
@@ -38,6 +39,8 @@ from util.saas.useranalytics import build_error_callback
 from util.systemlogs import build_logs_archive
 from util.useremails import send_email_changed
 from util.registry.gzipinputstream import GzipInputStream
+from _init import ROOT_DIR
+
 
 PGP_KEY_MIMETYPE = 'application/pgp-keys'
 
@@ -287,7 +290,7 @@ def dbrevision_health():
   db_revision = result[0]
 
   # Find the local revision from the file system.
-  with open('ALEMBIC_HEAD', 'r') as f:
+  with open(os.path.join(ROOT_DIR, 'ALEMBIC_HEAD'), 'r') as f:
     local_revision = f.readline().split(' ')[0]
 
   data = {
@@ -2,7 +2,8 @@ import urllib2
 import re
 import os
 
-LOCAL_DIRECTORY = '/static/ldn/'
+from _init import STATIC_FONTS_DIR, STATIC_LDN_DIR
+LOCAL_PATH = '/static/ldn/'
 
 EXTERNAL_JS = [
   'code.jquery.com/jquery.js',
@@ -56,14 +57,14 @@ EXTERNAL_CSS_FONTS = [
 
 def get_external_javascript(local=False):
   if local:
-    return [LOCAL_DIRECTORY + format_local_name(src) for src in EXTERNAL_JS]
+    return [LOCAL_PATH + format_local_name(src) for src in EXTERNAL_JS]
 
   return ['//' + src for src in EXTERNAL_JS]
 
 
 def get_external_css(local=False):
   if local:
-    return [LOCAL_DIRECTORY + format_local_name(src) for src in EXTERNAL_CSS]
+    return [LOCAL_PATH + format_local_name(src) for src in EXTERNAL_CSS]
 
   return ['//' + src for src in EXTERNAL_CSS]
 
@@ -88,7 +89,7 @@ if __name__ == '__main__':
 
     filename = format_local_name(url)
     print 'Writing %s' % filename
-    with open(LOCAL_DIRECTORY + filename, 'w') as f:
+    with open(STATIC_LDN_DIR + filename, 'w') as f:
      f.write(contents)
 
   for url in EXTERNAL_CSS_FONTS:
@@ -96,7 +97,7 @@ if __name__ == '__main__':
     response = urllib2.urlopen('https://' + url)
 
     filename = os.path.basename(url).split('?')[0]
-    with open('static/ldn/' + filename, "wb") as local_file:
+    with open(STATIC_LDN_DIR + filename, "wb") as local_file:
       local_file.write(response.read())
 
   for url in EXTERNAL_FONTS:
@@ -104,5 +105,5 @@ if __name__ == '__main__':
     response = urllib2.urlopen('https://' + url)
 
     filename = os.path.basename(url).split('?')[0]
-    with open('static/fonts/' + filename, "wb") as local_file:
+    with open(STATIC_FONTS_DIR + filename, "wb") as local_file:
       local_file.write(response.read())
@@ -4,6 +4,12 @@ FROM phusion/baseimage:0.9.19
 
 ENV DEBIAN_FRONTEND noninteractive
 ENV HOME /root
+ENV QUAYDIR /quay-registry
+ENV QUAYCONF /quay-registry/conf
+ENV QUAYPATH "."
+
+RUN mkdir $QUAYDIR
+WORKDIR $QUAYDIR
 
 # This is so we don't break http golang/go#17066
 # When Ubuntu has nginx >= 1.11.0 we can switch back.
@@ -50,13 +56,6 @@ RUN apt-get update && apt-get upgrade -y \
 yarn=0.22.0-1 \
 w3m # 26MAY2017
 
-# Install python dependencies
-COPY requirements.txt requirements-tests.txt ./
-RUN virtualenv --distribute venv \
-  && venv/bin/pip install -r requirements.txt \
-  && venv/bin/pip install -r requirements-tests.txt \
-  && venv/bin/pip freeze # 07SEP2016
-
 # Install cfssl
 RUN mkdir /gocode
 ENV GOPATH /gocode
@@ -78,21 +77,28 @@ RUN curl -L -o /usr/local/bin/jwtproxy https://github.com/coreos/jwtproxy/releas
 RUN curl -L -o /usr/local/bin/prometheus-aggregator https://github.com/coreos/prometheus-aggregator/releases/download/v0.0.1-alpha/prometheus-aggregator \
 && chmod +x /usr/local/bin/prometheus-aggregator
 
+# Install python dependencies
+COPY requirements.txt requirements-tests.txt ./
+RUN virtualenv --distribute venv \
+  && venv/bin/pip install -r requirements.txt \
+  && venv/bin/pip install -r requirements-tests.txt \
+  && venv/bin/pip freeze
+
+
 # Install front-end dependencies
 RUN ln -s /usr/bin/nodejs /usr/bin/node
 COPY static/ package.json tsconfig.json webpack.config.js tslint.json yarn.lock ./
 RUN yarn install --ignore-engines
 
 
-RUN mkdir -p /etc/my_init.d /etc/systlog-ng /usr/local/bin /etc/monit static/fonts static/ldn /usr/local/nginx/logs/
+RUN mkdir -p /etc/my_init.d /etc/systlog-ng /usr/local/bin /etc/monit $QUAYDIR/static/fonts $QUAYDIR/static/ldn /usr/local/nginx/logs/
 
-COPY external_libraries.py ./
+COPY external_libraries.py _init.py ./
+
 RUN venv/bin/python -m external_libraries
 
-ARG RUN_TESTS=false
-ENV RUN_TESTS ${RUN_TESTS}
 RUN rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* /root/.cache
 
-VOLUME ["/conf/stack", "/var/log", "/datastorage", "/tmp", "/conf/etcd"]
+VOLUME ["$QUAYCONF/stack", "/var/log", "/datastorage", "/tmp", "$QUAYCONF/etcd"]
 
 EXPOSE 443 8443 80
@@ -1,14 +1,15 @@
 # vim:ft=dockerfile
 
-FROM quay.io/quay/quay-base:latest
+FROM quay.io/quay/quay-base:absolute
 
+WORKDIR $QUAYDIR
 COPY . .
 
 # Install python dependencies
 RUN virtualenv --distribute venv \
   && venv/bin/pip install -r requirements.txt \
   && venv/bin/pip install -r requirements-tests.txt \
-  && venv/bin/pip freeze # 07SEP2016
+  && venv/bin/pip freeze
 
 # Check python dependencies for the GPL
 # Due to the following bug, pip results must be piped to a file before grepping:
@@ -25,27 +26,16 @@ RUN yarn install --ignore-engines \
 
 # Set up the init system
 RUN mkdir -p /etc/my_init.d /etc/systlog-ng /usr/local/bin /etc/monit static/fonts static/ldn /usr/local/nginx/logs/ \
-  && cp conf/init/*.sh /etc/my_init.d/ \
-  && cp conf/init/syslog-ng.conf /etc/syslog-ng/ \
-  && cp -r conf/init/service/* /etc/service \
-  && cp conf/kill-buildmanager.sh /usr/local/bin/kill-buildmanager.sh \
-  && cp conf/monitrc /etc/monit/monitrc \
+  && cp $QUAYCONF/init/*.sh /etc/my_init.d/ \
+  && cp $QUAYCONF/init/syslog-ng.conf /etc/syslog-ng/ \
+  && cp -r $QUAYCONF/init/service/* /etc/service \
+  && cp $QUAYCONF/kill-buildmanager.sh /usr/local/bin/kill-buildmanager.sh \
+  && cp $QUAYCONF/monitrc /etc/monit/monitrc \
   && chmod 0600 /etc/monit/monitrc \
-  && cp conf/init/logrotate.conf /etc/logrotate.conf \
+  && cp $QUAYCONF/init/logrotate.conf /etc/logrotate.conf \
   && cp .git/HEAD GIT_HEAD \
   && rm -rf /etc/service/syslog-forwarder
 
-# Run the tests
-RUN if [ "$RUN_TESTS" = true ]; then \
-  TEST=true PYTHONPATH="." venv/bin/py.test --timeout=7200 --verbose \
-  --show-count -x --color=no ./ && rm -rf /var/tmp/; \
-
-  TEST=true PYTHONPATH="." venv/bin/py.test --timeout=7200 --verbose \
-  --show-count -x --color=no test/registry_tests.py && rm -rf /var/tmp/;\
-  yarn test; \
-  fi
-
 
 # Cleanup any NPM-related stuff.
 # RUN apt-get remove -y --auto-remove python-dev g++ libjpeg62-dev libevent-dev libldap2-dev libsasl2-dev libpq-dev libffi-dev libgpgme11-dev nodejs jpegoptim optipng w3m \
 # && apt-get autoremove -y \
@@ -53,4 +43,4 @@ RUN if [ "$RUN_TESTS" = true ]; then \
 # && rm -rf /root/.npm /.npm /usr/local/lib/node_modules /usr/share/yarn/node_modules \
 # /root/node_modules /node_modules /grunt
 
-RUN PYTHONPATH=. venv/bin/alembic heads | grep -E '^[0-9a-f]+ \(head\)$' > ALEMBIC_HEAD
+RUN PYTHONPATH=$QUAYPATH venv/bin/alembic heads | grep -E '^[0-9a-f]+ \(head\)$' > ALEMBIC_HEAD
@@ -1,7 +1,3 @@
-import logging
-import logging.config
-import os
-
 import endpoints.decorated # Note: We need to import this module to make sure the decorators are registered.
 import features
 
@@ -1,8 +1,4 @@
-import os
-import logging.config
-
 from app import app as application
 
 from endpoints.secscan import secscan
 
-
@@ -10,3 +10,11 @@ branch = True
 [coverage:report]
 omit =
   test/*
+
+[pep8]
+ignore = E111,E114
+max-line-length = 100
+
+[flake8]
+ignore = E111,E114
+max-line-length = 100
@@ -1,3 +1,4 @@
+import os
 import ldap
 import subprocess
 
@@ -5,6 +6,7 @@ from app import app, config_provider
 from data.users import LDAP_CERT_FILENAME
 from data.users.externalldap import LDAPConnection, LDAPUsers
 from util.config.validators import BaseValidator, ConfigValidationException
+from _init import CONF_DIR
 
 class LDAPValidator(BaseValidator):
   name = "ldap"
@@ -17,7 +19,7 @@ class LDAPValidator(BaseValidator):
 
     # If there is a custom LDAP certificate, then reinstall the certificates for the container.
     if config_provider.volume_file_exists(LDAP_CERT_FILENAME):
-      subprocess.check_call(['/conf/init/certs_install.sh'])
+      subprocess.check_call([os.path.join(CONF_DIR, 'init/certs_install.sh')])
 
     # Note: raises ldap.INVALID_CREDENTIALS on failure
     admin_dn = config.get('LDAP_ADMIN_DN')
@@ -3,6 +3,7 @@ from data import model
 from util.names import parse_robot_username
 from jinja2 import Environment, FileSystemLoader
 
+
 def icon_path(icon_name):
   return '%s/static/img/icons/%s.png' % (get_app_url(), icon_name)
 
@@ -1,4 +1,5 @@
 import os
+from _init import CONF_DIR
 
 
 def logfile_path(jsonfmt=False, debug=False):
@@ -19,7 +20,7 @@ def logfile_path(jsonfmt=False, debug=False):
   if debug or os.getenv('DEBUGLOG', 'false').lower() == 'true':
     _debug = "_debug"
 
-  return 'conf/logging%s%s.conf' % (_debug, _json)
+  return os.path.join(CONF_DIR, "logging%s%s.conf" % (_debug, _json))
 
 
 def filter_logs(values, filtered_fields):
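With CONF_DIR in place, logfile_path() returns paths rooted at the configuration directory; the parametrized tests further down pin the expected values. A quick usage sketch (assuming the Quay modules are importable):

    import os
    from _init import CONF_DIR
    from util.log import logfile_path

    assert logfile_path() == os.path.join(CONF_DIR, 'logging.conf')
    assert logfile_path(debug=True, jsonfmt=True) == os.path.join(CONF_DIR, 'logging_debug_json.conf')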
@@ -1,3 +1,4 @@
+import os
 import logging
 
 from abc import ABCMeta, abstractmethod
@@ -18,12 +19,13 @@ from util.secscan.validator import SecurityConfigValidator
 from util.security.instancekeys import InstanceKeys
 from util.security.registry_jwt import generate_bearer_token, build_context_and_subject
 
+from _init import CONF_DIR
 TOKEN_VALIDITY_LIFETIME_S = 60 # Amount of time the security scanner has to call the layer URL
 
 UNKNOWN_PARENT_LAYER_ERROR_MSG = 'worker: parent layer is unknown, it must be processed first'
 
-MITM_CERT_PATH = '/conf/mitm.cert'
+MITM_CERT_PATH = os.path.join(CONF_DIR, '/mitm.cert')
 
 DEFAULT_HTTP_HEADERS = {'Connection': 'close'}
 
 logger = logging.getLogger(__name__)
@@ -1,7 +1,8 @@
 import pytest
+import os
 
 from util.log import logfile_path, filter_logs
 from app import FILTERED_VALUES
+from _init import CONF_DIR
 
 
 def test_filter_logs():
   values = {
@@ -16,20 +17,20 @@ def test_filter_logs():
 
 
 @pytest.mark.parametrize('debug,jsonfmt,expected', [
-  (False, False, "conf/logging.conf"),
-  (False, True, "conf/logging_json.conf"),
-  (True, False, "conf/logging_debug.conf"),
-  (True, True, "conf/logging_debug_json.conf"),
+  (False, False, os.path.join(CONF_DIR, "logging.conf")),
+  (False, True, os.path.join(CONF_DIR, "logging_json.conf")),
+  (True, False, os.path.join(CONF_DIR, "logging_debug.conf")),
+  (True, True, os.path.join(CONF_DIR, "logging_debug_json.conf"))
 ])
 def test_logfile_path(debug, jsonfmt, expected, monkeypatch):
   assert logfile_path(jsonfmt=jsonfmt, debug=debug) == expected
 
 
 @pytest.mark.parametrize('debug,jsonfmt,expected', [
-  ("false", "false", "conf/logging.conf"),
-  ("false", "true", "conf/logging_json.conf"),
-  ("true", "false", "conf/logging_debug.conf"),
-  ("true", "true", "conf/logging_debug_json.conf"),
+  ("false", "false", os.path.join(CONF_DIR, "logging.conf")),
+  ("false", "true", os.path.join(CONF_DIR, "logging_json.conf")),
+  ("true", "false", os.path.join(CONF_DIR, "logging_debug.conf")),
+  ("true", "true", os.path.join(CONF_DIR, "logging_debug_json.conf"))
 ])
 def test_logfile_path_env(debug, jsonfmt, expected, monkeypatch):
   monkeypatch.setenv("DEBUGLOG", debug)
@@ -38,4 +39,4 @@ def test_logfile_path_env(debug, jsonfmt, expected, monkeypatch):
 
 
 def test_logfile_path_default():
-  assert logfile_path() == "conf/logging.conf"
+  assert logfile_path() == os.path.join(CONF_DIR, "logging.conf")
@@ -1,3 +1,4 @@
+import os
 import json
 import logging
 
@@ -5,12 +6,14 @@ from flask_mail import Message
 
 import features
 
+from _init import ROOT_DIR
 from app import mail, app, get_app_url
 from util.jinjautil import get_template_env
 
 
 logger = logging.getLogger(__name__)
-template_env = get_template_env("emails")
+
+template_env = get_template_env(os.path.join(ROOT_DIR, "emails"))
 
 
 class CannotSendEmailException(Exception):

verbs.py | 5
@@ -1,9 +1,4 @@
-import os
-import logging
-import logging.config
-
 from app import app as application
 
 from endpoints.verbs import verbs
 
-
web.py
3
web.py
|
@ -1,6 +1,3 @@
|
||||||
import os
|
|
||||||
import logging.config
|
|
||||||
|
|
||||||
from app import app as application
|
from app import app as application
|
||||||
from endpoints.api import api_bp
|
from endpoints.api import api_bp
|
||||||
from endpoints.bitbuckettrigger import bitbuckettrigger
|
from endpoints.bitbuckettrigger import bitbuckettrigger
|
||||||
|
|