Use $QUAYPATH and $QUAYDIR in conf and init files

Antoine Legrand 2017-02-02 00:17:25 +01:00 committed by Antoine Legrand
parent 334a08d90b
commit cdb3722c17
59 changed files with 341 additions and 225 deletions
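
The changes below share one convention: the Quay checkout, source tree, and config directory are no longer hard-coded absolute paths but are resolved from the QUAYDIR, QUAYPATH and QUAYCONF environment variables, with fallbacks to the old locations. A minimal sketch of the pattern the init scripts below follow (the worker module name is hypothetical; the defaults mirror the ones used in the diff):

#!/bin/bash
# Resolve the install layout; defaults match the fallbacks used throughout this commit.
QUAYDIR=${QUAYDIR:-"/"}                   # directory the container starts in
QUAYPATH=${QUAYPATH:-"."}                 # Quay source tree, relative to QUAYDIR
QUAYCONF=${QUAYCONF:-"$QUAYPATH/conf"}    # config directory derived from QUAYPATH

cd "$QUAYDIR"
# Hypothetical worker invocation; each real script substitutes its own workers.* module.
PYTHONPATH=$QUAYPATH venv/bin/python -m workers.exampleworker 2>&1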


@ -64,6 +64,7 @@ karma-tests:
variables:
GIT_STRATEGY: none
PYTHONPATH: .
QUAYDIR: /quay-registry
TEST: 'true'
mysql:
before_script:
@ -86,6 +87,7 @@ mysql:
MYSQL_ROOT_PASSWORD: quay
MYSQL_USER: quay
PYTHONPATH: .
QUAYDIR: /quay-registry
SKIP_DB_SCHEMA: 'true'
TEST: 'true'
TEST_DATABASE_URI: mysql+pymysql://quay:quay@localhost/quay
@ -108,6 +110,7 @@ postgres:
POSTGRES_PASSWORD: quay
POSTGRES_USER: quay
PYTHONPATH: .
QUAYDIR: /quay-registry
SKIP_DB_SCHEMA: 'true'
TEST: 'true'
TEST_DATABASE_URI: postgresql://quay:quay@localhost/quay
@ -124,6 +127,7 @@ registry-tests:
variables:
GIT_STRATEGY: none
PYTHONPATH: .
QUAYDIR: /quay-registry
TEST: 'true'
stages:
- docker_base
@ -145,6 +149,7 @@ unit-tests:
variables:
GIT_STRATEGY: none
PYTHONPATH: .
QUAYDIR: /quay-registry
TEST: 'true'
variables:
FAILFASTCI_NAMESPACE: quay


@ -23,6 +23,7 @@ function(vars={})
variables: {
TEST: "true",
PYTHONPATH: ".",
QUAYDIR: "/quay-registry",
GIT_STRATEGY: "none",
},
before_script: [


@ -4,6 +4,12 @@ FROM phusion/baseimage:0.9.19
ENV DEBIAN_FRONTEND noninteractive
ENV HOME /root
ENV QUAYCONF /quay/conf
ENV QUAYDIR /quay
ENV QUAYPATH "."
RUN mkdir $QUAYDIR
WORKDIR $QUAYDIR
# This is so we don't break http golang/go#17066
# When Ubuntu has nginx >= 1.11.0 we can switch back.
@ -51,6 +57,7 @@ RUN apt-get install -y \
w3m
# Install python dependencies
ADD requirements.txt requirements.txt
RUN virtualenv --distribute venv
RUN venv/bin/pip install -r requirements.txt # 07SEP2016
@ -127,27 +134,28 @@ RUN chmod 0600 /etc/monit/monitrc
# remove after phusion/baseimage-docker#338 is fixed
ADD conf/init/logrotate.conf /etc/logrotate.conf
# Download any external libs.
RUN mkdir static/fonts static/ldn
ADD external_libraries.py external_libraries.py
RUN venv/bin/python -m external_libraries
RUN mkdir -p /usr/local/nginx/logs/
# TODO(ssewell): only works on a detached head, make work with ref
ADD .git/HEAD GIT_HEAD
# Add all of the files!
ADD . .
RUN mkdir static/fonts static/ldn
# Download any external libs.
RUN venv/bin/python -m external_libraries
RUN mkdir -p /usr/local/nginx/logs/
RUN pyclean .
# Cleanup any NPM-related stuff.
RUN rm -rf /root/.npm
RUN rm -rf /.npm
RUN rm -rf .npm
RUN rm -rf /usr/local/lib/node_modules
RUN rm -rf /usr/share/yarn/node_modules
RUN rm -rf /root/node_modules
RUN rm -rf /node_modules
RUN rm -rf /grunt
RUN rm -rf node_modules
RUN rm -rf grunt
RUN rm package.json yarn.lock
# Run the tests

_init.py Normal file

@ -0,0 +1,35 @@
import os
import re
import subprocess
ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
CONF_DIR = os.getenv("QUAYCONF", os.path.join(ROOT_DIR, "conf/"))
STATIC_DIR = os.path.join(ROOT_DIR, 'static/')
STATIC_LDN_DIR = os.path.join(STATIC_DIR, 'ldn/')
STATIC_FONTS_DIR = os.path.join(STATIC_DIR, 'fonts/')
TEMPLATE_DIR = os.path.join(ROOT_DIR, 'templates/')
def _get_version_number_changelog():
try:
with open(os.path.join(ROOT_DIR, 'CHANGELOG.md')) as f:
return re.search(r'(v[0-9]+\.[0-9]+\.[0-9]+)', f.readline()).group(0)
except IOError:
return ''
def _get_git_sha():
if os.path.exists("GIT_HEAD"):
with open(os.path.join(ROOT_DIR, "GIT_HEAD")) as f:
return f.read()
else:
try:
return subprocess.check_output(["git", "rev-parse", "HEAD"]).strip()[0:8]
except (OSError, subprocess.CalledProcessError):
pass
return "unknown"
__version__ = _get_version_number_changelog()
__gitrev__ = _get_git_sha()
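
The new _init.py module above centralizes these paths so the rest of the codebase can stop hard-coding 'conf/...' and repository-relative locations. A short sketch of how later hunks in this commit consume the constants (the variable names here are only illustrative; the joins mirror the app.py and endpoint changes below):

import os
from _init import ROOT_DIR, CONF_DIR

# Stack overrides resolve against $QUAYCONF instead of a hard-coded conf/stack/.
override_config_dir = os.path.join(CONF_DIR, 'stack/')

# Files shipped with the source tree resolve against the checkout root.
changelog_path = os.path.join(ROOT_DIR, 'CHANGELOG.md')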

app.py

@ -14,6 +14,7 @@ from jwkest.jwk import RSAKey
from werkzeug.routing import BaseConverter
import features
from _init import CONF_DIR
from auth.auth_context import get_authenticated_user
from avatars.avatars import Avatar
from buildman.manager.buildcanceller import BuildCanceller
@ -52,9 +53,9 @@ from util.security.instancekeys import InstanceKeys
from util.security.signing import Signer
OVERRIDE_CONFIG_DIRECTORY = 'conf/stack/'
OVERRIDE_CONFIG_YAML_FILENAME = 'conf/stack/config.yaml'
OVERRIDE_CONFIG_PY_FILENAME = 'conf/stack/config.py'
OVERRIDE_CONFIG_DIRECTORY = os.path.join(CONF_DIR, 'stack/')
OVERRIDE_CONFIG_YAML_FILENAME = os.path.join(CONF_DIR, 'stack/config.yaml')
OVERRIDE_CONFIG_PY_FILENAME = os.path.join(CONF_DIR, 'stack/config.py')
OVERRIDE_CONFIG_KEY = 'QUAY_OVERRIDE_CONFIG'

boot.py Normal file → Executable file

@ -13,6 +13,7 @@ from app import app
from data.model.release import set_region_release
from util.config.database import sync_database_with_config
from util.generatepresharedkey import generate_key
from _init import CONF_DIR
@lru_cache(maxsize=1)
@ -42,7 +43,7 @@ def setup_jwt_proxy():
"""
Creates a service key for quay to use in the jwtproxy and generates the JWT proxy configuration.
"""
if os.path.exists('conf/jwtproxy_conf.yaml'):
if os.path.exists(os.path.join(CONF_DIR, 'jwtproxy_conf.yaml')):
# Proxy is already setup.
return
@ -65,16 +66,17 @@ def setup_jwt_proxy():
registry = audience + '/keys'
security_issuer = app.config.get('SECURITY_SCANNER_ISSUER_NAME', 'security_scanner')
with open("conf/jwtproxy_conf.yaml.jnj") as f:
with open(os.path.join(CONF_DIR, 'jwtproxy_conf.yaml.jnj')) as f:
template = Template(f.read())
rendered = template.render(
conf_dir=CONF_DIR,
audience=audience,
registry=registry,
key_id=quay_key_id,
security_issuer=security_issuer,
)
with open('conf/jwtproxy_conf.yaml', 'w') as f:
with open(os.path.join(CONF_DIR, 'jwtproxy_conf.yaml'), 'w') as f:
f.write(rendered)


@ -19,7 +19,7 @@ from buildman.asyncutil import AsyncWrapper
from container_cloud_config import CloudConfigContext
from app import metric_queue, app
from util.metrics.metricqueue import duration_collector_async
from _init import ROOT_DIR
logger = logging.getLogger(__name__)
@ -29,7 +29,7 @@ ONE_HOUR = 60*60
_TAG_RETRY_COUNT = 3 # Number of times to retry adding tags.
_TAG_RETRY_SLEEP = 2 # Number of seconds to wait between tag retries.
ENV = Environment(loader=FileSystemLoader('buildman/templates'))
ENV = Environment(loader=FileSystemLoader(os.path.join(ROOT_DIR, "buildman/templates")))
TEMPLATE = ENV.get_template('cloudconfig.yaml')
CloudConfigContext().populate_jinja_environment(ENV)


@ -13,6 +13,7 @@ worker_class = 'gevent'
pythonpath = '.'
preload_app = True
def post_fork(server, worker):
# Reset the Random library to ensure it won't raise the "PID check failed." error after
# gunicorn forks.


@ -13,6 +13,7 @@ worker_class = 'gevent'
pythonpath = '.'
preload_app = True
def post_fork(server, worker):
# Reset the Random library to ensure it won't raise the "PID check failed." error after
# gunicorn forks.


@ -14,6 +14,7 @@ pythonpath = '.'
preload_app = True
timeout = 2000 # Because sync workers
def post_fork(server, worker):
# Reset the Random library to ensure it won't raise the "PID check failed." error after
# gunicorn forks.


@ -1,8 +1,10 @@
#! /bin/bash
set -e
QUAYPATH=${QUAYPATH:-"."}
QUAYCONF=${QUAYCONF:-"$QUAYPATH/conf"}
cd ${QUAYDIR:-"/"}
# Create certs for jwtproxy to mitm outgoing TLS connections
echo '{"CN":"CA","key":{"algo":"rsa","size":2048}}' | cfssl gencert -initca - | cfssljson -bare mitm
cp mitm-key.pem /conf/mitm.key
cp mitm.pem /conf/mitm.cert
cp mitm-key.pem $QUAYCONF/mitm.key
cp mitm.pem $QUAYCONF/mitm.cert
cp mitm.pem /usr/local/share/ca-certificates/mitm.crt


@ -1,34 +1,38 @@
#! /bin/bash
set -e
QUAYPATH=${QUAYPATH:-"."}
QUAYCONF=${QUAYCONF:-"$QUAYPATH/conf"}
cd ${QUAYDIR:-"/"}
# Add the custom LDAP certificate
if [ -e /conf/stack/ldap.crt ]
if [ -e $QUAYCONF/stack/ldap.crt ]
then
cp /conf/stack/ldap.crt /usr/local/share/ca-certificates/ldap.crt
cp $QUAYCONF/stack/ldap.crt /usr/local/share/ca-certificates/ldap.crt
fi
# Add extra trusted certificates (as a directory)
if [ -d /conf/stack/extra_ca_certs ]; then
if test "$(ls -A "/conf/stack/extra_ca_certs")"; then
echo "Installing extra certificates found in /conf/stack/extra_ca_certs directory"
cp /conf/stack/extra_ca_certs/* /usr/local/share/ca-certificates/
cat /conf/stack/extra_ca_certs/* >> /venv/lib/python2.7/site-packages/requests/cacert.pem
if [ -d $QUAYCONF/stack/extra_ca_certs ]; then
if test "$(ls -A "$QUAYCONF/stack/extra_ca_certs")"; then
echo "Installing extra certificates found in $QUAYCONF/stack/extra_ca_certs directory"
cp $QUAYCONF/stack/extra_ca_certs/* /usr/local/share/ca-certificates/
cat $QUAYCONF/stack/extra_ca_certs/* >> venv/lib/python2.7/site-packages/requests/cacert.pem
fi
fi
# Add extra trusted certificates (as a file)
if [ -f /conf/stack/extra_ca_certs ]; then
echo "Installing extra certificates found in /conf/stack/extra_ca_certs file"
csplit -z -f /usr/local/share/ca-certificates/extra-ca- /conf/stack/extra_ca_certs '/-----BEGIN CERTIFICATE-----/' '{*}'
cat /conf/stack/extra_ca_certs >> /venv/lib/python2.7/site-packages/requests/cacert.pem
if [ -f $QUAYCONF/stack/extra_ca_certs ]; then
echo "Installing extra certificates found in $QUAYCONF/stack/extra_ca_certs file"
csplit -z -f /usr/local/share/ca-certificates/extra-ca- $QUAYCONF/stack/extra_ca_certs '/-----BEGIN CERTIFICATE-----/' '{*}'
cat $QUAYCONF/stack/extra_ca_certs >> venv/lib/python2.7/site-packages/requests/cacert.pem
fi
# Add extra trusted certificates (prefixed)
for f in $(find /conf/stack/ -maxdepth 1 -type f -name "extra_ca*")
for f in $(find $QUAYCONF/stack/ -maxdepth 1 -type f -name "extra_ca*")
do
echo "Installing extra cert $f"
cp "$f" /usr/local/share/ca-certificates/
cat "$f" >> /venv/lib/python2.7/site-packages/requests/cacert.pem
cat "$f" >> venv/lib/python2.7/site-packages/requests/cacert.pem
done
# Update all CA certificates.


@ -1,11 +1,15 @@
#! /bin/sh
QUAYPATH=${QUAYPATH:-"."}
QUAYCONF=${QUAYCONF:-"$QUAYPATH/conf"}
if [ -e /conf/stack/robots.txt ]
cd ${QUAYDIR:-"/"}
if [ -e $QUAYCONF/stack/robots.txt ]
then
cp /conf/stack/robots.txt /templates/robots.txt
cp $QUAYCONF/stack/robots.txt $QUAYPATH/templates/robots.txt
fi
if [ -e /conf/stack/favicon.ico ]
if [ -e $QUAYCONF/stack/favicon.ico ]
then
cp /conf/stack/favicon.ico /static/favicon.ico
cp $QUAYCONF/stack/favicon.ico $QUAYPATH/static/favicon.ico
fi


@ -1,6 +1,10 @@
#! /bin/sh
QUAYPATH=${QUAYPATH:-"."}
QUAYCONF=${QUAYCONF:-"$QUAYPATH/conf"}
if [ -e /conf/stack/syslog-ng-extra.conf ]
cd ${QUAYDIR:-"/"}
if [ -e $QUAYCONF/stack/syslog-ng-extra.conf ]
then
cp /conf/stack/syslog-ng-extra.conf /etc/syslog-ng/conf.d/
cp $QUAYCONF/stack/syslog-ng-extra.conf /etc/syslog-ng/conf.d/
fi


@ -0,0 +1,51 @@
import os
import os.path
import yaml
import jinja2
QUAYPATH = os.getenv("QUAYPATH", ".")
QUAYDIR = os.getenv("QUAYDIR", "/")
QUAYCONF_DIR = os.getenv("QUAYCONF", os.path.join(QUAYDIR, QUAYPATH, "conf"))
STATIC_DIR = os.path.join(QUAYDIR, 'static/')
def write_config(filename, **kwargs):
with open(filename + ".jnj") as f:
template = jinja2.Template(f.read())
rendered = template.render(kwargs)
with open(filename, 'w') as f:
f.write(rendered)
def generate_nginx_config():
"""
Generates nginx config from the app config
"""
use_https = os.path.exists(os.path.join(QUAYCONF_DIR, 'stack/ssl.key'))
write_config(os.path.join(QUAYCONF_DIR, 'nginx/nginx.conf'), use_https=use_https)
def generate_server_config(config):
"""
Generates server config from the app config
"""
config = config or {}
tuf_server = config.get('TUF_SERVER', None)
tuf_host = config.get('TUF_HOST', None)
signing_enabled = config.get('FEATURE_SIGNING', False)
maximum_layer_size = config.get('MAXIMUM_LAYER_SIZE', '20G')
write_config(
os.path.join(QUAYCONF_DIR, 'nginx/server-base.conf'), tuf_server=tuf_server, tuf_host=tuf_host,
signing_enabled=signing_enabled, maximum_layer_size=maximum_layer_size, static_dir=STATIC_DIR)
if __name__ == "__main__":
if os.path.exists(os.path.join(QUAYCONF_DIR, 'stack/config.yaml')):
with open(os.path.join(QUAYCONF_DIR, 'stack/config.yaml'), 'r') as f:
config = yaml.load(f)
else:
config = None
generate_server_config(config)
generate_nginx_config()


@ -1,51 +1,8 @@
#!/venv/bin/python
#!/bin/bash
import os.path
QUAYDIR=${QUAYDIR:-"/"}
QUAYPATH=${QUAYPATH:-"."}
QUAYCONF=${QUAYCONF:-"$QUAYPATH/conf"}
import yaml
import jinja2
def write_config(filename, **kwargs):
with open(filename + ".jnj") as f:
template = jinja2.Template(f.read())
rendered = template.render(kwargs)
with open(filename, 'w') as f:
f.write(rendered)
def generate_nginx_config():
"""
Generates nginx config from the app config
"""
use_https = os.path.exists('conf/stack/ssl.key')
write_config('conf/nginx/nginx.conf',
use_https=use_https)
def generate_server_config(config):
"""
Generates server config from the app config
"""
config = config or {}
tuf_server = config.get('TUF_SERVER', None)
tuf_host = config.get('TUF_HOST', None)
signing_enabled = config.get('FEATURE_SIGNING', False)
maximum_layer_size = config.get('MAXIMUM_LAYER_SIZE', '20G')
write_config('conf/nginx/server-base.conf',
tuf_server=tuf_server,
tuf_host=tuf_host,
signing_enabled=signing_enabled,
maximum_layer_size=maximum_layer_size)
if __name__ == "__main__":
if os.path.exists('conf/stack/config.yaml'):
with open('conf/stack/config.yaml', 'r') as f:
config = yaml.load(f)
else:
config = None
generate_server_config(config)
generate_nginx_config()
cd $QUAYDIR
venv/bin/python $QUAYCONF/init/nginx_conf_create.py


@ -1,5 +1,6 @@
#! /bin/bash
#!/bin/bash
set -e
cd ${QUAYDIR:-"/"}
# Run the database migration
PYTHONPATH=. venv/bin/alembic upgrade head
PYTHONPATH=${QUAYPATH:-"."} venv/bin/alembic upgrade head


@ -2,7 +2,9 @@
echo 'Starting Blob upload cleanup worker'
cd /
venv/bin/python -m workers.blobuploadcleanupworker 2>&1
QUAYPATH=${QUAYPATH:-"."}
cd ${QUAYDIR:-"/"}
PYTHONPATH=$QUAYPATH venv/bin/python -m workers.blobuploadcleanupworker 2>&1
echo 'Blob upload cleanup exited'


@ -2,7 +2,8 @@
echo 'Starting build logs archiver worker'
cd /
venv/bin/python -m workers.buildlogsarchiver 2>&1
QUAYPATH=${QUAYPATH:-"."}
cd ${QUAYDIR:-"/"}
PYTHONPATH=$QUAYPATH venv/bin/python -m workers.buildlogsarchiver 2>&1
echo 'Diffs worker exited'


@ -6,7 +6,9 @@ echo 'Starting internal build manager'
monit
# Run the build manager.
cd /
QUAYPATH=${QUAYPATH:-"."}
cd ${QUAYDIR:-"/"}
export PYTHONPATH=$QUAYPATH
exec venv/bin/python -m buildman.builder 2>&1
echo 'Internal build manager exited'


@ -2,7 +2,8 @@
echo 'Starting chunk cleanup worker'
cd /
venv/bin/python -m workers.chunkcleanupworker 2>&1
QUAYPATH=${QUAYPATH:-"."}
cd ${QUAYDIR:-"/"}
PYTHONPATH=$QUAYPATH venv/bin/python -m workers.chunkcleanupworker 2>&1
echo 'Chunk cleanup worker exited'


@ -2,7 +2,8 @@
echo 'Starting GC worker'
cd /
venv/bin/python -m workers.gc.gcworker 2>&1
QUAYPATH=${QUAYPATH:-"."}
cd ${QUAYDIR:-"/"}
PYTHONPATH=$QUAYPATH venv/bin/python -m workers.gc.gcworker 2>&1
echo 'Repository GC exited'
echo 'Repository GC exited'


@ -2,7 +2,8 @@
echo 'Starting global prometheus stats worker'
cd /
venv/bin/python -m workers.globalpromstats
QUAYPATH=${QUAYPATH:-"."}
cd ${QUAYDIR:-"/"}
PYTHONPATH=$QUAYPATH venv/bin/python -m workers.globalpromstats
echo 'Global prometheus stats exited'


@ -2,7 +2,10 @@
echo 'Starting gunicon'
cd /
nice -n 10 venv/bin/gunicorn -c conf/gunicorn_registry.py registry:application
QUAYPATH=${QUAYPATH:-"."}
QUAYCONF=${QUAYCONF:-"$QUAYPATH/conf"}
cd ${QUAYDIR:-"/"}
PYTHONPATH=$QUAYPATH nice -n 10 venv/bin/gunicorn -c $QUAYCONF/gunicorn_registry.py registry:application
echo 'Gunicorn exited'


@ -2,7 +2,10 @@
echo 'Starting gunicon'
cd /
venv/bin/gunicorn -c conf/gunicorn_secscan.py secscan:application
QUAYPATH=${QUAYPATH:-"."}
QUAYCONF=${QUAYCONF:-"$QUAYPATH/conf"}
cd ${QUAYDIR:-"/"}
PYTHONPATH=$QUAYPATH venv/bin/gunicorn -c $QUAYCONF/gunicorn_secscan.py secscan:application
echo 'Gunicorn exited'


@ -2,7 +2,10 @@
echo 'Starting gunicon'
cd /
nice -n 10 venv/bin/gunicorn -c conf/gunicorn_verbs.py verbs:application
QUAYPATH=${QUAYPATH:-"."}
QUAYCONF=${QUAYCONF:-"$QUAYPATH/conf"}
cd ${QUAYDIR:-"/"}
PYTHONPATH=$QUAYPATH nice -n 10 venv/bin/gunicorn -c $QUAYCONF/gunicorn_verbs.py verbs:application
echo 'Gunicorn exited'


@ -2,7 +2,10 @@
echo 'Starting gunicon'
cd /
venv/bin/gunicorn -c conf/gunicorn_web.py web:application
QUAYPATH=${QUAYPATH:-"."}
QUAYCONF=${QUAYCONF:-"$QUAYPATH/conf"}
cd ${QUAYDIR:-"/"}
PYTHONPATH=$QUAYPATH venv/bin/gunicorn -c $QUAYCONF/gunicorn_web.py web:application
echo 'Gunicorn exited'


@ -1,12 +1,16 @@
#! /bin/bash
cd /
if [ -f conf/jwtproxy_conf.yaml ];
QUAYPATH=${QUAYPATH:-"."}
cd ${QUAYDIR:-"/"}
PYTHONPATH=$QUAYPATH
QUAYCONF=${QUAYCONF:-"$QUAYPATH/conf"}
if [ -f $QUAYCONF/jwtproxy_conf.yaml ];
then
echo 'Starting jwtproxy'
/usr/local/bin/jwtproxy --config conf/jwtproxy_conf.yaml
/usr/local/bin/jwtproxy --config $QUAYCONF/jwtproxy_conf.yaml
rm /tmp/jwtproxy_secscan.sock
echo 'Jwtproxy exited'
else
sleep 1
sleep 1
fi


@ -2,7 +2,8 @@
echo 'Starting log rotation worker'
cd /
venv/bin/python -m workers.logrotateworker
QUAYPATH=${QUAYPATH:-"."}
cd ${QUAYDIR:-"/"}
PYTHONPATH=$QUAYPATH venv/bin/python -m workers.logrotateworker
echo 'Log rotation worker exited'


@ -2,6 +2,11 @@
echo 'Starting nginx'
/usr/sbin/nginx -c /conf/nginx/nginx.conf
QUAYPATH=${QUAYPATH:-"."}
cd ${QUAYDIR:-"/"}
PYTHONPATH=$QUAYPATH
QUAYCONF=${QUAYCONF:-"$QUAYPATH/conf"}
/usr/sbin/nginx -c $QUAYCONF/nginx/nginx.conf
echo 'Nginx exited'


@ -2,7 +2,9 @@
echo 'Starting notification worker'
cd /
venv/bin/python -m workers.notificationworker
QUAYPATH=${QUAYPATH:-"."}
cd ${QUAYDIR:-"/"}
PYTHONPATH=$QUAYPATH venv/bin/python -m workers.notificationworker
echo 'Notification worker exited'


@ -2,7 +2,8 @@
echo 'Starting Queue cleanup worker'
cd /
venv/bin/python -m workers.queuecleanupworker 2>&1
QUAYPATH=${QUAYPATH:-"."}
cd ${QUAYDIR:-"/"}
PYTHONPATH=$QUAYPATH venv/bin/python -m workers.queuecleanupworker 2>&1
echo 'Repository Queue cleanup exited'


@ -2,7 +2,8 @@
echo 'Starting repository action count worker'
cd /
venv/bin/python -m workers.repositoryactioncounter 2>&1
QUAYPATH=${QUAYPATH:-"."}
cd ${QUAYDIR:-"/"}
PYTHONPATH=$QUAYPATH venv/bin/python -m workers.repositoryactioncounter 2>&1
echo 'Repository action worker exited'


@ -2,7 +2,8 @@
echo 'Starting security scanner notification worker'
cd /
venv/bin/python -m workers.security_notification_worker 2>&1
QUAYPATH=${QUAYPATH:-"."}
cd ${QUAYDIR:-"/"}
PYTHONPATH=$QUAYPATH venv/bin/python -m workers.security_notification_worker 2>&1
echo 'Security scanner notification worker exited'


@ -2,7 +2,8 @@
echo 'Starting security scanner worker'
cd /
venv/bin/python -m workers.securityworker.securityworker 2>&1
QUAYPATH=${QUAYPATH:-"."}
cd ${QUAYDIR:-"/"}
PYTHONPATH=$QUAYPATH venv/bin/python -m workers.securityworker.securityworker 2>&1
echo 'Security scanner worker exited'


@ -2,7 +2,8 @@
echo 'Starting service key worker'
cd /
venv/bin/python -m workers.service_key_worker 2>&1
QUAYPATH=${QUAYPATH:-"."}
cd ${QUAYDIR:-"/"}
PYTHONPATH=$QUAYPATH venv/bin/python -m workers.service_key_worker 2>&1
echo 'Service key worker exited'


@ -2,7 +2,8 @@
echo 'Starting storage replication worker'
cd /
venv/bin/python -m workers.storagereplication 2>&1
QUAYPATH=${QUAYPATH:-"."}
cd ${QUAYDIR:-"/"}
PYTHONPATH=$QUAYPATH venv/bin/python -m workers.storagereplication 2>&1
echo 'Repository storage replication exited'


@ -2,7 +2,8 @@
echo 'Starting team synchronization worker'
cd /
venv/bin/python -m workers.teamsyncworker 2>&1
QUAYPATH=${QUAYPATH:-"."}
cd ${QUAYDIR:-"/"}
PYTHONPATH=$QUAYPATH venv/bin/python -m workers.teamsyncworker 2>&1
echo 'Team synchronization worker exited'


@ -1,3 +1,4 @@
#!/bin/bash
cd ${QUAYDIR:-"/"}
/venv/bin/python /boot.py
venv/bin/python ${QUAYPATH:-"."}/boot.py


@ -2,8 +2,8 @@ jwtproxy:
signer_proxy:
enabled: true
listen_addr: :8080
ca_key_file: /conf/mitm.key
ca_crt_file: /conf/mitm.cert
ca_key_file: {{ conf_dir }}/mitm.key
ca_crt_file: {{ conf_dir }}/mitm.cert
signer:
issuer: quay
@ -13,7 +13,7 @@ jwtproxy:
type: preshared
options:
key_id: {{ key_id }}
private_key_path: /conf/quay.pem
private_key_path: {{ conf_dir }}/quay.pem
verifier_proxies:
- enabled: true
listen_addr: unix:/tmp/jwtproxy_secscan.sock


@ -166,11 +166,11 @@ location /c1/ {
location /static/ {
# checks for static file, if not found proxy to app
alias /static/;
alias {{static_dir}};
error_page 404 /404;
}
error_page 502 /static/502.html;
error_page 502 {{static_dir}}/502.html;
location ~ ^/b1/controller(/?)(.*) {
proxy_pass http://build_manager_controller_server/$2;


@ -3,6 +3,8 @@ from uuid import uuid4
import os.path
import requests
from _init import ROOT_DIR, CONF_DIR
def build_requests_session():
sess = requests.Session()
@ -45,7 +47,7 @@ class ImmutableConfig(object):
# Status tag config
STATUS_TAGS = {}
for tag_name in ['building', 'failed', 'none', 'ready', 'cancelled']:
tag_path = os.path.join('buildstatus', tag_name + '.svg')
tag_path = os.path.join(ROOT_DIR, 'buildstatus', tag_name + '.svg')
with open(tag_path) as tag_svg:
STATUS_TAGS[tag_name] = tag_svg.read()
@ -303,7 +305,7 @@ class DefaultConfig(ImmutableConfig):
# System logs.
SYSTEM_LOGS_PATH = "/var/log/"
SYSTEM_LOGS_FILE = "/var/log/syslog"
SYSTEM_SERVICES_PATH = "conf/init/service/"
SYSTEM_SERVICES_PATH = os.path.join(CONF_DIR, "init/service/")
# Allow registry pulls when unable to write to the audit log
ALLOW_PULLS_WITHOUT_STRICT_LOGGING = False
@ -407,11 +409,11 @@ class DefaultConfig(ImmutableConfig):
INSTANCE_SERVICE_KEY_SERVICE = 'quay'
# The location of the key ID file generated for this instance.
INSTANCE_SERVICE_KEY_KID_LOCATION = 'conf/quay.kid'
INSTANCE_SERVICE_KEY_KID_LOCATION = os.path.join(CONF_DIR, 'quay.kid')
# The location of the private key generated for this instance.
# NOTE: If changed, jwtproxy_conf.yaml.jnj must also be updated.
INSTANCE_SERVICE_KEY_LOCATION = 'conf/quay.pem'
INSTANCE_SERVICE_KEY_LOCATION = os.path.join(CONF_DIR, 'quay.pem')
# This instance's service key expiration in minutes.
INSTANCE_SERVICE_KEY_EXPIRATION = 120


@ -32,6 +32,7 @@ from util.useremails import send_confirmation_email, send_recovery_email
from util.license import decode_license, LicenseDecodeError
from util.security.ssl import load_certificate, CertInvalidException
from util.config.validator import EXTRA_CA_DIRECTORY
from _init import ROOT_DIR
logger = logging.getLogger(__name__)
@ -179,7 +180,7 @@ class ChangeLog(ApiResource):
def get(self):
""" Returns the change log for this installation. """
if SuperUserPermission().can():
with open('CHANGELOG.md', 'r') as f:
with open(os.path.join(ROOT_DIR, 'CHANGELOG.md'), 'r') as f:
return {
'log': f.read()
}


@ -13,7 +13,7 @@ from flask import make_response, render_template, request, abort, session
from flask_login import login_user
from flask_principal import identity_changed
import endpoints.decorated # Register the various exceptions via decorators.
import endpoints.decorated # Register the various exceptions via decorators.
import features
from app import app, oauth_apps, oauth_login, LoginWrappedDBUser, user_analytics, license_validator
@ -25,6 +25,7 @@ from util.names import parse_namespace_repository
from util.secscan import PRIORITY_LEVELS
from util.saas.useranalytics import build_error_callback
from util.timedeltastring import convert_to_timedelta
from _init import STATIC_DIR, __version__
logger = logging.getLogger(__name__)
@ -133,16 +134,9 @@ def list_files(path, extension):
# Remove the static/ prefix. It is added in the template.
return os.path.join(dp, f)[len('static/'):]
filepath = 'static/' + path
filepath = os.path.join('static/', path)
return [join_path(dp, f) for dp, dn, files in os.walk(filepath) for f in files if matches(f)]
@lru_cache(maxsize=1)
def _get_version_number():
try:
with open('CHANGELOG.md') as f:
return re.search('(v[0-9]+\.[0-9]+\.[0-9]+)', f.readline()).group(0)
except IOError:
return ''
def render_page_template(name, route_data=None, **kwargs):
debugging = app.config.get('DEBUGGING', False)
@ -188,7 +182,7 @@ def render_page_template(name, route_data=None, **kwargs):
version_number = ''
if not features.BILLING:
version_number = 'Quay %s' % _get_version_number()
version_number = 'Quay %s' % __version__
resp = make_response(render_template(name,
route_data=route_data,
@ -226,4 +220,3 @@ def render_page_template(name, route_data=None, **kwargs):
resp.headers['X-FRAME-OPTIONS'] = 'DENY'
return resp


@ -1,3 +1,4 @@
import os
import json
import logging
@ -38,6 +39,8 @@ from util.saas.useranalytics import build_error_callback
from util.systemlogs import build_logs_archive
from util.useremails import send_email_changed
from util.registry.gzipinputstream import GzipInputStream
from _init import ROOT_DIR
PGP_KEY_MIMETYPE = 'application/pgp-keys'
@ -287,7 +290,7 @@ def dbrevision_health():
db_revision = result[0]
# Find the local revision from the file system.
with open('ALEMBIC_HEAD', 'r') as f:
with open(os.path.join(ROOT_DIR, 'ALEMBIC_HEAD'), 'r') as f:
local_revision = f.readline().split(' ')[0]
data = {


@ -2,7 +2,8 @@ import urllib2
import re
import os
LOCAL_DIRECTORY = '/static/ldn/'
from _init import STATIC_FONTS_DIR, STATIC_LDN_DIR
LOCAL_PATH = '/static/ldn/'
EXTERNAL_JS = [
'code.jquery.com/jquery.js',
@ -56,14 +57,14 @@ EXTERNAL_CSS_FONTS = [
def get_external_javascript(local=False):
if local:
return [LOCAL_DIRECTORY + format_local_name(src) for src in EXTERNAL_JS]
return [LOCAL_PATH + format_local_name(src) for src in EXTERNAL_JS]
return ['//' + src for src in EXTERNAL_JS]
def get_external_css(local=False):
if local:
return [LOCAL_DIRECTORY + format_local_name(src) for src in EXTERNAL_CSS]
return [LOCAL_PATH + format_local_name(src) for src in EXTERNAL_CSS]
return ['//' + src for src in EXTERNAL_CSS]
@ -88,7 +89,7 @@ if __name__ == '__main__':
filename = format_local_name(url)
print 'Writing %s' % filename
with open(LOCAL_DIRECTORY + filename, 'w') as f:
with open(STATIC_LDN_DIR + filename, 'w') as f:
f.write(contents)
for url in EXTERNAL_CSS_FONTS:
@ -96,7 +97,7 @@ if __name__ == '__main__':
response = urllib2.urlopen('https://' + url)
filename = os.path.basename(url).split('?')[0]
with open('static/ldn/' + filename, "wb") as local_file:
with open(STATIC_LDN_DIR + filename, "wb") as local_file:
local_file.write(response.read())
for url in EXTERNAL_FONTS:
@ -104,5 +105,5 @@ if __name__ == '__main__':
response = urllib2.urlopen('https://' + url)
filename = os.path.basename(url).split('?')[0]
with open('static/fonts/' + filename, "wb") as local_file:
with open(STATIC_FONTS_DIR + filename, "wb") as local_file:
local_file.write(response.read())


@ -4,6 +4,12 @@ FROM phusion/baseimage:0.9.19
ENV DEBIAN_FRONTEND noninteractive
ENV HOME /root
ENV QUAYDIR /quay-registry
ENV QUAYCONF /quay-registry/conf
ENV QUAYPATH "."
RUN mkdir $QUAYDIR
WORKDIR $QUAYDIR
# This is so we don't break http golang/go#17066
# When Ubuntu has nginx >= 1.11.0 we can switch back.
@ -50,13 +56,6 @@ RUN apt-get update && apt-get upgrade -y \
yarn=0.22.0-1 \
w3m # 26MAY2017
# Install python dependencies
COPY requirements.txt requirements-tests.txt ./
RUN virtualenv --distribute venv \
&& venv/bin/pip install -r requirements.txt \
&& venv/bin/pip install -r requirements-tests.txt \
&& venv/bin/pip freeze # 07SEP2016
# Install cfssl
RUN mkdir /gocode
ENV GOPATH /gocode
@ -78,21 +77,28 @@ RUN curl -L -o /usr/local/bin/jwtproxy https://github.com/coreos/jwtproxy/releas
RUN curl -L -o /usr/local/bin/prometheus-aggregator https://github.com/coreos/prometheus-aggregator/releases/download/v0.0.1-alpha/prometheus-aggregator \
&& chmod +x /usr/local/bin/prometheus-aggregator
# Install python dependencies
COPY requirements.txt requirements-tests.txt ./
RUN virtualenv --distribute venv \
&& venv/bin/pip install -r requirements.txt \
&& venv/bin/pip install -r requirements-tests.txt \
&& venv/bin/pip freeze
# Install front-end dependencies
RUN ln -s /usr/bin/nodejs /usr/bin/node
COPY static/ package.json tsconfig.json webpack.config.js tslint.json yarn.lock ./
RUN yarn install --ignore-engines
RUN mkdir -p /etc/my_init.d /etc/systlog-ng /usr/local/bin /etc/monit static/fonts static/ldn /usr/local/nginx/logs/
RUN mkdir -p /etc/my_init.d /etc/systlog-ng /usr/local/bin /etc/monit $QUAYDIR/static/fonts $QUAYDIR/static/ldn /usr/local/nginx/logs/
COPY external_libraries.py _init.py ./
COPY external_libraries.py ./
RUN venv/bin/python -m external_libraries
ARG RUN_TESTS=false
ENV RUN_TESTS ${RUN_TESTS}
RUN rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* /root/.cache
VOLUME ["/conf/stack", "/var/log", "/datastorage", "/tmp", "/conf/etcd"]
VOLUME ["$QUAYCONF/stack", "/var/log", "/datastorage", "/tmp", "$QUAYCONF/etcd"]
EXPOSE 443 8443 80


@ -1,14 +1,15 @@
# vim:ft=dockerfile
FROM quay.io/quay/quay-base:latest
FROM quay.io/quay/quay-base:absolute
WORKDIR $QUAYDIR
COPY . .
# Install python dependencies
RUN virtualenv --distribute venv \
&& venv/bin/pip install -r requirements.txt \
&& venv/bin/pip install -r requirements-tests.txt \
&& venv/bin/pip freeze # 07SEP2016
&& venv/bin/pip freeze
# Check python dependencies for the GPL
# Due to the following bug, pip results must be piped to a file before grepping:
@ -25,27 +26,16 @@ RUN yarn install --ignore-engines \
# Set up the init system
RUN mkdir -p /etc/my_init.d /etc/systlog-ng /usr/local/bin /etc/monit static/fonts static/ldn /usr/local/nginx/logs/ \
&& cp conf/init/*.sh /etc/my_init.d/ \
&& cp conf/init/syslog-ng.conf /etc/syslog-ng/ \
&& cp -r conf/init/service/* /etc/service \
&& cp conf/kill-buildmanager.sh /usr/local/bin/kill-buildmanager.sh \
&& cp conf/monitrc /etc/monit/monitrc \
&& cp $QUAYCONF/init/*.sh /etc/my_init.d/ \
&& cp $QUAYCONF/init/syslog-ng.conf /etc/syslog-ng/ \
&& cp -r $QUAYCONF/init/service/* /etc/service \
&& cp $QUAYCONF/kill-buildmanager.sh /usr/local/bin/kill-buildmanager.sh \
&& cp $QUAYCONF/monitrc /etc/monit/monitrc \
&& chmod 0600 /etc/monit/monitrc \
&& cp conf/init/logrotate.conf /etc/logrotate.conf \
&& cp $QUAYCONF/init/logrotate.conf /etc/logrotate.conf \
&& cp .git/HEAD GIT_HEAD \
&& rm -rf /etc/service/syslog-forwarder
# Run the tests
RUN if [ "$RUN_TESTS" = true ]; then \
TEST=true PYTHONPATH="." venv/bin/py.test --timeout=7200 --verbose \
--show-count -x --color=no ./ && rm -rf /var/tmp/; \
TEST=true PYTHONPATH="." venv/bin/py.test --timeout=7200 --verbose \
--show-count -x --color=no test/registry_tests.py && rm -rf /var/tmp/;\
yarn test; \
fi
# Cleanup any NPM-related stuff.
# RUN apt-get remove -y --auto-remove python-dev g++ libjpeg62-dev libevent-dev libldap2-dev libsasl2-dev libpq-dev libffi-dev libgpgme11-dev nodejs jpegoptim optipng w3m \
# && apt-get autoremove -y \
@ -53,4 +43,4 @@ RUN if [ "$RUN_TESTS" = true ]; then \
# && rm -rf /root/.npm /.npm /usr/local/lib/node_modules /usr/share/yarn/node_modules \
# /root/node_modules /node_modules /grunt
RUN PYTHONPATH=. venv/bin/alembic heads | grep -E '^[0-9a-f]+ \(head\)$' > ALEMBIC_HEAD
RUN PYTHONPATH=$QUAYPATH venv/bin/alembic heads | grep -E '^[0-9a-f]+ \(head\)$' > ALEMBIC_HEAD


@ -1,7 +1,3 @@
import logging
import logging.config
import os
import endpoints.decorated # Note: We need to import this module to make sure the decorators are registered.
import features


@ -1,8 +1,4 @@
import os
import logging.config
from app import app as application
from endpoints.secscan import secscan


@ -10,3 +10,11 @@ branch = True
[coverage:report]
omit =
test/*
[pep8]
ignore = E111,E114
max-line-length = 100
[flake8]
ignore = E111,E114
max-line-length = 100


@ -1,3 +1,4 @@
import os
import ldap
import subprocess
@ -5,6 +6,7 @@ from app import app, config_provider
from data.users import LDAP_CERT_FILENAME
from data.users.externalldap import LDAPConnection, LDAPUsers
from util.config.validators import BaseValidator, ConfigValidationException
from _init import CONF_DIR
class LDAPValidator(BaseValidator):
name = "ldap"
@ -17,7 +19,7 @@ class LDAPValidator(BaseValidator):
# If there is a custom LDAP certificate, then reinstall the certificates for the container.
if config_provider.volume_file_exists(LDAP_CERT_FILENAME):
subprocess.check_call(['/conf/init/certs_install.sh'])
subprocess.check_call([os.path.join(CONF_DIR, 'init/certs_install.sh')])
# Note: raises ldap.INVALID_CREDENTIALS on failure
admin_dn = config.get('LDAP_ADMIN_DN')


@ -3,6 +3,7 @@ from data import model
from util.names import parse_robot_username
from jinja2 import Environment, FileSystemLoader
def icon_path(icon_name):
return '%s/static/img/icons/%s.png' % (get_app_url(), icon_name)


@ -1,4 +1,5 @@
import os
from _init import CONF_DIR
def logfile_path(jsonfmt=False, debug=False):
@ -19,7 +20,7 @@ def logfile_path(jsonfmt=False, debug=False):
if debug or os.getenv('DEBUGLOG', 'false').lower() == 'true':
_debug = "_debug"
return 'conf/logging%s%s.conf' % (_debug, _json)
return os.path.join(CONF_DIR, "logging%s%s.conf" % (_debug, _json))
def filter_logs(values, filtered_fields):


@ -1,3 +1,4 @@
import os
import logging
from abc import ABCMeta, abstractmethod
@ -18,12 +19,13 @@ from util.secscan.validator import SecurityConfigValidator
from util.security.instancekeys import InstanceKeys
from util.security.registry_jwt import generate_bearer_token, build_context_and_subject
from _init import CONF_DIR
TOKEN_VALIDITY_LIFETIME_S = 60 # Amount of time the security scanner has to call the layer URL
UNKNOWN_PARENT_LAYER_ERROR_MSG = 'worker: parent layer is unknown, it must be processed first'
MITM_CERT_PATH = '/conf/mitm.cert'
MITM_CERT_PATH = os.path.join(CONF_DIR, 'mitm.cert')
DEFAULT_HTTP_HEADERS = {'Connection': 'close'}
logger = logging.getLogger(__name__)


@ -1,7 +1,8 @@
import pytest
import os
from util.log import logfile_path, filter_logs
from app import FILTERED_VALUES
from _init import CONF_DIR
def test_filter_logs():
values = {
@ -16,20 +17,20 @@ def test_filter_logs():
@pytest.mark.parametrize('debug,jsonfmt,expected', [
(False, False, "conf/logging.conf"),
(False, True, "conf/logging_json.conf"),
(True, False, "conf/logging_debug.conf"),
(True, True, "conf/logging_debug_json.conf"),
(False, False, os.path.join(CONF_DIR, "logging.conf")),
(False, True, os.path.join(CONF_DIR, "logging_json.conf")),
(True, False, os.path.join(CONF_DIR, "logging_debug.conf")),
(True, True, os.path.join(CONF_DIR, "logging_debug_json.conf"))
])
def test_logfile_path(debug, jsonfmt, expected, monkeypatch):
assert logfile_path(jsonfmt=jsonfmt, debug=debug) == expected
@pytest.mark.parametrize('debug,jsonfmt,expected', [
("false", "false", "conf/logging.conf"),
("false", "true", "conf/logging_json.conf"),
("true", "false", "conf/logging_debug.conf"),
("true", "true", "conf/logging_debug_json.conf"),
("false", "false", os.path.join(CONF_DIR, "logging.conf")),
("false", "true", os.path.join(CONF_DIR, "logging_json.conf")),
("true", "false", os.path.join(CONF_DIR, "logging_debug.conf")),
("true", "true", os.path.join(CONF_DIR, "logging_debug_json.conf"))
])
def test_logfile_path_env(debug, jsonfmt, expected, monkeypatch):
monkeypatch.setenv("DEBUGLOG", debug)
@ -38,4 +39,4 @@ def test_logfile_path_env(debug, jsonfmt, expected, monkeypatch):
def test_logfile_path_default():
assert logfile_path() == "conf/logging.conf"
assert logfile_path() == os.path.join(CONF_DIR, "logging.conf")


@ -1,3 +1,4 @@
import os
import json
import logging
@ -5,12 +6,14 @@ from flask_mail import Message
import features
from _init import ROOT_DIR
from app import mail, app, get_app_url
from util.jinjautil import get_template_env
logger = logging.getLogger(__name__)
template_env = get_template_env("emails")
template_env = get_template_env(os.path.join(ROOT_DIR, "emails"))
class CannotSendEmailException(Exception):


@ -1,9 +1,4 @@
import os
import logging
import logging.config
from app import app as application
from endpoints.verbs import verbs

web.py

@ -1,6 +1,3 @@
import os
import logging.config
from app import app as application
from endpoints.api import api_bp
from endpoints.bitbuckettrigger import bitbuckettrigger