Merge branch 'master' of https://github.com/coreos-inc/quay
commit 30c7cbb80b

12 changed files with 112 additions and 31 deletions
@@ -1,23 +1,30 @@
 # vim:ft=dockerfile

+###############################
+# BEGIN COMMON SECION
+###############################
+
 FROM phusion/baseimage:0.9.15

 ENV DEBIAN_FRONTEND noninteractive
 ENV HOME /root

 # Install the dependencies.
-RUN apt-get update # 20NOV2014
+RUN apt-get update # 11DEC2014

 # New ubuntu packages should be added as their own apt-get install lines below the existing install commands
-RUN apt-get install -y git python-virtualenv python-dev libjpeg8 libjpeg62 libjpeg62-dev libevent-2.0.5 libevent-dev gdebi-core g++ libmagic1 phantomjs nodejs npm libldap-2.4-2 libldap2-dev libsasl2-modules libsasl2-dev libpq5 libpq-dev
+RUN apt-get install -y git python-virtualenv python-dev libjpeg8 libjpeg62 libjpeg62-dev libevent-2.0.5 libevent-dev gdebi-core g++ libmagic1 phantomjs nodejs npm libldap-2.4-2 libldap2-dev libsasl2-modules libsasl2-dev libpq5 libpq-dev libfreetype6-dev libffi-dev

 # Build the python dependencies
 ADD requirements.txt requirements.txt
 RUN virtualenv --distribute venv
 RUN venv/bin/pip install -r requirements.txt

-RUN apt-get remove -y --auto-remove python-dev g++ libjpeg62-dev libevent-dev libldap2-dev libsasl2-dev libpq-dev
+RUN apt-get remove -y --auto-remove python-dev g++ libjpeg62-dev libevent-dev libldap2-dev libsasl2-dev libpq-dev libffi-dev

-### End common section ###
+###############################
+# END COMMON SECION
+###############################

 RUN apt-get install -y lxc aufs-tools
@@ -1,4 +1,9 @@
 # vim:ft=dockerfile

+###############################
+# BEGIN COMMON SECION
+###############################
+
 FROM phusion/baseimage:0.9.15

 ENV DEBIAN_FRONTEND noninteractive
@@ -8,16 +13,18 @@ ENV HOME /root
 RUN apt-get update # 11DEC2014

 # New ubuntu packages should be added as their own apt-get install lines below the existing install commands
-RUN apt-get install -y git python-virtualenv python-dev libjpeg8 libjpeg62 libjpeg62-dev libevent-2.0.5 libevent-dev gdebi-core g++ libmagic1 phantomjs nodejs npm libldap-2.4-2 libldap2-dev libsasl2-modules libsasl2-dev libpq5 libpq-dev libfreetype6-dev
+RUN apt-get install -y git python-virtualenv python-dev libjpeg8 libjpeg62 libjpeg62-dev libevent-2.0.5 libevent-dev gdebi-core g++ libmagic1 phantomjs nodejs npm libldap-2.4-2 libldap2-dev libsasl2-modules libsasl2-dev libpq5 libpq-dev libfreetype6-dev libffi-dev

 # Build the python dependencies
 ADD requirements.txt requirements.txt
 RUN virtualenv --distribute venv
 RUN venv/bin/pip install -r requirements.txt

-RUN apt-get remove -y --auto-remove python-dev g++ libjpeg62-dev libevent-dev libldap2-dev libsasl2-dev libpq-dev
+RUN apt-get remove -y --auto-remove python-dev g++ libjpeg62-dev libevent-dev libldap2-dev libsasl2-dev libpq-dev libffi-dev

-### End common section ###
+###############################
+# END COMMON SECION
+###############################

 # Remove SSH.
 RUN rm -rf /etc/service/sshd /etc/my_init.d/00_regen_ssh_host_keys.sh
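Note: both Dockerfiles add libffi-dev to the install list (presumably needed to build one of the Python requirements) and to the auto-remove list so the headers are stripped after the pip build, and the loose '### End common section ###' marker is replaced by matching BEGIN/END banner comments. Those banners delimit a block that is meant to stay identical across the two Dockerfiles; a small checker could enforce that. A minimal sketch, not part of this commit -- the file paths come from the command line and the marker spelling follows the diff:

# check_common_section.py -- hypothetical helper, not part of this commit.
# Compares the text between the BEGIN/END banner comments across the
# Dockerfiles given on the command line and fails if they diverge.
import sys

BEGIN = '# BEGIN COMMON SECION'  # spelling as committed
END = '# END COMMON SECION'

def common_section(path):
    with open(path) as f:
        lines = f.read().splitlines()
    try:
        start, end = lines.index(BEGIN), lines.index(END)
    except ValueError:
        sys.exit('%s: missing common-section markers' % path)
    return '\n'.join(lines[start:end + 1])

sections = dict((path, common_section(path)) for path in sys.argv[1:])
if len(set(sections.values())) > 1:
    sys.exit('common sections differ: %s' % ', '.join(sections))
print('common sections match')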
@@ -3,6 +3,6 @@
 echo 'Starting gunicon'

 cd /
-venv/bin/gunicorn -c conf/gunicorn_registry.py registry:application
+nice -n 10 venv/bin/gunicorn -c conf/gunicorn_registry.py registry:application

 echo 'Gunicorn exited'
@@ -3,6 +3,6 @@
 echo 'Starting gunicon'

 cd /
-nice -10 venv/bin/gunicorn -c conf/gunicorn_verbs.py verbs:application
+nice -n 10 venv/bin/gunicorn -c conf/gunicorn_verbs.py verbs:application

 echo 'Gunicorn exited'
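Note: both run scripts now launch gunicorn through nice with an increment of 10, so the registry and verbs workers run at a lower CPU priority. The verbs script already used nice, but with the obsolescent `nice -10` spelling; `nice -n 10` is the explicit, portable form of the same positive adjustment (in GNU coreutils the two are equivalent -- neither raises priority).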
@@ -1,11 +1,5 @@
 include root-base.conf;

-worker_processes 2;
-
-user root nogroup;
-
-daemon off;
-
 http {
   include http-base.conf;
@@ -1,11 +1,5 @@
 include root-base.conf;

-worker_processes 2;
-
-user root nogroup;
-
-daemon off;
-
 http {
   include http-base.conf;
@@ -1,7 +1,15 @@
 pid /tmp/nginx.pid;
 error_log /var/log/nginx/nginx.error.log;

+worker_processes 2;
+worker_priority -10;
+worker_rlimit_nofile 10240;
+
+user root nogroup;
+
+daemon off;
+
 events {
-  worker_connections 1024;
+  worker_connections 10240;
   accept_mutex off;
 }
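Note: the worker_processes, user, and daemon directives move out of the two per-variant configs above into this shared base config, which also gains two tuning directives. worker_priority -10 raises nginx's CPU priority -- the inverse of the gunicorn workers, which now run under nice -n 10 -- so the proxy keeps accepting and buffering connections even when the application workers are saturated. worker_rlimit_nofile 10240 raises the per-worker file-descriptor limit to match the ten-fold increase in worker_connections, since each connection needs at least one descriptor.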
@@ -2,13 +2,14 @@ set -e

 DOCKER_IP=`echo $DOCKER_HOST | sed 's/tcp:\/\///' | sed 's/:.*//'`
 MYSQL_CONFIG_OVERRIDE="{\"DB_URI\":\"mysql+pymysql://root:password@$DOCKER_IP/genschema\"}"
+PERCONA_CONFIG_OVERRIDE="{\"DB_URI\":\"mysql+pymysql://root@$DOCKER_IP/genschema\"}"
 PGSQL_CONFIG_OVERRIDE="{\"DB_URI\":\"postgresql://postgres@$DOCKER_IP/genschema\"}"

 up_mysql() {
   # Run a SQL database on port 3306 inside of Docker.
   docker run --name mysql -p 3306:3306 -e MYSQL_ROOT_PASSWORD=password -d mysql

-  # Sleep for 5s to get MySQL get started.
+  # Sleep for 10s to get MySQL get started.
   echo 'Sleeping for 10...'
   sleep 10

@@ -21,6 +22,40 @@ down_mysql() {
   docker rm mysql
 }

+up_mariadb() {
+  # Run a SQL database on port 3306 inside of Docker.
+  docker run --name mariadb -p 3306:3306 -e MYSQL_ROOT_PASSWORD=password -d mariadb
+
+  # Sleep for 10s to get MySQL get started.
+  echo 'Sleeping for 10...'
+  sleep 10
+
+  # Add the database to mysql.
+  docker run --rm --link mariadb:mariadb mariadb sh -c 'echo "create database genschema" | mysql -h"$MARIADB_PORT_3306_TCP_ADDR" -P"$MARIADB_PORT_3306_TCP_PORT" -uroot -ppassword'
+}
+
+down_mariadb() {
+  docker kill mariadb
+  docker rm mariadb
+}
+
+up_percona() {
+  # Run a SQL database on port 3306 inside of Docker.
+  docker run --name percona -p 3306:3306 -d dockerfile/percona
+
+  # Sleep for 10s
+  echo 'Sleeping for 10...'
+  sleep 10
+
+  # Add the daabase to mysql.
+  docker run --rm --link percona:percona dockerfile/percona sh -c 'echo "create database genschema" | mysql -h $PERCONA_PORT_3306_TCP_ADDR'
+}
+
+down_percona() {
+  docker kill percona
+  docker rm percona
+}
+
 up_postgres() {
   # Run a SQL database on port 5432 inside of Docker.
   docker run --name postgres -p 5432:5432 -d postgres

@@ -73,6 +108,26 @@ test_migrate $MYSQL_CONFIG_OVERRIDE
 set -e
 down_mysql

+# Test via MariaDB.
+echo '> Starting MariaDB'
+up_mariadb
+
+echo '> Testing Migration (mariadb)'
+set +e
+test_migrate $MYSQL_CONFIG_OVERRIDE
+set -e
+down_mariadb
+
+# Test via Percona.
+echo '> Starting Percona'
+up_percona
+
+echo '> Testing Migration (percona)'
+set +e
+test_migrate $PERCONA_CONFIG_OVERRIDE
+set -e
+down_percona
+
 # Test via Postgres.
 echo '> Starting Postgres'
 up_postgres
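Note: the migration test matrix grows from MySQL and Postgres to also cover MariaDB and Percona, each following the same pattern -- start the database container, wait a fixed 10 seconds for it to come up, create the genschema database over a container link, run test_migrate with the matching DB_URI override, then kill and remove the container. MariaDB reuses MYSQL_CONFIG_OVERRIDE because its container is started with the same root password, while Percona gets its own PERCONA_CONFIG_OVERRIDE with a passwordless root, since the dockerfile/percona container is started without one.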
@@ -13,7 +13,7 @@ down_revision = '1c5b738283a5'
 from alembic import op
 import sqlalchemy as sa

-from tools.migrateslackwebhook import run_slackwebhook_migration
+from util.migrateslackwebhook import run_slackwebhook_migration


 def upgrade(tables):
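Note: only the import path changes here -- run_slackwebhook_migration now lives under util/ rather than tools/ -- and the migration logic itself is untouched.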
@@ -19,7 +19,7 @@ from util.cache import no_cache
 from endpoints.common import common_login, render_page_template, route_show_if, param_required
 from endpoints.csrf import csrf_protect, generate_csrf_token
 from endpoints.registry import set_cache_headers
-from util.names import parse_repository_name
+from util.names import parse_repository_name, parse_repository_name_and_tag
 from util.useremails import send_email_changed
 from auth import scopes

@@ -224,14 +224,14 @@ def robots():
 @web.route('/<path:repository>')
 @no_cache
 @process_oauth
-@parse_repository_name
-def redirect_to_repository(namespace, reponame):
+@parse_repository_name_and_tag
+def redirect_to_repository(namespace, reponame, tag):
   permission = ReadRepositoryPermission(namespace, reponame)
   is_public = model.repository_is_public(namespace, reponame)

   if permission.can() or is_public:
     repository_name = '/'.join([namespace, reponame])
-    return redirect(url_for('web.repository', path=repository_name))
+    return redirect(url_for('web.repository', path=repository_name, tag=tag))

   abort(404)
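Note: with the decorator swapped to parse_repository_name_and_tag, a request for a path like namespace/repo:tag now reaches redirect_to_repository with the tag split out, and url_for forwards it to the web.repository endpoint -- presumably as a query parameter, since only the path appears in the route. The permission check is unchanged and still applies to the namespace/repository pair alone.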
@@ -4,16 +4,25 @@ from functools import wraps
 from uuid import uuid4


-def parse_namespace_repository(repository):
+def parse_namespace_repository(repository, tag=False):
   parts = repository.rstrip('/').split('/', 1)
   if len(parts) < 2:
     namespace = 'library'
     repository = parts[0]
   else:
     (namespace, repository) = parts
-  repository = urllib.quote_plus(repository)
-  return (namespace, repository)
+
+  if tag:
+    parts = repository.split(':', 1)
+    if len(parts) < 2:
+      tag = None
+    else:
+      (repository, tag) = parts
+
+  repository = urllib.quote_plus(repository)
+  if tag:
+    return (namespace, repository, tag)
+  return (namespace, repository)

 def parse_repository_name(f):
   @wraps(f)

@@ -22,6 +31,13 @@ def parse_repository_name(f):
     return f(namespace, repository, *args, **kwargs)
   return wrapper

+
+def parse_repository_name_and_tag(f):
+  @wraps(f)
+  def wrapper(repository, *args, **kwargs):
+    (namespace, repository, tag) = parse_namespace_repository(repository, tag=True)
+    return f(namespace, repository, tag, *args, **kwargs)
+  return wrapper

 def format_robot_username(parent_username, robot_shortname):
   return '%s+%s' % (parent_username, robot_shortname)
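A quick walk-through of the new parsing behavior. This is a hypothetical sketch that mirrors the util/names.py logic shown above, not a test shipped with this commit:

# Hypothetical usage sketch for the new tag-aware parsing; mirrors the
# util/names.py code in the diff above, not part of the commit itself.
from util.names import parse_namespace_repository

# Tagged path with tag=True: the tag is split off before the repository
# name is URL-quoted, and a three-tuple comes back.
assert parse_namespace_repository('coreos/etcd:v2.0', tag=True) == \
    ('coreos', 'etcd', 'v2.0')

# Single-component path: falls back to the 'library' namespace, as before.
assert parse_namespace_repository('etcd') == ('library', 'etcd')

# Untagged path with tag=True: tag is reset to None, so the final
# 'if tag:' check fails and the two-tuple form is returned. A caller that
# always unpacks three values -- like the parse_repository_name_and_tag
# wrapper -- would raise ValueError on such paths, so untagged URLs look
# like they need a follow-up fix (e.g. defaulting the tag to 'latest').
assert parse_namespace_repository('coreos/etcd', tag=True) == \
    ('coreos', 'etcd')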