diff --git a/Dockerfile.buildworker b/Dockerfile.buildworker index 4b6f995b9..09c1c91b7 100644 --- a/Dockerfile.buildworker +++ b/Dockerfile.buildworker @@ -1,23 +1,30 @@ # vim:ft=dockerfile + +############################### +# BEGIN COMMON SECTION +############################### + FROM phusion/baseimage:0.9.15 ENV DEBIAN_FRONTEND noninteractive ENV HOME /root # Install the dependencies. -RUN apt-get update # 20NOV2014 +RUN apt-get update # 11DEC2014 # New ubuntu packages should be added as their own apt-get install lines below the existing install commands -RUN apt-get install -y git python-virtualenv python-dev libjpeg8 libjpeg62 libjpeg62-dev libevent-2.0.5 libevent-dev gdebi-core g++ libmagic1 phantomjs nodejs npm libldap-2.4-2 libldap2-dev libsasl2-modules libsasl2-dev libpq5 libpq-dev +RUN apt-get install -y git python-virtualenv python-dev libjpeg8 libjpeg62 libjpeg62-dev libevent-2.0.5 libevent-dev gdebi-core g++ libmagic1 phantomjs nodejs npm libldap-2.4-2 libldap2-dev libsasl2-modules libsasl2-dev libpq5 libpq-dev libfreetype6-dev libffi-dev # Build the python dependencies ADD requirements.txt requirements.txt RUN virtualenv --distribute venv RUN venv/bin/pip install -r requirements.txt -RUN apt-get remove -y --auto-remove python-dev g++ libjpeg62-dev libevent-dev libldap2-dev libsasl2-dev libpq-dev +RUN apt-get remove -y --auto-remove python-dev g++ libjpeg62-dev libevent-dev libldap2-dev libsasl2-dev libpq-dev libffi-dev -### End common section ### +############################### +# END COMMON SECTION +############################### RUN apt-get install -y lxc aufs-tools diff --git a/Dockerfile.web b/Dockerfile.web index bd07d9999..d50256b2a 100644 --- a/Dockerfile.web +++ b/Dockerfile.web @@ -1,4 +1,9 @@ # vim:ft=dockerfile + +############################### +# BEGIN COMMON SECTION +############################### + FROM phusion/baseimage:0.9.15 ENV DEBIAN_FRONTEND noninteractive @@ -8,16 +13,18 @@ ENV HOME /root RUN apt-get update # 11DEC2014 # New 
ubuntu packages should be added as their own apt-get install lines below the existing install commands -RUN apt-get install -y git python-virtualenv python-dev libjpeg8 libjpeg62 libjpeg62-dev libevent-2.0.5 libevent-dev gdebi-core g++ libmagic1 phantomjs nodejs npm libldap-2.4-2 libldap2-dev libsasl2-modules libsasl2-dev libpq5 libpq-dev libfreetype6-dev +RUN apt-get install -y git python-virtualenv python-dev libjpeg8 libjpeg62 libjpeg62-dev libevent-2.0.5 libevent-dev gdebi-core g++ libmagic1 phantomjs nodejs npm libldap-2.4-2 libldap2-dev libsasl2-modules libsasl2-dev libpq5 libpq-dev libfreetype6-dev libffi-dev # Build the python dependencies ADD requirements.txt requirements.txt RUN virtualenv --distribute venv RUN venv/bin/pip install -r requirements.txt -RUN apt-get remove -y --auto-remove python-dev g++ libjpeg62-dev libevent-dev libldap2-dev libsasl2-dev libpq-dev +RUN apt-get remove -y --auto-remove python-dev g++ libjpeg62-dev libevent-dev libldap2-dev libsasl2-dev libpq-dev libffi-dev -### End common section ### +############################### +# END COMMON SECTION +############################### # Remove SSH. 
RUN rm -rf /etc/service/sshd /etc/my_init.d/00_regen_ssh_host_keys.sh diff --git a/conf/init/gunicorn_registry/run b/conf/init/gunicorn_registry/run index a0a09f5a2..3c88fd0e3 100755 --- a/conf/init/gunicorn_registry/run +++ b/conf/init/gunicorn_registry/run @@ -3,6 +3,6 @@ echo 'Starting gunicon' cd / -venv/bin/gunicorn -c conf/gunicorn_registry.py registry:application +nice -n 10 venv/bin/gunicorn -c conf/gunicorn_registry.py registry:application echo 'Gunicorn exited' \ No newline at end of file diff --git a/conf/init/gunicorn_verbs/run b/conf/init/gunicorn_verbs/run index 1cf2ee51c..d76a7adcf 100755 --- a/conf/init/gunicorn_verbs/run +++ b/conf/init/gunicorn_verbs/run @@ -3,6 +3,6 @@ echo 'Starting gunicon' cd / -nice -10 venv/bin/gunicorn -c conf/gunicorn_verbs.py verbs:application +nice -n 10 venv/bin/gunicorn -c conf/gunicorn_verbs.py verbs:application echo 'Gunicorn exited' \ No newline at end of file diff --git a/conf/nginx-nossl.conf b/conf/nginx-nossl.conf index fbcce63c0..cc985906a 100644 --- a/conf/nginx-nossl.conf +++ b/conf/nginx-nossl.conf @@ -1,11 +1,5 @@ include root-base.conf; -worker_processes 2; - -user root nogroup; - -daemon off; - http { include http-base.conf; diff --git a/conf/nginx.conf b/conf/nginx.conf index e208d30e0..01d554ae2 100644 --- a/conf/nginx.conf +++ b/conf/nginx.conf @@ -1,11 +1,5 @@ include root-base.conf; -worker_processes 2; - -user root nogroup; - -daemon off; - http { include http-base.conf; diff --git a/conf/root-base.conf b/conf/root-base.conf index be8072945..dc8685c34 100644 --- a/conf/root-base.conf +++ b/conf/root-base.conf @@ -1,7 +1,15 @@ pid /tmp/nginx.pid; error_log /var/log/nginx/nginx.error.log; +worker_processes 2; +worker_priority -10; +worker_rlimit_nofile 10240; + +user root nogroup; + +daemon off; + events { - worker_connections 1024; + worker_connections 10240; accept_mutex off; } diff --git a/data/migrations/migration.sh b/data/migrations/migration.sh index dedaf2445..17901e130 100755 --- 
a/data/migrations/migration.sh +++ b/data/migrations/migration.sh @@ -2,13 +2,14 @@ set -e DOCKER_IP=`echo $DOCKER_HOST | sed 's/tcp:\/\///' | sed 's/:.*//'` MYSQL_CONFIG_OVERRIDE="{\"DB_URI\":\"mysql+pymysql://root:password@$DOCKER_IP/genschema\"}" +PERCONA_CONFIG_OVERRIDE="{\"DB_URI\":\"mysql+pymysql://root@$DOCKER_IP/genschema\"}" PGSQL_CONFIG_OVERRIDE="{\"DB_URI\":\"postgresql://postgres@$DOCKER_IP/genschema\"}" up_mysql() { # Run a SQL database on port 3306 inside of Docker. docker run --name mysql -p 3306:3306 -e MYSQL_ROOT_PASSWORD=password -d mysql - # Sleep for 5s to get MySQL get started. + # Sleep for 10s to let MySQL get started. echo 'Sleeping for 10...' sleep 10 @@ -21,6 +22,40 @@ down_mysql() { docker rm mysql } +up_mariadb() { + # Run a SQL database on port 3306 inside of Docker. + docker run --name mariadb -p 3306:3306 -e MYSQL_ROOT_PASSWORD=password -d mariadb + + # Sleep for 10s to let MySQL get started. + echo 'Sleeping for 10...' + sleep 10 + + # Add the database to mysql. + docker run --rm --link mariadb:mariadb mariadb sh -c 'echo "create database genschema" | mysql -h"$MARIADB_PORT_3306_TCP_ADDR" -P"$MARIADB_PORT_3306_TCP_PORT" -uroot -ppassword' +} + +down_mariadb() { + docker kill mariadb + docker rm mariadb +} + +up_percona() { + # Run a SQL database on port 3306 inside of Docker. + docker run --name percona -p 3306:3306 -d dockerfile/percona + + # Sleep for 10s + echo 'Sleeping for 10...' + sleep 10 + + # Add the database to mysql. + docker run --rm --link percona:percona dockerfile/percona sh -c 'echo "create database genschema" | mysql -h $PERCONA_PORT_3306_TCP_ADDR' +} + +down_percona() { + docker kill percona + docker rm percona +} + up_postgres() { # Run a SQL database on port 5432 inside of Docker. docker run --name postgres -p 5432:5432 -d postgres @@ -73,6 +108,26 @@ test_migrate $MYSQL_CONFIG_OVERRIDE set -e down_mysql +# Test via MariaDB. 
+echo '> Starting MariaDB' +up_mariadb + +echo '> Testing Migration (mariadb)' +set +e +test_migrate $MYSQL_CONFIG_OVERRIDE +set -e +down_mariadb + +# Test via Percona. +echo '> Starting Percona' +up_percona + +echo '> Testing Migration (percona)' +set +e +test_migrate $PERCONA_CONFIG_OVERRIDE +set -e +down_percona + # Test via Postgres. echo '> Starting Postgres' up_postgres diff --git a/data/migrations/versions/5b84373e5db_convert_slack_webhook_data.py b/data/migrations/versions/5b84373e5db_convert_slack_webhook_data.py index 87aaf84a3..a117fd2b1 100644 --- a/data/migrations/versions/5b84373e5db_convert_slack_webhook_data.py +++ b/data/migrations/versions/5b84373e5db_convert_slack_webhook_data.py @@ -13,7 +13,7 @@ down_revision = '1c5b738283a5' from alembic import op import sqlalchemy as sa -from tools.migrateslackwebhook import run_slackwebhook_migration +from util.migrateslackwebhook import run_slackwebhook_migration def upgrade(tables): diff --git a/endpoints/web.py b/endpoints/web.py index 4717f7d40..913a6905a 100644 --- a/endpoints/web.py +++ b/endpoints/web.py @@ -19,7 +19,7 @@ from util.cache import no_cache from endpoints.common import common_login, render_page_template, route_show_if, param_required from endpoints.csrf import csrf_protect, generate_csrf_token from endpoints.registry import set_cache_headers -from util.names import parse_repository_name +from util.names import parse_repository_name, parse_repository_name_and_tag from util.useremails import send_email_changed from auth import scopes @@ -224,14 +224,14 @@ def robots(): @web.route('/') @no_cache @process_oauth -@parse_repository_name -def redirect_to_repository(namespace, reponame): +@parse_repository_name_and_tag +def redirect_to_repository(namespace, reponame, tag): permission = ReadRepositoryPermission(namespace, reponame) is_public = model.repository_is_public(namespace, reponame) if permission.can() or is_public: repository_name = '/'.join([namespace, reponame]) - return 
redirect(url_for('web.repository', path=repository_name)) + return redirect(url_for('web.repository', path=repository_name, tag=tag)) abort(404) diff --git a/tools/migrateslackwebhook.py b/util/migrateslackwebhook.py similarity index 100% rename from tools/migrateslackwebhook.py rename to util/migrateslackwebhook.py diff --git a/util/names.py b/util/names.py index 31546d450..67ceed982 100644 --- a/util/names.py +++ b/util/names.py @@ -4,16 +4,25 @@ from functools import wraps from uuid import uuid4 -def parse_namespace_repository(repository): +def parse_namespace_repository(repository, tag=False): parts = repository.rstrip('/').split('/', 1) if len(parts) < 2: namespace = 'library' repository = parts[0] else: (namespace, repository) = parts - repository = urllib.quote_plus(repository) - return (namespace, repository) + if tag: + parts = repository.split(':', 1) + if len(parts) < 2: + tag = None + else: + (repository, tag) = parts + + repository = urllib.quote_plus(repository) + if tag: + return (namespace, repository, tag) + return (namespace, repository) def parse_repository_name(f): @wraps(f) @@ -22,6 +31,13 @@ def parse_repository_name(f): return f(namespace, repository, *args, **kwargs) return wrapper +def parse_repository_name_and_tag(f): + @wraps(f) + def wrapper(repository, *args, **kwargs): + (namespace, repository, tag) = parse_namespace_repository(repository, tag=True) + return f(namespace, repository, tag, *args, **kwargs) + return wrapper + def format_robot_username(parent_username, robot_shortname): return '%s+%s' % (parent_username, robot_shortname)