Merge branch 'master' into ackbar
commit 47fb10b79f
17 changed files with 167 additions and 52 deletions
@@ -1,23 +1,30 @@
 # vim:ft=dockerfile
+
+###############################
+# BEGIN COMMON SECION
+###############################
+
 FROM phusion/baseimage:0.9.15

 ENV DEBIAN_FRONTEND noninteractive
 ENV HOME /root

 # Install the dependencies.
-RUN apt-get update # 20NOV2014
+RUN apt-get update # 11DEC2014

 # New ubuntu packages should be added as their own apt-get install lines below the existing install commands
-RUN apt-get install -y git python-virtualenv python-dev libjpeg8 libjpeg62 libjpeg62-dev libevent-2.0.5 libevent-dev gdebi-core g++ libmagic1 phantomjs nodejs npm libldap-2.4-2 libldap2-dev libsasl2-modules libsasl2-dev libpq5 libpq-dev
+RUN apt-get install -y git python-virtualenv python-dev libjpeg8 libjpeg62 libjpeg62-dev libevent-2.0.5 libevent-dev gdebi-core g++ libmagic1 phantomjs nodejs npm libldap-2.4-2 libldap2-dev libsasl2-modules libsasl2-dev libpq5 libpq-dev libfreetype6-dev libffi-dev

 # Build the python dependencies
 ADD requirements.txt requirements.txt
 RUN virtualenv --distribute venv
 RUN venv/bin/pip install -r requirements.txt

-RUN apt-get remove -y --auto-remove python-dev g++ libjpeg62-dev libevent-dev libldap2-dev libsasl2-dev libpq-dev
+RUN apt-get remove -y --auto-remove python-dev g++ libjpeg62-dev libevent-dev libldap2-dev libsasl2-dev libpq-dev libffi-dev

-### End common section ###
+###############################
+# END COMMON SECION
+###############################

 RUN apt-get install -y lxc aufs-tools

@@ -1,4 +1,9 @@
 # vim:ft=dockerfile
+
+###############################
+# BEGIN COMMON SECION
+###############################
+
 FROM phusion/baseimage:0.9.15

 ENV DEBIAN_FRONTEND noninteractive

@@ -8,16 +13,18 @@ ENV HOME /root
 RUN apt-get update # 11DEC2014

 # New ubuntu packages should be added as their own apt-get install lines below the existing install commands
-RUN apt-get install -y git python-virtualenv python-dev libjpeg8 libjpeg62 libjpeg62-dev libevent-2.0.5 libevent-dev gdebi-core g++ libmagic1 phantomjs nodejs npm libldap-2.4-2 libldap2-dev libsasl2-modules libsasl2-dev libpq5 libpq-dev libfreetype6-dev
+RUN apt-get install -y git python-virtualenv python-dev libjpeg8 libjpeg62 libjpeg62-dev libevent-2.0.5 libevent-dev gdebi-core g++ libmagic1 phantomjs nodejs npm libldap-2.4-2 libldap2-dev libsasl2-modules libsasl2-dev libpq5 libpq-dev libfreetype6-dev libffi-dev

 # Build the python dependencies
 ADD requirements.txt requirements.txt
 RUN virtualenv --distribute venv
 RUN venv/bin/pip install -r requirements.txt

-RUN apt-get remove -y --auto-remove python-dev g++ libjpeg62-dev libevent-dev libldap2-dev libsasl2-dev libpq-dev
+RUN apt-get remove -y --auto-remove python-dev g++ libjpeg62-dev libevent-dev libldap2-dev libsasl2-dev libpq-dev libffi-dev

-### End common section ###
+###############################
+# END COMMON SECION
+###############################

 # Remove SSH.
 RUN rm -rf /etc/service/sshd /etc/my_init.d/00_regen_ssh_host_keys.sh

@@ -3,6 +3,6 @@
 echo 'Starting gunicon'

 cd /
-venv/bin/gunicorn -c conf/gunicorn_registry.py registry:application
+nice -n 10 venv/bin/gunicorn -c conf/gunicorn_registry.py registry:application

 echo 'Gunicorn exited'

@@ -3,6 +3,6 @@
 echo 'Starting gunicon'

 cd /
-nice -10 venv/bin/gunicorn -c conf/gunicorn_verbs.py verbs:application
+nice -n 10 venv/bin/gunicorn -c conf/gunicorn_verbs.py verbs:application

 echo 'Gunicorn exited'

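Both run scripts now start gunicorn at a niceness of 10. The verbs script previously used 'nice -10', which on most implementations is the historical spelling of the same increment but reads like a negative value; 'nice -n 10' is explicit. As an illustration only (not part of the commit), the same adjustment from Python:

  import os

  # os.nice adds an increment to the current process's niceness, the same
  # adjustment `nice -n 10` applies to the child it spawns.
  print('niceness before:', os.nice(0))   # increment of 0 just reads the value
  print('niceness after:', os.nice(10))   # higher niceness = lower CPU priority
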
@@ -1,11 +1,5 @@
 include root-base.conf;

-worker_processes 2;
-
-user root nogroup;
-
-daemon off;
-
 http {
   include http-base.conf;

@@ -1,11 +1,5 @@
 include root-base.conf;

-worker_processes 2;
-
-user root nogroup;
-
-daemon off;
-
 http {
   include http-base.conf;

@@ -1,7 +1,15 @@
 pid /tmp/nginx.pid;
 error_log /var/log/nginx/nginx.error.log;

+worker_processes 2;
+worker_priority -10;
+worker_rlimit_nofile 10240;
+
+user root nogroup;
+
+daemon off;
+
 events {
-  worker_connections 1024;
+  worker_connections 10240;
+  accept_mutex off;
 }

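The worker directives consolidated into this base file are sized together, and worker_priority -10 presumably keeps the nginx workers scheduled ahead of the gunicorn processes that are niced to 10 in the run scripts above. A quick sanity check of the numbers (illustrative, not from the source):

  # Each worker may hold up to worker_connections descriptors, so the
  # per-process file limit must cover it; total concurrency is roughly
  # workers times connections.
  worker_processes = 2
  worker_connections = 10240
  worker_rlimit_nofile = 10240

  assert worker_rlimit_nofile >= worker_connections
  print('approximate connection ceiling:', worker_processes * worker_connections)
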
@@ -2,13 +2,14 @@ set -e

 DOCKER_IP=`echo $DOCKER_HOST | sed 's/tcp:\/\///' | sed 's/:.*//'`
 MYSQL_CONFIG_OVERRIDE="{\"DB_URI\":\"mysql+pymysql://root:password@$DOCKER_IP/genschema\"}"
+PERCONA_CONFIG_OVERRIDE="{\"DB_URI\":\"mysql+pymysql://root@$DOCKER_IP/genschema\"}"
 PGSQL_CONFIG_OVERRIDE="{\"DB_URI\":\"postgresql://postgres@$DOCKER_IP/genschema\"}"

 up_mysql() {
   # Run a SQL database on port 3306 inside of Docker.
   docker run --name mysql -p 3306:3306 -e MYSQL_ROOT_PASSWORD=password -d mysql

-  # Sleep for 5s to get MySQL get started.
+  # Sleep for 10s to get MySQL get started.
   echo 'Sleeping for 10...'
   sleep 10

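The DOCKER_IP pipeline strips the tcp:// scheme and the port from DOCKER_HOST, leaving the bare address that the config overrides interpolate. For illustration only, a Python equivalent of that extraction:

  from urllib.parse import urlparse  # Python 3 spelling; plain urlparse on Python 2

  # e.g. DOCKER_HOST=tcp://192.168.59.103:2376
  print(urlparse('tcp://192.168.59.103:2376').hostname)  # 192.168.59.103
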
@@ -21,6 +22,40 @@ down_mysql() {
   docker rm mysql
 }

+up_mariadb() {
+  # Run a SQL database on port 3306 inside of Docker.
+  docker run --name mariadb -p 3306:3306 -e MYSQL_ROOT_PASSWORD=password -d mariadb
+
+  # Sleep for 10s to get MySQL get started.
+  echo 'Sleeping for 10...'
+  sleep 10
+
+  # Add the database to mysql.
+  docker run --rm --link mariadb:mariadb mariadb sh -c 'echo "create database genschema" | mysql -h"$MARIADB_PORT_3306_TCP_ADDR" -P"$MARIADB_PORT_3306_TCP_PORT" -uroot -ppassword'
+}
+
+down_mariadb() {
+  docker kill mariadb
+  docker rm mariadb
+}
+
+up_percona() {
+  # Run a SQL database on port 3306 inside of Docker.
+  docker run --name percona -p 3306:3306 -d dockerfile/percona
+
+  # Sleep for 10s
+  echo 'Sleeping for 10...'
+  sleep 10
+
+  # Add the daabase to mysql.
+  docker run --rm --link percona:percona dockerfile/percona sh -c 'echo "create database genschema" | mysql -h $PERCONA_PORT_3306_TCP_ADDR'
+}
+
+down_percona() {
+  docker kill percona
+  docker rm percona
+}
+
 up_postgres() {
   # Run a SQL database on port 5432 inside of Docker.
   docker run --name postgres -p 5432:5432 -d postgres

@@ -73,6 +108,26 @@ test_migrate $MYSQL_CONFIG_OVERRIDE
 set -e
 down_mysql

+# Test via MariaDB.
+echo '> Starting MariaDB'
+up_mariadb
+
+echo '> Testing Migration (mariadb)'
+set +e
+test_migrate $MYSQL_CONFIG_OVERRIDE
+set -e
+down_mariadb
+
+# Test via Percona.
+echo '> Starting Percona'
+up_percona
+
+echo '> Testing Migration (percona)'
+set +e
+test_migrate $PERCONA_CONFIG_OVERRIDE
+set -e
+down_percona
+
 # Test via Postgres.
 echo '> Starting Postgres'
 up_postgres

@@ -0,0 +1,25 @@
+"""mysql max index lengths
+
+Revision ID: 228d1af6af1c
+Revises: 5b84373e5db
+Create Date: 2015-01-06 14:35:24.651424
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '228d1af6af1c'
+down_revision = '5b84373e5db'
+
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects import mysql
+
+def upgrade(tables):
+  op.drop_index('queueitem_queue_name', table_name='queueitem')
+  op.create_index('queueitem_queue_name', 'queueitem', ['queue_name'], unique=False, mysql_length=767)
+
+  op.drop_index('image_ancestors', table_name='image')
+  op.create_index('image_ancestors', 'image', ['ancestors'], unique=False, mysql_length=767)
+
+def downgrade(tables):
+  pass

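The mysql_length=767 keyword addresses InnoDB's historical 767-byte cap on index key prefixes: creating a full index over a long VARCHAR or TEXT column fails outright on MySQL, so these indexes are rebuilt over a 767-byte prefix instead. A sketch of what SQLAlchemy emits for such an index (the table shape here is illustrative, not the real schema):

  import sqlalchemy as sa
  from sqlalchemy.dialects import mysql

  metadata = sa.MetaData()
  # Illustrative stand-in for the real queueitem table.
  queueitem = sa.Table(
    'queueitem', metadata,
    sa.Column('id', sa.Integer, primary_key=True),
    sa.Column('queue_name', sa.String(length=1024)),
  )
  idx = sa.Index('queueitem_queue_name', queueitem.c.queue_name, mysql_length=767)

  # Renders the per-column prefix syntax MySQL needs:
  #   CREATE INDEX queueitem_queue_name ON queueitem (queue_name(767))
  print(sa.schema.CreateIndex(idx).compile(dialect=mysql.dialect()))
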
@@ -53,7 +53,7 @@ def upgrade(tables):
   op.create_index('queueitem_available', 'queueitem', ['available'], unique=False)
   op.create_index('queueitem_available_after', 'queueitem', ['available_after'], unique=False)
   op.create_index('queueitem_processing_expires', 'queueitem', ['processing_expires'], unique=False)
-  op.create_index('queueitem_queue_name', 'queueitem', ['queue_name'], unique=False)
+  op.create_index('queueitem_queue_name', 'queueitem', ['queue_name'], unique=False, mysql_length=767)
   op.create_table('role',
     sa.Column('id', sa.Integer(), nullable=False),
     sa.Column('name', sa.String(length=255), nullable=False),

@@ -376,7 +376,7 @@ def upgrade(tables):
     sa.ForeignKeyConstraint(['storage_id'], ['imagestorage.id'], ),
     sa.PrimaryKeyConstraint('id')
   )
-  op.create_index('image_ancestors', 'image', ['ancestors'], unique=False)
+  op.create_index('image_ancestors', 'image', ['ancestors'], unique=False, mysql_length=767)
   op.create_index('image_repository_id', 'image', ['repository_id'], unique=False)
   op.create_index('image_repository_id_docker_image_id', 'image', ['repository_id', 'docker_image_id'], unique=True)
   op.create_index('image_storage_id', 'image', ['storage_id'], unique=False)

@@ -13,7 +13,7 @@ down_revision = '1c5b738283a5'
 from alembic import op
 import sqlalchemy as sa

-from tools.migrateslackwebhook import run_slackwebhook_migration
+from util.migrateslackwebhook import run_slackwebhook_migration


 def upgrade(tables):

@@ -19,20 +19,23 @@ def track_and_log(event_name, repo, **kwargs):

   analytics_id = 'anonymous'

-  profile.debug('Logging the %s to Mixpanel and the log system', event_name)
-  if get_validated_oauth_token():
-    oauth_token = get_validated_oauth_token()
-    metadata['oauth_token_id'] = oauth_token.id
-    metadata['oauth_token_application_id'] = oauth_token.application.client_id
-    metadata['oauth_token_application'] = oauth_token.application.name
-    analytics_id = 'oauth:' + oauth_token.id
-  elif get_authenticated_user():
-    metadata['username'] = get_authenticated_user().username
-    analytics_id = get_authenticated_user().username
-  elif get_validated_token():
-    metadata['token'] = get_validated_token().friendly_name
-    metadata['token_code'] = get_validated_token().code
-    analytics_id = 'token:' + get_validated_token().code
+  authenticated_oauth_token = get_validated_oauth_token()
+  authenticated_user = get_authenticated_user()
+  authenticated_token = get_validated_token() if not authenticated_user else None
+
+  if authenticated_oauth_token:
+    metadata['oauth_token_id'] = authenticated_oauth_token.id
+    metadata['oauth_token_application_id'] = authenticated_oauth_token.application.client_id
+    metadata['oauth_token_application'] = authenticated_oauth_token.application.name
+    analytics_id = 'oauth:' + authenticated_oauth_token.id
+  elif authenticated_user:
+    metadata['username'] = authenticated_user.username
+    analytics_id = authenticated_user.username
+  elif authenticated_token:
+    metadata['token'] = authenticated_token.friendly_name
+    metadata['token_code'] = authenticated_token.code
+    analytics_id = 'token:' + authenticated_token.code
   else:
     metadata['public'] = True
     analytics_id = 'anonymous'

@@ -42,21 +45,27 @@ def track_and_log(event_name, repo, **kwargs):
   }

   # Publish the user event (if applicable)
-  if get_authenticated_user():
-    profile.debug('Checking publishing %s to the user events system', event_name)
+  if authenticated_user:
+    profile.debug('Publishing %s to the user events system', event_name)
     user_event_data = {
       'action': event_name,
       'repository': repository,
       'namespace': namespace
     }

-    event = userevents.get_event(get_authenticated_user().username)
+    event = userevents.get_event(authenticated_user.username)
     event.publish_event_data('docker-cli', user_event_data)

   # Save the action to mixpanel.
   profile.debug('Logging the %s to Mixpanel', event_name)
   analytics.track(analytics_id, event_name, extra_params)

   # Log the action to the database.
   profile.debug('Logging the %s to logs system', event_name)
   model.log_action(event_name, namespace,
-                   performer=get_authenticated_user(),
+                   performer=authenticated_user,
                    ip=request.remote_addr, metadata=metadata,
                    repository=repo)

   profile.debug('Track and log of %s complete', event_name)

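The net effect of these two hunks: each credential lookup runs exactly once, and every consumer (metadata, analytics_id, log_action) branches on the same cached result rather than re-invoking the getters. A stripped-down sketch of that precedence, with hypothetical stand-in values:

  def classify_identity(oauth_token, user, token):
    # Mirrors the order above: OAuth token, then user, then access token.
    if oauth_token:
      return 'oauth:' + oauth_token
    if user:
      return user
    if token:
      return 'token:' + token
    return 'anonymous'

  assert classify_identity(None, 'devtable', None) == 'devtable'
  assert classify_identity(None, None, 'c0de') == 'token:c0de'
  assert classify_identity(None, None, None) == 'anonymous'
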
@@ -21,7 +21,7 @@ from util.cache import no_cache
 from endpoints.common import common_login, render_page_template, route_show_if, param_required
 from endpoints.csrf import csrf_protect, generate_csrf_token, verify_csrf
 from endpoints.registry import set_cache_headers
-from util.names import parse_repository_name
+from util.names import parse_repository_name, parse_repository_name_and_tag
 from util.useremails import send_email_changed
 from util.systemlogs import build_logs_archive
 from auth import scopes

@@ -165,7 +165,7 @@ def health():
   check = HealthCheck.get_check(app.config['HEALTH_CHECKER'][0], app.config['HEALTH_CHECKER'][1])
   (data, is_healthy) = check.conduct_healthcheck(db_healthy, buildlogs_healthy)

-  response = jsonify(dict(data = data, is_healthy = is_healthy))
+  response = jsonify(dict(data=data, is_healthy=is_healthy))
   response.status_code = 200 if is_healthy else 503
   return response

@@ -227,14 +227,14 @@ def robots():
 @web.route('/<path:repository>')
 @no_cache
 @process_oauth
-@parse_repository_name
-def redirect_to_repository(namespace, reponame):
+@parse_repository_name_and_tag
+def redirect_to_repository(namespace, reponame, tag):
   permission = ReadRepositoryPermission(namespace, reponame)
   is_public = model.repository_is_public(namespace, reponame)

   if permission.can() or is_public:
     repository_name = '/'.join([namespace, reponame])
-    return redirect(url_for('web.repository', path=repository_name))
+    return redirect(url_for('web.repository', path=repository_name, tag=tag))

   abort(404)

@@ -22,7 +22,6 @@ xhtml2pdf
 redis
 hiredis
 docker-py
-pygithub
 flask-restful==0.2.12
 jsonschema
 git+https://github.com/NateFerrero/oauth2lib.git

@@ -40,4 +39,5 @@ pyyaml
 git+https://github.com/DevTable/aniso8601-fake.git
 git+https://github.com/DevTable/anunidecode.git
 git+https://github.com/DevTable/avatar-generator.git
+git+https://github.com/DevTable/pygithub.git
 gipc

@@ -9,7 +9,6 @@ LogentriesLogger==0.2.1
 Mako==1.0.0
 MarkupSafe==0.23
 Pillow==2.6.1
-PyGithub==1.25.2
 PyMySQL==0.6.2
 PyPDF2==1.23
 PyYAML==3.11

@@ -19,6 +18,7 @@ alembic==0.7.0
 git+https://github.com/DevTable/aniso8601-fake.git
 git+https://github.com/DevTable/anunidecode.git
 git+https://github.com/DevTable/avatar-generator.git
+git+https://github.com/DevTable/pygithub.git
 aiowsgi==0.3
 autobahn==0.9.3-3
 backports.ssl-match-hostname==3.4.0.2

@@ -4,16 +4,25 @@ from functools import wraps
 from uuid import uuid4


-def parse_namespace_repository(repository):
+def parse_namespace_repository(repository, include_tag=False):
   parts = repository.rstrip('/').split('/', 1)
   if len(parts) < 2:
     namespace = 'library'
     repository = parts[0]
   else:
     (namespace, repository) = parts
-  repository = urllib.quote_plus(repository)
-  return (namespace, repository)
+
+  if include_tag:
+    parts = repository.split(':', 1)
+    if len(parts) < 2:
+      tag = 'latest'
+    else:
+      (repository, tag) = parts
+
+  repository = urllib.quote_plus(repository)
+  if include_tag:
+    return (namespace, repository, tag)
+  return (namespace, repository)

 def parse_repository_name(f):
   @wraps(f)

@@ -22,6 +31,13 @@ def parse_repository_name(f):
     return f(namespace, repository, *args, **kwargs)
   return wrapper

+def parse_repository_name_and_tag(f):
+  @wraps(f)
+  def wrapper(repository, *args, **kwargs):
+    namespace, repository, tag = parse_namespace_repository(repository, include_tag=True)
+    return f(namespace, repository, tag, *args, **kwargs)
+  return wrapper
+

 def format_robot_username(parent_username, robot_shortname):
   return '%s+%s' % (parent_username, robot_shortname)

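Taken together with the web endpoint change above, a path like devtable/simple:v1.0 now resolves to a namespace, a repository, and a tag. A quick usage sketch of the new helper (the 'library' and 'latest' fallbacks come from the function itself; the example values are hypothetical):

  from util.names import parse_namespace_repository

  print(parse_namespace_repository('devtable/simple:v1.0', include_tag=True))
  # ('devtable', 'simple', 'v1.0')
  print(parse_namespace_repository('ubuntu', include_tag=True))
  # ('library', 'ubuntu', 'latest')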