Merge branch 'ldapper'
Conflicts: Dockerfile app.py data/database.py endpoints/index.py test/data/test.db
This commit is contained in:
commit
f6726bd0a4
25 changed files with 1157 additions and 94 deletions
|
@ -17,6 +17,9 @@ RUN apt-get install -y nodejs npm
|
|||
RUN ln -s /usr/bin/nodejs /usr/bin/node
|
||||
RUN npm install -g grunt-cli
|
||||
|
||||
# LDAP
|
||||
RUN apt-get install -y libldap2-dev libsasl2-dev
|
||||
|
||||
ADD binary_dependencies binary_dependencies
|
||||
RUN gdebi --n binary_dependencies/*.deb
|
||||
|
||||
|
@ -26,15 +29,19 @@ ADD requirements.txt requirements.txt
|
|||
RUN virtualenv --distribute venv
|
||||
RUN venv/bin/pip install -r requirements.txt
|
||||
|
||||
# Add the static assets and run grunt
|
||||
ADD grunt grunt
|
||||
ADD static static
|
||||
RUN cd grunt && npm install
|
||||
RUN cd grunt && grunt
|
||||
|
||||
# Add the backend assets
|
||||
ADD auth auth
|
||||
ADD buildstatus buildstatus
|
||||
ADD conf conf
|
||||
ADD data data
|
||||
ADD endpoints endpoints
|
||||
ADD features features
|
||||
ADD grunt grunt
|
||||
ADD screenshots screenshots
|
||||
ADD static static
|
||||
ADD storage storage
|
||||
ADD templates templates
|
||||
ADD util util
|
||||
|
@ -44,23 +51,34 @@ ADD app.py app.py
|
|||
ADD application.py application.py
|
||||
ADD config.py config.py
|
||||
ADD initdb.py initdb.py
|
||||
ADD external_libraries.py external_libraries.py
|
||||
ADD alembic.ini alembic.ini
|
||||
|
||||
# Add the config
|
||||
ADD conf conf
|
||||
RUN rm -rf /conf/stack
|
||||
|
||||
ADD conf/init/svlogd_config /svlogd_config
|
||||
ADD conf/init/preplogsdir.sh /etc/my_init.d/
|
||||
ADD conf/init/runmigration.sh /etc/my_init.d/
|
||||
|
||||
ADD conf/init/gunicorn /etc/service/gunicorn
|
||||
ADD conf/init/nginx /etc/service/nginx
|
||||
ADD conf/init/diffsworker /etc/service/diffsworker
|
||||
ADD conf/init/webhookworker /etc/service/webhookworker
|
||||
|
||||
RUN cd grunt && npm install
|
||||
RUN cd grunt && grunt
|
||||
# Download any external libs.
|
||||
RUN mkdir static/fonts
|
||||
RUN mkdir static/ldn
|
||||
|
||||
RUN venv/bin/python -m external_libraries
|
||||
|
||||
# Add the tests last because they're prone to accidental changes, then run them
|
||||
ADD test test
|
||||
RUN TEST=true venv/bin/python -m unittest discover
|
||||
|
||||
RUN rm -rf /conf/stack
|
||||
VOLUME ["/conf/stack", "/var/log"]
|
||||
VOLUME ["/conf/stack", "/var/log", "/datastorage"]
|
||||
|
||||
EXPOSE 443 80
|
||||
|
||||
|
|
15
app.py
15
app.py
|
@ -9,7 +9,10 @@ from flask.ext.mail import Mail
|
|||
import features
|
||||
|
||||
from storage import Storage
|
||||
from data import model
|
||||
from data import database
|
||||
from data.userfiles import Userfiles
|
||||
from data.users import UserAuthentication
|
||||
from util.analytics import Analytics
|
||||
from util.exceptionlog import Sentry
|
||||
from util.queuemetrics import QueueMetrics
|
||||
|
@ -51,8 +54,14 @@ billing = Billing(app)
|
|||
sentry = Sentry(app)
|
||||
build_logs = BuildLogs(app)
|
||||
queue_metrics = QueueMetrics(app)
|
||||
authentication = UserAuthentication(app)
|
||||
|
||||
image_diff_queue = WorkQueue(app.config['DIFFS_QUEUE_NAME'])
|
||||
dockerfile_build_queue = WorkQueue(app.config['DOCKERFILE_BUILD_QUEUE_NAME'],
|
||||
tf = app.config['DB_TRANSACTION_FACTORY']
|
||||
image_diff_queue = WorkQueue(app.config['DIFFS_QUEUE_NAME'], tf)
|
||||
dockerfile_build_queue = WorkQueue(app.config['DOCKERFILE_BUILD_QUEUE_NAME'], tf,
|
||||
reporter=queue_metrics.report)
|
||||
webhook_queue = WorkQueue(app.config['WEBHOOK_QUEUE_NAME'])
|
||||
webhook_queue = WorkQueue(app.config['WEBHOOK_QUEUE_NAME'], tf)
|
||||
|
||||
database.configure(app.config)
|
||||
model.config.app_config = app.config
|
||||
model.config.store = storage
|
||||
|
|
|
@ -11,7 +11,7 @@ import scopes
|
|||
|
||||
from data import model
|
||||
from data.model import oauth
|
||||
from app import app
|
||||
from app import app, authentication
|
||||
from permissions import QuayDeferredPermissionUser
|
||||
from auth_context import (set_authenticated_user, set_validated_token,
|
||||
set_authenticated_user_deferred, set_validated_oauth_token)
|
||||
|
@ -70,7 +70,7 @@ def process_basic_auth(auth):
|
|||
logger.debug('Invalid basic auth format.')
|
||||
return
|
||||
|
||||
credentials = b64decode(normalized[1]).split(':', 1)
|
||||
credentials = [part.decode('utf-8') for part in b64decode(normalized[1]).split(':', 1)]
|
||||
|
||||
if len(credentials) != 2:
|
||||
logger.debug('Invalid basic auth credential format.')
|
||||
|
@ -108,7 +108,7 @@ def process_basic_auth(auth):
|
|||
logger.debug('Invalid robot or password for robot: %s' % credentials[0])
|
||||
|
||||
else:
|
||||
authenticated = model.verify_user(credentials[0], credentials[1])
|
||||
authenticated = authentication.verify_user(credentials[0], credentials[1])
|
||||
|
||||
if authenticated:
|
||||
logger.debug('Successfully validated user: %s' % authenticated.username)
|
||||
|
|
5
conf/init/runmigration.sh
Executable file
5
conf/init/runmigration.sh
Executable file
|
@ -0,0 +1,5 @@
|
|||
#! /bin/bash
|
||||
set -e
|
||||
|
||||
# Run the database migration
|
||||
PYTHONPATH=. venv/bin/alembic upgrade head
|
|
@ -68,10 +68,17 @@ class DefaultConfig(object):
|
|||
|
||||
DB_TRANSACTION_FACTORY = create_transaction
|
||||
|
||||
# If true, CDN URLs will be used for our external dependencies, rather than the local
|
||||
# copies.
|
||||
USE_CDN = True
|
||||
|
||||
# Data storage
|
||||
STORAGE_TYPE = 'LocalStorage'
|
||||
STORAGE_PATH = 'test/data/registry'
|
||||
|
||||
# Authentication
|
||||
AUTHENTICATION_TYPE = 'Database'
|
||||
|
||||
# Build logs
|
||||
BUILDLOGS_OPTIONS = ['logs.quay.io']
|
||||
|
||||
|
|
|
@ -8,19 +8,19 @@ from peewee import *
|
|||
from sqlalchemy.engine.url import make_url
|
||||
from urlparse import urlparse
|
||||
|
||||
from app import app
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
SCHEME_DRIVERS = {
|
||||
'mysql': MySQLDatabase,
|
||||
'mysql+pymysql': MySQLDatabase,
|
||||
'sqlite': SqliteDatabase,
|
||||
}
|
||||
|
||||
db = Proxy()
|
||||
|
||||
def generate_db(config_object):
|
||||
def configure(config_object):
|
||||
db_kwargs = dict(config_object['DB_CONNECTION_ARGS'])
|
||||
parsed_url = make_url(config_object['DB_URI'])
|
||||
|
||||
|
@ -33,10 +33,8 @@ def generate_db(config_object):
|
|||
if parsed_url.password:
|
||||
db_kwargs['passwd'] = parsed_url.password
|
||||
|
||||
return SCHEME_DRIVERS[parsed_url.drivername](parsed_url.database, **db_kwargs)
|
||||
|
||||
|
||||
db = generate_db(app.config)
|
||||
real_db = SCHEME_DRIVERS[parsed_url.drivername](parsed_url.database, **db_kwargs)
|
||||
db.initialize(real_db)
|
||||
|
||||
|
||||
def random_string_generator(length=16):
|
||||
|
|
|
@ -2,15 +2,17 @@ from __future__ import with_statement
|
|||
from alembic import context
|
||||
from sqlalchemy import engine_from_config, pool
|
||||
from logging.config import fileConfig
|
||||
from urllib import unquote
|
||||
from peewee import SqliteDatabase
|
||||
|
||||
from data.database import all_models
|
||||
from data.database import all_models, db
|
||||
from app import app
|
||||
from data.model.sqlalchemybridge import gen_sqlalchemy_metadata
|
||||
|
||||
# this is the Alembic Config object, which provides
|
||||
# access to the values within the .ini file in use.
|
||||
config = context.config
|
||||
config.set_main_option('sqlalchemy.url', app.config['DB_URI'])
|
||||
config.set_main_option('sqlalchemy.url', unquote(app.config['DB_URI']))
|
||||
|
||||
# Interpret the config file for Python logging.
|
||||
# This line sets up loggers basically.
|
||||
|
@ -39,8 +41,8 @@ def run_migrations_offline():
|
|||
script output.
|
||||
|
||||
"""
|
||||
url = app.config['DB_CONNECTION']
|
||||
context.configure(url=url, target_metadata=target_metadata)
|
||||
url = unquote(app.config['DB_URI'])
|
||||
context.configure(url=url, target_metadata=target_metadata, transactional_ddl=True)
|
||||
|
||||
with context.begin_transaction():
|
||||
context.run_migrations()
|
||||
|
@ -52,6 +54,11 @@ def run_migrations_online():
|
|||
and associate a connection with the context.
|
||||
|
||||
"""
|
||||
|
||||
if isinstance(db.obj, SqliteDatabase):
|
||||
print ('Skipping Sqlite migration!')
|
||||
return
|
||||
|
||||
engine = engine_from_config(
|
||||
config.get_section(config.config_ini_section),
|
||||
prefix='sqlalchemy.',
|
||||
|
|
607
data/migrations/versions/5a07499ce53f_set_up_initial_database.py
Normal file
607
data/migrations/versions/5a07499ce53f_set_up_initial_database.py
Normal file
|
@ -0,0 +1,607 @@
|
|||
"""Set up initial database
|
||||
|
||||
Revision ID: 5a07499ce53f
|
||||
Revises: None
|
||||
Create Date: 2014-05-13 11:26:51.808426
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '5a07499ce53f'
|
||||
down_revision = None
|
||||
|
||||
from alembic import op
|
||||
from data.model.sqlalchemybridge import gen_sqlalchemy_metadata
|
||||
from data.database import all_models
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
def upgrade():
|
||||
schema = gen_sqlalchemy_metadata(all_models)
|
||||
|
||||
### commands auto generated by Alembic - please adjust! ###
|
||||
op.create_table('loginservice',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('name', sa.String(length=255), nullable=False),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
op.create_index('loginservice_name', 'loginservice', ['name'], unique=True)
|
||||
|
||||
op.bulk_insert(schema.tables['loginservice'],
|
||||
[
|
||||
{'id':1, 'name':'github'},
|
||||
{'id':2, 'name':'quayrobot'},
|
||||
{'id':3, 'name':'ldap'},
|
||||
])
|
||||
|
||||
op.create_table('imagestorage',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('uuid', sa.String(length=255), nullable=False),
|
||||
sa.Column('checksum', sa.String(length=255), nullable=True),
|
||||
sa.Column('created', sa.DateTime(), nullable=True),
|
||||
sa.Column('comment', sa.Text(), nullable=True),
|
||||
sa.Column('command', sa.Text(), nullable=True),
|
||||
sa.Column('image_size', sa.BigInteger(), nullable=True),
|
||||
sa.Column('uploading', sa.Boolean(), nullable=True),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
op.create_table('queueitem',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('queue_name', sa.String(length=1024), nullable=False),
|
||||
sa.Column('body', sa.Text(), nullable=False),
|
||||
sa.Column('available_after', sa.DateTime(), nullable=False),
|
||||
sa.Column('available', sa.Boolean(), nullable=False),
|
||||
sa.Column('processing_expires', sa.DateTime(), nullable=True),
|
||||
sa.Column('retries_remaining', sa.Integer(), nullable=False),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
op.create_index('queueitem_available', 'queueitem', ['available'], unique=False)
|
||||
op.create_index('queueitem_available_after', 'queueitem', ['available_after'], unique=False)
|
||||
op.create_index('queueitem_processing_expires', 'queueitem', ['processing_expires'], unique=False)
|
||||
op.create_index('queueitem_queue_name', 'queueitem', ['queue_name'], unique=False)
|
||||
op.create_table('role',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('name', sa.String(length=255), nullable=False),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
op.create_index('role_name', 'role', ['name'], unique=False)
|
||||
|
||||
op.bulk_insert(schema.tables['role'],
|
||||
[
|
||||
{'id':1, 'name':'admin'},
|
||||
{'id':2, 'name':'write'},
|
||||
{'id':3, 'name':'read'},
|
||||
])
|
||||
|
||||
op.create_table('logentrykind',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('name', sa.String(length=255), nullable=False),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
op.create_index('logentrykind_name', 'logentrykind', ['name'], unique=False)
|
||||
|
||||
op.bulk_insert(schema.tables['logentrykind'],
|
||||
[
|
||||
{'id':1, 'name':'account_change_plan'},
|
||||
{'id':2, 'name':'account_change_cc'},
|
||||
{'id':3, 'name':'account_change_password'},
|
||||
{'id':4, 'name':'account_convert'},
|
||||
|
||||
{'id':5, 'name':'create_robot'},
|
||||
{'id':6, 'name':'delete_robot'},
|
||||
|
||||
{'id':7, 'name':'create_repo'},
|
||||
{'id':8, 'name':'push_repo'},
|
||||
{'id':9, 'name':'pull_repo'},
|
||||
{'id':10, 'name':'delete_repo'},
|
||||
{'id':11, 'name':'create_tag'},
|
||||
{'id':12, 'name':'move_tag'},
|
||||
{'id':13, 'name':'delete_tag'},
|
||||
{'id':14, 'name':'add_repo_permission'},
|
||||
{'id':15, 'name':'change_repo_permission'},
|
||||
{'id':16, 'name':'delete_repo_permission'},
|
||||
{'id':17, 'name':'change_repo_visibility'},
|
||||
{'id':18, 'name':'add_repo_accesstoken'},
|
||||
{'id':19, 'name':'delete_repo_accesstoken'},
|
||||
{'id':20, 'name':'add_repo_webhook'},
|
||||
{'id':21, 'name':'delete_repo_webhook'},
|
||||
{'id':22, 'name':'set_repo_description'},
|
||||
|
||||
{'id':23, 'name':'build_dockerfile'},
|
||||
|
||||
{'id':24, 'name':'org_create_team'},
|
||||
{'id':25, 'name':'org_delete_team'},
|
||||
{'id':26, 'name':'org_add_team_member'},
|
||||
{'id':27, 'name':'org_remove_team_member'},
|
||||
{'id':28, 'name':'org_set_team_description'},
|
||||
{'id':29, 'name':'org_set_team_role'},
|
||||
|
||||
{'id':30, 'name':'create_prototype_permission'},
|
||||
{'id':31, 'name':'modify_prototype_permission'},
|
||||
{'id':32, 'name':'delete_prototype_permission'},
|
||||
|
||||
{'id':33, 'name':'setup_repo_trigger'},
|
||||
{'id':34, 'name':'delete_repo_trigger'},
|
||||
|
||||
{'id':35, 'name':'create_application'},
|
||||
{'id':36, 'name':'update_application'},
|
||||
{'id':37, 'name':'delete_application'},
|
||||
{'id':38, 'name':'reset_application_client_secret'},
|
||||
])
|
||||
|
||||
op.create_table('notificationkind',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('name', sa.String(length=255), nullable=False),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
op.create_index('notificationkind_name', 'notificationkind', ['name'], unique=False)
|
||||
|
||||
op.bulk_insert(schema.tables['notificationkind'],
|
||||
[
|
||||
{'id':1, 'name':'password_required'},
|
||||
{'id':2, 'name':'over_private_usage'},
|
||||
])
|
||||
|
||||
op.create_table('teamrole',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('name', sa.String(length=255), nullable=False),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
op.create_index('teamrole_name', 'teamrole', ['name'], unique=False)
|
||||
|
||||
op.bulk_insert(schema.tables['teamrole'],
|
||||
[
|
||||
{'id':1, 'name':'admin'},
|
||||
{'id':2, 'name':'creator'},
|
||||
{'id':3, 'name':'member'},
|
||||
])
|
||||
|
||||
op.create_table('visibility',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('name', sa.String(length=255), nullable=False),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
op.create_index('visibility_name', 'visibility', ['name'], unique=False)
|
||||
|
||||
op.bulk_insert(schema.tables['visibility'],
|
||||
[
|
||||
{'id':1, 'name':'public'},
|
||||
{'id':2, 'name':'private'},
|
||||
])
|
||||
|
||||
op.create_table('user',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('username', sa.String(length=255), nullable=False),
|
||||
sa.Column('password_hash', sa.String(length=255), nullable=True),
|
||||
sa.Column('email', sa.String(length=255), nullable=False),
|
||||
sa.Column('verified', sa.Boolean(), nullable=False),
|
||||
sa.Column('stripe_id', sa.String(length=255), nullable=True),
|
||||
sa.Column('organization', sa.Boolean(), nullable=False),
|
||||
sa.Column('robot', sa.Boolean(), nullable=False),
|
||||
sa.Column('invoice_email', sa.Boolean(), nullable=False),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
op.create_index('user_email', 'user', ['email'], unique=True)
|
||||
op.create_index('user_organization', 'user', ['organization'], unique=False)
|
||||
op.create_index('user_robot', 'user', ['robot'], unique=False)
|
||||
op.create_index('user_stripe_id', 'user', ['stripe_id'], unique=False)
|
||||
op.create_index('user_username', 'user', ['username'], unique=True)
|
||||
op.create_table('buildtriggerservice',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('name', sa.String(length=255), nullable=False),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
op.create_index('buildtriggerservice_name', 'buildtriggerservice', ['name'], unique=False)
|
||||
|
||||
op.bulk_insert(schema.tables['buildtriggerservice'],
|
||||
[
|
||||
{'id':1, 'name':'github'},
|
||||
])
|
||||
|
||||
op.create_table('federatedlogin',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('user_id', sa.Integer(), nullable=False),
|
||||
sa.Column('service_id', sa.Integer(), nullable=False),
|
||||
sa.Column('service_ident', sa.String(length=255, collation='utf8_general_ci'), nullable=False),
|
||||
sa.ForeignKeyConstraint(['service_id'], ['loginservice.id'], ),
|
||||
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
op.create_index('federatedlogin_service_id', 'federatedlogin', ['service_id'], unique=False)
|
||||
op.create_index('federatedlogin_service_id_service_ident', 'federatedlogin', ['service_id', 'service_ident'], unique=True)
|
||||
op.create_index('federatedlogin_service_id_user_id', 'federatedlogin', ['service_id', 'user_id'], unique=True)
|
||||
op.create_index('federatedlogin_user_id', 'federatedlogin', ['user_id'], unique=False)
|
||||
op.create_table('oauthapplication',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('client_id', sa.String(length=255), nullable=False),
|
||||
sa.Column('client_secret', sa.String(length=255), nullable=False),
|
||||
sa.Column('redirect_uri', sa.String(length=255), nullable=False),
|
||||
sa.Column('application_uri', sa.String(length=255), nullable=False),
|
||||
sa.Column('organization_id', sa.Integer(), nullable=False),
|
||||
sa.Column('name', sa.String(length=255), nullable=False),
|
||||
sa.Column('description', sa.Text(), nullable=False),
|
||||
sa.Column('gravatar_email', sa.String(length=255), nullable=True),
|
||||
sa.ForeignKeyConstraint(['organization_id'], ['user.id'], ),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
op.create_index('oauthapplication_client_id', 'oauthapplication', ['client_id'], unique=False)
|
||||
op.create_index('oauthapplication_organization_id', 'oauthapplication', ['organization_id'], unique=False)
|
||||
op.create_table('notification',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('uuid', sa.String(length=255), nullable=False),
|
||||
sa.Column('kind_id', sa.Integer(), nullable=False),
|
||||
sa.Column('target_id', sa.Integer(), nullable=False),
|
||||
sa.Column('metadata_json', sa.Text(), nullable=False),
|
||||
sa.Column('created', sa.DateTime(), nullable=False),
|
||||
sa.ForeignKeyConstraint(['kind_id'], ['notificationkind.id'], ),
|
||||
sa.ForeignKeyConstraint(['target_id'], ['user.id'], ),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
op.create_index('notification_created', 'notification', ['created'], unique=False)
|
||||
op.create_index('notification_kind_id', 'notification', ['kind_id'], unique=False)
|
||||
op.create_index('notification_target_id', 'notification', ['target_id'], unique=False)
|
||||
op.create_index('notification_uuid', 'notification', ['uuid'], unique=False)
|
||||
op.create_table('emailconfirmation',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('code', sa.String(length=255), nullable=False),
|
||||
sa.Column('user_id', sa.Integer(), nullable=False),
|
||||
sa.Column('pw_reset', sa.Boolean(), nullable=False),
|
||||
sa.Column('new_email', sa.String(length=255), nullable=True),
|
||||
sa.Column('email_confirm', sa.Boolean(), nullable=False),
|
||||
sa.Column('created', sa.DateTime(), nullable=False),
|
||||
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
op.create_index('emailconfirmation_code', 'emailconfirmation', ['code'], unique=True)
|
||||
op.create_index('emailconfirmation_user_id', 'emailconfirmation', ['user_id'], unique=False)
|
||||
op.create_table('team',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('name', sa.String(length=255), nullable=False),
|
||||
sa.Column('organization_id', sa.Integer(), nullable=False),
|
||||
sa.Column('role_id', sa.Integer(), nullable=False),
|
||||
sa.Column('description', sa.Text(), nullable=False),
|
||||
sa.ForeignKeyConstraint(['organization_id'], ['user.id'], ),
|
||||
sa.ForeignKeyConstraint(['role_id'], ['teamrole.id'], ),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
op.create_index('team_name', 'team', ['name'], unique=False)
|
||||
op.create_index('team_name_organization_id', 'team', ['name', 'organization_id'], unique=True)
|
||||
op.create_index('team_organization_id', 'team', ['organization_id'], unique=False)
|
||||
op.create_index('team_role_id', 'team', ['role_id'], unique=False)
|
||||
op.create_table('repository',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('namespace', sa.String(length=255), nullable=False),
|
||||
sa.Column('name', sa.String(length=255), nullable=False),
|
||||
sa.Column('visibility_id', sa.Integer(), nullable=False),
|
||||
sa.Column('description', sa.Text(), nullable=True),
|
||||
sa.Column('badge_token', sa.String(length=255), nullable=False),
|
||||
sa.ForeignKeyConstraint(['visibility_id'], ['visibility.id'], ),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
op.create_index('repository_namespace_name', 'repository', ['namespace', 'name'], unique=True)
|
||||
op.create_index('repository_visibility_id', 'repository', ['visibility_id'], unique=False)
|
||||
op.create_table('accesstoken',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('friendly_name', sa.String(length=255), nullable=True),
|
||||
sa.Column('code', sa.String(length=255), nullable=False),
|
||||
sa.Column('repository_id', sa.Integer(), nullable=False),
|
||||
sa.Column('created', sa.DateTime(), nullable=False),
|
||||
sa.Column('role_id', sa.Integer(), nullable=False),
|
||||
sa.Column('temporary', sa.Boolean(), nullable=False),
|
||||
sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], ),
|
||||
sa.ForeignKeyConstraint(['role_id'], ['role.id'], ),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
op.create_index('accesstoken_code', 'accesstoken', ['code'], unique=True)
|
||||
op.create_index('accesstoken_repository_id', 'accesstoken', ['repository_id'], unique=False)
|
||||
op.create_index('accesstoken_role_id', 'accesstoken', ['role_id'], unique=False)
|
||||
op.create_table('repositorypermission',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('team_id', sa.Integer(), nullable=True),
|
||||
sa.Column('user_id', sa.Integer(), nullable=True),
|
||||
sa.Column('repository_id', sa.Integer(), nullable=False),
|
||||
sa.Column('role_id', sa.Integer(), nullable=False),
|
||||
sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], ),
|
||||
sa.ForeignKeyConstraint(['role_id'], ['role.id'], ),
|
||||
sa.ForeignKeyConstraint(['team_id'], ['team.id'], ),
|
||||
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
op.create_index('repositorypermission_repository_id', 'repositorypermission', ['repository_id'], unique=False)
|
||||
op.create_index('repositorypermission_role_id', 'repositorypermission', ['role_id'], unique=False)
|
||||
op.create_index('repositorypermission_team_id', 'repositorypermission', ['team_id'], unique=False)
|
||||
op.create_index('repositorypermission_team_id_repository_id', 'repositorypermission', ['team_id', 'repository_id'], unique=True)
|
||||
op.create_index('repositorypermission_user_id', 'repositorypermission', ['user_id'], unique=False)
|
||||
op.create_index('repositorypermission_user_id_repository_id', 'repositorypermission', ['user_id', 'repository_id'], unique=True)
|
||||
op.create_table('oauthaccesstoken',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('uuid', sa.String(length=255), nullable=False),
|
||||
sa.Column('application_id', sa.Integer(), nullable=False),
|
||||
sa.Column('authorized_user_id', sa.Integer(), nullable=False),
|
||||
sa.Column('scope', sa.String(length=255), nullable=False),
|
||||
sa.Column('access_token', sa.String(length=255), nullable=False),
|
||||
sa.Column('token_type', sa.String(length=255), nullable=False),
|
||||
sa.Column('expires_at', sa.DateTime(), nullable=False),
|
||||
sa.Column('refresh_token', sa.String(length=255), nullable=True),
|
||||
sa.Column('data', sa.Text(), nullable=False),
|
||||
sa.ForeignKeyConstraint(['application_id'], ['oauthapplication.id'], ),
|
||||
sa.ForeignKeyConstraint(['authorized_user_id'], ['user.id'], ),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
op.create_index('oauthaccesstoken_access_token', 'oauthaccesstoken', ['access_token'], unique=False)
|
||||
op.create_index('oauthaccesstoken_application_id', 'oauthaccesstoken', ['application_id'], unique=False)
|
||||
op.create_index('oauthaccesstoken_authorized_user_id', 'oauthaccesstoken', ['authorized_user_id'], unique=False)
|
||||
op.create_index('oauthaccesstoken_refresh_token', 'oauthaccesstoken', ['refresh_token'], unique=False)
|
||||
op.create_index('oauthaccesstoken_uuid', 'oauthaccesstoken', ['uuid'], unique=False)
|
||||
op.create_table('teammember',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('user_id', sa.Integer(), nullable=False),
|
||||
sa.Column('team_id', sa.Integer(), nullable=False),
|
||||
sa.ForeignKeyConstraint(['team_id'], ['team.id'], ),
|
||||
sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
op.create_index('teammember_team_id', 'teammember', ['team_id'], unique=False)
|
||||
op.create_index('teammember_user_id', 'teammember', ['user_id'], unique=False)
|
||||
op.create_index('teammember_user_id_team_id', 'teammember', ['user_id', 'team_id'], unique=True)
|
||||
op.create_table('webhook',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('public_id', sa.String(length=255), nullable=False),
|
||||
sa.Column('repository_id', sa.Integer(), nullable=False),
|
||||
sa.Column('parameters', sa.Text(), nullable=False),
|
||||
sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], ),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
op.create_index('webhook_public_id', 'webhook', ['public_id'], unique=True)
|
||||
op.create_index('webhook_repository_id', 'webhook', ['repository_id'], unique=False)
|
||||
op.create_table('oauthauthorizationcode',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('application_id', sa.Integer(), nullable=False),
|
||||
sa.Column('code', sa.String(length=255), nullable=False),
|
||||
sa.Column('scope', sa.String(length=255), nullable=False),
|
||||
sa.Column('data', sa.Text(), nullable=False),
|
||||
sa.ForeignKeyConstraint(['application_id'], ['oauthapplication.id'], ),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
op.create_index('oauthauthorizationcode_application_id', 'oauthauthorizationcode', ['application_id'], unique=False)
|
||||
op.create_index('oauthauthorizationcode_code', 'oauthauthorizationcode', ['code'], unique=False)
|
||||
op.create_table('image',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('docker_image_id', sa.String(length=255), nullable=False),
|
||||
sa.Column('checksum', sa.String(length=255), nullable=True),
|
||||
sa.Column('created', sa.DateTime(), nullable=True),
|
||||
sa.Column('comment', sa.Text(), nullable=True),
|
||||
sa.Column('command', sa.Text(), nullable=True),
|
||||
sa.Column('repository_id', sa.Integer(), nullable=False),
|
||||
sa.Column('image_size', sa.BigInteger(), nullable=True),
|
||||
sa.Column('ancestors', sa.String(length=60535, collation='latin1_swedish_ci'), nullable=True),
|
||||
sa.Column('storage_id', sa.Integer(), nullable=True),
|
||||
sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], ),
|
||||
sa.ForeignKeyConstraint(['storage_id'], ['imagestorage.id'], ),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
op.create_index('image_ancestors', 'image', ['ancestors'], unique=False)
|
||||
op.create_index('image_repository_id', 'image', ['repository_id'], unique=False)
|
||||
op.create_index('image_repository_id_docker_image_id', 'image', ['repository_id', 'docker_image_id'], unique=False)
|
||||
op.create_index('image_storage_id', 'image', ['storage_id'], unique=False)
|
||||
op.create_table('permissionprototype',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('org_id', sa.Integer(), nullable=False),
|
||||
sa.Column('uuid', sa.String(length=255), nullable=False),
|
||||
sa.Column('activating_user_id', sa.Integer(), nullable=True),
|
||||
sa.Column('delegate_user_id', sa.Integer(), nullable=True),
|
||||
sa.Column('delegate_team_id', sa.Integer(), nullable=True),
|
||||
sa.Column('role_id', sa.Integer(), nullable=False),
|
||||
sa.ForeignKeyConstraint(['activating_user_id'], ['user.id'], ),
|
||||
sa.ForeignKeyConstraint(['delegate_team_id'], ['team.id'], ),
|
||||
sa.ForeignKeyConstraint(['delegate_user_id'], ['user.id'], ),
|
||||
sa.ForeignKeyConstraint(['org_id'], ['user.id'], ),
|
||||
sa.ForeignKeyConstraint(['role_id'], ['role.id'], ),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
op.create_index('permissionprototype_activating_user_id', 'permissionprototype', ['activating_user_id'], unique=False)
|
||||
op.create_index('permissionprototype_delegate_team_id', 'permissionprototype', ['delegate_team_id'], unique=False)
|
||||
op.create_index('permissionprototype_delegate_user_id', 'permissionprototype', ['delegate_user_id'], unique=False)
|
||||
op.create_index('permissionprototype_org_id', 'permissionprototype', ['org_id'], unique=False)
|
||||
op.create_index('permissionprototype_org_id_activating_user_id', 'permissionprototype', ['org_id', 'activating_user_id'], unique=False)
|
||||
op.create_index('permissionprototype_role_id', 'permissionprototype', ['role_id'], unique=False)
|
||||
op.create_table('repositorytag',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('name', sa.String(length=255), nullable=False),
|
||||
sa.Column('image_id', sa.Integer(), nullable=False),
|
||||
sa.Column('repository_id', sa.Integer(), nullable=False),
|
||||
sa.ForeignKeyConstraint(['image_id'], ['image.id'], ),
|
||||
sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], ),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
op.create_index('repositorytag_image_id', 'repositorytag', ['image_id'], unique=False)
|
||||
op.create_index('repositorytag_repository_id', 'repositorytag', ['repository_id'], unique=False)
|
||||
op.create_index('repositorytag_repository_id_name', 'repositorytag', ['repository_id', 'name'], unique=True)
|
||||
op.create_table('logentry',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('kind_id', sa.Integer(), nullable=False),
|
||||
sa.Column('account_id', sa.Integer(), nullable=False),
|
||||
sa.Column('performer_id', sa.Integer(), nullable=True),
|
||||
sa.Column('repository_id', sa.Integer(), nullable=True),
|
||||
sa.Column('access_token_id', sa.Integer(), nullable=True),
|
||||
sa.Column('datetime', sa.DateTime(), nullable=False),
|
||||
sa.Column('ip', sa.String(length=255), nullable=True),
|
||||
sa.Column('metadata_json', sa.Text(), nullable=False),
|
||||
sa.ForeignKeyConstraint(['access_token_id'], ['accesstoken.id'], ),
|
||||
sa.ForeignKeyConstraint(['account_id'], ['user.id'], ),
|
||||
sa.ForeignKeyConstraint(['kind_id'], ['logentrykind.id'], ),
|
||||
sa.ForeignKeyConstraint(['performer_id'], ['user.id'], ),
|
||||
sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], ),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
op.create_index('logentry_access_token_id', 'logentry', ['access_token_id'], unique=False)
|
||||
op.create_index('logentry_account_id', 'logentry', ['account_id'], unique=False)
|
||||
op.create_index('logentry_datetime', 'logentry', ['datetime'], unique=False)
|
||||
op.create_index('logentry_kind_id', 'logentry', ['kind_id'], unique=False)
|
||||
op.create_index('logentry_performer_id', 'logentry', ['performer_id'], unique=False)
|
||||
op.create_index('logentry_repository_id', 'logentry', ['repository_id'], unique=False)
|
||||
op.create_table('repositorybuildtrigger',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('uuid', sa.String(length=255), nullable=False),
|
||||
sa.Column('service_id', sa.Integer(), nullable=False),
|
||||
sa.Column('repository_id', sa.Integer(), nullable=False),
|
||||
sa.Column('connected_user_id', sa.Integer(), nullable=False),
|
||||
sa.Column('auth_token', sa.String(length=255), nullable=False),
|
||||
sa.Column('config', sa.Text(), nullable=False),
|
||||
sa.Column('write_token_id', sa.Integer(), nullable=True),
|
||||
sa.Column('pull_robot_id', sa.Integer(), nullable=True),
|
||||
sa.ForeignKeyConstraint(['connected_user_id'], ['user.id'], ),
|
||||
sa.ForeignKeyConstraint(['pull_robot_id'], ['user.id'], ),
|
||||
sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], ),
|
||||
sa.ForeignKeyConstraint(['service_id'], ['buildtriggerservice.id'], ),
|
||||
sa.ForeignKeyConstraint(['write_token_id'], ['accesstoken.id'], ),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
op.create_index('repositorybuildtrigger_connected_user_id', 'repositorybuildtrigger', ['connected_user_id'], unique=False)
|
||||
op.create_index('repositorybuildtrigger_pull_robot_id', 'repositorybuildtrigger', ['pull_robot_id'], unique=False)
|
||||
op.create_index('repositorybuildtrigger_repository_id', 'repositorybuildtrigger', ['repository_id'], unique=False)
|
||||
op.create_index('repositorybuildtrigger_service_id', 'repositorybuildtrigger', ['service_id'], unique=False)
|
||||
op.create_index('repositorybuildtrigger_write_token_id', 'repositorybuildtrigger', ['write_token_id'], unique=False)
|
||||
op.create_table('repositorybuild',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('uuid', sa.String(length=255), nullable=False),
|
||||
sa.Column('repository_id', sa.Integer(), nullable=False),
|
||||
sa.Column('access_token_id', sa.Integer(), nullable=False),
|
||||
sa.Column('resource_key', sa.String(length=255), nullable=False),
|
||||
sa.Column('job_config', sa.Text(), nullable=False),
|
||||
sa.Column('phase', sa.String(length=255), nullable=False),
|
||||
sa.Column('started', sa.DateTime(), nullable=False),
|
||||
sa.Column('display_name', sa.String(length=255), nullable=False),
|
||||
sa.Column('trigger_id', sa.Integer(), nullable=True),
|
||||
sa.Column('pull_robot_id', sa.Integer(), nullable=True),
|
||||
sa.ForeignKeyConstraint(['access_token_id'], ['accesstoken.id'], ),
|
||||
sa.ForeignKeyConstraint(['pull_robot_id'], ['user.id'], ),
|
||||
sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], ),
|
||||
sa.ForeignKeyConstraint(['trigger_id'], ['repositorybuildtrigger.id'], ),
|
||||
sa.PrimaryKeyConstraint('id')
|
||||
)
|
||||
op.create_index('repositorybuild_access_token_id', 'repositorybuild', ['access_token_id'], unique=False)
|
||||
op.create_index('repositorybuild_pull_robot_id', 'repositorybuild', ['pull_robot_id'], unique=False)
|
||||
op.create_index('repositorybuild_repository_id', 'repositorybuild', ['repository_id'], unique=False)
|
||||
op.create_index('repositorybuild_resource_key', 'repositorybuild', ['resource_key'], unique=False)
|
||||
op.create_index('repositorybuild_trigger_id', 'repositorybuild', ['trigger_id'], unique=False)
|
||||
op.create_index('repositorybuild_uuid', 'repositorybuild', ['uuid'], unique=False)
|
||||
### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
    """Reverse the initial schema migration: drop every table created by
    upgrade(), removing each table's secondary indexes first.

    Tables are dropped in reverse dependency order so no foreign-key
    constraint is violated. Index names follow the '<table>_<columns>'
    convention used by upgrade().
    """
    ### commands auto generated by Alembic - please adjust! ###
    # (table name, index-name suffixes to drop before the table itself)
    schema = [
        ('repositorybuild', ('uuid', 'trigger_id', 'resource_key', 'repository_id',
                             'pull_robot_id', 'access_token_id')),
        ('repositorybuildtrigger', ('write_token_id', 'service_id', 'repository_id',
                                    'pull_robot_id', 'connected_user_id')),
        ('logentry', ('repository_id', 'performer_id', 'kind_id', 'datetime',
                      'account_id', 'access_token_id')),
        ('repositorytag', ('repository_id_name', 'repository_id', 'image_id')),
        ('permissionprototype', ('role_id', 'org_id_activating_user_id', 'org_id',
                                 'delegate_user_id', 'delegate_team_id',
                                 'activating_user_id')),
        ('image', ('storage_id', 'repository_id_docker_image_id', 'repository_id',
                   'ancestors')),
        ('oauthauthorizationcode', ('code', 'application_id')),
        ('webhook', ('repository_id', 'public_id')),
        ('teammember', ('user_id_team_id', 'user_id', 'team_id')),
        ('oauthaccesstoken', ('uuid', 'refresh_token', 'authorized_user_id',
                              'application_id', 'access_token')),
        ('repositorypermission', ('user_id_repository_id', 'user_id',
                                  'team_id_repository_id', 'team_id', 'role_id',
                                  'repository_id')),
        ('accesstoken', ('role_id', 'repository_id', 'code')),
        ('repository', ('visibility_id', 'namespace_name')),
        ('team', ('role_id', 'organization_id', 'name_organization_id', 'name')),
        ('emailconfirmation', ('user_id', 'code')),
        ('notification', ('uuid', 'target_id', 'kind_id', 'created')),
        ('oauthapplication', ('organization_id', 'client_id')),
        ('federatedlogin', ('user_id', 'service_id_user_id',
                            'service_id_service_ident', 'service_id')),
        ('buildtriggerservice', ('name',)),
        ('user', ('username', 'stripe_id', 'robot', 'organization', 'email')),
        ('visibility', ('name',)),
        ('teamrole', ('name',)),
        ('notificationkind', ('name',)),
        ('logentrykind', ('name',)),
        ('role', ('name',)),
        ('queueitem', ('queue_name', 'processing_expires', 'available_after',
                       'available')),
        ('imagestorage', ()),  # has no secondary indexes
        ('loginservice', ('name',)),
    ]

    for table, index_suffixes in schema:
        for suffix in index_suffixes:
            op.drop_index('%s_%s' % (table, suffix), table_name=table)
        op.drop_table(table)
    ### end Alembic commands ###
|
|
@ -8,11 +8,17 @@ from data.database import *
|
|||
from util.validation import *
|
||||
from util.names import format_robot_username
|
||||
|
||||
from app import storage as store
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
transaction_factory = app.config['DB_TRANSACTION_FACTORY']
|
||||
|
||||
|
||||
class Config(object):
|
||||
def __init__(self):
|
||||
self.app_config = None
|
||||
self.store = None
|
||||
|
||||
config = Config()
|
||||
|
||||
|
||||
class DataModelException(Exception):
|
||||
pass
|
||||
|
@ -58,7 +64,7 @@ class InvalidBuildTriggerException(DataModelException):
|
|||
pass
|
||||
|
||||
|
||||
def create_user(username, password, email, is_organization=False):
|
||||
def create_user(username, password, email, add_change_pw_notification=True):
|
||||
if not validate_email(email):
|
||||
raise InvalidEmailAddressException('Invalid email address: %s' % email)
|
||||
|
||||
|
@ -97,7 +103,7 @@ def create_user(username, password, email, is_organization=False):
|
|||
|
||||
# If the password is None, then add a notification for the user to change
|
||||
# their password ASAP.
|
||||
if not pw_hash and not is_organization:
|
||||
if not pw_hash and add_change_pw_notification:
|
||||
create_notification('password_required', new_user)
|
||||
|
||||
return new_user
|
||||
|
@ -105,10 +111,18 @@ def create_user(username, password, email, is_organization=False):
|
|||
raise DataModelException(ex.message)
|
||||
|
||||
|
||||
def is_username_unique(test_username):
|
||||
try:
|
||||
User.get((User.username == test_username))
|
||||
return False
|
||||
except User.DoesNotExist:
|
||||
return True
|
||||
|
||||
|
||||
def create_organization(name, email, creating_user):
|
||||
try:
|
||||
# Create the org
|
||||
new_org = create_user(name, None, email, is_organization=True)
|
||||
new_org = create_user(name, None, email, add_change_pw_notification=False)
|
||||
new_org.organization = True
|
||||
new_org.save()
|
||||
|
||||
|
@ -340,17 +354,15 @@ def attach_federated_login(user, service_name, service_id):
|
|||
|
||||
|
||||
def verify_federated_login(service_name, service_id):
|
||||
selected = FederatedLogin.select(FederatedLogin, User)
|
||||
with_service = selected.join(LoginService)
|
||||
with_user = with_service.switch(FederatedLogin).join(User)
|
||||
found = with_user.where(FederatedLogin.service_ident == service_id,
|
||||
LoginService.name == service_name)
|
||||
|
||||
found_list = list(found)
|
||||
|
||||
if found_list:
|
||||
return found_list[0].user
|
||||
|
||||
try:
|
||||
found = (FederatedLogin
|
||||
.select(FederatedLogin, User)
|
||||
.join(LoginService)
|
||||
.switch(FederatedLogin).join(User)
|
||||
.where(FederatedLogin.service_ident == service_id, LoginService.name == service_name)
|
||||
.get())
|
||||
return found.user
|
||||
except FederatedLogin.DoesNotExist:
|
||||
return None
|
||||
|
||||
|
||||
|
@ -935,7 +947,7 @@ def __translate_ancestry(old_ancestry, translations, repository, username):
|
|||
|
||||
def find_create_or_link_image(docker_image_id, repository, username,
|
||||
translations):
|
||||
with transaction_factory(db):
|
||||
with config.app_config['DB_TRANSACTION_FACTORY'](db):
|
||||
repo_image = get_repo_image(repository.namespace, repository.name,
|
||||
docker_image_id)
|
||||
if repo_image:
|
||||
|
@ -1018,7 +1030,7 @@ def set_image_size(docker_image_id, namespace_name, repository_name,
|
|||
|
||||
def set_image_metadata(docker_image_id, namespace_name, repository_name,
|
||||
created_date_str, comment, command, parent=None):
|
||||
with transaction_factory(db):
|
||||
with config.app_config['DB_TRANSACTION_FACTORY'](db):
|
||||
query = (Image
|
||||
.select(Image, ImageStorage)
|
||||
.join(Repository)
|
||||
|
@ -1064,7 +1076,7 @@ def list_repository_tags(namespace_name, repository_name):
|
|||
|
||||
|
||||
def garbage_collect_repository(namespace_name, repository_name):
|
||||
with transaction_factory(db):
|
||||
with config.app_config['DB_TRANSACTION_FACTORY'](db):
|
||||
# Get a list of all images used by tags in the repository
|
||||
tag_query = (RepositoryTag
|
||||
.select(RepositoryTag, Image, ImageStorage)
|
||||
|
@ -1098,10 +1110,10 @@ def garbage_collect_repository(namespace_name, repository_name):
|
|||
image_to_remove.storage.uuid)
|
||||
uuids_to_check_for_gc.add(image_to_remove.storage.uuid)
|
||||
else:
|
||||
image_path = store.image_path(namespace_name, repository_name,
|
||||
image_path = config.store.image_path(namespace_name, repository_name,
|
||||
image_to_remove.docker_image_id, None)
|
||||
logger.debug('Deleting image storage: %s', image_path)
|
||||
store.remove(image_path)
|
||||
config.store.remove(image_path)
|
||||
|
||||
image_to_remove.delete_instance()
|
||||
|
||||
|
@ -1116,10 +1128,9 @@ def garbage_collect_repository(namespace_name, repository_name):
|
|||
for storage in storage_to_remove:
|
||||
logger.debug('Garbage collecting image storage: %s', storage.uuid)
|
||||
storage.delete_instance()
|
||||
image_path = store.image_path(namespace_name, repository_name,
|
||||
image_to_remove.docker_image_id,
|
||||
storage.uuid)
|
||||
store.remove(image_path)
|
||||
image_path = config.store.image_path(namespace_name, repository_name,
|
||||
image_to_remove.docker_image_id, storage.uuid)
|
||||
config.store.remove(image_path)
|
||||
|
||||
return len(to_remove)
|
||||
|
||||
|
@ -1489,8 +1500,8 @@ def get_pull_credentials(robotname):
|
|||
return {
|
||||
'username': robot.username,
|
||||
'password': login_info.service_ident,
|
||||
'registry': '%s://%s/v1/' % (app.config['PREFERRED_URL_SCHEME'],
|
||||
app.config['SERVER_HOSTNAME']),
|
||||
'registry': '%s://%s/v1/' % (config.app_config['PREFERRED_URL_SCHEME'],
|
||||
config.app_config['SERVER_HOSTNAME']),
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -1,19 +1,17 @@
|
|||
from datetime import datetime, timedelta
|
||||
|
||||
from data.database import QueueItem, db
|
||||
from app import app
|
||||
|
||||
|
||||
transaction_factory = app.config['DB_TRANSACTION_FACTORY']
|
||||
|
||||
|
||||
MINIMUM_EXTENSION = timedelta(seconds=20)
|
||||
|
||||
|
||||
class WorkQueue(object):
|
||||
def __init__(self, queue_name, canonical_name_match_list=None, reporter=None):
|
||||
def __init__(self, queue_name, transaction_factory,
|
||||
canonical_name_match_list=None, reporter=None):
|
||||
self._queue_name = queue_name
|
||||
self._reporter = reporter
|
||||
self._transaction_factory = transaction_factory
|
||||
|
||||
if canonical_name_match_list is None:
|
||||
self._canonical_name_match_list = []
|
||||
|
@ -55,7 +53,7 @@ class WorkQueue(object):
|
|||
self._reporter(running, total_jobs)
|
||||
|
||||
def update_metrics(self):
|
||||
with transaction_factory(db):
|
||||
with self._transaction_factory(db):
|
||||
self._report_queue_metrics()
|
||||
|
||||
def put(self, canonical_name_list, message, available_after=0, retries_remaining=5):
|
||||
|
@ -74,7 +72,7 @@ class WorkQueue(object):
|
|||
available_date = datetime.now() + timedelta(seconds=available_after)
|
||||
params['available_after'] = available_date
|
||||
|
||||
with transaction_factory(db):
|
||||
with self._transaction_factory(db):
|
||||
QueueItem.create(**params)
|
||||
self._report_queue_metrics()
|
||||
|
||||
|
@ -87,7 +85,7 @@ class WorkQueue(object):
|
|||
|
||||
name_match_query = self._name_match_query()
|
||||
|
||||
with transaction_factory(db):
|
||||
with self._transaction_factory(db):
|
||||
running = self._running_jobs(now, name_match_query)
|
||||
|
||||
avail = QueueItem.select().where(QueueItem.queue_name ** name_match_query,
|
||||
|
@ -113,12 +111,12 @@ class WorkQueue(object):
|
|||
return item
|
||||
|
||||
def complete(self, completed_item):
|
||||
with transaction_factory(db):
|
||||
with self._transaction_factory(db):
|
||||
completed_item.delete_instance()
|
||||
self._report_queue_metrics()
|
||||
|
||||
def incomplete(self, incomplete_item, retry_after=300, restore_retry=False):
|
||||
with transaction_factory(db):
|
||||
with self._transaction_factory(db):
|
||||
retry_date = datetime.now() + timedelta(seconds=retry_after)
|
||||
incomplete_item.available_after = retry_date
|
||||
incomplete_item.available = True
|
||||
|
|
144
data/users.py
Normal file
144
data/users.py
Normal file
|
@ -0,0 +1,144 @@
|
|||
import ldap
|
||||
import logging
|
||||
|
||||
from util.validation import generate_valid_usernames
|
||||
from data import model
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class DatabaseUsers(object):
  """ Authentication backend that checks credentials directly against the
      users stored in our own database. """

  def verify_user(self, username_or_email, password):
    """ Simply delegate credential verification to the model implementation. """
    return model.verify_user(username_or_email, password)

  def user_exists(self, username):
    """ Return True if a user with the given username exists in the database. """
    found = model.get_user(username)
    return found is not None
|
||||
|
||||
|
||||
class LDAPConnection(object):
  """ Context manager wrapping a bound LDAP connection.

      On entry it initializes a connection to the configured URI and performs
      a simple bind with the given DN and password; on exit it unbinds.
      The bound connection object is what the `with` statement yields. """

  def __init__(self, ldap_uri, user_dn, user_pw):
    self._ldap_uri = ldap_uri
    self._user_dn = user_dn
    self._user_pw = user_pw
    self._conn = None  # populated on __enter__

  def __enter__(self):
    # simple_bind_s raises (e.g. ldap.INVALID_CREDENTIALS) if the bind fails,
    # which callers rely on for password verification.
    self._conn = ldap.initialize(self._ldap_uri)
    self._conn.simple_bind_s(self._user_dn, self._user_pw)
    return self._conn

  def __exit__(self, exc_type, value, tb):
    self._conn.unbind_s()
|
||||
|
||||
|
||||
class LDAPUsers(object):
  """ Authentication backend that validates credentials against an LDAP
      directory, lazily creating or updating the matching user row in our
      own database on each successful login. """

  def __init__(self, ldap_uri, base_dn, admin_dn, admin_passwd, user_rdn, uid_attr, email_attr):
    self._ldap_conn = LDAPConnection(ldap_uri, admin_dn, admin_passwd)
    self._ldap_uri = ldap_uri
    self._base_dn = base_dn
    self._user_rdn = user_rdn
    self._uid_attr = uid_attr
    self._email_attr = email_attr

  @staticmethod
  def _escape_filter(value):
    """ Escape the special characters of an LDAP search filter assertion
        value (RFC 4515: \\, *, (, ) and NUL) so that user-supplied input
        cannot inject additional filter terms. Backslash must be escaped
        first so the escape sequences themselves are not re-escaped. """
    for char, escaped in (('\\', '\\5c'), ('*', '\\2a'), ('(', '\\28'),
                          (')', '\\29'), ('\x00', '\\00')):
      value = value.replace(char, escaped)
    return value

  def _ldap_user_search(self, username_or_email):
    """ Search the directory for an entry whose uid or email attribute equals
        the given value. Returns the single matching (dn, attributes) tuple,
        or None when there is not exactly one match. """
    with self._ldap_conn as conn:
      logger.debug('Incoming username or email param: %s', username_or_email.__repr__())
      user_search_dn = ','.join(self._user_rdn + self._base_dn)
      # Escape the user-controlled value before splicing it into the filter
      # to prevent LDAP filter injection.
      escaped_input = self._escape_filter(username_or_email)
      query = u'(|({0}={2})({1}={2}))'.format(self._uid_attr, self._email_attr,
                                             escaped_input)
      user = conn.search_s(user_search_dn, ldap.SCOPE_SUBTREE, query.encode('utf-8'))

      if len(user) != 1:
        return None

      return user[0]

  def verify_user(self, username_or_email, password):
    """ Verify the credentials with LDAP and if they are valid, create or update
        the user in our database. Returns the database user on success and None
        on any failure (unknown user, ambiguous match, or bad password). """

    # Make sure that even if the server supports anonymous binds, we don't allow it
    if not password:
      return None

    found_user = self._ldap_user_search(username_or_email)

    if found_user is None:
      return None

    found_dn, found_response = found_user

    # First validate the password by binding as the user
    try:
      with LDAPConnection(self._ldap_uri, found_dn, password.encode('utf-8')):
        pass
    except ldap.INVALID_CREDENTIALS:
      return None

    # Now check if we have a federated login for this user
    username = found_response[self._uid_attr][0].decode('utf-8')
    # NOTE(review): the email attribute is stored as raw LDAP bytes here,
    # unlike the uid which is decoded -- confirm whether it should be decoded too.
    email = found_response[self._email_attr][0]
    db_user = model.verify_federated_login('ldap', username)

    if not db_user:
      # We must create the user in our db. Pick the first generated candidate
      # that is not already taken. (The previous code reused the loop variable
      # and could fall through with a non-unique name if every candidate was
      # taken; the explicit sentinel below avoids that.)
      valid_username = None
      for candidate_username in generate_valid_usernames(username):
        if model.is_username_unique(candidate_username):
          valid_username = candidate_username
          break

      if valid_username is None:
        logger.error('Unable to pick a username for user: %s', username)
        return None

      db_user = model.create_user(valid_username, None, email, add_change_pw_notification=False)
      db_user.verified = True
      model.attach_federated_login(db_user, 'ldap', username)
    else:
      # Update the db attributes from ldap
      db_user.email = email

    db_user.save()

    return db_user

  def user_exists(self, username):
    """ Return True if the directory contains exactly one entry for username. """
    found_user = self._ldap_user_search(username)
    return found_user is not None
|
||||
|
||||
|
||||
class UserAuthentication(object):
  """ Flask-style extension that selects an authentication backend (database
      or LDAP) from the application's AUTHENTICATION_TYPE config value and
      proxies attribute access through to the chosen backend. """

  def __init__(self, app=None):
    self.app = app
    self.state = self.init_app(app) if app is not None else None

  def init_app(self, app):
    """ Build the backend for `app`, register it under app.extensions, and
        return it. Raises RuntimeError for an unrecognized backend name. """
    authentication_type = app.config.get('AUTHENTICATION_TYPE', 'Database')

    if authentication_type == 'Database':
      users = DatabaseUsers()
    elif authentication_type == 'LDAP':
      # Pull each LDAP setting (with its default) out of the app config.
      ldap_args = [app.config.get(key, default) for key, default in (
          ('LDAP_URI', 'ldap://localhost'),
          ('LDAP_BASE_DN', None),
          ('LDAP_ADMIN_DN', None),
          ('LDAP_ADMIN_PASSWD', None),
          ('LDAP_USER_RDN', []),
          ('LDAP_UID_ATTR', 'uid'),
          ('LDAP_EMAIL_ATTR', 'mail'),
      )]
      users = LDAPUsers(*ldap_args)
    else:
      raise RuntimeError('Unknown authentication type: %s' % authentication_type)

    # register extension with app
    app.extensions = getattr(app, 'extensions', {})
    app.extensions['authentication'] = users
    return users

  def __getattr__(self, name):
    # Delegate unknown attribute lookups to the configured backend; yields
    # None (rather than raising) when no backend is configured yet.
    return getattr(self.state, name, None)
|
|
@ -5,7 +5,7 @@ from flask import request
|
|||
from flask.ext.login import logout_user
|
||||
from flask.ext.principal import identity_changed, AnonymousIdentity
|
||||
|
||||
from app import app, billing as stripe
|
||||
from app import app, billing as stripe, authentication
|
||||
from endpoints.api import (ApiResource, nickname, resource, validate_json_request, request_error,
|
||||
log_action, internal_only, NotFound, require_user_admin,
|
||||
InvalidToken, require_scope, format_date, hide_if, show_if)
|
||||
|
@ -227,7 +227,7 @@ def conduct_signin(username_or_email, password):
|
|||
needs_email_verification = False
|
||||
invalid_credentials = False
|
||||
|
||||
verified = model.verify_user(username_or_email, password)
|
||||
verified = authentication.verify_user(username_or_email, password)
|
||||
if verified:
|
||||
if common_login(verified):
|
||||
return {'success': True}
|
||||
|
@ -289,7 +289,7 @@ class ConvertToOrganization(ApiResource):
|
|||
|
||||
# Ensure that the sign in credentials work.
|
||||
admin_password = convert_data['adminPassword']
|
||||
if not model.verify_user(admin_username, admin_password):
|
||||
if not authentication.verify_user(admin_username, admin_password):
|
||||
raise request_error(reason='invaliduser',
|
||||
message='The admin user credentials are not valid')
|
||||
|
||||
|
|
|
@ -16,6 +16,7 @@ from endpoints.api.discovery import swagger_route_data
|
|||
from werkzeug.routing import BaseConverter
|
||||
from functools import wraps
|
||||
from config import getFrontendVisibleConfig
|
||||
from external_libraries import get_external_javascript, get_external_css
|
||||
|
||||
import features
|
||||
|
||||
|
@ -146,7 +147,12 @@ def render_page_template(name, **kwargs):
|
|||
main_scripts = ['dist/quay-frontend.min.js']
|
||||
cache_buster = random_string()
|
||||
|
||||
external_styles = get_external_css(local=not app.config.get('USE_CDN', True))
|
||||
external_scripts = get_external_javascript(local=not app.config.get('USE_CDN', True))
|
||||
|
||||
resp = make_response(render_template(name, route_data=json.dumps(get_route_data()),
|
||||
external_styles=external_styles,
|
||||
external_scripts=external_scripts,
|
||||
main_styles=main_styles,
|
||||
library_styles=library_styles,
|
||||
main_scripts=main_scripts,
|
||||
|
|
|
@ -8,7 +8,7 @@ from collections import OrderedDict
|
|||
|
||||
from data import model
|
||||
from data.model import oauth
|
||||
from app import analytics, app, webhook_queue
|
||||
from app import analytics, app, webhook_queue, authentication
|
||||
from auth.auth import process_auth
|
||||
from auth.auth_context import get_authenticated_user, get_validated_token, get_validated_oauth_token
|
||||
from util.names import parse_repository_name
|
||||
|
@ -94,9 +94,8 @@ def create_user():
|
|||
abort(400, 'Invalid robot account or password.',
|
||||
issue='robot-login-failure')
|
||||
|
||||
existing_user = model.get_user(username)
|
||||
if existing_user:
|
||||
verified = model.verify_user(username, password)
|
||||
if authentication.user_exists(username):
|
||||
verified = authentication.verify_user(username, password)
|
||||
if verified:
|
||||
# Mark that the user was logged in.
|
||||
event = app.config['USER_EVENTS'].get_event(username)
|
||||
|
|
77
external_libraries.py
Normal file
77
external_libraries.py
Normal file
|
@ -0,0 +1,77 @@
|
|||
import urllib2
|
||||
import re
|
||||
import os
|
||||
|
||||
# Directory (under the static root) where mirrored copies of the external
# libraries are written by the __main__ download script below.
LOCAL_DIRECTORY = 'static/ldn/'

# Protocol-less CDN URLs for the frontend JavaScript dependencies; they are
# served as '//host/path' (scheme-relative) or mirrored locally.
EXTERNAL_JS = [
  'code.jquery.com/jquery.js',
  'netdna.bootstrapcdn.com/bootstrap/3.0.0/js/bootstrap.min.js',
  'ajax.googleapis.com/ajax/libs/angularjs/1.2.9/angular.min.js',
  'ajax.googleapis.com/ajax/libs/angularjs/1.2.9/angular-route.min.js',
  'ajax.googleapis.com/ajax/libs/angularjs/1.2.9/angular-sanitize.min.js',
  'ajax.googleapis.com/ajax/libs/angularjs/1.2.9/angular-animate.min.js',
  'cdnjs.cloudflare.com/ajax/libs/bootstrap-datepicker/1.2.0/js/bootstrap-datepicker.min.js',
  'cdn.jsdelivr.net/g/bootbox@4.1.0,underscorejs@1.5.2,restangular@1.2.0,d3js@3.3.3,momentjs',
  'cdn.ravenjs.com/1.1.14/jquery,native/raven.min.js',
  'checkout.stripe.com/checkout.js',
]

# Protocol-less CDN URLs for the frontend stylesheet dependencies.
EXTERNAL_CSS = [
  'netdna.bootstrapcdn.com/font-awesome/4.0.3/css/font-awesome.css',
  'netdna.bootstrapcdn.com/bootstrap/3.0.0/css/bootstrap.no-icons.min.css',
  'fonts.googleapis.com/css?family=Droid+Sans:400,700',
]

# Font files referenced by the CSS above; mirrored into static/fonts/ by the
# __main__ script (not into LOCAL_DIRECTORY).
EXTERNAL_FONTS = [
  'netdna.bootstrapcdn.com/font-awesome/4.0.3/fonts/fontawesome-webfont.woff?v=4.0.3',
  'netdna.bootstrapcdn.com/font-awesome/4.0.3/fonts/fontawesome-webfont.ttf?v=4.0.3',
  'netdna.bootstrapcdn.com/font-awesome/4.0.3/fonts/fontawesome-webfont.svg?v=4.0.3',
]
|
||||
|
||||
|
||||
def get_external_javascript(local=False):
  """ Return the list of script URLs for the frontend: scheme-relative CDN
      URLs by default, or paths to the mirrored copies under LOCAL_DIRECTORY
      when local is True. """
  if not local:
    return ['//' + src for src in EXTERNAL_JS]

  return [LOCAL_DIRECTORY + format_local_name(src) for src in EXTERNAL_JS]
|
||||
|
||||
|
||||
def get_external_css(local=False):
  """ Return the list of stylesheet URLs for the frontend: scheme-relative
      CDN URLs by default, or paths to the mirrored copies under
      LOCAL_DIRECTORY when local is True. """
  if not local:
    return ['//' + src for src in EXTERNAL_CSS]

  return [LOCAL_DIRECTORY + format_local_name(src) for src in EXTERNAL_CSS]
|
||||
|
||||
|
||||
def format_local_name(url):
  """ Derive the on-disk filename for a mirrored external library URL.

      Takes the last path segment of the URL, strips the characters
      '+,?@=:' (query-string and version punctuation), and appends a
      '.css' or '.js' extension when one is missing -- '.css' if the name
      mentions css, '.js' otherwise. """
  basename = url.rsplit('/', 1)[-1]
  basename = re.sub(r'[+,?@=:]', '', basename)
  if not basename.endswith(('.css', '.js')):
    extension = '.css' if 'css' in basename else '.js'
    basename += extension
  return basename
|
||||
|
||||
|
||||
if __name__ == '__main__':
  # Mirror every external JS/CSS library into LOCAL_DIRECTORY so the app can
  # serve them without a CDN (see get_external_javascript/get_external_css
  # with local=True). Python 2 script: uses urllib2 and print statements.
  for url in EXTERNAL_JS + EXTERNAL_CSS:
    print 'Downloading %s' % url
    # Always fetch over https even though the stored URLs are scheme-less.
    response = urllib2.urlopen('https://' + url)
    contents = response.read()

    # Store under the same sanitized name the local-serving helpers compute.
    filename = format_local_name(url)
    print 'Writing %s' % filename
    with open(LOCAL_DIRECTORY + filename, 'w') as f:
      f.write(contents)

  # Fonts keep their original basename (minus the query string) and are
  # written in binary mode into static/fonts/, where the CSS expects them.
  for url in EXTERNAL_FONTS:
    print 'Downloading %s' % url
    response = urllib2.urlopen('https://' + url)

    filename = os.path.basename(url).split('?')[0]
    with open('static/fonts/' + filename, "wb") as local_file:
      local_file.write(response.read())
|
|
@ -148,7 +148,7 @@ def setup_database_for_testing(testcase):
|
|||
|
||||
# Sanity check to make sure we're not killing our prod db
|
||||
db = model.db
|
||||
if not isinstance(model.db, SqliteDatabase):
|
||||
if not isinstance(model.db.obj, SqliteDatabase):
|
||||
raise RuntimeError('Attempted to wipe production database!')
|
||||
|
||||
global db_initialized_for_testing
|
||||
|
@ -181,6 +181,7 @@ def initialize_database():
|
|||
Visibility.create(name='private')
|
||||
LoginService.create(name='github')
|
||||
LoginService.create(name='quayrobot')
|
||||
LoginService.create(name='ldap')
|
||||
|
||||
BuildTriggerService.create(name='github')
|
||||
|
||||
|
@ -241,7 +242,7 @@ def wipe_database():
|
|||
|
||||
# Sanity check to make sure we're not killing our prod db
|
||||
db = model.db
|
||||
if not isinstance(model.db, SqliteDatabase):
|
||||
if not isinstance(model.db.obj, SqliteDatabase):
|
||||
raise RuntimeError('Attempted to wipe production database!')
|
||||
|
||||
drop_model_tables(all_models, fail_silently=True)
|
||||
|
|
|
@ -32,3 +32,5 @@ python-magic
|
|||
reportlab==2.7
|
||||
blinker
|
||||
raven
|
||||
python-ldap
|
||||
unidecode
|
||||
|
|
|
@ -12,6 +12,7 @@ PyGithub==1.24.1
|
|||
PyMySQL==0.6.2
|
||||
PyPDF2==1.21
|
||||
SQLAlchemy==0.9.4
|
||||
Unidecode==0.04.16
|
||||
Werkzeug==0.9.4
|
||||
alembic==0.6.4
|
||||
aniso8601==0.82
|
||||
|
@ -40,6 +41,7 @@ pycrypto==2.6.1
|
|||
python-daemon==1.6
|
||||
python-dateutil==2.2
|
||||
python-digitalocean==0.7
|
||||
python-ldap==2.4.15
|
||||
python-magic==0.4.6
|
||||
pytz==2014.2
|
||||
raven==4.2.1
|
||||
|
|
|
@ -11,9 +11,9 @@
|
|||
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
|
||||
<link rel="stylesheet" href="//netdna.bootstrapcdn.com/font-awesome/4.0.3/css/font-awesome.css">
|
||||
<link rel="stylesheet" href="//netdna.bootstrapcdn.com/bootstrap/3.0.0/css/bootstrap.no-icons.min.css">
|
||||
<link href='//fonts.googleapis.com/css?family=Droid+Sans:400,700' rel='stylesheet' type='text/css'>
|
||||
{% for style_url in external_styles %}
|
||||
<link rel="stylesheet" href="{{ style_url }}" type="text/css">
|
||||
{% endfor %}
|
||||
|
||||
<!-- Icons -->
|
||||
<link rel="shortcut icon" href="/static/img/favicon.ico" type="image/x-icon" />
|
||||
|
@ -47,20 +47,9 @@
|
|||
window.__token = '{{ csrf_token() }}';
|
||||
</script>
|
||||
|
||||
<script src="//code.jquery.com/jquery.js"></script>
|
||||
<script src="//netdna.bootstrapcdn.com/bootstrap/3.0.0/js/bootstrap.min.js"></script>
|
||||
|
||||
<script src="//ajax.googleapis.com/ajax/libs/angularjs/1.2.9/angular.min.js"></script>
|
||||
<script src="//ajax.googleapis.com/ajax/libs/angularjs/1.2.9/angular-route.min.js"></script>
|
||||
<script src="//ajax.googleapis.com/ajax/libs/angularjs/1.2.9/angular-sanitize.min.js"></script>
|
||||
<script src="//ajax.googleapis.com/ajax/libs/angularjs/1.2.9/angular-animate.min.js"></script>
|
||||
|
||||
<script src="//cdnjs.cloudflare.com/ajax/libs/bootstrap-datepicker/1.2.0/js/bootstrap-datepicker.min.js"></script>
|
||||
|
||||
<script src="//cdn.jsdelivr.net/g/bootbox@4.1.0,underscorejs@1.5.2,restangular@1.2.0,d3js@3.3.3,momentjs"></script>
|
||||
<script src="//cdn.ravenjs.com/1.1.14/jquery,native/raven.min.js"></script>
|
||||
|
||||
<script src="https://checkout.stripe.com/checkout.js"></script>
|
||||
{% for script_url in external_scripts %}
|
||||
<script src="{{ script_url }}"></script>
|
||||
{% endfor %}
|
||||
|
||||
{% for script_path in library_scripts %}
|
||||
<script src="/static/{{ script_path }}?v={{ cache_buster }}"></script>
|
||||
|
|
Binary file not shown.
|
@ -36,6 +36,9 @@ from endpoints.api.repository import RepositoryList, RepositoryVisibility, Repos
|
|||
from endpoints.api.permission import (RepositoryUserPermission, RepositoryTeamPermission,
|
||||
RepositoryTeamPermissionList, RepositoryUserPermissionList)
|
||||
|
||||
from endpoints.api.superuser import SuperUserLogs, SeatUsage, SuperUserList, SuperUserManagement
|
||||
|
||||
|
||||
try:
|
||||
app.register_blueprint(api_bp, url_prefix='/api')
|
||||
except ValueError:
|
||||
|
@ -3275,5 +3278,87 @@ class TestUserAuthorization(ApiTestCase):
|
|||
self._run_test('DELETE', 404, 'devtable', None)
|
||||
|
||||
|
||||
class TestSuperUserLogs(ApiTestCase):
|
||||
def setUp(self):
|
||||
ApiTestCase.setUp(self)
|
||||
self._set_url(SuperUserLogs)
|
||||
|
||||
def test_get_anonymous(self):
|
||||
self._run_test('GET', 403, None, None)
|
||||
|
||||
def test_get_freshuser(self):
|
||||
self._run_test('GET', 403, 'freshuser', None)
|
||||
|
||||
def test_get_reader(self):
|
||||
self._run_test('GET', 403, 'reader', None)
|
||||
|
||||
def test_get_devtable(self):
|
||||
self._run_test('GET', 200, 'devtable', None)
|
||||
|
||||
|
||||
class TestSuperUserList(ApiTestCase):
|
||||
def setUp(self):
|
||||
ApiTestCase.setUp(self)
|
||||
self._set_url(SuperUserList)
|
||||
|
||||
def test_get_anonymous(self):
|
||||
self._run_test('GET', 403, None, None)
|
||||
|
||||
def test_get_freshuser(self):
|
||||
self._run_test('GET', 403, 'freshuser', None)
|
||||
|
||||
def test_get_reader(self):
|
||||
self._run_test('GET', 403, 'reader', None)
|
||||
|
||||
def test_get_devtable(self):
|
||||
self._run_test('GET', 200, 'devtable', None)
|
||||
|
||||
|
||||
|
||||
class TestSuperUserManagement(ApiTestCase):
|
||||
def setUp(self):
|
||||
ApiTestCase.setUp(self)
|
||||
self._set_url(SuperUserManagement, username='freshuser')
|
||||
|
||||
def test_get_anonymous(self):
|
||||
self._run_test('GET', 403, None, None)
|
||||
|
||||
def test_get_freshuser(self):
|
||||
self._run_test('GET', 403, 'freshuser', None)
|
||||
|
||||
def test_get_reader(self):
|
||||
self._run_test('GET', 403, 'reader', None)
|
||||
|
||||
def test_get_devtable(self):
|
||||
self._run_test('GET', 200, 'devtable', None)
|
||||
|
||||
|
||||
def test_put_anonymous(self):
|
||||
self._run_test('PUT', 403, None, {})
|
||||
|
||||
def test_put_freshuser(self):
|
||||
self._run_test('PUT', 403, 'freshuser', {})
|
||||
|
||||
def test_put_reader(self):
|
||||
self._run_test('PUT', 403, 'reader', {})
|
||||
|
||||
def test_put_devtable(self):
|
||||
self._run_test('PUT', 200, 'devtable', {})
|
||||
|
||||
|
||||
def test_delete_anonymous(self):
|
||||
self._run_test('DELETE', 403, None, None)
|
||||
|
||||
def test_delete_freshuser(self):
|
||||
self._run_test('DELETE', 403, 'freshuser', None)
|
||||
|
||||
def test_delete_reader(self):
|
||||
self._run_test('DELETE', 403, 'reader', None)
|
||||
|
||||
def test_delete_devtable(self):
|
||||
self._run_test('DELETE', 204, 'devtable', None)
|
||||
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
|
|
|
@ -38,6 +38,7 @@ from endpoints.api.organization import (OrganizationList, OrganizationMember,
|
|||
from endpoints.api.repository import RepositoryList, RepositoryVisibility, Repository
|
||||
from endpoints.api.permission import (RepositoryUserPermission, RepositoryTeamPermission,
|
||||
RepositoryTeamPermissionList, RepositoryUserPermissionList)
|
||||
from endpoints.api.superuser import SuperUserLogs, SeatUsage, SuperUserList, SuperUserManagement
|
||||
|
||||
try:
|
||||
app.register_blueprint(api_bp, url_prefix='/api')
|
||||
|
@ -1939,5 +1940,66 @@ class TestUserAuthorizations(ApiTestCase):
|
|||
self.getJsonResponse(UserAuthorization, params=dict(access_token_uuid = authorization['uuid']),
|
||||
expected_code=404)
|
||||
|
||||
|
||||
class TestSuperUserLogs(ApiTestCase):
|
||||
def test_get_logs(self):
|
||||
self.login(ADMIN_ACCESS_USER)
|
||||
|
||||
json = self.getJsonResponse(SuperUserLogs)
|
||||
|
||||
assert 'logs' in json
|
||||
assert len(json['logs']) > 0
|
||||
|
||||
|
||||
class TestSuperUserList(ApiTestCase):
|
||||
def test_get_users(self):
|
||||
self.login(ADMIN_ACCESS_USER)
|
||||
|
||||
json = self.getJsonResponse(SuperUserList)
|
||||
|
||||
assert 'users' in json
|
||||
assert len(json['users']) > 0
|
||||
|
||||
|
||||
class TestSuperUserManagement(ApiTestCase):
|
||||
def test_get_user(self):
|
||||
self.login(ADMIN_ACCESS_USER)
|
||||
|
||||
json = self.getJsonResponse(SuperUserManagement, params=dict(username = 'freshuser'))
|
||||
self.assertEquals('freshuser', json['username'])
|
||||
self.assertEquals('no@thanks.com', json['email'])
|
||||
self.assertEquals(False, json['super_user'])
|
||||
|
||||
def test_delete_user(self):
|
||||
self.login(ADMIN_ACCESS_USER)
|
||||
|
||||
# Verify the user exists.
|
||||
json = self.getJsonResponse(SuperUserManagement, params=dict(username = 'freshuser'))
|
||||
self.assertEquals('freshuser', json['username'])
|
||||
|
||||
# Delete the user.
|
||||
self.deleteResponse(SuperUserManagement, params=dict(username = 'freshuser'), expected_code=204)
|
||||
|
||||
# Verify the user no longer exists.
|
||||
self.getResponse(SuperUserManagement, params=dict(username = 'freshuser'), expected_code=404)
|
||||
|
||||
|
||||
def test_update_user(self):
|
||||
self.login(ADMIN_ACCESS_USER)
|
||||
|
||||
# Verify the user exists.
|
||||
json = self.getJsonResponse(SuperUserManagement, params=dict(username = 'freshuser'))
|
||||
self.assertEquals('freshuser', json['username'])
|
||||
self.assertEquals('no@thanks.com', json['email'])
|
||||
|
||||
# Update the user.
|
||||
self.putJsonResponse(SuperUserManagement, params=dict(username='freshuser'), data=dict(email='foo@bar.com'))
|
||||
|
||||
# Verify the user was updated.
|
||||
json = self.getJsonResponse(SuperUserManagement, params=dict(username = 'freshuser'))
|
||||
self.assertEquals('freshuser', json['username'])
|
||||
self.assertEquals('foo@bar.com', json['email'])
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
||||
|
|
|
@ -2,6 +2,7 @@ import unittest
|
|||
import json
|
||||
import time
|
||||
|
||||
from app import app
|
||||
from initdb import setup_database_for_testing, finished_database_for_testing
|
||||
from data.queue import WorkQueue
|
||||
|
||||
|
@ -25,7 +26,8 @@ class QueueTestCase(unittest.TestCase):
|
|||
|
||||
def setUp(self):
|
||||
self.reporter = SaveLastCountReporter()
|
||||
self.queue = WorkQueue(QUEUE_NAME, reporter=self.reporter)
|
||||
self.transaction_factory = app.config['DB_TRANSACTION_FACTORY']
|
||||
self.queue = WorkQueue(QUEUE_NAME, self.transaction_factory, reporter=self.reporter)
|
||||
setup_database_for_testing(self)
|
||||
|
||||
def tearDown(self):
|
||||
|
@ -118,7 +120,7 @@ class TestQueue(QueueTestCase):
|
|||
self.queue.put(['abc', 'def'], self.TEST_MESSAGE_1)
|
||||
self.queue.put(['def', 'def'], self.TEST_MESSAGE_2)
|
||||
|
||||
my_queue = WorkQueue(QUEUE_NAME, ['def'])
|
||||
my_queue = WorkQueue(QUEUE_NAME, self.transaction_factory, ['def'])
|
||||
|
||||
two = my_queue.get()
|
||||
self.assertNotEqual(None, two)
|
||||
|
|
|
@ -29,3 +29,6 @@ class TestConfig(DefaultConfig):
|
|||
'deadbeef-dead-beef-dead-beefdeadbeef']
|
||||
|
||||
USERFILES_TYPE = 'FakeUserfiles'
|
||||
|
||||
FEATURE_SUPER_USERS = True
|
||||
SUPER_USERS = ['devtable']
|
||||
|
|
|
@ -1,7 +1,16 @@
|
|||
import re
|
||||
import string
|
||||
|
||||
from unidecode import unidecode
|
||||
|
||||
|
||||
INVALID_PASSWORD_MESSAGE = 'Invalid password, password must be at least ' + \
|
||||
'8 characters and contain no whitespace.'
|
||||
INVALID_USERNAME_CHARACTERS = r'[^a-z0-9_]'
|
||||
VALID_CHARACTERS = '_' + string.digits + string.lowercase
|
||||
MIN_LENGTH = 4
|
||||
MAX_LENGTH = 30
|
||||
|
||||
|
||||
def validate_email(email_address):
|
||||
if re.match(r'[^@]+@[^@]+\.[^@]+', email_address):
|
||||
|
@ -11,13 +20,14 @@ def validate_email(email_address):
|
|||
|
||||
def validate_username(username):
|
||||
# Based off the restrictions defined in the Docker Registry API spec
|
||||
regex_match = (re.search(r'[^a-z0-9_]', username) is None)
|
||||
regex_match = (re.search(INVALID_USERNAME_CHARACTERS, username) is None)
|
||||
if not regex_match:
|
||||
return (False, 'Username must match expression [a-z0-9_]+')
|
||||
|
||||
length_match = (len(username) >= 4 and len(username) <= 30)
|
||||
length_match = (len(username) >= MIN_LENGTH and len(username) <= MAX_LENGTH)
|
||||
if not length_match:
|
||||
return (False, 'Username must be between 4 and 30 characters in length')
|
||||
return (False, 'Username must be between %s and %s characters in length' %
|
||||
(MIN_LENGTH, MAX_LENGTH))
|
||||
|
||||
return (True, '')
|
||||
|
||||
|
@ -27,3 +37,24 @@ def validate_password(password):
|
|||
if re.search(r'\s', password):
|
||||
return False
|
||||
return len(password) > 7
|
||||
|
||||
|
||||
def _gen_filler_chars(num_filler_chars):
|
||||
if num_filler_chars == 0:
|
||||
yield ''
|
||||
else:
|
||||
for char in VALID_CHARACTERS:
|
||||
for suffix in _gen_filler_chars(num_filler_chars - 1):
|
||||
yield char + suffix
|
||||
|
||||
|
||||
def generate_valid_usernames(input_username):
|
||||
normalized = unidecode(input_username).strip().lower()
|
||||
prefix = re.sub(INVALID_USERNAME_CHARACTERS, '_', normalized)[:30]
|
||||
|
||||
num_filler_chars = max(0, MIN_LENGTH - len(prefix))
|
||||
|
||||
while num_filler_chars + len(prefix) <= MAX_LENGTH:
|
||||
for suffix in _gen_filler_chars(num_filler_chars):
|
||||
yield prefix + suffix
|
||||
num_filler_chars += 1
|
||||
|
|
Reference in a new issue