Merge branch 'master' into touchdown
Conflicts:
	static/js/app.js
	static/partials/organizations.html
	test/data/test.db
commit c630d7e948
65 changed files with 1843 additions and 273 deletions
3  .gitignore (vendored)
@@ -7,3 +7,6 @@ grunt/node_modules
 dist
 dest
 node_modules
+static/ldn
+static/fonts
+stack_local
@@ -8,6 +8,8 @@ RUN apt-get install -y git python-virtualenv python-dev libjpeg8 libjpeg62-dev l
 ### End common section ###
 
+RUN apt-get install -y libldap2-dev libsasl2-dev
+
 RUN apt-get install -y lxc aufs-tools
 
 RUN usermod -v 100000-200000 -w 100000-200000 root
@@ -17,6 +17,9 @@ RUN apt-get install -y nodejs npm
 RUN ln -s /usr/bin/nodejs /usr/bin/node
 RUN npm install -g grunt-cli
 
+# LDAP
+RUN apt-get install -y libldap2-dev libsasl2-dev
+
 ADD binary_dependencies binary_dependencies
 RUN gdebi --n binary_dependencies/*.deb
 
@@ -26,41 +29,60 @@ ADD requirements.txt requirements.txt
 RUN virtualenv --distribute venv
 RUN venv/bin/pip install -r requirements.txt
 
+# Add the static assets and run grunt
+ADD grunt grunt
+ADD static static
+RUN cd grunt && npm install
+RUN cd grunt && grunt
+
 # Add the backend assets
 ADD auth auth
 ADD buildstatus buildstatus
-ADD conf conf
 ADD data data
 ADD endpoints endpoints
 ADD features features
-ADD grunt grunt
 ADD screenshots screenshots
-ADD static static
 ADD storage storage
 ADD templates templates
 ADD util util
 ADD workers workers
 
 ADD license.pyc license.pyc
 ADD app.py app.py
 ADD application.py application.py
 ADD config.py config.py
 ADD initdb.py initdb.py
 ADD external_libraries.py external_libraries.py
 ADD alembic.ini alembic.ini
 
+# Add the config
+ADD conf conf
+
+# This command must be rm -f (not -rf) to fail in case stack is ever a dir,
+# which may contain secrets
+RUN rm -f /conf/stack
+
 ADD conf/init/svlogd_config /svlogd_config
 ADD conf/init/preplogsdir.sh /etc/my_init.d/
+ADD conf/init/runmigration.sh /etc/my_init.d/
 
 ADD conf/init/gunicorn /etc/service/gunicorn
 ADD conf/init/nginx /etc/service/nginx
 ADD conf/init/diffsworker /etc/service/diffsworker
+ADD conf/init/webhookworker /etc/service/webhookworker
 
-RUN cd grunt && npm install
-RUN cd grunt && grunt
+# Download any external libs.
+RUN mkdir static/fonts
+RUN mkdir static/ldn
 
 RUN venv/bin/python -m external_libraries
 
 # Add the tests last because they're prone to accidental changes, then run them
 ADD test test
 RUN TEST=true venv/bin/python -m unittest discover
 
-RUN rm -rf /conf/stack
-VOLUME ["/conf/stack", "/var/log"]
+VOLUME ["/conf/stack", "/var/log", "/datastorage"]
 
 EXPOSE 443 80
47  README.md
@@ -13,6 +13,11 @@ sudo docker push quay.io/quay/quay
 to prepare a new host:
 
+Deploy cloud-init script from quayconfig/cloudconfig/webserver.yaml
+
+or
+
+
 ```
 curl -s https://get.docker.io/ubuntu/ | sudo sh
 sudo apt-get update && sudo apt-get install -y git
@@ -21,7 +26,7 @@ cd gantryd
 cat requirements.system | xargs sudo apt-get install -y
 virtualenv --distribute venv
 venv/bin/pip install -r requirements.txt
-sudo docker login -p 9Y1PX7D3IE4KPSGCIALH17EM5V3ZTMP8CNNHJNXAQ2NJGAS48BDH8J1PUOZ869ML -u 'quay+deploy' -e notused staging.quay.io
+sudo docker login -u 'quay+deploy' -e notused staging.quay.io
 ```
 
 start the quay processes:
 
@@ -34,11 +39,45 @@ cd ~/gantryd
 sudo venv/bin/python gantry.py ../quayconfig/production/gantry.json update quay
 ```
 
-start the log shipper (DEPRECATED):
+to build and upload the builder to quay
 
 ```
-sudo docker pull quay.io/quay/logstash
-sudo docker run -d -e REDIS_PORT_6379_TCP_ADDR=logs.quay.io -v /mnt/logs:/mnt/logs quay.io/quay/logstash quay.conf
+curl -s https://get.docker.io/ubuntu/ | sudo sh
+sudo apt-get update && sudo apt-get install -y git
+git clone https://bitbucket.org/yackob03/quay.git
+cd quay
+rm Dockerfile
+ln -s Dockerfile.buildworker Dockerfile
+sudo docker build -t quay.io/quay/builder .
+sudo docker push quay.io/quay/builder
 ```
 
+to run the builder from a fresh 14.04 server:
+
+Deploy cloud-init script from quayconfig/cloudconfig/builder.yaml
+
+or
+
+
+```
+sudo apt-get update && sudo apt-get install -y git lxc linux-image-extra-`uname -r`
+curl -s https://get.docker.io/ubuntu/ | sudo sh
+git clone https://github.com/DevTable/gantryd.git
+cd gantryd
+cat requirements.system | xargs sudo apt-get install -y
+virtualenv --distribute venv
+venv/bin/pip install -r requirements.txt
+sudo docker login -u 'quay+deploy' -e notused quay.io
+```
+
+start the worker
+
+```
+cd ~
+git clone https://bitbucket.org/yackob03/quayconfig.git
+sudo docker pull quay.io/quay/builder
+cd ~/gantryd
+sudo venv/bin/python gantry.py ../quayconfig/production/gantry.json update builder
+```
+
 running the tests:
 
30  app.py
@@ -9,14 +9,24 @@ from flask.ext.mail import Mail
 import features
 
 from storage import Storage
 from data import model
+from data import database
 from data.userfiles import Userfiles
+from data.users import UserAuthentication
 from util.analytics import Analytics
 from util.exceptionlog import Sentry
 from util.queuemetrics import QueueMetrics
+from util.expiration import Expiration
 from data.billing import Billing
 from data.buildlogs import BuildLogs
 from data.queue import WorkQueue
 from data.userevent import UserEventsBuilderModule
+from license import load_license
+from datetime import datetime
 
 
 OVERRIDE_CONFIG_FILENAME = 'conf/stack/config.py'
+LICENSE_FILENAME = 'conf/stack/license.enc'
 
 
 app = Flask(__name__)
@@ -36,6 +46,12 @@ else:
   logger.debug('Applying config file: %s', OVERRIDE_CONFIG_FILENAME)
   app.config.from_pyfile(OVERRIDE_CONFIG_FILENAME)
 
+logger.debug('Applying license config from: %s', LICENSE_FILENAME)
+app.config.update(load_license(LICENSE_FILENAME))
+
+if app.config.get('LICENSE_EXPIRATION', datetime.min) < datetime.utcnow():
+  raise RuntimeError('License has expired, please contact support@quay.io')
+
 features.import_features(app.config)
 
 Principal(app, use_sessions=False)
@@ -48,3 +64,17 @@ analytics = Analytics(app)
 billing = Billing(app)
 sentry = Sentry(app)
 build_logs = BuildLogs(app)
+queue_metrics = QueueMetrics(app)
+authentication = UserAuthentication(app)
+expiration = Expiration(app)
+userevents = UserEventsBuilderModule(app)
+
+tf = app.config['DB_TRANSACTION_FACTORY']
+image_diff_queue = WorkQueue(app.config['DIFFS_QUEUE_NAME'], tf)
+dockerfile_build_queue = WorkQueue(app.config['DOCKERFILE_BUILD_QUEUE_NAME'], tf,
+                                   reporter=queue_metrics.report)
+webhook_queue = WorkQueue(app.config['WEBHOOK_QUEUE_NAME'], tf)
+
+database.configure(app.config)
+model.config.app_config = app.config
+model.config.store = storage
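The queue wiring above passes `reporter=queue_metrics.report` into one of the WorkQueue instances, which implies the queue notifies the metrics module as work arrives. A minimal sketch of that callback pattern (all names illustrative; this is not quay's actual data/queue.py):

```python
# Sketch of a work queue that notifies an optional reporter callback.
from collections import deque

class WorkQueue(object):
  def __init__(self, queue_name, transaction_factory, reporter=None):
    self._name = queue_name
    self._transaction_factory = transaction_factory
    self._reporter = reporter
    self._items = deque()

  def put(self, body):
    self._items.append(body)
    if self._reporter is not None:
      # e.g. QueueMetrics.report, invoked with the current queue depth
      self._reporter(len(self._items))

queue = WorkQueue('dockerfilebuild', None, reporter=lambda depth: None)
queue.put('{"build_uuid": "abc"}')
```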
@@ -11,7 +11,7 @@ import scopes
 
 from data import model
 from data.model import oauth
-from app import app
+from app import app, authentication
 from permissions import QuayDeferredPermissionUser
 from auth_context import (set_authenticated_user, set_validated_token,
                           set_authenticated_user_deferred, set_validated_oauth_token)
@@ -41,7 +41,7 @@ def _validate_and_apply_oauth_token(token):
     }
     abort(401, message='OAuth access token could not be validated: %(token)s',
           issue='invalid-oauth-token', token=token, headers=authenticate_header)
-  elif validated.expires_at <= datetime.now():
+  elif validated.expires_at <= datetime.utcnow():
     logger.info('OAuth access with an expired token: %s', token)
     authenticate_header = {
       'WWW-Authenticate': ('Bearer error="invalid_token", '
@@ -70,7 +70,7 @@ def process_basic_auth(auth):
     logger.debug('Invalid basic auth format.')
     return
 
-  credentials = b64decode(normalized[1]).split(':', 1)
+  credentials = [part.decode('utf-8') for part in b64decode(normalized[1]).split(':', 1)]
 
   if len(credentials) != 2:
     logger.debug('Invalid basic auth credential format.')
@@ -108,7 +108,7 @@ def process_basic_auth(auth):
       logger.debug('Invalid robot or password for robot: %s' % credentials[0])
 
   else:
-    authenticated = model.verify_user(credentials[0], credentials[1])
+    authenticated = authentication.verify_user(credentials[0], credentials[1])
 
     if authenticated:
       logger.debug('Successfully validated user: %s' % authenticated.username)
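The change from a raw `b64decode(...).split(':', 1)` to decoding each part as UTF-8 matters for non-ASCII passwords: under Python 2, `b64decode` returns a byte string, and comparing it against unicode user records can fail or mangle characters. A small standalone illustration (Python 2 semantics, not quay's code):

```python
# -*- coding: utf-8 -*-
# Python 2 sketch: why the credential parts need an explicit UTF-8 decode.
from base64 import b64decode, b64encode

header_payload = b64encode(u'jos\xe9:s3cret'.encode('utf-8'))

raw_parts = b64decode(header_payload).split(':', 1)           # byte strings
decoded_parts = [part.decode('utf-8') for part in raw_parts]  # unicode

assert decoded_parts[0] == u'jos\xe9'
```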
5  conf/init/runmigration.sh (new executable file)
@@ -0,0 +1,5 @@
+#! /bin/bash
+set -e
+
+# Run the database migration
+PYTHONPATH=. venv/bin/alembic upgrade head
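This init script is what applies pending migrations when the container starts. The same upgrade can also be run programmatically; a rough equivalent of `venv/bin/alembic upgrade head`, assuming an `alembic.ini` in the working directory:

```python
# Programmatic equivalent of `alembic upgrade head` (assumes alembic.ini is
# present and PYTHONPATH is set so env.py can import the app).
from alembic import command
from alembic.config import Config

alembic_cfg = Config('alembic.ini')
command.upgrade(alembic_cfg, 'head')
```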
24  config.py
@@ -18,7 +18,7 @@ def build_requests_session():
 # values are set to the frontend, DO NOT PLACE ANY SECRETS OR KEYS in this list.
 CLIENT_WHITELIST = ['SERVER_HOSTNAME', 'PREFERRED_URL_SCHEME', 'GITHUB_CLIENT_ID',
                     'GITHUB_LOGIN_CLIENT_ID', 'MIXPANEL_KEY', 'STRIPE_PUBLISHABLE_KEY',
-                    'ENTERPRISE_LOGO_URL', 'SENTRY_PUBLIC_DSN']
+                    'ENTERPRISE_LOGO_URL', 'SENTRY_PUBLIC_DSN', 'AUTHENTICATION_TYPE']
 
 
 def getFrontendVisibleConfig(config_dict):
@@ -68,15 +68,23 @@ class DefaultConfig(object):
 
   DB_TRANSACTION_FACTORY = create_transaction
 
+  # If true, CDN URLs will be used for our external dependencies, rather than the local
+  # copies.
+  USE_CDN = True
+
   # Data storage
   STORAGE_TYPE = 'LocalStorage'
   STORAGE_PATH = 'test/data/registry'
 
+  # Authentication
+  AUTHENTICATION_TYPE = 'Database'
+
   # Build logs
-  BUILDLOGS_OPTIONS = ['logs.quay.io']
+  BUILDLOGS_REDIS_HOSTNAME = 'logs.quay.io'
+  BUILDLOGS_OPTIONS = []
 
   # Real-time user events
-  USER_EVENTS = UserEventBuilder('logs.quay.io')
+  USER_EVENTS_REDIS_HOSTNAME = 'logs.quay.io'
 
   # Stripe config
   BILLING_TYPE = 'FakeStripe'
@@ -86,7 +94,10 @@ class DefaultConfig(object):
   USERFILES_PATH = 'test/data/registry/userfiles'
 
   # Analytics
-  ANALYTICS_TYPE = "FakeAnalytics"
+  ANALYTICS_TYPE = 'FakeAnalytics'
+
+  # Build Queue Metrics
+  QUEUE_METRICS_TYPE = 'Null'
 
   # Exception logging
   EXCEPTION_LOG_TYPE = 'FakeSentry'
@@ -122,7 +133,7 @@ class DefaultConfig(object):
   SUPER_USERS = []
 
   # Feature Flag: Whether billing is required.
-  FEATURE_BILLING = True
+  FEATURE_BILLING = False
 
   # Feature Flag: Whether user accounts automatically have usage log access.
   FEATURE_USER_LOG_ACCESS = False
@@ -135,3 +146,6 @@ class DefaultConfig(object):
 
   # Feature Flag: Whether super users are supported.
   FEATURE_SUPER_USERS = False
+
+  # Feature Flag: Whether to support GitHub build triggers.
+  FEATURE_GITHUB_BUILD = False
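Adding 'AUTHENTICATION_TYPE' to CLIENT_WHITELIST exposes the new setting to the frontend. The whitelist idea is simple to sketch (illustrative only; quay's actual getFrontendVisibleConfig may differ):

```python
# Illustrative whitelist filter: copy only the keys that are safe to ship
# to the browser. Do not add secrets or keys to the whitelist.
CLIENT_WHITELIST = ['SERVER_HOSTNAME', 'PREFERRED_URL_SCHEME', 'AUTHENTICATION_TYPE']

def frontend_visible_config(config_dict):
  return {key: config_dict[key] for key in CLIENT_WHITELIST
          if key in config_dict}

print(frontend_visible_config({'AUTHENTICATION_TYPE': 'LDAP',
                               'SECRET_KEY': 'never shown'}))
```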
@@ -131,10 +131,10 @@ class FakeStripe(object):
 
   FAKE_SUBSCRIPTION = AttrDict({
     'plan': FAKE_PLAN,
-    'current_period_start': timegm(datetime.now().utctimetuple()),
-    'current_period_end': timegm((datetime.now() + timedelta(days=30)).utctimetuple()),
-    'trial_start': timegm(datetime.now().utctimetuple()),
-    'trial_end': timegm((datetime.now() + timedelta(days=30)).utctimetuple()),
+    'current_period_start': timegm(datetime.utcnow().utctimetuple()),
+    'current_period_end': timegm((datetime.utcnow() + timedelta(days=30)).utctimetuple()),
+    'trial_start': timegm(datetime.utcnow().utctimetuple()),
+    'trial_end': timegm((datetime.utcnow() + timedelta(days=30)).utctimetuple()),
   })
 
   FAKE_CARD = AttrDict({
@@ -142,6 +142,8 @@ class FakeStripe(object):
     'name': 'Joe User',
     'type': 'Visa',
     'last4': '4242',
+    'exp_month': 5,
+    'exp_year': 2016,
   })
 
   FAKE_CARD_LIST = AttrDict({
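The now() → utcnow() swap above is not cosmetic: calendar.timegm interprets its struct_time argument as UTC, so feeding it a local-time tuple shifts the resulting epoch seconds by the host's UTC offset. A quick demonstration:

```python
# timegm assumes UTC; passing local time skews the epoch value by the
# machine's UTC offset (zero only when the host clock is already UTC).
from calendar import timegm
from datetime import datetime

local_epoch = timegm(datetime.now().utctimetuple())
utc_epoch = timegm(datetime.utcnow().utctimetuple())

offset_seconds = local_epoch - utc_epoch
print('local-time skew: %d seconds' % offset_seconds)
```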
@@ -89,6 +89,7 @@ class BuildLogs(object):
     self.state = None
 
   def init_app(self, app):
+    buildlogs_hostname = app.config.get('BUILDLOGS_REDIS_HOSTNAME')
     buildlogs_options = app.config.get('BUILDLOGS_OPTIONS', [])
     buildlogs_import = app.config.get('BUILDLOGS_MODULE_AND_CLASS', None)
 
@@ -97,7 +98,7 @@ class BuildLogs(object):
     else:
       klass = import_class(buildlogs_import[0], buildlogs_import[1])
 
-    buildlogs = klass(*buildlogs_options)
+    buildlogs = klass(buildlogs_hostname, *buildlogs_options)
 
     # register extension with app
     app.extensions = getattr(app, 'extensions', {})
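Threading the new BUILDLOGS_REDIS_HOSTNAME through init_app keeps the Redis location out of the positional options list. The Flask extension pattern used here, reduced to its skeleton (illustrative, not the full BuildLogs class):

```python
# Skeleton of the init_app pattern: the extension object is constructed
# once, then bound to an app (and its config) later.
class RedisBackedExtension(object):
  def __init__(self, app=None):
    self.state = None
    if app is not None:
      self.state = self.init_app(app)

  def init_app(self, app):
    hostname = app.config.get('BUILDLOGS_REDIS_HOSTNAME', 'localhost')
    options = app.config.get('BUILDLOGS_OPTIONS', [])
    # A real implementation would build a Redis client here.
    state = (hostname, options)
    app.extensions = getattr(app, 'extensions', {})
    app.extensions['buildlogs'] = state
    return state

class FakeApp(object):
  config = {'BUILDLOGS_REDIS_HOSTNAME': 'logs.example.com'}

ext = RedisBackedExtension(FakeApp())
```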
@ -8,19 +8,19 @@ from peewee import *
|
|||
from sqlalchemy.engine.url import make_url
|
||||
from urlparse import urlparse
|
||||
|
||||
from app import app
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
SCHEME_DRIVERS = {
|
||||
'mysql': MySQLDatabase,
|
||||
'mysql+pymysql': MySQLDatabase,
|
||||
'sqlite': SqliteDatabase,
|
||||
}
|
||||
|
||||
db = Proxy()
|
||||
|
||||
def generate_db(config_object):
|
||||
def configure(config_object):
|
||||
db_kwargs = dict(config_object['DB_CONNECTION_ARGS'])
|
||||
parsed_url = make_url(config_object['DB_URI'])
|
||||
|
||||
|
@@ -33,10 +33,8 @@ def generate_db(config_object):
 
   if parsed_url.password:
     db_kwargs['passwd'] = parsed_url.password
 
-  return SCHEME_DRIVERS[parsed_url.drivername](parsed_url.database, **db_kwargs)
-
-
-db = generate_db(app.config)
+  real_db = SCHEME_DRIVERS[parsed_url.drivername](parsed_url.database, **db_kwargs)
+  db.initialize(real_db)
 
 
 def random_string_generator(length=16):
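Replacing the import-time `db = generate_db(app.config)` with a Proxy breaks the circular dependency between app.py and data/database.py: the module can be imported before any configuration exists, and configure() points the proxy at a real database later. peewee's Proxy supports exactly this deferred-initialization pattern:

```python
# Deferred database binding with peewee's Proxy (peewee 2.x-era API).
from peewee import Proxy, SqliteDatabase, Model, CharField

db = Proxy()

class BaseModel(Model):
  class Meta:
    database = db  # models can be declared before the DB is chosen

def configure(db_uri):
  # URI parsing is elided; a sqlite path is enough for the sketch.
  real_db = SqliteDatabase(db_uri)
  db.initialize(real_db)

configure(':memory:')

class Visibility(BaseModel):
  name = CharField(index=True, unique=True)

db.create_tables([Visibility])
```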
@@ -119,7 +117,7 @@ class FederatedLogin(BaseModel):
 
 
 class Visibility(BaseModel):
-  name = CharField(index=True)
+  name = CharField(index=True, unique=True)
 
 
 class Repository(BaseModel):
@@ -138,7 +136,7 @@ class Repository(BaseModel):
 
 
 class Role(BaseModel):
-  name = CharField(index=True)
+  name = CharField(index=True, unique=True)
 
 
 class RepositoryPermission(BaseModel):
@@ -191,7 +189,7 @@ class AccessToken(BaseModel):
 
 
 class BuildTriggerService(BaseModel):
-  name = CharField(index=True)
+  name = CharField(index=True, unique=True)
 
 
 class RepositoryBuildTrigger(BaseModel):
@@ -285,7 +283,7 @@ class QueueItem(BaseModel):
 
 
 class LogEntryKind(BaseModel):
-  name = CharField(index=True)
+  name = CharField(index=True, unique=True)
 
 
 class LogEntry(BaseModel):
@@ -332,7 +330,7 @@ class OAuthAccessToken(BaseModel):
 
 
 class NotificationKind(BaseModel):
-  name = CharField(index=True)
+  name = CharField(index=True, unique=True)
 
 
 class Notification(BaseModel):
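The name columns on these enum-like tables (Visibility, Role, BuildTriggerService, LogEntryKind, NotificationKind) now carry unique=True, so peewee emits a unique index instead of a plain one and duplicate names fail at the database layer. A sketch of the difference:

```python
# unique=True turns the index into a uniqueness constraint.
from peewee import SqliteDatabase, Model, CharField, IntegrityError

db = SqliteDatabase(':memory:')

class Role(Model):
  name = CharField(index=True, unique=True)
  class Meta:
    database = db

db.create_tables([Role])
Role.create(name='admin')
try:
  Role.create(name='admin')  # second insert violates the unique index
except IntegrityError:
  print('duplicate role name rejected')
```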
@ -2,15 +2,17 @@ from __future__ import with_statement
|
|||
from alembic import context
|
||||
from sqlalchemy import engine_from_config, pool
|
||||
from logging.config import fileConfig
|
||||
from urllib import unquote
|
||||
from peewee import SqliteDatabase
|
||||
|
||||
from data.database import all_models
|
||||
from data.database import all_models, db
|
||||
from app import app
|
||||
from data.model.sqlalchemybridge import gen_sqlalchemy_metadata
|
||||
|
||||
# this is the Alembic Config object, which provides
|
||||
# access to the values within the .ini file in use.
|
||||
config = context.config
|
||||
config.set_main_option('sqlalchemy.url', app.config['DB_URI'])
|
||||
config.set_main_option('sqlalchemy.url', unquote(app.config['DB_URI']))
|
||||
|
||||
# Interpret the config file for Python logging.
|
||||
# This line sets up loggers basically.
|
||||
|
@@ -39,8 +41,8 @@ def run_migrations_offline():
   script output.
 
   """
-  url = app.config['DB_CONNECTION']
-  context.configure(url=url, target_metadata=target_metadata)
+  url = unquote(app.config['DB_URI'])
+  context.configure(url=url, target_metadata=target_metadata, transactional_ddl=True)
 
   with context.begin_transaction():
     context.run_migrations()
@@ -52,6 +54,11 @@ def run_migrations_online():
   and associate a connection with the context.
 
   """
+
+  if isinstance(db.obj, SqliteDatabase):
+    print ('Skipping Sqlite migration!')
+    return
+
   engine = engine_from_config(
     config.get_section(config.config_ini_section),
     prefix='sqlalchemy.',
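Wrapping DB_URI in unquote matters when credentials are percent-encoded in the connection string, since Alembic's ConfigParser-backed options otherwise receive the escaped form. A small illustration of the unescaping:

```python
# Percent-encoded URI credentials must be unescaped before handing the
# string to a consumer that expects the literal password.
try:
  from urllib import unquote            # Python 2, as in env.py above
except ImportError:
  from urllib.parse import unquote      # Python 3 fallback

uri = 'mysql://quay:p%40ssw0rd@db.example.com/quay'
print(unquote(uri))  # mysql://quay:p@ssw0rd@db.example.com/quay
```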
608  data/migrations/versions/5a07499ce53f_set_up_initial_database.py (new file)
@@ -0,0 +1,608 @@
+"""Set up initial database
+
+Revision ID: 5a07499ce53f
+Revises: None
+Create Date: 2014-05-13 11:26:51.808426
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '5a07499ce53f'
+down_revision = None
+
+from alembic import op
+from data.model.sqlalchemybridge import gen_sqlalchemy_metadata
+from data.database import all_models
+import sqlalchemy as sa
+
+
+def upgrade():
+  schema = gen_sqlalchemy_metadata(all_models)
+
+  ### commands auto generated by Alembic - please adjust! ###
+  op.create_table('loginservice',
+    sa.Column('id', sa.Integer(), nullable=False),
+    sa.Column('name', sa.String(length=255), nullable=False),
+    sa.PrimaryKeyConstraint('id')
+  )
+  op.create_index('loginservice_name', 'loginservice', ['name'], unique=True)
+
+  op.bulk_insert(schema.tables['loginservice'],
+  [
+    {'id':1, 'name':'github'},
+    {'id':2, 'name':'quayrobot'},
+    {'id':3, 'name':'ldap'},
+  ])
+
+  op.create_table('imagestorage',
+    sa.Column('id', sa.Integer(), nullable=False),
+    sa.Column('uuid', sa.String(length=255), nullable=False),
+    sa.Column('checksum', sa.String(length=255), nullable=True),
+    sa.Column('created', sa.DateTime(), nullable=True),
+    sa.Column('comment', sa.Text(), nullable=True),
+    sa.Column('command', sa.Text(), nullable=True),
+    sa.Column('image_size', sa.BigInteger(), nullable=True),
+    sa.Column('uploading', sa.Boolean(), nullable=True),
+    sa.PrimaryKeyConstraint('id')
+  )
+  op.create_table('queueitem',
+    sa.Column('id', sa.Integer(), nullable=False),
+    sa.Column('queue_name', sa.String(length=1024), nullable=False),
+    sa.Column('body', sa.Text(), nullable=False),
+    sa.Column('available_after', sa.DateTime(), nullable=False),
+    sa.Column('available', sa.Boolean(), nullable=False),
+    sa.Column('processing_expires', sa.DateTime(), nullable=True),
+    sa.Column('retries_remaining', sa.Integer(), nullable=False),
+    sa.PrimaryKeyConstraint('id')
+  )
+  op.create_index('queueitem_available', 'queueitem', ['available'], unique=False)
+  op.create_index('queueitem_available_after', 'queueitem', ['available_after'], unique=False)
+  op.create_index('queueitem_processing_expires', 'queueitem', ['processing_expires'], unique=False)
+  op.create_index('queueitem_queue_name', 'queueitem', ['queue_name'], unique=False)
+  op.create_table('role',
+    sa.Column('id', sa.Integer(), nullable=False),
+    sa.Column('name', sa.String(length=255), nullable=False),
+    sa.PrimaryKeyConstraint('id')
+  )
+  op.create_index('role_name', 'role', ['name'], unique=False)
+
+  op.bulk_insert(schema.tables['role'],
+  [
+    {'id':1, 'name':'admin'},
+    {'id':2, 'name':'write'},
+    {'id':3, 'name':'read'},
+  ])
+
+  op.create_table('logentrykind',
+    sa.Column('id', sa.Integer(), nullable=False),
+    sa.Column('name', sa.String(length=255), nullable=False),
+    sa.PrimaryKeyConstraint('id')
+  )
+  op.create_index('logentrykind_name', 'logentrykind', ['name'], unique=False)
+
+  op.bulk_insert(schema.tables['logentrykind'],
+  [
+    {'id':1, 'name':'account_change_plan'},
+    {'id':2, 'name':'account_change_cc'},
+    {'id':3, 'name':'account_change_password'},
+    {'id':4, 'name':'account_convert'},
+
+    {'id':5, 'name':'create_robot'},
+    {'id':6, 'name':'delete_robot'},
+
+    {'id':7, 'name':'create_repo'},
+    {'id':8, 'name':'push_repo'},
+    {'id':9, 'name':'pull_repo'},
+    {'id':10, 'name':'delete_repo'},
+    {'id':11, 'name':'create_tag'},
+    {'id':12, 'name':'move_tag'},
+    {'id':13, 'name':'delete_tag'},
+    {'id':14, 'name':'add_repo_permission'},
+    {'id':15, 'name':'change_repo_permission'},
+    {'id':16, 'name':'delete_repo_permission'},
+    {'id':17, 'name':'change_repo_visibility'},
+    {'id':18, 'name':'add_repo_accesstoken'},
+    {'id':19, 'name':'delete_repo_accesstoken'},
+    {'id':20, 'name':'add_repo_webhook'},
+    {'id':21, 'name':'delete_repo_webhook'},
+    {'id':22, 'name':'set_repo_description'},
+
+    {'id':23, 'name':'build_dockerfile'},
+
+    {'id':24, 'name':'org_create_team'},
+    {'id':25, 'name':'org_delete_team'},
+    {'id':26, 'name':'org_add_team_member'},
+    {'id':27, 'name':'org_remove_team_member'},
+    {'id':28, 'name':'org_set_team_description'},
+    {'id':29, 'name':'org_set_team_role'},
+
+    {'id':30, 'name':'create_prototype_permission'},
+    {'id':31, 'name':'modify_prototype_permission'},
+    {'id':32, 'name':'delete_prototype_permission'},
+
+    {'id':33, 'name':'setup_repo_trigger'},
+    {'id':34, 'name':'delete_repo_trigger'},
+
+    {'id':35, 'name':'create_application'},
+    {'id':36, 'name':'update_application'},
+    {'id':37, 'name':'delete_application'},
+    {'id':38, 'name':'reset_application_client_secret'},
+  ])
+
+  op.create_table('notificationkind',
+    sa.Column('id', sa.Integer(), nullable=False),
+    sa.Column('name', sa.String(length=255), nullable=False),
+    sa.PrimaryKeyConstraint('id')
+  )
+  op.create_index('notificationkind_name', 'notificationkind', ['name'], unique=False)
+
+  op.bulk_insert(schema.tables['notificationkind'],
+  [
+    {'id':1, 'name':'password_required'},
+    {'id':2, 'name':'over_private_usage'},
+    {'id':3, 'name':'expiring_license'},
+  ])
+
+  op.create_table('teamrole',
+    sa.Column('id', sa.Integer(), nullable=False),
+    sa.Column('name', sa.String(length=255), nullable=False),
+    sa.PrimaryKeyConstraint('id')
+  )
+  op.create_index('teamrole_name', 'teamrole', ['name'], unique=False)
+
+  op.bulk_insert(schema.tables['teamrole'],
+  [
+    {'id':1, 'name':'admin'},
+    {'id':2, 'name':'creator'},
+    {'id':3, 'name':'member'},
+  ])
+
+  op.create_table('visibility',
+    sa.Column('id', sa.Integer(), nullable=False),
+    sa.Column('name', sa.String(length=255), nullable=False),
+    sa.PrimaryKeyConstraint('id')
+  )
+  op.create_index('visibility_name', 'visibility', ['name'], unique=False)
+
+  op.bulk_insert(schema.tables['visibility'],
+  [
+    {'id':1, 'name':'public'},
+    {'id':2, 'name':'private'},
+  ])
+
+  op.create_table('user',
+    sa.Column('id', sa.Integer(), nullable=False),
+    sa.Column('username', sa.String(length=255), nullable=False),
+    sa.Column('password_hash', sa.String(length=255), nullable=True),
+    sa.Column('email', sa.String(length=255), nullable=False),
+    sa.Column('verified', sa.Boolean(), nullable=False),
+    sa.Column('stripe_id', sa.String(length=255), nullable=True),
+    sa.Column('organization', sa.Boolean(), nullable=False),
+    sa.Column('robot', sa.Boolean(), nullable=False),
+    sa.Column('invoice_email', sa.Boolean(), nullable=False),
+    sa.PrimaryKeyConstraint('id')
+  )
+  op.create_index('user_email', 'user', ['email'], unique=True)
+  op.create_index('user_organization', 'user', ['organization'], unique=False)
+  op.create_index('user_robot', 'user', ['robot'], unique=False)
+  op.create_index('user_stripe_id', 'user', ['stripe_id'], unique=False)
+  op.create_index('user_username', 'user', ['username'], unique=True)
+  op.create_table('buildtriggerservice',
+    sa.Column('id', sa.Integer(), nullable=False),
+    sa.Column('name', sa.String(length=255), nullable=False),
+    sa.PrimaryKeyConstraint('id')
+  )
+  op.create_index('buildtriggerservice_name', 'buildtriggerservice', ['name'], unique=False)
+
+  op.bulk_insert(schema.tables['buildtriggerservice'],
+  [
+    {'id':1, 'name':'github'},
+  ])
+
+  op.create_table('federatedlogin',
+    sa.Column('id', sa.Integer(), nullable=False),
+    sa.Column('user_id', sa.Integer(), nullable=False),
+    sa.Column('service_id', sa.Integer(), nullable=False),
+    sa.Column('service_ident', sa.String(length=255, collation='utf8_general_ci'), nullable=False),
+    sa.ForeignKeyConstraint(['service_id'], ['loginservice.id'], ),
+    sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
+    sa.PrimaryKeyConstraint('id')
+  )
+  op.create_index('federatedlogin_service_id', 'federatedlogin', ['service_id'], unique=False)
+  op.create_index('federatedlogin_service_id_service_ident', 'federatedlogin', ['service_id', 'service_ident'], unique=True)
+  op.create_index('federatedlogin_service_id_user_id', 'federatedlogin', ['service_id', 'user_id'], unique=True)
+  op.create_index('federatedlogin_user_id', 'federatedlogin', ['user_id'], unique=False)
+  op.create_table('oauthapplication',
+    sa.Column('id', sa.Integer(), nullable=False),
+    sa.Column('client_id', sa.String(length=255), nullable=False),
+    sa.Column('client_secret', sa.String(length=255), nullable=False),
+    sa.Column('redirect_uri', sa.String(length=255), nullable=False),
+    sa.Column('application_uri', sa.String(length=255), nullable=False),
+    sa.Column('organization_id', sa.Integer(), nullable=False),
+    sa.Column('name', sa.String(length=255), nullable=False),
+    sa.Column('description', sa.Text(), nullable=False),
+    sa.Column('gravatar_email', sa.String(length=255), nullable=True),
+    sa.ForeignKeyConstraint(['organization_id'], ['user.id'], ),
+    sa.PrimaryKeyConstraint('id')
+  )
+  op.create_index('oauthapplication_client_id', 'oauthapplication', ['client_id'], unique=False)
+  op.create_index('oauthapplication_organization_id', 'oauthapplication', ['organization_id'], unique=False)
+  op.create_table('notification',
+    sa.Column('id', sa.Integer(), nullable=False),
+    sa.Column('uuid', sa.String(length=255), nullable=False),
+    sa.Column('kind_id', sa.Integer(), nullable=False),
+    sa.Column('target_id', sa.Integer(), nullable=False),
+    sa.Column('metadata_json', sa.Text(), nullable=False),
+    sa.Column('created', sa.DateTime(), nullable=False),
+    sa.ForeignKeyConstraint(['kind_id'], ['notificationkind.id'], ),
+    sa.ForeignKeyConstraint(['target_id'], ['user.id'], ),
+    sa.PrimaryKeyConstraint('id')
+  )
+  op.create_index('notification_created', 'notification', ['created'], unique=False)
+  op.create_index('notification_kind_id', 'notification', ['kind_id'], unique=False)
+  op.create_index('notification_target_id', 'notification', ['target_id'], unique=False)
+  op.create_index('notification_uuid', 'notification', ['uuid'], unique=False)
+  op.create_table('emailconfirmation',
+    sa.Column('id', sa.Integer(), nullable=False),
+    sa.Column('code', sa.String(length=255), nullable=False),
+    sa.Column('user_id', sa.Integer(), nullable=False),
+    sa.Column('pw_reset', sa.Boolean(), nullable=False),
+    sa.Column('new_email', sa.String(length=255), nullable=True),
+    sa.Column('email_confirm', sa.Boolean(), nullable=False),
+    sa.Column('created', sa.DateTime(), nullable=False),
+    sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
+    sa.PrimaryKeyConstraint('id')
+  )
+  op.create_index('emailconfirmation_code', 'emailconfirmation', ['code'], unique=True)
+  op.create_index('emailconfirmation_user_id', 'emailconfirmation', ['user_id'], unique=False)
+  op.create_table('team',
+    sa.Column('id', sa.Integer(), nullable=False),
+    sa.Column('name', sa.String(length=255), nullable=False),
+    sa.Column('organization_id', sa.Integer(), nullable=False),
+    sa.Column('role_id', sa.Integer(), nullable=False),
+    sa.Column('description', sa.Text(), nullable=False),
+    sa.ForeignKeyConstraint(['organization_id'], ['user.id'], ),
+    sa.ForeignKeyConstraint(['role_id'], ['teamrole.id'], ),
+    sa.PrimaryKeyConstraint('id')
+  )
+  op.create_index('team_name', 'team', ['name'], unique=False)
+  op.create_index('team_name_organization_id', 'team', ['name', 'organization_id'], unique=True)
+  op.create_index('team_organization_id', 'team', ['organization_id'], unique=False)
+  op.create_index('team_role_id', 'team', ['role_id'], unique=False)
+  op.create_table('repository',
+    sa.Column('id', sa.Integer(), nullable=False),
+    sa.Column('namespace', sa.String(length=255), nullable=False),
+    sa.Column('name', sa.String(length=255), nullable=False),
+    sa.Column('visibility_id', sa.Integer(), nullable=False),
+    sa.Column('description', sa.Text(), nullable=True),
+    sa.Column('badge_token', sa.String(length=255), nullable=False),
+    sa.ForeignKeyConstraint(['visibility_id'], ['visibility.id'], ),
+    sa.PrimaryKeyConstraint('id')
+  )
+  op.create_index('repository_namespace_name', 'repository', ['namespace', 'name'], unique=True)
+  op.create_index('repository_visibility_id', 'repository', ['visibility_id'], unique=False)
+  op.create_table('accesstoken',
+    sa.Column('id', sa.Integer(), nullable=False),
+    sa.Column('friendly_name', sa.String(length=255), nullable=True),
+    sa.Column('code', sa.String(length=255), nullable=False),
+    sa.Column('repository_id', sa.Integer(), nullable=False),
+    sa.Column('created', sa.DateTime(), nullable=False),
+    sa.Column('role_id', sa.Integer(), nullable=False),
+    sa.Column('temporary', sa.Boolean(), nullable=False),
+    sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], ),
+    sa.ForeignKeyConstraint(['role_id'], ['role.id'], ),
+    sa.PrimaryKeyConstraint('id')
+  )
+  op.create_index('accesstoken_code', 'accesstoken', ['code'], unique=True)
+  op.create_index('accesstoken_repository_id', 'accesstoken', ['repository_id'], unique=False)
+  op.create_index('accesstoken_role_id', 'accesstoken', ['role_id'], unique=False)
+  op.create_table('repositorypermission',
+    sa.Column('id', sa.Integer(), nullable=False),
+    sa.Column('team_id', sa.Integer(), nullable=True),
+    sa.Column('user_id', sa.Integer(), nullable=True),
+    sa.Column('repository_id', sa.Integer(), nullable=False),
+    sa.Column('role_id', sa.Integer(), nullable=False),
+    sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], ),
+    sa.ForeignKeyConstraint(['role_id'], ['role.id'], ),
+    sa.ForeignKeyConstraint(['team_id'], ['team.id'], ),
+    sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
+    sa.PrimaryKeyConstraint('id')
+  )
+  op.create_index('repositorypermission_repository_id', 'repositorypermission', ['repository_id'], unique=False)
+  op.create_index('repositorypermission_role_id', 'repositorypermission', ['role_id'], unique=False)
+  op.create_index('repositorypermission_team_id', 'repositorypermission', ['team_id'], unique=False)
+  op.create_index('repositorypermission_team_id_repository_id', 'repositorypermission', ['team_id', 'repository_id'], unique=True)
+  op.create_index('repositorypermission_user_id', 'repositorypermission', ['user_id'], unique=False)
+  op.create_index('repositorypermission_user_id_repository_id', 'repositorypermission', ['user_id', 'repository_id'], unique=True)
+  op.create_table('oauthaccesstoken',
+    sa.Column('id', sa.Integer(), nullable=False),
+    sa.Column('uuid', sa.String(length=255), nullable=False),
+    sa.Column('application_id', sa.Integer(), nullable=False),
+    sa.Column('authorized_user_id', sa.Integer(), nullable=False),
+    sa.Column('scope', sa.String(length=255), nullable=False),
+    sa.Column('access_token', sa.String(length=255), nullable=False),
+    sa.Column('token_type', sa.String(length=255), nullable=False),
+    sa.Column('expires_at', sa.DateTime(), nullable=False),
+    sa.Column('refresh_token', sa.String(length=255), nullable=True),
+    sa.Column('data', sa.Text(), nullable=False),
+    sa.ForeignKeyConstraint(['application_id'], ['oauthapplication.id'], ),
+    sa.ForeignKeyConstraint(['authorized_user_id'], ['user.id'], ),
+    sa.PrimaryKeyConstraint('id')
+  )
+  op.create_index('oauthaccesstoken_access_token', 'oauthaccesstoken', ['access_token'], unique=False)
+  op.create_index('oauthaccesstoken_application_id', 'oauthaccesstoken', ['application_id'], unique=False)
+  op.create_index('oauthaccesstoken_authorized_user_id', 'oauthaccesstoken', ['authorized_user_id'], unique=False)
+  op.create_index('oauthaccesstoken_refresh_token', 'oauthaccesstoken', ['refresh_token'], unique=False)
+  op.create_index('oauthaccesstoken_uuid', 'oauthaccesstoken', ['uuid'], unique=False)
+  op.create_table('teammember',
+    sa.Column('id', sa.Integer(), nullable=False),
+    sa.Column('user_id', sa.Integer(), nullable=False),
+    sa.Column('team_id', sa.Integer(), nullable=False),
+    sa.ForeignKeyConstraint(['team_id'], ['team.id'], ),
+    sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
+    sa.PrimaryKeyConstraint('id')
+  )
+  op.create_index('teammember_team_id', 'teammember', ['team_id'], unique=False)
+  op.create_index('teammember_user_id', 'teammember', ['user_id'], unique=False)
+  op.create_index('teammember_user_id_team_id', 'teammember', ['user_id', 'team_id'], unique=True)
+  op.create_table('webhook',
+    sa.Column('id', sa.Integer(), nullable=False),
+    sa.Column('public_id', sa.String(length=255), nullable=False),
+    sa.Column('repository_id', sa.Integer(), nullable=False),
+    sa.Column('parameters', sa.Text(), nullable=False),
+    sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], ),
+    sa.PrimaryKeyConstraint('id')
+  )
+  op.create_index('webhook_public_id', 'webhook', ['public_id'], unique=True)
+  op.create_index('webhook_repository_id', 'webhook', ['repository_id'], unique=False)
+  op.create_table('oauthauthorizationcode',
+    sa.Column('id', sa.Integer(), nullable=False),
+    sa.Column('application_id', sa.Integer(), nullable=False),
+    sa.Column('code', sa.String(length=255), nullable=False),
+    sa.Column('scope', sa.String(length=255), nullable=False),
+    sa.Column('data', sa.Text(), nullable=False),
+    sa.ForeignKeyConstraint(['application_id'], ['oauthapplication.id'], ),
+    sa.PrimaryKeyConstraint('id')
+  )
+  op.create_index('oauthauthorizationcode_application_id', 'oauthauthorizationcode', ['application_id'], unique=False)
+  op.create_index('oauthauthorizationcode_code', 'oauthauthorizationcode', ['code'], unique=False)
+  op.create_table('image',
+    sa.Column('id', sa.Integer(), nullable=False),
+    sa.Column('docker_image_id', sa.String(length=255), nullable=False),
+    sa.Column('checksum', sa.String(length=255), nullable=True),
+    sa.Column('created', sa.DateTime(), nullable=True),
+    sa.Column('comment', sa.Text(), nullable=True),
+    sa.Column('command', sa.Text(), nullable=True),
+    sa.Column('repository_id', sa.Integer(), nullable=False),
+    sa.Column('image_size', sa.BigInteger(), nullable=True),
+    sa.Column('ancestors', sa.String(length=60535, collation='latin1_swedish_ci'), nullable=True),
+    sa.Column('storage_id', sa.Integer(), nullable=True),
+    sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], ),
+    sa.ForeignKeyConstraint(['storage_id'], ['imagestorage.id'], ),
+    sa.PrimaryKeyConstraint('id')
+  )
+  op.create_index('image_ancestors', 'image', ['ancestors'], unique=False)
+  op.create_index('image_repository_id', 'image', ['repository_id'], unique=False)
+  op.create_index('image_repository_id_docker_image_id', 'image', ['repository_id', 'docker_image_id'], unique=False)
+  op.create_index('image_storage_id', 'image', ['storage_id'], unique=False)
+  op.create_table('permissionprototype',
+    sa.Column('id', sa.Integer(), nullable=False),
+    sa.Column('org_id', sa.Integer(), nullable=False),
+    sa.Column('uuid', sa.String(length=255), nullable=False),
+    sa.Column('activating_user_id', sa.Integer(), nullable=True),
+    sa.Column('delegate_user_id', sa.Integer(), nullable=True),
+    sa.Column('delegate_team_id', sa.Integer(), nullable=True),
+    sa.Column('role_id', sa.Integer(), nullable=False),
+    sa.ForeignKeyConstraint(['activating_user_id'], ['user.id'], ),
+    sa.ForeignKeyConstraint(['delegate_team_id'], ['team.id'], ),
+    sa.ForeignKeyConstraint(['delegate_user_id'], ['user.id'], ),
+    sa.ForeignKeyConstraint(['org_id'], ['user.id'], ),
+    sa.ForeignKeyConstraint(['role_id'], ['role.id'], ),
+    sa.PrimaryKeyConstraint('id')
+  )
+  op.create_index('permissionprototype_activating_user_id', 'permissionprototype', ['activating_user_id'], unique=False)
+  op.create_index('permissionprototype_delegate_team_id', 'permissionprototype', ['delegate_team_id'], unique=False)
+  op.create_index('permissionprototype_delegate_user_id', 'permissionprototype', ['delegate_user_id'], unique=False)
+  op.create_index('permissionprototype_org_id', 'permissionprototype', ['org_id'], unique=False)
+  op.create_index('permissionprototype_org_id_activating_user_id', 'permissionprototype', ['org_id', 'activating_user_id'], unique=False)
+  op.create_index('permissionprototype_role_id', 'permissionprototype', ['role_id'], unique=False)
+  op.create_table('repositorytag',
+    sa.Column('id', sa.Integer(), nullable=False),
+    sa.Column('name', sa.String(length=255), nullable=False),
+    sa.Column('image_id', sa.Integer(), nullable=False),
+    sa.Column('repository_id', sa.Integer(), nullable=False),
+    sa.ForeignKeyConstraint(['image_id'], ['image.id'], ),
+    sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], ),
+    sa.PrimaryKeyConstraint('id')
+  )
+  op.create_index('repositorytag_image_id', 'repositorytag', ['image_id'], unique=False)
+  op.create_index('repositorytag_repository_id', 'repositorytag', ['repository_id'], unique=False)
+  op.create_index('repositorytag_repository_id_name', 'repositorytag', ['repository_id', 'name'], unique=True)
+  op.create_table('logentry',
+    sa.Column('id', sa.Integer(), nullable=False),
+    sa.Column('kind_id', sa.Integer(), nullable=False),
+    sa.Column('account_id', sa.Integer(), nullable=False),
+    sa.Column('performer_id', sa.Integer(), nullable=True),
+    sa.Column('repository_id', sa.Integer(), nullable=True),
+    sa.Column('access_token_id', sa.Integer(), nullable=True),
+    sa.Column('datetime', sa.DateTime(), nullable=False),
+    sa.Column('ip', sa.String(length=255), nullable=True),
+    sa.Column('metadata_json', sa.Text(), nullable=False),
+    sa.ForeignKeyConstraint(['access_token_id'], ['accesstoken.id'], ),
+    sa.ForeignKeyConstraint(['account_id'], ['user.id'], ),
+    sa.ForeignKeyConstraint(['kind_id'], ['logentrykind.id'], ),
+    sa.ForeignKeyConstraint(['performer_id'], ['user.id'], ),
+    sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], ),
+    sa.PrimaryKeyConstraint('id')
+  )
+  op.create_index('logentry_access_token_id', 'logentry', ['access_token_id'], unique=False)
+  op.create_index('logentry_account_id', 'logentry', ['account_id'], unique=False)
+  op.create_index('logentry_datetime', 'logentry', ['datetime'], unique=False)
+  op.create_index('logentry_kind_id', 'logentry', ['kind_id'], unique=False)
+  op.create_index('logentry_performer_id', 'logentry', ['performer_id'], unique=False)
+  op.create_index('logentry_repository_id', 'logentry', ['repository_id'], unique=False)
+  op.create_table('repositorybuildtrigger',
+    sa.Column('id', sa.Integer(), nullable=False),
+    sa.Column('uuid', sa.String(length=255), nullable=False),
+    sa.Column('service_id', sa.Integer(), nullable=False),
+    sa.Column('repository_id', sa.Integer(), nullable=False),
+    sa.Column('connected_user_id', sa.Integer(), nullable=False),
+    sa.Column('auth_token', sa.String(length=255), nullable=False),
+    sa.Column('config', sa.Text(), nullable=False),
+    sa.Column('write_token_id', sa.Integer(), nullable=True),
+    sa.Column('pull_robot_id', sa.Integer(), nullable=True),
+    sa.ForeignKeyConstraint(['connected_user_id'], ['user.id'], ),
+    sa.ForeignKeyConstraint(['pull_robot_id'], ['user.id'], ),
+    sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], ),
+    sa.ForeignKeyConstraint(['service_id'], ['buildtriggerservice.id'], ),
+    sa.ForeignKeyConstraint(['write_token_id'], ['accesstoken.id'], ),
+    sa.PrimaryKeyConstraint('id')
+  )
+  op.create_index('repositorybuildtrigger_connected_user_id', 'repositorybuildtrigger', ['connected_user_id'], unique=False)
+  op.create_index('repositorybuildtrigger_pull_robot_id', 'repositorybuildtrigger', ['pull_robot_id'], unique=False)
+  op.create_index('repositorybuildtrigger_repository_id', 'repositorybuildtrigger', ['repository_id'], unique=False)
+  op.create_index('repositorybuildtrigger_service_id', 'repositorybuildtrigger', ['service_id'], unique=False)
+  op.create_index('repositorybuildtrigger_write_token_id', 'repositorybuildtrigger', ['write_token_id'], unique=False)
+  op.create_table('repositorybuild',
+    sa.Column('id', sa.Integer(), nullable=False),
+    sa.Column('uuid', sa.String(length=255), nullable=False),
+    sa.Column('repository_id', sa.Integer(), nullable=False),
+    sa.Column('access_token_id', sa.Integer(), nullable=False),
+    sa.Column('resource_key', sa.String(length=255), nullable=False),
+    sa.Column('job_config', sa.Text(), nullable=False),
+    sa.Column('phase', sa.String(length=255), nullable=False),
+    sa.Column('started', sa.DateTime(), nullable=False),
+    sa.Column('display_name', sa.String(length=255), nullable=False),
+    sa.Column('trigger_id', sa.Integer(), nullable=True),
+    sa.Column('pull_robot_id', sa.Integer(), nullable=True),
+    sa.ForeignKeyConstraint(['access_token_id'], ['accesstoken.id'], ),
+    sa.ForeignKeyConstraint(['pull_robot_id'], ['user.id'], ),
+    sa.ForeignKeyConstraint(['repository_id'], ['repository.id'], ),
+    sa.ForeignKeyConstraint(['trigger_id'], ['repositorybuildtrigger.id'], ),
+    sa.PrimaryKeyConstraint('id')
+  )
+  op.create_index('repositorybuild_access_token_id', 'repositorybuild', ['access_token_id'], unique=False)
+  op.create_index('repositorybuild_pull_robot_id', 'repositorybuild', ['pull_robot_id'], unique=False)
+  op.create_index('repositorybuild_repository_id', 'repositorybuild', ['repository_id'], unique=False)
+  op.create_index('repositorybuild_resource_key', 'repositorybuild', ['resource_key'], unique=False)
+  op.create_index('repositorybuild_trigger_id', 'repositorybuild', ['trigger_id'], unique=False)
+  op.create_index('repositorybuild_uuid', 'repositorybuild', ['uuid'], unique=False)
+  ### end Alembic commands ###
+
+
+def downgrade():
+  ### commands auto generated by Alembic - please adjust! ###
+  op.drop_index('repositorybuild_uuid', table_name='repositorybuild')
+  op.drop_index('repositorybuild_trigger_id', table_name='repositorybuild')
+  op.drop_index('repositorybuild_resource_key', table_name='repositorybuild')
+  op.drop_index('repositorybuild_repository_id', table_name='repositorybuild')
+  op.drop_index('repositorybuild_pull_robot_id', table_name='repositorybuild')
+  op.drop_index('repositorybuild_access_token_id', table_name='repositorybuild')
+  op.drop_table('repositorybuild')
+  op.drop_index('repositorybuildtrigger_write_token_id', table_name='repositorybuildtrigger')
+  op.drop_index('repositorybuildtrigger_service_id', table_name='repositorybuildtrigger')
+  op.drop_index('repositorybuildtrigger_repository_id', table_name='repositorybuildtrigger')
+  op.drop_index('repositorybuildtrigger_pull_robot_id', table_name='repositorybuildtrigger')
+  op.drop_index('repositorybuildtrigger_connected_user_id', table_name='repositorybuildtrigger')
+  op.drop_table('repositorybuildtrigger')
+  op.drop_index('logentry_repository_id', table_name='logentry')
+  op.drop_index('logentry_performer_id', table_name='logentry')
+  op.drop_index('logentry_kind_id', table_name='logentry')
+  op.drop_index('logentry_datetime', table_name='logentry')
+  op.drop_index('logentry_account_id', table_name='logentry')
+  op.drop_index('logentry_access_token_id', table_name='logentry')
+  op.drop_table('logentry')
+  op.drop_index('repositorytag_repository_id_name', table_name='repositorytag')
+  op.drop_index('repositorytag_repository_id', table_name='repositorytag')
+  op.drop_index('repositorytag_image_id', table_name='repositorytag')
+  op.drop_table('repositorytag')
+  op.drop_index('permissionprototype_role_id', table_name='permissionprototype')
+  op.drop_index('permissionprototype_org_id_activating_user_id', table_name='permissionprototype')
+  op.drop_index('permissionprototype_org_id', table_name='permissionprototype')
+  op.drop_index('permissionprototype_delegate_user_id', table_name='permissionprototype')
+  op.drop_index('permissionprototype_delegate_team_id', table_name='permissionprototype')
+  op.drop_index('permissionprototype_activating_user_id', table_name='permissionprototype')
+  op.drop_table('permissionprototype')
+  op.drop_index('image_storage_id', table_name='image')
+  op.drop_index('image_repository_id_docker_image_id', table_name='image')
+  op.drop_index('image_repository_id', table_name='image')
+  op.drop_index('image_ancestors', table_name='image')
+  op.drop_table('image')
+  op.drop_index('oauthauthorizationcode_code', table_name='oauthauthorizationcode')
+  op.drop_index('oauthauthorizationcode_application_id', table_name='oauthauthorizationcode')
+  op.drop_table('oauthauthorizationcode')
+  op.drop_index('webhook_repository_id', table_name='webhook')
+  op.drop_index('webhook_public_id', table_name='webhook')
+  op.drop_table('webhook')
+  op.drop_index('teammember_user_id_team_id', table_name='teammember')
+  op.drop_index('teammember_user_id', table_name='teammember')
+  op.drop_index('teammember_team_id', table_name='teammember')
+  op.drop_table('teammember')
+  op.drop_index('oauthaccesstoken_uuid', table_name='oauthaccesstoken')
+  op.drop_index('oauthaccesstoken_refresh_token', table_name='oauthaccesstoken')
+  op.drop_index('oauthaccesstoken_authorized_user_id', table_name='oauthaccesstoken')
+  op.drop_index('oauthaccesstoken_application_id', table_name='oauthaccesstoken')
+  op.drop_index('oauthaccesstoken_access_token', table_name='oauthaccesstoken')
+  op.drop_table('oauthaccesstoken')
+  op.drop_index('repositorypermission_user_id_repository_id', table_name='repositorypermission')
+  op.drop_index('repositorypermission_user_id', table_name='repositorypermission')
+  op.drop_index('repositorypermission_team_id_repository_id', table_name='repositorypermission')
+  op.drop_index('repositorypermission_team_id', table_name='repositorypermission')
+  op.drop_index('repositorypermission_role_id', table_name='repositorypermission')
+  op.drop_index('repositorypermission_repository_id', table_name='repositorypermission')
+  op.drop_table('repositorypermission')
+  op.drop_index('accesstoken_role_id', table_name='accesstoken')
+  op.drop_index('accesstoken_repository_id', table_name='accesstoken')
+  op.drop_index('accesstoken_code', table_name='accesstoken')
+  op.drop_table('accesstoken')
+  op.drop_index('repository_visibility_id', table_name='repository')
+  op.drop_index('repository_namespace_name', table_name='repository')
+  op.drop_table('repository')
+  op.drop_index('team_role_id', table_name='team')
+  op.drop_index('team_organization_id', table_name='team')
+  op.drop_index('team_name_organization_id', table_name='team')
+  op.drop_index('team_name', table_name='team')
+  op.drop_table('team')
+  op.drop_index('emailconfirmation_user_id', table_name='emailconfirmation')
+  op.drop_index('emailconfirmation_code', table_name='emailconfirmation')
+  op.drop_table('emailconfirmation')
+  op.drop_index('notification_uuid', table_name='notification')
+  op.drop_index('notification_target_id', table_name='notification')
+  op.drop_index('notification_kind_id', table_name='notification')
+  op.drop_index('notification_created', table_name='notification')
+  op.drop_table('notification')
+  op.drop_index('oauthapplication_organization_id', table_name='oauthapplication')
+  op.drop_index('oauthapplication_client_id', table_name='oauthapplication')
+  op.drop_table('oauthapplication')
+  op.drop_index('federatedlogin_user_id', table_name='federatedlogin')
+  op.drop_index('federatedlogin_service_id_user_id', table_name='federatedlogin')
+  op.drop_index('federatedlogin_service_id_service_ident', table_name='federatedlogin')
+  op.drop_index('federatedlogin_service_id', table_name='federatedlogin')
+  op.drop_table('federatedlogin')
+  op.drop_index('buildtriggerservice_name', table_name='buildtriggerservice')
+  op.drop_table('buildtriggerservice')
+  op.drop_index('user_username', table_name='user')
+  op.drop_index('user_stripe_id', table_name='user')
+  op.drop_index('user_robot', table_name='user')
+  op.drop_index('user_organization', table_name='user')
+  op.drop_index('user_email', table_name='user')
+  op.drop_table('user')
+  op.drop_index('visibility_name', table_name='visibility')
+  op.drop_table('visibility')
+  op.drop_index('teamrole_name', table_name='teamrole')
+  op.drop_table('teamrole')
+  op.drop_index('notificationkind_name', table_name='notificationkind')
+  op.drop_table('notificationkind')
+  op.drop_index('logentrykind_name', table_name='logentrykind')
+  op.drop_table('logentrykind')
+  op.drop_index('role_name', table_name='role')
+  op.drop_table('role')
+  op.drop_index('queueitem_queue_name', table_name='queueitem')
+  op.drop_index('queueitem_processing_expires', table_name='queueitem')
+  op.drop_index('queueitem_available_after', table_name='queueitem')
+  op.drop_index('queueitem_available', table_name='queueitem')
+  op.drop_table('queueitem')
+  op.drop_table('imagestorage')
+  op.drop_index('loginservice_name', table_name='loginservice')
+  op.drop_table('loginservice')
+  ### end Alembic commands ###
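The bulk_insert calls above seed the enum-like tables in the same revision that creates them, using table metadata generated from the peewee models. A self-contained miniature of the same seed-data idea, using plain SQLAlchemy core (0.9/1.x-era API) rather than a live Alembic migration context:

```python
# Miniature of the seed-data pattern: create a table and bulk-insert the
# fixed rows that the application treats as an enum.
import sqlalchemy as sa

engine = sa.create_engine('sqlite://')
metadata = sa.MetaData()

role = sa.Table('role', metadata,
                sa.Column('id', sa.Integer(), primary_key=True),
                sa.Column('name', sa.String(length=255), nullable=False))
metadata.create_all(engine)

with engine.connect() as conn:
  conn.execute(role.insert(), [
    {'id': 1, 'name': 'admin'},
    {'id': 2, 'name': 'write'},
    {'id': 3, 'name': 'read'},
  ])
  print(conn.execute(sa.select([role])).fetchall())
```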
@@ -8,11 +8,17 @@ from data.database import *
 from util.validation import *
 from util.names import format_robot_username
 
-from app import storage as store
-
 
 logger = logging.getLogger(__name__)
-transaction_factory = app.config['DB_TRANSACTION_FACTORY']
+
+
+class Config(object):
+  def __init__(self):
+    self.app_config = None
+    self.store = None
+
+config = Config()
 
 
 class DataModelException(Exception):
   pass
@@ -58,7 +64,33 @@ class InvalidBuildTriggerException(DataModelException):
   pass
 
 
-def create_user(username, password, email, is_organization=False):
+class TooManyUsersException(DataModelException):
+  pass
+
+
+def is_create_user_allowed():
+  return get_active_user_count() < config.app_config['LICENSE_USER_LIMIT']
+
+
+def create_user(username, password, email):
+  """ Creates a regular user, if allowed. """
+  if not validate_password(password):
+    raise InvalidPasswordException(INVALID_PASSWORD_MESSAGE)
+
+  if not is_create_user_allowed():
+    raise TooManyUsersException()
+
+  created = _create_user(username, email)
+
+  # Store the password hash
+  pw_hash = bcrypt.hashpw(password, bcrypt.gensalt())
+  created.password_hash = pw_hash
+
+  created.save()
+
+  return created
+
+
+def _create_user(username, email):
   if not validate_email(email):
     raise InvalidEmailAddressException('Invalid email address: %s' % email)
 
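create_user now hashes the password inline with bcrypt, while the no-password path moves to _create_user and create_federated_user. The py-bcrypt calls used here (Python 2, string-based signature) verify as well as hash, because re-hashing with the stored value as salt reproduces the stored hash:

```python
# py-bcrypt usage as in create_user: gensalt() embeds the cost factor and
# salt in the hash, and verification re-hashes with the stored value.
import bcrypt

pw_hash = bcrypt.hashpw('s3cret', bcrypt.gensalt())

def verify(candidate, stored_hash):
  return bcrypt.hashpw(candidate, stored_hash) == stored_hash

assert verify('s3cret', pw_hash)
assert not verify('wrong', pw_hash)
```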
@@ -66,10 +98,6 @@ def create_user(username, password, email, is_organization=False):
   if not username_valid:
     raise InvalidUsernameException('Invalid username %s: %s' % (username, username_issue))
 
-  # We allow password none for the federated login case.
-  if password is not None and not validate_password(password):
-    raise InvalidPasswordException(INVALID_PASSWORD_MESSAGE)
-
   try:
     existing = User.get((User.username == username) | (User.email == email))
 
@@ -88,27 +116,24 @@ def create_user(username, password, email, is_organization=False):
     pass
 
   try:
-    pw_hash = None
-    if password is not None:
-      pw_hash = bcrypt.hashpw(password, bcrypt.gensalt())
-
-    new_user = User.create(username=username, password_hash=pw_hash,
-                           email=email)
-
-    # If the password is None, then add a notification for the user to change
-    # their password ASAP.
-    if not pw_hash and not is_organization:
-      create_notification('password_required', new_user)
-
+    new_user = User.create(username=username, email=email)
     return new_user
   except Exception as ex:
     raise DataModelException(ex.message)
 
 
+def is_username_unique(test_username):
+  try:
+    User.get((User.username == test_username))
+    return False
+  except User.DoesNotExist:
+    return True
+
+
 def create_organization(name, email, creating_user):
   try:
     # Create the org
-    new_org = create_user(name, None, email, is_organization=True)
+    new_org = _create_user(name, email)
+    new_org.organization = True
+    new_org.save()
 
@@ -321,8 +346,11 @@ def set_team_org_permission(team, team_role_name, set_by_username):
   return team
 
 
-def create_federated_user(username, email, service_name, service_id):
-  new_user = create_user(username, None, email)
+def create_federated_user(username, email, service_name, service_id, set_password_notification):
+  if not is_create_user_allowed():
+    raise TooManyUsersException()
+
+  new_user = _create_user(username, email)
   new_user.verified = True
   new_user.save()
 
@ -330,6 +358,9 @@ def create_federated_user(username, email, service_name, service_id):
|
|||
FederatedLogin.create(user=new_user, service=service,
|
||||
service_ident=service_id)
|
||||
|
||||
if set_password_notification:
|
||||
create_notification('password_required', new_user)
|
||||
|
||||
return new_user
|
||||
|
||||
|
||||
|
@ -340,18 +371,16 @@ def attach_federated_login(user, service_name, service_id):


def verify_federated_login(service_name, service_id):
  selected = FederatedLogin.select(FederatedLogin, User)
  with_service = selected.join(LoginService)
  with_user = with_service.switch(FederatedLogin).join(User)
  found = with_user.where(FederatedLogin.service_ident == service_id,
                          LoginService.name == service_name)

  found_list = list(found)

  if found_list:
    return found_list[0].user

  return None
  try:
    found = (FederatedLogin
             .select(FederatedLogin, User)
             .join(LoginService)
             .switch(FederatedLogin).join(User)
             .where(FederatedLogin.service_ident == service_id, LoginService.name == service_name)
             .get())
    return found.user
  except FederatedLogin.DoesNotExist:
    return None


def list_federated_logins(user):
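The rewrite swaps the list-materializing lookup for peewee's .get() / DoesNotExist idiom, which issues a LIMIT 1 query instead of fetching every match. The same idea in isolation, a sketch assuming peewee 2.x (where select queries expose model_class):

def first_or_none(query):
  # Prefer .get() + DoesNotExist over materializing the whole result
  # set just to test emptiness; .get() adds LIMIT 1 to the query.
  try:
    return query.get()
  except query.model_class.DoesNotExist:
    return None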
@ -935,7 +964,7 @@ def __translate_ancestry(old_ancestry, translations, repository, username):

def find_create_or_link_image(docker_image_id, repository, username,
                              translations):
  with transaction_factory(db):
  with config.app_config['DB_TRANSACTION_FACTORY'](db):
    repo_image = get_repo_image(repository.namespace, repository.name,
                                docker_image_id)
    if repo_image:

@ -1018,7 +1047,7 @@ def set_image_size(docker_image_id, namespace_name, repository_name,

def set_image_metadata(docker_image_id, namespace_name, repository_name,
                       created_date_str, comment, command, parent=None):
  with transaction_factory(db):
  with config.app_config['DB_TRANSACTION_FACTORY'](db):
    query = (Image
             .select(Image, ImageStorage)
             .join(Repository)

@ -1064,7 +1093,7 @@ def list_repository_tags(namespace_name, repository_name):


def garbage_collect_repository(namespace_name, repository_name):
  with transaction_factory(db):
  with config.app_config['DB_TRANSACTION_FACTORY'](db):
    # Get a list of all images used by tags in the repository
    tag_query = (RepositoryTag
                 .select(RepositoryTag, Image, ImageStorage)

@ -1098,10 +1127,10 @@ def garbage_collect_repository(namespace_name, repository_name):
                     image_to_remove.storage.uuid)
        uuids_to_check_for_gc.add(image_to_remove.storage.uuid)
      else:
        image_path = store.image_path(namespace_name, repository_name,
                                      image_to_remove.docker_image_id, None)
        image_path = config.store.image_path(namespace_name, repository_name,
                                             image_to_remove.docker_image_id, None)
        logger.debug('Deleting image storage: %s', image_path)
        store.remove(image_path)
        config.store.remove(image_path)

      image_to_remove.delete_instance()

@ -1116,10 +1145,9 @@ def garbage_collect_repository(namespace_name, repository_name):
    for storage in storage_to_remove:
      logger.debug('Garbage collecting image storage: %s', storage.uuid)
      storage.delete_instance()
      image_path = store.image_path(namespace_name, repository_name,
                                    image_to_remove.docker_image_id,
                                    storage.uuid)
      store.remove(image_path)
      image_path = config.store.image_path(namespace_name, repository_name,
                                           image_to_remove.docker_image_id, storage.uuid)
      config.store.remove(image_path)

    return len(to_remove)

@ -1489,8 +1517,8 @@ def get_pull_credentials(robotname):
  return {
    'username': robot.username,
    'password': login_info.service_ident,
    'registry': '%s://%s/v1/' % (app.config['PREFERRED_URL_SCHEME'],
                                 app.config['SERVER_HOSTNAME']),
    'registry': '%s://%s/v1/' % (config.app_config['PREFERRED_URL_SCHEME'],
                                 config.app_config['SERVER_HOSTNAME']),
  }
@ -1594,14 +1622,20 @@ def list_trigger_builds(namespace_name, repository_name, trigger_uuid,
          .where(RepositoryBuildTrigger.uuid == trigger_uuid))


def create_notification(kind, target, metadata={}):
  kind_ref = NotificationKind.get(name=kind)
def create_notification(kind_name, target, metadata={}):
  kind_ref = NotificationKind.get(name=kind_name)
  notification = Notification.create(kind=kind_ref, target=target,
                                     metadata_json=json.dumps(metadata))
  return notification


def list_notifications(user, kind=None):
def create_unique_notification(kind_name, target, metadata={}):
  with config.app_config['DB_TRANSACTION_FACTORY'](db):
    if list_notifications(target, kind_name).count() == 0:
      create_notification(kind_name, target, metadata)


def list_notifications(user, kind_name=None):
  Org = User.alias()
  AdminTeam = Team.alias()
  AdminTeamMember = TeamMember.alias()

@ -1619,20 +1653,30 @@ def list_notifications(user, kind=None):
           .join(AdminTeamMember, JOIN_LEFT_OUTER, on=(AdminTeam.id ==
                                                       AdminTeamMember.team))
           .join(AdminUser, JOIN_LEFT_OUTER, on=(AdminTeamMember.user ==
                                                 AdminUser.id)))
                                                 AdminUser.id))
           .where((Notification.target == user) |
                  ((AdminUser.id == user) & (TeamRole.name == 'admin')))
           .order_by(Notification.created)
           .desc())

  where_clause = ((Notification.target == user) |
                  ((AdminUser.id == user) &
                   (TeamRole.name == 'admin')))

  if kind:
    where_clause = where_clause & (NotificationKind.name == kind)
  if kind_name:
    query = (query
             .switch(Notification)
             .join(NotificationKind)
             .where(NotificationKind.name == kind_name))

  return query.where(where_clause).order_by(Notification.created).desc()
  return query


def delete_notifications_by_kind(target, kind):
  kind_ref = NotificationKind.get(name=kind)
def delete_all_notifications_by_kind(kind_name):
  kind_ref = NotificationKind.get(name=kind_name)
  (Notification.delete()
   .where(Notification.kind == kind_ref)
   .execute())


def delete_notifications_by_kind(target, kind_name):
  kind_ref = NotificationKind.get(name=kind_name)
  Notification.delete().where(Notification.target == target,
                              Notification.kind == kind_ref).execute()
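Taken together, the notification helpers now split into create, create-if-absent, delete-for-target, and delete-everywhere. A short usage sketch built only from the functions in this hunk (the user variable is assumed):

from data import model

# Nag a user to set a password, without ever stacking duplicate nags.
model.create_unique_notification('password_required', user)

# Once the user sets a password, clear the notification for them alone.
model.delete_notifications_by_kind(user, 'password_required')

# Wipe a kind globally, e.g. after a new license is installed.
model.delete_all_notifications_by_kind('expiring_license')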
@ -65,7 +65,7 @@ class DatabaseAuthorizationProvider(AuthorizationProvider):
             .switch(OAuthAccessToken)
             .join(User)
             .where(OAuthApplication.client_id == client_id, User.username == username,
                    OAuthAccessToken.expires_at > datetime.now()))
                    OAuthAccessToken.expires_at > datetime.utcnow()))
    found = list(found)
    logger.debug('Found %s matching tokens.', len(found))
    long_scope_string = ','.join([token.scope for token in found])

@ -116,7 +116,7 @@ class DatabaseAuthorizationProvider(AuthorizationProvider):
      raise RuntimeError('Username must be in the data field')

    app = OAuthApplication.get(client_id=client_id)
    expires_at = datetime.now() + timedelta(seconds=expires_in)
    expires_at = datetime.utcnow() + timedelta(seconds=expires_in)
    OAuthAccessToken.create(application=app, authorized_user=user, scope=scope,
                            access_token=access_token, token_type=token_type,
                            expires_at=expires_at, refresh_token=refresh_token, data=data)

@ -274,7 +274,7 @@ def list_applications_for_org(org):


def create_access_token_for_testing(user, client_id, scope):
  expires_at = datetime.now() + timedelta(seconds=10000)
  expires_at = datetime.utcnow() + timedelta(seconds=10000)
  application = get_application_for_client_id(client_id)
  OAuthAccessToken.create(application=application, authorized_user=user, scope=scope,
                          token_type='token', access_token='test',
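Every datetime.now() in the token paths becomes datetime.utcnow(), so expiry comparisons no longer depend on the server's local timezone. A small illustration of the failure mode being avoided (an assumed scenario, not code from this diff):

from datetime import datetime, timedelta

# On a server at UTC-5, these two "one hour from now" values differ by
# five hours once compared against utcnow().
local_expiry = datetime.now() + timedelta(hours=1)
utc_expiry = datetime.utcnow() + timedelta(hours=1)

assert utc_expiry > datetime.utcnow()  # always holds
# local_expiry > datetime.utcnow() is False on any zone west of UTC,
# which would make freshly issued tokens look already expired.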
122 data/queue.py

@ -1,28 +1,61 @@
from datetime import datetime, timedelta

from data.database import QueueItem, db
from app import app


transaction_factory = app.config['DB_TRANSACTION_FACTORY']


MINIMUM_EXTENSION = timedelta(seconds=20)


class WorkQueue(object):
  def __init__(self, queue_name, canonical_name_match_list=None):
    self.queue_name = queue_name
  def __init__(self, queue_name, transaction_factory,
               canonical_name_match_list=None, reporter=None):
    self._queue_name = queue_name
    self._reporter = reporter
    self._transaction_factory = transaction_factory
    self._currently_processing = False

    if canonical_name_match_list is None:
      self.canonical_name_match_list = []
      self._canonical_name_match_list = []
    else:
      self.canonical_name_match_list = canonical_name_match_list
      self._canonical_name_match_list = canonical_name_match_list

  @staticmethod
  def _canonical_name(name_list):
    return '/'.join(name_list) + '/'

  def _running_jobs(self, now, name_match_query):
    return (QueueItem
            .select(QueueItem.queue_name)
            .where(QueueItem.available == False,
                   QueueItem.processing_expires > now,
                   QueueItem.queue_name ** name_match_query))

  def _available_jobs(self, now, name_match_query, running_query):
    return (QueueItem
            .select()
            .where(QueueItem.queue_name ** name_match_query, QueueItem.available_after <= now,
                   ((QueueItem.available == True) | (QueueItem.processing_expires <= now)),
                   QueueItem.retries_remaining > 0, ~(QueueItem.queue_name << running_query)))

  def _name_match_query(self):
    return '%s%%' % self._canonical_name([self._queue_name] + self._canonical_name_match_list)

  def update_metrics(self):
    with self._transaction_factory(db):
      if self._reporter is None:
        return

      now = datetime.utcnow()
      name_match_query = self._name_match_query()

      running_query = self._running_jobs(now, name_match_query)
      running_count = running_query.distinct().count()

      available_query = self._available_jobs(now, name_match_query, running_query)
      available_count = available_query.select(QueueItem.queue_name).distinct().count()

      self._reporter(self._currently_processing, running_count, running_count + available_count)

  def put(self, canonical_name_list, message, available_after=0, retries_remaining=5):
    """
    Put an item, if it shouldn't be processed for some number of seconds,

@ -30,79 +63,68 @@ class WorkQueue(object):
    """

    params = {
      'queue_name': self._canonical_name([self.queue_name] + canonical_name_list),
      'queue_name': self._canonical_name([self._queue_name] + canonical_name_list),
      'body': message,
      'retries_remaining': retries_remaining,
    }

    if available_after:
      available_date = datetime.now() + timedelta(seconds=available_after)
      available_date = datetime.utcnow() + timedelta(seconds=available_after)
      params['available_after'] = available_date

    QueueItem.create(**params)
    with self._transaction_factory(db):
      QueueItem.create(**params)

  def get(self, processing_time=300):
    """
    Get an available item and mark it as unavailable for the default of five
    minutes.
    """
    now = datetime.now()
    now = datetime.utcnow()

    name_match_query = '%s%%' % self._canonical_name([self.queue_name] +
                                                     self.canonical_name_match_list)
    name_match_query = self._name_match_query()

    with transaction_factory(db):
      running = (QueueItem
                 .select(QueueItem.queue_name)
                 .where(QueueItem.available == False,
                        QueueItem.processing_expires > now,
                        QueueItem.queue_name ** name_match_query))
    with self._transaction_factory(db):
      running = self._running_jobs(now, name_match_query)
      avail = self._available_jobs(now, name_match_query, running)

      avail = QueueItem.select().where(QueueItem.queue_name ** name_match_query,
                                       QueueItem.available_after <= now,
                                       ((QueueItem.available == True) |
                                        (QueueItem.processing_expires <= now)),
                                       QueueItem.retries_remaining > 0,
                                       ~(QueueItem.queue_name << running))

      found = list(avail.limit(1).order_by(QueueItem.id))

      if found:
        item = found[0]
      item = None
      try:
        item = avail.order_by(QueueItem.id).get()
        item.available = False
        item.processing_expires = now + timedelta(seconds=processing_time)
        item.retries_remaining -= 1
        item.save()

        return item
        self._currently_processing = True
      except QueueItem.DoesNotExist:
        self._currently_processing = False
        pass

      return None
      return item

  @staticmethod
  def complete(completed_item):
    completed_item.delete_instance()
  def complete(self, completed_item):
    with self._transaction_factory(db):
      completed_item.delete_instance()
      self._currently_processing = False

  @staticmethod
  def incomplete(incomplete_item, retry_after=300, restore_retry=False):
    retry_date = datetime.now() + timedelta(seconds=retry_after)
    incomplete_item.available_after = retry_date
    incomplete_item.available = True
  def incomplete(self, incomplete_item, retry_after=300, restore_retry=False):
    with self._transaction_factory(db):
      retry_date = datetime.utcnow() + timedelta(seconds=retry_after)
      incomplete_item.available_after = retry_date
      incomplete_item.available = True

    if restore_retry:
      incomplete_item.retries_remaining += 1
      if restore_retry:
        incomplete_item.retries_remaining += 1

    incomplete_item.save()
      incomplete_item.save()
      self._currently_processing = False

  @staticmethod
  def extend_processing(queue_item, seconds_from_now):
    new_expiration = datetime.now() + timedelta(seconds=seconds_from_now)
    new_expiration = datetime.utcnow() + timedelta(seconds=seconds_from_now)

    # Only actually write the new expiration to the db if it moves the expiration some minimum
    if new_expiration - queue_item.processing_expires > MINIMUM_EXTENSION:
      queue_item.processing_expires = new_expiration
      queue_item.save()


image_diff_queue = WorkQueue(app.config['DIFFS_QUEUE_NAME'])
dockerfile_build_queue = WorkQueue(app.config['DOCKERFILE_BUILD_QUEUE_NAME'])
webhook_queue = WorkQueue(app.config['WEBHOOK_QUEUE_NAME'])
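The module-level queue singletons and their implicit app import move out of data/queue.py; the constructor now takes its transaction factory (and an optional metrics reporter) explicitly. A sketch of how the application side presumably builds the queues now; the build_queues helper and reporter callback are illustrative, and a Flask app is assumed in scope:

from data.queue import WorkQueue

def build_queues(app):
  def reporter(currently_processing, running_count, total_jobs):
    # Illustrative metrics hook; any callable with this shape works.
    app.logger.debug('queue: processing=%s running=%s total=%s',
                     currently_processing, running_count, total_jobs)

  tf = app.config['DB_TRANSACTION_FACTORY']
  image_diff_queue = WorkQueue(app.config['DIFFS_QUEUE_NAME'], tf)
  dockerfile_build_queue = WorkQueue(app.config['DOCKERFILE_BUILD_QUEUE_NAME'],
                                     tf, reporter=reporter)
  webhook_queue = WorkQueue(app.config['WEBHOOK_QUEUE_NAME'], tf)
  return image_diff_queue, dockerfile_build_queue, webhook_queue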
@ -17,6 +17,27 @@ class UserEventBuilder(object):
    return UserEventListener(self._redis_host, username, events)


class UserEventsBuilderModule(object):
  def __init__(self, app=None):
    self.app = app
    if app is not None:
      self.state = self.init_app(app)
    else:
      self.state = None

  def init_app(self, app):
    redis_hostname = app.config.get('USER_EVENTS_REDIS_HOSTNAME')
    user_events = UserEventBuilder(redis_hostname)

    # register extension with app
    app.extensions = getattr(app, 'extensions', {})
    app.extensions['userevents'] = user_events
    return user_events

  def __getattr__(self, name):
    return getattr(self.state, name, None)


class UserEvent(object):
  """
  Defines a helper class for publishing to realtime user events
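UserEventsBuilderModule follows the standard Flask extension pattern: construct empty, bind later with init_app, and proxy attribute access to the bound state. A minimal sketch of both supported styles, assuming the class lives in data.userevent as in this diff:

from flask import Flask
from data.userevent import UserEventsBuilderModule

app = Flask(__name__)
app.config['USER_EVENTS_REDIS_HOSTNAME'] = 'localhost'

# Style 1: bind at construction time.
userevents = UserEventsBuilderModule(app)

# Style 2: construct first, bind later (useful with app factories).
userevents = UserEventsBuilderModule()
userevents.init_app(app)

# __getattr__ forwards to the underlying UserEventBuilder.
listener = userevents.get_listener('someuser', ['docker-cli'])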
142 data/users.py Normal file

@ -0,0 +1,142 @@
import ldap
import logging

from util.validation import generate_valid_usernames
from data import model

logger = logging.getLogger(__name__)


class DatabaseUsers(object):
  def verify_user(self, username_or_email, password):
    """ Simply delegate to the model implementation. """
    return model.verify_user(username_or_email, password)

  def user_exists(self, username):
    return model.get_user(username) is not None


class LDAPConnection(object):
  def __init__(self, ldap_uri, user_dn, user_pw):
    self._ldap_uri = ldap_uri
    self._user_dn = user_dn
    self._user_pw = user_pw
    self._conn = None

  def __enter__(self):
    self._conn = ldap.initialize(self._ldap_uri)
    self._conn.simple_bind_s(self._user_dn, self._user_pw)
    return self._conn

  def __exit__(self, exc_type, value, tb):
    self._conn.unbind_s()


class LDAPUsers(object):
  def __init__(self, ldap_uri, base_dn, admin_dn, admin_passwd, user_rdn, uid_attr, email_attr):
    self._ldap_conn = LDAPConnection(ldap_uri, admin_dn, admin_passwd)
    self._ldap_uri = ldap_uri
    self._base_dn = base_dn
    self._user_rdn = user_rdn
    self._uid_attr = uid_attr
    self._email_attr = email_attr

  def _ldap_user_search(self, username_or_email):
    with self._ldap_conn as conn:
      logger.debug('Incoming username or email param: %s', username_or_email.__repr__())
      user_search_dn = ','.join(self._user_rdn + self._base_dn)
      query = u'(|({0}={2})({1}={2}))'.format(self._uid_attr, self._email_attr,
                                              username_or_email)
      user = conn.search_s(user_search_dn, ldap.SCOPE_SUBTREE, query.encode('utf-8'))

      if len(user) != 1:
        return None

      return user[0]

  def verify_user(self, username_or_email, password):
    """ Verify the credentials with LDAP and if they are valid, create or update the user
        in our database. """

    # Make sure that even if the server supports anonymous binds, we don't allow it
    if not password:
      return None

    found_user = self._ldap_user_search(username_or_email)

    if found_user is None:
      return None

    found_dn, found_response = found_user

    # First validate the password by binding as the user
    try:
      with LDAPConnection(self._ldap_uri, found_dn, password.encode('utf-8')):
        pass
    except ldap.INVALID_CREDENTIALS:
      return None

    # Now check if we have a federated login for this user
    username = found_response[self._uid_attr][0].decode('utf-8')
    email = found_response[self._email_attr][0]
    db_user = model.verify_federated_login('ldap', username)

    if not db_user:
      # We must create the user in our db
      valid_username = None
      for valid_username in generate_valid_usernames(username):
        if model.is_username_unique(valid_username):
          break

      if not valid_username:
        logger.error('Unable to pick a username for user: %s', username)
        return None

      db_user = model.create_federated_user(valid_username, email, 'ldap', username,
                                            set_password_notification=False)
    else:
      # Update the db attributes from ldap
      db_user.email = email
      db_user.save()

    return db_user

  def user_exists(self, username):
    found_user = self._ldap_user_search(username)
    return found_user is not None


class UserAuthentication(object):
  def __init__(self, app=None):
    self.app = app
    if app is not None:
      self.state = self.init_app(app)
    else:
      self.state = None

  def init_app(self, app):
    authentication_type = app.config.get('AUTHENTICATION_TYPE', 'Database')

    if authentication_type == 'Database':
      users = DatabaseUsers()
    elif authentication_type == 'LDAP':
      ldap_uri = app.config.get('LDAP_URI', 'ldap://localhost')
      base_dn = app.config.get('LDAP_BASE_DN')
      admin_dn = app.config.get('LDAP_ADMIN_DN')
      admin_passwd = app.config.get('LDAP_ADMIN_PASSWD')
      user_rdn = app.config.get('LDAP_USER_RDN', [])
      uid_attr = app.config.get('LDAP_UID_ATTR', 'uid')
      email_attr = app.config.get('LDAP_EMAIL_ATTR', 'mail')

      users = LDAPUsers(ldap_uri, base_dn, admin_dn, admin_passwd, user_rdn, uid_attr, email_attr)

    else:
      raise RuntimeError('Unknown authentication type: %s' % authentication_type)

    # register extension with app
    app.extensions = getattr(app, 'extensions', {})
    app.extensions['authentication'] = users
    return users

  def __getattr__(self, name):
    return getattr(self.state, name, None)
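Switching a deployment to LDAP is then a matter of configuration. A hedged example of the settings init_app reads above; every value here is a placeholder, and note that the DN-valued settings are lists of RDN pieces that LDAPUsers joins with ',':

# Example stack configuration (placeholder values only).
AUTHENTICATION_TYPE = 'LDAP'
LDAP_URI = 'ldap://ldap.example.com'
LDAP_BASE_DN = ['dc=example', 'dc=com']
LDAP_USER_RDN = ['ou=employees']   # search base: ou=employees,dc=example,dc=com
LDAP_ADMIN_DN = 'cn=admin,dc=example,dc=com'
LDAP_ADMIN_PASSWD = 'changeme'
LDAP_UID_ATTR = 'uid'              # attribute matched against usernames
LDAP_EMAIL_ATTR = 'mail'           # attribute matched against emails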
@ -66,6 +66,10 @@ class Unauthorized(ApiException):
    ApiException.__init__(self, 'insufficient_scope', 403, 'Unauthorized', payload)


class ExceedsLicenseException(ApiException):
  def __init__(self, payload=None):
    ApiException.__init__(self, None, 402, 'Payment Required', payload)


class NotFound(ApiException):
  def __init__(self, payload=None):

@ -275,6 +279,10 @@ def request_error(exception=None, **kwargs):
  raise InvalidRequest(message, data)


def license_error(exception=None):
  raise ExceedsLicenseException()


def log_action(kind, user_or_orgname, metadata=None, repo=None):
  if not metadata:
    metadata = {}
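license_error gives endpoints a one-liner for translating the model's seat-limit failure into an HTTP 402, which the frontend (see the overlicenseModal handling later in this diff) treats specially. A sketch of the call pattern, mirroring how the endpoints in this diff use it:

# Illustrative endpoint body: convert the data-model exception into
# the 402 ApiException that the UI knows how to display.
try:
  new_user = model.create_user(username, password, email)
except model.TooManyUsersException as ex:
  raise license_error(exception=ex)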
@ -36,7 +36,9 @@ def get_card(user):
    card_info = {
      'owner': default_card.name,
      'type': default_card.type,
      'last4': default_card.last4
      'last4': default_card.last4,
      'exp_month': default_card.exp_month,
      'exp_year': default_card.exp_year
    }

  return {'card': card_info}

@ -54,7 +54,7 @@ class SeatUsage(ApiResource):
    if SuperUserPermission().can():
      return {
        'count': model.get_active_user_count(),
        'allowed': app.config.get('LICENSE_SEAT_COUNT', 0)
        'allowed': app.config.get('LICENSE_USER_LIMIT', 0)
      }

    abort(403)
@ -5,10 +5,10 @@ from flask import request
from flask.ext.login import logout_user
from flask.ext.principal import identity_changed, AnonymousIdentity

from app import app, billing as stripe
from app import app, billing as stripe, authentication
from endpoints.api import (ApiResource, nickname, resource, validate_json_request, request_error,
                           log_action, internal_only, NotFound, require_user_admin,
                           InvalidToken, require_scope, format_date, hide_if, show_if)
                           InvalidToken, require_scope, format_date, hide_if, show_if, license_error)
from endpoints.api.subscribe import subscribe
from endpoints.common import common_login
from data import model

@ -18,8 +18,7 @@ from auth.permissions import (AdministerOrganizationPermission, CreateRepository
from auth.auth_context import get_authenticated_user
from auth import scopes
from util.gravatar import compute_hash
from util.email import (send_confirmation_email, send_recovery_email,
                        send_change_email)
from util.useremails import (send_confirmation_email, send_recovery_email, send_change_email)

import features

@ -193,6 +192,8 @@ class User(ApiResource):
      code = model.create_confirm_email_code(new_user)
      send_confirmation_email(new_user.username, new_user.email, code.code)
      return 'Created', 201
    except model.TooManyUsersException as ex:
      raise license_error(exception=ex)
    except model.DataModelException as ex:
      raise request_error(exception=ex)

@ -227,7 +228,12 @@ def conduct_signin(username_or_email, password):
  needs_email_verification = False
  invalid_credentials = False

  verified = model.verify_user(username_or_email, password)
  verified = None
  try:
    verified = authentication.verify_user(username_or_email, password)
  except model.TooManyUsersException as ex:
    raise license_error(exception=ex)

  if verified:
    if common_login(verified):
      return {'success': True}

@ -245,6 +251,7 @@ def conduct_signin(username_or_email, password):

@resource('/v1/user/convert')
@internal_only
@show_if(app.config['AUTHENTICATION_TYPE'] == 'Database')
class ConvertToOrganization(ApiResource):
  """ Operations for converting a user to an organization. """
  schemas = {

@ -289,7 +296,7 @@ class ConvertToOrganization(ApiResource):

    # Ensure that the sign in credentials work.
    admin_password = convert_data['adminPassword']
    if not model.verify_user(admin_username, admin_password):
    if not authentication.verify_user(admin_username, admin_password):
      raise request_error(reason='invaliduser',
                          message='The admin user credentials are not valid')
@ -35,7 +35,11 @@ def exchange_github_code_for_token(code, for_login=True):
  get_access_token = client.post(app.config['GITHUB_TOKEN_URL'],
                                 params=payload, headers=headers)

  token = get_access_token.json()['access_token']
  json_data = get_access_token.json()
  if not json_data:
    return ''

  token = json_data.get('access_token', '')
  return token


@ -83,7 +87,7 @@ def github_oauth_callback():
  # try to create the user
  try:
    to_login = model.create_federated_user(username, found_email, 'github',
                                           github_id)
                                           github_id, set_password_notification=True)

    # Success, tell analytics
    analytics.track(to_login.username, 'register', {'service': 'github'})

@ -115,6 +119,7 @@ def github_oauth_attach():

@callback.route('/github/callback/trigger/<path:repository>', methods=['GET'])
@route_show_if(features.GITHUB_BUILD)
@require_session_login
@parse_repository_name
def attach_github_build_trigger(namespace, repository):
@ -9,14 +9,14 @@ from flask.ext.principal import identity_changed
from random import SystemRandom

from data import model
from data.queue import dockerfile_build_queue
from app import app, login_manager
from app import app, login_manager, dockerfile_build_queue
from auth.permissions import QuayDeferredPermissionUser
from auth import scopes
from endpoints.api.discovery import swagger_route_data
from werkzeug.routing import BaseConverter
from functools import wraps
from config import getFrontendVisibleConfig
from external_libraries import get_external_javascript, get_external_css

import features

@ -147,7 +147,12 @@ def render_page_template(name, **kwargs):
  main_scripts = ['dist/quay-frontend.min.js']
  cache_buster = random_string()

  external_styles = get_external_css(local=not app.config.get('USE_CDN', True))
  external_scripts = get_external_javascript(local=not app.config.get('USE_CDN', True))

  resp = make_response(render_template(name, route_data=json.dumps(get_route_data()),
                                       external_styles=external_styles,
                                       external_scripts=external_scripts,
                                       main_styles=main_styles,
                                       library_styles=library_styles,
                                       main_scripts=main_scripts,

@ -159,6 +164,7 @@ def render_page_template(name, **kwargs):
                                       is_debug=str(app.config.get('DEBUGGING', False)).lower(),
                                       show_chat=features.OLARK_CHAT,
                                       cache_buster=cache_buster,
                                       has_billing=features.BILLING,
                                       **kwargs))

  resp.headers['X-FRAME-OPTIONS'] = 'DENY'
@ -8,12 +8,11 @@ from collections import OrderedDict

from data import model
from data.model import oauth
from data.queue import webhook_queue
from app import analytics, app
from app import analytics, app, webhook_queue, authentication, userevents
from auth.auth import process_auth
from auth.auth_context import get_authenticated_user, get_validated_token, get_validated_oauth_token
from util.names import parse_repository_name
from util.email import send_confirmation_email
from util.useremails import send_confirmation_email
from auth.permissions import (ModifyRepositoryPermission, UserAdminPermission,
                              ReadRepositoryPermission, CreateRepositoryPermission)

@ -95,18 +94,17 @@ def create_user():
      abort(400, 'Invalid robot account or password.',
            issue='robot-login-failure')

  existing_user = model.get_user(username)
  if existing_user:
    verified = model.verify_user(username, password)
  if authentication.user_exists(username):
    verified = authentication.verify_user(username, password)
    if verified:
      # Mark that the user was logged in.
      event = app.config['USER_EVENTS'].get_event(username)
      event = userevents.get_event(username)
      event.publish_event_data('docker-cli', {'action': 'login'})

      return success
    else:
      # Mark that the login failed.
      event = app.config['USER_EVENTS'].get_event(username)
      event = userevents.get_event(username)
      event.publish_event_data('docker-cli', {'action': 'loginfailure'})

      abort(400, 'Invalid password.', issue='login-failure')

@ -114,7 +112,12 @@ def create_user():
  else:
    # New user case
    profile.debug('Creating user')
    new_user = model.create_user(username, password, user_data['email'])
    new_user = None

    try:
      new_user = model.create_user(username, password, user_data['email'])
    except model.TooManyUsersException as ex:
      abort(402, 'Seat limit has been reached for this license', issue='seat-limit')

    profile.debug('Creating email code for user')
    code = model.create_confirm_email_code(new_user)

@ -260,7 +263,7 @@ def create_repository(namespace, repository):
        'namespace': namespace
      }

      event = app.config['USER_EVENTS'].get_event(username)
      event = userevents.get_event(username)
      event.publish_event_data('docker-cli', user_data)

    elif get_validated_token():

@ -308,7 +311,7 @@ def update_images(namespace, repository):
        'namespace': namespace
      }

      event = app.config['USER_EVENTS'].get_event(username)
      event = userevents.get_event(username)
      event.publish_event_data('docker-cli', user_data)

    profile.debug('GCing repository')
@ -3,8 +3,8 @@ import json

from flask import request, Blueprint, abort, Response
from flask.ext.login import current_user
from data import userevent
from auth.auth import require_session_login
from app import userevents

logger = logging.getLogger(__name__)

@ -41,7 +41,7 @@ def index():
@realtime.route("/user/test")
@require_session_login
def user_test():
  evt = userevent.UserEvent('logs.quay.io', current_user.db_user().username)
  evt = userevents.get_event(current_user.db_user().username)
  evt.publish_event_data('test', {'foo': 2})
  return 'OK'

@ -58,5 +58,5 @@ def user_subscribe():
  if not events:
    abort(404)

  listener = userevent.UserEventListener('logs.quay.io', current_user.db_user().username, events)
  listener = userevents.get_listener(current_user.db_user().username, events)
  return Response(wrapper(listener), mimetype="text/event-stream")
@ -7,9 +7,7 @@ from functools import wraps
from datetime import datetime
from time import time

from data.queue import image_diff_queue

from app import storage as store
from app import storage as store, image_diff_queue
from auth.auth import process_auth, extract_namespace_repo_from_session
from util import checksums, changes
from util.http import abort

@ -8,7 +8,7 @@ from data import model
from auth.auth import process_auth
from auth.permissions import ModifyRepositoryPermission
from util.invoice import renderInvoiceToHtml
from util.email import send_invoice_email, send_subscription_change, send_payment_failed
from util.useremails import send_invoice_email, send_subscription_change, send_payment_failed
from util.names import parse_repository_name
from util.http import abort
from endpoints.trigger import BuildTrigger, ValidationRequestException, SkipRequestException

77 external_libraries.py Normal file
@ -0,0 +1,77 @@
import urllib2
import re
import os

LOCAL_DIRECTORY = 'static/ldn/'

EXTERNAL_JS = [
  'code.jquery.com/jquery.js',
  'netdna.bootstrapcdn.com/bootstrap/3.0.0/js/bootstrap.min.js',
  'ajax.googleapis.com/ajax/libs/angularjs/1.2.9/angular.min.js',
  'ajax.googleapis.com/ajax/libs/angularjs/1.2.9/angular-route.min.js',
  'ajax.googleapis.com/ajax/libs/angularjs/1.2.9/angular-sanitize.min.js',
  'ajax.googleapis.com/ajax/libs/angularjs/1.2.9/angular-animate.min.js',
  'cdnjs.cloudflare.com/ajax/libs/bootstrap-datepicker/1.2.0/js/bootstrap-datepicker.min.js',
  'cdn.jsdelivr.net/g/bootbox@4.1.0,underscorejs@1.5.2,restangular@1.2.0,d3js@3.3.3,momentjs',
  'cdn.ravenjs.com/1.1.14/jquery,native/raven.min.js',
  'checkout.stripe.com/checkout.js',
]

EXTERNAL_CSS = [
  'netdna.bootstrapcdn.com/font-awesome/4.0.3/css/font-awesome.css',
  'netdna.bootstrapcdn.com/bootstrap/3.0.0/css/bootstrap.no-icons.min.css',
  'fonts.googleapis.com/css?family=Droid+Sans:400,700',
]

EXTERNAL_FONTS = [
  'netdna.bootstrapcdn.com/font-awesome/4.0.3/fonts/fontawesome-webfont.woff?v=4.0.3',
  'netdna.bootstrapcdn.com/font-awesome/4.0.3/fonts/fontawesome-webfont.ttf?v=4.0.3',
  'netdna.bootstrapcdn.com/font-awesome/4.0.3/fonts/fontawesome-webfont.svg?v=4.0.3',
]


def get_external_javascript(local=False):
  if local:
    return [LOCAL_DIRECTORY + format_local_name(src) for src in EXTERNAL_JS]

  return ['//' + src for src in EXTERNAL_JS]


def get_external_css(local=False):
  if local:
    return [LOCAL_DIRECTORY + format_local_name(src) for src in EXTERNAL_CSS]

  return ['//' + src for src in EXTERNAL_CSS]


def format_local_name(url):
  filename = url.split('/')[-1]
  filename = re.sub(r'[+,?@=:]', '', filename)
  if not filename.endswith('.css') and not filename.endswith('.js'):
    if filename.find('css') >= 0:
      filename = filename + '.css'
    else:
      filename = filename + '.js'

  return filename


if __name__ == '__main__':
  for url in EXTERNAL_JS + EXTERNAL_CSS:
    print 'Downloading %s' % url
    response = urllib2.urlopen('https://' + url)
    contents = response.read()

    filename = format_local_name(url)
    print 'Writing %s' % filename
    with open(LOCAL_DIRECTORY + filename, 'w') as f:
      f.write(contents)

  for url in EXTERNAL_FONTS:
    print 'Downloading %s' % url
    response = urllib2.urlopen('https://' + url)

    filename = os.path.basename(url).split('?')[0]
    with open('static/fonts/' + filename, "wb") as local_file:
      local_file.write(response.read())
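Run as a module, this script mirrors every CDN asset into static/ldn/ and the fonts into static/fonts/, so installs behind a firewall can serve them with USE_CDN disabled. A presumed preparation step, since the script opens output files without creating their directories:

import os

# The script assumes both target directories already exist.
for d in ('static/ldn', 'static/fonts'):
  if not os.path.isdir(d):
    os.makedirs(d)

# Then, from the repository root, the presumed invocation is:
#   python -m external_libraries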
@ -148,7 +148,7 @@ def setup_database_for_testing(testcase):

  # Sanity check to make sure we're not killing our prod db
  db = model.db
  if not isinstance(model.db, SqliteDatabase):
  if not isinstance(model.db.obj, SqliteDatabase):
    raise RuntimeError('Attempted to wipe production database!')

  global db_initialized_for_testing

@ -181,6 +181,7 @@ def initialize_database():
  Visibility.create(name='private')
  LoginService.create(name='github')
  LoginService.create(name='quayrobot')
  LoginService.create(name='ldap')

  BuildTriggerService.create(name='github')

@ -232,6 +233,7 @@ def initialize_database():

  NotificationKind.create(name='password_required')
  NotificationKind.create(name='over_private_usage')
  NotificationKind.create(name='expiring_license')

  NotificationKind.create(name='test_notification')

@ -241,7 +243,7 @@ def wipe_database():

  # Sanity check to make sure we're not killing our prod db
  db = model.db
  if not isinstance(model.db, SqliteDatabase):
  if not isinstance(model.db.obj, SqliteDatabase):
    raise RuntimeError('Attempted to wipe production database!')

  drop_model_tables(all_models, fail_silently=True)
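The isinstance sanity checks now look at model.db.obj rather than model.db, presumably because the database handle is wrapped (e.g. in a peewee Proxy) so it can be bound late; the wrapper itself is never a SqliteDatabase. The idea in isolation, a sketch under that assumption:

from peewee import Proxy, SqliteDatabase

db = Proxy()                      # stand-in until the app configures it
db.initialize(SqliteDatabase(':memory:'))

assert not isinstance(db, SqliteDatabase)    # the wrapper never matches
assert isinstance(db.obj, SqliteDatabase)    # the wrapped handle does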
13 license.py Normal file

@ -0,0 +1,13 @@
import pickle

from Crypto.PublicKey import RSA

n = 24311791124264168943780535074639421876317270880681911499019414944027362498498429776192966738844514582251884695124256895677070273097239290537016363098432785034818859765271229653729724078304186025013011992335454557504431888746007324285000011384941749613875855493086506022340155196030616409545906383713728780211095701026770053812741971198465120292345817928060114890913931047021503727972067476586739126160044293621653486418983183727572502888923949587290840425930251185737996066354726953382305020440374552871209809125535533731995494145421279907938079885061852265339259634996180877443852561265066616143910755505151318370667L
e = 65537L

def load_license(license_path):
  decryptor = RSA.construct((n, e))
  with open(license_path, 'rb') as encrypted_license:
    decrypted_data = decryptor.encrypt(encrypted_license.read(), 0)

  return pickle.loads(decrypted_data[0])
BIN license.pyc Normal file
Binary file not shown.

@ -32,3 +32,6 @@ python-magic
reportlab==2.7
blinker
raven
python-ldap
unidecode
pycrypto

@ -12,6 +12,7 @@ PyGithub==1.24.1
PyMySQL==0.6.2
PyPDF2==1.21
SQLAlchemy==0.9.4
Unidecode==0.04.16
Werkzeug==0.9.4
alembic==0.6.4
aniso8601==0.82

@ -40,6 +41,7 @@ pycrypto==2.6.1
python-daemon==1.6
python-dateutil==2.2
python-digitalocean==0.7
python-ldap==2.4.15
python-magic==0.4.6
pytz==2014.2
raven==4.2.1
@ -500,6 +500,18 @@ i.toggle-icon:hover {
  color: black;
}

.billing-options-element .current-card .expires:before {
  content: "Expires:";
  color: #aaa;
  font-size: 12px;
}

.billing-options-element .current-card .expires {
  margin-left: 20px;
  font-size: 12px;
}

.billing-options-element .current-card img {
  margin-right: 10px;
  vertical-align: middle;

@ -7,10 +7,18 @@
  <div class="panel-body">
    <div class="quay-spinner" ng-show="!currentCard || changingCard"></div>
    <div class="current-card" ng-show="currentCard && !changingCard">
      <div class="alert alert-warning" ng-if="currentCard.last4 && isExpiringSoon(currentCard)">
        Your current credit card is expiring soon!
      </div>

      <img ng-src="{{ '/static/img/creditcards/' + getCreditImage(currentCard) }}" ng-show="currentCard.last4">
      <span class="no-card-outline" ng-show="!currentCard.last4"></span>

      <span class="last4" ng-show="currentCard.last4">****-****-****-<b>{{ currentCard.last4 }}</b></span>
      <span class="expires" ng-show="currentCard.last4">
        {{ currentCard.exp_month }} / {{ currentCard.exp_year }}
      </span>

      <span class="not-found" ng-show="!currentCard.last4">No credit card found</span>
    </div>
@ -29,7 +29,7 @@
        </form>

        <span class="navbar-left user-tools" ng-show="!user.anonymous">
          <a href="/new/"><i class="fa fa-upload user-tool" bs-tooltip="tooltip.title" data-placement="bottom" data-title="Create new repository"></i></a>
          <a href="/new/"><i class="fa fa-upload user-tool" bs-tooltip="tooltip.title" data-placement="bottom" data-title="Create new repository" data-container="body"></i></a>
        </span>
      </li>

@ -216,7 +216,7 @@
      Create New Organization
    </button>
  </a>
  <a href="/user/?migrate" ng-show="!user.anonymous" data-title="Starts the process to convert this account into an organization" bs-tooltip="tooltip.title">
  <a href="/user/?migrate" data-title="Starts the process to convert this account into an organization" quay-show="Config.AUTHENTICATION_TYPE == 'Database' && !user.anonymous" bs-tooltip="tooltip.title">
    <button class="btn btn-primary">
      Convert account now
    </button>
@ -913,6 +913,12 @@ quayApp = angular.module('quay', quayDependencies, function($provide, cfpLoading
            return '/user';
          }
        }
      },
      'expiring_license': {
        'level': 'error',
        'message': 'Your license will expire at: {expires_at} ' +
                   '<br><br>Please contact Quay.io support to purchase a new license.',
        'page': '/contact/'
      }
    };

@ -3333,6 +3339,13 @@ quayApp.directive('billingOptions', function () {
          PlanService.unregisterListener(this);
        });

        $scope.isExpiringSoon = function(cardInfo) {
          var current = new Date();
          var expires = new Date(cardInfo.exp_year, cardInfo.exp_month, 1);
          var difference = expires - current;
          return difference < (60 * 60 * 24 * 60 * 1000 /* 60 days */);
        };

        $scope.changeCard = function() {
          var previousCard = $scope.currentCard;
          $scope.changingCard = true;

@ -4815,8 +4828,8 @@ quayApp.directive('ngVisible', function () {
  };
});

quayApp.run(['$location', '$rootScope', 'Restangular', 'UserService', 'PlanService', '$http', '$timeout', 'CookieService', '$anchorScroll',
  function($location, $rootScope, Restangular, UserService, PlanService, $http, $timeout, CookieService, $anchorScroll) {
quayApp.run(['$location', '$rootScope', 'Restangular', 'UserService', 'PlanService', '$http', '$timeout', 'CookieService', 'Features', '$anchorScroll',
  function($location, $rootScope, Restangular, UserService, PlanService, $http, $timeout, CookieService, Features, $anchorScroll) {

    // Handle session security.
    Restangular.setDefaultRequestParams(['post', 'put', 'remove', 'delete'], {'_csrf_token': window.__token || ''});

@ -4830,6 +4843,11 @@ quayApp.run(['$location', '$rootScope', 'Restangular', 'UserService', 'PlanServi
        }
      }

      if (!Features.BILLING && response.status == 402) {
        $('#overlicenseModal').modal({});
        return false;
      }

      if (response.status == 500) {
        document.location = '/500';
        return false;
@ -1235,10 +1235,11 @@ function RepoBuildCtrl($scope, Restangular, ApiService, $routeParams, $rootScope
    fetchRepository();
}

function RepoAdminCtrl($scope, Restangular, ApiService, KeyService, $routeParams, $rootScope, $location, UserService, Config) {
function RepoAdminCtrl($scope, Restangular, ApiService, KeyService, $routeParams, $rootScope, $location, UserService, Config, Features) {
  var namespace = $routeParams.namespace;
  var name = $routeParams.name;

  $scope.Features = Features;
  $scope.permissions = {'team': [], 'user': []};
  $scope.logsShown = 0;
  $scope.deleting = false;

@ -1644,7 +1645,7 @@ function RepoAdminCtrl($scope, Restangular, ApiService, KeyService, $routeParams
}

function UserAdminCtrl($scope, $timeout, $location, ApiService, PlanService, UserService, CookieService, KeyService,
                       $routeParams, $http, UIService, Features) {
                       $routeParams, $http, UIService, Features, Config) {
  $scope.Features = Features;

  if ($routeParams['migrate']) {

@ -1652,11 +1653,9 @@ function UserAdminCtrl($scope, $timeout, $location, ApiService, PlanService, Use
  }

  UserService.updateUserIn($scope, function(user) {
    if (!Features.GITHUB_LOGIN) { return; }

    $scope.cuser = jQuery.extend({}, user);

    if ($scope.cuser.logins) {
    if (Features.GITHUB_LOGIN && $scope.cuser.logins) {
      for (var i = 0; i < $scope.cuser.logins.length; i++) {
        if ($scope.cuser.logins[i].service == 'github') {
          var githubId = $scope.cuser.logins[i].service_identifier;

@ -1749,6 +1748,8 @@ function UserAdminCtrl($scope, $timeout, $location, ApiService, PlanService, Use
  };

  $scope.reallyConvert = function() {
    if (Config.AUTHENTICATION_TYPE != 'Database') { return; }

    $scope.loading = true;

    var data = {

@ -1952,6 +1953,7 @@ function V1Ctrl($scope, $location, UserService) {
function NewRepoCtrl($scope, $location, $http, $timeout, UserService, ApiService, PlanService, KeyService, Features) {
  UserService.updateUserIn($scope);

  $scope.Features = Features;
  $scope.githubRedirectUri = KeyService.githubRedirectUri;
  $scope.githubClientId = KeyService.githubClientId;
@ -139,6 +139,7 @@ ImageHistoryTree.prototype.setupOverscroll_ = function() {
ImageHistoryTree.prototype.updateDimensions_ = function() {
  var container = this.container_;
  var dimensions = this.calculateDimensions_(container);
  if (!dimensions) { return; }

  var m = dimensions.m;
  var w = dimensions.w;

@ -909,6 +910,10 @@ function FileTreeBase() {
 */
FileTreeBase.prototype.calculateDimensions_ = function(container) {
  var containerElm = document.getElementById(container);
  if (!containerElm) {
    return null;
  }

  var cw = containerElm ? containerElm.clientWidth : 1200;
  var barHeight = 20;
  var ch = (this.getNodesHeight() * barHeight) + 40;

@ -940,6 +945,7 @@ FileTreeBase.prototype.updateDimensions_ = function() {

  var container = this.container_;
  var dimensions = this.calculateDimensions_(container);
  if (!dimensions) { return; }

  var w = dimensions.w;
  var h = dimensions.h;

@ -989,6 +995,7 @@ FileTreeBase.prototype.draw = function(container) {
  this.container_ = container;

  var dimensions = this.calculateDimensions_(container);
  if (!dimensions) { return; }

  var w = dimensions.w;
  var h = dimensions.h;

@ -1107,6 +1114,8 @@ FileTreeBase.prototype.getVisibleCount_ = function(node) {
 */
FileTreeBase.prototype.getContainerHeight_ = function() {
  var dimensions = this.calculateDimensions_(this.container_);
  if (!dimensions) { return; }

  var barHeight = this.barHeight_;
  var height = (this.getVisibleCount_(this.root_) * (barHeight + 2));
  return height + dimensions.m[0] + dimensions.m[2];
@ -126,7 +126,7 @@
        </div>

        <!-- Github -->
        <div class="repo-option">
        <div class="repo-option" ng-show="Features.GITHUB_BUILD">
          <input type="radio" id="initGithub" name="initialize" ng-model="repo.initialize" value="github">
          <i class="fa fa-github fa-lg" style="padding: 6px; padding-left: 10px; padding-right: 12px;"></i>
          <label for="initGithub">Link to a GitHub Repository</label>

@ -10,7 +10,7 @@
      Create New Organization
    </button>
  </a>
  <a href="/user/?migrate" ng-show="!user.anonymous" data-title="Starts the process to convert this account into an organization" bs-tooltip="tooltip.title">
  <a href="/user/?migrate" data-title="Starts the process to convert this account into an organization" bs-tooltip="tooltip.title" quay-show="Config.AUTHENTICATION_TYPE == 'Database' && !user.anonymous">
    <button class="btn btn-primary">
      <i class="fa fa-caret-square-o-right"></i>
      Convert account

@ -315,8 +315,9 @@

  <!-- Right controls -->
  <div class="right-controls">
    <span ng-show="!Features.GITHUB_BUILD" class="pull-left">No build trigger types enabled.</span>
    <div class="dropdown">
      <button class="btn btn-primary dropdown-toggle" data-toggle="dropdown">
      <button class="btn btn-primary dropdown-toggle" data-toggle="dropdown" ng-disabled="!Features.GITHUB_BUILD">
        New Trigger
        <b class="caret"></b>
      </button>

@ -38,7 +38,9 @@
  <li quay-show="Features.USER_LOG_ACCESS || hasPaidBusinessPlan">
    <a href="javascript:void(0)" data-toggle="tab" data-target="#logs" ng-click="loadLogs()">Usage Logs</a>
  </li>
  <li><a href="javascript:void(0)" data-toggle="tab" data-target="#migrate" id="migrateTab">Convert to Organization</a></li>
  <li quay-show="Config.AUTHENTICATION_TYPE == 'Database'">
    <a href="javascript:void(0)" data-toggle="tab" data-target="#migrate" id="migrateTab">Convert to Organization</a>
  </li>
</ul>
</div>

@ -197,7 +199,7 @@
  </div>

  <!-- Convert to organization tab -->
  <div id="migrate" class="tab-pane">
  <div id="migrate" class="tab-pane" quay-show="Config.AUTHENTICATION_TYPE == 'Database'">
    <!-- Step 0 -->
    <div class="panel" ng-show="convertStep == 0">
      <div class="panel-body" ng-show="user.organizations.length > 0">
@ -11,9 +11,9 @@

    <meta name="viewport" content="width=device-width, initial-scale=1.0">

    <link rel="stylesheet" href="//netdna.bootstrapcdn.com/font-awesome/4.0.3/css/font-awesome.css">
    <link rel="stylesheet" href="//netdna.bootstrapcdn.com/bootstrap/3.0.0/css/bootstrap.no-icons.min.css">
    <link href='//fonts.googleapis.com/css?family=Droid+Sans:400,700' rel='stylesheet' type='text/css'>
    {% for style_url in external_styles %}
      <link rel="stylesheet" href="{{ style_url }}" type="text/css">
    {% endfor %}

    <!-- Icons -->
    <link rel="shortcut icon" href="/static/img/favicon.ico" type="image/x-icon" />

@ -47,20 +47,9 @@
      window.__token = '{{ csrf_token() }}';
    </script>

    <script src="//code.jquery.com/jquery.js"></script>
    <script src="//netdna.bootstrapcdn.com/bootstrap/3.0.0/js/bootstrap.min.js"></script>

    <script src="//ajax.googleapis.com/ajax/libs/angularjs/1.2.9/angular.min.js"></script>
    <script src="//ajax.googleapis.com/ajax/libs/angularjs/1.2.9/angular-route.min.js"></script>
    <script src="//ajax.googleapis.com/ajax/libs/angularjs/1.2.9/angular-sanitize.min.js"></script>
    <script src="//ajax.googleapis.com/ajax/libs/angularjs/1.2.9/angular-animate.min.js"></script>

    <script src="//cdnjs.cloudflare.com/ajax/libs/bootstrap-datepicker/1.2.0/js/bootstrap-datepicker.min.js"></script>

    <script src="//cdn.jsdelivr.net/g/bootbox@4.1.0,underscorejs@1.5.2,restangular@1.2.0,d3js@3.3.3,momentjs"></script>
    <script src="//cdn.ravenjs.com/1.1.14/jquery,native/raven.min.js"></script>

    <script src="https://checkout.stripe.com/checkout.js"></script>
    {% for script_url in external_scripts %}
      <script src="{{ script_url }}"></script>
    {% endfor %}

    {% for script_path in library_scripts %}
      <script src="/static/{{ script_path }}?v={{ cache_buster }}"></script>

@ -35,4 +35,23 @@
  </div><!-- /.modal-dialog -->
</div><!-- /.modal -->

{% if not has_billing %}
<!-- Modal message dialog -->
<div class="modal fade" id="overlicenseModal" data-backdrop="static">
  <div class="modal-dialog">
    <div class="modal-content">
      <div class="modal-header">
        <h4 class="modal-title">Cannot create user</h4>
      </div>
      <div class="modal-body">
        A new user cannot be created as this organization has reached its licensed seat count. Please contact your administrator.
      </div>
      <div class="modal-footer">
        <a href="javascript:void(0)" class="btn btn-primary" data-dismiss="modal" onclick="location = '/signin'">Sign In</a>
      </div>
    </div><!-- /.modal-content -->
  </div><!-- /.modal-dialog -->
</div><!-- /.modal -->
{% endif %}

{% endblock %}

Binary file not shown.
@ -36,6 +36,9 @@ from endpoints.api.repository import RepositoryList, RepositoryVisibility, Repos
|
|||
from endpoints.api.permission import (RepositoryUserPermission, RepositoryTeamPermission,
|
||||
RepositoryTeamPermissionList, RepositoryUserPermissionList)
|
||||
|
||||
from endpoints.api.superuser import SuperUserLogs, SeatUsage, SuperUserList, SuperUserManagement
|
||||
|
||||
|
||||
try:
|
||||
app.register_blueprint(api_bp, url_prefix='/api')
|
||||
except ValueError:
|
||||
|
@@ -3275,5 +3278,87 @@ class TestUserAuthorization(ApiTestCase):
    self._run_test('DELETE', 404, 'devtable', None)


class TestSuperUserLogs(ApiTestCase):
  def setUp(self):
    ApiTestCase.setUp(self)
    self._set_url(SuperUserLogs)

  def test_get_anonymous(self):
    self._run_test('GET', 403, None, None)

  def test_get_freshuser(self):
    self._run_test('GET', 403, 'freshuser', None)

  def test_get_reader(self):
    self._run_test('GET', 403, 'reader', None)

  def test_get_devtable(self):
    self._run_test('GET', 200, 'devtable', None)


class TestSuperUserList(ApiTestCase):
  def setUp(self):
    ApiTestCase.setUp(self)
    self._set_url(SuperUserList)

  def test_get_anonymous(self):
    self._run_test('GET', 403, None, None)

  def test_get_freshuser(self):
    self._run_test('GET', 403, 'freshuser', None)

  def test_get_reader(self):
    self._run_test('GET', 403, 'reader', None)

  def test_get_devtable(self):
    self._run_test('GET', 200, 'devtable', None)


class TestSuperUserManagement(ApiTestCase):
  def setUp(self):
    ApiTestCase.setUp(self)
    self._set_url(SuperUserManagement, username='freshuser')

  def test_get_anonymous(self):
    self._run_test('GET', 403, None, None)

  def test_get_freshuser(self):
    self._run_test('GET', 403, 'freshuser', None)

  def test_get_reader(self):
    self._run_test('GET', 403, 'reader', None)

  def test_get_devtable(self):
    self._run_test('GET', 200, 'devtable', None)

  def test_put_anonymous(self):
    self._run_test('PUT', 403, None, {})

  def test_put_freshuser(self):
    self._run_test('PUT', 403, 'freshuser', {})

  def test_put_reader(self):
    self._run_test('PUT', 403, 'reader', {})

  def test_put_devtable(self):
    self._run_test('PUT', 200, 'devtable', {})

  def test_delete_anonymous(self):
    self._run_test('DELETE', 403, None, None)

  def test_delete_freshuser(self):
    self._run_test('DELETE', 403, 'freshuser', None)

  def test_delete_reader(self):
    self._run_test('DELETE', 403, 'reader', None)

  def test_delete_devtable(self):
    self._run_test('DELETE', 204, 'devtable', None)


if __name__ == '__main__':
  unittest.main()
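Each new superuser endpoint gets the same four-caller matrix above (anonymous, freshuser, reader, devtable). For reference, a hedged sketch of how that matrix could be generated instead of hand-written; `add_security_tests` is hypothetical and not part of this change:

```python
# Hypothetical helper, not in the Quay codebase: attaches the standard
# anonymous/freshuser/reader/devtable permission tests to a TestCase class
# that defines _run_test(), mirroring the hand-written classes above.
SECURITY_MATRIX = [(None, 403), ('freshuser', 403), ('reader', 403), ('devtable', 200)]

def add_security_tests(cls, method, body=None):
  for user, expected in SECURITY_MATRIX:
    def test(self, method=method, expected=expected, user=user, body=body):
      self._run_test(method, expected, user, body)
    setattr(cls, 'test_%s_%s' % (method.lower(), user or 'anonymous'), test)

# e.g. add_security_tests(TestSuperUserLogs, 'GET')
```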
@@ -38,6 +38,7 @@ from endpoints.api.organization import (OrganizationList, OrganizationMember,
from endpoints.api.repository import RepositoryList, RepositoryVisibility, Repository
from endpoints.api.permission import (RepositoryUserPermission, RepositoryTeamPermission,
                                      RepositoryTeamPermissionList, RepositoryUserPermissionList)
from endpoints.api.superuser import SuperUserLogs, SeatUsage, SuperUserList, SuperUserManagement

try:
  app.register_blueprint(api_bp, url_prefix='/api')
@@ -1939,5 +1940,66 @@ class TestUserAuthorizations(ApiTestCase):
    self.getJsonResponse(UserAuthorization, params=dict(access_token_uuid=authorization['uuid']),
                         expected_code=404)


class TestSuperUserLogs(ApiTestCase):
  def test_get_logs(self):
    self.login(ADMIN_ACCESS_USER)

    json = self.getJsonResponse(SuperUserLogs)

    assert 'logs' in json
    assert len(json['logs']) > 0


class TestSuperUserList(ApiTestCase):
  def test_get_users(self):
    self.login(ADMIN_ACCESS_USER)

    json = self.getJsonResponse(SuperUserList)

    assert 'users' in json
    assert len(json['users']) > 0


class TestSuperUserManagement(ApiTestCase):
  def test_get_user(self):
    self.login(ADMIN_ACCESS_USER)

    json = self.getJsonResponse(SuperUserManagement, params=dict(username='freshuser'))
    self.assertEquals('freshuser', json['username'])
    self.assertEquals('no@thanks.com', json['email'])
    self.assertEquals(False, json['super_user'])

  def test_delete_user(self):
    self.login(ADMIN_ACCESS_USER)

    # Verify the user exists.
    json = self.getJsonResponse(SuperUserManagement, params=dict(username='freshuser'))
    self.assertEquals('freshuser', json['username'])

    # Delete the user.
    self.deleteResponse(SuperUserManagement, params=dict(username='freshuser'), expected_code=204)

    # Verify the user no longer exists.
    self.getResponse(SuperUserManagement, params=dict(username='freshuser'), expected_code=404)

  def test_update_user(self):
    self.login(ADMIN_ACCESS_USER)

    # Verify the user exists.
    json = self.getJsonResponse(SuperUserManagement, params=dict(username='freshuser'))
    self.assertEquals('freshuser', json['username'])
    self.assertEquals('no@thanks.com', json['email'])

    # Update the user.
    self.putJsonResponse(SuperUserManagement, params=dict(username='freshuser'), data=dict(email='foo@bar.com'))

    # Verify the user was updated.
    json = self.getJsonResponse(SuperUserManagement, params=dict(username='freshuser'))
    self.assertEquals('freshuser', json['username'])
    self.assertEquals('foo@bar.com', json['email'])


if __name__ == '__main__':
  unittest.main()
@@ -2,6 +2,9 @@ import unittest
import json
import time

from functools import wraps

from app import app
from initdb import setup_database_for_testing, finished_database_for_testing
from data.queue import WorkQueue
@@ -9,12 +12,47 @@
QUEUE_NAME = 'testqueuename'


class SaveLastCountReporter(object):
  def __init__(self):
    self.currently_processing = None
    self.running_count = None
    self.total = None

  def __call__(self, currently_processing, running_count, total_jobs):
    self.currently_processing = currently_processing
    self.running_count = running_count
    self.total = total_jobs


class AutoUpdatingQueue(object):
  def __init__(self, queue_to_wrap):
    self._queue = queue_to_wrap

  def _wrapper(self, func):
    @wraps(func)
    def wrapper(*args, **kwargs):
      to_return = func(*args, **kwargs)
      self._queue.update_metrics()
      return to_return
    return wrapper

  def __getattr__(self, attr_name):
    method_or_attr = getattr(self._queue, attr_name)
    if callable(method_or_attr):
      return self._wrapper(method_or_attr)
    else:
      return method_or_attr


class QueueTestCase(unittest.TestCase):
  TEST_MESSAGE_1 = json.dumps({'data': 1})
  TEST_MESSAGE_2 = json.dumps({'data': 2})

  def setUp(self):
-   self.queue = WorkQueue(QUEUE_NAME)
+   self.reporter = SaveLastCountReporter()
+   self.transaction_factory = app.config['DB_TRANSACTION_FACTORY']
+   self.queue = AutoUpdatingQueue(WorkQueue(QUEUE_NAME, self.transaction_factory,
+                                            reporter=self.reporter))
    setup_database_for_testing(self)

  def tearDown(self):
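The reporter wired in above is just a callable taking (currently_processing, running_count, total_jobs), invoked by WorkQueue.update_metrics(). A minimal sketch exercising that contract outside the test harness, using only the class defined above (values illustrative):

```python
# The reporter contract used by WorkQueue.update_metrics(): a plain
# callable. SaveLastCountReporter simply records the last values it saw.
reporter = SaveLastCountReporter()
reporter(False, 0, 2)  # nothing being processed, two distinct jobs queued
print(reporter.currently_processing, reporter.running_count, reporter.total)
# -> False 0 2
```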
@@ -23,33 +61,57 @@ class QueueTestCase(unittest.TestCase):

class TestQueue(QueueTestCase):
  def test_same_canonical_names(self):
    self.assertEqual(self.reporter.currently_processing, None)
    self.assertEqual(self.reporter.running_count, None)
    self.assertEqual(self.reporter.total, None)

    self.queue.put(['abc', 'def'], self.TEST_MESSAGE_1)
    self.queue.put(['abc', 'def'], self.TEST_MESSAGE_2)
    self.assertEqual(self.reporter.currently_processing, False)
    self.assertEqual(self.reporter.running_count, 0)
    self.assertEqual(self.reporter.total, 1)

    one = self.queue.get()
    self.assertNotEqual(None, one)
    self.assertEqual(self.TEST_MESSAGE_1, one.body)
    self.assertEqual(self.reporter.currently_processing, True)
    self.assertEqual(self.reporter.running_count, 1)
    self.assertEqual(self.reporter.total, 1)

    two_fail = self.queue.get()
    self.assertEqual(None, two_fail)
    self.assertEqual(self.reporter.running_count, 1)
    self.assertEqual(self.reporter.total, 1)

    self.queue.complete(one)
    self.assertEqual(self.reporter.currently_processing, False)
    self.assertEqual(self.reporter.running_count, 0)
    self.assertEqual(self.reporter.total, 1)

    two = self.queue.get()
    self.assertNotEqual(None, two)
    self.assertEqual(self.reporter.currently_processing, True)
    self.assertEqual(self.TEST_MESSAGE_2, two.body)
    self.assertEqual(self.reporter.running_count, 1)
    self.assertEqual(self.reporter.total, 1)

  def test_different_canonical_names(self):
    self.queue.put(['abc', 'def'], self.TEST_MESSAGE_1)
    self.queue.put(['abc', 'ghi'], self.TEST_MESSAGE_2)
    self.assertEqual(self.reporter.running_count, 0)
    self.assertEqual(self.reporter.total, 2)

    one = self.queue.get()
    self.assertNotEqual(None, one)
    self.assertEqual(self.TEST_MESSAGE_1, one.body)
    self.assertEqual(self.reporter.running_count, 1)
    self.assertEqual(self.reporter.total, 2)

    two = self.queue.get()
    self.assertNotEqual(None, two)
    self.assertEqual(self.TEST_MESSAGE_2, two.body)
    self.assertEqual(self.reporter.running_count, 2)
    self.assertEqual(self.reporter.total, 2)

  def test_canonical_name(self):
    self.queue.put(['abc', 'def'], self.TEST_MESSAGE_1)
@@ -63,23 +125,32 @@ class TestQueue(QueueTestCase):

  def test_expiration(self):
    self.queue.put(['abc', 'def'], self.TEST_MESSAGE_1)
    self.assertEqual(self.reporter.running_count, 0)
    self.assertEqual(self.reporter.total, 1)

    one = self.queue.get(processing_time=0.5)
    self.assertNotEqual(None, one)
    self.assertEqual(self.reporter.running_count, 1)
    self.assertEqual(self.reporter.total, 1)

    one_fail = self.queue.get()
    self.assertEqual(None, one_fail)

    time.sleep(1)
    self.queue.update_metrics()
    self.assertEqual(self.reporter.running_count, 0)
    self.assertEqual(self.reporter.total, 1)

    one_again = self.queue.get()
    self.assertNotEqual(None, one_again)
    self.assertEqual(self.reporter.running_count, 1)
    self.assertEqual(self.reporter.total, 1)

  def test_specialized_queue(self):
    self.queue.put(['abc', 'def'], self.TEST_MESSAGE_1)
    self.queue.put(['def', 'def'], self.TEST_MESSAGE_2)

-   my_queue = WorkQueue(QUEUE_NAME, ['def'])
+   my_queue = AutoUpdatingQueue(WorkQueue(QUEUE_NAME, self.transaction_factory, ['def']))

    two = my_queue.get()
    self.assertNotEqual(None, two)
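test_expiration above exercises the lease semantics of WorkQueue.get(processing_time=...): an item handed to one consumer stays invisible until its lease lapses, then becomes available again. A hedged sketch of the same flow outside the test (assumes a `queue` built as in setUp(); timing values are illustrative):

```python
# Sketch of the visibility-timeout flow exercised by test_expiration.
# Assumes `queue` is an AutoUpdatingQueue-wrapped WorkQueue as in setUp().
queue.put(['abc', 'def'], json.dumps({'data': 1}))

item = queue.get(processing_time=0.5)  # lease the item for half a second
assert queue.get() is None             # leased items are invisible to other consumers

time.sleep(1)                          # let the lease lapse
queue.update_metrics()                 # the item now counts as available again
assert queue.get() is not None         # and is redelivered for another attempt
```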
@@ -1,3 +1,5 @@
from datetime import datetime, timedelta

from config import DefaultConfig
from test.testlogs import TestBuildLogs

@@ -25,7 +27,16 @@ class TestConfig(DefaultConfig):
  STORAGE_TYPE = 'FakeStorage'

+ BUILDLOGS_MODULE_AND_CLASS = ('test.testlogs', 'testlogs.TestBuildLogs')
- BUILDLOGS_OPTIONS = ['logs.quay.io', 'devtable', 'building',
-                      'deadbeef-dead-beef-dead-beefdeadbeef']
+ BUILDLOGS_OPTIONS = ['devtable', 'building', 'deadbeef-dead-beef-dead-beefdeadbeef']

  USERFILES_TYPE = 'FakeUserfiles'

  FEATURE_SUPER_USERS = True
  FEATURE_BILLING = True
  SUPER_USERS = ['devtable']

  LICENSE_USER_LIMIT = 500
  LICENSE_EXPIRATION = datetime.now() + timedelta(weeks=520)
  LICENSE_EXPIRATION_WARNING = datetime.now() + timedelta(weeks=520)

  FEATURE_GITHUB_BUILD = True
38 tools/createlicense.py Normal file
@@ -0,0 +1,38 @@
import argparse
import pickle

from Crypto.PublicKey import RSA
from datetime import datetime, timedelta


def encrypt(message, output_filename):
  # The license is "encrypted" with the RSA *private* key (a raw private-key
  # transform, signature-style), so holders of the public key can recover and
  # verify it. PyCrypto exposes the private-key operation as decrypt().
  private_key_file = 'conf/stack/license_key'
  with open(private_key_file, 'r') as private_key:
    encryptor = RSA.importKey(private_key.read())  # importKey expects the key material, not a file object

  encrypted_data = encryptor.decrypt(message)

  with open(output_filename, 'wb') as encrypted_file:
    encrypted_file.write(encrypted_data)


parser = argparse.ArgumentParser(description='Create a license file.')
parser.add_argument('--users', type=int, default=20,
                    help='Number of users allowed by the license')
parser.add_argument('--days', type=int, default=30,
                    help='Number of days for which the license is valid')
parser.add_argument('--warn', type=int, default=7,
                    help='Number of days prior to expiration to warn users')
parser.add_argument('--output', type=str, required=True,
                    help='File in which to store the license')

if __name__ == "__main__":
  args = parser.parse_args()
  print('Creating license for %s users for %s days in file: %s' %
        (args.users, args.days, args.output))

  license_data = {
    'LICENSE_EXPIRATION': datetime.utcnow() + timedelta(days=args.days),
    'LICENSE_USER_LIMIT': args.users,
    'LICENSE_EXPIRATION_WARNING': datetime.utcnow() + timedelta(days=(args.days - args.warn)),
  }

  encrypt(pickle.dumps(license_data, 2), args.output)
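The loading side ships only as license.pyc (added elsewhere in this merge), so its implementation is not visible here. A plausible counterpart, under the assumption that it simply reverses the steps above with the public half of the key; the function name and public key path are assumptions:

```python
# Hypothetical loader sketch; Quay ships this logic as license.pyc, so the
# real code is not in this diff. Assumes the file at public_key_path holds
# the public half of the key used by tools/createlicense.py above.
import pickle
from Crypto.PublicKey import RSA

def load_license(license_path, public_key_path='conf/license_pub_key'):
  with open(public_key_path, 'r') as key_file:
    verifier = RSA.importKey(key_file.read())
  with open(license_path, 'rb') as license_file:
    encrypted = license_file.read()
  # The public-key operation reverses the raw private-key transform applied
  # at creation time; PyCrypto's encrypt() returns a 1-tuple.
  return pickle.loads(verifier.encrypt(encrypted, 0)[0])
```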
@@ -2,7 +2,7 @@ from app import stripe
from app import app

from util.invoice import renderInvoiceToHtml
-from util.email import send_invoice_email
+from util.useremails import send_invoice_email

from data import model
@@ -1,7 +1,7 @@
from app import stripe
from app import app

-from util.email import send_confirmation_email
+from util.useremails import send_confirmation_email

from data import model
@@ -26,7 +26,7 @@ class SendToMixpanel(Process):
    self.daemon = True

  def run(self):
-   logger.debug('Starting sender process.')
+   logger.debug('Starting mixpanel sender process.')
    while True:
      mp_request = self._mp_queue.get()
      logger.debug('Got queued mixpanel request.')
76 util/expiration.py Normal file
@@ -0,0 +1,76 @@
import calendar
import sys

from email.utils import formatdate
from apscheduler.scheduler import Scheduler
from datetime import datetime, timedelta

from data import model


class ExpirationScheduler(object):
  def __init__(self, utc_create_notifications_date, utc_terminate_processes_date):
    self._scheduler = Scheduler()
    self._termination_date = utc_terminate_processes_date

    soon = datetime.now() + timedelta(seconds=1)

    if utc_create_notifications_date > datetime.utcnow():
      self._scheduler.add_date_job(model.delete_all_notifications_by_kind, soon,
                                   ['expiring_license'])

      local_notifications_date = self._utc_to_local(utc_create_notifications_date)
      self._scheduler.add_date_job(self._generate_notifications, local_notifications_date)
    else:
      self._scheduler.add_date_job(self._generate_notifications, soon)

    local_termination_date = self._utc_to_local(utc_terminate_processes_date)
    self._scheduler.add_date_job(self._terminate, local_termination_date)

  @staticmethod
  def _format_date(date):
    """ Output an RFC822 date format. """
    if date is None:
      return None
    return formatdate(calendar.timegm(date.utctimetuple()))

  @staticmethod
  def _utc_to_local(utc_dt):
    # Use an integer timestamp to avoid precision loss
    timestamp = calendar.timegm(utc_dt.timetuple())
    local_dt = datetime.fromtimestamp(timestamp)
    return local_dt.replace(microsecond=utc_dt.microsecond)

  def _generate_notifications(self):
    for user in model.get_active_users():
      model.create_unique_notification('expiring_license', user,
                                       {'expires_at': self._format_date(self._termination_date)})

  @staticmethod
  def _terminate():
    sys.exit(1)

  def start(self):
    self._scheduler.start()


class Expiration(object):
  def __init__(self, app=None):
    self.app = app
    if app is not None:
      self.state = self.init_app(app)
    else:
      self.state = None

  def init_app(self, app):
    expiration = ExpirationScheduler(app.config['LICENSE_EXPIRATION_WARNING'],
                                     app.config['LICENSE_EXPIRATION'])
    expiration.start()

    # register extension with app
    app.extensions = getattr(app, 'extensions', {})
    app.extensions['expiration'] = expiration
    return expiration

  def __getattr__(self, name):
    return getattr(self.state, name, None)
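A minimal sketch of wiring this extension, following the Flask-extension pattern the Expiration class implements; the LICENSE_* shapes mirror test/testconfig.py above, but the real app.py wiring is not part of this excerpt:

```python
# Sketch only: the config keys are the ones ExpirationScheduler reads;
# the dates here are placeholders, not the production values.
from datetime import datetime, timedelta
from flask import Flask

from util.expiration import Expiration

app = Flask(__name__)
app.config['LICENSE_EXPIRATION'] = datetime.utcnow() + timedelta(days=30)
app.config['LICENSE_EXPIRATION_WARNING'] = datetime.utcnow() + timedelta(days=23)

# Schedules the warning notifications, then process termination at expiry.
expiration = Expiration(app)
```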
86 util/queuemetrics.py Normal file
@@ -0,0 +1,86 @@
import logging
import boto

from multiprocessing import Process, Queue

logger = logging.getLogger(__name__)


class NullReporter(object):
  def report(self, *args):
    pass


class QueueingCloudWatchReporter(object):
  def __init__(self, request_queue, namespace, need_capacity_name, build_percent_name):
    self._namespace = namespace
    self._need_capacity_name = need_capacity_name
    self._build_percent_name = build_percent_name
    self._put_metrics_queue = request_queue

  def _send_to_queue(self, *args, **kwargs):
    self._put_metrics_queue.put((args, kwargs))

  def report(self, currently_processing, running_count, total_count):
    logger.debug('Worker indicated %s running count and %s total count', running_count,
                 total_count)

    need_capacity_count = total_count - running_count
    self._send_to_queue(self._namespace, self._need_capacity_name, need_capacity_count,
                        unit='Count')

    building_percent = 100 if currently_processing else 0
    self._send_to_queue(self._namespace, self._build_percent_name, building_percent,
                        unit='Percent')


class SendToCloudWatch(Process):
  def __init__(self, request_queue, aws_access_key, aws_secret_key):
    Process.__init__(self)
    self._aws_access_key = aws_access_key
    self._aws_secret_key = aws_secret_key
    self._put_metrics_queue = request_queue
    self.daemon = True

  def run(self):
    logger.debug('Starting cloudwatch sender process.')
    connection = boto.connect_cloudwatch(self._aws_access_key, self._aws_secret_key)
    while True:
      put_metric_args, kwargs = self._put_metrics_queue.get()
      logger.debug('Got queued put metrics request.')
      connection.put_metric_data(*put_metric_args, **kwargs)


class QueueMetrics(object):
  def __init__(self, app=None):
    self.app = app
    if app is not None:
      self.state = self.init_app(app)
    else:
      self.state = None

  def init_app(self, app):
    analytics_type = app.config.get('QUEUE_METRICS_TYPE', 'Null')

    if analytics_type == 'CloudWatch':
      access_key = app.config.get('QUEUE_METRICS_AWS_ACCESS_KEY')
      secret_key = app.config.get('QUEUE_METRICS_AWS_SECRET_KEY')
      namespace = app.config.get('QUEUE_METRICS_NAMESPACE')
      req_capacity_name = app.config.get('QUEUE_METRICS_CAPACITY_SHORTAGE_NAME')
      build_percent_name = app.config.get('QUEUE_METRICS_BUILD_PERCENT_NAME')

      request_queue = Queue()
      reporter = QueueingCloudWatchReporter(request_queue, namespace, req_capacity_name,
                                            build_percent_name)
      sender = SendToCloudWatch(request_queue, access_key, secret_key)
      sender.start()
    else:
      reporter = NullReporter()

    # register extension with app
    app.extensions = getattr(app, 'extensions', {})
    app.extensions['queuemetrics'] = reporter
    return reporter

  def __getattr__(self, name):
    return getattr(self.state, name, None)
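A sketch of the configuration that selects the CloudWatch path in init_app() above, given a Flask app; the key names are the ones the module reads, while the values are placeholders:

```python
# Sketch: configuration consumed by QueueMetrics.init_app() above.
# Key names come from the module; values here are placeholders only.
app.config.update(
  QUEUE_METRICS_TYPE='CloudWatch',  # any other value falls back to NullReporter
  QUEUE_METRICS_AWS_ACCESS_KEY='AKIA...',
  QUEUE_METRICS_AWS_SECRET_KEY='...',
  QUEUE_METRICS_NAMESPACE='quay/queues',
  QUEUE_METRICS_CAPACITY_SHORTAGE_NAME='NeedCapacity',
  QUEUE_METRICS_BUILD_PERCENT_NAME='BuildPercent',
)

queue_metrics = QueueMetrics(app)
# Queues a 'NeedCapacity' value of 2 and a 'BuildPercent' value of 100:
queue_metrics.report(True, 3, 5)
```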
@@ -1,7 +1,16 @@
import re
import string

from unidecode import unidecode


INVALID_PASSWORD_MESSAGE = 'Invalid password, password must be at least ' + \
                           '8 characters and contain no whitespace.'
INVALID_USERNAME_CHARACTERS = r'[^a-z0-9_]'
VALID_CHARACTERS = '_' + string.digits + string.lowercase
MIN_LENGTH = 4
MAX_LENGTH = 30


def validate_email(email_address):
  if re.match(r'[^@]+@[^@]+\.[^@]+', email_address):

@@ -11,13 +20,14 @@ def validate_email(email_address):

def validate_username(username):
  # Based off the restrictions defined in the Docker Registry API spec
- regex_match = (re.search(r'[^a-z0-9_]', username) is None)
+ regex_match = (re.search(INVALID_USERNAME_CHARACTERS, username) is None)
  if not regex_match:
    return (False, 'Username must match expression [a-z0-9_]+')

- length_match = (len(username) >= 4 and len(username) <= 30)
+ length_match = (len(username) >= MIN_LENGTH and len(username) <= MAX_LENGTH)
  if not length_match:
-   return (False, 'Username must be between 4 and 30 characters in length')
+   return (False, 'Username must be between %s and %s characters in length' %
+           (MIN_LENGTH, MAX_LENGTH))

  return (True, '')

@@ -27,3 +37,24 @@ def validate_password(password):
  if re.search(r'\s', password):
    return False
  return len(password) > 7


def _gen_filler_chars(num_filler_chars):
  if num_filler_chars == 0:
    yield ''
  else:
    for char in VALID_CHARACTERS:
      for suffix in _gen_filler_chars(num_filler_chars - 1):
        yield char + suffix


def generate_valid_usernames(input_username):
  normalized = unidecode(input_username).strip().lower()
  prefix = re.sub(INVALID_USERNAME_CHARACTERS, '_', normalized)[:30]

  num_filler_chars = max(0, MIN_LENGTH - len(prefix))

  while num_filler_chars + len(prefix) <= MAX_LENGTH:
    for suffix in _gen_filler_chars(num_filler_chars):
      yield prefix + suffix
    num_filler_chars += 1
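A quick sketch of what generate_valid_usernames yields, useful when mapping external (e.g. LDAP) identities onto valid usernames; the input string below is illustrative:

```python
# Illustrative only: candidate usernames for an identity that needs
# normalization. 'Jo!' becomes the prefix 'jo_' (too short at 3 chars),
# so one filler character from VALID_CHARACTERS is appended.
from itertools import islice

print(list(islice(generate_valid_usernames(u'Jo!'), 5)))
# -> ['jo__', 'jo_0', 'jo_1', 'jo_2', 'jo_3']
```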
@@ -1,35 +0,0 @@
to build and upload the builder to quay

```
curl -s https://get.docker.io/ubuntu/ | sudo sh
sudo apt-get update && sudo apt-get install -y git
git clone https://bitbucket.org/yackob03/quay.git
cd quay
rm Dockerfile
ln -s Dockerfile.buildworker Dockerfile
sudo docker build -t quay.io/quay/builder .
sudo docker push quay.io/quay/builder
```

to run the code from a fresh 14.04 server:

```
sudo apt-get update && sudo apt-get install -y git lxc linux-image-extra-`uname -r`
curl -s https://get.docker.io/ubuntu/ | sudo sh
git clone https://github.com/DevTable/gantryd.git
cd gantryd
cat requirements.system | xargs sudo apt-get install -y
virtualenv --distribute venv
venv/bin/pip install -r requirements.txt
sudo docker login -p 9Y1PX7D3IE4KPSGCIALH17EM5V3ZTMP8CNNHJNXAQ2NJGAS48BDH8J1PUOZ869ML -u 'quay+deploy' -e notused quay.io
```

start the worker

```
cd ~
git clone https://bitbucket.org/yackob03/quayconfig.git
sudo docker pull quay.io/quay/builder
cd ~/gantryd
sudo venv/bin/python gantry.py ../quayconfig/production/gantry.json update builder
```
@@ -1,7 +1,7 @@
import logging
import argparse

-from data.queue import image_diff_queue
+from app import image_diff_queue
from data.model import DataModelException
from endpoints.registry import process_image_changes
from workers.worker import Worker
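The import swaps across the workers (here and below) reflect one refactor: WorkQueue instances now need the app's transaction factory, and optionally a metrics reporter, so they are constructed once in app.py and imported from there. A hedged sketch of that construction; the queue names and reporter wiring are assumptions, since the app.py hunk itself is not in this excerpt:

```python
# Sketch of the app-level queue construction implied by the new imports.
# Queue names and the reporter hookup are illustrative; the actual app.py
# change is not part of this diff excerpt.
from data.queue import WorkQueue

transaction_factory = app.config['DB_TRANSACTION_FACTORY']

image_diff_queue = WorkQueue('imagediff', transaction_factory)
dockerfile_build_queue = WorkQueue('dockerfilebuild', transaction_factory,
                                   reporter=queue_metrics.report)
webhook_queue = WorkQueue('webhook', transaction_factory)
```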
@@ -1,3 +1,7 @@
import logging.config

logging.config.fileConfig('conf/logging.conf', disable_existing_loggers=False)

import logging
import daemon
import argparse

@@ -18,20 +22,13 @@ from threading import Event
from uuid import uuid4
from collections import defaultdict

-from data.queue import dockerfile_build_queue
from data import model
from workers.worker import Worker, WorkerUnhealthyException, JobException
-from app import userfiles as user_files, build_logs, sentry
+from app import userfiles as user_files, build_logs, sentry, dockerfile_build_queue
from util.safetar import safe_extractall
from util.dockerfileparse import parse_dockerfile, ParsedDockerfile, serialize_dockerfile

-root_logger = logging.getLogger('')
-root_logger.setLevel(logging.DEBUG)
-
-FORMAT = '%(asctime)-15s - %(levelname)s - %(pathname)s - %(funcName)s - %(message)s'
-formatter = logging.Formatter(FORMAT)

logger = logging.getLogger(__name__)

TIMEOUT_PERIOD_MINUTES = 20
@@ -559,8 +556,6 @@ parser.add_argument('--cachegb', default=20, type=float,
                    help='Maximum cache size in gigabytes.')
args = parser.parse_args()

-logging.config.fileConfig('conf/logging.conf', disable_existing_loggers=False)
-
worker = DockerfileBuildWorker(args.cachegb, dockerfile_build_queue,
                               reservation_seconds=RESERVATION_TIME)
worker.start(start_status_server_port=8000)
@@ -3,7 +3,7 @@ import argparse
import requests
import json

-from data.queue import webhook_queue
+from app import webhook_queue
from workers.worker import Worker
@@ -124,6 +124,9 @@ class Worker(object):
    if not self._stop.is_set():
      logger.debug('No more work.')

  def update_queue_metrics(self):
    self._queue.update_metrics()

  def start(self, start_status_server_port=None):
    if start_status_server_port is not None:
      # Start a status server on a thread

@@ -140,6 +143,7 @@ class Worker(object):
    self._sched.start()
    self._sched.add_interval_job(self.poll_queue, seconds=self._poll_period_seconds,
                                 start_date=soon)
    self._sched.add_interval_job(self.update_queue_metrics, seconds=60, start_date=soon)
    self._sched.add_interval_job(self.watchdog, seconds=self._watchdog_period_seconds)

    signal.signal(signal.SIGTERM, self.terminate)